{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.9976544175136826, |
|
"eval_steps": 200, |
|
"global_step": 1917, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.0416666666666667e-07, |
|
"loss": 2.8182, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.208333333333334e-07, |
|
"loss": 3.0356, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.0416666666666667e-06, |
|
"loss": 3.1992, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5625e-06, |
|
"loss": 2.8777, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.0833333333333334e-06, |
|
"loss": 2.562, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.604166666666667e-06, |
|
"loss": 2.2127, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.125e-06, |
|
"loss": 1.8877, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.6458333333333333e-06, |
|
"loss": 1.7507, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 1.7018, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.6875000000000004e-06, |
|
"loss": 1.6585, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.208333333333334e-06, |
|
"loss": 1.6698, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.729166666666667e-06, |
|
"loss": 1.6659, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.25e-06, |
|
"loss": 1.6713, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.770833333333334e-06, |
|
"loss": 1.5587, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.291666666666667e-06, |
|
"loss": 1.599, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 7.8125e-06, |
|
"loss": 1.6189, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 1.5679, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.854166666666667e-06, |
|
"loss": 1.5739, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.375000000000001e-06, |
|
"loss": 1.6119, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.895833333333334e-06, |
|
"loss": 1.6028, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.0416666666666668e-05, |
|
"loss": 1.5658, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.0937500000000002e-05, |
|
"loss": 1.5361, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.1458333333333333e-05, |
|
"loss": 1.5203, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.1979166666666669e-05, |
|
"loss": 1.5684, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.25e-05, |
|
"loss": 1.6045, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.3020833333333334e-05, |
|
"loss": 1.5143, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.3541666666666668e-05, |
|
"loss": 1.5401, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.4062500000000001e-05, |
|
"loss": 1.6193, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.4583333333333333e-05, |
|
"loss": 1.5448, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.5104166666666668e-05, |
|
"loss": 1.5015, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.5625e-05, |
|
"loss": 1.5301, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.6145833333333334e-05, |
|
"loss": 1.5092, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 1.5311, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.71875e-05, |
|
"loss": 1.4993, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7708333333333335e-05, |
|
"loss": 1.477, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.8229166666666668e-05, |
|
"loss": 1.4945, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.8750000000000002e-05, |
|
"loss": 1.4925, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9270833333333335e-05, |
|
"loss": 1.5362, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.979166666666667e-05, |
|
"loss": 1.455, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9999850743593963e-05, |
|
"loss": 1.48, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9998938637247684e-05, |
|
"loss": 1.4643, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"eval_loss": 1.5671130418777466, |
|
"eval_runtime": 157.1346, |
|
"eval_samples_per_second": 10.691, |
|
"eval_steps_per_second": 0.115, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.999719742032076e-05, |
|
"loss": 1.4893, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9994627237194654e-05, |
|
"loss": 1.5084, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9991228300988586e-05, |
|
"loss": 1.3884, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9987000893541863e-05, |
|
"loss": 1.4807, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9981945365390517e-05, |
|
"loss": 1.4622, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.997606213573823e-05, |
|
"loss": 1.4563, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.996935169242157e-05, |
|
"loss": 1.5046, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9961814591869558e-05, |
|
"loss": 1.4768, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.99534514590575e-05, |
|
"loss": 1.482, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9944262987455188e-05, |
|
"loss": 1.4478, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9934249938969396e-05, |
|
"loss": 1.4305, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.992341314388069e-05, |
|
"loss": 1.4418, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9911753500774592e-05, |
|
"loss": 1.443, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9899271976467058e-05, |
|
"loss": 1.4759, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.988596960592432e-05, |
|
"loss": 1.4295, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9871847492177066e-05, |
|
"loss": 1.4307, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9856906806228988e-05, |
|
"loss": 1.3803, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.984114878695964e-05, |
|
"loss": 1.4525, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9824574741021765e-05, |
|
"loss": 1.4445, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9807186042732908e-05, |
|
"loss": 1.4506, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9788984133961482e-05, |
|
"loss": 1.4059, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.976997052400719e-05, |
|
"loss": 1.3565, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9750146789475885e-05, |
|
"loss": 1.5339, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9729514574148842e-05, |
|
"loss": 1.3888, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.970807558884644e-05, |
|
"loss": 1.3368, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9685831611286312e-05, |
|
"loss": 1.3662, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9662784485935945e-05, |
|
"loss": 1.3689, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9638936123859715e-05, |
|
"loss": 1.3697, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.961428850256044e-05, |
|
"loss": 1.4146, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9588843665815404e-05, |
|
"loss": 1.3581, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9562603723506873e-05, |
|
"loss": 1.3546, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9535570851447166e-05, |
|
"loss": 1.4145, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9507747291198213e-05, |
|
"loss": 1.3306, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9479135349885706e-05, |
|
"loss": 1.4176, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.944973740000778e-05, |
|
"loss": 1.3604, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9419555879238295e-05, |
|
"loss": 1.3808, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.9388593290224686e-05, |
|
"loss": 1.3122, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9356852200380466e-05, |
|
"loss": 1.334, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.9324335241672327e-05, |
|
"loss": 1.35, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9291045110401897e-05, |
|
"loss": 1.3793, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"eval_loss": 1.551980972290039, |
|
"eval_runtime": 156.7574, |
|
"eval_samples_per_second": 10.717, |
|
"eval_steps_per_second": 0.115, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.925698456698216e-05, |
|
"loss": 1.2668, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.922215643570857e-05, |
|
"loss": 1.3094, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.918656360452486e-05, |
|
"loss": 1.3081, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9150209024783564e-05, |
|
"loss": 1.3432, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9113095711001307e-05, |
|
"loss": 1.2616, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9075226740608825e-05, |
|
"loss": 1.3454, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9036605253695804e-05, |
|
"loss": 1.3153, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.899723445275049e-05, |
|
"loss": 1.2732, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.895711760239413e-05, |
|
"loss": 1.3395, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8916258029110305e-05, |
|
"loss": 1.2987, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.887465912096906e-05, |
|
"loss": 1.2983, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.883232432734599e-05, |
|
"loss": 1.3718, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8789257158636202e-05, |
|
"loss": 1.271, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8745461185963253e-05, |
|
"loss": 1.2844, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8700940040883016e-05, |
|
"loss": 1.2939, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8655697415082556e-05, |
|
"loss": 1.3428, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.8609737060074016e-05, |
|
"loss": 1.298, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.8563062786883553e-05, |
|
"loss": 1.3309, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.851567846573531e-05, |
|
"loss": 1.2789, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8467588025730506e-05, |
|
"loss": 1.2516, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.8418795454521644e-05, |
|
"loss": 1.2503, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.8369304797981843e-05, |
|
"loss": 1.3005, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8319120159869354e-05, |
|
"loss": 1.2923, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.826824570148729e-05, |
|
"loss": 1.3106, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.8216685641338562e-05, |
|
"loss": 1.2344, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.8164444254776078e-05, |
|
"loss": 1.2377, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.8111525873648236e-05, |
|
"loss": 1.327, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.8057934885939734e-05, |
|
"loss": 1.2719, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.800367573540771e-05, |
|
"loss": 1.2113, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.7948752921213266e-05, |
|
"loss": 1.2606, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.789317099754841e-05, |
|
"loss": 1.2449, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.78369345732584e-05, |
|
"loss": 1.2485, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7780048311459606e-05, |
|
"loss": 1.2291, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.7722516929152828e-05, |
|
"loss": 1.2958, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.7664345196832164e-05, |
|
"loss": 1.226, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.760553793808945e-05, |
|
"loss": 1.3091, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.7546100029214286e-05, |
|
"loss": 1.2398, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.748603639878968e-05, |
|
"loss": 1.2309, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.74253520272834e-05, |
|
"loss": 1.2148, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7364051946634953e-05, |
|
"loss": 1.2455, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"eval_loss": 1.5388672351837158, |
|
"eval_runtime": 156.7467, |
|
"eval_samples_per_second": 10.718, |
|
"eval_steps_per_second": 0.115, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7302141239838382e-05, |
|
"loss": 1.2279, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7239625040520764e-05, |
|
"loss": 1.2069, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.717650853251651e-05, |
|
"loss": 1.2273, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.711279694943757e-05, |
|
"loss": 1.2578, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.704849557423941e-05, |
|
"loss": 1.2212, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6983609738782993e-05, |
|
"loss": 1.167, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6918144823392634e-05, |
|
"loss": 1.1778, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.685210625640988e-05, |
|
"loss": 1.2402, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.6785499513743383e-05, |
|
"loss": 1.1427, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.671833011841485e-05, |
|
"loss": 1.2951, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.6650603640101058e-05, |
|
"loss": 1.1606, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.6582325694672032e-05, |
|
"loss": 1.1625, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.6513501943725366e-05, |
|
"loss": 1.136, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.6444138094116783e-05, |
|
"loss": 1.1339, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.63742398974869e-05, |
|
"loss": 1.106, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.6303813149784313e-05, |
|
"loss": 1.07, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.623286369078501e-05, |
|
"loss": 1.032, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.616139740360811e-05, |
|
"loss": 1.06, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.6089420214228063e-05, |
|
"loss": 1.0766, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.6016938090983254e-05, |
|
"loss": 1.0617, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.5943957044081117e-05, |
|
"loss": 0.9463, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.5870483125099753e-05, |
|
"loss": 0.9983, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.579652242648616e-05, |
|
"loss": 1.0127, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.5722081081051032e-05, |
|
"loss": 0.9263, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.564716526146022e-05, |
|
"loss": 0.9849, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.5571781179722912e-05, |
|
"loss": 0.9191, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.5495935086676533e-05, |
|
"loss": 0.8851, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.541963327146841e-05, |
|
"loss": 0.9047, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.534288206103429e-05, |
|
"loss": 0.8974, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.526568781957371e-05, |
|
"loss": 0.8163, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.5188056948022286e-05, |
|
"loss": 0.8438, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.510999588352092e-05, |
|
"loss": 0.9428, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.503151109888207e-05, |
|
"loss": 0.8177, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.4952609102052997e-05, |
|
"loss": 0.8081, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.4873296435576145e-05, |
|
"loss": 0.8396, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.479357967604663e-05, |
|
"loss": 0.8492, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.4713465433566892e-05, |
|
"loss": 0.7578, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.463296035119862e-05, |
|
"loss": 0.8193, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.4552071104411874e-05, |
|
"loss": 0.7517, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.4470804400531589e-05, |
|
"loss": 0.7817, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"eval_loss": 1.6069754362106323, |
|
"eval_runtime": 156.7669, |
|
"eval_samples_per_second": 10.717, |
|
"eval_steps_per_second": 0.115, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.4389166978181374e-05, |
|
"loss": 0.755, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.4307165606724777e-05, |
|
"loss": 0.7288, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.4224807085703943e-05, |
|
"loss": 0.748, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.4142098244275809e-05, |
|
"loss": 0.7248, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.4059045940645834e-05, |
|
"loss": 0.7504, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3975657061499316e-05, |
|
"loss": 0.7172, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3891938521430349e-05, |
|
"loss": 0.7421, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3807897262368453e-05, |
|
"loss": 0.7366, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.372354025300297e-05, |
|
"loss": 0.7313, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3638874488205213e-05, |
|
"loss": 0.747, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3553906988448451e-05, |
|
"loss": 0.6819, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.346864479922577e-05, |
|
"loss": 0.7418, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.338309499046587e-05, |
|
"loss": 0.7585, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3297264655946816e-05, |
|
"loss": 0.7039, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3211160912707837e-05, |
|
"loss": 0.7319, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3124790900459169e-05, |
|
"loss": 0.7456, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3038161780990036e-05, |
|
"loss": 0.6698, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.2951280737574799e-05, |
|
"loss": 0.6732, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.286415497437732e-05, |
|
"loss": 0.717, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.2776791715853585e-05, |
|
"loss": 0.682, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.2689198206152657e-05, |
|
"loss": 0.7005, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.2601381708516007e-05, |
|
"loss": 0.7217, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.251334950467522e-05, |
|
"loss": 0.7263, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.2425108894248216e-05, |
|
"loss": 0.6788, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.2336667194133956e-05, |
|
"loss": 0.6699, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.2248031737905732e-05, |
|
"loss": 0.6929, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.215920987520307e-05, |
|
"loss": 0.7046, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.2070208971122282e-05, |
|
"loss": 0.6893, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.1981036405605782e-05, |
|
"loss": 0.6525, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.189169957283011e-05, |
|
"loss": 0.663, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.1802205880592839e-05, |
|
"loss": 0.7718, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.171256274969829e-05, |
|
"loss": 0.6544, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.1622777613342236e-05, |
|
"loss": 0.6781, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.1532857916495504e-05, |
|
"loss": 0.6656, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.144281111528667e-05, |
|
"loss": 0.6576, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.1352644676383782e-05, |
|
"loss": 0.6909, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.1262366076375224e-05, |
|
"loss": 0.6666, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.1171982801149774e-05, |
|
"loss": 0.6861, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.1081502345275857e-05, |
|
"loss": 0.6716, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.0990932211380099e-05, |
|
"loss": 0.6668, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"eval_loss": 1.6513299942016602, |
|
"eval_runtime": 156.7333, |
|
"eval_samples_per_second": 10.719, |
|
"eval_steps_per_second": 0.115, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.0900279909525226e-05, |
|
"loss": 0.6705, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.080955295658731e-05, |
|
"loss": 0.6733, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.0718758875632486e-05, |
|
"loss": 0.6846, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.0627905195293135e-05, |
|
"loss": 0.7057, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.0536999449143606e-05, |
|
"loss": 0.6605, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.0446049175075544e-05, |
|
"loss": 0.6479, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.0355061914672831e-05, |
|
"loss": 0.6846, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.0264045212586266e-05, |
|
"loss": 0.671, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.0173006615907936e-05, |
|
"loss": 0.6168, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.0081953673545432e-05, |
|
"loss": 0.6671, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.990893935595885e-06, |
|
"loss": 0.6657, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.899834952719914e-06, |
|
"loss": 0.6369, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.808784275515526e-06, |
|
"loss": 0.6647, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.717749453892026e-06, |
|
"loss": 0.6537, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.626738036443978e-06, |
|
"loss": 0.6657, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.535757569825266e-06, |
|
"loss": 0.5965, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.444815598123346e-06, |
|
"loss": 0.6714, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.353919662233676e-06, |
|
"loss": 0.6744, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.263077299234433e-06, |
|
"loss": 0.6134, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.172296041761534e-06, |
|
"loss": 0.7298, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.08158341738404e-06, |
|
"loss": 0.6081, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.990946947979955e-06, |
|
"loss": 0.6759, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.900394149112543e-06, |
|
"loss": 0.6457, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.80993252940709e-06, |
|
"loss": 0.6624, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.719569589928353e-06, |
|
"loss": 0.6479, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.629312823558505e-06, |
|
"loss": 0.7002, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.539169714375885e-06, |
|
"loss": 0.6545, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.44914773703438e-06, |
|
"loss": 0.6361, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.35925435614365e-06, |
|
"loss": 0.6112, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.269497025650146e-06, |
|
"loss": 0.6393, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.179883188219052e-06, |
|
"loss": 0.6603, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.090420274617115e-06, |
|
"loss": 0.6318, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.001115703096508e-06, |
|
"loss": 0.6195, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.911976878779696e-06, |
|
"loss": 0.6316, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.823011193045413e-06, |
|
"loss": 0.6545, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.73422602291576e-06, |
|
"loss": 0.6382, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.645628730444524e-06, |
|
"loss": 0.5979, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.557226662106679e-06, |
|
"loss": 0.6102, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.469027148189268e-06, |
|
"loss": 0.634, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.3810375021835275e-06, |
|
"loss": 0.6304, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"eval_loss": 1.6570870876312256, |
|
"eval_runtime": 156.7819, |
|
"eval_samples_per_second": 10.716, |
|
"eval_steps_per_second": 0.115, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.293265020178495e-06, |
|
"loss": 0.6332, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.205716980255985e-06, |
|
"loss": 0.6291, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.118400641887116e-06, |
|
"loss": 0.6478, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.031323245330337e-06, |
|
"loss": 0.6427, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.944492011031086e-06, |
|
"loss": 0.6246, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.857914139023058e-06, |
|
"loss": 0.6024, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.771596808331175e-06, |
|
"loss": 0.6071, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.685547176376321e-06, |
|
"loss": 0.6288, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.59977237838183e-06, |
|
"loss": 0.6176, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.5142795267818505e-06, |
|
"loss": 0.6029, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.4290757106315674e-06, |
|
"loss": 0.6083, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.344167995019395e-06, |
|
"loss": 0.6416, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.2595634204811215e-06, |
|
"loss": 0.6119, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.17526900241614e-06, |
|
"loss": 0.5819, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.091291730505684e-06, |
|
"loss": 0.6075, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.007638568133303e-06, |
|
"loss": 0.5938, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.924316451807407e-06, |
|
"loss": 0.5575, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.841332290586126e-06, |
|
"loss": 0.6598, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.758692965504391e-06, |
|
"loss": 0.5775, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.676405329003387e-06, |
|
"loss": 0.5695, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.594476204362303e-06, |
|
"loss": 0.5571, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.512912385132605e-06, |
|
"loss": 0.53, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.431720634574663e-06, |
|
"loss": 0.5683, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.350907685096983e-06, |
|
"loss": 0.5176, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.270480237697927e-06, |
|
"loss": 0.4881, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.190444961410082e-06, |
|
"loss": 0.5312, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.110808492747258e-06, |
|
"loss": 0.5465, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.031577435154207e-06, |
|
"loss": 0.5183, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.952758358459037e-06, |
|
"loss": 0.4748, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.874357798328464e-06, |
|
"loss": 0.4878, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.796382255725866e-06, |
|
"loss": 0.5076, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.7188381963722385e-06, |
|
"loss": 0.446, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.641732050210032e-06, |
|
"loss": 0.4942, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.565070210870015e-06, |
|
"loss": 0.4677, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.488859035141071e-06, |
|
"loss": 0.4091, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.413104842443149e-06, |
|
"loss": 0.4909, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.3378139143032095e-06, |
|
"loss": 0.4623, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.262992493834386e-06, |
|
"loss": 0.4044, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.188646785218316e-06, |
|
"loss": 0.4041, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.114782953190648e-06, |
|
"loss": 0.4784, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"eval_loss": 1.7886532545089722, |
|
"eval_runtime": 156.7662, |
|
"eval_samples_per_second": 10.717, |
|
"eval_steps_per_second": 0.115, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.0414071225299235e-06, |
|
"loss": 0.3985, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.968525377549657e-06, |
|
"loss": 0.4067, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.896143761593862e-06, |
|
"loss": 0.4328, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.824268276535917e-06, |
|
"loss": 0.3958, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.752904882280899e-06, |
|
"loss": 0.3838, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.682059496271383e-06, |
|
"loss": 0.4085, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.6117379929967857e-06, |
|
"loss": 0.3872, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.5419462035062313e-06, |
|
"loss": 0.3611, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.472689914925056e-06, |
|
"loss": 0.402, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.4039748699749287e-06, |
|
"loss": 0.3653, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.3358067664976866e-06, |
|
"loss": 0.3987, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.2681912569828445e-06, |
|
"loss": 0.3804, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.2011339480989136e-06, |
|
"loss": 0.371, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.134640400228479e-06, |
|
"loss": 0.3803, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.0687161270071465e-06, |
|
"loss": 0.3564, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.003366594866345e-06, |
|
"loss": 0.3853, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.938597222580063e-06, |
|
"loss": 0.379, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.874413380815506e-06, |
|
"loss": 0.3617, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.8108203916877753e-06, |
|
"loss": 0.3623, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.7478235283185504e-06, |
|
"loss": 0.4036, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.685428014398859e-06, |
|
"loss": 0.4094, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.623639023755902e-06, |
|
"loss": 0.3709, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.562461679924072e-06, |
|
"loss": 0.3435, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.501901055720074e-06, |
|
"loss": 0.3846, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.4419621728223297e-06, |
|
"loss": 0.3511, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.382650001354543e-06, |
|
"loss": 0.3472, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.3239694594736074e-06, |
|
"loss": 0.3691, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.2659254129617712e-06, |
|
"loss": 0.3452, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.2085226748231792e-06, |
|
"loss": 0.3513, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.1517660048847765e-06, |
|
"loss": 0.3793, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.0956601094016217e-06, |
|
"loss": 0.3861, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.0402096406666562e-06, |
|
"loss": 0.3409, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9854191966249183e-06, |
|
"loss": 0.3369, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.9312933204922947e-06, |
|
"loss": 0.3651, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.8778365003787934e-06, |
|
"loss": 0.3489, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8250531689163997e-06, |
|
"loss": 0.3641, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7729477028915064e-06, |
|
"loss": 0.3247, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.7215244228820027e-06, |
|
"loss": 0.3408, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.6707875928990059e-06, |
|
"loss": 0.3997, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6207414200333027e-06, |
|
"loss": 0.3548, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"eval_loss": 1.8198195695877075, |
|
"eval_runtime": 156.7807, |
|
"eval_samples_per_second": 10.716, |
|
"eval_steps_per_second": 0.115, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.571390054106482e-06, |
|
"loss": 0.331, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5227375873268445e-06, |
|
"loss": 0.3458, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4747880539500637e-06, |
|
"loss": 0.3405, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4275454299446834e-06, |
|
"loss": 0.3551, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.381013632662418e-06, |
|
"loss": 0.3642, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.335196520513332e-06, |
|
"loss": 0.3685, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2900978926458985e-06, |
|
"loss": 0.3565, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2457214886319757e-06, |
|
"loss": 0.3412, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2020709881567184e-06, |
|
"loss": 0.3583, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.15915001071347e-06, |
|
"loss": 0.367, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.1169621153036204e-06, |
|
"loss": 0.3557, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0755108001415038e-06, |
|
"loss": 0.3775, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0347995023643198e-06, |
|
"loss": 0.3633, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.948315977471412e-07, |
|
"loss": 0.3239, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.556104004229717e-07, |
|
"loss": 0.3703, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.171391626079629e-07, |
|
"loss": 0.3527, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.794210743317233e-07, |
|
"loss": 0.3314, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.42459263172809e-07, |
|
"loss": 0.3622, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.062567939993838e-07, |
|
"loss": 0.3617, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.708166687150842e-07, |
|
"loss": 0.3334, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.361418260100984e-07, |
|
"loss": 0.3574, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.022351411174866e-07, |
|
"loss": 0.3457, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.690994255747774e-07, |
|
"loss": 0.364, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.367374269908233e-07, |
|
"loss": 0.3171, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.051518288179847e-07, |
|
"loss": 0.3757, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.74345250129601e-07, |
|
"loss": 0.3497, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.443202454028306e-07, |
|
"loss": 0.3464, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.150793043068269e-07, |
|
"loss": 0.3905, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.866248514963001e-07, |
|
"loss": 0.3562, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.589592464104609e-07, |
|
"loss": 0.369, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.3208478307738e-07, |
|
"loss": 0.3614, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.0600368992376074e-07, |
|
"loss": 0.3555, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.8071812959016965e-07, |
|
"loss": 0.3618, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.5623019875169916e-07, |
|
"loss": 0.3667, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.325419279441211e-07, |
|
"loss": 0.364, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.096552813955045e-07, |
|
"loss": 0.3693, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.875721568633527e-07, |
|
"loss": 0.3265, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.6629438547723484e-07, |
|
"loss": 0.3447, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.458237315869527e-07, |
|
"loss": 0.3602, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.2616189261623568e-07, |
|
"loss": 0.3506, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"eval_loss": 1.8299107551574707, |
|
"eval_runtime": 156.7929, |
|
"eval_samples_per_second": 10.715, |
|
"eval_steps_per_second": 0.115, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.0731049892199873e-07, |
|
"loss": 0.3721, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.8927111365914297e-07, |
|
"loss": 0.3383, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.7204523265095252e-07, |
|
"loss": 0.3455, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.55634284265046e-07, |
|
"loss": 0.3635, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.400396292949513e-07, |
|
"loss": 0.3151, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.2526256084725351e-07, |
|
"loss": 0.3347, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.1130430423438642e-07, |
|
"loss": 0.3795, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.816601687301585e-08, |
|
"loss": 0.3486, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.584878818807652e-08, |
|
"loss": 0.3646, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.435363952243313e-08, |
|
"loss": 0.3665, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.368152405219042e-08, |
|
"loss": 0.3876, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.3833326707656284e-08, |
|
"loss": 0.3748, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.4809864099965014e-08, |
|
"loss": 0.3486, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.661188445336361e-08, |
|
"loss": 0.3427, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.9240067543165883e-08, |
|
"loss": 0.3381, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.2695024639390884e-08, |
|
"loss": 0.3717, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.697729845607121e-08, |
|
"loss": 0.3411, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.20873631062568e-08, |
|
"loss": 0.3595, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.025624062695247e-09, |
|
"loss": 0.3628, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.792418124214271e-09, |
|
"loss": 0.3785, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.3880133877962617e-09, |
|
"loss": 0.3523, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 8.126092263405217e-10, |
|
"loss": 0.3398, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 6.633627213870064e-11, |
|
"loss": 0.3745, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1917, |
|
"total_flos": 889695087230976.0, |
|
"train_loss": 0.8639222900259289, |
|
"train_runtime": 46368.3285, |
|
"train_samples_per_second": 1.985, |
|
"train_steps_per_second": 0.041 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1917, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"total_flos": 889695087230976.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
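
Note: the JSON above is the trainer_state.json that transformers.Trainer writes out during a run. Training loss and learning rate are logged every 5 optimizer steps (logging_steps) and eval_loss every 200 steps (eval_steps), across 1917 steps (3 epochs). Below is a minimal sketch of how the log history could be inspected; it assumes the file has been saved locally as trainer_state.json, and the printed summary is illustrative rather than part of the original artifact.

    import json

    # Load the trainer state dumped by transformers.Trainer
    # (the local path "trainer_state.json" is an assumption).
    with open("trainer_state.json") as f:
        state = json.load(f)

    # Split the log history into training-loss records and evaluation records;
    # the final summary entry (train_loss, train_runtime, ...) matches neither filter.
    train_log = [e for e in state["log_history"] if "loss" in e]
    eval_log = [e for e in state["log_history"] if "eval_loss" in e]

    print(f"logged training points: {len(train_log)}")
    print(f"last logged training loss: {train_log[-1]['loss']:.4f}")

    # In this run eval_loss bottoms out at step 600 (~1.539) and rises afterwards
    # while the training loss keeps falling, the usual overfitting signature.
    for e in eval_log:
        print(f"step {e['step']:>4}  eval_loss {e['eval_loss']:.4f}")

If a plotting library such as matplotlib is available, the same two lists can be plotted against step to visualise that divergence.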