|
{ |
|
"best_metric": 0.9644592530889907, |
|
"best_model_checkpoint": "swin-tiny-patch4-window7-224-img_orientation/checkpoint-5060", |
|
"epoch": 9.990128331688055, |
|
"eval_steps": 500, |
|
"global_step": 5060, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.940711462450593e-07, |
|
"loss": 1.4592, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.881422924901187e-07, |
|
"loss": 1.4858, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.4822134387351778e-06, |
|
"loss": 1.4896, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9762845849802374e-06, |
|
"loss": 1.4569, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.4703557312252965e-06, |
|
"loss": 1.4498, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.9644268774703556e-06, |
|
"loss": 1.447, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 3.458498023715415e-06, |
|
"loss": 1.4281, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 3.952569169960475e-06, |
|
"loss": 1.4338, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.4466403162055334e-06, |
|
"loss": 1.3824, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.940711462450593e-06, |
|
"loss": 1.3705, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 5.4347826086956525e-06, |
|
"loss": 1.3752, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 5.928853754940711e-06, |
|
"loss": 1.3846, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 6.422924901185772e-06, |
|
"loss": 1.3351, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 6.91699604743083e-06, |
|
"loss": 1.3125, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 7.41106719367589e-06, |
|
"loss": 1.2808, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 7.90513833992095e-06, |
|
"loss": 1.2548, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.399209486166009e-06, |
|
"loss": 1.2701, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.893280632411067e-06, |
|
"loss": 1.2131, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.387351778656126e-06, |
|
"loss": 1.1775, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.881422924901186e-06, |
|
"loss": 1.18, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.0375494071146246e-05, |
|
"loss": 1.1267, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.0869565217391305e-05, |
|
"loss": 1.1209, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.1363636363636365e-05, |
|
"loss": 1.0627, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.1857707509881423e-05, |
|
"loss": 1.0255, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.2351778656126482e-05, |
|
"loss": 1.0156, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.2845849802371543e-05, |
|
"loss": 0.9846, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.3339920948616603e-05, |
|
"loss": 0.9665, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.383399209486166e-05, |
|
"loss": 0.9782, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.432806324110672e-05, |
|
"loss": 0.9253, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.482213438735178e-05, |
|
"loss": 0.9409, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.531620553359684e-05, |
|
"loss": 0.9238, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.58102766798419e-05, |
|
"loss": 0.9089, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.630434782608696e-05, |
|
"loss": 0.862, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.6798418972332018e-05, |
|
"loss": 0.8538, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7292490118577078e-05, |
|
"loss": 0.8107, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.7786561264822134e-05, |
|
"loss": 0.8423, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8280632411067193e-05, |
|
"loss": 0.8111, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8774703557312253e-05, |
|
"loss": 0.8144, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9268774703557312e-05, |
|
"loss": 0.8346, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9762845849802372e-05, |
|
"loss": 0.8434, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.025691699604743e-05, |
|
"loss": 0.7528, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.075098814229249e-05, |
|
"loss": 0.7725, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.124505928853755e-05, |
|
"loss": 0.8127, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.173913043478261e-05, |
|
"loss": 0.7387, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.223320158102767e-05, |
|
"loss": 0.7528, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.272727272727273e-05, |
|
"loss": 0.7394, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.3221343873517785e-05, |
|
"loss": 0.7006, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.3715415019762845e-05, |
|
"loss": 0.7025, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.4209486166007905e-05, |
|
"loss": 0.7039, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.4703557312252964e-05, |
|
"loss": 0.7291, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5197628458498024e-05, |
|
"loss": 0.7329, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.5691699604743087e-05, |
|
"loss": 0.7343, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.6185770750988143e-05, |
|
"loss": 0.718, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.6679841897233206e-05, |
|
"loss": 0.6806, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.7173913043478262e-05, |
|
"loss": 0.6916, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.766798418972332e-05, |
|
"loss": 0.6653, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.8162055335968378e-05, |
|
"loss": 0.6981, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 2.865612648221344e-05, |
|
"loss": 0.7156, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.9150197628458497e-05, |
|
"loss": 0.6568, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 2.964426877470356e-05, |
|
"loss": 0.6272, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.0138339920948616e-05, |
|
"loss": 0.7185, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.063241106719368e-05, |
|
"loss": 0.706, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.1126482213438735e-05, |
|
"loss": 0.6658, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.16205533596838e-05, |
|
"loss": 0.6912, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.2114624505928854e-05, |
|
"loss": 0.7093, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.260869565217392e-05, |
|
"loss": 0.6647, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.310276679841897e-05, |
|
"loss": 0.6608, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.3596837944664036e-05, |
|
"loss": 0.6939, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.409090909090909e-05, |
|
"loss": 0.6768, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.4584980237154155e-05, |
|
"loss": 0.6267, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.507905138339921e-05, |
|
"loss": 0.6497, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.557312252964427e-05, |
|
"loss": 0.6502, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.606719367588933e-05, |
|
"loss": 0.5986, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.656126482213439e-05, |
|
"loss": 0.6067, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.705533596837945e-05, |
|
"loss": 0.5892, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7549407114624506e-05, |
|
"loss": 0.5727, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.804347826086957e-05, |
|
"loss": 0.6368, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.8537549407114625e-05, |
|
"loss": 0.6179, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.903162055335969e-05, |
|
"loss": 0.605, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.9525691699604744e-05, |
|
"loss": 0.6226, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.001976284584981e-05, |
|
"loss": 0.5928, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.051383399209486e-05, |
|
"loss": 0.5718, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.1007905138339926e-05, |
|
"loss": 0.6157, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.150197628458498e-05, |
|
"loss": 0.6212, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.1996047430830045e-05, |
|
"loss": 0.6013, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.24901185770751e-05, |
|
"loss": 0.6258, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.2984189723320164e-05, |
|
"loss": 0.6218, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.347826086956522e-05, |
|
"loss": 0.5553, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.397233201581028e-05, |
|
"loss": 0.5395, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.446640316205534e-05, |
|
"loss": 0.5608, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.4960474308300396e-05, |
|
"loss": 0.5666, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.545454545454546e-05, |
|
"loss": 0.5756, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.5948616600790515e-05, |
|
"loss": 0.5478, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.644268774703557e-05, |
|
"loss": 0.606, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.6936758893280634e-05, |
|
"loss": 0.528, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.743083003952569e-05, |
|
"loss": 0.5618, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.792490118577075e-05, |
|
"loss": 0.5192, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.841897233201581e-05, |
|
"loss": 0.5076, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.891304347826087e-05, |
|
"loss": 0.5003, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.940711462450593e-05, |
|
"loss": 0.5879, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.990118577075099e-05, |
|
"loss": 0.5605, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.8340969040677495, |
|
"eval_loss": 0.3984481990337372, |
|
"eval_runtime": 59.046, |
|
"eval_samples_per_second": 121.99, |
|
"eval_steps_per_second": 3.828, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.995608256477822e-05, |
|
"loss": 0.5479, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.990118577075099e-05, |
|
"loss": 0.518, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.984628897672376e-05, |
|
"loss": 0.5083, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.9791392182696533e-05, |
|
"loss": 0.5253, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.97364953886693e-05, |
|
"loss": 0.5205, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.9681598594642076e-05, |
|
"loss": 0.531, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.9626701800614844e-05, |
|
"loss": 0.5069, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.957180500658762e-05, |
|
"loss": 0.4927, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.9516908212560386e-05, |
|
"loss": 0.509, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.946201141853316e-05, |
|
"loss": 0.5099, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.940711462450593e-05, |
|
"loss": 0.4991, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.93522178304787e-05, |
|
"loss": 0.5536, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.929732103645147e-05, |
|
"loss": 0.543, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.9242424242424245e-05, |
|
"loss": 0.4853, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.918752744839701e-05, |
|
"loss": 0.5034, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.913263065436979e-05, |
|
"loss": 0.4749, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.9077733860342555e-05, |
|
"loss": 0.4729, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.902283706631533e-05, |
|
"loss": 0.4958, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.89679402722881e-05, |
|
"loss": 0.489, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.891304347826087e-05, |
|
"loss": 0.45, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.885814668423364e-05, |
|
"loss": 0.4878, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.8803249890206414e-05, |
|
"loss": 0.4705, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.874835309617918e-05, |
|
"loss": 0.4443, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.869345630215196e-05, |
|
"loss": 0.4299, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.8638559508124724e-05, |
|
"loss": 0.5095, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.85836627140975e-05, |
|
"loss": 0.5156, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.852876592007027e-05, |
|
"loss": 0.4791, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.847386912604304e-05, |
|
"loss": 0.5134, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.841897233201581e-05, |
|
"loss": 0.484, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.8364075537988584e-05, |
|
"loss": 0.4688, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.830917874396135e-05, |
|
"loss": 0.5097, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.8254281949934126e-05, |
|
"loss": 0.4583, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.8199385155906894e-05, |
|
"loss": 0.4538, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.814448836187967e-05, |
|
"loss": 0.4504, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.8089591567852436e-05, |
|
"loss": 0.4156, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.803469477382521e-05, |
|
"loss": 0.4592, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.797979797979798e-05, |
|
"loss": 0.4762, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.792490118577075e-05, |
|
"loss": 0.457, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.787000439174352e-05, |
|
"loss": 0.4488, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.7815107597716295e-05, |
|
"loss": 0.4291, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.776021080368906e-05, |
|
"loss": 0.4569, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.770531400966184e-05, |
|
"loss": 0.4081, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.7650417215634605e-05, |
|
"loss": 0.4677, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.759552042160738e-05, |
|
"loss": 0.4763, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.754062362758015e-05, |
|
"loss": 0.4089, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.748572683355292e-05, |
|
"loss": 0.4569, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.743083003952569e-05, |
|
"loss": 0.3866, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.7375933245498465e-05, |
|
"loss": 0.4174, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.732103645147123e-05, |
|
"loss": 0.3901, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.726613965744401e-05, |
|
"loss": 0.407, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.7211242863416775e-05, |
|
"loss": 0.4823, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.715634606938955e-05, |
|
"loss": 0.4327, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.710144927536232e-05, |
|
"loss": 0.4128, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.704655248133509e-05, |
|
"loss": 0.4277, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.699165568730786e-05, |
|
"loss": 0.4157, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.6936758893280634e-05, |
|
"loss": 0.4317, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.68818620992534e-05, |
|
"loss": 0.3777, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.6826965305226176e-05, |
|
"loss": 0.4186, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.6772068511198944e-05, |
|
"loss": 0.4104, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.671717171717172e-05, |
|
"loss": 0.4559, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.6662274923144486e-05, |
|
"loss": 0.4254, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.660737812911726e-05, |
|
"loss": 0.4348, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.655248133509003e-05, |
|
"loss": 0.368, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.64975845410628e-05, |
|
"loss": 0.3829, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.644268774703557e-05, |
|
"loss": 0.4385, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.6387790953008345e-05, |
|
"loss": 0.4329, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.633289415898111e-05, |
|
"loss": 0.3856, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.627799736495389e-05, |
|
"loss": 0.3997, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.6223100570926656e-05, |
|
"loss": 0.3997, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.616820377689943e-05, |
|
"loss": 0.4309, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.61133069828722e-05, |
|
"loss": 0.399, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.605841018884497e-05, |
|
"loss": 0.3673, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.600351339481774e-05, |
|
"loss": 0.4053, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.5948616600790515e-05, |
|
"loss": 0.4114, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.589371980676328e-05, |
|
"loss": 0.3661, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.583882301273606e-05, |
|
"loss": 0.4, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.5783926218708825e-05, |
|
"loss": 0.4087, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.57290294246816e-05, |
|
"loss": 0.3832, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.567413263065437e-05, |
|
"loss": 0.39, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.561923583662714e-05, |
|
"loss": 0.3714, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.556433904259991e-05, |
|
"loss": 0.4495, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.5509442248572684e-05, |
|
"loss": 0.3735, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.545454545454546e-05, |
|
"loss": 0.3078, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.5399648660518226e-05, |
|
"loss": 0.4047, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.5344751866491e-05, |
|
"loss": 0.3236, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.528985507246377e-05, |
|
"loss": 0.3545, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.523495827843654e-05, |
|
"loss": 0.3642, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.518006148440931e-05, |
|
"loss": 0.396, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.5125164690382086e-05, |
|
"loss": 0.4409, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.507026789635485e-05, |
|
"loss": 0.443, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.501537110232763e-05, |
|
"loss": 0.3904, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.4960474308300396e-05, |
|
"loss": 0.318, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.490557751427317e-05, |
|
"loss": 0.3897, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.485068072024594e-05, |
|
"loss": 0.3899, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.479578392621871e-05, |
|
"loss": 0.3284, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.474088713219148e-05, |
|
"loss": 0.3643, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.4685990338164255e-05, |
|
"loss": 0.3709, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.463109354413702e-05, |
|
"loss": 0.4323, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.45761967501098e-05, |
|
"loss": 0.3455, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.452129995608257e-05, |
|
"loss": 0.3306, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.446640316205534e-05, |
|
"loss": 0.3828, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.9271137026239067, |
|
"eval_loss": 0.19439160823822021, |
|
"eval_runtime": 56.6068, |
|
"eval_samples_per_second": 127.246, |
|
"eval_steps_per_second": 3.992, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.4411506368028114e-05, |
|
"loss": 0.3869, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.435660957400088e-05, |
|
"loss": 0.3884, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.4301712779973656e-05, |
|
"loss": 0.3364, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.4246815985946424e-05, |
|
"loss": 0.3704, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.41919191919192e-05, |
|
"loss": 0.3097, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.4137022397891966e-05, |
|
"loss": 0.3818, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.408212560386474e-05, |
|
"loss": 0.2919, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.402722880983751e-05, |
|
"loss": 0.3812, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.397233201581028e-05, |
|
"loss": 0.3698, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.391743522178305e-05, |
|
"loss": 0.3532, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3862538427755826e-05, |
|
"loss": 0.3003, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.3807641633728593e-05, |
|
"loss": 0.3557, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.375274483970137e-05, |
|
"loss": 0.3604, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.3697848045674136e-05, |
|
"loss": 0.3176, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.364295125164691e-05, |
|
"loss": 0.3359, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.358805445761968e-05, |
|
"loss": 0.3304, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.353315766359245e-05, |
|
"loss": 0.4163, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.347826086956522e-05, |
|
"loss": 0.3113, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.3423364075537995e-05, |
|
"loss": 0.385, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.336846728151076e-05, |
|
"loss": 0.3725, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.331357048748354e-05, |
|
"loss": 0.3176, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.3258673693456305e-05, |
|
"loss": 0.36, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 4.320377689942908e-05, |
|
"loss": 0.4071, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.314888010540185e-05, |
|
"loss": 0.37, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.309398331137462e-05, |
|
"loss": 0.3998, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 4.303908651734739e-05, |
|
"loss": 0.3969, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 4.2984189723320164e-05, |
|
"loss": 0.3709, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 4.292929292929293e-05, |
|
"loss": 0.3271, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 4.2874396135265707e-05, |
|
"loss": 0.3253, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.2819499341238474e-05, |
|
"loss": 0.3562, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.276460254721125e-05, |
|
"loss": 0.3014, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 4.2709705753184017e-05, |
|
"loss": 0.3219, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.265480895915679e-05, |
|
"loss": 0.3709, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.259991216512956e-05, |
|
"loss": 0.3491, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.2545015371102333e-05, |
|
"loss": 0.325, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 4.24901185770751e-05, |
|
"loss": 0.3032, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.2435221783047876e-05, |
|
"loss": 0.3409, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.2380324989020644e-05, |
|
"loss": 0.3407, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.232542819499342e-05, |
|
"loss": 0.3141, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.2270531400966186e-05, |
|
"loss": 0.3827, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.221563460693896e-05, |
|
"loss": 0.2971, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.216073781291173e-05, |
|
"loss": 0.3426, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.21058410188845e-05, |
|
"loss": 0.3105, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.205094422485727e-05, |
|
"loss": 0.3295, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.1996047430830045e-05, |
|
"loss": 0.3699, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.194115063680281e-05, |
|
"loss": 0.3273, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 4.188625384277559e-05, |
|
"loss": 0.3026, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 4.1831357048748355e-05, |
|
"loss": 0.3141, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 4.177646025472113e-05, |
|
"loss": 0.3449, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.17215634606939e-05, |
|
"loss": 0.3389, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 4.166666666666667e-05, |
|
"loss": 0.365, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 4.161176987263944e-05, |
|
"loss": 0.3608, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 4.1556873078612214e-05, |
|
"loss": 0.3357, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.150197628458498e-05, |
|
"loss": 0.3363, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 4.144707949055776e-05, |
|
"loss": 0.3015, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.1392182696530524e-05, |
|
"loss": 0.3427, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 4.13372859025033e-05, |
|
"loss": 0.3129, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 4.128238910847607e-05, |
|
"loss": 0.3151, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 4.122749231444884e-05, |
|
"loss": 0.3125, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 4.117259552042161e-05, |
|
"loss": 0.33, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 4.1117698726394384e-05, |
|
"loss": 0.3707, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.106280193236715e-05, |
|
"loss": 0.3487, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 4.1007905138339926e-05, |
|
"loss": 0.3476, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 4.0953008344312694e-05, |
|
"loss": 0.3383, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 4.089811155028547e-05, |
|
"loss": 0.3196, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 4.0843214756258236e-05, |
|
"loss": 0.2989, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.078831796223101e-05, |
|
"loss": 0.3474, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.073342116820378e-05, |
|
"loss": 0.29, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.067852437417655e-05, |
|
"loss": 0.3027, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 4.062362758014932e-05, |
|
"loss": 0.3283, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 4.0568730786122095e-05, |
|
"loss": 0.3101, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.051383399209486e-05, |
|
"loss": 0.2508, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.045893719806764e-05, |
|
"loss": 0.2954, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.0404040404040405e-05, |
|
"loss": 0.3299, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.034914361001318e-05, |
|
"loss": 0.313, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.029424681598595e-05, |
|
"loss": 0.2935, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.023935002195872e-05, |
|
"loss": 0.3189, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.018445322793149e-05, |
|
"loss": 0.3173, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.0129556433904265e-05, |
|
"loss": 0.3194, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.007465963987703e-05, |
|
"loss": 0.2948, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.001976284584981e-05, |
|
"loss": 0.3118, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.9964866051822575e-05, |
|
"loss": 0.2921, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.990996925779535e-05, |
|
"loss": 0.3052, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.985507246376812e-05, |
|
"loss": 0.311, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.980017566974089e-05, |
|
"loss": 0.3254, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.974527887571366e-05, |
|
"loss": 0.3075, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.9690382081686434e-05, |
|
"loss": 0.3385, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.96354852876592e-05, |
|
"loss": 0.3218, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.9580588493631976e-05, |
|
"loss": 0.2768, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.9525691699604744e-05, |
|
"loss": 0.3511, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.947079490557752e-05, |
|
"loss": 0.3292, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.9415898111550286e-05, |
|
"loss": 0.3433, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.936100131752306e-05, |
|
"loss": 0.349, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.930610452349583e-05, |
|
"loss": 0.3071, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.92512077294686e-05, |
|
"loss": 0.3099, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 3.919631093544137e-05, |
|
"loss": 0.3361, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.9141414141414145e-05, |
|
"loss": 0.3458, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.908651734738691e-05, |
|
"loss": 0.3123, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.903162055335969e-05, |
|
"loss": 0.3333, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.8976723759332456e-05, |
|
"loss": 0.2589, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.892182696530523e-05, |
|
"loss": 0.3092, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.9339164237123421, |
|
"eval_loss": 0.18616199493408203, |
|
"eval_runtime": 60.3206, |
|
"eval_samples_per_second": 119.412, |
|
"eval_steps_per_second": 3.747, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.8866930171278e-05, |
|
"loss": 0.3178, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.881203337725077e-05, |
|
"loss": 0.258, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.875713658322354e-05, |
|
"loss": 0.3299, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.8702239789196315e-05, |
|
"loss": 0.3179, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 3.864734299516908e-05, |
|
"loss": 0.2643, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 3.859244620114186e-05, |
|
"loss": 0.2651, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 3.8537549407114625e-05, |
|
"loss": 0.2704, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 3.84826526130874e-05, |
|
"loss": 0.2892, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.842775581906017e-05, |
|
"loss": 0.2936, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 3.837285902503294e-05, |
|
"loss": 0.3029, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.831796223100571e-05, |
|
"loss": 0.3092, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.8263065436978484e-05, |
|
"loss": 0.2671, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 3.820816864295125e-05, |
|
"loss": 0.2763, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.8153271848924026e-05, |
|
"loss": 0.2686, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 3.8098375054896794e-05, |
|
"loss": 0.2841, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.804347826086957e-05, |
|
"loss": 0.3333, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 3.7988581466842336e-05, |
|
"loss": 0.3049, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 3.793368467281511e-05, |
|
"loss": 0.3465, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.787878787878788e-05, |
|
"loss": 0.2749, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 3.782389108476065e-05, |
|
"loss": 0.2889, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.776899429073342e-05, |
|
"loss": 0.252, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 3.7714097496706196e-05, |
|
"loss": 0.3016, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.7659200702678963e-05, |
|
"loss": 0.2675, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.760430390865174e-05, |
|
"loss": 0.3014, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.7549407114624506e-05, |
|
"loss": 0.2811, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.749451032059728e-05, |
|
"loss": 0.3281, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.743961352657005e-05, |
|
"loss": 0.3067, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.738471673254282e-05, |
|
"loss": 0.2923, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 3.732981993851559e-05, |
|
"loss": 0.2726, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.7274923144488365e-05, |
|
"loss": 0.2797, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 3.722002635046113e-05, |
|
"loss": 0.2907, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.716512955643391e-05, |
|
"loss": 0.2887, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.7110232762406675e-05, |
|
"loss": 0.259, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.705533596837945e-05, |
|
"loss": 0.2646, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.700043917435222e-05, |
|
"loss": 0.2677, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.694554238032499e-05, |
|
"loss": 0.2906, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.689064558629776e-05, |
|
"loss": 0.3032, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.6835748792270534e-05, |
|
"loss": 0.2534, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.67808519982433e-05, |
|
"loss": 0.2893, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.6725955204216077e-05, |
|
"loss": 0.2656, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.6671058410188844e-05, |
|
"loss": 0.3215, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 3.661616161616162e-05, |
|
"loss": 0.2714, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.656126482213439e-05, |
|
"loss": 0.2773, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 3.650636802810716e-05, |
|
"loss": 0.325, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 3.645147123407993e-05, |
|
"loss": 0.3256, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 3.6396574440052703e-05, |
|
"loss": 0.2753, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 3.634167764602547e-05, |
|
"loss": 0.2767, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 3.6286780851998246e-05, |
|
"loss": 0.2906, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.6231884057971014e-05, |
|
"loss": 0.2801, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 3.617698726394379e-05, |
|
"loss": 0.2713, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.6122090469916556e-05, |
|
"loss": 0.2684, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.606719367588933e-05, |
|
"loss": 0.3168, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 3.60122968818621e-05, |
|
"loss": 0.2862, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.595740008783487e-05, |
|
"loss": 0.2946, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 3.590250329380764e-05, |
|
"loss": 0.3028, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.5847606499780415e-05, |
|
"loss": 0.2121, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 3.579270970575318e-05, |
|
"loss": 0.266, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 3.573781291172596e-05, |
|
"loss": 0.2431, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.5682916117698725e-05, |
|
"loss": 0.2799, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.56280193236715e-05, |
|
"loss": 0.3164, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.557312252964427e-05, |
|
"loss": 0.3282, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.551822573561704e-05, |
|
"loss": 0.2408, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.546332894158981e-05, |
|
"loss": 0.3084, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.5408432147562584e-05, |
|
"loss": 0.3061, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.535353535353535e-05, |
|
"loss": 0.3037, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.529863855950813e-05, |
|
"loss": 0.3262, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.5243741765480894e-05, |
|
"loss": 0.2455, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.518884497145367e-05, |
|
"loss": 0.2946, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.513394817742644e-05, |
|
"loss": 0.3204, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.507905138339921e-05, |
|
"loss": 0.2821, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.502415458937198e-05, |
|
"loss": 0.3126, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.4969257795344754e-05, |
|
"loss": 0.2931, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.491436100131753e-05, |
|
"loss": 0.2356, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.4859464207290296e-05, |
|
"loss": 0.278, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.480456741326307e-05, |
|
"loss": 0.3163, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.474967061923584e-05, |
|
"loss": 0.2999, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.469477382520861e-05, |
|
"loss": 0.3335, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.463987703118138e-05, |
|
"loss": 0.2905, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.4584980237154155e-05, |
|
"loss": 0.272, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.453008344312692e-05, |
|
"loss": 0.2826, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.44751866490997e-05, |
|
"loss": 0.2596, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.4420289855072465e-05, |
|
"loss": 0.3008, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.436539306104524e-05, |
|
"loss": 0.2568, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.431049626701801e-05, |
|
"loss": 0.2733, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 3.425559947299078e-05, |
|
"loss": 0.2458, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.420070267896355e-05, |
|
"loss": 0.2874, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.4145805884936324e-05, |
|
"loss": 0.2647, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.409090909090909e-05, |
|
"loss": 0.2877, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.403601229688187e-05, |
|
"loss": 0.2919, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 3.3981115502854635e-05, |
|
"loss": 0.2409, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 3.392621870882741e-05, |
|
"loss": 0.2317, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.387132191480018e-05, |
|
"loss": 0.2826, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.381642512077295e-05, |
|
"loss": 0.2619, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.376152832674572e-05, |
|
"loss": 0.2673, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.3706631532718494e-05, |
|
"loss": 0.2701, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 3.365173473869126e-05, |
|
"loss": 0.2914, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 3.3596837944664036e-05, |
|
"loss": 0.2293, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 3.3541941150636804e-05, |
|
"loss": 0.2553, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.348704435660958e-05, |
|
"loss": 0.2703, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.3432147562582346e-05, |
|
"loss": 0.31, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 3.337725076855512e-05, |
|
"loss": 0.2638, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3.332235397452789e-05, |
|
"loss": 0.3234, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.9509926419547411, |
|
"eval_loss": 0.1415332406759262, |
|
"eval_runtime": 57.5306, |
|
"eval_samples_per_second": 125.203, |
|
"eval_steps_per_second": 3.928, |
|
"step": 2026 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 3.326745718050066e-05, |
|
"loss": 0.2739, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 3.321256038647343e-05, |
|
"loss": 0.2871, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 3.3157663592446205e-05, |
|
"loss": 0.2511, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 3.310276679841897e-05, |
|
"loss": 0.2759, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 3.304787000439175e-05, |
|
"loss": 0.2745, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.2992973210364515e-05, |
|
"loss": 0.2517, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 3.293807641633729e-05, |
|
"loss": 0.2064, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 3.288317962231006e-05, |
|
"loss": 0.2692, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 3.282828282828283e-05, |
|
"loss": 0.2577, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 3.27733860342556e-05, |
|
"loss": 0.2808, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 3.2718489240228375e-05, |
|
"loss": 0.239, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 3.266359244620114e-05, |
|
"loss": 0.2026, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 3.260869565217392e-05, |
|
"loss": 0.2609, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 3.2553798858146685e-05, |
|
"loss": 0.3016, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.249890206411946e-05, |
|
"loss": 0.2376, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 3.244400527009223e-05, |
|
"loss": 0.1988, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 3.2389108476065e-05, |
|
"loss": 0.2098, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.233421168203777e-05, |
|
"loss": 0.2665, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 3.2279314888010544e-05, |
|
"loss": 0.2874, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 3.222441809398331e-05, |
|
"loss": 0.2534, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 3.2169521299956086e-05, |
|
"loss": 0.241, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 3.2114624505928854e-05, |
|
"loss": 0.2557, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 3.205972771190163e-05, |
|
"loss": 0.198, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 3.2004830917874396e-05, |
|
"loss": 0.2894, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 3.194993412384717e-05, |
|
"loss": 0.2577, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 3.189503732981994e-05, |
|
"loss": 0.2643, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 3.184014053579271e-05, |
|
"loss": 0.2646, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 3.178524374176548e-05, |
|
"loss": 0.2811, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 3.1730346947738256e-05, |
|
"loss": 0.2307, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 3.167545015371102e-05, |
|
"loss": 0.2806, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 3.16205533596838e-05, |
|
"loss": 0.2435, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 3.1565656565656566e-05, |
|
"loss": 0.2504, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 3.151075977162934e-05, |
|
"loss": 0.2581, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 3.145586297760211e-05, |
|
"loss": 0.2789, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 3.140096618357488e-05, |
|
"loss": 0.2356, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 3.134606938954765e-05, |
|
"loss": 0.2446, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 3.1291172595520425e-05, |
|
"loss": 0.2216, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 3.123627580149319e-05, |
|
"loss": 0.251, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.118137900746597e-05, |
|
"loss": 0.3334, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 3.1126482213438735e-05, |
|
"loss": 0.2887, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.107158541941151e-05, |
|
"loss": 0.2859, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.101668862538428e-05, |
|
"loss": 0.2506, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.096179183135705e-05, |
|
"loss": 0.2924, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.090689503732982e-05, |
|
"loss": 0.2325, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.0851998243302594e-05, |
|
"loss": 0.3132, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.079710144927536e-05, |
|
"loss": 0.2523, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.0742204655248136e-05, |
|
"loss": 0.3011, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.0687307861220904e-05, |
|
"loss": 0.2265, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 3.063241106719368e-05, |
|
"loss": 0.2277, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.0577514273166447e-05, |
|
"loss": 0.2404, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.052261747913922e-05, |
|
"loss": 0.2384, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 3.046772068511199e-05, |
|
"loss": 0.3213, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 3.0412823891084763e-05, |
|
"loss": 0.2381, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 3.035792709705753e-05, |
|
"loss": 0.2217, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 3.0303030303030306e-05, |
|
"loss": 0.2634, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 3.0248133509003073e-05, |
|
"loss": 0.2703, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 3.0193236714975848e-05, |
|
"loss": 0.2275, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 3.0138339920948616e-05, |
|
"loss": 0.2222, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 3.008344312692139e-05, |
|
"loss": 0.2954, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.0028546332894158e-05, |
|
"loss": 0.2291, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.9973649538866933e-05, |
|
"loss": 0.2391, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.99187527448397e-05, |
|
"loss": 0.2824, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.9863855950812475e-05, |
|
"loss": 0.2537, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.9808959156785243e-05, |
|
"loss": 0.2309, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.9754062362758017e-05, |
|
"loss": 0.2496, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.9699165568730785e-05, |
|
"loss": 0.2901, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.964426877470356e-05, |
|
"loss": 0.2245, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.9589371980676327e-05, |
|
"loss": 0.2389, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.9534475186649102e-05, |
|
"loss": 0.2585, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.947957839262187e-05, |
|
"loss": 0.2273, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.9424681598594644e-05, |
|
"loss": 0.2558, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 2.9369784804567412e-05, |
|
"loss": 0.2546, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 2.9314888010540187e-05, |
|
"loss": 0.2914, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 2.9259991216512954e-05, |
|
"loss": 0.2189, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 2.920509442248573e-05, |
|
"loss": 0.2408, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 2.9150197628458497e-05, |
|
"loss": 0.2787, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 2.909530083443127e-05, |
|
"loss": 0.258, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 2.904040404040404e-05, |
|
"loss": 0.2511, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 2.8985507246376814e-05, |
|
"loss": 0.296, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 2.893061045234958e-05, |
|
"loss": 0.2472, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 2.8875713658322356e-05, |
|
"loss": 0.3105, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 2.8820816864295124e-05, |
|
"loss": 0.2086, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 2.8765920070267898e-05, |
|
"loss": 0.2587, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.8711023276240666e-05, |
|
"loss": 0.2589, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 2.865612648221344e-05, |
|
"loss": 0.2438, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.860122968818621e-05, |
|
"loss": 0.2387, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 2.8546332894158983e-05, |
|
"loss": 0.2281, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 2.849143610013175e-05, |
|
"loss": 0.2697, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 2.8436539306104525e-05, |
|
"loss": 0.2196, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.8381642512077293e-05, |
|
"loss": 0.2275, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.8326745718050068e-05, |
|
"loss": 0.2124, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 2.8271848924022835e-05, |
|
"loss": 0.2012, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 2.821695212999561e-05, |
|
"loss": 0.2661, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 2.8162055335968378e-05, |
|
"loss": 0.2411, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 2.8107158541941152e-05, |
|
"loss": 0.2315, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 2.805226174791392e-05, |
|
"loss": 0.2484, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.7997364953886694e-05, |
|
"loss": 0.2917, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.7942468159859466e-05, |
|
"loss": 0.2232, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.7887571365832237e-05, |
|
"loss": 0.2354, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 2.7832674571805008e-05, |
|
"loss": 0.2264, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 0.2471, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.9516867971678468, |
|
"eval_loss": 0.1354868859052658, |
|
"eval_runtime": 55.6825, |
|
"eval_samples_per_second": 129.358, |
|
"eval_steps_per_second": 4.059, |
|
"step": 2532 |
|
}, |
|
{
"epoch": 5.0,
"learning_rate": 2.772288098375055e-05,
"loss": 0.2822,
"step": 2535
},
{
"epoch": 5.01,
"learning_rate": 2.766798418972332e-05,
"loss": 0.2347,
"step": 2540
},
{
"epoch": 5.02,
"learning_rate": 2.7613087395696093e-05,
"loss": 0.2532,
"step": 2545
},
{
"epoch": 5.03,
"learning_rate": 2.7558190601668864e-05,
"loss": 0.255,
"step": 2550
},
{
"epoch": 5.04,
"learning_rate": 2.7503293807641635e-05,
"loss": 0.2039,
"step": 2555
},
{
"epoch": 5.05,
"learning_rate": 2.7448397013614406e-05,
"loss": 0.2302,
"step": 2560
},
{
"epoch": 5.06,
"learning_rate": 2.7393500219587177e-05,
"loss": 0.2307,
"step": 2565
},
{
"epoch": 5.07,
"learning_rate": 2.733860342555995e-05,
"loss": 0.2257,
"step": 2570
},
{
"epoch": 5.08,
"learning_rate": 2.728370663153272e-05,
"loss": 0.197,
"step": 2575
},
{
"epoch": 5.09,
"learning_rate": 2.722880983750549e-05,
"loss": 0.2506,
"step": 2580
},
{
"epoch": 5.1,
"learning_rate": 2.7173913043478262e-05,
"loss": 0.2753,
"step": 2585
},
{
"epoch": 5.11,
"learning_rate": 2.7119016249451033e-05,
"loss": 0.2375,
"step": 2590
},
{
"epoch": 5.12,
"learning_rate": 2.7064119455423804e-05,
"loss": 0.2539,
"step": 2595
},
{
"epoch": 5.13,
"learning_rate": 2.700922266139658e-05,
"loss": 0.2405,
"step": 2600
},
{
"epoch": 5.14,
"learning_rate": 2.6954325867369347e-05,
"loss": 0.2373,
"step": 2605
},
{
"epoch": 5.15,
"learning_rate": 2.689942907334212e-05,
"loss": 0.2239,
"step": 2610
},
{
"epoch": 5.16,
"learning_rate": 2.684453227931489e-05,
"loss": 0.2269,
"step": 2615
},
{
"epoch": 5.17,
"learning_rate": 2.6789635485287663e-05,
"loss": 0.2729,
"step": 2620
},
{
"epoch": 5.18,
"learning_rate": 2.673473869126043e-05,
"loss": 0.2304,
"step": 2625
},
{
"epoch": 5.19,
"learning_rate": 2.6679841897233206e-05,
"loss": 0.1909,
"step": 2630
},
{
"epoch": 5.2,
"learning_rate": 2.6624945103205973e-05,
"loss": 0.2701,
"step": 2635
},
{
"epoch": 5.21,
"learning_rate": 2.6570048309178748e-05,
"loss": 0.2218,
"step": 2640
},
{
"epoch": 5.22,
"learning_rate": 2.6515151515151516e-05,
"loss": 0.2861,
"step": 2645
},
{
"epoch": 5.23,
"learning_rate": 2.646025472112429e-05,
"loss": 0.214,
"step": 2650
},
{
"epoch": 5.24,
"learning_rate": 2.6405357927097058e-05,
"loss": 0.236,
"step": 2655
},
{
"epoch": 5.25,
"learning_rate": 2.6350461133069833e-05,
"loss": 0.2425,
"step": 2660
},
{
"epoch": 5.26,
"learning_rate": 2.62955643390426e-05,
"loss": 0.2258,
"step": 2665
},
{
"epoch": 5.27,
"learning_rate": 2.6240667545015375e-05,
"loss": 0.2094,
"step": 2670
},
{
"epoch": 5.28,
"learning_rate": 2.6185770750988143e-05,
"loss": 0.2215,
"step": 2675
},
{
"epoch": 5.29,
"learning_rate": 2.6130873956960917e-05,
"loss": 0.2377,
"step": 2680
},
{
"epoch": 5.3,
"learning_rate": 2.6075977162933685e-05,
"loss": 0.2112,
"step": 2685
},
{
"epoch": 5.31,
"learning_rate": 2.602108036890646e-05,
"loss": 0.2711,
"step": 2690
},
{
"epoch": 5.32,
"learning_rate": 2.5966183574879227e-05,
"loss": 0.223,
"step": 2695
},
{
"epoch": 5.33,
"learning_rate": 2.5911286780852002e-05,
"loss": 0.2186,
"step": 2700
},
{
"epoch": 5.34,
"learning_rate": 2.585638998682477e-05,
"loss": 0.1815,
"step": 2705
},
{
"epoch": 5.35,
"learning_rate": 2.5801493192797544e-05,
"loss": 0.2389,
"step": 2710
},
{
"epoch": 5.36,
"learning_rate": 2.5746596398770312e-05,
"loss": 0.2409,
"step": 2715
},
{
"epoch": 5.37,
"learning_rate": 2.5691699604743087e-05,
"loss": 0.2447,
"step": 2720
},
{
"epoch": 5.38,
"learning_rate": 2.5636802810715854e-05,
"loss": 0.2117,
"step": 2725
},
{
"epoch": 5.39,
"learning_rate": 2.558190601668863e-05,
"loss": 0.2003,
"step": 2730
},
{
"epoch": 5.4,
"learning_rate": 2.5527009222661397e-05,
"loss": 0.1982,
"step": 2735
},
{
"epoch": 5.41,
"learning_rate": 2.547211242863417e-05,
"loss": 0.2075,
"step": 2740
},
{
"epoch": 5.42,
"learning_rate": 2.541721563460694e-05,
"loss": 0.2235,
"step": 2745
},
{
"epoch": 5.43,
"learning_rate": 2.5362318840579714e-05,
"loss": 0.2563,
"step": 2750
},
{
"epoch": 5.44,
"learning_rate": 2.530742204655248e-05,
"loss": 0.2057,
"step": 2755
},
{
"epoch": 5.45,
"learning_rate": 2.5252525252525256e-05,
"loss": 0.2802,
"step": 2760
},
{
"epoch": 5.46,
"learning_rate": 2.5197628458498024e-05,
"loss": 0.2579,
"step": 2765
},
{
"epoch": 5.47,
"learning_rate": 2.5142731664470798e-05,
"loss": 0.2343,
"step": 2770
},
{
"epoch": 5.48,
"learning_rate": 2.5087834870443566e-05,
"loss": 0.2513,
"step": 2775
},
{
"epoch": 5.49,
"learning_rate": 2.503293807641634e-05,
"loss": 0.1924,
"step": 2780
},
{
"epoch": 5.5,
"learning_rate": 2.497804128238911e-05,
"loss": 0.202,
"step": 2785
},
{
"epoch": 5.51,
"learning_rate": 2.492314448836188e-05,
"loss": 0.2316,
"step": 2790
},
{
"epoch": 5.52,
"learning_rate": 2.486824769433465e-05,
"loss": 0.2654,
"step": 2795
},
{
"epoch": 5.53,
"learning_rate": 2.4813350900307422e-05,
"loss": 0.2436,
"step": 2800
},
{
"epoch": 5.54,
"learning_rate": 2.4758454106280193e-05,
"loss": 0.2398,
"step": 2805
},
{
"epoch": 5.55,
"learning_rate": 2.4703557312252964e-05,
"loss": 0.2228,
"step": 2810
},
{
"epoch": 5.56,
"learning_rate": 2.4648660518225735e-05,
"loss": 0.1975,
"step": 2815
},
{
"epoch": 5.57,
"learning_rate": 2.4593763724198506e-05,
"loss": 0.2061,
"step": 2820
},
{
"epoch": 5.58,
"learning_rate": 2.4538866930171278e-05,
"loss": 0.2173,
"step": 2825
},
{
"epoch": 5.59,
"learning_rate": 2.448397013614405e-05,
"loss": 0.2261,
"step": 2830
},
{
"epoch": 5.6,
"learning_rate": 2.442907334211682e-05,
"loss": 0.2527,
"step": 2835
},
{
"epoch": 5.61,
"learning_rate": 2.437417654808959e-05,
"loss": 0.2266,
"step": 2840
},
{
"epoch": 5.62,
"learning_rate": 2.4319279754062362e-05,
"loss": 0.2762,
"step": 2845
},
{
"epoch": 5.63,
"learning_rate": 2.4264382960035133e-05,
"loss": 0.1894,
"step": 2850
},
{
"epoch": 5.64,
"learning_rate": 2.4209486166007905e-05,
"loss": 0.2052,
"step": 2855
},
{
"epoch": 5.65,
"learning_rate": 2.4154589371980676e-05,
"loss": 0.2532,
"step": 2860
},
{
"epoch": 5.66,
"learning_rate": 2.4099692577953447e-05,
"loss": 0.2106,
"step": 2865
},
{
"epoch": 5.67,
"learning_rate": 2.4044795783926218e-05,
"loss": 0.2246,
"step": 2870
},
{
"epoch": 5.68,
"learning_rate": 2.398989898989899e-05,
"loss": 0.2183,
"step": 2875
},
{
"epoch": 5.69,
"learning_rate": 2.393500219587176e-05,
"loss": 0.199,
"step": 2880
},
{
"epoch": 5.7,
"learning_rate": 2.388010540184453e-05,
"loss": 0.2168,
"step": 2885
},
{
"epoch": 5.71,
"learning_rate": 2.3825208607817303e-05,
"loss": 0.2123,
"step": 2890
},
{
"epoch": 5.72,
"learning_rate": 2.3770311813790074e-05,
"loss": 0.2638,
"step": 2895
},
{
"epoch": 5.73,
"learning_rate": 2.3715415019762845e-05,
"loss": 0.201,
"step": 2900
},
{
"epoch": 5.74,
"learning_rate": 2.3660518225735616e-05,
"loss": 0.2523,
"step": 2905
},
{
"epoch": 5.75,
"learning_rate": 2.3605621431708387e-05,
"loss": 0.2399,
"step": 2910
},
{
"epoch": 5.76,
"learning_rate": 2.355072463768116e-05,
"loss": 0.2469,
"step": 2915
},
{
"epoch": 5.77,
"learning_rate": 2.349582784365393e-05,
"loss": 0.2199,
"step": 2920
},
{
"epoch": 5.77,
"learning_rate": 2.34409310496267e-05,
"loss": 0.2248,
"step": 2925
},
{
"epoch": 5.78,
"learning_rate": 2.3386034255599472e-05,
"loss": 0.2267,
"step": 2930
},
{
"epoch": 5.79,
"learning_rate": 2.3331137461572243e-05,
"loss": 0.2354,
"step": 2935
},
{
"epoch": 5.8,
"learning_rate": 2.3276240667545014e-05,
"loss": 0.2129,
"step": 2940
},
{
"epoch": 5.81,
"learning_rate": 2.3221343873517785e-05,
"loss": 0.2154,
"step": 2945
},
{
"epoch": 5.82,
"learning_rate": 2.3166447079490557e-05,
"loss": 0.2021,
"step": 2950
},
{
"epoch": 5.83,
"learning_rate": 2.3111550285463328e-05,
"loss": 0.2255,
"step": 2955
},
{
"epoch": 5.84,
"learning_rate": 2.30566534914361e-05,
"loss": 0.2313,
"step": 2960
},
{
"epoch": 5.85,
"learning_rate": 2.300175669740887e-05,
"loss": 0.2081,
"step": 2965
},
{
"epoch": 5.86,
"learning_rate": 2.294685990338164e-05,
"loss": 0.2159,
"step": 2970
},
{
"epoch": 5.87,
"learning_rate": 2.2891963109354412e-05,
"loss": 0.1563,
"step": 2975
},
{
"epoch": 5.88,
"learning_rate": 2.2837066315327184e-05,
"loss": 0.1981,
"step": 2980
},
{
"epoch": 5.89,
"learning_rate": 2.2782169521299955e-05,
"loss": 0.2089,
"step": 2985
},
{
"epoch": 5.9,
"learning_rate": 2.272727272727273e-05,
"loss": 0.2486,
"step": 2990
},
{
"epoch": 5.91,
"learning_rate": 2.26723759332455e-05,
"loss": 0.2718,
"step": 2995
},
{
"epoch": 5.92,
"learning_rate": 2.261747913921827e-05,
"loss": 0.2095,
"step": 3000
},
{
"epoch": 5.93,
"learning_rate": 2.2562582345191043e-05,
"loss": 0.1995,
"step": 3005
},
{
"epoch": 5.94,
"learning_rate": 2.2507685551163814e-05,
"loss": 0.2598,
"step": 3010
},
{
"epoch": 5.95,
"learning_rate": 2.2452788757136585e-05,
"loss": 0.2183,
"step": 3015
},
{
"epoch": 5.96,
"learning_rate": 2.2397891963109356e-05,
"loss": 0.2533,
"step": 3020
},
{
"epoch": 5.97,
"learning_rate": 2.2342995169082127e-05,
"loss": 0.2181,
"step": 3025
},
{
"epoch": 5.98,
"learning_rate": 2.22880983750549e-05,
"loss": 0.2189,
"step": 3030
},
{
"epoch": 5.99,
"learning_rate": 2.223320158102767e-05,
"loss": 0.251,
"step": 3035
},
{
"epoch": 6.0,
"eval_accuracy": 0.9605719838955991,
"eval_loss": 0.11695768684148788,
"eval_runtime": 55.7492,
"eval_samples_per_second": 129.204,
"eval_steps_per_second": 4.054,
"step": 3039
},
{
"epoch": 6.0,
"learning_rate": 2.217830478700044e-05,
"loss": 0.2208,
"step": 3040
},
{
"epoch": 6.01,
"learning_rate": 2.2123407992973212e-05,
"loss": 0.1756,
"step": 3045
},
{
"epoch": 6.02,
"learning_rate": 2.2068511198945983e-05,
"loss": 0.2053,
"step": 3050
},
{
"epoch": 6.03,
"learning_rate": 2.2013614404918754e-05,
"loss": 0.206,
"step": 3055
},
{
"epoch": 6.04,
"learning_rate": 2.1958717610891526e-05,
"loss": 0.2643,
"step": 3060
},
{
"epoch": 6.05,
"learning_rate": 2.1903820816864297e-05,
"loss": 0.1734,
"step": 3065
},
{
"epoch": 6.06,
"learning_rate": 2.1848924022837068e-05,
"loss": 0.1912,
"step": 3070
},
{
"epoch": 6.07,
"learning_rate": 2.179402722880984e-05,
"loss": 0.2051,
"step": 3075
},
{
"epoch": 6.08,
"learning_rate": 2.173913043478261e-05,
"loss": 0.1956,
"step": 3080
},
{
"epoch": 6.09,
"learning_rate": 2.168423364075538e-05,
"loss": 0.1869,
"step": 3085
},
{
"epoch": 6.1,
"learning_rate": 2.1629336846728153e-05,
"loss": 0.18,
"step": 3090
},
{
"epoch": 6.11,
"learning_rate": 2.1574440052700924e-05,
"loss": 0.2012,
"step": 3095
},
{
"epoch": 6.12,
"learning_rate": 2.1519543258673695e-05,
"loss": 0.2281,
"step": 3100
},
{
"epoch": 6.13,
"learning_rate": 2.1464646464646466e-05,
"loss": 0.2269,
"step": 3105
},
{
"epoch": 6.14,
"learning_rate": 2.1409749670619237e-05,
"loss": 0.2017,
"step": 3110
},
{
"epoch": 6.15,
"learning_rate": 2.1354852876592008e-05,
"loss": 0.197,
"step": 3115
},
{
"epoch": 6.16,
"learning_rate": 2.129995608256478e-05,
"loss": 0.2207,
"step": 3120
},
{
"epoch": 6.17,
"learning_rate": 2.124505928853755e-05,
"loss": 0.2028,
"step": 3125
},
{
"epoch": 6.18,
"learning_rate": 2.1190162494510322e-05,
"loss": 0.2297,
"step": 3130
},
{
"epoch": 6.19,
"learning_rate": 2.1135265700483093e-05,
"loss": 0.2418,
"step": 3135
},
{
"epoch": 6.2,
"learning_rate": 2.1080368906455864e-05,
"loss": 0.2176,
"step": 3140
},
{
"epoch": 6.21,
"learning_rate": 2.1025472112428635e-05,
"loss": 0.1928,
"step": 3145
},
{
"epoch": 6.22,
"learning_rate": 2.0970575318401406e-05,
"loss": 0.2259,
"step": 3150
},
{
"epoch": 6.23,
"learning_rate": 2.0915678524374178e-05,
"loss": 0.1937,
"step": 3155
},
{
"epoch": 6.24,
"learning_rate": 2.086078173034695e-05,
"loss": 0.1743,
"step": 3160
},
{
"epoch": 6.25,
"learning_rate": 2.080588493631972e-05,
"loss": 0.2158,
"step": 3165
},
{
"epoch": 6.26,
"learning_rate": 2.075098814229249e-05,
"loss": 0.231,
"step": 3170
},
{
"epoch": 6.27,
"learning_rate": 2.0696091348265262e-05,
"loss": 0.2313,
"step": 3175
},
{
"epoch": 6.28,
"learning_rate": 2.0641194554238033e-05,
"loss": 0.2747,
"step": 3180
},
{
"epoch": 6.29,
"learning_rate": 2.0586297760210805e-05,
"loss": 0.2224,
"step": 3185
},
{
"epoch": 6.3,
"learning_rate": 2.0531400966183576e-05,
"loss": 0.1668,
"step": 3190
},
{
"epoch": 6.31,
"learning_rate": 2.0476504172156347e-05,
"loss": 0.1898,
"step": 3195
},
{
"epoch": 6.32,
"learning_rate": 2.0421607378129118e-05,
"loss": 0.2185,
"step": 3200
},
{
"epoch": 6.33,
"learning_rate": 2.036671058410189e-05,
"loss": 0.2131,
"step": 3205
},
{
"epoch": 6.34,
"learning_rate": 2.031181379007466e-05,
"loss": 0.2115,
"step": 3210
},
{
"epoch": 6.35,
"learning_rate": 2.025691699604743e-05,
"loss": 0.1943,
"step": 3215
},
{
"epoch": 6.36,
"learning_rate": 2.0202020202020203e-05,
"loss": 0.2359,
"step": 3220
},
{
"epoch": 6.37,
"learning_rate": 2.0147123407992974e-05,
"loss": 0.2007,
"step": 3225
},
{
"epoch": 6.38,
"learning_rate": 2.0092226613965745e-05,
"loss": 0.2263,
"step": 3230
},
{
"epoch": 6.39,
"learning_rate": 2.0037329819938516e-05,
"loss": 0.2219,
"step": 3235
},
{
"epoch": 6.4,
"learning_rate": 1.9982433025911287e-05,
"loss": 0.1944,
"step": 3240
},
{
"epoch": 6.41,
"learning_rate": 1.992753623188406e-05,
"loss": 0.196,
"step": 3245
},
{
"epoch": 6.42,
"learning_rate": 1.987263943785683e-05,
"loss": 0.2084,
"step": 3250
},
{
"epoch": 6.43,
"learning_rate": 1.98177426438296e-05,
"loss": 0.2648,
"step": 3255
},
{
"epoch": 6.44,
"learning_rate": 1.9762845849802372e-05,
"loss": 0.1994,
"step": 3260
},
{
"epoch": 6.45,
"learning_rate": 1.9707949055775143e-05,
"loss": 0.2067,
"step": 3265
},
{
"epoch": 6.46,
"learning_rate": 1.9653052261747914e-05,
"loss": 0.2194,
"step": 3270
},
{
"epoch": 6.47,
"learning_rate": 1.9598155467720685e-05,
"loss": 0.184,
"step": 3275
},
{
"epoch": 6.48,
"learning_rate": 1.9543258673693457e-05,
"loss": 0.1721,
"step": 3280
},
{
"epoch": 6.49,
"learning_rate": 1.9488361879666228e-05,
"loss": 0.2043,
"step": 3285
},
{
"epoch": 6.5,
"learning_rate": 1.9433465085639e-05,
"loss": 0.191,
"step": 3290
},
{
"epoch": 6.51,
"learning_rate": 1.937856829161177e-05,
"loss": 0.2281,
"step": 3295
},
{
"epoch": 6.52,
"learning_rate": 1.932367149758454e-05,
"loss": 0.236,
"step": 3300
},
{
"epoch": 6.53,
"learning_rate": 1.9268774703557312e-05,
"loss": 0.2139,
"step": 3305
},
{
"epoch": 6.54,
"learning_rate": 1.9213877909530084e-05,
"loss": 0.2243,
"step": 3310
},
{
"epoch": 6.54,
"learning_rate": 1.9158981115502855e-05,
"loss": 0.179,
"step": 3315
},
{
"epoch": 6.55,
"learning_rate": 1.9104084321475626e-05,
"loss": 0.2566,
"step": 3320
},
{
"epoch": 6.56,
"learning_rate": 1.9049187527448397e-05,
"loss": 0.2037,
"step": 3325
},
{
"epoch": 6.57,
"learning_rate": 1.8994290733421168e-05,
"loss": 0.204,
"step": 3330
},
{
"epoch": 6.58,
"learning_rate": 1.893939393939394e-05,
"loss": 0.2511,
"step": 3335
},
{
"epoch": 6.59,
"learning_rate": 1.888449714536671e-05,
"loss": 0.1753,
"step": 3340
},
{
"epoch": 6.6,
"learning_rate": 1.8829600351339482e-05,
"loss": 0.1771,
"step": 3345
},
{
"epoch": 6.61,
"learning_rate": 1.8774703557312253e-05,
"loss": 0.1706,
"step": 3350
},
{
"epoch": 6.62,
"learning_rate": 1.8719806763285024e-05,
"loss": 0.2043,
"step": 3355
},
{
"epoch": 6.63,
"learning_rate": 1.8664909969257795e-05,
"loss": 0.2094,
"step": 3360
},
{
"epoch": 6.64,
"learning_rate": 1.8610013175230566e-05,
"loss": 0.2081,
"step": 3365
},
{
"epoch": 6.65,
"learning_rate": 1.8555116381203338e-05,
"loss": 0.1786,
"step": 3370
},
{
"epoch": 6.66,
"learning_rate": 1.850021958717611e-05,
"loss": 0.2318,
"step": 3375
},
{
"epoch": 6.67,
"learning_rate": 1.844532279314888e-05,
"loss": 0.2302,
"step": 3380
},
{
"epoch": 6.68,
"learning_rate": 1.839042599912165e-05,
"loss": 0.2016,
"step": 3385
},
{
"epoch": 6.69,
"learning_rate": 1.8335529205094422e-05,
"loss": 0.211,
"step": 3390
},
{
"epoch": 6.7,
"learning_rate": 1.8280632411067193e-05,
"loss": 0.2098,
"step": 3395
},
{
"epoch": 6.71,
"learning_rate": 1.8225735617039964e-05,
"loss": 0.1902,
"step": 3400
},
{
"epoch": 6.72,
"learning_rate": 1.8170838823012736e-05,
"loss": 0.2447,
"step": 3405
},
{
"epoch": 6.73,
"learning_rate": 1.8115942028985507e-05,
"loss": 0.2227,
"step": 3410
},
{
"epoch": 6.74,
"learning_rate": 1.8061045234958278e-05,
"loss": 0.2288,
"step": 3415
},
{
"epoch": 6.75,
"learning_rate": 1.800614844093105e-05,
"loss": 0.2334,
"step": 3420
},
{
"epoch": 6.76,
"learning_rate": 1.795125164690382e-05,
"loss": 0.2185,
"step": 3425
},
{
"epoch": 6.77,
"learning_rate": 1.789635485287659e-05,
"loss": 0.2335,
"step": 3430
},
{
"epoch": 6.78,
"learning_rate": 1.7841458058849363e-05,
"loss": 0.1949,
"step": 3435
},
{
"epoch": 6.79,
"learning_rate": 1.7786561264822134e-05,
"loss": 0.1765,
"step": 3440
},
{
"epoch": 6.8,
"learning_rate": 1.7731664470794905e-05,
"loss": 0.1937,
"step": 3445
},
{
"epoch": 6.81,
"learning_rate": 1.7676767676767676e-05,
"loss": 0.1902,
"step": 3450
},
{
"epoch": 6.82,
"learning_rate": 1.7621870882740447e-05,
"loss": 0.2053,
"step": 3455
},
{
"epoch": 6.83,
"learning_rate": 1.756697408871322e-05,
"loss": 0.2061,
"step": 3460
},
{
"epoch": 6.84,
"learning_rate": 1.751207729468599e-05,
"loss": 0.2203,
"step": 3465
},
{
"epoch": 6.85,
"learning_rate": 1.7457180500658764e-05,
"loss": 0.1871,
"step": 3470
},
{
"epoch": 6.86,
"learning_rate": 1.7402283706631535e-05,
"loss": 0.1913,
"step": 3475
},
{
"epoch": 6.87,
"learning_rate": 1.7347386912604306e-05,
"loss": 0.2058,
"step": 3480
},
{
"epoch": 6.88,
"learning_rate": 1.7292490118577078e-05,
"loss": 0.1994,
"step": 3485
},
{
"epoch": 6.89,
"learning_rate": 1.723759332454985e-05,
"loss": 0.1995,
"step": 3490
},
{
"epoch": 6.9,
"learning_rate": 1.718269653052262e-05,
"loss": 0.1919,
"step": 3495
},
{
"epoch": 6.91,
"learning_rate": 1.712779973649539e-05,
"loss": 0.2,
"step": 3500
},
{
"epoch": 6.92,
"learning_rate": 1.7072902942468162e-05,
"loss": 0.2207,
"step": 3505
},
{
"epoch": 6.93,
"learning_rate": 1.7018006148440933e-05,
"loss": 0.2617,
"step": 3510
},
{
"epoch": 6.94,
"learning_rate": 1.6963109354413705e-05,
"loss": 0.2475,
"step": 3515
},
{
"epoch": 6.95,
"learning_rate": 1.6908212560386476e-05,
"loss": 0.2197,
"step": 3520
},
{
"epoch": 6.96,
"learning_rate": 1.6853315766359247e-05,
"loss": 0.1771,
"step": 3525
},
{
"epoch": 6.97,
"learning_rate": 1.6798418972332018e-05,
"loss": 0.2056,
"step": 3530
},
{
"epoch": 6.98,
"learning_rate": 1.674352217830479e-05,
"loss": 0.2303,
"step": 3535
},
{
"epoch": 6.99,
"learning_rate": 1.668862538427756e-05,
"loss": 0.2214,
"step": 3540
},
{
"epoch": 7.0,
"learning_rate": 1.663372859025033e-05,
"loss": 0.2276,
"step": 3545
},
{
"epoch": 7.0,
"eval_accuracy": 0.962654449534916,
"eval_loss": 0.11358987540006638,
"eval_runtime": 57.0626,
"eval_samples_per_second": 126.23,
"eval_steps_per_second": 3.961,
"step": 3545
},
{
"epoch": 7.01,
"learning_rate": 1.6578831796223103e-05,
"loss": 0.218,
"step": 3550
},
{
"epoch": 7.02,
"learning_rate": 1.6523935002195874e-05,
"loss": 0.2592,
"step": 3555
},
{
"epoch": 7.03,
"learning_rate": 1.6469038208168645e-05,
"loss": 0.2047,
"step": 3560
},
{
"epoch": 7.04,
"learning_rate": 1.6414141414141416e-05,
"loss": 0.2132,
"step": 3565
},
{
"epoch": 7.05,
"learning_rate": 1.6359244620114187e-05,
"loss": 0.1923,
"step": 3570
},
{
"epoch": 7.06,
"learning_rate": 1.630434782608696e-05,
"loss": 0.219,
"step": 3575
},
{
"epoch": 7.07,
"learning_rate": 1.624945103205973e-05,
"loss": 0.1647,
"step": 3580
},
{
"epoch": 7.08,
"learning_rate": 1.61945542380325e-05,
"loss": 0.1905,
"step": 3585
},
{
"epoch": 7.09,
"learning_rate": 1.6139657444005272e-05,
"loss": 0.1811,
"step": 3590
},
{
"epoch": 7.1,
"learning_rate": 1.6084760649978043e-05,
"loss": 0.1874,
"step": 3595
},
{
"epoch": 7.11,
"learning_rate": 1.6029863855950814e-05,
"loss": 0.2272,
"step": 3600
},
{
"epoch": 7.12,
"learning_rate": 1.5974967061923585e-05,
"loss": 0.1973,
"step": 3605
},
{
"epoch": 7.13,
"learning_rate": 1.5920070267896357e-05,
"loss": 0.1869,
"step": 3610
},
{
"epoch": 7.14,
"learning_rate": 1.5865173473869128e-05,
"loss": 0.189,
"step": 3615
},
{
"epoch": 7.15,
"learning_rate": 1.58102766798419e-05,
"loss": 0.2048,
"step": 3620
},
{
"epoch": 7.16,
"learning_rate": 1.575537988581467e-05,
"loss": 0.1762,
"step": 3625
},
{
"epoch": 7.17,
"learning_rate": 1.570048309178744e-05,
"loss": 0.2027,
"step": 3630
},
{
"epoch": 7.18,
"learning_rate": 1.5645586297760212e-05,
"loss": 0.2084,
"step": 3635
},
{
"epoch": 7.19,
"learning_rate": 1.5590689503732984e-05,
"loss": 0.1932,
"step": 3640
},
{
"epoch": 7.2,
"learning_rate": 1.5535792709705755e-05,
"loss": 0.1698,
"step": 3645
},
{
"epoch": 7.21,
"learning_rate": 1.5480895915678526e-05,
"loss": 0.2072,
"step": 3650
},
{
"epoch": 7.22,
"learning_rate": 1.5425999121651297e-05,
"loss": 0.1717,
"step": 3655
},
{
"epoch": 7.23,
"learning_rate": 1.5371102327624068e-05,
"loss": 0.1616,
"step": 3660
},
{
"epoch": 7.24,
"learning_rate": 1.531620553359684e-05,
"loss": 0.1969,
"step": 3665
},
{
"epoch": 7.25,
"learning_rate": 1.526130873956961e-05,
"loss": 0.1806,
"step": 3670
},
{
"epoch": 7.26,
"learning_rate": 1.5206411945542382e-05,
"loss": 0.212,
"step": 3675
},
{
"epoch": 7.27,
"learning_rate": 1.5151515151515153e-05,
"loss": 0.1999,
"step": 3680
},
{
"epoch": 7.28,
"learning_rate": 1.5096618357487924e-05,
"loss": 0.1622,
"step": 3685
},
{
"epoch": 7.29,
"learning_rate": 1.5041721563460695e-05,
"loss": 0.2007,
"step": 3690
},
{
"epoch": 7.3,
"learning_rate": 1.4986824769433466e-05,
"loss": 0.1805,
"step": 3695
},
{
"epoch": 7.31,
"learning_rate": 1.4931927975406238e-05,
"loss": 0.1956,
"step": 3700
},
{
"epoch": 7.31,
"learning_rate": 1.4877031181379009e-05,
"loss": 0.2389,
"step": 3705
},
{
"epoch": 7.32,
"learning_rate": 1.482213438735178e-05,
"loss": 0.1633,
"step": 3710
},
{
"epoch": 7.33,
"learning_rate": 1.4767237593324551e-05,
"loss": 0.1691,
"step": 3715
},
{
"epoch": 7.34,
"learning_rate": 1.4712340799297322e-05,
"loss": 0.1748,
"step": 3720
},
{
"epoch": 7.35,
"learning_rate": 1.4657444005270093e-05,
"loss": 0.2072,
"step": 3725
},
{
"epoch": 7.36,
"learning_rate": 1.4602547211242864e-05,
"loss": 0.1748,
"step": 3730
},
{
"epoch": 7.37,
"learning_rate": 1.4547650417215636e-05,
"loss": 0.2091,
"step": 3735
},
{
"epoch": 7.38,
"learning_rate": 1.4492753623188407e-05,
"loss": 0.1821,
"step": 3740
},
{
"epoch": 7.39,
"learning_rate": 1.4437856829161178e-05,
"loss": 0.1984,
"step": 3745
},
{
"epoch": 7.4,
"learning_rate": 1.4382960035133949e-05,
"loss": 0.1737,
"step": 3750
},
{
"epoch": 7.41,
"learning_rate": 1.432806324110672e-05,
"loss": 0.1796,
"step": 3755
},
{
"epoch": 7.42,
"learning_rate": 1.4273166447079491e-05,
"loss": 0.1891,
"step": 3760
},
{
"epoch": 7.43,
"learning_rate": 1.4218269653052263e-05,
"loss": 0.1721,
"step": 3765
},
{
"epoch": 7.44,
"learning_rate": 1.4163372859025034e-05,
"loss": 0.1466,
"step": 3770
},
{
"epoch": 7.45,
"learning_rate": 1.4108476064997805e-05,
"loss": 0.1822,
"step": 3775
},
{
"epoch": 7.46,
"learning_rate": 1.4053579270970576e-05,
"loss": 0.1995,
"step": 3780
},
{
"epoch": 7.47,
"learning_rate": 1.3998682476943347e-05,
"loss": 0.1708,
"step": 3785
},
{
"epoch": 7.48,
"learning_rate": 1.3943785682916118e-05,
"loss": 0.2168,
"step": 3790
},
{
"epoch": 7.49,
"learning_rate": 1.388888888888889e-05,
"loss": 0.1712,
"step": 3795
},
{
"epoch": 7.5,
"learning_rate": 1.383399209486166e-05,
"loss": 0.1669,
"step": 3800
},
{
"epoch": 7.51,
"learning_rate": 1.3779095300834432e-05,
"loss": 0.1854,
"step": 3805
},
{
"epoch": 7.52,
"learning_rate": 1.3724198506807203e-05,
"loss": 0.1994,
"step": 3810
},
{
"epoch": 7.53,
"learning_rate": 1.3669301712779974e-05,
"loss": 0.1921,
"step": 3815
},
{
"epoch": 7.54,
"learning_rate": 1.3614404918752745e-05,
"loss": 0.2337,
"step": 3820
},
{
"epoch": 7.55,
"learning_rate": 1.3559508124725517e-05,
"loss": 0.1605,
"step": 3825
},
{
"epoch": 7.56,
"learning_rate": 1.350461133069829e-05,
"loss": 0.1475,
"step": 3830
},
{
"epoch": 7.57,
"learning_rate": 1.344971453667106e-05,
"loss": 0.1737,
"step": 3835
},
{
"epoch": 7.58,
"learning_rate": 1.3394817742643832e-05,
"loss": 0.2196,
"step": 3840
},
{
"epoch": 7.59,
"learning_rate": 1.3339920948616603e-05,
"loss": 0.1981,
"step": 3845
},
{
"epoch": 7.6,
"learning_rate": 1.3285024154589374e-05,
"loss": 0.211,
"step": 3850
},
{
"epoch": 7.61,
"learning_rate": 1.3230127360562145e-05,
"loss": 0.1906,
"step": 3855
},
{
"epoch": 7.62,
"learning_rate": 1.3175230566534916e-05,
"loss": 0.1808,
"step": 3860
},
{
"epoch": 7.63,
"learning_rate": 1.3120333772507687e-05,
"loss": 0.2073,
"step": 3865
},
{
"epoch": 7.64,
"learning_rate": 1.3065436978480459e-05,
"loss": 0.2041,
"step": 3870
},
{
"epoch": 7.65,
"learning_rate": 1.301054018445323e-05,
"loss": 0.2143,
"step": 3875
},
{
"epoch": 7.66,
"learning_rate": 1.2955643390426001e-05,
"loss": 0.2021,
"step": 3880
},
{
"epoch": 7.67,
"learning_rate": 1.2900746596398772e-05,
"loss": 0.1753,
"step": 3885
},
{
"epoch": 7.68,
"learning_rate": 1.2845849802371543e-05,
"loss": 0.1655,
"step": 3890
},
{
"epoch": 7.69,
"learning_rate": 1.2790953008344314e-05,
"loss": 0.2062,
"step": 3895
},
{
"epoch": 7.7,
"learning_rate": 1.2736056214317086e-05,
"loss": 0.1821,
"step": 3900
},
{
"epoch": 7.71,
"learning_rate": 1.2681159420289857e-05,
"loss": 0.1699,
"step": 3905
},
{
"epoch": 7.72,
"learning_rate": 1.2626262626262628e-05,
"loss": 0.1743,
"step": 3910
},
{
"epoch": 7.73,
"learning_rate": 1.2571365832235399e-05,
"loss": 0.1603,
"step": 3915
},
{
"epoch": 7.74,
"learning_rate": 1.251646903820817e-05,
"loss": 0.179,
"step": 3920
},
{
"epoch": 7.75,
"learning_rate": 1.246157224418094e-05,
"loss": 0.1987,
"step": 3925
},
{
"epoch": 7.76,
"learning_rate": 1.2406675450153711e-05,
"loss": 0.2397,
"step": 3930
},
{
"epoch": 7.77,
"learning_rate": 1.2351778656126482e-05,
"loss": 0.2465,
"step": 3935
},
{
"epoch": 7.78,
"learning_rate": 1.2296881862099253e-05,
"loss": 0.2025,
"step": 3940
},
{
"epoch": 7.79,
"learning_rate": 1.2241985068072024e-05,
"loss": 0.17,
"step": 3945
},
{
"epoch": 7.8,
"learning_rate": 1.2187088274044796e-05,
"loss": 0.193,
"step": 3950
},
{
"epoch": 7.81,
"learning_rate": 1.2132191480017567e-05,
"loss": 0.2017,
"step": 3955
},
{
"epoch": 7.82,
"learning_rate": 1.2077294685990338e-05,
"loss": 0.1917,
"step": 3960
},
{
"epoch": 7.83,
"learning_rate": 1.2022397891963109e-05,
"loss": 0.1933,
"step": 3965
},
{
"epoch": 7.84,
"learning_rate": 1.196750109793588e-05,
"loss": 0.2098,
"step": 3970
},
{
"epoch": 7.85,
"learning_rate": 1.1912604303908651e-05,
"loss": 0.2037,
"step": 3975
},
{
"epoch": 7.86,
"learning_rate": 1.1857707509881423e-05,
"loss": 0.179,
"step": 3980
},
{
"epoch": 7.87,
"learning_rate": 1.1802810715854194e-05,
"loss": 0.1779,
"step": 3985
},
{
"epoch": 7.88,
"learning_rate": 1.1747913921826965e-05,
"loss": 0.2029,
"step": 3990
},
{
"epoch": 7.89,
"learning_rate": 1.1693017127799736e-05,
"loss": 0.19,
"step": 3995
},
{
"epoch": 7.9,
"learning_rate": 1.1638120333772507e-05,
"loss": 0.2372,
"step": 4000
},
{
"epoch": 7.91,
"learning_rate": 1.1583223539745278e-05,
"loss": 0.1643,
"step": 4005
},
{
"epoch": 7.92,
"learning_rate": 1.152832674571805e-05,
"loss": 0.1755,
"step": 4010
},
{
"epoch": 7.93,
"learning_rate": 1.147342995169082e-05,
"loss": 0.175,
"step": 4015
},
{
"epoch": 7.94,
"learning_rate": 1.1418533157663592e-05,
"loss": 0.1989,
"step": 4020
},
{
"epoch": 7.95,
"learning_rate": 1.1363636363636365e-05,
"loss": 0.1953,
"step": 4025
},
{
"epoch": 7.96,
"learning_rate": 1.1308739569609136e-05,
"loss": 0.2101,
"step": 4030
},
{
"epoch": 7.97,
"learning_rate": 1.1253842775581907e-05,
"loss": 0.1894,
"step": 4035
},
{
"epoch": 7.98,
"learning_rate": 1.1198945981554678e-05,
"loss": 0.2032,
"step": 4040
},
{
"epoch": 7.99,
"learning_rate": 1.114404918752745e-05,
"loss": 0.2322,
"step": 4045
},
{
"epoch": 8.0,
"learning_rate": 1.108915239350022e-05,
"loss": 0.2182,
"step": 4050
},
{
"epoch": 8.0,
"eval_accuracy": 0.9627932805775371,
"eval_loss": 0.11205828189849854,
"eval_runtime": 54.986,
"eval_samples_per_second": 130.997,
"eval_steps_per_second": 4.11,
"step": 4052
},
{
"epoch": 8.01,
"learning_rate": 1.1034255599472992e-05,
"loss": 0.1955,
"step": 4055
},
{
"epoch": 8.02,
"learning_rate": 1.0979358805445763e-05,
"loss": 0.1869,
"step": 4060
},
{
"epoch": 8.03,
"learning_rate": 1.0924462011418534e-05,
"loss": 0.1577,
"step": 4065
},
{
"epoch": 8.04,
"learning_rate": 1.0869565217391305e-05,
"loss": 0.1948,
"step": 4070
},
{
"epoch": 8.05,
"learning_rate": 1.0814668423364076e-05,
"loss": 0.1596,
"step": 4075
},
{
"epoch": 8.06,
"learning_rate": 1.0759771629336847e-05,
"loss": 0.1523,
"step": 4080
},
{
"epoch": 8.07,
"learning_rate": 1.0704874835309619e-05,
"loss": 0.1752,
"step": 4085
},
{
"epoch": 8.08,
"learning_rate": 1.064997804128239e-05,
"loss": 0.1703,
"step": 4090
},
{
"epoch": 8.08,
"learning_rate": 1.0595081247255161e-05,
"loss": 0.1974,
"step": 4095
},
{
"epoch": 8.09,
"learning_rate": 1.0540184453227932e-05,
"loss": 0.1792,
"step": 4100
},
{
"epoch": 8.1,
"learning_rate": 1.0485287659200703e-05,
"loss": 0.2019,
"step": 4105
},
{
"epoch": 8.11,
"learning_rate": 1.0430390865173474e-05,
"loss": 0.1521,
"step": 4110
},
{
"epoch": 8.12,
"learning_rate": 1.0375494071146246e-05,
"loss": 0.1872,
"step": 4115
},
{
"epoch": 8.13,
"learning_rate": 1.0320597277119017e-05,
"loss": 0.2074,
"step": 4120
},
{
"epoch": 8.14,
"learning_rate": 1.0265700483091788e-05,
"loss": 0.2105,
"step": 4125
},
{
"epoch": 8.15,
"learning_rate": 1.0210803689064559e-05,
"loss": 0.187,
"step": 4130
},
{
"epoch": 8.16,
"learning_rate": 1.015590689503733e-05,
"loss": 0.1517,
"step": 4135
},
{
"epoch": 8.17,
"learning_rate": 1.0101010101010101e-05,
"loss": 0.1845,
"step": 4140
},
{
"epoch": 8.18,
"learning_rate": 1.0046113306982873e-05,
"loss": 0.1887,
"step": 4145
},
{
"epoch": 8.19,
"learning_rate": 9.991216512955644e-06,
"loss": 0.1967,
"step": 4150
},
{
"epoch": 8.2,
"learning_rate": 9.936319718928415e-06,
"loss": 0.2092,
"step": 4155
},
{
"epoch": 8.21,
"learning_rate": 9.881422924901186e-06,
"loss": 0.1819,
"step": 4160
},
{
"epoch": 8.22,
"learning_rate": 9.826526130873957e-06,
"loss": 0.1875,
"step": 4165
},
{
"epoch": 8.23,
"learning_rate": 9.771629336846728e-06,
"loss": 0.1732,
"step": 4170
},
{
"epoch": 8.24,
"learning_rate": 9.7167325428195e-06,
"loss": 0.1676,
"step": 4175
},
{
"epoch": 8.25,
"learning_rate": 9.66183574879227e-06,
"loss": 0.144,
"step": 4180
},
{
"epoch": 8.26,
"learning_rate": 9.606938954765042e-06,
"loss": 0.1654,
"step": 4185
},
{
"epoch": 8.27,
"learning_rate": 9.552042160737813e-06,
"loss": 0.1777,
"step": 4190
},
{
"epoch": 8.28,
"learning_rate": 9.497145366710584e-06,
"loss": 0.1889,
"step": 4195
},
{
"epoch": 8.29,
"learning_rate": 9.442248572683355e-06,
"loss": 0.1645,
"step": 4200
},
{
"epoch": 8.3,
"learning_rate": 9.387351778656126e-06,
"loss": 0.1766,
"step": 4205
},
{
"epoch": 8.31,
"learning_rate": 9.332454984628898e-06,
"loss": 0.2061,
"step": 4210
},
{
"epoch": 8.32,
"learning_rate": 9.277558190601669e-06,
"loss": 0.1927,
"step": 4215
},
{
"epoch": 8.33,
"learning_rate": 9.22266139657444e-06,
"loss": 0.1661,
"step": 4220
},
{
"epoch": 8.34,
"learning_rate": 9.167764602547211e-06,
"loss": 0.1278,
"step": 4225
},
{
"epoch": 8.35,
"learning_rate": 9.112867808519982e-06,
"loss": 0.2137,
"step": 4230
},
{
"epoch": 8.36,
"learning_rate": 9.057971014492753e-06,
"loss": 0.1508,
"step": 4235
},
{
"epoch": 8.37,
"learning_rate": 9.003074220465525e-06,
"loss": 0.1858,
"step": 4240
},
{
"epoch": 8.38,
"learning_rate": 8.948177426438296e-06,
"loss": 0.1492,
"step": 4245
},
{
"epoch": 8.39,
"learning_rate": 8.893280632411067e-06,
"loss": 0.1798,
"step": 4250
},
{
"epoch": 8.4,
"learning_rate": 8.838383838383838e-06,
"loss": 0.1869,
"step": 4255
},
{
"epoch": 8.41,
"learning_rate": 8.78348704435661e-06,
"loss": 0.194,
"step": 4260
},
{
"epoch": 8.42,
"learning_rate": 8.728590250329382e-06,
"loss": 0.1675,
"step": 4265
},
{
"epoch": 8.43,
"learning_rate": 8.673693456302153e-06,
"loss": 0.1853,
"step": 4270
},
{
"epoch": 8.44,
"learning_rate": 8.618796662274924e-06,
"loss": 0.1924,
"step": 4275
},
{
"epoch": 8.45,
"learning_rate": 8.563899868247696e-06,
"loss": 0.1622,
"step": 4280
},
{
"epoch": 8.46,
"learning_rate": 8.509003074220467e-06,
"loss": 0.1674,
"step": 4285
},
{
"epoch": 8.47,
"learning_rate": 8.454106280193238e-06,
"loss": 0.2292,
"step": 4290
},
{
"epoch": 8.48,
"learning_rate": 8.399209486166009e-06,
"loss": 0.155,
"step": 4295
},
{
"epoch": 8.49,
"learning_rate": 8.34431269213878e-06,
"loss": 0.1696,
"step": 4300
},
{
"epoch": 8.5,
"learning_rate": 8.289415898111551e-06,
"loss": 0.1853,
"step": 4305
},
{
"epoch": 8.51,
"learning_rate": 8.234519104084322e-06,
"loss": 0.1972,
"step": 4310
},
{
"epoch": 8.52,
"learning_rate": 8.179622310057094e-06,
"loss": 0.1902,
"step": 4315
},
{
"epoch": 8.53,
"learning_rate": 8.124725516029865e-06,
"loss": 0.1912,
"step": 4320
},
{
"epoch": 8.54,
"learning_rate": 8.069828722002636e-06,
"loss": 0.1613,
"step": 4325
},
{
"epoch": 8.55,
"learning_rate": 8.014931927975407e-06,
"loss": 0.1898,
"step": 4330
},
{
"epoch": 8.56,
"learning_rate": 7.960035133948178e-06,
"loss": 0.1751,
"step": 4335
},
{
"epoch": 8.57,
"learning_rate": 7.90513833992095e-06,
"loss": 0.1526,
"step": 4340
},
{
"epoch": 8.58,
"learning_rate": 7.85024154589372e-06,
"loss": 0.2213,
"step": 4345
},
{
"epoch": 8.59,
"learning_rate": 7.795344751866492e-06,
"loss": 0.1745,
"step": 4350
},
{
"epoch": 8.6,
"learning_rate": 7.740447957839263e-06,
"loss": 0.127,
"step": 4355
},
{
"epoch": 8.61,
"learning_rate": 7.685551163812034e-06,
"loss": 0.2031,
"step": 4360
},
{
"epoch": 8.62,
"learning_rate": 7.630654369784805e-06,
"loss": 0.166,
"step": 4365
},
{
"epoch": 8.63,
"learning_rate": 7.5757575757575764e-06,
"loss": 0.2043,
"step": 4370
},
{
"epoch": 8.64,
"learning_rate": 7.520860781730348e-06,
"loss": 0.1654,
"step": 4375
},
{
"epoch": 8.65,
"learning_rate": 7.465963987703119e-06,
"loss": 0.1949,
"step": 4380
},
{
"epoch": 8.66,
"learning_rate": 7.41106719367589e-06,
"loss": 0.175,
"step": 4385
},
{
"epoch": 8.67,
"learning_rate": 7.356170399648661e-06,
"loss": 0.1738,
"step": 4390
},
{
"epoch": 8.68,
"learning_rate": 7.301273605621432e-06,
"loss": 0.2214,
"step": 4395
},
{
"epoch": 8.69,
"learning_rate": 7.246376811594203e-06,
"loss": 0.2118,
"step": 4400
},
{
"epoch": 8.7,
"learning_rate": 7.1914800175669746e-06,
"loss": 0.1652,
"step": 4405
},
{
"epoch": 8.71,
"learning_rate": 7.136583223539746e-06,
"loss": 0.1742,
"step": 4410
},
{
"epoch": 8.72,
"learning_rate": 7.081686429512517e-06,
"loss": 0.1625,
"step": 4415
},
{
"epoch": 8.73,
"learning_rate": 7.026789635485288e-06,
"loss": 0.184,
"step": 4420
},
{
"epoch": 8.74,
"learning_rate": 6.971892841458059e-06,
"loss": 0.1613,
"step": 4425
},
{
"epoch": 8.75,
"learning_rate": 6.91699604743083e-06,
"loss": 0.1845,
"step": 4430
},
{
"epoch": 8.76,
"learning_rate": 6.8620992534036015e-06,
"loss": 0.1727,
"step": 4435
},
{
"epoch": 8.77,
"learning_rate": 6.807202459376373e-06,
"loss": 0.2103,
"step": 4440
},
{
"epoch": 8.78,
"learning_rate": 6.752305665349145e-06,
"loss": 0.1985,
"step": 4445
},
{
"epoch": 8.79,
"learning_rate": 6.697408871321916e-06,
"loss": 0.1945,
"step": 4450
},
{
"epoch": 8.8,
"learning_rate": 6.642512077294687e-06,
"loss": 0.2077,
"step": 4455
},
{
"epoch": 8.81,
"learning_rate": 6.587615283267458e-06,
"loss": 0.1952,
"step": 4460
},
{
"epoch": 8.82,
"learning_rate": 6.532718489240229e-06,
"loss": 0.1703,
"step": 4465
},
{
"epoch": 8.83,
"learning_rate": 6.4778216952130005e-06,
"loss": 0.1445,
"step": 4470
},
{
"epoch": 8.84,
"learning_rate": 6.422924901185772e-06,
"loss": 0.1641,
"step": 4475
},
{
"epoch": 8.85,
"learning_rate": 6.368028107158543e-06,
"loss": 0.1948,
"step": 4480
},
{
"epoch": 8.85,
"learning_rate": 6.313131313131314e-06,
"loss": 0.2044,
"step": 4485
},
{
"epoch": 8.86,
"learning_rate": 6.258234519104085e-06,
"loss": 0.1778,
"step": 4490
},
{
"epoch": 8.87,
"learning_rate": 6.2033377250768554e-06,
"loss": 0.1652,
"step": 4495
},
{
"epoch": 8.88,
"learning_rate": 6.148440931049627e-06,
"loss": 0.1929,
"step": 4500
},
{
"epoch": 8.89,
"learning_rate": 6.093544137022398e-06,
"loss": 0.1634,
"step": 4505
},
{
"epoch": 8.9,
"learning_rate": 6.038647342995169e-06,
"loss": 0.1826,
"step": 4510
},
{
"epoch": 8.91,
"learning_rate": 5.98375054896794e-06,
"loss": 0.154,
"step": 4515
},
{
"epoch": 8.92,
"learning_rate": 5.928853754940711e-06,
"loss": 0.1618,
"step": 4520
},
{
"epoch": 8.93,
"learning_rate": 5.873956960913482e-06,
"loss": 0.1726,
"step": 4525
},
{
"epoch": 8.94,
"learning_rate": 5.8190601668862536e-06,
"loss": 0.1768,
"step": 4530
},
{
"epoch": 8.95,
"learning_rate": 5.764163372859025e-06,
"loss": 0.2005,
"step": 4535
},
{
"epoch": 8.96,
"learning_rate": 5.709266578831796e-06,
"loss": 0.1859,
"step": 4540
},
{
"epoch": 8.97,
"learning_rate": 5.654369784804568e-06,
"loss": 0.1796,
"step": 4545
},
{
"epoch": 8.98,
"learning_rate": 5.599472990777339e-06,
"loss": 0.1612,
"step": 4550
},
{
"epoch": 8.99,
"learning_rate": 5.54457619675011e-06,
"loss": 0.1386,
"step": 4555
},
{
"epoch": 9.0,
"eval_accuracy": 0.9632097737054005,
"eval_loss": 0.11160053312778473,
"eval_runtime": 58.5989,
"eval_samples_per_second": 122.92,
"eval_steps_per_second": 3.857,
"step": 4558
},
{
"epoch": 9.0,
"learning_rate": 5.489679402722881e-06,
"loss": 0.2128,
"step": 4560
},
{
"epoch": 9.01,
"learning_rate": 5.4347826086956525e-06,
"loss": 0.1563,
"step": 4565
},
{
"epoch": 9.02,
"learning_rate": 5.379885814668424e-06,
"loss": 0.1825,
"step": 4570
},
{
"epoch": 9.03,
"learning_rate": 5.324989020641195e-06,
"loss": 0.182,
"step": 4575
},
{
"epoch": 9.04,
"learning_rate": 5.270092226613966e-06,
"loss": 0.1663,
"step": 4580
},
{
"epoch": 9.05,
"learning_rate": 5.215195432586737e-06,
"loss": 0.1732,
"step": 4585
},
{
"epoch": 9.06,
"learning_rate": 5.160298638559508e-06,
"loss": 0.1786,
"step": 4590
},
{
"epoch": 9.07,
"learning_rate": 5.1054018445322795e-06,
"loss": 0.2164,
"step": 4595
},
{
"epoch": 9.08,
"learning_rate": 5.050505050505051e-06,
"loss": 0.178,
"step": 4600
},
{
"epoch": 9.09,
"learning_rate": 4.995608256477822e-06,
"loss": 0.1657,
"step": 4605
},
{
"epoch": 9.1,
"learning_rate": 4.940711462450593e-06,
"loss": 0.1467,
"step": 4610
},
{
"epoch": 9.11,
"learning_rate": 4.885814668423364e-06,
"loss": 0.1992,
"step": 4615
},
{
"epoch": 9.12,
"learning_rate": 4.830917874396135e-06,
"loss": 0.133,
"step": 4620
},
{
"epoch": 9.13,
"learning_rate": 4.7760210803689065e-06,
"loss": 0.1862,
"step": 4625
},
{
"epoch": 9.14,
"learning_rate": 4.721124286341678e-06,
"loss": 0.1832,
"step": 4630
},
{
"epoch": 9.15,
"learning_rate": 4.666227492314449e-06,
"loss": 0.1787,
"step": 4635
},
{
"epoch": 9.16,
"learning_rate": 4.61133069828722e-06,
"loss": 0.1699,
"step": 4640
},
{
"epoch": 9.17,
"learning_rate": 4.556433904259991e-06,
"loss": 0.1749,
"step": 4645
},
{
"epoch": 9.18,
"learning_rate": 4.501537110232762e-06,
"loss": 0.1562,
"step": 4650
},
{
"epoch": 9.19,
"learning_rate": 4.4466403162055334e-06,
"loss": 0.1945,
"step": 4655
},
{
"epoch": 9.2,
"learning_rate": 4.391743522178305e-06,
"loss": 0.2123,
"step": 4660
},
{
"epoch": 9.21,
"learning_rate": 4.336846728151077e-06,
"loss": 0.2221,
"step": 4665
},
{
"epoch": 9.22,
"learning_rate": 4.281949934123848e-06,
"loss": 0.1847,
"step": 4670
},
{
"epoch": 9.23,
"learning_rate": 4.227053140096619e-06,
"loss": 0.1691,
"step": 4675
},
{
"epoch": 9.24,
"learning_rate": 4.17215634606939e-06,
"loss": 0.1916,
"step": 4680
},
{
"epoch": 9.25,
"learning_rate": 4.117259552042161e-06,
"loss": 0.1597,
"step": 4685
},
{
"epoch": 9.26,
"learning_rate": 4.062362758014932e-06,
"loss": 0.1646,
"step": 4690
},
{
"epoch": 9.27,
"learning_rate": 4.0074659639877036e-06,
"loss": 0.1751,
"step": 4695
},
{
"epoch": 9.28,
"learning_rate": 3.952569169960475e-06,
"loss": 0.1683,
"step": 4700
},
{
"epoch": 9.29,
"learning_rate": 3.897672375933246e-06,
"loss": 0.1601,
"step": 4705
},
{
"epoch": 9.3,
"learning_rate": 3.842775581906017e-06,
"loss": 0.1489,
"step": 4710
},
{
"epoch": 9.31,
"learning_rate": 3.7878787878787882e-06,
"loss": 0.2123,
"step": 4715
},
{
"epoch": 9.32,
"learning_rate": 3.7329819938515594e-06,
"loss": 0.1602,
"step": 4720
},
{
"epoch": 9.33,
"learning_rate": 3.6780851998243305e-06,
"loss": 0.1837,
"step": 4725
},
{
"epoch": 9.34,
"learning_rate": 3.6231884057971017e-06,
"loss": 0.1439,
"step": 4730
},
{
"epoch": 9.35,
"learning_rate": 3.568291611769873e-06,
"loss": 0.1614,
"step": 4735
},
{
"epoch": 9.36,
"learning_rate": 3.513394817742644e-06,
"loss": 0.1503,
"step": 4740
},
{
"epoch": 9.37,
"learning_rate": 3.458498023715415e-06,
"loss": 0.1613,
"step": 4745
},
{
"epoch": 9.38,
"learning_rate": 3.4036012296881863e-06,
"loss": 0.171,
"step": 4750
},
{
"epoch": 9.39,
"learning_rate": 3.348704435660958e-06,
"loss": 0.1502,
"step": 4755
},
{
"epoch": 9.4,
"learning_rate": 3.293807641633729e-06,
"loss": 0.1802,
"step": 4760
},
{
"epoch": 9.41,
"learning_rate": 3.2389108476065002e-06,
"loss": 0.197,
"step": 4765
},
{
"epoch": 9.42,
"learning_rate": 3.1840140535792714e-06,
"loss": 0.2024,
"step": 4770
},
{
"epoch": 9.43,
"learning_rate": 3.1291172595520426e-06,
"loss": 0.1499,
"step": 4775
},
{
"epoch": 9.44,
"learning_rate": 3.0742204655248133e-06,
"loss": 0.1802,
"step": 4780
},
{
"epoch": 9.45,
"learning_rate": 3.0193236714975845e-06,
"loss": 0.1758,
"step": 4785
},
{
"epoch": 9.46,
"learning_rate": 2.9644268774703556e-06,
"loss": 0.1668,
"step": 4790
},
{
"epoch": 9.47,
"learning_rate": 2.9095300834431268e-06,
"loss": 0.152,
"step": 4795
},
{
"epoch": 9.48,
"learning_rate": 2.854633289415898e-06,
"loss": 0.2262,
"step": 4800
},
{
"epoch": 9.49,
"learning_rate": 2.7997364953886695e-06,
"loss": 0.1751,
"step": 4805
},
{
"epoch": 9.5,
"learning_rate": 2.7448397013614407e-06,
"loss": 0.1517,
"step": 4810
},
{
"epoch": 9.51,
"learning_rate": 2.689942907334212e-06,
"loss": 0.1989,
"step": 4815
},
{
"epoch": 9.52,
"learning_rate": 2.635046113306983e-06,
"loss": 0.1467,
"step": 4820
},
{
"epoch": 9.53,
"learning_rate": 2.580149319279754e-06,
"loss": 0.2142,
"step": 4825
},
{
"epoch": 9.54,
"learning_rate": 2.5252525252525253e-06,
"loss": 0.1735,
"step": 4830
},
{
"epoch": 9.55,
"learning_rate": 2.4703557312252965e-06,
"loss": 0.162,
"step": 4835
},
{
"epoch": 9.56,
"learning_rate": 2.4154589371980677e-06,
"loss": 0.1672,
"step": 4840
},
{
"epoch": 9.57,
"learning_rate": 2.360562143170839e-06,
"loss": 0.1711,
"step": 4845
},
{
"epoch": 9.58,
"learning_rate": 2.30566534914361e-06,
"loss": 0.1538,
"step": 4850
},
{
"epoch": 9.59,
"learning_rate": 2.250768555116381e-06,
"loss": 0.1608,
"step": 4855
},
{
"epoch": 9.6,
"learning_rate": 2.1958717610891523e-06,
"loss": 0.1446,
"step": 4860
},
{
"epoch": 9.61,
"learning_rate": 2.140974967061924e-06,
"loss": 0.1996,
"step": 4865
},
{
"epoch": 9.62,
"learning_rate": 2.086078173034695e-06,
"loss": 0.1485,
"step": 4870
},
{
"epoch": 9.62,
"learning_rate": 2.031181379007466e-06,
"loss": 0.1344,
"step": 4875
},
{
"epoch": 9.63,
"learning_rate": 1.9762845849802374e-06,
"loss": 0.1858,
"step": 4880
},
{
"epoch": 9.64,
"learning_rate": 1.9213877909530085e-06,
"loss": 0.1693,
"step": 4885
},
{
"epoch": 9.65,
"learning_rate": 1.8664909969257797e-06,
"loss": 0.1593,
"step": 4890
},
{
"epoch": 9.66,
"learning_rate": 1.8115942028985508e-06,
"loss": 0.148,
"step": 4895
},
{
"epoch": 9.67,
"learning_rate": 1.756697408871322e-06,
"loss": 0.1914,
"step": 4900
},
{
"epoch": 9.68,
"learning_rate": 1.7018006148440932e-06,
"loss": 0.1972,
"step": 4905
},
{
"epoch": 9.69,
"learning_rate": 1.6469038208168645e-06,
"loss": 0.1735,
"step": 4910
},
{
"epoch": 9.7,
"learning_rate": 1.5920070267896357e-06,
"loss": 0.1383,
"step": 4915
},
{
"epoch": 9.71,
"learning_rate": 1.5371102327624067e-06,
"loss": 0.154,
"step": 4920
},
{
"epoch": 9.72,
"learning_rate": 1.4822134387351778e-06,
"loss": 0.177,
"step": 4925
},
{
"epoch": 9.73,
"learning_rate": 1.427316644707949e-06,
"loss": 0.1495,
"step": 4930
},
{
"epoch": 9.74,
"learning_rate": 1.3724198506807203e-06,
"loss": 0.1647,
"step": 4935
},
{
"epoch": 9.75,
"learning_rate": 1.3175230566534915e-06,
"loss": 0.2,
"step": 4940
},
{
"epoch": 9.76,
"learning_rate": 1.2626262626262627e-06,
"loss": 0.1462,
"step": 4945
},
{
"epoch": 9.77,
"learning_rate": 1.2077294685990338e-06,
"loss": 0.1849,
"step": 4950
},
{
"epoch": 9.78,
"learning_rate": 1.152832674571805e-06,
"loss": 0.1777,
"step": 4955
},
{
"epoch": 9.79,
"learning_rate": 1.0979358805445761e-06,
"loss": 0.1825,
"step": 4960
},
{
"epoch": 9.8,
"learning_rate": 1.0430390865173475e-06,
"loss": 0.1632,
"step": 4965
},
{
"epoch": 9.81,
"learning_rate": 9.881422924901187e-07,
"loss": 0.1766,
"step": 4970
},
{
"epoch": 9.82,
"learning_rate": 9.332454984628898e-07,
"loss": 0.1574,
"step": 4975
},
{
"epoch": 9.83,
"learning_rate": 8.78348704435661e-07,
"loss": 0.1575,
"step": 4980
},
{
"epoch": 9.84,
"learning_rate": 8.234519104084323e-07,
"loss": 0.1609,
"step": 4985
},
{
"epoch": 9.85,
"learning_rate": 7.685551163812033e-07,
"loss": 0.1792,
"step": 4990
},
{
"epoch": 9.86,
"learning_rate": 7.136583223539745e-07,
"loss": 0.1683,
"step": 4995
},
{
"epoch": 9.87,
"learning_rate": 6.587615283267458e-07,
"loss": 0.1519,
"step": 5000
},
{
"epoch": 9.88,
"learning_rate": 6.038647342995169e-07,
"loss": 0.1568,
"step": 5005
},
{
"epoch": 9.89,
"learning_rate": 5.489679402722881e-07,
"loss": 0.2216,
"step": 5010
},
{
"epoch": 9.9,
"learning_rate": 4.940711462450593e-07,
"loss": 0.2022,
"step": 5015
},
{
"epoch": 9.91,
"learning_rate": 4.391743522178305e-07,
"loss": 0.148,
"step": 5020
},
{
"epoch": 9.92,
"learning_rate": 3.8427755819060166e-07,
"loss": 0.1836,
"step": 5025
},
{
"epoch": 9.93,
"learning_rate": 3.293807641633729e-07,
"loss": 0.1551,
"step": 5030
},
{
"epoch": 9.94,
"learning_rate": 2.7448397013614404e-07,
"loss": 0.166,
"step": 5035
},
{
"epoch": 9.95,
"learning_rate": 2.1958717610891525e-07,
"loss": 0.1526,
"step": 5040
},
{
"epoch": 9.96,
"learning_rate": 1.6469038208168644e-07,
"loss": 0.1785,
"step": 5045
},
{
"epoch": 9.97,
"learning_rate": 1.0979358805445763e-07,
"loss": 0.1854,
"step": 5050
},
{
"epoch": 9.98,
"learning_rate": 5.489679402722881e-08,
"loss": 0.1539,
"step": 5055
},
{
"epoch": 9.99,
"learning_rate": 0.0,
"loss": 0.1466,
"step": 5060
},
{
"epoch": 9.99,
"eval_accuracy": 0.9644592530889907,
"eval_loss": 0.10692168027162552,
"eval_runtime": 55.2553,
"eval_samples_per_second": 130.359,
"eval_steps_per_second": 4.09,
"step": 5060
},
{ |
|
"epoch": 9.99, |
|
"step": 5060, |
|
"total_flos": 1.6098284322345738e+19, |
|
"train_loss": 0.31308594611086865, |
|
"train_runtime": 11218.6126, |
|
"train_samples_per_second": 57.784, |
|
"train_steps_per_second": 0.451 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 5060, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"total_flos": 1.6098284322345738e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
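The JSON above is the tail of a trainer_state.json file written by the Hugging Face transformers Trainer for the swin-tiny image-orientation fine-tune. As a usage illustration only (not part of the original file), the following minimal Python sketch shows one way to load the state and summarize the run; the file path "trainer_state.json", the key-based filtering of log entries, the output filename, and the optional matplotlib plot are all assumptions for demonstration.

# A minimal sketch, assuming the file is saved as "trainer_state.json".
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step training entries carry a "loss" key; the evaluation entry
# carries "eval_*" keys; the final summary entry carries "train_*" keys.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_accuracy" in e]

print(f"best metric: {state['best_metric']:.4f}")
print(f"final train loss (step {train_log[-1]['step']}): {train_log[-1]['loss']}")
if eval_log:
    last_eval = eval_log[-1]
    print(f"eval accuracy at step {last_eval['step']}: {last_eval['eval_accuracy']:.4f}")

# Optional: plot the loss curve against the linearly decaying learning-rate
# schedule (requires matplotlib; skipped if it is not installed).
try:
    import matplotlib.pyplot as plt

    steps = [e["step"] for e in train_log]
    fig, ax1 = plt.subplots()
    ax1.plot(steps, [e["loss"] for e in train_log], label="train loss")
    ax1.set_xlabel("step")
    ax1.set_ylabel("loss")
    ax2 = ax1.twinx()  # second y-axis for the learning rate
    ax2.plot(steps, [e["learning_rate"] for e in train_log], color="orange")
    ax2.set_ylabel("learning rate")
    fig.savefig("training_curve.png")
except ImportError:
    pass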