{
"best_metric": 0.9086,
"best_model_checkpoint": "./results/checkpoint-4680",
"epoch": 9.9984,
"global_step": 4680,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.344017094017094e-05, |
|
"loss": 0.5116, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.450854700854701e-05, |
|
"loss": 0.4826, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.557692307692308e-05, |
|
"loss": 0.4609, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.664529914529915e-05, |
|
"loss": 0.4828, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.771367521367522e-05, |
|
"loss": 0.515, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.878205128205129e-05, |
|
"loss": 0.4694, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.985042735042736e-05, |
|
"loss": 0.4676, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.091880341880342e-05, |
|
"loss": 0.4992, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.198717948717949e-05, |
|
"loss": 0.4541, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.305555555555556e-05, |
|
"loss": 0.4417, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.412393162393162e-05, |
|
"loss": 0.4958, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.519230769230769e-05, |
|
"loss": 0.5027, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.626068376068376e-05, |
|
"loss": 0.4531, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.732905982905983e-05, |
|
"loss": 0.4671, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.83974358974359e-05, |
|
"loss": 0.4696, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.946581196581197e-05, |
|
"loss": 0.4496, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.994064577397911e-05, |
|
"loss": 0.4481, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.982193732193732e-05, |
|
"loss": 0.4559, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.970322886989554e-05, |
|
"loss": 0.426, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.9584520417853754e-05, |
|
"loss": 0.4454, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.946581196581197e-05, |
|
"loss": 0.4364, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.934710351377018e-05, |
|
"loss": 0.4569, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.92283950617284e-05, |
|
"loss": 0.4139, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.9109686609686614e-05, |
|
"loss": 0.387, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.899097815764483e-05, |
|
"loss": 0.4149, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.887226970560304e-05, |
|
"loss": 0.4132, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.875356125356126e-05, |
|
"loss": 0.4662, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.863485280151947e-05, |
|
"loss": 0.4325, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.8516144349477684e-05, |
|
"loss": 0.4455, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.83974358974359e-05, |
|
"loss": 0.453, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.827872744539411e-05, |
|
"loss": 0.4062, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.816001899335233e-05, |
|
"loss": 0.4098, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.8041310541310545e-05, |
|
"loss": 0.442, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.792260208926876e-05, |
|
"loss": 0.426, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.780389363722697e-05, |
|
"loss": 0.4603, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.768518518518519e-05, |
|
"loss": 0.3436, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.75664767331434e-05, |
|
"loss": 0.4108, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.7447768281101615e-05, |
|
"loss": 0.3932, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.732905982905983e-05, |
|
"loss": 0.396, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.721035137701805e-05, |
|
"loss": 0.4101, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.709164292497626e-05, |
|
"loss": 0.4116, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.6972934472934475e-05, |
|
"loss": 0.4133, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.685422602089269e-05, |
|
"loss": 0.3968, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.673551756885091e-05, |
|
"loss": 0.3688, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.661680911680912e-05, |
|
"loss": 0.424, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.6498100664767335e-05, |
|
"loss": 0.3952, |
|
"step": 460 |
|
}, |
|
{
"epoch": 1.0,
"eval_accuracy": 0.8584,
"eval_loss": 0.3884398937225342,
"eval_runtime": 42.2861,
"eval_samples_per_second": 236.484,
"eval_steps_per_second": 7.402,
"step": 468
},
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6379392212725545e-05, |
|
"loss": 0.4125, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.626068376068376e-05, |
|
"loss": 0.3419, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.614197530864198e-05, |
|
"loss": 0.3792, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.6023266856600195e-05, |
|
"loss": 0.3655, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.5904558404558405e-05, |
|
"loss": 0.3539, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.578584995251662e-05, |
|
"loss": 0.3653, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.566714150047484e-05, |
|
"loss": 0.3749, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.554843304843305e-05, |
|
"loss": 0.3775, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.5429724596391265e-05, |
|
"loss": 0.3337, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.5311016144349475e-05, |
|
"loss": 0.3581, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.519230769230769e-05, |
|
"loss": 0.3829, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.507359924026591e-05, |
|
"loss": 0.3598, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.4954890788224125e-05, |
|
"loss": 0.3287, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.4836182336182335e-05, |
|
"loss": 0.3547, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.471747388414055e-05, |
|
"loss": 0.3787, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.459876543209877e-05, |
|
"loss": 0.3901, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.4480056980056986e-05, |
|
"loss": 0.3816, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.4361348528015196e-05, |
|
"loss": 0.3091, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.424264007597341e-05, |
|
"loss": 0.306, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.412393162393162e-05, |
|
"loss": 0.3591, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.400522317188984e-05, |
|
"loss": 0.3772, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.3886514719848056e-05, |
|
"loss": 0.3808, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.376780626780627e-05, |
|
"loss": 0.3741, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.364909781576448e-05, |
|
"loss": 0.3368, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.35303893637227e-05, |
|
"loss": 0.3816, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.3411680911680916e-05, |
|
"loss": 0.3531, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.329297245963913e-05, |
|
"loss": 0.3347, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.317426400759734e-05, |
|
"loss": 0.3551, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.305555555555556e-05, |
|
"loss": 0.3297, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.293684710351377e-05, |
|
"loss": 0.3055, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.281813865147199e-05, |
|
"loss": 0.3743, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.26994301994302e-05, |
|
"loss": 0.3774, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.258072174738841e-05, |
|
"loss": 0.349, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.246201329534663e-05, |
|
"loss": 0.3853, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.2343304843304846e-05, |
|
"loss": 0.3304, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.222459639126306e-05, |
|
"loss": 0.3238, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.210588793922127e-05, |
|
"loss": 0.3073, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.198717948717949e-05, |
|
"loss": 0.3225, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.18684710351377e-05, |
|
"loss": 0.3293, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.1749762583095916e-05, |
|
"loss": 0.3265, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.163105413105413e-05, |
|
"loss": 0.3882, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.151234567901235e-05, |
|
"loss": 0.3513, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.139363722697056e-05, |
|
"loss": 0.3394, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.1274928774928776e-05, |
|
"loss": 0.3552, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.115622032288699e-05, |
|
"loss": 0.3723, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.103751187084521e-05, |
|
"loss": 0.3605, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.091880341880342e-05, |
|
"loss": 0.3348, |
|
"step": 930 |
|
}, |
|
{
"epoch": 2.0,
"eval_accuracy": 0.8691,
"eval_loss": 0.36213183403015137,
"eval_runtime": 42.3927,
"eval_samples_per_second": 235.89,
"eval_steps_per_second": 7.383,
"step": 936
},
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.0800094966761637e-05, |
|
"loss": 0.3809, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.0681386514719847e-05, |
|
"loss": 0.3514, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.056267806267807e-05, |
|
"loss": 0.3182, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.044396961063628e-05, |
|
"loss": 0.3321, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.03252611585945e-05, |
|
"loss": 0.3473, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.020655270655271e-05, |
|
"loss": 0.3174, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.0087844254510923e-05, |
|
"loss": 0.3256, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.996913580246914e-05, |
|
"loss": 0.3478, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.985042735042736e-05, |
|
"loss": 0.3255, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.973171889838557e-05, |
|
"loss": 0.3322, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.961301044634378e-05, |
|
"loss": 0.3269, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.9494301994301994e-05, |
|
"loss": 0.3229, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.937559354226021e-05, |
|
"loss": 0.3673, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.925688509021843e-05, |
|
"loss": 0.273, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.913817663817664e-05, |
|
"loss": 0.3712, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 3.9019468186134854e-05, |
|
"loss": 0.3051, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 3.890075973409307e-05, |
|
"loss": 0.3494, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 3.878205128205129e-05, |
|
"loss": 0.3198, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 3.86633428300095e-05, |
|
"loss": 0.3397, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 3.8544634377967714e-05, |
|
"loss": 0.2907, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 3.8425925925925924e-05, |
|
"loss": 0.3441, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.830721747388414e-05, |
|
"loss": 0.3356, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.818850902184236e-05, |
|
"loss": 0.2924, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.8069800569800574e-05, |
|
"loss": 0.3179, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.7951092117758784e-05, |
|
"loss": 0.3183, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 3.7832383665717e-05, |
|
"loss": 0.3186, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 3.771367521367522e-05, |
|
"loss": 0.3192, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 3.7594966761633434e-05, |
|
"loss": 0.3431, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 3.7476258309591644e-05, |
|
"loss": 0.2914, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 3.735754985754986e-05, |
|
"loss": 0.2794, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 3.723884140550807e-05, |
|
"loss": 0.2934, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 3.7120132953466294e-05, |
|
"loss": 0.3627, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 3.7001424501424504e-05, |
|
"loss": 0.3087, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 3.6882716049382714e-05, |
|
"loss": 0.3198, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 3.676400759734093e-05, |
|
"loss": 0.3219, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.664529914529915e-05, |
|
"loss": 0.2951, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6526590693257364e-05, |
|
"loss": 0.3525, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.6407882241215574e-05, |
|
"loss": 0.3241, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.628917378917379e-05, |
|
"loss": 0.3315, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.6170465337132e-05, |
|
"loss": 0.3155, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.605175688509022e-05, |
|
"loss": 0.291, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.5933048433048435e-05, |
|
"loss": 0.3543, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.581433998100665e-05, |
|
"loss": 0.3039, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.569563152896486e-05, |
|
"loss": 0.3036, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.557692307692308e-05, |
|
"loss": 0.3079, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.5458214624881295e-05, |
|
"loss": 0.297, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.533950617283951e-05, |
|
"loss": 0.2632, |
|
"step": 1400 |
|
}, |
|
{
"epoch": 3.0,
"eval_accuracy": 0.882,
"eval_loss": 0.33032089471817017,
"eval_runtime": 42.3115,
"eval_samples_per_second": 236.343,
"eval_steps_per_second": 7.398,
"step": 1404
},
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.522079772079772e-05, |
|
"loss": 0.3317, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.510208926875594e-05, |
|
"loss": 0.3198, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 3.498338081671415e-05, |
|
"loss": 0.2977, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.486467236467237e-05, |
|
"loss": 0.3448, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.474596391263058e-05, |
|
"loss": 0.2444, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 3.46272554605888e-05, |
|
"loss": 0.289, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 3.450854700854701e-05, |
|
"loss": 0.3191, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 3.4389838556505225e-05, |
|
"loss": 0.314, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.427113010446344e-05, |
|
"loss": 0.3063, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.415242165242166e-05, |
|
"loss": 0.3068, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.403371320037987e-05, |
|
"loss": 0.307, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.391500474833808e-05, |
|
"loss": 0.293, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.3796296296296295e-05, |
|
"loss": 0.3224, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.367758784425451e-05, |
|
"loss": 0.2709, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.355887939221273e-05, |
|
"loss": 0.266, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.344017094017094e-05, |
|
"loss": 0.2894, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.3321462488129155e-05, |
|
"loss": 0.2826, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.320275403608737e-05, |
|
"loss": 0.3215, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.308404558404559e-05, |
|
"loss": 0.2729, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.29653371320038e-05, |
|
"loss": 0.2911, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 3.2846628679962015e-05, |
|
"loss": 0.3218, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 3.2727920227920225e-05, |
|
"loss": 0.2757, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 3.260921177587844e-05, |
|
"loss": 0.3144, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.249050332383666e-05, |
|
"loss": 0.2541, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.2371794871794876e-05, |
|
"loss": 0.2588, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 3.2253086419753086e-05, |
|
"loss": 0.2614, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.21343779677113e-05, |
|
"loss": 0.3001, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.201566951566952e-05, |
|
"loss": 0.2864, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.1896961063627736e-05, |
|
"loss": 0.3021, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.1778252611585946e-05, |
|
"loss": 0.2616, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.165954415954416e-05, |
|
"loss": 0.249, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.154083570750237e-05, |
|
"loss": 0.3208, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.1422127255460596e-05, |
|
"loss": 0.263, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.1303418803418806e-05, |
|
"loss": 0.2769, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.118471035137702e-05, |
|
"loss": 0.3022, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.106600189933523e-05, |
|
"loss": 0.2739, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.094729344729345e-05, |
|
"loss": 0.311, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.0828584995251666e-05, |
|
"loss": 0.2914, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.0709876543209876e-05, |
|
"loss": 0.2774, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.059116809116809e-05, |
|
"loss": 0.3084, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.0472459639126306e-05, |
|
"loss": 0.3092, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 3.0353751187084523e-05, |
|
"loss": 0.3016, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.0235042735042736e-05, |
|
"loss": 0.2995, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.0116334283000953e-05, |
|
"loss": 0.2761, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.9997625830959163e-05, |
|
"loss": 0.2694, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.9878917378917383e-05, |
|
"loss": 0.3259, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.9760208926875593e-05, |
|
"loss": 0.3201, |
|
"step": 1870 |
|
}, |
|
{
"epoch": 4.0,
"eval_accuracy": 0.8946,
"eval_loss": 0.29688265919685364,
"eval_runtime": 42.699,
"eval_samples_per_second": 234.197,
"eval_steps_per_second": 7.33,
"step": 1872
},
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 2.964150047483381e-05, |
|
"loss": 0.3168, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.9522792022792023e-05, |
|
"loss": 0.2752, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.940408357075024e-05, |
|
"loss": 0.2339, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 2.9285375118708453e-05, |
|
"loss": 0.2997, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 2.916666666666667e-05, |
|
"loss": 0.2515, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 2.9047958214624883e-05, |
|
"loss": 0.2478, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 2.89292497625831e-05, |
|
"loss": 0.3037, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 2.881054131054131e-05, |
|
"loss": 0.3028, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 2.869183285849953e-05, |
|
"loss": 0.2785, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 2.857312440645774e-05, |
|
"loss": 0.2959, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 2.845441595441596e-05, |
|
"loss": 0.2934, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 2.833570750237417e-05, |
|
"loss": 0.2764, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 2.8216999050332387e-05, |
|
"loss": 0.2965, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 2.80982905982906e-05, |
|
"loss": 0.2902, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 2.7979582146248813e-05, |
|
"loss": 0.2761, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 2.786087369420703e-05, |
|
"loss": 0.2683, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 2.774216524216524e-05, |
|
"loss": 0.2646, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 2.762345679012346e-05, |
|
"loss": 0.2721, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 2.750474833808167e-05, |
|
"loss": 0.2621, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 2.7386039886039887e-05, |
|
"loss": 0.2512, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 2.72673314339981e-05, |
|
"loss": 0.2847, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 2.7148622981956317e-05, |
|
"loss": 0.271, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 2.702991452991453e-05, |
|
"loss": 0.2589, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 2.6911206077872747e-05, |
|
"loss": 0.2466, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 2.679249762583096e-05, |
|
"loss": 0.2518, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 2.6673789173789177e-05, |
|
"loss": 0.3062, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 2.6555080721747387e-05, |
|
"loss": 0.2562, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.6436372269705607e-05, |
|
"loss": 0.2557, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.6317663817663817e-05, |
|
"loss": 0.2822, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.6198955365622034e-05, |
|
"loss": 0.2695, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.6080246913580247e-05, |
|
"loss": 0.2642, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.5961538461538464e-05, |
|
"loss": 0.2701, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.5842830009496677e-05, |
|
"loss": 0.2606, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 2.5724121557454894e-05, |
|
"loss": 0.2426, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 2.5605413105413107e-05, |
|
"loss": 0.2631, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 2.5486704653371324e-05, |
|
"loss": 0.2542, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 2.5367996201329537e-05, |
|
"loss": 0.2673, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 2.5249287749287747e-05, |
|
"loss": 0.2668, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.5130579297245964e-05, |
|
"loss": 0.2886, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.5011870845204177e-05, |
|
"loss": 0.263, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 2.4893162393162394e-05, |
|
"loss": 0.2513, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.477445394112061e-05, |
|
"loss": 0.2672, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 2.4655745489078824e-05, |
|
"loss": 0.2367, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 2.4537037037037038e-05, |
|
"loss": 0.2684, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.4418328584995254e-05, |
|
"loss": 0.2861, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.4299620132953468e-05, |
|
"loss": 0.2599, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.4180911680911684e-05, |
|
"loss": 0.2428, |
|
"step": 2340 |
|
}, |
|
{
"epoch": 5.0,
"eval_accuracy": 0.8933,
"eval_loss": 0.2944689691066742,
"eval_runtime": 42.2523,
"eval_samples_per_second": 236.673,
"eval_steps_per_second": 7.408,
"step": 2340
},
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 2.4062203228869898e-05, |
|
"loss": 0.2381, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 2.394349477682811e-05, |
|
"loss": 0.2344, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 2.3824786324786324e-05, |
|
"loss": 0.2607, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 2.3706077872744538e-05, |
|
"loss": 0.2201, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 2.3587369420702755e-05, |
|
"loss": 0.2867, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 2.3468660968660968e-05, |
|
"loss": 0.2694, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 2.3349952516619185e-05, |
|
"loss": 0.2258, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 2.3231244064577398e-05, |
|
"loss": 0.2523, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 2.311253561253561e-05, |
|
"loss": 0.2311, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 2.2993827160493828e-05, |
|
"loss": 0.2695, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 2.287511870845204e-05, |
|
"loss": 0.2417, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 2.2756410256410258e-05, |
|
"loss": 0.2969, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 2.263770180436847e-05, |
|
"loss": 0.2712, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 2.2518993352326688e-05, |
|
"loss": 0.256, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 2.24002849002849e-05, |
|
"loss": 0.2402, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 2.2281576448243115e-05, |
|
"loss": 0.2604, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 2.216286799620133e-05, |
|
"loss": 0.247, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 2.2044159544159545e-05, |
|
"loss": 0.274, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 2.1925451092117762e-05, |
|
"loss": 0.2573, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 2.1806742640075975e-05, |
|
"loss": 0.2785, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 2.168803418803419e-05, |
|
"loss": 0.2378, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 2.1569325735992405e-05, |
|
"loss": 0.2102, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 2.145061728395062e-05, |
|
"loss": 0.2627, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 2.1331908831908835e-05, |
|
"loss": 0.2672, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 2.121320037986705e-05, |
|
"loss": 0.2716, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 2.1094491927825262e-05, |
|
"loss": 0.244, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 2.097578347578348e-05, |
|
"loss": 0.2784, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 2.085707502374169e-05, |
|
"loss": 0.256, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 2.0738366571699905e-05, |
|
"loss": 0.261, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 2.061965811965812e-05, |
|
"loss": 0.2395, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 2.0500949667616335e-05, |
|
"loss": 0.2911, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 2.038224121557455e-05, |
|
"loss": 0.2633, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 2.0263532763532762e-05, |
|
"loss": 0.2364, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 2.014482431149098e-05, |
|
"loss": 0.273, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 2.0026115859449192e-05, |
|
"loss": 0.2546, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 1.990740740740741e-05, |
|
"loss": 0.25, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 1.9788698955365622e-05, |
|
"loss": 0.2913, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 1.966999050332384e-05, |
|
"loss": 0.2486, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 1.9551282051282052e-05, |
|
"loss": 0.2468, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 1.9432573599240266e-05, |
|
"loss": 0.2427, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 1.9313865147198482e-05, |
|
"loss": 0.2451, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 1.9195156695156696e-05, |
|
"loss": 0.2488, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 1.9076448243114913e-05, |
|
"loss": 0.2391, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 1.8957739791073126e-05, |
|
"loss": 0.2391, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 1.883903133903134e-05, |
|
"loss": 0.255, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 1.8720322886989556e-05, |
|
"loss": 0.2008, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.8953, |
|
"eval_loss": 0.2888832092285156, |
|
"eval_runtime": 41.9713, |
|
"eval_samples_per_second": 238.258, |
|
"eval_steps_per_second": 7.457, |
|
"step": 2808 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 1.860161443494777e-05, |
|
"loss": 0.2604, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.8482905982905986e-05, |
|
"loss": 0.2691, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 1.83641975308642e-05, |
|
"loss": 0.2085, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 1.8245489078822413e-05, |
|
"loss": 0.2429, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.812678062678063e-05, |
|
"loss": 0.249, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.800807217473884e-05, |
|
"loss": 0.2608, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 1.7889363722697056e-05, |
|
"loss": 0.2088, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.777065527065527e-05, |
|
"loss": 0.2382, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 1.7651946818613486e-05, |
|
"loss": 0.2389, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 1.75332383665717e-05, |
|
"loss": 0.2403, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 1.7414529914529913e-05, |
|
"loss": 0.2207, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 1.729582146248813e-05, |
|
"loss": 0.2641, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 1.7177113010446343e-05, |
|
"loss": 0.2301, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 1.705840455840456e-05, |
|
"loss": 0.2135, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 1.6939696106362773e-05, |
|
"loss": 0.2033, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 1.682098765432099e-05, |
|
"loss": 0.2382, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 1.6702279202279203e-05, |
|
"loss": 0.2336, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 1.6583570750237416e-05, |
|
"loss": 0.2248, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.6464862298195633e-05, |
|
"loss": 0.2411, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.6346153846153847e-05, |
|
"loss": 0.2421, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.6227445394112063e-05, |
|
"loss": 0.2169, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.6108736942070277e-05, |
|
"loss": 0.2248, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.599002849002849e-05, |
|
"loss": 0.2065, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.5871320037986707e-05, |
|
"loss": 0.242, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 1.575261158594492e-05, |
|
"loss": 0.281, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 1.5633903133903137e-05, |
|
"loss": 0.2757, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.551519468186135e-05, |
|
"loss": 0.2273, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.5396486229819563e-05, |
|
"loss": 0.2139, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.527777777777778e-05, |
|
"loss": 0.2429, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 1.5159069325735995e-05, |
|
"loss": 0.2349, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.5040360873694207e-05, |
|
"loss": 0.2232, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 1.4921652421652422e-05, |
|
"loss": 0.2326, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.4802943969610635e-05, |
|
"loss": 0.2384, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.468423551756885e-05, |
|
"loss": 0.238, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.4565527065527065e-05, |
|
"loss": 0.2324, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.444681861348528e-05, |
|
"loss": 0.2347, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.4328110161443495e-05, |
|
"loss": 0.2259, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.4209401709401709e-05, |
|
"loss": 0.2186, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.4090693257359924e-05, |
|
"loss": 0.2062, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.3971984805318139e-05, |
|
"loss": 0.2366, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.3853276353276354e-05, |
|
"loss": 0.2137, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.3734567901234569e-05, |
|
"loss": 0.248, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.3615859449192784e-05, |
|
"loss": 0.2154, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 1.3497150997150997e-05, |
|
"loss": 0.2273, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 1.3378442545109212e-05, |
|
"loss": 0.2286, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 1.3259734093067427e-05, |
|
"loss": 0.2281, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.3141025641025642e-05, |
|
"loss": 0.2439, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.902, |
|
"eval_loss": 0.2731182277202606, |
|
"eval_runtime": 41.8304, |
|
"eval_samples_per_second": 239.06, |
|
"eval_steps_per_second": 7.483, |
|
"step": 3276 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 1.3022317188983857e-05, |
|
"loss": 0.2641, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 1.290360873694207e-05, |
|
"loss": 0.2683, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 1.2784900284900286e-05, |
|
"loss": 0.2466, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 1.2666191832858501e-05, |
|
"loss": 0.2384, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 1.2547483380816716e-05, |
|
"loss": 0.2094, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 1.242877492877493e-05, |
|
"loss": 0.2172, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 1.2310066476733144e-05, |
|
"loss": 0.2357, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 1.219135802469136e-05, |
|
"loss": 0.2062, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 1.2072649572649573e-05, |
|
"loss": 0.2054, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 1.1953941120607788e-05, |
|
"loss": 0.2283, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 1.1835232668566003e-05, |
|
"loss": 0.1973, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 1.1716524216524218e-05, |
|
"loss": 0.2173, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 1.1597815764482433e-05, |
|
"loss": 0.2233, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 1.1479107312440646e-05, |
|
"loss": 0.2262, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 1.136039886039886e-05, |
|
"loss": 0.235, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 1.1241690408357075e-05, |
|
"loss": 0.2152, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 1.112298195631529e-05, |
|
"loss": 0.2014, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 1.1004273504273505e-05, |
|
"loss": 0.2182, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 1.088556505223172e-05, |
|
"loss": 0.2176, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 1.0766856600189935e-05, |
|
"loss": 0.2076, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 1.0648148148148148e-05, |
|
"loss": 0.2182, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 1.0529439696106363e-05, |
|
"loss": 0.2279, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 1.0410731244064578e-05, |
|
"loss": 0.2278, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 1.0292022792022793e-05, |
|
"loss": 0.2191, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 1.0173314339981008e-05, |
|
"loss": 0.2353, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 1.0054605887939222e-05, |
|
"loss": 0.243, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 9.935897435897435e-06, |
|
"loss": 0.198, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 9.81718898385565e-06, |
|
"loss": 0.1768, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 9.698480531813865e-06, |
|
"loss": 0.2213, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 9.57977207977208e-06, |
|
"loss": 0.2309, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 9.461063627730295e-06, |
|
"loss": 0.2018, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 9.34235517568851e-06, |
|
"loss": 0.2154, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 9.223646723646723e-06, |
|
"loss": 0.1891, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 9.104938271604939e-06, |
|
"loss": 0.2372, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 8.986229819563154e-06, |
|
"loss": 0.2471, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 8.867521367521369e-06, |
|
"loss": 0.2373, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 8.748812915479584e-06, |
|
"loss": 0.2021, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 8.630104463437797e-06, |
|
"loss": 0.2023, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 8.51139601139601e-06, |
|
"loss": 0.1933, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 8.392687559354225e-06, |
|
"loss": 0.2028, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 8.27397910731244e-06, |
|
"loss": 0.2253, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 8.155270655270655e-06, |
|
"loss": 0.2568, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 8.03656220322887e-06, |
|
"loss": 0.2159, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 7.917853751187086e-06, |
|
"loss": 0.2225, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 7.799145299145299e-06, |
|
"loss": 0.2232, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 7.680436847103514e-06, |
|
"loss": 0.1996, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 7.561728395061729e-06, |
|
"loss": 0.2099, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy": 0.9047, |
|
"eval_loss": 0.26033690571784973, |
|
"eval_runtime": 42.0488, |
|
"eval_samples_per_second": 237.819, |
|
"eval_steps_per_second": 7.444, |
|
"step": 3744 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 7.443019943019944e-06, |
|
"loss": 0.2122, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 7.324311490978158e-06, |
|
"loss": 0.2154, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 7.205603038936373e-06, |
|
"loss": 0.1813, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 7.0868945868945866e-06, |
|
"loss": 0.2202, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 6.968186134852802e-06, |
|
"loss": 0.2215, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 6.849477682811016e-06, |
|
"loss": 0.2242, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 6.730769230769231e-06, |
|
"loss": 0.1883, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 6.612060778727446e-06, |
|
"loss": 0.2232, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 6.49335232668566e-06, |
|
"loss": 0.2138, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 6.374643874643875e-06, |
|
"loss": 0.2093, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 6.255935422602089e-06, |
|
"loss": 0.2264, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 6.137226970560304e-06, |
|
"loss": 0.2072, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 6.0185185185185185e-06, |
|
"loss": 0.2372, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 5.8998100664767336e-06, |
|
"loss": 0.1983, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 5.781101614434948e-06, |
|
"loss": 0.1887, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 5.662393162393163e-06, |
|
"loss": 0.2393, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 5.543684710351377e-06, |
|
"loss": 0.2134, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 5.424976258309592e-06, |
|
"loss": 0.1844, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 5.306267806267807e-06, |
|
"loss": 0.1963, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 5.187559354226021e-06, |
|
"loss": 0.2066, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 5.0688509021842354e-06, |
|
"loss": 0.2074, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 4.9501424501424505e-06, |
|
"loss": 0.2209, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 4.8314339981006655e-06, |
|
"loss": 0.2151, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 4.71272554605888e-06, |
|
"loss": 0.2016, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 4.594017094017095e-06, |
|
"loss": 0.2167, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 4.475308641975309e-06, |
|
"loss": 0.2113, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 4.356600189933523e-06, |
|
"loss": 0.2023, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 4.237891737891738e-06, |
|
"loss": 0.1946, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 4.119183285849953e-06, |
|
"loss": 0.2053, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 4.000474833808167e-06, |
|
"loss": 0.2036, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 3.8817663817663825e-06, |
|
"loss": 0.2234, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 3.7630579297245962e-06, |
|
"loss": 0.2337, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 3.644349477682811e-06, |
|
"loss": 0.1812, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 3.525641025641026e-06, |
|
"loss": 0.2122, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 3.4069325735992405e-06, |
|
"loss": 0.2294, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 3.288224121557455e-06, |
|
"loss": 0.194, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 3.16951566951567e-06, |
|
"loss": 0.2044, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 3.0508072174738843e-06, |
|
"loss": 0.2095, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 2.932098765432099e-06, |
|
"loss": 0.1992, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 2.8133903133903136e-06, |
|
"loss": 0.2114, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 2.694681861348528e-06, |
|
"loss": 0.2198, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 2.575973409306743e-06, |
|
"loss": 0.2108, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 2.4572649572649574e-06, |
|
"loss": 0.2065, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 2.338556505223172e-06, |
|
"loss": 0.1917, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 2.2198480531813866e-06, |
|
"loss": 0.2246, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 2.1011396011396013e-06, |
|
"loss": 0.2241, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 1.982431149097816e-06, |
|
"loss": 0.2002, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy": 0.9076, |
|
"eval_loss": 0.26307567954063416, |
|
"eval_runtime": 41.9933, |
|
"eval_samples_per_second": 238.133, |
|
"eval_steps_per_second": 7.454, |
|
"step": 4212 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 1.8637226970560307e-06, |
|
"loss": 0.2029, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 1.7450142450142451e-06, |
|
"loss": 0.2027, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 1.6263057929724597e-06, |
|
"loss": 0.2285, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 1.5075973409306743e-06, |
|
"loss": 0.213, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 1.388888888888889e-06, |
|
"loss": 0.1922, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 1.2701804368471036e-06, |
|
"loss": 0.2462, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 1.1514719848053182e-06, |
|
"loss": 0.2236, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 1.0327635327635328e-06, |
|
"loss": 0.215, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 9.140550807217474e-07, |
|
"loss": 0.1831, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 7.95346628679962e-07, |
|
"loss": 0.2108, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 6.766381766381766e-07, |
|
"loss": 0.1701, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 5.579297245963913e-07, |
|
"loss": 0.1931, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 4.392212725546059e-07, |
|
"loss": 0.2082, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 3.205128205128205e-07, |
|
"loss": 0.2034, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 2.0180436847103516e-07, |
|
"loss": 0.1671, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 8.309591642924977e-08, |
|
"loss": 0.2223, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 0.0, |
|
"loss": 0.1798, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 0.0, |
|
"loss": 0.1942, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 0.0, |
|
"loss": 0.2108, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 0.0, |
|
"loss": 0.1951, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 0.0, |
|
"loss": 0.2197, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 0.0, |
|
"loss": 0.1848, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 0.0, |
|
"loss": 0.177, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 0.0, |
|
"loss": 0.1737, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 0.0, |
|
"loss": 0.1813, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 0.0, |
|
"loss": 0.1726, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 0.0, |
|
"loss": 0.1778, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 0.0, |
|
"loss": 0.2027, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 0.0, |
|
"loss": 0.2117, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 0.0, |
|
"loss": 0.2084, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 0.0, |
|
"loss": 0.2076, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 0.0, |
|
"loss": 0.1976, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 0.0, |
|
"loss": 0.2229, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 0.0, |
|
"loss": 0.2032, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 0.0, |
|
"loss": 0.2014, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 0.0, |
|
"loss": 0.1919, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 0.0, |
|
"loss": 0.2093, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 0.0, |
|
"loss": 0.208, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 0.0, |
|
"loss": 0.1757, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 0.0, |
|
"loss": 0.1822, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 0.0, |
|
"loss": 0.2009, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 0.0, |
|
"loss": 0.2252, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 0.0, |
|
"loss": 0.1827, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 0.0, |
|
"loss": 0.1731, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 0.0, |
|
"loss": 0.173, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 0.0, |
|
"loss": 0.197, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.193, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_accuracy": 0.9086, |
|
"eval_loss": 0.25825217366218567, |
|
"eval_runtime": 42.0394, |
|
"eval_samples_per_second": 237.872, |
|
"eval_steps_per_second": 7.445, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 4680, |
|
"total_flos": 4.650096471071824e+19, |
|
"train_loss": 0.279335702662794, |
|
"train_runtime": 6125.6563, |
|
"train_samples_per_second": 97.949, |
|
"train_steps_per_second": 0.764 |
|
} |
|
], |
|
"max_steps": 4680, |
|
"num_train_epochs": 10, |
|
"total_flos": 4.650096471071824e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|