|
{
  "best_metric": 1.0162557363510132,
  "best_model_checkpoint": "ai-light-dance_stepmania_ft_wav2vec2-large-xlsr-53-v5/checkpoint-2256",
  "epoch": 10.0,
  "global_step": 3760,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03, "learning_rate": 4.000000000000001e-06, "loss": 0.9198, "step": 10 },
    { "epoch": 0.05, "learning_rate": 8.000000000000001e-06, "loss": 0.879, "step": 20 },
    { "epoch": 0.08, "learning_rate": 1.2e-05, "loss": 0.8647, "step": 30 },
    { "epoch": 0.11, "learning_rate": 1.6000000000000003e-05, "loss": 0.8746, "step": 40 },
    { "epoch": 0.13, "learning_rate": 2e-05, "loss": 0.8716, "step": 50 },
    { "epoch": 0.16, "learning_rate": 2.4e-05, "loss": 0.9164, "step": 60 },
    { "epoch": 0.19, "learning_rate": 2.8e-05, "loss": 0.8862, "step": 70 },
    { "epoch": 0.21, "learning_rate": 3.2000000000000005e-05, "loss": 0.8818, "step": 80 },
    { "epoch": 0.24, "learning_rate": 3.6e-05, "loss": 0.8818, "step": 90 },
    { "epoch": 0.27, "learning_rate": 4e-05, "loss": 0.8894, "step": 100 },
    { "epoch": 0.29, "learning_rate": 3.989071038251366e-05, "loss": 0.9504, "step": 110 },
    { "epoch": 0.32, "learning_rate": 3.9781420765027323e-05, "loss": 0.8916, "step": 120 },
    { "epoch": 0.35, "learning_rate": 3.9672131147540983e-05, "loss": 0.8946, "step": 130 },
    { "epoch": 0.37, "learning_rate": 3.956284153005465e-05, "loss": 0.8959, "step": 140 },
    { "epoch": 0.4, "learning_rate": 3.945355191256831e-05, "loss": 0.8869, "step": 150 },
    { "epoch": 0.43, "learning_rate": 3.934426229508197e-05, "loss": 0.9374, "step": 160 },
    { "epoch": 0.45, "learning_rate": 3.923497267759563e-05, "loss": 0.9024, "step": 170 },
    { "epoch": 0.48, "learning_rate": 3.912568306010929e-05, "loss": 0.8983, "step": 180 },
    { "epoch": 0.51, "learning_rate": 3.901639344262295e-05, "loss": 0.8933, "step": 190 },
    { "epoch": 0.53, "learning_rate": 3.890710382513661e-05, "loss": 0.9064, "step": 200 },
    { "epoch": 0.56, "learning_rate": 3.879781420765028e-05, "loss": 0.9612, "step": 210 },
    { "epoch": 0.59, "learning_rate": 3.868852459016394e-05, "loss": 0.8932, "step": 220 },
    { "epoch": 0.61, "learning_rate": 3.85792349726776e-05, "loss": 0.9063, "step": 230 },
    { "epoch": 0.64, "learning_rate": 3.846994535519126e-05, "loss": 0.8773, "step": 240 },
    { "epoch": 0.66, "learning_rate": 3.836065573770492e-05, "loss": 0.8908, "step": 250 },
    { "epoch": 0.69, "learning_rate": 3.825136612021858e-05, "loss": 0.939, "step": 260 },
    { "epoch": 0.72, "learning_rate": 3.8142076502732245e-05, "loss": 0.8949, "step": 270 },
    { "epoch": 0.74, "learning_rate": 3.8032786885245905e-05, "loss": 0.8847, "step": 280 },
    { "epoch": 0.77, "learning_rate": 3.7923497267759565e-05, "loss": 0.8946, "step": 290 },
    { "epoch": 0.8, "learning_rate": 3.781420765027323e-05, "loss": 0.8865, "step": 300 },
    { "epoch": 0.82, "learning_rate": 3.770491803278689e-05, "loss": 0.9316, "step": 310 },
    { "epoch": 0.85, "learning_rate": 3.759562841530055e-05, "loss": 0.8822, "step": 320 },
    { "epoch": 0.88, "learning_rate": 3.748633879781421e-05, "loss": 0.8983, "step": 330 },
    { "epoch": 0.9, "learning_rate": 3.737704918032787e-05, "loss": 0.8889, "step": 340 },
    { "epoch": 0.93, "learning_rate": 3.726775956284153e-05, "loss": 0.8957, "step": 350 },
    { "epoch": 0.96, "learning_rate": 3.71584699453552e-05, "loss": 0.9067, "step": 360 },
    { "epoch": 0.98, "learning_rate": 3.704918032786886e-05, "loss": 0.8867, "step": 370 },
    { "epoch": 1.0, "eval_loss": 1.0381758213043213, "eval_runtime": 348.9535, "eval_samples_per_second": 7.663, "eval_steps_per_second": 1.917, "eval_wer": 0.6820688128905276, "step": 376 },
    { "epoch": 1.01, "learning_rate": 3.693989071038252e-05, "loss": 0.9229, "step": 380 },
    { "epoch": 1.04, "learning_rate": 3.683060109289618e-05, "loss": 0.9061, "step": 390 },
    { "epoch": 1.06, "learning_rate": 3.672131147540984e-05, "loss": 0.8896, "step": 400 },
    { "epoch": 1.09, "learning_rate": 3.66120218579235e-05, "loss": 0.8908, "step": 410 },
    { "epoch": 1.12, "learning_rate": 3.650273224043716e-05, "loss": 0.8999, "step": 420 },
    { "epoch": 1.14, "learning_rate": 3.6393442622950826e-05, "loss": 0.9126, "step": 430 },
    { "epoch": 1.17, "learning_rate": 3.6284153005464486e-05, "loss": 0.9274, "step": 440 },
    { "epoch": 1.2, "learning_rate": 3.6174863387978146e-05, "loss": 0.8986, "step": 450 },
    { "epoch": 1.22, "learning_rate": 3.6065573770491806e-05, "loss": 0.8889, "step": 460 },
    { "epoch": 1.25, "learning_rate": 3.5956284153005466e-05, "loss": 0.89, "step": 470 },
    { "epoch": 1.28, "learning_rate": 3.5846994535519126e-05, "loss": 0.9142, "step": 480 },
    { "epoch": 1.3, "learning_rate": 3.5737704918032786e-05, "loss": 0.9122, "step": 490 },
    { "epoch": 1.33, "learning_rate": 3.562841530054645e-05, "loss": 0.8805, "step": 500 },
    { "epoch": 1.36, "learning_rate": 3.551912568306011e-05, "loss": 0.878, "step": 510 },
    { "epoch": 1.38, "learning_rate": 3.5409836065573773e-05, "loss": 0.8903, "step": 520 },
    { "epoch": 1.41, "learning_rate": 3.5300546448087433e-05, "loss": 0.9098, "step": 530 },
    { "epoch": 1.44, "learning_rate": 3.5191256830601094e-05, "loss": 0.918, "step": 540 },
    { "epoch": 1.46, "learning_rate": 3.5081967213114754e-05, "loss": 0.8911, "step": 550 },
    { "epoch": 1.49, "learning_rate": 3.4972677595628414e-05, "loss": 0.8767, "step": 560 },
    { "epoch": 1.52, "learning_rate": 3.486338797814208e-05, "loss": 0.8821, "step": 570 },
    { "epoch": 1.54, "learning_rate": 3.475409836065574e-05, "loss": 0.9001, "step": 580 },
    { "epoch": 1.57, "learning_rate": 3.46448087431694e-05, "loss": 0.8989, "step": 590 },
    { "epoch": 1.6, "learning_rate": 3.453551912568306e-05, "loss": 0.8716, "step": 600 },
    { "epoch": 1.62, "learning_rate": 3.442622950819672e-05, "loss": 0.8965, "step": 610 },
    { "epoch": 1.65, "learning_rate": 3.431693989071038e-05, "loss": 0.9048, "step": 620 },
    { "epoch": 1.68, "learning_rate": 3.420765027322405e-05, "loss": 0.9245, "step": 630 },
    { "epoch": 1.7, "learning_rate": 3.409836065573771e-05, "loss": 0.9237, "step": 640 },
    { "epoch": 1.73, "learning_rate": 3.398907103825137e-05, "loss": 0.9033, "step": 650 },
    { "epoch": 1.76, "learning_rate": 3.3879781420765035e-05, "loss": 0.8863, "step": 660 },
    { "epoch": 1.78, "learning_rate": 3.3770491803278695e-05, "loss": 0.8961, "step": 670 },
    { "epoch": 1.81, "learning_rate": 3.3661202185792355e-05, "loss": 0.9243, "step": 680 },
    { "epoch": 1.84, "learning_rate": 3.3551912568306015e-05, "loss": 0.9223, "step": 690 },
    { "epoch": 1.86, "learning_rate": 3.3442622950819675e-05, "loss": 0.8865, "step": 700 },
    { "epoch": 1.89, "learning_rate": 3.3333333333333335e-05, "loss": 0.893, "step": 710 },
    { "epoch": 1.91, "learning_rate": 3.3224043715847e-05, "loss": 0.892, "step": 720 },
    { "epoch": 1.94, "learning_rate": 3.311475409836066e-05, "loss": 0.9133, "step": 730 },
    { "epoch": 1.97, "learning_rate": 3.300546448087432e-05, "loss": 0.8801, "step": 740 },
    { "epoch": 1.99, "learning_rate": 3.289617486338798e-05, "loss": 0.8861, "step": 750 },
    { "epoch": 2.0, "eval_loss": 1.0259734392166138, "eval_runtime": 341.2083, "eval_samples_per_second": 7.837, "eval_steps_per_second": 1.961, "eval_wer": 0.6686301931036431, "step": 752 },
    { "epoch": 2.02, "learning_rate": 3.278688524590164e-05, "loss": 0.9331, "step": 760 },
    { "epoch": 2.05, "learning_rate": 3.26775956284153e-05, "loss": 0.8923, "step": 770 },
    { "epoch": 2.07, "learning_rate": 3.256830601092896e-05, "loss": 0.871, "step": 780 },
    { "epoch": 2.1, "learning_rate": 3.245901639344263e-05, "loss": 0.8728, "step": 790 },
    { "epoch": 2.13, "learning_rate": 3.234972677595629e-05, "loss": 0.897, "step": 800 },
    { "epoch": 2.15, "learning_rate": 3.224043715846995e-05, "loss": 0.9309, "step": 810 },
    { "epoch": 2.18, "learning_rate": 3.213114754098361e-05, "loss": 0.8783, "step": 820 },
    { "epoch": 2.21, "learning_rate": 3.202185792349727e-05, "loss": 0.8631, "step": 830 },
    { "epoch": 2.23, "learning_rate": 3.191256830601093e-05, "loss": 0.8903, "step": 840 },
    { "epoch": 2.26, "learning_rate": 3.180327868852459e-05, "loss": 0.8706, "step": 850 },
    { "epoch": 2.29, "learning_rate": 3.1693989071038256e-05, "loss": 0.9252, "step": 860 },
    { "epoch": 2.31, "learning_rate": 3.1584699453551916e-05, "loss": 0.8834, "step": 870 },
    { "epoch": 2.34, "learning_rate": 3.1475409836065576e-05, "loss": 0.8802, "step": 880 },
    { "epoch": 2.37, "learning_rate": 3.1366120218579236e-05, "loss": 0.8935, "step": 890 },
    { "epoch": 2.39, "learning_rate": 3.1256830601092896e-05, "loss": 0.8759, "step": 900 },
    { "epoch": 2.42, "learning_rate": 3.1147540983606557e-05, "loss": 0.9278, "step": 910 },
    { "epoch": 2.45, "learning_rate": 3.103825136612022e-05, "loss": 0.8689, "step": 920 },
    { "epoch": 2.47, "learning_rate": 3.0928961748633883e-05, "loss": 0.8675, "step": 930 },
    { "epoch": 2.5, "learning_rate": 3.0819672131147544e-05, "loss": 0.8751, "step": 940 },
    { "epoch": 2.53, "learning_rate": 3.0710382513661204e-05, "loss": 0.877, "step": 950 },
    { "epoch": 2.55, "learning_rate": 3.0601092896174864e-05, "loss": 0.9216, "step": 960 },
    { "epoch": 2.58, "learning_rate": 3.0491803278688527e-05, "loss": 0.8636, "step": 970 },
    { "epoch": 2.61, "learning_rate": 3.0382513661202187e-05, "loss": 0.8748, "step": 980 },
    { "epoch": 2.63, "learning_rate": 3.0273224043715847e-05, "loss": 0.8876, "step": 990 },
    { "epoch": 2.66, "learning_rate": 3.0163934426229507e-05, "loss": 0.8637, "step": 1000 },
    { "epoch": 2.69, "learning_rate": 3.005464480874317e-05, "loss": 0.9266, "step": 1010 },
    { "epoch": 2.71, "learning_rate": 2.9945355191256834e-05, "loss": 0.8768, "step": 1020 },
    { "epoch": 2.74, "learning_rate": 2.9836065573770498e-05, "loss": 0.8833, "step": 1030 },
    { "epoch": 2.77, "learning_rate": 2.9726775956284158e-05, "loss": 0.878, "step": 1040 },
    { "epoch": 2.79, "learning_rate": 2.9617486338797818e-05, "loss": 0.8862, "step": 1050 },
    { "epoch": 2.82, "learning_rate": 2.9508196721311478e-05, "loss": 0.9319, "step": 1060 },
    { "epoch": 2.85, "learning_rate": 2.939890710382514e-05, "loss": 0.8807, "step": 1070 },
    { "epoch": 2.87, "learning_rate": 2.92896174863388e-05, "loss": 0.9018, "step": 1080 },
    { "epoch": 2.9, "learning_rate": 2.918032786885246e-05, "loss": 0.8827, "step": 1090 },
    { "epoch": 2.93, "learning_rate": 2.9071038251366125e-05, "loss": 0.8759, "step": 1100 },
    { "epoch": 2.95, "learning_rate": 2.8961748633879785e-05, "loss": 0.9162, "step": 1110 },
    { "epoch": 2.98, "learning_rate": 2.8852459016393445e-05, "loss": 0.8682, "step": 1120 },
    { "epoch": 3.0, "eval_loss": 1.0357706546783447, "eval_runtime": 341.6621, "eval_samples_per_second": 7.826, "eval_steps_per_second": 1.958, "eval_wer": 0.6603899742434302, "step": 1128 },
    { "epoch": 3.01, "learning_rate": 2.874316939890711e-05, "loss": 0.9038, "step": 1130 },
    { "epoch": 3.03, "learning_rate": 2.863387978142077e-05, "loss": 0.9097, "step": 1140 },
    { "epoch": 3.06, "learning_rate": 2.852459016393443e-05, "loss": 0.891, "step": 1150 },
    { "epoch": 3.09, "learning_rate": 2.841530054644809e-05, "loss": 0.8733, "step": 1160 },
    { "epoch": 3.11, "learning_rate": 2.8306010928961752e-05, "loss": 0.8789, "step": 1170 },
    { "epoch": 3.14, "learning_rate": 2.8196721311475412e-05, "loss": 0.8846, "step": 1180 },
    { "epoch": 3.16, "learning_rate": 2.8087431693989072e-05, "loss": 0.9069, "step": 1190 },
    { "epoch": 3.19, "learning_rate": 2.7978142076502736e-05, "loss": 0.8742, "step": 1200 },
    { "epoch": 3.22, "learning_rate": 2.7868852459016396e-05, "loss": 0.8769, "step": 1210 },
    { "epoch": 3.24, "learning_rate": 2.7759562841530056e-05, "loss": 0.876, "step": 1220 },
    { "epoch": 3.27, "learning_rate": 2.765027322404372e-05, "loss": 0.8952, "step": 1230 },
    { "epoch": 3.3, "learning_rate": 2.754098360655738e-05, "loss": 0.898, "step": 1240 },
    { "epoch": 3.32, "learning_rate": 2.743169398907104e-05, "loss": 0.8642, "step": 1250 },
    { "epoch": 3.35, "learning_rate": 2.73224043715847e-05, "loss": 0.8772, "step": 1260 },
    { "epoch": 3.38, "learning_rate": 2.7213114754098363e-05, "loss": 0.8589, "step": 1270 },
    { "epoch": 3.4, "learning_rate": 2.7103825136612023e-05, "loss": 0.8754, "step": 1280 },
    { "epoch": 3.43, "learning_rate": 2.6994535519125683e-05, "loss": 0.9035, "step": 1290 },
    { "epoch": 3.46, "learning_rate": 2.6885245901639346e-05, "loss": 0.8784, "step": 1300 },
    { "epoch": 3.48, "learning_rate": 2.6775956284153007e-05, "loss": 0.8698, "step": 1310 },
    { "epoch": 3.51, "learning_rate": 2.6666666666666667e-05, "loss": 0.8671, "step": 1320 },
    { "epoch": 3.54, "learning_rate": 2.655737704918033e-05, "loss": 0.8834, "step": 1330 },
    { "epoch": 3.56, "learning_rate": 2.644808743169399e-05, "loss": 0.9126, "step": 1340 },
    { "epoch": 3.59, "learning_rate": 2.633879781420765e-05, "loss": 0.8678, "step": 1350 },
    { "epoch": 3.62, "learning_rate": 2.622950819672131e-05, "loss": 0.8769, "step": 1360 },
    { "epoch": 3.64, "learning_rate": 2.6120218579234974e-05, "loss": 0.8687, "step": 1370 },
    { "epoch": 3.67, "learning_rate": 2.6010928961748634e-05, "loss": 0.8817, "step": 1380 },
    { "epoch": 3.7, "learning_rate": 2.59016393442623e-05, "loss": 0.9162, "step": 1390 },
    { "epoch": 3.72, "learning_rate": 2.579234972677596e-05, "loss": 0.8587, "step": 1400 },
    { "epoch": 3.75, "learning_rate": 2.568306010928962e-05, "loss": 0.8572, "step": 1410 },
    { "epoch": 3.78, "learning_rate": 2.5573770491803284e-05, "loss": 0.8629, "step": 1420 },
    { "epoch": 3.8, "learning_rate": 2.5464480874316944e-05, "loss": 0.8755, "step": 1430 },
    { "epoch": 3.83, "learning_rate": 2.5355191256830604e-05, "loss": 0.9048, "step": 1440 },
    { "epoch": 3.86, "learning_rate": 2.5245901639344264e-05, "loss": 0.8635, "step": 1450 },
    { "epoch": 3.88, "learning_rate": 2.5136612021857928e-05, "loss": 0.8618, "step": 1460 },
    { "epoch": 3.91, "learning_rate": 2.5027322404371588e-05, "loss": 0.8637, "step": 1470 },
    { "epoch": 3.94, "learning_rate": 2.4918032786885248e-05, "loss": 0.8885, "step": 1480 },
    { "epoch": 3.96, "learning_rate": 2.480874316939891e-05, "loss": 0.8829, "step": 1490 },
    { "epoch": 3.99, "learning_rate": 2.469945355191257e-05, "loss": 0.8662, "step": 1500 },
    { "epoch": 4.0, "eval_loss": 1.0233858823776245, "eval_runtime": 343.724, "eval_samples_per_second": 7.779, "eval_steps_per_second": 1.946, "eval_wer": 0.6664547753245469, "step": 1504 },
    { "epoch": 4.02, "learning_rate": 2.459016393442623e-05, "loss": 0.9061, "step": 1510 },
    { "epoch": 4.04, "learning_rate": 2.448087431693989e-05, "loss": 0.8684, "step": 1520 },
    { "epoch": 4.07, "learning_rate": 2.4371584699453555e-05, "loss": 0.8616, "step": 1530 },
    { "epoch": 4.1, "learning_rate": 2.4262295081967215e-05, "loss": 0.8718, "step": 1540 },
    { "epoch": 4.12, "learning_rate": 2.4153005464480875e-05, "loss": 0.8663, "step": 1550 },
    { "epoch": 4.15, "learning_rate": 2.404371584699454e-05, "loss": 0.9004, "step": 1560 },
    { "epoch": 4.18, "learning_rate": 2.39344262295082e-05, "loss": 0.8659, "step": 1570 },
    { "epoch": 4.2, "learning_rate": 2.382513661202186e-05, "loss": 0.8569, "step": 1580 },
    { "epoch": 4.23, "learning_rate": 2.3715846994535522e-05, "loss": 0.8574, "step": 1590 },
    { "epoch": 4.26, "learning_rate": 2.3606557377049182e-05, "loss": 0.8702, "step": 1600 },
    { "epoch": 4.28, "learning_rate": 2.3497267759562842e-05, "loss": 0.9054, "step": 1610 },
    { "epoch": 4.31, "learning_rate": 2.3387978142076502e-05, "loss": 0.8777, "step": 1620 },
    { "epoch": 4.34, "learning_rate": 2.3278688524590166e-05, "loss": 0.8657, "step": 1630 },
    { "epoch": 4.36, "learning_rate": 2.3169398907103826e-05, "loss": 0.8712, "step": 1640 },
    { "epoch": 4.39, "learning_rate": 2.3060109289617486e-05, "loss": 0.8578, "step": 1650 },
    { "epoch": 4.41, "learning_rate": 2.295081967213115e-05, "loss": 0.8931, "step": 1660 },
    { "epoch": 4.44, "learning_rate": 2.284153005464481e-05, "loss": 0.8734, "step": 1670 },
    { "epoch": 4.47, "learning_rate": 2.273224043715847e-05, "loss": 0.8715, "step": 1680 },
    { "epoch": 4.49, "learning_rate": 2.2622950819672133e-05, "loss": 0.8578, "step": 1690 },
    { "epoch": 4.52, "learning_rate": 2.2513661202185793e-05, "loss": 0.8612, "step": 1700 },
    { "epoch": 4.55, "learning_rate": 2.2404371584699453e-05, "loss": 0.8929, "step": 1710 },
    { "epoch": 4.57, "learning_rate": 2.2295081967213113e-05, "loss": 0.8684, "step": 1720 },
    { "epoch": 4.6, "learning_rate": 2.2185792349726777e-05, "loss": 0.87, "step": 1730 },
    { "epoch": 4.63, "learning_rate": 2.2076502732240437e-05, "loss": 0.8621, "step": 1740 },
    { "epoch": 4.65, "learning_rate": 2.1967213114754104e-05, "loss": 0.8569, "step": 1750 },
    { "epoch": 4.68, "learning_rate": 2.1857923497267764e-05, "loss": 0.9089, "step": 1760 },
    { "epoch": 4.71, "learning_rate": 2.1748633879781424e-05, "loss": 0.8664, "step": 1770 },
    { "epoch": 4.73, "learning_rate": 2.1639344262295087e-05, "loss": 0.872, "step": 1780 },
    { "epoch": 4.76, "learning_rate": 2.1530054644808747e-05, "loss": 0.8537, "step": 1790 },
    { "epoch": 4.79, "learning_rate": 2.1420765027322407e-05, "loss": 0.8592, "step": 1800 },
    { "epoch": 4.81, "learning_rate": 2.1311475409836067e-05, "loss": 0.9015, "step": 1810 },
    { "epoch": 4.84, "learning_rate": 2.120218579234973e-05, "loss": 0.8788, "step": 1820 },
    { "epoch": 4.87, "learning_rate": 2.109289617486339e-05, "loss": 0.8552, "step": 1830 },
    { "epoch": 4.89, "learning_rate": 2.098360655737705e-05, "loss": 0.8587, "step": 1840 },
    { "epoch": 4.92, "learning_rate": 2.0874316939890714e-05, "loss": 0.8552, "step": 1850 },
    { "epoch": 4.95, "learning_rate": 2.0765027322404374e-05, "loss": 0.8833, "step": 1860 },
    { "epoch": 4.97, "learning_rate": 2.0655737704918034e-05, "loss": 0.8501, "step": 1870 },
    { "epoch": 5.0, "learning_rate": 2.0546448087431698e-05, "loss": 0.8463, "step": 1880 },
    { "epoch": 5.0, "eval_loss": 1.0332772731781006, "eval_runtime": 349.3455, "eval_samples_per_second": 7.654, "eval_steps_per_second": 1.915, "eval_wer": 0.6666054536122765, "step": 1880 },
    { "epoch": 5.03, "learning_rate": 2.0437158469945358e-05, "loss": 0.9174, "step": 1890 },
    { "epoch": 5.05, "learning_rate": 2.0327868852459018e-05, "loss": 0.8478, "step": 1900 },
    { "epoch": 5.08, "learning_rate": 2.0218579234972678e-05, "loss": 0.8535, "step": 1910 },
    { "epoch": 5.11, "learning_rate": 2.010928961748634e-05, "loss": 0.8651, "step": 1920 },
    { "epoch": 5.13, "learning_rate": 2e-05, "loss": 0.8535, "step": 1930 },
    { "epoch": 5.16, "learning_rate": 1.9890710382513662e-05, "loss": 0.9069, "step": 1940 },
    { "epoch": 5.19, "learning_rate": 1.9781420765027325e-05, "loss": 0.8565, "step": 1950 },
    { "epoch": 5.21, "learning_rate": 1.9672131147540985e-05, "loss": 0.8713, "step": 1960 },
    { "epoch": 5.24, "learning_rate": 1.9562841530054645e-05, "loss": 0.8545, "step": 1970 },
    { "epoch": 5.27, "learning_rate": 1.9453551912568305e-05, "loss": 0.8637, "step": 1980 },
    { "epoch": 5.29, "learning_rate": 1.934426229508197e-05, "loss": 0.9046, "step": 1990 },
    { "epoch": 5.32, "learning_rate": 1.923497267759563e-05, "loss": 0.8542, "step": 2000 },
    { "epoch": 5.35, "learning_rate": 1.912568306010929e-05, "loss": 0.8442, "step": 2010 },
    { "epoch": 5.37, "learning_rate": 1.9027322404371588e-05, "loss": 0.8502, "step": 2020 },
    { "epoch": 5.4, "learning_rate": 1.8918032786885248e-05, "loss": 0.8663, "step": 2030 },
    { "epoch": 5.43, "learning_rate": 1.8808743169398908e-05, "loss": 0.8946, "step": 2040 },
    { "epoch": 5.45, "learning_rate": 1.869945355191257e-05, "loss": 0.8449, "step": 2050 },
    { "epoch": 5.48, "learning_rate": 1.859016393442623e-05, "loss": 0.8443, "step": 2060 },
    { "epoch": 5.51, "learning_rate": 1.848087431693989e-05, "loss": 0.8446, "step": 2070 },
    { "epoch": 5.53, "learning_rate": 1.837158469945355e-05, "loss": 0.859, "step": 2080 },
    { "epoch": 5.56, "learning_rate": 1.8262295081967215e-05, "loss": 0.9006, "step": 2090 },
    { "epoch": 5.59, "learning_rate": 1.8153005464480875e-05, "loss": 0.8588, "step": 2100 },
    { "epoch": 5.61, "learning_rate": 1.8043715846994535e-05, "loss": 0.8568, "step": 2110 },
    { "epoch": 5.64, "learning_rate": 1.79344262295082e-05, "loss": 0.8614, "step": 2120 },
    { "epoch": 5.66, "learning_rate": 1.782513661202186e-05, "loss": 0.8447, "step": 2130 },
    { "epoch": 5.69, "learning_rate": 1.7715846994535522e-05, "loss": 0.9098, "step": 2140 },
    { "epoch": 5.72, "learning_rate": 1.7606557377049182e-05, "loss": 0.8408, "step": 2150 },
    { "epoch": 5.74, "learning_rate": 1.7497267759562842e-05, "loss": 0.8611, "step": 2160 },
    { "epoch": 5.77, "learning_rate": 1.7387978142076506e-05, "loss": 0.8524, "step": 2170 },
    { "epoch": 5.8, "learning_rate": 1.7278688524590166e-05, "loss": 0.859, "step": 2180 },
    { "epoch": 5.82, "learning_rate": 1.7169398907103826e-05, "loss": 0.9099, "step": 2190 },
    { "epoch": 5.85, "learning_rate": 1.706010928961749e-05, "loss": 0.8534, "step": 2200 },
    { "epoch": 5.88, "learning_rate": 1.695081967213115e-05, "loss": 0.8525, "step": 2210 },
    { "epoch": 5.9, "learning_rate": 1.684153005464481e-05, "loss": 0.8426, "step": 2220 },
    { "epoch": 5.93, "learning_rate": 1.6732240437158473e-05, "loss": 0.8708, "step": 2230 },
    { "epoch": 5.96, "learning_rate": 1.6622950819672133e-05, "loss": 0.8648, "step": 2240 },
    { "epoch": 5.98, "learning_rate": 1.6513661202185793e-05, "loss": 0.8573, "step": 2250 },
    { "epoch": 6.0, "eval_loss": 1.0162557363510132, "eval_runtime": 338.1768, "eval_samples_per_second": 7.907, "eval_steps_per_second": 1.978, "eval_wer": 0.6621886963032024, "step": 2256 },
    { "epoch": 6.01, "learning_rate": 1.6404371584699453e-05, "loss": 0.8838, "step": 2260 },
    { "epoch": 6.04, "learning_rate": 1.6295081967213116e-05, "loss": 0.8716, "step": 2270 },
    { "epoch": 6.06, "learning_rate": 1.6185792349726776e-05, "loss": 0.8522, "step": 2280 },
    { "epoch": 6.09, "learning_rate": 1.6076502732240437e-05, "loss": 0.8475, "step": 2290 },
    { "epoch": 6.12, "learning_rate": 1.59672131147541e-05, "loss": 0.8533, "step": 2300 },
    { "epoch": 6.14, "learning_rate": 1.585792349726776e-05, "loss": 0.8867, "step": 2310 },
    { "epoch": 6.17, "learning_rate": 1.5748633879781424e-05, "loss": 0.8741, "step": 2320 },
    { "epoch": 6.2, "learning_rate": 1.5639344262295084e-05, "loss": 0.8564, "step": 2330 },
    { "epoch": 6.22, "learning_rate": 1.5530054644808744e-05, "loss": 0.8445, "step": 2340 },
    { "epoch": 6.25, "learning_rate": 1.5420765027322407e-05, "loss": 0.8345, "step": 2350 },
    { "epoch": 6.28, "learning_rate": 1.5311475409836067e-05, "loss": 0.8805, "step": 2360 },
    { "epoch": 6.3, "learning_rate": 1.5202185792349729e-05, "loss": 0.8609, "step": 2370 },
    { "epoch": 6.33, "learning_rate": 1.5092896174863389e-05, "loss": 0.8438, "step": 2380 },
    { "epoch": 6.36, "learning_rate": 1.498360655737705e-05, "loss": 0.8471, "step": 2390 },
    { "epoch": 6.38, "learning_rate": 1.4874316939890713e-05, "loss": 0.8487, "step": 2400 },
    { "epoch": 6.41, "learning_rate": 1.4765027322404373e-05, "loss": 0.8833, "step": 2410 },
    { "epoch": 6.44, "learning_rate": 1.4655737704918034e-05, "loss": 0.8878, "step": 2420 },
    { "epoch": 6.46, "learning_rate": 1.4546448087431694e-05, "loss": 0.8349, "step": 2430 },
    { "epoch": 6.49, "learning_rate": 1.4437158469945356e-05, "loss": 0.8425, "step": 2440 },
    { "epoch": 6.52, "learning_rate": 1.4327868852459016e-05, "loss": 0.8523, "step": 2450 },
    { "epoch": 6.54, "learning_rate": 1.4218579234972678e-05, "loss": 0.879, "step": 2460 },
    { "epoch": 6.57, "learning_rate": 1.410928961748634e-05, "loss": 0.865, "step": 2470 },
    { "epoch": 6.6, "learning_rate": 1.4e-05, "loss": 0.8514, "step": 2480 },
    { "epoch": 6.62, "learning_rate": 1.3890710382513662e-05, "loss": 0.8625, "step": 2490 },
    { "epoch": 6.65, "learning_rate": 1.3781420765027325e-05, "loss": 0.8543, "step": 2500 },
    { "epoch": 6.68, "learning_rate": 1.3672131147540985e-05, "loss": 0.8793, "step": 2510 },
    { "epoch": 6.7, "learning_rate": 1.3562841530054647e-05, "loss": 0.8603, "step": 2520 },
    { "epoch": 6.73, "learning_rate": 1.3453551912568309e-05, "loss": 0.8584, "step": 2530 },
    { "epoch": 6.76, "learning_rate": 1.3344262295081969e-05, "loss": 0.8384, "step": 2540 },
    { "epoch": 6.78, "learning_rate": 1.323497267759563e-05, "loss": 0.8335, "step": 2550 },
    { "epoch": 6.81, "learning_rate": 1.312568306010929e-05, "loss": 0.8594, "step": 2560 },
    { "epoch": 6.84, "learning_rate": 1.3016393442622952e-05, "loss": 0.8789, "step": 2570 },
    { "epoch": 6.86, "learning_rate": 1.2907103825136614e-05, "loss": 0.8446, "step": 2580 },
    { "epoch": 6.89, "learning_rate": 1.2797814207650274e-05, "loss": 0.8577, "step": 2590 },
    { "epoch": 6.91, "learning_rate": 1.2688524590163936e-05, "loss": 0.8502, "step": 2600 },
    { "epoch": 6.94, "learning_rate": 1.2579234972677596e-05, "loss": 0.8872, "step": 2610 },
    { "epoch": 6.97, "learning_rate": 1.2469945355191258e-05, "loss": 0.8502, "step": 2620 },
    { "epoch": 6.99, "learning_rate": 1.236065573770492e-05, "loss": 0.8628, "step": 2630 },
    { "epoch": 7.0, "eval_loss": 1.0209393501281738, "eval_runtime": 339.6154, "eval_samples_per_second": 7.874, "eval_steps_per_second": 1.97, "eval_wer": 0.6550550211185038, "step": 2632 },
    { "epoch": 7.02, "learning_rate": 1.225136612021858e-05, "loss": 0.9001, "step": 2640 },
    { "epoch": 7.05, "learning_rate": 1.2142076502732241e-05, "loss": 0.8424, "step": 2650 },
    { "epoch": 7.07, "learning_rate": 1.2032786885245901e-05, "loss": 0.8444, "step": 2660 },
    { "epoch": 7.1, "learning_rate": 1.1923497267759563e-05, "loss": 0.8413, "step": 2670 },
    { "epoch": 7.13, "learning_rate": 1.1814207650273223e-05, "loss": 0.8429, "step": 2680 },
    { "epoch": 7.15, "learning_rate": 1.1704918032786887e-05, "loss": 0.904, "step": 2690 },
    { "epoch": 7.18, "learning_rate": 1.1595628415300548e-05, "loss": 0.8378, "step": 2700 },
    { "epoch": 7.21, "learning_rate": 1.148633879781421e-05, "loss": 0.8401, "step": 2710 },
    { "epoch": 7.23, "learning_rate": 1.137704918032787e-05, "loss": 0.8533, "step": 2720 },
    { "epoch": 7.26, "learning_rate": 1.1267759562841532e-05, "loss": 0.8341, "step": 2730 },
    { "epoch": 7.29, "learning_rate": 1.1158469945355192e-05, "loss": 0.8987, "step": 2740 },
    { "epoch": 7.31, "learning_rate": 1.1049180327868854e-05, "loss": 0.8522, "step": 2750 },
    { "epoch": 7.34, "learning_rate": 1.0939890710382515e-05, "loss": 0.8437, "step": 2760 },
    { "epoch": 7.37, "learning_rate": 1.0830601092896176e-05, "loss": 0.8466, "step": 2770 },
    { "epoch": 7.39, "learning_rate": 1.0721311475409837e-05, "loss": 0.8477, "step": 2780 },
    { "epoch": 7.42, "learning_rate": 1.0612021857923497e-05, "loss": 0.9094, "step": 2790 },
    { "epoch": 7.45, "learning_rate": 1.0502732240437159e-05, "loss": 0.8399, "step": 2800 },
    { "epoch": 7.47, "learning_rate": 1.0393442622950821e-05, "loss": 0.855, "step": 2810 },
    { "epoch": 7.5, "learning_rate": 1.0284153005464481e-05, "loss": 0.8448, "step": 2820 },
    { "epoch": 7.53, "learning_rate": 1.0174863387978143e-05, "loss": 0.8438, "step": 2830 },
    { "epoch": 7.55, "learning_rate": 1.0065573770491803e-05, "loss": 0.8951, "step": 2840 },
    { "epoch": 7.58, "learning_rate": 9.956284153005466e-06, "loss": 0.858, "step": 2850 },
    { "epoch": 7.61, "learning_rate": 9.846994535519126e-06, "loss": 0.8505, "step": 2860 },
    { "epoch": 7.63, "learning_rate": 9.737704918032788e-06, "loss": 0.8452, "step": 2870 },
    { "epoch": 7.66, "learning_rate": 9.628415300546448e-06, "loss": 0.8374, "step": 2880 },
    { "epoch": 7.69, "learning_rate": 9.51912568306011e-06, "loss": 0.8905, "step": 2890 },
    { "epoch": 7.71, "learning_rate": 9.409836065573772e-06, "loss": 0.8538, "step": 2900 },
    { "epoch": 7.74, "learning_rate": 9.300546448087432e-06, "loss": 0.848, "step": 2910 },
    { "epoch": 7.77, "learning_rate": 9.191256830601093e-06, "loss": 0.844, "step": 2920 },
    { "epoch": 7.79, "learning_rate": 9.081967213114755e-06, "loss": 0.8375, "step": 2930 },
    { "epoch": 7.82, "learning_rate": 8.972677595628417e-06, "loss": 0.8901, "step": 2940 },
    { "epoch": 7.85, "learning_rate": 8.863387978142077e-06, "loss": 0.8564, "step": 2950 },
    { "epoch": 7.87, "learning_rate": 8.754098360655739e-06, "loss": 0.8372, "step": 2960 },
    { "epoch": 7.9, "learning_rate": 8.644808743169399e-06, "loss": 0.8338, "step": 2970 },
    { "epoch": 7.93, "learning_rate": 8.53551912568306e-06, "loss": 0.8386, "step": 2980 },
    { "epoch": 7.95, "learning_rate": 8.426229508196722e-06, "loss": 0.8699, "step": 2990 },
    { "epoch": 7.98, "learning_rate": 8.316939890710382e-06, "loss": 0.8493, "step": 3000 },
    { "epoch": 8.0, "eval_loss": 1.0525248050689697, "eval_runtime": 343.4159, "eval_samples_per_second": 7.786, "eval_steps_per_second": 1.948, "eval_wer": 0.6581815955888931, "step": 3008 },
    { "epoch": 8.01, "learning_rate": 8.207650273224044e-06, "loss": 0.8601, "step": 3010 },
    { "epoch": 8.03, "learning_rate": 8.098360655737706e-06, "loss": 0.8718, "step": 3020 },
    { "epoch": 8.06, "learning_rate": 7.989071038251368e-06, "loss": 0.8381, "step": 3030 },
    { "epoch": 8.09, "learning_rate": 7.879781420765028e-06, "loss": 0.8315, "step": 3040 },
    { "epoch": 8.11, "learning_rate": 7.77049180327869e-06, "loss": 0.8324, "step": 3050 },
    { "epoch": 8.14, "learning_rate": 7.66120218579235e-06, "loss": 0.8601, "step": 3060 },
    { "epoch": 8.16, "learning_rate": 7.551912568306011e-06, "loss": 0.8776, "step": 3070 },
    { "epoch": 8.19, "learning_rate": 7.442622950819672e-06, "loss": 0.8488, "step": 3080 },
    { "epoch": 8.22, "learning_rate": 7.333333333333333e-06, "loss": 0.8432, "step": 3090 },
    { "epoch": 8.24, "learning_rate": 7.224043715846996e-06, "loss": 0.8341, "step": 3100 },
    { "epoch": 8.27, "learning_rate": 7.114754098360657e-06, "loss": 0.8486, "step": 3110 },
    { "epoch": 8.3, "learning_rate": 7.0054644808743176e-06, "loss": 0.885, "step": 3120 },
    { "epoch": 8.32, "learning_rate": 6.8961748633879785e-06, "loss": 0.8465, "step": 3130 },
    { "epoch": 8.35, "learning_rate": 6.78688524590164e-06, "loss": 0.8395, "step": 3140 },
    { "epoch": 8.38, "learning_rate": 6.677595628415301e-06, "loss": 0.8381, "step": 3150 },
    { "epoch": 8.4, "learning_rate": 6.568306010928962e-06, "loss": 0.8614, "step": 3160 },
    { "epoch": 8.43, "learning_rate": 6.459016393442623e-06, "loss": 0.8787, "step": 3170 },
    { "epoch": 8.46, "learning_rate": 6.349726775956284e-06, "loss": 0.83, "step": 3180 },
    { "epoch": 8.48, "learning_rate": 6.2404371584699465e-06, "loss": 0.848, "step": 3190 },
    { "epoch": 8.51, "learning_rate": 6.131147540983607e-06, "loss": 0.8501, "step": 3200 },
    { "epoch": 8.54, "learning_rate": 6.021857923497268e-06, "loss": 0.8618, "step": 3210 },
    { "epoch": 8.56, "learning_rate": 5.912568306010929e-06, "loss": 0.8855, "step": 3220 },
    { "epoch": 8.59, "learning_rate": 5.803278688524591e-06, "loss": 0.8357, "step": 3230 },
    { "epoch": 8.62, "learning_rate": 5.693989071038252e-06, "loss": 0.8365, "step": 3240 },
    { "epoch": 8.64, "learning_rate": 5.584699453551913e-06, "loss": 0.8537, "step": 3250 },
    { "epoch": 8.67, "learning_rate": 5.475409836065574e-06, "loss": 0.8565, "step": 3260 },
    { "epoch": 8.7, "learning_rate": 5.366120218579235e-06, "loss": 0.8691, "step": 3270 },
    { "epoch": 8.72, "learning_rate": 5.256830601092897e-06, "loss": 0.8333, "step": 3280 },
    { "epoch": 8.75, "learning_rate": 5.147540983606558e-06, "loss": 0.8268, "step": 3290 },
    { "epoch": 8.78, "learning_rate": 5.038251366120219e-06, "loss": 0.8405, "step": 3300 },
    { "epoch": 8.8, "learning_rate": 4.92896174863388e-06, "loss": 0.8438, "step": 3310 },
    { "epoch": 8.83, "learning_rate": 4.819672131147542e-06, "loss": 0.8763, "step": 3320 },
    { "epoch": 8.86, "learning_rate": 4.710382513661203e-06, "loss": 0.841, "step": 3330 },
    { "epoch": 8.88, "learning_rate": 4.6010928961748635e-06, "loss": 0.8371, "step": 3340 },
    { "epoch": 8.91, "learning_rate": 4.491803278688525e-06, "loss": 0.8287, "step": 3350 },
    { "epoch": 8.94, "learning_rate": 4.382513661202186e-06, "loss": 0.8526, "step": 3360 },
    { "epoch": 8.96, "learning_rate": 4.273224043715847e-06, "loss": 0.8507, "step": 3370 },
    { "epoch": 8.99, "learning_rate": 4.163934426229508e-06, "loss": 0.8371, "step": 3380 },
    { "epoch": 9.0, "eval_loss": 1.0408543348312378, "eval_runtime": 348.0788, "eval_samples_per_second": 7.682, "eval_steps_per_second": 1.922, "eval_wer": 0.651471703088434, "step": 3384 },
    { "epoch": 9.02, "learning_rate": 4.05464480874317e-06, "loss": 0.8875, "step": 3390 },
    { "epoch": 9.04, "learning_rate": 3.945355191256831e-06, "loss": 0.8507, "step": 3400 },
    { "epoch": 9.07, "learning_rate": 3.8360655737704925e-06, "loss": 0.8481, "step": 3410 },
    { "epoch": 9.1, "learning_rate": 3.7267759562841534e-06, "loss": 0.843, "step": 3420 },
    { "epoch": 9.12, "learning_rate": 3.6174863387978143e-06, "loss": 0.8397, "step": 3430 },
    { "epoch": 9.15, "learning_rate": 3.508196721311476e-06, "loss": 0.8743, "step": 3440 },
    { "epoch": 9.18, "learning_rate": 3.398907103825137e-06, "loss": 0.8453, "step": 3450 },
    { "epoch": 9.2, "learning_rate": 3.2896174863387983e-06, "loss": 0.8378, "step": 3460 },
    { "epoch": 9.23, "learning_rate": 3.180327868852459e-06, "loss": 0.8245, "step": 3470 },
    { "epoch": 9.26, "learning_rate": 3.07103825136612e-06, "loss": 0.8337, "step": 3480 },
    { "epoch": 9.28, "learning_rate": 2.961748633879782e-06, "loss": 0.8748, "step": 3490 },
    { "epoch": 9.31, "learning_rate": 2.8524590163934428e-06, "loss": 0.8398, "step": 3500 },
    { "epoch": 9.34, "learning_rate": 2.743169398907104e-06, "loss": 0.8387, "step": 3510 },
    { "epoch": 9.36, "learning_rate": 2.633879781420765e-06, "loss": 0.8413, "step": 3520 },
    { "epoch": 9.39, "learning_rate": 2.5245901639344268e-06, "loss": 0.8338, "step": 3530 },
    { "epoch": 9.41, "learning_rate": 2.4153005464480877e-06, "loss": 0.8767, "step": 3540 },
    { "epoch": 9.44, "learning_rate": 2.306010928961749e-06, "loss": 0.8293, "step": 3550 },
    { "epoch": 9.47, "learning_rate": 2.19672131147541e-06, "loss": 0.839, "step": 3560 },
    { "epoch": 9.49, "learning_rate": 2.0874316939890713e-06, "loss": 0.8318, "step": 3570 },
    { "epoch": 9.52, "learning_rate": 1.978142076502732e-06, "loss": 0.8353, "step": 3580 },
    { "epoch": 9.55, "learning_rate": 1.8688524590163937e-06, "loss": 0.8795, "step": 3590 },
    { "epoch": 9.57, "learning_rate": 1.7595628415300548e-06, "loss": 0.8441, "step": 3600 },
    { "epoch": 9.6, "learning_rate": 1.6502732240437162e-06, "loss": 0.8318, "step": 3610 },
    { "epoch": 9.63, "learning_rate": 1.540983606557377e-06, "loss": 0.8313, "step": 3620 },
    { "epoch": 9.65, "learning_rate": 1.4316939890710382e-06, "loss": 0.8372, "step": 3630 },
    { "epoch": 9.68, "learning_rate": 1.3224043715846995e-06, "loss": 0.8708, "step": 3640 },
    { "epoch": 9.71, "learning_rate": 1.2131147540983609e-06, "loss": 0.8448, "step": 3650 },
    { "epoch": 9.73, "learning_rate": 1.103825136612022e-06, "loss": 0.837, "step": 3660 },
    { "epoch": 9.76, "learning_rate": 9.945355191256831e-07, "loss": 0.8315, "step": 3670 },
    { "epoch": 9.79, "learning_rate": 8.852459016393443e-07, "loss": 0.8322, "step": 3680 },
    { "epoch": 9.81, "learning_rate": 7.759562841530055e-07, "loss": 0.8701, "step": 3690 },
    { "epoch": 9.84, "learning_rate": 6.666666666666667e-07, "loss": 0.8334, "step": 3700 },
    { "epoch": 9.87, "learning_rate": 5.573770491803279e-07, "loss": 0.8416, "step": 3710 },
    { "epoch": 9.89, "learning_rate": 4.480874316939891e-07, "loss": 0.8386, "step": 3720 },
    { "epoch": 9.92, "learning_rate": 3.387978142076503e-07, "loss": 0.8322, "step": 3730 },
    { "epoch": 9.95, "learning_rate": 2.295081967213115e-07, "loss": 0.8574, "step": 3740 },
    { "epoch": 9.97, "learning_rate": 1.202185792349727e-07, "loss": 0.8294, "step": 3750 },
    { "epoch": 10.0, "learning_rate": 1.0928961748633881e-08, "loss": 0.8229, "step": 3760 },
    { "epoch": 10.0, "eval_loss": 1.059747576713562, "eval_runtime": 346.8481, "eval_samples_per_second": 7.709, "eval_steps_per_second": 1.929, "eval_wer": 0.6522768901884891, "step": 3760 },
    { "epoch": 10.0, "step": 3760, "total_flos": 7.29061419637639e+19, "train_loss": 0.8711832551245994, "train_runtime": 72833.4219, "train_samples_per_second": 3.304, "train_steps_per_second": 0.052 }
  ],
  "max_steps": 3760,
  "num_train_epochs": 10,
  "total_flos": 7.29061419637639e+19,
  "trial_name": null,
  "trial_params": null
}