{
  "best_metric": 14.849506681653555,
  "best_model_checkpoint": "./checkpoint-10000",
  "epoch": 1.0,
  "eval_steps": 1000,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0025,
      "grad_norm": 32.17699432373047,
      "learning_rate": 7.875e-07,
      "loss": 3.5416,
      "step": 25
    },
    {
      "epoch": 0.005,
      "grad_norm": 17.338768005371094,
      "learning_rate": 1.7249999999999998e-06,
      "loss": 2.8498,
      "step": 50
    },
    {
      "epoch": 0.0075,
      "grad_norm": 13.783746719360352,
      "learning_rate": 2.6624999999999995e-06,
      "loss": 1.9843,
      "step": 75
    },
    {
      "epoch": 0.01,
      "grad_norm": 10.80038070678711,
      "learning_rate": 3.6e-06,
      "loss": 1.6113,
      "step": 100
    },
    {
      "epoch": 0.0125,
      "grad_norm": 9.404985427856445,
      "learning_rate": 4.537499999999999e-06,
      "loss": 1.3704,
      "step": 125
    },
    {
      "epoch": 0.015,
      "grad_norm": 8.393142700195312,
      "learning_rate": 5.474999999999999e-06,
      "loss": 1.1666,
      "step": 150
    },
    {
      "epoch": 0.0175,
      "grad_norm": 9.127829551696777,
      "learning_rate": 6.4125e-06,
      "loss": 1.0771,
      "step": 175
    },
    {
      "epoch": 0.02,
      "grad_norm": 9.412626266479492,
      "learning_rate": 7.35e-06,
      "loss": 1.0148,
      "step": 200
    },
    {
      "epoch": 0.0225,
      "grad_norm": 7.45149564743042,
      "learning_rate": 8.2875e-06,
      "loss": 0.8875,
      "step": 225
    },
    {
      "epoch": 0.025,
      "grad_norm": 10.142014503479004,
      "learning_rate": 9.224999999999999e-06,
      "loss": 0.8914,
      "step": 250
    },
    {
      "epoch": 0.0275,
      "grad_norm": 8.410262107849121,
      "learning_rate": 1.01625e-05,
      "loss": 0.8811,
      "step": 275
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.67717170715332,
      "learning_rate": 1.1099999999999999e-05,
      "loss": 0.8374,
      "step": 300
    },
    {
      "epoch": 0.0325,
      "grad_norm": 7.850519180297852,
      "learning_rate": 1.20375e-05,
      "loss": 0.7848,
      "step": 325
    },
    {
      "epoch": 0.035,
      "grad_norm": 7.787046909332275,
      "learning_rate": 1.2974999999999999e-05,
      "loss": 0.7056,
      "step": 350
    },
    {
      "epoch": 0.0375,
      "grad_norm": 8.000933647155762,
      "learning_rate": 1.39125e-05,
      "loss": 0.6118,
      "step": 375
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.588298320770264,
      "learning_rate": 1.485e-05,
      "loss": 0.5839,
      "step": 400
    },
    {
      "epoch": 0.0425,
      "grad_norm": 7.084440231323242,
      "learning_rate": 1.5787499999999997e-05,
      "loss": 0.5352,
      "step": 425
    },
    {
      "epoch": 0.045,
      "grad_norm": 6.439165115356445,
      "learning_rate": 1.6725e-05,
      "loss": 0.5099,
      "step": 450
    },
    {
      "epoch": 0.0475,
      "grad_norm": 6.848193168640137,
      "learning_rate": 1.76625e-05,
      "loss": 0.4845,
      "step": 475
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.957454204559326,
      "learning_rate": 1.8599999999999998e-05,
      "loss": 0.4591,
      "step": 500
    },
    {
      "epoch": 0.0525,
      "grad_norm": 7.173631191253662,
      "learning_rate": 1.95375e-05,
      "loss": 0.4265,
      "step": 525
    },
    {
      "epoch": 0.055,
      "grad_norm": 5.627034664154053,
      "learning_rate": 2.0475e-05,
      "loss": 0.4271,
      "step": 550
    },
    {
      "epoch": 0.0575,
      "grad_norm": 5.964080333709717,
      "learning_rate": 2.1412499999999995e-05,
      "loss": 0.4051,
      "step": 575
    },
    {
      "epoch": 0.06,
      "grad_norm": 8.408676147460938,
      "learning_rate": 2.2349999999999998e-05,
      "loss": 0.3964,
      "step": 600
    },
    {
      "epoch": 0.0625,
      "grad_norm": 6.321172714233398,
      "learning_rate": 2.3287499999999997e-05,
      "loss": 0.3565,
      "step": 625
    },
    {
      "epoch": 0.065,
      "grad_norm": 5.115601062774658,
      "learning_rate": 2.4225e-05,
      "loss": 0.3746,
      "step": 650
    },
    {
      "epoch": 0.0675,
      "grad_norm": 5.302433967590332,
      "learning_rate": 2.51625e-05,
      "loss": 0.3636,
      "step": 675
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.260375499725342,
      "learning_rate": 2.6099999999999997e-05,
      "loss": 0.3597,
      "step": 700
    },
    {
      "epoch": 0.0725,
      "grad_norm": 8.463918685913086,
      "learning_rate": 2.7037499999999997e-05,
      "loss": 0.399,
      "step": 725
    },
    {
      "epoch": 0.075,
      "grad_norm": 6.9618659019470215,
      "learning_rate": 2.7975e-05,
      "loss": 0.5304,
      "step": 750
    },
    {
      "epoch": 0.0775,
      "grad_norm": 6.6647162437438965,
      "learning_rate": 2.8912499999999998e-05,
      "loss": 0.5135,
      "step": 775
    },
    {
      "epoch": 0.08,
      "grad_norm": 7.29068660736084,
      "learning_rate": 2.985e-05,
      "loss": 0.5595,
      "step": 800
    },
    {
      "epoch": 0.0825,
      "grad_norm": 5.9147186279296875,
      "learning_rate": 3.0787499999999996e-05,
      "loss": 0.3989,
      "step": 825
    },
    {
      "epoch": 0.085,
      "grad_norm": 4.840363502502441,
      "learning_rate": 3.1725e-05,
      "loss": 0.3295,
      "step": 850
    },
    {
      "epoch": 0.0875,
      "grad_norm": 4.7903571128845215,
      "learning_rate": 3.2662499999999994e-05,
      "loss": 0.3158,
      "step": 875
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.355573654174805,
      "learning_rate": 3.36e-05,
      "loss": 0.4046,
      "step": 900
    },
    {
      "epoch": 0.0925,
      "grad_norm": 7.575016975402832,
      "learning_rate": 3.45375e-05,
      "loss": 0.4779,
      "step": 925
    },
    {
      "epoch": 0.095,
      "grad_norm": 6.783919334411621,
      "learning_rate": 3.5474999999999995e-05,
      "loss": 0.4578,
      "step": 950
    },
    {
      "epoch": 0.0975,
      "grad_norm": 7.364860534667969,
      "learning_rate": 3.64125e-05,
      "loss": 0.6591,
      "step": 975
    },
    {
      "epoch": 0.1,
      "grad_norm": 7.520545959472656,
      "learning_rate": 3.735e-05,
      "loss": 0.586,
      "step": 1000
    },
    {
      "epoch": 0.1,
      "eval_loss": 0.6248713731765747,
      "eval_runtime": 39.9662,
      "eval_samples_per_second": 42.311,
      "eval_steps_per_second": 2.652,
      "eval_wer": 34.16385662545272,
      "step": 1000
    },
    {
      "epoch": 0.1025,
      "grad_norm": 6.746083736419678,
      "learning_rate": 3.74125e-05,
      "loss": 0.5033,
      "step": 1025
    },
    {
      "epoch": 0.105,
      "grad_norm": 5.35268497467041,
      "learning_rate": 3.730833333333333e-05,
      "loss": 0.3954,
      "step": 1050
    },
    {
      "epoch": 0.1075,
      "grad_norm": 4.840243816375732,
      "learning_rate": 3.7204166666666665e-05,
      "loss": 0.3024,
      "step": 1075
    },
    {
      "epoch": 0.11,
      "grad_norm": 4.398126602172852,
      "learning_rate": 3.7099999999999994e-05,
      "loss": 0.2543,
      "step": 1100
    },
    {
      "epoch": 0.1125,
      "grad_norm": 4.504413604736328,
      "learning_rate": 3.699583333333333e-05,
      "loss": 0.2439,
      "step": 1125
    },
    {
      "epoch": 0.115,
      "grad_norm": 4.229017734527588,
      "learning_rate": 3.6891666666666664e-05,
      "loss": 0.2602,
      "step": 1150
    },
    {
      "epoch": 0.1175,
      "grad_norm": 4.006415367126465,
      "learning_rate": 3.678749999999999e-05,
      "loss": 0.2588,
      "step": 1175
    },
    {
      "epoch": 0.12,
      "grad_norm": 4.367706298828125,
      "learning_rate": 3.668333333333333e-05,
      "loss": 0.2419,
      "step": 1200
    },
    {
      "epoch": 0.1225,
      "grad_norm": 7.554067611694336,
      "learning_rate": 3.6579166666666664e-05,
      "loss": 0.3699,
      "step": 1225
    },
    {
      "epoch": 0.125,
      "grad_norm": 6.349033355712891,
      "learning_rate": 3.6475e-05,
      "loss": 0.3811,
      "step": 1250
    },
    {
      "epoch": 0.1275,
      "grad_norm": 6.685649394989014,
      "learning_rate": 3.6370833333333334e-05,
      "loss": 0.4109,
      "step": 1275
    },
    {
      "epoch": 0.13,
      "grad_norm": 8.145482063293457,
      "learning_rate": 3.626666666666666e-05,
      "loss": 0.4295,
      "step": 1300
    },
    {
      "epoch": 0.1325,
      "grad_norm": 6.097599983215332,
      "learning_rate": 3.61625e-05,
      "loss": 0.3691,
      "step": 1325
    },
    {
      "epoch": 0.135,
      "grad_norm": 6.148138999938965,
      "learning_rate": 3.6058333333333333e-05,
      "loss": 0.3754,
      "step": 1350
    },
    {
      "epoch": 0.1375,
      "grad_norm": 6.437056541442871,
      "learning_rate": 3.595416666666666e-05,
      "loss": 0.3569,
      "step": 1375
    },
    {
      "epoch": 0.14,
      "grad_norm": 6.0700201988220215,
      "learning_rate": 3.585e-05,
      "loss": 0.3443,
      "step": 1400
    },
    {
      "epoch": 0.1425,
      "grad_norm": 6.010077476501465,
      "learning_rate": 3.5745833333333326e-05,
      "loss": 0.3516,
      "step": 1425
    },
    {
      "epoch": 0.145,
      "grad_norm": 4.950198173522949,
      "learning_rate": 3.564166666666666e-05,
      "loss": 0.3309,
      "step": 1450
    },
    {
      "epoch": 0.1475,
      "grad_norm": 4.597211837768555,
      "learning_rate": 3.5537499999999996e-05,
      "loss": 0.2661,
      "step": 1475
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.0821356773376465,
      "learning_rate": 3.543333333333333e-05,
      "loss": 0.2346,
      "step": 1500
    },
    {
      "epoch": 0.1525,
      "grad_norm": 3.8110575675964355,
      "learning_rate": 3.532916666666667e-05,
      "loss": 0.2383,
      "step": 1525
    },
    {
      "epoch": 0.155,
      "grad_norm": 5.058354377746582,
      "learning_rate": 3.5224999999999996e-05,
      "loss": 0.2311,
      "step": 1550
    },
    {
      "epoch": 0.1575,
      "grad_norm": 4.210774898529053,
      "learning_rate": 3.512083333333333e-05,
      "loss": 0.205,
      "step": 1575
    },
    {
      "epoch": 0.16,
      "grad_norm": 4.318747043609619,
      "learning_rate": 3.5016666666666666e-05,
      "loss": 0.215,
      "step": 1600
    },
    {
      "epoch": 0.1625,
      "grad_norm": 5.172638893127441,
      "learning_rate": 3.49125e-05,
      "loss": 0.3064,
      "step": 1625
    },
    {
      "epoch": 0.165,
      "grad_norm": 5.396555423736572,
      "learning_rate": 3.480833333333333e-05,
      "loss": 0.3197,
      "step": 1650
    },
    {
      "epoch": 0.1675,
      "grad_norm": 7.734130382537842,
      "learning_rate": 3.4704166666666665e-05,
      "loss": 0.3303,
      "step": 1675
    },
    {
      "epoch": 0.17,
      "grad_norm": 4.788872241973877,
      "learning_rate": 3.4599999999999994e-05,
      "loss": 0.3104,
      "step": 1700
    },
    {
      "epoch": 0.1725,
      "grad_norm": 4.634150505065918,
      "learning_rate": 3.449583333333333e-05,
      "loss": 0.26,
      "step": 1725
    },
    {
      "epoch": 0.175,
      "grad_norm": 4.166738510131836,
      "learning_rate": 3.4391666666666665e-05,
      "loss": 0.2411,
      "step": 1750
    },
    {
      "epoch": 0.1775,
      "grad_norm": 4.349499702453613,
      "learning_rate": 3.42875e-05,
      "loss": 0.2139,
      "step": 1775
    },
    {
      "epoch": 0.18,
      "grad_norm": 4.308148384094238,
      "learning_rate": 3.418333333333333e-05,
      "loss": 0.218,
      "step": 1800
    },
    {
      "epoch": 0.1825,
      "grad_norm": 3.210516929626465,
      "learning_rate": 3.4079166666666664e-05,
      "loss": 0.2069,
      "step": 1825
    },
    {
      "epoch": 0.185,
      "grad_norm": 3.348567247390747,
      "learning_rate": 3.3975e-05,
      "loss": 0.193,
      "step": 1850
    },
    {
      "epoch": 0.1875,
      "grad_norm": 4.138334274291992,
      "learning_rate": 3.3870833333333334e-05,
      "loss": 0.1939,
      "step": 1875
    },
    {
      "epoch": 0.19,
      "grad_norm": 4.503148555755615,
      "learning_rate": 3.376666666666666e-05,
      "loss": 0.1921,
      "step": 1900
    },
    {
      "epoch": 0.1925,
      "grad_norm": 3.379345417022705,
      "learning_rate": 3.36625e-05,
      "loss": 0.1829,
      "step": 1925
    },
    {
      "epoch": 0.195,
      "grad_norm": 4.82003927230835,
      "learning_rate": 3.355833333333333e-05,
      "loss": 0.2564,
      "step": 1950
    },
    {
      "epoch": 0.1975,
      "grad_norm": 5.72125768661499,
      "learning_rate": 3.345416666666666e-05,
      "loss": 0.3326,
      "step": 1975
    },
    {
      "epoch": 0.2,
      "grad_norm": 5.574431896209717,
      "learning_rate": 3.335e-05,
      "loss": 0.3145,
      "step": 2000
    },
    {
      "epoch": 0.2,
      "eval_loss": 0.5048365592956543,
      "eval_runtime": 37.7406,
      "eval_samples_per_second": 44.806,
      "eval_steps_per_second": 2.809,
      "eval_wer": 25.259148245285374,
      "step": 2000
    },
    {
      "epoch": 0.2025,
      "grad_norm": 4.822042942047119,
      "learning_rate": 3.324583333333333e-05,
      "loss": 0.2662,
      "step": 2025
    },
    {
      "epoch": 0.205,
      "grad_norm": 4.082330226898193,
      "learning_rate": 3.314166666666666e-05,
      "loss": 0.1935,
      "step": 2050
    },
    {
      "epoch": 0.2075,
      "grad_norm": 5.841582775115967,
      "learning_rate": 3.30375e-05,
      "loss": 0.186,
      "step": 2075
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.377840042114258,
      "learning_rate": 3.293333333333333e-05,
      "loss": 0.1834,
      "step": 2100
    },
    {
      "epoch": 0.2125,
      "grad_norm": 3.8289568424224854,
      "learning_rate": 3.282916666666667e-05,
      "loss": 0.1594,
      "step": 2125
    },
    {
      "epoch": 0.215,
      "grad_norm": 3.3037023544311523,
      "learning_rate": 3.2724999999999996e-05,
      "loss": 0.1623,
      "step": 2150
    },
    {
      "epoch": 0.2175,
      "grad_norm": 4.1908860206604,
      "learning_rate": 3.262083333333333e-05,
      "loss": 0.1674,
      "step": 2175
    },
    {
      "epoch": 0.22,
      "grad_norm": 6.681819438934326,
      "learning_rate": 3.2516666666666666e-05,
      "loss": 0.2856,
      "step": 2200
    },
    {
      "epoch": 0.2225,
      "grad_norm": 5.509127616882324,
      "learning_rate": 3.2412499999999995e-05,
      "loss": 0.2918,
      "step": 2225
    },
    {
      "epoch": 0.225,
      "grad_norm": 4.549745559692383,
      "learning_rate": 3.230833333333333e-05,
      "loss": 0.3008,
      "step": 2250
    },
    {
      "epoch": 0.2275,
      "grad_norm": 4.073692798614502,
      "learning_rate": 3.2204166666666666e-05,
      "loss": 0.2116,
      "step": 2275
    },
    {
      "epoch": 0.23,
      "grad_norm": 3.217360734939575,
      "learning_rate": 3.2099999999999994e-05,
      "loss": 0.1622,
      "step": 2300
    },
    {
      "epoch": 0.2325,
      "grad_norm": 4.068457126617432,
      "learning_rate": 3.199583333333333e-05,
      "loss": 0.1538,
      "step": 2325
    },
    {
      "epoch": 0.235,
      "grad_norm": 6.21290922164917,
      "learning_rate": 3.1891666666666665e-05,
      "loss": 0.1543,
      "step": 2350
    },
    {
      "epoch": 0.2375,
      "grad_norm": 2.9277803897857666,
      "learning_rate": 3.17875e-05,
      "loss": 0.158,
      "step": 2375
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.4852840900421143,
      "learning_rate": 3.168333333333333e-05,
      "loss": 0.1626,
      "step": 2400
    },
    {
      "epoch": 0.2425,
      "grad_norm": 4.382246971130371,
      "learning_rate": 3.1579166666666664e-05,
      "loss": 0.1642,
      "step": 2425
    },
    {
      "epoch": 0.245,
      "grad_norm": 2.6627678871154785,
      "learning_rate": 3.1475e-05,
      "loss": 0.1526,
      "step": 2450
    },
    {
      "epoch": 0.2475,
      "grad_norm": 3.2728075981140137,
      "learning_rate": 3.1370833333333335e-05,
      "loss": 0.1624,
      "step": 2475
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.8267433643341064,
      "learning_rate": 3.126666666666666e-05,
      "loss": 0.1857,
      "step": 2500
    },
    {
      "epoch": 0.2525,
      "grad_norm": 9.670909881591797,
      "learning_rate": 3.11625e-05,
      "loss": 0.2492,
      "step": 2525
    },
    {
      "epoch": 0.255,
      "grad_norm": 4.275064945220947,
      "learning_rate": 3.105833333333333e-05,
      "loss": 0.238,
      "step": 2550
    },
    {
      "epoch": 0.2575,
      "grad_norm": 4.751267910003662,
      "learning_rate": 3.095416666666666e-05,
      "loss": 0.2788,
      "step": 2575
    },
    {
      "epoch": 0.26,
      "grad_norm": 3.571152925491333,
      "learning_rate": 3.085e-05,
      "loss": 0.2056,
      "step": 2600
    },
    {
      "epoch": 0.2625,
      "grad_norm": 4.068671226501465,
      "learning_rate": 3.074583333333333e-05,
      "loss": 0.1741,
      "step": 2625
    },
    {
      "epoch": 0.265,
      "grad_norm": 3.274651527404785,
      "learning_rate": 3.064166666666666e-05,
      "loss": 0.1571,
      "step": 2650
    },
    {
      "epoch": 0.2675,
      "grad_norm": 3.0895748138427734,
      "learning_rate": 3.05375e-05,
      "loss": 0.149,
      "step": 2675
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.9620044231414795,
      "learning_rate": 3.0433333333333332e-05,
      "loss": 0.1584,
      "step": 2700
    },
    {
      "epoch": 0.2725,
      "grad_norm": 3.2053639888763428,
      "learning_rate": 3.0329166666666664e-05,
      "loss": 0.1676,
      "step": 2725
    },
    {
      "epoch": 0.275,
      "grad_norm": 4.407464981079102,
      "learning_rate": 3.0225e-05,
      "loss": 0.1799,
      "step": 2750
    },
    {
      "epoch": 0.2775,
      "grad_norm": 5.430395603179932,
      "learning_rate": 3.0120833333333328e-05,
      "loss": 0.3165,
      "step": 2775
    },
    {
      "epoch": 0.28,
      "grad_norm": 4.443249702453613,
      "learning_rate": 3.0016666666666663e-05,
      "loss": 0.2934,
      "step": 2800
    },
    {
      "epoch": 0.2825,
      "grad_norm": 4.94956111907959,
      "learning_rate": 2.9912499999999995e-05,
      "loss": 0.2587,
      "step": 2825
    },
    {
      "epoch": 0.285,
      "grad_norm": 4.102962493896484,
      "learning_rate": 2.980833333333333e-05,
      "loss": 0.2637,
      "step": 2850
    },
    {
      "epoch": 0.2875,
      "grad_norm": 3.9422686100006104,
      "learning_rate": 2.9704166666666662e-05,
      "loss": 0.2561,
      "step": 2875
    },
    {
      "epoch": 0.29,
      "grad_norm": 4.891707897186279,
      "learning_rate": 2.9599999999999998e-05,
      "loss": 0.2476,
      "step": 2900
    },
    {
      "epoch": 0.2925,
      "grad_norm": 3.3687989711761475,
      "learning_rate": 2.949583333333333e-05,
      "loss": 0.2015,
      "step": 2925
    },
    {
      "epoch": 0.295,
      "grad_norm": 3.280766010284424,
      "learning_rate": 2.9391666666666665e-05,
      "loss": 0.1618,
      "step": 2950
    },
    {
      "epoch": 0.2975,
      "grad_norm": 3.6520583629608154,
      "learning_rate": 2.9287499999999997e-05,
      "loss": 0.1685,
      "step": 2975
    },
    {
      "epoch": 0.3,
      "grad_norm": 4.03117561340332,
      "learning_rate": 2.9183333333333332e-05,
      "loss": 0.225,
      "step": 3000
    },
    {
      "epoch": 0.3,
      "eval_loss": 0.4838997423648834,
      "eval_runtime": 38.3108,
      "eval_samples_per_second": 44.139,
      "eval_steps_per_second": 2.767,
      "eval_wer": 22.05570126139628,
      "step": 3000
    },
    {
      "epoch": 0.3025,
      "grad_norm": 5.466115951538086,
      "learning_rate": 2.9079166666666664e-05,
      "loss": 0.2752,
      "step": 3025
    },
    {
      "epoch": 0.305,
      "grad_norm": 6.833763599395752,
      "learning_rate": 2.8974999999999996e-05,
      "loss": 0.3203,
      "step": 3050
    },
    {
      "epoch": 0.3075,
      "grad_norm": 3.3256447315216064,
      "learning_rate": 2.8870833333333328e-05,
      "loss": 0.2206,
      "step": 3075
    },
    {
      "epoch": 0.31,
      "grad_norm": 3.841505289077759,
      "learning_rate": 2.8766666666666663e-05,
      "loss": 0.1708,
      "step": 3100
    },
    {
      "epoch": 0.3125,
      "grad_norm": 3.2223970890045166,
      "learning_rate": 2.8662499999999995e-05,
      "loss": 0.1507,
      "step": 3125
    },
    {
      "epoch": 0.315,
      "grad_norm": 4.438643932342529,
      "learning_rate": 2.855833333333333e-05,
      "loss": 0.1827,
      "step": 3150
    },
    {
      "epoch": 0.3175,
      "grad_norm": 4.88558292388916,
      "learning_rate": 2.8454166666666663e-05,
      "loss": 0.2668,
      "step": 3175
    },
    {
      "epoch": 0.32,
      "grad_norm": 5.326033115386963,
      "learning_rate": 2.8349999999999998e-05,
      "loss": 0.247,
      "step": 3200
    },
    {
      "epoch": 0.3225,
      "grad_norm": 4.686404228210449,
      "learning_rate": 2.824583333333333e-05,
      "loss": 0.2666,
      "step": 3225
    },
    {
      "epoch": 0.325,
      "grad_norm": 5.8094401359558105,
      "learning_rate": 2.8141666666666665e-05,
      "loss": 0.2178,
      "step": 3250
    },
    {
      "epoch": 0.3275,
      "grad_norm": 5.154630661010742,
      "learning_rate": 2.80375e-05,
      "loss": 0.2241,
      "step": 3275
    },
    {
      "epoch": 0.33,
      "grad_norm": 3.6322267055511475,
      "learning_rate": 2.7933333333333332e-05,
      "loss": 0.2393,
      "step": 3300
    },
    {
      "epoch": 0.3325,
      "grad_norm": 3.901859998703003,
      "learning_rate": 2.7829166666666668e-05,
      "loss": 0.188,
      "step": 3325
    },
    {
      "epoch": 0.335,
      "grad_norm": 3.8515241146087646,
      "learning_rate": 2.7724999999999996e-05,
      "loss": 0.1522,
      "step": 3350
    },
    {
      "epoch": 0.3375,
      "grad_norm": 3.150157928466797,
      "learning_rate": 2.7620833333333328e-05,
      "loss": 0.1356,
      "step": 3375
    },
    {
      "epoch": 0.34,
      "grad_norm": 3.471564769744873,
      "learning_rate": 2.7516666666666664e-05,
      "loss": 0.1435,
      "step": 3400
    },
    {
      "epoch": 0.3425,
      "grad_norm": 3.938312292098999,
      "learning_rate": 2.7412499999999995e-05,
      "loss": 0.1445,
      "step": 3425
    },
    {
      "epoch": 0.345,
      "grad_norm": 2.5613808631896973,
      "learning_rate": 2.730833333333333e-05,
      "loss": 0.1363,
      "step": 3450
    },
    {
      "epoch": 0.3475,
      "grad_norm": 3.3892834186553955,
      "learning_rate": 2.7204166666666663e-05,
      "loss": 0.1322,
      "step": 3475
    },
    {
      "epoch": 0.35,
      "grad_norm": 4.404175281524658,
      "learning_rate": 2.7099999999999998e-05,
      "loss": 0.1602,
      "step": 3500
    },
    {
      "epoch": 0.3525,
      "grad_norm": 3.336073398590088,
      "learning_rate": 2.6995833333333333e-05,
      "loss": 0.1488,
      "step": 3525
    },
    {
      "epoch": 0.355,
      "grad_norm": 5.919867992401123,
      "learning_rate": 2.6891666666666665e-05,
      "loss": 0.1476,
      "step": 3550
    },
    {
      "epoch": 0.3575,
      "grad_norm": 5.114974021911621,
      "learning_rate": 2.67875e-05,
      "loss": 0.2274,
      "step": 3575
    },
    {
      "epoch": 0.36,
      "grad_norm": 4.443377494812012,
      "learning_rate": 2.6683333333333333e-05,
      "loss": 0.2627,
      "step": 3600
    },
    {
      "epoch": 0.3625,
      "grad_norm": 4.095592021942139,
      "learning_rate": 2.657916666666666e-05,
      "loss": 0.2451,
      "step": 3625
    },
    {
      "epoch": 0.365,
      "grad_norm": 2.603665828704834,
      "learning_rate": 2.6474999999999996e-05,
      "loss": 0.1531,
      "step": 3650
    },
    {
      "epoch": 0.3675,
      "grad_norm": 3.089383125305176,
      "learning_rate": 2.637083333333333e-05,
      "loss": 0.1334,
      "step": 3675
    },
    {
      "epoch": 0.37,
      "grad_norm": 3.058241605758667,
      "learning_rate": 2.6266666666666664e-05,
      "loss": 0.1322,
      "step": 3700
    },
    {
      "epoch": 0.3725,
      "grad_norm": 3.3943984508514404,
      "learning_rate": 2.61625e-05,
      "loss": 0.159,
      "step": 3725
    },
    {
      "epoch": 0.375,
      "grad_norm": 4.98250150680542,
      "learning_rate": 2.605833333333333e-05,
      "loss": 0.2044,
      "step": 3750
    },
    {
      "epoch": 0.3775,
      "grad_norm": 3.9143283367156982,
      "learning_rate": 2.5954166666666666e-05,
      "loss": 0.2495,
      "step": 3775
    },
    {
      "epoch": 0.38,
      "grad_norm": 5.307981014251709,
      "learning_rate": 2.5849999999999998e-05,
      "loss": 0.2502,
      "step": 3800
    },
    {
      "epoch": 0.3825,
      "grad_norm": 4.974552631378174,
      "learning_rate": 2.5745833333333333e-05,
      "loss": 0.2502,
      "step": 3825
    },
    {
      "epoch": 0.385,
      "grad_norm": 5.2868547439575195,
      "learning_rate": 2.5641666666666665e-05,
      "loss": 0.2662,
      "step": 3850
    },
    {
      "epoch": 0.3875,
      "grad_norm": 4.530261039733887,
      "learning_rate": 2.55375e-05,
      "loss": 0.2359,
      "step": 3875
    },
    {
      "epoch": 0.39,
      "grad_norm": 3.932204484939575,
      "learning_rate": 2.543333333333333e-05,
      "loss": 0.1757,
      "step": 3900
    },
    {
      "epoch": 0.3925,
      "grad_norm": 3.0662739276885986,
      "learning_rate": 2.5329166666666665e-05,
      "loss": 0.148,
      "step": 3925
    },
    {
      "epoch": 0.395,
      "grad_norm": 3.4663736820220947,
      "learning_rate": 2.5224999999999997e-05,
      "loss": 0.1411,
      "step": 3950
    },
    {
      "epoch": 0.3975,
      "grad_norm": 6.448814868927002,
      "learning_rate": 2.5120833333333332e-05,
      "loss": 0.2409,
      "step": 3975
    },
    {
      "epoch": 0.4,
      "grad_norm": 4.826459884643555,
      "learning_rate": 2.5016666666666664e-05,
      "loss": 0.3003,
      "step": 4000
    },
    {
      "epoch": 0.4,
      "eval_loss": 0.45400944352149963,
      "eval_runtime": 38.2487,
      "eval_samples_per_second": 44.211,
      "eval_steps_per_second": 2.771,
      "eval_wer": 20.307231172723867,
      "step": 4000
    },
    {
      "epoch": 0.4025,
      "grad_norm": 3.8398826122283936,
      "learning_rate": 2.49125e-05,
      "loss": 0.2244,
      "step": 4025
    },
    {
      "epoch": 0.405,
      "grad_norm": 5.592238426208496,
      "learning_rate": 2.480833333333333e-05,
      "loss": 0.3029,
      "step": 4050
    },
    {
      "epoch": 0.4075,
      "grad_norm": 4.635577201843262,
      "learning_rate": 2.4704166666666666e-05,
      "loss": 0.2194,
      "step": 4075
    },
    {
      "epoch": 0.41,
      "grad_norm": 3.2737910747528076,
      "learning_rate": 2.4599999999999998e-05,
      "loss": 0.2102,
      "step": 4100
    },
    {
      "epoch": 0.4125,
      "grad_norm": 2.5521626472473145,
      "learning_rate": 2.4495833333333334e-05,
      "loss": 0.1517,
      "step": 4125
    },
    {
      "epoch": 0.415,
      "grad_norm": 3.2408368587493896,
      "learning_rate": 2.4391666666666666e-05,
      "loss": 0.1331,
      "step": 4150
    },
    {
      "epoch": 0.4175,
      "grad_norm": 3.243713855743408,
      "learning_rate": 2.4287499999999997e-05,
      "loss": 0.1385,
      "step": 4175
    },
    {
      "epoch": 0.42,
      "grad_norm": 2.6619813442230225,
      "learning_rate": 2.418333333333333e-05,
      "loss": 0.1307,
      "step": 4200
    },
    {
      "epoch": 0.4225,
      "grad_norm": 2.726177930831909,
      "learning_rate": 2.4079166666666665e-05,
      "loss": 0.1399,
      "step": 4225
    },
    {
      "epoch": 0.425,
      "grad_norm": 3.1518099308013916,
      "learning_rate": 2.3974999999999997e-05,
      "loss": 0.1344,
      "step": 4250
    },
    {
      "epoch": 0.4275,
      "grad_norm": 3.95133376121521,
      "learning_rate": 2.3870833333333332e-05,
      "loss": 0.1512,
      "step": 4275
    },
    {
      "epoch": 0.43,
      "grad_norm": 4.805422306060791,
      "learning_rate": 2.3766666666666664e-05,
      "loss": 0.2212,
      "step": 4300
    },
    {
      "epoch": 0.4325,
      "grad_norm": 4.867978572845459,
      "learning_rate": 2.36625e-05,
      "loss": 0.2005,
      "step": 4325
    },
    {
      "epoch": 0.435,
      "grad_norm": 3.448626756668091,
      "learning_rate": 2.355833333333333e-05,
      "loss": 0.2085,
      "step": 4350
    },
    {
      "epoch": 0.4375,
      "grad_norm": 2.675185441970825,
      "learning_rate": 2.3454166666666666e-05,
      "loss": 0.1263,
      "step": 4375
    },
    {
      "epoch": 0.44,
      "grad_norm": 3.467017412185669,
      "learning_rate": 2.335e-05,
      "loss": 0.1122,
      "step": 4400
    },
    {
      "epoch": 0.4425,
      "grad_norm": 3.4933815002441406,
      "learning_rate": 2.3245833333333334e-05,
      "loss": 0.1266,
      "step": 4425
    },
    {
      "epoch": 0.445,
      "grad_norm": 3.4990930557250977,
      "learning_rate": 2.3141666666666666e-05,
      "loss": 0.1466,
      "step": 4450
    },
    {
      "epoch": 0.4475,
      "grad_norm": 3.077288866043091,
      "learning_rate": 2.3037499999999998e-05,
      "loss": 0.1486,
      "step": 4475
    },
    {
      "epoch": 0.45,
      "grad_norm": 3.375373363494873,
      "learning_rate": 2.293333333333333e-05,
      "loss": 0.1391,
      "step": 4500
    },
    {
      "epoch": 0.4525,
      "grad_norm": 4.026995658874512,
      "learning_rate": 2.2829166666666665e-05,
      "loss": 0.1807,
      "step": 4525
    },
    {
      "epoch": 0.455,
      "grad_norm": 4.551440238952637,
      "learning_rate": 2.2724999999999997e-05,
      "loss": 0.2373,
      "step": 4550
    },
    {
      "epoch": 0.4575,
      "grad_norm": 5.197234153747559,
      "learning_rate": 2.2620833333333332e-05,
      "loss": 0.2225,
      "step": 4575
    },
    {
      "epoch": 0.46,
      "grad_norm": 4.593672752380371,
      "learning_rate": 2.2516666666666664e-05,
      "loss": 0.2394,
      "step": 4600
    },
    {
      "epoch": 0.4625,
      "grad_norm": 4.616493225097656,
      "learning_rate": 2.24125e-05,
      "loss": 0.2431,
      "step": 4625
    },
    {
      "epoch": 0.465,
      "grad_norm": 5.233091831207275,
      "learning_rate": 2.230833333333333e-05,
      "loss": 0.2357,
      "step": 4650
    },
    {
      "epoch": 0.4675,
      "grad_norm": 4.357009410858154,
      "learning_rate": 2.2204166666666667e-05,
      "loss": 0.2041,
      "step": 4675
    },
    {
      "epoch": 0.47,
      "grad_norm": 3.481781482696533,
      "learning_rate": 2.21e-05,
      "loss": 0.1817,
      "step": 4700
    },
    {
      "epoch": 0.4725,
      "grad_norm": 4.179861545562744,
      "learning_rate": 2.1995833333333334e-05,
      "loss": 0.1953,
      "step": 4725
    },
    {
      "epoch": 0.475,
      "grad_norm": 3.9264073371887207,
      "learning_rate": 2.1891666666666662e-05,
      "loss": 0.1927,
      "step": 4750
    },
    {
      "epoch": 0.4775,
      "grad_norm": 3.4452853202819824,
      "learning_rate": 2.1787499999999998e-05,
      "loss": 0.1443,
      "step": 4775
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.9945247173309326,
      "learning_rate": 2.168333333333333e-05,
      "loss": 0.1587,
      "step": 4800
    },
    {
      "epoch": 0.4825,
      "grad_norm": 3.666703701019287,
      "learning_rate": 2.1579166666666665e-05,
      "loss": 0.1314,
      "step": 4825
    },
    {
      "epoch": 0.485,
      "grad_norm": 3.7032406330108643,
      "learning_rate": 2.1474999999999997e-05,
      "loss": 0.1524,
      "step": 4850
    },
    {
      "epoch": 0.4875,
      "grad_norm": 3.9737789630889893,
      "learning_rate": 2.1370833333333332e-05,
      "loss": 0.1605,
      "step": 4875
    },
    {
      "epoch": 0.49,
      "grad_norm": 4.30121374130249,
      "learning_rate": 2.1266666666666664e-05,
      "loss": 0.1834,
      "step": 4900
    },
    {
      "epoch": 0.4925,
      "grad_norm": 3.051647424697876,
      "learning_rate": 2.11625e-05,
      "loss": 0.1651,
      "step": 4925
    },
    {
      "epoch": 0.495,
      "grad_norm": 3.1889472007751465,
      "learning_rate": 2.105833333333333e-05,
      "loss": 0.1392,
      "step": 4950
    },
    {
      "epoch": 0.4975,
      "grad_norm": 3.226409912109375,
      "learning_rate": 2.0954166666666667e-05,
      "loss": 0.1286,
      "step": 4975
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.729316234588623,
      "learning_rate": 2.085e-05,
      "loss": 0.132,
      "step": 5000
    },
    {
      "epoch": 0.5,
      "eval_loss": 0.45741865038871765,
      "eval_runtime": 38.6756,
      "eval_samples_per_second": 43.723,
      "eval_steps_per_second": 2.741,
      "eval_wer": 19.014612214312475,
      "step": 5000
    },
    {
      "epoch": 0.5025,
      "grad_norm": 3.4599833488464355,
      "learning_rate": 2.074583333333333e-05,
      "loss": 0.1868,
      "step": 5025
    },
    {
      "epoch": 0.505,
      "grad_norm": 3.7839012145996094,
      "learning_rate": 2.0641666666666662e-05,
      "loss": 0.1635,
      "step": 5050
    },
    {
      "epoch": 0.5075,
      "grad_norm": 4.264291286468506,
      "learning_rate": 2.0537499999999998e-05,
      "loss": 0.2522,
      "step": 5075
    },
    {
      "epoch": 0.51,
      "grad_norm": 4.526363849639893,
      "learning_rate": 2.043333333333333e-05,
      "loss": 0.2296,
      "step": 5100
    },
    {
      "epoch": 0.5125,
      "grad_norm": 4.191585063934326,
      "learning_rate": 2.0329166666666665e-05,
      "loss": 0.1969,
      "step": 5125
    },
    {
      "epoch": 0.515,
      "grad_norm": 5.047796249389648,
      "learning_rate": 2.0224999999999997e-05,
      "loss": 0.204,
      "step": 5150
    },
    {
      "epoch": 0.5175,
      "grad_norm": 2.665344476699829,
      "learning_rate": 2.0120833333333332e-05,
      "loss": 0.1704,
      "step": 5175
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.922651529312134,
      "learning_rate": 2.0016666666666664e-05,
      "loss": 0.117,
      "step": 5200
    },
    {
      "epoch": 0.5225,
      "grad_norm": 2.9956483840942383,
      "learning_rate": 1.99125e-05,
      "loss": 0.123,
      "step": 5225
    },
    {
      "epoch": 0.525,
      "grad_norm": 2.745727777481079,
      "learning_rate": 1.980833333333333e-05,
      "loss": 0.1223,
      "step": 5250
    },
    {
      "epoch": 0.5275,
      "grad_norm": 2.3079681396484375,
      "learning_rate": 1.9704166666666667e-05,
      "loss": 0.1229,
      "step": 5275
    },
    {
      "epoch": 0.53,
      "grad_norm": 3.4931304454803467,
      "learning_rate": 1.9599999999999995e-05,
      "loss": 0.1287,
      "step": 5300
    },
    {
      "epoch": 0.5325,
      "grad_norm": 3.3155760765075684,
      "learning_rate": 1.949583333333333e-05,
      "loss": 0.1284,
      "step": 5325
    },
    {
      "epoch": 0.535,
      "grad_norm": 4.483437538146973,
      "learning_rate": 1.9391666666666663e-05,
      "loss": 0.1695,
      "step": 5350
    },
    {
      "epoch": 0.5375,
      "grad_norm": 4.182435512542725,
      "learning_rate": 1.9287499999999998e-05,
      "loss": 0.2029,
      "step": 5375
    },
    {
      "epoch": 0.54,
      "grad_norm": 3.4684391021728516,
      "learning_rate": 1.918333333333333e-05,
      "loss": 0.1907,
      "step": 5400
    },
    {
      "epoch": 0.5425,
      "grad_norm": 4.1064019203186035,
      "learning_rate": 1.9079166666666665e-05,
      "loss": 0.1847,
      "step": 5425
    },
    {
      "epoch": 0.545,
      "grad_norm": 3.6676344871520996,
      "learning_rate": 1.8974999999999997e-05,
      "loss": 0.1644,
      "step": 5450
    },
    {
      "epoch": 0.5475,
      "grad_norm": 3.6105685234069824,
      "learning_rate": 1.8870833333333332e-05,
      "loss": 0.1368,
      "step": 5475
    },
    {
      "epoch": 0.55,
      "grad_norm": 3.2299535274505615,
      "learning_rate": 1.8766666666666664e-05,
      "loss": 0.138,
      "step": 5500
    },
    {
      "epoch": 0.5525,
      "grad_norm": 5.327032566070557,
      "learning_rate": 1.8662499999999996e-05,
      "loss": 0.1786,
      "step": 5525
    },
    {
      "epoch": 0.555,
      "grad_norm": 4.6841583251953125,
      "learning_rate": 1.855833333333333e-05,
      "loss": 0.2034,
      "step": 5550
    },
    {
      "epoch": 0.5575,
      "grad_norm": 4.341950416564941,
      "learning_rate": 1.8454166666666663e-05,
      "loss": 0.1611,
      "step": 5575
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.254387378692627,
      "learning_rate": 1.835e-05,
      "loss": 0.1087,
      "step": 5600
    },
    {
      "epoch": 0.5625,
      "grad_norm": 3.0610556602478027,
      "learning_rate": 1.8245833333333334e-05,
      "loss": 0.0877,
      "step": 5625
    },
    {
      "epoch": 0.565,
      "grad_norm": 2.6597042083740234,
      "learning_rate": 1.8141666666666663e-05,
      "loss": 0.0893,
      "step": 5650
    },
    {
      "epoch": 0.5675,
      "grad_norm": 2.638615369796753,
      "learning_rate": 1.8037499999999998e-05,
      "loss": 0.1155,
      "step": 5675
    },
    {
      "epoch": 0.57,
      "grad_norm": 3.690338373184204,
      "learning_rate": 1.793333333333333e-05,
      "loss": 0.1252,
      "step": 5700
    },
    {
      "epoch": 0.5725,
      "grad_norm": 2.847730875015259,
      "learning_rate": 1.7829166666666665e-05,
      "loss": 0.1228,
      "step": 5725
    },
    {
      "epoch": 0.575,
      "grad_norm": 2.7520906925201416,
      "learning_rate": 1.7725e-05,
      "loss": 0.1162,
      "step": 5750
    },
    {
      "epoch": 0.5775,
      "grad_norm": 2.643385648727417,
      "learning_rate": 1.7620833333333332e-05,
      "loss": 0.1168,
      "step": 5775
    },
    {
      "epoch": 0.58,
      "grad_norm": 2.70414400100708,
      "learning_rate": 1.7516666666666664e-05,
      "loss": 0.1092,
      "step": 5800
    },
    {
      "epoch": 0.5825,
      "grad_norm": 2.194267988204956,
      "learning_rate": 1.7412499999999996e-05,
      "loss": 0.0949,
      "step": 5825
    },
    {
      "epoch": 0.585,
      "grad_norm": 3.784864902496338,
      "learning_rate": 1.730833333333333e-05,
      "loss": 0.1041,
      "step": 5850
    },
    {
      "epoch": 0.5875,
      "grad_norm": 3.313042402267456,
      "learning_rate": 1.7204166666666667e-05,
      "loss": 0.1105,
      "step": 5875
    },
    {
      "epoch": 0.59,
      "grad_norm": 3.7411179542541504,
      "learning_rate": 1.71e-05,
      "loss": 0.1396,
      "step": 5900
    },
    {
      "epoch": 0.5925,
      "grad_norm": 5.693575859069824,
      "learning_rate": 1.6995833333333334e-05,
      "loss": 0.2024,
      "step": 5925
    },
    {
      "epoch": 0.595,
      "grad_norm": 3.940126419067383,
      "learning_rate": 1.6891666666666663e-05,
      "loss": 0.1936,
      "step": 5950
    },
    {
      "epoch": 0.5975,
      "grad_norm": 4.423816680908203,
      "learning_rate": 1.6787499999999998e-05,
      "loss": 0.1702,
      "step": 5975
    },
    {
      "epoch": 0.6,
      "grad_norm": 3.7899911403656006,
      "learning_rate": 1.6683333333333333e-05,
      "loss": 0.1588,
      "step": 6000
    },
    {
      "epoch": 0.6,
      "eval_loss": 0.4380021393299103,
      "eval_runtime": 37.918,
      "eval_samples_per_second": 44.596,
      "eval_steps_per_second": 2.796,
      "eval_wer": 17.821905832396652,
      "step": 6000
    },
    {
      "epoch": 0.6025,
      "grad_norm": 3.4838547706604004,
      "learning_rate": 1.6579166666666665e-05,
      "loss": 0.1486,
      "step": 6025
    },
    {
      "epoch": 0.605,
      "grad_norm": 3.3172895908355713,
      "learning_rate": 1.6475e-05,
      "loss": 0.1349,
      "step": 6050
    },
    {
      "epoch": 0.6075,
      "grad_norm": 2.6951234340667725,
      "learning_rate": 1.6370833333333333e-05,
      "loss": 0.1201,
      "step": 6075
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.4754366874694824,
      "learning_rate": 1.6266666666666665e-05,
      "loss": 0.1109,
      "step": 6100
    },
    {
      "epoch": 0.6125,
      "grad_norm": 3.2808432579040527,
      "learning_rate": 1.61625e-05,
      "loss": 0.115,
      "step": 6125
    },
    {
      "epoch": 0.615,
      "grad_norm": 3.401995897293091,
      "learning_rate": 1.6058333333333332e-05,
      "loss": 0.1293,
      "step": 6150
    },
    {
      "epoch": 0.6175,
      "grad_norm": 3.313483476638794,
      "learning_rate": 1.5954166666666667e-05,
      "loss": 0.1285,
      "step": 6175
    },
    {
      "epoch": 0.62,
      "grad_norm": 4.194374084472656,
      "learning_rate": 1.585e-05,
      "loss": 0.129,
      "step": 6200
    },
    {
      "epoch": 0.6225,
      "grad_norm": 3.408337354660034,
      "learning_rate": 1.574583333333333e-05,
      "loss": 0.1212,
      "step": 6225
    },
    {
      "epoch": 0.625,
      "grad_norm": 3.5416505336761475,
      "learning_rate": 1.5641666666666666e-05,
      "loss": 0.1146,
      "step": 6250
    },
    {
      "epoch": 0.6275,
      "grad_norm": 3.736720561981201,
      "learning_rate": 1.5537499999999998e-05,
      "loss": 0.133,
      "step": 6275
    },
    {
      "epoch": 0.63,
      "grad_norm": 4.025073051452637,
      "learning_rate": 1.5433333333333334e-05,
      "loss": 0.1445,
      "step": 6300
    },
    {
      "epoch": 0.6325,
      "grad_norm": 3.1371307373046875,
      "learning_rate": 1.5329166666666665e-05,
      "loss": 0.1943,
      "step": 6325
    },
    {
      "epoch": 0.635,
      "grad_norm": 3.793010711669922,
      "learning_rate": 1.5224999999999999e-05,
      "loss": 0.176,
      "step": 6350
    },
    {
      "epoch": 0.6375,
      "grad_norm": 3.112964630126953,
      "learning_rate": 1.5120833333333331e-05,
      "loss": 0.1647,
      "step": 6375
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.877613067626953,
      "learning_rate": 1.5016666666666665e-05,
      "loss": 0.1252,
      "step": 6400
    },
    {
      "epoch": 0.6425,
      "grad_norm": 2.556334972381592,
      "learning_rate": 1.4912499999999998e-05,
      "loss": 0.1137,
      "step": 6425
    },
    {
      "epoch": 0.645,
      "grad_norm": 5.248802661895752,
      "learning_rate": 1.4808333333333332e-05,
      "loss": 0.1087,
      "step": 6450
    },
    {
      "epoch": 0.6475,
      "grad_norm": 3.568530321121216,
      "learning_rate": 1.4704166666666666e-05,
      "loss": 0.1548,
      "step": 6475
    },
    {
      "epoch": 0.65,
      "grad_norm": 3.2436602115631104,
      "learning_rate": 1.4599999999999997e-05,
      "loss": 0.1494,
      "step": 6500
    },
    {
      "epoch": 0.6525,
      "grad_norm": 5.127964019775391,
      "learning_rate": 1.4495833333333331e-05,
      "loss": 0.1935,
      "step": 6525
    },
    {
      "epoch": 0.655,
      "grad_norm": 3.848689079284668,
      "learning_rate": 1.4391666666666665e-05,
      "loss": 0.1781,
      "step": 6550
    },
    {
      "epoch": 0.6575,
      "grad_norm": 3.3769173622131348,
      "learning_rate": 1.4287499999999998e-05,
      "loss": 0.178,
      "step": 6575
    },
    {
      "epoch": 0.66,
      "grad_norm": 3.279507875442505,
      "learning_rate": 1.4183333333333332e-05,
      "loss": 0.1559,
      "step": 6600
    },
    {
      "epoch": 0.6625,
      "grad_norm": 4.212521553039551,
      "learning_rate": 1.4079166666666666e-05,
      "loss": 0.1667,
      "step": 6625
    },
    {
      "epoch": 0.665,
      "grad_norm": 3.3866727352142334,
      "learning_rate": 1.3974999999999997e-05,
      "loss": 0.1966,
      "step": 6650
    },
    {
      "epoch": 0.6675,
      "grad_norm": 3.52656888961792,
      "learning_rate": 1.3870833333333331e-05,
      "loss": 0.1675,
      "step": 6675
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.9528002738952637,
      "learning_rate": 1.3766666666666665e-05,
      "loss": 0.1612,
      "step": 6700
    },
    {
      "epoch": 0.6725,
      "grad_norm": 3.4069855213165283,
      "learning_rate": 1.3662499999999998e-05,
      "loss": 0.1162,
      "step": 6725
    },
    {
      "epoch": 0.675,
      "grad_norm": 3.048247814178467,
      "learning_rate": 1.3558333333333334e-05,
      "loss": 0.105,
      "step": 6750
    },
    {
      "epoch": 0.6775,
      "grad_norm": 3.930851459503174,
      "learning_rate": 1.3454166666666664e-05,
      "loss": 0.1272,
      "step": 6775
    },
    {
      "epoch": 0.68,
      "grad_norm": 4.210860729217529,
      "learning_rate": 1.3349999999999998e-05,
      "loss": 0.2174,
      "step": 6800
    },
    {
      "epoch": 0.6825,
      "grad_norm": 6.0455498695373535,
      "learning_rate": 1.3245833333333331e-05,
      "loss": 0.194,
      "step": 6825
    },
    {
      "epoch": 0.685,
      "grad_norm": 5.270689487457275,
      "learning_rate": 1.3141666666666665e-05,
      "loss": 0.2003,
      "step": 6850
    },
    {
      "epoch": 0.6875,
      "grad_norm": 4.297138690948486,
      "learning_rate": 1.30375e-05,
      "loss": 0.1923,
      "step": 6875
    },
    {
      "epoch": 0.69,
      "grad_norm": 4.304157257080078,
      "learning_rate": 1.2933333333333334e-05,
      "loss": 0.1884,
      "step": 6900
    },
    {
      "epoch": 0.6925,
      "grad_norm": 3.3254096508026123,
      "learning_rate": 1.2829166666666664e-05,
      "loss": 0.1713,
      "step": 6925
    },
    {
      "epoch": 0.695,
      "grad_norm": 4.185650825500488,
      "learning_rate": 1.2724999999999998e-05,
      "loss": 0.1756,
      "step": 6950
    },
    {
      "epoch": 0.6975,
      "grad_norm": 4.220348358154297,
      "learning_rate": 1.2620833333333333e-05,
      "loss": 0.1958,
      "step": 6975
    },
    {
      "epoch": 0.7,
      "grad_norm": 4.203312873840332,
      "learning_rate": 1.2516666666666667e-05,
      "loss": 0.1841,
      "step": 7000
    },
    {
      "epoch": 0.7,
      "eval_loss": 0.4394695460796356,
      "eval_runtime": 37.5658,
      "eval_samples_per_second": 45.014,
      "eval_steps_per_second": 2.822,
      "eval_wer": 16.666666666666664,
      "step": 7000
    },
    {
      "epoch": 0.7025,
      "grad_norm": 6.67822265625,
      "learning_rate": 1.2416666666666666e-05,
      "loss": 0.3408,
      "step": 7025
    },
    {
      "epoch": 0.705,
      "grad_norm": 8.843494415283203,
      "learning_rate": 1.2312499999999998e-05,
      "loss": 0.6013,
      "step": 7050
    },
    {
      "epoch": 0.7075,
      "grad_norm": 5.624876499176025,
      "learning_rate": 1.2208333333333331e-05,
      "loss": 0.5257,
      "step": 7075
    },
    {
      "epoch": 0.71,
      "grad_norm": 4.507925987243652,
      "learning_rate": 1.2104166666666665e-05,
      "loss": 0.4356,
      "step": 7100
    },
    {
      "epoch": 0.7125,
      "grad_norm": 4.30970573425293,
      "learning_rate": 1.1999999999999999e-05,
      "loss": 0.2381,
      "step": 7125
    },
    {
      "epoch": 0.715,
      "grad_norm": 5.139924049377441,
      "learning_rate": 1.1895833333333332e-05,
      "loss": 0.185,
      "step": 7150
    },
    {
      "epoch": 0.7175,
      "grad_norm": 2.9631428718566895,
      "learning_rate": 1.1791666666666666e-05,
      "loss": 0.17,
      "step": 7175
    },
    {
      "epoch": 0.72,
      "grad_norm": 3.6727359294891357,
      "learning_rate": 1.1687499999999998e-05,
      "loss": 0.1323,
      "step": 7200
    },
    {
      "epoch": 0.7225,
      "grad_norm": 2.7629499435424805,
      "learning_rate": 1.1583333333333331e-05,
      "loss": 0.128,
      "step": 7225
    },
    {
      "epoch": 0.725,
      "grad_norm": 3.040910482406616,
      "learning_rate": 1.1479166666666665e-05,
      "loss": 0.1371,
      "step": 7250
    },
    {
      "epoch": 0.7275,
      "grad_norm": 2.7423293590545654,
      "learning_rate": 1.1374999999999999e-05,
      "loss": 0.1128,
      "step": 7275
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.2156248092651367,
      "learning_rate": 1.1270833333333332e-05,
      "loss": 0.0964,
      "step": 7300
    },
    {
      "epoch": 0.7325,
      "grad_norm": 2.783275604248047,
      "learning_rate": 1.1166666666666664e-05,
      "loss": 0.101,
      "step": 7325
    },
    {
      "epoch": 0.735,
      "grad_norm": 4.380770206451416,
      "learning_rate": 1.1062499999999998e-05,
      "loss": 0.132,
      "step": 7350
    },
    {
      "epoch": 0.7375,
      "grad_norm": 3.904127597808838,
      "learning_rate": 1.0958333333333331e-05,
      "loss": 0.1807,
      "step": 7375
    },
    {
      "epoch": 0.74,
      "grad_norm": 5.420929431915283,
      "learning_rate": 1.0854166666666665e-05,
      "loss": 0.1436,
      "step": 7400
    },
    {
      "epoch": 0.7425,
      "grad_norm": 2.929569721221924,
      "learning_rate": 1.075e-05,
      "loss": 0.1648,
      "step": 7425
    },
    {
      "epoch": 0.745,
      "grad_norm": 3.0535011291503906,
      "learning_rate": 1.0645833333333334e-05,
      "loss": 0.1166,
      "step": 7450
    },
    {
      "epoch": 0.7475,
      "grad_norm": 2.630368232727051,
      "learning_rate": 1.0541666666666664e-05,
      "loss": 0.1081,
      "step": 7475
    },
    {
      "epoch": 0.75,
      "grad_norm": 4.1855621337890625,
      "learning_rate": 1.0437499999999998e-05,
      "loss": 0.1033,
      "step": 7500
    },
    {
      "epoch": 0.7525,
      "grad_norm": 4.28782844543457,
      "learning_rate": 1.0333333333333332e-05,
      "loss": 0.2037,
      "step": 7525
    },
    {
      "epoch": 0.755,
      "grad_norm": 5.351161956787109,
      "learning_rate": 1.0229166666666667e-05,
      "loss": 0.2651,
      "step": 7550
    },
    {
      "epoch": 0.7575,
      "grad_norm": 5.073176383972168,
      "learning_rate": 1.0125e-05,
      "loss": 0.2933,
      "step": 7575
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.562958002090454,
      "learning_rate": 1.0020833333333334e-05,
      "loss": 0.1829,
      "step": 7600
    },
    {
      "epoch": 0.7625,
      "grad_norm": 4.164181709289551,
      "learning_rate": 9.916666666666664e-06,
      "loss": 0.1309,
      "step": 7625
    },
    {
      "epoch": 0.765,
      "grad_norm": 2.616945743560791,
      "learning_rate": 9.8125e-06,
      "loss": 0.1077,
      "step": 7650
    },
    {
      "epoch": 0.7675,
      "grad_norm": 4.083223342895508,
      "learning_rate": 9.708333333333333e-06,
      "loss": 0.1442,
      "step": 7675
    },
    {
      "epoch": 0.77,
      "grad_norm": 4.046257972717285,
      "learning_rate": 9.604166666666667e-06,
      "loss": 0.1725,
      "step": 7700
    },
    {
      "epoch": 0.7725,
      "grad_norm": 4.065029621124268,
      "learning_rate": 9.5e-06,
      "loss": 0.1833,
      "step": 7725
    },
    {
      "epoch": 0.775,
      "grad_norm": 3.321071147918701,
      "learning_rate": 9.39583333333333e-06,
      "loss": 0.1671,
      "step": 7750
    },
    {
      "epoch": 0.7775,
      "grad_norm": 3.388200044631958,
      "learning_rate": 9.291666666666666e-06,
      "loss": 0.1196,
      "step": 7775
    },
    {
      "epoch": 0.78,
      "grad_norm": 4.393247604370117,
      "learning_rate": 9.1875e-06,
      "loss": 0.1399,
      "step": 7800
    },
    {
      "epoch": 0.7825,
      "grad_norm": 3.463327646255493,
      "learning_rate": 9.083333333333333e-06,
      "loss": 0.3125,
      "step": 7825
    },
    {
      "epoch": 0.785,
      "grad_norm": 2.6306939125061035,
      "learning_rate": 8.979166666666665e-06,
      "loss": 0.153,
      "step": 7850
    },
    {
      "epoch": 0.7875,
      "grad_norm": 2.3278794288635254,
      "learning_rate": 8.874999999999999e-06,
      "loss": 0.1141,
      "step": 7875
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.850830554962158,
      "learning_rate": 8.770833333333333e-06,
      "loss": 0.1096,
      "step": 7900
    },
    {
      "epoch": 0.7925,
      "grad_norm": 2.4107863903045654,
      "learning_rate": 8.666666666666666e-06,
      "loss": 0.1117,
      "step": 7925
    },
    {
      "epoch": 0.795,
      "grad_norm": 3.420828104019165,
      "learning_rate": 8.5625e-06,
      "loss": 0.1071,
      "step": 7950
    },
    {
      "epoch": 0.7975,
      "grad_norm": 3.005918025970459,
      "learning_rate": 8.458333333333333e-06,
      "loss": 0.1146,
      "step": 7975
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.3296029567718506,
      "learning_rate": 8.354166666666665e-06,
      "loss": 0.143,
      "step": 8000
    },
    {
      "epoch": 0.8,
      "eval_loss": 0.3718983232975006,
      "eval_runtime": 38.8288,
      "eval_samples_per_second": 43.55,
      "eval_steps_per_second": 2.73,
      "eval_wer": 15.448982140626951,
      "step": 8000
    },
    {
      "epoch": 0.8025,
      "grad_norm": 4.029442310333252,
      "learning_rate": 8.249999999999999e-06,
      "loss": 0.1663,
      "step": 8025
    },
    {
      "epoch": 0.805,
      "grad_norm": 3.7387983798980713,
      "learning_rate": 8.145833333333333e-06,
      "loss": 0.1808,
      "step": 8050
    },
    {
      "epoch": 0.8075,
      "grad_norm": 3.0396807193756104,
      "learning_rate": 8.041666666666666e-06,
      "loss": 0.1351,
      "step": 8075
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.9825220108032227,
      "learning_rate": 7.9375e-06,
      "loss": 0.1215,
      "step": 8100
    },
    {
      "epoch": 0.8125,
      "grad_norm": 2.495398998260498,
      "learning_rate": 7.833333333333333e-06,
      "loss": 0.1054,
      "step": 8125
    },
    {
      "epoch": 0.815,
      "grad_norm": 3.750054121017456,
      "learning_rate": 7.729166666666665e-06,
      "loss": 0.1049,
      "step": 8150
    },
    {
      "epoch": 0.8175,
      "grad_norm": 2.1268110275268555,
      "learning_rate": 7.625e-06,
      "loss": 0.0932,
      "step": 8175
    },
    {
      "epoch": 0.82,
      "grad_norm": 3.2984964847564697,
      "learning_rate": 7.520833333333332e-06,
      "loss": 0.0759,
      "step": 8200
    },
    {
      "epoch": 0.8225,
      "grad_norm": 2.470698356628418,
      "learning_rate": 7.416666666666666e-06,
      "loss": 0.0983,
      "step": 8225
    },
    {
      "epoch": 0.825,
      "grad_norm": 3.2774899005889893,
      "learning_rate": 7.3125e-06,
      "loss": 0.085,
      "step": 8250
    },
    {
      "epoch": 0.8275,
      "grad_norm": 2.46321177482605,
      "learning_rate": 7.208333333333333e-06,
      "loss": 0.0916,
      "step": 8275
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.0468456745147705,
      "learning_rate": 7.104166666666666e-06,
      "loss": 0.0951,
      "step": 8300
    },
    {
      "epoch": 0.8325,
      "grad_norm": 3.481823444366455,
      "learning_rate": 7e-06,
      "loss": 0.1368,
      "step": 8325
    },
    {
      "epoch": 0.835,
      "grad_norm": 4.122819423675537,
      "learning_rate": 6.895833333333333e-06,
      "loss": 0.1719,
      "step": 8350
    },
    {
      "epoch": 0.8375,
      "grad_norm": 4.750814914703369,
      "learning_rate": 6.791666666666666e-06,
      "loss": 0.2073,
      "step": 8375
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.771611452102661,
      "learning_rate": 6.6875e-06,
      "loss": 0.1592,
      "step": 8400
    },
    {
      "epoch": 0.8425,
      "grad_norm": 4.536616325378418,
      "learning_rate": 6.583333333333333e-06,
      "loss": 0.1153,
      "step": 8425
    },
    {
      "epoch": 0.845,
      "grad_norm": 2.6289072036743164,
      "learning_rate": 6.479166666666666e-06,
      "loss": 0.1098,
      "step": 8450
    },
    {
      "epoch": 0.8475,
      "grad_norm": 3.1746957302093506,
      "learning_rate": 6.375e-06,
      "loss": 0.1216,
      "step": 8475
    },
    {
      "epoch": 0.85,
      "grad_norm": 3.029151439666748,
      "learning_rate": 6.270833333333333e-06,
      "loss": 0.1374,
      "step": 8500
    },
    {
      "epoch": 0.8525,
      "grad_norm": 3.8162496089935303,
      "learning_rate": 6.166666666666666e-06,
      "loss": 0.1408,
      "step": 8525
    },
    {
      "epoch": 0.855,
      "grad_norm": 5.568809509277344,
      "learning_rate": 6.062499999999999e-06,
      "loss": 0.1948,
      "step": 8550
    },
    {
      "epoch": 0.8575,
      "grad_norm": 5.649611473083496,
      "learning_rate": 5.958333333333333e-06,
      "loss": 0.3217,
      "step": 8575
    },
    {
      "epoch": 0.86,
      "grad_norm": 7.035798072814941,
      "learning_rate": 5.854166666666666e-06,
      "loss": 0.4378,
      "step": 8600
    },
    {
      "epoch": 0.8625,
      "grad_norm": 7.135374546051025,
      "learning_rate": 5.749999999999999e-06,
      "loss": 0.5134,
      "step": 8625
    },
    {
      "epoch": 0.865,
      "grad_norm": 3.085472345352173,
      "learning_rate": 5.645833333333333e-06,
      "loss": 0.3075,
      "step": 8650
    },
    {
      "epoch": 0.8675,
      "grad_norm": 4.60648250579834,
      "learning_rate": 5.541666666666666e-06,
      "loss": 0.2117,
      "step": 8675
    },
    {
      "epoch": 0.87,
      "grad_norm": 5.39269495010376,
      "learning_rate": 5.437499999999999e-06,
      "loss": 0.2447,
      "step": 8700
    },
    {
      "epoch": 0.8725,
      "grad_norm": 3.4270975589752197,
      "learning_rate": 5.333333333333333e-06,
      "loss": 0.1657,
      "step": 8725
    },
    {
      "epoch": 0.875,
      "grad_norm": 2.6738994121551514,
      "learning_rate": 5.2291666666666664e-06,
      "loss": 0.1087,
      "step": 8750
    },
    {
      "epoch": 0.8775,
      "grad_norm": 2.7085695266723633,
      "learning_rate": 5.124999999999999e-06,
      "loss": 0.1076,
      "step": 8775
    },
    {
      "epoch": 0.88,
      "grad_norm": 3.1228623390197754,
      "learning_rate": 5.020833333333333e-06,
      "loss": 0.1216,
      "step": 8800
    },
    {
      "epoch": 0.8825,
      "grad_norm": 5.188724994659424,
      "learning_rate": 4.9166666666666665e-06,
      "loss": 0.154,
      "step": 8825
    },
    {
      "epoch": 0.885,
      "grad_norm": 3.700319528579712,
      "learning_rate": 4.812499999999999e-06,
      "loss": 0.1491,
      "step": 8850
    },
    {
      "epoch": 0.8875,
      "grad_norm": 3.185534954071045,
      "learning_rate": 4.708333333333333e-06,
      "loss": 0.1517,
      "step": 8875
    },
    {
      "epoch": 0.89,
      "grad_norm": 3.0981645584106445,
      "learning_rate": 4.6041666666666665e-06,
      "loss": 0.1092,
      "step": 8900
    },
    {
      "epoch": 0.8925,
      "grad_norm": 3.850358247756958,
      "learning_rate": 4.499999999999999e-06,
      "loss": 0.107,
      "step": 8925
    },
    {
      "epoch": 0.895,
      "grad_norm": 3.169626235961914,
      "learning_rate": 4.395833333333333e-06,
      "loss": 0.115,
      "step": 8950
    },
    {
      "epoch": 0.8975,
      "grad_norm": 2.9183895587921143,
      "learning_rate": 4.2916666666666665e-06,
      "loss": 0.0914,
      "step": 8975
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.877211570739746,
      "learning_rate": 4.187499999999999e-06,
      "loss": 0.0967,
      "step": 9000
    },
    {
      "epoch": 0.9,
      "eval_loss": 0.3684903383255005,
      "eval_runtime": 39.0288,
      "eval_samples_per_second": 43.327,
      "eval_steps_per_second": 2.716,
      "eval_wer": 15.136755339078306,
      "step": 9000
    },
    {
      "epoch": 0.9025,
      "grad_norm": 3.196434497833252,
      "learning_rate": 4.083333333333333e-06,
      "loss": 0.1113,
      "step": 9025
    },
    {
      "epoch": 0.905,
      "grad_norm": 3.862992525100708,
      "learning_rate": 3.9791666666666665e-06,
      "loss": 0.1354,
      "step": 9050
    },
    {
      "epoch": 0.9075,
      "grad_norm": 4.684475898742676,
      "learning_rate": 3.874999999999999e-06,
      "loss": 0.1739,
      "step": 9075
    },
    {
      "epoch": 0.91,
      "grad_norm": 3.8043711185455322,
      "learning_rate": 3.770833333333333e-06,
      "loss": 0.1725,
      "step": 9100
    },
    {
      "epoch": 0.9125,
      "grad_norm": 3.2041592597961426,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 0.1385,
      "step": 9125
    },
    {
      "epoch": 0.915,
      "grad_norm": 2.448774576187134,
      "learning_rate": 3.5624999999999998e-06,
      "loss": 0.1294,
      "step": 9150
    },
    {
      "epoch": 0.9175,
      "grad_norm": 3.0288240909576416,
      "learning_rate": 3.458333333333333e-06,
      "loss": 0.1252,
      "step": 9175
    },
    {
      "epoch": 0.92,
      "grad_norm": 3.7271015644073486,
      "learning_rate": 3.354166666666666e-06,
      "loss": 0.1297,
      "step": 9200
    },
    {
      "epoch": 0.9225,
      "grad_norm": 3.9993860721588135,
      "learning_rate": 3.25e-06,
      "loss": 0.1503,
      "step": 9225
    },
    {
      "epoch": 0.925,
      "grad_norm": 4.390246391296387,
      "learning_rate": 3.145833333333333e-06,
      "loss": 0.1744,
      "step": 9250
    },
    {
      "epoch": 0.9275,
      "grad_norm": 3.318673849105835,
      "learning_rate": 3.041666666666666e-06,
      "loss": 0.1926,
      "step": 9275
    },
    {
      "epoch": 0.93,
      "grad_norm": 2.6013784408569336,
      "learning_rate": 2.9375e-06,
      "loss": 0.1183,
      "step": 9300
    },
    {
      "epoch": 0.9325,
      "grad_norm": 2.507906675338745,
      "learning_rate": 2.833333333333333e-06,
      "loss": 0.1198,
      "step": 9325
    },
    {
      "epoch": 0.935,
      "grad_norm": 2.4156315326690674,
      "learning_rate": 2.7291666666666662e-06,
      "loss": 0.0964,
      "step": 9350
    },
    {
      "epoch": 0.9375,
      "grad_norm": 3.535658359527588,
      "learning_rate": 2.625e-06,
      "loss": 0.125,
      "step": 9375
    },
    {
      "epoch": 0.94,
      "grad_norm": 3.782447099685669,
      "learning_rate": 2.520833333333333e-06,
      "loss": 0.15,
      "step": 9400
    },
    {
      "epoch": 0.9425,
      "grad_norm": 3.4775750637054443,
      "learning_rate": 2.4166666666666663e-06,
      "loss": 0.169,
      "step": 9425
    },
    {
      "epoch": 0.945,
      "grad_norm": 2.6878416538238525,
      "learning_rate": 2.3125e-06,
      "loss": 0.2931,
      "step": 9450
    },
    {
      "epoch": 0.9475,
      "grad_norm": 3.1754825115203857,
      "learning_rate": 2.208333333333333e-06,
      "loss": 0.1597,
      "step": 9475
    },
    {
      "epoch": 0.95,
      "grad_norm": 4.0340256690979,
      "learning_rate": 2.1041666666666667e-06,
      "loss": 0.1347,
      "step": 9500
    },
    {
      "epoch": 0.9525,
      "grad_norm": 4.122541904449463,
      "learning_rate": 2e-06,
      "loss": 0.1548,
      "step": 9525
    },
    {
      "epoch": 0.955,
      "grad_norm": 3.8141324520111084,
      "learning_rate": 1.8958333333333331e-06,
      "loss": 0.1713,
      "step": 9550
    },
    {
      "epoch": 0.9575,
      "grad_norm": 4.313726902008057,
      "learning_rate": 1.7916666666666665e-06,
      "loss": 0.1928,
      "step": 9575
    },
    {
      "epoch": 0.96,
      "grad_norm": 3.0914995670318604,
      "learning_rate": 1.6874999999999997e-06,
      "loss": 0.1501,
      "step": 9600
    },
    {
      "epoch": 0.9625,
      "grad_norm": 2.9334723949432373,
      "learning_rate": 1.5833333333333331e-06,
      "loss": 0.1221,
      "step": 9625
    },
    {
      "epoch": 0.965,
      "grad_norm": 2.4183266162872314,
      "learning_rate": 1.4791666666666663e-06,
      "loss": 0.0928,
      "step": 9650
    },
    {
      "epoch": 0.9675,
      "grad_norm": 2.296832323074341,
      "learning_rate": 1.375e-06,
      "loss": 0.0959,
      "step": 9675
    },
    {
      "epoch": 0.97,
      "grad_norm": 2.63551926612854,
      "learning_rate": 1.2708333333333334e-06,
      "loss": 0.0966,
      "step": 9700
    },
    {
      "epoch": 0.9725,
      "grad_norm": 1.874773621559143,
      "learning_rate": 1.1666666666666666e-06,
      "loss": 0.0958,
      "step": 9725
    },
    {
      "epoch": 0.975,
      "grad_norm": 2.397674322128296,
      "learning_rate": 1.0624999999999998e-06,
      "loss": 0.1009,
      "step": 9750
    },
    {
      "epoch": 0.9775,
      "grad_norm": 3.000075101852417,
      "learning_rate": 9.583333333333334e-07,
      "loss": 0.1095,
      "step": 9775
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.8197531700134277,
      "learning_rate": 8.541666666666666e-07,
      "loss": 0.1535,
      "step": 9800
    },
    {
      "epoch": 0.9825,
      "grad_norm": 3.8931257724761963,
      "learning_rate": 7.499999999999999e-07,
      "loss": 0.1639,
      "step": 9825
    },
    {
      "epoch": 0.985,
      "grad_norm": 3.463376045227051,
      "learning_rate": 6.458333333333332e-07,
      "loss": 0.1494,
      "step": 9850
    },
    {
      "epoch": 0.9875,
      "grad_norm": 2.9888062477111816,
      "learning_rate": 5.416666666666666e-07,
      "loss": 0.1483,
      "step": 9875
    },
    {
      "epoch": 0.99,
      "grad_norm": 7.088611602783203,
      "learning_rate": 4.375e-07,
      "loss": 0.2288,
      "step": 9900
    },
    {
      "epoch": 0.9925,
      "grad_norm": 2.751157283782959,
      "learning_rate": 3.333333333333333e-07,
      "loss": 0.1954,
      "step": 9925
    },
    {
      "epoch": 0.995,
      "grad_norm": 3.0588839054107666,
      "learning_rate": 2.2916666666666666e-07,
      "loss": 0.1068,
      "step": 9950
    },
    {
      "epoch": 0.9975,
      "grad_norm": 2.712277412414551,
      "learning_rate": 1.25e-07,
      "loss": 0.0997,
      "step": 9975
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.309202194213867,
      "learning_rate": 2.083333333333333e-08,
      "loss": 0.1059,
      "step": 10000
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.37193799018859863,
      "eval_runtime": 39.6288,
      "eval_samples_per_second": 42.671,
      "eval_steps_per_second": 2.675,
      "eval_wer": 14.849506681653555,
      "step": 10000
    },
    {
      "epoch": 1.0,
      "step": 10000,
      "total_flos": 7.8780432384e+18,
      "train_loss": 0.2454334835767746,
      "train_runtime": 3974.1996,
      "train_samples_per_second": 80.519,
      "train_steps_per_second": 2.516
    }
  ],
  "logging_steps": 25,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.8780432384e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}