{
  "best_metric": 0.9124748490945674,
  "best_model_checkpoint": "PhoWhisper-medium-vispeech-classifier-v1/checkpoint-2938",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 2938,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010211027910142955,
      "grad_norm": 79976.4921875,
      "learning_rate": 6.122448979591837e-08,
      "loss": 2.0788,
      "step": 15
    },
    {
      "epoch": 0.02042205582028591,
      "grad_norm": 49252.46484375,
      "learning_rate": 1.2244897959183673e-07,
      "loss": 2.0787,
      "step": 30
    },
    {
      "epoch": 0.03063308373042886,
      "grad_norm": 66237.7890625,
      "learning_rate": 1.836734693877551e-07,
      "loss": 2.0734,
      "step": 45
    },
    {
      "epoch": 0.04084411164057182,
      "grad_norm": 66530.140625,
      "learning_rate": 2.4489795918367347e-07,
      "loss": 2.0745,
      "step": 60
    },
    {
      "epoch": 0.05105513955071477,
      "grad_norm": 75123.3515625,
      "learning_rate": 3.0612244897959183e-07,
      "loss": 2.0715,
      "step": 75
    },
    {
      "epoch": 0.06126616746085772,
      "grad_norm": 91761.9921875,
      "learning_rate": 3.673469387755102e-07,
      "loss": 2.0702,
      "step": 90
    },
    {
      "epoch": 0.07147719537100068,
      "grad_norm": 76544.7421875,
      "learning_rate": 4.2857142857142857e-07,
      "loss": 2.0659,
      "step": 105
    },
    {
      "epoch": 0.08168822328114364,
      "grad_norm": 85211.5078125,
      "learning_rate": 4.897959183673469e-07,
      "loss": 2.0632,
      "step": 120
    },
    {
      "epoch": 0.09189925119128659,
      "grad_norm": 86731.6171875,
      "learning_rate": 5.510204081632653e-07,
      "loss": 2.0579,
      "step": 135
    },
    {
      "epoch": 0.10211027910142954,
      "grad_norm": 73202.421875,
      "learning_rate": 6.122448979591837e-07,
      "loss": 2.0466,
      "step": 150
    },
    {
      "epoch": 0.1123213070115725,
      "grad_norm": 99795.828125,
      "learning_rate": 6.73469387755102e-07,
      "loss": 2.0266,
      "step": 165
    },
    {
      "epoch": 0.12253233492171545,
      "grad_norm": 78665.6015625,
      "learning_rate": 7.346938775510204e-07,
      "loss": 2.0133,
      "step": 180
    },
    {
      "epoch": 0.13274336283185842,
      "grad_norm": 109804.5546875,
      "learning_rate": 7.959183673469389e-07,
      "loss": 2.0033,
      "step": 195
    },
    {
      "epoch": 0.14295439074200136,
      "grad_norm": 88988.0234375,
      "learning_rate": 8.571428571428571e-07,
      "loss": 1.9912,
      "step": 210
    },
    {
      "epoch": 0.1531654186521443,
      "grad_norm": 163108.234375,
      "learning_rate": 9.183673469387755e-07,
      "loss": 1.9714,
      "step": 225
    },
    {
      "epoch": 0.16337644656228728,
      "grad_norm": 147248.25,
      "learning_rate": 9.795918367346939e-07,
      "loss": 1.9368,
      "step": 240
    },
    {
      "epoch": 0.17358747447243023,
      "grad_norm": 161750.640625,
      "learning_rate": 1.0408163265306123e-06,
      "loss": 1.8955,
      "step": 255
    },
    {
      "epoch": 0.18379850238257317,
      "grad_norm": 189184.296875,
      "learning_rate": 1.1020408163265306e-06,
      "loss": 1.8849,
      "step": 270
    },
    {
      "epoch": 0.19400953029271614,
      "grad_norm": 160164.859375,
      "learning_rate": 1.163265306122449e-06,
      "loss": 1.8496,
      "step": 285
    },
    {
      "epoch": 0.2042205582028591,
      "grad_norm": 147315.296875,
      "learning_rate": 1.2244897959183673e-06,
      "loss": 1.8118,
      "step": 300
    },
    {
      "epoch": 0.21443158611300203,
      "grad_norm": 164890.859375,
      "learning_rate": 1.2857142857142856e-06,
      "loss": 1.7829,
      "step": 315
    },
    {
      "epoch": 0.224642614023145,
      "grad_norm": 195027.578125,
      "learning_rate": 1.346938775510204e-06,
      "loss": 1.7938,
      "step": 330
    },
    {
      "epoch": 0.23485364193328795,
      "grad_norm": 206460.03125,
      "learning_rate": 1.4081632653061225e-06,
      "loss": 1.7258,
      "step": 345
    },
    {
      "epoch": 0.2450646698434309,
      "grad_norm": 247498.296875,
      "learning_rate": 1.4693877551020408e-06,
      "loss": 1.6498,
      "step": 360
    },
    {
      "epoch": 0.25527569775357384,
      "grad_norm": 282144.96875,
      "learning_rate": 1.5306122448979593e-06,
      "loss": 1.6515,
      "step": 375
    },
    {
      "epoch": 0.26548672566371684,
      "grad_norm": 244598.515625,
      "learning_rate": 1.5918367346938777e-06,
      "loss": 1.6327,
      "step": 390
    },
    {
      "epoch": 0.2756977535738598,
      "grad_norm": 169616.5,
      "learning_rate": 1.6530612244897958e-06,
      "loss": 1.576,
      "step": 405
    },
    {
      "epoch": 0.2859087814840027,
      "grad_norm": 186143.078125,
      "learning_rate": 1.7142857142857143e-06,
      "loss": 1.6067,
      "step": 420
    },
    {
      "epoch": 0.29611980939414567,
      "grad_norm": 239726.171875,
      "learning_rate": 1.7755102040816327e-06,
      "loss": 1.5676,
      "step": 435
    },
    {
      "epoch": 0.3063308373042886,
      "grad_norm": 224399.15625,
      "learning_rate": 1.836734693877551e-06,
      "loss": 1.4658,
      "step": 450
    },
    {
      "epoch": 0.31654186521443156,
      "grad_norm": 585805.375,
      "learning_rate": 1.8979591836734695e-06,
      "loss": 1.4288,
      "step": 465
    },
    {
      "epoch": 0.32675289312457456,
      "grad_norm": 204082.328125,
      "learning_rate": 1.9591836734693877e-06,
      "loss": 1.4079,
      "step": 480
    },
    {
      "epoch": 0.3369639210347175,
      "grad_norm": 253897.078125,
      "learning_rate": 2.020408163265306e-06,
      "loss": 1.4658,
      "step": 495
    },
    {
      "epoch": 0.34717494894486045,
      "grad_norm": 322156.15625,
      "learning_rate": 2.0816326530612247e-06,
      "loss": 1.3193,
      "step": 510
    },
    {
      "epoch": 0.3573859768550034,
      "grad_norm": 209002.03125,
      "learning_rate": 2.142857142857143e-06,
      "loss": 1.3258,
      "step": 525
    },
    {
      "epoch": 0.36759700476514634,
      "grad_norm": 239622.390625,
      "learning_rate": 2.204081632653061e-06,
      "loss": 1.3396,
      "step": 540
    },
    {
      "epoch": 0.3778080326752893,
      "grad_norm": 302531.125,
      "learning_rate": 2.2653061224489797e-06,
      "loss": 1.2724,
      "step": 555
    },
    {
      "epoch": 0.3880190605854323,
      "grad_norm": 277807.9375,
      "learning_rate": 2.326530612244898e-06,
      "loss": 1.2506,
      "step": 570
    },
    {
      "epoch": 0.39823008849557523,
      "grad_norm": 543051.25,
      "learning_rate": 2.387755102040816e-06,
      "loss": 1.3114,
      "step": 585
    },
    {
      "epoch": 0.4084411164057182,
      "grad_norm": 278458.59375,
      "learning_rate": 2.4489795918367347e-06,
      "loss": 1.2505,
      "step": 600
    },
    {
      "epoch": 0.4186521443158611,
      "grad_norm": 227854.078125,
      "learning_rate": 2.510204081632653e-06,
      "loss": 1.2032,
      "step": 615
    },
    {
      "epoch": 0.42886317222600406,
      "grad_norm": 669112.0625,
      "learning_rate": 2.571428571428571e-06,
      "loss": 1.2144,
      "step": 630
    },
    {
      "epoch": 0.43907420013614706,
      "grad_norm": 291158.375,
      "learning_rate": 2.6326530612244897e-06,
      "loss": 1.1663,
      "step": 645
    },
    {
      "epoch": 0.44928522804629,
      "grad_norm": 322840.65625,
      "learning_rate": 2.693877551020408e-06,
      "loss": 1.1802,
      "step": 660
    },
    {
      "epoch": 0.45949625595643295,
      "grad_norm": 399617.4375,
      "learning_rate": 2.7551020408163266e-06,
      "loss": 1.0181,
      "step": 675
    },
    {
      "epoch": 0.4697072838665759,
      "grad_norm": 399545.5,
      "learning_rate": 2.816326530612245e-06,
      "loss": 1.2468,
      "step": 690
    },
    {
      "epoch": 0.47991831177671884,
      "grad_norm": 1116027.875,
      "learning_rate": 2.877551020408163e-06,
      "loss": 1.2023,
      "step": 705
    },
    {
      "epoch": 0.4901293396868618,
      "grad_norm": 300487.4375,
      "learning_rate": 2.9387755102040816e-06,
      "loss": 1.147,
      "step": 720
    },
    {
      "epoch": 0.5003403675970047,
      "grad_norm": 413331.65625,
      "learning_rate": 3e-06,
      "loss": 0.9871,
      "step": 735
    },
    {
      "epoch": 0.5105513955071477,
      "grad_norm": 715814.125,
      "learning_rate": 2.993192133131619e-06,
      "loss": 1.1643,
      "step": 750
    },
    {
      "epoch": 0.5207624234172906,
      "grad_norm": 1614147.0,
      "learning_rate": 2.9863842662632373e-06,
      "loss": 0.9421,
      "step": 765
    },
    {
      "epoch": 0.5309734513274337,
      "grad_norm": 639895.0625,
      "learning_rate": 2.9795763993948565e-06,
      "loss": 1.0573,
      "step": 780
    },
    {
      "epoch": 0.5411844792375766,
      "grad_norm": 345273.84375,
      "learning_rate": 2.9727685325264754e-06,
      "loss": 1.0337,
      "step": 795
    },
    {
      "epoch": 0.5513955071477196,
      "grad_norm": 611370.375,
      "learning_rate": 2.9659606656580938e-06,
      "loss": 1.1131,
      "step": 810
    },
    {
      "epoch": 0.5616065350578625,
      "grad_norm": 239903.25,
      "learning_rate": 2.9591527987897126e-06,
      "loss": 0.9076,
      "step": 825
    },
    {
      "epoch": 0.5718175629680055,
      "grad_norm": 257958.703125,
      "learning_rate": 2.9523449319213314e-06,
      "loss": 0.9152,
      "step": 840
    },
    {
      "epoch": 0.5820285908781484,
      "grad_norm": 590921.375,
      "learning_rate": 2.9455370650529502e-06,
      "loss": 0.8795,
      "step": 855
    },
    {
      "epoch": 0.5922396187882913,
      "grad_norm": 363986.1875,
      "learning_rate": 2.938729198184569e-06,
      "loss": 1.0641,
      "step": 870
    },
    {
      "epoch": 0.6024506466984343,
      "grad_norm": 1878926.875,
      "learning_rate": 2.9319213313161875e-06,
      "loss": 0.9414,
      "step": 885
    },
    {
      "epoch": 0.6126616746085772,
      "grad_norm": 2941328.0,
      "learning_rate": 2.9251134644478063e-06,
      "loss": 0.9541,
      "step": 900
    },
    {
      "epoch": 0.6228727025187202,
      "grad_norm": 1327942.75,
      "learning_rate": 2.918305597579425e-06,
      "loss": 0.8319,
      "step": 915
    },
    {
      "epoch": 0.6330837304288631,
      "grad_norm": 161807.125,
      "learning_rate": 2.911497730711044e-06,
      "loss": 0.9268,
      "step": 930
    },
    {
      "epoch": 0.6432947583390062,
      "grad_norm": 899555.875,
      "learning_rate": 2.9046898638426628e-06,
      "loss": 0.8278,
      "step": 945
    },
    {
      "epoch": 0.6535057862491491,
      "grad_norm": 268360.0625,
      "learning_rate": 2.8978819969742816e-06,
      "loss": 0.9021,
      "step": 960
    },
    {
      "epoch": 0.6637168141592921,
      "grad_norm": 555703.0625,
      "learning_rate": 2.8910741301059e-06,
      "loss": 1.0146,
      "step": 975
    },
    {
      "epoch": 0.673927842069435,
      "grad_norm": 222425.40625,
      "learning_rate": 2.884266263237519e-06,
      "loss": 1.0005,
      "step": 990
    },
    {
      "epoch": 0.684138869979578,
      "grad_norm": 681254.4375,
      "learning_rate": 2.877458396369138e-06,
      "loss": 0.842,
      "step": 1005
    },
    {
      "epoch": 0.6943498978897209,
      "grad_norm": 1348221.125,
      "learning_rate": 2.8706505295007565e-06,
      "loss": 0.9394,
      "step": 1020
    },
    {
      "epoch": 0.7045609257998638,
      "grad_norm": 225728.046875,
      "learning_rate": 2.8638426626323753e-06,
      "loss": 0.8289,
      "step": 1035
    },
    {
      "epoch": 0.7147719537100068,
      "grad_norm": 413089.65625,
      "learning_rate": 2.857034795763994e-06,
      "loss": 1.0175,
      "step": 1050
    },
    {
      "epoch": 0.7249829816201497,
      "grad_norm": 279751.71875,
      "learning_rate": 2.8502269288956125e-06,
      "loss": 0.9563,
      "step": 1065
    },
    {
      "epoch": 0.7351940095302927,
      "grad_norm": 422167.125,
      "learning_rate": 2.8434190620272318e-06,
      "loss": 0.8566,
      "step": 1080
    },
    {
      "epoch": 0.7454050374404356,
      "grad_norm": 2015877.125,
      "learning_rate": 2.83661119515885e-06,
      "loss": 0.7012,
      "step": 1095
    },
    {
      "epoch": 0.7556160653505786,
      "grad_norm": 502859.5,
      "learning_rate": 2.829803328290469e-06,
      "loss": 0.7563,
      "step": 1110
    },
    {
      "epoch": 0.7658270932607216,
      "grad_norm": 537785.125,
      "learning_rate": 2.822995461422088e-06,
      "loss": 0.9854,
      "step": 1125
    },
    {
      "epoch": 0.7760381211708646,
      "grad_norm": 390231.125,
      "learning_rate": 2.8161875945537066e-06,
      "loss": 1.0036,
      "step": 1140
    },
    {
      "epoch": 0.7862491490810075,
      "grad_norm": 370246.0625,
      "learning_rate": 2.8093797276853255e-06,
      "loss": 0.8186,
      "step": 1155
    },
    {
      "epoch": 0.7964601769911505,
      "grad_norm": 1279901.375,
      "learning_rate": 2.8025718608169443e-06,
      "loss": 0.9289,
      "step": 1170
    },
    {
      "epoch": 0.8066712049012934,
      "grad_norm": 316319.25,
      "learning_rate": 2.7957639939485627e-06,
      "loss": 0.7542,
      "step": 1185
    },
    {
      "epoch": 0.8168822328114363,
      "grad_norm": 577071.5,
      "learning_rate": 2.7889561270801815e-06,
      "loss": 0.8579,
      "step": 1200
    },
    {
      "epoch": 0.8270932607215793,
      "grad_norm": 412667.71875,
      "learning_rate": 2.7821482602118003e-06,
      "loss": 0.8424,
      "step": 1215
    },
    {
      "epoch": 0.8373042886317222,
      "grad_norm": 555197.125,
      "learning_rate": 2.775340393343419e-06,
      "loss": 0.8225,
      "step": 1230
    },
    {
      "epoch": 0.8475153165418652,
      "grad_norm": 429715.0,
      "learning_rate": 2.768532526475038e-06,
      "loss": 1.1042,
      "step": 1245
    },
    {
      "epoch": 0.8577263444520081,
      "grad_norm": 1407773.25,
      "learning_rate": 2.761724659606657e-06,
      "loss": 0.9892,
      "step": 1260
    },
    {
      "epoch": 0.8679373723621511,
      "grad_norm": 328839.6875,
      "learning_rate": 2.754916792738275e-06,
      "loss": 0.7921,
      "step": 1275
    },
    {
      "epoch": 0.8781484002722941,
      "grad_norm": 196251.03125,
      "learning_rate": 2.748108925869894e-06,
      "loss": 0.6631,
      "step": 1290
    },
    {
      "epoch": 0.8883594281824371,
      "grad_norm": 761081.6875,
      "learning_rate": 2.7413010590015133e-06,
      "loss": 0.8396,
      "step": 1305
    },
    {
      "epoch": 0.89857045609258,
      "grad_norm": 905756.375,
      "learning_rate": 2.7344931921331317e-06,
      "loss": 0.8238,
      "step": 1320
    },
    {
      "epoch": 0.908781484002723,
      "grad_norm": 1221640.875,
      "learning_rate": 2.7276853252647505e-06,
      "loss": 0.7228,
      "step": 1335
    },
    {
      "epoch": 0.9189925119128659,
      "grad_norm": 2633817.0,
      "learning_rate": 2.7208774583963693e-06,
      "loss": 0.8766,
      "step": 1350
    },
    {
      "epoch": 0.9292035398230089,
      "grad_norm": 454206.53125,
      "learning_rate": 2.7140695915279877e-06,
      "loss": 0.922,
      "step": 1365
    },
    {
      "epoch": 0.9394145677331518,
      "grad_norm": 715028.8125,
      "learning_rate": 2.707261724659607e-06,
      "loss": 0.8212,
      "step": 1380
    },
    {
      "epoch": 0.9496255956432947,
      "grad_norm": 384052.40625,
      "learning_rate": 2.7004538577912254e-06,
      "loss": 0.6728,
      "step": 1395
    },
    {
      "epoch": 0.9598366235534377,
      "grad_norm": 268327.0,
      "learning_rate": 2.693645990922844e-06,
      "loss": 0.7077,
      "step": 1410
    },
    {
      "epoch": 0.9700476514635806,
      "grad_norm": 430912.0625,
      "learning_rate": 2.686838124054463e-06,
      "loss": 0.8267,
      "step": 1425
    },
    {
      "epoch": 0.9802586793737236,
      "grad_norm": 448801.65625,
      "learning_rate": 2.680030257186082e-06,
      "loss": 0.6444,
      "step": 1440
    },
    {
      "epoch": 0.9904697072838666,
      "grad_norm": 625466.3125,
      "learning_rate": 2.6732223903177007e-06,
      "loss": 0.8255,
      "step": 1455
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8819584171696848,
      "eval_loss": 0.5119155645370483,
      "eval_runtime": 579.0751,
      "eval_samples_per_second": 5.15,
      "eval_steps_per_second": 0.644,
      "step": 1469
    },
    {
      "epoch": 1.0006807351940095,
      "grad_norm": 166140.59375,
      "learning_rate": 2.6664145234493195e-06,
      "loss": 0.8523,
      "step": 1470
    },
    {
      "epoch": 1.0108917631041525,
      "grad_norm": 1578961.375,
      "learning_rate": 2.659606656580938e-06,
      "loss": 0.8466,
      "step": 1485
    },
    {
      "epoch": 1.0211027910142954,
      "grad_norm": 3046324.75,
      "learning_rate": 2.6527987897125567e-06,
      "loss": 0.7421,
      "step": 1500
    },
    {
      "epoch": 1.0313138189244384,
      "grad_norm": 486359.28125,
      "learning_rate": 2.6459909228441755e-06,
      "loss": 0.586,
      "step": 1515
    },
    {
      "epoch": 1.0415248468345812,
      "grad_norm": 476911.5,
      "learning_rate": 2.6391830559757944e-06,
      "loss": 0.7123,
      "step": 1530
    },
    {
      "epoch": 1.0517358747447243,
      "grad_norm": 312268.75,
      "learning_rate": 2.632375189107413e-06,
      "loss": 0.7437,
      "step": 1545
    },
    {
      "epoch": 1.0619469026548674,
      "grad_norm": 555973.375,
      "learning_rate": 2.625567322239032e-06,
      "loss": 0.7994,
      "step": 1560
    },
    {
      "epoch": 1.0721579305650102,
      "grad_norm": 441313.59375,
      "learning_rate": 2.6187594553706504e-06,
      "loss": 0.7675,
      "step": 1575
    },
    {
      "epoch": 1.0823689584751532,
      "grad_norm": 2035757.875,
      "learning_rate": 2.6119515885022692e-06,
      "loss": 0.7534,
      "step": 1590
    },
    {
      "epoch": 1.092579986385296,
      "grad_norm": 453271.59375,
      "learning_rate": 2.605143721633888e-06,
      "loss": 0.645,
      "step": 1605
    },
    {
      "epoch": 1.1027910142954391,
      "grad_norm": 319587.0625,
      "learning_rate": 2.598335854765507e-06,
      "loss": 0.765,
      "step": 1620
    },
    {
      "epoch": 1.113002042205582,
      "grad_norm": 732482.8125,
      "learning_rate": 2.5915279878971257e-06,
      "loss": 0.6155,
      "step": 1635
    },
    {
      "epoch": 1.123213070115725,
      "grad_norm": 396029.65625,
      "learning_rate": 2.5847201210287445e-06,
      "loss": 0.7213,
      "step": 1650
    },
    {
      "epoch": 1.1334240980258679,
      "grad_norm": 326143.03125,
      "learning_rate": 2.577912254160363e-06,
      "loss": 0.7134,
      "step": 1665
    },
    {
      "epoch": 1.143635125936011,
      "grad_norm": 1083100.875,
      "learning_rate": 2.571104387291982e-06,
      "loss": 0.7249,
      "step": 1680
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 905634.25,
      "learning_rate": 2.5642965204236006e-06,
      "loss": 0.767,
      "step": 1695
    },
    {
      "epoch": 1.1640571817562968,
      "grad_norm": 656056.9375,
      "learning_rate": 2.5574886535552194e-06,
      "loss": 0.8304,
      "step": 1710
    },
    {
      "epoch": 1.1742682096664399,
      "grad_norm": 722782.5625,
      "learning_rate": 2.5506807866868382e-06,
      "loss": 0.6995,
      "step": 1725
    },
    {
      "epoch": 1.1844792375765827,
      "grad_norm": 577313.5625,
      "learning_rate": 2.5438729198184566e-06,
      "loss": 0.9264,
      "step": 1740
    },
    {
      "epoch": 1.1946902654867257,
      "grad_norm": 2527231.5,
      "learning_rate": 2.537065052950076e-06,
      "loss": 0.9283,
      "step": 1755
    },
    {
      "epoch": 1.2049012933968686,
      "grad_norm": 1089097.5,
      "learning_rate": 2.5302571860816947e-06,
      "loss": 0.6552,
      "step": 1770
    },
    {
      "epoch": 1.2151123213070116,
      "grad_norm": 747680.25,
      "learning_rate": 2.523449319213313e-06,
      "loss": 0.7181,
      "step": 1785
    },
    {
      "epoch": 1.2253233492171545,
      "grad_norm": 143748.9375,
      "learning_rate": 2.516641452344932e-06,
      "loss": 0.5906,
      "step": 1800
    },
    {
      "epoch": 1.2355343771272975,
      "grad_norm": 782987.3125,
      "learning_rate": 2.5098335854765507e-06,
      "loss": 0.7338,
      "step": 1815
    },
    {
      "epoch": 1.2457454050374404,
      "grad_norm": 199057.203125,
      "learning_rate": 2.5030257186081696e-06,
      "loss": 0.6712,
      "step": 1830
    },
    {
      "epoch": 1.2559564329475834,
      "grad_norm": 169465.65625,
      "learning_rate": 2.4962178517397884e-06,
      "loss": 0.4682,
      "step": 1845
    },
    {
      "epoch": 1.2661674608577265,
      "grad_norm": 458018.5,
      "learning_rate": 2.4894099848714072e-06,
      "loss": 0.7659,
      "step": 1860
    },
    {
      "epoch": 1.2763784887678693,
      "grad_norm": 373506.46875,
      "learning_rate": 2.4826021180030256e-06,
      "loss": 0.7384,
      "step": 1875
    },
    {
      "epoch": 1.2865895166780121,
      "grad_norm": 140858.234375,
      "learning_rate": 2.4757942511346444e-06,
      "loss": 0.723,
      "step": 1890
    },
    {
      "epoch": 1.2968005445881552,
      "grad_norm": 758790.1875,
      "learning_rate": 2.4689863842662633e-06,
      "loss": 0.9274,
      "step": 1905
    },
    {
      "epoch": 1.3070115724982982,
      "grad_norm": 311553.5625,
      "learning_rate": 2.462178517397882e-06,
      "loss": 0.6113,
      "step": 1920
    },
    {
      "epoch": 1.317222600408441,
      "grad_norm": 167712.234375,
      "learning_rate": 2.455370650529501e-06,
      "loss": 0.6976,
      "step": 1935
    },
    {
      "epoch": 1.3274336283185841,
      "grad_norm": 500111.15625,
      "learning_rate": 2.4485627836611193e-06,
      "loss": 0.7833,
      "step": 1950
    },
    {
      "epoch": 1.337644656228727,
      "grad_norm": 517175.0,
      "learning_rate": 2.441754916792738e-06,
      "loss": 0.6971,
      "step": 1965
    },
    {
      "epoch": 1.34785568413887,
      "grad_norm": 659138.6875,
      "learning_rate": 2.4349470499243574e-06,
      "loss": 0.7602,
      "step": 1980
    },
    {
      "epoch": 1.3580667120490129,
      "grad_norm": 301056.09375,
      "learning_rate": 2.4281391830559758e-06,
      "loss": 0.6703,
      "step": 1995
    },
    {
      "epoch": 1.368277739959156,
      "grad_norm": 273580.0625,
      "learning_rate": 2.4213313161875946e-06,
      "loss": 0.6183,
      "step": 2010
    },
    {
      "epoch": 1.3784887678692987,
      "grad_norm": 559704.875,
      "learning_rate": 2.4145234493192134e-06,
      "loss": 0.564,
      "step": 2025
    },
    {
      "epoch": 1.3886997957794418,
      "grad_norm": 593091.6875,
      "learning_rate": 2.407715582450832e-06,
      "loss": 0.605,
      "step": 2040
    },
    {
      "epoch": 1.3989108236895849,
      "grad_norm": 307976.4375,
      "learning_rate": 2.400907715582451e-06,
      "loss": 0.771,
      "step": 2055
    },
    {
      "epoch": 1.4091218515997277,
      "grad_norm": 593720.625,
      "learning_rate": 2.39409984871407e-06,
      "loss": 0.7066,
      "step": 2070
    },
    {
      "epoch": 1.4193328795098705,
      "grad_norm": 831806.0625,
      "learning_rate": 2.3872919818456883e-06,
      "loss": 0.5807,
      "step": 2085
    },
    {
      "epoch": 1.4295439074200136,
      "grad_norm": 206191.21875,
      "learning_rate": 2.380484114977307e-06,
      "loss": 0.6574,
      "step": 2100
    },
    {
      "epoch": 1.4397549353301566,
      "grad_norm": 258367.828125,
      "learning_rate": 2.373676248108926e-06,
      "loss": 0.6792,
      "step": 2115
    },
    {
      "epoch": 1.4499659632402995,
      "grad_norm": 1177121.25,
      "learning_rate": 2.3668683812405448e-06,
      "loss": 0.8608,
      "step": 2130
    },
    {
      "epoch": 1.4601769911504425,
      "grad_norm": 368604.9375,
      "learning_rate": 2.3600605143721636e-06,
      "loss": 0.7224,
      "step": 2145
    },
    {
      "epoch": 1.4703880190605854,
      "grad_norm": 570852.125,
      "learning_rate": 2.3532526475037824e-06,
      "loss": 0.6717,
      "step": 2160
    },
    {
      "epoch": 1.4805990469707284,
      "grad_norm": 240292.90625,
      "learning_rate": 2.346444780635401e-06,
      "loss": 0.5415,
      "step": 2175
    },
    {
      "epoch": 1.4908100748808715,
      "grad_norm": 1409754.875,
      "learning_rate": 2.3396369137670197e-06,
      "loss": 0.66,
      "step": 2190
    },
    {
      "epoch": 1.5010211027910143,
      "grad_norm": 1670060.75,
      "learning_rate": 2.3328290468986385e-06,
      "loss": 0.686,
      "step": 2205
    },
    {
      "epoch": 1.5112321307011571,
      "grad_norm": 530769.5,
      "learning_rate": 2.3260211800302573e-06,
      "loss": 0.612,
      "step": 2220
    },
    {
      "epoch": 1.5214431586113002,
      "grad_norm": 1758232.75,
      "learning_rate": 2.319213313161876e-06,
      "loss": 0.6849,
      "step": 2235
    },
    {
      "epoch": 1.5316541865214433,
      "grad_norm": 1104782.0,
      "learning_rate": 2.3124054462934945e-06,
      "loss": 0.7704,
      "step": 2250
    },
    {
      "epoch": 1.541865214431586,
      "grad_norm": 826967.125,
      "learning_rate": 2.3055975794251133e-06,
      "loss": 0.5943,
      "step": 2265
    },
    {
      "epoch": 1.552076242341729,
      "grad_norm": 1028021.3125,
      "learning_rate": 2.2987897125567326e-06,
      "loss": 0.6579,
      "step": 2280
    },
    {
      "epoch": 1.562287270251872,
      "grad_norm": 250268.609375,
      "learning_rate": 2.291981845688351e-06,
      "loss": 0.671,
      "step": 2295
    },
    {
      "epoch": 1.572498298162015,
      "grad_norm": 608503.5625,
      "learning_rate": 2.28517397881997e-06,
      "loss": 0.546,
      "step": 2310
    },
    {
      "epoch": 1.582709326072158,
      "grad_norm": 2906189.5,
      "learning_rate": 2.2783661119515886e-06,
      "loss": 0.5798,
      "step": 2325
    },
    {
      "epoch": 1.592920353982301,
      "grad_norm": 1186615.875,
      "learning_rate": 2.271558245083207e-06,
      "loss": 0.8234,
      "step": 2340
    },
    {
      "epoch": 1.6031313818924438,
      "grad_norm": 189114.234375,
      "learning_rate": 2.2647503782148263e-06,
      "loss": 0.7017,
      "step": 2355
    },
    {
      "epoch": 1.6133424098025868,
      "grad_norm": 1086454.375,
      "learning_rate": 2.257942511346445e-06,
      "loss": 0.6067,
      "step": 2370
    },
    {
      "epoch": 1.6235534377127299,
      "grad_norm": 438766.8125,
      "learning_rate": 2.2511346444780635e-06,
      "loss": 0.7277,
      "step": 2385
    },
    {
      "epoch": 1.6337644656228727,
      "grad_norm": 746603.625,
      "learning_rate": 2.2443267776096823e-06,
      "loss": 0.7049,
      "step": 2400
    },
    {
      "epoch": 1.6439754935330155,
      "grad_norm": 1114921.375,
      "learning_rate": 2.237518910741301e-06,
      "loss": 0.6517,
      "step": 2415
    },
    {
      "epoch": 1.6541865214431586,
      "grad_norm": 119118.2421875,
      "learning_rate": 2.23071104387292e-06,
      "loss": 0.6355,
      "step": 2430
    },
    {
      "epoch": 1.6643975493533016,
      "grad_norm": 278499.84375,
      "learning_rate": 2.223903177004539e-06,
      "loss": 0.528,
      "step": 2445
    },
    {
      "epoch": 1.6746085772634445,
      "grad_norm": 2842614.5,
      "learning_rate": 2.2170953101361572e-06,
      "loss": 0.818,
      "step": 2460
    },
    {
      "epoch": 1.6848196051735873,
      "grad_norm": 681713.6875,
      "learning_rate": 2.210287443267776e-06,
      "loss": 0.5879,
      "step": 2475
    },
    {
      "epoch": 1.6950306330837304,
      "grad_norm": 955702.4375,
      "learning_rate": 2.203479576399395e-06,
      "loss": 0.6868,
      "step": 2490
    },
    {
      "epoch": 1.7052416609938734,
      "grad_norm": 293277.28125,
      "learning_rate": 2.1966717095310137e-06,
      "loss": 0.6262,
      "step": 2505
    },
    {
      "epoch": 1.7154526889040165,
      "grad_norm": 1372940.875,
      "learning_rate": 2.1898638426626325e-06,
      "loss": 0.7799,
      "step": 2520
    },
    {
      "epoch": 1.7256637168141593,
      "grad_norm": 277903.0625,
      "learning_rate": 2.1830559757942513e-06,
      "loss": 0.6204,
      "step": 2535
    },
    {
      "epoch": 1.7358747447243021,
      "grad_norm": 959110.125,
      "learning_rate": 2.1762481089258697e-06,
      "loss": 0.7283,
      "step": 2550
    },
    {
      "epoch": 1.7460857726344452,
      "grad_norm": 870804.125,
      "learning_rate": 2.1694402420574886e-06,
      "loss": 0.6073,
      "step": 2565
    },
    {
      "epoch": 1.7562968005445883,
      "grad_norm": 723633.5,
      "learning_rate": 2.162632375189108e-06,
      "loss": 0.8315,
      "step": 2580
    },
    {
      "epoch": 1.766507828454731,
      "grad_norm": 279254.0625,
      "learning_rate": 2.155824508320726e-06,
      "loss": 0.6998,
      "step": 2595
    },
    {
      "epoch": 1.776718856364874,
      "grad_norm": 816422.0625,
      "learning_rate": 2.149016641452345e-06,
      "loss": 0.7695,
      "step": 2610
    },
    {
      "epoch": 1.786929884275017,
      "grad_norm": 277112.28125,
      "learning_rate": 2.142208774583964e-06,
      "loss": 0.5903,
      "step": 2625
    },
    {
      "epoch": 1.79714091218516,
      "grad_norm": 1158773.75,
      "learning_rate": 2.1354009077155823e-06,
      "loss": 0.5811,
      "step": 2640
    },
    {
      "epoch": 1.807351940095303,
      "grad_norm": 3944267.75,
      "learning_rate": 2.1285930408472015e-06,
      "loss": 0.5903,
      "step": 2655
    },
    {
      "epoch": 1.817562968005446,
      "grad_norm": 774781.9375,
      "learning_rate": 2.12178517397882e-06,
      "loss": 0.7741,
      "step": 2670
    },
    {
      "epoch": 1.8277739959155888,
      "grad_norm": 744016.25,
      "learning_rate": 2.1149773071104387e-06,
      "loss": 0.7171,
      "step": 2685
    },
    {
      "epoch": 1.8379850238257318,
      "grad_norm": 5070120.5,
      "learning_rate": 2.1081694402420575e-06,
      "loss": 0.5936,
      "step": 2700
    },
    {
      "epoch": 1.8481960517358749,
      "grad_norm": 249426.0,
      "learning_rate": 2.1013615733736764e-06,
      "loss": 0.7609,
      "step": 2715
    },
    {
      "epoch": 1.8584070796460177,
      "grad_norm": 403828.75,
      "learning_rate": 2.094553706505295e-06,
      "loss": 0.6597,
      "step": 2730
    },
    {
      "epoch": 1.8686181075561605,
      "grad_norm": 829779.0625,
      "learning_rate": 2.087745839636914e-06,
      "loss": 0.6788,
      "step": 2745
    },
    {
      "epoch": 1.8788291354663036,
      "grad_norm": 939866.5625,
      "learning_rate": 2.0809379727685324e-06,
      "loss": 0.5589,
      "step": 2760
    },
    {
      "epoch": 1.8890401633764466,
      "grad_norm": 1203267.0,
      "learning_rate": 2.0741301059001512e-06,
      "loss": 0.7759,
      "step": 2775
    },
    {
      "epoch": 1.8992511912865895,
      "grad_norm": 1656486.5,
      "learning_rate": 2.06732223903177e-06,
      "loss": 0.4373,
      "step": 2790
    },
    {
      "epoch": 1.9094622191967323,
      "grad_norm": 1833536.875,
      "learning_rate": 2.060514372163389e-06,
      "loss": 1.0234,
      "step": 2805
    },
    {
      "epoch": 1.9196732471068754,
      "grad_norm": 429027.65625,
      "learning_rate": 2.0537065052950077e-06,
      "loss": 0.5515,
      "step": 2820
    },
    {
      "epoch": 1.9298842750170184,
      "grad_norm": 2480347.0,
      "learning_rate": 2.0468986384266265e-06,
      "loss": 0.679,
      "step": 2835
    },
    {
      "epoch": 1.9400953029271615,
      "grad_norm": 682525.625,
      "learning_rate": 2.040090771558245e-06,
      "loss": 0.5025,
      "step": 2850
    },
    {
      "epoch": 1.9503063308373043,
      "grad_norm": 289022.15625,
      "learning_rate": 2.0332829046898638e-06,
      "loss": 0.5361,
      "step": 2865
    },
    {
      "epoch": 1.9605173587474471,
      "grad_norm": 633536.75,
      "learning_rate": 2.0264750378214826e-06,
      "loss": 0.6829,
      "step": 2880
    },
    {
      "epoch": 1.9707283866575902,
      "grad_norm": 551145.0,
      "learning_rate": 2.0196671709531014e-06,
      "loss": 0.591,
      "step": 2895
    },
    {
      "epoch": 1.9809394145677333,
      "grad_norm": 951443.5,
      "learning_rate": 2.0128593040847202e-06,
      "loss": 0.5114,
      "step": 2910
    },
    {
      "epoch": 1.991150442477876,
      "grad_norm": 1422993.0,
      "learning_rate": 2.006051437216339e-06,
      "loss": 0.6774,
      "step": 2925
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9124748490945674,
      "eval_loss": 0.3880421221256256,
      "eval_runtime": 580.6935,
      "eval_samples_per_second": 5.135,
      "eval_steps_per_second": 0.642,
      "step": 2938
    }
  ],
  "logging_steps": 15,
  "max_steps": 7345,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.040378616096768e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}