{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.4344629729245113,
  "eval_steps": 2000,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 0.9492120146751404, "learning_rate": 1.0000000000000002e-06, "loss": 0.9769, "step": 10},
    {"epoch": 0.01, "grad_norm": 1.344916582107544, "learning_rate": 2.0000000000000003e-06, "loss": 1.0718, "step": 20},
    {"epoch": 0.01, "grad_norm": 0.6680417060852051, "learning_rate": 3e-06, "loss": 0.8901, "step": 30},
    {"epoch": 0.01, "grad_norm": 0.8369797468185425, "learning_rate": 4.000000000000001e-06, "loss": 0.9398, "step": 40},
    {"epoch": 0.02, "grad_norm": 0.5738756656646729, "learning_rate": 5e-06, "loss": 0.9152, "step": 50},
    {"epoch": 0.02, "grad_norm": 1.028475284576416, "learning_rate": 6e-06, "loss": 0.849, "step": 60},
    {"epoch": 0.03, "grad_norm": 1.2893372774124146, "learning_rate": 7e-06, "loss": 0.7312, "step": 70},
    {"epoch": 0.03, "grad_norm": 0.8779547810554504, "learning_rate": 8.000000000000001e-06, "loss": 0.6707, "step": 80},
    {"epoch": 0.03, "grad_norm": 0.8972748517990112, "learning_rate": 9e-06, "loss": 0.6413, "step": 90},
    {"epoch": 0.04, "grad_norm": 1.505399227142334, "learning_rate": 1e-05, "loss": 0.6129, "step": 100},
    {"epoch": 0.04, "grad_norm": 0.6442121267318726, "learning_rate": 9.989898989898991e-06, "loss": 0.5579, "step": 110},
    {"epoch": 0.04, "grad_norm": 0.623290479183197, "learning_rate": 9.97979797979798e-06, "loss": 0.6022, "step": 120},
    {"epoch": 0.05, "grad_norm": 0.7216657996177673, "learning_rate": 9.96969696969697e-06, "loss": 0.5481, "step": 130},
    {"epoch": 0.05, "grad_norm": 0.7031328678131104, "learning_rate": 9.95959595959596e-06, "loss": 0.5594, "step": 140},
    {"epoch": 0.05, "grad_norm": 0.7411965727806091, "learning_rate": 9.94949494949495e-06, "loss": 0.6241, "step": 150},
    {"epoch": 0.06, "grad_norm": 0.8735277652740479, "learning_rate": 9.939393939393939e-06, "loss": 0.498, "step": 160},
    {"epoch": 0.06, "grad_norm": 1.1062073707580566, "learning_rate": 9.92929292929293e-06, "loss": 0.6463, "step": 170},
    {"epoch": 0.06, "grad_norm": 0.7930333614349365, "learning_rate": 9.91919191919192e-06, "loss": 0.6231, "step": 180},
    {"epoch": 0.07, "grad_norm": 0.8774802684783936, "learning_rate": 9.90909090909091e-06, "loss": 0.5895, "step": 190},
    {"epoch": 0.07, "grad_norm": 0.929542064666748, "learning_rate": 9.8989898989899e-06, "loss": 0.632, "step": 200},
    {"epoch": 0.08, "grad_norm": 0.6024735569953918, "learning_rate": 9.88888888888889e-06, "loss": 0.5682, "step": 210},
    {"epoch": 0.08, "grad_norm": 0.5887441635131836, "learning_rate": 9.87878787878788e-06, "loss": 0.5192, "step": 220},
    {"epoch": 0.08, "grad_norm": 0.7813496589660645, "learning_rate": 9.86868686868687e-06, "loss": 0.5841, "step": 230},
    {"epoch": 0.09, "grad_norm": 0.5949407815933228, "learning_rate": 9.85858585858586e-06, "loss": 0.5263, "step": 240},
    {"epoch": 0.09, "grad_norm": 0.8114556670188904, "learning_rate": 9.84848484848485e-06, "loss": 0.5513, "step": 250},
    {"epoch": 0.09, "grad_norm": 0.7692855596542358, "learning_rate": 9.838383838383839e-06, "loss": 0.5217, "step": 260},
    {"epoch": 0.1, "grad_norm": 0.8319826126098633, "learning_rate": 9.828282828282829e-06, "loss": 0.4386, "step": 270},
    {"epoch": 0.1, "grad_norm": 0.6725042462348938, "learning_rate": 9.81818181818182e-06, "loss": 0.4919, "step": 280},
    {"epoch": 0.1, "grad_norm": 0.779315173625946, "learning_rate": 9.80808080808081e-06, "loss": 0.5281, "step": 290},
    {"epoch": 0.11, "grad_norm": 0.8005223274230957, "learning_rate": 9.797979797979798e-06, "loss": 0.5206, "step": 300},
    {"epoch": 0.11, "grad_norm": 0.6395801901817322, "learning_rate": 9.787878787878788e-06, "loss": 0.474, "step": 310},
    {"epoch": 0.11, "grad_norm": 1.1016992330551147, "learning_rate": 9.777777777777779e-06, "loss": 0.5403, "step": 320},
    {"epoch": 0.12, "grad_norm": 1.395189881324768, "learning_rate": 9.767676767676767e-06, "loss": 0.5181, "step": 330},
    {"epoch": 0.12, "grad_norm": 0.801499605178833, "learning_rate": 9.757575757575758e-06, "loss": 0.5296, "step": 340},
    {"epoch": 0.13, "grad_norm": 0.7606889605522156, "learning_rate": 9.747474747474748e-06, "loss": 0.5406, "step": 350},
    {"epoch": 0.13, "grad_norm": 0.580170750617981, "learning_rate": 9.737373737373738e-06, "loss": 0.541, "step": 360},
    {"epoch": 0.13, "grad_norm": 0.6730871796607971, "learning_rate": 9.727272727272728e-06, "loss": 0.5665, "step": 370},
    {"epoch": 0.14, "grad_norm": 1.240430474281311, "learning_rate": 9.717171717171719e-06, "loss": 0.5148, "step": 380},
    {"epoch": 0.14, "grad_norm": 0.9439683556556702, "learning_rate": 9.707070707070709e-06, "loss": 0.513, "step": 390},
    {"epoch": 0.14, "grad_norm": 0.6674547791481018, "learning_rate": 9.696969696969698e-06, "loss": 0.5202, "step": 400},
    {"epoch": 0.15, "grad_norm": 0.8516045808792114, "learning_rate": 9.686868686868688e-06, "loss": 0.5162, "step": 410},
    {"epoch": 0.15, "grad_norm": 0.8700432181358337, "learning_rate": 9.676767676767678e-06, "loss": 0.5392, "step": 420},
    {"epoch": 0.15, "grad_norm": 0.5687388777732849, "learning_rate": 9.666666666666667e-06, "loss": 0.5106, "step": 430},
    {"epoch": 0.16, "grad_norm": 1.2382631301879883, "learning_rate": 9.656565656565657e-06, "loss": 0.5074, "step": 440},
    {"epoch": 0.16, "grad_norm": 0.8921974301338196, "learning_rate": 9.646464646464647e-06, "loss": 0.561, "step": 450},
    {"epoch": 0.16, "grad_norm": 1.3508048057556152, "learning_rate": 9.636363636363638e-06, "loss": 0.5484, "step": 460},
    {"epoch": 0.17, "grad_norm": 1.1822205781936646, "learning_rate": 9.626262626262626e-06, "loss": 0.5719, "step": 470},
    {"epoch": 0.17, "grad_norm": 0.6617271304130554, "learning_rate": 9.616161616161616e-06, "loss": 0.535, "step": 480},
    {"epoch": 0.18, "grad_norm": 0.6614571809768677, "learning_rate": 9.606060606060607e-06, "loss": 0.4549, "step": 490},
    {"epoch": 0.18, "grad_norm": 0.8642953038215637, "learning_rate": 9.595959595959597e-06, "loss": 0.4789, "step": 500},
    {"epoch": 0.18, "grad_norm": 0.614743173122406, "learning_rate": 9.585858585858586e-06, "loss": 0.4854, "step": 510},
    {"epoch": 0.19, "grad_norm": 0.970829427242279, "learning_rate": 9.575757575757576e-06, "loss": 0.5196, "step": 520},
    {"epoch": 0.19, "grad_norm": 0.7311980128288269, "learning_rate": 9.565656565656566e-06, "loss": 0.5106, "step": 530},
    {"epoch": 0.19, "grad_norm": 0.765849769115448, "learning_rate": 9.555555555555556e-06, "loss": 0.5782, "step": 540},
    {"epoch": 0.2, "grad_norm": 1.0889312028884888, "learning_rate": 9.545454545454547e-06, "loss": 0.5824, "step": 550},
    {"epoch": 0.2, "grad_norm": 0.7402384877204895, "learning_rate": 9.535353535353537e-06, "loss": 0.5005, "step": 560},
    {"epoch": 0.2, "grad_norm": 0.707028329372406, "learning_rate": 9.525252525252526e-06, "loss": 0.5233, "step": 570},
    {"epoch": 0.21, "grad_norm": 0.8338315486907959, "learning_rate": 9.515151515151516e-06, "loss": 0.4694, "step": 580},
    {"epoch": 0.21, "grad_norm": 0.7450662851333618, "learning_rate": 9.505050505050506e-06, "loss": 0.4762, "step": 590},
    {"epoch": 0.22, "grad_norm": 0.7595840692520142, "learning_rate": 9.494949494949497e-06, "loss": 0.5018, "step": 600},
    {"epoch": 0.22, "grad_norm": 0.5880123376846313, "learning_rate": 9.484848484848485e-06, "loss": 0.5228, "step": 610},
    {"epoch": 0.22, "grad_norm": 0.9635146260261536, "learning_rate": 9.474747474747475e-06, "loss": 0.4987, "step": 620},
    {"epoch": 0.23, "grad_norm": 1.1274373531341553, "learning_rate": 9.464646464646466e-06, "loss": 0.5085, "step": 630},
    {"epoch": 0.23, "grad_norm": 1.1324831247329712, "learning_rate": 9.454545454545456e-06, "loss": 0.504, "step": 640},
    {"epoch": 0.23, "grad_norm": 0.5410157442092896, "learning_rate": 9.444444444444445e-06, "loss": 0.4619, "step": 650},
    {"epoch": 0.24, "grad_norm": 0.7583281993865967, "learning_rate": 9.434343434343435e-06, "loss": 0.5162, "step": 660},
    {"epoch": 0.24, "grad_norm": 0.6546668410301208, "learning_rate": 9.424242424242425e-06, "loss": 0.4969, "step": 670},
    {"epoch": 0.24, "grad_norm": 0.6070376634597778, "learning_rate": 9.414141414141414e-06, "loss": 0.4805, "step": 680},
    {"epoch": 0.25, "grad_norm": 1.0108693838119507, "learning_rate": 9.404040404040404e-06, "loss": 0.4808, "step": 690},
    {"epoch": 0.25, "grad_norm": 0.8799183368682861, "learning_rate": 9.393939393939396e-06, "loss": 0.5082, "step": 700},
    {"epoch": 0.25, "grad_norm": 1.3432070016860962, "learning_rate": 9.383838383838385e-06, "loss": 0.4353, "step": 710},
    {"epoch": 0.26, "grad_norm": 0.6518195271492004, "learning_rate": 9.373737373737375e-06, "loss": 0.4933, "step": 720},
    {"epoch": 0.26, "grad_norm": 0.6736329793930054, "learning_rate": 9.363636363636365e-06, "loss": 0.5342, "step": 730},
    {"epoch": 0.27, "grad_norm": 0.776785135269165, "learning_rate": 9.353535353535354e-06, "loss": 0.5162, "step": 740},
    {"epoch": 0.27, "grad_norm": 0.9443957805633545, "learning_rate": 9.343434343434344e-06, "loss": 0.4486, "step": 750},
    {"epoch": 0.27, "grad_norm": 0.8882728815078735, "learning_rate": 9.333333333333334e-06, "loss": 0.4774, "step": 760},
    {"epoch": 0.28, "grad_norm": 0.819239616394043, "learning_rate": 9.323232323232325e-06, "loss": 0.5, "step": 770},
    {"epoch": 0.28, "grad_norm": 0.8829065561294556, "learning_rate": 9.313131313131313e-06, "loss": 0.4655, "step": 780},
    {"epoch": 0.28, "grad_norm": 1.1993345022201538, "learning_rate": 9.303030303030303e-06, "loss": 0.5392, "step": 790},
    {"epoch": 0.29, "grad_norm": 0.681409478187561, "learning_rate": 9.292929292929294e-06, "loss": 0.5076, "step": 800},
    {"epoch": 0.29, "grad_norm": 1.075088381767273, "learning_rate": 9.282828282828284e-06, "loss": 0.4953, "step": 810},
    {"epoch": 0.29, "grad_norm": 0.7040195465087891, "learning_rate": 9.272727272727273e-06, "loss": 0.5596, "step": 820},
    {"epoch": 0.3, "grad_norm": 1.1210192441940308, "learning_rate": 9.262626262626263e-06, "loss": 0.5299, "step": 830},
    {"epoch": 0.3, "grad_norm": 0.583011269569397, "learning_rate": 9.252525252525253e-06, "loss": 0.497, "step": 840},
    {"epoch": 0.3, "grad_norm": 0.6583887338638306, "learning_rate": 9.242424242424244e-06, "loss": 0.5556, "step": 850},
    {"epoch": 0.31, "grad_norm": 0.8040810227394104, "learning_rate": 9.232323232323232e-06, "loss": 0.5257, "step": 860},
    {"epoch": 0.31, "grad_norm": 0.9269919991493225, "learning_rate": 9.222222222222224e-06, "loss": 0.4421, "step": 870},
    {"epoch": 0.32, "grad_norm": 0.9947918653488159, "learning_rate": 9.212121212121213e-06, "loss": 0.5297, "step": 880},
    {"epoch": 0.32, "grad_norm": 0.6900811791419983, "learning_rate": 9.202020202020203e-06, "loss": 0.4833, "step": 890},
    {"epoch": 0.32, "grad_norm": 0.8033557534217834, "learning_rate": 9.191919191919193e-06, "loss": 0.4894, "step": 900},
    {"epoch": 0.33, "grad_norm": 0.635124683380127, "learning_rate": 9.181818181818184e-06, "loss": 0.4554, "step": 910},
    {"epoch": 0.33, "grad_norm": 0.7293840646743774, "learning_rate": 9.171717171717172e-06, "loss": 0.4693, "step": 920},
    {"epoch": 0.33, "grad_norm": 0.7628031373023987, "learning_rate": 9.161616161616162e-06, "loss": 0.5181, "step": 930},
    {"epoch": 0.34, "grad_norm": 1.0783181190490723, "learning_rate": 9.151515151515153e-06, "loss": 0.4632, "step": 940},
    {"epoch": 0.34, "grad_norm": 0.5340379476547241, "learning_rate": 9.141414141414143e-06, "loss": 0.4664, "step": 950},
    {"epoch": 0.34, "grad_norm": 0.9029551148414612, "learning_rate": 9.131313131313132e-06, "loss": 0.5333, "step": 960},
    {"epoch": 0.35, "grad_norm": 0.7257616519927979, "learning_rate": 9.121212121212122e-06, "loss": 0.5168, "step": 970},
    {"epoch": 0.35, "grad_norm": 0.761325478553772, "learning_rate": 9.111111111111112e-06, "loss": 0.5606, "step": 980},
    {"epoch": 0.36, "grad_norm": 0.8582245707511902, "learning_rate": 9.1010101010101e-06, "loss": 0.4332, "step": 990},
    {"epoch": 0.36, "grad_norm": 0.8598415851593018, "learning_rate": 9.090909090909091e-06, "loss": 0.5884, "step": 1000},
    {"epoch": 0.36, "grad_norm": 0.8292351365089417, "learning_rate": 9.080808080808081e-06, "loss": 0.4848, "step": 1010},
    {"epoch": 0.37, "grad_norm": 1.0559266805648804, "learning_rate": 9.070707070707072e-06, "loss": 0.4588, "step": 1020},
    {"epoch": 0.37, "grad_norm": 0.6693033576011658, "learning_rate": 9.06060606060606e-06, "loss": 0.5333, "step": 1030},
    {"epoch": 0.37, "grad_norm": 0.8114706873893738, "learning_rate": 9.050505050505052e-06, "loss": 0.5166, "step": 1040},
    {"epoch": 0.38, "grad_norm": 0.8659316301345825, "learning_rate": 9.040404040404042e-06, "loss": 0.4504, "step": 1050},
    {"epoch": 0.38, "grad_norm": 0.9083582758903503, "learning_rate": 9.030303030303031e-06, "loss": 0.5611, "step": 1060},
    {"epoch": 0.38, "grad_norm": 0.6691566109657288, "learning_rate": 9.020202020202021e-06, "loss": 0.5192, "step": 1070},
    {"epoch": 0.39, "grad_norm": 0.5889317989349365, "learning_rate": 9.010101010101012e-06, "loss": 0.4515, "step": 1080},
    {"epoch": 0.39, "grad_norm": 0.9215373992919922, "learning_rate": 9e-06, "loss": 0.4776, "step": 1090},
    {"epoch": 0.39, "grad_norm": 0.7439729571342468, "learning_rate": 8.98989898989899e-06, "loss": 0.4656, "step": 1100},
    {"epoch": 0.4, "grad_norm": 1.1780657768249512, "learning_rate": 8.97979797979798e-06, "loss": 0.4933, "step": 1110},
    {"epoch": 0.4, "grad_norm": 0.9686077833175659, "learning_rate": 8.969696969696971e-06, "loss": 0.5167, "step": 1120},
    {"epoch": 0.41, "grad_norm": 0.829994261264801, "learning_rate": 8.95959595959596e-06, "loss": 0.491, "step": 1130},
    {"epoch": 0.41, "grad_norm": 0.6313827633857727, "learning_rate": 8.94949494949495e-06, "loss": 0.4864, "step": 1140},
    {"epoch": 0.41, "grad_norm": 0.596537709236145, "learning_rate": 8.93939393939394e-06, "loss": 0.4807, "step": 1150},
    {"epoch": 0.42, "grad_norm": 0.8714896440505981, "learning_rate": 8.92929292929293e-06, "loss": 0.512, "step": 1160},
    {"epoch": 0.42, "grad_norm": 0.9466399550437927, "learning_rate": 8.919191919191919e-06, "loss": 0.4883, "step": 1170},
    {"epoch": 0.42, "grad_norm": 0.7337993383407593, "learning_rate": 8.90909090909091e-06, "loss": 0.4757, "step": 1180},
    {"epoch": 0.43, "grad_norm": 0.7684504985809326, "learning_rate": 8.8989898989899e-06, "loss": 0.5224, "step": 1190},
    {"epoch": 0.43, "grad_norm": 0.6455299854278564, "learning_rate": 8.888888888888888e-06, "loss": 0.5346, "step": 1200},
    {"epoch": 0.43, "grad_norm": 0.7279661893844604, "learning_rate": 8.87878787878788e-06, "loss": 0.4845, "step": 1210},
    {"epoch": 0.44, "grad_norm": 0.7996425032615662, "learning_rate": 8.86868686868687e-06, "loss": 0.4812, "step": 1220},
    {"epoch": 0.44, "grad_norm": 0.7299336791038513, "learning_rate": 8.85858585858586e-06, "loss": 0.4985, "step": 1230},
    {"epoch": 0.44, "grad_norm": 0.7462379932403564, "learning_rate": 8.84848484848485e-06, "loss": 0.5742, "step": 1240},
    {"epoch": 0.45, "grad_norm": 0.7165307998657227, "learning_rate": 8.83838383838384e-06, "loss": 0.4627, "step": 1250},
    {"epoch": 0.45, "grad_norm": 0.7239411473274231, "learning_rate": 8.82828282828283e-06, "loss": 0.5469, "step": 1260},
    {"epoch": 0.46, "grad_norm": 0.5761345028877258, "learning_rate": 8.818181818181819e-06, "loss": 0.4391, "step": 1270},
    {"epoch": 0.46, "grad_norm": 0.8207817077636719, "learning_rate": 8.808080808080809e-06, "loss": 0.5477, "step": 1280},
    {"epoch": 0.46, "grad_norm": 0.9331930875778198, "learning_rate": 8.7979797979798e-06, "loss": 0.4975, "step": 1290},
    {"epoch": 0.47, "grad_norm": 1.0401968955993652, "learning_rate": 8.787878787878788e-06, "loss": 0.5149, "step": 1300},
    {"epoch": 0.47, "grad_norm": 0.7848596572875977, "learning_rate": 8.777777777777778e-06, "loss": 0.5448, "step": 1310},
    {"epoch": 0.47, "grad_norm": 1.5979989767074585, "learning_rate": 8.767676767676768e-06, "loss": 0.4861, "step": 1320},
    {"epoch": 0.48, "grad_norm": 0.7734145522117615, "learning_rate": 8.757575757575759e-06, "loss": 0.5955, "step": 1330},
    {"epoch": 0.48, "grad_norm": 0.9506089687347412, "learning_rate": 8.747474747474747e-06, "loss": 0.5212, "step": 1340},
    {"epoch": 0.48, "grad_norm": 0.7118907570838928, "learning_rate": 8.737373737373738e-06, "loss": 0.474, "step": 1350},
    {"epoch": 0.49, "grad_norm": 1.0045005083084106, "learning_rate": 8.727272727272728e-06, "loss": 0.509, "step": 1360},
    {"epoch": 0.49, "grad_norm": 1.1283302307128906, "learning_rate": 8.717171717171718e-06, "loss": 0.5096, "step": 1370},
    {"epoch": 0.49, "grad_norm": 1.3884085416793823, "learning_rate": 8.707070707070707e-06, "loss": 0.5008, "step": 1380},
    {"epoch": 0.5, "grad_norm": 0.7452566027641296, "learning_rate": 8.696969696969699e-06, "loss": 0.4792, "step": 1390},
    {"epoch": 0.5, "grad_norm": 1.3434414863586426, "learning_rate": 8.686868686868687e-06, "loss": 0.5385, "step": 1400},
    {"epoch": 0.51, "grad_norm": 0.9600369930267334, "learning_rate": 8.676767676767678e-06, "loss": 0.4743, "step": 1410},
    {"epoch": 0.51, "grad_norm": 0.6895599961280823, "learning_rate": 8.666666666666668e-06, "loss": 0.5639, "step": 1420},
    {"epoch": 0.51, "grad_norm": 0.9460670948028564, "learning_rate": 8.656565656565658e-06, "loss": 0.5957, "step": 1430},
    {"epoch": 0.52, "grad_norm": 0.6181186437606812, "learning_rate": 8.646464646464647e-06, "loss": 0.5032, "step": 1440},
    {"epoch": 0.52, "grad_norm": 0.6992838382720947, "learning_rate": 8.636363636363637e-06, "loss": 0.5371, "step": 1450},
    {"epoch": 0.52, "grad_norm": 0.6007195711135864, "learning_rate": 8.626262626262627e-06, "loss": 0.4446, "step": 1460},
    {"epoch": 0.53, "grad_norm": 0.8171485662460327, "learning_rate": 8.616161616161618e-06, "loss": 0.4835, "step": 1470},
    {"epoch": 0.53, "grad_norm": 0.9369902610778809, "learning_rate": 8.606060606060606e-06, "loss": 0.5437, "step": 1480},
    {"epoch": 0.53, "grad_norm": 1.1894652843475342, "learning_rate": 8.595959595959596e-06, "loss": 0.5151, "step": 1490},
    {"epoch": 0.54, "grad_norm": 0.694837212562561, "learning_rate": 8.585858585858587e-06, "loss": 0.488, "step": 1500},
    {"epoch": 0.54, "grad_norm": 0.8813522458076477, "learning_rate": 8.575757575757575e-06, "loss": 0.5129, "step": 1510},
    {"epoch": 0.55, "grad_norm": 1.0121350288391113, "learning_rate": 8.565656565656566e-06, "loss": 0.5165, "step": 1520},
    {"epoch": 0.55, "grad_norm": 0.7935439944267273, "learning_rate": 8.555555555555556e-06, "loss": 0.4736, "step": 1530},
    {"epoch": 0.55, "grad_norm": 0.7670463919639587, "learning_rate": 8.545454545454546e-06, "loss": 0.5214, "step": 1540},
    {"epoch": 0.56, "grad_norm": 1.212927222251892, "learning_rate": 8.535353535353535e-06, "loss": 0.5125, "step": 1550},
    {"epoch": 0.56, "grad_norm": 0.7966919541358948, "learning_rate": 8.525252525252527e-06, "loss": 0.4823, "step": 1560},
    {"epoch": 0.56, "grad_norm": 1.0880494117736816, "learning_rate": 8.515151515151517e-06, "loss": 0.478, "step": 1570},
    {"epoch": 0.57, "grad_norm": 1.0308737754821777, "learning_rate": 8.505050505050506e-06, "loss": 0.5368, "step": 1580},
    {"epoch": 0.57, "grad_norm": 0.7291275262832642, "learning_rate": 8.494949494949496e-06, "loss": 0.4838, "step": 1590},
    {"epoch": 0.57, "grad_norm": 0.6764214038848877, "learning_rate": 8.484848484848486e-06, "loss": 0.4882, "step": 1600},
    {"epoch": 0.58, "grad_norm": 1.051628828048706, "learning_rate": 8.474747474747475e-06, "loss": 0.4564, "step": 1610},
    {"epoch": 0.58, "grad_norm": 0.8614614605903625, "learning_rate": 8.464646464646465e-06, "loss": 0.5632, "step": 1620},
    {"epoch": 0.58, "grad_norm": 1.1045228242874146, "learning_rate": 8.454545454545455e-06, "loss": 0.4535, "step": 1630},
    {"epoch": 0.59, "grad_norm": 0.8160364031791687, "learning_rate": 8.444444444444446e-06, "loss": 0.4964, "step": 1640},
    {"epoch": 0.59, "grad_norm": 0.7776429653167725, "learning_rate": 8.434343434343434e-06, "loss": 0.4554, "step": 1650},
    {"epoch": 0.6, "grad_norm": 0.7589672207832336, "learning_rate": 8.424242424242425e-06, "loss": 0.4458, "step": 1660},
    {"epoch": 0.6, "grad_norm": 0.825233519077301, "learning_rate": 8.414141414141415e-06, "loss": 0.4814, "step": 1670},
    {"epoch": 0.6, "grad_norm": 0.8226912617683411, "learning_rate": 8.404040404040405e-06, "loss": 0.4892, "step": 1680},
    {"epoch": 0.61, "grad_norm": 0.9273412823677063, "learning_rate": 8.393939393939394e-06, "loss": 0.5143, "step": 1690},
    {"epoch": 0.61, "grad_norm": 0.9043828248977661, "learning_rate": 8.383838383838384e-06, "loss": 0.4598, "step": 1700},
    {"epoch": 0.61, "grad_norm": 2.2596805095672607, "learning_rate": 8.373737373737374e-06, "loss": 0.5415, "step": 1710},
    {"epoch": 0.62, "grad_norm": 1.0041202306747437, "learning_rate": 8.363636363636365e-06, "loss": 0.5899, "step": 1720},
    {"epoch": 0.62, "grad_norm": 0.9188370704650879, "learning_rate": 8.353535353535355e-06, "loss": 0.5119, "step": 1730},
    {"epoch": 0.62, "grad_norm": 0.7778961062431335, "learning_rate": 8.343434343434345e-06, "loss": 0.5237, "step": 1740},
    {"epoch": 0.63, "grad_norm": 0.7438649535179138, "learning_rate": 8.333333333333334e-06, "loss": 0.4999, "step": 1750},
    {"epoch": 0.63, "grad_norm": 0.5649489760398865, "learning_rate": 8.323232323232324e-06, "loss": 0.552, "step": 1760},
    {"epoch": 0.63, "grad_norm": 0.5625451803207397, "learning_rate": 8.313131313131314e-06, "loss": 0.4549, "step": 1770},
    {"epoch": 0.64, "grad_norm": 1.3711755275726318, "learning_rate": 8.303030303030305e-06, "loss": 0.445, "step": 1780},
    {"epoch": 0.64, "grad_norm": 1.4339165687561035, "learning_rate": 8.292929292929293e-06, "loss": 0.4975, "step": 1790},
    {"epoch": 0.65, "grad_norm": 0.8113200068473816, "learning_rate": 8.282828282828283e-06, "loss": 0.5288, "step": 1800},
    {"epoch": 0.65, "grad_norm": 1.1567124128341675, "learning_rate": 8.272727272727274e-06, "loss": 0.4669, "step": 1810},
    {"epoch": 0.65, "grad_norm": 0.7966761589050293, "learning_rate": 8.262626262626264e-06, "loss": 0.4856, "step": 1820},
    {"epoch": 0.66, "grad_norm": 1.0181186199188232, "learning_rate": 8.252525252525253e-06, "loss": 0.4576, "step": 1830},
    {"epoch": 0.66, "grad_norm": 0.611566960811615, "learning_rate": 8.242424242424243e-06, "loss": 0.496, "step": 1840},
    {"epoch": 0.66, "grad_norm": 0.6482832431793213, "learning_rate": 8.232323232323233e-06, "loss": 0.4601, "step": 1850},
    {"epoch": 0.67, "grad_norm": 0.7550622820854187, "learning_rate": 8.222222222222222e-06, "loss": 0.5036, "step": 1860},
    {"epoch": 0.67, "grad_norm": 0.7835694551467896, "learning_rate": 8.212121212121212e-06, "loss": 0.5617, "step": 1870},
    {"epoch": 0.67, "grad_norm": 0.7926068305969238, "learning_rate": 8.202020202020202e-06, "loss": 0.4327, "step": 1880},
    {"epoch": 0.68, "grad_norm": 0.786851167678833, "learning_rate": 8.191919191919193e-06, "loss": 0.4654, "step": 1890},
    {"epoch": 0.68, "grad_norm": 0.9023171663284302, "learning_rate": 8.181818181818183e-06, "loss": 0.5426, "step": 1900},
    {"epoch": 0.68, "grad_norm": 1.0345401763916016, "learning_rate": 8.171717171717173e-06, "loss": 0.52, "step": 1910},
    {"epoch": 0.69, "grad_norm": 0.945004940032959, "learning_rate": 8.161616161616162e-06, "loss": 0.5512, "step": 1920},
    {"epoch": 0.69, "grad_norm": 0.658362090587616, "learning_rate": 8.151515151515152e-06, "loss": 0.513, "step": 1930},
    {"epoch": 0.7, "grad_norm": 0.6390058398246765, "learning_rate": 8.141414141414142e-06, "loss": 0.5652, "step": 1940},
    {"epoch": 0.7, "grad_norm": 0.7705880403518677, "learning_rate": 8.131313131313133e-06, "loss": 0.4929, "step": 1950},
    {"epoch": 0.7, "grad_norm": 0.5400047302246094, "learning_rate": 8.121212121212121e-06, "loss": 0.4932, "step": 1960},
    {"epoch": 0.71, "grad_norm": 0.9128320217132568, "learning_rate": 8.111111111111112e-06, "loss": 0.5085, "step": 1970},
    {"epoch": 0.71, "grad_norm": 1.0019017457962036, "learning_rate": 8.101010101010102e-06, "loss": 0.4552, "step": 1980},
    {"epoch": 0.71, "grad_norm": 0.818148136138916, "learning_rate": 8.090909090909092e-06, "loss": 0.5517, "step": 1990},
    {"epoch": 0.72, "grad_norm": 0.8848174810409546, "learning_rate": 8.08080808080808e-06, "loss": 0.5094, "step": 2000},
    {"epoch": 0.72, "eval_loss": 0.6485620141029358, "eval_runtime": 340.5095, "eval_samples_per_second": 2.937, "eval_steps_per_second": 2.937, "step": 2000},
    {"epoch": 0.72, "grad_norm": 0.9389908313751221, "learning_rate": 8.070707070707071e-06, "loss": 0.5228, "step": 2010},
    {"epoch": 0.72, "grad_norm": 0.901942789554596, "learning_rate": 8.060606060606061e-06, "loss": 0.4322, "step": 2020},
    {"epoch": 0.73, "grad_norm": 0.6482081413269043, "learning_rate": 8.050505050505052e-06, "loss": 0.4123, "step": 2030},
    {"epoch": 0.73, "grad_norm": 1.188251256942749, "learning_rate": 8.04040404040404e-06, "loss": 0.3863, "step": 2040},
    {"epoch": 0.74, "grad_norm": 0.6995498538017273, "learning_rate": 8.03030303030303e-06, "loss": 0.4834, "step": 2050},
    {"epoch": 0.74, "grad_norm": 0.7236158847808838, "learning_rate": 8.02020202020202e-06, "loss": 0.5279, "step": 2060},
    {"epoch": 0.74, "grad_norm": 0.6699327826499939, "learning_rate": 8.010101010101011e-06, "loss": 0.5294, "step": 2070},
    {"epoch": 0.75, "grad_norm": 0.7172369956970215, "learning_rate": 8.000000000000001e-06, "loss": 0.4882, "step": 2080},
    {"epoch": 0.75, "grad_norm": 0.7741474509239197, "learning_rate": 7.989898989898992e-06, "loss": 0.4928, "step": 2090},
    {"epoch": 0.75, "grad_norm": 0.7598722577095032, "learning_rate": 7.97979797979798e-06, "loss": 0.477, "step": 2100},
    {"epoch": 0.76, "grad_norm": 0.7849257588386536, "learning_rate": 7.96969696969697e-06, "loss": 0.497, "step": 2110},
    {"epoch": 0.76, "grad_norm": 0.7920640707015991, "learning_rate": 7.95959595959596e-06, "loss": 0.4974, "step": 2120},
    {"epoch": 0.76, "grad_norm": 0.7092992067337036, "learning_rate": 7.949494949494951e-06, "loss": 0.5234, "step": 2130},
    {"epoch": 0.77, "grad_norm": 0.8268713355064392, "learning_rate": 7.93939393939394e-06, "loss": 0.3966, "step": 2140},
    {"epoch": 0.77, "grad_norm": 1.1046198606491089, "learning_rate": 7.92929292929293e-06, "loss": 0.452, "step": 2150},
    {"epoch": 0.77, "grad_norm": 0.8234682083129883, "learning_rate": 7.91919191919192e-06, "loss": 0.5364, "step": 2160},
    {"epoch": 0.78, "grad_norm": 0.8524590730667114, "learning_rate": 7.909090909090909e-06, "loss": 0.3922, "step": 2170},
    {"epoch": 0.78, "grad_norm": 0.964320182800293, "learning_rate": 7.898989898989899e-06, "loss": 0.4991, "step": 2180},
    {"epoch": 0.79, "grad_norm": 1.2167562246322632, "learning_rate": 7.88888888888889e-06, "loss": 0.4237, "step": 2190},
    {"epoch": 0.79, "grad_norm": 0.893385648727417, "learning_rate": 7.87878787878788e-06, "loss": 0.503, "step": 2200},
    {"epoch": 0.79, "grad_norm": 1.4781544208526611, "learning_rate": 7.868686868686868e-06, "loss": 0.4084, "step": 2210},
    {"epoch": 0.8, "grad_norm": 1.0282255411148071, "learning_rate": 7.858585858585859e-06, "loss": 0.5531, "step": 2220},
    {"epoch": 0.8, "grad_norm": 0.5956897139549255, "learning_rate": 7.848484848484849e-06, "loss": 0.421, "step": 2230},
    {"epoch": 0.8, "grad_norm": 0.674526572227478, "learning_rate": 7.838383838383839e-06, "loss": 0.5221, "step": 2240},
    {"epoch": 0.81, "grad_norm": 0.7602831125259399, "learning_rate": 7.82828282828283e-06, "loss": 0.4826, "step": 2250},
    {"epoch": 0.81, "grad_norm": 0.637804388999939, "learning_rate": 7.81818181818182e-06, "loss": 0.4965, "step": 2260},
    {"epoch": 0.81, "grad_norm": 1.0468482971191406, "learning_rate": 7.808080808080808e-06, "loss": 0.4352, "step": 2270},
    {"epoch": 0.82, "grad_norm": 0.8232371807098389, "learning_rate": 7.797979797979799e-06, "loss": 0.4314, "step": 2280},
    {"epoch": 0.82, "grad_norm": 1.2416061162948608, "learning_rate": 7.787878787878789e-06, "loss": 0.4559, "step": 2290},
    {"epoch": 0.82, "grad_norm": 0.7591462135314941, "learning_rate": 7.77777777777778e-06, "loss": 0.5443, "step": 2300},
    {"epoch": 0.83, "grad_norm": 0.7769295573234558, "learning_rate": 7.767676767676768e-06, "loss": 0.4915, "step": 2310},
    {"epoch": 0.83, "grad_norm": 0.7007648944854736, "learning_rate": 7.757575757575758e-06, "loss": 0.4102, "step": 2320},
    {"epoch": 0.84, "grad_norm": 0.8512477874755859, "learning_rate": 7.747474747474748e-06, "loss": 0.488, "step": 2330},
    {"epoch": 0.84, "grad_norm": 1.0806859731674194, "learning_rate": 7.737373737373739e-06, "loss": 0.4482, "step": 2340},
    {"epoch": 0.84, "grad_norm": 0.8377964496612549, "learning_rate": 7.727272727272727e-06, "loss": 0.4714, "step": 2350},
    {"epoch": 0.85, "grad_norm": 0.9962327480316162, "learning_rate": 7.717171717171717e-06, "loss": 0.4354, "step": 2360},
    {"epoch": 0.85, "grad_norm": 0.7444832921028137, "learning_rate": 7.707070707070708e-06, "loss": 0.5651, "step": 2370},
    {"epoch": 0.85, "grad_norm": 0.7137871384620667, "learning_rate": 7.696969696969696e-06, "loss": 0.4773, "step": 2380},
    {"epoch": 0.86, "grad_norm": 0.8521376252174377, "learning_rate": 7.686868686868687e-06, "loss": 0.4961, "step": 2390},
    {"epoch": 0.86, "grad_norm": 0.9442594647407532, "learning_rate": 7.676767676767677e-06, "loss": 0.4627, "step": 2400},
    {"epoch": 0.86, "grad_norm": 1.169360876083374, "learning_rate": 7.666666666666667e-06, "loss": 0.4601, "step": 2410},
    {"epoch": 0.87, "grad_norm": 0.8181536197662354, "learning_rate": 7.656565656565658e-06, "loss": 0.4635, "step": 2420},
    {"epoch": 0.87, "grad_norm": 0.9113516211509705, "learning_rate": 7.646464646464648e-06, "loss": 0.4219, "step": 2430},
    {"epoch": 0.88, "grad_norm": 1.26584792137146, "learning_rate": 7.636363636363638e-06, "loss": 0.5165, "step": 2440},
    {"epoch": 0.88, "grad_norm": 0.6445989608764648, "learning_rate": 7.6262626262626275e-06, "loss": 0.4755, "step": 2450},
    {"epoch": 0.88, "grad_norm": 0.7551100850105286, "learning_rate": 7.616161616161617e-06, "loss": 0.4976, "step": 2460},
    {"epoch": 0.89, "grad_norm": 1.0578736066818237, "learning_rate": 7.606060606060606e-06, "loss": 0.524, "step": 2470},
    {"epoch": 0.89, "grad_norm": 1.0422261953353882, "learning_rate": 7.595959595959597e-06, "loss": 0.5136, "step": 2480},
    {"epoch": 0.89, "grad_norm": 1.0524308681488037, "learning_rate": 7.585858585858586e-06, "loss": 0.5058, "step": 2490},
    {"epoch": 0.9, "grad_norm": 0.8334051966667175, "learning_rate": 7.5757575757575764e-06, "loss": 0.4487, "step": 2500},
    {"epoch": 0.9, "grad_norm": 0.9613906145095825, "learning_rate": 7.565656565656566e-06, "loss": 0.5055, "step": 2510},
    {"epoch": 0.9, "grad_norm": 0.9347293972969055, "learning_rate": 7.555555555555556e-06, "loss": 0.4672, "step": 2520},
    {"epoch": 0.91, "grad_norm": 1.0498868227005005, "learning_rate": 7.545454545454546e-06, "loss": 0.4336, "step": 2530},
    {"epoch": 0.91, "grad_norm": 1.0005099773406982, "learning_rate": 7.535353535353536e-06, "loss": 0.523, "step": 2540},
    {"epoch": 0.91, "grad_norm": 1.027875542640686, "learning_rate": 7.525252525252525e-06, "loss": 0.5088, "step": 2550},
    {"epoch": 0.92, "grad_norm": 0.7960171103477478, "learning_rate": 7.515151515151516e-06, "loss": 0.4636, "step": 2560},
    {"epoch": 0.92, "grad_norm": 1.1945937871932983, "learning_rate": 7.505050505050505e-06, "loss": 0.4736, "step": 2570},
    {"epoch": 0.93, "grad_norm": 0.9849948287010193, "learning_rate": 7.494949494949496e-06, "loss": 0.4766, "step": 2580},
    {"epoch": 0.93, "grad_norm": 0.7381343841552734, "learning_rate": 7.484848484848486e-06, "loss": 0.4427, "step": 2590},
    {"epoch": 0.93, "grad_norm": 0.964435875415802, "learning_rate": 7.474747474747476e-06, "loss": 0.5179, "step": 2600},
    {"epoch": 0.94, "grad_norm": 1.0832308530807495, "learning_rate": 7.464646464646465e-06, "loss": 0.5179, "step": 2610},
    {"epoch": 0.94, "grad_norm": 0.8275842070579529, "learning_rate": 7.454545454545456e-06, "loss": 0.3957, "step": 2620},
    {"epoch": 0.94, "grad_norm": 1.7618038654327393, "learning_rate": 7.444444444444445e-06, "loss": 0.4626, "step": 2630},
    {"epoch": 0.95, "grad_norm": 0.9667460918426514, "learning_rate": 7.434343434343435e-06, "loss": 0.4338, "step": 2640},
    {"epoch": 0.95, "grad_norm": 0.8243513107299805, "learning_rate": 7.424242424242425e-06, "loss": 0.496, "step": 2650},
    {"epoch": 0.95, "grad_norm": 0.8097424507141113, "learning_rate": 7.414141414141415e-06, "loss": 0.4911, "step": 2660},
    {"epoch": 0.96, "grad_norm": 1.219351053237915, "learning_rate": 7.4040404040404045e-06, "loss": 0.5163, "step": 2670},
    {"epoch": 0.96, "grad_norm": 0.7272489666938782, "learning_rate": 7.393939393939395e-06, "loss": 0.4783, "step": 2680},
    {"epoch": 0.96, "grad_norm": 0.9799202084541321, "learning_rate": 7.383838383838384e-06, "loss": 0.4638, "step": 2690},
    {"epoch": 0.97, "grad_norm": 1.1537474393844604, "learning_rate": 7.373737373737374e-06, "loss": 0.4184, "step": 2700},
    {"epoch": 0.97, "grad_norm": 0.750483512878418, "learning_rate": 7.363636363636364e-06, "loss": 0.4544, "step": 2710},
    {"epoch": 0.98, "grad_norm": 0.915256679058075, "learning_rate": 7.353535353535353e-06, "loss": 0.4579, "step": 2720},
    {"epoch": 0.98, "grad_norm": 1.0220776796340942, "learning_rate": 7.343434343434344e-06, "loss": 0.4568, "step": 2730},
    {"epoch": 0.98, "grad_norm": 1.0540516376495361, "learning_rate": 7.333333333333333e-06, "loss": 0.5228, "step": 2740},
    {"epoch": 0.99, "grad_norm": 2.014601230621338, "learning_rate": 7.323232323232324e-06, "loss": 0.4482, "step": 2750},
    {"epoch": 0.99, "grad_norm": 0.9005866646766663, "learning_rate": 7.3131313131313146e-06, "loss": 0.4868, "step": 2760},
    {"epoch": 0.99, "grad_norm": 1.0538028478622437, "learning_rate": 7.303030303030304e-06, "loss": 0.4884, "step": 2770},
    {"epoch": 1.0, "grad_norm": 0.9115027785301208, "learning_rate": 7.2929292929292934e-06, "loss": 0.4421, "step": 2780},
    {"epoch": 1.0, "grad_norm": 1.5715361833572388, "learning_rate": 7.282828282828284e-06, "loss": 0.4442, "step": 2790},
    {"epoch": 1.0, "grad_norm": 0.8492701649665833, "learning_rate": 7.272727272727273e-06, "loss": 0.4828, "step": 2800},
    {"epoch": 1.01, "grad_norm": 0.7459146976470947, "learning_rate": 7.2626262626262635e-06, "loss": 0.4649, "step": 2810},
    {"epoch": 1.01, "grad_norm": 0.6268482804298401, "learning_rate": 7.252525252525253e-06, "loss": 0.4521, "step": 2820},
    {"epoch": 1.01, "grad_norm": 0.9125592112541199, "learning_rate": 7.242424242424243e-06, "loss": 0.57, "step": 2830},
    {"epoch": 1.02, "grad_norm": 0.8162791132926941, "learning_rate": 7.232323232323233e-06, "loss": 0.4746, "step": 2840},
    {"epoch": 1.02, "grad_norm": 0.9345506429672241, "learning_rate": 7.222222222222223e-06, "loss": 0.4777, "step": 2850},
    {"epoch": 1.03, "grad_norm": 0.6382574439048767, "learning_rate": 7.212121212121212e-06, "loss": 0.4832, "step": 2860},
    {"epoch": 1.03, "grad_norm": 0.9409840703010559, "learning_rate": 7.202020202020203e-06, "loss": 0.4659, "step": 2870},
    {"epoch": 1.03, "grad_norm": 1.0053200721740723, "learning_rate": 7.191919191919192e-06, "loss": 0.4041, "step": 2880},
    {"epoch": 1.04, "grad_norm": 0.8394672870635986, "learning_rate": 7.181818181818182e-06, "loss": 0.4477, "step": 2890},
    {"epoch": 1.04, "grad_norm": 0.8685116171836853, "learning_rate": 7.171717171717172e-06, "loss": 0.4155, "step": 2900},
    {"epoch": 1.04, "grad_norm": 0.8150361776351929, "learning_rate": 7.161616161616162e-06, "loss": 0.3901, "step": 2910},
    {"epoch": 1.05, "grad_norm": 0.7799510955810547, "learning_rate": 7.151515151515152e-06, "loss": 0.4273, "step": 2920},
    {"epoch": 1.05, "grad_norm": 1.1197009086608887, "learning_rate": 7.141414141414143e-06, "loss": 0.447, "step": 2930},
    {"epoch": 1.05, "grad_norm": 0.8753387331962585, "learning_rate": 7.131313131313132e-06, "loss": 0.3847, "step": 2940},
    {"epoch": 1.06, "grad_norm": 0.9633245468139648, "learning_rate": 7.121212121212122e-06, "loss": 0.4985, "step": 2950},
    {"epoch": 1.06, "grad_norm": 1.0391559600830078, "learning_rate": 7.111111111111112e-06, "loss": 0.4476, "step": 2960},
    {"epoch": 1.07, "grad_norm": 0.9432191252708435, "learning_rate": 7.101010101010102e-06, "loss": 0.4669, "step": 2970},
    {"epoch": 1.07, "grad_norm": 1.078410267829895, "learning_rate": 7.0909090909090916e-06, "loss": 0.401, "step": 2980},
    {"epoch": 1.07, "grad_norm": 0.9405683875083923, "learning_rate": 7.080808080808082e-06, "loss": 0.4744, "step": 2990},
    {"epoch": 1.08, "grad_norm": 1.4189893007278442, "learning_rate": 7.070707070707071e-06, "loss": 0.4334, "step": 3000},
    {"epoch": 1.08, "grad_norm": 0.9169256091117859, "learning_rate": 7.060606060606061e-06, "loss": 0.4234, "step": 3010},
    {"epoch": 1.08, "grad_norm": 0.8117393851280212, "learning_rate": 7.050505050505051e-06, "loss": 0.4676, "step": 3020},
    {"epoch": 1.09, "grad_norm": 0.967890739440918, "learning_rate": 7.0404040404040404e-06, "loss": 0.4497, "step": 3030},
    {"epoch": 1.09, "grad_norm": 1.0548313856124878, "learning_rate": 7.030303030303031e-06, "loss": 0.4429, "step": 3040},
    {"epoch": 1.09, "grad_norm": 1.071303367614746, "learning_rate": 7.02020202020202e-06, "loss": 0.4251, "step": 3050},
    {"epoch": 1.1, "grad_norm": 1.1772490739822388, "learning_rate": 7.0101010101010105e-06, "loss": 0.4897, "step": 3060},
    {"epoch": 1.1, "grad_norm": 1.3903322219848633, "learning_rate": 7e-06, "loss": 0.4953, "step": 3070},
    {"epoch": 1.1, "grad_norm": 0.8463280200958252, "learning_rate": 6.98989898989899e-06, "loss": 0.3923, "step": 3080},
    {"epoch": 1.11, "grad_norm": 1.146888256072998, "learning_rate": 6.979797979797981e-06, "loss": 0.4723, "step": 3090},
    {"epoch": 1.11, "grad_norm": 1.0877312421798706, "learning_rate": 6.969696969696971e-06, "loss": 0.4822, "step": 3100},
    {"epoch": 1.12, "grad_norm": 0.7444977164268494, "learning_rate": 6.95959595959596e-06, "loss": 0.4591, "step": 3110},
    {"epoch": 1.12, "grad_norm": 0.7259607911109924, "learning_rate": 6.9494949494949505e-06, "loss": 0.4564, "step": 3120},
    {"epoch": 1.12, "grad_norm": 0.9535048604011536, "learning_rate": 6.93939393939394e-06, "loss": 0.4402, "step": 3130},
    {"epoch": 1.13, "grad_norm": 0.9428088068962097, "learning_rate": 6.92929292929293e-06, "loss": 0.4039, "step": 3140},
    {"epoch": 1.13, "grad_norm": 0.9858347177505493, "learning_rate": 6.91919191919192e-06, "loss": 0.4334, "step": 3150},
    {"epoch": 1.13, "grad_norm": 1.0837122201919556, "learning_rate": 6.90909090909091e-06, "loss": 0.3957, "step": 3160},
    {"epoch": 1.14, "grad_norm": 1.0605705976486206, "learning_rate": 6.898989898989899e-06, "loss": 0.4195, "step": 3170},
    {"epoch": 1.14, "grad_norm": 1.0856525897979736, "learning_rate": 6.88888888888889e-06, "loss": 0.4286, "step": 3180},
    {"epoch": 1.14, "grad_norm": 1.02509605884552, "learning_rate": 6.878787878787879e-06, "loss": 0.4413, "step": 3190},
    {"epoch": 1.15, "grad_norm": 0.8847616314888, "learning_rate": 6.868686868686869e-06, "loss": 0.4117, "step": 3200},
    {"epoch": 1.15, "grad_norm": 1.1464060544967651, "learning_rate": 6.858585858585859e-06, "loss": 0.44, "step": 3210},
    {"epoch": 1.15, "grad_norm": 0.9589990973472595, "learning_rate": 6.848484848484849e-06, "loss": 0.4896, "step": 3220},
    {"epoch": 1.16, "grad_norm": 1.080541968345642, "learning_rate": 6.8383838383838386e-06, "loss": 0.4203, "step": 3230},
    {"epoch": 1.16, "grad_norm": 1.3150702714920044, "learning_rate": 6.828282828282828e-06, "loss": 0.4213, "step": 3240},
    {"epoch": 1.17, "grad_norm": 1.0854105949401855, "learning_rate": 6.818181818181818e-06, "loss": 0.4538, "step": 3250},
    {"epoch": 1.17, "grad_norm": 0.7211188673973083, "learning_rate": 6.808080808080809e-06, "loss": 0.4332, "step": 3260},
    {"epoch": 1.17, "grad_norm": 1.0323004722595215, "learning_rate": 6.797979797979799e-06, "loss": 0.465, "step": 3270},
    {"epoch": 1.18, "grad_norm": 0.9176574349403381, "learning_rate": 6.787878787878789e-06, "loss": 0.4898, "step": 3280},
    {"epoch": 1.18, "grad_norm": 1.2033518552780151, "learning_rate": 6.777777777777779e-06, "loss": 0.4652, "step": 3290},
    {"epoch": 1.18, "grad_norm": 0.6069207191467285, "learning_rate": 6.767676767676769e-06, "loss": 0.4663, "step": 3300},
    {"epoch": 1.19, "grad_norm": 0.9627435803413391, "learning_rate": 6.757575757575758e-06, "loss": 0.4992, "step": 3310},
    {"epoch": 1.19, "grad_norm": 0.7333098649978638, "learning_rate": 6.747474747474749e-06, "loss": 0.4876, "step": 3320},
    {"epoch": 1.19, "grad_norm": 0.6735036969184875, "learning_rate": 6.737373737373738e-06, "loss": 0.4797, "step": 3330},
    {"epoch": 1.2, "grad_norm": 0.8416649699211121, "learning_rate": 6.7272727272727275e-06, "loss": 0.3889, "step": 3340},
    {"epoch": 1.2, "grad_norm": 1.227149486541748, "learning_rate": 6.717171717171718e-06, "loss": 0.4677, "step": 3350},
    {"epoch": 1.2, "grad_norm": 1.5126155614852905, "learning_rate": 6.707070707070707e-06, "loss": 0.4444, "step": 3360},
    {"epoch": 1.21, "grad_norm": 1.1622529029846191, "learning_rate": 6.6969696969696975e-06, "loss": 0.4372, "step": 3370},
    {"epoch": 1.21, "grad_norm": 0.975615918636322, "learning_rate": 6.686868686868687e-06, "loss": 0.4037, "step": 3380},
    {"epoch": 1.22, "grad_norm": 0.8992089033126831, "learning_rate": 6.676767676767677e-06, "loss": 0.3785, "step": 3390},
    {"epoch": 1.22, "grad_norm": 1.4439970254898071, "learning_rate": 6.666666666666667e-06, "loss": 0.5169, "step": 3400},
    {"epoch": 1.22, "grad_norm": 0.9601142406463623, "learning_rate": 6.656565656565657e-06, "loss": 0.4396, "step": 3410},
    {"epoch": 1.23, "grad_norm": 1.0457966327667236, "learning_rate": 6.646464646464646e-06, "loss": 0.4065, "step": 3420},
    {"epoch": 1.23, "grad_norm": 1.0014325380325317, "learning_rate": 6.6363636363636375e-06, "loss": 0.4616, "step": 3430},
    {"epoch": 1.23, "grad_norm": 1.3321492671966553, "learning_rate": 6.626262626262627e-06, "loss": 0.4164, "step": 3440},
    {"epoch": 1.24, "grad_norm": 0.9500308036804199, "learning_rate": 6.616161616161617e-06, "loss": 0.4156, "step": 3450},
    {"epoch": 1.24, "grad_norm": 1.1907135248184204, "learning_rate": 6.606060606060607e-06, "loss": 0.4353, "step": 3460},
    {"epoch": 1.24, "grad_norm": 1.7826789617538452, "learning_rate": 6.595959595959597e-06, "loss": 0.4764, "step": 3470},
    {"epoch": 1.25, "grad_norm": 1.0160069465637207, "learning_rate": 6.585858585858586e-06, "loss": 0.4594, "step": 3480},
    {"epoch": 1.25, "grad_norm": 1.0795533657073975, "learning_rate": 6.575757575757577e-06, "loss": 0.4557, "step": 3490},
    {"epoch": 1.26, "grad_norm": 1.2484716176986694, "learning_rate": 6.565656565656566e-06, "loss": 0.5057, "step": 3500},
    {"epoch": 1.26, "grad_norm": 1.6144826412200928, "learning_rate": 6.555555555555556e-06, "loss": 0.4554, "step": 3510},
    {"epoch": 1.26, "grad_norm": 1.3575657606124878, "learning_rate": 6.545454545454546e-06, "loss": 0.3373, "step": 3520},
    {"epoch": 1.27, "grad_norm": 0.9021044373512268, "learning_rate": 6.535353535353536e-06, "loss": 0.4765, "step": 3530},
    {"epoch": 1.27, "grad_norm": 1.0576077699661255, "learning_rate": 6.525252525252526e-06, "loss": 0.4344, "step": 3540},
    {"epoch": 1.27, "grad_norm": 1.0687928199768066, "learning_rate": 6.515151515151516e-06, "loss": 0.5022, "step": 3550},
    {"epoch": 1.28, "grad_norm": 0.9124734401702881, "learning_rate": 6.505050505050505e-06, "loss": 0.4133, "step": 3560},
    {"epoch": 1.28, "grad_norm": 0.8715507984161377, "learning_rate": 6.494949494949495e-06, "loss": 0.454, "step": 3570},
    {"epoch": 1.28, "grad_norm": 1.0889540910720825, "learning_rate": 6.484848484848485e-06, "loss": 0.443, "step": 3580},
    {"epoch": 1.29, "grad_norm": 0.9323639273643494, "learning_rate": 6.4747474747474745e-06, "loss": 0.4983, "step": 3590},
    {"epoch": 1.29, "grad_norm": 0.6946113109588623, "learning_rate": 6.464646464646466e-06, "loss": 0.4364, "step": 3600},
    {"epoch": 1.29, "grad_norm": 1.1879271268844604, "learning_rate": 6.454545454545456e-06, "loss": 0.4816, "step": 3610},
    {"epoch": 1.3, "grad_norm": 1.337172031402588, "learning_rate": 6.444444444444445e-06, "loss": 0.4963, "step": 3620},
    {"epoch": 1.3, "grad_norm": 0.9384005069732666, "learning_rate": 6.434343434343436e-06, "loss": 0.5185, "step": 3630},
    {"epoch": 1.31, "grad_norm": 1.2666473388671875, "learning_rate": 6.424242424242425e-06, "loss": 0.389, "step": 3640},
    {"epoch": 1.31, "grad_norm": 1.037668228149414, "learning_rate": 6.4141414141414145e-06, "loss": 0.4529, "step": 3650},
    {"epoch": 1.31, "grad_norm": 1.4652140140533447, "learning_rate": 6.404040404040405e-06, "loss": 0.3915, "step": 3660},
    {"epoch": 1.32, "grad_norm": 1.1823006868362427, "learning_rate": 6.393939393939394e-06, "loss": 0.5123, "step": 3670},
    {"epoch": 1.32, "grad_norm": 1.0706424713134766, "learning_rate": 6.3838383838383845e-06, "loss": 0.4989, "step": 3680},
    {"epoch": 1.32, "grad_norm": 0.9993262887001038, "learning_rate": 6.373737373737374e-06, "loss": 0.4268, "step": 3690},
    {"epoch": 1.33, "grad_norm": 0.9947906732559204, "learning_rate": 6.363636363636364e-06, "loss": 0.4246, "step": 3700},
    {"epoch": 1.33, "grad_norm": 0.8563311696052551, "learning_rate": 6.353535353535354e-06, "loss": 0.4439, "step": 3710},
    {"epoch": 1.33, "grad_norm": 1.2519429922103882, "learning_rate": 6.343434343434344e-06, "loss": 0.3973, "step": 3720},
    {"epoch": 1.34, "grad_norm": 1.2925148010253906, "learning_rate": 6.333333333333333e-06, "loss": 0.4179, "step": 3730},
    {"epoch": 1.34, "grad_norm": 1.1731756925582886, "learning_rate": 6.323232323232324e-06, "loss": 0.4748, "step": 3740},
    {"epoch": 1.34, "grad_norm": 1.030536413192749, "learning_rate": 6.313131313131313e-06, "loss": 0.4131, "step": 3750},
    {"epoch": 1.35, "grad_norm": 1.1279093027114868, "learning_rate": 6.303030303030303e-06, "loss": 0.4143, "step": 3760},
    {"epoch": 1.35, "grad_norm": 1.1602801084518433, "learning_rate": 6.292929292929294e-06, "loss": 0.4718, "step": 3770},
    {"epoch": 1.36, "grad_norm": 1.5314010381698608, "learning_rate": 6.282828282828284e-06, "loss": 0.5463, "step": 3780},
    {"epoch": 1.36, "grad_norm": 1.6124476194381714, "learning_rate": 6.2727272727272734e-06, "loss": 0.4857, "step": 3790},
    {"epoch": 1.36, "grad_norm": 1.185355305671692, "learning_rate": 6.262626262626264e-06, "loss": 0.4167, "step": 3800},
    {"epoch": 1.37, "grad_norm": 1.1900933980941772, "learning_rate": 6.252525252525253e-06, "loss": 0.4658, "step": 3810},
    {"epoch": 1.37, "grad_norm": 1.2930880784988403, "learning_rate": 6.2424242424242434e-06, "loss": 0.4912, "step": 3820},
    {"epoch": 1.37, "grad_norm": 1.1819838285446167, "learning_rate": 6.232323232323233e-06, "loss": 0.4129, "step": 3830},
    {"epoch": 1.38, "grad_norm": 1.315292477607727, "learning_rate": 6.222222222222223e-06, "loss": 0.477, "step": 3840},
    {"epoch": 1.38, "grad_norm": 1.007524013519287, "learning_rate": 6.212121212121213e-06, "loss": 0.414, "step": 3850},
    {"epoch": 1.38, "grad_norm": 1.1948471069335938, "learning_rate": 6.202020202020203e-06, "loss": 0.4576, "step": 3860},
    {"epoch": 1.39, "grad_norm": 1.3339303731918335, "learning_rate": 6.191919191919192e-06, "loss": 0.5547, "step": 3870},
    {"epoch": 1.39, "grad_norm": 1.2568715810775757, "learning_rate": 6.181818181818182e-06, "loss": 0.3941, "step": 3880},
    {"epoch": 1.4, "grad_norm": 1.2084232568740845, "learning_rate": 6.171717171717172e-06, "loss": 0.462, "step": 3890},
    {"epoch": 1.4, "grad_norm": 1.0328818559646606, "learning_rate": 6.1616161616161615e-06, "loss": 0.4484, "step": 3900},
    {"epoch": 1.4, "grad_norm": 1.9673748016357422, "learning_rate": 6.151515151515152e-06, "loss": 0.4543, "step": 3910},
    {"epoch": 1.41, "grad_norm": 1.4105424880981445, "learning_rate": 6.141414141414141e-06, "loss": 0.4964, "step": 3920},
    {"epoch": 1.41, "grad_norm": 0.7082942128181458, "learning_rate": 6.1313131313131315e-06, "loss": 0.5297, "step": 3930},
    {"epoch": 1.41, "grad_norm": 1.2604327201843262, "learning_rate": 6.121212121212121e-06, "loss": 0.4207, "step": 3940},
    {"epoch": 1.42, "grad_norm": 0.999332070350647, "learning_rate": 6.111111111111112e-06, "loss": 0.4791, "step": 3950},
    {"epoch": 1.42, "grad_norm": 1.139615535736084, "learning_rate": 6.1010101010101015e-06, "loss": 0.4281, "step": 3960},
    {"epoch": 1.42, "grad_norm": 1.1164727210998535, "learning_rate": 6.090909090909092e-06, "loss": 0.3807, "step": 3970},
    {"epoch": 1.43, "grad_norm": 1.0829392671585083, "learning_rate": 6.080808080808081e-06, "loss": 0.4996, "step": 3980},
    {"epoch": 1.43, "grad_norm": 0.8551573753356934, "learning_rate": 6.0707070707070715e-06, "loss": 0.4759, "step": 3990},
    {"epoch": 1.43, "grad_norm": 1.224134087562561, "learning_rate": 6.060606060606061e-06, "loss": 0.4244, "step": 4000},
    {"epoch": 1.43, "eval_loss": 0.5992664694786072, "eval_runtime": 334.8169, "eval_samples_per_second": 2.987, "eval_steps_per_second": 2.987, "step": 4000}
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 2000,
  "total_flos": 3.26411004936192e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}