|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.2, |
|
"eval_steps": 2000, |
|
"global_step": 4000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 3.9070186614990234, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.0857, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 1.667642593383789, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.6733, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 3.0051660537719727, |
|
"learning_rate": 3e-06, |
|
"loss": 0.6534, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 1.0736082792282104, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.5221, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 4.684818744659424, |
|
"learning_rate": 5e-06, |
|
"loss": 0.6807, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 2.5355663299560547, |
|
"learning_rate": 6e-06, |
|
"loss": 0.4381, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 2.204728126525879, |
|
"learning_rate": 7e-06, |
|
"loss": 0.4578, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 1.8668341636657715, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.362, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 1.7530344724655151, |
|
"learning_rate": 9e-06, |
|
"loss": 0.6272, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.6746323108673096, |
|
"learning_rate": 1e-05, |
|
"loss": 0.5589, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.4799275398254395, |
|
"learning_rate": 9.989898989898991e-06, |
|
"loss": 0.6067, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.678624153137207, |
|
"learning_rate": 9.97979797979798e-06, |
|
"loss": 0.5582, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.9827778339385986, |
|
"learning_rate": 9.96969696969697e-06, |
|
"loss": 0.4943, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.8628324270248413, |
|
"learning_rate": 9.95959595959596e-06, |
|
"loss": 0.6706, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.0590109825134277, |
|
"learning_rate": 9.94949494949495e-06, |
|
"loss": 0.4861, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.6400913596153259, |
|
"learning_rate": 9.939393939393939e-06, |
|
"loss": 0.6373, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.803528070449829, |
|
"learning_rate": 9.92929292929293e-06, |
|
"loss": 0.6073, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.7043955326080322, |
|
"learning_rate": 9.91919191919192e-06, |
|
"loss": 0.6351, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.0071933269500732, |
|
"learning_rate": 9.90909090909091e-06, |
|
"loss": 0.6624, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.8767812252044678, |
|
"learning_rate": 9.8989898989899e-06, |
|
"loss": 0.5892, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.037977695465088, |
|
"learning_rate": 9.88888888888889e-06, |
|
"loss": 0.5099, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.31269109249115, |
|
"learning_rate": 9.87878787878788e-06, |
|
"loss": 0.5048, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.604134559631348, |
|
"learning_rate": 9.86868686868687e-06, |
|
"loss": 0.7424, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.378148078918457, |
|
"learning_rate": 9.85858585858586e-06, |
|
"loss": 0.6627, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.8570343852043152, |
|
"learning_rate": 9.84848484848485e-06, |
|
"loss": 0.5931, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.617653489112854, |
|
"learning_rate": 9.838383838383839e-06, |
|
"loss": 0.4749, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.7800276279449463, |
|
"learning_rate": 9.828282828282829e-06, |
|
"loss": 0.8069, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.901750564575195, |
|
"learning_rate": 9.81818181818182e-06, |
|
"loss": 0.4457, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.434395790100098, |
|
"learning_rate": 9.80808080808081e-06, |
|
"loss": 0.7167, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.92600154876709, |
|
"learning_rate": 9.797979797979798e-06, |
|
"loss": 0.3975, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.07546558231115341, |
|
"learning_rate": 9.787878787878788e-06, |
|
"loss": 0.3655, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.365915775299072, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 0.5224, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.0982255935668945, |
|
"learning_rate": 9.767676767676767e-06, |
|
"loss": 0.6165, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.7056803703308105, |
|
"learning_rate": 9.757575757575758e-06, |
|
"loss": 0.5539, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.2261226177215576, |
|
"learning_rate": 9.747474747474748e-06, |
|
"loss": 0.4769, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.9531151056289673, |
|
"learning_rate": 9.737373737373738e-06, |
|
"loss": 0.3609, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.2107353210449219, |
|
"learning_rate": 9.727272727272728e-06, |
|
"loss": 0.5399, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.582059860229492, |
|
"learning_rate": 9.717171717171719e-06, |
|
"loss": 0.6101, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.4467086791992188, |
|
"learning_rate": 9.707070707070709e-06, |
|
"loss": 0.5436, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.19772769510746002, |
|
"learning_rate": 9.696969696969698e-06, |
|
"loss": 0.6105, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.347663164138794, |
|
"learning_rate": 9.686868686868688e-06, |
|
"loss": 0.4353, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.073924541473389, |
|
"learning_rate": 9.676767676767678e-06, |
|
"loss": 0.6866, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.099503517150879, |
|
"learning_rate": 9.666666666666667e-06, |
|
"loss": 0.5744, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.2135908603668213, |
|
"learning_rate": 9.656565656565657e-06, |
|
"loss": 0.5677, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.5008598566055298, |
|
"learning_rate": 9.646464646464647e-06, |
|
"loss": 0.3873, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.6704565286636353, |
|
"learning_rate": 9.636363636363638e-06, |
|
"loss": 0.6814, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.4024767875671387, |
|
"learning_rate": 9.626262626262626e-06, |
|
"loss": 0.5843, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.00467598857358098, |
|
"learning_rate": 9.616161616161616e-06, |
|
"loss": 0.4113, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.787539958953857, |
|
"learning_rate": 9.606060606060607e-06, |
|
"loss": 0.7154, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.0733729600906372, |
|
"learning_rate": 9.595959595959597e-06, |
|
"loss": 0.6492, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.1921634674072266, |
|
"learning_rate": 9.585858585858586e-06, |
|
"loss": 0.5533, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.963071584701538, |
|
"learning_rate": 9.575757575757576e-06, |
|
"loss": 0.6422, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.266462326049805, |
|
"learning_rate": 9.565656565656566e-06, |
|
"loss": 0.4266, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.4488701820373535, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 0.4274, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.9400691986083984, |
|
"learning_rate": 9.545454545454547e-06, |
|
"loss": 0.5337, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.224618434906006, |
|
"learning_rate": 9.535353535353537e-06, |
|
"loss": 0.5675, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.3731207847595215, |
|
"learning_rate": 9.525252525252526e-06, |
|
"loss": 0.362, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.9596872329711914, |
|
"learning_rate": 9.515151515151516e-06, |
|
"loss": 0.5264, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.6627348065376282, |
|
"learning_rate": 9.505050505050506e-06, |
|
"loss": 0.435, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 7.430358409881592, |
|
"learning_rate": 9.494949494949497e-06, |
|
"loss": 0.5424, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.306655406951904, |
|
"learning_rate": 9.484848484848485e-06, |
|
"loss": 0.4153, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.6357769966125488, |
|
"learning_rate": 9.474747474747475e-06, |
|
"loss": 0.5251, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 7.038285732269287, |
|
"learning_rate": 9.464646464646466e-06, |
|
"loss": 0.5593, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.054030895233154, |
|
"learning_rate": 9.454545454545456e-06, |
|
"loss": 0.582, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.8453586101531982, |
|
"learning_rate": 9.444444444444445e-06, |
|
"loss": 0.5627, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.760792851448059, |
|
"learning_rate": 9.434343434343435e-06, |
|
"loss": 0.4531, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.7284159660339355, |
|
"learning_rate": 9.424242424242425e-06, |
|
"loss": 0.4397, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.4066224694252014, |
|
"learning_rate": 9.414141414141414e-06, |
|
"loss": 0.4056, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.083108425140381, |
|
"learning_rate": 9.404040404040404e-06, |
|
"loss": 0.6092, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.98835563659668, |
|
"learning_rate": 9.393939393939396e-06, |
|
"loss": 0.4545, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.849491119384766, |
|
"learning_rate": 9.383838383838385e-06, |
|
"loss": 0.4038, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.9796783924102783, |
|
"learning_rate": 9.373737373737375e-06, |
|
"loss": 0.6097, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.3015940189361572, |
|
"learning_rate": 9.363636363636365e-06, |
|
"loss": 0.4561, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.0265772342681885, |
|
"learning_rate": 9.353535353535354e-06, |
|
"loss": 0.5972, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.158243656158447, |
|
"learning_rate": 9.343434343434344e-06, |
|
"loss": 0.4375, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.6322413682937622, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.4394, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.8402198553085327, |
|
"learning_rate": 9.323232323232325e-06, |
|
"loss": 0.4942, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.9348700046539307, |
|
"learning_rate": 9.313131313131313e-06, |
|
"loss": 0.6713, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 7.633822917938232, |
|
"learning_rate": 9.303030303030303e-06, |
|
"loss": 0.5727, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.5603723526000977, |
|
"learning_rate": 9.292929292929294e-06, |
|
"loss": 0.6393, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.2063639163970947, |
|
"learning_rate": 9.282828282828284e-06, |
|
"loss": 0.6278, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.9794161319732666, |
|
"learning_rate": 9.272727272727273e-06, |
|
"loss": 0.5332, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.441140055656433, |
|
"learning_rate": 9.262626262626263e-06, |
|
"loss": 0.5951, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.256943941116333, |
|
"learning_rate": 9.252525252525253e-06, |
|
"loss": 0.3266, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.15118904411792755, |
|
"learning_rate": 9.242424242424244e-06, |
|
"loss": 0.5511, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.8574471473693848, |
|
"learning_rate": 9.232323232323232e-06, |
|
"loss": 0.8058, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.2425239086151123, |
|
"learning_rate": 9.222222222222224e-06, |
|
"loss": 0.5866, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.476180911064148, |
|
"learning_rate": 9.212121212121213e-06, |
|
"loss": 0.5133, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.133812665939331, |
|
"learning_rate": 9.202020202020203e-06, |
|
"loss": 0.3262, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.0041747093200684, |
|
"learning_rate": 9.191919191919193e-06, |
|
"loss": 0.6861, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.9870094060897827, |
|
"learning_rate": 9.181818181818184e-06, |
|
"loss": 0.4655, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.3034363985061646, |
|
"learning_rate": 9.171717171717172e-06, |
|
"loss": 0.6274, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.1342576742172241, |
|
"learning_rate": 9.161616161616162e-06, |
|
"loss": 0.404, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.735161781311035, |
|
"learning_rate": 9.151515151515153e-06, |
|
"loss": 0.4047, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.926325798034668, |
|
"learning_rate": 9.141414141414143e-06, |
|
"loss": 0.3813, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.1905412673950195, |
|
"learning_rate": 9.131313131313132e-06, |
|
"loss": 0.7336, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.6212941408157349, |
|
"learning_rate": 9.121212121212122e-06, |
|
"loss": 0.6218, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.1676979064941406, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 0.4894, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.414123773574829, |
|
"learning_rate": 9.1010101010101e-06, |
|
"loss": 0.505, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.8107662200927734, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.3938, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.8321704864501953, |
|
"learning_rate": 9.080808080808081e-06, |
|
"loss": 0.504, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.217884540557861, |
|
"learning_rate": 9.070707070707072e-06, |
|
"loss": 0.5721, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.3424525260925293, |
|
"learning_rate": 9.06060606060606e-06, |
|
"loss": 0.7381, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.54730749130249, |
|
"learning_rate": 9.050505050505052e-06, |
|
"loss": 0.4286, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.27701061964035034, |
|
"learning_rate": 9.040404040404042e-06, |
|
"loss": 0.4992, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.010037928819656372, |
|
"learning_rate": 9.030303030303031e-06, |
|
"loss": 0.3576, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.300331711769104, |
|
"learning_rate": 9.020202020202021e-06, |
|
"loss": 0.4664, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.9744607210159302, |
|
"learning_rate": 9.010101010101012e-06, |
|
"loss": 0.6967, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 11.244192123413086, |
|
"learning_rate": 9e-06, |
|
"loss": 0.5748, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.7657531499862671, |
|
"learning_rate": 8.98989898989899e-06, |
|
"loss": 0.5886, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.8190665245056152, |
|
"learning_rate": 8.97979797979798e-06, |
|
"loss": 0.4612, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.700965404510498, |
|
"learning_rate": 8.969696969696971e-06, |
|
"loss": 0.4551, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.6943060755729675, |
|
"learning_rate": 8.95959595959596e-06, |
|
"loss": 0.4099, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.1452864408493042, |
|
"learning_rate": 8.94949494949495e-06, |
|
"loss": 0.5128, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.24422715604305267, |
|
"learning_rate": 8.93939393939394e-06, |
|
"loss": 0.6912, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.50047492980957, |
|
"learning_rate": 8.92929292929293e-06, |
|
"loss": 0.4031, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.4065107107162476, |
|
"learning_rate": 8.919191919191919e-06, |
|
"loss": 0.4238, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.607456922531128, |
|
"learning_rate": 8.90909090909091e-06, |
|
"loss": 0.5757, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.915406584739685, |
|
"learning_rate": 8.8989898989899e-06, |
|
"loss": 0.5571, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.252658843994141, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 0.6749, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.4744722843170166, |
|
"learning_rate": 8.87878787878788e-06, |
|
"loss": 0.6021, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.5909355878829956, |
|
"learning_rate": 8.86868686868687e-06, |
|
"loss": 0.3381, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.02662992477417, |
|
"learning_rate": 8.85858585858586e-06, |
|
"loss": 0.5778, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.4688441753387451, |
|
"learning_rate": 8.84848484848485e-06, |
|
"loss": 0.512, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.9729368090629578, |
|
"learning_rate": 8.83838383838384e-06, |
|
"loss": 0.5566, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.964110255241394, |
|
"learning_rate": 8.82828282828283e-06, |
|
"loss": 0.3959, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.288043022155762, |
|
"learning_rate": 8.818181818181819e-06, |
|
"loss": 0.5016, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.6841915845870972, |
|
"learning_rate": 8.808080808080809e-06, |
|
"loss": 0.598, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.1980228424072266, |
|
"learning_rate": 8.7979797979798e-06, |
|
"loss": 0.5803, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8826773166656494, |
|
"learning_rate": 8.787878787878788e-06, |
|
"loss": 0.3286, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.622875690460205, |
|
"learning_rate": 8.777777777777778e-06, |
|
"loss": 0.4741, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.7105564475059509, |
|
"learning_rate": 8.767676767676768e-06, |
|
"loss": 0.515, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3432900607585907, |
|
"learning_rate": 8.757575757575759e-06, |
|
"loss": 0.7004, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.025210857391357, |
|
"learning_rate": 8.747474747474747e-06, |
|
"loss": 0.3985, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.137606382369995, |
|
"learning_rate": 8.737373737373738e-06, |
|
"loss": 0.4918, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.4660451114177704, |
|
"learning_rate": 8.727272727272728e-06, |
|
"loss": 0.6669, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.9560216069221497, |
|
"learning_rate": 8.717171717171718e-06, |
|
"loss": 0.3283, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.245961904525757, |
|
"learning_rate": 8.707070707070707e-06, |
|
"loss": 0.7393, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.01912454143166542, |
|
"learning_rate": 8.696969696969699e-06, |
|
"loss": 0.418, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.0197285413742065, |
|
"learning_rate": 8.686868686868687e-06, |
|
"loss": 0.492, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.5338358879089355, |
|
"learning_rate": 8.676767676767678e-06, |
|
"loss": 0.6951, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.198877334594727, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.3249, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8914008140563965, |
|
"learning_rate": 8.656565656565658e-06, |
|
"loss": 0.3945, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.6320083141326904, |
|
"learning_rate": 8.646464646464647e-06, |
|
"loss": 0.6922, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.5630173683166504, |
|
"learning_rate": 8.636363636363637e-06, |
|
"loss": 0.4658, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.9083809852600098, |
|
"learning_rate": 8.626262626262627e-06, |
|
"loss": 0.4208, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.0006579883047379553, |
|
"learning_rate": 8.616161616161618e-06, |
|
"loss": 0.4293, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.7547767162323, |
|
"learning_rate": 8.606060606060606e-06, |
|
"loss": 0.5838, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 6.998627662658691, |
|
"learning_rate": 8.595959595959596e-06, |
|
"loss": 0.6267, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.166638374328613, |
|
"learning_rate": 8.585858585858587e-06, |
|
"loss": 0.7075, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.313971519470215, |
|
"learning_rate": 8.575757575757575e-06, |
|
"loss": 0.6543, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.2389867305755615, |
|
"learning_rate": 8.565656565656566e-06, |
|
"loss": 0.5254, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.2064523696899414, |
|
"learning_rate": 8.555555555555556e-06, |
|
"loss": 0.4022, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.1788508892059326, |
|
"learning_rate": 8.545454545454546e-06, |
|
"loss": 0.4377, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.40915897488594055, |
|
"learning_rate": 8.535353535353535e-06, |
|
"loss": 0.5123, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.2502992153167725, |
|
"learning_rate": 8.525252525252527e-06, |
|
"loss": 0.5134, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 4.067376136779785, |
|
"learning_rate": 8.515151515151517e-06, |
|
"loss": 0.7065, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.364538669586182, |
|
"learning_rate": 8.505050505050506e-06, |
|
"loss": 0.5286, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.8198277950286865, |
|
"learning_rate": 8.494949494949496e-06, |
|
"loss": 0.7122, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.0054919719696045, |
|
"learning_rate": 8.484848484848486e-06, |
|
"loss": 0.6057, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.340848684310913, |
|
"learning_rate": 8.474747474747475e-06, |
|
"loss": 0.586, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.649854898452759, |
|
"learning_rate": 8.464646464646465e-06, |
|
"loss": 0.6599, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.0032835814636200666, |
|
"learning_rate": 8.454545454545455e-06, |
|
"loss": 0.5668, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.295167326927185, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 0.4962, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.4222893714904785, |
|
"learning_rate": 8.434343434343434e-06, |
|
"loss": 0.303, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.70741868019104, |
|
"learning_rate": 8.424242424242425e-06, |
|
"loss": 0.3325, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.6272850632667542, |
|
"learning_rate": 8.414141414141415e-06, |
|
"loss": 0.3552, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.39682525396347046, |
|
"learning_rate": 8.404040404040405e-06, |
|
"loss": 0.5188, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 4.233819484710693, |
|
"learning_rate": 8.393939393939394e-06, |
|
"loss": 0.4309, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.6901202201843262, |
|
"learning_rate": 8.383838383838384e-06, |
|
"loss": 0.4005, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 4.25327730178833, |
|
"learning_rate": 8.373737373737374e-06, |
|
"loss": 0.5327, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.563572883605957, |
|
"learning_rate": 8.363636363636365e-06, |
|
"loss": 0.5132, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.6525028944015503, |
|
"learning_rate": 8.353535353535355e-06, |
|
"loss": 0.3818, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.6655619144439697, |
|
"learning_rate": 8.343434343434345e-06, |
|
"loss": 0.4418, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.8698630332946777, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.6928, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.353645324707031, |
|
"learning_rate": 8.323232323232324e-06, |
|
"loss": 0.6257, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 6.349965572357178, |
|
"learning_rate": 8.313131313131314e-06, |
|
"loss": 0.9007, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.7333180904388428, |
|
"learning_rate": 8.303030303030305e-06, |
|
"loss": 0.4963, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.6360317468643188, |
|
"learning_rate": 8.292929292929293e-06, |
|
"loss": 0.9028, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3366623520851135, |
|
"learning_rate": 8.282828282828283e-06, |
|
"loss": 0.4281, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.0651333332061768, |
|
"learning_rate": 8.272727272727274e-06, |
|
"loss": 0.3646, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.6993482708930969, |
|
"learning_rate": 8.262626262626264e-06, |
|
"loss": 0.5423, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.010565996170044, |
|
"learning_rate": 8.252525252525253e-06, |
|
"loss": 0.7413, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 4.367119312286377, |
|
"learning_rate": 8.242424242424243e-06, |
|
"loss": 0.6385, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.1688125133514404, |
|
"learning_rate": 8.232323232323233e-06, |
|
"loss": 0.5547, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.5372488498687744, |
|
"learning_rate": 8.222222222222222e-06, |
|
"loss": 0.5464, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 7.86212682723999, |
|
"learning_rate": 8.212121212121212e-06, |
|
"loss": 0.414, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.5354101657867432, |
|
"learning_rate": 8.202020202020202e-06, |
|
"loss": 0.6147, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.8895275592803955, |
|
"learning_rate": 8.191919191919193e-06, |
|
"loss": 0.4875, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.0076402425765991, |
|
"learning_rate": 8.181818181818183e-06, |
|
"loss": 0.553, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.115602493286133, |
|
"learning_rate": 8.171717171717173e-06, |
|
"loss": 0.3642, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.1112468242645264, |
|
"learning_rate": 8.161616161616162e-06, |
|
"loss": 0.4393, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.099217891693115, |
|
"learning_rate": 8.151515151515152e-06, |
|
"loss": 0.3543, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.719365358352661, |
|
"learning_rate": 8.141414141414142e-06, |
|
"loss": 0.6467, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.9949913024902344, |
|
"learning_rate": 8.131313131313133e-06, |
|
"loss": 0.5151, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.5979423522949219, |
|
"learning_rate": 8.121212121212121e-06, |
|
"loss": 0.3655, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.0186359882354736, |
|
"learning_rate": 8.111111111111112e-06, |
|
"loss": 0.5278, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.232735633850098, |
|
"learning_rate": 8.101010101010102e-06, |
|
"loss": 0.5965, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.5541656017303467, |
|
"learning_rate": 8.090909090909092e-06, |
|
"loss": 0.5226, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.416280269622803, |
|
"learning_rate": 8.08080808080808e-06, |
|
"loss": 0.3881, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"eval_loss": 0.49726012349128723, |
|
"eval_runtime": 296.5886, |
|
"eval_samples_per_second": 3.372, |
|
"eval_steps_per_second": 3.372, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3124769926071167, |
|
"learning_rate": 8.070707070707071e-06, |
|
"loss": 0.6145, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.8283050060272217, |
|
"learning_rate": 8.060606060606061e-06, |
|
"loss": 0.5876, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.924971580505371, |
|
"learning_rate": 8.050505050505052e-06, |
|
"loss": 0.6135, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.105405569076538, |
|
"learning_rate": 8.04040404040404e-06, |
|
"loss": 0.3757, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.2510244846343994, |
|
"learning_rate": 8.03030303030303e-06, |
|
"loss": 0.6634, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.0584206581115723, |
|
"learning_rate": 8.02020202020202e-06, |
|
"loss": 0.464, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.1460766792297363, |
|
"learning_rate": 8.010101010101011e-06, |
|
"loss": 0.4943, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.760594367980957, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.4359, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.7750723361968994, |
|
"learning_rate": 7.989898989898992e-06, |
|
"loss": 0.3657, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.917431116104126, |
|
"learning_rate": 7.97979797979798e-06, |
|
"loss": 0.3915, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.772168755531311, |
|
"learning_rate": 7.96969696969697e-06, |
|
"loss": 0.8333, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.072344183921814, |
|
"learning_rate": 7.95959595959596e-06, |
|
"loss": 0.8133, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.3543601036071777, |
|
"learning_rate": 7.949494949494951e-06, |
|
"loss": 0.538, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.6881474256515503, |
|
"learning_rate": 7.93939393939394e-06, |
|
"loss": 0.3318, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.6667841672897339, |
|
"learning_rate": 7.92929292929293e-06, |
|
"loss": 0.6603, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.7068934440612793, |
|
"learning_rate": 7.91919191919192e-06, |
|
"loss": 0.3717, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.8397080898284912, |
|
"learning_rate": 7.909090909090909e-06, |
|
"loss": 0.5456, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.438650369644165, |
|
"learning_rate": 7.898989898989899e-06, |
|
"loss": 0.4993, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1788564920425415, |
|
"learning_rate": 7.88888888888889e-06, |
|
"loss": 0.5379, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.0841777324676514, |
|
"learning_rate": 7.87878787878788e-06, |
|
"loss": 0.4348, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.9438015222549438, |
|
"learning_rate": 7.868686868686868e-06, |
|
"loss": 0.4776, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.519500195980072, |
|
"learning_rate": 7.858585858585859e-06, |
|
"loss": 0.624, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.4791972637176514, |
|
"learning_rate": 7.848484848484849e-06, |
|
"loss": 0.491, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.15498948097229, |
|
"learning_rate": 7.838383838383839e-06, |
|
"loss": 0.3268, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.7391984462738037, |
|
"learning_rate": 7.82828282828283e-06, |
|
"loss": 0.695, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.0758000612258911, |
|
"learning_rate": 7.81818181818182e-06, |
|
"loss": 0.614, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.8113633394241333, |
|
"learning_rate": 7.808080808080808e-06, |
|
"loss": 0.5223, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.9470202326774597, |
|
"learning_rate": 7.797979797979799e-06, |
|
"loss": 0.741, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.6583170890808105, |
|
"learning_rate": 7.787878787878789e-06, |
|
"loss": 0.5571, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.9988512992858887, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 0.5672, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 8.29896068572998, |
|
"learning_rate": 7.767676767676768e-06, |
|
"loss": 0.4828, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.0121721029281616, |
|
"learning_rate": 7.757575757575758e-06, |
|
"loss": 0.588, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.5740251541137695, |
|
"learning_rate": 7.747474747474748e-06, |
|
"loss": 0.5385, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.059847593307495, |
|
"learning_rate": 7.737373737373739e-06, |
|
"loss": 0.7256, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 30.723230361938477, |
|
"learning_rate": 7.727272727272727e-06, |
|
"loss": 0.581, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.9344816207885742, |
|
"learning_rate": 7.717171717171717e-06, |
|
"loss": 0.5751, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.999989032745361, |
|
"learning_rate": 7.707070707070708e-06, |
|
"loss": 0.5531, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.1106081008911133, |
|
"learning_rate": 7.696969696969696e-06, |
|
"loss": 0.5414, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.0552499294281006, |
|
"learning_rate": 7.686868686868687e-06, |
|
"loss": 0.3083, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.31422069668769836, |
|
"learning_rate": 7.676767676767677e-06, |
|
"loss": 0.4043, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.465919852256775, |
|
"learning_rate": 7.666666666666667e-06, |
|
"loss": 0.3835, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.2063610553741455, |
|
"learning_rate": 7.656565656565658e-06, |
|
"loss": 0.5132, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.7438418865203857, |
|
"learning_rate": 7.646464646464648e-06, |
|
"loss": 0.7799, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.2593297958374023, |
|
"learning_rate": 7.636363636363638e-06, |
|
"loss": 0.4142, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.2809324264526367, |
|
"learning_rate": 7.6262626262626275e-06, |
|
"loss": 0.4862, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.056776762008667, |
|
"learning_rate": 7.616161616161617e-06, |
|
"loss": 0.5726, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.739293098449707, |
|
"learning_rate": 7.606060606060606e-06, |
|
"loss": 0.4945, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.824068069458008, |
|
"learning_rate": 7.595959595959597e-06, |
|
"loss": 0.6431, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.1176700592041016, |
|
"learning_rate": 7.585858585858586e-06, |
|
"loss": 0.4511, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.115619421005249, |
|
"learning_rate": 7.5757575757575764e-06, |
|
"loss": 0.3236, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.1324357986450195, |
|
"learning_rate": 7.565656565656566e-06, |
|
"loss": 0.488, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.205792427062988, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 0.6519, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.29966816306114197, |
|
"learning_rate": 7.545454545454546e-06, |
|
"loss": 0.2427, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.695274293422699, |
|
"learning_rate": 7.535353535353536e-06, |
|
"loss": 0.6302, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.0804121494293213, |
|
"learning_rate": 7.525252525252525e-06, |
|
"loss": 0.581, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.076641798019409, |
|
"learning_rate": 7.515151515151516e-06, |
|
"loss": 0.5715, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.8025639057159424, |
|
"learning_rate": 7.505050505050505e-06, |
|
"loss": 0.5451, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.8459610342979431, |
|
"learning_rate": 7.494949494949496e-06, |
|
"loss": 0.6437, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.0758402347564697, |
|
"learning_rate": 7.484848484848486e-06, |
|
"loss": 0.5077, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.3608747720718384, |
|
"learning_rate": 7.474747474747476e-06, |
|
"loss": 0.5145, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.9487907886505127, |
|
"learning_rate": 7.464646464646465e-06, |
|
"loss": 0.4173, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.6212667226791382, |
|
"learning_rate": 7.454545454545456e-06, |
|
"loss": 0.4199, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.783740282058716, |
|
"learning_rate": 7.444444444444445e-06, |
|
"loss": 0.5639, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.7923329472541809, |
|
"learning_rate": 7.434343434343435e-06, |
|
"loss": 0.5934, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.9525859355926514, |
|
"learning_rate": 7.424242424242425e-06, |
|
"loss": 0.3554, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.8090410232543945, |
|
"learning_rate": 7.414141414141415e-06, |
|
"loss": 0.472, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.31178975105285645, |
|
"learning_rate": 7.4040404040404045e-06, |
|
"loss": 0.4174, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.298391342163086, |
|
"learning_rate": 7.393939393939395e-06, |
|
"loss": 0.5147, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.6585010290145874, |
|
"learning_rate": 7.383838383838384e-06, |
|
"loss": 0.5512, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.1424102783203125, |
|
"learning_rate": 7.373737373737374e-06, |
|
"loss": 0.538, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.008727720007300377, |
|
"learning_rate": 7.363636363636364e-06, |
|
"loss": 0.3115, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3273710012435913, |
|
"learning_rate": 7.353535353535353e-06, |
|
"loss": 0.602, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.9286468029022217, |
|
"learning_rate": 7.343434343434344e-06, |
|
"loss": 0.5314, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.033441543579102, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.4682, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.1140573024749756, |
|
"learning_rate": 7.323232323232324e-06, |
|
"loss": 0.8108, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.7377740144729614, |
|
"learning_rate": 7.3131313131313146e-06, |
|
"loss": 0.6938, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.0510828495025635, |
|
"learning_rate": 7.303030303030304e-06, |
|
"loss": 0.3355, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.9355423450469971, |
|
"learning_rate": 7.2929292929292934e-06, |
|
"loss": 0.6595, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.6041629314422607, |
|
"learning_rate": 7.282828282828284e-06, |
|
"loss": 0.3718, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.9129393696784973, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 0.42, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.611159086227417, |
|
"learning_rate": 7.2626262626262635e-06, |
|
"loss": 0.5017, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.267125129699707, |
|
"learning_rate": 7.252525252525253e-06, |
|
"loss": 0.5167, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.4013969898223877, |
|
"learning_rate": 7.242424242424243e-06, |
|
"loss": 0.3934, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3873810768127441, |
|
"learning_rate": 7.232323232323233e-06, |
|
"loss": 0.5354, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.160951018333435, |
|
"learning_rate": 7.222222222222223e-06, |
|
"loss": 0.442, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.36929821968078613, |
|
"learning_rate": 7.212121212121212e-06, |
|
"loss": 0.5383, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.8486219644546509, |
|
"learning_rate": 7.202020202020203e-06, |
|
"loss": 0.6933, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 9.711508750915527, |
|
"learning_rate": 7.191919191919192e-06, |
|
"loss": 0.5174, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.458278179168701, |
|
"learning_rate": 7.181818181818182e-06, |
|
"loss": 0.4861, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.309101104736328, |
|
"learning_rate": 7.171717171717172e-06, |
|
"loss": 0.3574, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.2773056030273438, |
|
"learning_rate": 7.161616161616162e-06, |
|
"loss": 0.3333, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.31492137908935547, |
|
"learning_rate": 7.151515151515152e-06, |
|
"loss": 0.4338, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.8371341228485107, |
|
"learning_rate": 7.141414141414143e-06, |
|
"loss": 0.659, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.0017792941071093082, |
|
"learning_rate": 7.131313131313132e-06, |
|
"loss": 0.4461, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.9826254844665527, |
|
"learning_rate": 7.121212121212122e-06, |
|
"loss": 0.4224, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.41608259081840515, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 0.4182, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.108034610748291, |
|
"learning_rate": 7.101010101010102e-06, |
|
"loss": 0.4149, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.7766743898391724, |
|
"learning_rate": 7.0909090909090916e-06, |
|
"loss": 0.4758, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.912661075592041, |
|
"learning_rate": 7.080808080808082e-06, |
|
"loss": 0.3912, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.7931787967681885, |
|
"learning_rate": 7.070707070707071e-06, |
|
"loss": 0.5228, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.948633909225464, |
|
"learning_rate": 7.060606060606061e-06, |
|
"loss": 0.5097, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.4696679711341858, |
|
"learning_rate": 7.050505050505051e-06, |
|
"loss": 0.4057, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.7676633596420288, |
|
"learning_rate": 7.0404040404040404e-06, |
|
"loss": 0.6008, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.0048167705535889, |
|
"learning_rate": 7.030303030303031e-06, |
|
"loss": 0.6355, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 13.031144142150879, |
|
"learning_rate": 7.02020202020202e-06, |
|
"loss": 0.5433, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.508690357208252, |
|
"learning_rate": 7.0101010101010105e-06, |
|
"loss": 0.5252, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.6236469745635986, |
|
"learning_rate": 7e-06, |
|
"loss": 0.3748, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.831434726715088, |
|
"learning_rate": 6.98989898989899e-06, |
|
"loss": 0.6294, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.6510546207427979, |
|
"learning_rate": 6.979797979797981e-06, |
|
"loss": 0.5122, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.7243143916130066, |
|
"learning_rate": 6.969696969696971e-06, |
|
"loss": 0.4985, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.455175876617432, |
|
"learning_rate": 6.95959595959596e-06, |
|
"loss": 0.4532, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3478859663009644, |
|
"learning_rate": 6.9494949494949505e-06, |
|
"loss": 0.5427, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.694676637649536, |
|
"learning_rate": 6.93939393939394e-06, |
|
"loss": 0.542, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.880995750427246, |
|
"learning_rate": 6.92929292929293e-06, |
|
"loss": 0.5949, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.839517593383789, |
|
"learning_rate": 6.91919191919192e-06, |
|
"loss": 0.6437, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.4446146488189697, |
|
"learning_rate": 6.90909090909091e-06, |
|
"loss": 0.5319, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.8178725242614746, |
|
"learning_rate": 6.898989898989899e-06, |
|
"loss": 0.5109, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.1470345258712769, |
|
"learning_rate": 6.88888888888889e-06, |
|
"loss": 0.5791, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2457770109176636, |
|
"learning_rate": 6.878787878787879e-06, |
|
"loss": 0.3547, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.6244733333587646, |
|
"learning_rate": 6.868686868686869e-06, |
|
"loss": 0.6079, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.3486361503601074, |
|
"learning_rate": 6.858585858585859e-06, |
|
"loss": 0.5009, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 7.4471588134765625, |
|
"learning_rate": 6.848484848484849e-06, |
|
"loss": 0.7388, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.836559772491455, |
|
"learning_rate": 6.8383838383838386e-06, |
|
"loss": 0.5774, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.352264881134033, |
|
"learning_rate": 6.828282828282828e-06, |
|
"loss": 0.6538, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.12757644057273865, |
|
"learning_rate": 6.818181818181818e-06, |
|
"loss": 0.4133, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.509770154953003, |
|
"learning_rate": 6.808080808080809e-06, |
|
"loss": 0.5061, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.2330329418182373, |
|
"learning_rate": 6.797979797979799e-06, |
|
"loss": 0.5501, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.468204975128174, |
|
"learning_rate": 6.787878787878789e-06, |
|
"loss": 0.5382, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.865310788154602, |
|
"learning_rate": 6.777777777777779e-06, |
|
"loss": 0.5676, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.1490612030029297, |
|
"learning_rate": 6.767676767676769e-06, |
|
"loss": 0.6547, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.9897737503051758, |
|
"learning_rate": 6.757575757575758e-06, |
|
"loss": 0.6402, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.8899117112159729, |
|
"learning_rate": 6.747474747474749e-06, |
|
"loss": 0.4865, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.5953271985054016, |
|
"learning_rate": 6.737373737373738e-06, |
|
"loss": 0.5601, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.7100812196731567, |
|
"learning_rate": 6.7272727272727275e-06, |
|
"loss": 0.4554, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1394648551940918, |
|
"learning_rate": 6.717171717171718e-06, |
|
"loss": 0.4665, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.531031131744385, |
|
"learning_rate": 6.707070707070707e-06, |
|
"loss": 0.351, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.8314118981361389, |
|
"learning_rate": 6.6969696969696975e-06, |
|
"loss": 0.2993, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.985590934753418, |
|
"learning_rate": 6.686868686868687e-06, |
|
"loss": 0.5341, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.496417284011841, |
|
"learning_rate": 6.676767676767677e-06, |
|
"loss": 0.3507, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.9464170932769775, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.4813, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.9352614283561707, |
|
"learning_rate": 6.656565656565657e-06, |
|
"loss": 0.7408, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.2820653915405273, |
|
"learning_rate": 6.646464646464646e-06, |
|
"loss": 0.5518, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.6449127793312073, |
|
"learning_rate": 6.6363636363636375e-06, |
|
"loss": 0.3831, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.303825855255127, |
|
"learning_rate": 6.626262626262627e-06, |
|
"loss": 0.3408, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.5336707830429077, |
|
"learning_rate": 6.616161616161617e-06, |
|
"loss": 0.4224, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.874389171600342, |
|
"learning_rate": 6.606060606060607e-06, |
|
"loss": 0.4829, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.6338319778442383, |
|
"learning_rate": 6.595959595959597e-06, |
|
"loss": 0.3992, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.416031837463379, |
|
"learning_rate": 6.585858585858586e-06, |
|
"loss": 0.464, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.0006036623963154852, |
|
"learning_rate": 6.575757575757577e-06, |
|
"loss": 0.454, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.059628486633301, |
|
"learning_rate": 6.565656565656566e-06, |
|
"loss": 0.4677, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.7535749673843384, |
|
"learning_rate": 6.555555555555556e-06, |
|
"loss": 0.4647, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2723642587661743, |
|
"learning_rate": 6.545454545454546e-06, |
|
"loss": 0.4257, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.9197982549667358, |
|
"learning_rate": 6.535353535353536e-06, |
|
"loss": 0.4922, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.634425163269043, |
|
"learning_rate": 6.525252525252526e-06, |
|
"loss": 0.3445, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.2264757752418518, |
|
"learning_rate": 6.515151515151516e-06, |
|
"loss": 0.3621, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.509833574295044, |
|
"learning_rate": 6.505050505050505e-06, |
|
"loss": 0.4623, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 9.437053680419922, |
|
"learning_rate": 6.494949494949495e-06, |
|
"loss": 0.5641, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.30497670173645, |
|
"learning_rate": 6.484848484848485e-06, |
|
"loss": 0.3517, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.4982795715332031, |
|
"learning_rate": 6.4747474747474745e-06, |
|
"loss": 0.4624, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.8188090324401855, |
|
"learning_rate": 6.464646464646466e-06, |
|
"loss": 0.4145, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 8.528606414794922, |
|
"learning_rate": 6.454545454545456e-06, |
|
"loss": 0.5919, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2666741609573364, |
|
"learning_rate": 6.444444444444445e-06, |
|
"loss": 0.2912, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.027946710586548, |
|
"learning_rate": 6.434343434343436e-06, |
|
"loss": 0.4914, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.017730474472046, |
|
"learning_rate": 6.424242424242425e-06, |
|
"loss": 0.7763, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.3871991634368896, |
|
"learning_rate": 6.4141414141414145e-06, |
|
"loss": 0.4079, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.026352882385254, |
|
"learning_rate": 6.404040404040405e-06, |
|
"loss": 0.6339, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.719714403152466, |
|
"learning_rate": 6.393939393939394e-06, |
|
"loss": 0.5748, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.5980193614959717, |
|
"learning_rate": 6.3838383838383845e-06, |
|
"loss": 0.7281, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.5198047161102295, |
|
"learning_rate": 6.373737373737374e-06, |
|
"loss": 0.6011, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.3764171600341797, |
|
"learning_rate": 6.363636363636364e-06, |
|
"loss": 0.4418, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.8297829627990723, |
|
"learning_rate": 6.353535353535354e-06, |
|
"loss": 0.4101, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.657487392425537, |
|
"learning_rate": 6.343434343434344e-06, |
|
"loss": 0.6139, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.4731935262680054, |
|
"learning_rate": 6.333333333333333e-06, |
|
"loss": 0.4851, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.780787229537964, |
|
"learning_rate": 6.323232323232324e-06, |
|
"loss": 0.5278, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.002871747827157378, |
|
"learning_rate": 6.313131313131313e-06, |
|
"loss": 0.3632, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.415860652923584, |
|
"learning_rate": 6.303030303030303e-06, |
|
"loss": 0.5422, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.642449378967285, |
|
"learning_rate": 6.292929292929294e-06, |
|
"loss": 0.5575, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.6322145462036133, |
|
"learning_rate": 6.282828282828284e-06, |
|
"loss": 0.3176, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.659850597381592, |
|
"learning_rate": 6.2727272727272734e-06, |
|
"loss": 0.4444, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.632639169692993, |
|
"learning_rate": 6.262626262626264e-06, |
|
"loss": 0.4449, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.3175582885742188, |
|
"learning_rate": 6.252525252525253e-06, |
|
"loss": 0.4983, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.5408777594566345, |
|
"learning_rate": 6.2424242424242434e-06, |
|
"loss": 0.4653, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.46673011779785156, |
|
"learning_rate": 6.232323232323233e-06, |
|
"loss": 0.3618, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.012616774067282677, |
|
"learning_rate": 6.222222222222223e-06, |
|
"loss": 0.3363, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.871781826019287, |
|
"learning_rate": 6.212121212121213e-06, |
|
"loss": 0.5996, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.607459783554077, |
|
"learning_rate": 6.202020202020203e-06, |
|
"loss": 0.633, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.0456507205963135, |
|
"learning_rate": 6.191919191919192e-06, |
|
"loss": 0.4236, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.3647568225860596, |
|
"learning_rate": 6.181818181818182e-06, |
|
"loss": 0.6016, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.3835606575012207, |
|
"learning_rate": 6.171717171717172e-06, |
|
"loss": 0.4672, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.276355743408203, |
|
"learning_rate": 6.1616161616161615e-06, |
|
"loss": 0.3073, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.4117212295532227, |
|
"learning_rate": 6.151515151515152e-06, |
|
"loss": 0.4799, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.6188517212867737, |
|
"learning_rate": 6.141414141414141e-06, |
|
"loss": 0.4852, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.361952304840088, |
|
"learning_rate": 6.1313131313131315e-06, |
|
"loss": 0.4593, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.983630359172821, |
|
"learning_rate": 6.121212121212121e-06, |
|
"loss": 0.5209, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 3.2454302310943604, |
|
"learning_rate": 6.111111111111112e-06, |
|
"loss": 0.6493, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.920189619064331, |
|
"learning_rate": 6.1010101010101015e-06, |
|
"loss": 0.3506, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 2.168548107147217, |
|
"learning_rate": 6.090909090909092e-06, |
|
"loss": 0.4219, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.7518400549888611, |
|
"learning_rate": 6.080808080808081e-06, |
|
"loss": 0.2783, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.4468128681182861, |
|
"learning_rate": 6.0707070707070715e-06, |
|
"loss": 0.5845, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.7297825217247009, |
|
"learning_rate": 6.060606060606061e-06, |
|
"loss": 0.5356, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 0.46820691227912903, |
|
"eval_runtime": 298.8864, |
|
"eval_samples_per_second": 3.346, |
|
"eval_steps_per_second": 3.346, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 10000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 2000, |
|
"total_flos": 1.63205502468096e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |