diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,20081 @@ +{ + "best_metric": 0.9132990837097168, + "best_model_checkpoint": "data/output/20240925-142312_qwen2.5-32_full_v4-sft-1e-5/checkpoint-28000", + "epoch": 1.9475551227655283, + "eval_steps": 500, + "global_step": 28000, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0006955554009876886, + "grad_norm": 37.25965881347656, + "learning_rate": 2.3180343069077426e-08, + "loss": 3.6955, + "step": 10 + }, + { + "epoch": 0.0013911108019753773, + "grad_norm": 50.09966278076172, + "learning_rate": 4.636068613815485e-08, + "loss": 3.2073, + "step": 20 + }, + { + "epoch": 0.002086666202963066, + "grad_norm": 52.61014938354492, + "learning_rate": 6.954102920723227e-08, + "loss": 3.0566, + "step": 30 + }, + { + "epoch": 0.0027822216039507545, + "grad_norm": 62.67387008666992, + "learning_rate": 9.27213722763097e-08, + "loss": 3.1148, + "step": 40 + }, + { + "epoch": 0.0034777770049384434, + "grad_norm": 90.94354248046875, + "learning_rate": 1.1590171534538712e-07, + "loss": 2.9661, + "step": 50 + }, + { + "epoch": 0.004173332405926132, + "grad_norm": 106.8541259765625, + "learning_rate": 1.3908205841446455e-07, + "loss": 3.3307, + "step": 60 + }, + { + "epoch": 0.004868887806913821, + "grad_norm": 64.84474182128906, + "learning_rate": 1.6226240148354197e-07, + "loss": 2.9697, + "step": 70 + }, + { + "epoch": 0.005564443207901509, + "grad_norm": 31.05628776550293, + "learning_rate": 1.854427445526194e-07, + "loss": 2.4271, + "step": 80 + }, + { + "epoch": 0.006259998608889198, + "grad_norm": 95.02410888671875, + "learning_rate": 2.0862308762169682e-07, + "loss": 2.6777, + "step": 90 + }, + { + "epoch": 0.006955554009876887, + "grad_norm": 121.2574691772461, + "learning_rate": 2.3180343069077424e-07, + "loss": 2.4744, + "step": 100 + }, + { + "epoch": 0.0076511094108645756, + "grad_norm": 57.23112869262695, + "learning_rate": 2.5498377375985166e-07, + "loss": 2.0119, + "step": 110 + }, + { + "epoch": 0.008346664811852264, + "grad_norm": 20.427780151367188, + "learning_rate": 2.781641168289291e-07, + "loss": 1.9563, + "step": 120 + }, + { + "epoch": 0.009042220212839952, + "grad_norm": 13.36486530303955, + "learning_rate": 3.0134445989800654e-07, + "loss": 1.5658, + "step": 130 + }, + { + "epoch": 0.009737775613827642, + "grad_norm": 9.685415267944336, + "learning_rate": 3.2452480296708393e-07, + "loss": 1.5071, + "step": 140 + }, + { + "epoch": 0.01043333101481533, + "grad_norm": 20.197505950927734, + "learning_rate": 3.477051460361613e-07, + "loss": 1.454, + "step": 150 + }, + { + "epoch": 0.011128886415803018, + "grad_norm": 24.960918426513672, + "learning_rate": 3.708854891052388e-07, + "loss": 1.4725, + "step": 160 + }, + { + "epoch": 0.011824441816790708, + "grad_norm": 44.6523323059082, + "learning_rate": 3.940658321743162e-07, + "loss": 1.3871, + "step": 170 + }, + { + "epoch": 0.012519997217778396, + "grad_norm": 4.03730583190918, + "learning_rate": 4.1724617524339365e-07, + "loss": 1.2733, + "step": 180 + }, + { + "epoch": 0.013215552618766085, + "grad_norm": 4.858972549438477, + "learning_rate": 4.4042651831247104e-07, + "loss": 1.3416, + "step": 190 + }, + { + "epoch": 0.013911108019753773, + "grad_norm": 5.954676151275635, + "learning_rate": 4.636068613815485e-07, + "loss": 1.351, + "step": 200 + }, + { + "epoch": 0.014606663420741461, + "grad_norm": 18.572786331176758, + 
"learning_rate": 4.867872044506259e-07, + "loss": 1.3901, + "step": 210 + }, + { + "epoch": 0.015302218821729151, + "grad_norm": 22.28153419494629, + "learning_rate": 5.099675475197033e-07, + "loss": 1.4022, + "step": 220 + }, + { + "epoch": 0.01599777422271684, + "grad_norm": 3.5818583965301514, + "learning_rate": 5.331478905887808e-07, + "loss": 1.2544, + "step": 230 + }, + { + "epoch": 0.01669332962370453, + "grad_norm": 14.616150856018066, + "learning_rate": 5.563282336578582e-07, + "loss": 1.2663, + "step": 240 + }, + { + "epoch": 0.017388885024692217, + "grad_norm": 4.8885321617126465, + "learning_rate": 5.795085767269356e-07, + "loss": 1.2854, + "step": 250 + }, + { + "epoch": 0.018084440425679905, + "grad_norm": 3.7322280406951904, + "learning_rate": 6.026889197960131e-07, + "loss": 1.2205, + "step": 260 + }, + { + "epoch": 0.018779995826667593, + "grad_norm": 3.915174961090088, + "learning_rate": 6.258692628650904e-07, + "loss": 1.1702, + "step": 270 + }, + { + "epoch": 0.019475551227655284, + "grad_norm": 4.257216930389404, + "learning_rate": 6.490496059341679e-07, + "loss": 1.2754, + "step": 280 + }, + { + "epoch": 0.020171106628642972, + "grad_norm": 4.261681079864502, + "learning_rate": 6.722299490032454e-07, + "loss": 1.222, + "step": 290 + }, + { + "epoch": 0.02086666202963066, + "grad_norm": 4.942173957824707, + "learning_rate": 6.954102920723226e-07, + "loss": 1.2906, + "step": 300 + }, + { + "epoch": 0.021562217430618348, + "grad_norm": 3.5400586128234863, + "learning_rate": 7.185906351414002e-07, + "loss": 1.2357, + "step": 310 + }, + { + "epoch": 0.022257772831606036, + "grad_norm": 16.817251205444336, + "learning_rate": 7.417709782104776e-07, + "loss": 1.2667, + "step": 320 + }, + { + "epoch": 0.022953328232593728, + "grad_norm": 4.213096618652344, + "learning_rate": 7.64951321279555e-07, + "loss": 1.2873, + "step": 330 + }, + { + "epoch": 0.023648883633581416, + "grad_norm": 3.9034950733184814, + "learning_rate": 7.881316643486324e-07, + "loss": 1.1903, + "step": 340 + }, + { + "epoch": 0.024344439034569103, + "grad_norm": 3.171891927719116, + "learning_rate": 8.113120074177099e-07, + "loss": 1.2227, + "step": 350 + }, + { + "epoch": 0.02503999443555679, + "grad_norm": 3.737774610519409, + "learning_rate": 8.344923504867873e-07, + "loss": 1.1841, + "step": 360 + }, + { + "epoch": 0.02573554983654448, + "grad_norm": 9.648716926574707, + "learning_rate": 8.576726935558646e-07, + "loss": 1.234, + "step": 370 + }, + { + "epoch": 0.02643110523753217, + "grad_norm": 9.600088119506836, + "learning_rate": 8.808530366249421e-07, + "loss": 1.1583, + "step": 380 + }, + { + "epoch": 0.02712666063851986, + "grad_norm": 4.241340637207031, + "learning_rate": 9.040333796940195e-07, + "loss": 1.1403, + "step": 390 + }, + { + "epoch": 0.027822216039507547, + "grad_norm": 3.8889546394348145, + "learning_rate": 9.27213722763097e-07, + "loss": 1.1141, + "step": 400 + }, + { + "epoch": 0.028517771440495235, + "grad_norm": 4.157471179962158, + "learning_rate": 9.503940658321743e-07, + "loss": 1.1188, + "step": 410 + }, + { + "epoch": 0.029213326841482923, + "grad_norm": 5.980175495147705, + "learning_rate": 9.735744089012517e-07, + "loss": 1.0837, + "step": 420 + }, + { + "epoch": 0.029908882242470614, + "grad_norm": 3.4161574840545654, + "learning_rate": 9.967547519703292e-07, + "loss": 1.1313, + "step": 430 + }, + { + "epoch": 0.030604437643458302, + "grad_norm": 6.54382848739624, + "learning_rate": 1.0199350950394066e-06, + "loss": 1.1467, + "step": 440 + }, + { + "epoch": 
0.03129999304444599, + "grad_norm": 4.046510219573975, + "learning_rate": 1.043115438108484e-06, + "loss": 1.1802, + "step": 450 + }, + { + "epoch": 0.03199554844543368, + "grad_norm": 3.495988607406616, + "learning_rate": 1.0662957811775615e-06, + "loss": 1.175, + "step": 460 + }, + { + "epoch": 0.03269110384642137, + "grad_norm": 3.1931769847869873, + "learning_rate": 1.089476124246639e-06, + "loss": 1.155, + "step": 470 + }, + { + "epoch": 0.03338665924740906, + "grad_norm": 4.020750045776367, + "learning_rate": 1.1126564673157164e-06, + "loss": 1.1252, + "step": 480 + }, + { + "epoch": 0.034082214648396746, + "grad_norm": 2.9343626499176025, + "learning_rate": 1.1358368103847938e-06, + "loss": 1.0957, + "step": 490 + }, + { + "epoch": 0.034777770049384434, + "grad_norm": 4.813891887664795, + "learning_rate": 1.1590171534538713e-06, + "loss": 1.1257, + "step": 500 + }, + { + "epoch": 0.034777770049384434, + "eval_loss": 1.1430312395095825, + "eval_runtime": 4633.8932, + "eval_samples_per_second": 3.919, + "eval_steps_per_second": 0.653, + "step": 500 + }, + { + "epoch": 0.03547332545037212, + "grad_norm": 3.757990598678589, + "learning_rate": 1.1821974965229487e-06, + "loss": 1.1846, + "step": 510 + }, + { + "epoch": 0.03616888085135981, + "grad_norm": 2.993931293487549, + "learning_rate": 1.2053778395920262e-06, + "loss": 1.1671, + "step": 520 + }, + { + "epoch": 0.0368644362523475, + "grad_norm": 3.018545150756836, + "learning_rate": 1.2285581826611034e-06, + "loss": 1.135, + "step": 530 + }, + { + "epoch": 0.037559991653335185, + "grad_norm": 4.122855186462402, + "learning_rate": 1.2517385257301808e-06, + "loss": 1.1827, + "step": 540 + }, + { + "epoch": 0.03825554705432287, + "grad_norm": 8.622248649597168, + "learning_rate": 1.2749188687992583e-06, + "loss": 1.1128, + "step": 550 + }, + { + "epoch": 0.03895110245531057, + "grad_norm": 3.4022130966186523, + "learning_rate": 1.2980992118683357e-06, + "loss": 1.1323, + "step": 560 + }, + { + "epoch": 0.039646657856298256, + "grad_norm": 8.871124267578125, + "learning_rate": 1.3212795549374134e-06, + "loss": 1.1804, + "step": 570 + }, + { + "epoch": 0.040342213257285944, + "grad_norm": 2.904726266860962, + "learning_rate": 1.3444598980064908e-06, + "loss": 1.1114, + "step": 580 + }, + { + "epoch": 0.04103776865827363, + "grad_norm": 7.765448093414307, + "learning_rate": 1.3676402410755678e-06, + "loss": 1.1493, + "step": 590 + }, + { + "epoch": 0.04173332405926132, + "grad_norm": 4.239745140075684, + "learning_rate": 1.3908205841446453e-06, + "loss": 1.0772, + "step": 600 + }, + { + "epoch": 0.04242887946024901, + "grad_norm": 3.149268627166748, + "learning_rate": 1.414000927213723e-06, + "loss": 1.1213, + "step": 610 + }, + { + "epoch": 0.043124434861236696, + "grad_norm": 3.5800631046295166, + "learning_rate": 1.4371812702828004e-06, + "loss": 1.1333, + "step": 620 + }, + { + "epoch": 0.043819990262224384, + "grad_norm": 4.57244873046875, + "learning_rate": 1.4603616133518778e-06, + "loss": 1.1279, + "step": 630 + }, + { + "epoch": 0.04451554566321207, + "grad_norm": 4.348361492156982, + "learning_rate": 1.4835419564209553e-06, + "loss": 1.095, + "step": 640 + }, + { + "epoch": 0.04521110106419976, + "grad_norm": 3.516139507293701, + "learning_rate": 1.5067222994900327e-06, + "loss": 1.1031, + "step": 650 + }, + { + "epoch": 0.045906656465187455, + "grad_norm": 4.7980146408081055, + "learning_rate": 1.52990264255911e-06, + "loss": 1.1299, + "step": 660 + }, + { + "epoch": 0.04660221186617514, + "grad_norm": 3.4176089763641357, + 
"learning_rate": 1.5530829856281874e-06, + "loss": 1.0713, + "step": 670 + }, + { + "epoch": 0.04729776726716283, + "grad_norm": 10.940692901611328, + "learning_rate": 1.5762633286972648e-06, + "loss": 1.1688, + "step": 680 + }, + { + "epoch": 0.04799332266815052, + "grad_norm": 3.5923173427581787, + "learning_rate": 1.5994436717663423e-06, + "loss": 1.0371, + "step": 690 + }, + { + "epoch": 0.04868887806913821, + "grad_norm": 3.7712814807891846, + "learning_rate": 1.6226240148354197e-06, + "loss": 1.1259, + "step": 700 + }, + { + "epoch": 0.049384433470125895, + "grad_norm": 4.4271440505981445, + "learning_rate": 1.6458043579044972e-06, + "loss": 1.0877, + "step": 710 + }, + { + "epoch": 0.05007998887111358, + "grad_norm": 3.129453659057617, + "learning_rate": 1.6689847009735746e-06, + "loss": 1.0551, + "step": 720 + }, + { + "epoch": 0.05077554427210127, + "grad_norm": 3.8093466758728027, + "learning_rate": 1.692165044042652e-06, + "loss": 1.0843, + "step": 730 + }, + { + "epoch": 0.05147109967308896, + "grad_norm": 3.4803884029388428, + "learning_rate": 1.7153453871117293e-06, + "loss": 1.0714, + "step": 740 + }, + { + "epoch": 0.05216665507407665, + "grad_norm": 6.938583850860596, + "learning_rate": 1.7385257301808067e-06, + "loss": 1.0567, + "step": 750 + }, + { + "epoch": 0.05286221047506434, + "grad_norm": 4.000776767730713, + "learning_rate": 1.7617060732498842e-06, + "loss": 1.1272, + "step": 760 + }, + { + "epoch": 0.05355776587605203, + "grad_norm": 5.665772914886475, + "learning_rate": 1.7848864163189616e-06, + "loss": 1.0421, + "step": 770 + }, + { + "epoch": 0.05425332127703972, + "grad_norm": 9.340896606445312, + "learning_rate": 1.808066759388039e-06, + "loss": 1.0441, + "step": 780 + }, + { + "epoch": 0.054948876678027406, + "grad_norm": 3.839641809463501, + "learning_rate": 1.8312471024571165e-06, + "loss": 1.0873, + "step": 790 + }, + { + "epoch": 0.055644432079015094, + "grad_norm": 3.655421495437622, + "learning_rate": 1.854427445526194e-06, + "loss": 1.0365, + "step": 800 + }, + { + "epoch": 0.05633998748000278, + "grad_norm": 4.659374713897705, + "learning_rate": 1.8776077885952716e-06, + "loss": 1.1169, + "step": 810 + }, + { + "epoch": 0.05703554288099047, + "grad_norm": 3.916454315185547, + "learning_rate": 1.9007881316643486e-06, + "loss": 1.0816, + "step": 820 + }, + { + "epoch": 0.05773109828197816, + "grad_norm": 10.336819648742676, + "learning_rate": 1.9239684747334263e-06, + "loss": 1.0211, + "step": 830 + }, + { + "epoch": 0.058426653682965846, + "grad_norm": 5.523763179779053, + "learning_rate": 1.9471488178025035e-06, + "loss": 1.0357, + "step": 840 + }, + { + "epoch": 0.059122209083953534, + "grad_norm": 2.708738327026367, + "learning_rate": 1.970329160871581e-06, + "loss": 1.0726, + "step": 850 + }, + { + "epoch": 0.05981776448494123, + "grad_norm": 2.3255724906921387, + "learning_rate": 1.9935095039406584e-06, + "loss": 1.0501, + "step": 860 + }, + { + "epoch": 0.060513319885928916, + "grad_norm": 3.717278003692627, + "learning_rate": 2.016689847009736e-06, + "loss": 1.1719, + "step": 870 + }, + { + "epoch": 0.061208875286916604, + "grad_norm": 6.030879497528076, + "learning_rate": 2.0398701900788133e-06, + "loss": 1.0681, + "step": 880 + }, + { + "epoch": 0.06190443068790429, + "grad_norm": 5.228379249572754, + "learning_rate": 2.063050533147891e-06, + "loss": 1.1535, + "step": 890 + }, + { + "epoch": 0.06259998608889197, + "grad_norm": 4.2219462394714355, + "learning_rate": 2.086230876216968e-06, + "loss": 1.1331, + "step": 900 + }, + { + "epoch": 
0.06329554148987968, + "grad_norm": 3.0123608112335205, + "learning_rate": 2.1094112192860454e-06, + "loss": 1.1021, + "step": 910 + }, + { + "epoch": 0.06399109689086736, + "grad_norm": 3.275928020477295, + "learning_rate": 2.132591562355123e-06, + "loss": 1.1287, + "step": 920 + }, + { + "epoch": 0.06468665229185505, + "grad_norm": 4.635502338409424, + "learning_rate": 2.1557719054242003e-06, + "loss": 1.044, + "step": 930 + }, + { + "epoch": 0.06538220769284274, + "grad_norm": 9.401188850402832, + "learning_rate": 2.178952248493278e-06, + "loss": 1.0551, + "step": 940 + }, + { + "epoch": 0.06607776309383043, + "grad_norm": 3.470381736755371, + "learning_rate": 2.2021325915623556e-06, + "loss": 1.0403, + "step": 950 + }, + { + "epoch": 0.06677331849481812, + "grad_norm": 2.8811886310577393, + "learning_rate": 2.225312934631433e-06, + "loss": 1.0906, + "step": 960 + }, + { + "epoch": 0.0674688738958058, + "grad_norm": 3.554046392440796, + "learning_rate": 2.24849327770051e-06, + "loss": 1.0382, + "step": 970 + }, + { + "epoch": 0.06816442929679349, + "grad_norm": 3.099353075027466, + "learning_rate": 2.2716736207695877e-06, + "loss": 1.0731, + "step": 980 + }, + { + "epoch": 0.06885998469778118, + "grad_norm": 2.7674717903137207, + "learning_rate": 2.294853963838665e-06, + "loss": 1.0512, + "step": 990 + }, + { + "epoch": 0.06955554009876887, + "grad_norm": 16.4854793548584, + "learning_rate": 2.3180343069077426e-06, + "loss": 1.1369, + "step": 1000 + }, + { + "epoch": 0.06955554009876887, + "eval_loss": 1.0665637254714966, + "eval_runtime": 4606.3828, + "eval_samples_per_second": 3.943, + "eval_steps_per_second": 0.657, + "step": 1000 + }, + { + "epoch": 0.07025109549975656, + "grad_norm": 4.004472732543945, + "learning_rate": 2.34121464997682e-06, + "loss": 1.0495, + "step": 1010 + }, + { + "epoch": 0.07094665090074424, + "grad_norm": 3.4908339977264404, + "learning_rate": 2.3643949930458975e-06, + "loss": 1.0325, + "step": 1020 + }, + { + "epoch": 0.07164220630173193, + "grad_norm": 4.877111434936523, + "learning_rate": 2.3875753361149747e-06, + "loss": 1.028, + "step": 1030 + }, + { + "epoch": 0.07233776170271962, + "grad_norm": 2.5117993354797363, + "learning_rate": 2.4107556791840523e-06, + "loss": 1.0385, + "step": 1040 + }, + { + "epoch": 0.07303331710370731, + "grad_norm": 4.781423091888428, + "learning_rate": 2.4339360222531296e-06, + "loss": 1.0619, + "step": 1050 + }, + { + "epoch": 0.073728872504695, + "grad_norm": 3.109616279602051, + "learning_rate": 2.457116365322207e-06, + "loss": 1.0513, + "step": 1060 + }, + { + "epoch": 0.07442442790568268, + "grad_norm": 3.612562417984009, + "learning_rate": 2.4802967083912845e-06, + "loss": 1.0629, + "step": 1070 + }, + { + "epoch": 0.07511998330667037, + "grad_norm": 6.416637897491455, + "learning_rate": 2.5034770514603617e-06, + "loss": 1.1094, + "step": 1080 + }, + { + "epoch": 0.07581553870765806, + "grad_norm": 4.202667713165283, + "learning_rate": 2.5266573945294393e-06, + "loss": 1.1047, + "step": 1090 + }, + { + "epoch": 0.07651109410864575, + "grad_norm": 3.243105173110962, + "learning_rate": 2.5498377375985166e-06, + "loss": 1.0592, + "step": 1100 + }, + { + "epoch": 0.07720664950963345, + "grad_norm": 3.662684917449951, + "learning_rate": 2.5730180806675942e-06, + "loss": 1.1141, + "step": 1110 + }, + { + "epoch": 0.07790220491062114, + "grad_norm": 3.2705509662628174, + "learning_rate": 2.5961984237366715e-06, + "loss": 1.0491, + "step": 1120 + }, + { + "epoch": 0.07859776031160882, + "grad_norm": 3.8699951171875, + 
"learning_rate": 2.619378766805749e-06, + "loss": 1.0524, + "step": 1130 + }, + { + "epoch": 0.07929331571259651, + "grad_norm": 4.80465841293335, + "learning_rate": 2.6425591098748268e-06, + "loss": 0.9872, + "step": 1140 + }, + { + "epoch": 0.0799888711135842, + "grad_norm": 3.211935520172119, + "learning_rate": 2.665739452943904e-06, + "loss": 1.0726, + "step": 1150 + }, + { + "epoch": 0.08068442651457189, + "grad_norm": 7.7907795906066895, + "learning_rate": 2.6889197960129816e-06, + "loss": 1.0466, + "step": 1160 + }, + { + "epoch": 0.08137998191555958, + "grad_norm": 3.158726453781128, + "learning_rate": 2.7121001390820585e-06, + "loss": 1.0793, + "step": 1170 + }, + { + "epoch": 0.08207553731654726, + "grad_norm": 13.609711647033691, + "learning_rate": 2.7352804821511357e-06, + "loss": 1.0584, + "step": 1180 + }, + { + "epoch": 0.08277109271753495, + "grad_norm": 4.259270191192627, + "learning_rate": 2.7584608252202133e-06, + "loss": 1.0095, + "step": 1190 + }, + { + "epoch": 0.08346664811852264, + "grad_norm": 3.029995918273926, + "learning_rate": 2.7816411682892906e-06, + "loss": 1.037, + "step": 1200 + }, + { + "epoch": 0.08416220351951033, + "grad_norm": 7.515969276428223, + "learning_rate": 2.8048215113583682e-06, + "loss": 1.08, + "step": 1210 + }, + { + "epoch": 0.08485775892049802, + "grad_norm": 3.2901253700256348, + "learning_rate": 2.828001854427446e-06, + "loss": 1.0351, + "step": 1220 + }, + { + "epoch": 0.0855533143214857, + "grad_norm": 3.090965986251831, + "learning_rate": 2.851182197496523e-06, + "loss": 1.0725, + "step": 1230 + }, + { + "epoch": 0.08624886972247339, + "grad_norm": 4.105306148529053, + "learning_rate": 2.8743625405656008e-06, + "loss": 1.0388, + "step": 1240 + }, + { + "epoch": 0.08694442512346108, + "grad_norm": 2.9879133701324463, + "learning_rate": 2.897542883634678e-06, + "loss": 0.9842, + "step": 1250 + }, + { + "epoch": 0.08763998052444877, + "grad_norm": 5.397761344909668, + "learning_rate": 2.9207232267037557e-06, + "loss": 1.0253, + "step": 1260 + }, + { + "epoch": 0.08833553592543646, + "grad_norm": 3.3835368156433105, + "learning_rate": 2.943903569772833e-06, + "loss": 1.0785, + "step": 1270 + }, + { + "epoch": 0.08903109132642414, + "grad_norm": 4.830877304077148, + "learning_rate": 2.9670839128419105e-06, + "loss": 1.1242, + "step": 1280 + }, + { + "epoch": 0.08972664672741183, + "grad_norm": 4.06881046295166, + "learning_rate": 2.9902642559109878e-06, + "loss": 1.0296, + "step": 1290 + }, + { + "epoch": 0.09042220212839952, + "grad_norm": 3.112290382385254, + "learning_rate": 3.0134445989800654e-06, + "loss": 1.0386, + "step": 1300 + }, + { + "epoch": 0.09111775752938722, + "grad_norm": 9.144720077514648, + "learning_rate": 3.0366249420491427e-06, + "loss": 1.1019, + "step": 1310 + }, + { + "epoch": 0.09181331293037491, + "grad_norm": 2.629798412322998, + "learning_rate": 3.05980528511822e-06, + "loss": 1.0323, + "step": 1320 + }, + { + "epoch": 0.0925088683313626, + "grad_norm": 3.2272164821624756, + "learning_rate": 3.082985628187297e-06, + "loss": 1.0078, + "step": 1330 + }, + { + "epoch": 0.09320442373235029, + "grad_norm": 3.2060770988464355, + "learning_rate": 3.1061659712563748e-06, + "loss": 1.088, + "step": 1340 + }, + { + "epoch": 0.09389997913333797, + "grad_norm": 3.1854593753814697, + "learning_rate": 3.129346314325452e-06, + "loss": 1.0639, + "step": 1350 + }, + { + "epoch": 0.09459553453432566, + "grad_norm": 3.252732276916504, + "learning_rate": 3.1525266573945297e-06, + "loss": 1.0775, + "step": 1360 + }, + { + 
"epoch": 0.09529108993531335, + "grad_norm": 3.309623956680298, + "learning_rate": 3.175707000463607e-06, + "loss": 1.1166, + "step": 1370 + }, + { + "epoch": 0.09598664533630104, + "grad_norm": 2.6745898723602295, + "learning_rate": 3.1988873435326845e-06, + "loss": 1.0171, + "step": 1380 + }, + { + "epoch": 0.09668220073728873, + "grad_norm": 3.3228795528411865, + "learning_rate": 3.2220676866017618e-06, + "loss": 0.9917, + "step": 1390 + }, + { + "epoch": 0.09737775613827641, + "grad_norm": 3.9568288326263428, + "learning_rate": 3.2452480296708394e-06, + "loss": 1.1053, + "step": 1400 + }, + { + "epoch": 0.0980733115392641, + "grad_norm": 3.7636682987213135, + "learning_rate": 3.268428372739917e-06, + "loss": 1.0371, + "step": 1410 + }, + { + "epoch": 0.09876886694025179, + "grad_norm": 2.715043067932129, + "learning_rate": 3.2916087158089943e-06, + "loss": 1.0261, + "step": 1420 + }, + { + "epoch": 0.09946442234123948, + "grad_norm": 5.22044038772583, + "learning_rate": 3.314789058878072e-06, + "loss": 1.03, + "step": 1430 + }, + { + "epoch": 0.10015997774222717, + "grad_norm": 4.460041522979736, + "learning_rate": 3.337969401947149e-06, + "loss": 1.0097, + "step": 1440 + }, + { + "epoch": 0.10085553314321485, + "grad_norm": 3.8226139545440674, + "learning_rate": 3.361149745016227e-06, + "loss": 1.0553, + "step": 1450 + }, + { + "epoch": 0.10155108854420254, + "grad_norm": 3.1603705883026123, + "learning_rate": 3.384330088085304e-06, + "loss": 1.0507, + "step": 1460 + }, + { + "epoch": 0.10224664394519023, + "grad_norm": 3.8729329109191895, + "learning_rate": 3.4075104311543817e-06, + "loss": 0.9578, + "step": 1470 + }, + { + "epoch": 0.10294219934617792, + "grad_norm": 3.2560646533966064, + "learning_rate": 3.4306907742234585e-06, + "loss": 1.0604, + "step": 1480 + }, + { + "epoch": 0.1036377547471656, + "grad_norm": 13.046659469604492, + "learning_rate": 3.453871117292536e-06, + "loss": 1.0735, + "step": 1490 + }, + { + "epoch": 0.1043333101481533, + "grad_norm": 6.670973777770996, + "learning_rate": 3.4770514603616134e-06, + "loss": 1.0936, + "step": 1500 + }, + { + "epoch": 0.1043333101481533, + "eval_loss": 1.0489426851272583, + "eval_runtime": 4585.5793, + "eval_samples_per_second": 3.96, + "eval_steps_per_second": 0.66, + "step": 1500 + }, + { + "epoch": 0.105028865549141, + "grad_norm": 4.4911580085754395, + "learning_rate": 3.500231803430691e-06, + "loss": 1.0735, + "step": 1510 + }, + { + "epoch": 0.10572442095012868, + "grad_norm": 4.335114002227783, + "learning_rate": 3.5234121464997683e-06, + "loss": 1.0697, + "step": 1520 + }, + { + "epoch": 0.10641997635111637, + "grad_norm": 3.9132916927337646, + "learning_rate": 3.546592489568846e-06, + "loss": 1.0103, + "step": 1530 + }, + { + "epoch": 0.10711553175210406, + "grad_norm": 10.592308044433594, + "learning_rate": 3.569772832637923e-06, + "loss": 1.0397, + "step": 1540 + }, + { + "epoch": 0.10781108715309175, + "grad_norm": 2.9619836807250977, + "learning_rate": 3.592953175707001e-06, + "loss": 1.0705, + "step": 1550 + }, + { + "epoch": 0.10850664255407944, + "grad_norm": 4.534087181091309, + "learning_rate": 3.616133518776078e-06, + "loss": 1.0539, + "step": 1560 + }, + { + "epoch": 0.10920219795506712, + "grad_norm": 3.737814426422119, + "learning_rate": 3.6393138618451557e-06, + "loss": 1.024, + "step": 1570 + }, + { + "epoch": 0.10989775335605481, + "grad_norm": 4.465404987335205, + "learning_rate": 3.662494204914233e-06, + "loss": 1.0695, + "step": 1580 + }, + { + "epoch": 0.1105933087570425, + "grad_norm": 
9.201057434082031, + "learning_rate": 3.6856745479833106e-06, + "loss": 1.0967, + "step": 1590 + }, + { + "epoch": 0.11128886415803019, + "grad_norm": 5.97947883605957, + "learning_rate": 3.708854891052388e-06, + "loss": 1.0394, + "step": 1600 + }, + { + "epoch": 0.11198441955901788, + "grad_norm": 3.5937957763671875, + "learning_rate": 3.7320352341214655e-06, + "loss": 1.0095, + "step": 1610 + }, + { + "epoch": 0.11267997496000556, + "grad_norm": 3.2040810585021973, + "learning_rate": 3.755215577190543e-06, + "loss": 1.1142, + "step": 1620 + }, + { + "epoch": 0.11337553036099325, + "grad_norm": 3.4615955352783203, + "learning_rate": 3.77839592025962e-06, + "loss": 1.0527, + "step": 1630 + }, + { + "epoch": 0.11407108576198094, + "grad_norm": 2.7473151683807373, + "learning_rate": 3.801576263328697e-06, + "loss": 1.0328, + "step": 1640 + }, + { + "epoch": 0.11476664116296863, + "grad_norm": 4.428897857666016, + "learning_rate": 3.824756606397775e-06, + "loss": 1.0711, + "step": 1650 + }, + { + "epoch": 0.11546219656395632, + "grad_norm": 9.98466968536377, + "learning_rate": 3.8479369494668525e-06, + "loss": 1.0176, + "step": 1660 + }, + { + "epoch": 0.116157751964944, + "grad_norm": 2.8557121753692627, + "learning_rate": 3.871117292535929e-06, + "loss": 0.972, + "step": 1670 + }, + { + "epoch": 0.11685330736593169, + "grad_norm": 2.7264370918273926, + "learning_rate": 3.894297635605007e-06, + "loss": 0.9918, + "step": 1680 + }, + { + "epoch": 0.11754886276691938, + "grad_norm": 2.7528839111328125, + "learning_rate": 3.917477978674085e-06, + "loss": 1.0157, + "step": 1690 + }, + { + "epoch": 0.11824441816790707, + "grad_norm": 2.6149799823760986, + "learning_rate": 3.940658321743162e-06, + "loss": 0.9974, + "step": 1700 + }, + { + "epoch": 0.11893997356889477, + "grad_norm": 19.720413208007812, + "learning_rate": 3.96383866481224e-06, + "loss": 1.0435, + "step": 1710 + }, + { + "epoch": 0.11963552896988246, + "grad_norm": 3.2748563289642334, + "learning_rate": 3.987019007881317e-06, + "loss": 1.012, + "step": 1720 + }, + { + "epoch": 0.12033108437087014, + "grad_norm": 3.2334346771240234, + "learning_rate": 4.010199350950394e-06, + "loss": 1.0321, + "step": 1730 + }, + { + "epoch": 0.12102663977185783, + "grad_norm": 3.975581169128418, + "learning_rate": 4.033379694019472e-06, + "loss": 1.1038, + "step": 1740 + }, + { + "epoch": 0.12172219517284552, + "grad_norm": 3.414109230041504, + "learning_rate": 4.05656003708855e-06, + "loss": 1.0452, + "step": 1750 + }, + { + "epoch": 0.12241775057383321, + "grad_norm": 3.2001562118530273, + "learning_rate": 4.0797403801576265e-06, + "loss": 1.0031, + "step": 1760 + }, + { + "epoch": 0.1231133059748209, + "grad_norm": 2.67118763923645, + "learning_rate": 4.102920723226704e-06, + "loss": 1.03, + "step": 1770 + }, + { + "epoch": 0.12380886137580858, + "grad_norm": 3.5356035232543945, + "learning_rate": 4.126101066295782e-06, + "loss": 1.0404, + "step": 1780 + }, + { + "epoch": 0.12450441677679627, + "grad_norm": 3.317657232284546, + "learning_rate": 4.149281409364859e-06, + "loss": 1.0665, + "step": 1790 + }, + { + "epoch": 0.12519997217778395, + "grad_norm": 61.6154899597168, + "learning_rate": 4.172461752433936e-06, + "loss": 1.0029, + "step": 1800 + }, + { + "epoch": 0.12589552757877165, + "grad_norm": 4.594258785247803, + "learning_rate": 4.195642095503014e-06, + "loss": 1.0045, + "step": 1810 + }, + { + "epoch": 0.12659108297975935, + "grad_norm": 4.006076812744141, + "learning_rate": 4.218822438572091e-06, + "loss": 1.0172, + "step": 1820 + }, + 
{ + "epoch": 0.12728663838074702, + "grad_norm": 6.1904826164245605, + "learning_rate": 4.242002781641168e-06, + "loss": 1.1102, + "step": 1830 + }, + { + "epoch": 0.12798219378173473, + "grad_norm": 6.121225833892822, + "learning_rate": 4.265183124710246e-06, + "loss": 1.0112, + "step": 1840 + }, + { + "epoch": 0.1286777491827224, + "grad_norm": 3.7720468044281006, + "learning_rate": 4.288363467779324e-06, + "loss": 1.0437, + "step": 1850 + }, + { + "epoch": 0.1293733045837101, + "grad_norm": 3.4095354080200195, + "learning_rate": 4.3115438108484005e-06, + "loss": 0.9923, + "step": 1860 + }, + { + "epoch": 0.13006885998469778, + "grad_norm": 2.9032609462738037, + "learning_rate": 4.334724153917478e-06, + "loss": 1.1012, + "step": 1870 + }, + { + "epoch": 0.13076441538568548, + "grad_norm": 3.5732736587524414, + "learning_rate": 4.357904496986556e-06, + "loss": 1.0973, + "step": 1880 + }, + { + "epoch": 0.13145997078667315, + "grad_norm": 3.2918882369995117, + "learning_rate": 4.3810848400556335e-06, + "loss": 1.0398, + "step": 1890 + }, + { + "epoch": 0.13215552618766085, + "grad_norm": 3.7572669982910156, + "learning_rate": 4.404265183124711e-06, + "loss": 1.0007, + "step": 1900 + }, + { + "epoch": 0.13285108158864853, + "grad_norm": 3.866692304611206, + "learning_rate": 4.427445526193788e-06, + "loss": 1.014, + "step": 1910 + }, + { + "epoch": 0.13354663698963623, + "grad_norm": 3.473393678665161, + "learning_rate": 4.450625869262866e-06, + "loss": 1.0066, + "step": 1920 + }, + { + "epoch": 0.1342421923906239, + "grad_norm": 4.123939037322998, + "learning_rate": 4.473806212331943e-06, + "loss": 1.0003, + "step": 1930 + }, + { + "epoch": 0.1349377477916116, + "grad_norm": 2.3558366298675537, + "learning_rate": 4.49698655540102e-06, + "loss": 0.9655, + "step": 1940 + }, + { + "epoch": 0.13563330319259928, + "grad_norm": 3.968125581741333, + "learning_rate": 4.520166898470098e-06, + "loss": 0.9774, + "step": 1950 + }, + { + "epoch": 0.13632885859358698, + "grad_norm": 4.159358024597168, + "learning_rate": 4.543347241539175e-06, + "loss": 1.0257, + "step": 1960 + }, + { + "epoch": 0.13702441399457466, + "grad_norm": 3.516815185546875, + "learning_rate": 4.566527584608252e-06, + "loss": 1.0963, + "step": 1970 + }, + { + "epoch": 0.13771996939556236, + "grad_norm": 3.096132516860962, + "learning_rate": 4.58970792767733e-06, + "loss": 1.0635, + "step": 1980 + }, + { + "epoch": 0.13841552479655003, + "grad_norm": 2.2607431411743164, + "learning_rate": 4.6128882707464075e-06, + "loss": 1.029, + "step": 1990 + }, + { + "epoch": 0.13911108019753773, + "grad_norm": 4.0500264167785645, + "learning_rate": 4.636068613815485e-06, + "loss": 0.9818, + "step": 2000 + }, + { + "epoch": 0.13911108019753773, + "eval_loss": 1.0320639610290527, + "eval_runtime": 4585.1072, + "eval_samples_per_second": 3.961, + "eval_steps_per_second": 0.66, + "step": 2000 + }, + { + "epoch": 0.13980663559852544, + "grad_norm": 2.914787530899048, + "learning_rate": 4.659248956884562e-06, + "loss": 1.0047, + "step": 2010 + }, + { + "epoch": 0.1405021909995131, + "grad_norm": 4.331862449645996, + "learning_rate": 4.68242929995364e-06, + "loss": 1.0369, + "step": 2020 + }, + { + "epoch": 0.1411977464005008, + "grad_norm": 4.7135820388793945, + "learning_rate": 4.705609643022717e-06, + "loss": 1.0352, + "step": 2030 + }, + { + "epoch": 0.1418933018014885, + "grad_norm": 3.7532501220703125, + "learning_rate": 4.728789986091795e-06, + "loss": 0.998, + "step": 2040 + }, + { + "epoch": 0.1425888572024762, + "grad_norm": 
3.8624391555786133, + "learning_rate": 4.751970329160872e-06, + "loss": 1.0447, + "step": 2050 + }, + { + "epoch": 0.14328441260346386, + "grad_norm": 2.8472628593444824, + "learning_rate": 4.775150672229949e-06, + "loss": 1.0052, + "step": 2060 + }, + { + "epoch": 0.14397996800445156, + "grad_norm": 7.239597320556641, + "learning_rate": 4.798331015299027e-06, + "loss": 1.0301, + "step": 2070 + }, + { + "epoch": 0.14467552340543924, + "grad_norm": 2.7887721061706543, + "learning_rate": 4.821511358368105e-06, + "loss": 1.0574, + "step": 2080 + }, + { + "epoch": 0.14537107880642694, + "grad_norm": 3.660353899002075, + "learning_rate": 4.844691701437182e-06, + "loss": 0.9898, + "step": 2090 + }, + { + "epoch": 0.14606663420741461, + "grad_norm": 3.7146918773651123, + "learning_rate": 4.867872044506259e-06, + "loss": 1.0797, + "step": 2100 + }, + { + "epoch": 0.14676218960840232, + "grad_norm": 2.9690799713134766, + "learning_rate": 4.891052387575336e-06, + "loss": 1.0542, + "step": 2110 + }, + { + "epoch": 0.14745774500939, + "grad_norm": 4.56571102142334, + "learning_rate": 4.914232730644414e-06, + "loss": 1.0368, + "step": 2120 + }, + { + "epoch": 0.1481533004103777, + "grad_norm": 3.69332218170166, + "learning_rate": 4.937413073713491e-06, + "loss": 1.0236, + "step": 2130 + }, + { + "epoch": 0.14884885581136537, + "grad_norm": 3.94018292427063, + "learning_rate": 4.960593416782569e-06, + "loss": 1.0699, + "step": 2140 + }, + { + "epoch": 0.14954441121235307, + "grad_norm": 2.530980110168457, + "learning_rate": 4.983773759851646e-06, + "loss": 1.0497, + "step": 2150 + }, + { + "epoch": 0.15023996661334074, + "grad_norm": 3.093601703643799, + "learning_rate": 5.006954102920723e-06, + "loss": 1.0821, + "step": 2160 + }, + { + "epoch": 0.15093552201432844, + "grad_norm": 2.5038959980010986, + "learning_rate": 5.030134445989802e-06, + "loss": 0.9797, + "step": 2170 + }, + { + "epoch": 0.15163107741531612, + "grad_norm": 7.219821929931641, + "learning_rate": 5.053314789058879e-06, + "loss": 1.0625, + "step": 2180 + }, + { + "epoch": 0.15232663281630382, + "grad_norm": 15.204484939575195, + "learning_rate": 5.0764951321279555e-06, + "loss": 1.0131, + "step": 2190 + }, + { + "epoch": 0.1530221882172915, + "grad_norm": 4.988640308380127, + "learning_rate": 5.099675475197033e-06, + "loss": 0.9974, + "step": 2200 + }, + { + "epoch": 0.1537177436182792, + "grad_norm": 3.2888946533203125, + "learning_rate": 5.12285581826611e-06, + "loss": 0.9801, + "step": 2210 + }, + { + "epoch": 0.1544132990192669, + "grad_norm": 4.079813003540039, + "learning_rate": 5.1460361613351884e-06, + "loss": 1.0442, + "step": 2220 + }, + { + "epoch": 0.15510885442025457, + "grad_norm": 3.9711544513702393, + "learning_rate": 5.169216504404265e-06, + "loss": 1.0175, + "step": 2230 + }, + { + "epoch": 0.15580440982124227, + "grad_norm": 2.5340263843536377, + "learning_rate": 5.192396847473343e-06, + "loss": 1.0791, + "step": 2240 + }, + { + "epoch": 0.15649996522222995, + "grad_norm": 3.0436949729919434, + "learning_rate": 5.2155771905424206e-06, + "loss": 1.04, + "step": 2250 + }, + { + "epoch": 0.15719552062321765, + "grad_norm": 14.658137321472168, + "learning_rate": 5.238757533611498e-06, + "loss": 1.0388, + "step": 2260 + }, + { + "epoch": 0.15789107602420532, + "grad_norm": 2.35018253326416, + "learning_rate": 5.261937876680575e-06, + "loss": 1.0291, + "step": 2270 + }, + { + "epoch": 0.15858663142519303, + "grad_norm": 5.7171549797058105, + "learning_rate": 5.2851182197496535e-06, + "loss": 1.0244, + "step": 2280 + }, + 
{ + "epoch": 0.1592821868261807, + "grad_norm": 2.9277355670928955, + "learning_rate": 5.30829856281873e-06, + "loss": 1.0, + "step": 2290 + }, + { + "epoch": 0.1599777422271684, + "grad_norm": 9.371576309204102, + "learning_rate": 5.331478905887808e-06, + "loss": 1.0826, + "step": 2300 + }, + { + "epoch": 0.16067329762815608, + "grad_norm": 3.6242470741271973, + "learning_rate": 5.354659248956885e-06, + "loss": 0.9673, + "step": 2310 + }, + { + "epoch": 0.16136885302914378, + "grad_norm": 2.8460490703582764, + "learning_rate": 5.377839592025963e-06, + "loss": 1.0328, + "step": 2320 + }, + { + "epoch": 0.16206440843013145, + "grad_norm": 3.7562429904937744, + "learning_rate": 5.40101993509504e-06, + "loss": 1.0147, + "step": 2330 + }, + { + "epoch": 0.16275996383111915, + "grad_norm": 4.492100238800049, + "learning_rate": 5.424200278164117e-06, + "loss": 0.9919, + "step": 2340 + }, + { + "epoch": 0.16345551923210683, + "grad_norm": 2.958352565765381, + "learning_rate": 5.4473806212331946e-06, + "loss": 1.0211, + "step": 2350 + }, + { + "epoch": 0.16415107463309453, + "grad_norm": 7.289975643157959, + "learning_rate": 5.470560964302271e-06, + "loss": 1.0264, + "step": 2360 + }, + { + "epoch": 0.1648466300340822, + "grad_norm": 10.191829681396484, + "learning_rate": 5.49374130737135e-06, + "loss": 1.0248, + "step": 2370 + }, + { + "epoch": 0.1655421854350699, + "grad_norm": 2.8892757892608643, + "learning_rate": 5.516921650440427e-06, + "loss": 1.043, + "step": 2380 + }, + { + "epoch": 0.16623774083605758, + "grad_norm": 3.8568115234375, + "learning_rate": 5.540101993509504e-06, + "loss": 1.0478, + "step": 2390 + }, + { + "epoch": 0.16693329623704528, + "grad_norm": 4.576777458190918, + "learning_rate": 5.563282336578581e-06, + "loss": 1.0061, + "step": 2400 + }, + { + "epoch": 0.16762885163803298, + "grad_norm": 4.571799278259277, + "learning_rate": 5.58646267964766e-06, + "loss": 1.0553, + "step": 2410 + }, + { + "epoch": 0.16832440703902066, + "grad_norm": 3.6281046867370605, + "learning_rate": 5.6096430227167365e-06, + "loss": 1.0618, + "step": 2420 + }, + { + "epoch": 0.16901996244000836, + "grad_norm": 3.100255250930786, + "learning_rate": 5.632823365785814e-06, + "loss": 1.0548, + "step": 2430 + }, + { + "epoch": 0.16971551784099603, + "grad_norm": 4.903957366943359, + "learning_rate": 5.656003708854892e-06, + "loss": 1.0166, + "step": 2440 + }, + { + "epoch": 0.17041107324198373, + "grad_norm": 5.9280595779418945, + "learning_rate": 5.679184051923969e-06, + "loss": 1.0647, + "step": 2450 + }, + { + "epoch": 0.1711066286429714, + "grad_norm": 7.788846969604492, + "learning_rate": 5.702364394993046e-06, + "loss": 0.9924, + "step": 2460 + }, + { + "epoch": 0.1718021840439591, + "grad_norm": 3.5416007041931152, + "learning_rate": 5.725544738062124e-06, + "loss": 0.9874, + "step": 2470 + }, + { + "epoch": 0.17249773944494678, + "grad_norm": 2.890352487564087, + "learning_rate": 5.7487250811312015e-06, + "loss": 1.0712, + "step": 2480 + }, + { + "epoch": 0.1731932948459345, + "grad_norm": 5.128032684326172, + "learning_rate": 5.771905424200278e-06, + "loss": 0.9978, + "step": 2490 + }, + { + "epoch": 0.17388885024692216, + "grad_norm": 7.297148704528809, + "learning_rate": 5.795085767269356e-06, + "loss": 1.0484, + "step": 2500 + }, + { + "epoch": 0.17388885024692216, + "eval_loss": 1.0326470136642456, + "eval_runtime": 4587.3956, + "eval_samples_per_second": 3.959, + "eval_steps_per_second": 0.66, + "step": 2500 + }, + { + "epoch": 0.17458440564790986, + "grad_norm": 2.595507860183716, + 
"learning_rate": 5.818266110338433e-06, + "loss": 0.966, + "step": 2510 + }, + { + "epoch": 0.17527996104889754, + "grad_norm": 3.793600082397461, + "learning_rate": 5.841446453407511e-06, + "loss": 0.9886, + "step": 2520 + }, + { + "epoch": 0.17597551644988524, + "grad_norm": 2.509936809539795, + "learning_rate": 5.864626796476588e-06, + "loss": 1.0172, + "step": 2530 + }, + { + "epoch": 0.1766710718508729, + "grad_norm": 3.3735904693603516, + "learning_rate": 5.887807139545666e-06, + "loss": 1.0556, + "step": 2540 + }, + { + "epoch": 0.17736662725186061, + "grad_norm": 2.285456418991089, + "learning_rate": 5.9109874826147426e-06, + "loss": 0.9587, + "step": 2550 + }, + { + "epoch": 0.1780621826528483, + "grad_norm": 3.224372386932373, + "learning_rate": 5.934167825683821e-06, + "loss": 1.0499, + "step": 2560 + }, + { + "epoch": 0.178757738053836, + "grad_norm": 3.9238903522491455, + "learning_rate": 5.957348168752898e-06, + "loss": 1.0743, + "step": 2570 + }, + { + "epoch": 0.17945329345482366, + "grad_norm": 2.979311227798462, + "learning_rate": 5.9805285118219755e-06, + "loss": 1.0374, + "step": 2580 + }, + { + "epoch": 0.18014884885581137, + "grad_norm": 3.510424852371216, + "learning_rate": 6.003708854891052e-06, + "loss": 1.0429, + "step": 2590 + }, + { + "epoch": 0.18084440425679904, + "grad_norm": 3.3279194831848145, + "learning_rate": 6.026889197960131e-06, + "loss": 1.0579, + "step": 2600 + }, + { + "epoch": 0.18153995965778674, + "grad_norm": 4.568840503692627, + "learning_rate": 6.050069541029208e-06, + "loss": 1.0743, + "step": 2610 + }, + { + "epoch": 0.18223551505877444, + "grad_norm": 3.8150672912597656, + "learning_rate": 6.073249884098285e-06, + "loss": 1.0458, + "step": 2620 + }, + { + "epoch": 0.18293107045976212, + "grad_norm": 3.638550043106079, + "learning_rate": 6.096430227167363e-06, + "loss": 1.0278, + "step": 2630 + }, + { + "epoch": 0.18362662586074982, + "grad_norm": 3.139571189880371, + "learning_rate": 6.11961057023644e-06, + "loss": 1.0712, + "step": 2640 + }, + { + "epoch": 0.1843221812617375, + "grad_norm": 2.5664305686950684, + "learning_rate": 6.142790913305517e-06, + "loss": 1.0287, + "step": 2650 + }, + { + "epoch": 0.1850177366627252, + "grad_norm": 7.010003089904785, + "learning_rate": 6.165971256374594e-06, + "loss": 1.0145, + "step": 2660 + }, + { + "epoch": 0.18571329206371287, + "grad_norm": 3.4053866863250732, + "learning_rate": 6.189151599443673e-06, + "loss": 1.0125, + "step": 2670 + }, + { + "epoch": 0.18640884746470057, + "grad_norm": 3.0617032051086426, + "learning_rate": 6.2123319425127495e-06, + "loss": 0.9655, + "step": 2680 + }, + { + "epoch": 0.18710440286568825, + "grad_norm": 4.470221996307373, + "learning_rate": 6.235512285581827e-06, + "loss": 1.0303, + "step": 2690 + }, + { + "epoch": 0.18779995826667595, + "grad_norm": 3.828457832336426, + "learning_rate": 6.258692628650904e-06, + "loss": 1.0675, + "step": 2700 + }, + { + "epoch": 0.18849551366766362, + "grad_norm": 3.0096592903137207, + "learning_rate": 6.2818729717199825e-06, + "loss": 1.0811, + "step": 2710 + }, + { + "epoch": 0.18919106906865132, + "grad_norm": 4.744997024536133, + "learning_rate": 6.305053314789059e-06, + "loss": 1.0309, + "step": 2720 + }, + { + "epoch": 0.189886624469639, + "grad_norm": 2.461359977722168, + "learning_rate": 6.328233657858137e-06, + "loss": 0.9615, + "step": 2730 + }, + { + "epoch": 0.1905821798706267, + "grad_norm": 4.052887439727783, + "learning_rate": 6.351414000927214e-06, + "loss": 0.9998, + "step": 2740 + }, + { + "epoch": 
0.19127773527161437, + "grad_norm": 2.9805335998535156, + "learning_rate": 6.374594343996292e-06, + "loss": 1.0472, + "step": 2750 + }, + { + "epoch": 0.19197329067260208, + "grad_norm": 3.3843190670013428, + "learning_rate": 6.397774687065369e-06, + "loss": 1.0199, + "step": 2760 + }, + { + "epoch": 0.19266884607358975, + "grad_norm": 11.764408111572266, + "learning_rate": 6.420955030134447e-06, + "loss": 1.044, + "step": 2770 + }, + { + "epoch": 0.19336440147457745, + "grad_norm": 4.164066791534424, + "learning_rate": 6.4441353732035235e-06, + "loss": 1.0223, + "step": 2780 + }, + { + "epoch": 0.19405995687556513, + "grad_norm": 2.7690541744232178, + "learning_rate": 6.467315716272602e-06, + "loss": 1.0196, + "step": 2790 + }, + { + "epoch": 0.19475551227655283, + "grad_norm": 4.581409931182861, + "learning_rate": 6.490496059341679e-06, + "loss": 1.0034, + "step": 2800 + }, + { + "epoch": 0.19545106767754053, + "grad_norm": 4.398415565490723, + "learning_rate": 6.513676402410756e-06, + "loss": 1.0564, + "step": 2810 + }, + { + "epoch": 0.1961466230785282, + "grad_norm": 2.8072848320007324, + "learning_rate": 6.536856745479834e-06, + "loss": 1.0244, + "step": 2820 + }, + { + "epoch": 0.1968421784795159, + "grad_norm": 4.428516387939453, + "learning_rate": 6.560037088548911e-06, + "loss": 0.9582, + "step": 2830 + }, + { + "epoch": 0.19753773388050358, + "grad_norm": 3.368539333343506, + "learning_rate": 6.583217431617989e-06, + "loss": 1.047, + "step": 2840 + }, + { + "epoch": 0.19823328928149128, + "grad_norm": 2.524186849594116, + "learning_rate": 6.6063977746870654e-06, + "loss": 1.0749, + "step": 2850 + }, + { + "epoch": 0.19892884468247896, + "grad_norm": 5.150571346282959, + "learning_rate": 6.629578117756144e-06, + "loss": 0.9884, + "step": 2860 + }, + { + "epoch": 0.19962440008346666, + "grad_norm": 6.5002665519714355, + "learning_rate": 6.652758460825221e-06, + "loss": 1.0272, + "step": 2870 + }, + { + "epoch": 0.20031995548445433, + "grad_norm": 4.859982490539551, + "learning_rate": 6.675938803894298e-06, + "loss": 1.02, + "step": 2880 + }, + { + "epoch": 0.20101551088544203, + "grad_norm": 7.7048492431640625, + "learning_rate": 6.699119146963375e-06, + "loss": 0.993, + "step": 2890 + }, + { + "epoch": 0.2017110662864297, + "grad_norm": 2.9444189071655273, + "learning_rate": 6.722299490032454e-06, + "loss": 0.9903, + "step": 2900 + }, + { + "epoch": 0.2024066216874174, + "grad_norm": 2.6852824687957764, + "learning_rate": 6.7454798331015305e-06, + "loss": 1.0457, + "step": 2910 + }, + { + "epoch": 0.20310217708840508, + "grad_norm": 3.5874369144439697, + "learning_rate": 6.768660176170608e-06, + "loss": 1.0232, + "step": 2920 + }, + { + "epoch": 0.20379773248939279, + "grad_norm": 3.2236382961273193, + "learning_rate": 6.791840519239685e-06, + "loss": 0.9752, + "step": 2930 + }, + { + "epoch": 0.20449328789038046, + "grad_norm": 13.75607967376709, + "learning_rate": 6.8150208623087635e-06, + "loss": 1.015, + "step": 2940 + }, + { + "epoch": 0.20518884329136816, + "grad_norm": 5.554055690765381, + "learning_rate": 6.83820120537784e-06, + "loss": 1.0509, + "step": 2950 + }, + { + "epoch": 0.20588439869235584, + "grad_norm": 2.7356760501861572, + "learning_rate": 6.861381548446917e-06, + "loss": 1.0797, + "step": 2960 + }, + { + "epoch": 0.20657995409334354, + "grad_norm": 8.756003379821777, + "learning_rate": 6.884561891515995e-06, + "loss": 1.0004, + "step": 2970 + }, + { + "epoch": 0.2072755094943312, + "grad_norm": 3.1420445442199707, + "learning_rate": 6.907742234585072e-06, + 
"loss": 1.0154, + "step": 2980 + }, + { + "epoch": 0.2079710648953189, + "grad_norm": 3.906752109527588, + "learning_rate": 6.93092257765415e-06, + "loss": 1.0191, + "step": 2990 + }, + { + "epoch": 0.2086666202963066, + "grad_norm": 2.6053898334503174, + "learning_rate": 6.954102920723227e-06, + "loss": 1.0158, + "step": 3000 + }, + { + "epoch": 0.2086666202963066, + "eval_loss": 1.0331050157546997, + "eval_runtime": 4613.6406, + "eval_samples_per_second": 3.936, + "eval_steps_per_second": 0.656, + "step": 3000 + }, + { + "epoch": 0.2093621756972943, + "grad_norm": 2.663600444793701, + "learning_rate": 6.9772832637923045e-06, + "loss": 1.0795, + "step": 3010 + }, + { + "epoch": 0.210057731098282, + "grad_norm": 3.1565446853637695, + "learning_rate": 7.000463606861382e-06, + "loss": 1.1026, + "step": 3020 + }, + { + "epoch": 0.21075328649926967, + "grad_norm": 3.030322313308716, + "learning_rate": 7.02364394993046e-06, + "loss": 0.9753, + "step": 3030 + }, + { + "epoch": 0.21144884190025737, + "grad_norm": 3.586249589920044, + "learning_rate": 7.046824292999537e-06, + "loss": 1.0216, + "step": 3040 + }, + { + "epoch": 0.21214439730124504, + "grad_norm": 3.4874258041381836, + "learning_rate": 7.070004636068615e-06, + "loss": 1.0128, + "step": 3050 + }, + { + "epoch": 0.21283995270223274, + "grad_norm": 3.606189489364624, + "learning_rate": 7.093184979137692e-06, + "loss": 1.04, + "step": 3060 + }, + { + "epoch": 0.21353550810322042, + "grad_norm": 2.4312124252319336, + "learning_rate": 7.11636532220677e-06, + "loss": 1.0423, + "step": 3070 + }, + { + "epoch": 0.21423106350420812, + "grad_norm": 11.245027542114258, + "learning_rate": 7.139545665275846e-06, + "loss": 1.0181, + "step": 3080 + }, + { + "epoch": 0.2149266189051958, + "grad_norm": 4.3477888107299805, + "learning_rate": 7.162726008344925e-06, + "loss": 0.9984, + "step": 3090 + }, + { + "epoch": 0.2156221743061835, + "grad_norm": 8.94852352142334, + "learning_rate": 7.185906351414002e-06, + "loss": 1.0377, + "step": 3100 + }, + { + "epoch": 0.21631772970717117, + "grad_norm": 3.847513198852539, + "learning_rate": 7.2090866944830785e-06, + "loss": 1.0834, + "step": 3110 + }, + { + "epoch": 0.21701328510815887, + "grad_norm": 2.518542766571045, + "learning_rate": 7.232267037552156e-06, + "loss": 1.0265, + "step": 3120 + }, + { + "epoch": 0.21770884050914655, + "grad_norm": 7.568866729736328, + "learning_rate": 7.255447380621233e-06, + "loss": 1.0405, + "step": 3130 + }, + { + "epoch": 0.21840439591013425, + "grad_norm": 3.0198209285736084, + "learning_rate": 7.2786277236903115e-06, + "loss": 1.0098, + "step": 3140 + }, + { + "epoch": 0.21909995131112192, + "grad_norm": 2.8102176189422607, + "learning_rate": 7.301808066759388e-06, + "loss": 1.0189, + "step": 3150 + }, + { + "epoch": 0.21979550671210962, + "grad_norm": 3.3369140625, + "learning_rate": 7.324988409828466e-06, + "loss": 1.0784, + "step": 3160 + }, + { + "epoch": 0.2204910621130973, + "grad_norm": 4.140603065490723, + "learning_rate": 7.348168752897544e-06, + "loss": 1.0094, + "step": 3170 + }, + { + "epoch": 0.221186617514085, + "grad_norm": 4.184774875640869, + "learning_rate": 7.371349095966621e-06, + "loss": 1.0025, + "step": 3180 + }, + { + "epoch": 0.22188217291507267, + "grad_norm": 15.252664566040039, + "learning_rate": 7.394529439035698e-06, + "loss": 1.0284, + "step": 3190 + }, + { + "epoch": 0.22257772831606037, + "grad_norm": 3.4159204959869385, + "learning_rate": 7.417709782104776e-06, + "loss": 1.0346, + "step": 3200 + }, + { + "epoch": 0.22327328371704808, + 
"grad_norm": 4.024420738220215, + "learning_rate": 7.440890125173853e-06, + "loss": 1.0663, + "step": 3210 + }, + { + "epoch": 0.22396883911803575, + "grad_norm": 6.411583423614502, + "learning_rate": 7.464070468242931e-06, + "loss": 1.0306, + "step": 3220 + }, + { + "epoch": 0.22466439451902345, + "grad_norm": 2.9651191234588623, + "learning_rate": 7.487250811312008e-06, + "loss": 1.0506, + "step": 3230 + }, + { + "epoch": 0.22535994992001113, + "grad_norm": 3.710402488708496, + "learning_rate": 7.510431154381086e-06, + "loss": 1.0051, + "step": 3240 + }, + { + "epoch": 0.22605550532099883, + "grad_norm": 4.407790660858154, + "learning_rate": 7.533611497450163e-06, + "loss": 1.0079, + "step": 3250 + }, + { + "epoch": 0.2267510607219865, + "grad_norm": 4.010880470275879, + "learning_rate": 7.55679184051924e-06, + "loss": 1.0, + "step": 3260 + }, + { + "epoch": 0.2274466161229742, + "grad_norm": 3.6526100635528564, + "learning_rate": 7.579972183588318e-06, + "loss": 1.0057, + "step": 3270 + }, + { + "epoch": 0.22814217152396188, + "grad_norm": 2.9387059211730957, + "learning_rate": 7.603152526657394e-06, + "loss": 0.9984, + "step": 3280 + }, + { + "epoch": 0.22883772692494958, + "grad_norm": 7.137051105499268, + "learning_rate": 7.626332869726473e-06, + "loss": 1.0024, + "step": 3290 + }, + { + "epoch": 0.22953328232593725, + "grad_norm": 2.7008614540100098, + "learning_rate": 7.64951321279555e-06, + "loss": 1.0382, + "step": 3300 + }, + { + "epoch": 0.23022883772692496, + "grad_norm": 2.397165060043335, + "learning_rate": 7.672693555864627e-06, + "loss": 1.0386, + "step": 3310 + }, + { + "epoch": 0.23092439312791263, + "grad_norm": 3.395324230194092, + "learning_rate": 7.695873898933705e-06, + "loss": 1.0018, + "step": 3320 + }, + { + "epoch": 0.23161994852890033, + "grad_norm": 3.310481548309326, + "learning_rate": 7.719054242002783e-06, + "loss": 1.0449, + "step": 3330 + }, + { + "epoch": 0.232315503929888, + "grad_norm": 3.374783992767334, + "learning_rate": 7.742234585071859e-06, + "loss": 1.0427, + "step": 3340 + }, + { + "epoch": 0.2330110593308757, + "grad_norm": 3.488377571105957, + "learning_rate": 7.765414928140938e-06, + "loss": 1.0299, + "step": 3350 + }, + { + "epoch": 0.23370661473186338, + "grad_norm": 2.015575408935547, + "learning_rate": 7.788595271210014e-06, + "loss": 0.9859, + "step": 3360 + }, + { + "epoch": 0.23440217013285108, + "grad_norm": 3.1947450637817383, + "learning_rate": 7.811775614279092e-06, + "loss": 1.0733, + "step": 3370 + }, + { + "epoch": 0.23509772553383876, + "grad_norm": 3.195134401321411, + "learning_rate": 7.83495595734817e-06, + "loss": 0.9957, + "step": 3380 + }, + { + "epoch": 0.23579328093482646, + "grad_norm": 2.5582737922668457, + "learning_rate": 7.858136300417247e-06, + "loss": 1.0808, + "step": 3390 + }, + { + "epoch": 0.23648883633581413, + "grad_norm": 5.5156145095825195, + "learning_rate": 7.881316643486325e-06, + "loss": 1.0578, + "step": 3400 + }, + { + "epoch": 0.23718439173680184, + "grad_norm": 2.649886131286621, + "learning_rate": 7.904496986555402e-06, + "loss": 1.0073, + "step": 3410 + }, + { + "epoch": 0.23787994713778954, + "grad_norm": 9.969372749328613, + "learning_rate": 7.92767732962448e-06, + "loss": 1.0334, + "step": 3420 + }, + { + "epoch": 0.2385755025387772, + "grad_norm": 4.264416694641113, + "learning_rate": 7.950857672693556e-06, + "loss": 1.0983, + "step": 3430 + }, + { + "epoch": 0.23927105793976491, + "grad_norm": 3.7548670768737793, + "learning_rate": 7.974038015762633e-06, + "loss": 0.99, + "step": 3440 + }, 
+ { + "epoch": 0.2399666133407526, + "grad_norm": 9.063867568969727, + "learning_rate": 7.997218358831711e-06, + "loss": 1.0523, + "step": 3450 + }, + { + "epoch": 0.2406621687417403, + "grad_norm": 4.435846328735352, + "learning_rate": 8.020398701900789e-06, + "loss": 1.0017, + "step": 3460 + }, + { + "epoch": 0.24135772414272796, + "grad_norm": 4.534463405609131, + "learning_rate": 8.043579044969866e-06, + "loss": 1.0588, + "step": 3470 + }, + { + "epoch": 0.24205327954371567, + "grad_norm": 7.399610996246338, + "learning_rate": 8.066759388038944e-06, + "loss": 1.0256, + "step": 3480 + }, + { + "epoch": 0.24274883494470334, + "grad_norm": 5.838286399841309, + "learning_rate": 8.08993973110802e-06, + "loss": 1.0131, + "step": 3490 + }, + { + "epoch": 0.24344439034569104, + "grad_norm": 3.5783426761627197, + "learning_rate": 8.1131200741771e-06, + "loss": 1.0473, + "step": 3500 + }, + { + "epoch": 0.24344439034569104, + "eval_loss": 1.0377700328826904, + "eval_runtime": 4632.7969, + "eval_samples_per_second": 3.92, + "eval_steps_per_second": 0.653, + "step": 3500 + }, + { + "epoch": 0.24413994574667872, + "grad_norm": 2.4813787937164307, + "learning_rate": 8.136300417246175e-06, + "loss": 1.0248, + "step": 3510 + }, + { + "epoch": 0.24483550114766642, + "grad_norm": 3.1482067108154297, + "learning_rate": 8.159480760315253e-06, + "loss": 1.0834, + "step": 3520 + }, + { + "epoch": 0.2455310565486541, + "grad_norm": 4.067923069000244, + "learning_rate": 8.18266110338433e-06, + "loss": 1.0155, + "step": 3530 + }, + { + "epoch": 0.2462266119496418, + "grad_norm": 2.6725502014160156, + "learning_rate": 8.205841446453408e-06, + "loss": 1.0324, + "step": 3540 + }, + { + "epoch": 0.24692216735062947, + "grad_norm": 6.6415276527404785, + "learning_rate": 8.229021789522486e-06, + "loss": 1.0495, + "step": 3550 + }, + { + "epoch": 0.24761772275161717, + "grad_norm": 2.444960117340088, + "learning_rate": 8.252202132591564e-06, + "loss": 1.0141, + "step": 3560 + }, + { + "epoch": 0.24831327815260484, + "grad_norm": 4.207001209259033, + "learning_rate": 8.275382475660641e-06, + "loss": 1.005, + "step": 3570 + }, + { + "epoch": 0.24900883355359255, + "grad_norm": 2.667459011077881, + "learning_rate": 8.298562818729717e-06, + "loss": 1.0907, + "step": 3580 + }, + { + "epoch": 0.24970438895458022, + "grad_norm": 7.600094318389893, + "learning_rate": 8.321743161798795e-06, + "loss": 0.982, + "step": 3590 + }, + { + "epoch": 0.2503999443555679, + "grad_norm": 3.282633066177368, + "learning_rate": 8.344923504867873e-06, + "loss": 1.0834, + "step": 3600 + }, + { + "epoch": 0.2510954997565556, + "grad_norm": 6.192445278167725, + "learning_rate": 8.36810384793695e-06, + "loss": 1.0885, + "step": 3610 + }, + { + "epoch": 0.2517910551575433, + "grad_norm": 3.247286081314087, + "learning_rate": 8.391284191006028e-06, + "loss": 1.0177, + "step": 3620 + }, + { + "epoch": 0.252486610558531, + "grad_norm": 3.562649965286255, + "learning_rate": 8.414464534075106e-06, + "loss": 1.1204, + "step": 3630 + }, + { + "epoch": 0.2531821659595187, + "grad_norm": 3.087660789489746, + "learning_rate": 8.437644877144181e-06, + "loss": 1.0414, + "step": 3640 + }, + { + "epoch": 0.25387772136050635, + "grad_norm": 3.169058084487915, + "learning_rate": 8.46082522021326e-06, + "loss": 1.0744, + "step": 3650 + }, + { + "epoch": 0.25457327676149405, + "grad_norm": 2.7671542167663574, + "learning_rate": 8.484005563282337e-06, + "loss": 0.9839, + "step": 3660 + }, + { + "epoch": 0.25526883216248175, + "grad_norm": 2.731065273284912, + 
"learning_rate": 8.507185906351414e-06, + "loss": 1.0012, + "step": 3670 + }, + { + "epoch": 0.25596438756346945, + "grad_norm": 5.201871395111084, + "learning_rate": 8.530366249420492e-06, + "loss": 1.006, + "step": 3680 + }, + { + "epoch": 0.2566599429644571, + "grad_norm": 7.038867473602295, + "learning_rate": 8.55354659248957e-06, + "loss": 1.0172, + "step": 3690 + }, + { + "epoch": 0.2573554983654448, + "grad_norm": 3.3397016525268555, + "learning_rate": 8.576726935558647e-06, + "loss": 1.0088, + "step": 3700 + }, + { + "epoch": 0.2580510537664325, + "grad_norm": 5.4827656745910645, + "learning_rate": 8.599907278627725e-06, + "loss": 1.071, + "step": 3710 + }, + { + "epoch": 0.2587466091674202, + "grad_norm": 2.42504620552063, + "learning_rate": 8.623087621696801e-06, + "loss": 1.0391, + "step": 3720 + }, + { + "epoch": 0.25944216456840785, + "grad_norm": 2.9646613597869873, + "learning_rate": 8.646267964765879e-06, + "loss": 1.0443, + "step": 3730 + }, + { + "epoch": 0.26013771996939555, + "grad_norm": 3.2530064582824707, + "learning_rate": 8.669448307834956e-06, + "loss": 1.0381, + "step": 3740 + }, + { + "epoch": 0.26083327537038326, + "grad_norm": 2.661914348602295, + "learning_rate": 8.692628650904034e-06, + "loss": 1.0518, + "step": 3750 + }, + { + "epoch": 0.26152883077137096, + "grad_norm": 2.8795435428619385, + "learning_rate": 8.715808993973112e-06, + "loss": 1.0286, + "step": 3760 + }, + { + "epoch": 0.2622243861723586, + "grad_norm": 3.3627450466156006, + "learning_rate": 8.73898933704219e-06, + "loss": 1.0343, + "step": 3770 + }, + { + "epoch": 0.2629199415733463, + "grad_norm": 5.300382614135742, + "learning_rate": 8.762169680111267e-06, + "loss": 1.0524, + "step": 3780 + }, + { + "epoch": 0.263615496974334, + "grad_norm": 3.7690680027008057, + "learning_rate": 8.785350023180343e-06, + "loss": 1.002, + "step": 3790 + }, + { + "epoch": 0.2643110523753217, + "grad_norm": 2.6462290287017822, + "learning_rate": 8.808530366249422e-06, + "loss": 1.0191, + "step": 3800 + }, + { + "epoch": 0.2650066077763094, + "grad_norm": 2.7227768898010254, + "learning_rate": 8.831710709318498e-06, + "loss": 1.011, + "step": 3810 + }, + { + "epoch": 0.26570216317729706, + "grad_norm": 2.548276424407959, + "learning_rate": 8.854891052387576e-06, + "loss": 1.0306, + "step": 3820 + }, + { + "epoch": 0.26639771857828476, + "grad_norm": 5.025858402252197, + "learning_rate": 8.878071395456654e-06, + "loss": 1.0334, + "step": 3830 + }, + { + "epoch": 0.26709327397927246, + "grad_norm": 4.201222896575928, + "learning_rate": 8.901251738525731e-06, + "loss": 0.9767, + "step": 3840 + }, + { + "epoch": 0.26778882938026016, + "grad_norm": 12.024513244628906, + "learning_rate": 8.924432081594809e-06, + "loss": 1.0697, + "step": 3850 + }, + { + "epoch": 0.2684843847812478, + "grad_norm": 4.187282085418701, + "learning_rate": 8.947612424663886e-06, + "loss": 1.0454, + "step": 3860 + }, + { + "epoch": 0.2691799401822355, + "grad_norm": 5.399913787841797, + "learning_rate": 8.970792767732962e-06, + "loss": 1.0336, + "step": 3870 + }, + { + "epoch": 0.2698754955832232, + "grad_norm": 2.8284974098205566, + "learning_rate": 8.99397311080204e-06, + "loss": 1.0684, + "step": 3880 + }, + { + "epoch": 0.2705710509842109, + "grad_norm": 7.704894065856934, + "learning_rate": 9.017153453871118e-06, + "loss": 1.0202, + "step": 3890 + }, + { + "epoch": 0.27126660638519856, + "grad_norm": 2.735288381576538, + "learning_rate": 9.040333796940195e-06, + "loss": 1.0064, + "step": 3900 + }, + { + "epoch": 0.27196216178618626, + 
"grad_norm": 2.1801159381866455, + "learning_rate": 9.063514140009273e-06, + "loss": 1.0077, + "step": 3910 + }, + { + "epoch": 0.27265771718717396, + "grad_norm": 3.1631767749786377, + "learning_rate": 9.08669448307835e-06, + "loss": 1.0317, + "step": 3920 + }, + { + "epoch": 0.27335327258816167, + "grad_norm": 3.3035104274749756, + "learning_rate": 9.109874826147428e-06, + "loss": 1.0675, + "step": 3930 + }, + { + "epoch": 0.2740488279891493, + "grad_norm": 2.8754637241363525, + "learning_rate": 9.133055169216504e-06, + "loss": 1.0509, + "step": 3940 + }, + { + "epoch": 0.274744383390137, + "grad_norm": 4.164738655090332, + "learning_rate": 9.156235512285582e-06, + "loss": 1.0821, + "step": 3950 + }, + { + "epoch": 0.2754399387911247, + "grad_norm": 2.739020347595215, + "learning_rate": 9.17941585535466e-06, + "loss": 1.0211, + "step": 3960 + }, + { + "epoch": 0.2761354941921124, + "grad_norm": 3.2094786167144775, + "learning_rate": 9.202596198423737e-06, + "loss": 1.0282, + "step": 3970 + }, + { + "epoch": 0.27683104959310006, + "grad_norm": 6.651099681854248, + "learning_rate": 9.225776541492815e-06, + "loss": 1.0382, + "step": 3980 + }, + { + "epoch": 0.27752660499408777, + "grad_norm": 2.5474884510040283, + "learning_rate": 9.248956884561893e-06, + "loss": 1.0188, + "step": 3990 + }, + { + "epoch": 0.27822216039507547, + "grad_norm": 2.9760305881500244, + "learning_rate": 9.27213722763097e-06, + "loss": 1.0142, + "step": 4000 + }, + { + "epoch": 0.27822216039507547, + "eval_loss": 1.047255277633667, + "eval_runtime": 4641.1856, + "eval_samples_per_second": 3.913, + "eval_steps_per_second": 0.652, + "step": 4000 + }, + { + "epoch": 0.27891771579606317, + "grad_norm": 5.424582481384277, + "learning_rate": 9.295317570700048e-06, + "loss": 1.0695, + "step": 4010 + }, + { + "epoch": 0.27961327119705087, + "grad_norm": 5.875683307647705, + "learning_rate": 9.318497913769124e-06, + "loss": 1.1002, + "step": 4020 + }, + { + "epoch": 0.2803088265980385, + "grad_norm": 3.968740940093994, + "learning_rate": 9.341678256838203e-06, + "loss": 1.0511, + "step": 4030 + }, + { + "epoch": 0.2810043819990262, + "grad_norm": 3.2854228019714355, + "learning_rate": 9.36485859990728e-06, + "loss": 1.0385, + "step": 4040 + }, + { + "epoch": 0.2816999374000139, + "grad_norm": 2.94095516204834, + "learning_rate": 9.388038942976357e-06, + "loss": 1.0748, + "step": 4050 + }, + { + "epoch": 0.2823954928010016, + "grad_norm": 3.485081911087036, + "learning_rate": 9.411219286045434e-06, + "loss": 0.9695, + "step": 4060 + }, + { + "epoch": 0.28309104820198927, + "grad_norm": 3.5424959659576416, + "learning_rate": 9.43439962911451e-06, + "loss": 1.0728, + "step": 4070 + }, + { + "epoch": 0.283786603602977, + "grad_norm": 3.2194695472717285, + "learning_rate": 9.45757997218359e-06, + "loss": 1.0453, + "step": 4080 + }, + { + "epoch": 0.2844821590039647, + "grad_norm": 3.432450532913208, + "learning_rate": 9.480760315252666e-06, + "loss": 1.0989, + "step": 4090 + }, + { + "epoch": 0.2851777144049524, + "grad_norm": 3.5696616172790527, + "learning_rate": 9.503940658321743e-06, + "loss": 1.0795, + "step": 4100 + }, + { + "epoch": 0.28587326980594, + "grad_norm": 2.309061288833618, + "learning_rate": 9.527121001390821e-06, + "loss": 1.0077, + "step": 4110 + }, + { + "epoch": 0.2865688252069277, + "grad_norm": 2.974426507949829, + "learning_rate": 9.550301344459899e-06, + "loss": 1.0262, + "step": 4120 + }, + { + "epoch": 0.2872643806079154, + "grad_norm": 1.9806816577911377, + "learning_rate": 9.573481687528976e-06, + 
"loss": 0.9895, + "step": 4130 + }, + { + "epoch": 0.28795993600890313, + "grad_norm": 2.5636301040649414, + "learning_rate": 9.596662030598054e-06, + "loss": 1.0308, + "step": 4140 + }, + { + "epoch": 0.2886554914098908, + "grad_norm": 4.132993221282959, + "learning_rate": 9.619842373667132e-06, + "loss": 1.0664, + "step": 4150 + }, + { + "epoch": 0.2893510468108785, + "grad_norm": 2.7040839195251465, + "learning_rate": 9.64302271673621e-06, + "loss": 1.0198, + "step": 4160 + }, + { + "epoch": 0.2900466022118662, + "grad_norm": 2.4326305389404297, + "learning_rate": 9.666203059805285e-06, + "loss": 1.0511, + "step": 4170 + }, + { + "epoch": 0.2907421576128539, + "grad_norm": 3.101189374923706, + "learning_rate": 9.689383402874365e-06, + "loss": 1.0408, + "step": 4180 + }, + { + "epoch": 0.2914377130138415, + "grad_norm": 2.0922415256500244, + "learning_rate": 9.71256374594344e-06, + "loss": 0.9986, + "step": 4190 + }, + { + "epoch": 0.29213326841482923, + "grad_norm": 1.8994951248168945, + "learning_rate": 9.735744089012518e-06, + "loss": 0.9988, + "step": 4200 + }, + { + "epoch": 0.29282882381581693, + "grad_norm": 5.430738925933838, + "learning_rate": 9.758924432081596e-06, + "loss": 1.011, + "step": 4210 + }, + { + "epoch": 0.29352437921680463, + "grad_norm": 3.189260959625244, + "learning_rate": 9.782104775150672e-06, + "loss": 1.0607, + "step": 4220 + }, + { + "epoch": 0.29421993461779233, + "grad_norm": 2.5282936096191406, + "learning_rate": 9.805285118219751e-06, + "loss": 1.1292, + "step": 4230 + }, + { + "epoch": 0.29491549001878, + "grad_norm": 7.342461585998535, + "learning_rate": 9.828465461288827e-06, + "loss": 1.0156, + "step": 4240 + }, + { + "epoch": 0.2956110454197677, + "grad_norm": 2.6191577911376953, + "learning_rate": 9.851645804357905e-06, + "loss": 1.0329, + "step": 4250 + }, + { + "epoch": 0.2963066008207554, + "grad_norm": 6.352475643157959, + "learning_rate": 9.874826147426983e-06, + "loss": 1.058, + "step": 4260 + }, + { + "epoch": 0.2970021562217431, + "grad_norm": 2.6068737506866455, + "learning_rate": 9.89800649049606e-06, + "loss": 1.0205, + "step": 4270 + }, + { + "epoch": 0.29769771162273073, + "grad_norm": 10.811634063720703, + "learning_rate": 9.921186833565138e-06, + "loss": 1.0343, + "step": 4280 + }, + { + "epoch": 0.29839326702371843, + "grad_norm": 12.347911834716797, + "learning_rate": 9.944367176634215e-06, + "loss": 0.9911, + "step": 4290 + }, + { + "epoch": 0.29908882242470614, + "grad_norm": 5.012406826019287, + "learning_rate": 9.967547519703291e-06, + "loss": 1.0655, + "step": 4300 + }, + { + "epoch": 0.29978437782569384, + "grad_norm": 1.9300044775009155, + "learning_rate": 9.99072786277237e-06, + "loss": 1.0192, + "step": 4310 + }, + { + "epoch": 0.3004799332266815, + "grad_norm": 2.2523481845855713, + "learning_rate": 9.999999410480316e-06, + "loss": 0.9632, + "step": 4320 + }, + { + "epoch": 0.3011754886276692, + "grad_norm": 2.9525656700134277, + "learning_rate": 9.999995807860525e-06, + "loss": 1.0569, + "step": 4330 + }, + { + "epoch": 0.3018710440286569, + "grad_norm": 2.1144776344299316, + "learning_rate": 9.999988930134236e-06, + "loss": 1.0412, + "step": 4340 + }, + { + "epoch": 0.3025665994296446, + "grad_norm": 6.398777484893799, + "learning_rate": 9.999978777305955e-06, + "loss": 1.0659, + "step": 4350 + }, + { + "epoch": 0.30326215483063224, + "grad_norm": 2.747971534729004, + "learning_rate": 9.999965349382327e-06, + "loss": 1.0256, + "step": 4360 + }, + { + "epoch": 0.30395771023161994, + "grad_norm": 1.8770275115966797, + 
"learning_rate": 9.999948646372155e-06, + "loss": 1.0371, + "step": 4370 + }, + { + "epoch": 0.30465326563260764, + "grad_norm": 4.542801856994629, + "learning_rate": 9.999928668286377e-06, + "loss": 1.0632, + "step": 4380 + }, + { + "epoch": 0.30534882103359534, + "grad_norm": 9.094869613647461, + "learning_rate": 9.999905415138079e-06, + "loss": 1.0105, + "step": 4390 + }, + { + "epoch": 0.306044376434583, + "grad_norm": 5.316960334777832, + "learning_rate": 9.999878886942489e-06, + "loss": 1.0901, + "step": 4400 + }, + { + "epoch": 0.3067399318355707, + "grad_norm": 6.42795467376709, + "learning_rate": 9.999849083716989e-06, + "loss": 1.0813, + "step": 4410 + }, + { + "epoch": 0.3074354872365584, + "grad_norm": 3.1024363040924072, + "learning_rate": 9.999816005481097e-06, + "loss": 1.0402, + "step": 4420 + }, + { + "epoch": 0.3081310426375461, + "grad_norm": 2.2440919876098633, + "learning_rate": 9.99977965225648e-06, + "loss": 1.0642, + "step": 4430 + }, + { + "epoch": 0.3088265980385338, + "grad_norm": 2.943878173828125, + "learning_rate": 9.999740024066955e-06, + "loss": 1.044, + "step": 4440 + }, + { + "epoch": 0.30952215343952144, + "grad_norm": 2.430373430252075, + "learning_rate": 9.999697120938473e-06, + "loss": 1.0152, + "step": 4450 + }, + { + "epoch": 0.31021770884050914, + "grad_norm": 2.3316709995269775, + "learning_rate": 9.99965094289914e-06, + "loss": 1.0473, + "step": 4460 + }, + { + "epoch": 0.31091326424149685, + "grad_norm": 2.5294973850250244, + "learning_rate": 9.999601489979203e-06, + "loss": 1.0597, + "step": 4470 + }, + { + "epoch": 0.31160881964248455, + "grad_norm": 7.392703056335449, + "learning_rate": 9.999548762211055e-06, + "loss": 1.0448, + "step": 4480 + }, + { + "epoch": 0.3123043750434722, + "grad_norm": 1.8714004755020142, + "learning_rate": 9.999492759629233e-06, + "loss": 1.0113, + "step": 4490 + }, + { + "epoch": 0.3129999304444599, + "grad_norm": 2.4874730110168457, + "learning_rate": 9.999433482270419e-06, + "loss": 0.9765, + "step": 4500 + }, + { + "epoch": 0.3129999304444599, + "eval_loss": 1.0507131814956665, + "eval_runtime": 4613.0098, + "eval_samples_per_second": 3.937, + "eval_steps_per_second": 0.656, + "step": 4500 + }, + { + "epoch": 0.3136954858454476, + "grad_norm": 2.104146957397461, + "learning_rate": 9.999370930173445e-06, + "loss": 1.0348, + "step": 4510 + }, + { + "epoch": 0.3143910412464353, + "grad_norm": 2.0951380729675293, + "learning_rate": 9.99930510337928e-06, + "loss": 1.0209, + "step": 4520 + }, + { + "epoch": 0.31508659664742295, + "grad_norm": 3.3489181995391846, + "learning_rate": 9.999236001931043e-06, + "loss": 1.0084, + "step": 4530 + }, + { + "epoch": 0.31578215204841065, + "grad_norm": 2.36907696723938, + "learning_rate": 9.999163625873998e-06, + "loss": 1.0346, + "step": 4540 + }, + { + "epoch": 0.31647770744939835, + "grad_norm": 3.0769405364990234, + "learning_rate": 9.99908797525555e-06, + "loss": 0.9908, + "step": 4550 + }, + { + "epoch": 0.31717326285038605, + "grad_norm": 2.911822557449341, + "learning_rate": 9.999009050125257e-06, + "loss": 0.9753, + "step": 4560 + }, + { + "epoch": 0.3178688182513737, + "grad_norm": 2.7419121265411377, + "learning_rate": 9.998926850534811e-06, + "loss": 1.0458, + "step": 4570 + }, + { + "epoch": 0.3185643736523614, + "grad_norm": 4.640398979187012, + "learning_rate": 9.998841376538058e-06, + "loss": 1.1055, + "step": 4580 + }, + { + "epoch": 0.3192599290533491, + "grad_norm": 2.6035497188568115, + "learning_rate": 9.998752628190986e-06, + "loss": 1.0495, + "step": 4590 + 
}, + { + "epoch": 0.3199554844543368, + "grad_norm": 2.4305527210235596, + "learning_rate": 9.998660605551725e-06, + "loss": 1.0784, + "step": 4600 + }, + { + "epoch": 0.32065103985532445, + "grad_norm": 2.6899681091308594, + "learning_rate": 9.998565308680551e-06, + "loss": 1.0761, + "step": 4610 + }, + { + "epoch": 0.32134659525631215, + "grad_norm": 3.015202522277832, + "learning_rate": 9.998466737639889e-06, + "loss": 1.0594, + "step": 4620 + }, + { + "epoch": 0.32204215065729985, + "grad_norm": 5.018462181091309, + "learning_rate": 9.9983648924943e-06, + "loss": 1.0131, + "step": 4630 + }, + { + "epoch": 0.32273770605828755, + "grad_norm": 4.5205464363098145, + "learning_rate": 9.998259773310501e-06, + "loss": 1.0468, + "step": 4640 + }, + { + "epoch": 0.32343326145927526, + "grad_norm": 4.8739333152771, + "learning_rate": 9.998151380157344e-06, + "loss": 1.1014, + "step": 4650 + }, + { + "epoch": 0.3241288168602629, + "grad_norm": 2.3750078678131104, + "learning_rate": 9.998039713105827e-06, + "loss": 1.0337, + "step": 4660 + }, + { + "epoch": 0.3248243722612506, + "grad_norm": 2.809387445449829, + "learning_rate": 9.9979247722291e-06, + "loss": 1.0885, + "step": 4670 + }, + { + "epoch": 0.3255199276622383, + "grad_norm": 2.417814254760742, + "learning_rate": 9.997806557602446e-06, + "loss": 1.1252, + "step": 4680 + }, + { + "epoch": 0.326215483063226, + "grad_norm": 3.246739149093628, + "learning_rate": 9.9976850693033e-06, + "loss": 1.0607, + "step": 4690 + }, + { + "epoch": 0.32691103846421365, + "grad_norm": 2.1986732482910156, + "learning_rate": 9.997560307411241e-06, + "loss": 1.0515, + "step": 4700 + }, + { + "epoch": 0.32760659386520136, + "grad_norm": 3.8100876808166504, + "learning_rate": 9.99743227200799e-06, + "loss": 0.9873, + "step": 4710 + }, + { + "epoch": 0.32830214926618906, + "grad_norm": 5.42280912399292, + "learning_rate": 9.997300963177412e-06, + "loss": 1.0277, + "step": 4720 + }, + { + "epoch": 0.32899770466717676, + "grad_norm": 5.013223648071289, + "learning_rate": 9.997166381005518e-06, + "loss": 1.0477, + "step": 4730 + }, + { + "epoch": 0.3296932600681644, + "grad_norm": 3.2371959686279297, + "learning_rate": 9.997028525580463e-06, + "loss": 1.0284, + "step": 4740 + }, + { + "epoch": 0.3303888154691521, + "grad_norm": 6.018877029418945, + "learning_rate": 9.996887396992545e-06, + "loss": 0.9941, + "step": 4750 + }, + { + "epoch": 0.3310843708701398, + "grad_norm": 2.9237730503082275, + "learning_rate": 9.996742995334205e-06, + "loss": 1.0518, + "step": 4760 + }, + { + "epoch": 0.3317799262711275, + "grad_norm": 1.9030520915985107, + "learning_rate": 9.99659532070003e-06, + "loss": 0.9824, + "step": 4770 + }, + { + "epoch": 0.33247548167211516, + "grad_norm": 1.6474735736846924, + "learning_rate": 9.99644437318675e-06, + "loss": 1.0602, + "step": 4780 + }, + { + "epoch": 0.33317103707310286, + "grad_norm": 2.6931610107421875, + "learning_rate": 9.996290152893239e-06, + "loss": 1.0237, + "step": 4790 + }, + { + "epoch": 0.33386659247409056, + "grad_norm": 3.441774606704712, + "learning_rate": 9.996132659920515e-06, + "loss": 1.0302, + "step": 4800 + }, + { + "epoch": 0.33456214787507826, + "grad_norm": 7.111067295074463, + "learning_rate": 9.99597189437174e-06, + "loss": 1.0612, + "step": 4810 + }, + { + "epoch": 0.33525770327606597, + "grad_norm": 4.218466281890869, + "learning_rate": 9.995807856352215e-06, + "loss": 1.0626, + "step": 4820 + }, + { + "epoch": 0.3359532586770536, + "grad_norm": 2.0221176147460938, + "learning_rate": 9.995640545969393e-06, + 
"loss": 1.0373, + "step": 4830 + }, + { + "epoch": 0.3366488140780413, + "grad_norm": 2.6482598781585693, + "learning_rate": 9.995469963332866e-06, + "loss": 1.1489, + "step": 4840 + }, + { + "epoch": 0.337344369479029, + "grad_norm": 4.540498733520508, + "learning_rate": 9.995296108554367e-06, + "loss": 1.0889, + "step": 4850 + }, + { + "epoch": 0.3380399248800167, + "grad_norm": 4.00014591217041, + "learning_rate": 9.995118981747775e-06, + "loss": 1.0562, + "step": 4860 + }, + { + "epoch": 0.33873548028100436, + "grad_norm": 3.3716061115264893, + "learning_rate": 9.994938583029112e-06, + "loss": 1.0648, + "step": 4870 + }, + { + "epoch": 0.33943103568199207, + "grad_norm": 3.811533212661743, + "learning_rate": 9.994754912516545e-06, + "loss": 1.1293, + "step": 4880 + }, + { + "epoch": 0.34012659108297977, + "grad_norm": 2.74750018119812, + "learning_rate": 9.99456797033038e-06, + "loss": 1.0721, + "step": 4890 + }, + { + "epoch": 0.34082214648396747, + "grad_norm": 5.763965606689453, + "learning_rate": 9.994377756593069e-06, + "loss": 0.9923, + "step": 4900 + }, + { + "epoch": 0.3415177018849551, + "grad_norm": 12.668098449707031, + "learning_rate": 9.994184271429205e-06, + "loss": 1.0049, + "step": 4910 + }, + { + "epoch": 0.3422132572859428, + "grad_norm": 3.2131540775299072, + "learning_rate": 9.99398751496553e-06, + "loss": 1.004, + "step": 4920 + }, + { + "epoch": 0.3429088126869305, + "grad_norm": 5.885711193084717, + "learning_rate": 9.993787487330915e-06, + "loss": 1.0482, + "step": 4930 + }, + { + "epoch": 0.3436043680879182, + "grad_norm": 2.019047975540161, + "learning_rate": 9.993584188656389e-06, + "loss": 1.0049, + "step": 4940 + }, + { + "epoch": 0.34429992348890587, + "grad_norm": 3.424795389175415, + "learning_rate": 9.993377619075116e-06, + "loss": 1.0311, + "step": 4950 + }, + { + "epoch": 0.34499547888989357, + "grad_norm": 2.206117868423462, + "learning_rate": 9.993167778722402e-06, + "loss": 1.0462, + "step": 4960 + }, + { + "epoch": 0.34569103429088127, + "grad_norm": 2.1619067192077637, + "learning_rate": 9.9929546677357e-06, + "loss": 1.0796, + "step": 4970 + }, + { + "epoch": 0.346386589691869, + "grad_norm": 2.362414836883545, + "learning_rate": 9.992738286254599e-06, + "loss": 1.0662, + "step": 4980 + }, + { + "epoch": 0.3470821450928566, + "grad_norm": 2.4229462146759033, + "learning_rate": 9.992518634420834e-06, + "loss": 1.0508, + "step": 4990 + }, + { + "epoch": 0.3477777004938443, + "grad_norm": 4.403920650482178, + "learning_rate": 9.992295712378284e-06, + "loss": 1.1341, + "step": 5000 + }, + { + "epoch": 0.3477777004938443, + "eval_loss": 1.0439701080322266, + "eval_runtime": 4680.8817, + "eval_samples_per_second": 3.88, + "eval_steps_per_second": 0.647, + "step": 5000 + }, + { + "epoch": 0.348473255894832, + "grad_norm": 3.770839214324951, + "learning_rate": 9.992069520272967e-06, + "loss": 1.1185, + "step": 5010 + }, + { + "epoch": 0.3491688112958197, + "grad_norm": 5.947413444519043, + "learning_rate": 9.991840058253044e-06, + "loss": 1.0214, + "step": 5020 + }, + { + "epoch": 0.3498643666968074, + "grad_norm": 4.575600624084473, + "learning_rate": 9.991607326468816e-06, + "loss": 1.0758, + "step": 5030 + }, + { + "epoch": 0.3505599220977951, + "grad_norm": 2.7185375690460205, + "learning_rate": 9.991371325072727e-06, + "loss": 1.0673, + "step": 5040 + }, + { + "epoch": 0.3512554774987828, + "grad_norm": 4.168835163116455, + "learning_rate": 9.991132054219366e-06, + "loss": 1.0455, + "step": 5050 + }, + { + "epoch": 0.3519510328997705, + 
"grad_norm": 2.77966570854187, + "learning_rate": 9.990889514065459e-06, + "loss": 1.0711, + "step": 5060 + }, + { + "epoch": 0.3526465883007582, + "grad_norm": 2.7357871532440186, + "learning_rate": 9.990643704769874e-06, + "loss": 0.9592, + "step": 5070 + }, + { + "epoch": 0.3533421437017458, + "grad_norm": 2.4415783882141113, + "learning_rate": 9.990394626493622e-06, + "loss": 1.0541, + "step": 5080 + }, + { + "epoch": 0.3540376991027335, + "grad_norm": 2.6576969623565674, + "learning_rate": 9.990142279399856e-06, + "loss": 1.0369, + "step": 5090 + }, + { + "epoch": 0.35473325450372123, + "grad_norm": 3.4306960105895996, + "learning_rate": 9.989886663653869e-06, + "loss": 1.0588, + "step": 5100 + }, + { + "epoch": 0.35542880990470893, + "grad_norm": 4.168266773223877, + "learning_rate": 9.989627779423095e-06, + "loss": 1.0684, + "step": 5110 + }, + { + "epoch": 0.3561243653056966, + "grad_norm": 4.160959243774414, + "learning_rate": 9.989365626877106e-06, + "loss": 1.0631, + "step": 5120 + }, + { + "epoch": 0.3568199207066843, + "grad_norm": 1.7843825817108154, + "learning_rate": 9.98910020618762e-06, + "loss": 1.0732, + "step": 5130 + }, + { + "epoch": 0.357515476107672, + "grad_norm": 4.0116987228393555, + "learning_rate": 9.988831517528494e-06, + "loss": 1.046, + "step": 5140 + }, + { + "epoch": 0.3582110315086597, + "grad_norm": 2.794257164001465, + "learning_rate": 9.988559561075723e-06, + "loss": 1.0151, + "step": 5150 + }, + { + "epoch": 0.35890658690964733, + "grad_norm": 2.2605085372924805, + "learning_rate": 9.988284337007445e-06, + "loss": 1.0322, + "step": 5160 + }, + { + "epoch": 0.35960214231063503, + "grad_norm": 2.138259172439575, + "learning_rate": 9.98800584550394e-06, + "loss": 1.0488, + "step": 5170 + }, + { + "epoch": 0.36029769771162273, + "grad_norm": 2.6369879245758057, + "learning_rate": 9.987724086747622e-06, + "loss": 1.0856, + "step": 5180 + }, + { + "epoch": 0.36099325311261043, + "grad_norm": 2.4682302474975586, + "learning_rate": 9.987439060923052e-06, + "loss": 1.0702, + "step": 5190 + }, + { + "epoch": 0.3616888085135981, + "grad_norm": 2.9794416427612305, + "learning_rate": 9.987150768216926e-06, + "loss": 1.0509, + "step": 5200 + }, + { + "epoch": 0.3623843639145858, + "grad_norm": 2.9885737895965576, + "learning_rate": 9.986859208818086e-06, + "loss": 1.0032, + "step": 5210 + }, + { + "epoch": 0.3630799193155735, + "grad_norm": 2.4614968299865723, + "learning_rate": 9.986564382917505e-06, + "loss": 1.0738, + "step": 5220 + }, + { + "epoch": 0.3637754747165612, + "grad_norm": 1.8250749111175537, + "learning_rate": 9.986266290708304e-06, + "loss": 1.0083, + "step": 5230 + }, + { + "epoch": 0.3644710301175489, + "grad_norm": 2.4030511379241943, + "learning_rate": 9.985964932385737e-06, + "loss": 1.0133, + "step": 5240 + }, + { + "epoch": 0.36516658551853654, + "grad_norm": 2.657980442047119, + "learning_rate": 9.985660308147202e-06, + "loss": 1.0305, + "step": 5250 + }, + { + "epoch": 0.36586214091952424, + "grad_norm": 1.9457042217254639, + "learning_rate": 9.985352418192236e-06, + "loss": 1.0085, + "step": 5260 + }, + { + "epoch": 0.36655769632051194, + "grad_norm": 2.5800135135650635, + "learning_rate": 9.98504126272251e-06, + "loss": 1.0212, + "step": 5270 + }, + { + "epoch": 0.36725325172149964, + "grad_norm": 2.558617115020752, + "learning_rate": 9.984726841941841e-06, + "loss": 1.0501, + "step": 5280 + }, + { + "epoch": 0.3679488071224873, + "grad_norm": 3.0941543579101562, + "learning_rate": 9.98440915605618e-06, + "loss": 1.0172, + "step": 5290 
+ }, + { + "epoch": 0.368644362523475, + "grad_norm": 2.3173184394836426, + "learning_rate": 9.984088205273617e-06, + "loss": 1.0264, + "step": 5300 + }, + { + "epoch": 0.3693399179244627, + "grad_norm": 2.1131627559661865, + "learning_rate": 9.983763989804384e-06, + "loss": 1.0771, + "step": 5310 + }, + { + "epoch": 0.3700354733254504, + "grad_norm": 1.9023852348327637, + "learning_rate": 9.983436509860847e-06, + "loss": 0.9632, + "step": 5320 + }, + { + "epoch": 0.37073102872643804, + "grad_norm": 2.699855327606201, + "learning_rate": 9.983105765657514e-06, + "loss": 1.0413, + "step": 5330 + }, + { + "epoch": 0.37142658412742574, + "grad_norm": 5.310295104980469, + "learning_rate": 9.982771757411032e-06, + "loss": 1.069, + "step": 5340 + }, + { + "epoch": 0.37212213952841344, + "grad_norm": 2.289287567138672, + "learning_rate": 9.982434485340178e-06, + "loss": 1.0146, + "step": 5350 + }, + { + "epoch": 0.37281769492940114, + "grad_norm": 3.4784088134765625, + "learning_rate": 9.982093949665876e-06, + "loss": 1.091, + "step": 5360 + }, + { + "epoch": 0.3735132503303888, + "grad_norm": 3.0726890563964844, + "learning_rate": 9.981750150611187e-06, + "loss": 1.0399, + "step": 5370 + }, + { + "epoch": 0.3742088057313765, + "grad_norm": 2.4800238609313965, + "learning_rate": 9.9814030884013e-06, + "loss": 0.976, + "step": 5380 + }, + { + "epoch": 0.3749043611323642, + "grad_norm": 3.184840679168701, + "learning_rate": 9.981052763263554e-06, + "loss": 1.0562, + "step": 5390 + }, + { + "epoch": 0.3755999165333519, + "grad_norm": 2.1435444355010986, + "learning_rate": 9.98069917542742e-06, + "loss": 1.0277, + "step": 5400 + }, + { + "epoch": 0.37629547193433954, + "grad_norm": 5.143226623535156, + "learning_rate": 9.980342325124501e-06, + "loss": 1.0059, + "step": 5410 + }, + { + "epoch": 0.37699102733532724, + "grad_norm": 4.298587322235107, + "learning_rate": 9.979982212588544e-06, + "loss": 1.052, + "step": 5420 + }, + { + "epoch": 0.37768658273631495, + "grad_norm": 2.6828598976135254, + "learning_rate": 9.97961883805543e-06, + "loss": 1.0508, + "step": 5430 + }, + { + "epoch": 0.37838213813730265, + "grad_norm": 2.509519577026367, + "learning_rate": 9.979252201763182e-06, + "loss": 0.9941, + "step": 5440 + }, + { + "epoch": 0.37907769353829035, + "grad_norm": 1.9193837642669678, + "learning_rate": 9.978882303951948e-06, + "loss": 1.0276, + "step": 5450 + }, + { + "epoch": 0.379773248939278, + "grad_norm": 4.005630016326904, + "learning_rate": 9.978509144864024e-06, + "loss": 1.0206, + "step": 5460 + }, + { + "epoch": 0.3804688043402657, + "grad_norm": 1.7720558643341064, + "learning_rate": 9.978132724743835e-06, + "loss": 1.0795, + "step": 5470 + }, + { + "epoch": 0.3811643597412534, + "grad_norm": 3.186534881591797, + "learning_rate": 9.977753043837944e-06, + "loss": 1.0322, + "step": 5480 + }, + { + "epoch": 0.3818599151422411, + "grad_norm": 3.083890914916992, + "learning_rate": 9.977370102395052e-06, + "loss": 0.9924, + "step": 5490 + }, + { + "epoch": 0.38255547054322875, + "grad_norm": 2.599231481552124, + "learning_rate": 9.976983900665992e-06, + "loss": 0.9921, + "step": 5500 + }, + { + "epoch": 0.38255547054322875, + "eval_loss": 1.040250539779663, + "eval_runtime": 4636.9914, + "eval_samples_per_second": 3.917, + "eval_steps_per_second": 0.653, + "step": 5500 + }, + { + "epoch": 0.38325102594421645, + "grad_norm": 2.2104082107543945, + "learning_rate": 9.976594438903737e-06, + "loss": 1.001, + "step": 5510 + }, + { + "epoch": 0.38394658134520415, + "grad_norm": 2.622354030609131, + 
"learning_rate": 9.976201717363391e-06, + "loss": 1.0908, + "step": 5520 + }, + { + "epoch": 0.38464213674619185, + "grad_norm": 2.8059942722320557, + "learning_rate": 9.975805736302198e-06, + "loss": 1.0691, + "step": 5530 + }, + { + "epoch": 0.3853376921471795, + "grad_norm": 5.203469753265381, + "learning_rate": 9.97540649597953e-06, + "loss": 1.032, + "step": 5540 + }, + { + "epoch": 0.3860332475481672, + "grad_norm": 2.4351253509521484, + "learning_rate": 9.9750039966569e-06, + "loss": 1.0736, + "step": 5550 + }, + { + "epoch": 0.3867288029491549, + "grad_norm": 2.799710512161255, + "learning_rate": 9.974598238597955e-06, + "loss": 1.0212, + "step": 5560 + }, + { + "epoch": 0.3874243583501426, + "grad_norm": 2.004046678543091, + "learning_rate": 9.974189222068476e-06, + "loss": 1.0473, + "step": 5570 + }, + { + "epoch": 0.38811991375113025, + "grad_norm": 2.3978769779205322, + "learning_rate": 9.973776947336373e-06, + "loss": 0.9689, + "step": 5580 + }, + { + "epoch": 0.38881546915211795, + "grad_norm": 2.091973066329956, + "learning_rate": 9.9733614146717e-06, + "loss": 1.0075, + "step": 5590 + }, + { + "epoch": 0.38951102455310566, + "grad_norm": 4.5794758796691895, + "learning_rate": 9.97294262434664e-06, + "loss": 1.0218, + "step": 5600 + }, + { + "epoch": 0.39020657995409336, + "grad_norm": 2.1137895584106445, + "learning_rate": 9.972520576635505e-06, + "loss": 1.0202, + "step": 5610 + }, + { + "epoch": 0.39090213535508106, + "grad_norm": 1.945766568183899, + "learning_rate": 9.97209527181475e-06, + "loss": 0.9493, + "step": 5620 + }, + { + "epoch": 0.3915976907560687, + "grad_norm": 2.1842095851898193, + "learning_rate": 9.971666710162957e-06, + "loss": 1.0847, + "step": 5630 + }, + { + "epoch": 0.3922932461570564, + "grad_norm": 3.5761191844940186, + "learning_rate": 9.971234891960844e-06, + "loss": 1.0278, + "step": 5640 + }, + { + "epoch": 0.3929888015580441, + "grad_norm": 3.4763684272766113, + "learning_rate": 9.97079981749126e-06, + "loss": 1.017, + "step": 5650 + }, + { + "epoch": 0.3936843569590318, + "grad_norm": 2.165386915206909, + "learning_rate": 9.970361487039191e-06, + "loss": 1.0363, + "step": 5660 + }, + { + "epoch": 0.39437991236001946, + "grad_norm": 3.7385926246643066, + "learning_rate": 9.969919900891752e-06, + "loss": 1.0354, + "step": 5670 + }, + { + "epoch": 0.39507546776100716, + "grad_norm": 2.1887364387512207, + "learning_rate": 9.96947505933819e-06, + "loss": 1.0681, + "step": 5680 + }, + { + "epoch": 0.39577102316199486, + "grad_norm": 2.3895952701568604, + "learning_rate": 9.969026962669885e-06, + "loss": 1.0849, + "step": 5690 + }, + { + "epoch": 0.39646657856298256, + "grad_norm": 2.299842119216919, + "learning_rate": 9.968575611180355e-06, + "loss": 1.0552, + "step": 5700 + }, + { + "epoch": 0.3971621339639702, + "grad_norm": 3.6924402713775635, + "learning_rate": 9.96812100516524e-06, + "loss": 0.9965, + "step": 5710 + }, + { + "epoch": 0.3978576893649579, + "grad_norm": 3.425346851348877, + "learning_rate": 9.96766314492232e-06, + "loss": 1.0511, + "step": 5720 + }, + { + "epoch": 0.3985532447659456, + "grad_norm": 2.9647505283355713, + "learning_rate": 9.967202030751501e-06, + "loss": 1.1045, + "step": 5730 + }, + { + "epoch": 0.3992488001669333, + "grad_norm": 3.9521915912628174, + "learning_rate": 9.966737662954826e-06, + "loss": 0.9764, + "step": 5740 + }, + { + "epoch": 0.39994435556792096, + "grad_norm": 3.9713664054870605, + "learning_rate": 9.966270041836463e-06, + "loss": 1.0547, + "step": 5750 + }, + { + "epoch": 0.40063991096890866, 
+ "grad_norm": 3.1165716648101807, + "learning_rate": 9.965799167702716e-06, + "loss": 1.1082, + "step": 5760 + }, + { + "epoch": 0.40133546636989637, + "grad_norm": 2.1991937160491943, + "learning_rate": 9.965325040862019e-06, + "loss": 1.0444, + "step": 5770 + }, + { + "epoch": 0.40203102177088407, + "grad_norm": 2.8839242458343506, + "learning_rate": 9.964847661624931e-06, + "loss": 0.9756, + "step": 5780 + }, + { + "epoch": 0.4027265771718717, + "grad_norm": 3.9936490058898926, + "learning_rate": 9.964367030304149e-06, + "loss": 1.0424, + "step": 5790 + }, + { + "epoch": 0.4034221325728594, + "grad_norm": 2.6571409702301025, + "learning_rate": 9.963883147214497e-06, + "loss": 1.0456, + "step": 5800 + }, + { + "epoch": 0.4041176879738471, + "grad_norm": 5.000946521759033, + "learning_rate": 9.963396012672928e-06, + "loss": 1.0214, + "step": 5810 + }, + { + "epoch": 0.4048132433748348, + "grad_norm": 4.154998302459717, + "learning_rate": 9.962905626998529e-06, + "loss": 1.0071, + "step": 5820 + }, + { + "epoch": 0.4055087987758225, + "grad_norm": 3.504626512527466, + "learning_rate": 9.962411990512507e-06, + "loss": 1.0522, + "step": 5830 + }, + { + "epoch": 0.40620435417681017, + "grad_norm": 1.7185014486312866, + "learning_rate": 9.96191510353821e-06, + "loss": 0.9499, + "step": 5840 + }, + { + "epoch": 0.40689990957779787, + "grad_norm": 1.9717833995819092, + "learning_rate": 9.961414966401109e-06, + "loss": 1.0135, + "step": 5850 + }, + { + "epoch": 0.40759546497878557, + "grad_norm": 2.702493906021118, + "learning_rate": 9.960911579428802e-06, + "loss": 0.9919, + "step": 5860 + }, + { + "epoch": 0.4082910203797733, + "grad_norm": 2.275829792022705, + "learning_rate": 9.96040494295102e-06, + "loss": 1.0326, + "step": 5870 + }, + { + "epoch": 0.4089865757807609, + "grad_norm": 2.295130491256714, + "learning_rate": 9.959895057299623e-06, + "loss": 0.9808, + "step": 5880 + }, + { + "epoch": 0.4096821311817486, + "grad_norm": 2.218177318572998, + "learning_rate": 9.959381922808594e-06, + "loss": 1.0169, + "step": 5890 + }, + { + "epoch": 0.4103776865827363, + "grad_norm": 4.184948444366455, + "learning_rate": 9.95886553981405e-06, + "loss": 1.0016, + "step": 5900 + }, + { + "epoch": 0.411073241983724, + "grad_norm": 3.178241491317749, + "learning_rate": 9.958345908654232e-06, + "loss": 1.0109, + "step": 5910 + }, + { + "epoch": 0.41176879738471167, + "grad_norm": 2.0161163806915283, + "learning_rate": 9.957823029669509e-06, + "loss": 0.9704, + "step": 5920 + }, + { + "epoch": 0.4124643527856994, + "grad_norm": 5.442134857177734, + "learning_rate": 9.95729690320238e-06, + "loss": 1.0474, + "step": 5930 + }, + { + "epoch": 0.4131599081866871, + "grad_norm": 4.610241889953613, + "learning_rate": 9.956767529597466e-06, + "loss": 1.0641, + "step": 5940 + }, + { + "epoch": 0.4138554635876748, + "grad_norm": 3.107534646987915, + "learning_rate": 9.956234909201523e-06, + "loss": 1.0109, + "step": 5950 + }, + { + "epoch": 0.4145510189886624, + "grad_norm": 5.447590351104736, + "learning_rate": 9.955699042363425e-06, + "loss": 1.0033, + "step": 5960 + }, + { + "epoch": 0.4152465743896501, + "grad_norm": 3.7463176250457764, + "learning_rate": 9.955159929434178e-06, + "loss": 0.9758, + "step": 5970 + }, + { + "epoch": 0.4159421297906378, + "grad_norm": 3.2710506916046143, + "learning_rate": 9.954617570766913e-06, + "loss": 1.04, + "step": 5980 + }, + { + "epoch": 0.41663768519162553, + "grad_norm": 2.6503562927246094, + "learning_rate": 9.954071966716887e-06, + "loss": 0.9454, + "step": 5990 + }, + 
{ + "epoch": 0.4173332405926132, + "grad_norm": 3.5699102878570557, + "learning_rate": 9.953523117641482e-06, + "loss": 0.9818, + "step": 6000 + }, + { + "epoch": 0.4173332405926132, + "eval_loss": 1.0309199094772339, + "eval_runtime": 4667.673, + "eval_samples_per_second": 3.891, + "eval_steps_per_second": 0.649, + "step": 6000 + }, + { + "epoch": 0.4180287959936009, + "grad_norm": 2.8652687072753906, + "learning_rate": 9.952971023900207e-06, + "loss": 1.0169, + "step": 6010 + }, + { + "epoch": 0.4187243513945886, + "grad_norm": 4.080816268920898, + "learning_rate": 9.952415685854692e-06, + "loss": 1.0234, + "step": 6020 + }, + { + "epoch": 0.4194199067955763, + "grad_norm": 2.3878185749053955, + "learning_rate": 9.9518571038687e-06, + "loss": 1.0555, + "step": 6030 + }, + { + "epoch": 0.420115462196564, + "grad_norm": 2.348707675933838, + "learning_rate": 9.951295278308113e-06, + "loss": 1.0438, + "step": 6040 + }, + { + "epoch": 0.42081101759755163, + "grad_norm": 4.05886697769165, + "learning_rate": 9.95073020954094e-06, + "loss": 1.0267, + "step": 6050 + }, + { + "epoch": 0.42150657299853933, + "grad_norm": 2.233332633972168, + "learning_rate": 9.95016189793731e-06, + "loss": 1.0366, + "step": 6060 + }, + { + "epoch": 0.42220212839952703, + "grad_norm": 2.9144365787506104, + "learning_rate": 9.949590343869483e-06, + "loss": 1.0003, + "step": 6070 + }, + { + "epoch": 0.42289768380051473, + "grad_norm": 2.133848190307617, + "learning_rate": 9.949015547711836e-06, + "loss": 1.05, + "step": 6080 + }, + { + "epoch": 0.4235932392015024, + "grad_norm": 3.030538320541382, + "learning_rate": 9.948437509840877e-06, + "loss": 0.998, + "step": 6090 + }, + { + "epoch": 0.4242887946024901, + "grad_norm": 5.181257247924805, + "learning_rate": 9.947856230635228e-06, + "loss": 1.0272, + "step": 6100 + }, + { + "epoch": 0.4249843500034778, + "grad_norm": 4.386233329772949, + "learning_rate": 9.947271710475647e-06, + "loss": 1.0622, + "step": 6110 + }, + { + "epoch": 0.4256799054044655, + "grad_norm": 4.235024452209473, + "learning_rate": 9.946683949745002e-06, + "loss": 1.0351, + "step": 6120 + }, + { + "epoch": 0.42637546080545313, + "grad_norm": 1.8785879611968994, + "learning_rate": 9.94609294882829e-06, + "loss": 1.0774, + "step": 6130 + }, + { + "epoch": 0.42707101620644083, + "grad_norm": 2.7049503326416016, + "learning_rate": 9.945498708112632e-06, + "loss": 1.0422, + "step": 6140 + }, + { + "epoch": 0.42776657160742854, + "grad_norm": 2.78950834274292, + "learning_rate": 9.944901227987264e-06, + "loss": 0.9491, + "step": 6150 + }, + { + "epoch": 0.42846212700841624, + "grad_norm": 4.958338260650635, + "learning_rate": 9.944300508843555e-06, + "loss": 0.9808, + "step": 6160 + }, + { + "epoch": 0.4291576824094039, + "grad_norm": 2.615994691848755, + "learning_rate": 9.943696551074982e-06, + "loss": 0.9896, + "step": 6170 + }, + { + "epoch": 0.4298532378103916, + "grad_norm": 1.812137484550476, + "learning_rate": 9.943089355077156e-06, + "loss": 0.9934, + "step": 6180 + }, + { + "epoch": 0.4305487932113793, + "grad_norm": 1.6958556175231934, + "learning_rate": 9.9424789212478e-06, + "loss": 1.0195, + "step": 6190 + }, + { + "epoch": 0.431244348612367, + "grad_norm": 2.5209977626800537, + "learning_rate": 9.941865249986765e-06, + "loss": 0.9908, + "step": 6200 + }, + { + "epoch": 0.43193990401335464, + "grad_norm": 1.8985532522201538, + "learning_rate": 9.941248341696017e-06, + "loss": 0.9994, + "step": 6210 + }, + { + "epoch": 0.43263545941434234, + "grad_norm": 3.8270673751831055, + 
"learning_rate": 9.940628196779644e-06, + "loss": 1.0186, + "step": 6220 + }, + { + "epoch": 0.43333101481533004, + "grad_norm": 2.1683595180511475, + "learning_rate": 9.940004815643855e-06, + "loss": 0.9882, + "step": 6230 + }, + { + "epoch": 0.43402657021631774, + "grad_norm": 4.375995635986328, + "learning_rate": 9.939378198696978e-06, + "loss": 1.0082, + "step": 6240 + }, + { + "epoch": 0.43472212561730544, + "grad_norm": 2.4603703022003174, + "learning_rate": 9.938748346349463e-06, + "loss": 1.0732, + "step": 6250 + }, + { + "epoch": 0.4354176810182931, + "grad_norm": 2.8282809257507324, + "learning_rate": 9.938115259013875e-06, + "loss": 1.0096, + "step": 6260 + }, + { + "epoch": 0.4361132364192808, + "grad_norm": 2.9633076190948486, + "learning_rate": 9.937478937104899e-06, + "loss": 1.0224, + "step": 6270 + }, + { + "epoch": 0.4368087918202685, + "grad_norm": 8.529446601867676, + "learning_rate": 9.936839381039341e-06, + "loss": 1.0861, + "step": 6280 + }, + { + "epoch": 0.4375043472212562, + "grad_norm": 4.085458278656006, + "learning_rate": 9.936196591236125e-06, + "loss": 1.0207, + "step": 6290 + }, + { + "epoch": 0.43819990262224384, + "grad_norm": 2.4278528690338135, + "learning_rate": 9.93555056811629e-06, + "loss": 1.0298, + "step": 6300 + }, + { + "epoch": 0.43889545802323154, + "grad_norm": 2.234976053237915, + "learning_rate": 9.934901312103001e-06, + "loss": 1.0521, + "step": 6310 + }, + { + "epoch": 0.43959101342421925, + "grad_norm": 3.0491485595703125, + "learning_rate": 9.934248823621526e-06, + "loss": 1.0452, + "step": 6320 + }, + { + "epoch": 0.44028656882520695, + "grad_norm": 1.5340641736984253, + "learning_rate": 9.933593103099266e-06, + "loss": 0.9535, + "step": 6330 + }, + { + "epoch": 0.4409821242261946, + "grad_norm": 2.3937132358551025, + "learning_rate": 9.93293415096573e-06, + "loss": 1.0648, + "step": 6340 + }, + { + "epoch": 0.4416776796271823, + "grad_norm": 2.241138458251953, + "learning_rate": 9.932271967652547e-06, + "loss": 1.0076, + "step": 6350 + }, + { + "epoch": 0.44237323502817, + "grad_norm": 3.036702871322632, + "learning_rate": 9.93160655359346e-06, + "loss": 1.0747, + "step": 6360 + }, + { + "epoch": 0.4430687904291577, + "grad_norm": 6.085126876831055, + "learning_rate": 9.93093790922433e-06, + "loss": 1.0895, + "step": 6370 + }, + { + "epoch": 0.44376434583014535, + "grad_norm": 2.620971441268921, + "learning_rate": 9.930266034983134e-06, + "loss": 1.0525, + "step": 6380 + }, + { + "epoch": 0.44445990123113305, + "grad_norm": 2.9015555381774902, + "learning_rate": 9.929590931309967e-06, + "loss": 1.011, + "step": 6390 + }, + { + "epoch": 0.44515545663212075, + "grad_norm": 3.025658369064331, + "learning_rate": 9.92891259864703e-06, + "loss": 1.0149, + "step": 6400 + }, + { + "epoch": 0.44585101203310845, + "grad_norm": 2.1784346103668213, + "learning_rate": 9.928231037438654e-06, + "loss": 1.0216, + "step": 6410 + }, + { + "epoch": 0.44654656743409615, + "grad_norm": 1.630619764328003, + "learning_rate": 9.92754624813127e-06, + "loss": 1.0861, + "step": 6420 + }, + { + "epoch": 0.4472421228350838, + "grad_norm": 1.7557528018951416, + "learning_rate": 9.926858231173435e-06, + "loss": 0.985, + "step": 6430 + }, + { + "epoch": 0.4479376782360715, + "grad_norm": 7.960514545440674, + "learning_rate": 9.92616698701581e-06, + "loss": 1.007, + "step": 6440 + }, + { + "epoch": 0.4486332336370592, + "grad_norm": 4.171719551086426, + "learning_rate": 9.925472516111178e-06, + "loss": 1.0058, + "step": 6450 + }, + { + "epoch": 0.4493287890380469, + 
"grad_norm": 3.442769765853882, + "learning_rate": 9.92477481891443e-06, + "loss": 1.0465, + "step": 6460 + }, + { + "epoch": 0.45002434443903455, + "grad_norm": 4.363941669464111, + "learning_rate": 9.924073895882579e-06, + "loss": 1.0454, + "step": 6470 + }, + { + "epoch": 0.45071989984002225, + "grad_norm": 3.627669334411621, + "learning_rate": 9.923369747474738e-06, + "loss": 1.0057, + "step": 6480 + }, + { + "epoch": 0.45141545524100996, + "grad_norm": 2.354245901107788, + "learning_rate": 9.922662374152144e-06, + "loss": 1.0532, + "step": 6490 + }, + { + "epoch": 0.45211101064199766, + "grad_norm": 2.7835798263549805, + "learning_rate": 9.92195177637814e-06, + "loss": 1.0312, + "step": 6500 + }, + { + "epoch": 0.45211101064199766, + "eval_loss": 1.025974154472351, + "eval_runtime": 4573.5334, + "eval_samples_per_second": 3.971, + "eval_steps_per_second": 0.662, + "step": 6500 + }, + { + "epoch": 0.4528065660429853, + "grad_norm": 3.5498995780944824, + "learning_rate": 9.921237954618184e-06, + "loss": 0.966, + "step": 6510 + }, + { + "epoch": 0.453502121443973, + "grad_norm": 3.4667043685913086, + "learning_rate": 9.920520909339843e-06, + "loss": 0.9642, + "step": 6520 + }, + { + "epoch": 0.4541976768449607, + "grad_norm": 2.2787327766418457, + "learning_rate": 9.9198006410128e-06, + "loss": 0.9393, + "step": 6530 + }, + { + "epoch": 0.4548932322459484, + "grad_norm": 2.1635196208953857, + "learning_rate": 9.919077150108846e-06, + "loss": 0.997, + "step": 6540 + }, + { + "epoch": 0.45558878764693606, + "grad_norm": 2.058483839035034, + "learning_rate": 9.91835043710188e-06, + "loss": 1.0401, + "step": 6550 + }, + { + "epoch": 0.45628434304792376, + "grad_norm": 1.6486293077468872, + "learning_rate": 9.917620502467921e-06, + "loss": 1.0345, + "step": 6560 + }, + { + "epoch": 0.45697989844891146, + "grad_norm": 20.16895866394043, + "learning_rate": 9.916887346685087e-06, + "loss": 1.0547, + "step": 6570 + }, + { + "epoch": 0.45767545384989916, + "grad_norm": 2.431431531906128, + "learning_rate": 9.916150970233612e-06, + "loss": 0.9858, + "step": 6580 + }, + { + "epoch": 0.4583710092508868, + "grad_norm": 4.229950428009033, + "learning_rate": 9.91541137359584e-06, + "loss": 1.0114, + "step": 6590 + }, + { + "epoch": 0.4590665646518745, + "grad_norm": 4.022190570831299, + "learning_rate": 9.914668557256221e-06, + "loss": 0.9764, + "step": 6600 + }, + { + "epoch": 0.4597621200528622, + "grad_norm": 3.665649652481079, + "learning_rate": 9.913922521701318e-06, + "loss": 0.965, + "step": 6610 + }, + { + "epoch": 0.4604576754538499, + "grad_norm": 6.0390777587890625, + "learning_rate": 9.9131732674198e-06, + "loss": 0.9956, + "step": 6620 + }, + { + "epoch": 0.4611532308548376, + "grad_norm": 2.35815691947937, + "learning_rate": 9.912420794902445e-06, + "loss": 0.9766, + "step": 6630 + }, + { + "epoch": 0.46184878625582526, + "grad_norm": 2.254441499710083, + "learning_rate": 9.911665104642138e-06, + "loss": 1.0338, + "step": 6640 + }, + { + "epoch": 0.46254434165681296, + "grad_norm": 2.7547621726989746, + "learning_rate": 9.910906197133874e-06, + "loss": 1.0489, + "step": 6650 + }, + { + "epoch": 0.46323989705780066, + "grad_norm": 2.068786144256592, + "learning_rate": 9.910144072874753e-06, + "loss": 1.0104, + "step": 6660 + }, + { + "epoch": 0.46393545245878837, + "grad_norm": 2.409694194793701, + "learning_rate": 9.909378732363982e-06, + "loss": 1.0548, + "step": 6670 + }, + { + "epoch": 0.464631007859776, + "grad_norm": 2.3633503913879395, + "learning_rate": 9.908610176102879e-06, + 
"loss": 1.0197, + "step": 6680 + }, + { + "epoch": 0.4653265632607637, + "grad_norm": 1.7714943885803223, + "learning_rate": 9.907838404594863e-06, + "loss": 0.9333, + "step": 6690 + }, + { + "epoch": 0.4660221186617514, + "grad_norm": 2.059300661087036, + "learning_rate": 9.90706341834546e-06, + "loss": 1.033, + "step": 6700 + }, + { + "epoch": 0.4667176740627391, + "grad_norm": 2.2540431022644043, + "learning_rate": 9.906285217862306e-06, + "loss": 1.0471, + "step": 6710 + }, + { + "epoch": 0.46741322946372676, + "grad_norm": 3.4632771015167236, + "learning_rate": 9.905503803655137e-06, + "loss": 1.0289, + "step": 6720 + }, + { + "epoch": 0.46810878486471447, + "grad_norm": 3.5190069675445557, + "learning_rate": 9.904719176235797e-06, + "loss": 1.0624, + "step": 6730 + }, + { + "epoch": 0.46880434026570217, + "grad_norm": 2.8400673866271973, + "learning_rate": 9.903931336118233e-06, + "loss": 0.9707, + "step": 6740 + }, + { + "epoch": 0.46949989566668987, + "grad_norm": 3.695323944091797, + "learning_rate": 9.9031402838185e-06, + "loss": 1.0085, + "step": 6750 + }, + { + "epoch": 0.4701954510676775, + "grad_norm": 2.2030811309814453, + "learning_rate": 9.902346019854753e-06, + "loss": 1.0222, + "step": 6760 + }, + { + "epoch": 0.4708910064686652, + "grad_norm": 2.152660846710205, + "learning_rate": 9.901548544747252e-06, + "loss": 1.0193, + "step": 6770 + }, + { + "epoch": 0.4715865618696529, + "grad_norm": 1.8368233442306519, + "learning_rate": 9.900747859018362e-06, + "loss": 0.9586, + "step": 6780 + }, + { + "epoch": 0.4722821172706406, + "grad_norm": 3.0591297149658203, + "learning_rate": 9.89994396319255e-06, + "loss": 0.9535, + "step": 6790 + }, + { + "epoch": 0.47297767267162827, + "grad_norm": 3.9850494861602783, + "learning_rate": 9.899136857796381e-06, + "loss": 1.0676, + "step": 6800 + }, + { + "epoch": 0.47367322807261597, + "grad_norm": 3.501262664794922, + "learning_rate": 9.898326543358531e-06, + "loss": 1.0469, + "step": 6810 + }, + { + "epoch": 0.4743687834736037, + "grad_norm": 2.745246171951294, + "learning_rate": 9.897513020409772e-06, + "loss": 1.1004, + "step": 6820 + }, + { + "epoch": 0.4750643388745914, + "grad_norm": 3.542746067047119, + "learning_rate": 9.896696289482982e-06, + "loss": 0.9633, + "step": 6830 + }, + { + "epoch": 0.4757598942755791, + "grad_norm": 2.1650230884552, + "learning_rate": 9.895876351113131e-06, + "loss": 1.0626, + "step": 6840 + }, + { + "epoch": 0.4764554496765667, + "grad_norm": 3.222668170928955, + "learning_rate": 9.895053205837305e-06, + "loss": 0.985, + "step": 6850 + }, + { + "epoch": 0.4771510050775544, + "grad_norm": 1.9891599416732788, + "learning_rate": 9.894226854194676e-06, + "loss": 1.0286, + "step": 6860 + }, + { + "epoch": 0.4778465604785421, + "grad_norm": 2.149726152420044, + "learning_rate": 9.893397296726523e-06, + "loss": 1.0357, + "step": 6870 + }, + { + "epoch": 0.47854211587952983, + "grad_norm": 2.238480567932129, + "learning_rate": 9.892564533976228e-06, + "loss": 1.0587, + "step": 6880 + }, + { + "epoch": 0.4792376712805175, + "grad_norm": 3.750748872756958, + "learning_rate": 9.891728566489264e-06, + "loss": 1.0959, + "step": 6890 + }, + { + "epoch": 0.4799332266815052, + "grad_norm": 3.269805908203125, + "learning_rate": 9.890889394813214e-06, + "loss": 1.0581, + "step": 6900 + }, + { + "epoch": 0.4806287820824929, + "grad_norm": 1.959336519241333, + "learning_rate": 9.890047019497747e-06, + "loss": 1.0467, + "step": 6910 + }, + { + "epoch": 0.4813243374834806, + "grad_norm": 2.5765366554260254, + 
"learning_rate": 9.889201441094643e-06, + "loss": 1.0335, + "step": 6920 + }, + { + "epoch": 0.4820198928844682, + "grad_norm": 2.5908074378967285, + "learning_rate": 9.888352660157769e-06, + "loss": 1.0013, + "step": 6930 + }, + { + "epoch": 0.48271544828545593, + "grad_norm": 2.2753396034240723, + "learning_rate": 9.887500677243099e-06, + "loss": 0.9906, + "step": 6940 + }, + { + "epoch": 0.48341100368644363, + "grad_norm": 2.8097686767578125, + "learning_rate": 9.8866454929087e-06, + "loss": 0.9934, + "step": 6950 + }, + { + "epoch": 0.48410655908743133, + "grad_norm": 2.7409074306488037, + "learning_rate": 9.885787107714734e-06, + "loss": 1.0348, + "step": 6960 + }, + { + "epoch": 0.484802114488419, + "grad_norm": 3.1567165851593018, + "learning_rate": 9.884925522223463e-06, + "loss": 1.0602, + "step": 6970 + }, + { + "epoch": 0.4854976698894067, + "grad_norm": 2.4128568172454834, + "learning_rate": 9.884060736999249e-06, + "loss": 1.0054, + "step": 6980 + }, + { + "epoch": 0.4861932252903944, + "grad_norm": 2.5478358268737793, + "learning_rate": 9.883192752608537e-06, + "loss": 1.0362, + "step": 6990 + }, + { + "epoch": 0.4868887806913821, + "grad_norm": 2.3209121227264404, + "learning_rate": 9.882321569619882e-06, + "loss": 1.0046, + "step": 7000 + }, + { + "epoch": 0.4868887806913821, + "eval_loss": 1.0239976644515991, + "eval_runtime": 4583.0765, + "eval_samples_per_second": 3.963, + "eval_steps_per_second": 0.66, + "step": 7000 + }, + { + "epoch": 0.48758433609236973, + "grad_norm": 2.3326728343963623, + "learning_rate": 9.881447188603926e-06, + "loss": 1.0307, + "step": 7010 + }, + { + "epoch": 0.48827989149335743, + "grad_norm": 2.4830734729766846, + "learning_rate": 9.880569610133406e-06, + "loss": 1.0463, + "step": 7020 + }, + { + "epoch": 0.48897544689434513, + "grad_norm": 5.021186351776123, + "learning_rate": 9.879688834783159e-06, + "loss": 1.0102, + "step": 7030 + }, + { + "epoch": 0.48967100229533284, + "grad_norm": 2.9218618869781494, + "learning_rate": 9.878804863130107e-06, + "loss": 1.0278, + "step": 7040 + }, + { + "epoch": 0.49036655769632054, + "grad_norm": 1.601610779762268, + "learning_rate": 9.877917695753275e-06, + "loss": 1.0463, + "step": 7050 + }, + { + "epoch": 0.4910621130973082, + "grad_norm": 1.9483613967895508, + "learning_rate": 9.877027333233776e-06, + "loss": 1.0142, + "step": 7060 + }, + { + "epoch": 0.4917576684982959, + "grad_norm": 2.8962295055389404, + "learning_rate": 9.876133776154815e-06, + "loss": 1.0638, + "step": 7070 + }, + { + "epoch": 0.4924532238992836, + "grad_norm": 2.1714117527008057, + "learning_rate": 9.875237025101694e-06, + "loss": 1.0333, + "step": 7080 + }, + { + "epoch": 0.4931487793002713, + "grad_norm": 1.860797643661499, + "learning_rate": 9.874337080661802e-06, + "loss": 1.0224, + "step": 7090 + }, + { + "epoch": 0.49384433470125894, + "grad_norm": 2.6164956092834473, + "learning_rate": 9.873433943424624e-06, + "loss": 1.0308, + "step": 7100 + }, + { + "epoch": 0.49453989010224664, + "grad_norm": 2.542285442352295, + "learning_rate": 9.872527613981735e-06, + "loss": 1.076, + "step": 7110 + }, + { + "epoch": 0.49523544550323434, + "grad_norm": 3.1684298515319824, + "learning_rate": 9.871618092926799e-06, + "loss": 0.9939, + "step": 7120 + }, + { + "epoch": 0.49593100090422204, + "grad_norm": 1.68818998336792, + "learning_rate": 9.870705380855573e-06, + "loss": 0.9988, + "step": 7130 + }, + { + "epoch": 0.4966265563052097, + "grad_norm": 2.4015157222747803, + "learning_rate": 9.869789478365904e-06, + "loss": 0.9833, + 
"step": 7140 + }, + { + "epoch": 0.4973221117061974, + "grad_norm": 1.783079981803894, + "learning_rate": 9.868870386057727e-06, + "loss": 1.0195, + "step": 7150 + }, + { + "epoch": 0.4980176671071851, + "grad_norm": 2.019951105117798, + "learning_rate": 9.867948104533067e-06, + "loss": 0.9836, + "step": 7160 + }, + { + "epoch": 0.4987132225081728, + "grad_norm": 1.950823187828064, + "learning_rate": 9.86702263439604e-06, + "loss": 1.0186, + "step": 7170 + }, + { + "epoch": 0.49940877790916044, + "grad_norm": 2.8443894386291504, + "learning_rate": 9.86609397625285e-06, + "loss": 1.0513, + "step": 7180 + }, + { + "epoch": 0.5001043333101481, + "grad_norm": 2.6552109718322754, + "learning_rate": 9.865162130711786e-06, + "loss": 1.041, + "step": 7190 + }, + { + "epoch": 0.5007998887111358, + "grad_norm": 1.8136320114135742, + "learning_rate": 9.86422709838323e-06, + "loss": 0.9796, + "step": 7200 + }, + { + "epoch": 0.5014954441121235, + "grad_norm": 3.6363747119903564, + "learning_rate": 9.863288879879645e-06, + "loss": 1.0281, + "step": 7210 + }, + { + "epoch": 0.5021909995131112, + "grad_norm": 1.7311104536056519, + "learning_rate": 9.862347475815585e-06, + "loss": 1.0042, + "step": 7220 + }, + { + "epoch": 0.502886554914099, + "grad_norm": 2.2573037147521973, + "learning_rate": 9.861402886807694e-06, + "loss": 0.9608, + "step": 7230 + }, + { + "epoch": 0.5035821103150866, + "grad_norm": 2.6544926166534424, + "learning_rate": 9.860455113474697e-06, + "loss": 1.0702, + "step": 7240 + }, + { + "epoch": 0.5042776657160742, + "grad_norm": 2.9834513664245605, + "learning_rate": 9.859504156437402e-06, + "loss": 1.0522, + "step": 7250 + }, + { + "epoch": 0.504973221117062, + "grad_norm": 2.186408281326294, + "learning_rate": 9.858550016318714e-06, + "loss": 1.0081, + "step": 7260 + }, + { + "epoch": 0.5056687765180496, + "grad_norm": 1.742927074432373, + "learning_rate": 9.85759269374361e-06, + "loss": 1.0065, + "step": 7270 + }, + { + "epoch": 0.5063643319190374, + "grad_norm": 2.912945508956909, + "learning_rate": 9.856632189339157e-06, + "loss": 1.0648, + "step": 7280 + }, + { + "epoch": 0.507059887320025, + "grad_norm": 8.70742130279541, + "learning_rate": 9.85566850373451e-06, + "loss": 1.0127, + "step": 7290 + }, + { + "epoch": 0.5077554427210127, + "grad_norm": 2.507518768310547, + "learning_rate": 9.854701637560902e-06, + "loss": 1.0564, + "step": 7300 + }, + { + "epoch": 0.5084509981220005, + "grad_norm": 1.683749794960022, + "learning_rate": 9.853731591451652e-06, + "loss": 0.9675, + "step": 7310 + }, + { + "epoch": 0.5091465535229881, + "grad_norm": 1.6638413667678833, + "learning_rate": 9.852758366042161e-06, + "loss": 0.9737, + "step": 7320 + }, + { + "epoch": 0.5098421089239757, + "grad_norm": 2.0514090061187744, + "learning_rate": 9.851781961969913e-06, + "loss": 1.0593, + "step": 7330 + }, + { + "epoch": 0.5105376643249635, + "grad_norm": 3.7516157627105713, + "learning_rate": 9.850802379874476e-06, + "loss": 1.0257, + "step": 7340 + }, + { + "epoch": 0.5112332197259511, + "grad_norm": 3.967139959335327, + "learning_rate": 9.849819620397494e-06, + "loss": 1.0337, + "step": 7350 + }, + { + "epoch": 0.5119287751269389, + "grad_norm": 2.667910099029541, + "learning_rate": 9.848833684182698e-06, + "loss": 1.0235, + "step": 7360 + }, + { + "epoch": 0.5126243305279266, + "grad_norm": 1.9951664209365845, + "learning_rate": 9.847844571875898e-06, + "loss": 1.0021, + "step": 7370 + }, + { + "epoch": 0.5133198859289142, + "grad_norm": 2.806793212890625, + "learning_rate": 
9.846852284124982e-06, + "loss": 1.0506, + "step": 7380 + }, + { + "epoch": 0.514015441329902, + "grad_norm": 3.252951145172119, + "learning_rate": 9.845856821579922e-06, + "loss": 1.0389, + "step": 7390 + }, + { + "epoch": 0.5147109967308896, + "grad_norm": 2.142876386642456, + "learning_rate": 9.844858184892769e-06, + "loss": 0.9991, + "step": 7400 + }, + { + "epoch": 0.5154065521318772, + "grad_norm": 2.6384758949279785, + "learning_rate": 9.84385637471765e-06, + "loss": 1.0338, + "step": 7410 + }, + { + "epoch": 0.516102107532865, + "grad_norm": 1.8117916584014893, + "learning_rate": 9.842851391710772e-06, + "loss": 1.0336, + "step": 7420 + }, + { + "epoch": 0.5167976629338527, + "grad_norm": 2.368708372116089, + "learning_rate": 9.841843236530424e-06, + "loss": 1.0955, + "step": 7430 + }, + { + "epoch": 0.5174932183348404, + "grad_norm": 2.016583204269409, + "learning_rate": 9.840831909836965e-06, + "loss": 1.0342, + "step": 7440 + }, + { + "epoch": 0.5181887737358281, + "grad_norm": 2.1400434970855713, + "learning_rate": 9.83981741229284e-06, + "loss": 1.0268, + "step": 7450 + }, + { + "epoch": 0.5188843291368157, + "grad_norm": 2.953632354736328, + "learning_rate": 9.838799744562564e-06, + "loss": 0.9827, + "step": 7460 + }, + { + "epoch": 0.5195798845378035, + "grad_norm": 2.3987784385681152, + "learning_rate": 9.837778907312735e-06, + "loss": 1.0004, + "step": 7470 + }, + { + "epoch": 0.5202754399387911, + "grad_norm": 2.6313183307647705, + "learning_rate": 9.836754901212022e-06, + "loss": 1.0059, + "step": 7480 + }, + { + "epoch": 0.5209709953397789, + "grad_norm": 4.7407050132751465, + "learning_rate": 9.83572772693117e-06, + "loss": 1.0312, + "step": 7490 + }, + { + "epoch": 0.5216665507407665, + "grad_norm": 2.313974142074585, + "learning_rate": 9.834697385143002e-06, + "loss": 1.0317, + "step": 7500 + }, + { + "epoch": 0.5216665507407665, + "eval_loss": 1.016491174697876, + "eval_runtime": 4621.4339, + "eval_samples_per_second": 3.93, + "eval_steps_per_second": 0.655, + "step": 7500 + }, + { + "epoch": 0.5223621061417542, + "grad_norm": 2.9008705615997314, + "learning_rate": 9.833663876522415e-06, + "loss": 1.0433, + "step": 7510 + }, + { + "epoch": 0.5230576615427419, + "grad_norm": 4.5347514152526855, + "learning_rate": 9.832627201746377e-06, + "loss": 0.9894, + "step": 7520 + }, + { + "epoch": 0.5237532169437296, + "grad_norm": 2.2596795558929443, + "learning_rate": 9.831587361493936e-06, + "loss": 0.9965, + "step": 7530 + }, + { + "epoch": 0.5244487723447172, + "grad_norm": 4.020895481109619, + "learning_rate": 9.830544356446208e-06, + "loss": 1.0401, + "step": 7540 + }, + { + "epoch": 0.525144327745705, + "grad_norm": 2.8942155838012695, + "learning_rate": 9.829498187286385e-06, + "loss": 1.0768, + "step": 7550 + }, + { + "epoch": 0.5258398831466926, + "grad_norm": 2.0343353748321533, + "learning_rate": 9.828448854699732e-06, + "loss": 1.0496, + "step": 7560 + }, + { + "epoch": 0.5265354385476804, + "grad_norm": 5.143177509307861, + "learning_rate": 9.827396359373582e-06, + "loss": 0.9986, + "step": 7570 + }, + { + "epoch": 0.527230993948668, + "grad_norm": 2.1881449222564697, + "learning_rate": 9.826340701997343e-06, + "loss": 1.0823, + "step": 7580 + }, + { + "epoch": 0.5279265493496557, + "grad_norm": 2.260178565979004, + "learning_rate": 9.825281883262497e-06, + "loss": 1.0374, + "step": 7590 + }, + { + "epoch": 0.5286221047506434, + "grad_norm": 2.7261288166046143, + "learning_rate": 9.824219903862587e-06, + "loss": 1.0068, + "step": 7600 + }, + { + "epoch": 
0.5293176601516311, + "grad_norm": 3.0303843021392822, + "learning_rate": 9.823154764493237e-06, + "loss": 1.036, + "step": 7610 + }, + { + "epoch": 0.5300132155526188, + "grad_norm": 2.3561244010925293, + "learning_rate": 9.822086465852138e-06, + "loss": 1.019, + "step": 7620 + }, + { + "epoch": 0.5307087709536065, + "grad_norm": 3.086543321609497, + "learning_rate": 9.821015008639046e-06, + "loss": 1.0563, + "step": 7630 + }, + { + "epoch": 0.5314043263545941, + "grad_norm": 3.3858025074005127, + "learning_rate": 9.819940393555788e-06, + "loss": 0.9943, + "step": 7640 + }, + { + "epoch": 0.5320998817555819, + "grad_norm": 1.9641404151916504, + "learning_rate": 9.818862621306264e-06, + "loss": 1.0524, + "step": 7650 + }, + { + "epoch": 0.5327954371565695, + "grad_norm": 2.721820831298828, + "learning_rate": 9.817781692596438e-06, + "loss": 1.0672, + "step": 7660 + }, + { + "epoch": 0.5334909925575572, + "grad_norm": 2.8029532432556152, + "learning_rate": 9.816697608134339e-06, + "loss": 0.9851, + "step": 7670 + }, + { + "epoch": 0.5341865479585449, + "grad_norm": 5.5589776039123535, + "learning_rate": 9.815610368630065e-06, + "loss": 1.0357, + "step": 7680 + }, + { + "epoch": 0.5348821033595326, + "grad_norm": 1.6050211191177368, + "learning_rate": 9.814519974795786e-06, + "loss": 0.9735, + "step": 7690 + }, + { + "epoch": 0.5355776587605203, + "grad_norm": 1.5897235870361328, + "learning_rate": 9.813426427345733e-06, + "loss": 0.9803, + "step": 7700 + }, + { + "epoch": 0.536273214161508, + "grad_norm": 2.496211528778076, + "learning_rate": 9.812329726996202e-06, + "loss": 0.9619, + "step": 7710 + }, + { + "epoch": 0.5369687695624956, + "grad_norm": 1.6880065202713013, + "learning_rate": 9.811229874465554e-06, + "loss": 0.9521, + "step": 7720 + }, + { + "epoch": 0.5376643249634834, + "grad_norm": 1.8385671377182007, + "learning_rate": 9.810126870474219e-06, + "loss": 1.0207, + "step": 7730 + }, + { + "epoch": 0.538359880364471, + "grad_norm": 1.9293749332427979, + "learning_rate": 9.80902071574469e-06, + "loss": 1.0956, + "step": 7740 + }, + { + "epoch": 0.5390554357654587, + "grad_norm": 1.4340964555740356, + "learning_rate": 9.807911411001518e-06, + "loss": 0.9482, + "step": 7750 + }, + { + "epoch": 0.5397509911664464, + "grad_norm": 4.971289157867432, + "learning_rate": 9.806798956971327e-06, + "loss": 1.0395, + "step": 7760 + }, + { + "epoch": 0.5404465465674341, + "grad_norm": 3.2275097370147705, + "learning_rate": 9.805683354382795e-06, + "loss": 1.0964, + "step": 7770 + }, + { + "epoch": 0.5411421019684218, + "grad_norm": 2.33555269241333, + "learning_rate": 9.80456460396667e-06, + "loss": 1.0371, + "step": 7780 + }, + { + "epoch": 0.5418376573694095, + "grad_norm": 2.9807257652282715, + "learning_rate": 9.803442706455751e-06, + "loss": 1.0334, + "step": 7790 + }, + { + "epoch": 0.5425332127703971, + "grad_norm": 3.2262439727783203, + "learning_rate": 9.802317662584912e-06, + "loss": 1.0147, + "step": 7800 + }, + { + "epoch": 0.5432287681713849, + "grad_norm": 2.9365663528442383, + "learning_rate": 9.801189473091078e-06, + "loss": 1.0663, + "step": 7810 + }, + { + "epoch": 0.5439243235723725, + "grad_norm": 2.464205503463745, + "learning_rate": 9.80005813871324e-06, + "loss": 0.9752, + "step": 7820 + }, + { + "epoch": 0.5446198789733603, + "grad_norm": 2.221801280975342, + "learning_rate": 9.798923660192444e-06, + "loss": 1.0083, + "step": 7830 + }, + { + "epoch": 0.5453154343743479, + "grad_norm": 2.4428696632385254, + "learning_rate": 9.797786038271801e-06, + "loss": 0.9583, + 
"step": 7840 + }, + { + "epoch": 0.5460109897753356, + "grad_norm": 2.144453287124634, + "learning_rate": 9.796645273696476e-06, + "loss": 1.0196, + "step": 7850 + }, + { + "epoch": 0.5467065451763233, + "grad_norm": 3.8755130767822266, + "learning_rate": 9.795501367213696e-06, + "loss": 1.0193, + "step": 7860 + }, + { + "epoch": 0.547402100577311, + "grad_norm": 2.995910167694092, + "learning_rate": 9.794354319572742e-06, + "loss": 1.0267, + "step": 7870 + }, + { + "epoch": 0.5480976559782986, + "grad_norm": 4.136692523956299, + "learning_rate": 9.793204131524961e-06, + "loss": 0.9838, + "step": 7880 + }, + { + "epoch": 0.5487932113792864, + "grad_norm": 1.5194177627563477, + "learning_rate": 9.792050803823747e-06, + "loss": 0.9981, + "step": 7890 + }, + { + "epoch": 0.549488766780274, + "grad_norm": 6.1063995361328125, + "learning_rate": 9.790894337224555e-06, + "loss": 1.02, + "step": 7900 + }, + { + "epoch": 0.5501843221812618, + "grad_norm": 6.971775531768799, + "learning_rate": 9.789734732484897e-06, + "loss": 1.0635, + "step": 7910 + }, + { + "epoch": 0.5508798775822494, + "grad_norm": 3.659198522567749, + "learning_rate": 9.78857199036434e-06, + "loss": 0.9742, + "step": 7920 + }, + { + "epoch": 0.5515754329832371, + "grad_norm": 7.353811264038086, + "learning_rate": 9.787406111624504e-06, + "loss": 1.0069, + "step": 7930 + }, + { + "epoch": 0.5522709883842248, + "grad_norm": 2.0825560092926025, + "learning_rate": 9.786237097029065e-06, + "loss": 0.9917, + "step": 7940 + }, + { + "epoch": 0.5529665437852125, + "grad_norm": 1.7086381912231445, + "learning_rate": 9.785064947343754e-06, + "loss": 0.9997, + "step": 7950 + }, + { + "epoch": 0.5536620991862001, + "grad_norm": 2.0725972652435303, + "learning_rate": 9.783889663336356e-06, + "loss": 0.9888, + "step": 7960 + }, + { + "epoch": 0.5543576545871879, + "grad_norm": 3.4529120922088623, + "learning_rate": 9.782711245776703e-06, + "loss": 1.0014, + "step": 7970 + }, + { + "epoch": 0.5550532099881755, + "grad_norm": 1.9326363801956177, + "learning_rate": 9.78152969543669e-06, + "loss": 1.0244, + "step": 7980 + }, + { + "epoch": 0.5557487653891633, + "grad_norm": 2.433790683746338, + "learning_rate": 9.780345013090255e-06, + "loss": 1.0149, + "step": 7990 + }, + { + "epoch": 0.5564443207901509, + "grad_norm": 2.6118321418762207, + "learning_rate": 9.779157199513392e-06, + "loss": 0.9893, + "step": 8000 + }, + { + "epoch": 0.5564443207901509, + "eval_loss": 1.0137079954147339, + "eval_runtime": 4598.4305, + "eval_samples_per_second": 3.949, + "eval_steps_per_second": 0.658, + "step": 8000 + }, + { + "epoch": 0.5571398761911386, + "grad_norm": 2.2977099418640137, + "learning_rate": 9.777966255484143e-06, + "loss": 1.009, + "step": 8010 + }, + { + "epoch": 0.5578354315921263, + "grad_norm": 2.034648895263672, + "learning_rate": 9.776772181782604e-06, + "loss": 0.9877, + "step": 8020 + }, + { + "epoch": 0.558530986993114, + "grad_norm": 2.6150259971618652, + "learning_rate": 9.775574979190918e-06, + "loss": 1.0366, + "step": 8030 + }, + { + "epoch": 0.5592265423941017, + "grad_norm": 3.86712908744812, + "learning_rate": 9.774374648493281e-06, + "loss": 1.0241, + "step": 8040 + }, + { + "epoch": 0.5599220977950894, + "grad_norm": 1.8797314167022705, + "learning_rate": 9.773171190475935e-06, + "loss": 1.0109, + "step": 8050 + }, + { + "epoch": 0.560617653196077, + "grad_norm": 2.832256555557251, + "learning_rate": 9.77196460592717e-06, + "loss": 0.9232, + "step": 8060 + }, + { + "epoch": 0.5613132085970648, + "grad_norm": 
1.8037693500518799, + "learning_rate": 9.770754895637328e-06, + "loss": 0.96, + "step": 8070 + }, + { + "epoch": 0.5620087639980524, + "grad_norm": 1.8019936084747314, + "learning_rate": 9.769542060398794e-06, + "loss": 1.0237, + "step": 8080 + }, + { + "epoch": 0.5627043193990401, + "grad_norm": 2.0839247703552246, + "learning_rate": 9.768326101006e-06, + "loss": 1.0206, + "step": 8090 + }, + { + "epoch": 0.5633998748000278, + "grad_norm": 2.7258458137512207, + "learning_rate": 9.767107018255428e-06, + "loss": 1.0632, + "step": 8100 + }, + { + "epoch": 0.5640954302010155, + "grad_norm": 2.873509645462036, + "learning_rate": 9.765884812945603e-06, + "loss": 1.0517, + "step": 8110 + }, + { + "epoch": 0.5647909856020032, + "grad_norm": 1.674194097518921, + "learning_rate": 9.764659485877095e-06, + "loss": 0.9489, + "step": 8120 + }, + { + "epoch": 0.5654865410029909, + "grad_norm": 3.6784873008728027, + "learning_rate": 9.763431037852524e-06, + "loss": 1.0042, + "step": 8130 + }, + { + "epoch": 0.5661820964039785, + "grad_norm": 1.8324865102767944, + "learning_rate": 9.762199469676547e-06, + "loss": 1.0209, + "step": 8140 + }, + { + "epoch": 0.5668776518049663, + "grad_norm": 2.3326356410980225, + "learning_rate": 9.760964782155868e-06, + "loss": 1.069, + "step": 8150 + }, + { + "epoch": 0.567573207205954, + "grad_norm": 2.6556546688079834, + "learning_rate": 9.759726976099237e-06, + "loss": 1.0207, + "step": 8160 + }, + { + "epoch": 0.5682687626069416, + "grad_norm": 3.342338800430298, + "learning_rate": 9.75848605231744e-06, + "loss": 1.0224, + "step": 8170 + }, + { + "epoch": 0.5689643180079293, + "grad_norm": 4.15756893157959, + "learning_rate": 9.757242011623313e-06, + "loss": 1.0117, + "step": 8180 + }, + { + "epoch": 0.569659873408917, + "grad_norm": 2.3357350826263428, + "learning_rate": 9.755994854831727e-06, + "loss": 1.0249, + "step": 8190 + }, + { + "epoch": 0.5703554288099048, + "grad_norm": 2.6819000244140625, + "learning_rate": 9.754744582759598e-06, + "loss": 0.9628, + "step": 8200 + }, + { + "epoch": 0.5710509842108924, + "grad_norm": 2.5345399379730225, + "learning_rate": 9.753491196225883e-06, + "loss": 0.988, + "step": 8210 + }, + { + "epoch": 0.57174653961188, + "grad_norm": 3.754526376724243, + "learning_rate": 9.752234696051577e-06, + "loss": 0.957, + "step": 8220 + }, + { + "epoch": 0.5724420950128678, + "grad_norm": 1.7479192018508911, + "learning_rate": 9.750975083059712e-06, + "loss": 1.0011, + "step": 8230 + }, + { + "epoch": 0.5731376504138554, + "grad_norm": 1.9977163076400757, + "learning_rate": 9.749712358075366e-06, + "loss": 1.053, + "step": 8240 + }, + { + "epoch": 0.5738332058148432, + "grad_norm": 1.8184921741485596, + "learning_rate": 9.748446521925648e-06, + "loss": 0.9444, + "step": 8250 + }, + { + "epoch": 0.5745287612158309, + "grad_norm": 2.3305389881134033, + "learning_rate": 9.747177575439713e-06, + "loss": 0.9814, + "step": 8260 + }, + { + "epoch": 0.5752243166168185, + "grad_norm": 2.3633840084075928, + "learning_rate": 9.745905519448743e-06, + "loss": 1.0394, + "step": 8270 + }, + { + "epoch": 0.5759198720178063, + "grad_norm": 3.7427425384521484, + "learning_rate": 9.744630354785967e-06, + "loss": 1.0097, + "step": 8280 + }, + { + "epoch": 0.5766154274187939, + "grad_norm": 1.642398476600647, + "learning_rate": 9.743352082286641e-06, + "loss": 0.986, + "step": 8290 + }, + { + "epoch": 0.5773109828197815, + "grad_norm": 1.925115704536438, + "learning_rate": 9.742070702788067e-06, + "loss": 0.9725, + "step": 8300 + }, + { + "epoch": 
0.5780065382207693, + "grad_norm": 3.494719982147217, + "learning_rate": 9.740786217129574e-06, + "loss": 0.9394, + "step": 8310 + }, + { + "epoch": 0.578702093621757, + "grad_norm": 1.676769495010376, + "learning_rate": 9.739498626152528e-06, + "loss": 0.9681, + "step": 8320 + }, + { + "epoch": 0.5793976490227447, + "grad_norm": 2.1723740100860596, + "learning_rate": 9.73820793070033e-06, + "loss": 1.0784, + "step": 8330 + }, + { + "epoch": 0.5800932044237324, + "grad_norm": 3.8778064250946045, + "learning_rate": 9.736914131618412e-06, + "loss": 0.9844, + "step": 8340 + }, + { + "epoch": 0.58078875982472, + "grad_norm": 3.0927348136901855, + "learning_rate": 9.73561722975424e-06, + "loss": 0.9963, + "step": 8350 + }, + { + "epoch": 0.5814843152257078, + "grad_norm": 4.640049457550049, + "learning_rate": 9.734317225957317e-06, + "loss": 0.9676, + "step": 8360 + }, + { + "epoch": 0.5821798706266954, + "grad_norm": 2.799706220626831, + "learning_rate": 9.73301412107917e-06, + "loss": 0.9661, + "step": 8370 + }, + { + "epoch": 0.582875426027683, + "grad_norm": 2.3261349201202393, + "learning_rate": 9.731707915973365e-06, + "loss": 0.9593, + "step": 8380 + }, + { + "epoch": 0.5835709814286708, + "grad_norm": 3.6237289905548096, + "learning_rate": 9.730398611495492e-06, + "loss": 1.0147, + "step": 8390 + }, + { + "epoch": 0.5842665368296585, + "grad_norm": 1.5647138357162476, + "learning_rate": 9.729086208503174e-06, + "loss": 0.9982, + "step": 8400 + }, + { + "epoch": 0.5849620922306462, + "grad_norm": 2.580134391784668, + "learning_rate": 9.727770707856066e-06, + "loss": 1.0073, + "step": 8410 + }, + { + "epoch": 0.5856576476316339, + "grad_norm": 2.6061387062072754, + "learning_rate": 9.726452110415847e-06, + "loss": 0.9523, + "step": 8420 + }, + { + "epoch": 0.5863532030326215, + "grad_norm": 2.248084306716919, + "learning_rate": 9.725130417046228e-06, + "loss": 0.9443, + "step": 8430 + }, + { + "epoch": 0.5870487584336093, + "grad_norm": 3.1775996685028076, + "learning_rate": 9.723805628612947e-06, + "loss": 0.9844, + "step": 8440 + }, + { + "epoch": 0.5877443138345969, + "grad_norm": 2.256800651550293, + "learning_rate": 9.72247774598377e-06, + "loss": 0.9478, + "step": 8450 + }, + { + "epoch": 0.5884398692355847, + "grad_norm": 1.7384254932403564, + "learning_rate": 9.721146770028489e-06, + "loss": 0.8992, + "step": 8460 + }, + { + "epoch": 0.5891354246365723, + "grad_norm": 3.145155429840088, + "learning_rate": 9.719812701618921e-06, + "loss": 0.9961, + "step": 8470 + }, + { + "epoch": 0.58983098003756, + "grad_norm": 3.972504138946533, + "learning_rate": 9.718475541628913e-06, + "loss": 1.0154, + "step": 8480 + }, + { + "epoch": 0.5905265354385477, + "grad_norm": 2.5990331172943115, + "learning_rate": 9.71713529093433e-06, + "loss": 1.0297, + "step": 8490 + }, + { + "epoch": 0.5912220908395354, + "grad_norm": 1.6239560842514038, + "learning_rate": 9.71579195041307e-06, + "loss": 0.9909, + "step": 8500 + }, + { + "epoch": 0.5912220908395354, + "eval_loss": 1.0061399936676025, + "eval_runtime": 4590.4035, + "eval_samples_per_second": 3.956, + "eval_steps_per_second": 0.659, + "step": 8500 + }, + { + "epoch": 0.591917646240523, + "grad_norm": 2.935238838195801, + "learning_rate": 9.714445520945045e-06, + "loss": 0.937, + "step": 8510 + }, + { + "epoch": 0.5926132016415108, + "grad_norm": 2.36318302154541, + "learning_rate": 9.7130960034122e-06, + "loss": 1.0045, + "step": 8520 + }, + { + "epoch": 0.5933087570424984, + "grad_norm": 1.7025820016860962, + "learning_rate": 
9.711743398698496e-06, + "loss": 0.9444, + "step": 8530 + }, + { + "epoch": 0.5940043124434862, + "grad_norm": 2.3346784114837646, + "learning_rate": 9.710387707689923e-06, + "loss": 1.0037, + "step": 8540 + }, + { + "epoch": 0.5946998678444738, + "grad_norm": 3.193920850753784, + "learning_rate": 9.709028931274482e-06, + "loss": 0.9836, + "step": 8550 + }, + { + "epoch": 0.5953954232454615, + "grad_norm": 2.688633441925049, + "learning_rate": 9.707667070342205e-06, + "loss": 1.0152, + "step": 8560 + }, + { + "epoch": 0.5960909786464492, + "grad_norm": 2.7123701572418213, + "learning_rate": 9.706302125785139e-06, + "loss": 1.0067, + "step": 8570 + }, + { + "epoch": 0.5967865340474369, + "grad_norm": 1.7976980209350586, + "learning_rate": 9.704934098497356e-06, + "loss": 0.9776, + "step": 8580 + }, + { + "epoch": 0.5974820894484245, + "grad_norm": 1.723183512687683, + "learning_rate": 9.70356298937494e-06, + "loss": 0.9457, + "step": 8590 + }, + { + "epoch": 0.5981776448494123, + "grad_norm": 2.132117986679077, + "learning_rate": 9.702188799315997e-06, + "loss": 0.9996, + "step": 8600 + }, + { + "epoch": 0.5988732002503999, + "grad_norm": 2.9660356044769287, + "learning_rate": 9.700811529220653e-06, + "loss": 0.9767, + "step": 8610 + }, + { + "epoch": 0.5995687556513877, + "grad_norm": 3.7348194122314453, + "learning_rate": 9.699431179991053e-06, + "loss": 1.0515, + "step": 8620 + }, + { + "epoch": 0.6002643110523753, + "grad_norm": 4.010781288146973, + "learning_rate": 9.69804775253135e-06, + "loss": 0.9087, + "step": 8630 + }, + { + "epoch": 0.600959866453363, + "grad_norm": 1.7743746042251587, + "learning_rate": 9.696661247747723e-06, + "loss": 0.9865, + "step": 8640 + }, + { + "epoch": 0.6016554218543507, + "grad_norm": 1.9430928230285645, + "learning_rate": 9.695271666548362e-06, + "loss": 0.9417, + "step": 8650 + }, + { + "epoch": 0.6023509772553384, + "grad_norm": 1.8382066488265991, + "learning_rate": 9.693879009843475e-06, + "loss": 0.9386, + "step": 8660 + }, + { + "epoch": 0.6030465326563261, + "grad_norm": 2.234323263168335, + "learning_rate": 9.69248327854528e-06, + "loss": 1.0203, + "step": 8670 + }, + { + "epoch": 0.6037420880573138, + "grad_norm": 2.4181480407714844, + "learning_rate": 9.69108447356801e-06, + "loss": 1.0061, + "step": 8680 + }, + { + "epoch": 0.6044376434583014, + "grad_norm": 3.2375218868255615, + "learning_rate": 9.689682595827919e-06, + "loss": 0.9455, + "step": 8690 + }, + { + "epoch": 0.6051331988592892, + "grad_norm": 3.2061665058135986, + "learning_rate": 9.68827764624326e-06, + "loss": 0.9863, + "step": 8700 + }, + { + "epoch": 0.6058287542602768, + "grad_norm": 2.3733572959899902, + "learning_rate": 9.686869625734311e-06, + "loss": 0.9849, + "step": 8710 + }, + { + "epoch": 0.6065243096612645, + "grad_norm": 1.5500117540359497, + "learning_rate": 9.685458535223356e-06, + "loss": 0.9995, + "step": 8720 + }, + { + "epoch": 0.6072198650622522, + "grad_norm": 1.8214384317398071, + "learning_rate": 9.684044375634687e-06, + "loss": 0.9947, + "step": 8730 + }, + { + "epoch": 0.6079154204632399, + "grad_norm": 2.254053831100464, + "learning_rate": 9.68262714789461e-06, + "loss": 1.0335, + "step": 8740 + }, + { + "epoch": 0.6086109758642276, + "grad_norm": 2.1616814136505127, + "learning_rate": 9.681206852931442e-06, + "loss": 1.048, + "step": 8750 + }, + { + "epoch": 0.6093065312652153, + "grad_norm": 4.881176471710205, + "learning_rate": 9.679783491675507e-06, + "loss": 1.0136, + "step": 8760 + }, + { + "epoch": 0.6100020866662029, + "grad_norm": 
2.0357179641723633, + "learning_rate": 9.678357065059136e-06, + "loss": 0.9821, + "step": 8770 + }, + { + "epoch": 0.6106976420671907, + "grad_norm": 2.476459503173828, + "learning_rate": 9.676927574016672e-06, + "loss": 0.9851, + "step": 8780 + }, + { + "epoch": 0.6113931974681783, + "grad_norm": 3.3701744079589844, + "learning_rate": 9.67549501948446e-06, + "loss": 0.9877, + "step": 8790 + }, + { + "epoch": 0.612088752869166, + "grad_norm": 3.102743148803711, + "learning_rate": 9.674059402400858e-06, + "loss": 1.0208, + "step": 8800 + }, + { + "epoch": 0.6127843082701537, + "grad_norm": 2.4717562198638916, + "learning_rate": 9.672620723706223e-06, + "loss": 1.031, + "step": 8810 + }, + { + "epoch": 0.6134798636711414, + "grad_norm": 2.82344388961792, + "learning_rate": 9.671178984342924e-06, + "loss": 0.983, + "step": 8820 + }, + { + "epoch": 0.6141754190721291, + "grad_norm": 7.428765773773193, + "learning_rate": 9.669734185255331e-06, + "loss": 1.0176, + "step": 8830 + }, + { + "epoch": 0.6148709744731168, + "grad_norm": 2.341041326522827, + "learning_rate": 9.668286327389817e-06, + "loss": 0.9774, + "step": 8840 + }, + { + "epoch": 0.6155665298741044, + "grad_norm": 3.4605600833892822, + "learning_rate": 9.666835411694761e-06, + "loss": 1.013, + "step": 8850 + }, + { + "epoch": 0.6162620852750922, + "grad_norm": 2.113783836364746, + "learning_rate": 9.665381439120547e-06, + "loss": 1.0513, + "step": 8860 + }, + { + "epoch": 0.6169576406760798, + "grad_norm": 2.5813071727752686, + "learning_rate": 9.663924410619556e-06, + "loss": 1.0461, + "step": 8870 + }, + { + "epoch": 0.6176531960770676, + "grad_norm": 2.0591375827789307, + "learning_rate": 9.662464327146177e-06, + "loss": 0.9903, + "step": 8880 + }, + { + "epoch": 0.6183487514780552, + "grad_norm": 3.770012378692627, + "learning_rate": 9.661001189656793e-06, + "loss": 1.0375, + "step": 8890 + }, + { + "epoch": 0.6190443068790429, + "grad_norm": 2.2549147605895996, + "learning_rate": 9.659534999109792e-06, + "loss": 0.9503, + "step": 8900 + }, + { + "epoch": 0.6197398622800306, + "grad_norm": 2.151211738586426, + "learning_rate": 9.658065756465563e-06, + "loss": 1.04, + "step": 8910 + }, + { + "epoch": 0.6204354176810183, + "grad_norm": 1.6309565305709839, + "learning_rate": 9.656593462686488e-06, + "loss": 0.9729, + "step": 8920 + }, + { + "epoch": 0.6211309730820059, + "grad_norm": 1.7615456581115723, + "learning_rate": 9.655118118736954e-06, + "loss": 1.0052, + "step": 8930 + }, + { + "epoch": 0.6218265284829937, + "grad_norm": 2.3096272945404053, + "learning_rate": 9.653639725583344e-06, + "loss": 1.0526, + "step": 8940 + }, + { + "epoch": 0.6225220838839813, + "grad_norm": 2.4891197681427, + "learning_rate": 9.652158284194035e-06, + "loss": 1.0385, + "step": 8950 + }, + { + "epoch": 0.6232176392849691, + "grad_norm": 1.665881872177124, + "learning_rate": 9.650673795539409e-06, + "loss": 1.033, + "step": 8960 + }, + { + "epoch": 0.6239131946859567, + "grad_norm": 2.896926164627075, + "learning_rate": 9.649186260591833e-06, + "loss": 1.0121, + "step": 8970 + }, + { + "epoch": 0.6246087500869444, + "grad_norm": 2.250419855117798, + "learning_rate": 9.647695680325678e-06, + "loss": 0.9835, + "step": 8980 + }, + { + "epoch": 0.6253043054879321, + "grad_norm": 1.9252668619155884, + "learning_rate": 9.646202055717304e-06, + "loss": 1.0204, + "step": 8990 + }, + { + "epoch": 0.6259998608889198, + "grad_norm": 2.1913342475891113, + "learning_rate": 9.64470538774507e-06, + "loss": 1.003, + "step": 9000 + }, + { + "epoch": 
0.6259998608889198, + "eval_loss": 1.0061264038085938, + "eval_runtime": 4632.8356, + "eval_samples_per_second": 3.92, + "eval_steps_per_second": 0.653, + "step": 9000 + }, + { + "epoch": 0.6266954162899074, + "grad_norm": 3.9791197776794434, + "learning_rate": 9.643205677389327e-06, + "loss": 1.024, + "step": 9010 + }, + { + "epoch": 0.6273909716908952, + "grad_norm": 1.985949158668518, + "learning_rate": 9.641702925632418e-06, + "loss": 0.9905, + "step": 9020 + }, + { + "epoch": 0.6280865270918828, + "grad_norm": 3.431908130645752, + "learning_rate": 9.640197133458674e-06, + "loss": 0.9809, + "step": 9030 + }, + { + "epoch": 0.6287820824928706, + "grad_norm": 4.159012794494629, + "learning_rate": 9.638688301854425e-06, + "loss": 0.95, + "step": 9040 + }, + { + "epoch": 0.6294776378938582, + "grad_norm": 2.140486001968384, + "learning_rate": 9.637176431807989e-06, + "loss": 1.0041, + "step": 9050 + }, + { + "epoch": 0.6301731932948459, + "grad_norm": 2.142855405807495, + "learning_rate": 9.635661524309672e-06, + "loss": 0.9412, + "step": 9060 + }, + { + "epoch": 0.6308687486958336, + "grad_norm": 2.6207847595214844, + "learning_rate": 9.634143580351775e-06, + "loss": 0.9622, + "step": 9070 + }, + { + "epoch": 0.6315643040968213, + "grad_norm": 6.061389446258545, + "learning_rate": 9.63262260092858e-06, + "loss": 1.0005, + "step": 9080 + }, + { + "epoch": 0.632259859497809, + "grad_norm": 2.2443125247955322, + "learning_rate": 9.631098587036367e-06, + "loss": 1.0022, + "step": 9090 + }, + { + "epoch": 0.6329554148987967, + "grad_norm": 2.26668119430542, + "learning_rate": 9.629571539673392e-06, + "loss": 1.0646, + "step": 9100 + }, + { + "epoch": 0.6336509702997843, + "grad_norm": 2.0673985481262207, + "learning_rate": 9.62804145983991e-06, + "loss": 1.0013, + "step": 9110 + }, + { + "epoch": 0.6343465257007721, + "grad_norm": 2.7966713905334473, + "learning_rate": 9.626508348538153e-06, + "loss": 0.9321, + "step": 9120 + }, + { + "epoch": 0.6350420811017597, + "grad_norm": 4.965925693511963, + "learning_rate": 9.624972206772345e-06, + "loss": 1.0382, + "step": 9130 + }, + { + "epoch": 0.6357376365027474, + "grad_norm": 2.5893540382385254, + "learning_rate": 9.62343303554869e-06, + "loss": 1.0621, + "step": 9140 + }, + { + "epoch": 0.6364331919037352, + "grad_norm": 2.3161184787750244, + "learning_rate": 9.621890835875383e-06, + "loss": 1.0749, + "step": 9150 + }, + { + "epoch": 0.6371287473047228, + "grad_norm": 2.2058660984039307, + "learning_rate": 9.620345608762593e-06, + "loss": 0.9667, + "step": 9160 + }, + { + "epoch": 0.6378243027057106, + "grad_norm": 3.6150317192077637, + "learning_rate": 9.61879735522248e-06, + "loss": 0.9872, + "step": 9170 + }, + { + "epoch": 0.6385198581066982, + "grad_norm": 1.9755939245224, + "learning_rate": 9.617246076269184e-06, + "loss": 0.9685, + "step": 9180 + }, + { + "epoch": 0.6392154135076858, + "grad_norm": 5.842147350311279, + "learning_rate": 9.615691772918829e-06, + "loss": 0.9217, + "step": 9190 + }, + { + "epoch": 0.6399109689086736, + "grad_norm": 2.3024919033050537, + "learning_rate": 9.614134446189512e-06, + "loss": 0.9775, + "step": 9200 + }, + { + "epoch": 0.6406065243096613, + "grad_norm": 1.9859203100204468, + "learning_rate": 9.612574097101322e-06, + "loss": 0.9985, + "step": 9210 + }, + { + "epoch": 0.6413020797106489, + "grad_norm": 1.423622727394104, + "learning_rate": 9.611010726676317e-06, + "loss": 1.0134, + "step": 9220 + }, + { + "epoch": 0.6419976351116367, + "grad_norm": 2.1606407165527344, + "learning_rate": 
9.609444335938542e-06, + "loss": 0.9874, + "step": 9230 + }, + { + "epoch": 0.6426931905126243, + "grad_norm": 1.8728210926055908, + "learning_rate": 9.607874925914016e-06, + "loss": 0.966, + "step": 9240 + }, + { + "epoch": 0.6433887459136121, + "grad_norm": 2.6309092044830322, + "learning_rate": 9.606302497630735e-06, + "loss": 0.9922, + "step": 9250 + }, + { + "epoch": 0.6440843013145997, + "grad_norm": 1.9632236957550049, + "learning_rate": 9.604727052118678e-06, + "loss": 0.9773, + "step": 9260 + }, + { + "epoch": 0.6447798567155874, + "grad_norm": 5.6396589279174805, + "learning_rate": 9.603148590409794e-06, + "loss": 0.9802, + "step": 9270 + }, + { + "epoch": 0.6454754121165751, + "grad_norm": 2.2929046154022217, + "learning_rate": 9.601567113538008e-06, + "loss": 1.0006, + "step": 9280 + }, + { + "epoch": 0.6461709675175628, + "grad_norm": 1.895707368850708, + "learning_rate": 9.599982622539225e-06, + "loss": 0.9896, + "step": 9290 + }, + { + "epoch": 0.6468665229185505, + "grad_norm": 2.3205296993255615, + "learning_rate": 9.59839511845132e-06, + "loss": 0.9867, + "step": 9300 + }, + { + "epoch": 0.6475620783195382, + "grad_norm": 2.58019757270813, + "learning_rate": 9.596804602314141e-06, + "loss": 0.9711, + "step": 9310 + }, + { + "epoch": 0.6482576337205258, + "grad_norm": 2.3122129440307617, + "learning_rate": 9.595211075169515e-06, + "loss": 0.9937, + "step": 9320 + }, + { + "epoch": 0.6489531891215136, + "grad_norm": 3.512523889541626, + "learning_rate": 9.593614538061233e-06, + "loss": 1.0282, + "step": 9330 + }, + { + "epoch": 0.6496487445225012, + "grad_norm": 2.8056132793426514, + "learning_rate": 9.592014992035065e-06, + "loss": 1.0194, + "step": 9340 + }, + { + "epoch": 0.6503442999234889, + "grad_norm": 1.8286393880844116, + "learning_rate": 9.590412438138746e-06, + "loss": 0.916, + "step": 9350 + }, + { + "epoch": 0.6510398553244766, + "grad_norm": 2.3665032386779785, + "learning_rate": 9.588806877421986e-06, + "loss": 0.9629, + "step": 9360 + }, + { + "epoch": 0.6517354107254643, + "grad_norm": 2.2017245292663574, + "learning_rate": 9.58719831093646e-06, + "loss": 1.0112, + "step": 9370 + }, + { + "epoch": 0.652430966126452, + "grad_norm": 1.4880505800247192, + "learning_rate": 9.585586739735815e-06, + "loss": 0.9562, + "step": 9380 + }, + { + "epoch": 0.6531265215274397, + "grad_norm": 3.351055860519409, + "learning_rate": 9.583972164875668e-06, + "loss": 0.987, + "step": 9390 + }, + { + "epoch": 0.6538220769284273, + "grad_norm": 4.248812198638916, + "learning_rate": 9.582354587413596e-06, + "loss": 1.0007, + "step": 9400 + }, + { + "epoch": 0.6545176323294151, + "grad_norm": 2.5656821727752686, + "learning_rate": 9.580734008409151e-06, + "loss": 1.0423, + "step": 9410 + }, + { + "epoch": 0.6552131877304027, + "grad_norm": 2.495500326156616, + "learning_rate": 9.579110428923847e-06, + "loss": 1.0597, + "step": 9420 + }, + { + "epoch": 0.6559087431313905, + "grad_norm": 3.2983193397521973, + "learning_rate": 9.577483850021164e-06, + "loss": 1.0241, + "step": 9430 + }, + { + "epoch": 0.6566042985323781, + "grad_norm": 2.821584463119507, + "learning_rate": 9.575854272766547e-06, + "loss": 0.9705, + "step": 9440 + }, + { + "epoch": 0.6572998539333658, + "grad_norm": 2.1200578212738037, + "learning_rate": 9.574221698227403e-06, + "loss": 1.0092, + "step": 9450 + }, + { + "epoch": 0.6579954093343535, + "grad_norm": 2.74942946434021, + "learning_rate": 9.572586127473106e-06, + "loss": 1.0168, + "step": 9460 + }, + { + "epoch": 0.6586909647353412, + "grad_norm": 
2.5691816806793213, + "learning_rate": 9.57094756157499e-06, + "loss": 0.8996, + "step": 9470 + }, + { + "epoch": 0.6593865201363288, + "grad_norm": 2.193666458129883, + "learning_rate": 9.56930600160635e-06, + "loss": 0.9644, + "step": 9480 + }, + { + "epoch": 0.6600820755373166, + "grad_norm": 2.4247875213623047, + "learning_rate": 9.567661448642447e-06, + "loss": 1.0535, + "step": 9490 + }, + { + "epoch": 0.6607776309383042, + "grad_norm": 12.583206176757812, + "learning_rate": 9.566013903760496e-06, + "loss": 1.0103, + "step": 9500 + }, + { + "epoch": 0.6607776309383042, + "eval_loss": 1.0018049478530884, + "eval_runtime": 4597.4447, + "eval_samples_per_second": 3.95, + "eval_steps_per_second": 0.658, + "step": 9500 + }, + { + "epoch": 0.661473186339292, + "grad_norm": 2.1884517669677734, + "learning_rate": 9.564363368039675e-06, + "loss": 1.0005, + "step": 9510 + }, + { + "epoch": 0.6621687417402796, + "grad_norm": 1.3615188598632812, + "learning_rate": 9.562709842561124e-06, + "loss": 0.9962, + "step": 9520 + }, + { + "epoch": 0.6628642971412673, + "grad_norm": 1.6603033542633057, + "learning_rate": 9.561053328407934e-06, + "loss": 0.9985, + "step": 9530 + }, + { + "epoch": 0.663559852542255, + "grad_norm": 5.264922618865967, + "learning_rate": 9.55939382666516e-06, + "loss": 1.002, + "step": 9540 + }, + { + "epoch": 0.6642554079432427, + "grad_norm": 1.9967402219772339, + "learning_rate": 9.557731338419815e-06, + "loss": 1.0114, + "step": 9550 + }, + { + "epoch": 0.6649509633442303, + "grad_norm": 3.912971019744873, + "learning_rate": 9.55606586476086e-06, + "loss": 1.0223, + "step": 9560 + }, + { + "epoch": 0.6656465187452181, + "grad_norm": 2.251217842102051, + "learning_rate": 9.554397406779219e-06, + "loss": 1.0481, + "step": 9570 + }, + { + "epoch": 0.6663420741462057, + "grad_norm": 2.3170528411865234, + "learning_rate": 9.552725965567769e-06, + "loss": 0.9976, + "step": 9580 + }, + { + "epoch": 0.6670376295471935, + "grad_norm": 3.7170145511627197, + "learning_rate": 9.55105154222134e-06, + "loss": 1.022, + "step": 9590 + }, + { + "epoch": 0.6677331849481811, + "grad_norm": 2.2989819049835205, + "learning_rate": 9.549374137836714e-06, + "loss": 1.0293, + "step": 9600 + }, + { + "epoch": 0.6684287403491688, + "grad_norm": 3.6269569396972656, + "learning_rate": 9.54769375351263e-06, + "loss": 1.0375, + "step": 9610 + }, + { + "epoch": 0.6691242957501565, + "grad_norm": 1.8223940134048462, + "learning_rate": 9.546010390349778e-06, + "loss": 1.0233, + "step": 9620 + }, + { + "epoch": 0.6698198511511442, + "grad_norm": 5.032322406768799, + "learning_rate": 9.544324049450791e-06, + "loss": 0.9435, + "step": 9630 + }, + { + "epoch": 0.6705154065521319, + "grad_norm": 1.777269721031189, + "learning_rate": 9.542634731920266e-06, + "loss": 1.0111, + "step": 9640 + }, + { + "epoch": 0.6712109619531196, + "grad_norm": 2.944465398788452, + "learning_rate": 9.54094243886474e-06, + "loss": 0.9795, + "step": 9650 + }, + { + "epoch": 0.6719065173541072, + "grad_norm": 2.2826709747314453, + "learning_rate": 9.539247171392702e-06, + "loss": 0.9887, + "step": 9660 + }, + { + "epoch": 0.672602072755095, + "grad_norm": 2.905890703201294, + "learning_rate": 9.53754893061459e-06, + "loss": 1.0462, + "step": 9670 + }, + { + "epoch": 0.6732976281560826, + "grad_norm": 2.7604434490203857, + "learning_rate": 9.535847717642787e-06, + "loss": 0.9867, + "step": 9680 + }, + { + "epoch": 0.6739931835570703, + "grad_norm": 4.987980365753174, + "learning_rate": 9.534143533591627e-06, + "loss": 0.982, + "step": 
9690 + }, + { + "epoch": 0.674688738958058, + "grad_norm": 1.9343113899230957, + "learning_rate": 9.532436379577387e-06, + "loss": 0.9866, + "step": 9700 + }, + { + "epoch": 0.6753842943590457, + "grad_norm": 2.739419460296631, + "learning_rate": 9.53072625671829e-06, + "loss": 0.9644, + "step": 9710 + }, + { + "epoch": 0.6760798497600334, + "grad_norm": 2.5628106594085693, + "learning_rate": 9.529013166134505e-06, + "loss": 0.9364, + "step": 9720 + }, + { + "epoch": 0.6767754051610211, + "grad_norm": 2.700146198272705, + "learning_rate": 9.527297108948139e-06, + "loss": 1.0176, + "step": 9730 + }, + { + "epoch": 0.6774709605620087, + "grad_norm": 1.975307822227478, + "learning_rate": 9.525578086283252e-06, + "loss": 1.0511, + "step": 9740 + }, + { + "epoch": 0.6781665159629965, + "grad_norm": 1.7426694631576538, + "learning_rate": 9.523856099265841e-06, + "loss": 0.9963, + "step": 9750 + }, + { + "epoch": 0.6788620713639841, + "grad_norm": 2.286609649658203, + "learning_rate": 9.522131149023844e-06, + "loss": 0.9253, + "step": 9760 + }, + { + "epoch": 0.6795576267649718, + "grad_norm": 5.640908718109131, + "learning_rate": 9.52040323668714e-06, + "loss": 0.9884, + "step": 9770 + }, + { + "epoch": 0.6802531821659595, + "grad_norm": 3.6927990913391113, + "learning_rate": 9.51867236338755e-06, + "loss": 0.9555, + "step": 9780 + }, + { + "epoch": 0.6809487375669472, + "grad_norm": 3.1818950176239014, + "learning_rate": 9.516938530258835e-06, + "loss": 0.9717, + "step": 9790 + }, + { + "epoch": 0.6816442929679349, + "grad_norm": 2.6653597354888916, + "learning_rate": 9.515201738436692e-06, + "loss": 1.0098, + "step": 9800 + }, + { + "epoch": 0.6823398483689226, + "grad_norm": 2.387503147125244, + "learning_rate": 9.51346198905876e-06, + "loss": 0.9773, + "step": 9810 + }, + { + "epoch": 0.6830354037699102, + "grad_norm": 2.3157920837402344, + "learning_rate": 9.51171928326461e-06, + "loss": 0.9921, + "step": 9820 + }, + { + "epoch": 0.683730959170898, + "grad_norm": 2.4291329383850098, + "learning_rate": 9.509973622195754e-06, + "loss": 0.9404, + "step": 9830 + }, + { + "epoch": 0.6844265145718856, + "grad_norm": 1.9757418632507324, + "learning_rate": 9.508225006995638e-06, + "loss": 0.9658, + "step": 9840 + }, + { + "epoch": 0.6851220699728734, + "grad_norm": 2.564131498336792, + "learning_rate": 9.506473438809642e-06, + "loss": 0.9818, + "step": 9850 + }, + { + "epoch": 0.685817625373861, + "grad_norm": 1.6689167022705078, + "learning_rate": 9.504718918785084e-06, + "loss": 0.9383, + "step": 9860 + }, + { + "epoch": 0.6865131807748487, + "grad_norm": 1.8604680299758911, + "learning_rate": 9.50296144807121e-06, + "loss": 0.9192, + "step": 9870 + }, + { + "epoch": 0.6872087361758364, + "grad_norm": 1.4895521402359009, + "learning_rate": 9.501201027819204e-06, + "loss": 0.9419, + "step": 9880 + }, + { + "epoch": 0.6879042915768241, + "grad_norm": 3.203233480453491, + "learning_rate": 9.499437659182179e-06, + "loss": 1.0051, + "step": 9890 + }, + { + "epoch": 0.6885998469778117, + "grad_norm": 2.001832962036133, + "learning_rate": 9.497671343315177e-06, + "loss": 0.9635, + "step": 9900 + }, + { + "epoch": 0.6892954023787995, + "grad_norm": 1.819734811782837, + "learning_rate": 9.49590208137518e-06, + "loss": 1.0434, + "step": 9910 + }, + { + "epoch": 0.6899909577797871, + "grad_norm": 2.4274919033050537, + "learning_rate": 9.494129874521088e-06, + "loss": 1.0567, + "step": 9920 + }, + { + "epoch": 0.6906865131807749, + "grad_norm": 2.767319917678833, + "learning_rate": 9.492354723913737e-06, + 
"loss": 0.9399, + "step": 9930 + }, + { + "epoch": 0.6913820685817625, + "grad_norm": 3.997968912124634, + "learning_rate": 9.490576630715889e-06, + "loss": 0.9703, + "step": 9940 + }, + { + "epoch": 0.6920776239827502, + "grad_norm": 2.543867349624634, + "learning_rate": 9.488795596092233e-06, + "loss": 1.0606, + "step": 9950 + }, + { + "epoch": 0.692773179383738, + "grad_norm": 3.4960811138153076, + "learning_rate": 9.487011621209387e-06, + "loss": 0.9798, + "step": 9960 + }, + { + "epoch": 0.6934687347847256, + "grad_norm": 2.765897512435913, + "learning_rate": 9.485224707235895e-06, + "loss": 1.011, + "step": 9970 + }, + { + "epoch": 0.6941642901857132, + "grad_norm": 1.8281095027923584, + "learning_rate": 9.48343485534222e-06, + "loss": 0.9942, + "step": 9980 + }, + { + "epoch": 0.694859845586701, + "grad_norm": 1.985290765762329, + "learning_rate": 9.481642066700759e-06, + "loss": 0.9907, + "step": 9990 + }, + { + "epoch": 0.6955554009876886, + "grad_norm": 2.1889357566833496, + "learning_rate": 9.479846342485823e-06, + "loss": 0.9673, + "step": 10000 + }, + { + "epoch": 0.6955554009876886, + "eval_loss": 0.9976942539215088, + "eval_runtime": 4594.5705, + "eval_samples_per_second": 3.953, + "eval_steps_per_second": 0.659, + "step": 10000 + }, + { + "epoch": 0.6962509563886764, + "grad_norm": 2.512913227081299, + "learning_rate": 9.478047683873656e-06, + "loss": 0.9903, + "step": 10010 + }, + { + "epoch": 0.696946511789664, + "grad_norm": 2.160325527191162, + "learning_rate": 9.476246092042413e-06, + "loss": 1.0368, + "step": 10020 + }, + { + "epoch": 0.6976420671906517, + "grad_norm": 4.687230110168457, + "learning_rate": 9.474441568172182e-06, + "loss": 0.952, + "step": 10030 + }, + { + "epoch": 0.6983376225916395, + "grad_norm": 2.2816243171691895, + "learning_rate": 9.472634113444962e-06, + "loss": 1.0033, + "step": 10040 + }, + { + "epoch": 0.6990331779926271, + "grad_norm": 3.874019145965576, + "learning_rate": 9.470823729044675e-06, + "loss": 0.9865, + "step": 10050 + }, + { + "epoch": 0.6997287333936149, + "grad_norm": 2.6616098880767822, + "learning_rate": 9.469010416157163e-06, + "loss": 0.9811, + "step": 10060 + }, + { + "epoch": 0.7004242887946025, + "grad_norm": 2.0778048038482666, + "learning_rate": 9.467194175970187e-06, + "loss": 0.9712, + "step": 10070 + }, + { + "epoch": 0.7011198441955901, + "grad_norm": 2.558802366256714, + "learning_rate": 9.46537500967342e-06, + "loss": 0.9829, + "step": 10080 + }, + { + "epoch": 0.7018153995965779, + "grad_norm": 2.535323143005371, + "learning_rate": 9.463552918458463e-06, + "loss": 0.9969, + "step": 10090 + }, + { + "epoch": 0.7025109549975656, + "grad_norm": 2.70457124710083, + "learning_rate": 9.461727903518818e-06, + "loss": 0.9747, + "step": 10100 + }, + { + "epoch": 0.7032065103985532, + "grad_norm": 2.281812906265259, + "learning_rate": 9.459899966049912e-06, + "loss": 0.9489, + "step": 10110 + }, + { + "epoch": 0.703902065799541, + "grad_norm": 1.7755597829818726, + "learning_rate": 9.458069107249086e-06, + "loss": 1.0659, + "step": 10120 + }, + { + "epoch": 0.7045976212005286, + "grad_norm": 2.2153127193450928, + "learning_rate": 9.456235328315591e-06, + "loss": 1.0269, + "step": 10130 + }, + { + "epoch": 0.7052931766015164, + "grad_norm": 2.1447501182556152, + "learning_rate": 9.454398630450592e-06, + "loss": 0.9667, + "step": 10140 + }, + { + "epoch": 0.705988732002504, + "grad_norm": 5.827666759490967, + "learning_rate": 9.452559014857167e-06, + "loss": 0.9841, + "step": 10150 + }, + { + "epoch": 0.7066842874034917, 
+ "grad_norm": 2.090508460998535, + "learning_rate": 9.450716482740304e-06, + "loss": 0.9702, + "step": 10160 + }, + { + "epoch": 0.7073798428044794, + "grad_norm": 2.35599684715271, + "learning_rate": 9.4488710353069e-06, + "loss": 0.9672, + "step": 10170 + }, + { + "epoch": 0.708075398205467, + "grad_norm": 1.5405713319778442, + "learning_rate": 9.447022673765768e-06, + "loss": 1.0444, + "step": 10180 + }, + { + "epoch": 0.7087709536064547, + "grad_norm": 6.140218257904053, + "learning_rate": 9.445171399327621e-06, + "loss": 0.9557, + "step": 10190 + }, + { + "epoch": 0.7094665090074425, + "grad_norm": 2.4522502422332764, + "learning_rate": 9.443317213205086e-06, + "loss": 0.9572, + "step": 10200 + }, + { + "epoch": 0.7101620644084301, + "grad_norm": 2.7388534545898438, + "learning_rate": 9.441460116612694e-06, + "loss": 1.0132, + "step": 10210 + }, + { + "epoch": 0.7108576198094179, + "grad_norm": 3.217639446258545, + "learning_rate": 9.439600110766887e-06, + "loss": 0.9993, + "step": 10220 + }, + { + "epoch": 0.7115531752104055, + "grad_norm": 3.5153424739837646, + "learning_rate": 9.437737196886006e-06, + "loss": 1.0231, + "step": 10230 + }, + { + "epoch": 0.7122487306113932, + "grad_norm": 1.83612060546875, + "learning_rate": 9.435871376190301e-06, + "loss": 0.9378, + "step": 10240 + }, + { + "epoch": 0.7129442860123809, + "grad_norm": 3.279198169708252, + "learning_rate": 9.434002649901928e-06, + "loss": 0.9485, + "step": 10250 + }, + { + "epoch": 0.7136398414133686, + "grad_norm": 6.548892021179199, + "learning_rate": 9.43213101924494e-06, + "loss": 1.0514, + "step": 10260 + }, + { + "epoch": 0.7143353968143563, + "grad_norm": 3.0969650745391846, + "learning_rate": 9.430256485445297e-06, + "loss": 1.0075, + "step": 10270 + }, + { + "epoch": 0.715030952215344, + "grad_norm": 1.9153330326080322, + "learning_rate": 9.428379049730861e-06, + "loss": 0.9754, + "step": 10280 + }, + { + "epoch": 0.7157265076163316, + "grad_norm": 1.4645906686782837, + "learning_rate": 9.426498713331392e-06, + "loss": 0.9481, + "step": 10290 + }, + { + "epoch": 0.7164220630173194, + "grad_norm": 2.670499086380005, + "learning_rate": 9.424615477478553e-06, + "loss": 0.9275, + "step": 10300 + }, + { + "epoch": 0.717117618418307, + "grad_norm": 1.4417914152145386, + "learning_rate": 9.422729343405903e-06, + "loss": 0.9904, + "step": 10310 + }, + { + "epoch": 0.7178131738192947, + "grad_norm": 4.265714168548584, + "learning_rate": 9.4208403123489e-06, + "loss": 0.9741, + "step": 10320 + }, + { + "epoch": 0.7185087292202824, + "grad_norm": 2.0860867500305176, + "learning_rate": 9.418948385544905e-06, + "loss": 1.0535, + "step": 10330 + }, + { + "epoch": 0.7192042846212701, + "grad_norm": 1.9561222791671753, + "learning_rate": 9.417053564233168e-06, + "loss": 0.985, + "step": 10340 + }, + { + "epoch": 0.7198998400222578, + "grad_norm": 2.2563211917877197, + "learning_rate": 9.415155849654837e-06, + "loss": 0.9203, + "step": 10350 + }, + { + "epoch": 0.7205953954232455, + "grad_norm": 1.9555695056915283, + "learning_rate": 9.41325524305296e-06, + "loss": 1.0043, + "step": 10360 + }, + { + "epoch": 0.7212909508242331, + "grad_norm": 2.571779489517212, + "learning_rate": 9.411351745672474e-06, + "loss": 1.017, + "step": 10370 + }, + { + "epoch": 0.7219865062252209, + "grad_norm": 2.320901393890381, + "learning_rate": 9.409445358760212e-06, + "loss": 0.9633, + "step": 10380 + }, + { + "epoch": 0.7226820616262085, + "grad_norm": 1.6165013313293457, + "learning_rate": 9.407536083564897e-06, + "loss": 0.9611, + "step": 
10390 + }, + { + "epoch": 0.7233776170271962, + "grad_norm": 3.5785179138183594, + "learning_rate": 9.40562392133715e-06, + "loss": 0.9684, + "step": 10400 + }, + { + "epoch": 0.7240731724281839, + "grad_norm": 1.5670959949493408, + "learning_rate": 9.403708873329476e-06, + "loss": 0.9833, + "step": 10410 + }, + { + "epoch": 0.7247687278291716, + "grad_norm": 2.3901941776275635, + "learning_rate": 9.401790940796274e-06, + "loss": 1.0165, + "step": 10420 + }, + { + "epoch": 0.7254642832301593, + "grad_norm": 2.2159793376922607, + "learning_rate": 9.39987012499383e-06, + "loss": 1.0128, + "step": 10430 + }, + { + "epoch": 0.726159838631147, + "grad_norm": 1.7386146783828735, + "learning_rate": 9.397946427180326e-06, + "loss": 0.9918, + "step": 10440 + }, + { + "epoch": 0.7268553940321346, + "grad_norm": 1.7697595357894897, + "learning_rate": 9.39601984861582e-06, + "loss": 0.9961, + "step": 10450 + }, + { + "epoch": 0.7275509494331224, + "grad_norm": 3.7953929901123047, + "learning_rate": 9.394090390562265e-06, + "loss": 0.9192, + "step": 10460 + }, + { + "epoch": 0.72824650483411, + "grad_norm": 3.9541234970092773, + "learning_rate": 9.392158054283497e-06, + "loss": 0.9354, + "step": 10470 + }, + { + "epoch": 0.7289420602350978, + "grad_norm": 10.294798851013184, + "learning_rate": 9.390222841045243e-06, + "loss": 1.0191, + "step": 10480 + }, + { + "epoch": 0.7296376156360854, + "grad_norm": 2.8620400428771973, + "learning_rate": 9.388284752115105e-06, + "loss": 0.9561, + "step": 10490 + }, + { + "epoch": 0.7303331710370731, + "grad_norm": 1.977408528327942, + "learning_rate": 9.386343788762576e-06, + "loss": 1.0051, + "step": 10500 + }, + { + "epoch": 0.7303331710370731, + "eval_loss": 0.9927883148193359, + "eval_runtime": 4621.3576, + "eval_samples_per_second": 3.93, + "eval_steps_per_second": 0.655, + "step": 10500 + }, + { + "epoch": 0.7310287264380608, + "grad_norm": 1.7951139211654663, + "learning_rate": 9.384399952259029e-06, + "loss": 0.94, + "step": 10510 + }, + { + "epoch": 0.7317242818390485, + "grad_norm": 2.442408561706543, + "learning_rate": 9.382453243877718e-06, + "loss": 0.9758, + "step": 10520 + }, + { + "epoch": 0.7324198372400361, + "grad_norm": 2.3050520420074463, + "learning_rate": 9.380503664893783e-06, + "loss": 0.9321, + "step": 10530 + }, + { + "epoch": 0.7331153926410239, + "grad_norm": 1.825283169746399, + "learning_rate": 9.378551216584237e-06, + "loss": 0.9506, + "step": 10540 + }, + { + "epoch": 0.7338109480420115, + "grad_norm": 2.9695827960968018, + "learning_rate": 9.376595900227979e-06, + "loss": 1.0379, + "step": 10550 + }, + { + "epoch": 0.7345065034429993, + "grad_norm": 2.492952823638916, + "learning_rate": 9.37463771710578e-06, + "loss": 0.9128, + "step": 10560 + }, + { + "epoch": 0.7352020588439869, + "grad_norm": 1.9598690271377563, + "learning_rate": 9.372676668500298e-06, + "loss": 1.0278, + "step": 10570 + }, + { + "epoch": 0.7358976142449746, + "grad_norm": 1.7750362157821655, + "learning_rate": 9.370712755696061e-06, + "loss": 0.9971, + "step": 10580 + }, + { + "epoch": 0.7365931696459623, + "grad_norm": 1.5262411832809448, + "learning_rate": 9.368745979979471e-06, + "loss": 0.9254, + "step": 10590 + }, + { + "epoch": 0.73728872504695, + "grad_norm": 2.2119686603546143, + "learning_rate": 9.366776342638814e-06, + "loss": 0.9765, + "step": 10600 + }, + { + "epoch": 0.7379842804479376, + "grad_norm": 1.8232074975967407, + "learning_rate": 9.364803844964246e-06, + "loss": 0.9034, + "step": 10610 + }, + { + "epoch": 0.7386798358489254, + 
"grad_norm": 1.3224016427993774, + "learning_rate": 9.36282848824779e-06, + "loss": 0.9585, + "step": 10620 + }, + { + "epoch": 0.739375391249913, + "grad_norm": 1.711287260055542, + "learning_rate": 9.360850273783353e-06, + "loss": 1.0285, + "step": 10630 + }, + { + "epoch": 0.7400709466509008, + "grad_norm": 2.5977909564971924, + "learning_rate": 9.358869202866708e-06, + "loss": 1.0552, + "step": 10640 + }, + { + "epoch": 0.7407665020518884, + "grad_norm": 1.869174838066101, + "learning_rate": 9.356885276795496e-06, + "loss": 1.0807, + "step": 10650 + }, + { + "epoch": 0.7414620574528761, + "grad_norm": 3.3211796283721924, + "learning_rate": 9.354898496869238e-06, + "loss": 0.938, + "step": 10660 + }, + { + "epoch": 0.7421576128538638, + "grad_norm": 1.793689489364624, + "learning_rate": 9.352908864389313e-06, + "loss": 1.0133, + "step": 10670 + }, + { + "epoch": 0.7428531682548515, + "grad_norm": 2.074301242828369, + "learning_rate": 9.350916380658976e-06, + "loss": 0.9743, + "step": 10680 + }, + { + "epoch": 0.7435487236558392, + "grad_norm": 2.1499667167663574, + "learning_rate": 9.348921046983348e-06, + "loss": 1.0257, + "step": 10690 + }, + { + "epoch": 0.7442442790568269, + "grad_norm": 1.7764832973480225, + "learning_rate": 9.346922864669414e-06, + "loss": 0.9536, + "step": 10700 + }, + { + "epoch": 0.7449398344578145, + "grad_norm": 2.3296449184417725, + "learning_rate": 9.34492183502603e-06, + "loss": 0.9845, + "step": 10710 + }, + { + "epoch": 0.7456353898588023, + "grad_norm": 4.317366123199463, + "learning_rate": 9.342917959363914e-06, + "loss": 1.0028, + "step": 10720 + }, + { + "epoch": 0.7463309452597899, + "grad_norm": 2.6898350715637207, + "learning_rate": 9.340911238995644e-06, + "loss": 1.0041, + "step": 10730 + }, + { + "epoch": 0.7470265006607776, + "grad_norm": 1.8172175884246826, + "learning_rate": 9.338901675235669e-06, + "loss": 1.0564, + "step": 10740 + }, + { + "epoch": 0.7477220560617653, + "grad_norm": 6.4872541427612305, + "learning_rate": 9.336889269400298e-06, + "loss": 0.9827, + "step": 10750 + }, + { + "epoch": 0.748417611462753, + "grad_norm": 3.912036418914795, + "learning_rate": 9.334874022807699e-06, + "loss": 0.996, + "step": 10760 + }, + { + "epoch": 0.7491131668637407, + "grad_norm": 2.1346828937530518, + "learning_rate": 9.332855936777903e-06, + "loss": 0.9724, + "step": 10770 + }, + { + "epoch": 0.7498087222647284, + "grad_norm": 2.01444411277771, + "learning_rate": 9.330835012632801e-06, + "loss": 0.9446, + "step": 10780 + }, + { + "epoch": 0.750504277665716, + "grad_norm": 1.454373836517334, + "learning_rate": 9.328811251696141e-06, + "loss": 1.0036, + "step": 10790 + }, + { + "epoch": 0.7511998330667038, + "grad_norm": 1.8096171617507935, + "learning_rate": 9.326784655293533e-06, + "loss": 0.9761, + "step": 10800 + }, + { + "epoch": 0.7518953884676914, + "grad_norm": 1.6304287910461426, + "learning_rate": 9.32475522475244e-06, + "loss": 0.9859, + "step": 10810 + }, + { + "epoch": 0.7525909438686791, + "grad_norm": 2.1828205585479736, + "learning_rate": 9.322722961402183e-06, + "loss": 1.0004, + "step": 10820 + }, + { + "epoch": 0.7532864992696668, + "grad_norm": 1.9716434478759766, + "learning_rate": 9.320687866573941e-06, + "loss": 1.018, + "step": 10830 + }, + { + "epoch": 0.7539820546706545, + "grad_norm": 1.9968303442001343, + "learning_rate": 9.318649941600744e-06, + "loss": 1.0221, + "step": 10840 + }, + { + "epoch": 0.7546776100716422, + "grad_norm": 1.394667387008667, + "learning_rate": 9.316609187817479e-06, + "loss": 0.9758, + 
"step": 10850 + }, + { + "epoch": 0.7553731654726299, + "grad_norm": 2.489720582962036, + "learning_rate": 9.31456560656088e-06, + "loss": 1.0026, + "step": 10860 + }, + { + "epoch": 0.7560687208736175, + "grad_norm": 17.803722381591797, + "learning_rate": 9.312519199169543e-06, + "loss": 0.9284, + "step": 10870 + }, + { + "epoch": 0.7567642762746053, + "grad_norm": 2.0162179470062256, + "learning_rate": 9.310469966983906e-06, + "loss": 0.9589, + "step": 10880 + }, + { + "epoch": 0.7574598316755929, + "grad_norm": 1.9285005331039429, + "learning_rate": 9.308417911346262e-06, + "loss": 0.9955, + "step": 10890 + }, + { + "epoch": 0.7581553870765807, + "grad_norm": 8.364664077758789, + "learning_rate": 9.306363033600753e-06, + "loss": 0.9981, + "step": 10900 + }, + { + "epoch": 0.7588509424775683, + "grad_norm": 5.958714485168457, + "learning_rate": 9.304305335093366e-06, + "loss": 0.959, + "step": 10910 + }, + { + "epoch": 0.759546497878556, + "grad_norm": 2.0308122634887695, + "learning_rate": 9.302244817171943e-06, + "loss": 0.9998, + "step": 10920 + }, + { + "epoch": 0.7602420532795438, + "grad_norm": 2.5587306022644043, + "learning_rate": 9.300181481186164e-06, + "loss": 0.9852, + "step": 10930 + }, + { + "epoch": 0.7609376086805314, + "grad_norm": 2.131622076034546, + "learning_rate": 9.298115328487562e-06, + "loss": 1.0378, + "step": 10940 + }, + { + "epoch": 0.761633164081519, + "grad_norm": 4.111046314239502, + "learning_rate": 9.29604636042951e-06, + "loss": 0.9922, + "step": 10950 + }, + { + "epoch": 0.7623287194825068, + "grad_norm": 1.7649215459823608, + "learning_rate": 9.293974578367229e-06, + "loss": 0.9755, + "step": 10960 + }, + { + "epoch": 0.7630242748834944, + "grad_norm": 2.0672836303710938, + "learning_rate": 9.29189998365778e-06, + "loss": 0.9863, + "step": 10970 + }, + { + "epoch": 0.7637198302844822, + "grad_norm": 2.2669830322265625, + "learning_rate": 9.28982257766007e-06, + "loss": 0.9829, + "step": 10980 + }, + { + "epoch": 0.7644153856854699, + "grad_norm": 2.8982460498809814, + "learning_rate": 9.287742361734843e-06, + "loss": 0.9879, + "step": 10990 + }, + { + "epoch": 0.7651109410864575, + "grad_norm": 2.115569829940796, + "learning_rate": 9.285659337244688e-06, + "loss": 0.9677, + "step": 11000 + }, + { + "epoch": 0.7651109410864575, + "eval_loss": 0.9869261384010315, + "eval_runtime": 4584.071, + "eval_samples_per_second": 3.962, + "eval_steps_per_second": 0.66, + "step": 11000 + }, + { + "epoch": 0.7658064964874453, + "grad_norm": 1.7354387044906616, + "learning_rate": 9.283573505554028e-06, + "loss": 0.9354, + "step": 11010 + }, + { + "epoch": 0.7665020518884329, + "grad_norm": 2.0192418098449707, + "learning_rate": 9.281484868029134e-06, + "loss": 0.9263, + "step": 11020 + }, + { + "epoch": 0.7671976072894207, + "grad_norm": 3.400847911834717, + "learning_rate": 9.279393426038103e-06, + "loss": 1.0157, + "step": 11030 + }, + { + "epoch": 0.7678931626904083, + "grad_norm": 2.150063991546631, + "learning_rate": 9.27729918095088e-06, + "loss": 1.0261, + "step": 11040 + }, + { + "epoch": 0.768588718091396, + "grad_norm": 1.5513664484024048, + "learning_rate": 9.275202134139239e-06, + "loss": 1.025, + "step": 11050 + }, + { + "epoch": 0.7692842734923837, + "grad_norm": 3.4665329456329346, + "learning_rate": 9.273102286976792e-06, + "loss": 0.9427, + "step": 11060 + }, + { + "epoch": 0.7699798288933714, + "grad_norm": 2.9653308391571045, + "learning_rate": 9.270999640838984e-06, + "loss": 0.9454, + "step": 11070 + }, + { + "epoch": 0.770675384294359, + 
"grad_norm": 3.3398637771606445, + "learning_rate": 9.268894197103095e-06, + "loss": 0.975, + "step": 11080 + }, + { + "epoch": 0.7713709396953468, + "grad_norm": 3.157940149307251, + "learning_rate": 9.266785957148238e-06, + "loss": 0.9867, + "step": 11090 + }, + { + "epoch": 0.7720664950963344, + "grad_norm": 5.157686710357666, + "learning_rate": 9.264674922355354e-06, + "loss": 0.9818, + "step": 11100 + }, + { + "epoch": 0.7727620504973222, + "grad_norm": 1.71923828125, + "learning_rate": 9.262561094107217e-06, + "loss": 0.9823, + "step": 11110 + }, + { + "epoch": 0.7734576058983098, + "grad_norm": 1.5572396516799927, + "learning_rate": 9.260444473788432e-06, + "loss": 0.9488, + "step": 11120 + }, + { + "epoch": 0.7741531612992975, + "grad_norm": 2.465153932571411, + "learning_rate": 9.258325062785432e-06, + "loss": 1.0031, + "step": 11130 + }, + { + "epoch": 0.7748487167002852, + "grad_norm": 2.7863686084747314, + "learning_rate": 9.256202862486474e-06, + "loss": 0.9709, + "step": 11140 + }, + { + "epoch": 0.7755442721012729, + "grad_norm": 7.307269096374512, + "learning_rate": 9.254077874281649e-06, + "loss": 0.9962, + "step": 11150 + }, + { + "epoch": 0.7762398275022605, + "grad_norm": 2.378608226776123, + "learning_rate": 9.25195009956287e-06, + "loss": 0.9474, + "step": 11160 + }, + { + "epoch": 0.7769353829032483, + "grad_norm": 1.4693279266357422, + "learning_rate": 9.249819539723876e-06, + "loss": 0.9823, + "step": 11170 + }, + { + "epoch": 0.7776309383042359, + "grad_norm": 1.7977452278137207, + "learning_rate": 9.24768619616023e-06, + "loss": 1.0022, + "step": 11180 + }, + { + "epoch": 0.7783264937052237, + "grad_norm": 2.926719903945923, + "learning_rate": 9.245550070269318e-06, + "loss": 1.0174, + "step": 11190 + }, + { + "epoch": 0.7790220491062113, + "grad_norm": 2.231137275695801, + "learning_rate": 9.243411163450349e-06, + "loss": 0.9454, + "step": 11200 + }, + { + "epoch": 0.779717604507199, + "grad_norm": 6.833038330078125, + "learning_rate": 9.241269477104356e-06, + "loss": 0.9645, + "step": 11210 + }, + { + "epoch": 0.7804131599081867, + "grad_norm": 12.1441068649292, + "learning_rate": 9.239125012634187e-06, + "loss": 1.0336, + "step": 11220 + }, + { + "epoch": 0.7811087153091744, + "grad_norm": 3.595836877822876, + "learning_rate": 9.236977771444515e-06, + "loss": 0.9457, + "step": 11230 + }, + { + "epoch": 0.7818042707101621, + "grad_norm": 1.6409573554992676, + "learning_rate": 9.23482775494183e-06, + "loss": 0.969, + "step": 11240 + }, + { + "epoch": 0.7824998261111498, + "grad_norm": 2.8841323852539062, + "learning_rate": 9.23267496453444e-06, + "loss": 0.9914, + "step": 11250 + }, + { + "epoch": 0.7831953815121374, + "grad_norm": 3.194254159927368, + "learning_rate": 9.230519401632467e-06, + "loss": 0.9826, + "step": 11260 + }, + { + "epoch": 0.7838909369131252, + "grad_norm": 2.2655327320098877, + "learning_rate": 9.228361067647857e-06, + "loss": 0.9834, + "step": 11270 + }, + { + "epoch": 0.7845864923141128, + "grad_norm": 2.116649866104126, + "learning_rate": 9.226199963994362e-06, + "loss": 0.9811, + "step": 11280 + }, + { + "epoch": 0.7852820477151005, + "grad_norm": 1.769460916519165, + "learning_rate": 9.224036092087552e-06, + "loss": 0.9662, + "step": 11290 + }, + { + "epoch": 0.7859776031160882, + "grad_norm": 1.8222196102142334, + "learning_rate": 9.22186945334481e-06, + "loss": 0.9701, + "step": 11300 + }, + { + "epoch": 0.7866731585170759, + "grad_norm": 2.509464740753174, + "learning_rate": 9.219700049185337e-06, + "loss": 0.9803, + "step": 11310 
+ }, + { + "epoch": 0.7873687139180636, + "grad_norm": 2.2453930377960205, + "learning_rate": 9.217527881030134e-06, + "loss": 0.8887, + "step": 11320 + }, + { + "epoch": 0.7880642693190513, + "grad_norm": 1.7438175678253174, + "learning_rate": 9.215352950302022e-06, + "loss": 0.9916, + "step": 11330 + }, + { + "epoch": 0.7887598247200389, + "grad_norm": 3.519965648651123, + "learning_rate": 9.213175258425626e-06, + "loss": 0.9955, + "step": 11340 + }, + { + "epoch": 0.7894553801210267, + "grad_norm": 1.6467702388763428, + "learning_rate": 9.210994806827384e-06, + "loss": 1.0012, + "step": 11350 + }, + { + "epoch": 0.7901509355220143, + "grad_norm": 1.6678965091705322, + "learning_rate": 9.208811596935537e-06, + "loss": 0.9599, + "step": 11360 + }, + { + "epoch": 0.790846490923002, + "grad_norm": 3.5240025520324707, + "learning_rate": 9.206625630180137e-06, + "loss": 1.0176, + "step": 11370 + }, + { + "epoch": 0.7915420463239897, + "grad_norm": 2.3493504524230957, + "learning_rate": 9.204436907993039e-06, + "loss": 0.9469, + "step": 11380 + }, + { + "epoch": 0.7922376017249774, + "grad_norm": 1.7836196422576904, + "learning_rate": 9.202245431807904e-06, + "loss": 0.9339, + "step": 11390 + }, + { + "epoch": 0.7929331571259651, + "grad_norm": 2.819885015487671, + "learning_rate": 9.200051203060196e-06, + "loss": 0.9124, + "step": 11400 + }, + { + "epoch": 0.7936287125269528, + "grad_norm": 4.124748706817627, + "learning_rate": 9.197854223187186e-06, + "loss": 0.9362, + "step": 11410 + }, + { + "epoch": 0.7943242679279404, + "grad_norm": 1.9177837371826172, + "learning_rate": 9.195654493627942e-06, + "loss": 0.9958, + "step": 11420 + }, + { + "epoch": 0.7950198233289282, + "grad_norm": 1.8601995706558228, + "learning_rate": 9.193452015823332e-06, + "loss": 1.0169, + "step": 11430 + }, + { + "epoch": 0.7957153787299158, + "grad_norm": 3.5639326572418213, + "learning_rate": 9.191246791216031e-06, + "loss": 1.0128, + "step": 11440 + }, + { + "epoch": 0.7964109341309036, + "grad_norm": 1.5190987586975098, + "learning_rate": 9.189038821250506e-06, + "loss": 0.9359, + "step": 11450 + }, + { + "epoch": 0.7971064895318912, + "grad_norm": 1.6510673761367798, + "learning_rate": 9.186828107373029e-06, + "loss": 1.0156, + "step": 11460 + }, + { + "epoch": 0.7978020449328789, + "grad_norm": 1.8245681524276733, + "learning_rate": 9.184614651031665e-06, + "loss": 1.0026, + "step": 11470 + }, + { + "epoch": 0.7984976003338666, + "grad_norm": 1.7725977897644043, + "learning_rate": 9.182398453676276e-06, + "loss": 1.0418, + "step": 11480 + }, + { + "epoch": 0.7991931557348543, + "grad_norm": 1.6046912670135498, + "learning_rate": 9.180179516758518e-06, + "loss": 1.016, + "step": 11490 + }, + { + "epoch": 0.7998887111358419, + "grad_norm": 2.261159658432007, + "learning_rate": 9.177957841731844e-06, + "loss": 0.9382, + "step": 11500 + }, + { + "epoch": 0.7998887111358419, + "eval_loss": 0.9829555749893188, + "eval_runtime": 4594.689, + "eval_samples_per_second": 3.953, + "eval_steps_per_second": 0.659, + "step": 11500 + }, + { + "epoch": 0.8005842665368297, + "grad_norm": 2.7518558502197266, + "learning_rate": 9.175733430051502e-06, + "loss": 0.9895, + "step": 11510 + }, + { + "epoch": 0.8012798219378173, + "grad_norm": 1.5967988967895508, + "learning_rate": 9.173506283174526e-06, + "loss": 0.959, + "step": 11520 + }, + { + "epoch": 0.8019753773388051, + "grad_norm": 2.111435651779175, + "learning_rate": 9.17127640255975e-06, + "loss": 0.9835, + "step": 11530 + }, + { + "epoch": 0.8026709327397927, + 
"grad_norm": 2.9902431964874268, + "learning_rate": 9.169043789667792e-06, + "loss": 0.9993, + "step": 11540 + }, + { + "epoch": 0.8033664881407804, + "grad_norm": 2.0832207202911377, + "learning_rate": 9.166808445961065e-06, + "loss": 0.9087, + "step": 11550 + }, + { + "epoch": 0.8040620435417681, + "grad_norm": 1.6776680946350098, + "learning_rate": 9.164570372903763e-06, + "loss": 0.9473, + "step": 11560 + }, + { + "epoch": 0.8047575989427558, + "grad_norm": 2.803178071975708, + "learning_rate": 9.162329571961877e-06, + "loss": 1.0759, + "step": 11570 + }, + { + "epoch": 0.8054531543437434, + "grad_norm": 1.6260005235671997, + "learning_rate": 9.16008604460318e-06, + "loss": 0.9598, + "step": 11580 + }, + { + "epoch": 0.8061487097447312, + "grad_norm": 1.9102253913879395, + "learning_rate": 9.15783979229723e-06, + "loss": 1.0097, + "step": 11590 + }, + { + "epoch": 0.8068442651457188, + "grad_norm": 3.6235358715057373, + "learning_rate": 9.155590816515372e-06, + "loss": 0.9685, + "step": 11600 + }, + { + "epoch": 0.8075398205467066, + "grad_norm": 1.8293380737304688, + "learning_rate": 9.153339118730735e-06, + "loss": 1.0328, + "step": 11610 + }, + { + "epoch": 0.8082353759476942, + "grad_norm": 3.006948947906494, + "learning_rate": 9.15108470041823e-06, + "loss": 0.9857, + "step": 11620 + }, + { + "epoch": 0.8089309313486819, + "grad_norm": 3.3616182804107666, + "learning_rate": 9.148827563054547e-06, + "loss": 1.0295, + "step": 11630 + }, + { + "epoch": 0.8096264867496696, + "grad_norm": 3.9848215579986572, + "learning_rate": 9.146567708118166e-06, + "loss": 0.9399, + "step": 11640 + }, + { + "epoch": 0.8103220421506573, + "grad_norm": 3.212236166000366, + "learning_rate": 9.144305137089338e-06, + "loss": 1.0084, + "step": 11650 + }, + { + "epoch": 0.811017597551645, + "grad_norm": 1.6579129695892334, + "learning_rate": 9.142039851450097e-06, + "loss": 0.9304, + "step": 11660 + }, + { + "epoch": 0.8117131529526327, + "grad_norm": 2.5798208713531494, + "learning_rate": 9.139771852684254e-06, + "loss": 1.0076, + "step": 11670 + }, + { + "epoch": 0.8124087083536203, + "grad_norm": 1.9309515953063965, + "learning_rate": 9.137501142277398e-06, + "loss": 1.0158, + "step": 11680 + }, + { + "epoch": 0.8131042637546081, + "grad_norm": 1.7512496709823608, + "learning_rate": 9.135227721716895e-06, + "loss": 1.0078, + "step": 11690 + }, + { + "epoch": 0.8137998191555957, + "grad_norm": 1.7853280305862427, + "learning_rate": 9.132951592491886e-06, + "loss": 1.0371, + "step": 11700 + }, + { + "epoch": 0.8144953745565834, + "grad_norm": 2.0795769691467285, + "learning_rate": 9.13067275609328e-06, + "loss": 1.0011, + "step": 11710 + }, + { + "epoch": 0.8151909299575711, + "grad_norm": 1.884053349494934, + "learning_rate": 9.12839121401377e-06, + "loss": 0.9769, + "step": 11720 + }, + { + "epoch": 0.8158864853585588, + "grad_norm": 3.656273365020752, + "learning_rate": 9.126106967747814e-06, + "loss": 0.9895, + "step": 11730 + }, + { + "epoch": 0.8165820407595465, + "grad_norm": 2.333209753036499, + "learning_rate": 9.123820018791645e-06, + "loss": 0.9435, + "step": 11740 + }, + { + "epoch": 0.8172775961605342, + "grad_norm": 2.0555002689361572, + "learning_rate": 9.121530368643263e-06, + "loss": 1.0323, + "step": 11750 + }, + { + "epoch": 0.8179731515615218, + "grad_norm": 3.1569812297821045, + "learning_rate": 9.119238018802437e-06, + "loss": 0.9402, + "step": 11760 + }, + { + "epoch": 0.8186687069625096, + "grad_norm": 1.6527621746063232, + "learning_rate": 9.116942970770709e-06, + "loss": 0.946, + 
"step": 11770 + }, + { + "epoch": 0.8193642623634972, + "grad_norm": 4.273525238037109, + "learning_rate": 9.114645226051385e-06, + "loss": 0.9906, + "step": 11780 + }, + { + "epoch": 0.8200598177644849, + "grad_norm": 1.720085859298706, + "learning_rate": 9.112344786149536e-06, + "loss": 0.9812, + "step": 11790 + }, + { + "epoch": 0.8207553731654726, + "grad_norm": 3.602309226989746, + "learning_rate": 9.110041652572006e-06, + "loss": 0.9923, + "step": 11800 + }, + { + "epoch": 0.8214509285664603, + "grad_norm": 4.719746112823486, + "learning_rate": 9.107735826827391e-06, + "loss": 0.9724, + "step": 11810 + }, + { + "epoch": 0.822146483967448, + "grad_norm": 1.810930848121643, + "learning_rate": 9.10542731042606e-06, + "loss": 0.9996, + "step": 11820 + }, + { + "epoch": 0.8228420393684357, + "grad_norm": 1.9131889343261719, + "learning_rate": 9.103116104880143e-06, + "loss": 0.9653, + "step": 11830 + }, + { + "epoch": 0.8235375947694233, + "grad_norm": 2.445328712463379, + "learning_rate": 9.100802211703528e-06, + "loss": 0.9527, + "step": 11840 + }, + { + "epoch": 0.8242331501704111, + "grad_norm": 1.3218022584915161, + "learning_rate": 9.098485632411868e-06, + "loss": 0.9666, + "step": 11850 + }, + { + "epoch": 0.8249287055713987, + "grad_norm": 2.850393533706665, + "learning_rate": 9.096166368522571e-06, + "loss": 0.9934, + "step": 11860 + }, + { + "epoch": 0.8256242609723865, + "grad_norm": 2.029785633087158, + "learning_rate": 9.093844421554804e-06, + "loss": 0.9631, + "step": 11870 + }, + { + "epoch": 0.8263198163733741, + "grad_norm": 4.33350133895874, + "learning_rate": 9.091519793029499e-06, + "loss": 0.9842, + "step": 11880 + }, + { + "epoch": 0.8270153717743618, + "grad_norm": 1.8066006898880005, + "learning_rate": 9.089192484469333e-06, + "loss": 0.894, + "step": 11890 + }, + { + "epoch": 0.8277109271753496, + "grad_norm": 3.2369089126586914, + "learning_rate": 9.086862497398745e-06, + "loss": 0.9937, + "step": 11900 + }, + { + "epoch": 0.8284064825763372, + "grad_norm": 1.446080207824707, + "learning_rate": 9.08452983334393e-06, + "loss": 0.9651, + "step": 11910 + }, + { + "epoch": 0.8291020379773248, + "grad_norm": 2.2237725257873535, + "learning_rate": 9.082194493832829e-06, + "loss": 1.0193, + "step": 11920 + }, + { + "epoch": 0.8297975933783126, + "grad_norm": 1.9130140542984009, + "learning_rate": 9.079856480395143e-06, + "loss": 0.9479, + "step": 11930 + }, + { + "epoch": 0.8304931487793002, + "grad_norm": 1.992829442024231, + "learning_rate": 9.077515794562326e-06, + "loss": 0.9457, + "step": 11940 + }, + { + "epoch": 0.831188704180288, + "grad_norm": 2.0158851146698, + "learning_rate": 9.075172437867572e-06, + "loss": 0.9952, + "step": 11950 + }, + { + "epoch": 0.8318842595812757, + "grad_norm": 1.763421893119812, + "learning_rate": 9.072826411845834e-06, + "loss": 1.005, + "step": 11960 + }, + { + "epoch": 0.8325798149822633, + "grad_norm": 3.52842378616333, + "learning_rate": 9.07047771803381e-06, + "loss": 0.9143, + "step": 11970 + }, + { + "epoch": 0.8332753703832511, + "grad_norm": 3.013589382171631, + "learning_rate": 9.068126357969944e-06, + "loss": 0.9405, + "step": 11980 + }, + { + "epoch": 0.8339709257842387, + "grad_norm": 3.948521375656128, + "learning_rate": 9.065772333194432e-06, + "loss": 0.9823, + "step": 11990 + }, + { + "epoch": 0.8346664811852263, + "grad_norm": 2.6132824420928955, + "learning_rate": 9.063415645249207e-06, + "loss": 1.0074, + "step": 12000 + }, + { + "epoch": 0.8346664811852263, + "eval_loss": 0.9802739024162292, + "eval_runtime": 
4607.3413, + "eval_samples_per_second": 3.942, + "eval_steps_per_second": 0.657, + "step": 12000 + }, + { + "epoch": 0.8353620365862141, + "grad_norm": 3.880948543548584, + "learning_rate": 9.061056295677955e-06, + "loss": 0.9818, + "step": 12010 + }, + { + "epoch": 0.8360575919872018, + "grad_norm": 2.4631187915802, + "learning_rate": 9.0586942860261e-06, + "loss": 0.9352, + "step": 12020 + }, + { + "epoch": 0.8367531473881895, + "grad_norm": 2.9436087608337402, + "learning_rate": 9.056329617840808e-06, + "loss": 0.941, + "step": 12030 + }, + { + "epoch": 0.8374487027891772, + "grad_norm": 1.4203734397888184, + "learning_rate": 9.053962292670992e-06, + "loss": 1.0158, + "step": 12040 + }, + { + "epoch": 0.8381442581901648, + "grad_norm": 2.796330213546753, + "learning_rate": 9.051592312067302e-06, + "loss": 0.9966, + "step": 12050 + }, + { + "epoch": 0.8388398135911526, + "grad_norm": 2.3619067668914795, + "learning_rate": 9.049219677582122e-06, + "loss": 0.9824, + "step": 12060 + }, + { + "epoch": 0.8395353689921402, + "grad_norm": 1.991422176361084, + "learning_rate": 9.046844390769582e-06, + "loss": 0.9712, + "step": 12070 + }, + { + "epoch": 0.840230924393128, + "grad_norm": 2.0296924114227295, + "learning_rate": 9.044466453185549e-06, + "loss": 0.9414, + "step": 12080 + }, + { + "epoch": 0.8409264797941156, + "grad_norm": 1.9094817638397217, + "learning_rate": 9.042085866387621e-06, + "loss": 0.9164, + "step": 12090 + }, + { + "epoch": 0.8416220351951033, + "grad_norm": 3.8172900676727295, + "learning_rate": 9.039702631935137e-06, + "loss": 0.9065, + "step": 12100 + }, + { + "epoch": 0.842317590596091, + "grad_norm": 1.4825770854949951, + "learning_rate": 9.037316751389164e-06, + "loss": 1.0359, + "step": 12110 + }, + { + "epoch": 0.8430131459970787, + "grad_norm": 2.5776805877685547, + "learning_rate": 9.034928226312511e-06, + "loss": 0.9957, + "step": 12120 + }, + { + "epoch": 0.8437087013980663, + "grad_norm": 4.232692718505859, + "learning_rate": 9.03253705826971e-06, + "loss": 0.9875, + "step": 12130 + }, + { + "epoch": 0.8444042567990541, + "grad_norm": 1.4343740940093994, + "learning_rate": 9.03014324882703e-06, + "loss": 0.9996, + "step": 12140 + }, + { + "epoch": 0.8450998122000417, + "grad_norm": 2.663133382797241, + "learning_rate": 9.027746799552469e-06, + "loss": 0.9783, + "step": 12150 + }, + { + "epoch": 0.8457953676010295, + "grad_norm": 2.600322723388672, + "learning_rate": 9.025347712015752e-06, + "loss": 0.9603, + "step": 12160 + }, + { + "epoch": 0.8464909230020171, + "grad_norm": 3.312680721282959, + "learning_rate": 9.022945987788332e-06, + "loss": 0.9425, + "step": 12170 + }, + { + "epoch": 0.8471864784030048, + "grad_norm": 2.5642881393432617, + "learning_rate": 9.020541628443395e-06, + "loss": 0.9989, + "step": 12180 + }, + { + "epoch": 0.8478820338039925, + "grad_norm": 2.0231070518493652, + "learning_rate": 9.018134635555848e-06, + "loss": 0.8794, + "step": 12190 + }, + { + "epoch": 0.8485775892049802, + "grad_norm": 2.5975289344787598, + "learning_rate": 9.015725010702321e-06, + "loss": 0.9935, + "step": 12200 + }, + { + "epoch": 0.8492731446059678, + "grad_norm": 7.324997425079346, + "learning_rate": 9.013312755461176e-06, + "loss": 0.9484, + "step": 12210 + }, + { + "epoch": 0.8499687000069556, + "grad_norm": 2.1895008087158203, + "learning_rate": 9.010897871412487e-06, + "loss": 0.993, + "step": 12220 + }, + { + "epoch": 0.8506642554079432, + "grad_norm": 4.597356796264648, + "learning_rate": 9.00848036013806e-06, + "loss": 1.0107, + "step": 12230 + }, 
+ { + "epoch": 0.851359810808931, + "grad_norm": 2.300265312194824, + "learning_rate": 9.006060223221417e-06, + "loss": 0.972, + "step": 12240 + }, + { + "epoch": 0.8520553662099186, + "grad_norm": 5.216679573059082, + "learning_rate": 9.003637462247801e-06, + "loss": 0.9857, + "step": 12250 + }, + { + "epoch": 0.8527509216109063, + "grad_norm": 2.263061046600342, + "learning_rate": 9.001212078804172e-06, + "loss": 0.9308, + "step": 12260 + }, + { + "epoch": 0.853446477011894, + "grad_norm": 1.8232241868972778, + "learning_rate": 8.99878407447921e-06, + "loss": 0.9391, + "step": 12270 + }, + { + "epoch": 0.8541420324128817, + "grad_norm": 1.9675838947296143, + "learning_rate": 8.996353450863307e-06, + "loss": 1.0314, + "step": 12280 + }, + { + "epoch": 0.8548375878138694, + "grad_norm": 3.0564322471618652, + "learning_rate": 8.99392020954858e-06, + "loss": 1.0028, + "step": 12290 + }, + { + "epoch": 0.8555331432148571, + "grad_norm": 1.6459615230560303, + "learning_rate": 8.991484352128853e-06, + "loss": 0.9635, + "step": 12300 + }, + { + "epoch": 0.8562286986158447, + "grad_norm": 1.9832383394241333, + "learning_rate": 8.989045880199669e-06, + "loss": 0.9361, + "step": 12310 + }, + { + "epoch": 0.8569242540168325, + "grad_norm": 4.27775239944458, + "learning_rate": 8.986604795358275e-06, + "loss": 1.024, + "step": 12320 + }, + { + "epoch": 0.8576198094178201, + "grad_norm": 5.151965141296387, + "learning_rate": 8.984161099203636e-06, + "loss": 0.9956, + "step": 12330 + }, + { + "epoch": 0.8583153648188078, + "grad_norm": 3.036284923553467, + "learning_rate": 8.98171479333643e-06, + "loss": 1.0214, + "step": 12340 + }, + { + "epoch": 0.8590109202197955, + "grad_norm": 2.848088502883911, + "learning_rate": 8.979265879359038e-06, + "loss": 0.9813, + "step": 12350 + }, + { + "epoch": 0.8597064756207832, + "grad_norm": 2.738024950027466, + "learning_rate": 8.976814358875553e-06, + "loss": 0.9999, + "step": 12360 + }, + { + "epoch": 0.8604020310217709, + "grad_norm": 4.439132213592529, + "learning_rate": 8.974360233491773e-06, + "loss": 1.0016, + "step": 12370 + }, + { + "epoch": 0.8610975864227586, + "grad_norm": 3.196666717529297, + "learning_rate": 8.971903504815205e-06, + "loss": 0.9581, + "step": 12380 + }, + { + "epoch": 0.8617931418237462, + "grad_norm": 2.150972604751587, + "learning_rate": 8.969444174455061e-06, + "loss": 0.9681, + "step": 12390 + }, + { + "epoch": 0.862488697224734, + "grad_norm": 1.983615517616272, + "learning_rate": 8.966982244022254e-06, + "loss": 0.9831, + "step": 12400 + }, + { + "epoch": 0.8631842526257216, + "grad_norm": 1.7294100522994995, + "learning_rate": 8.964517715129404e-06, + "loss": 0.9627, + "step": 12410 + }, + { + "epoch": 0.8638798080267093, + "grad_norm": 2.321531295776367, + "learning_rate": 8.962050589390829e-06, + "loss": 0.9687, + "step": 12420 + }, + { + "epoch": 0.864575363427697, + "grad_norm": 3.469329595565796, + "learning_rate": 8.959580868422554e-06, + "loss": 0.9147, + "step": 12430 + }, + { + "epoch": 0.8652709188286847, + "grad_norm": 5.686237335205078, + "learning_rate": 8.957108553842296e-06, + "loss": 1.0038, + "step": 12440 + }, + { + "epoch": 0.8659664742296724, + "grad_norm": 3.5261380672454834, + "learning_rate": 8.954633647269479e-06, + "loss": 0.9814, + "step": 12450 + }, + { + "epoch": 0.8666620296306601, + "grad_norm": 2.00789737701416, + "learning_rate": 8.952156150325217e-06, + "loss": 0.9291, + "step": 12460 + }, + { + "epoch": 0.8673575850316477, + "grad_norm": 1.5012083053588867, + "learning_rate": 
8.949676064632327e-06, + "loss": 0.9362, + "step": 12470 + }, + { + "epoch": 0.8680531404326355, + "grad_norm": 2.0159707069396973, + "learning_rate": 8.947193391815319e-06, + "loss": 0.9769, + "step": 12480 + }, + { + "epoch": 0.8687486958336231, + "grad_norm": 1.7505065202713013, + "learning_rate": 8.944708133500398e-06, + "loss": 0.9575, + "step": 12490 + }, + { + "epoch": 0.8694442512346109, + "grad_norm": 2.131394624710083, + "learning_rate": 8.942220291315463e-06, + "loss": 1.0279, + "step": 12500 + }, + { + "epoch": 0.8694442512346109, + "eval_loss": 0.9748206734657288, + "eval_runtime": 4586.4992, + "eval_samples_per_second": 3.96, + "eval_steps_per_second": 0.66, + "step": 12500 + }, + { + "epoch": 0.8701398066355985, + "grad_norm": 1.928750991821289, + "learning_rate": 8.939729866890103e-06, + "loss": 0.9868, + "step": 12510 + }, + { + "epoch": 0.8708353620365862, + "grad_norm": 1.864011526107788, + "learning_rate": 8.937236861855602e-06, + "loss": 1.0081, + "step": 12520 + }, + { + "epoch": 0.8715309174375739, + "grad_norm": 1.6580491065979004, + "learning_rate": 8.934741277844933e-06, + "loss": 0.9721, + "step": 12530 + }, + { + "epoch": 0.8722264728385616, + "grad_norm": 1.8358879089355469, + "learning_rate": 8.932243116492756e-06, + "loss": 0.9718, + "step": 12540 + }, + { + "epoch": 0.8729220282395492, + "grad_norm": 3.139885663986206, + "learning_rate": 8.929742379435424e-06, + "loss": 0.9831, + "step": 12550 + }, + { + "epoch": 0.873617583640537, + "grad_norm": 1.8078655004501343, + "learning_rate": 8.927239068310973e-06, + "loss": 0.9494, + "step": 12560 + }, + { + "epoch": 0.8743131390415246, + "grad_norm": 2.946500301361084, + "learning_rate": 8.924733184759127e-06, + "loss": 0.9733, + "step": 12570 + }, + { + "epoch": 0.8750086944425124, + "grad_norm": 1.7697480916976929, + "learning_rate": 8.922224730421294e-06, + "loss": 0.9505, + "step": 12580 + }, + { + "epoch": 0.8757042498435, + "grad_norm": 2.418221950531006, + "learning_rate": 8.919713706940566e-06, + "loss": 0.9612, + "step": 12590 + }, + { + "epoch": 0.8763998052444877, + "grad_norm": 2.099231481552124, + "learning_rate": 8.917200115961719e-06, + "loss": 0.9474, + "step": 12600 + }, + { + "epoch": 0.8770953606454754, + "grad_norm": 2.5777673721313477, + "learning_rate": 8.91468395913121e-06, + "loss": 0.9983, + "step": 12610 + }, + { + "epoch": 0.8777909160464631, + "grad_norm": 1.385313868522644, + "learning_rate": 8.912165238097177e-06, + "loss": 0.9967, + "step": 12620 + }, + { + "epoch": 0.8784864714474507, + "grad_norm": 2.078000545501709, + "learning_rate": 8.909643954509435e-06, + "loss": 0.9305, + "step": 12630 + }, + { + "epoch": 0.8791820268484385, + "grad_norm": 3.9489481449127197, + "learning_rate": 8.907120110019483e-06, + "loss": 0.9931, + "step": 12640 + }, + { + "epoch": 0.8798775822494261, + "grad_norm": 2.4401168823242188, + "learning_rate": 8.904593706280493e-06, + "loss": 0.9277, + "step": 12650 + }, + { + "epoch": 0.8805731376504139, + "grad_norm": 2.487644910812378, + "learning_rate": 8.902064744947314e-06, + "loss": 0.9343, + "step": 12660 + }, + { + "epoch": 0.8812686930514015, + "grad_norm": 2.0721242427825928, + "learning_rate": 8.899533227676471e-06, + "loss": 0.9559, + "step": 12670 + }, + { + "epoch": 0.8819642484523892, + "grad_norm": 2.0462698936462402, + "learning_rate": 8.896999156126165e-06, + "loss": 0.944, + "step": 12680 + }, + { + "epoch": 0.882659803853377, + "grad_norm": 1.8234336376190186, + "learning_rate": 8.894462531956266e-06, + "loss": 0.9823, + "step": 12690 + }, 
+ { + "epoch": 0.8833553592543646, + "grad_norm": 3.525721549987793, + "learning_rate": 8.89192335682832e-06, + "loss": 0.9987, + "step": 12700 + }, + { + "epoch": 0.8840509146553523, + "grad_norm": 2.562140464782715, + "learning_rate": 8.88938163240554e-06, + "loss": 0.927, + "step": 12710 + }, + { + "epoch": 0.88474647005634, + "grad_norm": 1.9773950576782227, + "learning_rate": 8.886837360352814e-06, + "loss": 0.9959, + "step": 12720 + }, + { + "epoch": 0.8854420254573276, + "grad_norm": 2.449106216430664, + "learning_rate": 8.884290542336692e-06, + "loss": 0.9346, + "step": 12730 + }, + { + "epoch": 0.8861375808583154, + "grad_norm": 1.937443733215332, + "learning_rate": 8.881741180025398e-06, + "loss": 0.9986, + "step": 12740 + }, + { + "epoch": 0.886833136259303, + "grad_norm": 2.7920517921447754, + "learning_rate": 8.87918927508882e-06, + "loss": 0.9844, + "step": 12750 + }, + { + "epoch": 0.8875286916602907, + "grad_norm": 2.306570529937744, + "learning_rate": 8.876634829198511e-06, + "loss": 0.9344, + "step": 12760 + }, + { + "epoch": 0.8882242470612784, + "grad_norm": 2.471832275390625, + "learning_rate": 8.87407784402769e-06, + "loss": 1.0087, + "step": 12770 + }, + { + "epoch": 0.8889198024622661, + "grad_norm": 2.1264467239379883, + "learning_rate": 8.871518321251235e-06, + "loss": 0.9321, + "step": 12780 + }, + { + "epoch": 0.8896153578632539, + "grad_norm": 2.327498435974121, + "learning_rate": 8.868956262545694e-06, + "loss": 0.9813, + "step": 12790 + }, + { + "epoch": 0.8903109132642415, + "grad_norm": 1.6814820766448975, + "learning_rate": 8.866391669589268e-06, + "loss": 0.9538, + "step": 12800 + }, + { + "epoch": 0.8910064686652291, + "grad_norm": 1.7784655094146729, + "learning_rate": 8.86382454406182e-06, + "loss": 0.9462, + "step": 12810 + }, + { + "epoch": 0.8917020240662169, + "grad_norm": 2.3006210327148438, + "learning_rate": 8.861254887644877e-06, + "loss": 1.0294, + "step": 12820 + }, + { + "epoch": 0.8923975794672045, + "grad_norm": 2.74394154548645, + "learning_rate": 8.85868270202162e-06, + "loss": 0.9982, + "step": 12830 + }, + { + "epoch": 0.8930931348681923, + "grad_norm": 3.555579900741577, + "learning_rate": 8.856107988876884e-06, + "loss": 0.9136, + "step": 12840 + }, + { + "epoch": 0.89378869026918, + "grad_norm": 2.412964105606079, + "learning_rate": 8.853530749897163e-06, + "loss": 0.9452, + "step": 12850 + }, + { + "epoch": 0.8944842456701676, + "grad_norm": 2.0192527770996094, + "learning_rate": 8.850950986770607e-06, + "loss": 0.95, + "step": 12860 + }, + { + "epoch": 0.8951798010711554, + "grad_norm": 2.1989331245422363, + "learning_rate": 8.848368701187015e-06, + "loss": 0.9751, + "step": 12870 + }, + { + "epoch": 0.895875356472143, + "grad_norm": 2.443451404571533, + "learning_rate": 8.845783894837843e-06, + "loss": 0.9768, + "step": 12880 + }, + { + "epoch": 0.8965709118731306, + "grad_norm": 4.700780868530273, + "learning_rate": 8.843196569416192e-06, + "loss": 0.9969, + "step": 12890 + }, + { + "epoch": 0.8972664672741184, + "grad_norm": 2.4961307048797607, + "learning_rate": 8.84060672661682e-06, + "loss": 1.0022, + "step": 12900 + }, + { + "epoch": 0.897962022675106, + "grad_norm": 2.4468812942504883, + "learning_rate": 8.83801436813613e-06, + "loss": 0.9858, + "step": 12910 + }, + { + "epoch": 0.8986575780760938, + "grad_norm": 2.2721283435821533, + "learning_rate": 8.83541949567217e-06, + "loss": 1.0144, + "step": 12920 + }, + { + "epoch": 0.8993531334770815, + "grad_norm": 2.386091470718384, + "learning_rate": 8.832822110924644e-06, + 
"loss": 0.9523, + "step": 12930 + }, + { + "epoch": 0.9000486888780691, + "grad_norm": 3.5671725273132324, + "learning_rate": 8.83022221559489e-06, + "loss": 0.9547, + "step": 12940 + }, + { + "epoch": 0.9007442442790569, + "grad_norm": 2.6562201976776123, + "learning_rate": 8.827619811385901e-06, + "loss": 0.909, + "step": 12950 + }, + { + "epoch": 0.9014397996800445, + "grad_norm": 2.6291422843933105, + "learning_rate": 8.825014900002306e-06, + "loss": 0.9428, + "step": 12960 + }, + { + "epoch": 0.9021353550810322, + "grad_norm": 1.7787702083587646, + "learning_rate": 8.82240748315038e-06, + "loss": 0.924, + "step": 12970 + }, + { + "epoch": 0.9028309104820199, + "grad_norm": 2.30851411819458, + "learning_rate": 8.819797562538035e-06, + "loss": 0.9838, + "step": 12980 + }, + { + "epoch": 0.9035264658830076, + "grad_norm": 3.8084182739257812, + "learning_rate": 8.817185139874828e-06, + "loss": 0.9853, + "step": 12990 + }, + { + "epoch": 0.9042220212839953, + "grad_norm": 2.3703219890594482, + "learning_rate": 8.814570216871958e-06, + "loss": 1.0156, + "step": 13000 + }, + { + "epoch": 0.9042220212839953, + "eval_loss": 0.9730282425880432, + "eval_runtime": 4571.7766, + "eval_samples_per_second": 3.972, + "eval_steps_per_second": 0.662, + "step": 13000 + }, + { + "epoch": 0.904917576684983, + "grad_norm": 2.0556037425994873, + "learning_rate": 8.811952795242248e-06, + "loss": 1.0066, + "step": 13010 + }, + { + "epoch": 0.9056131320859706, + "grad_norm": 2.0021297931671143, + "learning_rate": 8.809332876700173e-06, + "loss": 0.9532, + "step": 13020 + }, + { + "epoch": 0.9063086874869584, + "grad_norm": 2.1832666397094727, + "learning_rate": 8.806710462961831e-06, + "loss": 1.002, + "step": 13030 + }, + { + "epoch": 0.907004242887946, + "grad_norm": 3.0259506702423096, + "learning_rate": 8.804085555744966e-06, + "loss": 0.9913, + "step": 13040 + }, + { + "epoch": 0.9076997982889338, + "grad_norm": 1.360175371170044, + "learning_rate": 8.801458156768945e-06, + "loss": 0.9624, + "step": 13050 + }, + { + "epoch": 0.9083953536899214, + "grad_norm": 1.778806209564209, + "learning_rate": 8.798828267754775e-06, + "loss": 0.9639, + "step": 13060 + }, + { + "epoch": 0.9090909090909091, + "grad_norm": 3.9381191730499268, + "learning_rate": 8.796195890425092e-06, + "loss": 0.9452, + "step": 13070 + }, + { + "epoch": 0.9097864644918968, + "grad_norm": 3.4939966201782227, + "learning_rate": 8.793561026504156e-06, + "loss": 0.9601, + "step": 13080 + }, + { + "epoch": 0.9104820198928845, + "grad_norm": 1.903119683265686, + "learning_rate": 8.790923677717861e-06, + "loss": 0.9073, + "step": 13090 + }, + { + "epoch": 0.9111775752938721, + "grad_norm": 2.798227310180664, + "learning_rate": 8.788283845793733e-06, + "loss": 0.9474, + "step": 13100 + }, + { + "epoch": 0.9118731306948599, + "grad_norm": 3.813565254211426, + "learning_rate": 8.785641532460916e-06, + "loss": 0.9813, + "step": 13110 + }, + { + "epoch": 0.9125686860958475, + "grad_norm": 1.6967185735702515, + "learning_rate": 8.782996739450182e-06, + "loss": 0.9714, + "step": 13120 + }, + { + "epoch": 0.9132642414968353, + "grad_norm": 1.700583577156067, + "learning_rate": 8.78034946849393e-06, + "loss": 0.947, + "step": 13130 + }, + { + "epoch": 0.9139597968978229, + "grad_norm": 2.0121889114379883, + "learning_rate": 8.777699721326181e-06, + "loss": 0.9403, + "step": 13140 + }, + { + "epoch": 0.9146553522988106, + "grad_norm": 2.822720527648926, + "learning_rate": 8.775047499682576e-06, + "loss": 1.0194, + "step": 13150 + }, + { + "epoch": 
0.9153509076997983, + "grad_norm": 3.1907718181610107, + "learning_rate": 8.772392805300377e-06, + "loss": 0.9457, + "step": 13160 + }, + { + "epoch": 0.916046463100786, + "grad_norm": 2.5841588973999023, + "learning_rate": 8.769735639918468e-06, + "loss": 1.0187, + "step": 13170 + }, + { + "epoch": 0.9167420185017736, + "grad_norm": 1.799480676651001, + "learning_rate": 8.767076005277351e-06, + "loss": 0.9771, + "step": 13180 + }, + { + "epoch": 0.9174375739027614, + "grad_norm": 2.08625864982605, + "learning_rate": 8.764413903119147e-06, + "loss": 1.031, + "step": 13190 + }, + { + "epoch": 0.918133129303749, + "grad_norm": 1.9750385284423828, + "learning_rate": 8.761749335187583e-06, + "loss": 0.9925, + "step": 13200 + }, + { + "epoch": 0.9188286847047368, + "grad_norm": 2.5532047748565674, + "learning_rate": 8.75908230322802e-06, + "loss": 0.9656, + "step": 13210 + }, + { + "epoch": 0.9195242401057244, + "grad_norm": 5.225860118865967, + "learning_rate": 8.756412808987413e-06, + "loss": 0.9129, + "step": 13220 + }, + { + "epoch": 0.9202197955067121, + "grad_norm": 2.2022864818573, + "learning_rate": 8.753740854214345e-06, + "loss": 0.9599, + "step": 13230 + }, + { + "epoch": 0.9209153509076998, + "grad_norm": 1.4189292192459106, + "learning_rate": 8.751066440659001e-06, + "loss": 0.9611, + "step": 13240 + }, + { + "epoch": 0.9216109063086875, + "grad_norm": 2.5763726234436035, + "learning_rate": 8.748389570073183e-06, + "loss": 1.0084, + "step": 13250 + }, + { + "epoch": 0.9223064617096752, + "grad_norm": 3.897679090499878, + "learning_rate": 8.745710244210299e-06, + "loss": 0.9182, + "step": 13260 + }, + { + "epoch": 0.9230020171106629, + "grad_norm": 2.09405779838562, + "learning_rate": 8.743028464825365e-06, + "loss": 0.941, + "step": 13270 + }, + { + "epoch": 0.9236975725116505, + "grad_norm": 1.8565291166305542, + "learning_rate": 8.740344233675006e-06, + "loss": 0.968, + "step": 13280 + }, + { + "epoch": 0.9243931279126383, + "grad_norm": 1.944518804550171, + "learning_rate": 8.737657552517452e-06, + "loss": 0.9516, + "step": 13290 + }, + { + "epoch": 0.9250886833136259, + "grad_norm": 2.2532875537872314, + "learning_rate": 8.734968423112538e-06, + "loss": 0.9938, + "step": 13300 + }, + { + "epoch": 0.9257842387146136, + "grad_norm": 2.909119129180908, + "learning_rate": 8.7322768472217e-06, + "loss": 0.9471, + "step": 13310 + }, + { + "epoch": 0.9264797941156013, + "grad_norm": 4.586622714996338, + "learning_rate": 8.729582826607984e-06, + "loss": 0.9711, + "step": 13320 + }, + { + "epoch": 0.927175349516589, + "grad_norm": 2.420353651046753, + "learning_rate": 8.726886363036029e-06, + "loss": 0.9459, + "step": 13330 + }, + { + "epoch": 0.9278709049175767, + "grad_norm": 2.205770492553711, + "learning_rate": 8.724187458272075e-06, + "loss": 0.9267, + "step": 13340 + }, + { + "epoch": 0.9285664603185644, + "grad_norm": 2.5123190879821777, + "learning_rate": 8.72148611408397e-06, + "loss": 0.9605, + "step": 13350 + }, + { + "epoch": 0.929262015719552, + "grad_norm": 3.484818935394287, + "learning_rate": 8.71878233224115e-06, + "loss": 0.9143, + "step": 13360 + }, + { + "epoch": 0.9299575711205398, + "grad_norm": 5.252190589904785, + "learning_rate": 8.716076114514649e-06, + "loss": 1.0003, + "step": 13370 + }, + { + "epoch": 0.9306531265215274, + "grad_norm": 2.173283100128174, + "learning_rate": 8.713367462677102e-06, + "loss": 0.9147, + "step": 13380 + }, + { + "epoch": 0.9313486819225151, + "grad_norm": 1.7559391260147095, + "learning_rate": 8.710656378502735e-06, + "loss": 
0.9605, + "step": 13390 + }, + { + "epoch": 0.9320442373235028, + "grad_norm": 1.5544240474700928, + "learning_rate": 8.707942863767367e-06, + "loss": 1.0106, + "step": 13400 + }, + { + "epoch": 0.9327397927244905, + "grad_norm": 3.0416088104248047, + "learning_rate": 8.705226920248409e-06, + "loss": 0.9732, + "step": 13410 + }, + { + "epoch": 0.9334353481254782, + "grad_norm": 4.1010894775390625, + "learning_rate": 8.702508549724863e-06, + "loss": 0.986, + "step": 13420 + }, + { + "epoch": 0.9341309035264659, + "grad_norm": 3.4233736991882324, + "learning_rate": 8.699787753977319e-06, + "loss": 0.9707, + "step": 13430 + }, + { + "epoch": 0.9348264589274535, + "grad_norm": 2.1837637424468994, + "learning_rate": 8.697064534787963e-06, + "loss": 0.9209, + "step": 13440 + }, + { + "epoch": 0.9355220143284413, + "grad_norm": 1.4565198421478271, + "learning_rate": 8.69433889394056e-06, + "loss": 0.9858, + "step": 13450 + }, + { + "epoch": 0.9362175697294289, + "grad_norm": 3.4957103729248047, + "learning_rate": 8.691610833220463e-06, + "loss": 0.9604, + "step": 13460 + }, + { + "epoch": 0.9369131251304167, + "grad_norm": 1.8053743839263916, + "learning_rate": 8.688880354414612e-06, + "loss": 1.005, + "step": 13470 + }, + { + "epoch": 0.9376086805314043, + "grad_norm": 3.2275493144989014, + "learning_rate": 8.686147459311534e-06, + "loss": 1.0124, + "step": 13480 + }, + { + "epoch": 0.938304235932392, + "grad_norm": 1.7845708131790161, + "learning_rate": 8.68341214970133e-06, + "loss": 0.9929, + "step": 13490 + }, + { + "epoch": 0.9389997913333797, + "grad_norm": 3.232656240463257, + "learning_rate": 8.68067442737569e-06, + "loss": 0.994, + "step": 13500 + }, + { + "epoch": 0.9389997913333797, + "eval_loss": 0.9677397608757019, + "eval_runtime": 4592.9427, + "eval_samples_per_second": 3.954, + "eval_steps_per_second": 0.659, + "step": 13500 + }, + { + "epoch": 0.9396953467343674, + "grad_norm": 2.372960329055786, + "learning_rate": 8.677934294127883e-06, + "loss": 0.9564, + "step": 13510 + }, + { + "epoch": 0.940390902135355, + "grad_norm": 2.175845146179199, + "learning_rate": 8.675191751752752e-06, + "loss": 0.9552, + "step": 13520 + }, + { + "epoch": 0.9410864575363428, + "grad_norm": 1.5812195539474487, + "learning_rate": 8.672446802046729e-06, + "loss": 1.0089, + "step": 13530 + }, + { + "epoch": 0.9417820129373304, + "grad_norm": 1.8181036710739136, + "learning_rate": 8.66969944680781e-06, + "loss": 0.9021, + "step": 13540 + }, + { + "epoch": 0.9424775683383182, + "grad_norm": 2.4824655055999756, + "learning_rate": 8.666949687835574e-06, + "loss": 0.9935, + "step": 13550 + }, + { + "epoch": 0.9431731237393058, + "grad_norm": 2.1550769805908203, + "learning_rate": 8.664197526931173e-06, + "loss": 0.9667, + "step": 13560 + }, + { + "epoch": 0.9438686791402935, + "grad_norm": 3.344862937927246, + "learning_rate": 8.661442965897335e-06, + "loss": 0.9613, + "step": 13570 + }, + { + "epoch": 0.9445642345412812, + "grad_norm": 2.042036771774292, + "learning_rate": 8.658686006538356e-06, + "loss": 1.0134, + "step": 13580 + }, + { + "epoch": 0.9452597899422689, + "grad_norm": 5.747425079345703, + "learning_rate": 8.655926650660104e-06, + "loss": 0.9529, + "step": 13590 + }, + { + "epoch": 0.9459553453432565, + "grad_norm": 1.9617727994918823, + "learning_rate": 8.653164900070019e-06, + "loss": 0.9499, + "step": 13600 + }, + { + "epoch": 0.9466509007442443, + "grad_norm": 1.7576738595962524, + "learning_rate": 8.650400756577107e-06, + "loss": 0.9541, + "step": 13610 + }, + { + "epoch": 
0.9473464561452319, + "grad_norm": 1.453457236289978, + "learning_rate": 8.647634221991941e-06, + "loss": 0.9305, + "step": 13620 + }, + { + "epoch": 0.9480420115462197, + "grad_norm": 2.4770960807800293, + "learning_rate": 8.644865298126663e-06, + "loss": 0.9075, + "step": 13630 + }, + { + "epoch": 0.9487375669472073, + "grad_norm": 1.8332018852233887, + "learning_rate": 8.642093986794982e-06, + "loss": 0.9999, + "step": 13640 + }, + { + "epoch": 0.949433122348195, + "grad_norm": 1.9793848991394043, + "learning_rate": 8.63932028981216e-06, + "loss": 0.9731, + "step": 13650 + }, + { + "epoch": 0.9501286777491827, + "grad_norm": 3.6468584537506104, + "learning_rate": 8.636544208995036e-06, + "loss": 0.9433, + "step": 13660 + }, + { + "epoch": 0.9508242331501704, + "grad_norm": 2.267899990081787, + "learning_rate": 8.633765746161999e-06, + "loss": 1.0281, + "step": 13670 + }, + { + "epoch": 0.9515197885511582, + "grad_norm": 2.956336259841919, + "learning_rate": 8.630984903133005e-06, + "loss": 1.0245, + "step": 13680 + }, + { + "epoch": 0.9522153439521458, + "grad_norm": 1.7231134176254272, + "learning_rate": 8.62820168172957e-06, + "loss": 0.9134, + "step": 13690 + }, + { + "epoch": 0.9529108993531334, + "grad_norm": 2.645479917526245, + "learning_rate": 8.625416083774758e-06, + "loss": 0.919, + "step": 13700 + }, + { + "epoch": 0.9536064547541212, + "grad_norm": 2.1462292671203613, + "learning_rate": 8.6226281110932e-06, + "loss": 0.9516, + "step": 13710 + }, + { + "epoch": 0.9543020101551088, + "grad_norm": 4.928686618804932, + "learning_rate": 8.61983776551108e-06, + "loss": 1.0379, + "step": 13720 + }, + { + "epoch": 0.9549975655560965, + "grad_norm": 1.561934471130371, + "learning_rate": 8.617045048856134e-06, + "loss": 0.9737, + "step": 13730 + }, + { + "epoch": 0.9556931209570843, + "grad_norm": 3.4714345932006836, + "learning_rate": 8.614249962957653e-06, + "loss": 1.0468, + "step": 13740 + }, + { + "epoch": 0.9563886763580719, + "grad_norm": 2.7518184185028076, + "learning_rate": 8.61145250964648e-06, + "loss": 0.9479, + "step": 13750 + }, + { + "epoch": 0.9570842317590597, + "grad_norm": 1.7943062782287598, + "learning_rate": 8.608652690755006e-06, + "loss": 0.9488, + "step": 13760 + }, + { + "epoch": 0.9577797871600473, + "grad_norm": 2.9543495178222656, + "learning_rate": 8.605850508117176e-06, + "loss": 0.9801, + "step": 13770 + }, + { + "epoch": 0.958475342561035, + "grad_norm": 1.5785011053085327, + "learning_rate": 8.603045963568477e-06, + "loss": 1.0198, + "step": 13780 + }, + { + "epoch": 0.9591708979620227, + "grad_norm": 3.4022629261016846, + "learning_rate": 8.600239058945952e-06, + "loss": 0.934, + "step": 13790 + }, + { + "epoch": 0.9598664533630104, + "grad_norm": 3.18458890914917, + "learning_rate": 8.597429796088182e-06, + "loss": 0.9188, + "step": 13800 + }, + { + "epoch": 0.960562008763998, + "grad_norm": 3.360553741455078, + "learning_rate": 8.594618176835294e-06, + "loss": 0.9158, + "step": 13810 + }, + { + "epoch": 0.9612575641649858, + "grad_norm": 2.93894362449646, + "learning_rate": 8.591804203028963e-06, + "loss": 0.9886, + "step": 13820 + }, + { + "epoch": 0.9619531195659734, + "grad_norm": 1.8637430667877197, + "learning_rate": 8.588987876512402e-06, + "loss": 0.9832, + "step": 13830 + }, + { + "epoch": 0.9626486749669612, + "grad_norm": 3.98600172996521, + "learning_rate": 8.586169199130368e-06, + "loss": 0.9954, + "step": 13840 + }, + { + "epoch": 0.9633442303679488, + "grad_norm": 2.066798686981201, + "learning_rate": 8.583348172729153e-06, + "loss": 
0.9606, + "step": 13850 + }, + { + "epoch": 0.9640397857689365, + "grad_norm": 1.8230609893798828, + "learning_rate": 8.580524799156596e-06, + "loss": 0.9611, + "step": 13860 + }, + { + "epoch": 0.9647353411699242, + "grad_norm": 3.3776772022247314, + "learning_rate": 8.577699080262062e-06, + "loss": 0.997, + "step": 13870 + }, + { + "epoch": 0.9654308965709119, + "grad_norm": 1.45298433303833, + "learning_rate": 8.574871017896463e-06, + "loss": 0.9424, + "step": 13880 + }, + { + "epoch": 0.9661264519718996, + "grad_norm": 1.8044461011886597, + "learning_rate": 8.572040613912241e-06, + "loss": 0.9516, + "step": 13890 + }, + { + "epoch": 0.9668220073728873, + "grad_norm": 2.7098989486694336, + "learning_rate": 8.569207870163372e-06, + "loss": 0.9866, + "step": 13900 + }, + { + "epoch": 0.9675175627738749, + "grad_norm": 2.5459578037261963, + "learning_rate": 8.566372788505364e-06, + "loss": 1.008, + "step": 13910 + }, + { + "epoch": 0.9682131181748627, + "grad_norm": 1.7982382774353027, + "learning_rate": 8.56353537079526e-06, + "loss": 1.001, + "step": 13920 + }, + { + "epoch": 0.9689086735758503, + "grad_norm": 3.0038814544677734, + "learning_rate": 8.560695618891627e-06, + "loss": 1.0117, + "step": 13930 + }, + { + "epoch": 0.969604228976838, + "grad_norm": 3.195246458053589, + "learning_rate": 8.557853534654568e-06, + "loss": 0.9974, + "step": 13940 + }, + { + "epoch": 0.9702997843778257, + "grad_norm": 2.2115116119384766, + "learning_rate": 8.555009119945708e-06, + "loss": 0.9897, + "step": 13950 + }, + { + "epoch": 0.9709953397788134, + "grad_norm": 1.9018934965133667, + "learning_rate": 8.552162376628203e-06, + "loss": 0.935, + "step": 13960 + }, + { + "epoch": 0.9716908951798011, + "grad_norm": 3.7653276920318604, + "learning_rate": 8.54931330656673e-06, + "loss": 0.9685, + "step": 13970 + }, + { + "epoch": 0.9723864505807888, + "grad_norm": 5.348063945770264, + "learning_rate": 8.54646191162749e-06, + "loss": 0.9332, + "step": 13980 + }, + { + "epoch": 0.9730820059817764, + "grad_norm": 2.070491313934326, + "learning_rate": 8.543608193678216e-06, + "loss": 0.9057, + "step": 13990 + }, + { + "epoch": 0.9737775613827642, + "grad_norm": 1.6753884553909302, + "learning_rate": 8.54075215458815e-06, + "loss": 0.877, + "step": 14000 + }, + { + "epoch": 0.9737775613827642, + "eval_loss": 0.9670655131340027, + "eval_runtime": 4593.2426, + "eval_samples_per_second": 3.954, + "eval_steps_per_second": 0.659, + "step": 14000 + }, + { + "epoch": 0.9744731167837518, + "grad_norm": 1.7773000001907349, + "learning_rate": 8.537893796228061e-06, + "loss": 0.956, + "step": 14010 + }, + { + "epoch": 0.9751686721847395, + "grad_norm": 2.8914554119110107, + "learning_rate": 8.535033120470237e-06, + "loss": 0.959, + "step": 14020 + }, + { + "epoch": 0.9758642275857272, + "grad_norm": 4.496715068817139, + "learning_rate": 8.532170129188482e-06, + "loss": 0.9426, + "step": 14030 + }, + { + "epoch": 0.9765597829867149, + "grad_norm": 1.9260976314544678, + "learning_rate": 8.52930482425812e-06, + "loss": 0.9582, + "step": 14040 + }, + { + "epoch": 0.9772553383877026, + "grad_norm": 4.6378350257873535, + "learning_rate": 8.526437207555986e-06, + "loss": 0.9809, + "step": 14050 + }, + { + "epoch": 0.9779508937886903, + "grad_norm": 1.7122628688812256, + "learning_rate": 8.523567280960433e-06, + "loss": 0.9341, + "step": 14060 + }, + { + "epoch": 0.9786464491896779, + "grad_norm": 7.955687999725342, + "learning_rate": 8.520695046351324e-06, + "loss": 0.9302, + "step": 14070 + }, + { + "epoch": 
0.9793420045906657, + "grad_norm": 3.14459490776062, + "learning_rate": 8.517820505610038e-06, + "loss": 1.0111, + "step": 14080 + }, + { + "epoch": 0.9800375599916533, + "grad_norm": 2.054530382156372, + "learning_rate": 8.514943660619459e-06, + "loss": 0.9941, + "step": 14090 + }, + { + "epoch": 0.9807331153926411, + "grad_norm": 1.4441908597946167, + "learning_rate": 8.512064513263986e-06, + "loss": 0.9739, + "step": 14100 + }, + { + "epoch": 0.9814286707936287, + "grad_norm": 2.1355228424072266, + "learning_rate": 8.509183065429522e-06, + "loss": 0.9482, + "step": 14110 + }, + { + "epoch": 0.9821242261946164, + "grad_norm": 1.8478132486343384, + "learning_rate": 8.50629931900348e-06, + "loss": 0.9937, + "step": 14120 + }, + { + "epoch": 0.9828197815956041, + "grad_norm": 2.2707157135009766, + "learning_rate": 8.503413275874773e-06, + "loss": 1.0074, + "step": 14130 + }, + { + "epoch": 0.9835153369965918, + "grad_norm": 2.216323137283325, + "learning_rate": 8.500524937933826e-06, + "loss": 0.9262, + "step": 14140 + }, + { + "epoch": 0.9842108923975794, + "grad_norm": 2.1001882553100586, + "learning_rate": 8.497634307072562e-06, + "loss": 0.934, + "step": 14150 + }, + { + "epoch": 0.9849064477985672, + "grad_norm": 1.9876182079315186, + "learning_rate": 8.494741385184408e-06, + "loss": 0.9506, + "step": 14160 + }, + { + "epoch": 0.9856020031995548, + "grad_norm": 2.1037468910217285, + "learning_rate": 8.49184617416429e-06, + "loss": 0.9562, + "step": 14170 + }, + { + "epoch": 0.9862975586005426, + "grad_norm": 2.8352832794189453, + "learning_rate": 8.488948675908637e-06, + "loss": 0.9949, + "step": 14180 + }, + { + "epoch": 0.9869931140015302, + "grad_norm": 4.05332612991333, + "learning_rate": 8.486048892315369e-06, + "loss": 0.8504, + "step": 14190 + }, + { + "epoch": 0.9876886694025179, + "grad_norm": 2.2888882160186768, + "learning_rate": 8.483146825283912e-06, + "loss": 1.021, + "step": 14200 + }, + { + "epoch": 0.9883842248035056, + "grad_norm": 2.5956735610961914, + "learning_rate": 8.480242476715181e-06, + "loss": 0.9301, + "step": 14210 + }, + { + "epoch": 0.9890797802044933, + "grad_norm": 1.8318628072738647, + "learning_rate": 8.477335848511589e-06, + "loss": 0.9734, + "step": 14220 + }, + { + "epoch": 0.9897753356054809, + "grad_norm": 1.4738233089447021, + "learning_rate": 8.474426942577041e-06, + "loss": 0.8943, + "step": 14230 + }, + { + "epoch": 0.9904708910064687, + "grad_norm": 1.5479012727737427, + "learning_rate": 8.471515760816932e-06, + "loss": 0.9218, + "step": 14240 + }, + { + "epoch": 0.9911664464074563, + "grad_norm": 1.5006170272827148, + "learning_rate": 8.468602305138154e-06, + "loss": 0.97, + "step": 14250 + }, + { + "epoch": 0.9918620018084441, + "grad_norm": 1.569976806640625, + "learning_rate": 8.46568657744908e-06, + "loss": 0.9998, + "step": 14260 + }, + { + "epoch": 0.9925575572094317, + "grad_norm": 4.0301899909973145, + "learning_rate": 8.462768579659575e-06, + "loss": 0.978, + "step": 14270 + }, + { + "epoch": 0.9932531126104194, + "grad_norm": 3.188709020614624, + "learning_rate": 8.459848313680994e-06, + "loss": 0.9702, + "step": 14280 + }, + { + "epoch": 0.9939486680114071, + "grad_norm": 2.110725164413452, + "learning_rate": 8.456925781426173e-06, + "loss": 0.9713, + "step": 14290 + }, + { + "epoch": 0.9946442234123948, + "grad_norm": 1.8988115787506104, + "learning_rate": 8.454000984809437e-06, + "loss": 0.9412, + "step": 14300 + }, + { + "epoch": 0.9953397788133825, + "grad_norm": 2.4722671508789062, + "learning_rate": 8.451073925746586e-06, + 
"loss": 0.9689, + "step": 14310 + }, + { + "epoch": 0.9960353342143702, + "grad_norm": 5.469512462615967, + "learning_rate": 8.448144606154917e-06, + "loss": 0.9461, + "step": 14320 + }, + { + "epoch": 0.9967308896153578, + "grad_norm": 3.445692777633667, + "learning_rate": 8.445213027953189e-06, + "loss": 0.9242, + "step": 14330 + }, + { + "epoch": 0.9974264450163456, + "grad_norm": 2.4500815868377686, + "learning_rate": 8.442279193061656e-06, + "loss": 0.9117, + "step": 14340 + }, + { + "epoch": 0.9981220004173332, + "grad_norm": 1.793296456336975, + "learning_rate": 8.439343103402042e-06, + "loss": 1.0341, + "step": 14350 + }, + { + "epoch": 0.9988175558183209, + "grad_norm": 5.493537902832031, + "learning_rate": 8.436404760897549e-06, + "loss": 0.954, + "step": 14360 + }, + { + "epoch": 0.9995131112193086, + "grad_norm": 1.6765305995941162, + "learning_rate": 8.433464167472855e-06, + "loss": 1.0192, + "step": 14370 + }, + { + "epoch": 1.0002086666202963, + "grad_norm": 1.6733906269073486, + "learning_rate": 8.430521325054115e-06, + "loss": 0.8827, + "step": 14380 + }, + { + "epoch": 1.000904222021284, + "grad_norm": 1.766305685043335, + "learning_rate": 8.427576235568954e-06, + "loss": 0.7956, + "step": 14390 + }, + { + "epoch": 1.0015997774222716, + "grad_norm": 2.3937249183654785, + "learning_rate": 8.424628900946473e-06, + "loss": 0.712, + "step": 14400 + }, + { + "epoch": 1.0022953328232593, + "grad_norm": 1.6619166135787964, + "learning_rate": 8.421679323117233e-06, + "loss": 0.7496, + "step": 14410 + }, + { + "epoch": 1.002990888224247, + "grad_norm": 1.2816236019134521, + "learning_rate": 8.418727504013279e-06, + "loss": 0.7499, + "step": 14420 + }, + { + "epoch": 1.0036864436252348, + "grad_norm": 2.0806503295898438, + "learning_rate": 8.415773445568117e-06, + "loss": 0.7613, + "step": 14430 + }, + { + "epoch": 1.0043819990262224, + "grad_norm": 1.6364102363586426, + "learning_rate": 8.412817149716714e-06, + "loss": 0.722, + "step": 14440 + }, + { + "epoch": 1.0050775544272101, + "grad_norm": 2.270911931991577, + "learning_rate": 8.409858618395513e-06, + "loss": 0.7859, + "step": 14450 + }, + { + "epoch": 1.005773109828198, + "grad_norm": 2.0408642292022705, + "learning_rate": 8.406897853542415e-06, + "loss": 0.7493, + "step": 14460 + }, + { + "epoch": 1.0064686652291854, + "grad_norm": 1.5551255941390991, + "learning_rate": 8.403934857096787e-06, + "loss": 0.7797, + "step": 14470 + }, + { + "epoch": 1.0071642206301732, + "grad_norm": 1.8320207595825195, + "learning_rate": 8.400969630999454e-06, + "loss": 0.7319, + "step": 14480 + }, + { + "epoch": 1.007859776031161, + "grad_norm": 2.8866078853607178, + "learning_rate": 8.398002177192706e-06, + "loss": 0.7709, + "step": 14490 + }, + { + "epoch": 1.0085553314321485, + "grad_norm": 2.2940316200256348, + "learning_rate": 8.395032497620292e-06, + "loss": 0.7144, + "step": 14500 + }, + { + "epoch": 1.0085553314321485, + "eval_loss": 0.983755886554718, + "eval_runtime": 4602.7675, + "eval_samples_per_second": 3.946, + "eval_steps_per_second": 0.658, + "step": 14500 + }, + { + "epoch": 1.0092508868331362, + "grad_norm": 2.0964720249176025, + "learning_rate": 8.392060594227412e-06, + "loss": 0.7773, + "step": 14510 + }, + { + "epoch": 1.009946442234124, + "grad_norm": 1.7267087697982788, + "learning_rate": 8.38908646896073e-06, + "loss": 0.7663, + "step": 14520 + }, + { + "epoch": 1.0106419976351115, + "grad_norm": 1.6698150634765625, + "learning_rate": 8.386110123768364e-06, + "loss": 0.8064, + "step": 14530 + }, + { + "epoch": 
1.0113375530360993, + "grad_norm": 3.281358003616333, + "learning_rate": 8.383131560599887e-06, + "loss": 0.7576, + "step": 14540 + }, + { + "epoch": 1.012033108437087, + "grad_norm": 1.7519060373306274, + "learning_rate": 8.380150781406317e-06, + "loss": 0.7221, + "step": 14550 + }, + { + "epoch": 1.0127286638380748, + "grad_norm": 1.4570820331573486, + "learning_rate": 8.377167788140132e-06, + "loss": 0.7743, + "step": 14560 + }, + { + "epoch": 1.0134242192390623, + "grad_norm": 1.7678040266036987, + "learning_rate": 8.374182582755262e-06, + "loss": 0.7832, + "step": 14570 + }, + { + "epoch": 1.01411977464005, + "grad_norm": 3.228226661682129, + "learning_rate": 8.371195167207075e-06, + "loss": 0.7238, + "step": 14580 + }, + { + "epoch": 1.0148153300410379, + "grad_norm": 3.163618326187134, + "learning_rate": 8.3682055434524e-06, + "loss": 0.7221, + "step": 14590 + }, + { + "epoch": 1.0155108854420254, + "grad_norm": 1.7556897401809692, + "learning_rate": 8.3652137134495e-06, + "loss": 0.7228, + "step": 14600 + }, + { + "epoch": 1.0162064408430131, + "grad_norm": 1.9520761966705322, + "learning_rate": 8.362219679158093e-06, + "loss": 0.7205, + "step": 14610 + }, + { + "epoch": 1.016901996244001, + "grad_norm": 2.752692222595215, + "learning_rate": 8.359223442539335e-06, + "loss": 0.7295, + "step": 14620 + }, + { + "epoch": 1.0175975516449884, + "grad_norm": 3.6744093894958496, + "learning_rate": 8.356225005555828e-06, + "loss": 0.7146, + "step": 14630 + }, + { + "epoch": 1.0182931070459762, + "grad_norm": 2.666919708251953, + "learning_rate": 8.353224370171611e-06, + "loss": 0.7472, + "step": 14640 + }, + { + "epoch": 1.018988662446964, + "grad_norm": 1.9705307483673096, + "learning_rate": 8.35022153835217e-06, + "loss": 0.7639, + "step": 14650 + }, + { + "epoch": 1.0196842178479515, + "grad_norm": 2.6209969520568848, + "learning_rate": 8.347216512064421e-06, + "loss": 0.7553, + "step": 14660 + }, + { + "epoch": 1.0203797732489392, + "grad_norm": 4.5554094314575195, + "learning_rate": 8.344209293276723e-06, + "loss": 0.7921, + "step": 14670 + }, + { + "epoch": 1.021075328649927, + "grad_norm": 2.3071000576019287, + "learning_rate": 8.341199883958874e-06, + "loss": 0.7795, + "step": 14680 + }, + { + "epoch": 1.0217708840509148, + "grad_norm": 2.6672236919403076, + "learning_rate": 8.338188286082098e-06, + "loss": 0.7336, + "step": 14690 + }, + { + "epoch": 1.0224664394519023, + "grad_norm": 4.109497547149658, + "learning_rate": 8.335174501619059e-06, + "loss": 0.758, + "step": 14700 + }, + { + "epoch": 1.02316199485289, + "grad_norm": 2.045199394226074, + "learning_rate": 8.33215853254385e-06, + "loss": 0.7877, + "step": 14710 + }, + { + "epoch": 1.0238575502538778, + "grad_norm": 1.9087852239608765, + "learning_rate": 8.329140380832002e-06, + "loss": 0.7735, + "step": 14720 + }, + { + "epoch": 1.0245531056548653, + "grad_norm": 1.6183117628097534, + "learning_rate": 8.326120048460464e-06, + "loss": 0.6892, + "step": 14730 + }, + { + "epoch": 1.025248661055853, + "grad_norm": 2.8262500762939453, + "learning_rate": 8.323097537407623e-06, + "loss": 0.7723, + "step": 14740 + }, + { + "epoch": 1.0259442164568409, + "grad_norm": 2.4466335773468018, + "learning_rate": 8.32007284965329e-06, + "loss": 0.7525, + "step": 14750 + }, + { + "epoch": 1.0266397718578284, + "grad_norm": 2.335531234741211, + "learning_rate": 8.3170459871787e-06, + "loss": 0.7526, + "step": 14760 + }, + { + "epoch": 1.0273353272588162, + "grad_norm": 3.604314088821411, + "learning_rate": 8.314016951966515e-06, + "loss": 
0.6879, + "step": 14770 + }, + { + "epoch": 1.028030882659804, + "grad_norm": 1.586673617362976, + "learning_rate": 8.310985746000819e-06, + "loss": 0.7581, + "step": 14780 + }, + { + "epoch": 1.0287264380607914, + "grad_norm": 1.6671473979949951, + "learning_rate": 8.307952371267119e-06, + "loss": 0.7555, + "step": 14790 + }, + { + "epoch": 1.0294219934617792, + "grad_norm": 1.7498714923858643, + "learning_rate": 8.304916829752338e-06, + "loss": 0.6922, + "step": 14800 + }, + { + "epoch": 1.030117548862767, + "grad_norm": 3.4646215438842773, + "learning_rate": 8.301879123444827e-06, + "loss": 0.6923, + "step": 14810 + }, + { + "epoch": 1.0308131042637547, + "grad_norm": 1.6995065212249756, + "learning_rate": 8.298839254334349e-06, + "loss": 0.7389, + "step": 14820 + }, + { + "epoch": 1.0315086596647423, + "grad_norm": 1.4525501728057861, + "learning_rate": 8.295797224412082e-06, + "loss": 0.7972, + "step": 14830 + }, + { + "epoch": 1.03220421506573, + "grad_norm": 1.4350645542144775, + "learning_rate": 8.292753035670624e-06, + "loss": 0.7782, + "step": 14840 + }, + { + "epoch": 1.0328997704667178, + "grad_norm": 2.1404457092285156, + "learning_rate": 8.289706690103986e-06, + "loss": 0.7412, + "step": 14850 + }, + { + "epoch": 1.0335953258677053, + "grad_norm": 1.5597481727600098, + "learning_rate": 8.286658189707589e-06, + "loss": 0.7235, + "step": 14860 + }, + { + "epoch": 1.034290881268693, + "grad_norm": 2.1592156887054443, + "learning_rate": 8.283607536478267e-06, + "loss": 0.759, + "step": 14870 + }, + { + "epoch": 1.0349864366696808, + "grad_norm": 2.3763725757598877, + "learning_rate": 8.280554732414266e-06, + "loss": 0.7707, + "step": 14880 + }, + { + "epoch": 1.0356819920706684, + "grad_norm": 1.660506010055542, + "learning_rate": 8.27749977951524e-06, + "loss": 0.7236, + "step": 14890 + }, + { + "epoch": 1.0363775474716561, + "grad_norm": 1.6247450113296509, + "learning_rate": 8.274442679782248e-06, + "loss": 0.7119, + "step": 14900 + }, + { + "epoch": 1.0370731028726439, + "grad_norm": 2.1720407009124756, + "learning_rate": 8.27138343521776e-06, + "loss": 0.7231, + "step": 14910 + }, + { + "epoch": 1.0377686582736314, + "grad_norm": 1.8026961088180542, + "learning_rate": 8.268322047825644e-06, + "loss": 0.7536, + "step": 14920 + }, + { + "epoch": 1.0384642136746192, + "grad_norm": 2.8577497005462646, + "learning_rate": 8.265258519611176e-06, + "loss": 0.7588, + "step": 14930 + }, + { + "epoch": 1.039159769075607, + "grad_norm": 2.0775489807128906, + "learning_rate": 8.262192852581039e-06, + "loss": 0.7261, + "step": 14940 + }, + { + "epoch": 1.0398553244765945, + "grad_norm": 1.279734492301941, + "learning_rate": 8.259125048743306e-06, + "loss": 0.7424, + "step": 14950 + }, + { + "epoch": 1.0405508798775822, + "grad_norm": 1.4050887823104858, + "learning_rate": 8.25605511010746e-06, + "loss": 0.7444, + "step": 14960 + }, + { + "epoch": 1.04124643527857, + "grad_norm": 2.297724723815918, + "learning_rate": 8.252983038684375e-06, + "loss": 0.7486, + "step": 14970 + }, + { + "epoch": 1.0419419906795577, + "grad_norm": 3.132356882095337, + "learning_rate": 8.249908836486324e-06, + "loss": 0.8293, + "step": 14980 + }, + { + "epoch": 1.0426375460805453, + "grad_norm": 1.5922280550003052, + "learning_rate": 8.24683250552698e-06, + "loss": 0.7556, + "step": 14990 + }, + { + "epoch": 1.043333101481533, + "grad_norm": 2.3563168048858643, + "learning_rate": 8.243754047821406e-06, + "loss": 0.7834, + "step": 15000 + }, + { + "epoch": 1.043333101481533, + "eval_loss": 0.980137825012207, + 
"eval_runtime": 4586.4055, + "eval_samples_per_second": 3.96, + "eval_steps_per_second": 0.66, + "step": 15000 + }, + { + "epoch": 1.0440286568825208, + "grad_norm": 1.5939233303070068, + "learning_rate": 8.240673465386058e-06, + "loss": 0.7151, + "step": 15010 + }, + { + "epoch": 1.0447242122835083, + "grad_norm": 2.6707277297973633, + "learning_rate": 8.237590760238784e-06, + "loss": 0.7212, + "step": 15020 + }, + { + "epoch": 1.045419767684496, + "grad_norm": 1.6728860139846802, + "learning_rate": 8.234505934398827e-06, + "loss": 0.6825, + "step": 15030 + }, + { + "epoch": 1.0461153230854838, + "grad_norm": 2.038123846054077, + "learning_rate": 8.23141898988681e-06, + "loss": 0.8119, + "step": 15040 + }, + { + "epoch": 1.0468108784864714, + "grad_norm": 1.976813554763794, + "learning_rate": 8.228329928724753e-06, + "loss": 0.7353, + "step": 15050 + }, + { + "epoch": 1.0475064338874591, + "grad_norm": 2.056896686553955, + "learning_rate": 8.225238752936058e-06, + "loss": 0.7339, + "step": 15060 + }, + { + "epoch": 1.0482019892884469, + "grad_norm": 1.8560460805892944, + "learning_rate": 8.222145464545511e-06, + "loss": 0.7617, + "step": 15070 + }, + { + "epoch": 1.0488975446894344, + "grad_norm": 2.125256299972534, + "learning_rate": 8.219050065579285e-06, + "loss": 0.7593, + "step": 15080 + }, + { + "epoch": 1.0495931000904222, + "grad_norm": 2.655061960220337, + "learning_rate": 8.215952558064934e-06, + "loss": 0.7604, + "step": 15090 + }, + { + "epoch": 1.05028865549141, + "grad_norm": 1.9248294830322266, + "learning_rate": 8.212852944031394e-06, + "loss": 0.7422, + "step": 15100 + }, + { + "epoch": 1.0509842108923977, + "grad_norm": 1.5344346761703491, + "learning_rate": 8.209751225508975e-06, + "loss": 0.7319, + "step": 15110 + }, + { + "epoch": 1.0516797662933852, + "grad_norm": 1.6226840019226074, + "learning_rate": 8.206647404529375e-06, + "loss": 0.8226, + "step": 15120 + }, + { + "epoch": 1.052375321694373, + "grad_norm": 1.6069071292877197, + "learning_rate": 8.203541483125666e-06, + "loss": 0.7096, + "step": 15130 + }, + { + "epoch": 1.0530708770953607, + "grad_norm": 1.8037627935409546, + "learning_rate": 8.20043346333229e-06, + "loss": 0.7491, + "step": 15140 + }, + { + "epoch": 1.0537664324963483, + "grad_norm": 3.6735610961914062, + "learning_rate": 8.19732334718507e-06, + "loss": 0.7687, + "step": 15150 + }, + { + "epoch": 1.054461987897336, + "grad_norm": 2.2948520183563232, + "learning_rate": 8.1942111367212e-06, + "loss": 0.7372, + "step": 15160 + }, + { + "epoch": 1.0551575432983238, + "grad_norm": 1.772353172302246, + "learning_rate": 8.191096833979246e-06, + "loss": 0.7778, + "step": 15170 + }, + { + "epoch": 1.0558530986993113, + "grad_norm": 1.8623069524765015, + "learning_rate": 8.187980440999144e-06, + "loss": 0.7377, + "step": 15180 + }, + { + "epoch": 1.056548654100299, + "grad_norm": 1.788987636566162, + "learning_rate": 8.1848619598222e-06, + "loss": 0.7394, + "step": 15190 + }, + { + "epoch": 1.0572442095012868, + "grad_norm": 1.391777515411377, + "learning_rate": 8.181741392491084e-06, + "loss": 0.7261, + "step": 15200 + }, + { + "epoch": 1.0579397649022744, + "grad_norm": 1.9223103523254395, + "learning_rate": 8.178618741049841e-06, + "loss": 0.6973, + "step": 15210 + }, + { + "epoch": 1.0586353203032621, + "grad_norm": 1.278745174407959, + "learning_rate": 8.175494007543872e-06, + "loss": 0.7937, + "step": 15220 + }, + { + "epoch": 1.0593308757042499, + "grad_norm": 3.466860294342041, + "learning_rate": 8.172367194019949e-06, + "loss": 0.7596, + "step": 
15230 + }, + { + "epoch": 1.0600264311052374, + "grad_norm": 2.1827003955841064, + "learning_rate": 8.169238302526201e-06, + "loss": 0.6918, + "step": 15240 + }, + { + "epoch": 1.0607219865062252, + "grad_norm": 2.2348504066467285, + "learning_rate": 8.16610733511212e-06, + "loss": 0.7691, + "step": 15250 + }, + { + "epoch": 1.061417541907213, + "grad_norm": 1.754944086074829, + "learning_rate": 8.162974293828559e-06, + "loss": 0.6976, + "step": 15260 + }, + { + "epoch": 1.0621130973082007, + "grad_norm": 1.8999314308166504, + "learning_rate": 8.159839180727726e-06, + "loss": 0.7237, + "step": 15270 + }, + { + "epoch": 1.0628086527091882, + "grad_norm": 1.6591819524765015, + "learning_rate": 8.156701997863195e-06, + "loss": 0.7329, + "step": 15280 + }, + { + "epoch": 1.063504208110176, + "grad_norm": 4.215458869934082, + "learning_rate": 8.153562747289883e-06, + "loss": 0.7211, + "step": 15290 + }, + { + "epoch": 1.0641997635111637, + "grad_norm": 2.122084379196167, + "learning_rate": 8.150421431064069e-06, + "loss": 0.7512, + "step": 15300 + }, + { + "epoch": 1.0648953189121513, + "grad_norm": 1.8679026365280151, + "learning_rate": 8.147278051243386e-06, + "loss": 0.6669, + "step": 15310 + }, + { + "epoch": 1.065590874313139, + "grad_norm": 3.0175089836120605, + "learning_rate": 8.144132609886815e-06, + "loss": 0.7204, + "step": 15320 + }, + { + "epoch": 1.0662864297141268, + "grad_norm": 2.3799450397491455, + "learning_rate": 8.140985109054688e-06, + "loss": 0.7142, + "step": 15330 + }, + { + "epoch": 1.0669819851151143, + "grad_norm": 1.65891695022583, + "learning_rate": 8.137835550808688e-06, + "loss": 0.7004, + "step": 15340 + }, + { + "epoch": 1.067677540516102, + "grad_norm": 3.2163312435150146, + "learning_rate": 8.134683937211846e-06, + "loss": 0.7635, + "step": 15350 + }, + { + "epoch": 1.0683730959170898, + "grad_norm": 3.021054267883301, + "learning_rate": 8.131530270328534e-06, + "loss": 0.7203, + "step": 15360 + }, + { + "epoch": 1.0690686513180774, + "grad_norm": 6.664762020111084, + "learning_rate": 8.128374552224474e-06, + "loss": 0.7407, + "step": 15370 + }, + { + "epoch": 1.0697642067190651, + "grad_norm": 2.141761541366577, + "learning_rate": 8.125216784966734e-06, + "loss": 0.8048, + "step": 15380 + }, + { + "epoch": 1.070459762120053, + "grad_norm": 1.7114832401275635, + "learning_rate": 8.122056970623714e-06, + "loss": 0.6965, + "step": 15390 + }, + { + "epoch": 1.0711553175210407, + "grad_norm": 1.781787395477295, + "learning_rate": 8.118895111265166e-06, + "loss": 0.76, + "step": 15400 + }, + { + "epoch": 1.0718508729220282, + "grad_norm": 1.8947938680648804, + "learning_rate": 8.115731208962177e-06, + "loss": 0.7609, + "step": 15410 + }, + { + "epoch": 1.072546428323016, + "grad_norm": 1.885662317276001, + "learning_rate": 8.11256526578717e-06, + "loss": 0.7783, + "step": 15420 + }, + { + "epoch": 1.0732419837240037, + "grad_norm": 3.1641204357147217, + "learning_rate": 8.109397283813909e-06, + "loss": 0.7266, + "step": 15430 + }, + { + "epoch": 1.0739375391249912, + "grad_norm": 1.9282429218292236, + "learning_rate": 8.106227265117488e-06, + "loss": 0.7569, + "step": 15440 + }, + { + "epoch": 1.074633094525979, + "grad_norm": 1.6356946229934692, + "learning_rate": 8.103055211774343e-06, + "loss": 0.738, + "step": 15450 + }, + { + "epoch": 1.0753286499269668, + "grad_norm": 3.1384096145629883, + "learning_rate": 8.099881125862237e-06, + "loss": 0.7421, + "step": 15460 + }, + { + "epoch": 1.0760242053279543, + "grad_norm": 1.5029643774032593, + "learning_rate": 
8.096705009460262e-06, + "loss": 0.7525, + "step": 15470 + }, + { + "epoch": 1.076719760728942, + "grad_norm": 1.8786005973815918, + "learning_rate": 8.093526864648848e-06, + "loss": 0.7015, + "step": 15480 + }, + { + "epoch": 1.0774153161299298, + "grad_norm": 2.743666410446167, + "learning_rate": 8.090346693509749e-06, + "loss": 0.7151, + "step": 15490 + }, + { + "epoch": 1.0781108715309173, + "grad_norm": 2.227748155593872, + "learning_rate": 8.087164498126044e-06, + "loss": 0.7389, + "step": 15500 + }, + { + "epoch": 1.0781108715309173, + "eval_loss": 0.9800230860710144, + "eval_runtime": 4590.3008, + "eval_samples_per_second": 3.956, + "eval_steps_per_second": 0.659, + "step": 15500 + }, + { + "epoch": 1.078806426931905, + "grad_norm": 1.6082303524017334, + "learning_rate": 8.083980280582142e-06, + "loss": 0.6989, + "step": 15510 + }, + { + "epoch": 1.0795019823328929, + "grad_norm": 2.01486873626709, + "learning_rate": 8.080794042963774e-06, + "loss": 0.7173, + "step": 15520 + }, + { + "epoch": 1.0801975377338806, + "grad_norm": 1.9832744598388672, + "learning_rate": 8.077605787357996e-06, + "loss": 0.785, + "step": 15530 + }, + { + "epoch": 1.0808930931348681, + "grad_norm": 1.987999439239502, + "learning_rate": 8.074415515853186e-06, + "loss": 0.6642, + "step": 15540 + }, + { + "epoch": 1.081588648535856, + "grad_norm": 1.4631825685501099, + "learning_rate": 8.07122323053904e-06, + "loss": 0.772, + "step": 15550 + }, + { + "epoch": 1.0822842039368437, + "grad_norm": 1.4155844449996948, + "learning_rate": 8.068028933506576e-06, + "loss": 0.7245, + "step": 15560 + }, + { + "epoch": 1.0829797593378312, + "grad_norm": 1.8805222511291504, + "learning_rate": 8.064832626848127e-06, + "loss": 0.6863, + "step": 15570 + }, + { + "epoch": 1.083675314738819, + "grad_norm": 2.301875352859497, + "learning_rate": 8.061634312657344e-06, + "loss": 0.7145, + "step": 15580 + }, + { + "epoch": 1.0843708701398067, + "grad_norm": 1.6264792680740356, + "learning_rate": 8.058433993029195e-06, + "loss": 0.7257, + "step": 15590 + }, + { + "epoch": 1.0850664255407942, + "grad_norm": 2.216895580291748, + "learning_rate": 8.055231670059958e-06, + "loss": 0.7574, + "step": 15600 + }, + { + "epoch": 1.085761980941782, + "grad_norm": 2.281456708908081, + "learning_rate": 8.05202734584722e-06, + "loss": 0.7027, + "step": 15610 + }, + { + "epoch": 1.0864575363427698, + "grad_norm": 1.7820533514022827, + "learning_rate": 8.04882102248989e-06, + "loss": 0.7547, + "step": 15620 + }, + { + "epoch": 1.0871530917437573, + "grad_norm": 1.789183259010315, + "learning_rate": 8.045612702088177e-06, + "loss": 0.7427, + "step": 15630 + }, + { + "epoch": 1.087848647144745, + "grad_norm": 2.4573729038238525, + "learning_rate": 8.0424023867436e-06, + "loss": 0.7711, + "step": 15640 + }, + { + "epoch": 1.0885442025457328, + "grad_norm": 1.720495343208313, + "learning_rate": 8.039190078558987e-06, + "loss": 0.634, + "step": 15650 + }, + { + "epoch": 1.0892397579467206, + "grad_norm": 1.7626230716705322, + "learning_rate": 8.03597577963847e-06, + "loss": 0.7316, + "step": 15660 + }, + { + "epoch": 1.089935313347708, + "grad_norm": 1.5430456399917603, + "learning_rate": 8.032759492087485e-06, + "loss": 0.7176, + "step": 15670 + }, + { + "epoch": 1.0906308687486959, + "grad_norm": 2.127326488494873, + "learning_rate": 8.029541218012771e-06, + "loss": 0.6906, + "step": 15680 + }, + { + "epoch": 1.0913264241496836, + "grad_norm": 1.6455721855163574, + "learning_rate": 8.026320959522366e-06, + "loss": 0.7248, + "step": 15690 + }, + { + 
"epoch": 1.0920219795506712, + "grad_norm": 2.8555684089660645, + "learning_rate": 8.023098718725613e-06, + "loss": 0.786, + "step": 15700 + }, + { + "epoch": 1.092717534951659, + "grad_norm": 2.5554494857788086, + "learning_rate": 8.019874497733144e-06, + "loss": 0.748, + "step": 15710 + }, + { + "epoch": 1.0934130903526467, + "grad_norm": 2.404695749282837, + "learning_rate": 8.016648298656902e-06, + "loss": 0.813, + "step": 15720 + }, + { + "epoch": 1.0941086457536342, + "grad_norm": 2.2759807109832764, + "learning_rate": 8.013420123610112e-06, + "loss": 0.7112, + "step": 15730 + }, + { + "epoch": 1.094804201154622, + "grad_norm": 1.6544609069824219, + "learning_rate": 8.010189974707302e-06, + "loss": 0.726, + "step": 15740 + }, + { + "epoch": 1.0954997565556097, + "grad_norm": 1.5047714710235596, + "learning_rate": 8.00695785406429e-06, + "loss": 0.726, + "step": 15750 + }, + { + "epoch": 1.0961953119565973, + "grad_norm": 2.504558563232422, + "learning_rate": 8.003723763798185e-06, + "loss": 0.7405, + "step": 15760 + }, + { + "epoch": 1.096890867357585, + "grad_norm": 2.3876149654388428, + "learning_rate": 8.000487706027386e-06, + "loss": 0.7714, + "step": 15770 + }, + { + "epoch": 1.0975864227585728, + "grad_norm": 5.663303375244141, + "learning_rate": 7.997249682871583e-06, + "loss": 0.7037, + "step": 15780 + }, + { + "epoch": 1.0982819781595605, + "grad_norm": 1.5679620504379272, + "learning_rate": 7.994009696451753e-06, + "loss": 0.7235, + "step": 15790 + }, + { + "epoch": 1.098977533560548, + "grad_norm": 2.29827618598938, + "learning_rate": 7.990767748890153e-06, + "loss": 0.783, + "step": 15800 + }, + { + "epoch": 1.0996730889615358, + "grad_norm": 3.10477876663208, + "learning_rate": 7.987523842310334e-06, + "loss": 0.7487, + "step": 15810 + }, + { + "epoch": 1.1003686443625236, + "grad_norm": 1.8620591163635254, + "learning_rate": 7.984277978837125e-06, + "loss": 0.7435, + "step": 15820 + }, + { + "epoch": 1.101064199763511, + "grad_norm": 2.2057790756225586, + "learning_rate": 7.981030160596636e-06, + "loss": 0.7271, + "step": 15830 + }, + { + "epoch": 1.1017597551644989, + "grad_norm": 1.5505954027175903, + "learning_rate": 7.97778038971626e-06, + "loss": 0.7383, + "step": 15840 + }, + { + "epoch": 1.1024553105654866, + "grad_norm": 2.823699951171875, + "learning_rate": 7.974528668324668e-06, + "loss": 0.741, + "step": 15850 + }, + { + "epoch": 1.1031508659664742, + "grad_norm": 2.2215020656585693, + "learning_rate": 7.971274998551808e-06, + "loss": 0.7838, + "step": 15860 + }, + { + "epoch": 1.103846421367462, + "grad_norm": 1.672216773033142, + "learning_rate": 7.968019382528904e-06, + "loss": 0.7138, + "step": 15870 + }, + { + "epoch": 1.1045419767684497, + "grad_norm": 1.971535325050354, + "learning_rate": 7.964761822388458e-06, + "loss": 0.7053, + "step": 15880 + }, + { + "epoch": 1.1052375321694372, + "grad_norm": 2.7451932430267334, + "learning_rate": 7.961502320264242e-06, + "loss": 0.7323, + "step": 15890 + }, + { + "epoch": 1.105933087570425, + "grad_norm": 1.926413655281067, + "learning_rate": 7.9582408782913e-06, + "loss": 0.6997, + "step": 15900 + }, + { + "epoch": 1.1066286429714127, + "grad_norm": 1.714254379272461, + "learning_rate": 7.954977498605949e-06, + "loss": 0.7461, + "step": 15910 + }, + { + "epoch": 1.1073241983724003, + "grad_norm": 2.093796730041504, + "learning_rate": 7.951712183345774e-06, + "loss": 0.6979, + "step": 15920 + }, + { + "epoch": 1.108019753773388, + "grad_norm": 2.0592517852783203, + "learning_rate": 7.948444934649626e-06, + 
"loss": 0.6954, + "step": 15930 + }, + { + "epoch": 1.1087153091743758, + "grad_norm": 1.7189865112304688, + "learning_rate": 7.945175754657628e-06, + "loss": 0.7104, + "step": 15940 + }, + { + "epoch": 1.1094108645753635, + "grad_norm": 1.457751750946045, + "learning_rate": 7.94190464551116e-06, + "loss": 0.7607, + "step": 15950 + }, + { + "epoch": 1.110106419976351, + "grad_norm": 1.9452743530273438, + "learning_rate": 7.938631609352873e-06, + "loss": 0.7646, + "step": 15960 + }, + { + "epoch": 1.1108019753773388, + "grad_norm": 1.596604824066162, + "learning_rate": 7.935356648326675e-06, + "loss": 0.685, + "step": 15970 + }, + { + "epoch": 1.1114975307783266, + "grad_norm": 1.439998984336853, + "learning_rate": 7.93207976457774e-06, + "loss": 0.7268, + "step": 15980 + }, + { + "epoch": 1.1121930861793141, + "grad_norm": 1.8189902305603027, + "learning_rate": 7.928800960252497e-06, + "loss": 0.7612, + "step": 15990 + }, + { + "epoch": 1.1128886415803019, + "grad_norm": 1.8137803077697754, + "learning_rate": 7.925520237498632e-06, + "loss": 0.7646, + "step": 16000 + }, + { + "epoch": 1.1128886415803019, + "eval_loss": 0.977469801902771, + "eval_runtime": 4605.6495, + "eval_samples_per_second": 3.943, + "eval_steps_per_second": 0.657, + "step": 16000 + }, + { + "epoch": 1.1135841969812896, + "grad_norm": 1.8203479051589966, + "learning_rate": 7.922237598465093e-06, + "loss": 0.7509, + "step": 16010 + }, + { + "epoch": 1.1142797523822772, + "grad_norm": 1.672701358795166, + "learning_rate": 7.91895304530208e-06, + "loss": 0.7765, + "step": 16020 + }, + { + "epoch": 1.114975307783265, + "grad_norm": 2.0833663940429688, + "learning_rate": 7.915666580161046e-06, + "loss": 0.7358, + "step": 16030 + }, + { + "epoch": 1.1156708631842527, + "grad_norm": 2.6208438873291016, + "learning_rate": 7.912378205194698e-06, + "loss": 0.6853, + "step": 16040 + }, + { + "epoch": 1.1163664185852402, + "grad_norm": 1.833115816116333, + "learning_rate": 7.909087922556993e-06, + "loss": 0.7502, + "step": 16050 + }, + { + "epoch": 1.117061973986228, + "grad_norm": 1.963848352432251, + "learning_rate": 7.90579573440314e-06, + "loss": 0.7444, + "step": 16060 + }, + { + "epoch": 1.1177575293872157, + "grad_norm": 2.0024590492248535, + "learning_rate": 7.902501642889593e-06, + "loss": 0.7395, + "step": 16070 + }, + { + "epoch": 1.1184530847882033, + "grad_norm": 2.096116065979004, + "learning_rate": 7.899205650174051e-06, + "loss": 0.7969, + "step": 16080 + }, + { + "epoch": 1.119148640189191, + "grad_norm": 1.3885408639907837, + "learning_rate": 7.895907758415467e-06, + "loss": 0.7574, + "step": 16090 + }, + { + "epoch": 1.1198441955901788, + "grad_norm": 1.3996411561965942, + "learning_rate": 7.892607969774027e-06, + "loss": 0.7109, + "step": 16100 + }, + { + "epoch": 1.1205397509911665, + "grad_norm": 1.8396230936050415, + "learning_rate": 7.889306286411168e-06, + "loss": 0.7687, + "step": 16110 + }, + { + "epoch": 1.121235306392154, + "grad_norm": 2.374706745147705, + "learning_rate": 7.886002710489562e-06, + "loss": 0.7171, + "step": 16120 + }, + { + "epoch": 1.1219308617931418, + "grad_norm": 1.9237761497497559, + "learning_rate": 7.882697244173126e-06, + "loss": 0.7387, + "step": 16130 + }, + { + "epoch": 1.1226264171941296, + "grad_norm": 1.9676580429077148, + "learning_rate": 7.879389889627011e-06, + "loss": 0.7235, + "step": 16140 + }, + { + "epoch": 1.1233219725951171, + "grad_norm": 2.0831451416015625, + "learning_rate": 7.876080649017608e-06, + "loss": 0.7417, + "step": 16150 + }, + { + "epoch": 
1.1240175279961049, + "grad_norm": 1.9543906450271606, + "learning_rate": 7.872769524512539e-06, + "loss": 0.6837, + "step": 16160 + }, + { + "epoch": 1.1247130833970926, + "grad_norm": 1.5101432800292969, + "learning_rate": 7.869456518280666e-06, + "loss": 0.7728, + "step": 16170 + }, + { + "epoch": 1.1254086387980802, + "grad_norm": 2.654278516769409, + "learning_rate": 7.86614163249208e-06, + "loss": 0.7623, + "step": 16180 + }, + { + "epoch": 1.126104194199068, + "grad_norm": 2.7122201919555664, + "learning_rate": 7.8628248693181e-06, + "loss": 0.7344, + "step": 16190 + }, + { + "epoch": 1.1267997496000557, + "grad_norm": 1.9171080589294434, + "learning_rate": 7.859506230931285e-06, + "loss": 0.7829, + "step": 16200 + }, + { + "epoch": 1.1274953050010432, + "grad_norm": 5.162977695465088, + "learning_rate": 7.85618571950541e-06, + "loss": 0.7154, + "step": 16210 + }, + { + "epoch": 1.128190860402031, + "grad_norm": 1.795058250427246, + "learning_rate": 7.852863337215483e-06, + "loss": 0.766, + "step": 16220 + }, + { + "epoch": 1.1288864158030187, + "grad_norm": 2.018876075744629, + "learning_rate": 7.849539086237739e-06, + "loss": 0.7396, + "step": 16230 + }, + { + "epoch": 1.1295819712040065, + "grad_norm": 2.415282964706421, + "learning_rate": 7.846212968749636e-06, + "loss": 0.7877, + "step": 16240 + }, + { + "epoch": 1.130277526604994, + "grad_norm": 1.7467466592788696, + "learning_rate": 7.842884986929851e-06, + "loss": 0.695, + "step": 16250 + }, + { + "epoch": 1.1309730820059818, + "grad_norm": 3.2757728099823, + "learning_rate": 7.839555142958284e-06, + "loss": 0.749, + "step": 16260 + }, + { + "epoch": 1.1316686374069695, + "grad_norm": 1.4201987981796265, + "learning_rate": 7.836223439016061e-06, + "loss": 0.7764, + "step": 16270 + }, + { + "epoch": 1.132364192807957, + "grad_norm": 2.32922101020813, + "learning_rate": 7.832889877285516e-06, + "loss": 0.8183, + "step": 16280 + }, + { + "epoch": 1.1330597482089448, + "grad_norm": 1.4414551258087158, + "learning_rate": 7.829554459950205e-06, + "loss": 0.7204, + "step": 16290 + }, + { + "epoch": 1.1337553036099326, + "grad_norm": 1.3334795236587524, + "learning_rate": 7.8262171891949e-06, + "loss": 0.7629, + "step": 16300 + }, + { + "epoch": 1.1344508590109201, + "grad_norm": 5.218554973602295, + "learning_rate": 7.822878067205589e-06, + "loss": 0.7531, + "step": 16310 + }, + { + "epoch": 1.135146414411908, + "grad_norm": 1.844734787940979, + "learning_rate": 7.819537096169464e-06, + "loss": 0.7435, + "step": 16320 + }, + { + "epoch": 1.1358419698128956, + "grad_norm": 1.4511303901672363, + "learning_rate": 7.81619427827494e-06, + "loss": 0.662, + "step": 16330 + }, + { + "epoch": 1.1365375252138832, + "grad_norm": 1.6987226009368896, + "learning_rate": 7.812849615711632e-06, + "loss": 0.7193, + "step": 16340 + }, + { + "epoch": 1.137233080614871, + "grad_norm": 1.4814562797546387, + "learning_rate": 7.809503110670369e-06, + "loss": 0.6724, + "step": 16350 + }, + { + "epoch": 1.1379286360158587, + "grad_norm": 1.7906551361083984, + "learning_rate": 7.806154765343183e-06, + "loss": 0.7082, + "step": 16360 + }, + { + "epoch": 1.1386241914168465, + "grad_norm": 1.6984189748764038, + "learning_rate": 7.802804581923316e-06, + "loss": 0.7459, + "step": 16370 + }, + { + "epoch": 1.139319746817834, + "grad_norm": 2.6759443283081055, + "learning_rate": 7.79945256260521e-06, + "loss": 0.7644, + "step": 16380 + }, + { + "epoch": 1.1400153022188217, + "grad_norm": 1.7625153064727783, + "learning_rate": 7.796098709584509e-06, + "loss": 
0.7748, + "step": 16390 + }, + { + "epoch": 1.1407108576198095, + "grad_norm": 1.5213898420333862, + "learning_rate": 7.792743025058062e-06, + "loss": 0.7933, + "step": 16400 + }, + { + "epoch": 1.141406413020797, + "grad_norm": 1.8224724531173706, + "learning_rate": 7.789385511223917e-06, + "loss": 0.7344, + "step": 16410 + }, + { + "epoch": 1.1421019684217848, + "grad_norm": 1.7619141340255737, + "learning_rate": 7.786026170281316e-06, + "loss": 0.7307, + "step": 16420 + }, + { + "epoch": 1.1427975238227726, + "grad_norm": 1.7765525579452515, + "learning_rate": 7.782665004430702e-06, + "loss": 0.7233, + "step": 16430 + }, + { + "epoch": 1.14349307922376, + "grad_norm": 1.6042877435684204, + "learning_rate": 7.779302015873712e-06, + "loss": 0.7633, + "step": 16440 + }, + { + "epoch": 1.1441886346247478, + "grad_norm": 2.026230812072754, + "learning_rate": 7.775937206813178e-06, + "loss": 0.6756, + "step": 16450 + }, + { + "epoch": 1.1448841900257356, + "grad_norm": 2.075192451477051, + "learning_rate": 7.772570579453122e-06, + "loss": 0.7649, + "step": 16460 + }, + { + "epoch": 1.1455797454267231, + "grad_norm": 1.7926652431488037, + "learning_rate": 7.769202135998758e-06, + "loss": 0.7262, + "step": 16470 + }, + { + "epoch": 1.146275300827711, + "grad_norm": 1.5356025695800781, + "learning_rate": 7.765831878656491e-06, + "loss": 0.731, + "step": 16480 + }, + { + "epoch": 1.1469708562286987, + "grad_norm": 2.0537948608398438, + "learning_rate": 7.762459809633915e-06, + "loss": 0.7076, + "step": 16490 + }, + { + "epoch": 1.1476664116296864, + "grad_norm": 1.7556426525115967, + "learning_rate": 7.759085931139808e-06, + "loss": 0.7413, + "step": 16500 + }, + { + "epoch": 1.1476664116296864, + "eval_loss": 0.9782079458236694, + "eval_runtime": 4598.6041, + "eval_samples_per_second": 3.949, + "eval_steps_per_second": 0.658, + "step": 16500 + }, + { + "epoch": 1.148361967030674, + "grad_norm": 3.163007974624634, + "learning_rate": 7.75571024538413e-06, + "loss": 0.7007, + "step": 16510 + }, + { + "epoch": 1.1490575224316617, + "grad_norm": 1.3722561597824097, + "learning_rate": 7.752332754578035e-06, + "loss": 0.7363, + "step": 16520 + }, + { + "epoch": 1.1497530778326495, + "grad_norm": 1.7917343378067017, + "learning_rate": 7.748953460933849e-06, + "loss": 0.7431, + "step": 16530 + }, + { + "epoch": 1.150448633233637, + "grad_norm": 1.9198352098464966, + "learning_rate": 7.745572366665085e-06, + "loss": 0.7131, + "step": 16540 + }, + { + "epoch": 1.1511441886346248, + "grad_norm": 2.063246726989746, + "learning_rate": 7.742189473986434e-06, + "loss": 0.7317, + "step": 16550 + }, + { + "epoch": 1.1518397440356125, + "grad_norm": 1.7116358280181885, + "learning_rate": 7.738804785113762e-06, + "loss": 0.7026, + "step": 16560 + }, + { + "epoch": 1.1525352994366, + "grad_norm": 2.101548433303833, + "learning_rate": 7.735418302264119e-06, + "loss": 0.7664, + "step": 16570 + }, + { + "epoch": 1.1532308548375878, + "grad_norm": 3.236647129058838, + "learning_rate": 7.732030027655719e-06, + "loss": 0.747, + "step": 16580 + }, + { + "epoch": 1.1539264102385756, + "grad_norm": 1.9104225635528564, + "learning_rate": 7.728639963507962e-06, + "loss": 0.7768, + "step": 16590 + }, + { + "epoch": 1.154621965639563, + "grad_norm": 1.8343124389648438, + "learning_rate": 7.72524811204141e-06, + "loss": 0.7262, + "step": 16600 + }, + { + "epoch": 1.1553175210405509, + "grad_norm": 11.77759075164795, + "learning_rate": 7.721854475477802e-06, + "loss": 0.7356, + "step": 16610 + }, + { + "epoch": 1.1560130764415386, 
+ "grad_norm": 2.980041027069092, + "learning_rate": 7.718459056040042e-06, + "loss": 0.7492, + "step": 16620 + }, + { + "epoch": 1.1567086318425264, + "grad_norm": 1.8407713174819946, + "learning_rate": 7.715061855952206e-06, + "loss": 0.7114, + "step": 16630 + }, + { + "epoch": 1.157404187243514, + "grad_norm": 1.4749658107757568, + "learning_rate": 7.711662877439531e-06, + "loss": 0.7121, + "step": 16640 + }, + { + "epoch": 1.1580997426445017, + "grad_norm": 1.4815341234207153, + "learning_rate": 7.708262122728426e-06, + "loss": 0.704, + "step": 16650 + }, + { + "epoch": 1.1587952980454894, + "grad_norm": 1.5834189653396606, + "learning_rate": 7.704859594046462e-06, + "loss": 0.7608, + "step": 16660 + }, + { + "epoch": 1.159490853446477, + "grad_norm": 2.3483595848083496, + "learning_rate": 7.701455293622361e-06, + "loss": 0.7795, + "step": 16670 + }, + { + "epoch": 1.1601864088474647, + "grad_norm": 4.480607509613037, + "learning_rate": 7.698049223686021e-06, + "loss": 0.7156, + "step": 16680 + }, + { + "epoch": 1.1608819642484525, + "grad_norm": 3.4225215911865234, + "learning_rate": 7.69464138646849e-06, + "loss": 0.7199, + "step": 16690 + }, + { + "epoch": 1.16157751964944, + "grad_norm": 3.5965778827667236, + "learning_rate": 7.691231784201976e-06, + "loss": 0.7644, + "step": 16700 + }, + { + "epoch": 1.1622730750504278, + "grad_norm": 2.3045315742492676, + "learning_rate": 7.687820419119843e-06, + "loss": 0.7214, + "step": 16710 + }, + { + "epoch": 1.1629686304514155, + "grad_norm": 1.386800765991211, + "learning_rate": 7.68440729345661e-06, + "loss": 0.716, + "step": 16720 + }, + { + "epoch": 1.163664185852403, + "grad_norm": 3.5695197582244873, + "learning_rate": 7.680992409447949e-06, + "loss": 0.7366, + "step": 16730 + }, + { + "epoch": 1.1643597412533908, + "grad_norm": 1.5364261865615845, + "learning_rate": 7.677575769330682e-06, + "loss": 0.7239, + "step": 16740 + }, + { + "epoch": 1.1650552966543786, + "grad_norm": 2.756621837615967, + "learning_rate": 7.674157375342785e-06, + "loss": 0.7303, + "step": 16750 + }, + { + "epoch": 1.1657508520553663, + "grad_norm": 1.9190778732299805, + "learning_rate": 7.670737229723381e-06, + "loss": 0.7335, + "step": 16760 + }, + { + "epoch": 1.1664464074563539, + "grad_norm": 1.526294231414795, + "learning_rate": 7.667315334712738e-06, + "loss": 0.7504, + "step": 16770 + }, + { + "epoch": 1.1671419628573416, + "grad_norm": 1.4361801147460938, + "learning_rate": 7.663891692552273e-06, + "loss": 0.7499, + "step": 16780 + }, + { + "epoch": 1.1678375182583292, + "grad_norm": 5.260773181915283, + "learning_rate": 7.660466305484546e-06, + "loss": 0.7609, + "step": 16790 + }, + { + "epoch": 1.168533073659317, + "grad_norm": 2.130103349685669, + "learning_rate": 7.65703917575326e-06, + "loss": 0.7386, + "step": 16800 + }, + { + "epoch": 1.1692286290603047, + "grad_norm": 2.170276403427124, + "learning_rate": 7.653610305603263e-06, + "loss": 0.7057, + "step": 16810 + }, + { + "epoch": 1.1699241844612924, + "grad_norm": 1.6590690612792969, + "learning_rate": 7.650179697280537e-06, + "loss": 0.7631, + "step": 16820 + }, + { + "epoch": 1.17061973986228, + "grad_norm": 2.131392002105713, + "learning_rate": 7.646747353032205e-06, + "loss": 0.7167, + "step": 16830 + }, + { + "epoch": 1.1713152952632677, + "grad_norm": 1.7783950567245483, + "learning_rate": 7.643313275106529e-06, + "loss": 0.7507, + "step": 16840 + }, + { + "epoch": 1.1720108506642555, + "grad_norm": 2.3366200923919678, + "learning_rate": 7.639877465752902e-06, + "loss": 0.747, + "step": 
16850 + }, + { + "epoch": 1.172706406065243, + "grad_norm": 2.3637051582336426, + "learning_rate": 7.63643992722186e-06, + "loss": 0.7244, + "step": 16860 + }, + { + "epoch": 1.1734019614662308, + "grad_norm": 4.523015022277832, + "learning_rate": 7.63300066176506e-06, + "loss": 0.7677, + "step": 16870 + }, + { + "epoch": 1.1740975168672185, + "grad_norm": 1.8634729385375977, + "learning_rate": 7.629559671635302e-06, + "loss": 0.7414, + "step": 16880 + }, + { + "epoch": 1.1747930722682063, + "grad_norm": 1.515803337097168, + "learning_rate": 7.626116959086502e-06, + "loss": 0.6581, + "step": 16890 + }, + { + "epoch": 1.1754886276691938, + "grad_norm": 1.612243890762329, + "learning_rate": 7.62267252637372e-06, + "loss": 0.7734, + "step": 16900 + }, + { + "epoch": 1.1761841830701816, + "grad_norm": 1.4237169027328491, + "learning_rate": 7.61922637575313e-06, + "loss": 0.7419, + "step": 16910 + }, + { + "epoch": 1.1768797384711691, + "grad_norm": 1.8054207563400269, + "learning_rate": 7.6157785094820345e-06, + "loss": 0.7428, + "step": 16920 + }, + { + "epoch": 1.1775752938721569, + "grad_norm": 1.8821370601654053, + "learning_rate": 7.612328929818866e-06, + "loss": 0.7394, + "step": 16930 + }, + { + "epoch": 1.1782708492731446, + "grad_norm": 2.073482036590576, + "learning_rate": 7.6088776390231714e-06, + "loss": 0.7464, + "step": 16940 + }, + { + "epoch": 1.1789664046741324, + "grad_norm": 2.2198994159698486, + "learning_rate": 7.605424639355623e-06, + "loss": 0.7376, + "step": 16950 + }, + { + "epoch": 1.17966196007512, + "grad_norm": 1.9598629474639893, + "learning_rate": 7.601969933078009e-06, + "loss": 0.7109, + "step": 16960 + }, + { + "epoch": 1.1803575154761077, + "grad_norm": 2.8487653732299805, + "learning_rate": 7.598513522453239e-06, + "loss": 0.7724, + "step": 16970 + }, + { + "epoch": 1.1810530708770954, + "grad_norm": 1.5344852209091187, + "learning_rate": 7.595055409745339e-06, + "loss": 0.7552, + "step": 16980 + }, + { + "epoch": 1.181748626278083, + "grad_norm": 7.255866050720215, + "learning_rate": 7.5915955972194445e-06, + "loss": 0.7667, + "step": 16990 + }, + { + "epoch": 1.1824441816790707, + "grad_norm": 1.964206337928772, + "learning_rate": 7.588134087141812e-06, + "loss": 0.7996, + "step": 17000 + }, + { + "epoch": 1.1824441816790707, + "eval_loss": 0.9709168076515198, + "eval_runtime": 4600.8225, + "eval_samples_per_second": 3.947, + "eval_steps_per_second": 0.658, + "step": 17000 + }, + { + "epoch": 1.1831397370800585, + "grad_norm": 1.6443986892700195, + "learning_rate": 7.584670881779803e-06, + "loss": 0.7379, + "step": 17010 + }, + { + "epoch": 1.183835292481046, + "grad_norm": 1.5993534326553345, + "learning_rate": 7.581205983401896e-06, + "loss": 0.6786, + "step": 17020 + }, + { + "epoch": 1.1845308478820338, + "grad_norm": 6.630913257598877, + "learning_rate": 7.5777393942776725e-06, + "loss": 0.7336, + "step": 17030 + }, + { + "epoch": 1.1852264032830215, + "grad_norm": 1.5093629360198975, + "learning_rate": 7.574271116677826e-06, + "loss": 0.7334, + "step": 17040 + }, + { + "epoch": 1.185921958684009, + "grad_norm": 3.1409695148468018, + "learning_rate": 7.570801152874153e-06, + "loss": 0.7455, + "step": 17050 + }, + { + "epoch": 1.1866175140849968, + "grad_norm": 1.3799759149551392, + "learning_rate": 7.567329505139556e-06, + "loss": 0.7034, + "step": 17060 + }, + { + "epoch": 1.1873130694859846, + "grad_norm": 2.3834662437438965, + "learning_rate": 7.563856175748039e-06, + "loss": 0.7524, + "step": 17070 + }, + { + "epoch": 1.1880086248869723, + 
"grad_norm": 2.537773609161377, + "learning_rate": 7.560381166974711e-06, + "loss": 0.6897, + "step": 17080 + }, + { + "epoch": 1.1887041802879599, + "grad_norm": 1.9108469486236572, + "learning_rate": 7.556904481095777e-06, + "loss": 0.674, + "step": 17090 + }, + { + "epoch": 1.1893997356889476, + "grad_norm": 2.196781635284424, + "learning_rate": 7.553426120388542e-06, + "loss": 0.7529, + "step": 17100 + }, + { + "epoch": 1.1900952910899354, + "grad_norm": 1.4514846801757812, + "learning_rate": 7.549946087131408e-06, + "loss": 0.7101, + "step": 17110 + }, + { + "epoch": 1.190790846490923, + "grad_norm": 2.290584087371826, + "learning_rate": 7.546464383603875e-06, + "loss": 0.7878, + "step": 17120 + }, + { + "epoch": 1.1914864018919107, + "grad_norm": 2.6354496479034424, + "learning_rate": 7.542981012086532e-06, + "loss": 0.7022, + "step": 17130 + }, + { + "epoch": 1.1921819572928984, + "grad_norm": 1.6883187294006348, + "learning_rate": 7.539495974861066e-06, + "loss": 0.7888, + "step": 17140 + }, + { + "epoch": 1.192877512693886, + "grad_norm": 1.669197916984558, + "learning_rate": 7.536009274210251e-06, + "loss": 0.7136, + "step": 17150 + }, + { + "epoch": 1.1935730680948737, + "grad_norm": 1.7792937755584717, + "learning_rate": 7.532520912417953e-06, + "loss": 0.7538, + "step": 17160 + }, + { + "epoch": 1.1942686234958615, + "grad_norm": 1.8939685821533203, + "learning_rate": 7.529030891769124e-06, + "loss": 0.7399, + "step": 17170 + }, + { + "epoch": 1.194964178896849, + "grad_norm": 1.666150450706482, + "learning_rate": 7.525539214549805e-06, + "loss": 0.7255, + "step": 17180 + }, + { + "epoch": 1.1956597342978368, + "grad_norm": 3.4776463508605957, + "learning_rate": 7.52204588304712e-06, + "loss": 0.7713, + "step": 17190 + }, + { + "epoch": 1.1963552896988245, + "grad_norm": 2.244530200958252, + "learning_rate": 7.51855089954928e-06, + "loss": 0.7216, + "step": 17200 + }, + { + "epoch": 1.1970508450998123, + "grad_norm": 2.436161756515503, + "learning_rate": 7.515054266345571e-06, + "loss": 0.7034, + "step": 17210 + }, + { + "epoch": 1.1977464005007998, + "grad_norm": 2.159059762954712, + "learning_rate": 7.5115559857263664e-06, + "loss": 0.7097, + "step": 17220 + }, + { + "epoch": 1.1984419559017876, + "grad_norm": 3.094331741333008, + "learning_rate": 7.508056059983119e-06, + "loss": 0.7744, + "step": 17230 + }, + { + "epoch": 1.1991375113027753, + "grad_norm": 1.4148571491241455, + "learning_rate": 7.5045544914083515e-06, + "loss": 0.7071, + "step": 17240 + }, + { + "epoch": 1.1998330667037629, + "grad_norm": 1.7230191230773926, + "learning_rate": 7.5010512822956706e-06, + "loss": 0.7587, + "step": 17250 + }, + { + "epoch": 1.2005286221047506, + "grad_norm": 1.8063654899597168, + "learning_rate": 7.497546434939756e-06, + "loss": 0.7449, + "step": 17260 + }, + { + "epoch": 1.2012241775057384, + "grad_norm": 2.7699267864227295, + "learning_rate": 7.494039951636359e-06, + "loss": 0.7091, + "step": 17270 + }, + { + "epoch": 1.201919732906726, + "grad_norm": 2.470055341720581, + "learning_rate": 7.4905318346823e-06, + "loss": 0.6737, + "step": 17280 + }, + { + "epoch": 1.2026152883077137, + "grad_norm": 1.8754862546920776, + "learning_rate": 7.487022086375474e-06, + "loss": 0.7606, + "step": 17290 + }, + { + "epoch": 1.2033108437087014, + "grad_norm": 2.0946145057678223, + "learning_rate": 7.483510709014845e-06, + "loss": 0.8189, + "step": 17300 + }, + { + "epoch": 1.204006399109689, + "grad_norm": 1.8891016244888306, + "learning_rate": 7.479997704900437e-06, + "loss": 0.7144, + 
"step": 17310 + }, + { + "epoch": 1.2047019545106767, + "grad_norm": 1.8674472570419312, + "learning_rate": 7.4764830763333485e-06, + "loss": 0.7815, + "step": 17320 + }, + { + "epoch": 1.2053975099116645, + "grad_norm": 2.5638229846954346, + "learning_rate": 7.472966825615738e-06, + "loss": 0.7279, + "step": 17330 + }, + { + "epoch": 1.2060930653126523, + "grad_norm": 2.338073968887329, + "learning_rate": 7.4694489550508235e-06, + "loss": 0.7193, + "step": 17340 + }, + { + "epoch": 1.2067886207136398, + "grad_norm": 1.571984052658081, + "learning_rate": 7.465929466942888e-06, + "loss": 0.7407, + "step": 17350 + }, + { + "epoch": 1.2074841761146275, + "grad_norm": 2.1647307872772217, + "learning_rate": 7.462408363597276e-06, + "loss": 0.7417, + "step": 17360 + }, + { + "epoch": 1.2081797315156153, + "grad_norm": 1.2205125093460083, + "learning_rate": 7.458885647320384e-06, + "loss": 0.7495, + "step": 17370 + }, + { + "epoch": 1.2088752869166028, + "grad_norm": 2.5350382328033447, + "learning_rate": 7.455361320419669e-06, + "loss": 0.7301, + "step": 17380 + }, + { + "epoch": 1.2095708423175906, + "grad_norm": 1.679731011390686, + "learning_rate": 7.451835385203644e-06, + "loss": 0.6819, + "step": 17390 + }, + { + "epoch": 1.2102663977185784, + "grad_norm": 1.8390997648239136, + "learning_rate": 7.448307843981871e-06, + "loss": 0.7966, + "step": 17400 + }, + { + "epoch": 1.210961953119566, + "grad_norm": 1.516202688217163, + "learning_rate": 7.444778699064968e-06, + "loss": 0.7624, + "step": 17410 + }, + { + "epoch": 1.2116575085205536, + "grad_norm": 1.608948826789856, + "learning_rate": 7.441247952764601e-06, + "loss": 0.7233, + "step": 17420 + }, + { + "epoch": 1.2123530639215414, + "grad_norm": 2.061920404434204, + "learning_rate": 7.437715607393486e-06, + "loss": 0.7737, + "step": 17430 + }, + { + "epoch": 1.213048619322529, + "grad_norm": 4.024318695068359, + "learning_rate": 7.434181665265388e-06, + "loss": 0.7028, + "step": 17440 + }, + { + "epoch": 1.2137441747235167, + "grad_norm": 1.4213738441467285, + "learning_rate": 7.4306461286951135e-06, + "loss": 0.657, + "step": 17450 + }, + { + "epoch": 1.2144397301245045, + "grad_norm": 2.0485122203826904, + "learning_rate": 7.42710899999852e-06, + "loss": 0.7552, + "step": 17460 + }, + { + "epoch": 1.2151352855254922, + "grad_norm": 2.8756332397460938, + "learning_rate": 7.4235702814925e-06, + "loss": 0.7424, + "step": 17470 + }, + { + "epoch": 1.2158308409264798, + "grad_norm": 1.7246570587158203, + "learning_rate": 7.420029975494996e-06, + "loss": 0.7146, + "step": 17480 + }, + { + "epoch": 1.2165263963274675, + "grad_norm": 1.9771344661712646, + "learning_rate": 7.416488084324981e-06, + "loss": 0.722, + "step": 17490 + }, + { + "epoch": 1.2172219517284553, + "grad_norm": 3.96952748298645, + "learning_rate": 7.4129446103024725e-06, + "loss": 0.7424, + "step": 17500 + }, + { + "epoch": 1.2172219517284553, + "eval_loss": 0.9690396189689636, + "eval_runtime": 4663.1868, + "eval_samples_per_second": 3.895, + "eval_steps_per_second": 0.649, + "step": 17500 + }, + { + "epoch": 1.2179175071294428, + "grad_norm": 1.6346319913864136, + "learning_rate": 7.409399555748526e-06, + "loss": 0.7389, + "step": 17510 + }, + { + "epoch": 1.2186130625304306, + "grad_norm": 1.5730923414230347, + "learning_rate": 7.405852922985228e-06, + "loss": 0.7823, + "step": 17520 + }, + { + "epoch": 1.2193086179314183, + "grad_norm": 1.8568601608276367, + "learning_rate": 7.4023047143357e-06, + "loss": 0.7441, + "step": 17530 + }, + { + "epoch": 1.2200041733324059, + 
"grad_norm": 2.2312443256378174, + "learning_rate": 7.398754932124096e-06, + "loss": 0.743, + "step": 17540 + }, + { + "epoch": 1.2206997287333936, + "grad_norm": 2.7717463970184326, + "learning_rate": 7.395203578675603e-06, + "loss": 0.7309, + "step": 17550 + }, + { + "epoch": 1.2213952841343814, + "grad_norm": 1.78309166431427, + "learning_rate": 7.3916506563164325e-06, + "loss": 0.7285, + "step": 17560 + }, + { + "epoch": 1.222090839535369, + "grad_norm": 2.357851505279541, + "learning_rate": 7.388096167373826e-06, + "loss": 0.7778, + "step": 17570 + }, + { + "epoch": 1.2227863949363567, + "grad_norm": 2.2886083126068115, + "learning_rate": 7.384540114176056e-06, + "loss": 0.7489, + "step": 17580 + }, + { + "epoch": 1.2234819503373444, + "grad_norm": 1.3577769994735718, + "learning_rate": 7.38098249905241e-06, + "loss": 0.7251, + "step": 17590 + }, + { + "epoch": 1.2241775057383322, + "grad_norm": 1.5947679281234741, + "learning_rate": 7.3774233243332035e-06, + "loss": 0.7437, + "step": 17600 + }, + { + "epoch": 1.2248730611393197, + "grad_norm": 2.45898175239563, + "learning_rate": 7.3738625923497785e-06, + "loss": 0.777, + "step": 17610 + }, + { + "epoch": 1.2255686165403075, + "grad_norm": 1.1868382692337036, + "learning_rate": 7.370300305434489e-06, + "loss": 0.7129, + "step": 17620 + }, + { + "epoch": 1.226264171941295, + "grad_norm": 1.952952265739441, + "learning_rate": 7.366736465920709e-06, + "loss": 0.7499, + "step": 17630 + }, + { + "epoch": 1.2269597273422828, + "grad_norm": 1.8075990676879883, + "learning_rate": 7.363171076142836e-06, + "loss": 0.7557, + "step": 17640 + }, + { + "epoch": 1.2276552827432705, + "grad_norm": 1.9723416566848755, + "learning_rate": 7.359604138436274e-06, + "loss": 0.7286, + "step": 17650 + }, + { + "epoch": 1.2283508381442583, + "grad_norm": 2.0037879943847656, + "learning_rate": 7.356035655137447e-06, + "loss": 0.7337, + "step": 17660 + }, + { + "epoch": 1.2290463935452458, + "grad_norm": 1.7544316053390503, + "learning_rate": 7.352465628583789e-06, + "loss": 0.7245, + "step": 17670 + }, + { + "epoch": 1.2297419489462336, + "grad_norm": 2.2510509490966797, + "learning_rate": 7.348894061113747e-06, + "loss": 0.7095, + "step": 17680 + }, + { + "epoch": 1.2304375043472213, + "grad_norm": 1.8188114166259766, + "learning_rate": 7.345320955066773e-06, + "loss": 0.7827, + "step": 17690 + }, + { + "epoch": 1.2311330597482089, + "grad_norm": 2.1466455459594727, + "learning_rate": 7.341746312783331e-06, + "loss": 0.7134, + "step": 17700 + }, + { + "epoch": 1.2318286151491966, + "grad_norm": 2.1710309982299805, + "learning_rate": 7.33817013660489e-06, + "loss": 0.7594, + "step": 17710 + }, + { + "epoch": 1.2325241705501844, + "grad_norm": 2.683817148208618, + "learning_rate": 7.334592428873924e-06, + "loss": 0.7482, + "step": 17720 + }, + { + "epoch": 1.2332197259511721, + "grad_norm": 2.148509979248047, + "learning_rate": 7.331013191933908e-06, + "loss": 0.7713, + "step": 17730 + }, + { + "epoch": 1.2339152813521597, + "grad_norm": 1.3620214462280273, + "learning_rate": 7.327432428129322e-06, + "loss": 0.7355, + "step": 17740 + }, + { + "epoch": 1.2346108367531474, + "grad_norm": 1.713748812675476, + "learning_rate": 7.323850139805643e-06, + "loss": 0.7462, + "step": 17750 + }, + { + "epoch": 1.235306392154135, + "grad_norm": 1.6102030277252197, + "learning_rate": 7.320266329309349e-06, + "loss": 0.7286, + "step": 17760 + }, + { + "epoch": 1.2360019475551227, + "grad_norm": 1.9782726764678955, + "learning_rate": 7.316680998987915e-06, + "loss": 0.7334, + 
"step": 17770 + }, + { + "epoch": 1.2366975029561105, + "grad_norm": 2.4939191341400146, + "learning_rate": 7.31309415118981e-06, + "loss": 0.7036, + "step": 17780 + }, + { + "epoch": 1.2373930583570982, + "grad_norm": 1.7690492868423462, + "learning_rate": 7.309505788264496e-06, + "loss": 0.7034, + "step": 17790 + }, + { + "epoch": 1.2380886137580858, + "grad_norm": 1.5757818222045898, + "learning_rate": 7.305915912562432e-06, + "loss": 0.7528, + "step": 17800 + }, + { + "epoch": 1.2387841691590735, + "grad_norm": 2.2610669136047363, + "learning_rate": 7.302324526435064e-06, + "loss": 0.7348, + "step": 17810 + }, + { + "epoch": 1.2394797245600613, + "grad_norm": 1.5655224323272705, + "learning_rate": 7.298731632234827e-06, + "loss": 0.7536, + "step": 17820 + }, + { + "epoch": 1.2401752799610488, + "grad_norm": 1.702947735786438, + "learning_rate": 7.295137232315148e-06, + "loss": 0.7448, + "step": 17830 + }, + { + "epoch": 1.2408708353620366, + "grad_norm": 1.8597019910812378, + "learning_rate": 7.291541329030434e-06, + "loss": 0.6744, + "step": 17840 + }, + { + "epoch": 1.2415663907630243, + "grad_norm": 1.9019911289215088, + "learning_rate": 7.287943924736082e-06, + "loss": 0.7337, + "step": 17850 + }, + { + "epoch": 1.242261946164012, + "grad_norm": 1.7096370458602905, + "learning_rate": 7.28434502178847e-06, + "loss": 0.7399, + "step": 17860 + }, + { + "epoch": 1.2429575015649996, + "grad_norm": 5.454235553741455, + "learning_rate": 7.2807446225449606e-06, + "loss": 0.7668, + "step": 17870 + }, + { + "epoch": 1.2436530569659874, + "grad_norm": 3.3226876258850098, + "learning_rate": 7.277142729363891e-06, + "loss": 0.7849, + "step": 17880 + }, + { + "epoch": 1.244348612366975, + "grad_norm": 1.742781639099121, + "learning_rate": 7.27353934460458e-06, + "loss": 0.7551, + "step": 17890 + }, + { + "epoch": 1.2450441677679627, + "grad_norm": 1.1768951416015625, + "learning_rate": 7.269934470627325e-06, + "loss": 0.6918, + "step": 17900 + }, + { + "epoch": 1.2457397231689504, + "grad_norm": 1.9134175777435303, + "learning_rate": 7.266328109793396e-06, + "loss": 0.7563, + "step": 17910 + }, + { + "epoch": 1.2464352785699382, + "grad_norm": 1.7989120483398438, + "learning_rate": 7.262720264465038e-06, + "loss": 0.7477, + "step": 17920 + }, + { + "epoch": 1.2471308339709257, + "grad_norm": 2.976818561553955, + "learning_rate": 7.259110937005468e-06, + "loss": 0.7838, + "step": 17930 + }, + { + "epoch": 1.2478263893719135, + "grad_norm": 2.1231327056884766, + "learning_rate": 7.2555001297788775e-06, + "loss": 0.7166, + "step": 17940 + }, + { + "epoch": 1.2485219447729012, + "grad_norm": 1.5820504426956177, + "learning_rate": 7.25188784515042e-06, + "loss": 0.7706, + "step": 17950 + }, + { + "epoch": 1.2492175001738888, + "grad_norm": 1.6454449892044067, + "learning_rate": 7.2482740854862245e-06, + "loss": 0.7443, + "step": 17960 + }, + { + "epoch": 1.2499130555748765, + "grad_norm": 1.792536735534668, + "learning_rate": 7.244658853153379e-06, + "loss": 0.719, + "step": 17970 + }, + { + "epoch": 1.2506086109758643, + "grad_norm": 1.7187315225601196, + "learning_rate": 7.241042150519943e-06, + "loss": 0.7623, + "step": 17980 + }, + { + "epoch": 1.251304166376852, + "grad_norm": 1.5491689443588257, + "learning_rate": 7.237423979954934e-06, + "loss": 0.7363, + "step": 17990 + }, + { + "epoch": 1.2519997217778396, + "grad_norm": 4.109227657318115, + "learning_rate": 7.2338043438283324e-06, + "loss": 0.7397, + "step": 18000 + }, + { + "epoch": 1.2519997217778396, + "eval_loss": 0.9682740569114685, + 
"eval_runtime": 4603.0503, + "eval_samples_per_second": 3.945, + "eval_steps_per_second": 0.658, + "step": 18000 + }, + { + "epoch": 1.2526952771788273, + "grad_norm": 1.5728344917297363, + "learning_rate": 7.230183244511081e-06, + "loss": 0.74, + "step": 18010 + }, + { + "epoch": 1.2533908325798149, + "grad_norm": 1.9369425773620605, + "learning_rate": 7.226560684375077e-06, + "loss": 0.7433, + "step": 18020 + }, + { + "epoch": 1.2540863879808026, + "grad_norm": 2.15568208694458, + "learning_rate": 7.2229366657931755e-06, + "loss": 0.7399, + "step": 18030 + }, + { + "epoch": 1.2547819433817904, + "grad_norm": 1.8374186754226685, + "learning_rate": 7.219311191139191e-06, + "loss": 0.7063, + "step": 18040 + }, + { + "epoch": 1.2554774987827781, + "grad_norm": 1.830335259437561, + "learning_rate": 7.2156842627878856e-06, + "loss": 0.7341, + "step": 18050 + }, + { + "epoch": 1.2561730541837657, + "grad_norm": 1.5589704513549805, + "learning_rate": 7.212055883114979e-06, + "loss": 0.7126, + "step": 18060 + }, + { + "epoch": 1.2568686095847534, + "grad_norm": 1.7654367685317993, + "learning_rate": 7.208426054497137e-06, + "loss": 0.7296, + "step": 18070 + }, + { + "epoch": 1.2575641649857412, + "grad_norm": 1.796360731124878, + "learning_rate": 7.204794779311979e-06, + "loss": 0.7298, + "step": 18080 + }, + { + "epoch": 1.2582597203867287, + "grad_norm": 1.8311980962753296, + "learning_rate": 7.201162059938068e-06, + "loss": 0.7062, + "step": 18090 + }, + { + "epoch": 1.2589552757877165, + "grad_norm": 3.8734405040740967, + "learning_rate": 7.197527898754915e-06, + "loss": 0.8434, + "step": 18100 + }, + { + "epoch": 1.2596508311887042, + "grad_norm": 1.6472984552383423, + "learning_rate": 7.193892298142974e-06, + "loss": 0.735, + "step": 18110 + }, + { + "epoch": 1.260346386589692, + "grad_norm": 1.825825572013855, + "learning_rate": 7.190255260483645e-06, + "loss": 0.7291, + "step": 18120 + }, + { + "epoch": 1.2610419419906795, + "grad_norm": 1.484377145767212, + "learning_rate": 7.186616788159265e-06, + "loss": 0.6945, + "step": 18130 + }, + { + "epoch": 1.2617374973916673, + "grad_norm": 2.392759323120117, + "learning_rate": 7.182976883553113e-06, + "loss": 0.7324, + "step": 18140 + }, + { + "epoch": 1.2624330527926548, + "grad_norm": 1.9688462018966675, + "learning_rate": 7.179335549049408e-06, + "loss": 0.744, + "step": 18150 + }, + { + "epoch": 1.2631286081936426, + "grad_norm": 1.871225357055664, + "learning_rate": 7.175692787033304e-06, + "loss": 0.6852, + "step": 18160 + }, + { + "epoch": 1.2638241635946303, + "grad_norm": 1.770012617111206, + "learning_rate": 7.172048599890886e-06, + "loss": 0.7235, + "step": 18170 + }, + { + "epoch": 1.264519718995618, + "grad_norm": 1.5198339223861694, + "learning_rate": 7.1684029900091775e-06, + "loss": 0.7664, + "step": 18180 + }, + { + "epoch": 1.2652152743966056, + "grad_norm": 1.281686544418335, + "learning_rate": 7.164755959776135e-06, + "loss": 0.7375, + "step": 18190 + }, + { + "epoch": 1.2659108297975934, + "grad_norm": 1.8066498041152954, + "learning_rate": 7.161107511580642e-06, + "loss": 0.7788, + "step": 18200 + }, + { + "epoch": 1.266606385198581, + "grad_norm": 2.865297794342041, + "learning_rate": 7.1574576478125094e-06, + "loss": 0.775, + "step": 18210 + }, + { + "epoch": 1.2673019405995687, + "grad_norm": 2.2368898391723633, + "learning_rate": 7.153806370862482e-06, + "loss": 0.7197, + "step": 18220 + }, + { + "epoch": 1.2679974960005564, + "grad_norm": 2.5960853099823, + "learning_rate": 7.1501536831222205e-06, + "loss": 0.7249, + 
"step": 18230 + }, + { + "epoch": 1.2686930514015442, + "grad_norm": 1.4169334173202515, + "learning_rate": 7.146499586984319e-06, + "loss": 0.6976, + "step": 18240 + }, + { + "epoch": 1.269388606802532, + "grad_norm": 2.343489170074463, + "learning_rate": 7.142844084842291e-06, + "loss": 0.6922, + "step": 18250 + }, + { + "epoch": 1.2700841622035195, + "grad_norm": 2.0613107681274414, + "learning_rate": 7.1391871790905685e-06, + "loss": 0.7859, + "step": 18260 + }, + { + "epoch": 1.2707797176045073, + "grad_norm": 2.279526948928833, + "learning_rate": 7.135528872124504e-06, + "loss": 0.7236, + "step": 18270 + }, + { + "epoch": 1.2714752730054948, + "grad_norm": 2.2856810092926025, + "learning_rate": 7.13186916634037e-06, + "loss": 0.7351, + "step": 18280 + }, + { + "epoch": 1.2721708284064825, + "grad_norm": 1.9384171962738037, + "learning_rate": 7.128208064135353e-06, + "loss": 0.7104, + "step": 18290 + }, + { + "epoch": 1.2728663838074703, + "grad_norm": 1.6891708374023438, + "learning_rate": 7.124545567907555e-06, + "loss": 0.7438, + "step": 18300 + }, + { + "epoch": 1.273561939208458, + "grad_norm": 1.1708288192749023, + "learning_rate": 7.120881680055991e-06, + "loss": 0.7231, + "step": 18310 + }, + { + "epoch": 1.2742574946094456, + "grad_norm": 1.5777531862258911, + "learning_rate": 7.117216402980588e-06, + "loss": 0.717, + "step": 18320 + }, + { + "epoch": 1.2749530500104334, + "grad_norm": 2.45457124710083, + "learning_rate": 7.113549739082183e-06, + "loss": 0.7217, + "step": 18330 + }, + { + "epoch": 1.2756486054114209, + "grad_norm": 1.7701858282089233, + "learning_rate": 7.109881690762519e-06, + "loss": 0.7633, + "step": 18340 + }, + { + "epoch": 1.2763441608124086, + "grad_norm": 8.644448280334473, + "learning_rate": 7.10621226042425e-06, + "loss": 0.7543, + "step": 18350 + }, + { + "epoch": 1.2770397162133964, + "grad_norm": 1.3496848344802856, + "learning_rate": 7.102541450470932e-06, + "loss": 0.6698, + "step": 18360 + }, + { + "epoch": 1.2777352716143842, + "grad_norm": 1.7514654397964478, + "learning_rate": 7.0988692633070265e-06, + "loss": 0.7429, + "step": 18370 + }, + { + "epoch": 1.2784308270153717, + "grad_norm": 2.3610963821411133, + "learning_rate": 7.095195701337895e-06, + "loss": 0.7314, + "step": 18380 + }, + { + "epoch": 1.2791263824163595, + "grad_norm": 2.082404851913452, + "learning_rate": 7.091520766969802e-06, + "loss": 0.7, + "step": 18390 + }, + { + "epoch": 1.2798219378173472, + "grad_norm": 1.629526138305664, + "learning_rate": 7.0878444626099085e-06, + "loss": 0.7183, + "step": 18400 + }, + { + "epoch": 1.2805174932183347, + "grad_norm": 1.7087581157684326, + "learning_rate": 7.084166790666275e-06, + "loss": 0.7496, + "step": 18410 + }, + { + "epoch": 1.2812130486193225, + "grad_norm": 2.2229347229003906, + "learning_rate": 7.080487753547858e-06, + "loss": 0.7183, + "step": 18420 + }, + { + "epoch": 1.2819086040203103, + "grad_norm": 1.8445541858673096, + "learning_rate": 7.076807353664505e-06, + "loss": 0.7416, + "step": 18430 + }, + { + "epoch": 1.282604159421298, + "grad_norm": 1.761014461517334, + "learning_rate": 7.073125593426961e-06, + "loss": 0.7257, + "step": 18440 + }, + { + "epoch": 1.2832997148222856, + "grad_norm": 1.8244826793670654, + "learning_rate": 7.069442475246856e-06, + "loss": 0.7394, + "step": 18450 + }, + { + "epoch": 1.2839952702232733, + "grad_norm": 2.1204843521118164, + "learning_rate": 7.065758001536715e-06, + "loss": 0.7675, + "step": 18460 + }, + { + "epoch": 1.2846908256242608, + "grad_norm": 1.9532138109207153, + 
"learning_rate": 7.062072174709951e-06, + "loss": 0.719, + "step": 18470 + }, + { + "epoch": 1.2853863810252486, + "grad_norm": 1.7032570838928223, + "learning_rate": 7.058384997180857e-06, + "loss": 0.7488, + "step": 18480 + }, + { + "epoch": 1.2860819364262364, + "grad_norm": 1.6622296571731567, + "learning_rate": 7.054696471364617e-06, + "loss": 0.7318, + "step": 18490 + }, + { + "epoch": 1.2867774918272241, + "grad_norm": 1.596665620803833, + "learning_rate": 7.051006599677293e-06, + "loss": 0.7533, + "step": 18500 + }, + { + "epoch": 1.2867774918272241, + "eval_loss": 0.963870108127594, + "eval_runtime": 4586.7927, + "eval_samples_per_second": 3.959, + "eval_steps_per_second": 0.66, + "step": 18500 + }, + { + "epoch": 1.2874730472282117, + "grad_norm": 1.7572184801101685, + "learning_rate": 7.0473153845358375e-06, + "loss": 0.7362, + "step": 18510 + }, + { + "epoch": 1.2881686026291994, + "grad_norm": 1.680503487586975, + "learning_rate": 7.043622828358073e-06, + "loss": 0.7689, + "step": 18520 + }, + { + "epoch": 1.2888641580301872, + "grad_norm": 1.3073484897613525, + "learning_rate": 7.0399289335627034e-06, + "loss": 0.7509, + "step": 18530 + }, + { + "epoch": 1.2895597134311747, + "grad_norm": 2.5658061504364014, + "learning_rate": 7.036233702569315e-06, + "loss": 0.7713, + "step": 18540 + }, + { + "epoch": 1.2902552688321625, + "grad_norm": 1.6485391855239868, + "learning_rate": 7.032537137798361e-06, + "loss": 0.7146, + "step": 18550 + }, + { + "epoch": 1.2909508242331502, + "grad_norm": 1.835334300994873, + "learning_rate": 7.02883924167117e-06, + "loss": 0.7043, + "step": 18560 + }, + { + "epoch": 1.291646379634138, + "grad_norm": 2.2318968772888184, + "learning_rate": 7.025140016609951e-06, + "loss": 0.7087, + "step": 18570 + }, + { + "epoch": 1.2923419350351255, + "grad_norm": 1.8629264831542969, + "learning_rate": 7.021439465037776e-06, + "loss": 0.7729, + "step": 18580 + }, + { + "epoch": 1.2930374904361133, + "grad_norm": 1.6665958166122437, + "learning_rate": 7.017737589378582e-06, + "loss": 0.6846, + "step": 18590 + }, + { + "epoch": 1.2937330458371008, + "grad_norm": 1.6408368349075317, + "learning_rate": 7.014034392057183e-06, + "loss": 0.7047, + "step": 18600 + }, + { + "epoch": 1.2944286012380886, + "grad_norm": 1.9741275310516357, + "learning_rate": 7.010329875499252e-06, + "loss": 0.7336, + "step": 18610 + }, + { + "epoch": 1.2951241566390763, + "grad_norm": 2.0029027462005615, + "learning_rate": 7.0066240421313305e-06, + "loss": 0.7666, + "step": 18620 + }, + { + "epoch": 1.295819712040064, + "grad_norm": 2.031538248062134, + "learning_rate": 7.0029168943808175e-06, + "loss": 0.7834, + "step": 18630 + }, + { + "epoch": 1.2965152674410516, + "grad_norm": 1.901289463043213, + "learning_rate": 6.9992084346759794e-06, + "loss": 0.6969, + "step": 18640 + }, + { + "epoch": 1.2972108228420394, + "grad_norm": 1.3963438272476196, + "learning_rate": 6.995498665445935e-06, + "loss": 0.6863, + "step": 18650 + }, + { + "epoch": 1.2979063782430271, + "grad_norm": 2.0043725967407227, + "learning_rate": 6.991787589120664e-06, + "loss": 0.7249, + "step": 18660 + }, + { + "epoch": 1.2986019336440147, + "grad_norm": 3.747406244277954, + "learning_rate": 6.988075208131006e-06, + "loss": 0.7315, + "step": 18670 + }, + { + "epoch": 1.2992974890450024, + "grad_norm": 2.1698081493377686, + "learning_rate": 6.98436152490865e-06, + "loss": 0.7487, + "step": 18680 + }, + { + "epoch": 1.2999930444459902, + "grad_norm": 1.869093418121338, + "learning_rate": 6.980646541886138e-06, + "loss": 
0.7368, + "step": 18690 + }, + { + "epoch": 1.300688599846978, + "grad_norm": 1.8964474201202393, + "learning_rate": 6.976930261496866e-06, + "loss": 0.7274, + "step": 18700 + }, + { + "epoch": 1.3013841552479655, + "grad_norm": 2.813713312149048, + "learning_rate": 6.973212686175079e-06, + "loss": 0.7216, + "step": 18710 + }, + { + "epoch": 1.3020797106489532, + "grad_norm": 1.8738377094268799, + "learning_rate": 6.96949381835587e-06, + "loss": 0.7381, + "step": 18720 + }, + { + "epoch": 1.3027752660499408, + "grad_norm": 2.643646478652954, + "learning_rate": 6.9657736604751804e-06, + "loss": 0.7472, + "step": 18730 + }, + { + "epoch": 1.3034708214509285, + "grad_norm": 1.8015767335891724, + "learning_rate": 6.962052214969792e-06, + "loss": 0.6989, + "step": 18740 + }, + { + "epoch": 1.3041663768519163, + "grad_norm": 2.345111131668091, + "learning_rate": 6.958329484277333e-06, + "loss": 0.7234, + "step": 18750 + }, + { + "epoch": 1.304861932252904, + "grad_norm": 1.9773120880126953, + "learning_rate": 6.954605470836277e-06, + "loss": 0.7295, + "step": 18760 + }, + { + "epoch": 1.3055574876538916, + "grad_norm": 1.7336528301239014, + "learning_rate": 6.950880177085932e-06, + "loss": 0.7415, + "step": 18770 + }, + { + "epoch": 1.3062530430548793, + "grad_norm": 2.071741819381714, + "learning_rate": 6.947153605466445e-06, + "loss": 0.7661, + "step": 18780 + }, + { + "epoch": 1.306948598455867, + "grad_norm": 1.7018749713897705, + "learning_rate": 6.9434257584188035e-06, + "loss": 0.7441, + "step": 18790 + }, + { + "epoch": 1.3076441538568546, + "grad_norm": 3.8296806812286377, + "learning_rate": 6.93969663838483e-06, + "loss": 0.7676, + "step": 18800 + }, + { + "epoch": 1.3083397092578424, + "grad_norm": 1.6634129285812378, + "learning_rate": 6.935966247807177e-06, + "loss": 0.7478, + "step": 18810 + }, + { + "epoch": 1.3090352646588301, + "grad_norm": 2.524336814880371, + "learning_rate": 6.932234589129332e-06, + "loss": 0.7292, + "step": 18820 + }, + { + "epoch": 1.309730820059818, + "grad_norm": 1.9049811363220215, + "learning_rate": 6.928501664795616e-06, + "loss": 0.6981, + "step": 18830 + }, + { + "epoch": 1.3104263754608054, + "grad_norm": 2.2543251514434814, + "learning_rate": 6.924767477251173e-06, + "loss": 0.7429, + "step": 18840 + }, + { + "epoch": 1.3111219308617932, + "grad_norm": 1.8610349893569946, + "learning_rate": 6.921032028941979e-06, + "loss": 0.6752, + "step": 18850 + }, + { + "epoch": 1.3118174862627807, + "grad_norm": 1.4456617832183838, + "learning_rate": 6.917295322314834e-06, + "loss": 0.7173, + "step": 18860 + }, + { + "epoch": 1.3125130416637685, + "grad_norm": 2.0534849166870117, + "learning_rate": 6.913557359817362e-06, + "loss": 0.7867, + "step": 18870 + }, + { + "epoch": 1.3132085970647562, + "grad_norm": 1.6244827508926392, + "learning_rate": 6.909818143898008e-06, + "loss": 0.7201, + "step": 18880 + }, + { + "epoch": 1.313904152465744, + "grad_norm": 2.886784315109253, + "learning_rate": 6.906077677006045e-06, + "loss": 0.7384, + "step": 18890 + }, + { + "epoch": 1.3145997078667315, + "grad_norm": 2.1008074283599854, + "learning_rate": 6.902335961591559e-06, + "loss": 0.7215, + "step": 18900 + }, + { + "epoch": 1.3152952632677193, + "grad_norm": 1.7977426052093506, + "learning_rate": 6.898593000105452e-06, + "loss": 0.7391, + "step": 18910 + }, + { + "epoch": 1.315990818668707, + "grad_norm": 2.019552230834961, + "learning_rate": 6.894848794999449e-06, + "loss": 0.7196, + "step": 18920 + }, + { + "epoch": 1.3166863740696946, + "grad_norm": 
3.566270112991333, + "learning_rate": 6.891103348726085e-06, + "loss": 0.7776, + "step": 18930 + }, + { + "epoch": 1.3173819294706823, + "grad_norm": 2.471364736557007, + "learning_rate": 6.887356663738709e-06, + "loss": 0.6764, + "step": 18940 + }, + { + "epoch": 1.31807748487167, + "grad_norm": 2.0385968685150146, + "learning_rate": 6.883608742491481e-06, + "loss": 0.7609, + "step": 18950 + }, + { + "epoch": 1.3187730402726578, + "grad_norm": 1.422126054763794, + "learning_rate": 6.879859587439373e-06, + "loss": 0.7086, + "step": 18960 + }, + { + "epoch": 1.3194685956736454, + "grad_norm": 1.5325630903244019, + "learning_rate": 6.876109201038161e-06, + "loss": 0.7478, + "step": 18970 + }, + { + "epoch": 1.3201641510746331, + "grad_norm": 1.6288189888000488, + "learning_rate": 6.872357585744434e-06, + "loss": 0.7071, + "step": 18980 + }, + { + "epoch": 1.3208597064756207, + "grad_norm": 1.1976368427276611, + "learning_rate": 6.868604744015578e-06, + "loss": 0.7451, + "step": 18990 + }, + { + "epoch": 1.3215552618766084, + "grad_norm": 1.663931131362915, + "learning_rate": 6.864850678309788e-06, + "loss": 0.7368, + "step": 19000 + }, + { + "epoch": 1.3215552618766084, + "eval_loss": 0.960919976234436, + "eval_runtime": 4624.6844, + "eval_samples_per_second": 3.927, + "eval_steps_per_second": 0.655, + "step": 19000 + }, + { + "epoch": 1.3222508172775962, + "grad_norm": 1.8665128946304321, + "learning_rate": 6.861095391086059e-06, + "loss": 0.7138, + "step": 19010 + }, + { + "epoch": 1.322946372678584, + "grad_norm": 2.203533411026001, + "learning_rate": 6.857338884804185e-06, + "loss": 0.7108, + "step": 19020 + }, + { + "epoch": 1.3236419280795715, + "grad_norm": 2.6935863494873047, + "learning_rate": 6.853581161924763e-06, + "loss": 0.6968, + "step": 19030 + }, + { + "epoch": 1.3243374834805592, + "grad_norm": 1.8716309070587158, + "learning_rate": 6.849822224909179e-06, + "loss": 0.7452, + "step": 19040 + }, + { + "epoch": 1.325033038881547, + "grad_norm": 2.1420507431030273, + "learning_rate": 6.8460620762196226e-06, + "loss": 0.7445, + "step": 19050 + }, + { + "epoch": 1.3257285942825345, + "grad_norm": 4.204934120178223, + "learning_rate": 6.842300718319072e-06, + "loss": 0.7967, + "step": 19060 + }, + { + "epoch": 1.3264241496835223, + "grad_norm": 2.121975898742676, + "learning_rate": 6.838538153671298e-06, + "loss": 0.7991, + "step": 19070 + }, + { + "epoch": 1.32711970508451, + "grad_norm": 2.133665084838867, + "learning_rate": 6.834774384740865e-06, + "loss": 0.6992, + "step": 19080 + }, + { + "epoch": 1.3278152604854978, + "grad_norm": 1.477371096611023, + "learning_rate": 6.83100941399312e-06, + "loss": 0.7575, + "step": 19090 + }, + { + "epoch": 1.3285108158864853, + "grad_norm": 1.5322870016098022, + "learning_rate": 6.827243243894205e-06, + "loss": 0.7009, + "step": 19100 + }, + { + "epoch": 1.329206371287473, + "grad_norm": 1.5652471780776978, + "learning_rate": 6.823475876911042e-06, + "loss": 0.7405, + "step": 19110 + }, + { + "epoch": 1.3299019266884606, + "grad_norm": 1.6800421476364136, + "learning_rate": 6.819707315511338e-06, + "loss": 0.6807, + "step": 19120 + }, + { + "epoch": 1.3305974820894484, + "grad_norm": 3.5344507694244385, + "learning_rate": 6.815937562163585e-06, + "loss": 0.7548, + "step": 19130 + }, + { + "epoch": 1.3312930374904361, + "grad_norm": 1.5557117462158203, + "learning_rate": 6.81216661933705e-06, + "loss": 0.7678, + "step": 19140 + }, + { + "epoch": 1.331988592891424, + "grad_norm": 1.8312923908233643, + "learning_rate": 6.80839448950179e-06, 
+ "loss": 0.7717, + "step": 19150 + }, + { + "epoch": 1.3326841482924114, + "grad_norm": 2.264564037322998, + "learning_rate": 6.804621175128625e-06, + "loss": 0.7325, + "step": 19160 + }, + { + "epoch": 1.3333797036933992, + "grad_norm": 2.241790533065796, + "learning_rate": 6.8008466786891616e-06, + "loss": 0.7229, + "step": 19170 + }, + { + "epoch": 1.3340752590943867, + "grad_norm": 2.09783935546875, + "learning_rate": 6.797071002655778e-06, + "loss": 0.7586, + "step": 19180 + }, + { + "epoch": 1.3347708144953745, + "grad_norm": 2.9909636974334717, + "learning_rate": 6.793294149501624e-06, + "loss": 0.6751, + "step": 19190 + }, + { + "epoch": 1.3354663698963622, + "grad_norm": 2.003004312515259, + "learning_rate": 6.7895161217006185e-06, + "loss": 0.7286, + "step": 19200 + }, + { + "epoch": 1.33616192529735, + "grad_norm": 1.4262187480926514, + "learning_rate": 6.785736921727457e-06, + "loss": 0.7256, + "step": 19210 + }, + { + "epoch": 1.3368574806983375, + "grad_norm": 1.7163954973220825, + "learning_rate": 6.781956552057596e-06, + "loss": 0.7613, + "step": 19220 + }, + { + "epoch": 1.3375530360993253, + "grad_norm": 1.8272589445114136, + "learning_rate": 6.778175015167256e-06, + "loss": 0.7438, + "step": 19230 + }, + { + "epoch": 1.338248591500313, + "grad_norm": 1.959664225578308, + "learning_rate": 6.774392313533434e-06, + "loss": 0.8067, + "step": 19240 + }, + { + "epoch": 1.3389441469013006, + "grad_norm": 5.268548965454102, + "learning_rate": 6.770608449633877e-06, + "loss": 0.7748, + "step": 19250 + }, + { + "epoch": 1.3396397023022883, + "grad_norm": 3.2382864952087402, + "learning_rate": 6.766823425947098e-06, + "loss": 0.727, + "step": 19260 + }, + { + "epoch": 1.340335257703276, + "grad_norm": 3.060558795928955, + "learning_rate": 6.763037244952373e-06, + "loss": 0.7262, + "step": 19270 + }, + { + "epoch": 1.3410308131042639, + "grad_norm": 3.3600611686706543, + "learning_rate": 6.7592499091297325e-06, + "loss": 0.7007, + "step": 19280 + }, + { + "epoch": 1.3417263685052514, + "grad_norm": 1.596608281135559, + "learning_rate": 6.755461420959965e-06, + "loss": 0.7166, + "step": 19290 + }, + { + "epoch": 1.3424219239062392, + "grad_norm": 2.1443235874176025, + "learning_rate": 6.751671782924611e-06, + "loss": 0.8014, + "step": 19300 + }, + { + "epoch": 1.3431174793072267, + "grad_norm": 1.699325442314148, + "learning_rate": 6.7478809975059665e-06, + "loss": 0.6869, + "step": 19310 + }, + { + "epoch": 1.3438130347082144, + "grad_norm": 1.5304608345031738, + "learning_rate": 6.7440890671870806e-06, + "loss": 0.7727, + "step": 19320 + }, + { + "epoch": 1.3445085901092022, + "grad_norm": 2.2162091732025146, + "learning_rate": 6.740295994451749e-06, + "loss": 0.8032, + "step": 19330 + }, + { + "epoch": 1.34520414551019, + "grad_norm": 3.4945290088653564, + "learning_rate": 6.736501781784518e-06, + "loss": 0.7209, + "step": 19340 + }, + { + "epoch": 1.3458997009111775, + "grad_norm": 2.773132562637329, + "learning_rate": 6.732706431670679e-06, + "loss": 0.6919, + "step": 19350 + }, + { + "epoch": 1.3465952563121653, + "grad_norm": 7.5964484214782715, + "learning_rate": 6.728909946596269e-06, + "loss": 0.6809, + "step": 19360 + }, + { + "epoch": 1.347290811713153, + "grad_norm": 1.5134934186935425, + "learning_rate": 6.72511232904807e-06, + "loss": 0.7334, + "step": 19370 + }, + { + "epoch": 1.3479863671141405, + "grad_norm": 2.5219058990478516, + "learning_rate": 6.721313581513605e-06, + "loss": 0.7284, + "step": 19380 + }, + { + "epoch": 1.3486819225151283, + "grad_norm": 
3.151441812515259, + "learning_rate": 6.7175137064811325e-06, + "loss": 0.7348, + "step": 19390 + }, + { + "epoch": 1.349377477916116, + "grad_norm": 2.083405017852783, + "learning_rate": 6.713712706439659e-06, + "loss": 0.7843, + "step": 19400 + }, + { + "epoch": 1.3500730333171038, + "grad_norm": 1.4055002927780151, + "learning_rate": 6.709910583878919e-06, + "loss": 0.732, + "step": 19410 + }, + { + "epoch": 1.3507685887180914, + "grad_norm": 1.4892009496688843, + "learning_rate": 6.7061073412893874e-06, + "loss": 0.7161, + "step": 19420 + }, + { + "epoch": 1.3514641441190791, + "grad_norm": 1.7122801542282104, + "learning_rate": 6.702302981162271e-06, + "loss": 0.6975, + "step": 19430 + }, + { + "epoch": 1.3521596995200666, + "grad_norm": 2.044145345687866, + "learning_rate": 6.698497505989507e-06, + "loss": 0.7396, + "step": 19440 + }, + { + "epoch": 1.3528552549210544, + "grad_norm": 3.04289174079895, + "learning_rate": 6.6946909182637685e-06, + "loss": 0.7199, + "step": 19450 + }, + { + "epoch": 1.3535508103220422, + "grad_norm": 1.4417814016342163, + "learning_rate": 6.69088322047845e-06, + "loss": 0.7317, + "step": 19460 + }, + { + "epoch": 1.35424636572303, + "grad_norm": 3.2525851726531982, + "learning_rate": 6.68707441512768e-06, + "loss": 0.7696, + "step": 19470 + }, + { + "epoch": 1.3549419211240175, + "grad_norm": 1.5432922840118408, + "learning_rate": 6.683264504706306e-06, + "loss": 0.6547, + "step": 19480 + }, + { + "epoch": 1.3556374765250052, + "grad_norm": 1.7819520235061646, + "learning_rate": 6.679453491709904e-06, + "loss": 0.7623, + "step": 19490 + }, + { + "epoch": 1.356333031925993, + "grad_norm": 2.6051435470581055, + "learning_rate": 6.675641378634772e-06, + "loss": 0.7776, + "step": 19500 + }, + { + "epoch": 1.356333031925993, + "eval_loss": 0.9572448134422302, + "eval_runtime": 4618.4125, + "eval_samples_per_second": 3.932, + "eval_steps_per_second": 0.655, + "step": 19500 + }, + { + "epoch": 1.3570285873269805, + "grad_norm": 1.632511854171753, + "learning_rate": 6.671828167977925e-06, + "loss": 0.7536, + "step": 19510 + }, + { + "epoch": 1.3577241427279683, + "grad_norm": 1.799368143081665, + "learning_rate": 6.668013862237101e-06, + "loss": 0.7549, + "step": 19520 + }, + { + "epoch": 1.358419698128956, + "grad_norm": 1.773271083831787, + "learning_rate": 6.664198463910752e-06, + "loss": 0.7218, + "step": 19530 + }, + { + "epoch": 1.3591152535299438, + "grad_norm": 1.4315930604934692, + "learning_rate": 6.660381975498051e-06, + "loss": 0.7095, + "step": 19540 + }, + { + "epoch": 1.3598108089309313, + "grad_norm": 1.4854565858840942, + "learning_rate": 6.656564399498876e-06, + "loss": 0.7181, + "step": 19550 + }, + { + "epoch": 1.360506364331919, + "grad_norm": 3.9031383991241455, + "learning_rate": 6.652745738413826e-06, + "loss": 0.7534, + "step": 19560 + }, + { + "epoch": 1.3612019197329066, + "grad_norm": 2.8728508949279785, + "learning_rate": 6.648925994744208e-06, + "loss": 0.7715, + "step": 19570 + }, + { + "epoch": 1.3618974751338944, + "grad_norm": 2.304520845413208, + "learning_rate": 6.645105170992035e-06, + "loss": 0.7063, + "step": 19580 + }, + { + "epoch": 1.3625930305348821, + "grad_norm": 2.534449815750122, + "learning_rate": 6.641283269660031e-06, + "loss": 0.6602, + "step": 19590 + }, + { + "epoch": 1.3632885859358699, + "grad_norm": 1.9590859413146973, + "learning_rate": 6.6374602932516275e-06, + "loss": 0.7187, + "step": 19600 + }, + { + "epoch": 1.3639841413368574, + "grad_norm": 1.7080013751983643, + "learning_rate": 
6.633636244270953e-06, + "loss": 0.7676, + "step": 19610 + }, + { + "epoch": 1.3646796967378452, + "grad_norm": 1.4937655925750732, + "learning_rate": 6.629811125222847e-06, + "loss": 0.7653, + "step": 19620 + }, + { + "epoch": 1.365375252138833, + "grad_norm": 2.195908784866333, + "learning_rate": 6.6259849386128435e-06, + "loss": 0.7867, + "step": 19630 + }, + { + "epoch": 1.3660708075398205, + "grad_norm": 1.7877005338668823, + "learning_rate": 6.62215768694718e-06, + "loss": 0.7028, + "step": 19640 + }, + { + "epoch": 1.3667663629408082, + "grad_norm": 1.8215194940567017, + "learning_rate": 6.618329372732788e-06, + "loss": 0.6973, + "step": 19650 + }, + { + "epoch": 1.367461918341796, + "grad_norm": 1.538421630859375, + "learning_rate": 6.6144999984773e-06, + "loss": 0.7559, + "step": 19660 + }, + { + "epoch": 1.3681574737427837, + "grad_norm": 4.285520553588867, + "learning_rate": 6.610669566689038e-06, + "loss": 0.7122, + "step": 19670 + }, + { + "epoch": 1.3688530291437713, + "grad_norm": 1.3544273376464844, + "learning_rate": 6.606838079877017e-06, + "loss": 0.7111, + "step": 19680 + }, + { + "epoch": 1.369548584544759, + "grad_norm": 1.9782638549804688, + "learning_rate": 6.603005540550946e-06, + "loss": 0.7435, + "step": 19690 + }, + { + "epoch": 1.3702441399457466, + "grad_norm": 1.8627787828445435, + "learning_rate": 6.599171951221224e-06, + "loss": 0.7162, + "step": 19700 + }, + { + "epoch": 1.3709396953467343, + "grad_norm": 1.988464117050171, + "learning_rate": 6.595337314398933e-06, + "loss": 0.6988, + "step": 19710 + }, + { + "epoch": 1.371635250747722, + "grad_norm": 1.573891043663025, + "learning_rate": 6.591501632595845e-06, + "loss": 0.7349, + "step": 19720 + }, + { + "epoch": 1.3723308061487098, + "grad_norm": 1.717590570449829, + "learning_rate": 6.587664908324415e-06, + "loss": 0.6986, + "step": 19730 + }, + { + "epoch": 1.3730263615496974, + "grad_norm": 1.9592229127883911, + "learning_rate": 6.58382714409778e-06, + "loss": 0.7014, + "step": 19740 + }, + { + "epoch": 1.3737219169506851, + "grad_norm": 1.5875359773635864, + "learning_rate": 6.579988342429764e-06, + "loss": 0.7646, + "step": 19750 + }, + { + "epoch": 1.3744174723516729, + "grad_norm": 1.5802985429763794, + "learning_rate": 6.576148505834861e-06, + "loss": 0.7474, + "step": 19760 + }, + { + "epoch": 1.3751130277526604, + "grad_norm": 2.338067054748535, + "learning_rate": 6.572307636828249e-06, + "loss": 0.6797, + "step": 19770 + }, + { + "epoch": 1.3758085831536482, + "grad_norm": 2.017709493637085, + "learning_rate": 6.568465737925782e-06, + "loss": 0.7014, + "step": 19780 + }, + { + "epoch": 1.376504138554636, + "grad_norm": 3.5567150115966797, + "learning_rate": 6.5646228116439895e-06, + "loss": 0.7549, + "step": 19790 + }, + { + "epoch": 1.3771996939556237, + "grad_norm": 1.9883997440338135, + "learning_rate": 6.560778860500068e-06, + "loss": 0.7651, + "step": 19800 + }, + { + "epoch": 1.3778952493566112, + "grad_norm": 1.9000462293624878, + "learning_rate": 6.556933887011891e-06, + "loss": 0.6874, + "step": 19810 + }, + { + "epoch": 1.378590804757599, + "grad_norm": 1.7734254598617554, + "learning_rate": 6.5530878936980034e-06, + "loss": 0.789, + "step": 19820 + }, + { + "epoch": 1.3792863601585865, + "grad_norm": 1.403000831604004, + "learning_rate": 6.549240883077611e-06, + "loss": 0.68, + "step": 19830 + }, + { + "epoch": 1.3799819155595743, + "grad_norm": 1.5137752294540405, + "learning_rate": 6.545392857670591e-06, + "loss": 0.7738, + "step": 19840 + }, + { + "epoch": 1.380677470960562, + 
"grad_norm": 1.8899444341659546, + "learning_rate": 6.541543819997484e-06, + "loss": 0.7433, + "step": 19850 + }, + { + "epoch": 1.3813730263615498, + "grad_norm": 1.689089298248291, + "learning_rate": 6.537693772579495e-06, + "loss": 0.7403, + "step": 19860 + }, + { + "epoch": 1.3820685817625373, + "grad_norm": 2.014357328414917, + "learning_rate": 6.533842717938487e-06, + "loss": 0.683, + "step": 19870 + }, + { + "epoch": 1.382764137163525, + "grad_norm": 1.7264859676361084, + "learning_rate": 6.529990658596986e-06, + "loss": 0.7678, + "step": 19880 + }, + { + "epoch": 1.3834596925645128, + "grad_norm": 2.221907615661621, + "learning_rate": 6.526137597078177e-06, + "loss": 0.7438, + "step": 19890 + }, + { + "epoch": 1.3841552479655004, + "grad_norm": 1.9804540872573853, + "learning_rate": 6.522283535905895e-06, + "loss": 0.6832, + "step": 19900 + }, + { + "epoch": 1.3848508033664881, + "grad_norm": 1.5021123886108398, + "learning_rate": 6.518428477604638e-06, + "loss": 0.7238, + "step": 19910 + }, + { + "epoch": 1.385546358767476, + "grad_norm": 3.1732945442199707, + "learning_rate": 6.514572424699552e-06, + "loss": 0.7793, + "step": 19920 + }, + { + "epoch": 1.3862419141684637, + "grad_norm": 1.921228289604187, + "learning_rate": 6.510715379716438e-06, + "loss": 0.7161, + "step": 19930 + }, + { + "epoch": 1.3869374695694512, + "grad_norm": 1.7142375707626343, + "learning_rate": 6.50685734518174e-06, + "loss": 0.7347, + "step": 19940 + }, + { + "epoch": 1.387633024970439, + "grad_norm": 2.190178871154785, + "learning_rate": 6.50299832362256e-06, + "loss": 0.6852, + "step": 19950 + }, + { + "epoch": 1.3883285803714265, + "grad_norm": 3.805229902267456, + "learning_rate": 6.499138317566639e-06, + "loss": 0.7321, + "step": 19960 + }, + { + "epoch": 1.3890241357724142, + "grad_norm": 1.6042863130569458, + "learning_rate": 6.495277329542364e-06, + "loss": 0.7415, + "step": 19970 + }, + { + "epoch": 1.389719691173402, + "grad_norm": 1.7392058372497559, + "learning_rate": 6.4914153620787705e-06, + "loss": 0.782, + "step": 19980 + }, + { + "epoch": 1.3904152465743898, + "grad_norm": 1.483004093170166, + "learning_rate": 6.487552417705527e-06, + "loss": 0.7144, + "step": 19990 + }, + { + "epoch": 1.3911108019753773, + "grad_norm": 1.5903856754302979, + "learning_rate": 6.483688498952949e-06, + "loss": 0.7083, + "step": 20000 + }, + { + "epoch": 1.3911108019753773, + "eval_loss": 0.9533275961875916, + "eval_runtime": 4641.5841, + "eval_samples_per_second": 3.913, + "eval_steps_per_second": 0.652, + "step": 20000 + }, + { + "epoch": 1.391806357376365, + "grad_norm": 2.015063524246216, + "learning_rate": 6.479823608351988e-06, + "loss": 0.7476, + "step": 20010 + }, + { + "epoch": 1.3925019127773526, + "grad_norm": 1.768751859664917, + "learning_rate": 6.4759577484342306e-06, + "loss": 0.7377, + "step": 20020 + }, + { + "epoch": 1.3931974681783403, + "grad_norm": 2.1253769397735596, + "learning_rate": 6.472090921731901e-06, + "loss": 0.7225, + "step": 20030 + }, + { + "epoch": 1.393893023579328, + "grad_norm": 2.2429039478302, + "learning_rate": 6.468223130777853e-06, + "loss": 0.7676, + "step": 20040 + }, + { + "epoch": 1.3945885789803159, + "grad_norm": 3.086918830871582, + "learning_rate": 6.464354378105575e-06, + "loss": 0.7386, + "step": 20050 + }, + { + "epoch": 1.3952841343813036, + "grad_norm": 1.6962336301803589, + "learning_rate": 6.460484666249187e-06, + "loss": 0.7559, + "step": 20060 + }, + { + "epoch": 1.3959796897822911, + "grad_norm": 2.080021381378174, + "learning_rate": 
6.456613997743431e-06, + "loss": 0.7428, + "step": 20070 + }, + { + "epoch": 1.396675245183279, + "grad_norm": 1.8937263488769531, + "learning_rate": 6.452742375123681e-06, + "loss": 0.7295, + "step": 20080 + }, + { + "epoch": 1.3973708005842664, + "grad_norm": 3.948953866958618, + "learning_rate": 6.448869800925936e-06, + "loss": 0.7662, + "step": 20090 + }, + { + "epoch": 1.3980663559852542, + "grad_norm": 1.7160600423812866, + "learning_rate": 6.444996277686813e-06, + "loss": 0.702, + "step": 20100 + }, + { + "epoch": 1.398761911386242, + "grad_norm": 3.6124234199523926, + "learning_rate": 6.44112180794356e-06, + "loss": 0.7211, + "step": 20110 + }, + { + "epoch": 1.3994574667872297, + "grad_norm": 2.0008113384246826, + "learning_rate": 6.437246394234034e-06, + "loss": 0.7689, + "step": 20120 + }, + { + "epoch": 1.4001530221882172, + "grad_norm": 5.009844779968262, + "learning_rate": 6.4333700390967154e-06, + "loss": 0.7373, + "step": 20130 + }, + { + "epoch": 1.400848577589205, + "grad_norm": 2.1380205154418945, + "learning_rate": 6.429492745070708e-06, + "loss": 0.7599, + "step": 20140 + }, + { + "epoch": 1.4015441329901925, + "grad_norm": 2.028172016143799, + "learning_rate": 6.425614514695717e-06, + "loss": 0.723, + "step": 20150 + }, + { + "epoch": 1.4022396883911803, + "grad_norm": 1.7395390272140503, + "learning_rate": 6.421735350512071e-06, + "loss": 0.7021, + "step": 20160 + }, + { + "epoch": 1.402935243792168, + "grad_norm": 2.12265682220459, + "learning_rate": 6.417855255060708e-06, + "loss": 0.6969, + "step": 20170 + }, + { + "epoch": 1.4036307991931558, + "grad_norm": 2.0006103515625, + "learning_rate": 6.413974230883176e-06, + "loss": 0.7851, + "step": 20180 + }, + { + "epoch": 1.4043263545941433, + "grad_norm": 2.002169132232666, + "learning_rate": 6.4100922805216255e-06, + "loss": 0.7188, + "step": 20190 + }, + { + "epoch": 1.405021909995131, + "grad_norm": 2.073418378829956, + "learning_rate": 6.406209406518824e-06, + "loss": 0.7585, + "step": 20200 + }, + { + "epoch": 1.4057174653961189, + "grad_norm": 2.021371364593506, + "learning_rate": 6.402325611418139e-06, + "loss": 0.7853, + "step": 20210 + }, + { + "epoch": 1.4064130207971064, + "grad_norm": 7.596836566925049, + "learning_rate": 6.398440897763536e-06, + "loss": 0.7316, + "step": 20220 + }, + { + "epoch": 1.4071085761980942, + "grad_norm": 1.8418960571289062, + "learning_rate": 6.394555268099593e-06, + "loss": 0.6865, + "step": 20230 + }, + { + "epoch": 1.407804131599082, + "grad_norm": 1.872796893119812, + "learning_rate": 6.390668724971479e-06, + "loss": 0.7228, + "step": 20240 + }, + { + "epoch": 1.4084996870000697, + "grad_norm": 1.9781010150909424, + "learning_rate": 6.386781270924968e-06, + "loss": 0.6831, + "step": 20250 + }, + { + "epoch": 1.4091952424010572, + "grad_norm": 1.7199050188064575, + "learning_rate": 6.382892908506422e-06, + "loss": 0.6981, + "step": 20260 + }, + { + "epoch": 1.409890797802045, + "grad_norm": 1.879131555557251, + "learning_rate": 6.379003640262806e-06, + "loss": 0.7204, + "step": 20270 + }, + { + "epoch": 1.4105863532030325, + "grad_norm": 3.1250951290130615, + "learning_rate": 6.375113468741679e-06, + "loss": 0.7272, + "step": 20280 + }, + { + "epoch": 1.4112819086040203, + "grad_norm": 1.811104416847229, + "learning_rate": 6.371222396491184e-06, + "loss": 0.7949, + "step": 20290 + }, + { + "epoch": 1.411977464005008, + "grad_norm": 2.0964925289154053, + "learning_rate": 6.367330426060059e-06, + "loss": 0.7445, + "step": 20300 + }, + { + "epoch": 1.4126730194059958, + 
"grad_norm": 2.1352076530456543, + "learning_rate": 6.363437559997631e-06, + "loss": 0.7547, + "step": 20310 + }, + { + "epoch": 1.4133685748069833, + "grad_norm": 2.15402889251709, + "learning_rate": 6.359543800853811e-06, + "loss": 0.7322, + "step": 20320 + }, + { + "epoch": 1.414064130207971, + "grad_norm": 2.6780800819396973, + "learning_rate": 6.355649151179097e-06, + "loss": 0.7036, + "step": 20330 + }, + { + "epoch": 1.4147596856089588, + "grad_norm": 2.721198558807373, + "learning_rate": 6.35175361352457e-06, + "loss": 0.7719, + "step": 20340 + }, + { + "epoch": 1.4154552410099464, + "grad_norm": 3.478736400604248, + "learning_rate": 6.347857190441893e-06, + "loss": 0.7889, + "step": 20350 + }, + { + "epoch": 1.416150796410934, + "grad_norm": 2.050713062286377, + "learning_rate": 6.343959884483305e-06, + "loss": 0.7619, + "step": 20360 + }, + { + "epoch": 1.4168463518119219, + "grad_norm": 2.044745922088623, + "learning_rate": 6.3400616982016305e-06, + "loss": 0.7859, + "step": 20370 + }, + { + "epoch": 1.4175419072129096, + "grad_norm": 1.4889652729034424, + "learning_rate": 6.336162634150264e-06, + "loss": 0.7192, + "step": 20380 + }, + { + "epoch": 1.4182374626138972, + "grad_norm": 1.6725742816925049, + "learning_rate": 6.332262694883179e-06, + "loss": 0.7379, + "step": 20390 + }, + { + "epoch": 1.418933018014885, + "grad_norm": 1.6195837259292603, + "learning_rate": 6.32836188295492e-06, + "loss": 0.7267, + "step": 20400 + }, + { + "epoch": 1.4196285734158725, + "grad_norm": 1.7982573509216309, + "learning_rate": 6.324460200920604e-06, + "loss": 0.6903, + "step": 20410 + }, + { + "epoch": 1.4203241288168602, + "grad_norm": 1.5877782106399536, + "learning_rate": 6.320557651335919e-06, + "loss": 0.6991, + "step": 20420 + }, + { + "epoch": 1.421019684217848, + "grad_norm": 1.7605799436569214, + "learning_rate": 6.31665423675712e-06, + "loss": 0.7542, + "step": 20430 + }, + { + "epoch": 1.4217152396188357, + "grad_norm": 1.6749597787857056, + "learning_rate": 6.3127499597410295e-06, + "loss": 0.7409, + "step": 20440 + }, + { + "epoch": 1.4224107950198233, + "grad_norm": 2.0786211490631104, + "learning_rate": 6.30884482284503e-06, + "loss": 0.7441, + "step": 20450 + }, + { + "epoch": 1.423106350420811, + "grad_norm": 2.1792144775390625, + "learning_rate": 6.304938828627078e-06, + "loss": 0.7667, + "step": 20460 + }, + { + "epoch": 1.4238019058217988, + "grad_norm": 2.0480399131774902, + "learning_rate": 6.301031979645682e-06, + "loss": 0.6738, + "step": 20470 + }, + { + "epoch": 1.4244974612227863, + "grad_norm": 3.9135844707489014, + "learning_rate": 6.297124278459912e-06, + "loss": 0.6589, + "step": 20480 + }, + { + "epoch": 1.425193016623774, + "grad_norm": 1.8304831981658936, + "learning_rate": 6.293215727629398e-06, + "loss": 0.6838, + "step": 20490 + }, + { + "epoch": 1.4258885720247618, + "grad_norm": 3.071925640106201, + "learning_rate": 6.28930632971433e-06, + "loss": 0.6977, + "step": 20500 + }, + { + "epoch": 1.4258885720247618, + "eval_loss": 0.9510756134986877, + "eval_runtime": 4582.8717, + "eval_samples_per_second": 3.963, + "eval_steps_per_second": 0.661, + "step": 20500 + }, + { + "epoch": 1.4265841274257496, + "grad_norm": 2.166144371032715, + "learning_rate": 6.285396087275444e-06, + "loss": 0.7177, + "step": 20510 + }, + { + "epoch": 1.4272796828267371, + "grad_norm": 4.637923717498779, + "learning_rate": 6.281485002874036e-06, + "loss": 0.6541, + "step": 20520 + }, + { + "epoch": 1.4279752382277249, + "grad_norm": 3.2705001831054688, + "learning_rate": 
6.277573079071955e-06, + "loss": 0.7729, + "step": 20530 + }, + { + "epoch": 1.4286707936287124, + "grad_norm": 1.8078502416610718, + "learning_rate": 6.273660318431591e-06, + "loss": 0.7598, + "step": 20540 + }, + { + "epoch": 1.4293663490297002, + "grad_norm": 1.602643609046936, + "learning_rate": 6.26974672351589e-06, + "loss": 0.6922, + "step": 20550 + }, + { + "epoch": 1.430061904430688, + "grad_norm": 1.9566184282302856, + "learning_rate": 6.265832296888344e-06, + "loss": 0.7096, + "step": 20560 + }, + { + "epoch": 1.4307574598316757, + "grad_norm": 1.5902973413467407, + "learning_rate": 6.261917041112988e-06, + "loss": 0.7283, + "step": 20570 + }, + { + "epoch": 1.4314530152326632, + "grad_norm": 4.408752918243408, + "learning_rate": 6.258000958754396e-06, + "loss": 0.7306, + "step": 20580 + }, + { + "epoch": 1.432148570633651, + "grad_norm": 1.7607803344726562, + "learning_rate": 6.254084052377691e-06, + "loss": 0.7709, + "step": 20590 + }, + { + "epoch": 1.4328441260346387, + "grad_norm": 3.961003065109253, + "learning_rate": 6.250166324548534e-06, + "loss": 0.788, + "step": 20600 + }, + { + "epoch": 1.4335396814356263, + "grad_norm": 1.646506428718567, + "learning_rate": 6.246247777833116e-06, + "loss": 0.7136, + "step": 20610 + }, + { + "epoch": 1.434235236836614, + "grad_norm": 2.2345190048217773, + "learning_rate": 6.2423284147981755e-06, + "loss": 0.7475, + "step": 20620 + }, + { + "epoch": 1.4349307922376018, + "grad_norm": 3.497584581375122, + "learning_rate": 6.23840823801098e-06, + "loss": 0.7418, + "step": 20630 + }, + { + "epoch": 1.4356263476385895, + "grad_norm": 1.9689849615097046, + "learning_rate": 6.2344872500393314e-06, + "loss": 0.7259, + "step": 20640 + }, + { + "epoch": 1.436321903039577, + "grad_norm": 1.890865683555603, + "learning_rate": 6.230565453451562e-06, + "loss": 0.7469, + "step": 20650 + }, + { + "epoch": 1.4370174584405648, + "grad_norm": 1.7204464673995972, + "learning_rate": 6.226642850816533e-06, + "loss": 0.7458, + "step": 20660 + }, + { + "epoch": 1.4377130138415524, + "grad_norm": 2.1306939125061035, + "learning_rate": 6.2227194447036374e-06, + "loss": 0.7306, + "step": 20670 + }, + { + "epoch": 1.4384085692425401, + "grad_norm": 1.5410900115966797, + "learning_rate": 6.21879523768279e-06, + "loss": 0.7255, + "step": 20680 + }, + { + "epoch": 1.4391041246435279, + "grad_norm": 1.9950885772705078, + "learning_rate": 6.214870232324432e-06, + "loss": 0.745, + "step": 20690 + }, + { + "epoch": 1.4397996800445156, + "grad_norm": 2.066538095474243, + "learning_rate": 6.21094443119953e-06, + "loss": 0.6834, + "step": 20700 + }, + { + "epoch": 1.4404952354455032, + "grad_norm": 3.989934206008911, + "learning_rate": 6.207017836879565e-06, + "loss": 0.7371, + "step": 20710 + }, + { + "epoch": 1.441190790846491, + "grad_norm": 3.8022444248199463, + "learning_rate": 6.2030904519365475e-06, + "loss": 0.7482, + "step": 20720 + }, + { + "epoch": 1.4418863462474787, + "grad_norm": 2.0100038051605225, + "learning_rate": 6.199162278942997e-06, + "loss": 0.6935, + "step": 20730 + }, + { + "epoch": 1.4425819016484662, + "grad_norm": 2.588963747024536, + "learning_rate": 6.1952333204719525e-06, + "loss": 0.7771, + "step": 20740 + }, + { + "epoch": 1.443277457049454, + "grad_norm": 1.7764983177185059, + "learning_rate": 6.19130357909697e-06, + "loss": 0.7412, + "step": 20750 + }, + { + "epoch": 1.4439730124504417, + "grad_norm": 1.775450348854065, + "learning_rate": 6.187373057392115e-06, + "loss": 0.7713, + "step": 20760 + }, + { + "epoch": 1.4446685678514295, + 
"grad_norm": 2.052152633666992, + "learning_rate": 6.183441757931963e-06, + "loss": 0.7139, + "step": 20770 + }, + { + "epoch": 1.445364123252417, + "grad_norm": 1.8105037212371826, + "learning_rate": 6.179509683291605e-06, + "loss": 0.7932, + "step": 20780 + }, + { + "epoch": 1.4460596786534048, + "grad_norm": 2.3451061248779297, + "learning_rate": 6.175576836046632e-06, + "loss": 0.7394, + "step": 20790 + }, + { + "epoch": 1.4467552340543923, + "grad_norm": 1.80269455909729, + "learning_rate": 6.171643218773145e-06, + "loss": 0.6688, + "step": 20800 + }, + { + "epoch": 1.44745078945538, + "grad_norm": 1.7092301845550537, + "learning_rate": 6.167708834047752e-06, + "loss": 0.7151, + "step": 20810 + }, + { + "epoch": 1.4481463448563678, + "grad_norm": 2.0103344917297363, + "learning_rate": 6.163773684447561e-06, + "loss": 0.6962, + "step": 20820 + }, + { + "epoch": 1.4488419002573556, + "grad_norm": 1.5856451988220215, + "learning_rate": 6.159837772550179e-06, + "loss": 0.7534, + "step": 20830 + }, + { + "epoch": 1.4495374556583431, + "grad_norm": 1.7007986307144165, + "learning_rate": 6.155901100933713e-06, + "loss": 0.7409, + "step": 20840 + }, + { + "epoch": 1.450233011059331, + "grad_norm": 2.0526061058044434, + "learning_rate": 6.1519636721767725e-06, + "loss": 0.7785, + "step": 20850 + }, + { + "epoch": 1.4509285664603186, + "grad_norm": 1.6532962322235107, + "learning_rate": 6.148025488858458e-06, + "loss": 0.6688, + "step": 20860 + }, + { + "epoch": 1.4516241218613062, + "grad_norm": 1.8831676244735718, + "learning_rate": 6.144086553558364e-06, + "loss": 0.7279, + "step": 20870 + }, + { + "epoch": 1.452319677262294, + "grad_norm": 1.8166247606277466, + "learning_rate": 6.14014686885658e-06, + "loss": 0.7242, + "step": 20880 + }, + { + "epoch": 1.4530152326632817, + "grad_norm": 1.775024652481079, + "learning_rate": 6.136206437333688e-06, + "loss": 0.6936, + "step": 20890 + }, + { + "epoch": 1.4537107880642695, + "grad_norm": 1.7720396518707275, + "learning_rate": 6.1322652615707535e-06, + "loss": 0.7382, + "step": 20900 + }, + { + "epoch": 1.454406343465257, + "grad_norm": 3.106752872467041, + "learning_rate": 6.128323344149334e-06, + "loss": 0.7615, + "step": 20910 + }, + { + "epoch": 1.4551018988662447, + "grad_norm": 3.2957632541656494, + "learning_rate": 6.124380687651472e-06, + "loss": 0.7675, + "step": 20920 + }, + { + "epoch": 1.4557974542672323, + "grad_norm": 2.1493892669677734, + "learning_rate": 6.120437294659692e-06, + "loss": 0.747, + "step": 20930 + }, + { + "epoch": 1.45649300966822, + "grad_norm": 1.8185917139053345, + "learning_rate": 6.116493167757005e-06, + "loss": 0.7113, + "step": 20940 + }, + { + "epoch": 1.4571885650692078, + "grad_norm": 2.4342501163482666, + "learning_rate": 6.112548309526899e-06, + "loss": 0.7641, + "step": 20950 + }, + { + "epoch": 1.4578841204701956, + "grad_norm": 1.4295082092285156, + "learning_rate": 6.108602722553343e-06, + "loss": 0.703, + "step": 20960 + }, + { + "epoch": 1.458579675871183, + "grad_norm": 1.7576842308044434, + "learning_rate": 6.1046564094207805e-06, + "loss": 0.743, + "step": 20970 + }, + { + "epoch": 1.4592752312721708, + "grad_norm": 1.5543948411941528, + "learning_rate": 6.100709372714136e-06, + "loss": 0.7698, + "step": 20980 + }, + { + "epoch": 1.4599707866731584, + "grad_norm": 1.6807893514633179, + "learning_rate": 6.096761615018802e-06, + "loss": 0.7258, + "step": 20990 + }, + { + "epoch": 1.4606663420741461, + "grad_norm": 4.937137126922607, + "learning_rate": 6.092813138920647e-06, + "loss": 0.7529, + 
"step": 21000 + }, + { + "epoch": 1.4606663420741461, + "eval_loss": 0.9476534128189087, + "eval_runtime": 4600.5007, + "eval_samples_per_second": 3.948, + "eval_steps_per_second": 0.658, + "step": 21000 + }, + { + "epoch": 1.461361897475134, + "grad_norm": 4.198197364807129, + "learning_rate": 6.08886394700601e-06, + "loss": 0.7456, + "step": 21010 + }, + { + "epoch": 1.4620574528761217, + "grad_norm": 3.6577060222625732, + "learning_rate": 6.084914041861697e-06, + "loss": 0.7538, + "step": 21020 + }, + { + "epoch": 1.4627530082771092, + "grad_norm": 1.5373015403747559, + "learning_rate": 6.080963426074982e-06, + "loss": 0.7044, + "step": 21030 + }, + { + "epoch": 1.463448563678097, + "grad_norm": 1.7912424802780151, + "learning_rate": 6.077012102233606e-06, + "loss": 0.6859, + "step": 21040 + }, + { + "epoch": 1.4641441190790847, + "grad_norm": 1.6238924264907837, + "learning_rate": 6.073060072925772e-06, + "loss": 0.7085, + "step": 21050 + }, + { + "epoch": 1.4648396744800722, + "grad_norm": 3.9504141807556152, + "learning_rate": 6.069107340740143e-06, + "loss": 0.7079, + "step": 21060 + }, + { + "epoch": 1.46553522988106, + "grad_norm": 1.4167453050613403, + "learning_rate": 6.065153908265848e-06, + "loss": 0.6856, + "step": 21070 + }, + { + "epoch": 1.4662307852820478, + "grad_norm": 2.064105987548828, + "learning_rate": 6.061199778092473e-06, + "loss": 0.7416, + "step": 21080 + }, + { + "epoch": 1.4669263406830355, + "grad_norm": 1.6333165168762207, + "learning_rate": 6.057244952810053e-06, + "loss": 0.7134, + "step": 21090 + }, + { + "epoch": 1.467621896084023, + "grad_norm": 1.773455023765564, + "learning_rate": 6.053289435009093e-06, + "loss": 0.7015, + "step": 21100 + }, + { + "epoch": 1.4683174514850108, + "grad_norm": 2.5223464965820312, + "learning_rate": 6.049333227280539e-06, + "loss": 0.6906, + "step": 21110 + }, + { + "epoch": 1.4690130068859983, + "grad_norm": 1.5233508348464966, + "learning_rate": 6.045376332215793e-06, + "loss": 0.6682, + "step": 21120 + }, + { + "epoch": 1.469708562286986, + "grad_norm": 1.99330472946167, + "learning_rate": 6.04141875240671e-06, + "loss": 0.7203, + "step": 21130 + }, + { + "epoch": 1.4704041176879739, + "grad_norm": 1.8008581399917603, + "learning_rate": 6.0374604904455925e-06, + "loss": 0.7397, + "step": 21140 + }, + { + "epoch": 1.4710996730889616, + "grad_norm": 1.29567551612854, + "learning_rate": 6.0335015489251845e-06, + "loss": 0.666, + "step": 21150 + }, + { + "epoch": 1.4717952284899491, + "grad_norm": 1.7180304527282715, + "learning_rate": 6.029541930438681e-06, + "loss": 0.7397, + "step": 21160 + }, + { + "epoch": 1.472490783890937, + "grad_norm": 4.854224681854248, + "learning_rate": 6.025581637579719e-06, + "loss": 0.7615, + "step": 21170 + }, + { + "epoch": 1.4731863392919247, + "grad_norm": 2.4868335723876953, + "learning_rate": 6.021620672942376e-06, + "loss": 0.7469, + "step": 21180 + }, + { + "epoch": 1.4738818946929122, + "grad_norm": 2.0224921703338623, + "learning_rate": 6.017659039121172e-06, + "loss": 0.7203, + "step": 21190 + }, + { + "epoch": 1.4745774500939, + "grad_norm": 1.8479740619659424, + "learning_rate": 6.0136967387110625e-06, + "loss": 0.7651, + "step": 21200 + }, + { + "epoch": 1.4752730054948877, + "grad_norm": 1.739109754562378, + "learning_rate": 6.009733774307441e-06, + "loss": 0.7133, + "step": 21210 + }, + { + "epoch": 1.4759685608958755, + "grad_norm": 2.0570037364959717, + "learning_rate": 6.005770148506135e-06, + "loss": 0.7219, + "step": 21220 + }, + { + "epoch": 1.476664116296863, + 
"grad_norm": 1.8864156007766724, + "learning_rate": 6.0018058639034086e-06, + "loss": 0.6785, + "step": 21230 + }, + { + "epoch": 1.4773596716978508, + "grad_norm": 1.829779863357544, + "learning_rate": 5.997840923095953e-06, + "loss": 0.7225, + "step": 21240 + }, + { + "epoch": 1.4780552270988383, + "grad_norm": 1.6666828393936157, + "learning_rate": 5.993875328680888e-06, + "loss": 0.7016, + "step": 21250 + }, + { + "epoch": 1.478750782499826, + "grad_norm": 2.3819828033447266, + "learning_rate": 5.98990908325577e-06, + "loss": 0.7137, + "step": 21260 + }, + { + "epoch": 1.4794463379008138, + "grad_norm": 1.6609293222427368, + "learning_rate": 5.985942189418574e-06, + "loss": 0.7187, + "step": 21270 + }, + { + "epoch": 1.4801418933018016, + "grad_norm": 2.9082655906677246, + "learning_rate": 5.981974649767702e-06, + "loss": 0.7616, + "step": 21280 + }, + { + "epoch": 1.480837448702789, + "grad_norm": 2.417423963546753, + "learning_rate": 5.97800646690198e-06, + "loss": 0.7056, + "step": 21290 + }, + { + "epoch": 1.4815330041037769, + "grad_norm": 1.529136300086975, + "learning_rate": 5.974037643420654e-06, + "loss": 0.7275, + "step": 21300 + }, + { + "epoch": 1.4822285595047646, + "grad_norm": 2.194647789001465, + "learning_rate": 5.970068181923388e-06, + "loss": 0.7047, + "step": 21310 + }, + { + "epoch": 1.4829241149057522, + "grad_norm": 1.921481966972351, + "learning_rate": 5.96609808501027e-06, + "loss": 0.7449, + "step": 21320 + }, + { + "epoch": 1.48361967030674, + "grad_norm": 1.8697667121887207, + "learning_rate": 5.962127355281798e-06, + "loss": 0.7498, + "step": 21330 + }, + { + "epoch": 1.4843152257077277, + "grad_norm": 1.6291097402572632, + "learning_rate": 5.9581559953388866e-06, + "loss": 0.724, + "step": 21340 + }, + { + "epoch": 1.4850107811087154, + "grad_norm": 1.7014708518981934, + "learning_rate": 5.954184007782864e-06, + "loss": 0.683, + "step": 21350 + }, + { + "epoch": 1.485706336509703, + "grad_norm": 3.9686367511749268, + "learning_rate": 5.950211395215468e-06, + "loss": 0.7221, + "step": 21360 + }, + { + "epoch": 1.4864018919106907, + "grad_norm": 1.637515902519226, + "learning_rate": 5.946238160238847e-06, + "loss": 0.7476, + "step": 21370 + }, + { + "epoch": 1.4870974473116783, + "grad_norm": 1.934130072593689, + "learning_rate": 5.9422643054555575e-06, + "loss": 0.7345, + "step": 21380 + }, + { + "epoch": 1.487793002712666, + "grad_norm": 2.1208794116973877, + "learning_rate": 5.93828983346856e-06, + "loss": 0.7548, + "step": 21390 + }, + { + "epoch": 1.4884885581136538, + "grad_norm": 2.0168259143829346, + "learning_rate": 5.934314746881221e-06, + "loss": 0.7128, + "step": 21400 + }, + { + "epoch": 1.4891841135146415, + "grad_norm": 1.562268614768982, + "learning_rate": 5.930339048297308e-06, + "loss": 0.7344, + "step": 21410 + }, + { + "epoch": 1.489879668915629, + "grad_norm": 1.5554943084716797, + "learning_rate": 5.926362740320995e-06, + "loss": 0.7245, + "step": 21420 + }, + { + "epoch": 1.4905752243166168, + "grad_norm": 1.6446715593338013, + "learning_rate": 5.922385825556844e-06, + "loss": 0.6771, + "step": 21430 + }, + { + "epoch": 1.4912707797176046, + "grad_norm": 5.561330795288086, + "learning_rate": 5.918408306609825e-06, + "loss": 0.7409, + "step": 21440 + }, + { + "epoch": 1.4919663351185921, + "grad_norm": 1.831284523010254, + "learning_rate": 5.9144301860852984e-06, + "loss": 0.8073, + "step": 21450 + }, + { + "epoch": 1.4926618905195799, + "grad_norm": 1.792970061302185, + "learning_rate": 5.910451466589022e-06, + "loss": 0.8093, + 
"step": 21460 + }, + { + "epoch": 1.4933574459205676, + "grad_norm": 2.104931116104126, + "learning_rate": 5.906472150727143e-06, + "loss": 0.7164, + "step": 21470 + }, + { + "epoch": 1.4940530013215554, + "grad_norm": 2.850149393081665, + "learning_rate": 5.902492241106197e-06, + "loss": 0.7381, + "step": 21480 + }, + { + "epoch": 1.494748556722543, + "grad_norm": 1.5714112520217896, + "learning_rate": 5.898511740333118e-06, + "loss": 0.7208, + "step": 21490 + }, + { + "epoch": 1.4954441121235307, + "grad_norm": 1.9905948638916016, + "learning_rate": 5.8945306510152165e-06, + "loss": 0.7254, + "step": 21500 + }, + { + "epoch": 1.4954441121235307, + "eval_loss": 0.9448292255401611, + "eval_runtime": 4582.1423, + "eval_samples_per_second": 3.963, + "eval_steps_per_second": 0.661, + "step": 21500 + }, + { + "epoch": 1.4961396675245182, + "grad_norm": 1.6383960247039795, + "learning_rate": 5.890548975760193e-06, + "loss": 0.6543, + "step": 21510 + }, + { + "epoch": 1.496835222925506, + "grad_norm": 1.7606786489486694, + "learning_rate": 5.8865667171761345e-06, + "loss": 0.7583, + "step": 21520 + }, + { + "epoch": 1.4975307783264937, + "grad_norm": 2.0935862064361572, + "learning_rate": 5.882583877871506e-06, + "loss": 0.806, + "step": 21530 + }, + { + "epoch": 1.4982263337274815, + "grad_norm": 3.1830670833587646, + "learning_rate": 5.878600460455152e-06, + "loss": 0.7381, + "step": 21540 + }, + { + "epoch": 1.498921889128469, + "grad_norm": 1.5818910598754883, + "learning_rate": 5.874616467536301e-06, + "loss": 0.6921, + "step": 21550 + }, + { + "epoch": 1.4996174445294568, + "grad_norm": 2.901428699493408, + "learning_rate": 5.870631901724556e-06, + "loss": 0.6895, + "step": 21560 + }, + { + "epoch": 1.5003129999304443, + "grad_norm": 3.626967430114746, + "learning_rate": 5.866646765629891e-06, + "loss": 0.7476, + "step": 21570 + }, + { + "epoch": 1.501008555331432, + "grad_norm": 1.9352563619613647, + "learning_rate": 5.86266106186266e-06, + "loss": 0.7713, + "step": 21580 + }, + { + "epoch": 1.5017041107324198, + "grad_norm": 2.051551342010498, + "learning_rate": 5.8586747930335856e-06, + "loss": 0.7774, + "step": 21590 + }, + { + "epoch": 1.5023996661334076, + "grad_norm": 1.660704493522644, + "learning_rate": 5.85468796175376e-06, + "loss": 0.7529, + "step": 21600 + }, + { + "epoch": 1.5030952215343953, + "grad_norm": 1.5530169010162354, + "learning_rate": 5.850700570634646e-06, + "loss": 0.7158, + "step": 21610 + }, + { + "epoch": 1.5037907769353829, + "grad_norm": 1.5957791805267334, + "learning_rate": 5.846712622288071e-06, + "loss": 0.7298, + "step": 21620 + }, + { + "epoch": 1.5044863323363706, + "grad_norm": 2.233914375305176, + "learning_rate": 5.8427241193262296e-06, + "loss": 0.7073, + "step": 21630 + }, + { + "epoch": 1.5051818877373582, + "grad_norm": 1.295982837677002, + "learning_rate": 5.838735064361677e-06, + "loss": 0.7559, + "step": 21640 + }, + { + "epoch": 1.505877443138346, + "grad_norm": 1.8416777849197388, + "learning_rate": 5.8347454600073315e-06, + "loss": 0.7464, + "step": 21650 + }, + { + "epoch": 1.5065729985393337, + "grad_norm": 2.106804847717285, + "learning_rate": 5.830755308876473e-06, + "loss": 0.7039, + "step": 21660 + }, + { + "epoch": 1.5072685539403214, + "grad_norm": 1.4277538061141968, + "learning_rate": 5.826764613582734e-06, + "loss": 0.7718, + "step": 21670 + }, + { + "epoch": 1.5079641093413092, + "grad_norm": 1.7527358531951904, + "learning_rate": 5.822773376740111e-06, + "loss": 0.7082, + "step": 21680 + }, + { + "epoch": 1.5086596647422967, 
+ "grad_norm": 5.250451564788818, + "learning_rate": 5.818781600962948e-06, + "loss": 0.7103, + "step": 21690 + }, + { + "epoch": 1.5093552201432843, + "grad_norm": 5.3939948081970215, + "learning_rate": 5.814789288865949e-06, + "loss": 0.7485, + "step": 21700 + }, + { + "epoch": 1.510050775544272, + "grad_norm": 1.9694483280181885, + "learning_rate": 5.810796443064161e-06, + "loss": 0.6974, + "step": 21710 + }, + { + "epoch": 1.5107463309452598, + "grad_norm": 1.8791052103042603, + "learning_rate": 5.80680306617299e-06, + "loss": 0.756, + "step": 21720 + }, + { + "epoch": 1.5114418863462475, + "grad_norm": 1.5616987943649292, + "learning_rate": 5.802809160808181e-06, + "loss": 0.7228, + "step": 21730 + }, + { + "epoch": 1.5121374417472353, + "grad_norm": 2.1435296535491943, + "learning_rate": 5.798814729585833e-06, + "loss": 0.7293, + "step": 21740 + }, + { + "epoch": 1.5128329971482228, + "grad_norm": 1.9406498670578003, + "learning_rate": 5.794819775122385e-06, + "loss": 0.7118, + "step": 21750 + }, + { + "epoch": 1.5135285525492106, + "grad_norm": 1.7723698616027832, + "learning_rate": 5.790824300034617e-06, + "loss": 0.7105, + "step": 21760 + }, + { + "epoch": 1.5142241079501981, + "grad_norm": 1.7424460649490356, + "learning_rate": 5.786828306939653e-06, + "loss": 0.6843, + "step": 21770 + }, + { + "epoch": 1.5149196633511859, + "grad_norm": 1.8500877618789673, + "learning_rate": 5.782831798454958e-06, + "loss": 0.7809, + "step": 21780 + }, + { + "epoch": 1.5156152187521736, + "grad_norm": 5.153543949127197, + "learning_rate": 5.77883477719833e-06, + "loss": 0.7105, + "step": 21790 + }, + { + "epoch": 1.5163107741531614, + "grad_norm": 1.8209407329559326, + "learning_rate": 5.7748372457879055e-06, + "loss": 0.7176, + "step": 21800 + }, + { + "epoch": 1.517006329554149, + "grad_norm": 1.5030596256256104, + "learning_rate": 5.770839206842158e-06, + "loss": 0.6929, + "step": 21810 + }, + { + "epoch": 1.5177018849551367, + "grad_norm": 2.289888858795166, + "learning_rate": 5.766840662979887e-06, + "loss": 0.7443, + "step": 21820 + }, + { + "epoch": 1.5183974403561242, + "grad_norm": 1.20023512840271, + "learning_rate": 5.762841616820226e-06, + "loss": 0.7524, + "step": 21830 + }, + { + "epoch": 1.519092995757112, + "grad_norm": 3.095374822616577, + "learning_rate": 5.75884207098264e-06, + "loss": 0.6703, + "step": 21840 + }, + { + "epoch": 1.5197885511580997, + "grad_norm": 1.7363145351409912, + "learning_rate": 5.754842028086919e-06, + "loss": 0.7915, + "step": 21850 + }, + { + "epoch": 1.5204841065590875, + "grad_norm": 2.38399076461792, + "learning_rate": 5.750841490753174e-06, + "loss": 0.7544, + "step": 21860 + }, + { + "epoch": 1.5211796619600753, + "grad_norm": 3.968294143676758, + "learning_rate": 5.746840461601849e-06, + "loss": 0.7177, + "step": 21870 + }, + { + "epoch": 1.5218752173610628, + "grad_norm": 1.700698971748352, + "learning_rate": 5.742838943253706e-06, + "loss": 0.7554, + "step": 21880 + }, + { + "epoch": 1.5225707727620506, + "grad_norm": 1.588012933731079, + "learning_rate": 5.738836938329823e-06, + "loss": 0.7058, + "step": 21890 + }, + { + "epoch": 1.523266328163038, + "grad_norm": 1.854986310005188, + "learning_rate": 5.734834449451603e-06, + "loss": 0.7716, + "step": 21900 + }, + { + "epoch": 1.5239618835640258, + "grad_norm": 2.1583311557769775, + "learning_rate": 5.730831479240763e-06, + "loss": 0.7832, + "step": 21910 + }, + { + "epoch": 1.5246574389650136, + "grad_norm": 1.5693004131317139, + "learning_rate": 5.726828030319337e-06, + "loss": 0.6948, + 
"step": 21920 + }, + { + "epoch": 1.5253529943660014, + "grad_norm": 1.9345656633377075, + "learning_rate": 5.72282410530967e-06, + "loss": 0.7099, + "step": 21930 + }, + { + "epoch": 1.526048549766989, + "grad_norm": 1.647325873374939, + "learning_rate": 5.718819706834422e-06, + "loss": 0.7158, + "step": 21940 + }, + { + "epoch": 1.5267441051679767, + "grad_norm": 2.462768077850342, + "learning_rate": 5.71481483751656e-06, + "loss": 0.7158, + "step": 21950 + }, + { + "epoch": 1.5274396605689642, + "grad_norm": 3.6859002113342285, + "learning_rate": 5.710809499979362e-06, + "loss": 0.6769, + "step": 21960 + }, + { + "epoch": 1.528135215969952, + "grad_norm": 2.175856351852417, + "learning_rate": 5.706803696846411e-06, + "loss": 0.7219, + "step": 21970 + }, + { + "epoch": 1.5288307713709397, + "grad_norm": 3.8851959705352783, + "learning_rate": 5.702797430741596e-06, + "loss": 0.7178, + "step": 21980 + }, + { + "epoch": 1.5295263267719275, + "grad_norm": 1.8825372457504272, + "learning_rate": 5.698790704289108e-06, + "loss": 0.7241, + "step": 21990 + }, + { + "epoch": 1.5302218821729152, + "grad_norm": 2.9605417251586914, + "learning_rate": 5.694783520113442e-06, + "loss": 0.6541, + "step": 22000 + }, + { + "epoch": 1.5302218821729152, + "eval_loss": 0.9445372223854065, + "eval_runtime": 4602.1276, + "eval_samples_per_second": 3.946, + "eval_steps_per_second": 0.658, + "step": 22000 + }, + { + "epoch": 1.5309174375739028, + "grad_norm": 1.465183973312378, + "learning_rate": 5.690775880839389e-06, + "loss": 0.6664, + "step": 22010 + }, + { + "epoch": 1.5316129929748903, + "grad_norm": 1.9195865392684937, + "learning_rate": 5.686767789092041e-06, + "loss": 0.6942, + "step": 22020 + }, + { + "epoch": 1.532308548375878, + "grad_norm": 1.6929734945297241, + "learning_rate": 5.6827592474967875e-06, + "loss": 0.753, + "step": 22030 + }, + { + "epoch": 1.5330041037768658, + "grad_norm": 2.4073352813720703, + "learning_rate": 5.678750258679309e-06, + "loss": 0.682, + "step": 22040 + }, + { + "epoch": 1.5336996591778536, + "grad_norm": 2.4762394428253174, + "learning_rate": 5.6747408252655815e-06, + "loss": 0.746, + "step": 22050 + }, + { + "epoch": 1.5343952145788413, + "grad_norm": 1.49760103225708, + "learning_rate": 5.67073094988187e-06, + "loss": 0.7174, + "step": 22060 + }, + { + "epoch": 1.5350907699798289, + "grad_norm": 1.6571050882339478, + "learning_rate": 5.6667206351547325e-06, + "loss": 0.6949, + "step": 22070 + }, + { + "epoch": 1.5357863253808166, + "grad_norm": 3.272343873977661, + "learning_rate": 5.662709883711011e-06, + "loss": 0.7048, + "step": 22080 + }, + { + "epoch": 1.5364818807818041, + "grad_norm": 1.4857062101364136, + "learning_rate": 5.658698698177837e-06, + "loss": 0.7277, + "step": 22090 + }, + { + "epoch": 1.537177436182792, + "grad_norm": 3.7700612545013428, + "learning_rate": 5.654687081182624e-06, + "loss": 0.71, + "step": 22100 + }, + { + "epoch": 1.5378729915837797, + "grad_norm": 1.4871325492858887, + "learning_rate": 5.650675035353068e-06, + "loss": 0.8077, + "step": 22110 + }, + { + "epoch": 1.5385685469847674, + "grad_norm": 2.4973373413085938, + "learning_rate": 5.646662563317146e-06, + "loss": 0.7147, + "step": 22120 + }, + { + "epoch": 1.5392641023857552, + "grad_norm": 2.874157667160034, + "learning_rate": 5.642649667703119e-06, + "loss": 0.6892, + "step": 22130 + }, + { + "epoch": 1.5399596577867427, + "grad_norm": 2.2530651092529297, + "learning_rate": 5.638636351139518e-06, + "loss": 0.7146, + "step": 22140 + }, + { + "epoch": 1.5406552131877302, + 
"grad_norm": 1.634354591369629, + "learning_rate": 5.634622616255152e-06, + "loss": 0.6797, + "step": 22150 + }, + { + "epoch": 1.541350768588718, + "grad_norm": 2.1415884494781494, + "learning_rate": 5.6306084656791074e-06, + "loss": 0.7046, + "step": 22160 + }, + { + "epoch": 1.5420463239897058, + "grad_norm": 2.0433197021484375, + "learning_rate": 5.626593902040741e-06, + "loss": 0.7086, + "step": 22170 + }, + { + "epoch": 1.5427418793906935, + "grad_norm": 1.9643293619155884, + "learning_rate": 5.622578927969676e-06, + "loss": 0.7444, + "step": 22180 + }, + { + "epoch": 1.5434374347916813, + "grad_norm": 4.584105491638184, + "learning_rate": 5.618563546095812e-06, + "loss": 0.6908, + "step": 22190 + }, + { + "epoch": 1.5441329901926688, + "grad_norm": 1.7956854104995728, + "learning_rate": 5.614547759049311e-06, + "loss": 0.7074, + "step": 22200 + }, + { + "epoch": 1.5448285455936566, + "grad_norm": 2.214810848236084, + "learning_rate": 5.610531569460599e-06, + "loss": 0.7014, + "step": 22210 + }, + { + "epoch": 1.545524100994644, + "grad_norm": 1.5626862049102783, + "learning_rate": 5.606514979960372e-06, + "loss": 0.6998, + "step": 22220 + }, + { + "epoch": 1.5462196563956319, + "grad_norm": 6.88908052444458, + "learning_rate": 5.6024979931795786e-06, + "loss": 0.6586, + "step": 22230 + }, + { + "epoch": 1.5469152117966196, + "grad_norm": 3.538445472717285, + "learning_rate": 5.598480611749437e-06, + "loss": 0.7648, + "step": 22240 + }, + { + "epoch": 1.5476107671976074, + "grad_norm": 1.9082159996032715, + "learning_rate": 5.59446283830142e-06, + "loss": 0.7313, + "step": 22250 + }, + { + "epoch": 1.5483063225985951, + "grad_norm": 2.114708185195923, + "learning_rate": 5.590444675467253e-06, + "loss": 0.7203, + "step": 22260 + }, + { + "epoch": 1.5490018779995827, + "grad_norm": 2.481234312057495, + "learning_rate": 5.586426125878926e-06, + "loss": 0.7168, + "step": 22270 + }, + { + "epoch": 1.5496974334005702, + "grad_norm": 1.8632296323776245, + "learning_rate": 5.582407192168672e-06, + "loss": 0.6608, + "step": 22280 + }, + { + "epoch": 1.550392988801558, + "grad_norm": 2.5234501361846924, + "learning_rate": 5.578387876968982e-06, + "loss": 0.6785, + "step": 22290 + }, + { + "epoch": 1.5510885442025457, + "grad_norm": 2.022334337234497, + "learning_rate": 5.574368182912596e-06, + "loss": 0.7222, + "step": 22300 + }, + { + "epoch": 1.5517840996035335, + "grad_norm": 1.9647997617721558, + "learning_rate": 5.5703481126325006e-06, + "loss": 0.7198, + "step": 22310 + }, + { + "epoch": 1.5524796550045212, + "grad_norm": 1.71644127368927, + "learning_rate": 5.56632766876193e-06, + "loss": 0.7032, + "step": 22320 + }, + { + "epoch": 1.5531752104055088, + "grad_norm": 1.9077149629592896, + "learning_rate": 5.562306853934364e-06, + "loss": 0.7258, + "step": 22330 + }, + { + "epoch": 1.5538707658064965, + "grad_norm": 1.7742488384246826, + "learning_rate": 5.558285670783521e-06, + "loss": 0.7203, + "step": 22340 + }, + { + "epoch": 1.554566321207484, + "grad_norm": 1.9356529712677002, + "learning_rate": 5.554264121943367e-06, + "loss": 0.6395, + "step": 22350 + }, + { + "epoch": 1.5552618766084718, + "grad_norm": 6.844680309295654, + "learning_rate": 5.550242210048102e-06, + "loss": 0.673, + "step": 22360 + }, + { + "epoch": 1.5559574320094596, + "grad_norm": 2.2752342224121094, + "learning_rate": 5.546219937732169e-06, + "loss": 0.739, + "step": 22370 + }, + { + "epoch": 1.5566529874104473, + "grad_norm": 2.4868240356445312, + "learning_rate": 5.542197307630241e-06, + "loss": 0.7587, + 
"step": 22380 + }, + { + "epoch": 1.557348542811435, + "grad_norm": 1.8597087860107422, + "learning_rate": 5.538174322377231e-06, + "loss": 0.6924, + "step": 22390 + }, + { + "epoch": 1.5580440982124226, + "grad_norm": 1.9560133218765259, + "learning_rate": 5.534150984608281e-06, + "loss": 0.7723, + "step": 22400 + }, + { + "epoch": 1.5587396536134102, + "grad_norm": 1.5390968322753906, + "learning_rate": 5.5301272969587665e-06, + "loss": 0.6678, + "step": 22410 + }, + { + "epoch": 1.559435209014398, + "grad_norm": 2.4028687477111816, + "learning_rate": 5.526103262064289e-06, + "loss": 0.7021, + "step": 22420 + }, + { + "epoch": 1.5601307644153857, + "grad_norm": 4.753103733062744, + "learning_rate": 5.522078882560679e-06, + "loss": 0.685, + "step": 22430 + }, + { + "epoch": 1.5608263198163734, + "grad_norm": 2.0321872234344482, + "learning_rate": 5.518054161083994e-06, + "loss": 0.7527, + "step": 22440 + }, + { + "epoch": 1.5615218752173612, + "grad_norm": 2.0993666648864746, + "learning_rate": 5.514029100270517e-06, + "loss": 0.7517, + "step": 22450 + }, + { + "epoch": 1.5622174306183487, + "grad_norm": 2.7196967601776123, + "learning_rate": 5.5100037027567476e-06, + "loss": 0.7197, + "step": 22460 + }, + { + "epoch": 1.5629129860193365, + "grad_norm": 2.2761762142181396, + "learning_rate": 5.5059779711794085e-06, + "loss": 0.7513, + "step": 22470 + }, + { + "epoch": 1.563608541420324, + "grad_norm": 1.7197859287261963, + "learning_rate": 5.501951908175445e-06, + "loss": 0.772, + "step": 22480 + }, + { + "epoch": 1.5643040968213118, + "grad_norm": 1.9218417406082153, + "learning_rate": 5.497925516382014e-06, + "loss": 0.7395, + "step": 22490 + }, + { + "epoch": 1.5649996522222995, + "grad_norm": 2.242429494857788, + "learning_rate": 5.493898798436489e-06, + "loss": 0.6669, + "step": 22500 + }, + { + "epoch": 1.5649996522222995, + "eval_loss": 0.9420145153999329, + "eval_runtime": 4616.7115, + "eval_samples_per_second": 3.934, + "eval_steps_per_second": 0.656, + "step": 22500 + }, + { + "epoch": 1.5656952076232873, + "grad_norm": 2.056422472000122, + "learning_rate": 5.4898717569764615e-06, + "loss": 0.6757, + "step": 22510 + }, + { + "epoch": 1.566390763024275, + "grad_norm": 1.7591490745544434, + "learning_rate": 5.48584439463973e-06, + "loss": 0.7396, + "step": 22520 + }, + { + "epoch": 1.5670863184252626, + "grad_norm": 2.302643060684204, + "learning_rate": 5.481816714064304e-06, + "loss": 0.7014, + "step": 22530 + }, + { + "epoch": 1.5677818738262501, + "grad_norm": 2.6441428661346436, + "learning_rate": 5.477788717888404e-06, + "loss": 0.7311, + "step": 22540 + }, + { + "epoch": 1.5684774292272379, + "grad_norm": 2.0407049655914307, + "learning_rate": 5.473760408750455e-06, + "loss": 0.6836, + "step": 22550 + }, + { + "epoch": 1.5691729846282256, + "grad_norm": 1.5858837366104126, + "learning_rate": 5.469731789289087e-06, + "loss": 0.7458, + "step": 22560 + }, + { + "epoch": 1.5698685400292134, + "grad_norm": 1.4491376876831055, + "learning_rate": 5.465702862143133e-06, + "loss": 0.7292, + "step": 22570 + }, + { + "epoch": 1.5705640954302011, + "grad_norm": 1.9504423141479492, + "learning_rate": 5.461673629951629e-06, + "loss": 0.7133, + "step": 22580 + }, + { + "epoch": 1.5712596508311887, + "grad_norm": 1.7462042570114136, + "learning_rate": 5.457644095353812e-06, + "loss": 0.7371, + "step": 22590 + }, + { + "epoch": 1.5719552062321764, + "grad_norm": 1.9718014001846313, + "learning_rate": 5.453614260989113e-06, + "loss": 0.7493, + "step": 22600 + }, + { + "epoch": 
1.572650761633164, + "grad_norm": 1.6007637977600098, + "learning_rate": 5.4495841294971616e-06, + "loss": 0.6824, + "step": 22610 + }, + { + "epoch": 1.5733463170341517, + "grad_norm": 2.305799722671509, + "learning_rate": 5.445553703517783e-06, + "loss": 0.7771, + "step": 22620 + }, + { + "epoch": 1.5740418724351395, + "grad_norm": 2.790587902069092, + "learning_rate": 5.4415229856909936e-06, + "loss": 0.7141, + "step": 22630 + }, + { + "epoch": 1.5747374278361272, + "grad_norm": 3.8134000301361084, + "learning_rate": 5.4374919786570015e-06, + "loss": 0.673, + "step": 22640 + }, + { + "epoch": 1.5754329832371148, + "grad_norm": 1.7364468574523926, + "learning_rate": 5.433460685056204e-06, + "loss": 0.6974, + "step": 22650 + }, + { + "epoch": 1.5761285386381025, + "grad_norm": 1.8595508337020874, + "learning_rate": 5.42942910752919e-06, + "loss": 0.7076, + "step": 22660 + }, + { + "epoch": 1.57682409403909, + "grad_norm": 1.5417016744613647, + "learning_rate": 5.425397248716725e-06, + "loss": 0.7391, + "step": 22670 + }, + { + "epoch": 1.5775196494400778, + "grad_norm": 1.9764926433563232, + "learning_rate": 5.4213651112597685e-06, + "loss": 0.6673, + "step": 22680 + }, + { + "epoch": 1.5782152048410656, + "grad_norm": 3.1826224327087402, + "learning_rate": 5.417332697799459e-06, + "loss": 0.756, + "step": 22690 + }, + { + "epoch": 1.5789107602420533, + "grad_norm": 1.753796100616455, + "learning_rate": 5.413300010977113e-06, + "loss": 0.7899, + "step": 22700 + }, + { + "epoch": 1.579606315643041, + "grad_norm": 2.114006996154785, + "learning_rate": 5.40926705343423e-06, + "loss": 0.7472, + "step": 22710 + }, + { + "epoch": 1.5803018710440286, + "grad_norm": 1.9803937673568726, + "learning_rate": 5.405233827812485e-06, + "loss": 0.6948, + "step": 22720 + }, + { + "epoch": 1.5809974264450164, + "grad_norm": 1.2828075885772705, + "learning_rate": 5.401200336753729e-06, + "loss": 0.7367, + "step": 22730 + }, + { + "epoch": 1.581692981846004, + "grad_norm": 2.3725109100341797, + "learning_rate": 5.397166582899987e-06, + "loss": 0.7417, + "step": 22740 + }, + { + "epoch": 1.5823885372469917, + "grad_norm": 2.061596155166626, + "learning_rate": 5.393132568893454e-06, + "loss": 0.7189, + "step": 22750 + }, + { + "epoch": 1.5830840926479794, + "grad_norm": 1.4808231592178345, + "learning_rate": 5.389098297376499e-06, + "loss": 0.6985, + "step": 22760 + }, + { + "epoch": 1.5837796480489672, + "grad_norm": 1.685606837272644, + "learning_rate": 5.3850637709916596e-06, + "loss": 0.7513, + "step": 22770 + }, + { + "epoch": 1.5844752034499547, + "grad_norm": 1.4564034938812256, + "learning_rate": 5.381028992381637e-06, + "loss": 0.6827, + "step": 22780 + }, + { + "epoch": 1.5851707588509425, + "grad_norm": 1.6720178127288818, + "learning_rate": 5.3769939641892975e-06, + "loss": 0.7116, + "step": 22790 + }, + { + "epoch": 1.58586631425193, + "grad_norm": 1.7182692289352417, + "learning_rate": 5.372958689057677e-06, + "loss": 0.7218, + "step": 22800 + }, + { + "epoch": 1.5865618696529178, + "grad_norm": 1.7749145030975342, + "learning_rate": 5.368923169629965e-06, + "loss": 0.6771, + "step": 22810 + }, + { + "epoch": 1.5872574250539055, + "grad_norm": 1.6280686855316162, + "learning_rate": 5.364887408549515e-06, + "loss": 0.7044, + "step": 22820 + }, + { + "epoch": 1.5879529804548933, + "grad_norm": 2.101978063583374, + "learning_rate": 5.360851408459842e-06, + "loss": 0.7088, + "step": 22830 + }, + { + "epoch": 1.588648535855881, + "grad_norm": 1.6970248222351074, + "learning_rate": 
5.356815172004613e-06, + "loss": 0.732, + "step": 22840 + }, + { + "epoch": 1.5893440912568686, + "grad_norm": 1.9056779146194458, + "learning_rate": 5.352778701827648e-06, + "loss": 0.7039, + "step": 22850 + }, + { + "epoch": 1.5900396466578564, + "grad_norm": 2.2437665462493896, + "learning_rate": 5.348742000572926e-06, + "loss": 0.7491, + "step": 22860 + }, + { + "epoch": 1.590735202058844, + "grad_norm": 1.3764674663543701, + "learning_rate": 5.344705070884575e-06, + "loss": 0.7022, + "step": 22870 + }, + { + "epoch": 1.5914307574598316, + "grad_norm": 1.8532915115356445, + "learning_rate": 5.340667915406871e-06, + "loss": 0.6515, + "step": 22880 + }, + { + "epoch": 1.5921263128608194, + "grad_norm": 2.085413932800293, + "learning_rate": 5.3366305367842395e-06, + "loss": 0.6934, + "step": 22890 + }, + { + "epoch": 1.5928218682618072, + "grad_norm": 1.6822479963302612, + "learning_rate": 5.33259293766125e-06, + "loss": 0.7269, + "step": 22900 + }, + { + "epoch": 1.5935174236627947, + "grad_norm": 4.330849647521973, + "learning_rate": 5.328555120682622e-06, + "loss": 0.7733, + "step": 22910 + }, + { + "epoch": 1.5942129790637825, + "grad_norm": 1.9433412551879883, + "learning_rate": 5.324517088493209e-06, + "loss": 0.667, + "step": 22920 + }, + { + "epoch": 1.59490853446477, + "grad_norm": 2.055612087249756, + "learning_rate": 5.320478843738014e-06, + "loss": 0.7432, + "step": 22930 + }, + { + "epoch": 1.5956040898657577, + "grad_norm": 1.8862446546554565, + "learning_rate": 5.316440389062174e-06, + "loss": 0.7093, + "step": 22940 + }, + { + "epoch": 1.5962996452667455, + "grad_norm": 2.906574249267578, + "learning_rate": 5.312401727110965e-06, + "loss": 0.6948, + "step": 22950 + }, + { + "epoch": 1.5969952006677333, + "grad_norm": 3.115776300430298, + "learning_rate": 5.3083628605298e-06, + "loss": 0.7467, + "step": 22960 + }, + { + "epoch": 1.597690756068721, + "grad_norm": 1.8498873710632324, + "learning_rate": 5.304323791964223e-06, + "loss": 0.7741, + "step": 22970 + }, + { + "epoch": 1.5983863114697086, + "grad_norm": 1.7039636373519897, + "learning_rate": 5.300284524059913e-06, + "loss": 0.7167, + "step": 22980 + }, + { + "epoch": 1.599081866870696, + "grad_norm": 1.890046238899231, + "learning_rate": 5.296245059462679e-06, + "loss": 0.707, + "step": 22990 + }, + { + "epoch": 1.5997774222716838, + "grad_norm": 1.8164496421813965, + "learning_rate": 5.29220540081846e-06, + "loss": 0.7093, + "step": 23000 + }, + { + "epoch": 1.5997774222716838, + "eval_loss": 0.9368060231208801, + "eval_runtime": 4591.1745, + "eval_samples_per_second": 3.956, + "eval_steps_per_second": 0.659, + "step": 23000 + }, + { + "epoch": 1.6004729776726716, + "grad_norm": 2.173025131225586, + "learning_rate": 5.288165550773318e-06, + "loss": 0.7154, + "step": 23010 + }, + { + "epoch": 1.6011685330736594, + "grad_norm": 1.7406078577041626, + "learning_rate": 5.284125511973444e-06, + "loss": 0.729, + "step": 23020 + }, + { + "epoch": 1.6018640884746471, + "grad_norm": 1.4705417156219482, + "learning_rate": 5.2800852870651505e-06, + "loss": 0.6758, + "step": 23030 + }, + { + "epoch": 1.6025596438756347, + "grad_norm": 1.751541018486023, + "learning_rate": 5.276044878694877e-06, + "loss": 0.7516, + "step": 23040 + }, + { + "epoch": 1.6032551992766224, + "grad_norm": 2.3348429203033447, + "learning_rate": 5.272004289509175e-06, + "loss": 0.6642, + "step": 23050 + }, + { + "epoch": 1.60395075467761, + "grad_norm": 2.4935874938964844, + "learning_rate": 5.2679635221547205e-06, + "loss": 0.7315, + "step": 23060 + }, 
+ { + "epoch": 1.6046463100785977, + "grad_norm": 1.4360607862472534, + "learning_rate": 5.263922579278306e-06, + "loss": 0.6894, + "step": 23070 + }, + { + "epoch": 1.6053418654795855, + "grad_norm": 1.6384557485580444, + "learning_rate": 5.259881463526832e-06, + "loss": 0.7499, + "step": 23080 + }, + { + "epoch": 1.6060374208805732, + "grad_norm": 1.9558604955673218, + "learning_rate": 5.255840177547326e-06, + "loss": 0.7246, + "step": 23090 + }, + { + "epoch": 1.606732976281561, + "grad_norm": 1.8271334171295166, + "learning_rate": 5.251798723986912e-06, + "loss": 0.7401, + "step": 23100 + }, + { + "epoch": 1.6074285316825485, + "grad_norm": 1.998246431350708, + "learning_rate": 5.24775710549283e-06, + "loss": 0.7174, + "step": 23110 + }, + { + "epoch": 1.608124087083536, + "grad_norm": 1.6460175514221191, + "learning_rate": 5.243715324712434e-06, + "loss": 0.6893, + "step": 23120 + }, + { + "epoch": 1.6088196424845238, + "grad_norm": 2.134587049484253, + "learning_rate": 5.239673384293173e-06, + "loss": 0.7159, + "step": 23130 + }, + { + "epoch": 1.6095151978855116, + "grad_norm": 4.196039199829102, + "learning_rate": 5.23563128688261e-06, + "loss": 0.7234, + "step": 23140 + }, + { + "epoch": 1.6102107532864993, + "grad_norm": 2.762273073196411, + "learning_rate": 5.231589035128405e-06, + "loss": 0.6847, + "step": 23150 + }, + { + "epoch": 1.610906308687487, + "grad_norm": 1.6993905305862427, + "learning_rate": 5.227546631678323e-06, + "loss": 0.7237, + "step": 23160 + }, + { + "epoch": 1.6116018640884746, + "grad_norm": 1.9939734935760498, + "learning_rate": 5.223504079180225e-06, + "loss": 0.7138, + "step": 23170 + }, + { + "epoch": 1.6122974194894624, + "grad_norm": 1.8998336791992188, + "learning_rate": 5.219461380282071e-06, + "loss": 0.7038, + "step": 23180 + }, + { + "epoch": 1.61299297489045, + "grad_norm": 1.572652816772461, + "learning_rate": 5.215418537631921e-06, + "loss": 0.6645, + "step": 23190 + }, + { + "epoch": 1.6136885302914377, + "grad_norm": 1.3660876750946045, + "learning_rate": 5.2113755538779195e-06, + "loss": 0.6957, + "step": 23200 + }, + { + "epoch": 1.6143840856924254, + "grad_norm": 3.165212392807007, + "learning_rate": 5.207332431668311e-06, + "loss": 0.6937, + "step": 23210 + }, + { + "epoch": 1.6150796410934132, + "grad_norm": 1.7360254526138306, + "learning_rate": 5.203289173651432e-06, + "loss": 0.716, + "step": 23220 + }, + { + "epoch": 1.615775196494401, + "grad_norm": 11.052386283874512, + "learning_rate": 5.199245782475703e-06, + "loss": 0.7657, + "step": 23230 + }, + { + "epoch": 1.6164707518953885, + "grad_norm": 1.5289231538772583, + "learning_rate": 5.195202260789631e-06, + "loss": 0.7162, + "step": 23240 + }, + { + "epoch": 1.617166307296376, + "grad_norm": 2.310584545135498, + "learning_rate": 5.191158611241815e-06, + "loss": 0.7296, + "step": 23250 + }, + { + "epoch": 1.6178618626973638, + "grad_norm": 1.8366191387176514, + "learning_rate": 5.187114836480931e-06, + "loss": 0.6913, + "step": 23260 + }, + { + "epoch": 1.6185574180983515, + "grad_norm": 2.523252010345459, + "learning_rate": 5.183070939155741e-06, + "loss": 0.6935, + "step": 23270 + }, + { + "epoch": 1.6192529734993393, + "grad_norm": 1.6451199054718018, + "learning_rate": 5.1790269219150866e-06, + "loss": 0.6566, + "step": 23280 + }, + { + "epoch": 1.619948528900327, + "grad_norm": 1.8635560274124146, + "learning_rate": 5.174982787407886e-06, + "loss": 0.7657, + "step": 23290 + }, + { + "epoch": 1.6206440843013146, + "grad_norm": 1.6401623487472534, + "learning_rate": 
5.1709385382831374e-06, + "loss": 0.7016, + "step": 23300 + }, + { + "epoch": 1.6213396397023023, + "grad_norm": 1.5546022653579712, + "learning_rate": 5.1668941771899115e-06, + "loss": 0.7054, + "step": 23310 + }, + { + "epoch": 1.6220351951032899, + "grad_norm": 1.5356436967849731, + "learning_rate": 5.162849706777352e-06, + "loss": 0.7132, + "step": 23320 + }, + { + "epoch": 1.6227307505042776, + "grad_norm": 2.2031192779541016, + "learning_rate": 5.158805129694677e-06, + "loss": 0.7622, + "step": 23330 + }, + { + "epoch": 1.6234263059052654, + "grad_norm": 2.1663031578063965, + "learning_rate": 5.154760448591173e-06, + "loss": 0.6841, + "step": 23340 + }, + { + "epoch": 1.6241218613062531, + "grad_norm": 3.3839471340179443, + "learning_rate": 5.150715666116193e-06, + "loss": 0.718, + "step": 23350 + }, + { + "epoch": 1.624817416707241, + "grad_norm": 2.5470683574676514, + "learning_rate": 5.146670784919159e-06, + "loss": 0.7128, + "step": 23360 + }, + { + "epoch": 1.6255129721082284, + "grad_norm": 2.4547693729400635, + "learning_rate": 5.142625807649556e-06, + "loss": 0.6962, + "step": 23370 + }, + { + "epoch": 1.626208527509216, + "grad_norm": 1.8284906148910522, + "learning_rate": 5.138580736956933e-06, + "loss": 0.6353, + "step": 23380 + }, + { + "epoch": 1.6269040829102037, + "grad_norm": 2.2519068717956543, + "learning_rate": 5.1345355754909e-06, + "loss": 0.7032, + "step": 23390 + }, + { + "epoch": 1.6275996383111915, + "grad_norm": 1.3756103515625, + "learning_rate": 5.130490325901124e-06, + "loss": 0.693, + "step": 23400 + }, + { + "epoch": 1.6282951937121792, + "grad_norm": 1.5833208560943604, + "learning_rate": 5.126444990837336e-06, + "loss": 0.766, + "step": 23410 + }, + { + "epoch": 1.628990749113167, + "grad_norm": 2.8524622917175293, + "learning_rate": 5.122399572949315e-06, + "loss": 0.7332, + "step": 23420 + }, + { + "epoch": 1.6296863045141545, + "grad_norm": 1.7187374830245972, + "learning_rate": 5.118354074886898e-06, + "loss": 0.7851, + "step": 23430 + }, + { + "epoch": 1.6303818599151423, + "grad_norm": 2.0744080543518066, + "learning_rate": 5.114308499299978e-06, + "loss": 0.7342, + "step": 23440 + }, + { + "epoch": 1.6310774153161298, + "grad_norm": 1.5176914930343628, + "learning_rate": 5.110262848838493e-06, + "loss": 0.7334, + "step": 23450 + }, + { + "epoch": 1.6317729707171176, + "grad_norm": 2.2592391967773438, + "learning_rate": 5.106217126152432e-06, + "loss": 0.6933, + "step": 23460 + }, + { + "epoch": 1.6324685261181053, + "grad_norm": 1.8283077478408813, + "learning_rate": 5.1021713338918335e-06, + "loss": 0.724, + "step": 23470 + }, + { + "epoch": 1.633164081519093, + "grad_norm": 1.7409590482711792, + "learning_rate": 5.09812547470678e-06, + "loss": 0.7034, + "step": 23480 + }, + { + "epoch": 1.6338596369200808, + "grad_norm": 3.368534564971924, + "learning_rate": 5.094079551247394e-06, + "loss": 0.756, + "step": 23490 + }, + { + "epoch": 1.6345551923210684, + "grad_norm": 3.5774807929992676, + "learning_rate": 5.090033566163848e-06, + "loss": 0.7207, + "step": 23500 + }, + { + "epoch": 1.6345551923210684, + "eval_loss": 0.9341471195220947, + "eval_runtime": 4605.8336, + "eval_samples_per_second": 3.943, + "eval_steps_per_second": 0.657, + "step": 23500 + }, + { + "epoch": 1.635250747722056, + "grad_norm": 2.0271496772766113, + "learning_rate": 5.0859875221063504e-06, + "loss": 0.6748, + "step": 23510 + }, + { + "epoch": 1.6359463031230437, + "grad_norm": 1.5711276531219482, + "learning_rate": 5.081941421725145e-06, + "loss": 0.6544, + "step": 
23520 + }, + { + "epoch": 1.6366418585240314, + "grad_norm": 1.8747820854187012, + "learning_rate": 5.077895267670518e-06, + "loss": 0.7119, + "step": 23530 + }, + { + "epoch": 1.6373374139250192, + "grad_norm": 1.683509111404419, + "learning_rate": 5.073849062592789e-06, + "loss": 0.721, + "step": 23540 + }, + { + "epoch": 1.638032969326007, + "grad_norm": 2.272941827774048, + "learning_rate": 5.069802809142312e-06, + "loss": 0.6592, + "step": 23550 + }, + { + "epoch": 1.6387285247269945, + "grad_norm": 1.826262354850769, + "learning_rate": 5.0657565099694685e-06, + "loss": 0.6843, + "step": 23560 + }, + { + "epoch": 1.6394240801279822, + "grad_norm": 2.746196985244751, + "learning_rate": 5.061710167724675e-06, + "loss": 0.7361, + "step": 23570 + }, + { + "epoch": 1.6401196355289698, + "grad_norm": 1.9026020765304565, + "learning_rate": 5.057663785058372e-06, + "loss": 0.7158, + "step": 23580 + }, + { + "epoch": 1.6408151909299575, + "grad_norm": 1.8165072202682495, + "learning_rate": 5.053617364621031e-06, + "loss": 0.6599, + "step": 23590 + }, + { + "epoch": 1.6415107463309453, + "grad_norm": 2.214921712875366, + "learning_rate": 5.049570909063145e-06, + "loss": 0.736, + "step": 23600 + }, + { + "epoch": 1.642206301731933, + "grad_norm": 1.7064130306243896, + "learning_rate": 5.0455244210352296e-06, + "loss": 0.768, + "step": 23610 + }, + { + "epoch": 1.6429018571329206, + "grad_norm": 3.765730619430542, + "learning_rate": 5.041477903187824e-06, + "loss": 0.7054, + "step": 23620 + }, + { + "epoch": 1.6435974125339083, + "grad_norm": 1.2150471210479736, + "learning_rate": 5.037431358171486e-06, + "loss": 0.6567, + "step": 23630 + }, + { + "epoch": 1.6442929679348959, + "grad_norm": 1.7458813190460205, + "learning_rate": 5.03338478863679e-06, + "loss": 0.729, + "step": 23640 + }, + { + "epoch": 1.6449885233358836, + "grad_norm": 2.096709728240967, + "learning_rate": 5.029338197234329e-06, + "loss": 0.7426, + "step": 23650 + }, + { + "epoch": 1.6456840787368714, + "grad_norm": 2.0645411014556885, + "learning_rate": 5.025291586614707e-06, + "loss": 0.7177, + "step": 23660 + }, + { + "epoch": 1.6463796341378591, + "grad_norm": 2.1027934551239014, + "learning_rate": 5.021244959428544e-06, + "loss": 0.6774, + "step": 23670 + }, + { + "epoch": 1.647075189538847, + "grad_norm": 1.7184613943099976, + "learning_rate": 5.017198318326467e-06, + "loss": 0.6884, + "step": 23680 + }, + { + "epoch": 1.6477707449398344, + "grad_norm": 3.462312936782837, + "learning_rate": 5.013151665959116e-06, + "loss": 0.7352, + "step": 23690 + }, + { + "epoch": 1.6484663003408222, + "grad_norm": 2.038325071334839, + "learning_rate": 5.009105004977137e-06, + "loss": 0.6995, + "step": 23700 + }, + { + "epoch": 1.6491618557418097, + "grad_norm": 1.5848388671875, + "learning_rate": 5.005058338031181e-06, + "loss": 0.7476, + "step": 23710 + }, + { + "epoch": 1.6498574111427975, + "grad_norm": 1.6685408353805542, + "learning_rate": 5.001011667771902e-06, + "loss": 0.7145, + "step": 23720 + }, + { + "epoch": 1.6505529665437852, + "grad_norm": 1.8260780572891235, + "learning_rate": 4.9969649968499606e-06, + "loss": 0.7464, + "step": 23730 + }, + { + "epoch": 1.651248521944773, + "grad_norm": 2.20276141166687, + "learning_rate": 4.992918327916011e-06, + "loss": 0.6986, + "step": 23740 + }, + { + "epoch": 1.6519440773457605, + "grad_norm": 1.4512174129486084, + "learning_rate": 4.988871663620711e-06, + "loss": 0.7265, + "step": 23750 + }, + { + "epoch": 1.6526396327467483, + "grad_norm": 1.7557733058929443, + "learning_rate": 
4.984825006614715e-06, + "loss": 0.631, + "step": 23760 + }, + { + "epoch": 1.6533351881477358, + "grad_norm": 1.8958665132522583, + "learning_rate": 4.980778359548671e-06, + "loss": 0.7973, + "step": 23770 + }, + { + "epoch": 1.6540307435487236, + "grad_norm": 1.9633070230484009, + "learning_rate": 4.976731725073223e-06, + "loss": 0.723, + "step": 23780 + }, + { + "epoch": 1.6547262989497113, + "grad_norm": 1.3918555974960327, + "learning_rate": 4.9726851058390026e-06, + "loss": 0.6933, + "step": 23790 + }, + { + "epoch": 1.655421854350699, + "grad_norm": 2.593963146209717, + "learning_rate": 4.968638504496634e-06, + "loss": 0.6969, + "step": 23800 + }, + { + "epoch": 1.6561174097516869, + "grad_norm": 1.6226752996444702, + "learning_rate": 4.964591923696731e-06, + "loss": 0.7658, + "step": 23810 + }, + { + "epoch": 1.6568129651526744, + "grad_norm": 3.1963772773742676, + "learning_rate": 4.9605453660898895e-06, + "loss": 0.7053, + "step": 23820 + }, + { + "epoch": 1.657508520553662, + "grad_norm": 5.7426557540893555, + "learning_rate": 4.956498834326697e-06, + "loss": 0.7085, + "step": 23830 + }, + { + "epoch": 1.6582040759546497, + "grad_norm": 2.5272397994995117, + "learning_rate": 4.952452331057718e-06, + "loss": 0.6828, + "step": 23840 + }, + { + "epoch": 1.6588996313556374, + "grad_norm": 4.665675163269043, + "learning_rate": 4.948405858933503e-06, + "loss": 0.6794, + "step": 23850 + }, + { + "epoch": 1.6595951867566252, + "grad_norm": 1.914355754852295, + "learning_rate": 4.944359420604576e-06, + "loss": 0.6494, + "step": 23860 + }, + { + "epoch": 1.660290742157613, + "grad_norm": 1.6600372791290283, + "learning_rate": 4.940313018721444e-06, + "loss": 0.6767, + "step": 23870 + }, + { + "epoch": 1.6609862975586005, + "grad_norm": 2.130527973175049, + "learning_rate": 4.936266655934588e-06, + "loss": 0.6956, + "step": 23880 + }, + { + "epoch": 1.6616818529595883, + "grad_norm": 1.7196900844573975, + "learning_rate": 4.932220334894466e-06, + "loss": 0.72, + "step": 23890 + }, + { + "epoch": 1.6623774083605758, + "grad_norm": 1.9301713705062866, + "learning_rate": 4.9281740582515055e-06, + "loss": 0.6816, + "step": 23900 + }, + { + "epoch": 1.6630729637615635, + "grad_norm": 1.8818150758743286, + "learning_rate": 4.9241278286561055e-06, + "loss": 0.6896, + "step": 23910 + }, + { + "epoch": 1.6637685191625513, + "grad_norm": 1.6322824954986572, + "learning_rate": 4.9200816487586375e-06, + "loss": 0.7063, + "step": 23920 + }, + { + "epoch": 1.664464074563539, + "grad_norm": 4.007488250732422, + "learning_rate": 4.9160355212094344e-06, + "loss": 0.6863, + "step": 23930 + }, + { + "epoch": 1.6651596299645268, + "grad_norm": 1.8934129476547241, + "learning_rate": 4.911989448658798e-06, + "loss": 0.7296, + "step": 23940 + }, + { + "epoch": 1.6658551853655144, + "grad_norm": 1.8656433820724487, + "learning_rate": 4.907943433756996e-06, + "loss": 0.7461, + "step": 23950 + }, + { + "epoch": 1.666550740766502, + "grad_norm": 2.3265485763549805, + "learning_rate": 4.903897479154258e-06, + "loss": 0.7296, + "step": 23960 + }, + { + "epoch": 1.6672462961674896, + "grad_norm": 1.9989250898361206, + "learning_rate": 4.899851587500769e-06, + "loss": 0.7345, + "step": 23970 + }, + { + "epoch": 1.6679418515684774, + "grad_norm": 2.0754520893096924, + "learning_rate": 4.895805761446679e-06, + "loss": 0.6732, + "step": 23980 + }, + { + "epoch": 1.6686374069694652, + "grad_norm": 3.127466917037964, + "learning_rate": 4.891760003642094e-06, + "loss": 0.7403, + "step": 23990 + }, + { + "epoch": 
1.669332962370453, + "grad_norm": 3.595698833465576, + "learning_rate": 4.887714316737069e-06, + "loss": 0.7502, + "step": 24000 + }, + { + "epoch": 1.669332962370453, + "eval_loss": 0.9301904439926147, + "eval_runtime": 4625.0135, + "eval_samples_per_second": 3.927, + "eval_steps_per_second": 0.654, + "step": 24000 + }, + { + "epoch": 1.6700285177714405, + "grad_norm": 2.2055881023406982, + "learning_rate": 4.8836687033816205e-06, + "loss": 0.6607, + "step": 24010 + }, + { + "epoch": 1.6707240731724282, + "grad_norm": 1.5816066265106201, + "learning_rate": 4.8796231662257125e-06, + "loss": 0.7435, + "step": 24020 + }, + { + "epoch": 1.6714196285734157, + "grad_norm": 2.323638439178467, + "learning_rate": 4.875577707919261e-06, + "loss": 0.7192, + "step": 24030 + }, + { + "epoch": 1.6721151839744035, + "grad_norm": 2.022634744644165, + "learning_rate": 4.871532331112129e-06, + "loss": 0.6724, + "step": 24040 + }, + { + "epoch": 1.6728107393753913, + "grad_norm": 2.5594065189361572, + "learning_rate": 4.867487038454128e-06, + "loss": 0.741, + "step": 24050 + }, + { + "epoch": 1.673506294776379, + "grad_norm": 2.2339565753936768, + "learning_rate": 4.8634418325950136e-06, + "loss": 0.7078, + "step": 24060 + }, + { + "epoch": 1.6742018501773668, + "grad_norm": 1.7584363222122192, + "learning_rate": 4.859396716184479e-06, + "loss": 0.6688, + "step": 24070 + }, + { + "epoch": 1.6748974055783543, + "grad_norm": 1.9433629512786865, + "learning_rate": 4.855351691872169e-06, + "loss": 0.679, + "step": 24080 + }, + { + "epoch": 1.6755929609793418, + "grad_norm": 2.013472557067871, + "learning_rate": 4.85130676230766e-06, + "loss": 0.7152, + "step": 24090 + }, + { + "epoch": 1.6762885163803296, + "grad_norm": 1.8814442157745361, + "learning_rate": 4.847261930140472e-06, + "loss": 0.7214, + "step": 24100 + }, + { + "epoch": 1.6769840717813174, + "grad_norm": 1.3428525924682617, + "learning_rate": 4.843217198020056e-06, + "loss": 0.6992, + "step": 24110 + }, + { + "epoch": 1.6776796271823051, + "grad_norm": 1.8356012105941772, + "learning_rate": 4.8391725685958e-06, + "loss": 0.6958, + "step": 24120 + }, + { + "epoch": 1.6783751825832929, + "grad_norm": 2.1505167484283447, + "learning_rate": 4.8351280445170265e-06, + "loss": 0.7478, + "step": 24130 + }, + { + "epoch": 1.6790707379842804, + "grad_norm": 1.5865250825881958, + "learning_rate": 4.831083628432988e-06, + "loss": 0.703, + "step": 24140 + }, + { + "epoch": 1.6797662933852682, + "grad_norm": 2.0292909145355225, + "learning_rate": 4.827039322992861e-06, + "loss": 0.7374, + "step": 24150 + }, + { + "epoch": 1.6804618487862557, + "grad_norm": 2.946131467819214, + "learning_rate": 4.8229951308457575e-06, + "loss": 0.7147, + "step": 24160 + }, + { + "epoch": 1.6811574041872435, + "grad_norm": 1.9687060117721558, + "learning_rate": 4.818951054640709e-06, + "loss": 0.6743, + "step": 24170 + }, + { + "epoch": 1.6818529595882312, + "grad_norm": 2.6242854595184326, + "learning_rate": 4.814907097026677e-06, + "loss": 0.6992, + "step": 24180 + }, + { + "epoch": 1.682548514989219, + "grad_norm": 1.5184245109558105, + "learning_rate": 4.81086326065254e-06, + "loss": 0.7007, + "step": 24190 + }, + { + "epoch": 1.6832440703902067, + "grad_norm": 1.755995750427246, + "learning_rate": 4.806819548167101e-06, + "loss": 0.7245, + "step": 24200 + }, + { + "epoch": 1.6839396257911943, + "grad_norm": 1.9014462232589722, + "learning_rate": 4.802775962219078e-06, + "loss": 0.7578, + "step": 24210 + }, + { + "epoch": 1.6846351811921818, + "grad_norm": 1.6220749616622925, 
+ "learning_rate": 4.798732505457108e-06, + "loss": 0.6592, + "step": 24220 + }, + { + "epoch": 1.6853307365931696, + "grad_norm": 1.4836498498916626, + "learning_rate": 4.794689180529744e-06, + "loss": 0.7085, + "step": 24230 + }, + { + "epoch": 1.6860262919941573, + "grad_norm": 1.7928651571273804, + "learning_rate": 4.7906459900854534e-06, + "loss": 0.7054, + "step": 24240 + }, + { + "epoch": 1.686721847395145, + "grad_norm": 1.6451350450515747, + "learning_rate": 4.786602936772613e-06, + "loss": 0.7682, + "step": 24250 + }, + { + "epoch": 1.6874174027961328, + "grad_norm": 1.5265158414840698, + "learning_rate": 4.782560023239512e-06, + "loss": 0.7107, + "step": 24260 + }, + { + "epoch": 1.6881129581971204, + "grad_norm": 1.787006139755249, + "learning_rate": 4.778517252134346e-06, + "loss": 0.6273, + "step": 24270 + }, + { + "epoch": 1.6888085135981081, + "grad_norm": 1.6994988918304443, + "learning_rate": 4.774474626105222e-06, + "loss": 0.693, + "step": 24280 + }, + { + "epoch": 1.6895040689990957, + "grad_norm": 2.6346871852874756, + "learning_rate": 4.770432147800141e-06, + "loss": 0.7183, + "step": 24290 + }, + { + "epoch": 1.6901996244000834, + "grad_norm": 2.805741786956787, + "learning_rate": 4.7663898198670215e-06, + "loss": 0.693, + "step": 24300 + }, + { + "epoch": 1.6908951798010712, + "grad_norm": 2.3502652645111084, + "learning_rate": 4.762347644953674e-06, + "loss": 0.6709, + "step": 24310 + }, + { + "epoch": 1.691590735202059, + "grad_norm": 5.235349178314209, + "learning_rate": 4.758305625707811e-06, + "loss": 0.7251, + "step": 24320 + }, + { + "epoch": 1.6922862906030467, + "grad_norm": 1.9011690616607666, + "learning_rate": 4.754263764777042e-06, + "loss": 0.7333, + "step": 24330 + }, + { + "epoch": 1.6929818460040342, + "grad_norm": 1.9975553750991821, + "learning_rate": 4.750222064808878e-06, + "loss": 0.7147, + "step": 24340 + }, + { + "epoch": 1.6936774014050218, + "grad_norm": 1.7122416496276855, + "learning_rate": 4.74618052845072e-06, + "loss": 0.7034, + "step": 24350 + }, + { + "epoch": 1.6943729568060095, + "grad_norm": 1.7493289709091187, + "learning_rate": 4.742139158349861e-06, + "loss": 0.7108, + "step": 24360 + }, + { + "epoch": 1.6950685122069973, + "grad_norm": 1.9089518785476685, + "learning_rate": 4.738097957153486e-06, + "loss": 0.7522, + "step": 24370 + }, + { + "epoch": 1.695764067607985, + "grad_norm": 1.4907335042953491, + "learning_rate": 4.734056927508672e-06, + "loss": 0.6872, + "step": 24380 + }, + { + "epoch": 1.6964596230089728, + "grad_norm": 1.5499114990234375, + "learning_rate": 4.730016072062381e-06, + "loss": 0.7412, + "step": 24390 + }, + { + "epoch": 1.6971551784099603, + "grad_norm": 4.768270492553711, + "learning_rate": 4.725975393461461e-06, + "loss": 0.7291, + "step": 24400 + }, + { + "epoch": 1.697850733810948, + "grad_norm": 4.363291263580322, + "learning_rate": 4.721934894352646e-06, + "loss": 0.7043, + "step": 24410 + }, + { + "epoch": 1.6985462892119356, + "grad_norm": 1.4633796215057373, + "learning_rate": 4.717894577382552e-06, + "loss": 0.7127, + "step": 24420 + }, + { + "epoch": 1.6992418446129234, + "grad_norm": 3.1273229122161865, + "learning_rate": 4.713854445197672e-06, + "loss": 0.6809, + "step": 24430 + }, + { + "epoch": 1.6999374000139111, + "grad_norm": 1.3734418153762817, + "learning_rate": 4.709814500444382e-06, + "loss": 0.6771, + "step": 24440 + }, + { + "epoch": 1.700632955414899, + "grad_norm": 1.467660665512085, + "learning_rate": 4.705774745768935e-06, + "loss": 0.7288, + "step": 24450 + }, + { + 
"epoch": 1.7013285108158864, + "grad_norm": 2.178548574447632, + "learning_rate": 4.701735183817457e-06, + "loss": 0.7173, + "step": 24460 + }, + { + "epoch": 1.7020240662168742, + "grad_norm": 1.9116250276565552, + "learning_rate": 4.6976958172359515e-06, + "loss": 0.6868, + "step": 24470 + }, + { + "epoch": 1.7027196216178617, + "grad_norm": 1.710371971130371, + "learning_rate": 4.69365664867029e-06, + "loss": 0.7225, + "step": 24480 + }, + { + "epoch": 1.7034151770188495, + "grad_norm": 2.1531031131744385, + "learning_rate": 4.689617680766219e-06, + "loss": 0.8349, + "step": 24490 + }, + { + "epoch": 1.7041107324198372, + "grad_norm": 1.8737411499023438, + "learning_rate": 4.685578916169346e-06, + "loss": 0.7134, + "step": 24500 + }, + { + "epoch": 1.7041107324198372, + "eval_loss": 0.9288870692253113, + "eval_runtime": 4588.3317, + "eval_samples_per_second": 3.958, + "eval_steps_per_second": 0.66, + "step": 24500 + }, + { + "epoch": 1.704806287820825, + "grad_norm": 1.2940315008163452, + "learning_rate": 4.681540357525154e-06, + "loss": 0.6694, + "step": 24510 + }, + { + "epoch": 1.7055018432218128, + "grad_norm": 2.4392333030700684, + "learning_rate": 4.677502007478984e-06, + "loss": 0.6597, + "step": 24520 + }, + { + "epoch": 1.7061973986228003, + "grad_norm": 2.257929801940918, + "learning_rate": 4.673463868676047e-06, + "loss": 0.6821, + "step": 24530 + }, + { + "epoch": 1.706892954023788, + "grad_norm": 1.792213797569275, + "learning_rate": 4.66942594376141e-06, + "loss": 0.6893, + "step": 24540 + }, + { + "epoch": 1.7075885094247756, + "grad_norm": 1.885727047920227, + "learning_rate": 4.665388235380003e-06, + "loss": 0.754, + "step": 24550 + }, + { + "epoch": 1.7082840648257633, + "grad_norm": 2.037646532058716, + "learning_rate": 4.661350746176613e-06, + "loss": 0.7887, + "step": 24560 + }, + { + "epoch": 1.708979620226751, + "grad_norm": 3.0948503017425537, + "learning_rate": 4.657313478795883e-06, + "loss": 0.6894, + "step": 24570 + }, + { + "epoch": 1.7096751756277389, + "grad_norm": 1.5871737003326416, + "learning_rate": 4.65327643588231e-06, + "loss": 0.7457, + "step": 24580 + }, + { + "epoch": 1.7103707310287264, + "grad_norm": 1.9761390686035156, + "learning_rate": 4.649239620080248e-06, + "loss": 0.6927, + "step": 24590 + }, + { + "epoch": 1.7110662864297141, + "grad_norm": 2.0315051078796387, + "learning_rate": 4.6452030340339e-06, + "loss": 0.6401, + "step": 24600 + }, + { + "epoch": 1.7117618418307017, + "grad_norm": 2.153409719467163, + "learning_rate": 4.641166680387314e-06, + "loss": 0.737, + "step": 24610 + }, + { + "epoch": 1.7124573972316894, + "grad_norm": 1.6171026229858398, + "learning_rate": 4.637130561784393e-06, + "loss": 0.7095, + "step": 24620 + }, + { + "epoch": 1.7131529526326772, + "grad_norm": 1.612365961074829, + "learning_rate": 4.633094680868886e-06, + "loss": 0.7479, + "step": 24630 + }, + { + "epoch": 1.713848508033665, + "grad_norm": 1.7872875928878784, + "learning_rate": 4.629059040284375e-06, + "loss": 0.6852, + "step": 24640 + }, + { + "epoch": 1.7145440634346527, + "grad_norm": 1.7672394514083862, + "learning_rate": 4.6250236426743e-06, + "loss": 0.7461, + "step": 24650 + }, + { + "epoch": 1.7152396188356402, + "grad_norm": 2.0397377014160156, + "learning_rate": 4.62098849068193e-06, + "loss": 0.7393, + "step": 24660 + }, + { + "epoch": 1.715935174236628, + "grad_norm": 2.154817581176758, + "learning_rate": 4.61695358695038e-06, + "loss": 0.6938, + "step": 24670 + }, + { + "epoch": 1.7166307296376155, + "grad_norm": 2.6637027263641357, + 
"learning_rate": 4.6129189341226e-06, + "loss": 0.6647, + "step": 24680 + }, + { + "epoch": 1.7173262850386033, + "grad_norm": 1.808764934539795, + "learning_rate": 4.608884534841375e-06, + "loss": 0.7017, + "step": 24690 + }, + { + "epoch": 1.718021840439591, + "grad_norm": 1.8286253213882446, + "learning_rate": 4.6048503917493284e-06, + "loss": 0.761, + "step": 24700 + }, + { + "epoch": 1.7187173958405788, + "grad_norm": 1.9420380592346191, + "learning_rate": 4.600816507488905e-06, + "loss": 0.7033, + "step": 24710 + }, + { + "epoch": 1.7194129512415663, + "grad_norm": 1.7580689191818237, + "learning_rate": 4.596782884702391e-06, + "loss": 0.7054, + "step": 24720 + }, + { + "epoch": 1.720108506642554, + "grad_norm": 1.6335105895996094, + "learning_rate": 4.592749526031898e-06, + "loss": 0.7172, + "step": 24730 + }, + { + "epoch": 1.7208040620435416, + "grad_norm": 2.667562484741211, + "learning_rate": 4.588716434119364e-06, + "loss": 0.7042, + "step": 24740 + }, + { + "epoch": 1.7214996174445294, + "grad_norm": 1.8713653087615967, + "learning_rate": 4.58468361160655e-06, + "loss": 0.7149, + "step": 24750 + }, + { + "epoch": 1.7221951728455172, + "grad_norm": 1.4407161474227905, + "learning_rate": 4.580651061135046e-06, + "loss": 0.702, + "step": 24760 + }, + { + "epoch": 1.722890728246505, + "grad_norm": 5.778254508972168, + "learning_rate": 4.5766187853462605e-06, + "loss": 0.6672, + "step": 24770 + }, + { + "epoch": 1.7235862836474927, + "grad_norm": 1.7427797317504883, + "learning_rate": 4.572586786881419e-06, + "loss": 0.7096, + "step": 24780 + }, + { + "epoch": 1.7242818390484802, + "grad_norm": 2.846219539642334, + "learning_rate": 4.568555068381571e-06, + "loss": 0.7496, + "step": 24790 + }, + { + "epoch": 1.7249773944494677, + "grad_norm": 2.056689500808716, + "learning_rate": 4.56452363248758e-06, + "loss": 0.6727, + "step": 24800 + }, + { + "epoch": 1.7256729498504555, + "grad_norm": 1.6566110849380493, + "learning_rate": 4.560492481840124e-06, + "loss": 0.7572, + "step": 24810 + }, + { + "epoch": 1.7263685052514433, + "grad_norm": 1.7641148567199707, + "learning_rate": 4.556461619079695e-06, + "loss": 0.7046, + "step": 24820 + }, + { + "epoch": 1.727064060652431, + "grad_norm": 2.2162888050079346, + "learning_rate": 4.552431046846597e-06, + "loss": 0.6791, + "step": 24830 + }, + { + "epoch": 1.7277596160534188, + "grad_norm": 2.749746561050415, + "learning_rate": 4.548400767780942e-06, + "loss": 0.7303, + "step": 24840 + }, + { + "epoch": 1.7284551714544063, + "grad_norm": 2.2608273029327393, + "learning_rate": 4.5443707845226515e-06, + "loss": 0.7547, + "step": 24850 + }, + { + "epoch": 1.729150726855394, + "grad_norm": 2.4883246421813965, + "learning_rate": 4.5403410997114514e-06, + "loss": 0.717, + "step": 24860 + }, + { + "epoch": 1.7298462822563816, + "grad_norm": 2.180572509765625, + "learning_rate": 4.536311715986873e-06, + "loss": 0.7669, + "step": 24870 + }, + { + "epoch": 1.7305418376573694, + "grad_norm": 1.7946350574493408, + "learning_rate": 4.532282635988253e-06, + "loss": 0.6762, + "step": 24880 + }, + { + "epoch": 1.731237393058357, + "grad_norm": 1.8036636114120483, + "learning_rate": 4.528253862354726e-06, + "loss": 0.6778, + "step": 24890 + }, + { + "epoch": 1.7319329484593449, + "grad_norm": 1.5552568435668945, + "learning_rate": 4.524225397725225e-06, + "loss": 0.7062, + "step": 24900 + }, + { + "epoch": 1.7326285038603326, + "grad_norm": 2.126801013946533, + "learning_rate": 4.520197244738483e-06, + "loss": 0.7456, + "step": 24910 + }, + { + "epoch": 
1.7333240592613202, + "grad_norm": 1.4155621528625488, + "learning_rate": 4.516169406033033e-06, + "loss": 0.7348, + "step": 24920 + }, + { + "epoch": 1.7340196146623077, + "grad_norm": 2.610417366027832, + "learning_rate": 4.512141884247189e-06, + "loss": 0.7031, + "step": 24930 + }, + { + "epoch": 1.7347151700632955, + "grad_norm": 3.299368143081665, + "learning_rate": 4.508114682019071e-06, + "loss": 0.7053, + "step": 24940 + }, + { + "epoch": 1.7354107254642832, + "grad_norm": 2.181206226348877, + "learning_rate": 4.504087801986583e-06, + "loss": 0.712, + "step": 24950 + }, + { + "epoch": 1.736106280865271, + "grad_norm": 1.6168698072433472, + "learning_rate": 4.50006124678742e-06, + "loss": 0.6684, + "step": 24960 + }, + { + "epoch": 1.7368018362662587, + "grad_norm": 1.7036614418029785, + "learning_rate": 4.496035019059063e-06, + "loss": 0.7298, + "step": 24970 + }, + { + "epoch": 1.7374973916672463, + "grad_norm": 1.8299601078033447, + "learning_rate": 4.492009121438781e-06, + "loss": 0.7457, + "step": 24980 + }, + { + "epoch": 1.738192947068234, + "grad_norm": 1.9233745336532593, + "learning_rate": 4.487983556563624e-06, + "loss": 0.6865, + "step": 24990 + }, + { + "epoch": 1.7388885024692216, + "grad_norm": 1.6727678775787354, + "learning_rate": 4.483958327070423e-06, + "loss": 0.7125, + "step": 25000 + }, + { + "epoch": 1.7388885024692216, + "eval_loss": 0.9295873641967773, + "eval_runtime": 4586.1257, + "eval_samples_per_second": 3.96, + "eval_steps_per_second": 0.66, + "step": 25000 + }, + { + "epoch": 1.7395840578702093, + "grad_norm": 3.5153305530548096, + "learning_rate": 4.479933435595792e-06, + "loss": 0.6893, + "step": 25010 + }, + { + "epoch": 1.740279613271197, + "grad_norm": 3.3578829765319824, + "learning_rate": 4.475908884776125e-06, + "loss": 0.7412, + "step": 25020 + }, + { + "epoch": 1.7409751686721848, + "grad_norm": 1.8067222833633423, + "learning_rate": 4.471884677247589e-06, + "loss": 0.6849, + "step": 25030 + }, + { + "epoch": 1.7416707240731726, + "grad_norm": 1.9886544942855835, + "learning_rate": 4.467860815646127e-06, + "loss": 0.6642, + "step": 25040 + }, + { + "epoch": 1.7423662794741601, + "grad_norm": 2.2484796047210693, + "learning_rate": 4.463837302607458e-06, + "loss": 0.7044, + "step": 25050 + }, + { + "epoch": 1.7430618348751477, + "grad_norm": 2.470210313796997, + "learning_rate": 4.4598141407670714e-06, + "loss": 0.7181, + "step": 25060 + }, + { + "epoch": 1.7437573902761354, + "grad_norm": 1.8754452466964722, + "learning_rate": 4.455791332760224e-06, + "loss": 0.6516, + "step": 25070 + }, + { + "epoch": 1.7444529456771232, + "grad_norm": 2.0858311653137207, + "learning_rate": 4.451768881221944e-06, + "loss": 0.7017, + "step": 25080 + }, + { + "epoch": 1.745148501078111, + "grad_norm": 2.0306215286254883, + "learning_rate": 4.447746788787025e-06, + "loss": 0.7532, + "step": 25090 + }, + { + "epoch": 1.7458440564790987, + "grad_norm": 4.189837455749512, + "learning_rate": 4.443725058090025e-06, + "loss": 0.6885, + "step": 25100 + }, + { + "epoch": 1.7465396118800862, + "grad_norm": 1.7607864141464233, + "learning_rate": 4.4397036917652654e-06, + "loss": 0.7315, + "step": 25110 + }, + { + "epoch": 1.747235167281074, + "grad_norm": 10.338160514831543, + "learning_rate": 4.435682692446831e-06, + "loss": 0.7632, + "step": 25120 + }, + { + "epoch": 1.7479307226820615, + "grad_norm": 1.5972959995269775, + "learning_rate": 4.431662062768564e-06, + "loss": 0.664, + "step": 25130 + }, + { + "epoch": 1.7486262780830493, + "grad_norm": 2.152918577194214, + 
"learning_rate": 4.427641805364061e-06, + "loss": 0.7179, + "step": 25140 + }, + { + "epoch": 1.749321833484037, + "grad_norm": 1.680715799331665, + "learning_rate": 4.423621922866682e-06, + "loss": 0.6859, + "step": 25150 + }, + { + "epoch": 1.7500173888850248, + "grad_norm": 3.0638303756713867, + "learning_rate": 4.4196024179095344e-06, + "loss": 0.713, + "step": 25160 + }, + { + "epoch": 1.7507129442860125, + "grad_norm": 1.5566773414611816, + "learning_rate": 4.415583293125486e-06, + "loss": 0.7321, + "step": 25170 + }, + { + "epoch": 1.751408499687, + "grad_norm": 1.899898886680603, + "learning_rate": 4.41156455114715e-06, + "loss": 0.7133, + "step": 25180 + }, + { + "epoch": 1.7521040550879876, + "grad_norm": 2.023768901824951, + "learning_rate": 4.407546194606888e-06, + "loss": 0.7157, + "step": 25190 + }, + { + "epoch": 1.7527996104889754, + "grad_norm": 1.4718644618988037, + "learning_rate": 4.403528226136816e-06, + "loss": 0.6514, + "step": 25200 + }, + { + "epoch": 1.7534951658899631, + "grad_norm": 2.0304229259490967, + "learning_rate": 4.399510648368786e-06, + "loss": 0.699, + "step": 25210 + }, + { + "epoch": 1.7541907212909509, + "grad_norm": 1.8959366083145142, + "learning_rate": 4.3954934639344e-06, + "loss": 0.6932, + "step": 25220 + }, + { + "epoch": 1.7548862766919386, + "grad_norm": 2.6972873210906982, + "learning_rate": 4.3914766754650046e-06, + "loss": 0.7143, + "step": 25230 + }, + { + "epoch": 1.7555818320929262, + "grad_norm": 1.8104190826416016, + "learning_rate": 4.387460285591682e-06, + "loss": 0.7201, + "step": 25240 + }, + { + "epoch": 1.756277387493914, + "grad_norm": 1.4929471015930176, + "learning_rate": 4.383444296945256e-06, + "loss": 0.726, + "step": 25250 + }, + { + "epoch": 1.7569729428949015, + "grad_norm": 2.993394136428833, + "learning_rate": 4.379428712156284e-06, + "loss": 0.6924, + "step": 25260 + }, + { + "epoch": 1.7576684982958892, + "grad_norm": 1.7983479499816895, + "learning_rate": 4.375413533855069e-06, + "loss": 0.6976, + "step": 25270 + }, + { + "epoch": 1.758364053696877, + "grad_norm": 1.7294484376907349, + "learning_rate": 4.371398764671633e-06, + "loss": 0.7179, + "step": 25280 + }, + { + "epoch": 1.7590596090978647, + "grad_norm": 2.0055477619171143, + "learning_rate": 4.36738440723574e-06, + "loss": 0.6923, + "step": 25290 + }, + { + "epoch": 1.7597551644988525, + "grad_norm": 1.5737502574920654, + "learning_rate": 4.3633704641768825e-06, + "loss": 0.7114, + "step": 25300 + }, + { + "epoch": 1.76045071989984, + "grad_norm": 3.0790276527404785, + "learning_rate": 4.3593569381242805e-06, + "loss": 0.7248, + "step": 25310 + }, + { + "epoch": 1.7611462753008276, + "grad_norm": 1.2798984050750732, + "learning_rate": 4.355343831706882e-06, + "loss": 0.7204, + "step": 25320 + }, + { + "epoch": 1.7618418307018153, + "grad_norm": 2.0811476707458496, + "learning_rate": 4.351331147553357e-06, + "loss": 0.7109, + "step": 25330 + }, + { + "epoch": 1.762537386102803, + "grad_norm": 1.9661961793899536, + "learning_rate": 4.347318888292107e-06, + "loss": 0.7354, + "step": 25340 + }, + { + "epoch": 1.7632329415037908, + "grad_norm": 1.6201145648956299, + "learning_rate": 4.343307056551241e-06, + "loss": 0.7115, + "step": 25350 + }, + { + "epoch": 1.7639284969047786, + "grad_norm": 1.3211616277694702, + "learning_rate": 4.339295654958603e-06, + "loss": 0.7107, + "step": 25360 + }, + { + "epoch": 1.7646240523057661, + "grad_norm": 2.4390509128570557, + "learning_rate": 4.335284686141746e-06, + "loss": 0.7087, + "step": 25370 + }, + { + "epoch": 
1.765319607706754, + "grad_norm": 4.132873058319092, + "learning_rate": 4.331274152727944e-06, + "loss": 0.6788, + "step": 25380 + }, + { + "epoch": 1.7660151631077414, + "grad_norm": 1.4695786237716675, + "learning_rate": 4.327264057344181e-06, + "loss": 0.7222, + "step": 25390 + }, + { + "epoch": 1.7667107185087292, + "grad_norm": 1.7829478979110718, + "learning_rate": 4.323254402617159e-06, + "loss": 0.7509, + "step": 25400 + }, + { + "epoch": 1.767406273909717, + "grad_norm": 1.7912639379501343, + "learning_rate": 4.319245191173292e-06, + "loss": 0.7017, + "step": 25410 + }, + { + "epoch": 1.7681018293107047, + "grad_norm": 1.526033639907837, + "learning_rate": 4.315236425638697e-06, + "loss": 0.7178, + "step": 25420 + }, + { + "epoch": 1.7687973847116922, + "grad_norm": 1.644758939743042, + "learning_rate": 4.311228108639205e-06, + "loss": 0.706, + "step": 25430 + }, + { + "epoch": 1.76949294011268, + "grad_norm": 2.2057993412017822, + "learning_rate": 4.30722024280035e-06, + "loss": 0.6835, + "step": 25440 + }, + { + "epoch": 1.7701884955136675, + "grad_norm": 1.780983805656433, + "learning_rate": 4.303212830747371e-06, + "loss": 0.674, + "step": 25450 + }, + { + "epoch": 1.7708840509146553, + "grad_norm": 1.436689853668213, + "learning_rate": 4.299205875105214e-06, + "loss": 0.6961, + "step": 25460 + }, + { + "epoch": 1.771579606315643, + "grad_norm": 2.3498525619506836, + "learning_rate": 4.295199378498519e-06, + "loss": 0.7011, + "step": 25470 + }, + { + "epoch": 1.7722751617166308, + "grad_norm": 1.77989661693573, + "learning_rate": 4.291193343551632e-06, + "loss": 0.72, + "step": 25480 + }, + { + "epoch": 1.7729707171176186, + "grad_norm": 4.506668567657471, + "learning_rate": 4.28718777288859e-06, + "loss": 0.701, + "step": 25490 + }, + { + "epoch": 1.773666272518606, + "grad_norm": 1.6870859861373901, + "learning_rate": 4.283182669133132e-06, + "loss": 0.7277, + "step": 25500 + }, + { + "epoch": 1.773666272518606, + "eval_loss": 0.9274883270263672, + "eval_runtime": 4615.3034, + "eval_samples_per_second": 3.935, + "eval_steps_per_second": 0.656, + "step": 25500 + }, + { + "epoch": 1.7743618279195938, + "grad_norm": 1.5911608934402466, + "learning_rate": 4.279178034908685e-06, + "loss": 0.6799, + "step": 25510 + }, + { + "epoch": 1.7750573833205814, + "grad_norm": 2.9365909099578857, + "learning_rate": 4.2751738728383765e-06, + "loss": 0.71, + "step": 25520 + }, + { + "epoch": 1.7757529387215691, + "grad_norm": 1.886038064956665, + "learning_rate": 4.271170185545017e-06, + "loss": 0.7172, + "step": 25530 + }, + { + "epoch": 1.776448494122557, + "grad_norm": 1.7774978876113892, + "learning_rate": 4.2671669756511106e-06, + "loss": 0.6811, + "step": 25540 + }, + { + "epoch": 1.7771440495235447, + "grad_norm": 1.8683706521987915, + "learning_rate": 4.263164245778845e-06, + "loss": 0.6785, + "step": 25550 + }, + { + "epoch": 1.7778396049245322, + "grad_norm": 1.865304946899414, + "learning_rate": 4.259161998550101e-06, + "loss": 0.6868, + "step": 25560 + }, + { + "epoch": 1.77853516032552, + "grad_norm": 1.7159920930862427, + "learning_rate": 4.255160236586432e-06, + "loss": 0.7099, + "step": 25570 + }, + { + "epoch": 1.7792307157265075, + "grad_norm": 1.599256157875061, + "learning_rate": 4.251158962509081e-06, + "loss": 0.6691, + "step": 25580 + }, + { + "epoch": 1.7799262711274952, + "grad_norm": 1.6367076635360718, + "learning_rate": 4.247158178938973e-06, + "loss": 0.7685, + "step": 25590 + }, + { + "epoch": 1.780621826528483, + "grad_norm": 1.489522933959961, + 
"learning_rate": 4.243157888496705e-06, + "loss": 0.6912, + "step": 25600 + }, + { + "epoch": 1.7813173819294708, + "grad_norm": 3.8910326957702637, + "learning_rate": 4.2391580938025564e-06, + "loss": 0.6682, + "step": 25610 + }, + { + "epoch": 1.7820129373304585, + "grad_norm": 1.7803566455841064, + "learning_rate": 4.235158797476479e-06, + "loss": 0.7046, + "step": 25620 + }, + { + "epoch": 1.782708492731446, + "grad_norm": 2.0363874435424805, + "learning_rate": 4.231160002138104e-06, + "loss": 0.663, + "step": 25630 + }, + { + "epoch": 1.7834040481324336, + "grad_norm": 1.8973560333251953, + "learning_rate": 4.227161710406722e-06, + "loss": 0.6963, + "step": 25640 + }, + { + "epoch": 1.7840996035334213, + "grad_norm": 1.7378597259521484, + "learning_rate": 4.223163924901306e-06, + "loss": 0.7525, + "step": 25650 + }, + { + "epoch": 1.784795158934409, + "grad_norm": 2.162604570388794, + "learning_rate": 4.219166648240493e-06, + "loss": 0.6618, + "step": 25660 + }, + { + "epoch": 1.7854907143353969, + "grad_norm": 2.7639403343200684, + "learning_rate": 4.2151698830425854e-06, + "loss": 0.7427, + "step": 25670 + }, + { + "epoch": 1.7861862697363846, + "grad_norm": 1.5627111196517944, + "learning_rate": 4.2111736319255505e-06, + "loss": 0.7178, + "step": 25680 + }, + { + "epoch": 1.7868818251373721, + "grad_norm": 1.9723204374313354, + "learning_rate": 4.207177897507023e-06, + "loss": 0.7321, + "step": 25690 + }, + { + "epoch": 1.78757738053836, + "grad_norm": 1.7340534925460815, + "learning_rate": 4.203182682404297e-06, + "loss": 0.7054, + "step": 25700 + }, + { + "epoch": 1.7882729359393474, + "grad_norm": 1.7095633745193481, + "learning_rate": 4.1991879892343206e-06, + "loss": 0.7159, + "step": 25710 + }, + { + "epoch": 1.7889684913403352, + "grad_norm": 2.911708116531372, + "learning_rate": 4.1951938206137085e-06, + "loss": 0.7624, + "step": 25720 + }, + { + "epoch": 1.789664046741323, + "grad_norm": 1.9733490943908691, + "learning_rate": 4.1912001791587294e-06, + "loss": 0.6928, + "step": 25730 + }, + { + "epoch": 1.7903596021423107, + "grad_norm": 2.2110800743103027, + "learning_rate": 4.187207067485301e-06, + "loss": 0.7168, + "step": 25740 + }, + { + "epoch": 1.7910551575432985, + "grad_norm": 1.9756742715835571, + "learning_rate": 4.1832144882090054e-06, + "loss": 0.6427, + "step": 25750 + }, + { + "epoch": 1.791750712944286, + "grad_norm": 1.8861068487167358, + "learning_rate": 4.179222443945064e-06, + "loss": 0.6853, + "step": 25760 + }, + { + "epoch": 1.7924462683452735, + "grad_norm": 1.4366862773895264, + "learning_rate": 4.1752309373083574e-06, + "loss": 0.685, + "step": 25770 + }, + { + "epoch": 1.7931418237462613, + "grad_norm": 1.6759233474731445, + "learning_rate": 4.171239970913405e-06, + "loss": 0.6707, + "step": 25780 + }, + { + "epoch": 1.793837379147249, + "grad_norm": 1.7954999208450317, + "learning_rate": 4.167249547374379e-06, + "loss": 0.7366, + "step": 25790 + }, + { + "epoch": 1.7945329345482368, + "grad_norm": 1.4638099670410156, + "learning_rate": 4.163259669305094e-06, + "loss": 0.6763, + "step": 25800 + }, + { + "epoch": 1.7952284899492246, + "grad_norm": 3.0599074363708496, + "learning_rate": 4.159270339319007e-06, + "loss": 0.7055, + "step": 25810 + }, + { + "epoch": 1.795924045350212, + "grad_norm": 1.5981173515319824, + "learning_rate": 4.155281560029218e-06, + "loss": 0.6747, + "step": 25820 + }, + { + "epoch": 1.7966196007511999, + "grad_norm": 1.826743721961975, + "learning_rate": 4.151293334048463e-06, + "loss": 0.7111, + "step": 25830 + }, + { + 
"epoch": 1.7973151561521874, + "grad_norm": 2.0100717544555664, + "learning_rate": 4.1473056639891185e-06, + "loss": 0.6626, + "step": 25840 + }, + { + "epoch": 1.7980107115531752, + "grad_norm": 1.5715949535369873, + "learning_rate": 4.143318552463193e-06, + "loss": 0.664, + "step": 25850 + }, + { + "epoch": 1.798706266954163, + "grad_norm": 6.547336101531982, + "learning_rate": 4.139332002082333e-06, + "loss": 0.7422, + "step": 25860 + }, + { + "epoch": 1.7994018223551507, + "grad_norm": 2.0072124004364014, + "learning_rate": 4.135346015457816e-06, + "loss": 0.7259, + "step": 25870 + }, + { + "epoch": 1.8000973777561384, + "grad_norm": 1.840713620185852, + "learning_rate": 4.131360595200552e-06, + "loss": 0.7304, + "step": 25880 + }, + { + "epoch": 1.800792933157126, + "grad_norm": 2.281780242919922, + "learning_rate": 4.127375743921076e-06, + "loss": 0.7208, + "step": 25890 + }, + { + "epoch": 1.8014884885581135, + "grad_norm": 1.5475441217422485, + "learning_rate": 4.1233914642295524e-06, + "loss": 0.7083, + "step": 25900 + }, + { + "epoch": 1.8021840439591013, + "grad_norm": 1.859960675239563, + "learning_rate": 4.119407758735776e-06, + "loss": 0.6974, + "step": 25910 + }, + { + "epoch": 1.802879599360089, + "grad_norm": 1.926255702972412, + "learning_rate": 4.115424630049156e-06, + "loss": 0.7018, + "step": 25920 + }, + { + "epoch": 1.8035751547610768, + "grad_norm": 2.1131722927093506, + "learning_rate": 4.111442080778728e-06, + "loss": 0.7079, + "step": 25930 + }, + { + "epoch": 1.8042707101620645, + "grad_norm": 2.2542102336883545, + "learning_rate": 4.107460113533153e-06, + "loss": 0.6499, + "step": 25940 + }, + { + "epoch": 1.804966265563052, + "grad_norm": 5.454006195068359, + "learning_rate": 4.103478730920704e-06, + "loss": 0.6549, + "step": 25950 + }, + { + "epoch": 1.8056618209640398, + "grad_norm": 1.6069576740264893, + "learning_rate": 4.099497935549275e-06, + "loss": 0.6682, + "step": 25960 + }, + { + "epoch": 1.8063573763650274, + "grad_norm": 2.3256187438964844, + "learning_rate": 4.095517730026371e-06, + "loss": 0.6438, + "step": 25970 + }, + { + "epoch": 1.8070529317660151, + "grad_norm": 1.9911335706710815, + "learning_rate": 4.09153811695912e-06, + "loss": 0.7258, + "step": 25980 + }, + { + "epoch": 1.8077484871670029, + "grad_norm": 1.9219112396240234, + "learning_rate": 4.087559098954247e-06, + "loss": 0.6574, + "step": 25990 + }, + { + "epoch": 1.8084440425679906, + "grad_norm": 1.9347059726715088, + "learning_rate": 4.083580678618102e-06, + "loss": 0.6937, + "step": 26000 + }, + { + "epoch": 1.8084440425679906, + "eval_loss": 0.9249603152275085, + "eval_runtime": 4623.9118, + "eval_samples_per_second": 3.928, + "eval_steps_per_second": 0.655, + "step": 26000 + }, + { + "epoch": 1.8091395979689784, + "grad_norm": 1.571662187576294, + "learning_rate": 4.079602858556635e-06, + "loss": 0.7036, + "step": 26010 + }, + { + "epoch": 1.809835153369966, + "grad_norm": 1.6726957559585571, + "learning_rate": 4.075625641375405e-06, + "loss": 0.6911, + "step": 26020 + }, + { + "epoch": 1.8105307087709535, + "grad_norm": 1.8611788749694824, + "learning_rate": 4.071649029679575e-06, + "loss": 0.7708, + "step": 26030 + }, + { + "epoch": 1.8112262641719412, + "grad_norm": 2.481220006942749, + "learning_rate": 4.067673026073916e-06, + "loss": 0.6894, + "step": 26040 + }, + { + "epoch": 1.811921819572929, + "grad_norm": 3.577754020690918, + "learning_rate": 4.0636976331627954e-06, + "loss": 0.6667, + "step": 26050 + }, + { + "epoch": 1.8126173749739167, + "grad_norm": 
1.6531307697296143, + "learning_rate": 4.059722853550181e-06, + "loss": 0.722, + "step": 26060 + }, + { + "epoch": 1.8133129303749045, + "grad_norm": 1.9562602043151855, + "learning_rate": 4.055748689839642e-06, + "loss": 0.673, + "step": 26070 + }, + { + "epoch": 1.814008485775892, + "grad_norm": 2.427379608154297, + "learning_rate": 4.051775144634342e-06, + "loss": 0.7079, + "step": 26080 + }, + { + "epoch": 1.8147040411768798, + "grad_norm": 1.8614587783813477, + "learning_rate": 4.047802220537038e-06, + "loss": 0.7146, + "step": 26090 + }, + { + "epoch": 1.8153995965778673, + "grad_norm": 1.8489729166030884, + "learning_rate": 4.043829920150086e-06, + "loss": 0.6514, + "step": 26100 + }, + { + "epoch": 1.816095151978855, + "grad_norm": 1.7313168048858643, + "learning_rate": 4.039858246075427e-06, + "loss": 0.7523, + "step": 26110 + }, + { + "epoch": 1.8167907073798428, + "grad_norm": 2.9523189067840576, + "learning_rate": 4.0358872009145964e-06, + "loss": 0.7067, + "step": 26120 + }, + { + "epoch": 1.8174862627808306, + "grad_norm": 2.14597487449646, + "learning_rate": 4.031916787268712e-06, + "loss": 0.6824, + "step": 26130 + }, + { + "epoch": 1.8181818181818183, + "grad_norm": 1.6713953018188477, + "learning_rate": 4.027947007738484e-06, + "loss": 0.7374, + "step": 26140 + }, + { + "epoch": 1.8188773735828059, + "grad_norm": 1.4517407417297363, + "learning_rate": 4.023977864924203e-06, + "loss": 0.6418, + "step": 26150 + }, + { + "epoch": 1.8195729289837934, + "grad_norm": 2.1957948207855225, + "learning_rate": 4.020009361425746e-06, + "loss": 0.642, + "step": 26160 + }, + { + "epoch": 1.8202684843847812, + "grad_norm": 2.3431150913238525, + "learning_rate": 4.01604149984257e-06, + "loss": 0.7052, + "step": 26170 + }, + { + "epoch": 1.820964039785769, + "grad_norm": 1.3917313814163208, + "learning_rate": 4.012074282773709e-06, + "loss": 0.6108, + "step": 26180 + }, + { + "epoch": 1.8216595951867567, + "grad_norm": 1.4609073400497437, + "learning_rate": 4.00810771281778e-06, + "loss": 0.7072, + "step": 26190 + }, + { + "epoch": 1.8223551505877444, + "grad_norm": 2.749133348464966, + "learning_rate": 4.004141792572968e-06, + "loss": 0.6612, + "step": 26200 + }, + { + "epoch": 1.823050705988732, + "grad_norm": 2.2969610691070557, + "learning_rate": 4.000176524637042e-06, + "loss": 0.7013, + "step": 26210 + }, + { + "epoch": 1.8237462613897197, + "grad_norm": 1.4194257259368896, + "learning_rate": 3.996211911607335e-06, + "loss": 0.7055, + "step": 26220 + }, + { + "epoch": 1.8244418167907073, + "grad_norm": 2.4802889823913574, + "learning_rate": 3.992247956080759e-06, + "loss": 0.6836, + "step": 26230 + }, + { + "epoch": 1.825137372191695, + "grad_norm": 2.2162885665893555, + "learning_rate": 3.988284660653789e-06, + "loss": 0.7045, + "step": 26240 + }, + { + "epoch": 1.8258329275926828, + "grad_norm": 1.5105047225952148, + "learning_rate": 3.984322027922473e-06, + "loss": 0.717, + "step": 26250 + }, + { + "epoch": 1.8265284829936705, + "grad_norm": 3.628929615020752, + "learning_rate": 3.980360060482418e-06, + "loss": 0.7002, + "step": 26260 + }, + { + "epoch": 1.8272240383946583, + "grad_norm": 1.6727979183197021, + "learning_rate": 3.976398760928805e-06, + "loss": 0.6942, + "step": 26270 + }, + { + "epoch": 1.8279195937956458, + "grad_norm": 1.4628766775131226, + "learning_rate": 3.9724381318563645e-06, + "loss": 0.7329, + "step": 26280 + }, + { + "epoch": 1.8286151491966334, + "grad_norm": 1.4445955753326416, + "learning_rate": 3.968478175859399e-06, + "loss": 0.6802, + "step": 26290 
+ }, + { + "epoch": 1.8293107045976211, + "grad_norm": 1.5917257070541382, + "learning_rate": 3.964518895531767e-06, + "loss": 0.7275, + "step": 26300 + }, + { + "epoch": 1.8300062599986089, + "grad_norm": 2.551870107650757, + "learning_rate": 3.960560293466882e-06, + "loss": 0.7186, + "step": 26310 + }, + { + "epoch": 1.8307018153995966, + "grad_norm": 1.9657893180847168, + "learning_rate": 3.956602372257714e-06, + "loss": 0.6575, + "step": 26320 + }, + { + "epoch": 1.8313973708005844, + "grad_norm": 2.478411912918091, + "learning_rate": 3.952645134496791e-06, + "loss": 0.6617, + "step": 26330 + }, + { + "epoch": 1.832092926201572, + "grad_norm": 1.9206576347351074, + "learning_rate": 3.948688582776189e-06, + "loss": 0.6951, + "step": 26340 + }, + { + "epoch": 1.8327884816025597, + "grad_norm": 4.2881340980529785, + "learning_rate": 3.944732719687534e-06, + "loss": 0.7457, + "step": 26350 + }, + { + "epoch": 1.8334840370035472, + "grad_norm": 3.0615735054016113, + "learning_rate": 3.9407775478220036e-06, + "loss": 0.6808, + "step": 26360 + }, + { + "epoch": 1.834179592404535, + "grad_norm": 2.2199976444244385, + "learning_rate": 3.936823069770323e-06, + "loss": 0.7093, + "step": 26370 + }, + { + "epoch": 1.8348751478055227, + "grad_norm": 1.8509395122528076, + "learning_rate": 3.93286928812276e-06, + "loss": 0.6745, + "step": 26380 + }, + { + "epoch": 1.8355707032065105, + "grad_norm": 2.1274397373199463, + "learning_rate": 3.928916205469129e-06, + "loss": 0.6417, + "step": 26390 + }, + { + "epoch": 1.836266258607498, + "grad_norm": 1.4666999578475952, + "learning_rate": 3.924963824398786e-06, + "loss": 0.6638, + "step": 26400 + }, + { + "epoch": 1.8369618140084858, + "grad_norm": 1.9783587455749512, + "learning_rate": 3.9210121475006265e-06, + "loss": 0.6894, + "step": 26410 + }, + { + "epoch": 1.8376573694094733, + "grad_norm": 3.290480852127075, + "learning_rate": 3.917061177363085e-06, + "loss": 0.6871, + "step": 26420 + }, + { + "epoch": 1.838352924810461, + "grad_norm": 1.409499168395996, + "learning_rate": 3.913110916574133e-06, + "loss": 0.6907, + "step": 26430 + }, + { + "epoch": 1.8390484802114488, + "grad_norm": 2.194251298904419, + "learning_rate": 3.909161367721277e-06, + "loss": 0.7291, + "step": 26440 + }, + { + "epoch": 1.8397440356124366, + "grad_norm": 1.4851640462875366, + "learning_rate": 3.9052125333915596e-06, + "loss": 0.6629, + "step": 26450 + }, + { + "epoch": 1.8404395910134244, + "grad_norm": 1.6062613725662231, + "learning_rate": 3.901264416171552e-06, + "loss": 0.7536, + "step": 26460 + }, + { + "epoch": 1.841135146414412, + "grad_norm": 3.793309450149536, + "learning_rate": 3.897317018647359e-06, + "loss": 0.7171, + "step": 26470 + }, + { + "epoch": 1.8418307018153997, + "grad_norm": 2.163449287414551, + "learning_rate": 3.893370343404611e-06, + "loss": 0.705, + "step": 26480 + }, + { + "epoch": 1.8425262572163872, + "grad_norm": 3.1771023273468018, + "learning_rate": 3.8894243930284665e-06, + "loss": 0.7366, + "step": 26490 + }, + { + "epoch": 1.843221812617375, + "grad_norm": 1.9302486181259155, + "learning_rate": 3.885479170103609e-06, + "loss": 0.7103, + "step": 26500 + }, + { + "epoch": 1.843221812617375, + "eval_loss": 0.9228906035423279, + "eval_runtime": 4600.7065, + "eval_samples_per_second": 3.947, + "eval_steps_per_second": 0.658, + "step": 26500 + }, + { + "epoch": 1.8439173680183627, + "grad_norm": 2.755110502243042, + "learning_rate": 3.881534677214245e-06, + "loss": 0.701, + "step": 26510 + }, + { + "epoch": 1.8446129234193505, + "grad_norm": 
2.730861186981201, + "learning_rate": 3.877590916944106e-06, + "loss": 0.7321, + "step": 26520 + }, + { + "epoch": 1.845308478820338, + "grad_norm": 2.2284111976623535, + "learning_rate": 3.873647891876439e-06, + "loss": 0.6816, + "step": 26530 + }, + { + "epoch": 1.8460040342213258, + "grad_norm": 2.1665894985198975, + "learning_rate": 3.869705604594013e-06, + "loss": 0.6748, + "step": 26540 + }, + { + "epoch": 1.8466995896223133, + "grad_norm": 1.6956160068511963, + "learning_rate": 3.865764057679112e-06, + "loss": 0.6969, + "step": 26550 + }, + { + "epoch": 1.847395145023301, + "grad_norm": 1.6402581930160522, + "learning_rate": 3.861823253713535e-06, + "loss": 0.7294, + "step": 26560 + }, + { + "epoch": 1.8480907004242888, + "grad_norm": 1.5033944845199585, + "learning_rate": 3.857883195278593e-06, + "loss": 0.7085, + "step": 26570 + }, + { + "epoch": 1.8487862558252766, + "grad_norm": 2.0093884468078613, + "learning_rate": 3.853943884955113e-06, + "loss": 0.6992, + "step": 26580 + }, + { + "epoch": 1.8494818112262643, + "grad_norm": 2.0879151821136475, + "learning_rate": 3.850005325323428e-06, + "loss": 0.7142, + "step": 26590 + }, + { + "epoch": 1.8501773666272519, + "grad_norm": 1.6245100498199463, + "learning_rate": 3.846067518963381e-06, + "loss": 0.622, + "step": 26600 + }, + { + "epoch": 1.8508729220282394, + "grad_norm": 1.742925763130188, + "learning_rate": 3.8421304684543205e-06, + "loss": 0.7394, + "step": 26610 + }, + { + "epoch": 1.8515684774292271, + "grad_norm": 2.240619659423828, + "learning_rate": 3.838194176375104e-06, + "loss": 0.708, + "step": 26620 + }, + { + "epoch": 1.852264032830215, + "grad_norm": 1.901363492012024, + "learning_rate": 3.834258645304084e-06, + "loss": 0.6814, + "step": 26630 + }, + { + "epoch": 1.8529595882312027, + "grad_norm": 1.7094508409500122, + "learning_rate": 3.830323877819121e-06, + "loss": 0.7217, + "step": 26640 + }, + { + "epoch": 1.8536551436321904, + "grad_norm": 2.2385787963867188, + "learning_rate": 3.826389876497575e-06, + "loss": 0.7408, + "step": 26650 + }, + { + "epoch": 1.854350699033178, + "grad_norm": 1.9295837879180908, + "learning_rate": 3.822456643916302e-06, + "loss": 0.6709, + "step": 26660 + }, + { + "epoch": 1.8550462544341657, + "grad_norm": 1.7912726402282715, + "learning_rate": 3.8185241826516536e-06, + "loss": 0.708, + "step": 26670 + }, + { + "epoch": 1.8557418098351532, + "grad_norm": 1.8920040130615234, + "learning_rate": 3.814592495279481e-06, + "loss": 0.716, + "step": 26680 + }, + { + "epoch": 1.856437365236141, + "grad_norm": 1.5248322486877441, + "learning_rate": 3.8106615843751244e-06, + "loss": 0.7359, + "step": 26690 + }, + { + "epoch": 1.8571329206371288, + "grad_norm": 1.9211211204528809, + "learning_rate": 3.8067314525134147e-06, + "loss": 0.7065, + "step": 26700 + }, + { + "epoch": 1.8578284760381165, + "grad_norm": 1.7975727319717407, + "learning_rate": 3.802802102268675e-06, + "loss": 0.7317, + "step": 26710 + }, + { + "epoch": 1.8585240314391043, + "grad_norm": 1.6307320594787598, + "learning_rate": 3.798873536214716e-06, + "loss": 0.6904, + "step": 26720 + }, + { + "epoch": 1.8592195868400918, + "grad_norm": 1.5273314714431763, + "learning_rate": 3.7949457569248328e-06, + "loss": 0.6774, + "step": 26730 + }, + { + "epoch": 1.8599151422410793, + "grad_norm": 2.1302530765533447, + "learning_rate": 3.791018766971809e-06, + "loss": 0.738, + "step": 26740 + }, + { + "epoch": 1.860610697642067, + "grad_norm": 1.859937071800232, + "learning_rate": 3.7870925689279075e-06, + "loss": 0.7294, + "step": 
26750 + }, + { + "epoch": 1.8613062530430549, + "grad_norm": 1.4554426670074463, + "learning_rate": 3.7831671653648754e-06, + "loss": 0.7014, + "step": 26760 + }, + { + "epoch": 1.8620018084440426, + "grad_norm": 2.742192029953003, + "learning_rate": 3.779242558853935e-06, + "loss": 0.768, + "step": 26770 + }, + { + "epoch": 1.8626973638450304, + "grad_norm": 2.391306161880493, + "learning_rate": 3.775318751965791e-06, + "loss": 0.6988, + "step": 26780 + }, + { + "epoch": 1.863392919246018, + "grad_norm": 1.5109362602233887, + "learning_rate": 3.771395747270622e-06, + "loss": 0.6881, + "step": 26790 + }, + { + "epoch": 1.8640884746470057, + "grad_norm": 1.5582871437072754, + "learning_rate": 3.7674735473380807e-06, + "loss": 0.7095, + "step": 26800 + }, + { + "epoch": 1.8647840300479932, + "grad_norm": 1.8798670768737793, + "learning_rate": 3.7635521547372966e-06, + "loss": 0.6613, + "step": 26810 + }, + { + "epoch": 1.865479585448981, + "grad_norm": 1.5554819107055664, + "learning_rate": 3.7596315720368664e-06, + "loss": 0.6774, + "step": 26820 + }, + { + "epoch": 1.8661751408499687, + "grad_norm": 1.8373326063156128, + "learning_rate": 3.7557118018048577e-06, + "loss": 0.7298, + "step": 26830 + }, + { + "epoch": 1.8668706962509565, + "grad_norm": 2.1342225074768066, + "learning_rate": 3.7517928466088044e-06, + "loss": 0.7031, + "step": 26840 + }, + { + "epoch": 1.8675662516519442, + "grad_norm": 3.197488784790039, + "learning_rate": 3.747874709015708e-06, + "loss": 0.7275, + "step": 26850 + }, + { + "epoch": 1.8682618070529318, + "grad_norm": 1.5971283912658691, + "learning_rate": 3.7439573915920343e-06, + "loss": 0.6401, + "step": 26860 + }, + { + "epoch": 1.8689573624539193, + "grad_norm": 1.6130291223526, + "learning_rate": 3.740040896903713e-06, + "loss": 0.6869, + "step": 26870 + }, + { + "epoch": 1.869652917854907, + "grad_norm": 2.7985780239105225, + "learning_rate": 3.7361252275161317e-06, + "loss": 0.6289, + "step": 26880 + }, + { + "epoch": 1.8703484732558948, + "grad_norm": 1.965221643447876, + "learning_rate": 3.7322103859941417e-06, + "loss": 0.7433, + "step": 26890 + }, + { + "epoch": 1.8710440286568826, + "grad_norm": 1.648320198059082, + "learning_rate": 3.7282963749020496e-06, + "loss": 0.6655, + "step": 26900 + }, + { + "epoch": 1.8717395840578703, + "grad_norm": 1.4046196937561035, + "learning_rate": 3.724383196803616e-06, + "loss": 0.7152, + "step": 26910 + }, + { + "epoch": 1.8724351394588579, + "grad_norm": 1.203857183456421, + "learning_rate": 3.720470854262058e-06, + "loss": 0.6711, + "step": 26920 + }, + { + "epoch": 1.8731306948598456, + "grad_norm": 3.834266185760498, + "learning_rate": 3.7165593498400487e-06, + "loss": 0.6863, + "step": 26930 + }, + { + "epoch": 1.8738262502608332, + "grad_norm": 1.6186596155166626, + "learning_rate": 3.7126486860997056e-06, + "loss": 0.7087, + "step": 26940 + }, + { + "epoch": 1.874521805661821, + "grad_norm": 1.9383821487426758, + "learning_rate": 3.7087388656026013e-06, + "loss": 0.7237, + "step": 26950 + }, + { + "epoch": 1.8752173610628087, + "grad_norm": 1.7503395080566406, + "learning_rate": 3.704829890909751e-06, + "loss": 0.7006, + "step": 26960 + }, + { + "epoch": 1.8759129164637964, + "grad_norm": 2.405442237854004, + "learning_rate": 3.700921764581621e-06, + "loss": 0.7469, + "step": 26970 + }, + { + "epoch": 1.8766084718647842, + "grad_norm": 2.1626598834991455, + "learning_rate": 3.6970144891781203e-06, + "loss": 0.7464, + "step": 26980 + }, + { + "epoch": 1.8773040272657717, + "grad_norm": 1.7654370069503784, + 
"learning_rate": 3.693108067258596e-06, + "loss": 0.7489, + "step": 26990 + }, + { + "epoch": 1.8779995826667593, + "grad_norm": 1.757971167564392, + "learning_rate": 3.6892025013818423e-06, + "loss": 0.7262, + "step": 27000 + }, + { + "epoch": 1.8779995826667593, + "eval_loss": 0.9186991453170776, + "eval_runtime": 4598.8753, + "eval_samples_per_second": 3.949, + "eval_steps_per_second": 0.658, + "step": 27000 + }, + { + "epoch": 1.878695138067747, + "grad_norm": 3.7609806060791016, + "learning_rate": 3.6852977941060887e-06, + "loss": 0.7523, + "step": 27010 + }, + { + "epoch": 1.8793906934687348, + "grad_norm": 1.5845229625701904, + "learning_rate": 3.6813939479890048e-06, + "loss": 0.6685, + "step": 27020 + }, + { + "epoch": 1.8800862488697225, + "grad_norm": 1.814150333404541, + "learning_rate": 3.6774909655876957e-06, + "loss": 0.7218, + "step": 27030 + }, + { + "epoch": 1.8807818042707103, + "grad_norm": 1.4019838571548462, + "learning_rate": 3.6735888494586992e-06, + "loss": 0.6871, + "step": 27040 + }, + { + "epoch": 1.8814773596716978, + "grad_norm": 2.028859853744507, + "learning_rate": 3.6696876021579876e-06, + "loss": 0.6579, + "step": 27050 + }, + { + "epoch": 1.8821729150726856, + "grad_norm": 1.6298279762268066, + "learning_rate": 3.6657872262409616e-06, + "loss": 0.664, + "step": 27060 + }, + { + "epoch": 1.8828684704736731, + "grad_norm": 2.359654188156128, + "learning_rate": 3.6618877242624536e-06, + "loss": 0.7012, + "step": 27070 + }, + { + "epoch": 1.8835640258746609, + "grad_norm": 1.4496018886566162, + "learning_rate": 3.657989098776722e-06, + "loss": 0.6896, + "step": 27080 + }, + { + "epoch": 1.8842595812756486, + "grad_norm": 1.2981252670288086, + "learning_rate": 3.654091352337451e-06, + "loss": 0.6735, + "step": 27090 + }, + { + "epoch": 1.8849551366766364, + "grad_norm": 2.375112295150757, + "learning_rate": 3.6501944874977524e-06, + "loss": 0.685, + "step": 27100 + }, + { + "epoch": 1.8856506920776241, + "grad_norm": 3.5049986839294434, + "learning_rate": 3.6462985068101557e-06, + "loss": 0.6779, + "step": 27110 + }, + { + "epoch": 1.8863462474786117, + "grad_norm": 2.049565553665161, + "learning_rate": 3.642403412826615e-06, + "loss": 0.7227, + "step": 27120 + }, + { + "epoch": 1.8870418028795992, + "grad_norm": 1.7804454565048218, + "learning_rate": 3.6385092080984997e-06, + "loss": 0.741, + "step": 27130 + }, + { + "epoch": 1.887737358280587, + "grad_norm": 2.1661365032196045, + "learning_rate": 3.6346158951766005e-06, + "loss": 0.702, + "step": 27140 + }, + { + "epoch": 1.8884329136815747, + "grad_norm": 2.410181760787964, + "learning_rate": 3.6307234766111206e-06, + "loss": 0.7153, + "step": 27150 + }, + { + "epoch": 1.8891284690825625, + "grad_norm": 1.7265669107437134, + "learning_rate": 3.6268319549516816e-06, + "loss": 0.7108, + "step": 27160 + }, + { + "epoch": 1.8898240244835502, + "grad_norm": 2.3898215293884277, + "learning_rate": 3.622941332747314e-06, + "loss": 0.7545, + "step": 27170 + }, + { + "epoch": 1.8905195798845378, + "grad_norm": 1.8482922315597534, + "learning_rate": 3.6190516125464616e-06, + "loss": 0.7603, + "step": 27180 + }, + { + "epoch": 1.8912151352855255, + "grad_norm": 2.3116350173950195, + "learning_rate": 3.6151627968969747e-06, + "loss": 0.712, + "step": 27190 + }, + { + "epoch": 1.891910690686513, + "grad_norm": 2.0727038383483887, + "learning_rate": 3.611274888346113e-06, + "loss": 0.6722, + "step": 27200 + }, + { + "epoch": 1.8926062460875008, + "grad_norm": 1.4715973138809204, + "learning_rate": 3.60738788944054e-06, + 
"loss": 0.6965, + "step": 27210 + }, + { + "epoch": 1.8933018014884886, + "grad_norm": 1.5750131607055664, + "learning_rate": 3.6035018027263272e-06, + "loss": 0.6678, + "step": 27220 + }, + { + "epoch": 1.8939973568894763, + "grad_norm": 1.9347894191741943, + "learning_rate": 3.599616630748946e-06, + "loss": 0.6964, + "step": 27230 + }, + { + "epoch": 1.8946929122904639, + "grad_norm": 1.6793467998504639, + "learning_rate": 3.595732376053268e-06, + "loss": 0.6669, + "step": 27240 + }, + { + "epoch": 1.8953884676914516, + "grad_norm": 2.454257011413574, + "learning_rate": 3.5918490411835647e-06, + "loss": 0.7056, + "step": 27250 + }, + { + "epoch": 1.8960840230924392, + "grad_norm": 3.7943339347839355, + "learning_rate": 3.5879666286835084e-06, + "loss": 0.7313, + "step": 27260 + }, + { + "epoch": 1.896779578493427, + "grad_norm": 2.0135931968688965, + "learning_rate": 3.5840851410961585e-06, + "loss": 0.723, + "step": 27270 + }, + { + "epoch": 1.8974751338944147, + "grad_norm": 1.8486860990524292, + "learning_rate": 3.580204580963979e-06, + "loss": 0.6759, + "step": 27280 + }, + { + "epoch": 1.8981706892954024, + "grad_norm": 1.7523396015167236, + "learning_rate": 3.5763249508288197e-06, + "loss": 0.6677, + "step": 27290 + }, + { + "epoch": 1.8988662446963902, + "grad_norm": 2.35956072807312, + "learning_rate": 3.5724462532319225e-06, + "loss": 0.6772, + "step": 27300 + }, + { + "epoch": 1.8995618000973777, + "grad_norm": 1.6280624866485596, + "learning_rate": 3.5685684907139195e-06, + "loss": 0.7129, + "step": 27310 + }, + { + "epoch": 1.9002573554983655, + "grad_norm": 2.0462679862976074, + "learning_rate": 3.564691665814831e-06, + "loss": 0.6673, + "step": 27320 + }, + { + "epoch": 1.900952910899353, + "grad_norm": 1.8961502313613892, + "learning_rate": 3.5608157810740635e-06, + "loss": 0.7399, + "step": 27330 + }, + { + "epoch": 1.9016484663003408, + "grad_norm": 5.827134132385254, + "learning_rate": 3.5569408390304007e-06, + "loss": 0.7047, + "step": 27340 + }, + { + "epoch": 1.9023440217013285, + "grad_norm": 1.5664170980453491, + "learning_rate": 3.553066842222018e-06, + "loss": 0.7207, + "step": 27350 + }, + { + "epoch": 1.9030395771023163, + "grad_norm": 2.438519239425659, + "learning_rate": 3.549193793186468e-06, + "loss": 0.6528, + "step": 27360 + }, + { + "epoch": 1.9037351325033038, + "grad_norm": 2.050731658935547, + "learning_rate": 3.5453216944606804e-06, + "loss": 0.6881, + "step": 27370 + }, + { + "epoch": 1.9044306879042916, + "grad_norm": 1.6574395895004272, + "learning_rate": 3.5414505485809677e-06, + "loss": 0.6911, + "step": 27380 + }, + { + "epoch": 1.9051262433052791, + "grad_norm": 1.6993664503097534, + "learning_rate": 3.5375803580830125e-06, + "loss": 0.6675, + "step": 27390 + }, + { + "epoch": 1.905821798706267, + "grad_norm": 1.8912372589111328, + "learning_rate": 3.5337111255018765e-06, + "loss": 0.6648, + "step": 27400 + }, + { + "epoch": 1.9065173541072546, + "grad_norm": 2.1371243000030518, + "learning_rate": 3.5298428533719885e-06, + "loss": 0.7064, + "step": 27410 + }, + { + "epoch": 1.9072129095082424, + "grad_norm": 1.7865904569625854, + "learning_rate": 3.525975544227154e-06, + "loss": 0.6986, + "step": 27420 + }, + { + "epoch": 1.9079084649092302, + "grad_norm": 2.061286449432373, + "learning_rate": 3.522109200600542e-06, + "loss": 0.6845, + "step": 27430 + }, + { + "epoch": 1.9086040203102177, + "grad_norm": 4.014735698699951, + "learning_rate": 3.5182438250246936e-06, + "loss": 0.674, + "step": 27440 + }, + { + "epoch": 1.9092995757112052, + 
"grad_norm": 5.202054977416992, + "learning_rate": 3.5143794200315156e-06, + "loss": 0.7009, + "step": 27450 + }, + { + "epoch": 1.909995131112193, + "grad_norm": 1.3079453706741333, + "learning_rate": 3.5105159881522767e-06, + "loss": 0.7608, + "step": 27460 + }, + { + "epoch": 1.9106906865131807, + "grad_norm": 3.5554039478302, + "learning_rate": 3.5066535319176098e-06, + "loss": 0.7157, + "step": 27470 + }, + { + "epoch": 1.9113862419141685, + "grad_norm": 2.8016936779022217, + "learning_rate": 3.502792053857506e-06, + "loss": 0.7145, + "step": 27480 + }, + { + "epoch": 1.9120817973151563, + "grad_norm": 1.9333059787750244, + "learning_rate": 3.498931556501319e-06, + "loss": 0.6635, + "step": 27490 + }, + { + "epoch": 1.9127773527161438, + "grad_norm": 2.025204658508301, + "learning_rate": 3.4950720423777596e-06, + "loss": 0.7019, + "step": 27500 + }, + { + "epoch": 1.9127773527161438, + "eval_loss": 0.9183681607246399, + "eval_runtime": 4616.0348, + "eval_samples_per_second": 3.934, + "eval_steps_per_second": 0.656, + "step": 27500 + }, + { + "epoch": 1.9134729081171316, + "grad_norm": 1.3551404476165771, + "learning_rate": 3.4912135140148928e-06, + "loss": 0.631, + "step": 27510 + }, + { + "epoch": 1.914168463518119, + "grad_norm": 3.83156681060791, + "learning_rate": 3.48735597394014e-06, + "loss": 0.7151, + "step": 27520 + }, + { + "epoch": 1.9148640189191068, + "grad_norm": 1.7178568840026855, + "learning_rate": 3.4834994246802744e-06, + "loss": 0.7247, + "step": 27530 + }, + { + "epoch": 1.9155595743200946, + "grad_norm": 1.8828271627426147, + "learning_rate": 3.47964386876142e-06, + "loss": 0.7415, + "step": 27540 + }, + { + "epoch": 1.9162551297210824, + "grad_norm": 2.038243293762207, + "learning_rate": 3.4757893087090483e-06, + "loss": 0.7187, + "step": 27550 + }, + { + "epoch": 1.9169506851220701, + "grad_norm": 1.4839612245559692, + "learning_rate": 3.471935747047981e-06, + "loss": 0.7006, + "step": 27560 + }, + { + "epoch": 1.9176462405230577, + "grad_norm": 1.6841415166854858, + "learning_rate": 3.4680831863023866e-06, + "loss": 0.696, + "step": 27570 + }, + { + "epoch": 1.9183417959240452, + "grad_norm": 1.8087557554244995, + "learning_rate": 3.4642316289957755e-06, + "loss": 0.6735, + "step": 27580 + }, + { + "epoch": 1.919037351325033, + "grad_norm": 1.9873178005218506, + "learning_rate": 3.4603810776510026e-06, + "loss": 0.6921, + "step": 27590 + }, + { + "epoch": 1.9197329067260207, + "grad_norm": 1.69353187084198, + "learning_rate": 3.4565315347902615e-06, + "loss": 0.7636, + "step": 27600 + }, + { + "epoch": 1.9204284621270085, + "grad_norm": 1.554626226425171, + "learning_rate": 3.452683002935091e-06, + "loss": 0.6837, + "step": 27610 + }, + { + "epoch": 1.9211240175279962, + "grad_norm": 1.9585578441619873, + "learning_rate": 3.4488354846063577e-06, + "loss": 0.6891, + "step": 27620 + }, + { + "epoch": 1.9218195729289838, + "grad_norm": 2.3039987087249756, + "learning_rate": 3.4449889823242744e-06, + "loss": 0.6347, + "step": 27630 + }, + { + "epoch": 1.9225151283299715, + "grad_norm": 2.753787040710449, + "learning_rate": 3.4411434986083827e-06, + "loss": 0.7168, + "step": 27640 + }, + { + "epoch": 1.923210683730959, + "grad_norm": 1.8908294439315796, + "learning_rate": 3.4372990359775587e-06, + "loss": 0.68, + "step": 27650 + }, + { + "epoch": 1.9239062391319468, + "grad_norm": 1.956161379814148, + "learning_rate": 3.433455596950008e-06, + "loss": 0.719, + "step": 27660 + }, + { + "epoch": 1.9246017945329346, + "grad_norm": 1.808939814567566, + "learning_rate": 
3.42961318404327e-06, + "loss": 0.6348, + "step": 27670 + }, + { + "epoch": 1.9252973499339223, + "grad_norm": 2.5244390964508057, + "learning_rate": 3.4257717997742073e-06, + "loss": 0.6503, + "step": 27680 + }, + { + "epoch": 1.92599290533491, + "grad_norm": 1.9089771509170532, + "learning_rate": 3.4219314466590113e-06, + "loss": 0.5984, + "step": 27690 + }, + { + "epoch": 1.9266884607358976, + "grad_norm": 1.7644000053405762, + "learning_rate": 3.4180921272131968e-06, + "loss": 0.7923, + "step": 27700 + }, + { + "epoch": 1.9273840161368851, + "grad_norm": 2.057901382446289, + "learning_rate": 3.4142538439516017e-06, + "loss": 0.7343, + "step": 27710 + }, + { + "epoch": 1.928079571537873, + "grad_norm": 1.582972764968872, + "learning_rate": 3.4104165993883843e-06, + "loss": 0.6937, + "step": 27720 + }, + { + "epoch": 1.9287751269388607, + "grad_norm": 1.992632508277893, + "learning_rate": 3.406580396037025e-06, + "loss": 0.6864, + "step": 27730 + }, + { + "epoch": 1.9294706823398484, + "grad_norm": 3.53491473197937, + "learning_rate": 3.402745236410321e-06, + "loss": 0.7319, + "step": 27740 + }, + { + "epoch": 1.9301662377408362, + "grad_norm": 3.669023275375366, + "learning_rate": 3.398911123020385e-06, + "loss": 0.6686, + "step": 27750 + }, + { + "epoch": 1.9308617931418237, + "grad_norm": 2.2862534523010254, + "learning_rate": 3.3950780583786476e-06, + "loss": 0.6716, + "step": 27760 + }, + { + "epoch": 1.9315573485428115, + "grad_norm": 2.379317283630371, + "learning_rate": 3.3912460449958456e-06, + "loss": 0.6599, + "step": 27770 + }, + { + "epoch": 1.932252903943799, + "grad_norm": 1.8233506679534912, + "learning_rate": 3.3874150853820342e-06, + "loss": 0.6995, + "step": 27780 + }, + { + "epoch": 1.9329484593447868, + "grad_norm": 1.6886887550354004, + "learning_rate": 3.3835851820465736e-06, + "loss": 0.7143, + "step": 27790 + }, + { + "epoch": 1.9336440147457745, + "grad_norm": 2.156623125076294, + "learning_rate": 3.379756337498137e-06, + "loss": 0.7068, + "step": 27800 + }, + { + "epoch": 1.9343395701467623, + "grad_norm": 2.0466043949127197, + "learning_rate": 3.3759285542446983e-06, + "loss": 0.6666, + "step": 27810 + }, + { + "epoch": 1.93503512554775, + "grad_norm": 1.3158215284347534, + "learning_rate": 3.372101834793542e-06, + "loss": 0.7516, + "step": 27820 + }, + { + "epoch": 1.9357306809487376, + "grad_norm": 1.8440043926239014, + "learning_rate": 3.368276181651252e-06, + "loss": 0.7047, + "step": 27830 + }, + { + "epoch": 1.936426236349725, + "grad_norm": 2.2353994846343994, + "learning_rate": 3.364451597323714e-06, + "loss": 0.7074, + "step": 27840 + }, + { + "epoch": 1.9371217917507129, + "grad_norm": 1.2795568704605103, + "learning_rate": 3.3606280843161134e-06, + "loss": 0.6577, + "step": 27850 + }, + { + "epoch": 1.9378173471517006, + "grad_norm": 1.731223225593567, + "learning_rate": 3.3568056451329366e-06, + "loss": 0.7548, + "step": 27860 + }, + { + "epoch": 1.9385129025526884, + "grad_norm": 2.2415881156921387, + "learning_rate": 3.352984282277964e-06, + "loss": 0.6929, + "step": 27870 + }, + { + "epoch": 1.9392084579536761, + "grad_norm": 1.8030407428741455, + "learning_rate": 3.349163998254272e-06, + "loss": 0.702, + "step": 27880 + }, + { + "epoch": 1.9399040133546637, + "grad_norm": 4.2967329025268555, + "learning_rate": 3.3453447955642293e-06, + "loss": 0.6545, + "step": 27890 + }, + { + "epoch": 1.9405995687556514, + "grad_norm": 2.113919734954834, + "learning_rate": 3.3415266767095005e-06, + "loss": 0.6814, + "step": 27900 + }, + { + "epoch": 
1.941295124156639, + "grad_norm": 2.5689258575439453, + "learning_rate": 3.337709644191031e-06, + "loss": 0.7292, + "step": 27910 + }, + { + "epoch": 1.9419906795576267, + "grad_norm": 2.105177879333496, + "learning_rate": 3.3338937005090655e-06, + "loss": 0.632, + "step": 27920 + }, + { + "epoch": 1.9426862349586145, + "grad_norm": 3.4160244464874268, + "learning_rate": 3.3300788481631276e-06, + "loss": 0.745, + "step": 27930 + }, + { + "epoch": 1.9433817903596022, + "grad_norm": 2.6877288818359375, + "learning_rate": 3.326265089652031e-06, + "loss": 0.7226, + "step": 27940 + }, + { + "epoch": 1.94407734576059, + "grad_norm": 1.5519918203353882, + "learning_rate": 3.3224524274738678e-06, + "loss": 0.6504, + "step": 27950 + }, + { + "epoch": 1.9447729011615775, + "grad_norm": 2.1146302223205566, + "learning_rate": 3.318640864126019e-06, + "loss": 0.728, + "step": 27960 + }, + { + "epoch": 1.945468456562565, + "grad_norm": 4.279436111450195, + "learning_rate": 3.3148304021051413e-06, + "loss": 0.691, + "step": 27970 + }, + { + "epoch": 1.9461640119635528, + "grad_norm": 2.55841064453125, + "learning_rate": 3.3110210439071667e-06, + "loss": 0.674, + "step": 27980 + }, + { + "epoch": 1.9468595673645406, + "grad_norm": 2.4443976879119873, + "learning_rate": 3.307212792027311e-06, + "loss": 0.7371, + "step": 27990 + }, + { + "epoch": 1.9475551227655283, + "grad_norm": 1.6648225784301758, + "learning_rate": 3.303405648960062e-06, + "loss": 0.6939, + "step": 28000 + }, + { + "epoch": 1.9475551227655283, + "eval_loss": 0.9132990837097168, + "eval_runtime": 4601.4253, + "eval_samples_per_second": 3.947, + "eval_steps_per_second": 0.658, + "step": 28000 + } + ], + "logging_steps": 10, + "max_steps": 43131, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4318057928982528.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}
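
The file above is the standard Trainer state written alongside the checkpoint; if you want to inspect the curves rather than read the raw JSON, a minimal sketch like the following works. The checkpoint path, the matplotlib plotting, and the output filename are illustrative assumptions, not part of this checkpoint; only the `log_history` keys (`step`, `loss`, `eval_loss`) come from the file itself.

    # Minimal sketch: read trainer_state.json and plot train/eval loss from log_history.
    # The path and the plotting backend are assumptions for illustration.
    import json

    import matplotlib.pyplot as plt

    with open("checkpoint-28000/trainer_state.json") as f:  # assumed location
        state = json.load(f)

    history = state["log_history"]
    # Training entries carry "loss" (logged every 10 steps here);
    # evaluation entries carry "eval_loss" (every 500 steps here).
    train_steps = [e["step"] for e in history if "loss" in e]
    train_loss = [e["loss"] for e in history if "loss" in e]
    eval_steps = [e["step"] for e in history if "eval_loss" in e]
    eval_loss = [e["eval_loss"] for e in history if "eval_loss" in e]

    plt.plot(train_steps, train_loss, label="train loss")
    plt.plot(eval_steps, eval_loss, marker="o", label="eval loss")
    plt.xlabel("global step")
    plt.ylabel("loss")
    plt.legend()
    plt.savefig("loss_curves.png")  # assumed output name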