{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 6267,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 3869.4292984240187,
"learning_rate": 1.594896331738437e-08,
"loss": 14.9229,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 4471.0283607460915,
"learning_rate": 7.974481658692185e-08,
"loss": 14.4971,
"step": 5
},
{
"epoch": 0.0,
"grad_norm": 2823.1908727713094,
"learning_rate": 1.594896331738437e-07,
"loss": 13.6783,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 789.0423597691714,
"learning_rate": 2.3923444976076555e-07,
"loss": 9.7488,
"step": 15
},
{
"epoch": 0.01,
"grad_norm": 1003.0768847617187,
"learning_rate": 3.189792663476874e-07,
"loss": 7.6243,
"step": 20
},
{
"epoch": 0.01,
"grad_norm": 302.84912100061086,
"learning_rate": 3.987240829346093e-07,
"loss": 6.9821,
"step": 25
},
{
"epoch": 0.01,
"grad_norm": 656.7198688085261,
"learning_rate": 4.784688995215311e-07,
"loss": 5.8863,
"step": 30
},
{
"epoch": 0.02,
"grad_norm": 484.09470283669117,
"learning_rate": 5.582137161084529e-07,
"loss": 5.2196,
"step": 35
},
{
"epoch": 0.02,
"grad_norm": 206.10955451247187,
"learning_rate": 6.379585326953748e-07,
"loss": 5.1068,
"step": 40
},
{
"epoch": 0.02,
"grad_norm": 124.99082774996045,
"learning_rate": 7.177033492822967e-07,
"loss": 4.762,
"step": 45
},
{
"epoch": 0.02,
"grad_norm": 84.21647834352541,
"learning_rate": 7.974481658692186e-07,
"loss": 4.6487,
"step": 50
},
{
"epoch": 0.03,
"grad_norm": 88.78548355259916,
"learning_rate": 8.771929824561404e-07,
"loss": 4.4441,
"step": 55
},
{
"epoch": 0.03,
"grad_norm": 61.84251900759107,
"learning_rate": 9.569377990430622e-07,
"loss": 4.2248,
"step": 60
},
{
"epoch": 0.03,
"grad_norm": 50.645931681918356,
"learning_rate": 1.0366826156299842e-06,
"loss": 4.0471,
"step": 65
},
{
"epoch": 0.03,
"grad_norm": 57.2132012438477,
"learning_rate": 1.1164274322169059e-06,
"loss": 3.8855,
"step": 70
},
{
"epoch": 0.04,
"grad_norm": 48.91636382748426,
"learning_rate": 1.196172248803828e-06,
"loss": 3.8015,
"step": 75
},
{
"epoch": 0.04,
"grad_norm": 40.59545123420195,
"learning_rate": 1.2759170653907495e-06,
"loss": 3.6962,
"step": 80
},
{
"epoch": 0.04,
"grad_norm": 35.57431531973308,
"learning_rate": 1.3556618819776716e-06,
"loss": 3.5799,
"step": 85
},
{
"epoch": 0.04,
"grad_norm": 34.758157090434075,
"learning_rate": 1.4354066985645934e-06,
"loss": 3.5206,
"step": 90
},
{
"epoch": 0.05,
"grad_norm": 63.66863856710816,
"learning_rate": 1.5151515151515152e-06,
"loss": 3.3572,
"step": 95
},
{
"epoch": 0.05,
"grad_norm": 42.68061714524391,
"learning_rate": 1.5948963317384373e-06,
"loss": 3.2366,
"step": 100
},
{
"epoch": 0.05,
"grad_norm": 33.593017073472986,
"learning_rate": 1.6746411483253591e-06,
"loss": 3.1241,
"step": 105
},
{
"epoch": 0.05,
"grad_norm": 33.40342282554561,
"learning_rate": 1.7543859649122807e-06,
"loss": 3.0641,
"step": 110
},
{
"epoch": 0.06,
"grad_norm": 32.85644347198469,
"learning_rate": 1.8341307814992026e-06,
"loss": 2.9405,
"step": 115
},
{
"epoch": 0.06,
"grad_norm": 25.841846968636887,
"learning_rate": 1.9138755980861244e-06,
"loss": 2.8831,
"step": 120
},
{
"epoch": 0.06,
"grad_norm": 35.72471056462341,
"learning_rate": 1.9936204146730465e-06,
"loss": 2.8299,
"step": 125
},
{
"epoch": 0.06,
"grad_norm": 31.416213149609632,
"learning_rate": 2.0733652312599685e-06,
"loss": 2.8022,
"step": 130
},
{
"epoch": 0.06,
"grad_norm": 45.77627429889933,
"learning_rate": 2.15311004784689e-06,
"loss": 2.6781,
"step": 135
},
{
"epoch": 0.07,
"grad_norm": 24.637358858601818,
"learning_rate": 2.2328548644338117e-06,
"loss": 2.5941,
"step": 140
},
{
"epoch": 0.07,
"grad_norm": 35.05758095811321,
"learning_rate": 2.3125996810207338e-06,
"loss": 2.586,
"step": 145
},
{
"epoch": 0.07,
"grad_norm": 32.765676969854255,
"learning_rate": 2.392344497607656e-06,
"loss": 2.4517,
"step": 150
},
{
"epoch": 0.07,
"grad_norm": 24.45333532717565,
"learning_rate": 2.4720893141945774e-06,
"loss": 2.4443,
"step": 155
},
{
"epoch": 0.08,
"grad_norm": 33.6204798021058,
"learning_rate": 2.551834130781499e-06,
"loss": 2.3622,
"step": 160
},
{
"epoch": 0.08,
"grad_norm": 27.70808883145236,
"learning_rate": 2.631578947368421e-06,
"loss": 2.4172,
"step": 165
},
{
"epoch": 0.08,
"grad_norm": 37.0276057595572,
"learning_rate": 2.711323763955343e-06,
"loss": 2.3323,
"step": 170
},
{
"epoch": 0.08,
"grad_norm": 26.69697628121263,
"learning_rate": 2.7910685805422648e-06,
"loss": 2.2806,
"step": 175
},
{
"epoch": 0.09,
"grad_norm": 24.80947335765062,
"learning_rate": 2.870813397129187e-06,
"loss": 2.332,
"step": 180
},
{
"epoch": 0.09,
"grad_norm": 30.899583333109856,
"learning_rate": 2.950558213716109e-06,
"loss": 2.268,
"step": 185
},
{
"epoch": 0.09,
"grad_norm": 28.31676255733607,
"learning_rate": 3.0303030303030305e-06,
"loss": 2.1879,
"step": 190
},
{
"epoch": 0.09,
"grad_norm": 23.521624698869246,
"learning_rate": 3.1100478468899525e-06,
"loss": 2.2325,
"step": 195
},
{
"epoch": 0.1,
"grad_norm": 44.17532065511358,
"learning_rate": 3.1897926634768746e-06,
"loss": 2.1291,
"step": 200
},
{
"epoch": 0.1,
"grad_norm": 45.87345858245527,
"learning_rate": 3.269537480063796e-06,
"loss": 2.1718,
"step": 205
},
{
"epoch": 0.1,
"grad_norm": 23.5485291164262,
"learning_rate": 3.3492822966507182e-06,
"loss": 2.1771,
"step": 210
},
{
"epoch": 0.1,
"grad_norm": 37.54905927680125,
"learning_rate": 3.4290271132376394e-06,
"loss": 2.1106,
"step": 215
},
{
"epoch": 0.11,
"grad_norm": 22.731836223351927,
"learning_rate": 3.5087719298245615e-06,
"loss": 2.1052,
"step": 220
},
{
"epoch": 0.11,
"grad_norm": 23.406238453389413,
"learning_rate": 3.5885167464114835e-06,
"loss": 2.0508,
"step": 225
},
{
"epoch": 0.11,
"grad_norm": 32.58382444329781,
"learning_rate": 3.668261562998405e-06,
"loss": 1.9321,
"step": 230
},
{
"epoch": 0.11,
"grad_norm": 34.565278519644735,
"learning_rate": 3.748006379585327e-06,
"loss": 2.0468,
"step": 235
},
{
"epoch": 0.11,
"grad_norm": 22.531926305325616,
"learning_rate": 3.827751196172249e-06,
"loss": 2.0304,
"step": 240
},
{
"epoch": 0.12,
"grad_norm": 25.738791005577827,
"learning_rate": 3.907496012759171e-06,
"loss": 2.0475,
"step": 245
},
{
"epoch": 0.12,
"grad_norm": 58.69199386579353,
"learning_rate": 3.987240829346093e-06,
"loss": 1.9845,
"step": 250
},
{
"epoch": 0.12,
"grad_norm": 37.20449610450609,
"learning_rate": 4.066985645933015e-06,
"loss": 2.0175,
"step": 255
},
{
"epoch": 0.12,
"grad_norm": 89.36416776615029,
"learning_rate": 4.146730462519937e-06,
"loss": 1.9578,
"step": 260
},
{
"epoch": 0.13,
"grad_norm": 101.55252002274557,
"learning_rate": 4.226475279106859e-06,
"loss": 1.9386,
"step": 265
},
{
"epoch": 0.13,
"grad_norm": 104.25772965906495,
"learning_rate": 4.30622009569378e-06,
"loss": 1.9127,
"step": 270
},
{
"epoch": 0.13,
"grad_norm": 73.24247004084259,
"learning_rate": 4.385964912280702e-06,
"loss": 1.9013,
"step": 275
},
{
"epoch": 0.13,
"grad_norm": 55.678633467664405,
"learning_rate": 4.4657097288676235e-06,
"loss": 1.9475,
"step": 280
},
{
"epoch": 0.14,
"grad_norm": 56.80299051795151,
"learning_rate": 4.5454545454545455e-06,
"loss": 1.9298,
"step": 285
},
{
"epoch": 0.14,
"grad_norm": 115.93807192744369,
"learning_rate": 4.6251993620414676e-06,
"loss": 1.9122,
"step": 290
},
{
"epoch": 0.14,
"grad_norm": 138.29720706820768,
"learning_rate": 4.70494417862839e-06,
"loss": 1.9148,
"step": 295
},
{
"epoch": 0.14,
"grad_norm": 64.46697637378894,
"learning_rate": 4.784688995215312e-06,
"loss": 1.8538,
"step": 300
},
{
"epoch": 0.15,
"grad_norm": 31.109569100993028,
"learning_rate": 4.864433811802234e-06,
"loss": 1.8521,
"step": 305
},
{
"epoch": 0.15,
"grad_norm": 93.53966227120632,
"learning_rate": 4.944178628389155e-06,
"loss": 1.8668,
"step": 310
},
{
"epoch": 0.15,
"grad_norm": 71.28746418071842,
"learning_rate": 5.023923444976077e-06,
"loss": 1.8926,
"step": 315
},
{
"epoch": 0.15,
"grad_norm": 100.01208286746005,
"learning_rate": 5.103668261562998e-06,
"loss": 1.8714,
"step": 320
},
{
"epoch": 0.16,
"grad_norm": 178.63921442879118,
"learning_rate": 5.18341307814992e-06,
"loss": 1.8607,
"step": 325
},
{
"epoch": 0.16,
"grad_norm": 87.46697005861438,
"learning_rate": 5.263157894736842e-06,
"loss": 1.8469,
"step": 330
},
{
"epoch": 0.16,
"grad_norm": 86.19419344109204,
"learning_rate": 5.342902711323764e-06,
"loss": 1.8907,
"step": 335
},
{
"epoch": 0.16,
"grad_norm": 153.7018375245623,
"learning_rate": 5.422647527910686e-06,
"loss": 1.8234,
"step": 340
},
{
"epoch": 0.17,
"grad_norm": 95.02919237295767,
"learning_rate": 5.502392344497608e-06,
"loss": 1.8559,
"step": 345
},
{
"epoch": 0.17,
"grad_norm": 25.987821729618794,
"learning_rate": 5.5821371610845296e-06,
"loss": 1.8536,
"step": 350
},
{
"epoch": 0.17,
"grad_norm": 114.75473423739496,
"learning_rate": 5.661881977671452e-06,
"loss": 1.8386,
"step": 355
},
{
"epoch": 0.17,
"grad_norm": 90.84187790615414,
"learning_rate": 5.741626794258374e-06,
"loss": 1.8388,
"step": 360
},
{
"epoch": 0.17,
"grad_norm": 57.34768834466746,
"learning_rate": 5.821371610845296e-06,
"loss": 1.7915,
"step": 365
},
{
"epoch": 0.18,
"grad_norm": 64.03323510367646,
"learning_rate": 5.901116427432218e-06,
"loss": 1.7557,
"step": 370
},
{
"epoch": 0.18,
"grad_norm": 149.80909484333534,
"learning_rate": 5.98086124401914e-06,
"loss": 1.7882,
"step": 375
},
{
"epoch": 0.18,
"grad_norm": 62.12524608189198,
"learning_rate": 6.060606060606061e-06,
"loss": 1.7519,
"step": 380
},
{
"epoch": 0.18,
"grad_norm": 73.82427046525916,
"learning_rate": 6.140350877192983e-06,
"loss": 1.8062,
"step": 385
},
{
"epoch": 0.19,
"grad_norm": 24.76231979460461,
"learning_rate": 6.220095693779905e-06,
"loss": 1.7428,
"step": 390
},
{
"epoch": 0.19,
"grad_norm": 26.93660166927335,
"learning_rate": 6.299840510366827e-06,
"loss": 1.745,
"step": 395
},
{
"epoch": 0.19,
"grad_norm": 37.978703409664675,
"learning_rate": 6.379585326953749e-06,
"loss": 1.7122,
"step": 400
},
{
"epoch": 0.19,
"grad_norm": 82.91890571531636,
"learning_rate": 6.459330143540671e-06,
"loss": 1.7411,
"step": 405
},
{
"epoch": 0.2,
"grad_norm": 29.18376707221025,
"learning_rate": 6.539074960127592e-06,
"loss": 1.7352,
"step": 410
},
{
"epoch": 0.2,
"grad_norm": 49.335502252231784,
"learning_rate": 6.6188197767145144e-06,
"loss": 1.7199,
"step": 415
},
{
"epoch": 0.2,
"grad_norm": 111.40783070766503,
"learning_rate": 6.6985645933014365e-06,
"loss": 1.7613,
"step": 420
},
{
"epoch": 0.2,
"grad_norm": 85.1646918942689,
"learning_rate": 6.778309409888358e-06,
"loss": 1.7133,
"step": 425
},
{
"epoch": 0.21,
"grad_norm": 70.00087276043905,
"learning_rate": 6.858054226475279e-06,
"loss": 1.6908,
"step": 430
},
{
"epoch": 0.21,
"grad_norm": 49.65381943486665,
"learning_rate": 6.937799043062201e-06,
"loss": 1.7231,
"step": 435
},
{
"epoch": 0.21,
"grad_norm": 95.24373804352825,
"learning_rate": 7.017543859649123e-06,
"loss": 1.6745,
"step": 440
},
{
"epoch": 0.21,
"grad_norm": 64.11363991177045,
"learning_rate": 7.097288676236045e-06,
"loss": 1.6983,
"step": 445
},
{
"epoch": 0.22,
"grad_norm": 70.68538220265329,
"learning_rate": 7.177033492822967e-06,
"loss": 1.6824,
"step": 450
},
{
"epoch": 0.22,
"grad_norm": 138.75242464393378,
"learning_rate": 7.256778309409889e-06,
"loss": 1.649,
"step": 455
},
{
"epoch": 0.22,
"grad_norm": 90.06365258303676,
"learning_rate": 7.33652312599681e-06,
"loss": 1.643,
"step": 460
},
{
"epoch": 0.22,
"grad_norm": 72.73042967609133,
"learning_rate": 7.416267942583732e-06,
"loss": 1.6788,
"step": 465
},
{
"epoch": 0.22,
"grad_norm": 27.447874063555897,
"learning_rate": 7.496012759170654e-06,
"loss": 1.6792,
"step": 470
},
{
"epoch": 0.23,
"grad_norm": 73.49470812643965,
"learning_rate": 7.5757575757575764e-06,
"loss": 1.642,
"step": 475
},
{
"epoch": 0.23,
"grad_norm": 80.74352794413599,
"learning_rate": 7.655502392344498e-06,
"loss": 1.6525,
"step": 480
},
{
"epoch": 0.23,
"grad_norm": 73.81716716184098,
"learning_rate": 7.73524720893142e-06,
"loss": 1.5964,
"step": 485
},
{
"epoch": 0.23,
"grad_norm": 38.191600022009645,
"learning_rate": 7.814992025518342e-06,
"loss": 1.6313,
"step": 490
},
{
"epoch": 0.24,
"grad_norm": 18.06253012227548,
"learning_rate": 7.894736842105265e-06,
"loss": 1.6604,
"step": 495
},
{
"epoch": 0.24,
"grad_norm": 24.73459428572392,
"learning_rate": 7.974481658692186e-06,
"loss": 1.5759,
"step": 500
},
{
"epoch": 0.24,
"grad_norm": 50.04381801331699,
"learning_rate": 8.054226475279107e-06,
"loss": 1.5912,
"step": 505
},
{
"epoch": 0.24,
"grad_norm": 35.65117096823928,
"learning_rate": 8.13397129186603e-06,
"loss": 1.5391,
"step": 510
},
{
"epoch": 0.25,
"grad_norm": 18.158882620517634,
"learning_rate": 8.213716108452951e-06,
"loss": 1.6366,
"step": 515
},
{
"epoch": 0.25,
"grad_norm": 71.3492024145027,
"learning_rate": 8.293460925039874e-06,
"loss": 1.587,
"step": 520
},
{
"epoch": 0.25,
"grad_norm": 62.73591440117429,
"learning_rate": 8.373205741626795e-06,
"loss": 1.627,
"step": 525
},
{
"epoch": 0.25,
"grad_norm": 145.02109525928412,
"learning_rate": 8.452950558213718e-06,
"loss": 1.6732,
"step": 530
},
{
"epoch": 0.26,
"grad_norm": 138.46543786648346,
"learning_rate": 8.53269537480064e-06,
"loss": 1.6037,
"step": 535
},
{
"epoch": 0.26,
"grad_norm": 41.17157734542531,
"learning_rate": 8.61244019138756e-06,
"loss": 1.5969,
"step": 540
},
{
"epoch": 0.26,
"grad_norm": 33.82537712897133,
"learning_rate": 8.692185007974482e-06,
"loss": 1.563,
"step": 545
},
{
"epoch": 0.26,
"grad_norm": 113.924690506077,
"learning_rate": 8.771929824561405e-06,
"loss": 1.7031,
"step": 550
},
{
"epoch": 0.27,
"grad_norm": 34.66368029094525,
"learning_rate": 8.851674641148326e-06,
"loss": 1.6131,
"step": 555
},
{
"epoch": 0.27,
"grad_norm": 52.361298252499964,
"learning_rate": 8.931419457735247e-06,
"loss": 1.5907,
"step": 560
},
{
"epoch": 0.27,
"grad_norm": 53.125404631251676,
"learning_rate": 9.01116427432217e-06,
"loss": 1.5827,
"step": 565
},
{
"epoch": 0.27,
"grad_norm": 19.156273263475168,
"learning_rate": 9.090909090909091e-06,
"loss": 1.6302,
"step": 570
},
{
"epoch": 0.28,
"grad_norm": 143.8432334874611,
"learning_rate": 9.170653907496014e-06,
"loss": 1.5555,
"step": 575
},
{
"epoch": 0.28,
"grad_norm": 368.11247127206894,
"learning_rate": 9.250398724082935e-06,
"loss": 1.5961,
"step": 580
},
{
"epoch": 0.28,
"grad_norm": 228.105268523375,
"learning_rate": 9.330143540669856e-06,
"loss": 1.6299,
"step": 585
},
{
"epoch": 0.28,
"grad_norm": 218.72930454225494,
"learning_rate": 9.40988835725678e-06,
"loss": 1.6438,
"step": 590
},
{
"epoch": 0.28,
"grad_norm": 69.19099361690543,
"learning_rate": 9.4896331738437e-06,
"loss": 1.5968,
"step": 595
},
{
"epoch": 0.29,
"grad_norm": 273.44260176317573,
"learning_rate": 9.569377990430623e-06,
"loss": 1.6588,
"step": 600
},
{
"epoch": 0.29,
"grad_norm": 108.41478313884119,
"learning_rate": 9.649122807017545e-06,
"loss": 1.6416,
"step": 605
},
{
"epoch": 0.29,
"grad_norm": 50.18154742266904,
"learning_rate": 9.728867623604467e-06,
"loss": 1.5721,
"step": 610
},
{
"epoch": 0.29,
"grad_norm": 112.04463934660725,
"learning_rate": 9.808612440191389e-06,
"loss": 1.5849,
"step": 615
},
{
"epoch": 0.3,
"grad_norm": 32.28186873084339,
"learning_rate": 9.88835725677831e-06,
"loss": 1.5449,
"step": 620
},
{
"epoch": 0.3,
"grad_norm": 22.56634647014017,
"learning_rate": 9.968102073365233e-06,
"loss": 1.6269,
"step": 625
},
{
"epoch": 0.3,
"grad_norm": 34.02729511631579,
"learning_rate": 9.999993018898467e-06,
"loss": 1.5205,
"step": 630
},
{
"epoch": 0.3,
"grad_norm": 64.92764744969796,
"learning_rate": 9.999950356681914e-06,
"loss": 1.4983,
"step": 635
},
{
"epoch": 0.31,
"grad_norm": 68.0486498997933,
"learning_rate": 9.999868910969066e-06,
"loss": 1.5166,
"step": 640
},
{
"epoch": 0.31,
"grad_norm": 104.02957985164318,
"learning_rate": 9.999748682391682e-06,
"loss": 1.5058,
"step": 645
},
{
"epoch": 0.31,
"grad_norm": 196.21172269323182,
"learning_rate": 9.999589671882346e-06,
"loss": 1.5047,
"step": 650
},
{
"epoch": 0.31,
"grad_norm": 82.03541854753365,
"learning_rate": 9.99939188067447e-06,
"loss": 1.5744,
"step": 655
},
{
"epoch": 0.32,
"grad_norm": 51.00835475129493,
"learning_rate": 9.999155310302273e-06,
"loss": 1.5452,
"step": 660
},
{
"epoch": 0.32,
"grad_norm": 98.82365817799119,
"learning_rate": 9.99887996260078e-06,
"loss": 1.4969,
"step": 665
},
{
"epoch": 0.32,
"grad_norm": 16.874754295539923,
"learning_rate": 9.998565839705803e-06,
"loss": 1.5154,
"step": 670
},
{
"epoch": 0.32,
"grad_norm": 29.85091432442721,
"learning_rate": 9.99821294405392e-06,
"loss": 1.4732,
"step": 675
},
{
"epoch": 0.33,
"grad_norm": 49.03627428907472,
"learning_rate": 9.997821278382464e-06,
"loss": 1.4751,
"step": 680
},
{
"epoch": 0.33,
"grad_norm": 16.240051854993116,
"learning_rate": 9.9973908457295e-06,
"loss": 1.4294,
"step": 685
},
{
"epoch": 0.33,
"grad_norm": 68.52459224489506,
"learning_rate": 9.996921649433798e-06,
"loss": 1.5206,
"step": 690
},
{
"epoch": 0.33,
"grad_norm": 59.24141740781687,
"learning_rate": 9.996413693134806e-06,
"loss": 1.5368,
"step": 695
},
{
"epoch": 0.34,
"grad_norm": 23.81437870004502,
"learning_rate": 9.995866980772628e-06,
"loss": 1.4872,
"step": 700
},
{
"epoch": 0.34,
"grad_norm": 46.773999542197224,
"learning_rate": 9.995281516587992e-06,
"loss": 1.4527,
"step": 705
},
{
"epoch": 0.34,
"grad_norm": 39.50144985995831,
"learning_rate": 9.994657305122211e-06,
"loss": 1.5007,
"step": 710
},
{
"epoch": 0.34,
"grad_norm": 46.84856657395977,
"learning_rate": 9.993994351217153e-06,
"loss": 1.5192,
"step": 715
},
{
"epoch": 0.34,
"grad_norm": 55.47555796478918,
"learning_rate": 9.993292660015204e-06,
"loss": 1.4567,
"step": 720
},
{
"epoch": 0.35,
"grad_norm": 80.68256162482182,
"learning_rate": 9.992552236959224e-06,
"loss": 1.4285,
"step": 725
},
{
"epoch": 0.35,
"grad_norm": 134.31997926928236,
"learning_rate": 9.991773087792511e-06,
"loss": 1.4885,
"step": 730
},
{
"epoch": 0.35,
"grad_norm": 66.37697245789519,
"learning_rate": 9.990955218558751e-06,
"loss": 1.4966,
"step": 735
},
{
"epoch": 0.35,
"grad_norm": 19.850962750681337,
"learning_rate": 9.990098635601972e-06,
"loss": 1.4439,
"step": 740
},
{
"epoch": 0.36,
"grad_norm": 52.40361976199728,
"learning_rate": 9.989203345566495e-06,
"loss": 1.454,
"step": 745
},
{
"epoch": 0.36,
"grad_norm": 33.039136783032085,
"learning_rate": 9.988269355396889e-06,
"loss": 1.4345,
"step": 750
},
{
"epoch": 0.36,
"grad_norm": 41.23047659142119,
"learning_rate": 9.9872966723379e-06,
"loss": 1.4253,
"step": 755
},
{
"epoch": 0.36,
"grad_norm": 35.085916948472395,
"learning_rate": 9.986285303934417e-06,
"loss": 1.4319,
"step": 760
},
{
"epoch": 0.37,
"grad_norm": 39.95359750068918,
"learning_rate": 9.9852352580314e-06,
"loss": 1.4612,
"step": 765
},
{
"epoch": 0.37,
"grad_norm": 36.42736785815932,
"learning_rate": 9.984146542773816e-06,
"loss": 1.436,
"step": 770
},
{
"epoch": 0.37,
"grad_norm": 71.54809292590623,
"learning_rate": 9.98301916660659e-06,
"loss": 1.4549,
"step": 775
},
{
"epoch": 0.37,
"grad_norm": 29.014569585895195,
"learning_rate": 9.98185313827452e-06,
"loss": 1.4602,
"step": 780
},
{
"epoch": 0.38,
"grad_norm": 66.4431423269554,
"learning_rate": 9.980648466822234e-06,
"loss": 1.4127,
"step": 785
},
{
"epoch": 0.38,
"grad_norm": 82.42917588379505,
"learning_rate": 9.979405161594096e-06,
"loss": 1.4307,
"step": 790
},
{
"epoch": 0.38,
"grad_norm": 20.533046985928028,
"learning_rate": 9.978123232234147e-06,
"loss": 1.438,
"step": 795
},
{
"epoch": 0.38,
"grad_norm": 37.93033568692731,
"learning_rate": 9.976802688686028e-06,
"loss": 1.4339,
"step": 800
},
{
"epoch": 0.39,
"grad_norm": 46.508789169836284,
"learning_rate": 9.975443541192893e-06,
"loss": 1.4597,
"step": 805
},
{
"epoch": 0.39,
"grad_norm": 40.00787244204549,
"learning_rate": 9.974045800297353e-06,
"loss": 1.4131,
"step": 810
},
{
"epoch": 0.39,
"grad_norm": 32.46278110990685,
"learning_rate": 9.972609476841368e-06,
"loss": 1.428,
"step": 815
},
{
"epoch": 0.39,
"grad_norm": 15.655135610066681,
"learning_rate": 9.971134581966178e-06,
"loss": 1.3959,
"step": 820
},
{
"epoch": 0.39,
"grad_norm": 55.27555299956493,
"learning_rate": 9.969621127112211e-06,
"loss": 1.3944,
"step": 825
},
{
"epoch": 0.4,
"grad_norm": 51.39607458001549,
"learning_rate": 9.968069124019e-06,
"loss": 1.4171,
"step": 830
},
{
"epoch": 0.4,
"grad_norm": 98.46791098210076,
"learning_rate": 9.966478584725085e-06,
"loss": 1.4095,
"step": 835
},
{
"epoch": 0.4,
"grad_norm": 137.41786647402353,
"learning_rate": 9.964849521567924e-06,
"loss": 1.3476,
"step": 840
},
{
"epoch": 0.4,
"grad_norm": 129.4577840164265,
"learning_rate": 9.963181947183795e-06,
"loss": 1.4229,
"step": 845
},
{
"epoch": 0.41,
"grad_norm": 95.65368163155473,
"learning_rate": 9.961475874507699e-06,
"loss": 1.4233,
"step": 850
},
{
"epoch": 0.41,
"grad_norm": 62.721176787924186,
"learning_rate": 9.95973131677326e-06,
"loss": 1.4064,
"step": 855
},
{
"epoch": 0.41,
"grad_norm": 30.860164466795904,
"learning_rate": 9.957948287512621e-06,
"loss": 1.3802,
"step": 860
},
{
"epoch": 0.41,
"grad_norm": 42.50256652034154,
"learning_rate": 9.956126800556345e-06,
"loss": 1.4213,
"step": 865
},
{
"epoch": 0.42,
"grad_norm": 34.15555075939201,
"learning_rate": 9.954266870033296e-06,
"loss": 1.3552,
"step": 870
},
{
"epoch": 0.42,
"grad_norm": 54.88898418231565,
"learning_rate": 9.952368510370538e-06,
"loss": 1.3625,
"step": 875
},
{
"epoch": 0.42,
"grad_norm": 18.884032632164693,
"learning_rate": 9.950431736293224e-06,
"loss": 1.3334,
"step": 880
},
{
"epoch": 0.42,
"grad_norm": 15.900420220237912,
"learning_rate": 9.948456562824478e-06,
"loss": 1.3605,
"step": 885
},
{
"epoch": 0.43,
"grad_norm": 131.80887482235394,
"learning_rate": 9.946443005285275e-06,
"loss": 1.409,
"step": 890
},
{
"epoch": 0.43,
"grad_norm": 163.96805849557074,
"learning_rate": 9.944391079294333e-06,
"loss": 1.4312,
"step": 895
},
{
"epoch": 0.43,
"grad_norm": 82.25681062434396,
"learning_rate": 9.942300800767984e-06,
"loss": 1.4167,
"step": 900
},
{
"epoch": 0.43,
"grad_norm": 24.811467763643222,
"learning_rate": 9.94017218592005e-06,
"loss": 1.4115,
"step": 905
},
{
"epoch": 0.44,
"grad_norm": 138.99962371211922,
"learning_rate": 9.938005251261716e-06,
"loss": 1.3919,
"step": 910
},
{
"epoch": 0.44,
"grad_norm": 127.34790212770166,
"learning_rate": 9.935800013601415e-06,
"loss": 1.3817,
"step": 915
},
{
"epoch": 0.44,
"grad_norm": 97.32673432981726,
"learning_rate": 9.93355649004468e-06,
"loss": 1.392,
"step": 920
},
{
"epoch": 0.44,
"grad_norm": 116.44363943428493,
"learning_rate": 9.931274697994022e-06,
"loss": 1.4288,
"step": 925
},
{
"epoch": 0.45,
"grad_norm": 32.57153425926579,
"learning_rate": 9.92895465514879e-06,
"loss": 1.4099,
"step": 930
},
{
"epoch": 0.45,
"grad_norm": 40.92256502700726,
"learning_rate": 9.926596379505038e-06,
"loss": 1.39,
"step": 935
},
{
"epoch": 0.45,
"grad_norm": 24.6089028121811,
"learning_rate": 9.924199889355383e-06,
"loss": 1.3834,
"step": 940
},
{
"epoch": 0.45,
"grad_norm": 53.43789806684882,
"learning_rate": 9.921765203288862e-06,
"loss": 1.3443,
"step": 945
},
{
"epoch": 0.45,
"grad_norm": 78.86589096360088,
"learning_rate": 9.919292340190791e-06,
"loss": 1.3232,
"step": 950
},
{
"epoch": 0.46,
"grad_norm": 169.69703753795483,
"learning_rate": 9.916781319242616e-06,
"loss": 1.355,
"step": 955
},
{
"epoch": 0.46,
"grad_norm": 128.35616751891993,
"learning_rate": 9.914232159921763e-06,
"loss": 1.3499,
"step": 960
},
{
"epoch": 0.46,
"grad_norm": 170.8159995835075,
"learning_rate": 9.911644882001492e-06,
"loss": 1.3774,
"step": 965
},
{
"epoch": 0.46,
"grad_norm": 242.3114747587618,
"learning_rate": 9.909019505550739e-06,
"loss": 1.403,
"step": 970
},
{
"epoch": 0.47,
"grad_norm": 19.274644048496455,
"learning_rate": 9.906356050933962e-06,
"loss": 1.3972,
"step": 975
},
{
"epoch": 0.47,
"grad_norm": 124.42550358180652,
"learning_rate": 9.90365453881098e-06,
"loss": 1.4083,
"step": 980
},
{
"epoch": 0.47,
"grad_norm": 60.61205111725167,
"learning_rate": 9.900914990136823e-06,
"loss": 1.3855,
"step": 985
},
{
"epoch": 0.47,
"grad_norm": 66.05748972719077,
"learning_rate": 9.898137426161553e-06,
"loss": 1.366,
"step": 990
},
{
"epoch": 0.48,
"grad_norm": 35.132041098504175,
"learning_rate": 9.895321868430113e-06,
"loss": 1.3247,
"step": 995
},
{
"epoch": 0.48,
"grad_norm": 65.81404594386613,
"learning_rate": 9.892468338782157e-06,
"loss": 1.3491,
"step": 1000
},
{
"epoch": 0.48,
"grad_norm": 51.20887915656867,
"learning_rate": 9.889576859351873e-06,
"loss": 1.3253,
"step": 1005
},
{
"epoch": 0.48,
"grad_norm": 18.238224969318182,
"learning_rate": 9.88664745256782e-06,
"loss": 1.348,
"step": 1010
},
{
"epoch": 0.49,
"grad_norm": 51.986812578913444,
"learning_rate": 9.883680141152754e-06,
"loss": 1.3209,
"step": 1015
},
{
"epoch": 0.49,
"grad_norm": 50.647412133452335,
"learning_rate": 9.880674948123443e-06,
"loss": 1.2969,
"step": 1020
},
{
"epoch": 0.49,
"grad_norm": 105.81658721511505,
"learning_rate": 9.877631896790498e-06,
"loss": 1.3282,
"step": 1025
},
{
"epoch": 0.49,
"grad_norm": 73.03230926870046,
"learning_rate": 9.874551010758187e-06,
"loss": 1.3594,
"step": 1030
},
{
"epoch": 0.5,
"grad_norm": 43.46077556050723,
"learning_rate": 9.871432313924255e-06,
"loss": 1.3671,
"step": 1035
},
{
"epoch": 0.5,
"grad_norm": 33.06794747937316,
"learning_rate": 9.868275830479735e-06,
"loss": 1.3475,
"step": 1040
},
{
"epoch": 0.5,
"grad_norm": 39.348126182956804,
"learning_rate": 9.86508158490876e-06,
"loss": 1.337,
"step": 1045
},
{
"epoch": 0.5,
"grad_norm": 40.99333856290618,
"learning_rate": 9.861849601988384e-06,
"loss": 1.3296,
"step": 1050
},
{
"epoch": 0.51,
"grad_norm": 65.76882059617591,
"learning_rate": 9.85857990678837e-06,
"loss": 1.2971,
"step": 1055
},
{
"epoch": 0.51,
"grad_norm": 54.99205996022647,
"learning_rate": 9.855272524671015e-06,
"loss": 1.316,
"step": 1060
},
{
"epoch": 0.51,
"grad_norm": 42.90872114545079,
"learning_rate": 9.851927481290943e-06,
"loss": 1.2909,
"step": 1065
},
{
"epoch": 0.51,
"grad_norm": 26.211854548393905,
"learning_rate": 9.848544802594903e-06,
"loss": 1.2902,
"step": 1070
},
{
"epoch": 0.51,
"grad_norm": 29.68871749210505,
"learning_rate": 9.845124514821581e-06,
"loss": 1.3173,
"step": 1075
},
{
"epoch": 0.52,
"grad_norm": 15.983386111517836,
"learning_rate": 9.841666644501382e-06,
"loss": 1.3163,
"step": 1080
},
{
"epoch": 0.52,
"grad_norm": 27.945379558497496,
"learning_rate": 9.838171218456234e-06,
"loss": 1.3446,
"step": 1085
},
{
"epoch": 0.52,
"grad_norm": 56.1364144102052,
"learning_rate": 9.834638263799373e-06,
"loss": 1.2684,
"step": 1090
},
{
"epoch": 0.52,
"grad_norm": 16.25358588764152,
"learning_rate": 9.83106780793514e-06,
"loss": 1.3174,
"step": 1095
},
{
"epoch": 0.53,
"grad_norm": 16.315534978552627,
"learning_rate": 9.827459878558758e-06,
"loss": 1.3129,
"step": 1100
},
{
"epoch": 0.53,
"grad_norm": 39.991802716627106,
"learning_rate": 9.823814503656133e-06,
"loss": 1.339,
"step": 1105
},
{
"epoch": 0.53,
"grad_norm": 78.51197875810384,
"learning_rate": 9.82013171150362e-06,
"loss": 1.2832,
"step": 1110
},
{
"epoch": 0.53,
"grad_norm": 131.516408664817,
"learning_rate": 9.816411530667815e-06,
"loss": 1.2999,
"step": 1115
},
{
"epoch": 0.54,
"grad_norm": 105.56204946684674,
"learning_rate": 9.812653990005326e-06,
"loss": 1.2812,
"step": 1120
},
{
"epoch": 0.54,
"grad_norm": 71.45793899692477,
"learning_rate": 9.808859118662558e-06,
"loss": 1.3251,
"step": 1125
},
{
"epoch": 0.54,
"grad_norm": 43.2510250571711,
"learning_rate": 9.805026946075478e-06,
"loss": 1.2962,
"step": 1130
},
{
"epoch": 0.54,
"grad_norm": 38.65847467257962,
"learning_rate": 9.801157501969391e-06,
"loss": 1.3306,
"step": 1135
},
{
"epoch": 0.55,
"grad_norm": 16.82575598106575,
"learning_rate": 9.79725081635871e-06,
"loss": 1.3118,
"step": 1140
},
{
"epoch": 0.55,
"grad_norm": 86.73109834830365,
"learning_rate": 9.793306919546721e-06,
"loss": 1.327,
"step": 1145
},
{
"epoch": 0.55,
"grad_norm": 76.57583694942113,
"learning_rate": 9.78932584212535e-06,
"loss": 1.333,
"step": 1150
},
{
"epoch": 0.55,
"grad_norm": 75.77091164184515,
"learning_rate": 9.785307614974922e-06,
"loss": 1.305,
"step": 1155
},
{
"epoch": 0.56,
"grad_norm": 27.307850503763753,
"learning_rate": 9.781252269263926e-06,
"loss": 1.3128,
"step": 1160
},
{
"epoch": 0.56,
"grad_norm": 18.849974392610964,
"learning_rate": 9.777159836448772e-06,
"loss": 1.2797,
"step": 1165
},
{
"epoch": 0.56,
"grad_norm": 73.94404796962603,
"learning_rate": 9.773030348273549e-06,
"loss": 1.3013,
"step": 1170
},
{
"epoch": 0.56,
"grad_norm": 37.713196588893524,
"learning_rate": 9.768863836769769e-06,
"loss": 1.3134,
"step": 1175
},
{
"epoch": 0.56,
"grad_norm": 22.644006560352732,
"learning_rate": 9.764660334256129e-06,
"loss": 1.2559,
"step": 1180
},
{
"epoch": 0.57,
"grad_norm": 54.26678198371241,
"learning_rate": 9.760419873338261e-06,
"loss": 1.3301,
"step": 1185
},
{
"epoch": 0.57,
"grad_norm": 41.6545047602118,
"learning_rate": 9.756142486908468e-06,
"loss": 1.3054,
"step": 1190
},
{
"epoch": 0.57,
"grad_norm": 124.14120608305727,
"learning_rate": 9.751828208145482e-06,
"loss": 1.2712,
"step": 1195
},
{
"epoch": 0.57,
"grad_norm": 103.65592115751637,
"learning_rate": 9.747477070514197e-06,
"loss": 1.2974,
"step": 1200
},
{
"epoch": 0.58,
"grad_norm": 18.449321501714714,
"learning_rate": 9.743089107765415e-06,
"loss": 1.3109,
"step": 1205
},
{
"epoch": 0.58,
"grad_norm": 23.964828730028064,
"learning_rate": 9.738664353935584e-06,
"loss": 1.3119,
"step": 1210
},
{
"epoch": 0.58,
"grad_norm": 55.244623515575064,
"learning_rate": 9.734202843346522e-06,
"loss": 1.2826,
"step": 1215
},
{
"epoch": 0.58,
"grad_norm": 127.9570188656042,
"learning_rate": 9.729704610605174e-06,
"loss": 1.3552,
"step": 1220
},
{
"epoch": 0.59,
"grad_norm": 57.168348832178594,
"learning_rate": 9.725169690603326e-06,
"loss": 1.3105,
"step": 1225
},
{
"epoch": 0.59,
"grad_norm": 19.119791909003727,
"learning_rate": 9.720598118517335e-06,
"loss": 1.2647,
"step": 1230
},
{
"epoch": 0.59,
"grad_norm": 15.068700087799538,
"learning_rate": 9.715989929807863e-06,
"loss": 1.3296,
"step": 1235
},
{
"epoch": 0.59,
"grad_norm": 13.063236194830704,
"learning_rate": 9.7113451602196e-06,
"loss": 1.2731,
"step": 1240
},
{
"epoch": 0.6,
"grad_norm": 30.918149482626536,
"learning_rate": 9.706663845780984e-06,
"loss": 1.2805,
"step": 1245
},
{
"epoch": 0.6,
"grad_norm": 21.73056567479878,
"learning_rate": 9.701946022803923e-06,
"loss": 1.2877,
"step": 1250
},
{
"epoch": 0.6,
"grad_norm": 14.082597819049196,
"learning_rate": 9.697191727883517e-06,
"loss": 1.245,
"step": 1255
},
{
"epoch": 0.6,
"grad_norm": 13.925884277798485,
"learning_rate": 9.692400997897766e-06,
"loss": 1.2595,
"step": 1260
},
{
"epoch": 0.61,
"grad_norm": 34.48777339166718,
"learning_rate": 9.68757387000729e-06,
"loss": 1.2597,
"step": 1265
},
{
"epoch": 0.61,
"grad_norm": 28.898598379143074,
"learning_rate": 9.68271038165504e-06,
"loss": 1.2472,
"step": 1270
},
{
"epoch": 0.61,
"grad_norm": 24.519580062882014,
"learning_rate": 9.677810570566011e-06,
"loss": 1.276,
"step": 1275
},
{
"epoch": 0.61,
"grad_norm": 15.327264954118917,
"learning_rate": 9.672874474746936e-06,
"loss": 1.2967,
"step": 1280
},
{
"epoch": 0.62,
"grad_norm": 38.74856351082161,
"learning_rate": 9.667902132486009e-06,
"loss": 1.2777,
"step": 1285
},
{
"epoch": 0.62,
"grad_norm": 41.48509321739801,
"learning_rate": 9.662893582352578e-06,
"loss": 1.2827,
"step": 1290
},
{
"epoch": 0.62,
"grad_norm": 18.409433564242203,
"learning_rate": 9.65784886319684e-06,
"loss": 1.2698,
"step": 1295
},
{
"epoch": 0.62,
"grad_norm": 36.50122502444449,
"learning_rate": 9.652768014149558e-06,
"loss": 1.2511,
"step": 1300
},
{
"epoch": 0.62,
"grad_norm": 60.97525961610506,
"learning_rate": 9.647651074621741e-06,
"loss": 1.273,
"step": 1305
},
{
"epoch": 0.63,
"grad_norm": 18.52887382235792,
"learning_rate": 9.642498084304343e-06,
"loss": 1.2939,
"step": 1310
},
{
"epoch": 0.63,
"grad_norm": 105.73659856855303,
"learning_rate": 9.637309083167956e-06,
"loss": 1.283,
"step": 1315
},
{
"epoch": 0.63,
"grad_norm": 54.80489925990439,
"learning_rate": 9.632084111462502e-06,
"loss": 1.2548,
"step": 1320
},
{
"epoch": 0.63,
"grad_norm": 32.581451956274705,
"learning_rate": 9.626823209716918e-06,
"loss": 1.2678,
"step": 1325
},
{
"epoch": 0.64,
"grad_norm": 46.00949069799224,
"learning_rate": 9.62152641873884e-06,
"loss": 1.265,
"step": 1330
},
{
"epoch": 0.64,
"grad_norm": 70.55959087583614,
"learning_rate": 9.616193779614294e-06,
"loss": 1.238,
"step": 1335
},
{
"epoch": 0.64,
"grad_norm": 103.04405383020065,
"learning_rate": 9.610825333707366e-06,
"loss": 1.2319,
"step": 1340
},
{
"epoch": 0.64,
"grad_norm": 60.84507279059485,
"learning_rate": 9.60542112265989e-06,
"loss": 1.2579,
"step": 1345
},
{
"epoch": 0.65,
"grad_norm": 43.11664810609889,
"learning_rate": 9.599981188391121e-06,
"loss": 1.2779,
"step": 1350
},
{
"epoch": 0.65,
"grad_norm": 19.27884813019292,
"learning_rate": 9.594505573097415e-06,
"loss": 1.2673,
"step": 1355
},
{
"epoch": 0.65,
"grad_norm": 41.17415094907435,
"learning_rate": 9.588994319251893e-06,
"loss": 1.2575,
"step": 1360
},
{
"epoch": 0.65,
"grad_norm": 80.52598002804548,
"learning_rate": 9.58344746960412e-06,
"loss": 1.2923,
"step": 1365
},
{
"epoch": 0.66,
"grad_norm": 131.75389469351816,
"learning_rate": 9.577865067179763e-06,
"loss": 1.2309,
"step": 1370
},
{
"epoch": 0.66,
"grad_norm": 34.702302597758866,
"learning_rate": 9.572247155280272e-06,
"loss": 1.2338,
"step": 1375
},
{
"epoch": 0.66,
"grad_norm": 30.45435415150428,
"learning_rate": 9.566593777482535e-06,
"loss": 1.2131,
"step": 1380
},
{
"epoch": 0.66,
"grad_norm": 14.526144691710407,
"learning_rate": 9.560904977638534e-06,
"loss": 1.2528,
"step": 1385
},
{
"epoch": 0.67,
"grad_norm": 34.84087775379032,
"learning_rate": 9.55518079987502e-06,
"loss": 1.2633,
"step": 1390
},
{
"epoch": 0.67,
"grad_norm": 34.37392744333943,
"learning_rate": 9.549421288593157e-06,
"loss": 1.2466,
"step": 1395
},
{
"epoch": 0.67,
"grad_norm": 25.242239512558932,
"learning_rate": 9.543626488468187e-06,
"loss": 1.2109,
"step": 1400
},
{
"epoch": 0.67,
"grad_norm": 54.55579102548687,
"learning_rate": 9.53779644444908e-06,
"loss": 1.2609,
"step": 1405
},
{
"epoch": 0.67,
"grad_norm": 140.8055042358258,
"learning_rate": 9.531931201758179e-06,
"loss": 1.3015,
"step": 1410
},
{
"epoch": 0.68,
"grad_norm": 97.37315493318532,
"learning_rate": 9.52603080589086e-06,
"loss": 1.2571,
"step": 1415
},
{
"epoch": 0.68,
"grad_norm": 46.26704153992051,
"learning_rate": 9.520095302615175e-06,
"loss": 1.2803,
"step": 1420
},
{
"epoch": 0.68,
"grad_norm": 73.34976497555171,
"learning_rate": 9.514124737971495e-06,
"loss": 1.2464,
"step": 1425
},
{
"epoch": 0.68,
"grad_norm": 25.693257105327774,
"learning_rate": 9.508119158272155e-06,
"loss": 1.2979,
"step": 1430
},
{
"epoch": 0.69,
"grad_norm": 30.71411354664324,
"learning_rate": 9.502078610101093e-06,
"loss": 1.2445,
"step": 1435
},
{
"epoch": 0.69,
"grad_norm": 63.90186425929748,
"learning_rate": 9.496003140313488e-06,
"loss": 1.2496,
"step": 1440
},
{
"epoch": 0.69,
"grad_norm": 68.64375119765589,
"learning_rate": 9.4898927960354e-06,
"loss": 1.2801,
"step": 1445
},
{
"epoch": 0.69,
"grad_norm": 30.68818766300311,
"learning_rate": 9.483747624663402e-06,
"loss": 1.2339,
"step": 1450
},
{
"epoch": 0.7,
"grad_norm": 68.82484665312072,
"learning_rate": 9.477567673864217e-06,
"loss": 1.264,
"step": 1455
},
{
"epoch": 0.7,
"grad_norm": 97.33125110484575,
"learning_rate": 9.471352991574338e-06,
"loss": 1.1965,
"step": 1460
},
{
"epoch": 0.7,
"grad_norm": 74.82799272224965,
"learning_rate": 9.465103625999666e-06,
"loss": 1.249,
"step": 1465
},
{
"epoch": 0.7,
"grad_norm": 42.10200882887996,
"learning_rate": 9.458819625615128e-06,
"loss": 1.2599,
"step": 1470
},
{
"epoch": 0.71,
"grad_norm": 21.503760226209035,
"learning_rate": 9.452501039164316e-06,
"loss": 1.2293,
"step": 1475
},
{
"epoch": 0.71,
"grad_norm": 12.705240295699287,
"learning_rate": 9.446147915659085e-06,
"loss": 1.2591,
"step": 1480
},
{
"epoch": 0.71,
"grad_norm": 25.640341994768104,
"learning_rate": 9.439760304379197e-06,
"loss": 1.2464,
"step": 1485
},
{
"epoch": 0.71,
"grad_norm": 15.317438409572903,
"learning_rate": 9.433338254871921e-06,
"loss": 1.2662,
"step": 1490
},
{
"epoch": 0.72,
"grad_norm": 12.969711022046635,
"learning_rate": 9.42688181695166e-06,
"loss": 1.2526,
"step": 1495
},
{
"epoch": 0.72,
"grad_norm": 14.36017295558206,
"learning_rate": 9.42039104069956e-06,
"loss": 1.2555,
"step": 1500
},
{
"epoch": 0.72,
"grad_norm": 30.03616944170716,
"learning_rate": 9.413865976463114e-06,
"loss": 1.2769,
"step": 1505
},
{
"epoch": 0.72,
"grad_norm": 11.360803176898797,
"learning_rate": 9.407306674855791e-06,
"loss": 1.2364,
"step": 1510
},
{
"epoch": 0.73,
"grad_norm": 49.87156329789158,
"learning_rate": 9.400713186756625e-06,
"loss": 1.2477,
"step": 1515
},
{
"epoch": 0.73,
"grad_norm": 86.53910072982244,
"learning_rate": 9.394085563309827e-06,
"loss": 1.2605,
"step": 1520
},
{
"epoch": 0.73,
"grad_norm": 105.84947509159466,
"learning_rate": 9.387423855924392e-06,
"loss": 1.2487,
"step": 1525
},
{
"epoch": 0.73,
"grad_norm": 65.52637068670442,
"learning_rate": 9.380728116273688e-06,
"loss": 1.2423,
"step": 1530
},
{
"epoch": 0.73,
"grad_norm": 119.82179764440622,
"learning_rate": 9.373998396295074e-06,
"loss": 1.2234,
"step": 1535
},
{
"epoch": 0.74,
"grad_norm": 133.77909845657626,
"learning_rate": 9.367234748189481e-06,
"loss": 1.2489,
"step": 1540
},
{
"epoch": 0.74,
"grad_norm": 159.08278727932847,
"learning_rate": 9.360437224421017e-06,
"loss": 1.2952,
"step": 1545
},
{
"epoch": 0.74,
"grad_norm": 63.10040462726909,
"learning_rate": 9.353605877716548e-06,
"loss": 1.2614,
"step": 1550
},
{
"epoch": 0.74,
"grad_norm": 34.04091339102446,
"learning_rate": 9.346740761065306e-06,
"loss": 1.2531,
"step": 1555
},
{
"epoch": 0.75,
"grad_norm": 35.16785924665489,
"learning_rate": 9.339841927718462e-06,
"loss": 1.2603,
"step": 1560
},
{
"epoch": 0.75,
"grad_norm": 23.122318600475715,
"learning_rate": 9.332909431188727e-06,
"loss": 1.2556,
"step": 1565
},
{
"epoch": 0.75,
"grad_norm": 67.9225817759164,
"learning_rate": 9.325943325249919e-06,
"loss": 1.2678,
"step": 1570
},
{
"epoch": 0.75,
"grad_norm": 42.36803318752,
"learning_rate": 9.31894366393657e-06,
"loss": 1.2595,
"step": 1575
},
{
"epoch": 0.76,
"grad_norm": 38.24892791593148,
"learning_rate": 9.311910501543484e-06,
"loss": 1.2649,
"step": 1580
},
{
"epoch": 0.76,
"grad_norm": 33.072986204578875,
"learning_rate": 9.304843892625328e-06,
"loss": 1.2265,
"step": 1585
},
{
"epoch": 0.76,
"grad_norm": 51.375473350432735,
"learning_rate": 9.297743891996206e-06,
"loss": 1.2374,
"step": 1590
},
{
"epoch": 0.76,
"grad_norm": 72.99742141834597,
"learning_rate": 9.290610554729234e-06,
"loss": 1.2347,
"step": 1595
},
{
"epoch": 0.77,
"grad_norm": 44.69780516153163,
"learning_rate": 9.283443936156115e-06,
"loss": 1.2385,
"step": 1600
},
{
"epoch": 0.77,
"grad_norm": 29.754510284616572,
"learning_rate": 9.276244091866706e-06,
"loss": 1.2023,
"step": 1605
},
{
"epoch": 0.77,
"grad_norm": 38.67257841119507,
"learning_rate": 9.269011077708587e-06,
"loss": 1.1824,
"step": 1610
},
{
"epoch": 0.77,
"grad_norm": 13.441846216080211,
"learning_rate": 9.261744949786631e-06,
"loss": 1.2361,
"step": 1615
},
{
"epoch": 0.78,
"grad_norm": 27.55608097284254,
"learning_rate": 9.254445764462567e-06,
"loss": 1.233,
"step": 1620
},
{
"epoch": 0.78,
"grad_norm": 27.32724884687924,
"learning_rate": 9.247113578354542e-06,
"loss": 1.2303,
"step": 1625
},
{
"epoch": 0.78,
"grad_norm": 32.916185542014496,
"learning_rate": 9.239748448336684e-06,
"loss": 1.1871,
"step": 1630
},
{
"epoch": 0.78,
"grad_norm": 40.26776062578829,
"learning_rate": 9.232350431538656e-06,
"loss": 1.2197,
"step": 1635
},
{
"epoch": 0.79,
"grad_norm": 26.539249140139738,
"learning_rate": 9.224919585345224e-06,
"loss": 1.2424,
"step": 1640
},
{
"epoch": 0.79,
"grad_norm": 42.91548620658504,
"learning_rate": 9.217455967395797e-06,
"loss": 1.235,
"step": 1645
},
{
"epoch": 0.79,
"grad_norm": 25.577426610049567,
"learning_rate": 9.209959635583989e-06,
"loss": 1.1816,
"step": 1650
},
{
"epoch": 0.79,
"grad_norm": 145.37706227604983,
"learning_rate": 9.202430648057168e-06,
"loss": 1.2274,
"step": 1655
},
{
"epoch": 0.79,
"grad_norm": 60.28743939991855,
"learning_rate": 9.19486906321601e-06,
"loss": 1.244,
"step": 1660
},
{
"epoch": 0.8,
"grad_norm": 89.40829440645322,
"learning_rate": 9.18727493971403e-06,
"loss": 1.2293,
"step": 1665
},
{
"epoch": 0.8,
"grad_norm": 30.016261250779323,
"learning_rate": 9.179648336457152e-06,
"loss": 1.2178,
"step": 1670
},
{
"epoch": 0.8,
"grad_norm": 69.79806829225218,
"learning_rate": 9.171989312603226e-06,
"loss": 1.2371,
"step": 1675
},
{
"epoch": 0.8,
"grad_norm": 33.75046239282183,
"learning_rate": 9.164297927561589e-06,
"loss": 1.1828,
"step": 1680
},
{
"epoch": 0.81,
"grad_norm": 30.860464361892042,
"learning_rate": 9.156574240992595e-06,
"loss": 1.2215,
"step": 1685
},
{
"epoch": 0.81,
"grad_norm": 12.603211292014063,
"learning_rate": 9.14881831280715e-06,
"loss": 1.1896,
"step": 1690
},
{
"epoch": 0.81,
"grad_norm": 21.710094514034708,
"learning_rate": 9.141030203166256e-06,
"loss": 1.2149,
"step": 1695
},
{
"epoch": 0.81,
"grad_norm": 29.972993735270094,
"learning_rate": 9.133209972480536e-06,
"loss": 1.21,
"step": 1700
},
{
"epoch": 0.82,
"grad_norm": 32.25125230505326,
"learning_rate": 9.12535768140977e-06,
"loss": 1.1865,
"step": 1705
},
{
"epoch": 0.82,
"grad_norm": 14.510629412132799,
"learning_rate": 9.117473390862423e-06,
"loss": 1.2079,
"step": 1710
},
{
"epoch": 0.82,
"grad_norm": 57.04858449477921,
"learning_rate": 9.109557161995172e-06,
"loss": 1.2233,
"step": 1715
},
{
"epoch": 0.82,
"grad_norm": 23.536365605668102,
"learning_rate": 9.101609056212436e-06,
"loss": 1.1857,
"step": 1720
},
{
"epoch": 0.83,
"grad_norm": 33.14477448262465,
"learning_rate": 9.09362913516589e-06,
"loss": 1.2053,
"step": 1725
},
{
"epoch": 0.83,
"grad_norm": 22.001006135208947,
"learning_rate": 9.085617460754e-06,
"loss": 1.2251,
"step": 1730
},
{
"epoch": 0.83,
"grad_norm": 46.04474480037506,
"learning_rate": 9.077574095121527e-06,
"loss": 1.1855,
"step": 1735
},
{
"epoch": 0.83,
"grad_norm": 178.06471092476687,
"learning_rate": 9.069499100659062e-06,
"loss": 1.214,
"step": 1740
},
{
"epoch": 0.84,
"grad_norm": 112.1066127393893,
"learning_rate": 9.061392540002529e-06,
"loss": 1.2568,
"step": 1745
},
{
"epoch": 0.84,
"grad_norm": 32.7696338020363,
"learning_rate": 9.053254476032707e-06,
"loss": 1.2082,
"step": 1750
},
{
"epoch": 0.84,
"grad_norm": 32.2737586121428,
"learning_rate": 9.045084971874738e-06,
"loss": 1.229,
"step": 1755
},
{
"epoch": 0.84,
"grad_norm": 122.12017086229386,
"learning_rate": 9.036884090897636e-06,
"loss": 1.1983,
"step": 1760
},
{
"epoch": 0.84,
"grad_norm": 55.543532987449844,
"learning_rate": 9.028651896713801e-06,
"loss": 1.2442,
"step": 1765
},
{
"epoch": 0.85,
"grad_norm": 47.727538162129946,
"learning_rate": 9.02038845317852e-06,
"loss": 1.224,
"step": 1770
},
{
"epoch": 0.85,
"grad_norm": 31.505266474855347,
"learning_rate": 9.012093824389479e-06,
"loss": 1.1994,
"step": 1775
},
{
"epoch": 0.85,
"grad_norm": 15.24770768078801,
"learning_rate": 9.003768074686252e-06,
"loss": 1.2407,
"step": 1780
},
{
"epoch": 0.85,
"grad_norm": 14.709528337913046,
"learning_rate": 8.995411268649823e-06,
"loss": 1.1652,
"step": 1785
},
{
"epoch": 0.86,
"grad_norm": 14.135555056286536,
"learning_rate": 8.987023471102065e-06,
"loss": 1.2027,
"step": 1790
},
{
"epoch": 0.86,
"grad_norm": 16.444958963981545,
"learning_rate": 8.978604747105247e-06,
"loss": 1.1981,
"step": 1795
},
{
"epoch": 0.86,
"grad_norm": 20.81255987516656,
"learning_rate": 8.970155161961525e-06,
"loss": 1.168,
"step": 1800
},
{
"epoch": 0.86,
"grad_norm": 48.31156166566541,
"learning_rate": 8.961674781212441e-06,
"loss": 1.2039,
"step": 1805
},
{
"epoch": 0.87,
"grad_norm": 18.590219619000113,
"learning_rate": 8.953163670638417e-06,
"loss": 1.2163,
"step": 1810
},
{
"epoch": 0.87,
"grad_norm": 281.28025736153074,
"learning_rate": 8.944621896258226e-06,
"loss": 1.2775,
"step": 1815
},
{
"epoch": 0.87,
"grad_norm": 123.08320379278337,
"learning_rate": 8.936049524328503e-06,
"loss": 1.354,
"step": 1820
},
{
"epoch": 0.87,
"grad_norm": 75.75731691411846,
"learning_rate": 8.927446621343224e-06,
"loss": 1.3129,
"step": 1825
},
{
"epoch": 0.88,
"grad_norm": 34.71176581843331,
"learning_rate": 8.918813254033176e-06,
"loss": 1.273,
"step": 1830
},
{
"epoch": 0.88,
"grad_norm": 49.87961729397193,
"learning_rate": 8.91014948936546e-06,
"loss": 1.2599,
"step": 1835
},
{
"epoch": 0.88,
"grad_norm": 106.4378468049301,
"learning_rate": 8.901455394542964e-06,
"loss": 1.2263,
"step": 1840
},
{
"epoch": 0.88,
"grad_norm": 56.95512794355685,
"learning_rate": 8.89273103700383e-06,
"loss": 1.2202,
"step": 1845
},
{
"epoch": 0.89,
"grad_norm": 24.944003009115644,
"learning_rate": 8.883976484420952e-06,
"loss": 1.2598,
"step": 1850
},
{
"epoch": 0.89,
"grad_norm": 70.57804381679917,
"learning_rate": 8.875191804701435e-06,
"loss": 1.2316,
"step": 1855
},
{
"epoch": 0.89,
"grad_norm": 53.299298773769905,
"learning_rate": 8.866377065986073e-06,
"loss": 1.2213,
"step": 1860
},
{
"epoch": 0.89,
"grad_norm": 40.33989956408778,
"learning_rate": 8.857532336648824e-06,
"loss": 1.2235,
"step": 1865
},
{
"epoch": 0.9,
"grad_norm": 65.5400836902914,
"learning_rate": 8.848657685296273e-06,
"loss": 1.1612,
"step": 1870
},
{
"epoch": 0.9,
"grad_norm": 27.965897185320582,
"learning_rate": 8.839753180767108e-06,
"loss": 1.1724,
"step": 1875
},
{
"epoch": 0.9,
"grad_norm": 45.21745270502914,
"learning_rate": 8.830818892131574e-06,
"loss": 1.2201,
"step": 1880
},
{
"epoch": 0.9,
"grad_norm": 37.48517103110921,
"learning_rate": 8.821854888690951e-06,
"loss": 1.2056,
"step": 1885
},
{
"epoch": 0.9,
"grad_norm": 26.14460775927921,
"learning_rate": 8.812861239977009e-06,
"loss": 1.1405,
"step": 1890
},
{
"epoch": 0.91,
"grad_norm": 30.756963393435644,
"learning_rate": 8.803838015751466e-06,
"loss": 1.2068,
"step": 1895
},
{
"epoch": 0.91,
"grad_norm": 15.872518453502558,
"learning_rate": 8.79478528600546e-06,
"loss": 1.1709,
"step": 1900
},
{
"epoch": 0.91,
"grad_norm": 21.50208498871656,
"learning_rate": 8.785703120958984e-06,
"loss": 1.1677,
"step": 1905
},
{
"epoch": 0.91,
"grad_norm": 10.856118691525518,
"learning_rate": 8.776591591060362e-06,
"loss": 1.2264,
"step": 1910
},
{
"epoch": 0.92,
"grad_norm": 40.954671585435015,
"learning_rate": 8.767450766985695e-06,
"loss": 1.1857,
"step": 1915
},
{
"epoch": 0.92,
"grad_norm": 49.58537287482142,
"learning_rate": 8.758280719638308e-06,
"loss": 1.1964,
"step": 1920
},
{
"epoch": 0.92,
"grad_norm": 13.049704664247738,
"learning_rate": 8.749081520148208e-06,
"loss": 1.2453,
"step": 1925
},
{
"epoch": 0.92,
"grad_norm": 23.785391381219522,
"learning_rate": 8.739853239871525e-06,
"loss": 1.153,
"step": 1930
},
{
"epoch": 0.93,
"grad_norm": 60.40460871725501,
"learning_rate": 8.730595950389968e-06,
"loss": 1.1967,
"step": 1935
},
{
"epoch": 0.93,
"grad_norm": 115.33237835715576,
"learning_rate": 8.72130972351026e-06,
"loss": 1.1745,
"step": 1940
},
{
"epoch": 0.93,
"grad_norm": 74.23673168195631,
"learning_rate": 8.711994631263582e-06,
"loss": 1.2084,
"step": 1945
},
{
"epoch": 0.93,
"grad_norm": 17.086398510709703,
"learning_rate": 8.702650745905022e-06,
"loss": 1.1629,
"step": 1950
},
{
"epoch": 0.94,
"grad_norm": 17.745494370396166,
"learning_rate": 8.69327813991301e-06,
"loss": 1.199,
"step": 1955
},
{
"epoch": 0.94,
"grad_norm": 64.42565981438187,
"learning_rate": 8.683876885988754e-06,
"loss": 1.1997,
"step": 1960
},
{
"epoch": 0.94,
"grad_norm": 47.22856176111695,
"learning_rate": 8.674447057055673e-06,
"loss": 1.1512,
"step": 1965
},
{
"epoch": 0.94,
"grad_norm": 18.373253778486156,
"learning_rate": 8.664988726258846e-06,
"loss": 1.1819,
"step": 1970
},
{
"epoch": 0.95,
"grad_norm": 40.36729372062456,
"learning_rate": 8.655501966964423e-06,
"loss": 1.181,
"step": 1975
},
{
"epoch": 0.95,
"grad_norm": 41.205472594629185,
"learning_rate": 8.64598685275908e-06,
"loss": 1.1711,
"step": 1980
},
{
"epoch": 0.95,
"grad_norm": 64.89381631747843,
"learning_rate": 8.636443457449423e-06,
"loss": 1.1582,
"step": 1985
},
{
"epoch": 0.95,
"grad_norm": 55.965167818409526,
"learning_rate": 8.626871855061438e-06,
"loss": 1.1733,
"step": 1990
},
{
"epoch": 0.96,
"grad_norm": 11.74394826827574,
"learning_rate": 8.617272119839903e-06,
"loss": 1.1641,
"step": 1995
},
{
"epoch": 0.96,
"grad_norm": 18.818718263872356,
"learning_rate": 8.607644326247815e-06,
"loss": 1.2019,
"step": 2000
},
{
"epoch": 0.96,
"grad_norm": 24.356848225004466,
"learning_rate": 8.597988548965816e-06,
"loss": 1.182,
"step": 2005
},
{
"epoch": 0.96,
"grad_norm": 38.96801509455619,
"learning_rate": 8.58830486289161e-06,
"loss": 1.1565,
"step": 2010
},
{
"epoch": 0.96,
"grad_norm": 18.75864031329419,
"learning_rate": 8.578593343139383e-06,
"loss": 1.1664,
"step": 2015
},
{
"epoch": 0.97,
"grad_norm": 11.008308427656528,
"learning_rate": 8.568854065039221e-06,
"loss": 1.1862,
"step": 2020
},
{
"epoch": 0.97,
"grad_norm": 15.710164994932063,
"learning_rate": 8.559087104136525e-06,
"loss": 1.2157,
"step": 2025
},
{
"epoch": 0.97,
"grad_norm": 17.80742108822311,
"learning_rate": 8.549292536191427e-06,
"loss": 1.1919,
"step": 2030
},
{
"epoch": 0.97,
"grad_norm": 52.31204255231068,
"learning_rate": 8.539470437178197e-06,
"loss": 1.1739,
"step": 2035
},
{
"epoch": 0.98,
"grad_norm": 26.783189607816958,
"learning_rate": 8.52962088328466e-06,
"loss": 1.1325,
"step": 2040
},
{
"epoch": 0.98,
"grad_norm": 30.67108147856411,
"learning_rate": 8.519743950911603e-06,
"loss": 1.1427,
"step": 2045
},
{
"epoch": 0.98,
"grad_norm": 35.6598813877968,
"learning_rate": 8.50983971667218e-06,
"loss": 1.1445,
"step": 2050
},
{
"epoch": 0.98,
"grad_norm": 62.30361103014555,
"learning_rate": 8.499908257391324e-06,
"loss": 1.1561,
"step": 2055
},
{
"epoch": 0.99,
"grad_norm": 116.30880412069692,
"learning_rate": 8.489949650105136e-06,
"loss": 1.1698,
"step": 2060
},
{
"epoch": 0.99,
"grad_norm": 48.70919912868703,
"learning_rate": 8.47996397206031e-06,
"loss": 1.1355,
"step": 2065
},
{
"epoch": 0.99,
"grad_norm": 11.716903322124937,
"learning_rate": 8.469951300713513e-06,
"loss": 1.19,
"step": 2070
},
{
"epoch": 0.99,
"grad_norm": 55.354124576341796,
"learning_rate": 8.4599117137308e-06,
"loss": 1.1782,
"step": 2075
},
{
"epoch": 1.0,
"grad_norm": 13.10098703671678,
"learning_rate": 8.449845288986996e-06,
"loss": 1.1548,
"step": 2080
},
{
"epoch": 1.0,
"grad_norm": 35.531377891757835,
"learning_rate": 8.43975210456511e-06,
"loss": 1.1942,
"step": 2085
},
{
"epoch": 1.0,
"eval_loss": 1.175723671913147,
"eval_runtime": 22.7527,
"eval_samples_per_second": 29.975,
"eval_steps_per_second": 3.78,
"step": 2089
},
{
"epoch": 1.0,
"grad_norm": 64.31259722987825,
"learning_rate": 8.429632238755715e-06,
"loss": 1.1549,
"step": 2090
},
{
"epoch": 1.0,
"grad_norm": 21.39883028096174,
"learning_rate": 8.419485770056345e-06,
"loss": 1.0579,
"step": 2095
},
{
"epoch": 1.01,
"grad_norm": 77.26661622606343,
"learning_rate": 8.409312777170887e-06,
"loss": 1.0234,
"step": 2100
},
{
"epoch": 1.01,
"grad_norm": 118.27103629534572,
"learning_rate": 8.399113339008972e-06,
"loss": 1.0453,
"step": 2105
},
{
"epoch": 1.01,
"grad_norm": 10.83419518615051,
"learning_rate": 8.38888753468536e-06,
"loss": 1.0526,
"step": 2110
},
{
"epoch": 1.01,
"grad_norm": 72.66024262226586,
"learning_rate": 8.378635443519327e-06,
"loss": 1.0384,
"step": 2115
},
{
"epoch": 1.01,
"grad_norm": 38.77684521763487,
"learning_rate": 8.36835714503405e-06,
"loss": 1.0805,
"step": 2120
},
{
"epoch": 1.02,
"grad_norm": 41.11902719504846,
"learning_rate": 8.358052718955991e-06,
"loss": 1.0706,
"step": 2125
},
{
"epoch": 1.02,
"grad_norm": 33.58755558749983,
"learning_rate": 8.347722245214278e-06,
"loss": 1.0223,
"step": 2130
},
{
"epoch": 1.02,
"grad_norm": 16.78655551333738,
"learning_rate": 8.337365803940085e-06,
"loss": 1.0325,
"step": 2135
},
{
"epoch": 1.02,
"grad_norm": 15.045629204193741,
"learning_rate": 8.32698347546601e-06,
"loss": 0.9827,
"step": 2140
},
{
"epoch": 1.03,
"grad_norm": 10.386385770355343,
"learning_rate": 8.31657534032545e-06,
"loss": 1.0852,
"step": 2145
},
{
"epoch": 1.03,
"grad_norm": 11.838565864001573,
"learning_rate": 8.306141479251986e-06,
"loss": 1.1009,
"step": 2150
},
{
"epoch": 1.03,
"grad_norm": 19.103702790345068,
"learning_rate": 8.295681973178737e-06,
"loss": 1.0622,
"step": 2155
},
{
"epoch": 1.03,
"grad_norm": 30.27820356954596,
"learning_rate": 8.285196903237756e-06,
"loss": 1.0052,
"step": 2160
},
{
"epoch": 1.04,
"grad_norm": 17.512545947701543,
"learning_rate": 8.274686350759385e-06,
"loss": 1.0538,
"step": 2165
},
{
"epoch": 1.04,
"grad_norm": 17.02979238665457,
"learning_rate": 8.264150397271625e-06,
"loss": 1.0432,
"step": 2170
},
{
"epoch": 1.04,
"grad_norm": 14.250117565435923,
"learning_rate": 8.253589124499513e-06,
"loss": 1.0358,
"step": 2175
},
{
"epoch": 1.04,
"grad_norm": 21.657691854472944,
"learning_rate": 8.243002614364477e-06,
"loss": 1.0336,
"step": 2180
},
{
"epoch": 1.05,
"grad_norm": 26.847149711495824,
"learning_rate": 8.232390948983708e-06,
"loss": 1.0533,
"step": 2185
},
{
"epoch": 1.05,
"grad_norm": 28.332926967675494,
"learning_rate": 8.22175421066952e-06,
"loss": 1.0425,
"step": 2190
},
{
"epoch": 1.05,
"grad_norm": 40.468943873941015,
"learning_rate": 8.211092481928716e-06,
"loss": 1.0442,
"step": 2195
},
{
"epoch": 1.05,
"grad_norm": 103.1466147571405,
"learning_rate": 8.200405845461936e-06,
"loss": 1.0271,
"step": 2200
},
{
"epoch": 1.06,
"grad_norm": 19.81840592856725,
"learning_rate": 8.189694384163032e-06,
"loss": 1.039,
"step": 2205
},
{
"epoch": 1.06,
"grad_norm": 92.75441725487488,
"learning_rate": 8.178958181118408e-06,
"loss": 1.0412,
"step": 2210
},
{
"epoch": 1.06,
"grad_norm": 65.26337425081044,
"learning_rate": 8.168197319606393e-06,
"loss": 1.0292,
"step": 2215
},
{
"epoch": 1.06,
"grad_norm": 48.16857676706638,
"learning_rate": 8.15741188309658e-06,
"loss": 1.0589,
"step": 2220
},
{
"epoch": 1.07,
"grad_norm": 20.28860083230503,
"learning_rate": 8.146601955249187e-06,
"loss": 1.1085,
"step": 2225
},
{
"epoch": 1.07,
"grad_norm": 18.544356908662962,
"learning_rate": 8.135767619914409e-06,
"loss": 1.0253,
"step": 2230
},
{
"epoch": 1.07,
"grad_norm": 54.858949948350926,
"learning_rate": 8.124908961131759e-06,
"loss": 1.0569,
"step": 2235
},
{
"epoch": 1.07,
"grad_norm": 16.01522512004077,
"learning_rate": 8.114026063129423e-06,
"loss": 1.0437,
"step": 2240
},
{
"epoch": 1.07,
"grad_norm": 11.675529471257926,
"learning_rate": 8.103119010323608e-06,
"loss": 1.0305,
"step": 2245
},
{
"epoch": 1.08,
"grad_norm": 22.5562676930228,
"learning_rate": 8.09218788731788e-06,
"loss": 1.0443,
"step": 2250
},
{
"epoch": 1.08,
"grad_norm": 12.811533267496014,
"learning_rate": 8.081232778902517e-06,
"loss": 1.0304,
"step": 2255
},
{
"epoch": 1.08,
"grad_norm": 33.74300992461672,
"learning_rate": 8.07025377005384e-06,
"loss": 1.0305,
"step": 2260
},
{
"epoch": 1.08,
"grad_norm": 11.9386461156158,
"learning_rate": 8.05925094593357e-06,
"loss": 1.0038,
"step": 2265
},
{
"epoch": 1.09,
"grad_norm": 36.44871341029926,
"learning_rate": 8.048224391888149e-06,
"loss": 1.0582,
"step": 2270
},
{
"epoch": 1.09,
"grad_norm": 12.11965617271235,
"learning_rate": 8.03717419344809e-06,
"loss": 1.0115,
"step": 2275
},
{
"epoch": 1.09,
"grad_norm": 21.77486312990387,
"learning_rate": 8.02610043632731e-06,
"loss": 1.0416,
"step": 2280
},
{
"epoch": 1.09,
"grad_norm": 53.681951373927376,
"learning_rate": 8.015003206422466e-06,
"loss": 1.0252,
"step": 2285
},
{
"epoch": 1.1,
"grad_norm": 31.200980457431566,
"learning_rate": 8.003882589812286e-06,
"loss": 1.0308,
"step": 2290
},
{
"epoch": 1.1,
"grad_norm": 15.928989078493425,
"learning_rate": 7.992738672756909e-06,
"loss": 1.0125,
"step": 2295
},
{
"epoch": 1.1,
"grad_norm": 24.536673602683504,
"learning_rate": 7.981571541697201e-06,
"loss": 1.0506,
"step": 2300
},
{
"epoch": 1.1,
"grad_norm": 45.20798198645299,
"learning_rate": 7.970381283254103e-06,
"loss": 1.0187,
"step": 2305
},
{
"epoch": 1.11,
"grad_norm": 18.637019828817333,
"learning_rate": 7.959167984227947e-06,
"loss": 1.0709,
"step": 2310
},
{
"epoch": 1.11,
"grad_norm": 10.828822086637205,
"learning_rate": 7.94793173159778e-06,
"loss": 1.0201,
"step": 2315
},
{
"epoch": 1.11,
"grad_norm": 20.712808146507694,
"learning_rate": 7.936672612520703e-06,
"loss": 1.0394,
"step": 2320
},
{
"epoch": 1.11,
"grad_norm": 25.66403578687023,
"learning_rate": 7.925390714331185e-06,
"loss": 0.9954,
"step": 2325
},
{
"epoch": 1.12,
"grad_norm": 17.744699122207855,
"learning_rate": 7.914086124540385e-06,
"loss": 1.0488,
"step": 2330
},
{
"epoch": 1.12,
"grad_norm": 10.728581836966391,
"learning_rate": 7.902758930835477e-06,
"loss": 0.9978,
"step": 2335
},
{
"epoch": 1.12,
"grad_norm": 14.77659035101248,
"learning_rate": 7.891409221078965e-06,
"loss": 1.0074,
"step": 2340
},
{
"epoch": 1.12,
"grad_norm": 11.324873611774553,
"learning_rate": 7.880037083308015e-06,
"loss": 1.0467,
"step": 2345
},
{
"epoch": 1.12,
"grad_norm": 18.13609193386062,
"learning_rate": 7.868642605733748e-06,
"loss": 1.0262,
"step": 2350
},
{
"epoch": 1.13,
"grad_norm": 21.163363621669056,
"learning_rate": 7.857225876740585e-06,
"loss": 1.0186,
"step": 2355
},
{
"epoch": 1.13,
"grad_norm": 11.682336415878595,
"learning_rate": 7.845786984885532e-06,
"loss": 1.0763,
"step": 2360
},
{
"epoch": 1.13,
"grad_norm": 23.383034088803946,
"learning_rate": 7.834326018897517e-06,
"loss": 1.0527,
"step": 2365
},
{
"epoch": 1.13,
"grad_norm": 25.021889736454884,
"learning_rate": 7.822843067676687e-06,
"loss": 1.062,
"step": 2370
},
{
"epoch": 1.14,
"grad_norm": 40.024432080338464,
"learning_rate": 7.81133822029373e-06,
"loss": 1.0081,
"step": 2375
},
{
"epoch": 1.14,
"grad_norm": 40.54892814630993,
"learning_rate": 7.799811565989168e-06,
"loss": 1.0143,
"step": 2380
},
{
"epoch": 1.14,
"grad_norm": 52.467160321768525,
"learning_rate": 7.788263194172684e-06,
"loss": 1.0009,
"step": 2385
},
{
"epoch": 1.14,
"grad_norm": 19.394954321436536,
"learning_rate": 7.77669319442241e-06,
"loss": 1.0507,
"step": 2390
},
{
"epoch": 1.15,
"grad_norm": 31.842098284984704,
"learning_rate": 7.76510165648425e-06,
"loss": 1.002,
"step": 2395
},
{
"epoch": 1.15,
"grad_norm": 11.799095067117374,
"learning_rate": 7.753488670271168e-06,
"loss": 1.0261,
"step": 2400
},
{
"epoch": 1.15,
"grad_norm": 11.296471286729275,
"learning_rate": 7.7418543258625e-06,
"loss": 1.0273,
"step": 2405
},
{
"epoch": 1.15,
"grad_norm": 14.687616170493245,
"learning_rate": 7.730198713503254e-06,
"loss": 1.0438,
"step": 2410
},
{
"epoch": 1.16,
"grad_norm": 15.935842087773558,
"learning_rate": 7.718521923603404e-06,
"loss": 1.075,
"step": 2415
},
{
"epoch": 1.16,
"grad_norm": 34.66083764640505,
"learning_rate": 7.706824046737202e-06,
"loss": 1.0269,
"step": 2420
},
{
"epoch": 1.16,
"grad_norm": 14.87676217653969,
"learning_rate": 7.69510517364246e-06,
"loss": 1.0433,
"step": 2425
},
{
"epoch": 1.16,
"grad_norm": 24.843001677592717,
"learning_rate": 7.68336539521985e-06,
"loss": 1.0354,
"step": 2430
},
{
"epoch": 1.17,
"grad_norm": 49.94987574448101,
"learning_rate": 7.671604802532212e-06,
"loss": 1.0349,
"step": 2435
},
{
"epoch": 1.17,
"grad_norm": 13.543687363156554,
"learning_rate": 7.65982348680383e-06,
"loss": 1.0423,
"step": 2440
},
{
"epoch": 1.17,
"grad_norm": 16.86556164508137,
"learning_rate": 7.648021539419737e-06,
"loss": 1.0705,
"step": 2445
},
{
"epoch": 1.17,
"grad_norm": 32.47096031936087,
"learning_rate": 7.636199051924995e-06,
"loss": 1.0183,
"step": 2450
},
{
"epoch": 1.18,
"grad_norm": 79.54617274412453,
"learning_rate": 7.624356116023996e-06,
"loss": 1.0353,
"step": 2455
},
{
"epoch": 1.18,
"grad_norm": 142.10890803102654,
"learning_rate": 7.612492823579744e-06,
"loss": 1.0274,
"step": 2460
},
{
"epoch": 1.18,
"grad_norm": 47.23173993730145,
"learning_rate": 7.600609266613146e-06,
"loss": 1.0489,
"step": 2465
},
{
"epoch": 1.18,
"grad_norm": 59.92124516608769,
"learning_rate": 7.588705537302293e-06,
"loss": 0.9989,
"step": 2470
},
{
"epoch": 1.18,
"grad_norm": 59.22799257897706,
"learning_rate": 7.5767817279817505e-06,
"loss": 1.0556,
"step": 2475
},
{
"epoch": 1.19,
"grad_norm": 13.151294957406723,
"learning_rate": 7.564837931141838e-06,
"loss": 1.0351,
"step": 2480
},
{
"epoch": 1.19,
"grad_norm": 17.68345473764651,
"learning_rate": 7.552874239427919e-06,
"loss": 1.0284,
"step": 2485
},
{
"epoch": 1.19,
"grad_norm": 11.385377505873633,
"learning_rate": 7.540890745639671e-06,
"loss": 1.0024,
"step": 2490
},
{
"epoch": 1.19,
"grad_norm": 12.275367858833812,
"learning_rate": 7.5288875427303744e-06,
"loss": 1.0409,
"step": 2495
},
{
"epoch": 1.2,
"grad_norm": 11.83637535490282,
"learning_rate": 7.516864723806187e-06,
"loss": 1.0344,
"step": 2500
},
{
"epoch": 1.2,
"grad_norm": 34.797486247797174,
"learning_rate": 7.504822382125432e-06,
"loss": 1.0355,
"step": 2505
},
{
"epoch": 1.2,
"grad_norm": 30.440405939761202,
"learning_rate": 7.492760611097853e-06,
"loss": 1.0466,
"step": 2510
},
{
"epoch": 1.2,
"grad_norm": 39.27050022356261,
"learning_rate": 7.480679504283911e-06,
"loss": 1.0358,
"step": 2515
},
{
"epoch": 1.21,
"grad_norm": 111.28841219490606,
"learning_rate": 7.468579155394049e-06,
"loss": 1.0479,
"step": 2520
},
{
"epoch": 1.21,
"grad_norm": 17.371669561861527,
"learning_rate": 7.456459658287966e-06,
"loss": 1.0019,
"step": 2525
},
{
"epoch": 1.21,
"grad_norm": 38.99618999463473,
"learning_rate": 7.4443211069738915e-06,
"loss": 1.0507,
"step": 2530
},
{
"epoch": 1.21,
"grad_norm": 67.27417608891825,
"learning_rate": 7.432163595607851e-06,
"loss": 1.068,
"step": 2535
},
{
"epoch": 1.22,
"grad_norm": 25.28556127116984,
"learning_rate": 7.4199872184929386e-06,
"loss": 1.0468,
"step": 2540
},
{
"epoch": 1.22,
"grad_norm": 22.07083531651455,
"learning_rate": 7.4077920700785884e-06,
"loss": 1.0156,
"step": 2545
},
{
"epoch": 1.22,
"grad_norm": 18.96005040941704,
"learning_rate": 7.39557824495984e-06,
"loss": 1.0286,
"step": 2550
},
{
"epoch": 1.22,
"grad_norm": 26.805165205719824,
"learning_rate": 7.383345837876601e-06,
"loss": 1.0801,
"step": 2555
},
{
"epoch": 1.23,
"grad_norm": 22.97648522509187,
"learning_rate": 7.371094943712915e-06,
"loss": 0.9935,
"step": 2560
},
{
"epoch": 1.23,
"grad_norm": 30.930291180833137,
"learning_rate": 7.358825657496228e-06,
"loss": 1.0561,
"step": 2565
},
{
"epoch": 1.23,
"grad_norm": 33.315077001079466,
"learning_rate": 7.34653807439665e-06,
"loss": 1.046,
"step": 2570
},
{
"epoch": 1.23,
"grad_norm": 19.150801336060933,
"learning_rate": 7.334232289726213e-06,
"loss": 1.0413,
"step": 2575
},
{
"epoch": 1.24,
"grad_norm": 20.141715934579043,
"learning_rate": 7.321908398938134e-06,
"loss": 1.0382,
"step": 2580
},
{
"epoch": 1.24,
"grad_norm": 12.731251418816345,
"learning_rate": 7.3095664976260795e-06,
"loss": 1.0305,
"step": 2585
},
{
"epoch": 1.24,
"grad_norm": 16.580467000363992,
"learning_rate": 7.297206681523414e-06,
"loss": 0.975,
"step": 2590
},
{
"epoch": 1.24,
"grad_norm": 32.477639163221376,
"learning_rate": 7.284829046502467e-06,
"loss": 1.0165,
"step": 2595
},
{
"epoch": 1.24,
"grad_norm": 17.307265437975822,
"learning_rate": 7.2724336885737855e-06,
"loss": 1.0223,
"step": 2600
},
{
"epoch": 1.25,
"grad_norm": 22.08915667541513,
"learning_rate": 7.260020703885385e-06,
"loss": 1.0413,
"step": 2605
},
{
"epoch": 1.25,
"grad_norm": 16.68626641098195,
"learning_rate": 7.247590188722011e-06,
"loss": 1.0494,
"step": 2610
},
{
"epoch": 1.25,
"grad_norm": 14.191300352246056,
"learning_rate": 7.235142239504391e-06,
"loss": 1.0011,
"step": 2615
},
{
"epoch": 1.25,
"grad_norm": 11.65852481350864,
"learning_rate": 7.222676952788483e-06,
"loss": 1.0231,
"step": 2620
},
{
"epoch": 1.26,
"grad_norm": 16.888900245514556,
"learning_rate": 7.210194425264723e-06,
"loss": 1.0491,
"step": 2625
},
{
"epoch": 1.26,
"grad_norm": 59.195202328708376,
"learning_rate": 7.197694753757285e-06,
"loss": 1.0551,
"step": 2630
},
{
"epoch": 1.26,
"grad_norm": 60.11386477056807,
"learning_rate": 7.185178035223327e-06,
"loss": 1.0143,
"step": 2635
},
{
"epoch": 1.26,
"grad_norm": 11.576504505121187,
"learning_rate": 7.172644366752233e-06,
"loss": 1.0531,
"step": 2640
},
{
"epoch": 1.27,
"grad_norm": 10.46058230003902,
"learning_rate": 7.160093845564865e-06,
"loss": 1.0268,
"step": 2645
},
{
"epoch": 1.27,
"grad_norm": 26.18520512409148,
"learning_rate": 7.147526569012808e-06,
"loss": 1.0367,
"step": 2650
},
{
"epoch": 1.27,
"grad_norm": 25.2465101190413,
"learning_rate": 7.134942634577615e-06,
"loss": 1.0497,
"step": 2655
},
{
"epoch": 1.27,
"grad_norm": 26.819800933589967,
"learning_rate": 7.12234213987005e-06,
"loss": 1.0402,
"step": 2660
},
{
"epoch": 1.28,
"grad_norm": 68.2338741882758,
"learning_rate": 7.109725182629331e-06,
"loss": 1.0688,
"step": 2665
},
{
"epoch": 1.28,
"grad_norm": 27.32313574583239,
"learning_rate": 7.0970918607223725e-06,
"loss": 1.04,
"step": 2670
},
{
"epoch": 1.28,
"grad_norm": 9.850956485372377,
"learning_rate": 7.084442272143026e-06,
"loss": 1.0213,
"step": 2675
},
{
"epoch": 1.28,
"grad_norm": 34.810157571485284,
"learning_rate": 7.071776515011322e-06,
"loss": 1.0203,
"step": 2680
},
{
"epoch": 1.29,
"grad_norm": 23.81807385682529,
"learning_rate": 7.059094687572701e-06,
"loss": 1.0457,
"step": 2685
},
{
"epoch": 1.29,
"grad_norm": 43.977389848239504,
"learning_rate": 7.046396888197267e-06,
"loss": 1.0365,
"step": 2690
},
{
"epoch": 1.29,
"grad_norm": 48.078854734616904,
"learning_rate": 7.033683215379002e-06,
"loss": 1.038,
"step": 2695
},
{
"epoch": 1.29,
"grad_norm": 23.57919329607693,
"learning_rate": 7.020953767735026e-06,
"loss": 1.0466,
"step": 2700
},
{
"epoch": 1.29,
"grad_norm": 22.818858532991534,
"learning_rate": 7.008208644004816e-06,
"loss": 1.0056,
"step": 2705
},
{
"epoch": 1.3,
"grad_norm": 18.334772366175663,
"learning_rate": 6.995447943049445e-06,
"loss": 1.0252,
"step": 2710
},
{
"epoch": 1.3,
"grad_norm": 11.399434037005719,
"learning_rate": 6.982671763850814e-06,
"loss": 1.0142,
"step": 2715
},
{
"epoch": 1.3,
"grad_norm": 10.28155898730571,
"learning_rate": 6.96988020551089e-06,
"loss": 1.0351,
"step": 2720
},
{
"epoch": 1.3,
"grad_norm": 17.832996126580674,
"learning_rate": 6.957073367250926e-06,
"loss": 1.0158,
"step": 2725
},
{
"epoch": 1.31,
"grad_norm": 48.02957071795844,
"learning_rate": 6.944251348410702e-06,
"loss": 1.0419,
"step": 2730
},
{
"epoch": 1.31,
"grad_norm": 63.920502841577225,
"learning_rate": 6.9314142484477495e-06,
"loss": 1.0106,
"step": 2735
},
{
"epoch": 1.31,
"grad_norm": 19.435179993016284,
"learning_rate": 6.9185621669365824e-06,
"loss": 1.005,
"step": 2740
},
{
"epoch": 1.31,
"grad_norm": 47.99412320017374,
"learning_rate": 6.905695203567919e-06,
"loss": 1.0217,
"step": 2745
},
{
"epoch": 1.32,
"grad_norm": 57.947004355499914,
"learning_rate": 6.892813458147919e-06,
"loss": 1.0484,
"step": 2750
},
{
"epoch": 1.32,
"grad_norm": 30.256013631909916,
"learning_rate": 6.879917030597397e-06,
"loss": 1.0396,
"step": 2755
},
{
"epoch": 1.32,
"grad_norm": 75.76088023250433,
"learning_rate": 6.867006020951057e-06,
"loss": 1.0639,
"step": 2760
},
{
"epoch": 1.32,
"grad_norm": 98.1978174208928,
"learning_rate": 6.85408052935671e-06,
"loss": 1.0113,
"step": 2765
},
{
"epoch": 1.33,
"grad_norm": 44.1181635308628,
"learning_rate": 6.8411406560745056e-06,
"loss": 1.0121,
"step": 2770
},
{
"epoch": 1.33,
"grad_norm": 25.27383288470199,
"learning_rate": 6.828186501476145e-06,
"loss": 1.0495,
"step": 2775
},
{
"epoch": 1.33,
"grad_norm": 92.04463940084823,
"learning_rate": 6.815218166044107e-06,
"loss": 1.2186,
"step": 2780
},
{
"epoch": 1.33,
"grad_norm": 110.44289224202511,
"learning_rate": 6.802235750370869e-06,
"loss": 1.0753,
"step": 2785
},
{
"epoch": 1.34,
"grad_norm": 92.89557453494074,
"learning_rate": 6.789239355158124e-06,
"loss": 1.0942,
"step": 2790
},
{
"epoch": 1.34,
"grad_norm": 20.62905692511695,
"learning_rate": 6.776229081216001e-06,
"loss": 1.0443,
"step": 2795
},
{
"epoch": 1.34,
"grad_norm": 144.88795395727107,
"learning_rate": 6.763205029462286e-06,
"loss": 1.113,
"step": 2800
},
{
"epoch": 1.34,
"grad_norm": 69.5075489930535,
"learning_rate": 6.750167300921635e-06,
"loss": 1.0745,
"step": 2805
},
{
"epoch": 1.35,
"grad_norm": 14.109594279697406,
"learning_rate": 6.737115996724788e-06,
"loss": 1.0656,
"step": 2810
},
{
"epoch": 1.35,
"grad_norm": 25.34341047420732,
"learning_rate": 6.7240512181077955e-06,
"loss": 1.0636,
"step": 2815
},
{
"epoch": 1.35,
"grad_norm": 23.00343909441898,
"learning_rate": 6.71097306641122e-06,
"loss": 1.0303,
"step": 2820
},
{
"epoch": 1.35,
"grad_norm": 47.51176861801936,
"learning_rate": 6.697881643079361e-06,
"loss": 1.0088,
"step": 2825
},
{
"epoch": 1.35,
"grad_norm": 47.837567720208895,
"learning_rate": 6.684777049659457e-06,
"loss": 1.056,
"step": 2830
},
{
"epoch": 1.36,
"grad_norm": 31.84878815503996,
"learning_rate": 6.67165938780091e-06,
"loss": 1.0163,
"step": 2835
},
{
"epoch": 1.36,
"grad_norm": 30.05363223244471,
"learning_rate": 6.658528759254486e-06,
"loss": 1.0036,
"step": 2840
},
{
"epoch": 1.36,
"grad_norm": 28.173152010244216,
"learning_rate": 6.645385265871534e-06,
"loss": 1.0205,
"step": 2845
},
{
"epoch": 1.36,
"grad_norm": 37.432071587397026,
"learning_rate": 6.632229009603193e-06,
"loss": 1.023,
"step": 2850
},
{
"epoch": 1.37,
"grad_norm": 38.081206153241624,
"learning_rate": 6.619060092499597e-06,
"loss": 1.0324,
"step": 2855
},
{
"epoch": 1.37,
"grad_norm": 36.999778028541904,
"learning_rate": 6.605878616709091e-06,
"loss": 1.0177,
"step": 2860
},
{
"epoch": 1.37,
"grad_norm": 30.64923166629876,
"learning_rate": 6.592684684477435e-06,
"loss": 1.0482,
"step": 2865
},
{
"epoch": 1.37,
"grad_norm": 24.411372445554925,
"learning_rate": 6.579478398147006e-06,
"loss": 1.0072,
"step": 2870
},
{
"epoch": 1.38,
"grad_norm": 22.62967266475027,
"learning_rate": 6.566259860156015e-06,
"loss": 1.0411,
"step": 2875
},
{
"epoch": 1.38,
"grad_norm": 40.22330702675117,
"learning_rate": 6.553029173037703e-06,
"loss": 1.0464,
"step": 2880
},
{
"epoch": 1.38,
"grad_norm": 12.276757989932733,
"learning_rate": 6.539786439419551e-06,
"loss": 0.9993,
"step": 2885
},
{
"epoch": 1.38,
"grad_norm": 67.13521060460982,
"learning_rate": 6.526531762022481e-06,
"loss": 0.972,
"step": 2890
},
{
"epoch": 1.39,
"grad_norm": 9.30562079899624,
"learning_rate": 6.513265243660057e-06,
"loss": 1.0359,
"step": 2895
},
{
"epoch": 1.39,
"grad_norm": 15.463181296386741,
"learning_rate": 6.499986987237698e-06,
"loss": 1.0689,
"step": 2900
},
{
"epoch": 1.39,
"grad_norm": 38.35987368702724,
"learning_rate": 6.486697095751866e-06,
"loss": 1.046,
"step": 2905
},
{
"epoch": 1.39,
"grad_norm": 51.29536765440244,
"learning_rate": 6.473395672289276e-06,
"loss": 1.0023,
"step": 2910
},
{
"epoch": 1.4,
"grad_norm": 22.74499992275112,
"learning_rate": 6.460082820026095e-06,
"loss": 1.0224,
"step": 2915
},
{
"epoch": 1.4,
"grad_norm": 14.121542219072454,
"learning_rate": 6.446758642227139e-06,
"loss": 0.9981,
"step": 2920
},
{
"epoch": 1.4,
"grad_norm": 16.030000552808918,
"learning_rate": 6.433423242245074e-06,
"loss": 0.9867,
"step": 2925
},
{
"epoch": 1.4,
"grad_norm": 10.406537832828766,
"learning_rate": 6.420076723519615e-06,
"loss": 1.011,
"step": 2930
},
{
"epoch": 1.4,
"grad_norm": 37.84059572836015,
"learning_rate": 6.40671918957672e-06,
"loss": 1.0331,
"step": 2935
},
{
"epoch": 1.41,
"grad_norm": 14.667305508413728,
"learning_rate": 6.393350744027791e-06,
"loss": 1.0198,
"step": 2940
},
{
"epoch": 1.41,
"grad_norm": 36.13921979135307,
"learning_rate": 6.379971490568873e-06,
"loss": 0.9861,
"step": 2945
},
{
"epoch": 1.41,
"grad_norm": 26.570245127974943,
"learning_rate": 6.366581532979839e-06,
"loss": 1.0279,
"step": 2950
},
{
"epoch": 1.41,
"grad_norm": 17.62017661601059,
"learning_rate": 6.353180975123595e-06,
"loss": 1.0188,
"step": 2955
},
{
"epoch": 1.42,
"grad_norm": 32.95948495024234,
"learning_rate": 6.339769920945271e-06,
"loss": 1.0063,
"step": 2960
},
{
"epoch": 1.42,
"grad_norm": 15.73800595190658,
"learning_rate": 6.326348474471411e-06,
"loss": 1.0148,
"step": 2965
},
{
"epoch": 1.42,
"grad_norm": 56.79929788589073,
"learning_rate": 6.312916739809175e-06,
"loss": 1.0393,
"step": 2970
},
{
"epoch": 1.42,
"grad_norm": 16.480726935596905,
"learning_rate": 6.299474821145523e-06,
"loss": 1.0477,
"step": 2975
},
{
"epoch": 1.43,
"grad_norm": 38.667697609989624,
"learning_rate": 6.286022822746412e-06,
"loss": 1.0143,
"step": 2980
},
{
"epoch": 1.43,
"grad_norm": 58.92904707572273,
"learning_rate": 6.272560848955982e-06,
"loss": 1.0167,
"step": 2985
},
{
"epoch": 1.43,
"grad_norm": 20.60934222232883,
"learning_rate": 6.259089004195753e-06,
"loss": 1.0412,
"step": 2990
},
{
"epoch": 1.43,
"grad_norm": 11.609835173443809,
"learning_rate": 6.2456073929638105e-06,
"loss": 1.0344,
"step": 2995
},
{
"epoch": 1.44,
"grad_norm": 15.925403203742539,
"learning_rate": 6.232116119833998e-06,
"loss": 1.0302,
"step": 3000
},
{
"epoch": 1.44,
"grad_norm": 26.074125068096766,
"learning_rate": 6.218615289455099e-06,
"loss": 0.9989,
"step": 3005
},
{
"epoch": 1.44,
"grad_norm": 45.603722662225124,
"learning_rate": 6.2051050065500385e-06,
"loss": 1.0237,
"step": 3010
},
{
"epoch": 1.44,
"grad_norm": 58.41753678643921,
"learning_rate": 6.191585375915056e-06,
"loss": 1.0412,
"step": 3015
},
{
"epoch": 1.45,
"grad_norm": 25.159511982535044,
"learning_rate": 6.178056502418903e-06,
"loss": 1.0237,
"step": 3020
},
{
"epoch": 1.45,
"grad_norm": 28.18218931514194,
"learning_rate": 6.164518491002024e-06,
"loss": 1.0316,
"step": 3025
},
{
"epoch": 1.45,
"grad_norm": 47.366261148901124,
"learning_rate": 6.1509714466757434e-06,
"loss": 1.0241,
"step": 3030
},
{
"epoch": 1.45,
"grad_norm": 38.38409617127109,
"learning_rate": 6.1374154745214546e-06,
"loss": 1.0001,
"step": 3035
},
{
"epoch": 1.46,
"grad_norm": 38.56253019339064,
"learning_rate": 6.1238506796898e-06,
"loss": 1.0271,
"step": 3040
},
{
"epoch": 1.46,
"grad_norm": 12.608891832086847,
"learning_rate": 6.110277167399861e-06,
"loss": 0.9947,
"step": 3045
},
{
"epoch": 1.46,
"grad_norm": 19.169158702468987,
"learning_rate": 6.0966950429383296e-06,
"loss": 1.0307,
"step": 3050
},
{
"epoch": 1.46,
"grad_norm": 41.03614055535075,
"learning_rate": 6.083104411658713e-06,
"loss": 1.0259,
"step": 3055
},
{
"epoch": 1.46,
"grad_norm": 19.64497035839063,
"learning_rate": 6.069505378980496e-06,
"loss": 1.0333,
"step": 3060
},
{
"epoch": 1.47,
"grad_norm": 37.70889167065541,
"learning_rate": 6.055898050388329e-06,
"loss": 0.995,
"step": 3065
},
{
"epoch": 1.47,
"grad_norm": 10.927911701371581,
"learning_rate": 6.042282531431219e-06,
"loss": 1.0221,
"step": 3070
},
{
"epoch": 1.47,
"grad_norm": 32.56551156991652,
"learning_rate": 6.028658927721698e-06,
"loss": 1.0082,
"step": 3075
},
{
"epoch": 1.47,
"grad_norm": 26.70617395653879,
"learning_rate": 6.015027344935015e-06,
"loss": 0.9973,
"step": 3080
},
{
"epoch": 1.48,
"grad_norm": 14.894385335720711,
"learning_rate": 6.001387888808306e-06,
"loss": 0.9902,
"step": 3085
},
{
"epoch": 1.48,
"grad_norm": 13.211787682951407,
"learning_rate": 5.987740665139781e-06,
"loss": 0.9998,
"step": 3090
},
{
"epoch": 1.48,
"grad_norm": 75.47602180750957,
"learning_rate": 5.9740857797879e-06,
"loss": 1.0065,
"step": 3095
},
{
"epoch": 1.48,
"grad_norm": 14.845864692586872,
"learning_rate": 5.960423338670556e-06,
"loss": 1.0307,
"step": 3100
},
{
"epoch": 1.49,
"grad_norm": 27.650055779167467,
"learning_rate": 5.946753447764245e-06,
"loss": 1.0572,
"step": 3105
},
{
"epoch": 1.49,
"grad_norm": 13.267098397480392,
"learning_rate": 5.933076213103255e-06,
"loss": 1.0206,
"step": 3110
},
{
"epoch": 1.49,
"grad_norm": 25.70538785873925,
"learning_rate": 5.919391740778833e-06,
"loss": 1.0333,
"step": 3115
},
{
"epoch": 1.49,
"grad_norm": 46.960902512905015,
"learning_rate": 5.9057001369383724e-06,
"loss": 1.0452,
"step": 3120
},
{
"epoch": 1.5,
"grad_norm": 23.393286956454627,
"learning_rate": 5.892001507784578e-06,
"loss": 0.9978,
"step": 3125
},
{
"epoch": 1.5,
"grad_norm": 45.22547925367693,
"learning_rate": 5.878295959574652e-06,
"loss": 1.0486,
"step": 3130
},
{
"epoch": 1.5,
"grad_norm": 37.81575355466229,
"learning_rate": 5.864583598619468e-06,
"loss": 1.0479,
"step": 3135
},
{
"epoch": 1.5,
"grad_norm": 75.5523242714923,
"learning_rate": 5.850864531282737e-06,
"loss": 0.9811,
"step": 3140
},
{
"epoch": 1.51,
"grad_norm": 9.322942572595789,
"learning_rate": 5.8371388639802005e-06,
"loss": 1.0022,
"step": 3145
},
{
"epoch": 1.51,
"grad_norm": 120.14854242746634,
"learning_rate": 5.823406703178787e-06,
"loss": 1.0265,
"step": 3150
},
{
"epoch": 1.51,
"grad_norm": 13.951209542979104,
"learning_rate": 5.809668155395793e-06,
"loss": 1.0144,
"step": 3155
},
{
"epoch": 1.51,
"grad_norm": 103.73227753974582,
"learning_rate": 5.7959233271980646e-06,
"loss": 1.0425,
"step": 3160
},
{
"epoch": 1.52,
"grad_norm": 66.43150096676571,
"learning_rate": 5.782172325201155e-06,
"loss": 0.9871,
"step": 3165
},
{
"epoch": 1.52,
"grad_norm": 68.98755866870444,
"learning_rate": 5.768415256068512e-06,
"loss": 0.9809,
"step": 3170
},
{
"epoch": 1.52,
"grad_norm": 80.74624578213705,
"learning_rate": 5.754652226510644e-06,
"loss": 1.0192,
"step": 3175
},
{
"epoch": 1.52,
"grad_norm": 137.9695859016004,
"learning_rate": 5.74088334328429e-06,
"loss": 1.0161,
"step": 3180
},
{
"epoch": 1.52,
"grad_norm": 63.49365339331343,
"learning_rate": 5.727108713191597e-06,
"loss": 1.0079,
"step": 3185
},
{
"epoch": 1.53,
"grad_norm": 40.41265662324084,
"learning_rate": 5.713328443079292e-06,
"loss": 1.0093,
"step": 3190
},
{
"epoch": 1.53,
"grad_norm": 63.38555658611763,
"learning_rate": 5.699542639837844e-06,
"loss": 1.0024,
"step": 3195
},
{
"epoch": 1.53,
"grad_norm": 12.269465174938087,
"learning_rate": 5.685751410400644e-06,
"loss": 1.0122,
"step": 3200
},
{
"epoch": 1.53,
"grad_norm": 58.492079130688204,
"learning_rate": 5.671954861743175e-06,
"loss": 1.0435,
"step": 3205
},
{
"epoch": 1.54,
"grad_norm": 11.186362843081401,
"learning_rate": 5.658153100882178e-06,
"loss": 1.0294,
"step": 3210
},
{
"epoch": 1.54,
"grad_norm": 54.66499331951395,
"learning_rate": 5.644346234874824e-06,
"loss": 0.9912,
"step": 3215
},
{
"epoch": 1.54,
"grad_norm": 46.18717197125881,
"learning_rate": 5.630534370817885e-06,
"loss": 1.026,
"step": 3220
},
{
"epoch": 1.54,
"grad_norm": 44.48201762616146,
"learning_rate": 5.616717615846897e-06,
"loss": 1.0142,
"step": 3225
},
{
"epoch": 1.55,
"grad_norm": 27.188507829156666,
"learning_rate": 5.602896077135342e-06,
"loss": 1.024,
"step": 3230
},
{
"epoch": 1.55,
"grad_norm": 8.217129648382247,
"learning_rate": 5.589069861893799e-06,
"loss": 1.0026,
"step": 3235
},
{
"epoch": 1.55,
"grad_norm": 61.945197661741965,
"learning_rate": 5.575239077369128e-06,
"loss": 0.9939,
"step": 3240
},
{
"epoch": 1.55,
"grad_norm": 13.825033627294602,
"learning_rate": 5.56140383084363e-06,
"loss": 1.0086,
"step": 3245
},
{
"epoch": 1.56,
"grad_norm": 9.234008434343483,
"learning_rate": 5.5475642296342165e-06,
"loss": 1.0152,
"step": 3250
},
{
"epoch": 1.56,
"grad_norm": 20.716738156097975,
"learning_rate": 5.533720381091582e-06,
"loss": 1.0015,
"step": 3255
},
{
"epoch": 1.56,
"grad_norm": 19.43572620769147,
"learning_rate": 5.519872392599357e-06,
"loss": 1.0076,
"step": 3260
},
{
"epoch": 1.56,
"grad_norm": 19.048217911854966,
"learning_rate": 5.506020371573292e-06,
"loss": 1.0498,
"step": 3265
},
{
"epoch": 1.57,
"grad_norm": 16.582318921763868,
"learning_rate": 5.492164425460415e-06,
"loss": 0.9832,
"step": 3270
},
{
"epoch": 1.57,
"grad_norm": 9.98745623448622,
"learning_rate": 5.4783046617382e-06,
"loss": 1.0192,
"step": 3275
},
{
"epoch": 1.57,
"grad_norm": 14.99346327546132,
"learning_rate": 5.4644411879137336e-06,
"loss": 1.0374,
"step": 3280
},
{
"epoch": 1.57,
"grad_norm": 8.817865032724345,
"learning_rate": 5.450574111522881e-06,
"loss": 0.9941,
"step": 3285
},
{
"epoch": 1.57,
"grad_norm": 23.359105027239817,
"learning_rate": 5.436703540129451e-06,
"loss": 1.0082,
"step": 3290
},
{
"epoch": 1.58,
"grad_norm": 16.739020291277352,
"learning_rate": 5.4228295813243616e-06,
"loss": 1.0056,
"step": 3295
},
{
"epoch": 1.58,
"grad_norm": 16.423967398163175,
"learning_rate": 5.408952342724809e-06,
"loss": 0.9916,
"step": 3300
},
{
"epoch": 1.58,
"grad_norm": 9.739811362572759,
"learning_rate": 5.395071931973428e-06,
"loss": 0.982,
"step": 3305
},
{
"epoch": 1.58,
"grad_norm": 23.570550216553134,
"learning_rate": 5.381188456737458e-06,
"loss": 1.0288,
"step": 3310
},
{
"epoch": 1.59,
"grad_norm": 55.47454344744593,
"learning_rate": 5.367302024707911e-06,
"loss": 1.0332,
"step": 3315
},
{
"epoch": 1.59,
"grad_norm": 26.04306253376443,
"learning_rate": 5.353412743598735e-06,
"loss": 1.0073,
"step": 3320
},
{
"epoch": 1.59,
"grad_norm": 8.717762469870518,
"learning_rate": 5.339520721145975e-06,
"loss": 0.9828,
"step": 3325
},
{
"epoch": 1.59,
"grad_norm": 35.571082237462264,
"learning_rate": 5.32562606510694e-06,
"loss": 0.9905,
"step": 3330
},
{
"epoch": 1.6,
"grad_norm": 110.88123693974767,
"learning_rate": 5.3117288832593684e-06,
"loss": 0.9837,
"step": 3335
},
{
"epoch": 1.6,
"grad_norm": 40.91506791872968,
"learning_rate": 5.297829283400593e-06,
"loss": 0.9843,
"step": 3340
},
{
"epoch": 1.6,
"grad_norm": 38.323949248184086,
"learning_rate": 5.283927373346698e-06,
"loss": 0.9933,
"step": 3345
},
{
"epoch": 1.6,
"grad_norm": 30.845490852861253,
"learning_rate": 5.270023260931692e-06,
"loss": 0.9839,
"step": 3350
},
{
"epoch": 1.61,
"grad_norm": 22.64782634857941,
"learning_rate": 5.25611705400666e-06,
"loss": 1.0138,
"step": 3355
},
{
"epoch": 1.61,
"grad_norm": 26.184436548177956,
"learning_rate": 5.242208860438941e-06,
"loss": 0.9931,
"step": 3360
},
{
"epoch": 1.61,
"grad_norm": 13.140886596856888,
"learning_rate": 5.228298788111281e-06,
"loss": 1.0062,
"step": 3365
},
{
"epoch": 1.61,
"grad_norm": 13.609895378416404,
"learning_rate": 5.214386944920997e-06,
"loss": 1.0407,
"step": 3370
},
{
"epoch": 1.62,
"grad_norm": 48.12882451919059,
"learning_rate": 5.200473438779147e-06,
"loss": 1.0238,
"step": 3375
},
{
"epoch": 1.62,
"grad_norm": 47.388558809521165,
"learning_rate": 5.186558377609681e-06,
"loss": 1.045,
"step": 3380
},
{
"epoch": 1.62,
"grad_norm": 40.523042154403385,
"learning_rate": 5.1726418693486205e-06,
"loss": 1.0088,
"step": 3385
},
{
"epoch": 1.62,
"grad_norm": 20.679596413250202,
"learning_rate": 5.158724021943203e-06,
"loss": 0.981,
"step": 3390
},
{
"epoch": 1.63,
"grad_norm": 16.360399823586913,
"learning_rate": 5.1448049433510605e-06,
"loss": 0.9824,
"step": 3395
},
{
"epoch": 1.63,
"grad_norm": 20.99757590116775,
"learning_rate": 5.130884741539367e-06,
"loss": 1.0283,
"step": 3400
},
{
"epoch": 1.63,
"grad_norm": 10.32526806339637,
"learning_rate": 5.1169635244840155e-06,
"loss": 0.9799,
"step": 3405
},
{
"epoch": 1.63,
"grad_norm": 29.547654212423627,
"learning_rate": 5.103041400168773e-06,
"loss": 0.9974,
"step": 3410
},
{
"epoch": 1.63,
"grad_norm": 9.09541909774713,
"learning_rate": 5.089118476584442e-06,
"loss": 1.0364,
"step": 3415
},
{
"epoch": 1.64,
"grad_norm": 51.048402382790165,
"learning_rate": 5.075194861728027e-06,
"loss": 0.9847,
"step": 3420
},
{
"epoch": 1.64,
"grad_norm": 36.621677974601184,
"learning_rate": 5.061270663601894e-06,
"loss": 0.9757,
"step": 3425
},
{
"epoch": 1.64,
"grad_norm": 10.828688380082344,
"learning_rate": 5.04734599021293e-06,
"loss": 1.0214,
"step": 3430
},
{
"epoch": 1.64,
"grad_norm": 33.10946337579703,
"learning_rate": 5.033420949571712e-06,
"loss": 1.0129,
"step": 3435
},
{
"epoch": 1.65,
"grad_norm": 60.9951919761553,
"learning_rate": 5.019495649691666e-06,
"loss": 0.9889,
"step": 3440
},
{
"epoch": 1.65,
"grad_norm": 58.773708006418566,
"learning_rate": 5.005570198588225e-06,
"loss": 0.9727,
"step": 3445
},
{
"epoch": 1.65,
"grad_norm": 15.419720806012116,
"learning_rate": 4.991644704278002e-06,
"loss": 1.012,
"step": 3450
},
{
"epoch": 1.65,
"grad_norm": 55.136613670202486,
"learning_rate": 4.977719274777939e-06,
"loss": 1.0141,
"step": 3455
},
{
"epoch": 1.66,
"grad_norm": 14.916839789353967,
"learning_rate": 4.963794018104477e-06,
"loss": 1.0053,
"step": 3460
},
{
"epoch": 1.66,
"grad_norm": 11.320457215601513,
"learning_rate": 4.949869042272715e-06,
"loss": 0.994,
"step": 3465
},
{
"epoch": 1.66,
"grad_norm": 17.620828214505696,
"learning_rate": 4.935944455295577e-06,
"loss": 0.984,
"step": 3470
},
{
"epoch": 1.66,
"grad_norm": 10.844249910323798,
"learning_rate": 4.922020365182968e-06,
"loss": 0.9833,
"step": 3475
},
{
"epoch": 1.67,
"grad_norm": 12.504670698527754,
"learning_rate": 4.908096879940938e-06,
"loss": 0.9964,
"step": 3480
},
{
"epoch": 1.67,
"grad_norm": 36.07750945167731,
"learning_rate": 4.894174107570852e-06,
"loss": 0.9794,
"step": 3485
},
{
"epoch": 1.67,
"grad_norm": 17.930791025212546,
"learning_rate": 4.880252156068537e-06,
"loss": 0.9863,
"step": 3490
},
{
"epoch": 1.67,
"grad_norm": 13.68679631192367,
"learning_rate": 4.866331133423457e-06,
"loss": 0.9987,
"step": 3495
},
{
"epoch": 1.68,
"grad_norm": 9.882717380883586,
"learning_rate": 4.852411147617868e-06,
"loss": 0.9947,
"step": 3500
},
{
"epoch": 1.68,
"grad_norm": 30.80872267723861,
"learning_rate": 4.838492306625989e-06,
"loss": 0.9749,
"step": 3505
},
{
"epoch": 1.68,
"grad_norm": 11.799677940117443,
"learning_rate": 4.824574718413153e-06,
"loss": 1.0141,
"step": 3510
},
{
"epoch": 1.68,
"grad_norm": 15.968432128538577,
"learning_rate": 4.81065849093498e-06,
"loss": 0.988,
"step": 3515
},
{
"epoch": 1.69,
"grad_norm": 25.712897313697816,
"learning_rate": 4.796743732136531e-06,
"loss": 1.0359,
"step": 3520
},
{
"epoch": 1.69,
"grad_norm": 29.203631424510377,
"learning_rate": 4.782830549951476e-06,
"loss": 0.9835,
"step": 3525
},
{
"epoch": 1.69,
"grad_norm": 8.873595606454442,
"learning_rate": 4.768919052301261e-06,
"loss": 0.985,
"step": 3530
},
{
"epoch": 1.69,
"grad_norm": 46.42935680622523,
"learning_rate": 4.755009347094257e-06,
"loss": 0.9913,
"step": 3535
},
{
"epoch": 1.69,
"grad_norm": 17.076341478099522,
"learning_rate": 4.741101542224938e-06,
"loss": 1.0313,
"step": 3540
},
{
"epoch": 1.7,
"grad_norm": 10.925958833246463,
"learning_rate": 4.7271957455730326e-06,
"loss": 0.9854,
"step": 3545
},
{
"epoch": 1.7,
"grad_norm": 30.127678329835934,
"learning_rate": 4.713292065002695e-06,
"loss": 0.989,
"step": 3550
},
{
"epoch": 1.7,
"grad_norm": 8.733330224642398,
"learning_rate": 4.699390608361665e-06,
"loss": 0.9672,
"step": 3555
},
{
"epoch": 1.7,
"grad_norm": 33.96332709724693,
"learning_rate": 4.685491483480432e-06,
"loss": 1.0112,
"step": 3560
},
{
"epoch": 1.71,
"grad_norm": 25.6864537444401,
"learning_rate": 4.671594798171398e-06,
"loss": 0.9855,
"step": 3565
},
{
"epoch": 1.71,
"grad_norm": 60.16929949077335,
"learning_rate": 4.657700660228039e-06,
"loss": 1.0007,
"step": 3570
},
{
"epoch": 1.71,
"grad_norm": 27.295289345493682,
"learning_rate": 4.643809177424078e-06,
"loss": 0.9906,
"step": 3575
},
{
"epoch": 1.71,
"grad_norm": 50.357119415673765,
"learning_rate": 4.629920457512639e-06,
"loss": 1.0039,
"step": 3580
},
{
"epoch": 1.72,
"grad_norm": 44.45257943584308,
"learning_rate": 4.616034608225415e-06,
"loss": 1.0307,
"step": 3585
},
{
"epoch": 1.72,
"grad_norm": 27.786913863445413,
"learning_rate": 4.60215173727183e-06,
"loss": 0.9879,
"step": 3590
},
{
"epoch": 1.72,
"grad_norm": 48.34585944830353,
"learning_rate": 4.588271952338212e-06,
"loss": 0.9725,
"step": 3595
},
{
"epoch": 1.72,
"grad_norm": 9.009434248028121,
"learning_rate": 4.574395361086944e-06,
"loss": 1.0042,
"step": 3600
},
{
"epoch": 1.73,
"grad_norm": 9.657679661333356,
"learning_rate": 4.56052207115564e-06,
"loss": 1.031,
"step": 3605
},
{
"epoch": 1.73,
"grad_norm": 20.431656441461232,
"learning_rate": 4.546652190156307e-06,
"loss": 1.0133,
"step": 3610
},
{
"epoch": 1.73,
"grad_norm": 11.027509484486162,
"learning_rate": 4.532785825674507e-06,
"loss": 0.9944,
"step": 3615
},
{
"epoch": 1.73,
"grad_norm": 34.1317866168886,
"learning_rate": 4.518923085268531e-06,
"loss": 0.9607,
"step": 3620
},
{
"epoch": 1.74,
"grad_norm": 11.323934493725023,
"learning_rate": 4.505064076468554e-06,
"loss": 1.0051,
"step": 3625
},
{
"epoch": 1.74,
"grad_norm": 9.65537790419105,
"learning_rate": 4.491208906775804e-06,
"loss": 0.9808,
"step": 3630
},
{
"epoch": 1.74,
"grad_norm": 14.986483727940964,
"learning_rate": 4.477357683661734e-06,
"loss": 0.9929,
"step": 3635
},
{
"epoch": 1.74,
"grad_norm": 20.46742252352603,
"learning_rate": 4.463510514567182e-06,
"loss": 0.9877,
"step": 3640
},
{
"epoch": 1.74,
"grad_norm": 8.82644048612429,
"learning_rate": 4.4496675069015425e-06,
"loss": 0.9697,
"step": 3645
},
{
"epoch": 1.75,
"grad_norm": 35.45185363875661,
"learning_rate": 4.4358287680419266e-06,
"loss": 0.938,
"step": 3650
},
{
"epoch": 1.75,
"grad_norm": 8.927218808417328,
"learning_rate": 4.421994405332336e-06,
"loss": 0.9511,
"step": 3655
},
{
"epoch": 1.75,
"grad_norm": 13.398869718334993,
"learning_rate": 4.408164526082829e-06,
"loss": 0.9831,
"step": 3660
},
{
"epoch": 1.75,
"grad_norm": 21.186486974096724,
"learning_rate": 4.394339237568683e-06,
"loss": 0.9822,
"step": 3665
},
{
"epoch": 1.76,
"grad_norm": 31.906189850276977,
"learning_rate": 4.380518647029569e-06,
"loss": 1.0042,
"step": 3670
},
{
"epoch": 1.76,
"grad_norm": 44.23056796524824,
"learning_rate": 4.366702861668717e-06,
"loss": 0.9984,
"step": 3675
},
{
"epoch": 1.76,
"grad_norm": 10.815149381033793,
"learning_rate": 4.352891988652079e-06,
"loss": 0.9797,
"step": 3680
},
{
"epoch": 1.76,
"grad_norm": 8.701295348084267,
"learning_rate": 4.33908613510751e-06,
"loss": 0.997,
"step": 3685
},
{
"epoch": 1.77,
"grad_norm": 11.085426213755271,
"learning_rate": 4.325285408123927e-06,
"loss": 0.9887,
"step": 3690
},
{
"epoch": 1.77,
"grad_norm": 26.650491691099862,
"learning_rate": 4.311489914750482e-06,
"loss": 0.9835,
"step": 3695
},
{
"epoch": 1.77,
"grad_norm": 15.476178046383668,
"learning_rate": 4.2976997619957286e-06,
"loss": 1.0136,
"step": 3700
},
{
"epoch": 1.77,
"grad_norm": 43.02550170839576,
"learning_rate": 4.283915056826798e-06,
"loss": 0.9905,
"step": 3705
},
{
"epoch": 1.78,
"grad_norm": 37.10151634563604,
"learning_rate": 4.270135906168565e-06,
"loss": 0.9985,
"step": 3710
},
{
"epoch": 1.78,
"grad_norm": 37.098934447577655,
"learning_rate": 4.256362416902817e-06,
"loss": 1.0143,
"step": 3715
},
{
"epoch": 1.78,
"grad_norm": 20.54112308633943,
"learning_rate": 4.24259469586743e-06,
"loss": 0.9833,
"step": 3720
},
{
"epoch": 1.78,
"grad_norm": 56.832316432037075,
"learning_rate": 4.228832849855535e-06,
"loss": 0.9957,
"step": 3725
},
{
"epoch": 1.79,
"grad_norm": 37.64198934955718,
"learning_rate": 4.215076985614693e-06,
"loss": 0.9669,
"step": 3730
},
{
"epoch": 1.79,
"grad_norm": 23.041116285350675,
"learning_rate": 4.2013272098460655e-06,
"loss": 0.9809,
"step": 3735
},
{
"epoch": 1.79,
"grad_norm": 24.646605109558998,
"learning_rate": 4.187583629203585e-06,
"loss": 0.9762,
"step": 3740
},
{
"epoch": 1.79,
"grad_norm": 9.561626780358397,
"learning_rate": 4.173846350293132e-06,
"loss": 1.013,
"step": 3745
},
{
"epoch": 1.8,
"grad_norm": 16.620276517775117,
"learning_rate": 4.16011547967171e-06,
"loss": 0.9239,
"step": 3750
},
{
"epoch": 1.8,
"grad_norm": 29.951626947298184,
"learning_rate": 4.146391123846607e-06,
"loss": 0.9617,
"step": 3755
},
{
"epoch": 1.8,
"grad_norm": 22.047445461982683,
"learning_rate": 4.13267338927458e-06,
"loss": 1.0074,
"step": 3760
},
{
"epoch": 1.8,
"grad_norm": 17.773461135419947,
"learning_rate": 4.118962382361029e-06,
"loss": 0.9706,
"step": 3765
},
{
"epoch": 1.8,
"grad_norm": 8.794451154358756,
"learning_rate": 4.105258209459166e-06,
"loss": 1.0097,
"step": 3770
},
{
"epoch": 1.81,
"grad_norm": 8.668811758636195,
"learning_rate": 4.0915609768691945e-06,
"loss": 0.9995,
"step": 3775
},
{
"epoch": 1.81,
"grad_norm": 16.452086855933032,
"learning_rate": 4.077870790837482e-06,
"loss": 1.0228,
"step": 3780
},
{
"epoch": 1.81,
"grad_norm": 12.301249065878231,
"learning_rate": 4.064187757555741e-06,
"loss": 1.0011,
"step": 3785
},
{
"epoch": 1.81,
"grad_norm": 10.172564049384052,
"learning_rate": 4.050511983160196e-06,
"loss": 0.978,
"step": 3790
},
{
"epoch": 1.82,
"grad_norm": 16.042388681983102,
"learning_rate": 4.036843573730774e-06,
"loss": 1.0071,
"step": 3795
},
{
"epoch": 1.82,
"grad_norm": 14.387805086187818,
"learning_rate": 4.023182635290265e-06,
"loss": 0.9592,
"step": 3800
},
{
"epoch": 1.82,
"grad_norm": 9.81959520065953,
"learning_rate": 4.009529273803515e-06,
"loss": 1.0121,
"step": 3805
},
{
"epoch": 1.82,
"grad_norm": 8.344220920288018,
"learning_rate": 3.995883595176592e-06,
"loss": 0.9565,
"step": 3810
},
{
"epoch": 1.83,
"grad_norm": 20.36821445801639,
"learning_rate": 3.982245705255974e-06,
"loss": 1.0107,
"step": 3815
},
{
"epoch": 1.83,
"grad_norm": 11.121790792242514,
"learning_rate": 3.968615709827721e-06,
"loss": 0.9715,
"step": 3820
},
{
"epoch": 1.83,
"grad_norm": 16.342402943091855,
"learning_rate": 3.954993714616659e-06,
"loss": 1.0091,
"step": 3825
},
{
"epoch": 1.83,
"grad_norm": 9.904176782476654,
"learning_rate": 3.941379825285556e-06,
"loss": 0.9693,
"step": 3830
},
{
"epoch": 1.84,
"grad_norm": 11.419934769380342,
"learning_rate": 3.927774147434305e-06,
"loss": 0.9822,
"step": 3835
},
{
"epoch": 1.84,
"grad_norm": 10.452890707593966,
"learning_rate": 3.914176786599109e-06,
"loss": 0.9852,
"step": 3840
},
{
"epoch": 1.84,
"grad_norm": 26.228382661936003,
"learning_rate": 3.9005878482516505e-06,
"loss": 0.9714,
"step": 3845
},
{
"epoch": 1.84,
"grad_norm": 18.93629721807814,
"learning_rate": 3.8870074377982845e-06,
"loss": 1.0015,
"step": 3850
},
{
"epoch": 1.85,
"grad_norm": 12.807379699240501,
"learning_rate": 3.8734356605792175e-06,
"loss": 0.9835,
"step": 3855
},
{
"epoch": 1.85,
"grad_norm": 25.877592721851357,
"learning_rate": 3.8598726218676885e-06,
"loss": 0.9796,
"step": 3860
},
{
"epoch": 1.85,
"grad_norm": 20.897895805112853,
"learning_rate": 3.846318426869155e-06,
"loss": 0.9784,
"step": 3865
},
{
"epoch": 1.85,
"grad_norm": 11.687204967102584,
"learning_rate": 3.832773180720475e-06,
"loss": 1.0237,
"step": 3870
},
{
"epoch": 1.85,
"grad_norm": 9.974454791153411,
"learning_rate": 3.819236988489089e-06,
"loss": 0.9967,
"step": 3875
},
{
"epoch": 1.86,
"grad_norm": 9.796178394040796,
"learning_rate": 3.8057099551722167e-06,
"loss": 0.9752,
"step": 3880
},
{
"epoch": 1.86,
"grad_norm": 11.111172418219686,
"learning_rate": 3.7921921856960275e-06,
"loss": 0.9656,
"step": 3885
},
{
"epoch": 1.86,
"grad_norm": 10.2474731637693,
"learning_rate": 3.7786837849148345e-06,
"loss": 0.9717,
"step": 3890
},
{
"epoch": 1.86,
"grad_norm": 9.121649110534628,
"learning_rate": 3.7651848576102817e-06,
"loss": 0.9876,
"step": 3895
},
{
"epoch": 1.87,
"grad_norm": 34.880912096250306,
"learning_rate": 3.751695508490526e-06,
"loss": 0.9895,
"step": 3900
},
{
"epoch": 1.87,
"grad_norm": 10.935080330855971,
"learning_rate": 3.7382158421894314e-06,
"loss": 0.9845,
"step": 3905
},
{
"epoch": 1.87,
"grad_norm": 11.554841797659506,
"learning_rate": 3.724745963265754e-06,
"loss": 0.9719,
"step": 3910
},
{
"epoch": 1.87,
"grad_norm": 14.925679874948655,
"learning_rate": 3.711285976202331e-06,
"loss": 0.9937,
"step": 3915
},
{
"epoch": 1.88,
"grad_norm": 13.64648892643207,
"learning_rate": 3.697835985405268e-06,
"loss": 0.9643,
"step": 3920
},
{
"epoch": 1.88,
"grad_norm": 36.709510998248206,
"learning_rate": 3.6843960952031403e-06,
"loss": 0.9791,
"step": 3925
},
{
"epoch": 1.88,
"grad_norm": 7.8407391187618884,
"learning_rate": 3.670966409846165e-06,
"loss": 0.981,
"step": 3930
},
{
"epoch": 1.88,
"grad_norm": 16.739518442817054,
"learning_rate": 3.657547033505408e-06,
"loss": 1.0069,
"step": 3935
},
{
"epoch": 1.89,
"grad_norm": 11.298436950240601,
"learning_rate": 3.6441380702719694e-06,
"loss": 0.9742,
"step": 3940
},
{
"epoch": 1.89,
"grad_norm": 7.963293710950193,
"learning_rate": 3.6307396241561763e-06,
"loss": 0.9344,
"step": 3945
},
{
"epoch": 1.89,
"grad_norm": 26.42454880482335,
"learning_rate": 3.6173517990867786e-06,
"loss": 1.0019,
"step": 3950
},
{
"epoch": 1.89,
"grad_norm": 16.045988747973926,
"learning_rate": 3.6039746989101395e-06,
"loss": 1.0088,
"step": 3955
},
{
"epoch": 1.9,
"grad_norm": 13.27979194406066,
"learning_rate": 3.5906084273894325e-06,
"loss": 0.9682,
"step": 3960
},
{
"epoch": 1.9,
"grad_norm": 8.793325623553274,
"learning_rate": 3.5772530882038344e-06,
"loss": 0.9838,
"step": 3965
},
{
"epoch": 1.9,
"grad_norm": 11.821425482751579,
"learning_rate": 3.563908784947725e-06,
"loss": 0.9836,
"step": 3970
},
{
"epoch": 1.9,
"grad_norm": 39.998296542892334,
"learning_rate": 3.550575621129878e-06,
"loss": 1.0189,
"step": 3975
},
{
"epoch": 1.91,
"grad_norm": 15.1093434381566,
"learning_rate": 3.5372537001726636e-06,
"loss": 0.9898,
"step": 3980
},
{
"epoch": 1.91,
"grad_norm": 14.50157891355612,
"learning_rate": 3.523943125411241e-06,
"loss": 0.9531,
"step": 3985
},
{
"epoch": 1.91,
"grad_norm": 13.587062714494829,
"learning_rate": 3.5106440000927605e-06,
"loss": 0.9647,
"step": 3990
},
{
"epoch": 1.91,
"grad_norm": 30.702177901025777,
"learning_rate": 3.4973564273755623e-06,
"loss": 0.9836,
"step": 3995
},
{
"epoch": 1.91,
"grad_norm": 26.40943406882129,
"learning_rate": 3.484080510328376e-06,
"loss": 1.006,
"step": 4000
},
{
"epoch": 1.92,
"grad_norm": 14.716315253426446,
"learning_rate": 3.470816351929518e-06,
"loss": 0.978,
"step": 4005
},
{
"epoch": 1.92,
"grad_norm": 29.74520933128085,
"learning_rate": 3.4575640550660964e-06,
"loss": 0.9628,
"step": 4010
},
{
"epoch": 1.92,
"grad_norm": 30.263732722218563,
"learning_rate": 3.4443237225332175e-06,
"loss": 0.9887,
"step": 4015
},
{
"epoch": 1.92,
"grad_norm": 10.954514736706937,
"learning_rate": 3.431095457033176e-06,
"loss": 0.9583,
"step": 4020
},
{
"epoch": 1.93,
"grad_norm": 37.228214662070975,
"learning_rate": 3.417879361174668e-06,
"loss": 0.9659,
"step": 4025
},
{
"epoch": 1.93,
"grad_norm": 15.16100059468391,
"learning_rate": 3.4046755374719937e-06,
"loss": 0.9335,
"step": 4030
},
{
"epoch": 1.93,
"grad_norm": 12.198537640361408,
"learning_rate": 3.391484088344257e-06,
"loss": 1.035,
"step": 4035
},
{
"epoch": 1.93,
"grad_norm": 13.857890919612432,
"learning_rate": 3.378305116114578e-06,
"loss": 0.9719,
"step": 4040
},
{
"epoch": 1.94,
"grad_norm": 10.272309138820516,
"learning_rate": 3.3651387230092968e-06,
"loss": 0.9536,
"step": 4045
},
{
"epoch": 1.94,
"grad_norm": 19.11512558681597,
"learning_rate": 3.351985011157179e-06,
"loss": 0.9844,
"step": 4050
},
{
"epoch": 1.94,
"grad_norm": 10.035609306539854,
"learning_rate": 3.338844082588622e-06,
"loss": 0.9774,
"step": 4055
},
{
"epoch": 1.94,
"grad_norm": 25.993999378606528,
"learning_rate": 3.3257160392348742e-06,
"loss": 0.9606,
"step": 4060
},
{
"epoch": 1.95,
"grad_norm": 15.20456680256575,
"learning_rate": 3.3126009829272264e-06,
"loss": 0.9607,
"step": 4065
},
{
"epoch": 1.95,
"grad_norm": 12.985047485753542,
"learning_rate": 3.2994990153962368e-06,
"loss": 0.9958,
"step": 4070
},
{
"epoch": 1.95,
"grad_norm": 27.250576147432255,
"learning_rate": 3.286410238270938e-06,
"loss": 0.9707,
"step": 4075
},
{
"epoch": 1.95,
"grad_norm": 23.237297577998998,
"learning_rate": 3.273334753078044e-06,
"loss": 0.9865,
"step": 4080
},
{
"epoch": 1.96,
"grad_norm": 36.2559038386085,
"learning_rate": 3.260272661241169e-06,
"loss": 0.9921,
"step": 4085
},
{
"epoch": 1.96,
"grad_norm": 12.629482250692973,
"learning_rate": 3.2472240640800366e-06,
"loss": 0.9776,
"step": 4090
},
{
"epoch": 1.96,
"grad_norm": 25.679398471263376,
"learning_rate": 3.234189062809695e-06,
"loss": 0.964,
"step": 4095
},
{
"epoch": 1.96,
"grad_norm": 18.41202064781562,
"learning_rate": 3.221167758539735e-06,
"loss": 0.9973,
"step": 4100
},
{
"epoch": 1.97,
"grad_norm": 19.600278086619667,
"learning_rate": 3.2081602522734987e-06,
"loss": 0.9672,
"step": 4105
},
{
"epoch": 1.97,
"grad_norm": 28.129681758236057,
"learning_rate": 3.195166644907303e-06,
"loss": 0.9608,
"step": 4110
},
{
"epoch": 1.97,
"grad_norm": 35.65829361341883,
"learning_rate": 3.182187037229653e-06,
"loss": 0.9868,
"step": 4115
},
{
"epoch": 1.97,
"grad_norm": 53.75973544835431,
"learning_rate": 3.169221529920461e-06,
"loss": 0.9811,
"step": 4120
},
{
"epoch": 1.97,
"grad_norm": 33.402890642008714,
"learning_rate": 3.156270223550267e-06,
"loss": 0.9692,
"step": 4125
},
{
"epoch": 1.98,
"grad_norm": 21.343513185169783,
"learning_rate": 3.1433332185794574e-06,
"loss": 0.9727,
"step": 4130
},
{
"epoch": 1.98,
"grad_norm": 19.548777290841777,
"learning_rate": 3.1304106153574854e-06,
"loss": 0.9821,
"step": 4135
},
{
"epoch": 1.98,
"grad_norm": 18.1115211703872,
"learning_rate": 3.11750251412209e-06,
"loss": 0.9726,
"step": 4140
},
{
"epoch": 1.98,
"grad_norm": 18.685232106404783,
"learning_rate": 3.104609014998529e-06,
"loss": 0.9751,
"step": 4145
},
{
"epoch": 1.99,
"grad_norm": 11.227358846491693,
"learning_rate": 3.091730217998788e-06,
"loss": 0.9731,
"step": 4150
},
{
"epoch": 1.99,
"grad_norm": 11.517323088069816,
"learning_rate": 3.078866223020815e-06,
"loss": 1.0052,
"step": 4155
},
{
"epoch": 1.99,
"grad_norm": 25.0384098221508,
"learning_rate": 3.0660171298477404e-06,
"loss": 1.0083,
"step": 4160
},
{
"epoch": 1.99,
"grad_norm": 16.94768681316917,
"learning_rate": 3.0531830381471067e-06,
"loss": 0.9752,
"step": 4165
},
{
"epoch": 2.0,
"grad_norm": 8.588170176953263,
"learning_rate": 3.0403640474700896e-06,
"loss": 0.9555,
"step": 4170
},
{
"epoch": 2.0,
"grad_norm": 9.215380313524344,
"learning_rate": 3.027560257250733e-06,
"loss": 0.952,
"step": 4175
},
{
"epoch": 2.0,
"eval_loss": 1.0642353296279907,
"eval_runtime": 23.8529,
"eval_samples_per_second": 28.592,
"eval_steps_per_second": 3.605,
"step": 4178
},
{
"epoch": 2.0,
"grad_norm": 13.573858653914106,
"learning_rate": 3.0147717668051724e-06,
"loss": 0.8981,
"step": 4180
},
{
"epoch": 2.0,
"grad_norm": 18.873030657919557,
"learning_rate": 3.001998675330866e-06,
"loss": 0.759,
"step": 4185
},
{
"epoch": 2.01,
"grad_norm": 8.173629821809747,
"learning_rate": 2.9892410819058305e-06,
"loss": 0.7479,
"step": 4190
},
{
"epoch": 2.01,
"grad_norm": 23.25227484245958,
"learning_rate": 2.9764990854878624e-06,
"loss": 0.7643,
"step": 4195
},
{
"epoch": 2.01,
"grad_norm": 17.486628298515132,
"learning_rate": 2.9637727849137756e-06,
"loss": 0.7602,
"step": 4200
},
{
"epoch": 2.01,
"grad_norm": 19.77879353592806,
"learning_rate": 2.951062278898637e-06,
"loss": 0.7625,
"step": 4205
},
{
"epoch": 2.02,
"grad_norm": 10.798809944934707,
"learning_rate": 2.9383676660349985e-06,
"loss": 0.7606,
"step": 4210
},
{
"epoch": 2.02,
"grad_norm": 8.468520083178667,
"learning_rate": 2.925689044792132e-06,
"loss": 0.7785,
"step": 4215
},
{
"epoch": 2.02,
"grad_norm": 7.653837677871906,
"learning_rate": 2.913026513515267e-06,
"loss": 0.7565,
"step": 4220
},
{
"epoch": 2.02,
"grad_norm": 10.409223708870869,
"learning_rate": 2.900380170424822e-06,
"loss": 0.7618,
"step": 4225
},
{
"epoch": 2.02,
"grad_norm": 9.190800677290847,
"learning_rate": 2.887750113615653e-06,
"loss": 0.7538,
"step": 4230
},
{
"epoch": 2.03,
"grad_norm": 9.49794118924554,
"learning_rate": 2.8751364410562864e-06,
"loss": 0.7455,
"step": 4235
},
{
"epoch": 2.03,
"grad_norm": 8.882150205834177,
"learning_rate": 2.862539250588159e-06,
"loss": 0.7393,
"step": 4240
},
{
"epoch": 2.03,
"grad_norm": 10.164923004822544,
"learning_rate": 2.849958639924857e-06,
"loss": 0.752,
"step": 4245
},
{
"epoch": 2.03,
"grad_norm": 7.517801242805189,
"learning_rate": 2.837394706651361e-06,
"loss": 0.7288,
"step": 4250
},
{
"epoch": 2.04,
"grad_norm": 8.567898863308367,
"learning_rate": 2.824847548223294e-06,
"loss": 0.7621,
"step": 4255
},
{
"epoch": 2.04,
"grad_norm": 19.200262587497104,
"learning_rate": 2.8123172619661533e-06,
"loss": 0.7403,
"step": 4260
},
{
"epoch": 2.04,
"grad_norm": 34.338248210616015,
"learning_rate": 2.7998039450745707e-06,
"loss": 0.7527,
"step": 4265
},
{
"epoch": 2.04,
"grad_norm": 43.683571918193586,
"learning_rate": 2.78730769461154e-06,
"loss": 0.7473,
"step": 4270
},
{
"epoch": 2.05,
"grad_norm": 47.33081899689442,
"learning_rate": 2.7748286075076834e-06,
"loss": 0.7542,
"step": 4275
},
{
"epoch": 2.05,
"grad_norm": 33.31457673836802,
"learning_rate": 2.7623667805604904e-06,
"loss": 0.7194,
"step": 4280
},
{
"epoch": 2.05,
"grad_norm": 57.238913173627324,
"learning_rate": 2.749922310433559e-06,
"loss": 0.7551,
"step": 4285
},
{
"epoch": 2.05,
"grad_norm": 51.36622889249512,
"learning_rate": 2.737495293655864e-06,
"loss": 0.7549,
"step": 4290
},
{
"epoch": 2.06,
"grad_norm": 19.947390534463395,
"learning_rate": 2.7250858266209916e-06,
"loss": 0.7166,
"step": 4295
},
{
"epoch": 2.06,
"grad_norm": 24.893700391748535,
"learning_rate": 2.7126940055864048e-06,
"loss": 0.7562,
"step": 4300
},
{
"epoch": 2.06,
"grad_norm": 21.31431192612938,
"learning_rate": 2.700319926672682e-06,
"loss": 0.7409,
"step": 4305
},
{
"epoch": 2.06,
"grad_norm": 32.18603275253525,
"learning_rate": 2.68796368586279e-06,
"loss": 0.7548,
"step": 4310
},
{
"epoch": 2.07,
"grad_norm": 30.148267757127694,
"learning_rate": 2.6756253790013194e-06,
"loss": 0.7442,
"step": 4315
},
{
"epoch": 2.07,
"grad_norm": 24.43578842907068,
"learning_rate": 2.663305101793758e-06,
"loss": 0.7158,
"step": 4320
},
{
"epoch": 2.07,
"grad_norm": 13.424960572619238,
"learning_rate": 2.6510029498057436e-06,
"loss": 0.758,
"step": 4325
},
{
"epoch": 2.07,
"grad_norm": 13.579716211465046,
"learning_rate": 2.6387190184623114e-06,
"loss": 0.757,
"step": 4330
},
{
"epoch": 2.08,
"grad_norm": 8.924024956565525,
"learning_rate": 2.6264534030471723e-06,
"loss": 0.7418,
"step": 4335
},
{
"epoch": 2.08,
"grad_norm": 14.83503565657964,
"learning_rate": 2.614206198701958e-06,
"loss": 0.7533,
"step": 4340
},
{
"epoch": 2.08,
"grad_norm": 9.975381290156001,
"learning_rate": 2.6019775004254955e-06,
"loss": 0.7461,
"step": 4345
},
{
"epoch": 2.08,
"grad_norm": 9.829200445999545,
"learning_rate": 2.589767403073058e-06,
"loss": 0.7639,
"step": 4350
},
{
"epoch": 2.08,
"grad_norm": 8.425053881316707,
"learning_rate": 2.5775760013556426e-06,
"loss": 0.7318,
"step": 4355
},
{
"epoch": 2.09,
"grad_norm": 8.905156242091858,
"learning_rate": 2.5654033898392216e-06,
"loss": 0.7366,
"step": 4360
},
{
"epoch": 2.09,
"grad_norm": 8.906842249660905,
"learning_rate": 2.5532496629440207e-06,
"loss": 0.7419,
"step": 4365
},
{
"epoch": 2.09,
"grad_norm": 8.596602098301865,
"learning_rate": 2.541114914943782e-06,
"loss": 0.7636,
"step": 4370
},
{
"epoch": 2.09,
"grad_norm": 19.19086339399462,
"learning_rate": 2.528999239965027e-06,
"loss": 0.7332,
"step": 4375
},
{
"epoch": 2.1,
"grad_norm": 17.5827694115354,
"learning_rate": 2.5169027319863405e-06,
"loss": 0.7566,
"step": 4380
},
{
"epoch": 2.1,
"grad_norm": 11.500018071018317,
"learning_rate": 2.5048254848376265e-06,
"loss": 0.7774,
"step": 4385
},
{
"epoch": 2.1,
"grad_norm": 11.74266220096718,
"learning_rate": 2.492767592199386e-06,
"loss": 0.7477,
"step": 4390
},
{
"epoch": 2.1,
"grad_norm": 10.657710664619763,
"learning_rate": 2.4807291476019996e-06,
"loss": 0.7472,
"step": 4395
},
{
"epoch": 2.11,
"grad_norm": 11.171730591355747,
"learning_rate": 2.4687102444249845e-06,
"loss": 0.7304,
"step": 4400
},
{
"epoch": 2.11,
"grad_norm": 45.27469169236414,
"learning_rate": 2.456710975896286e-06,
"loss": 0.7777,
"step": 4405
},
{
"epoch": 2.11,
"grad_norm": 23.908057140086385,
"learning_rate": 2.4447314350915468e-06,
"loss": 0.7893,
"step": 4410
},
{
"epoch": 2.11,
"grad_norm": 13.103337073949472,
"learning_rate": 2.432771714933381e-06,
"loss": 0.7635,
"step": 4415
},
{
"epoch": 2.12,
"grad_norm": 35.458654006846174,
"learning_rate": 2.420831908190666e-06,
"loss": 0.7528,
"step": 4420
},
{
"epoch": 2.12,
"grad_norm": 37.72440257908907,
"learning_rate": 2.4089121074778086e-06,
"loss": 0.7478,
"step": 4425
},
{
"epoch": 2.12,
"grad_norm": 23.577001396078227,
"learning_rate": 2.397012405254039e-06,
"loss": 0.731,
"step": 4430
},
{
"epoch": 2.12,
"grad_norm": 15.68264671111477,
"learning_rate": 2.3851328938226808e-06,
"loss": 0.7589,
"step": 4435
},
{
"epoch": 2.13,
"grad_norm": 10.39514418429009,
"learning_rate": 2.3732736653304516e-06,
"loss": 0.7476,
"step": 4440
},
{
"epoch": 2.13,
"grad_norm": 16.102268779895912,
"learning_rate": 2.3614348117667284e-06,
"loss": 0.769,
"step": 4445
},
{
"epoch": 2.13,
"grad_norm": 8.71699571450735,
"learning_rate": 2.3496164249628526e-06,
"loss": 0.7468,
"step": 4450
},
{
"epoch": 2.13,
"grad_norm": 8.118583822053893,
"learning_rate": 2.337818596591408e-06,
"loss": 0.7502,
"step": 4455
},
{
"epoch": 2.13,
"grad_norm": 11.539820155623685,
"learning_rate": 2.3260414181655055e-06,
"loss": 0.7434,
"step": 4460
},
{
"epoch": 2.14,
"grad_norm": 8.702089242325131,
"learning_rate": 2.314284981038088e-06,
"loss": 0.7366,
"step": 4465
},
{
"epoch": 2.14,
"grad_norm": 7.7775037557510105,
"learning_rate": 2.3025493764012034e-06,
"loss": 0.7296,
"step": 4470
},
{
"epoch": 2.14,
"grad_norm": 10.665656742178038,
"learning_rate": 2.290834695285316e-06,
"loss": 0.7732,
"step": 4475
},
{
"epoch": 2.14,
"grad_norm": 12.868713894416537,
"learning_rate": 2.279141028558582e-06,
"loss": 0.7267,
"step": 4480
},
{
"epoch": 2.15,
"grad_norm": 11.159335817787895,
"learning_rate": 2.267468466926162e-06,
"loss": 0.7261,
"step": 4485
},
{
"epoch": 2.15,
"grad_norm": 16.195458256688806,
"learning_rate": 2.255817100929503e-06,
"loss": 0.7119,
"step": 4490
},
{
"epoch": 2.15,
"grad_norm": 9.559359575375394,
"learning_rate": 2.2441870209456403e-06,
"loss": 0.7209,
"step": 4495
},
{
"epoch": 2.15,
"grad_norm": 9.134441378920668,
"learning_rate": 2.23257831718651e-06,
"loss": 0.7799,
"step": 4500
},
{
"epoch": 2.16,
"grad_norm": 8.452664279000338,
"learning_rate": 2.2209910796982253e-06,
"loss": 0.7371,
"step": 4505
},
{
"epoch": 2.16,
"grad_norm": 11.698071823181227,
"learning_rate": 2.2094253983603977e-06,
"loss": 0.716,
"step": 4510
},
{
"epoch": 2.16,
"grad_norm": 8.182051624885792,
"learning_rate": 2.197881362885426e-06,
"loss": 0.7434,
"step": 4515
},
{
"epoch": 2.16,
"grad_norm": 8.484628287200575,
"learning_rate": 2.186359062817815e-06,
"loss": 0.7232,
"step": 4520
},
{
"epoch": 2.17,
"grad_norm": 10.95758419790806,
"learning_rate": 2.1748585875334664e-06,
"loss": 0.749,
"step": 4525
},
{
"epoch": 2.17,
"grad_norm": 11.038671582645499,
"learning_rate": 2.1633800262389925e-06,
"loss": 0.7261,
"step": 4530
},
{
"epoch": 2.17,
"grad_norm": 21.972823256296405,
"learning_rate": 2.151923467971028e-06,
"loss": 0.7352,
"step": 4535
},
{
"epoch": 2.17,
"grad_norm": 11.280964704945895,
"learning_rate": 2.1404890015955276e-06,
"loss": 0.7608,
"step": 4540
},
{
"epoch": 2.18,
"grad_norm": 8.223368268267329,
"learning_rate": 2.129076715807096e-06,
"loss": 0.7414,
"step": 4545
},
{
"epoch": 2.18,
"grad_norm": 14.838336423468508,
"learning_rate": 2.117686699128277e-06,
"loss": 0.7343,
"step": 4550
},
{
"epoch": 2.18,
"grad_norm": 21.281457055176848,
"learning_rate": 2.106319039908879e-06,
"loss": 0.7329,
"step": 4555
},
{
"epoch": 2.18,
"grad_norm": 11.34032359984279,
"learning_rate": 2.0949738263252966e-06,
"loss": 0.7343,
"step": 4560
},
{
"epoch": 2.19,
"grad_norm": 32.798749217830355,
"learning_rate": 2.083651146379809e-06,
"loss": 0.7414,
"step": 4565
},
{
"epoch": 2.19,
"grad_norm": 12.787828952215031,
"learning_rate": 2.072351087899914e-06,
"loss": 0.735,
"step": 4570
},
{
"epoch": 2.19,
"grad_norm": 9.260245807397256,
"learning_rate": 2.061073738537635e-06,
"loss": 0.7693,
"step": 4575
},
{
"epoch": 2.19,
"grad_norm": 8.177228277688725,
"learning_rate": 2.0498191857688487e-06,
"loss": 0.7588,
"step": 4580
},
{
"epoch": 2.19,
"grad_norm": 8.264373304772816,
"learning_rate": 2.0385875168926057e-06,
"loss": 0.714,
"step": 4585
},
{
"epoch": 2.2,
"grad_norm": 9.418141093979697,
"learning_rate": 2.0273788190304443e-06,
"loss": 0.749,
"step": 4590
},
{
"epoch": 2.2,
"grad_norm": 15.369400293883656,
"learning_rate": 2.01619317912573e-06,
"loss": 0.738,
"step": 4595
},
{
"epoch": 2.2,
"grad_norm": 8.595216106046296,
"learning_rate": 2.0050306839429652e-06,
"loss": 0.7221,
"step": 4600
},
{
"epoch": 2.2,
"grad_norm": 22.353169354152648,
"learning_rate": 1.9938914200671315e-06,
"loss": 0.7689,
"step": 4605
},
{
"epoch": 2.21,
"grad_norm": 8.49587918812411,
"learning_rate": 1.9827754739030038e-06,
"loss": 0.7565,
"step": 4610
},
{
"epoch": 2.21,
"grad_norm": 16.90500851943023,
"learning_rate": 1.971682931674491e-06,
"loss": 0.7668,
"step": 4615
},
{
"epoch": 2.21,
"grad_norm": 18.94498594262065,
"learning_rate": 1.9606138794239603e-06,
"loss": 0.7356,
"step": 4620
},
{
"epoch": 2.21,
"grad_norm": 11.25490127875143,
"learning_rate": 1.949568403011574e-06,
"loss": 0.7282,
"step": 4625
},
{
"epoch": 2.22,
"grad_norm": 18.674393210315962,
"learning_rate": 1.9385465881146236e-06,
"loss": 0.7222,
"step": 4630
},
{
"epoch": 2.22,
"grad_norm": 10.301232144485486,
"learning_rate": 1.9275485202268574e-06,
"loss": 0.7711,
"step": 4635
},
{
"epoch": 2.22,
"grad_norm": 15.577367395673454,
"learning_rate": 1.9165742846578315e-06,
"loss": 0.7577,
"step": 4640
},
{
"epoch": 2.22,
"grad_norm": 23.07238819829574,
"learning_rate": 1.9056239665322324e-06,
"loss": 0.7336,
"step": 4645
},
{
"epoch": 2.23,
"grad_norm": 10.870403027118689,
"learning_rate": 1.8946976507892312e-06,
"loss": 0.7288,
"step": 4650
},
{
"epoch": 2.23,
"grad_norm": 18.285278115714913,
"learning_rate": 1.8837954221818133e-06,
"loss": 0.7344,
"step": 4655
},
{
"epoch": 2.23,
"grad_norm": 17.53292991872467,
"learning_rate": 1.8729173652761246e-06,
"loss": 0.7567,
"step": 4660
},
{
"epoch": 2.23,
"grad_norm": 9.60280941176292,
"learning_rate": 1.8620635644508222e-06,
"loss": 0.7259,
"step": 4665
},
{
"epoch": 2.24,
"grad_norm": 24.366139515444544,
"learning_rate": 1.851234103896406e-06,
"loss": 0.7337,
"step": 4670
},
{
"epoch": 2.24,
"grad_norm": 11.529853858473043,
"learning_rate": 1.8404290676145858e-06,
"loss": 0.7417,
"step": 4675
},
{
"epoch": 2.24,
"grad_norm": 15.661761342172195,
"learning_rate": 1.8296485394176067e-06,
"loss": 0.7082,
"step": 4680
},
{
"epoch": 2.24,
"grad_norm": 22.83515627700215,
"learning_rate": 1.8188926029276128e-06,
"loss": 0.7544,
"step": 4685
},
{
"epoch": 2.25,
"grad_norm": 10.167209454508845,
"learning_rate": 1.8081613415759997e-06,
"loss": 0.7454,
"step": 4690
},
{
"epoch": 2.25,
"grad_norm": 16.614094130877746,
"learning_rate": 1.7974548386027584e-06,
"loss": 0.7012,
"step": 4695
},
{
"epoch": 2.25,
"grad_norm": 11.502246657580834,
"learning_rate": 1.78677317705584e-06,
"loss": 0.7122,
"step": 4700
},
{
"epoch": 2.25,
"grad_norm": 8.628311737758853,
"learning_rate": 1.7761164397904995e-06,
"loss": 0.7598,
"step": 4705
},
{
"epoch": 2.25,
"grad_norm": 12.315743559928652,
"learning_rate": 1.7654847094686678e-06,
"loss": 0.7515,
"step": 4710
},
{
"epoch": 2.26,
"grad_norm": 21.60625432882875,
"learning_rate": 1.754878068558295e-06,
"loss": 0.7499,
"step": 4715
},
{
"epoch": 2.26,
"grad_norm": 17.912151590847024,
"learning_rate": 1.7442965993327232e-06,
"loss": 0.7535,
"step": 4720
},
{
"epoch": 2.26,
"grad_norm": 14.127892504172886,
"learning_rate": 1.7337403838700433e-06,
"loss": 0.7333,
"step": 4725
},
{
"epoch": 2.26,
"grad_norm": 13.45556108571135,
"learning_rate": 1.7232095040524538e-06,
"loss": 0.7632,
"step": 4730
},
{
"epoch": 2.27,
"grad_norm": 11.087017809034007,
"learning_rate": 1.7127040415656366e-06,
"loss": 0.7548,
"step": 4735
},
{
"epoch": 2.27,
"grad_norm": 10.79125638347453,
"learning_rate": 1.7022240778981103e-06,
"loss": 0.7166,
"step": 4740
},
{
"epoch": 2.27,
"grad_norm": 24.06493950446032,
"learning_rate": 1.6917696943406125e-06,
"loss": 0.7349,
"step": 4745
},
{
"epoch": 2.27,
"grad_norm": 9.774831716427201,
"learning_rate": 1.6813409719854533e-06,
"loss": 0.7709,
"step": 4750
},
{
"epoch": 2.28,
"grad_norm": 16.679107508859357,
"learning_rate": 1.6709379917259028e-06,
"loss": 0.7474,
"step": 4755
},
{
"epoch": 2.28,
"grad_norm": 9.920714211462231,
"learning_rate": 1.660560834255548e-06,
"loss": 0.7438,
"step": 4760
},
{
"epoch": 2.28,
"grad_norm": 11.334288767392547,
"learning_rate": 1.6502095800676787e-06,
"loss": 0.7482,
"step": 4765
},
{
"epoch": 2.28,
"grad_norm": 8.940407116430526,
"learning_rate": 1.6398843094546607e-06,
"loss": 0.7195,
"step": 4770
},
{
"epoch": 2.29,
"grad_norm": 23.382746729865396,
"learning_rate": 1.629585102507304e-06,
"loss": 0.7224,
"step": 4775
},
{
"epoch": 2.29,
"grad_norm": 9.69382875144751,
"learning_rate": 1.619312039114256e-06,
"loss": 0.7403,
"step": 4780
},
{
"epoch": 2.29,
"grad_norm": 12.201461977885332,
"learning_rate": 1.6090651989613675e-06,
"loss": 0.7524,
"step": 4785
},
{
"epoch": 2.29,
"grad_norm": 12.228963340574253,
"learning_rate": 1.5988446615310894e-06,
"loss": 0.7462,
"step": 4790
},
{
"epoch": 2.3,
"grad_norm": 8.932190166319323,
"learning_rate": 1.5886505061018415e-06,
"loss": 0.7183,
"step": 4795
},
{
"epoch": 2.3,
"grad_norm": 12.899409366631069,
"learning_rate": 1.5784828117474039e-06,
"loss": 0.7115,
"step": 4800
},
{
"epoch": 2.3,
"grad_norm": 15.963603022546133,
"learning_rate": 1.5683416573363124e-06,
"loss": 0.6889,
"step": 4805
},
{
"epoch": 2.3,
"grad_norm": 10.991650806556887,
"learning_rate": 1.5582271215312294e-06,
"loss": 0.7338,
"step": 4810
},
{
"epoch": 2.3,
"grad_norm": 12.889698550087687,
"learning_rate": 1.548139282788349e-06,
"loss": 0.7718,
"step": 4815
},
{
"epoch": 2.31,
"grad_norm": 13.55366289416288,
"learning_rate": 1.5380782193567773e-06,
"loss": 0.7576,
"step": 4820
},
{
"epoch": 2.31,
"grad_norm": 10.1851888965866,
"learning_rate": 1.5280440092779313e-06,
"loss": 0.7318,
"step": 4825
},
{
"epoch": 2.31,
"grad_norm": 11.38080595936654,
"learning_rate": 1.5180367303849368e-06,
"loss": 0.7486,
"step": 4830
},
{
"epoch": 2.31,
"grad_norm": 8.157576367903811,
"learning_rate": 1.5080564603020143e-06,
"loss": 0.748,
"step": 4835
},
{
"epoch": 2.32,
"grad_norm": 11.288292660314134,
"learning_rate": 1.498103276443889e-06,
"loss": 0.7208,
"step": 4840
},
{
"epoch": 2.32,
"grad_norm": 8.397818969484167,
"learning_rate": 1.4881772560151774e-06,
"loss": 0.7426,
"step": 4845
},
{
"epoch": 2.32,
"grad_norm": 7.5416363276364855,
"learning_rate": 1.4782784760098018e-06,
"loss": 0.7215,
"step": 4850
},
{
"epoch": 2.32,
"grad_norm": 9.575657133749672,
"learning_rate": 1.4684070132103855e-06,
"loss": 0.7465,
"step": 4855
},
{
"epoch": 2.33,
"grad_norm": 10.957058553234557,
"learning_rate": 1.4585629441876537e-06,
"loss": 0.7158,
"step": 4860
},
{
"epoch": 2.33,
"grad_norm": 8.815878508869424,
"learning_rate": 1.4487463452998502e-06,
"loss": 0.7509,
"step": 4865
},
{
"epoch": 2.33,
"grad_norm": 10.343564276485274,
"learning_rate": 1.438957292692133e-06,
"loss": 0.7293,
"step": 4870
},
{
"epoch": 2.33,
"grad_norm": 9.085248631531478,
"learning_rate": 1.4291958622959972e-06,
"loss": 0.751,
"step": 4875
},
{
"epoch": 2.34,
"grad_norm": 14.40786095831399,
"learning_rate": 1.4194621298286708e-06,
"loss": 0.7953,
"step": 4880
},
{
"epoch": 2.34,
"grad_norm": 19.99191028207667,
"learning_rate": 1.4097561707925423e-06,
"loss": 0.7559,
"step": 4885
},
{
"epoch": 2.34,
"grad_norm": 9.516258976396523,
"learning_rate": 1.4000780604745606e-06,
"loss": 0.7486,
"step": 4890
},
{
"epoch": 2.34,
"grad_norm": 10.272016256314737,
"learning_rate": 1.3904278739456645e-06,
"loss": 0.7452,
"step": 4895
},
{
"epoch": 2.35,
"grad_norm": 8.459416151048796,
"learning_rate": 1.3808056860601926e-06,
"loss": 0.7311,
"step": 4900
},
{
"epoch": 2.35,
"grad_norm": 8.43881844768494,
"learning_rate": 1.3712115714553014e-06,
"loss": 0.735,
"step": 4905
},
{
"epoch": 2.35,
"grad_norm": 11.332466361570539,
"learning_rate": 1.3616456045503944e-06,
"loss": 0.7382,
"step": 4910
},
{
"epoch": 2.35,
"grad_norm": 9.534472353755127,
"learning_rate": 1.3521078595465332e-06,
"loss": 0.7329,
"step": 4915
},
{
"epoch": 2.36,
"grad_norm": 8.301982627560006,
"learning_rate": 1.3425984104258755e-06,
"loss": 0.7508,
"step": 4920
},
{
"epoch": 2.36,
"grad_norm": 9.621690633624361,
"learning_rate": 1.3331173309510882e-06,
"loss": 0.7335,
"step": 4925
},
{
"epoch": 2.36,
"grad_norm": 11.00567080680548,
"learning_rate": 1.323664694664783e-06,
"loss": 0.7573,
"step": 4930
},
{
"epoch": 2.36,
"grad_norm": 19.59692434421433,
"learning_rate": 1.314240574888946e-06,
"loss": 0.7201,
"step": 4935
},
{
"epoch": 2.36,
"grad_norm": 10.090599465609875,
"learning_rate": 1.3048450447243655e-06,
"loss": 0.7257,
"step": 4940
},
{
"epoch": 2.37,
"grad_norm": 9.828750684081387,
"learning_rate": 1.295478177050069e-06,
"loss": 0.734,
"step": 4945
},
{
"epoch": 2.37,
"grad_norm": 15.654174419379414,
"learning_rate": 1.2861400445227517e-06,
"loss": 0.7437,
"step": 4950
},
{
"epoch": 2.37,
"grad_norm": 7.969395427938827,
"learning_rate": 1.276830719576217e-06,
"loss": 0.7138,
"step": 4955
},
{
"epoch": 2.37,
"grad_norm": 8.203059149025146,
"learning_rate": 1.267550274420819e-06,
"loss": 0.7585,
"step": 4960
},
{
"epoch": 2.38,
"grad_norm": 13.583628287428223,
"learning_rate": 1.258298781042891e-06,
"loss": 0.7303,
"step": 4965
},
{
"epoch": 2.38,
"grad_norm": 7.879366346755731,
"learning_rate": 1.2490763112042003e-06,
"loss": 0.7166,
"step": 4970
},
{
"epoch": 2.38,
"grad_norm": 16.060809371266405,
"learning_rate": 1.2398829364413795e-06,
"loss": 0.7407,
"step": 4975
},
{
"epoch": 2.38,
"grad_norm": 9.053650951349203,
"learning_rate": 1.2307187280653809e-06,
"loss": 0.7652,
"step": 4980
},
{
"epoch": 2.39,
"grad_norm": 8.01540456122723,
"learning_rate": 1.22158375716092e-06,
"loss": 0.7008,
"step": 4985
},
{
"epoch": 2.39,
"grad_norm": 10.583351328711046,
"learning_rate": 1.2124780945859204e-06,
"loss": 0.7284,
"step": 4990
},
{
"epoch": 2.39,
"grad_norm": 11.572709459418732,
"learning_rate": 1.2034018109709716e-06,
"loss": 0.7394,
"step": 4995
},
{
"epoch": 2.39,
"grad_norm": 9.99745384024092,
"learning_rate": 1.1943549767187724e-06,
"loss": 0.7384,
"step": 5000
},
{
"epoch": 2.4,
"grad_norm": 11.24678731096917,
"learning_rate": 1.185337662003595e-06,
"loss": 0.703,
"step": 5005
},
{
"epoch": 2.4,
"grad_norm": 9.677017250675467,
"learning_rate": 1.1763499367707288e-06,
"loss": 0.7264,
"step": 5010
},
{
"epoch": 2.4,
"grad_norm": 7.552697122815217,
"learning_rate": 1.1673918707359511e-06,
"loss": 0.7502,
"step": 5015
},
{
"epoch": 2.4,
"grad_norm": 9.840596403048146,
"learning_rate": 1.1584635333849726e-06,
"loss": 0.7582,
"step": 5020
},
{
"epoch": 2.41,
"grad_norm": 8.026726996541981,
"learning_rate": 1.1495649939729109e-06,
"loss": 0.7087,
"step": 5025
},
{
"epoch": 2.41,
"grad_norm": 9.740129599144494,
"learning_rate": 1.140696321523746e-06,
"loss": 0.7411,
"step": 5030
},
{
"epoch": 2.41,
"grad_norm": 8.062422138734368,
"learning_rate": 1.1318575848297831e-06,
"loss": 0.7161,
"step": 5035
},
{
"epoch": 2.41,
"grad_norm": 11.609940199087632,
"learning_rate": 1.1230488524511285e-06,
"loss": 0.7414,
"step": 5040
},
{
"epoch": 2.42,
"grad_norm": 8.698037796583495,
"learning_rate": 1.1142701927151456e-06,
"loss": 0.7175,
"step": 5045
},
{
"epoch": 2.42,
"grad_norm": 7.503665797750975,
"learning_rate": 1.1055216737159364e-06,
"loss": 0.7453,
"step": 5050
},
{
"epoch": 2.42,
"grad_norm": 13.206211842902329,
"learning_rate": 1.096803363313803e-06,
"loss": 0.7172,
"step": 5055
},
{
"epoch": 2.42,
"grad_norm": 17.157310852676563,
"learning_rate": 1.0881153291347318e-06,
"loss": 0.7552,
"step": 5060
},
{
"epoch": 2.42,
"grad_norm": 9.343526331226034,
"learning_rate": 1.079457638569857e-06,
"loss": 0.7488,
"step": 5065
},
{
"epoch": 2.43,
"grad_norm": 9.883822798408264,
"learning_rate": 1.0708303587749485e-06,
"loss": 0.7446,
"step": 5070
},
{
"epoch": 2.43,
"grad_norm": 16.897153501882222,
"learning_rate": 1.0622335566698878e-06,
"loss": 0.7534,
"step": 5075
},
{
"epoch": 2.43,
"grad_norm": 8.417429521525417,
"learning_rate": 1.0536672989381414e-06,
"loss": 0.7146,
"step": 5080
},
{
"epoch": 2.43,
"grad_norm": 16.309172720846213,
"learning_rate": 1.0451316520262578e-06,
"loss": 0.7206,
"step": 5085
},
{
"epoch": 2.44,
"grad_norm": 9.618508424878408,
"learning_rate": 1.0366266821433391e-06,
"loss": 0.7408,
"step": 5090
},
{
"epoch": 2.44,
"grad_norm": 9.285221873976461,
"learning_rate": 1.0281524552605316e-06,
"loss": 0.7692,
"step": 5095
},
{
"epoch": 2.44,
"grad_norm": 15.165799846285555,
"learning_rate": 1.0197090371105207e-06,
"loss": 0.7274,
"step": 5100
},
{
"epoch": 2.44,
"grad_norm": 17.31396346013296,
"learning_rate": 1.0112964931870095e-06,
"loss": 0.7721,
"step": 5105
},
{
"epoch": 2.45,
"grad_norm": 11.145717804142992,
"learning_rate": 1.0029148887442196e-06,
"loss": 0.7329,
"step": 5110
},
{
"epoch": 2.45,
"grad_norm": 16.731688446444117,
"learning_rate": 9.945642887963842e-07,
"loss": 0.7194,
"step": 5115
},
{
"epoch": 2.45,
"grad_norm": 8.051552678835794,
"learning_rate": 9.862447581172346e-07,
"loss": 0.7291,
"step": 5120
},
{
"epoch": 2.45,
"grad_norm": 8.994796345901458,
"learning_rate": 9.779563612395127e-07,
"loss": 0.7218,
"step": 5125
},
{
"epoch": 2.46,
"grad_norm": 26.668521020540187,
"learning_rate": 9.696991624544556e-07,
"loss": 0.7484,
"step": 5130
},
{
"epoch": 2.46,
"grad_norm": 16.107701168687417,
"learning_rate": 9.614732258113095e-07,
"loss": 0.7002,
"step": 5135
},
{
"epoch": 2.46,
"grad_norm": 13.524729181046652,
"learning_rate": 9.532786151168222e-07,
"loss": 0.7337,
"step": 5140
},
{
"epoch": 2.46,
"grad_norm": 13.413856135608489,
"learning_rate": 9.451153939347579e-07,
"loss": 0.7271,
"step": 5145
},
{
"epoch": 2.47,
"grad_norm": 15.10809865849482,
"learning_rate": 9.369836255853954e-07,
"loss": 0.725,
"step": 5150
},
{
"epoch": 2.47,
"grad_norm": 13.67072240001044,
"learning_rate": 9.28883373145042e-07,
"loss": 0.7458,
"step": 5155
},
{
"epoch": 2.47,
"grad_norm": 10.23166929412297,
"learning_rate": 9.208146994455463e-07,
"loss": 0.7082,
"step": 5160
},
{
"epoch": 2.47,
"grad_norm": 14.847265986754174,
"learning_rate": 9.12777667073802e-07,
"loss": 0.7193,
"step": 5165
},
{
"epoch": 2.47,
"grad_norm": 12.54666521621409,
"learning_rate": 9.047723383712736e-07,
"loss": 0.7434,
"step": 5170
},
{
"epoch": 2.48,
"grad_norm": 10.8635180362816,
"learning_rate": 8.967987754335023e-07,
"loss": 0.7383,
"step": 5175
},
{
"epoch": 2.48,
"grad_norm": 10.667693629276858,
"learning_rate": 8.888570401096341e-07,
"loss": 0.7825,
"step": 5180
},
{
"epoch": 2.48,
"grad_norm": 16.6556994347205,
"learning_rate": 8.809471940019315e-07,
"loss": 0.7318,
"step": 5185
},
{
"epoch": 2.48,
"grad_norm": 15.396571847752627,
"learning_rate": 8.730692984653033e-07,
"loss": 0.7245,
"step": 5190
},
{
"epoch": 2.49,
"grad_norm": 15.495620178838923,
"learning_rate": 8.652234146068206e-07,
"loss": 0.7376,
"step": 5195
},
{
"epoch": 2.49,
"grad_norm": 9.307318194729497,
"learning_rate": 8.574096032852475e-07,
"loss": 0.751,
"step": 5200
},
{
"epoch": 2.49,
"grad_norm": 8.480451423247285,
"learning_rate": 8.496279251105754e-07,
"loss": 0.7622,
"step": 5205
},
{
"epoch": 2.49,
"grad_norm": 10.112190402629206,
"learning_rate": 8.418784404435365e-07,
"loss": 0.7319,
"step": 5210
},
{
"epoch": 2.5,
"grad_norm": 7.6676763292330925,
"learning_rate": 8.341612093951523e-07,
"loss": 0.7584,
"step": 5215
},
{
"epoch": 2.5,
"grad_norm": 7.978594089335979,
"learning_rate": 8.264762918262559e-07,
"loss": 0.7338,
"step": 5220
},
{
"epoch": 2.5,
"grad_norm": 8.560900012240543,
"learning_rate": 8.188237473470345e-07,
"loss": 0.7632,
"step": 5225
},
{
"epoch": 2.5,
"grad_norm": 7.781276201046599,
"learning_rate": 8.112036353165631e-07,
"loss": 0.7394,
"step": 5230
},
{
"epoch": 2.51,
"grad_norm": 8.369647974955507,
"learning_rate": 8.036160148423449e-07,
"loss": 0.7266,
"step": 5235
},
{
"epoch": 2.51,
"grad_norm": 8.390043923759736,
"learning_rate": 7.960609447798568e-07,
"loss": 0.7299,
"step": 5240
},
{
"epoch": 2.51,
"grad_norm": 8.654662294314257,
"learning_rate": 7.885384837320825e-07,
"loss": 0.7516,
"step": 5245
},
{
"epoch": 2.51,
"grad_norm": 9.64958524811817,
"learning_rate": 7.810486900490749e-07,
"loss": 0.7227,
"step": 5250
},
{
"epoch": 2.52,
"grad_norm": 10.516735917585073,
"learning_rate": 7.735916218274858e-07,
"loss": 0.733,
"step": 5255
},
{
"epoch": 2.52,
"grad_norm": 9.474506053724289,
"learning_rate": 7.661673369101236e-07,
"loss": 0.7184,
"step": 5260
},
{
"epoch": 2.52,
"grad_norm": 8.87857178669819,
"learning_rate": 7.587758928855071e-07,
"loss": 0.719,
"step": 5265
},
{
"epoch": 2.52,
"grad_norm": 11.241788014616255,
"learning_rate": 7.514173470874126e-07,
"loss": 0.7384,
"step": 5270
},
{
"epoch": 2.53,
"grad_norm": 13.794637492647029,
"learning_rate": 7.44091756594435e-07,
"loss": 0.7598,
"step": 5275
},
{
"epoch": 2.53,
"grad_norm": 9.938050619731781,
"learning_rate": 7.367991782295392e-07,
"loss": 0.7318,
"step": 5280
},
{
"epoch": 2.53,
"grad_norm": 8.242984239136046,
"learning_rate": 7.295396685596251e-07,
"loss": 0.7339,
"step": 5285
},
{
"epoch": 2.53,
"grad_norm": 8.29360911682493,
"learning_rate": 7.223132838950858e-07,
"loss": 0.7065,
"step": 5290
},
{
"epoch": 2.53,
"grad_norm": 8.080148808694046,
"learning_rate": 7.151200802893682e-07,
"loss": 0.7526,
"step": 5295
},
{
"epoch": 2.54,
"grad_norm": 8.354847338486607,
"learning_rate": 7.079601135385455e-07,
"loss": 0.7221,
"step": 5300
},
{
"epoch": 2.54,
"grad_norm": 7.678575988440592,
"learning_rate": 7.008334391808764e-07,
"loss": 0.7295,
"step": 5305
},
{
"epoch": 2.54,
"grad_norm": 15.113037791431386,
"learning_rate": 6.937401124963811e-07,
"loss": 0.6989,
"step": 5310
},
{
"epoch": 2.54,
"grad_norm": 8.121071873672062,
"learning_rate": 6.866801885064056e-07,
"loss": 0.708,
"step": 5315
},
{
"epoch": 2.55,
"grad_norm": 27.419604641506886,
"learning_rate": 6.79653721973203e-07,
"loss": 0.7599,
"step": 5320
},
{
"epoch": 2.55,
"grad_norm": 8.020230779677535,
"learning_rate": 6.726607673995e-07,
"loss": 0.7242,
"step": 5325
},
{
"epoch": 2.55,
"grad_norm": 15.422125618536919,
"learning_rate": 6.65701379028082e-07,
"loss": 0.7325,
"step": 5330
},
{
"epoch": 2.55,
"grad_norm": 8.802092516811596,
"learning_rate": 6.587756108413684e-07,
"loss": 0.7062,
"step": 5335
},
{
"epoch": 2.56,
"grad_norm": 14.493722433635703,
"learning_rate": 6.518835165609916e-07,
"loss": 0.7513,
"step": 5340
},
{
"epoch": 2.56,
"grad_norm": 9.854878668671034,
"learning_rate": 6.450251496473869e-07,
"loss": 0.7058,
"step": 5345
},
{
"epoch": 2.56,
"grad_norm": 8.663936524289891,
"learning_rate": 6.382005632993698e-07,
"loss": 0.7156,
"step": 5350
},
{
"epoch": 2.56,
"grad_norm": 11.296888275541626,
"learning_rate": 6.314098104537325e-07,
"loss": 0.725,
"step": 5355
},
{
"epoch": 2.57,
"grad_norm": 8.818247869981048,
"learning_rate": 6.246529437848242e-07,
"loss": 0.732,
"step": 5360
},
{
"epoch": 2.57,
"grad_norm": 13.63319281435814,
"learning_rate": 6.179300157041462e-07,
"loss": 0.7116,
"step": 5365
},
{
"epoch": 2.57,
"grad_norm": 22.91909312494361,
"learning_rate": 6.11241078359951e-07,
"loss": 0.7173,
"step": 5370
},
{
"epoch": 2.57,
"grad_norm": 11.663880746412323,
"learning_rate": 6.045861836368244e-07,
"loss": 0.7243,
"step": 5375
},
{
"epoch": 2.58,
"grad_norm": 11.987158359585134,
"learning_rate": 5.979653831553012e-07,
"loss": 0.6928,
"step": 5380
},
{
"epoch": 2.58,
"grad_norm": 15.606214505250815,
"learning_rate": 5.913787282714473e-07,
"loss": 0.7206,
"step": 5385
},
{
"epoch": 2.58,
"grad_norm": 8.66193772214303,
"learning_rate": 5.848262700764695e-07,
"loss": 0.7064,
"step": 5390
},
{
"epoch": 2.58,
"grad_norm": 20.465553995783704,
"learning_rate": 5.78308059396322e-07,
"loss": 0.7596,
"step": 5395
},
{
"epoch": 2.58,
"grad_norm": 15.74957034904241,
"learning_rate": 5.718241467913055e-07,
"loss": 0.6928,
"step": 5400
},
{
"epoch": 2.59,
"grad_norm": 13.86541967075925,
"learning_rate": 5.653745825556805e-07,
"loss": 0.7323,
"step": 5405
},
{
"epoch": 2.59,
"grad_norm": 10.205658966297406,
"learning_rate": 5.589594167172729e-07,
"loss": 0.7423,
"step": 5410
},
{
"epoch": 2.59,
"grad_norm": 10.708864590667854,
"learning_rate": 5.525786990370901e-07,
"loss": 0.7005,
"step": 5415
},
{
"epoch": 2.59,
"grad_norm": 13.115132519297296,
"learning_rate": 5.462324790089302e-07,
"loss": 0.6997,
"step": 5420
},
{
"epoch": 2.6,
"grad_norm": 7.830652126666401,
"learning_rate": 5.399208058590021e-07,
"loss": 0.7609,
"step": 5425
},
{
"epoch": 2.6,
"grad_norm": 9.616159273753071,
"learning_rate": 5.336437285455443e-07,
"loss": 0.7277,
"step": 5430
},
{
"epoch": 2.6,
"grad_norm": 9.33446933684336,
"learning_rate": 5.27401295758439e-07,
"loss": 0.7285,
"step": 5435
},
{
"epoch": 2.6,
"grad_norm": 8.1377363438378,
"learning_rate": 5.211935559188419e-07,
"loss": 0.7085,
"step": 5440
},
{
"epoch": 2.61,
"grad_norm": 12.991527070961066,
"learning_rate": 5.150205571788014e-07,
"loss": 0.7138,
"step": 5445
},
{
"epoch": 2.61,
"grad_norm": 10.218991474480918,
"learning_rate": 5.088823474208892e-07,
"loss": 0.7017,
"step": 5450
},
{
"epoch": 2.61,
"grad_norm": 11.226645954609767,
"learning_rate": 5.027789742578226e-07,
"loss": 0.7051,
"step": 5455
},
{
"epoch": 2.61,
"grad_norm": 8.60851765504506,
"learning_rate": 4.967104850321042e-07,
"loss": 0.7098,
"step": 5460
},
{
"epoch": 2.62,
"grad_norm": 7.069670065355717,
"learning_rate": 4.906769268156453e-07,
"loss": 0.6954,
"step": 5465
},
{
"epoch": 2.62,
"grad_norm": 7.373778854049451,
"learning_rate": 4.846783464094073e-07,
"loss": 0.7064,
"step": 5470
},
{
"epoch": 2.62,
"grad_norm": 8.676327671251288,
"learning_rate": 4.787147903430383e-07,
"loss": 0.7045,
"step": 5475
},
{
"epoch": 2.62,
"grad_norm": 9.99962909634723,
"learning_rate": 4.727863048745068e-07,
"loss": 0.6852,
"step": 5480
},
{
"epoch": 2.63,
"grad_norm": 10.824654375789379,
"learning_rate": 4.668929359897489e-07,
"loss": 0.705,
"step": 5485
},
{
"epoch": 2.63,
"grad_norm": 7.9263818558309715,
"learning_rate": 4.6103472940230766e-07,
"loss": 0.7312,
"step": 5490
},
{
"epoch": 2.63,
"grad_norm": 8.906831088436562,
"learning_rate": 4.552117305529824e-07,
"loss": 0.7152,
"step": 5495
},
{
"epoch": 2.63,
"grad_norm": 14.773400743031617,
"learning_rate": 4.4942398460947166e-07,
"loss": 0.7286,
"step": 5500
},
{
"epoch": 2.64,
"grad_norm": 12.898476488712348,
"learning_rate": 4.436715364660238e-07,
"loss": 0.7399,
"step": 5505
},
{
"epoch": 2.64,
"grad_norm": 12.521000085593839,
"learning_rate": 4.379544307430961e-07,
"loss": 0.7361,
"step": 5510
},
{
"epoch": 2.64,
"grad_norm": 8.122546429660403,
"learning_rate": 4.322727117869951e-07,
"loss": 0.7025,
"step": 5515
},
{
"epoch": 2.64,
"grad_norm": 7.443886233747539,
"learning_rate": 4.266264236695461e-07,
"loss": 0.7016,
"step": 5520
},
{
"epoch": 2.64,
"grad_norm": 7.812367613742729,
"learning_rate": 4.2101561018774216e-07,
"loss": 0.7228,
"step": 5525
},
{
"epoch": 2.65,
"grad_norm": 10.560953973562839,
"learning_rate": 4.154403148634062e-07,
"loss": 0.7383,
"step": 5530
},
{
"epoch": 2.65,
"grad_norm": 10.153543760073466,
"learning_rate": 4.0990058094285966e-07,
"loss": 0.7196,
"step": 5535
},
{
"epoch": 2.65,
"grad_norm": 7.881952768301664,
"learning_rate": 4.04396451396577e-07,
"loss": 0.7062,
"step": 5540
},
{
"epoch": 2.65,
"grad_norm": 11.008811562438115,
"learning_rate": 3.989279689188608e-07,
"loss": 0.7249,
"step": 5545
},
{
"epoch": 2.66,
"grad_norm": 7.9805696665256525,
"learning_rate": 3.934951759275052e-07,
"loss": 0.7199,
"step": 5550
},
{
"epoch": 2.66,
"grad_norm": 8.920685845549864,
"learning_rate": 3.880981145634705e-07,
"loss": 0.7544,
"step": 5555
},
{
"epoch": 2.66,
"grad_norm": 8.294739378389218,
"learning_rate": 3.827368266905551e-07,
"loss": 0.7293,
"step": 5560
},
{
"epoch": 2.66,
"grad_norm": 8.455618534265481,
"learning_rate": 3.77411353895068e-07,
"loss": 0.7139,
"step": 5565
},
{
"epoch": 2.67,
"grad_norm": 7.387465683410082,
"learning_rate": 3.721217374855124e-07,
"loss": 0.7014,
"step": 5570
},
{
"epoch": 2.67,
"grad_norm": 11.920317681136604,
"learning_rate": 3.668680184922563e-07,
"loss": 0.7144,
"step": 5575
},
{
"epoch": 2.67,
"grad_norm": 9.162778608152495,
"learning_rate": 3.616502376672254e-07,
"loss": 0.735,
"step": 5580
},
{
"epoch": 2.67,
"grad_norm": 8.902791390546845,
"learning_rate": 3.564684354835768e-07,
"loss": 0.7252,
"step": 5585
},
{
"epoch": 2.68,
"grad_norm": 8.684671490626085,
"learning_rate": 3.5132265213539074e-07,
"loss": 0.7401,
"step": 5590
},
{
"epoch": 2.68,
"grad_norm": 8.08097799047911,
"learning_rate": 3.462129275373577e-07,
"loss": 0.7181,
"step": 5595
},
{
"epoch": 2.68,
"grad_norm": 7.794546002284403,
"learning_rate": 3.4113930132446735e-07,
"loss": 0.7302,
"step": 5600
},
{
"epoch": 2.68,
"grad_norm": 8.286586210374672,
"learning_rate": 3.361018128517052e-07,
"loss": 0.685,
"step": 5605
},
{
"epoch": 2.69,
"grad_norm": 7.615526961881427,
"learning_rate": 3.3110050119374016e-07,
"loss": 0.7432,
"step": 5610
},
{
"epoch": 2.69,
"grad_norm": 9.171441156001265,
"learning_rate": 3.2613540514462917e-07,
"loss": 0.716,
"step": 5615
},
{
"epoch": 2.69,
"grad_norm": 10.407163161607505,
"learning_rate": 3.212065632175104e-07,
"loss": 0.7599,
"step": 5620
},
{
"epoch": 2.69,
"grad_norm": 8.401348722207088,
"learning_rate": 3.163140136443088e-07,
"loss": 0.722,
"step": 5625
},
{
"epoch": 2.7,
"grad_norm": 8.131667194111651,
"learning_rate": 3.114577943754371e-07,
"loss": 0.7064,
"step": 5630
},
{
"epoch": 2.7,
"grad_norm": 11.978854736564982,
"learning_rate": 3.0663794307950024e-07,
"loss": 0.7256,
"step": 5635
},
{
"epoch": 2.7,
"grad_norm": 7.7142441304523315,
"learning_rate": 3.018544971430071e-07,
"loss": 0.7007,
"step": 5640
},
{
"epoch": 2.7,
"grad_norm": 6.916528348667669,
"learning_rate": 2.97107493670078e-07,
"loss": 0.702,
"step": 5645
},
{
"epoch": 2.7,
"grad_norm": 10.845962762443746,
"learning_rate": 2.923969694821577e-07,
"loss": 0.7123,
"step": 5650
},
{
"epoch": 2.71,
"grad_norm": 9.905926911598188,
"learning_rate": 2.877229611177268e-07,
"loss": 0.7026,
"step": 5655
},
{
"epoch": 2.71,
"grad_norm": 8.33768566550249,
"learning_rate": 2.8308550483202244e-07,
"loss": 0.6965,
"step": 5660
},
{
"epoch": 2.71,
"grad_norm": 10.65958641694257,
"learning_rate": 2.7848463659675697e-07,
"loss": 0.6826,
"step": 5665
},
{
"epoch": 2.71,
"grad_norm": 8.662606522365623,
"learning_rate": 2.739203920998346e-07,
"loss": 0.7128,
"step": 5670
},
{
"epoch": 2.72,
"grad_norm": 9.991620526255327,
"learning_rate": 2.6939280674508016e-07,
"loss": 0.7475,
"step": 5675
},
{
"epoch": 2.72,
"grad_norm": 8.96611126789256,
"learning_rate": 2.6490191565195836e-07,
"loss": 0.733,
"step": 5680
},
{
"epoch": 2.72,
"grad_norm": 13.128816808416095,
"learning_rate": 2.604477536553079e-07,
"loss": 0.7028,
"step": 5685
},
{
"epoch": 2.72,
"grad_norm": 12.626021867909257,
"learning_rate": 2.560303553050669e-07,
"loss": 0.7048,
"step": 5690
},
{
"epoch": 2.73,
"grad_norm": 7.889306824857499,
"learning_rate": 2.5164975486600576e-07,
"loss": 0.7175,
"step": 5695
},
{
"epoch": 2.73,
"grad_norm": 10.135931514112464,
"learning_rate": 2.473059863174626e-07,
"loss": 0.6909,
"step": 5700
},
{
"epoch": 2.73,
"grad_norm": 11.345982422697064,
"learning_rate": 2.429990833530771e-07,
"loss": 0.6984,
"step": 5705
},
{
"epoch": 2.73,
"grad_norm": 7.684012755885655,
"learning_rate": 2.387290793805336e-07,
"loss": 0.7099,
"step": 5710
},
{
"epoch": 2.74,
"grad_norm": 9.078060238453725,
"learning_rate": 2.3449600752129598e-07,
"loss": 0.704,
"step": 5715
},
{
"epoch": 2.74,
"grad_norm": 9.227703578167397,
"learning_rate": 2.3029990061035677e-07,
"loss": 0.6959,
"step": 5720
},
{
"epoch": 2.74,
"grad_norm": 8.91154328525788,
"learning_rate": 2.2614079119597732e-07,
"loss": 0.7105,
"step": 5725
},
{
"epoch": 2.74,
"grad_norm": 7.377011388974649,
"learning_rate": 2.2201871153943956e-07,
"loss": 0.6844,
"step": 5730
},
{
"epoch": 2.75,
"grad_norm": 8.125344496585377,
"learning_rate": 2.179336936147941e-07,
"loss": 0.7447,
"step": 5735
},
{
"epoch": 2.75,
"grad_norm": 9.938519546771527,
"learning_rate": 2.1388576910860937e-07,
"loss": 0.7302,
"step": 5740
},
{
"epoch": 2.75,
"grad_norm": 9.37704834822478,
"learning_rate": 2.0987496941973107e-07,
"loss": 0.7388,
"step": 5745
},
{
"epoch": 2.75,
"grad_norm": 9.894506231776,
"learning_rate": 2.0590132565903475e-07,
"loss": 0.7008,
"step": 5750
},
{
"epoch": 2.75,
"grad_norm": 11.619718467028806,
"learning_rate": 2.0196486864918653e-07,
"loss": 0.7289,
"step": 5755
},
{
"epoch": 2.76,
"grad_norm": 9.259196685793114,
"learning_rate": 1.9806562892440207e-07,
"loss": 0.7267,
"step": 5760
},
{
"epoch": 2.76,
"grad_norm": 8.002624364425463,
"learning_rate": 1.9420363673021192e-07,
"loss": 0.7297,
"step": 5765
},
{
"epoch": 2.76,
"grad_norm": 7.626971580210646,
"learning_rate": 1.9037892202322495e-07,
"loss": 0.7336,
"step": 5770
},
{
"epoch": 2.76,
"grad_norm": 8.496312309254654,
"learning_rate": 1.865915144708985e-07,
"loss": 0.7361,
"step": 5775
},
{
"epoch": 2.77,
"grad_norm": 7.655732252044507,
"learning_rate": 1.8284144345130538e-07,
"loss": 0.7271,
"step": 5780
},
{
"epoch": 2.77,
"grad_norm": 8.964994769419953,
"learning_rate": 1.791287380529061e-07,
"loss": 0.6829,
"step": 5785
},
{
"epoch": 2.77,
"grad_norm": 8.1730834317757,
"learning_rate": 1.75453427074328e-07,
"loss": 0.734,
"step": 5790
},
{
"epoch": 2.77,
"grad_norm": 7.545356947209614,
"learning_rate": 1.7181553902413438e-07,
"loss": 0.7359,
"step": 5795
},
{
"epoch": 2.78,
"grad_norm": 7.483036402676814,
"learning_rate": 1.6821510212060966e-07,
"loss": 0.733,
"step": 5800
},
{
"epoch": 2.78,
"grad_norm": 10.222936253561748,
"learning_rate": 1.646521442915372e-07,
"loss": 0.7141,
"step": 5805
},
{
"epoch": 2.78,
"grad_norm": 8.076547261834426,
"learning_rate": 1.6112669317398354e-07,
"loss": 0.7494,
"step": 5810
},
{
"epoch": 2.78,
"grad_norm": 8.57795533125446,
"learning_rate": 1.5763877611408463e-07,
"loss": 0.7119,
"step": 5815
},
{
"epoch": 2.79,
"grad_norm": 8.152009877308489,
"learning_rate": 1.541884201668331e-07,
"loss": 0.7434,
"step": 5820
},
{
"epoch": 2.79,
"grad_norm": 12.137316314165183,
"learning_rate": 1.5077565209586699e-07,
"loss": 0.7214,
"step": 5825
},
{
"epoch": 2.79,
"grad_norm": 12.829350921568361,
"learning_rate": 1.4740049837326576e-07,
"loss": 0.7492,
"step": 5830
},
{
"epoch": 2.79,
"grad_norm": 8.413049484985029,
"learning_rate": 1.440629851793407e-07,
"loss": 0.7208,
"step": 5835
},
{
"epoch": 2.8,
"grad_norm": 7.579533888710205,
"learning_rate": 1.407631384024366e-07,
"loss": 0.7064,
"step": 5840
},
{
"epoch": 2.8,
"grad_norm": 7.7088719881192524,
"learning_rate": 1.3750098363872478e-07,
"loss": 0.7356,
"step": 5845
},
{
"epoch": 2.8,
"grad_norm": 9.913852333414841,
"learning_rate": 1.342765461920109e-07,
"loss": 0.7322,
"step": 5850
},
{
"epoch": 2.8,
"grad_norm": 10.510541398852599,
"learning_rate": 1.3108985107353477e-07,
"loss": 0.7197,
"step": 5855
},
{
"epoch": 2.81,
"grad_norm": 7.899537503149924,
"learning_rate": 1.2794092300177698e-07,
"loss": 0.7061,
"step": 5860
},
{
"epoch": 2.81,
"grad_norm": 7.8169729054358905,
"learning_rate": 1.2482978640226905e-07,
"loss": 0.7297,
"step": 5865
},
{
"epoch": 2.81,
"grad_norm": 11.55382784631987,
"learning_rate": 1.2175646540740105e-07,
"loss": 0.7087,
"step": 5870
},
{
"epoch": 2.81,
"grad_norm": 7.604422775499449,
"learning_rate": 1.1872098385623587e-07,
"loss": 0.7154,
"step": 5875
},
{
"epoch": 2.81,
"grad_norm": 8.940418775671073,
"learning_rate": 1.1572336529432571e-07,
"loss": 0.7201,
"step": 5880
},
{
"epoch": 2.82,
"grad_norm": 10.329306370360369,
"learning_rate": 1.12763632973526e-07,
"loss": 0.7162,
"step": 5885
},
{
"epoch": 2.82,
"grad_norm": 8.169196324366384,
"learning_rate": 1.0984180985181892e-07,
"loss": 0.7414,
"step": 5890
},
{
"epoch": 2.82,
"grad_norm": 12.087886275122234,
"learning_rate": 1.0695791859313299e-07,
"loss": 0.6968,
"step": 5895
},
{
"epoch": 2.82,
"grad_norm": 8.537355715483418,
"learning_rate": 1.0411198156716706e-07,
"loss": 0.7209,
"step": 5900
},
{
"epoch": 2.83,
"grad_norm": 8.41625123879191,
"learning_rate": 1.013040208492172e-07,
"loss": 0.7197,
"step": 5905
},
{
"epoch": 2.83,
"grad_norm": 10.23514342668057,
"learning_rate": 9.853405822000783e-08,
"loss": 0.7288,
"step": 5910
},
{
"epoch": 2.83,
"grad_norm": 7.659324275796891,
"learning_rate": 9.580211516551862e-08,
"loss": 0.711,
"step": 5915
},
{
"epoch": 2.83,
"grad_norm": 7.713966070891584,
"learning_rate": 9.310821287682126e-08,
"loss": 0.7159,
"step": 5920
},
{
"epoch": 2.84,
"grad_norm": 9.568636090534852,
"learning_rate": 9.045237224991233e-08,
"loss": 0.7458,
"step": 5925
},
{
"epoch": 2.84,
"grad_norm": 8.769262183580002,
"learning_rate": 8.783461388555348e-08,
"loss": 0.714,
"step": 5930
},
{
"epoch": 2.84,
"grad_norm": 8.08959664121657,
"learning_rate": 8.525495808911156e-08,
"loss": 0.7031,
"step": 5935
},
{
"epoch": 2.84,
"grad_norm": 8.104266657711255,
"learning_rate": 8.271342487039758e-08,
"loss": 0.7203,
"step": 5940
},
{
"epoch": 2.85,
"grad_norm": 7.869774216285634,
"learning_rate": 8.021003394351857e-08,
"loss": 0.7248,
"step": 5945
},
{
"epoch": 2.85,
"grad_norm": 7.487145931546606,
"learning_rate": 7.774480472671486e-08,
"loss": 0.7647,
"step": 5950
},
{
"epoch": 2.85,
"grad_norm": 7.725391778447949,
"learning_rate": 7.531775634222138e-08,
"loss": 0.6769,
"step": 5955
},
{
"epoch": 2.85,
"grad_norm": 8.876272831247563,
"learning_rate": 7.292890761610716e-08,
"loss": 0.7263,
"step": 5960
},
{
"epoch": 2.86,
"grad_norm": 7.672441998306433,
"learning_rate": 7.057827707813769e-08,
"loss": 0.6926,
"step": 5965
},
{
"epoch": 2.86,
"grad_norm": 8.579204953946617,
"learning_rate": 6.826588296162895e-08,
"loss": 0.7025,
"step": 5970
},
{
"epoch": 2.86,
"grad_norm": 7.666942511324298,
"learning_rate": 6.59917432033036e-08,
"loss": 0.7179,
"step": 5975
},
{
"epoch": 2.86,
"grad_norm": 9.32767397545717,
"learning_rate": 6.375587544315609e-08,
"loss": 0.7023,
"step": 5980
},
{
"epoch": 2.87,
"grad_norm": 8.0615614585719,
"learning_rate": 6.15582970243117e-08,
"loss": 0.7424,
"step": 5985
},
{
"epoch": 2.87,
"grad_norm": 8.110495258638995,
"learning_rate": 5.9399024992894405e-08,
"loss": 0.7235,
"step": 5990
},
{
"epoch": 2.87,
"grad_norm": 8.108005753201349,
"learning_rate": 5.727807609789471e-08,
"loss": 0.7024,
"step": 5995
},
{
"epoch": 2.87,
"grad_norm": 9.069242594976567,
"learning_rate": 5.519546679103871e-08,
"loss": 0.7263,
"step": 6000
},
{
"epoch": 2.87,
"grad_norm": 8.350453129638888,
"learning_rate": 5.315121322666095e-08,
"loss": 0.7058,
"step": 6005
},
{
"epoch": 2.88,
"grad_norm": 7.885371778863312,
"learning_rate": 5.114533126157839e-08,
"loss": 0.7416,
"step": 6010
},
{
"epoch": 2.88,
"grad_norm": 10.061849501968886,
"learning_rate": 4.917783645496888e-08,
"loss": 0.7128,
"step": 6015
},
{
"epoch": 2.88,
"grad_norm": 8.320164129300217,
"learning_rate": 4.724874406825008e-08,
"loss": 0.7249,
"step": 6020
},
{
"epoch": 2.88,
"grad_norm": 9.845719064536823,
"learning_rate": 4.5358069064959096e-08,
"loss": 0.6999,
"step": 6025
},
{
"epoch": 2.89,
"grad_norm": 8.134419128411688,
"learning_rate": 4.350582611063969e-08,
"loss": 0.7311,
"step": 6030
},
{
"epoch": 2.89,
"grad_norm": 8.995983017728303,
"learning_rate": 4.1692029572725225e-08,
"loss": 0.7669,
"step": 6035
},
{
"epoch": 2.89,
"grad_norm": 7.535551327770064,
"learning_rate": 3.9916693520430924e-08,
"loss": 0.7148,
"step": 6040
},
{
"epoch": 2.89,
"grad_norm": 10.354852774044371,
"learning_rate": 3.8179831724640125e-08,
"loss": 0.6997,
"step": 6045
},
{
"epoch": 2.9,
"grad_norm": 8.173462297408502,
"learning_rate": 3.6481457657802085e-08,
"loss": 0.7024,
"step": 6050
},
{
"epoch": 2.9,
"grad_norm": 12.435995217803038,
"learning_rate": 3.482158449382378e-08,
"loss": 0.7036,
"step": 6055
},
{
"epoch": 2.9,
"grad_norm": 8.136588437287768,
"learning_rate": 3.32002251079705e-08,
"loss": 0.7428,
"step": 6060
},
{
"epoch": 2.9,
"grad_norm": 8.04419894708745,
"learning_rate": 3.1617392076764284e-08,
"loss": 0.7047,
"step": 6065
},
{
"epoch": 2.91,
"grad_norm": 8.522106899894323,
"learning_rate": 3.007309767788569e-08,
"loss": 0.7026,
"step": 6070
},
{
"epoch": 2.91,
"grad_norm": 8.292855169890199,
"learning_rate": 2.8567353890082696e-08,
"loss": 0.7258,
"step": 6075
},
{
"epoch": 2.91,
"grad_norm": 7.9382007995475865,
"learning_rate": 2.710017239307139e-08,
"loss": 0.7334,
"step": 6080
},
{
"epoch": 2.91,
"grad_norm": 7.975208167567208,
"learning_rate": 2.5671564567451012e-08,
"loss": 0.7279,
"step": 6085
},
{
"epoch": 2.92,
"grad_norm": 8.609966650432614,
"learning_rate": 2.4281541494613482e-08,
"loss": 0.719,
"step": 6090
},
{
"epoch": 2.92,
"grad_norm": 7.760259067686616,
"learning_rate": 2.2930113956655674e-08,
"loss": 0.6789,
"step": 6095
},
{
"epoch": 2.92,
"grad_norm": 7.353913586541014,
"learning_rate": 2.161729243629951e-08,
"loss": 0.685,
"step": 6100
},
{
"epoch": 2.92,
"grad_norm": 8.072939392396535,
"learning_rate": 2.0343087116807548e-08,
"loss": 0.7313,
"step": 6105
},
{
"epoch": 2.92,
"grad_norm": 6.80836775505279,
"learning_rate": 1.9107507881905297e-08,
"loss": 0.7291,
"step": 6110
},
{
"epoch": 2.93,
"grad_norm": 9.419726891807665,
"learning_rate": 1.7910564315704036e-08,
"loss": 0.7257,
"step": 6115
},
{
"epoch": 2.93,
"grad_norm": 7.9389969079534515,
"learning_rate": 1.6752265702628112e-08,
"loss": 0.7039,
"step": 6120
},
{
"epoch": 2.93,
"grad_norm": 8.681586144393854,
"learning_rate": 1.5632621027339977e-08,
"loss": 0.716,
"step": 6125
},
{
"epoch": 2.93,
"grad_norm": 8.376945692937491,
"learning_rate": 1.4551638974673598e-08,
"loss": 0.7331,
"step": 6130
},
{
"epoch": 2.94,
"grad_norm": 7.621763687688844,
"learning_rate": 1.3509327929563942e-08,
"loss": 0.6956,
"step": 6135
},
{
"epoch": 2.94,
"grad_norm": 7.812267883480422,
"learning_rate": 1.2505695976985366e-08,
"loss": 0.7299,
"step": 6140
},
{
"epoch": 2.94,
"grad_norm": 7.641083024769762,
"learning_rate": 1.1540750901886111e-08,
"loss": 0.7335,
"step": 6145
},
{
"epoch": 2.94,
"grad_norm": 8.009321519066628,
"learning_rate": 1.0614500189129463e-08,
"loss": 0.7172,
"step": 6150
},
{
"epoch": 2.95,
"grad_norm": 9.772581366147175,
"learning_rate": 9.72695102343435e-09,
"loss": 0.7208,
"step": 6155
},
{
"epoch": 2.95,
"grad_norm": 10.73910145028275,
"learning_rate": 8.878110289322062e-09,
"loss": 0.7165,
"step": 6160
},
{
"epoch": 2.95,
"grad_norm": 7.393425930603982,
"learning_rate": 8.067984571059617e-09,
"loss": 0.6963,
"step": 6165
},
{
"epoch": 2.95,
"grad_norm": 8.714935332962217,
"learning_rate": 7.296580152610921e-09,
"loss": 0.6937,
"step": 6170
},
{
"epoch": 2.96,
"grad_norm": 9.904460671791293,
"learning_rate": 6.5639030175873584e-09,
"loss": 0.7115,
"step": 6175
},
{
"epoch": 2.96,
"grad_norm": 9.70593343271801,
"learning_rate": 5.869958849202273e-09,
"loss": 0.7082,
"step": 6180
},
{
"epoch": 2.96,
"grad_norm": 9.10569954484473,
"learning_rate": 5.21475303022434e-09,
"loss": 0.698,
"step": 6185
},
{
"epoch": 2.96,
"grad_norm": 8.470430832121329,
"learning_rate": 4.5982906429398175e-09,
"loss": 0.7077,
"step": 6190
},
{
"epoch": 2.97,
"grad_norm": 7.86136077496912,
"learning_rate": 4.020576469108139e-09,
"loss": 0.7298,
"step": 6195
},
{
"epoch": 2.97,
"grad_norm": 10.157207292314927,
"learning_rate": 3.4816149899297157e-09,
"loss": 0.709,
"step": 6200
},
{
"epoch": 2.97,
"grad_norm": 7.7559615218098115,
"learning_rate": 2.981410386007633e-09,
"loss": 0.7024,
"step": 6205
},
{
"epoch": 2.97,
"grad_norm": 8.548129309435721,
"learning_rate": 2.519966537317675e-09,
"loss": 0.7286,
"step": 6210
},
{
"epoch": 2.98,
"grad_norm": 10.416164554879964,
"learning_rate": 2.097287023176131e-09,
"loss": 0.7211,
"step": 6215
},
{
"epoch": 2.98,
"grad_norm": 7.7996136982624575,
"learning_rate": 1.7133751222137007e-09,
"loss": 0.7121,
"step": 6220
},
{
"epoch": 2.98,
"grad_norm": 10.803620676059829,
"learning_rate": 1.3682338123488515e-09,
"loss": 0.7745,
"step": 6225
},
{
"epoch": 2.98,
"grad_norm": 7.971139906542856,
"learning_rate": 1.0618657707656133e-09,
"loss": 0.7101,
"step": 6230
},
{
"epoch": 2.98,
"grad_norm": 7.954667661411465,
"learning_rate": 7.942733738924846e-10,
"loss": 0.716,
"step": 6235
},
{
"epoch": 2.99,
"grad_norm": 9.867567603815916,
"learning_rate": 5.654586973835585e-10,
"loss": 0.6958,
"step": 6240
},
{
"epoch": 2.99,
"grad_norm": 6.933332473514785,
"learning_rate": 3.7542351610353465e-10,
"loss": 0.7204,
"step": 6245
},
{
"epoch": 2.99,
"grad_norm": 8.12503988079313,
"learning_rate": 2.2416930411217618e-10,
"loss": 0.7047,
"step": 6250
},
{
"epoch": 2.99,
"grad_norm": 7.777240822136092,
"learning_rate": 1.1169723465487281e-10,
"loss": 0.7169,
"step": 6255
},
{
"epoch": 3.0,
"grad_norm": 8.237367788526779,
"learning_rate": 3.8008180152093734e-11,
"loss": 0.7302,
"step": 6260
},
{
"epoch": 3.0,
"grad_norm": 8.043768982649194,
"learning_rate": 3.1027121938365103e-12,
"loss": 0.7033,
"step": 6265
},
{
"epoch": 3.0,
"eval_loss": 1.080156922340393,
"eval_runtime": 23.9158,
"eval_samples_per_second": 28.517,
"eval_steps_per_second": 3.596,
"step": 6267
},
{
"epoch": 3.0,
"step": 6267,
"total_flos": 215735838179328.0,
"train_loss": 1.135861443868375,
"train_runtime": 24908.5749,
"train_samples_per_second": 4.025,
"train_steps_per_second": 0.252
}
],
"logging_steps": 5,
"max_steps": 6267,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 215735838179328.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}