codi_finetune_2nd / checkpoint-9000 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.578466581503531,
"eval_steps": 500,
"global_step": 9000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0006427406461150345,
"grad_norm": 0.458984375,
"learning_rate": 1e-05,
"loss": 3.0419,
"step": 10
},
{
"epoch": 0.001285481292230069,
"grad_norm": 8.8125,
"learning_rate": 2e-05,
"loss": 3.0641,
"step": 20
},
{
"epoch": 0.0019282219383451036,
"grad_norm": 0.62109375,
"learning_rate": 1.9999950454033063e-05,
"loss": 3.2405,
"step": 30
},
{
"epoch": 0.002570962584460138,
"grad_norm": 0.5625,
"learning_rate": 1.9999801816623205e-05,
"loss": 2.8447,
"step": 40
},
{
"epoch": 0.0032137032305751727,
"grad_norm": 0.60546875,
"learning_rate": 1.9999554089243305e-05,
"loss": 2.9348,
"step": 50
},
{
"epoch": 0.003856443876690207,
"grad_norm": 0.69921875,
"learning_rate": 1.9999207274348143e-05,
"loss": 2.966,
"step": 60
},
{
"epoch": 0.004499184522805242,
"grad_norm": 2.9375,
"learning_rate": 1.9998761375374376e-05,
"loss": 2.8849,
"step": 70
},
{
"epoch": 0.005141925168920276,
"grad_norm": 0.953125,
"learning_rate": 1.9998216396740497e-05,
"loss": 2.8674,
"step": 80
},
{
"epoch": 0.0057846658150353105,
"grad_norm": 0.96484375,
"learning_rate": 1.9997572343846814e-05,
"loss": 2.789,
"step": 90
},
{
"epoch": 0.006427406461150345,
"grad_norm": 0.63671875,
"learning_rate": 1.9996829223075363e-05,
"loss": 2.8306,
"step": 100
},
{
"epoch": 0.0070701471072653795,
"grad_norm": 0.91796875,
"learning_rate": 1.9995987041789876e-05,
"loss": 2.5481,
"step": 110
},
{
"epoch": 0.007712887753380414,
"grad_norm": 0.84765625,
"learning_rate": 1.999504580833569e-05,
"loss": 2.6249,
"step": 120
},
{
"epoch": 0.00835562839949545,
"grad_norm": 0.54296875,
"learning_rate": 1.9994005532039665e-05,
"loss": 2.6078,
"step": 130
},
{
"epoch": 0.008998369045610483,
"grad_norm": 1.328125,
"learning_rate": 1.9992866223210105e-05,
"loss": 2.4607,
"step": 140
},
{
"epoch": 0.009641109691725517,
"grad_norm": 1.0703125,
"learning_rate": 1.999162789313664e-05,
"loss": 2.3672,
"step": 150
},
{
"epoch": 0.010283850337840551,
"grad_norm": 0.796875,
"learning_rate": 1.9990290554090123e-05,
"loss": 2.4135,
"step": 160
},
{
"epoch": 0.010926590983955587,
"grad_norm": 1.1484375,
"learning_rate": 1.9988854219322507e-05,
"loss": 2.5179,
"step": 170
},
{
"epoch": 0.011569331630070621,
"grad_norm": 0.55078125,
"learning_rate": 1.9987318903066704e-05,
"loss": 2.3391,
"step": 180
},
{
"epoch": 0.012212072276185655,
"grad_norm": 1.171875,
"learning_rate": 1.9985684620536466e-05,
"loss": 2.4168,
"step": 190
},
{
"epoch": 0.01285481292230069,
"grad_norm": 1.4609375,
"learning_rate": 1.9983951387926216e-05,
"loss": 2.2706,
"step": 200
},
{
"epoch": 0.013497553568415725,
"grad_norm": 0.68359375,
"learning_rate": 1.998211922241088e-05,
"loss": 2.2684,
"step": 210
},
{
"epoch": 0.014140294214530759,
"grad_norm": 1.1640625,
"learning_rate": 1.9980188142145755e-05,
"loss": 2.2022,
"step": 220
},
{
"epoch": 0.014783034860645793,
"grad_norm": 0.52734375,
"learning_rate": 1.997815816626628e-05,
"loss": 2.2821,
"step": 230
},
{
"epoch": 0.015425775506760829,
"grad_norm": 1.359375,
"learning_rate": 1.9976029314887882e-05,
"loss": 2.1055,
"step": 240
},
{
"epoch": 0.016068516152875863,
"grad_norm": 0.890625,
"learning_rate": 1.9973801609105757e-05,
"loss": 2.1481,
"step": 250
},
{
"epoch": 0.0167112567989909,
"grad_norm": 1.2265625,
"learning_rate": 1.9971475070994675e-05,
"loss": 2.1277,
"step": 260
},
{
"epoch": 0.01735399744510593,
"grad_norm": 0.6796875,
"learning_rate": 1.9969049723608753e-05,
"loss": 2.1143,
"step": 270
},
{
"epoch": 0.017996738091220967,
"grad_norm": 0.828125,
"learning_rate": 1.9966525590981228e-05,
"loss": 2.1824,
"step": 280
},
{
"epoch": 0.018639478737336002,
"grad_norm": 1.6171875,
"learning_rate": 1.9963902698124212e-05,
"loss": 2.1849,
"step": 290
},
{
"epoch": 0.019282219383451035,
"grad_norm": 0.96875,
"learning_rate": 1.996118107102847e-05,
"loss": 2.1996,
"step": 300
},
{
"epoch": 0.01992496002956607,
"grad_norm": 1.1171875,
"learning_rate": 1.9958360736663117e-05,
"loss": 2.1914,
"step": 310
},
{
"epoch": 0.020567700675681103,
"grad_norm": 0.62109375,
"learning_rate": 1.99554417229754e-05,
"loss": 2.0519,
"step": 320
},
{
"epoch": 0.02121044132179614,
"grad_norm": 0.890625,
"learning_rate": 1.995242405889039e-05,
"loss": 2.0804,
"step": 330
},
{
"epoch": 0.021853181967911174,
"grad_norm": 0.71875,
"learning_rate": 1.99493077743107e-05,
"loss": 2.0424,
"step": 340
},
{
"epoch": 0.022495922614026206,
"grad_norm": 1.4296875,
"learning_rate": 1.99460929001162e-05,
"loss": 2.1151,
"step": 350
},
{
"epoch": 0.023138663260141242,
"grad_norm": 0.7421875,
"learning_rate": 1.9942779468163696e-05,
"loss": 1.8499,
"step": 360
},
{
"epoch": 0.023781403906256278,
"grad_norm": 0.8671875,
"learning_rate": 1.9939367511286635e-05,
"loss": 2.1271,
"step": 370
},
{
"epoch": 0.02442414455237131,
"grad_norm": 0.75,
"learning_rate": 1.993585706329475e-05,
"loss": 2.0739,
"step": 380
},
{
"epoch": 0.025066885198486346,
"grad_norm": 1.3671875,
"learning_rate": 1.9932248158973746e-05,
"loss": 2.0379,
"step": 390
},
{
"epoch": 0.02570962584460138,
"grad_norm": 0.78515625,
"learning_rate": 1.992854083408496e-05,
"loss": 1.9566,
"step": 400
},
{
"epoch": 0.026352366490716414,
"grad_norm": 0.859375,
"learning_rate": 1.992473512536499e-05,
"loss": 2.0377,
"step": 410
},
{
"epoch": 0.02699510713683145,
"grad_norm": 0.81640625,
"learning_rate": 1.992083107052534e-05,
"loss": 1.9839,
"step": 420
},
{
"epoch": 0.027637847782946486,
"grad_norm": 0.80859375,
"learning_rate": 1.9916828708252046e-05,
"loss": 1.9477,
"step": 430
},
{
"epoch": 0.028280588429061518,
"grad_norm": 0.703125,
"learning_rate": 1.9912728078205285e-05,
"loss": 2.0033,
"step": 440
},
{
"epoch": 0.028923329075176554,
"grad_norm": 1.03125,
"learning_rate": 1.9908529221018994e-05,
"loss": 1.9196,
"step": 450
},
{
"epoch": 0.029566069721291586,
"grad_norm": 1.0546875,
"learning_rate": 1.9904232178300465e-05,
"loss": 1.9712,
"step": 460
},
{
"epoch": 0.03020881036740662,
"grad_norm": 1.1875,
"learning_rate": 1.9899836992629922e-05,
"loss": 1.9251,
"step": 470
},
{
"epoch": 0.030851551013521657,
"grad_norm": 0.71484375,
"learning_rate": 1.989534370756011e-05,
"loss": 1.9695,
"step": 480
},
{
"epoch": 0.03149429165963669,
"grad_norm": 0.9765625,
"learning_rate": 1.989075236761586e-05,
"loss": 1.9208,
"step": 490
},
{
"epoch": 0.032137032305751725,
"grad_norm": 0.74609375,
"learning_rate": 1.988606301829365e-05,
"loss": 1.9474,
"step": 500
},
{
"epoch": 0.03277977295186676,
"grad_norm": 0.6796875,
"learning_rate": 1.9881275706061146e-05,
"loss": 1.8636,
"step": 510
},
{
"epoch": 0.0334225135979818,
"grad_norm": 0.9453125,
"learning_rate": 1.987639047835675e-05,
"loss": 1.9595,
"step": 520
},
{
"epoch": 0.03406525424409683,
"grad_norm": 0.9921875,
"learning_rate": 1.987140738358913e-05,
"loss": 1.8648,
"step": 530
},
{
"epoch": 0.03470799489021186,
"grad_norm": 1.453125,
"learning_rate": 1.9866326471136738e-05,
"loss": 1.9641,
"step": 540
},
{
"epoch": 0.0353507355363269,
"grad_norm": 1.1328125,
"learning_rate": 1.986114779134731e-05,
"loss": 1.9649,
"step": 550
},
{
"epoch": 0.03599347618244193,
"grad_norm": 0.921875,
"learning_rate": 1.9855871395537395e-05,
"loss": 1.9086,
"step": 560
},
{
"epoch": 0.036636216828556965,
"grad_norm": 1.6875,
"learning_rate": 1.9850497335991815e-05,
"loss": 1.8802,
"step": 570
},
{
"epoch": 0.037278957474672005,
"grad_norm": 0.78125,
"learning_rate": 1.984502566596317e-05,
"loss": 1.8694,
"step": 580
},
{
"epoch": 0.03792169812078704,
"grad_norm": 0.984375,
"learning_rate": 1.9839456439671288e-05,
"loss": 1.9474,
"step": 590
},
{
"epoch": 0.03856443876690207,
"grad_norm": 0.87890625,
"learning_rate": 1.9833789712302714e-05,
"loss": 1.8633,
"step": 600
},
{
"epoch": 0.03920717941301711,
"grad_norm": 0.8515625,
"learning_rate": 1.9828025540010144e-05,
"loss": 1.9034,
"step": 610
},
{
"epoch": 0.03984992005913214,
"grad_norm": 0.77734375,
"learning_rate": 1.9822163979911878e-05,
"loss": 1.9021,
"step": 620
},
{
"epoch": 0.04049266070524717,
"grad_norm": 1.2421875,
"learning_rate": 1.981620509009125e-05,
"loss": 1.8945,
"step": 630
},
{
"epoch": 0.041135401351362205,
"grad_norm": 0.87109375,
"learning_rate": 1.9810148929596047e-05,
"loss": 1.9007,
"step": 640
},
{
"epoch": 0.041778141997477244,
"grad_norm": 0.85546875,
"learning_rate": 1.9803995558437938e-05,
"loss": 1.8548,
"step": 650
},
{
"epoch": 0.04242088264359228,
"grad_norm": 0.87890625,
"learning_rate": 1.9797745037591866e-05,
"loss": 1.952,
"step": 660
},
{
"epoch": 0.04306362328970731,
"grad_norm": 1.265625,
"learning_rate": 1.979139742899545e-05,
"loss": 1.8855,
"step": 670
},
{
"epoch": 0.04370636393582235,
"grad_norm": 0.7734375,
"learning_rate": 1.9784952795548374e-05,
"loss": 1.8523,
"step": 680
},
{
"epoch": 0.04434910458193738,
"grad_norm": 0.99609375,
"learning_rate": 1.9778411201111755e-05,
"loss": 1.808,
"step": 690
},
{
"epoch": 0.04499184522805241,
"grad_norm": 1.4296875,
"learning_rate": 1.9771772710507516e-05,
"loss": 1.8894,
"step": 700
},
{
"epoch": 0.04563458587416745,
"grad_norm": 1.0390625,
"learning_rate": 1.976503738951775e-05,
"loss": 1.8869,
"step": 710
},
{
"epoch": 0.046277326520282484,
"grad_norm": 1.1640625,
"learning_rate": 1.9758205304884048e-05,
"loss": 1.9048,
"step": 720
},
{
"epoch": 0.04692006716639752,
"grad_norm": 1.015625,
"learning_rate": 1.9751276524306865e-05,
"loss": 1.848,
"step": 730
},
{
"epoch": 0.047562807812512556,
"grad_norm": 1.0546875,
"learning_rate": 1.974425111644482e-05,
"loss": 1.8434,
"step": 740
},
{
"epoch": 0.04820554845862759,
"grad_norm": 0.8828125,
"learning_rate": 1.9737129150914045e-05,
"loss": 1.9363,
"step": 750
},
{
"epoch": 0.04884828910474262,
"grad_norm": 1.109375,
"learning_rate": 1.972991069828747e-05,
"loss": 1.8212,
"step": 760
},
{
"epoch": 0.04949102975085766,
"grad_norm": 1.0390625,
"learning_rate": 1.9722595830094138e-05,
"loss": 1.8591,
"step": 770
},
{
"epoch": 0.05013377039697269,
"grad_norm": 1.0,
"learning_rate": 1.9715184618818493e-05,
"loss": 1.9277,
"step": 780
},
{
"epoch": 0.050776511043087724,
"grad_norm": 0.921875,
"learning_rate": 1.9707677137899662e-05,
"loss": 1.8552,
"step": 790
},
{
"epoch": 0.05141925168920276,
"grad_norm": 0.89453125,
"learning_rate": 1.9700073461730725e-05,
"loss": 1.8265,
"step": 800
},
{
"epoch": 0.052061992335317796,
"grad_norm": 1.3125,
"learning_rate": 1.969237366565798e-05,
"loss": 1.7932,
"step": 810
},
{
"epoch": 0.05270473298143283,
"grad_norm": 0.76171875,
"learning_rate": 1.9684577825980192e-05,
"loss": 1.8122,
"step": 820
},
{
"epoch": 0.05334747362754787,
"grad_norm": 0.9296875,
"learning_rate": 1.9676686019947852e-05,
"loss": 1.8671,
"step": 830
},
{
"epoch": 0.0539902142736629,
"grad_norm": 0.9765625,
"learning_rate": 1.9668698325762378e-05,
"loss": 1.884,
"step": 840
},
{
"epoch": 0.05463295491977793,
"grad_norm": 1.0234375,
"learning_rate": 1.9660614822575394e-05,
"loss": 1.9472,
"step": 850
},
{
"epoch": 0.05527569556589297,
"grad_norm": 1.8984375,
"learning_rate": 1.9652435590487878e-05,
"loss": 1.7799,
"step": 860
},
{
"epoch": 0.055918436212008,
"grad_norm": 0.8125,
"learning_rate": 1.964416071054944e-05,
"loss": 1.8548,
"step": 870
},
{
"epoch": 0.056561176858123036,
"grad_norm": 1.15625,
"learning_rate": 1.963579026475745e-05,
"loss": 1.8213,
"step": 880
},
{
"epoch": 0.05720391750423807,
"grad_norm": 1.0078125,
"learning_rate": 1.962732433605629e-05,
"loss": 1.8675,
"step": 890
},
{
"epoch": 0.05784665815035311,
"grad_norm": 1.0625,
"learning_rate": 1.961876300833647e-05,
"loss": 1.8382,
"step": 900
},
{
"epoch": 0.05848939879646814,
"grad_norm": 1.3359375,
"learning_rate": 1.9610106366433846e-05,
"loss": 1.8516,
"step": 910
},
{
"epoch": 0.05913213944258317,
"grad_norm": 1.1796875,
"learning_rate": 1.9601354496128765e-05,
"loss": 1.8137,
"step": 920
},
{
"epoch": 0.05977488008869821,
"grad_norm": 0.8046875,
"learning_rate": 1.9592507484145193e-05,
"loss": 1.9144,
"step": 930
},
{
"epoch": 0.06041762073481324,
"grad_norm": 0.796875,
"learning_rate": 1.9583565418149887e-05,
"loss": 1.8284,
"step": 940
},
{
"epoch": 0.061060361380928276,
"grad_norm": 0.90234375,
"learning_rate": 1.9574528386751507e-05,
"loss": 1.7851,
"step": 950
},
{
"epoch": 0.061703102027043315,
"grad_norm": 0.8828125,
"learning_rate": 1.9565396479499744e-05,
"loss": 1.769,
"step": 960
},
{
"epoch": 0.06234584267315835,
"grad_norm": 0.921875,
"learning_rate": 1.9556169786884436e-05,
"loss": 1.8202,
"step": 970
},
{
"epoch": 0.06298858331927339,
"grad_norm": 1.015625,
"learning_rate": 1.9546848400334658e-05,
"loss": 1.8815,
"step": 980
},
{
"epoch": 0.06363132396538841,
"grad_norm": 0.82421875,
"learning_rate": 1.953743241221784e-05,
"loss": 1.866,
"step": 990
},
{
"epoch": 0.06427406461150345,
"grad_norm": 0.87109375,
"learning_rate": 1.9527921915838827e-05,
"loss": 1.8496,
"step": 1000
},
{
"epoch": 0.06491680525761849,
"grad_norm": 1.6015625,
"learning_rate": 1.9518317005438964e-05,
"loss": 1.7597,
"step": 1010
},
{
"epoch": 0.06555954590373352,
"grad_norm": 1.0625,
"learning_rate": 1.9508617776195167e-05,
"loss": 1.8705,
"step": 1020
},
{
"epoch": 0.06620228654984855,
"grad_norm": 0.90234375,
"learning_rate": 1.9498824324218973e-05,
"loss": 1.787,
"step": 1030
},
{
"epoch": 0.0668450271959636,
"grad_norm": 1.1328125,
"learning_rate": 1.9488936746555593e-05,
"loss": 1.7781,
"step": 1040
},
{
"epoch": 0.06748776784207862,
"grad_norm": 0.8046875,
"learning_rate": 1.9478955141182948e-05,
"loss": 1.7506,
"step": 1050
},
{
"epoch": 0.06813050848819366,
"grad_norm": 0.94921875,
"learning_rate": 1.946887960701069e-05,
"loss": 1.8555,
"step": 1060
},
{
"epoch": 0.0687732491343087,
"grad_norm": 0.8515625,
"learning_rate": 1.9458710243879242e-05,
"loss": 1.7989,
"step": 1070
},
{
"epoch": 0.06941598978042372,
"grad_norm": 1.109375,
"learning_rate": 1.944844715255879e-05,
"loss": 1.7939,
"step": 1080
},
{
"epoch": 0.07005873042653876,
"grad_norm": 1.046875,
"learning_rate": 1.9438090434748285e-05,
"loss": 1.7889,
"step": 1090
},
{
"epoch": 0.0707014710726538,
"grad_norm": 0.90625,
"learning_rate": 1.942764019307445e-05,
"loss": 1.7511,
"step": 1100
},
{
"epoch": 0.07134421171876883,
"grad_norm": 1.1875,
"learning_rate": 1.9417096531090752e-05,
"loss": 1.8022,
"step": 1110
},
{
"epoch": 0.07198695236488387,
"grad_norm": 1.1640625,
"learning_rate": 1.940645955327637e-05,
"loss": 1.8737,
"step": 1120
},
{
"epoch": 0.0726296930109989,
"grad_norm": 0.98046875,
"learning_rate": 1.9395729365035185e-05,
"loss": 1.764,
"step": 1130
},
{
"epoch": 0.07327243365711393,
"grad_norm": 1.6171875,
"learning_rate": 1.9384906072694703e-05,
"loss": 1.8344,
"step": 1140
},
{
"epoch": 0.07391517430322897,
"grad_norm": 1.1328125,
"learning_rate": 1.9373989783505017e-05,
"loss": 1.8173,
"step": 1150
},
{
"epoch": 0.07455791494934401,
"grad_norm": 0.9375,
"learning_rate": 1.936298060563775e-05,
"loss": 1.8271,
"step": 1160
},
{
"epoch": 0.07520065559545903,
"grad_norm": 0.83203125,
"learning_rate": 1.9351878648184973e-05,
"loss": 1.8369,
"step": 1170
},
{
"epoch": 0.07584339624157407,
"grad_norm": 1.1328125,
"learning_rate": 1.9340684021158133e-05,
"loss": 1.8255,
"step": 1180
},
{
"epoch": 0.07648613688768911,
"grad_norm": 1.125,
"learning_rate": 1.932939683548695e-05,
"loss": 1.7836,
"step": 1190
},
{
"epoch": 0.07712887753380414,
"grad_norm": 0.80078125,
"learning_rate": 1.9318017203018334e-05,
"loss": 1.7955,
"step": 1200
},
{
"epoch": 0.07777161817991918,
"grad_norm": 0.73828125,
"learning_rate": 1.9306545236515264e-05,
"loss": 1.748,
"step": 1210
},
{
"epoch": 0.07841435882603422,
"grad_norm": 1.5078125,
"learning_rate": 1.929498104965567e-05,
"loss": 1.7522,
"step": 1220
},
{
"epoch": 0.07905709947214924,
"grad_norm": 1.3828125,
"learning_rate": 1.928332475703132e-05,
"loss": 1.8157,
"step": 1230
},
{
"epoch": 0.07969984011826428,
"grad_norm": 0.83984375,
"learning_rate": 1.9271576474146667e-05,
"loss": 1.7609,
"step": 1240
},
{
"epoch": 0.0803425807643793,
"grad_norm": 0.84765625,
"learning_rate": 1.9259736317417723e-05,
"loss": 1.8088,
"step": 1250
},
{
"epoch": 0.08098532141049435,
"grad_norm": 1.40625,
"learning_rate": 1.9247804404170888e-05,
"loss": 1.8767,
"step": 1260
},
{
"epoch": 0.08162806205660939,
"grad_norm": 0.87890625,
"learning_rate": 1.9235780852641798e-05,
"loss": 1.7125,
"step": 1270
},
{
"epoch": 0.08227080270272441,
"grad_norm": 1.078125,
"learning_rate": 1.9223665781974154e-05,
"loss": 1.8067,
"step": 1280
},
{
"epoch": 0.08291354334883945,
"grad_norm": 0.890625,
"learning_rate": 1.921145931221853e-05,
"loss": 1.669,
"step": 1290
},
{
"epoch": 0.08355628399495449,
"grad_norm": 1.0,
"learning_rate": 1.9199161564331196e-05,
"loss": 1.7822,
"step": 1300
},
{
"epoch": 0.08419902464106951,
"grad_norm": 1.03125,
"learning_rate": 1.9186772660172916e-05,
"loss": 1.8901,
"step": 1310
},
{
"epoch": 0.08484176528718455,
"grad_norm": 0.75390625,
"learning_rate": 1.9174292722507735e-05,
"loss": 1.7516,
"step": 1320
},
{
"epoch": 0.08548450593329959,
"grad_norm": 1.1328125,
"learning_rate": 1.916172187500177e-05,
"loss": 1.8496,
"step": 1330
},
{
"epoch": 0.08612724657941462,
"grad_norm": 1.046875,
"learning_rate": 1.914906024222198e-05,
"loss": 1.8261,
"step": 1340
},
{
"epoch": 0.08676998722552966,
"grad_norm": 0.953125,
"learning_rate": 1.913630794963493e-05,
"loss": 1.7513,
"step": 1350
},
{
"epoch": 0.0874127278716447,
"grad_norm": 0.73046875,
"learning_rate": 1.9123465123605558e-05,
"loss": 1.6671,
"step": 1360
},
{
"epoch": 0.08805546851775972,
"grad_norm": 1.4921875,
"learning_rate": 1.9110531891395906e-05,
"loss": 1.8489,
"step": 1370
},
{
"epoch": 0.08869820916387476,
"grad_norm": 0.67578125,
"learning_rate": 1.9097508381163877e-05,
"loss": 1.7958,
"step": 1380
},
{
"epoch": 0.0893409498099898,
"grad_norm": 0.8359375,
"learning_rate": 1.9084394721961956e-05,
"loss": 1.7382,
"step": 1390
},
{
"epoch": 0.08998369045610483,
"grad_norm": 1.296875,
"learning_rate": 1.907119104373592e-05,
"loss": 1.8107,
"step": 1400
},
{
"epoch": 0.09062643110221986,
"grad_norm": 1.140625,
"learning_rate": 1.9057897477323572e-05,
"loss": 1.829,
"step": 1410
},
{
"epoch": 0.0912691717483349,
"grad_norm": 1.078125,
"learning_rate": 1.9044514154453434e-05,
"loss": 1.7685,
"step": 1420
},
{
"epoch": 0.09191191239444993,
"grad_norm": 0.890625,
"learning_rate": 1.903104120774344e-05,
"loss": 1.8001,
"step": 1430
},
{
"epoch": 0.09255465304056497,
"grad_norm": 1.078125,
"learning_rate": 1.9017478770699624e-05,
"loss": 1.7632,
"step": 1440
},
{
"epoch": 0.09319739368668001,
"grad_norm": 0.99609375,
"learning_rate": 1.90038269777148e-05,
"loss": 1.7482,
"step": 1450
},
{
"epoch": 0.09384013433279503,
"grad_norm": 1.421875,
"learning_rate": 1.899008596406722e-05,
"loss": 1.8016,
"step": 1460
},
{
"epoch": 0.09448287497891007,
"grad_norm": 0.96875,
"learning_rate": 1.897625586591925e-05,
"loss": 1.7998,
"step": 1470
},
{
"epoch": 0.09512561562502511,
"grad_norm": 0.796875,
"learning_rate": 1.896233682031601e-05,
"loss": 1.7887,
"step": 1480
},
{
"epoch": 0.09576835627114014,
"grad_norm": 1.0078125,
"learning_rate": 1.8948328965184004e-05,
"loss": 1.7226,
"step": 1490
},
{
"epoch": 0.09641109691725518,
"grad_norm": 1.09375,
"learning_rate": 1.8934232439329786e-05,
"loss": 1.7656,
"step": 1500
},
{
"epoch": 0.09705383756337022,
"grad_norm": 0.7734375,
"learning_rate": 1.8920047382438554e-05,
"loss": 1.7482,
"step": 1510
},
{
"epoch": 0.09769657820948524,
"grad_norm": 0.63671875,
"learning_rate": 1.890577393507278e-05,
"loss": 1.7171,
"step": 1520
},
{
"epoch": 0.09833931885560028,
"grad_norm": 0.8984375,
"learning_rate": 1.8891412238670814e-05,
"loss": 1.7527,
"step": 1530
},
{
"epoch": 0.09898205950171532,
"grad_norm": 0.9453125,
"learning_rate": 1.887696243554549e-05,
"loss": 1.7824,
"step": 1540
},
{
"epoch": 0.09962480014783034,
"grad_norm": 1.03125,
"learning_rate": 1.8862424668882687e-05,
"loss": 1.8317,
"step": 1550
},
{
"epoch": 0.10026754079394538,
"grad_norm": 0.81640625,
"learning_rate": 1.8847799082739957e-05,
"loss": 1.785,
"step": 1560
},
{
"epoch": 0.10091028144006042,
"grad_norm": 1.1171875,
"learning_rate": 1.8833085822045062e-05,
"loss": 1.7549,
"step": 1570
},
{
"epoch": 0.10155302208617545,
"grad_norm": 1.109375,
"learning_rate": 1.8818285032594537e-05,
"loss": 1.803,
"step": 1580
},
{
"epoch": 0.10219576273229049,
"grad_norm": 1.046875,
"learning_rate": 1.8803396861052284e-05,
"loss": 1.7646,
"step": 1590
},
{
"epoch": 0.10283850337840553,
"grad_norm": 1.1171875,
"learning_rate": 1.878842145494806e-05,
"loss": 1.7383,
"step": 1600
},
{
"epoch": 0.10348124402452055,
"grad_norm": 1.140625,
"learning_rate": 1.8773358962676066e-05,
"loss": 1.7851,
"step": 1610
},
{
"epoch": 0.10412398467063559,
"grad_norm": 1.015625,
"learning_rate": 1.8758209533493447e-05,
"loss": 1.8107,
"step": 1620
},
{
"epoch": 0.10476672531675063,
"grad_norm": 0.94140625,
"learning_rate": 1.874297331751883e-05,
"loss": 1.8237,
"step": 1630
},
{
"epoch": 0.10540946596286566,
"grad_norm": 1.046875,
"learning_rate": 1.8727650465730827e-05,
"loss": 1.8021,
"step": 1640
},
{
"epoch": 0.1060522066089807,
"grad_norm": 0.94140625,
"learning_rate": 1.8712241129966532e-05,
"loss": 1.7842,
"step": 1650
},
{
"epoch": 0.10669494725509573,
"grad_norm": 1.0625,
"learning_rate": 1.8696745462920046e-05,
"loss": 1.7979,
"step": 1660
},
{
"epoch": 0.10733768790121076,
"grad_norm": 1.234375,
"learning_rate": 1.868116361814092e-05,
"loss": 1.746,
"step": 1670
},
{
"epoch": 0.1079804285473258,
"grad_norm": 0.91015625,
"learning_rate": 1.8665495750032664e-05,
"loss": 1.8202,
"step": 1680
},
{
"epoch": 0.10862316919344084,
"grad_norm": 0.91796875,
"learning_rate": 1.8649742013851225e-05,
"loss": 1.7969,
"step": 1690
},
{
"epoch": 0.10926590983955586,
"grad_norm": 0.859375,
"learning_rate": 1.8633902565703415e-05,
"loss": 1.7795,
"step": 1700
},
{
"epoch": 0.1099086504856709,
"grad_norm": 1.5546875,
"learning_rate": 1.861797756254539e-05,
"loss": 1.7474,
"step": 1710
},
{
"epoch": 0.11055139113178594,
"grad_norm": 1.0078125,
"learning_rate": 1.8601967162181082e-05,
"loss": 1.8134,
"step": 1720
},
{
"epoch": 0.11119413177790097,
"grad_norm": 1.0,
"learning_rate": 1.8585871523260653e-05,
"loss": 1.7977,
"step": 1730
},
{
"epoch": 0.111836872424016,
"grad_norm": 0.7734375,
"learning_rate": 1.8569690805278894e-05,
"loss": 1.7256,
"step": 1740
},
{
"epoch": 0.11247961307013103,
"grad_norm": 0.8046875,
"learning_rate": 1.8553425168573667e-05,
"loss": 1.7866,
"step": 1750
},
{
"epoch": 0.11312235371624607,
"grad_norm": 1.0234375,
"learning_rate": 1.8537074774324318e-05,
"loss": 1.7567,
"step": 1760
},
{
"epoch": 0.11376509436236111,
"grad_norm": 1.328125,
"learning_rate": 1.8520639784550068e-05,
"loss": 1.769,
"step": 1770
},
{
"epoch": 0.11440783500847614,
"grad_norm": 1.125,
"learning_rate": 1.85041203621084e-05,
"loss": 1.7404,
"step": 1780
},
{
"epoch": 0.11505057565459118,
"grad_norm": 1.078125,
"learning_rate": 1.848751667069347e-05,
"loss": 1.785,
"step": 1790
},
{
"epoch": 0.11569331630070621,
"grad_norm": 1.7421875,
"learning_rate": 1.847082887483447e-05,
"loss": 1.6875,
"step": 1800
},
{
"epoch": 0.11633605694682124,
"grad_norm": 0.8828125,
"learning_rate": 1.8454057139893992e-05,
"loss": 1.7259,
"step": 1810
},
{
"epoch": 0.11697879759293628,
"grad_norm": 1.0078125,
"learning_rate": 1.84372016320664e-05,
"loss": 1.7666,
"step": 1820
},
{
"epoch": 0.11762153823905132,
"grad_norm": 1.0078125,
"learning_rate": 1.842026251837619e-05,
"loss": 1.7275,
"step": 1830
},
{
"epoch": 0.11826427888516634,
"grad_norm": 1.2265625,
"learning_rate": 1.8403239966676304e-05,
"loss": 1.8194,
"step": 1840
},
{
"epoch": 0.11890701953128138,
"grad_norm": 1.1328125,
"learning_rate": 1.8386134145646505e-05,
"loss": 1.7619,
"step": 1850
},
{
"epoch": 0.11954976017739642,
"grad_norm": 1.8828125,
"learning_rate": 1.836894522479168e-05,
"loss": 1.761,
"step": 1860
},
{
"epoch": 0.12019250082351145,
"grad_norm": 0.82421875,
"learning_rate": 1.835167337444017e-05,
"loss": 1.722,
"step": 1870
},
{
"epoch": 0.12083524146962649,
"grad_norm": 0.9375,
"learning_rate": 1.8334318765742078e-05,
"loss": 1.7103,
"step": 1880
},
{
"epoch": 0.12147798211574153,
"grad_norm": 1.5703125,
"learning_rate": 1.8316881570667583e-05,
"loss": 1.7891,
"step": 1890
},
{
"epoch": 0.12212072276185655,
"grad_norm": 0.77734375,
"learning_rate": 1.8299361962005218e-05,
"loss": 1.7342,
"step": 1900
},
{
"epoch": 0.12276346340797159,
"grad_norm": 1.125,
"learning_rate": 1.8281760113360177e-05,
"loss": 1.6901,
"step": 1910
},
{
"epoch": 0.12340620405408663,
"grad_norm": 1.203125,
"learning_rate": 1.8264076199152582e-05,
"loss": 1.7208,
"step": 1920
},
{
"epoch": 0.12404894470020165,
"grad_norm": 1.109375,
"learning_rate": 1.8246310394615753e-05,
"loss": 1.7616,
"step": 1930
},
{
"epoch": 0.1246916853463167,
"grad_norm": 0.71484375,
"learning_rate": 1.822846287579449e-05,
"loss": 1.7558,
"step": 1940
},
{
"epoch": 0.12533442599243172,
"grad_norm": 1.3125,
"learning_rate": 1.8210533819543304e-05,
"loss": 1.7703,
"step": 1950
},
{
"epoch": 0.12597716663854677,
"grad_norm": 0.83203125,
"learning_rate": 1.8192523403524685e-05,
"loss": 1.7272,
"step": 1960
},
{
"epoch": 0.1266199072846618,
"grad_norm": 0.71875,
"learning_rate": 1.8174431806207326e-05,
"loss": 1.7556,
"step": 1970
},
{
"epoch": 0.12726264793077682,
"grad_norm": 1.2890625,
"learning_rate": 1.815625920686436e-05,
"loss": 1.7382,
"step": 1980
},
{
"epoch": 0.12790538857689188,
"grad_norm": 0.93359375,
"learning_rate": 1.813800578557159e-05,
"loss": 1.754,
"step": 1990
},
{
"epoch": 0.1285481292230069,
"grad_norm": 0.98046875,
"learning_rate": 1.8119671723205708e-05,
"loss": 1.8341,
"step": 2000
},
{
"epoch": 0.12919086986912193,
"grad_norm": 0.921875,
"learning_rate": 1.8101257201442468e-05,
"loss": 1.8343,
"step": 2010
},
{
"epoch": 0.12983361051523698,
"grad_norm": 0.95703125,
"learning_rate": 1.8082762402754936e-05,
"loss": 1.7762,
"step": 2020
},
{
"epoch": 0.130476351161352,
"grad_norm": 0.8125,
"learning_rate": 1.8064187510411646e-05,
"loss": 1.7232,
"step": 2030
},
{
"epoch": 0.13111909180746703,
"grad_norm": 1.15625,
"learning_rate": 1.80455327084748e-05,
"loss": 1.7593,
"step": 2040
},
{
"epoch": 0.13176183245358208,
"grad_norm": 0.890625,
"learning_rate": 1.8026798181798434e-05,
"loss": 1.6904,
"step": 2050
},
{
"epoch": 0.1324045730996971,
"grad_norm": 1.0703125,
"learning_rate": 1.8007984116026604e-05,
"loss": 1.7421,
"step": 2060
},
{
"epoch": 0.13304731374581213,
"grad_norm": 1.078125,
"learning_rate": 1.7989090697591517e-05,
"loss": 1.7673,
"step": 2070
},
{
"epoch": 0.1336900543919272,
"grad_norm": 0.83984375,
"learning_rate": 1.7970118113711715e-05,
"loss": 1.7927,
"step": 2080
},
{
"epoch": 0.1343327950380422,
"grad_norm": 0.9375,
"learning_rate": 1.7951066552390204e-05,
"loss": 1.781,
"step": 2090
},
{
"epoch": 0.13497553568415724,
"grad_norm": 1.3203125,
"learning_rate": 1.7931936202412582e-05,
"loss": 1.6528,
"step": 2100
},
{
"epoch": 0.1356182763302723,
"grad_norm": 0.75390625,
"learning_rate": 1.791272725334519e-05,
"loss": 1.7846,
"step": 2110
},
{
"epoch": 0.13626101697638732,
"grad_norm": 0.9765625,
"learning_rate": 1.789343989553322e-05,
"loss": 1.7236,
"step": 2120
},
{
"epoch": 0.13690375762250234,
"grad_norm": 1.0546875,
"learning_rate": 1.787407432009883e-05,
"loss": 1.7879,
"step": 2130
},
{
"epoch": 0.1375464982686174,
"grad_norm": 0.87109375,
"learning_rate": 1.7854630718939254e-05,
"loss": 1.6429,
"step": 2140
},
{
"epoch": 0.13818923891473242,
"grad_norm": 1.4921875,
"learning_rate": 1.7835109284724886e-05,
"loss": 1.7904,
"step": 2150
},
{
"epoch": 0.13883197956084745,
"grad_norm": 0.828125,
"learning_rate": 1.7815510210897407e-05,
"loss": 1.7258,
"step": 2160
},
{
"epoch": 0.1394747202069625,
"grad_norm": 1.0703125,
"learning_rate": 1.779583369166782e-05,
"loss": 1.6941,
"step": 2170
},
{
"epoch": 0.14011746085307752,
"grad_norm": 0.9375,
"learning_rate": 1.7776079922014564e-05,
"loss": 1.7171,
"step": 2180
},
{
"epoch": 0.14076020149919255,
"grad_norm": 1.171875,
"learning_rate": 1.7756249097681563e-05,
"loss": 1.7273,
"step": 2190
},
{
"epoch": 0.1414029421453076,
"grad_norm": 1.1640625,
"learning_rate": 1.7736341415176286e-05,
"loss": 1.722,
"step": 2200
},
{
"epoch": 0.14204568279142263,
"grad_norm": 0.9140625,
"learning_rate": 1.7716357071767812e-05,
"loss": 1.711,
"step": 2210
},
{
"epoch": 0.14268842343753765,
"grad_norm": 0.9140625,
"learning_rate": 1.7696296265484863e-05,
"loss": 1.7142,
"step": 2220
},
{
"epoch": 0.1433311640836527,
"grad_norm": 0.96875,
"learning_rate": 1.7676159195113853e-05,
"loss": 1.7341,
"step": 2230
},
{
"epoch": 0.14397390472976773,
"grad_norm": 0.9921875,
"learning_rate": 1.7655946060196894e-05,
"loss": 1.7583,
"step": 2240
},
{
"epoch": 0.14461664537588276,
"grad_norm": 0.96875,
"learning_rate": 1.7635657061029863e-05,
"loss": 1.6996,
"step": 2250
},
{
"epoch": 0.1452593860219978,
"grad_norm": 0.91015625,
"learning_rate": 1.7615292398660372e-05,
"loss": 1.7818,
"step": 2260
},
{
"epoch": 0.14590212666811284,
"grad_norm": 1.546875,
"learning_rate": 1.7594852274885795e-05,
"loss": 1.7629,
"step": 2270
},
{
"epoch": 0.14654486731422786,
"grad_norm": 1.5625,
"learning_rate": 1.7574336892251277e-05,
"loss": 1.7498,
"step": 2280
},
{
"epoch": 0.14718760796034291,
"grad_norm": 1.3046875,
"learning_rate": 1.7553746454047705e-05,
"loss": 1.7655,
"step": 2290
},
{
"epoch": 0.14783034860645794,
"grad_norm": 1.5234375,
"learning_rate": 1.753308116430972e-05,
"loss": 1.7429,
"step": 2300
},
{
"epoch": 0.14847308925257297,
"grad_norm": 1.015625,
"learning_rate": 1.751234122781367e-05,
"loss": 1.7795,
"step": 2310
},
{
"epoch": 0.14911582989868802,
"grad_norm": 1.40625,
"learning_rate": 1.7491526850075593e-05,
"loss": 1.7375,
"step": 2320
},
{
"epoch": 0.14975857054480304,
"grad_norm": 0.88671875,
"learning_rate": 1.7470638237349195e-05,
"loss": 1.7663,
"step": 2330
},
{
"epoch": 0.15040131119091807,
"grad_norm": 0.85546875,
"learning_rate": 1.7449675596623765e-05,
"loss": 1.7934,
"step": 2340
},
{
"epoch": 0.15104405183703312,
"grad_norm": 1.015625,
"learning_rate": 1.742863913562218e-05,
"loss": 1.7493,
"step": 2350
},
{
"epoch": 0.15168679248314815,
"grad_norm": 1.109375,
"learning_rate": 1.7407529062798784e-05,
"loss": 1.7814,
"step": 2360
},
{
"epoch": 0.15232953312926317,
"grad_norm": 1.0,
"learning_rate": 1.7386345587337382e-05,
"loss": 1.7127,
"step": 2370
},
{
"epoch": 0.15297227377537823,
"grad_norm": 1.125,
"learning_rate": 1.7365088919149124e-05,
"loss": 1.7743,
"step": 2380
},
{
"epoch": 0.15361501442149325,
"grad_norm": 1.1796875,
"learning_rate": 1.734375926887045e-05,
"loss": 1.7204,
"step": 2390
},
{
"epoch": 0.15425775506760828,
"grad_norm": 2.25,
"learning_rate": 1.7322356847860985e-05,
"loss": 1.8416,
"step": 2400
},
{
"epoch": 0.15490049571372333,
"grad_norm": 1.046875,
"learning_rate": 1.7300881868201456e-05,
"loss": 1.7365,
"step": 2410
},
{
"epoch": 0.15554323635983835,
"grad_norm": 1.140625,
"learning_rate": 1.7279334542691596e-05,
"loss": 1.7371,
"step": 2420
},
{
"epoch": 0.15618597700595338,
"grad_norm": 0.921875,
"learning_rate": 1.725771508484802e-05,
"loss": 1.7034,
"step": 2430
},
{
"epoch": 0.15682871765206843,
"grad_norm": 1.0234375,
"learning_rate": 1.7236023708902113e-05,
"loss": 1.7502,
"step": 2440
},
{
"epoch": 0.15747145829818346,
"grad_norm": 0.6640625,
"learning_rate": 1.7214260629797913e-05,
"loss": 1.681,
"step": 2450
},
{
"epoch": 0.15811419894429848,
"grad_norm": 0.953125,
"learning_rate": 1.7192426063189982e-05,
"loss": 1.6597,
"step": 2460
},
{
"epoch": 0.1587569395904135,
"grad_norm": 1.0234375,
"learning_rate": 1.7170520225441264e-05,
"loss": 1.7344,
"step": 2470
},
{
"epoch": 0.15939968023652856,
"grad_norm": 0.765625,
"learning_rate": 1.714854333362094e-05,
"loss": 1.7025,
"step": 2480
},
{
"epoch": 0.1600424208826436,
"grad_norm": 1.6875,
"learning_rate": 1.712649560550228e-05,
"loss": 1.7985,
"step": 2490
},
{
"epoch": 0.1606851615287586,
"grad_norm": 1.03125,
"learning_rate": 1.710437725956049e-05,
"loss": 1.7754,
"step": 2500
},
{
"epoch": 0.16132790217487367,
"grad_norm": 1.15625,
"learning_rate": 1.7082188514970534e-05,
"loss": 1.7558,
"step": 2510
},
{
"epoch": 0.1619706428209887,
"grad_norm": 1.140625,
"learning_rate": 1.705992959160497e-05,
"loss": 1.6337,
"step": 2520
},
{
"epoch": 0.16261338346710372,
"grad_norm": 1.0703125,
"learning_rate": 1.7037600710031783e-05,
"loss": 1.7362,
"step": 2530
},
{
"epoch": 0.16325612411321877,
"grad_norm": 0.796875,
"learning_rate": 1.701520209151217e-05,
"loss": 1.7269,
"step": 2540
},
{
"epoch": 0.1638988647593338,
"grad_norm": 0.859375,
"learning_rate": 1.699273395799838e-05,
"loss": 1.8314,
"step": 2550
},
{
"epoch": 0.16454160540544882,
"grad_norm": 0.8671875,
"learning_rate": 1.697019653213149e-05,
"loss": 1.7012,
"step": 2560
},
{
"epoch": 0.16518434605156387,
"grad_norm": 0.984375,
"learning_rate": 1.694759003723921e-05,
"loss": 1.7954,
"step": 2570
},
{
"epoch": 0.1658270866976789,
"grad_norm": 0.9296875,
"learning_rate": 1.692491469733367e-05,
"loss": 1.6907,
"step": 2580
},
{
"epoch": 0.16646982734379392,
"grad_norm": 1.0546875,
"learning_rate": 1.6902170737109203e-05,
"loss": 1.6991,
"step": 2590
},
{
"epoch": 0.16711256798990898,
"grad_norm": 1.8203125,
"learning_rate": 1.6879358381940103e-05,
"loss": 1.715,
"step": 2600
},
{
"epoch": 0.167755308636024,
"grad_norm": 1.0078125,
"learning_rate": 1.6856477857878415e-05,
"loss": 1.7482,
"step": 2610
},
{
"epoch": 0.16839804928213903,
"grad_norm": 1.046875,
"learning_rate": 1.683352939165167e-05,
"loss": 1.7095,
"step": 2620
},
{
"epoch": 0.16904078992825408,
"grad_norm": 0.8515625,
"learning_rate": 1.6810513210660665e-05,
"loss": 1.6142,
"step": 2630
},
{
"epoch": 0.1696835305743691,
"grad_norm": 1.1171875,
"learning_rate": 1.678742954297718e-05,
"loss": 1.7428,
"step": 2640
},
{
"epoch": 0.17032627122048413,
"grad_norm": 1.0703125,
"learning_rate": 1.676427861734175e-05,
"loss": 1.7764,
"step": 2650
},
{
"epoch": 0.17096901186659919,
"grad_norm": 1.2421875,
"learning_rate": 1.674106066316137e-05,
"loss": 1.7345,
"step": 2660
},
{
"epoch": 0.1716117525127142,
"grad_norm": 1.0546875,
"learning_rate": 1.671777591050724e-05,
"loss": 1.7397,
"step": 2670
},
{
"epoch": 0.17225449315882924,
"grad_norm": 1.109375,
"learning_rate": 1.669442459011248e-05,
"loss": 1.7448,
"step": 2680
},
{
"epoch": 0.1728972338049443,
"grad_norm": 0.984375,
"learning_rate": 1.667100693336983e-05,
"loss": 1.747,
"step": 2690
},
{
"epoch": 0.17353997445105931,
"grad_norm": 1.0546875,
"learning_rate": 1.664752317232939e-05,
"loss": 1.7379,
"step": 2700
},
{
"epoch": 0.17418271509717434,
"grad_norm": 0.984375,
"learning_rate": 1.6623973539696283e-05,
"loss": 1.8181,
"step": 2710
},
{
"epoch": 0.1748254557432894,
"grad_norm": 1.1015625,
"learning_rate": 1.6600358268828376e-05,
"loss": 1.6882,
"step": 2720
},
{
"epoch": 0.17546819638940442,
"grad_norm": 0.96484375,
"learning_rate": 1.6576677593733955e-05,
"loss": 1.7557,
"step": 2730
},
{
"epoch": 0.17611093703551944,
"grad_norm": 1.125,
"learning_rate": 1.6552931749069403e-05,
"loss": 1.7477,
"step": 2740
},
{
"epoch": 0.1767536776816345,
"grad_norm": 0.77734375,
"learning_rate": 1.6529120970136893e-05,
"loss": 1.7788,
"step": 2750
},
{
"epoch": 0.17739641832774952,
"grad_norm": 0.8046875,
"learning_rate": 1.6505245492882044e-05,
"loss": 1.7058,
"step": 2760
},
{
"epoch": 0.17803915897386455,
"grad_norm": 1.1171875,
"learning_rate": 1.6481305553891563e-05,
"loss": 1.8183,
"step": 2770
},
{
"epoch": 0.1786818996199796,
"grad_norm": 0.93359375,
"learning_rate": 1.6457301390390945e-05,
"loss": 1.7238,
"step": 2780
},
{
"epoch": 0.17932464026609463,
"grad_norm": 0.99609375,
"learning_rate": 1.6433233240242085e-05,
"loss": 1.7097,
"step": 2790
},
{
"epoch": 0.17996738091220965,
"grad_norm": 1.015625,
"learning_rate": 1.6409101341940937e-05,
"loss": 1.7503,
"step": 2800
},
{
"epoch": 0.1806101215583247,
"grad_norm": 1.0859375,
"learning_rate": 1.638490593461515e-05,
"loss": 1.7222,
"step": 2810
},
{
"epoch": 0.18125286220443973,
"grad_norm": 0.984375,
"learning_rate": 1.6360647258021698e-05,
"loss": 1.67,
"step": 2820
},
{
"epoch": 0.18189560285055476,
"grad_norm": 1.078125,
"learning_rate": 1.633632555254449e-05,
"loss": 1.741,
"step": 2830
},
{
"epoch": 0.1825383434966698,
"grad_norm": 0.74609375,
"learning_rate": 1.6311941059192015e-05,
"loss": 1.698,
"step": 2840
},
{
"epoch": 0.18318108414278483,
"grad_norm": 1.0234375,
"learning_rate": 1.6287494019594928e-05,
"loss": 1.7521,
"step": 2850
},
{
"epoch": 0.18382382478889986,
"grad_norm": 1.3203125,
"learning_rate": 1.626298467600368e-05,
"loss": 1.7319,
"step": 2860
},
{
"epoch": 0.1844665654350149,
"grad_norm": 1.1640625,
"learning_rate": 1.623841327128609e-05,
"loss": 1.7694,
"step": 2870
},
{
"epoch": 0.18510930608112994,
"grad_norm": 1.203125,
"learning_rate": 1.6213780048924964e-05,
"loss": 1.774,
"step": 2880
},
{
"epoch": 0.18575204672724496,
"grad_norm": 0.8203125,
"learning_rate": 1.6189085253015656e-05,
"loss": 1.7477,
"step": 2890
},
{
"epoch": 0.18639478737336002,
"grad_norm": 0.85546875,
"learning_rate": 1.616432912826369e-05,
"loss": 1.6605,
"step": 2900
},
{
"epoch": 0.18703752801947504,
"grad_norm": 1.046875,
"learning_rate": 1.613951191998228e-05,
"loss": 1.7154,
"step": 2910
},
{
"epoch": 0.18768026866559007,
"grad_norm": 0.9453125,
"learning_rate": 1.6114633874089955e-05,
"loss": 1.7665,
"step": 2920
},
{
"epoch": 0.18832300931170512,
"grad_norm": 1.2578125,
"learning_rate": 1.608969523710807e-05,
"loss": 1.6787,
"step": 2930
},
{
"epoch": 0.18896574995782014,
"grad_norm": 1.1484375,
"learning_rate": 1.6064696256158408e-05,
"loss": 1.6682,
"step": 2940
},
{
"epoch": 0.18960849060393517,
"grad_norm": 0.99609375,
"learning_rate": 1.6039637178960704e-05,
"loss": 1.7495,
"step": 2950
},
{
"epoch": 0.19025123125005022,
"grad_norm": 1.0546875,
"learning_rate": 1.6014518253830204e-05,
"loss": 1.6987,
"step": 2960
},
{
"epoch": 0.19089397189616525,
"grad_norm": 1.1640625,
"learning_rate": 1.5989339729675187e-05,
"loss": 1.6799,
"step": 2970
},
{
"epoch": 0.19153671254228027,
"grad_norm": 1.1640625,
"learning_rate": 1.5964101855994527e-05,
"loss": 1.7866,
"step": 2980
},
{
"epoch": 0.19217945318839533,
"grad_norm": 1.171875,
"learning_rate": 1.593880488287519e-05,
"loss": 1.8115,
"step": 2990
},
{
"epoch": 0.19282219383451035,
"grad_norm": 0.94921875,
"learning_rate": 1.5913449060989777e-05,
"loss": 1.7679,
"step": 3000
},
{
"epoch": 0.19346493448062538,
"grad_norm": 1.0390625,
"learning_rate": 1.5888034641594024e-05,
"loss": 1.7431,
"step": 3010
},
{
"epoch": 0.19410767512674043,
"grad_norm": 0.96875,
"learning_rate": 1.5862561876524337e-05,
"loss": 1.7064,
"step": 3020
},
{
"epoch": 0.19475041577285546,
"grad_norm": 0.84375,
"learning_rate": 1.5837031018195266e-05,
"loss": 1.7073,
"step": 3030
},
{
"epoch": 0.19539315641897048,
"grad_norm": 1.0703125,
"learning_rate": 1.5811442319597028e-05,
"loss": 1.6424,
"step": 3040
},
{
"epoch": 0.19603589706508553,
"grad_norm": 1.3203125,
"learning_rate": 1.578579603429298e-05,
"loss": 1.7724,
"step": 3050
},
{
"epoch": 0.19667863771120056,
"grad_norm": 1.0546875,
"learning_rate": 1.5760092416417124e-05,
"loss": 1.7448,
"step": 3060
},
{
"epoch": 0.19732137835731559,
"grad_norm": 1.0,
"learning_rate": 1.5734331720671584e-05,
"loss": 1.7734,
"step": 3070
},
{
"epoch": 0.19796411900343064,
"grad_norm": 1.0625,
"learning_rate": 1.5708514202324074e-05,
"loss": 1.7086,
"step": 3080
},
{
"epoch": 0.19860685964954566,
"grad_norm": 1.1953125,
"learning_rate": 1.5682640117205376e-05,
"loss": 1.7351,
"step": 3090
},
{
"epoch": 0.1992496002956607,
"grad_norm": 0.7265625,
"learning_rate": 1.56567097217068e-05,
"loss": 1.6818,
"step": 3100
},
{
"epoch": 0.19989234094177574,
"grad_norm": 1.109375,
"learning_rate": 1.5630723272777656e-05,
"loss": 1.802,
"step": 3110
},
{
"epoch": 0.20053508158789077,
"grad_norm": 2.34375,
"learning_rate": 1.5604681027922686e-05,
"loss": 1.7713,
"step": 3120
},
{
"epoch": 0.2011778222340058,
"grad_norm": 1.203125,
"learning_rate": 1.557858324519953e-05,
"loss": 1.7553,
"step": 3130
},
{
"epoch": 0.20182056288012085,
"grad_norm": 0.9609375,
"learning_rate": 1.5552430183216173e-05,
"loss": 1.7304,
"step": 3140
},
{
"epoch": 0.20246330352623587,
"grad_norm": 1.03125,
"learning_rate": 1.5526222101128355e-05,
"loss": 1.7812,
"step": 3150
},
{
"epoch": 0.2031060441723509,
"grad_norm": 1.03125,
"learning_rate": 1.5499959258637033e-05,
"loss": 1.6848,
"step": 3160
},
{
"epoch": 0.20374878481846595,
"grad_norm": 1.234375,
"learning_rate": 1.5473641915985792e-05,
"loss": 1.6899,
"step": 3170
},
{
"epoch": 0.20439152546458098,
"grad_norm": 0.94921875,
"learning_rate": 1.5447270333958265e-05,
"loss": 1.7511,
"step": 3180
},
{
"epoch": 0.205034266110696,
"grad_norm": 0.921875,
"learning_rate": 1.542084477387557e-05,
"loss": 1.7477,
"step": 3190
},
{
"epoch": 0.20567700675681105,
"grad_norm": 0.953125,
"learning_rate": 1.5394365497593683e-05,
"loss": 1.7675,
"step": 3200
},
{
"epoch": 0.20631974740292608,
"grad_norm": 1.1171875,
"learning_rate": 1.5367832767500873e-05,
"loss": 1.6689,
"step": 3210
},
{
"epoch": 0.2069624880490411,
"grad_norm": 0.9609375,
"learning_rate": 1.5341246846515096e-05,
"loss": 1.7262,
"step": 3220
},
{
"epoch": 0.20760522869515616,
"grad_norm": 0.95703125,
"learning_rate": 1.5314607998081385e-05,
"loss": 1.7819,
"step": 3230
},
{
"epoch": 0.20824796934127118,
"grad_norm": 0.88671875,
"learning_rate": 1.528791648616924e-05,
"loss": 1.71,
"step": 3240
},
{
"epoch": 0.2088907099873862,
"grad_norm": 1.21875,
"learning_rate": 1.5261172575270014e-05,
"loss": 1.8036,
"step": 3250
},
{
"epoch": 0.20953345063350126,
"grad_norm": 0.83984375,
"learning_rate": 1.5234376530394297e-05,
"loss": 1.7522,
"step": 3260
},
{
"epoch": 0.2101761912796163,
"grad_norm": 1.0,
"learning_rate": 1.5207528617069272e-05,
"loss": 1.7288,
"step": 3270
},
{
"epoch": 0.2108189319257313,
"grad_norm": 0.9296875,
"learning_rate": 1.5180629101336109e-05,
"loss": 1.7152,
"step": 3280
},
{
"epoch": 0.21146167257184637,
"grad_norm": 1.359375,
"learning_rate": 1.5153678249747307e-05,
"loss": 1.7825,
"step": 3290
},
{
"epoch": 0.2121044132179614,
"grad_norm": 0.81640625,
"learning_rate": 1.5126676329364072e-05,
"loss": 1.6981,
"step": 3300
},
{
"epoch": 0.21274715386407642,
"grad_norm": 1.171875,
"learning_rate": 1.5099623607753651e-05,
"loss": 1.7109,
"step": 3310
},
{
"epoch": 0.21338989451019147,
"grad_norm": 1.046875,
"learning_rate": 1.5072520352986696e-05,
"loss": 1.7009,
"step": 3320
},
{
"epoch": 0.2140326351563065,
"grad_norm": 1.125,
"learning_rate": 1.50453668336346e-05,
"loss": 1.6994,
"step": 3330
},
{
"epoch": 0.21467537580242152,
"grad_norm": 1.046875,
"learning_rate": 1.5018163318766835e-05,
"loss": 1.727,
"step": 3340
},
{
"epoch": 0.21531811644853657,
"grad_norm": 1.0390625,
"learning_rate": 1.4990910077948293e-05,
"loss": 1.7093,
"step": 3350
},
{
"epoch": 0.2159608570946516,
"grad_norm": 1.0234375,
"learning_rate": 1.4963607381236608e-05,
"loss": 1.7136,
"step": 3360
},
{
"epoch": 0.21660359774076662,
"grad_norm": 0.8984375,
"learning_rate": 1.4936255499179479e-05,
"loss": 1.745,
"step": 3370
},
{
"epoch": 0.21724633838688168,
"grad_norm": 1.109375,
"learning_rate": 1.4908854702812e-05,
"loss": 1.7144,
"step": 3380
},
{
"epoch": 0.2178890790329967,
"grad_norm": 0.87890625,
"learning_rate": 1.4881405263653955e-05,
"loss": 1.7291,
"step": 3390
},
{
"epoch": 0.21853181967911173,
"grad_norm": 1.3671875,
"learning_rate": 1.4853907453707148e-05,
"loss": 1.8049,
"step": 3400
},
{
"epoch": 0.21917456032522678,
"grad_norm": 1.0078125,
"learning_rate": 1.4826361545452697e-05,
"loss": 1.8222,
"step": 3410
},
{
"epoch": 0.2198173009713418,
"grad_norm": 1.3515625,
"learning_rate": 1.4798767811848331e-05,
"loss": 1.7501,
"step": 3420
},
{
"epoch": 0.22046004161745683,
"grad_norm": 0.80859375,
"learning_rate": 1.4771126526325695e-05,
"loss": 1.7232,
"step": 3430
},
{
"epoch": 0.22110278226357188,
"grad_norm": 0.9140625,
"learning_rate": 1.474343796278763e-05,
"loss": 1.7281,
"step": 3440
},
{
"epoch": 0.2217455229096869,
"grad_norm": 0.92578125,
"learning_rate": 1.4715702395605468e-05,
"loss": 1.7007,
"step": 3450
},
{
"epoch": 0.22238826355580193,
"grad_norm": 1.1796875,
"learning_rate": 1.4687920099616311e-05,
"loss": 1.6244,
"step": 3460
},
{
"epoch": 0.22303100420191696,
"grad_norm": 1.5625,
"learning_rate": 1.4660091350120297e-05,
"loss": 1.657,
"step": 3470
},
{
"epoch": 0.223673744848032,
"grad_norm": 0.89453125,
"learning_rate": 1.4632216422877891e-05,
"loss": 1.7143,
"step": 3480
},
{
"epoch": 0.22431648549414704,
"grad_norm": 1.234375,
"learning_rate": 1.4604295594107139e-05,
"loss": 1.7564,
"step": 3490
},
{
"epoch": 0.22495922614026206,
"grad_norm": 1.3203125,
"learning_rate": 1.4576329140480925e-05,
"loss": 1.6929,
"step": 3500
},
{
"epoch": 0.22560196678637712,
"grad_norm": 1.3046875,
"learning_rate": 1.4548317339124251e-05,
"loss": 1.7734,
"step": 3510
},
{
"epoch": 0.22624470743249214,
"grad_norm": 0.96875,
"learning_rate": 1.452026046761148e-05,
"loss": 1.714,
"step": 3520
},
{
"epoch": 0.22688744807860717,
"grad_norm": 0.89453125,
"learning_rate": 1.4492158803963568e-05,
"loss": 1.7787,
"step": 3530
},
{
"epoch": 0.22753018872472222,
"grad_norm": 0.93359375,
"learning_rate": 1.4464012626645336e-05,
"loss": 1.7508,
"step": 3540
},
{
"epoch": 0.22817292937083725,
"grad_norm": 1.015625,
"learning_rate": 1.4435822214562705e-05,
"loss": 1.6806,
"step": 3550
},
{
"epoch": 0.22881567001695227,
"grad_norm": 0.8671875,
"learning_rate": 1.4407587847059914e-05,
"loss": 1.7545,
"step": 3560
},
{
"epoch": 0.22945841066306732,
"grad_norm": 1.265625,
"learning_rate": 1.4379309803916772e-05,
"loss": 1.7719,
"step": 3570
},
{
"epoch": 0.23010115130918235,
"grad_norm": 1.2265625,
"learning_rate": 1.4350988365345879e-05,
"loss": 1.7533,
"step": 3580
},
{
"epoch": 0.23074389195529738,
"grad_norm": 1.03125,
"learning_rate": 1.4322623811989844e-05,
"loss": 1.6717,
"step": 3590
},
{
"epoch": 0.23138663260141243,
"grad_norm": 1.2890625,
"learning_rate": 1.4294216424918515e-05,
"loss": 1.8084,
"step": 3600
},
{
"epoch": 0.23202937324752745,
"grad_norm": 1.1796875,
"learning_rate": 1.426576648562618e-05,
"loss": 1.7027,
"step": 3610
},
{
"epoch": 0.23267211389364248,
"grad_norm": 0.9296875,
"learning_rate": 1.4237274276028792e-05,
"loss": 1.7553,
"step": 3620
},
{
"epoch": 0.23331485453975753,
"grad_norm": 0.96484375,
"learning_rate": 1.4208740078461168e-05,
"loss": 1.7069,
"step": 3630
},
{
"epoch": 0.23395759518587256,
"grad_norm": 0.8359375,
"learning_rate": 1.4180164175674184e-05,
"loss": 1.6952,
"step": 3640
},
{
"epoch": 0.23460033583198758,
"grad_norm": 0.91015625,
"learning_rate": 1.4151546850831989e-05,
"loss": 1.6905,
"step": 3650
},
{
"epoch": 0.23524307647810264,
"grad_norm": 1.078125,
"learning_rate": 1.4122888387509193e-05,
"loss": 1.7128,
"step": 3660
},
{
"epoch": 0.23588581712421766,
"grad_norm": 0.94140625,
"learning_rate": 1.4094189069688046e-05,
"loss": 1.7267,
"step": 3670
},
{
"epoch": 0.2365285577703327,
"grad_norm": 0.91015625,
"learning_rate": 1.4065449181755644e-05,
"loss": 1.7449,
"step": 3680
},
{
"epoch": 0.23717129841644774,
"grad_norm": 1.3203125,
"learning_rate": 1.403666900850109e-05,
"loss": 1.7336,
"step": 3690
},
{
"epoch": 0.23781403906256277,
"grad_norm": 0.87890625,
"learning_rate": 1.4007848835112688e-05,
"loss": 1.6449,
"step": 3700
},
{
"epoch": 0.2384567797086778,
"grad_norm": 0.80078125,
"learning_rate": 1.397898894717511e-05,
"loss": 1.7207,
"step": 3710
},
{
"epoch": 0.23909952035479284,
"grad_norm": 1.265625,
"learning_rate": 1.3950089630666564e-05,
"loss": 1.7177,
"step": 3720
},
{
"epoch": 0.23974226100090787,
"grad_norm": 0.84765625,
"learning_rate": 1.392115117195597e-05,
"loss": 1.7184,
"step": 3730
},
{
"epoch": 0.2403850016470229,
"grad_norm": 1.21875,
"learning_rate": 1.3892173857800108e-05,
"loss": 1.7254,
"step": 3740
},
{
"epoch": 0.24102774229313795,
"grad_norm": 1.21875,
"learning_rate": 1.386315797534079e-05,
"loss": 1.7199,
"step": 3750
},
{
"epoch": 0.24167048293925297,
"grad_norm": 0.96484375,
"learning_rate": 1.3834103812102012e-05,
"loss": 1.6975,
"step": 3760
},
{
"epoch": 0.242313223585368,
"grad_norm": 1.140625,
"learning_rate": 1.3805011655987084e-05,
"loss": 1.7183,
"step": 3770
},
{
"epoch": 0.24295596423148305,
"grad_norm": 0.9765625,
"learning_rate": 1.3775881795275817e-05,
"loss": 1.6751,
"step": 3780
},
{
"epoch": 0.24359870487759808,
"grad_norm": 1.0625,
"learning_rate": 1.3746714518621631e-05,
"loss": 1.6311,
"step": 3790
},
{
"epoch": 0.2442414455237131,
"grad_norm": 1.1796875,
"learning_rate": 1.3717510115048709e-05,
"loss": 1.7294,
"step": 3800
},
{
"epoch": 0.24488418616982816,
"grad_norm": 1.0625,
"learning_rate": 1.3688268873949135e-05,
"loss": 1.6922,
"step": 3810
},
{
"epoch": 0.24552692681594318,
"grad_norm": 1.5859375,
"learning_rate": 1.3658991085080027e-05,
"loss": 1.6738,
"step": 3820
},
{
"epoch": 0.2461696674620582,
"grad_norm": 1.046875,
"learning_rate": 1.362967703856065e-05,
"loss": 1.6963,
"step": 3830
},
{
"epoch": 0.24681240810817326,
"grad_norm": 1.125,
"learning_rate": 1.360032702486956e-05,
"loss": 1.8388,
"step": 3840
},
{
"epoch": 0.24745514875428828,
"grad_norm": 1.1875,
"learning_rate": 1.3570941334841722e-05,
"loss": 1.7457,
"step": 3850
},
{
"epoch": 0.2480978894004033,
"grad_norm": 1.0,
"learning_rate": 1.3541520259665621e-05,
"loss": 1.6294,
"step": 3860
},
{
"epoch": 0.24874063004651836,
"grad_norm": 1.078125,
"learning_rate": 1.3512064090880381e-05,
"loss": 1.7499,
"step": 3870
},
{
"epoch": 0.2493833706926334,
"grad_norm": 1.28125,
"learning_rate": 1.3482573120372873e-05,
"loss": 1.7487,
"step": 3880
},
{
"epoch": 0.2500261113387484,
"grad_norm": 1.546875,
"learning_rate": 1.3453047640374828e-05,
"loss": 1.7238,
"step": 3890
},
{
"epoch": 0.25066885198486344,
"grad_norm": 0.859375,
"learning_rate": 1.3423487943459934e-05,
"loss": 1.7135,
"step": 3900
},
{
"epoch": 0.2513115926309785,
"grad_norm": 1.109375,
"learning_rate": 1.3393894322540952e-05,
"loss": 1.6844,
"step": 3910
},
{
"epoch": 0.25195433327709355,
"grad_norm": 0.94921875,
"learning_rate": 1.3364267070866788e-05,
"loss": 1.7126,
"step": 3920
},
{
"epoch": 0.25259707392320857,
"grad_norm": 1.0234375,
"learning_rate": 1.3334606482019607e-05,
"loss": 1.7103,
"step": 3930
},
{
"epoch": 0.2532398145693236,
"grad_norm": 1.1328125,
"learning_rate": 1.3304912849911929e-05,
"loss": 1.694,
"step": 3940
},
{
"epoch": 0.2538825552154386,
"grad_norm": 0.91015625,
"learning_rate": 1.3275186468783688e-05,
"loss": 1.7423,
"step": 3950
},
{
"epoch": 0.25452529586155365,
"grad_norm": 0.75390625,
"learning_rate": 1.3245427633199347e-05,
"loss": 1.7131,
"step": 3960
},
{
"epoch": 0.25516803650766867,
"grad_norm": 0.90234375,
"learning_rate": 1.3215636638044962e-05,
"loss": 1.7375,
"step": 3970
},
{
"epoch": 0.25581077715378375,
"grad_norm": 1.3125,
"learning_rate": 1.3185813778525265e-05,
"loss": 1.7256,
"step": 3980
},
{
"epoch": 0.2564535177998988,
"grad_norm": 1.1015625,
"learning_rate": 1.3155959350160739e-05,
"loss": 1.6983,
"step": 3990
},
{
"epoch": 0.2570962584460138,
"grad_norm": 1.0234375,
"learning_rate": 1.312607364878469e-05,
"loss": 1.6608,
"step": 4000
},
{
"epoch": 0.25773899909212883,
"grad_norm": 0.94140625,
"learning_rate": 1.3096156970540305e-05,
"loss": 1.6938,
"step": 4010
},
{
"epoch": 0.25838173973824385,
"grad_norm": 0.97265625,
"learning_rate": 1.3066209611877748e-05,
"loss": 1.7964,
"step": 4020
},
{
"epoch": 0.2590244803843589,
"grad_norm": 0.71484375,
"learning_rate": 1.3036231869551173e-05,
"loss": 1.6742,
"step": 4030
},
{
"epoch": 0.25966722103047396,
"grad_norm": 0.9765625,
"learning_rate": 1.3006224040615833e-05,
"loss": 1.621,
"step": 4040
},
{
"epoch": 0.260309961676589,
"grad_norm": 1.1171875,
"learning_rate": 1.297618642242511e-05,
"loss": 1.7152,
"step": 4050
},
{
"epoch": 0.260952702322704,
"grad_norm": 1.265625,
"learning_rate": 1.2946119312627565e-05,
"loss": 1.7553,
"step": 4060
},
{
"epoch": 0.26159544296881904,
"grad_norm": 1.1484375,
"learning_rate": 1.291602300916401e-05,
"loss": 1.7938,
"step": 4070
},
{
"epoch": 0.26223818361493406,
"grad_norm": 1.2890625,
"learning_rate": 1.2885897810264534e-05,
"loss": 1.794,
"step": 4080
},
{
"epoch": 0.2628809242610491,
"grad_norm": 0.9375,
"learning_rate": 1.2855744014445554e-05,
"loss": 1.7033,
"step": 4090
},
{
"epoch": 0.26352366490716417,
"grad_norm": 1.0546875,
"learning_rate": 1.2825561920506873e-05,
"loss": 1.6797,
"step": 4100
},
{
"epoch": 0.2641664055532792,
"grad_norm": 0.875,
"learning_rate": 1.2795351827528693e-05,
"loss": 1.8342,
"step": 4110
},
{
"epoch": 0.2648091461993942,
"grad_norm": 0.9453125,
"learning_rate": 1.2765114034868664e-05,
"loss": 1.7093,
"step": 4120
},
{
"epoch": 0.26545188684550924,
"grad_norm": 0.87890625,
"learning_rate": 1.2734848842158925e-05,
"loss": 1.7036,
"step": 4130
},
{
"epoch": 0.26609462749162427,
"grad_norm": 1.0390625,
"learning_rate": 1.2704556549303124e-05,
"loss": 1.6945,
"step": 4140
},
{
"epoch": 0.2667373681377393,
"grad_norm": 1.03125,
"learning_rate": 1.2674237456473444e-05,
"loss": 1.6948,
"step": 4150
},
{
"epoch": 0.2673801087838544,
"grad_norm": 0.75,
"learning_rate": 1.2643891864107646e-05,
"loss": 1.6862,
"step": 4160
},
{
"epoch": 0.2680228494299694,
"grad_norm": 1.265625,
"learning_rate": 1.2613520072906069e-05,
"loss": 1.7412,
"step": 4170
},
{
"epoch": 0.2686655900760844,
"grad_norm": 0.96875,
"learning_rate": 1.2583122383828665e-05,
"loss": 1.7044,
"step": 4180
},
{
"epoch": 0.26930833072219945,
"grad_norm": 0.87890625,
"learning_rate": 1.2552699098092016e-05,
"loss": 1.7292,
"step": 4190
},
{
"epoch": 0.2699510713683145,
"grad_norm": 1.0,
"learning_rate": 1.2522250517166344e-05,
"loss": 1.7285,
"step": 4200
},
{
"epoch": 0.2705938120144295,
"grad_norm": 1.1796875,
"learning_rate": 1.2491776942772525e-05,
"loss": 1.7442,
"step": 4210
},
{
"epoch": 0.2712365526605446,
"grad_norm": 0.8828125,
"learning_rate": 1.2461278676879099e-05,
"loss": 1.717,
"step": 4220
},
{
"epoch": 0.2718792933066596,
"grad_norm": 1.1171875,
"learning_rate": 1.2430756021699283e-05,
"loss": 1.7403,
"step": 4230
},
{
"epoch": 0.27252203395277463,
"grad_norm": 0.89453125,
"learning_rate": 1.2400209279687968e-05,
"loss": 1.7318,
"step": 4240
},
{
"epoch": 0.27316477459888966,
"grad_norm": 1.25,
"learning_rate": 1.2369638753538728e-05,
"loss": 1.6867,
"step": 4250
},
{
"epoch": 0.2738075152450047,
"grad_norm": 0.87890625,
"learning_rate": 1.2339044746180826e-05,
"loss": 1.7731,
"step": 4260
},
{
"epoch": 0.2744502558911197,
"grad_norm": 0.859375,
"learning_rate": 1.2308427560776187e-05,
"loss": 1.6815,
"step": 4270
},
{
"epoch": 0.2750929965372348,
"grad_norm": 1.2421875,
"learning_rate": 1.2277787500716424e-05,
"loss": 1.7348,
"step": 4280
},
{
"epoch": 0.2757357371833498,
"grad_norm": 0.90234375,
"learning_rate": 1.2247124869619823e-05,
"loss": 1.7091,
"step": 4290
},
{
"epoch": 0.27637847782946484,
"grad_norm": 0.76171875,
"learning_rate": 1.2216439971328323e-05,
"loss": 1.6958,
"step": 4300
},
{
"epoch": 0.27702121847557987,
"grad_norm": 1.140625,
"learning_rate": 1.2185733109904512e-05,
"loss": 1.7388,
"step": 4310
},
{
"epoch": 0.2776639591216949,
"grad_norm": 1.046875,
"learning_rate": 1.2155004589628622e-05,
"loss": 1.6458,
"step": 4320
},
{
"epoch": 0.2783066997678099,
"grad_norm": 0.73828125,
"learning_rate": 1.21242547149955e-05,
"loss": 1.679,
"step": 4330
},
{
"epoch": 0.278949440413925,
"grad_norm": 0.98046875,
"learning_rate": 1.2093483790711604e-05,
"loss": 1.691,
"step": 4340
},
{
"epoch": 0.27959218106004,
"grad_norm": 0.859375,
"learning_rate": 1.2062692121691972e-05,
"loss": 1.7202,
"step": 4350
},
{
"epoch": 0.28023492170615505,
"grad_norm": 0.83984375,
"learning_rate": 1.2031880013057204e-05,
"loss": 1.6382,
"step": 4360
},
{
"epoch": 0.2808776623522701,
"grad_norm": 1.4765625,
"learning_rate": 1.2001047770130448e-05,
"loss": 1.7238,
"step": 4370
},
{
"epoch": 0.2815204029983851,
"grad_norm": 0.83203125,
"learning_rate": 1.197019569843436e-05,
"loss": 1.7531,
"step": 4380
},
{
"epoch": 0.2821631436445001,
"grad_norm": 1.0,
"learning_rate": 1.1939324103688081e-05,
"loss": 1.6925,
"step": 4390
},
{
"epoch": 0.2828058842906152,
"grad_norm": 1.2421875,
"learning_rate": 1.1908433291804217e-05,
"loss": 1.8275,
"step": 4400
},
{
"epoch": 0.28344862493673023,
"grad_norm": 1.015625,
"learning_rate": 1.18775235688858e-05,
"loss": 1.7867,
"step": 4410
},
{
"epoch": 0.28409136558284526,
"grad_norm": 0.921875,
"learning_rate": 1.1846595241223246e-05,
"loss": 1.6453,
"step": 4420
},
{
"epoch": 0.2847341062289603,
"grad_norm": 2.625,
"learning_rate": 1.1815648615291337e-05,
"loss": 1.7489,
"step": 4430
},
{
"epoch": 0.2853768468750753,
"grad_norm": 1.0234375,
"learning_rate": 1.1784683997746178e-05,
"loss": 1.7083,
"step": 4440
},
{
"epoch": 0.28601958752119033,
"grad_norm": 1.03125,
"learning_rate": 1.1753701695422146e-05,
"loss": 1.7238,
"step": 4450
},
{
"epoch": 0.2866623281673054,
"grad_norm": 1.375,
"learning_rate": 1.1722702015328868e-05,
"loss": 1.7005,
"step": 4460
},
{
"epoch": 0.28730506881342044,
"grad_norm": 1.3046875,
"learning_rate": 1.1691685264648176e-05,
"loss": 1.731,
"step": 4470
},
{
"epoch": 0.28794780945953546,
"grad_norm": 0.98046875,
"learning_rate": 1.166065175073104e-05,
"loss": 1.715,
"step": 4480
},
{
"epoch": 0.2885905501056505,
"grad_norm": 1.1640625,
"learning_rate": 1.1629601781094557e-05,
"loss": 1.7201,
"step": 4490
},
{
"epoch": 0.2892332907517655,
"grad_norm": 0.96875,
"learning_rate": 1.1598535663418884e-05,
"loss": 1.7189,
"step": 4500
},
{
"epoch": 0.28987603139788054,
"grad_norm": 1.1953125,
"learning_rate": 1.1567453705544181e-05,
"loss": 1.6972,
"step": 4510
},
{
"epoch": 0.2905187720439956,
"grad_norm": 1.140625,
"learning_rate": 1.1536356215467582e-05,
"loss": 1.7408,
"step": 4520
},
{
"epoch": 0.29116151269011065,
"grad_norm": 1.0859375,
"learning_rate": 1.1505243501340135e-05,
"loss": 1.715,
"step": 4530
},
{
"epoch": 0.29180425333622567,
"grad_norm": 1.0625,
"learning_rate": 1.1474115871463736e-05,
"loss": 1.6914,
"step": 4540
},
{
"epoch": 0.2924469939823407,
"grad_norm": 0.921875,
"learning_rate": 1.1442973634288088e-05,
"loss": 1.6996,
"step": 4550
},
{
"epoch": 0.2930897346284557,
"grad_norm": 0.8515625,
"learning_rate": 1.1411817098407647e-05,
"loss": 1.6862,
"step": 4560
},
{
"epoch": 0.29373247527457075,
"grad_norm": 0.9453125,
"learning_rate": 1.138064657255855e-05,
"loss": 1.701,
"step": 4570
},
{
"epoch": 0.29437521592068583,
"grad_norm": 1.125,
"learning_rate": 1.1349462365615563e-05,
"loss": 1.7646,
"step": 4580
},
{
"epoch": 0.29501795656680085,
"grad_norm": 1.0390625,
"learning_rate": 1.1318264786589028e-05,
"loss": 1.7119,
"step": 4590
},
{
"epoch": 0.2956606972129159,
"grad_norm": 1.0625,
"learning_rate": 1.1287054144621784e-05,
"loss": 1.6709,
"step": 4600
},
{
"epoch": 0.2963034378590309,
"grad_norm": 1.2421875,
"learning_rate": 1.1255830748986123e-05,
"loss": 1.7181,
"step": 4610
},
{
"epoch": 0.29694617850514593,
"grad_norm": 1.1171875,
"learning_rate": 1.1224594909080704e-05,
"loss": 1.7136,
"step": 4620
},
{
"epoch": 0.29758891915126096,
"grad_norm": 1.3828125,
"learning_rate": 1.1193346934427512e-05,
"loss": 1.7369,
"step": 4630
},
{
"epoch": 0.29823165979737604,
"grad_norm": 1.046875,
"learning_rate": 1.1162087134668766e-05,
"loss": 1.761,
"step": 4640
},
{
"epoch": 0.29887440044349106,
"grad_norm": 0.9296875,
"learning_rate": 1.113081581956387e-05,
"loss": 1.6734,
"step": 4650
},
{
"epoch": 0.2995171410896061,
"grad_norm": 1.0078125,
"learning_rate": 1.1099533298986331e-05,
"loss": 1.7784,
"step": 4660
},
{
"epoch": 0.3001598817357211,
"grad_norm": 0.9140625,
"learning_rate": 1.10682398829207e-05,
"loss": 1.7395,
"step": 4670
},
{
"epoch": 0.30080262238183614,
"grad_norm": 1.1796875,
"learning_rate": 1.1036935881459478e-05,
"loss": 1.6175,
"step": 4680
},
{
"epoch": 0.30144536302795116,
"grad_norm": 0.80859375,
"learning_rate": 1.1005621604800079e-05,
"loss": 1.6363,
"step": 4690
},
{
"epoch": 0.30208810367406624,
"grad_norm": 2.140625,
"learning_rate": 1.0974297363241724e-05,
"loss": 1.6852,
"step": 4700
},
{
"epoch": 0.30273084432018127,
"grad_norm": 0.80078125,
"learning_rate": 1.094296346718238e-05,
"loss": 1.7092,
"step": 4710
},
{
"epoch": 0.3033735849662963,
"grad_norm": 1.1171875,
"learning_rate": 1.0911620227115678e-05,
"loss": 1.7391,
"step": 4720
},
{
"epoch": 0.3040163256124113,
"grad_norm": 0.90625,
"learning_rate": 1.0880267953627852e-05,
"loss": 1.6888,
"step": 4730
},
{
"epoch": 0.30465906625852635,
"grad_norm": 1.0390625,
"learning_rate": 1.0848906957394642e-05,
"loss": 1.6997,
"step": 4740
},
{
"epoch": 0.30530180690464137,
"grad_norm": 0.76953125,
"learning_rate": 1.0817537549178222e-05,
"loss": 1.7172,
"step": 4750
},
{
"epoch": 0.30594454755075645,
"grad_norm": 1.1484375,
"learning_rate": 1.0786160039824122e-05,
"loss": 1.7329,
"step": 4760
},
{
"epoch": 0.3065872881968715,
"grad_norm": 0.8984375,
"learning_rate": 1.0754774740258159e-05,
"loss": 1.7277,
"step": 4770
},
{
"epoch": 0.3072300288429865,
"grad_norm": 1.0390625,
"learning_rate": 1.0723381961483325e-05,
"loss": 1.7147,
"step": 4780
},
{
"epoch": 0.3078727694891015,
"grad_norm": 3.265625,
"learning_rate": 1.0691982014576744e-05,
"loss": 1.7482,
"step": 4790
},
{
"epoch": 0.30851551013521655,
"grad_norm": 1.0234375,
"learning_rate": 1.066057521068656e-05,
"loss": 1.7195,
"step": 4800
},
{
"epoch": 0.3091582507813316,
"grad_norm": 1.6796875,
"learning_rate": 1.0629161861028862e-05,
"loss": 1.6071,
"step": 4810
},
{
"epoch": 0.30980099142744666,
"grad_norm": 1.203125,
"learning_rate": 1.0597742276884615e-05,
"loss": 1.7493,
"step": 4820
},
{
"epoch": 0.3104437320735617,
"grad_norm": 1.078125,
"learning_rate": 1.056631676959655e-05,
"loss": 1.7358,
"step": 4830
},
{
"epoch": 0.3110864727196767,
"grad_norm": 1.421875,
"learning_rate": 1.0534885650566095e-05,
"loss": 1.6428,
"step": 4840
},
{
"epoch": 0.31172921336579174,
"grad_norm": 0.921875,
"learning_rate": 1.0503449231250292e-05,
"loss": 1.6361,
"step": 4850
},
{
"epoch": 0.31237195401190676,
"grad_norm": 1.2578125,
"learning_rate": 1.047200782315869e-05,
"loss": 1.7175,
"step": 4860
},
{
"epoch": 0.3130146946580218,
"grad_norm": 0.828125,
"learning_rate": 1.044056173785029e-05,
"loss": 1.7489,
"step": 4870
},
{
"epoch": 0.31365743530413687,
"grad_norm": 0.91015625,
"learning_rate": 1.040911128693043e-05,
"loss": 1.7763,
"step": 4880
},
{
"epoch": 0.3143001759502519,
"grad_norm": 0.9375,
"learning_rate": 1.037765678204771e-05,
"loss": 1.7808,
"step": 4890
},
{
"epoch": 0.3149429165963669,
"grad_norm": 1.0625,
"learning_rate": 1.0346198534890903e-05,
"loss": 1.6635,
"step": 4900
},
{
"epoch": 0.31558565724248194,
"grad_norm": 1.6875,
"learning_rate": 1.0314736857185865e-05,
"loss": 1.697,
"step": 4910
},
{
"epoch": 0.31622839788859697,
"grad_norm": 1.0625,
"learning_rate": 1.0283272060692442e-05,
"loss": 1.7429,
"step": 4920
},
{
"epoch": 0.316871138534712,
"grad_norm": 1.1875,
"learning_rate": 1.0251804457201387e-05,
"loss": 1.6465,
"step": 4930
},
{
"epoch": 0.317513879180827,
"grad_norm": 0.89453125,
"learning_rate": 1.0220334358531271e-05,
"loss": 1.728,
"step": 4940
},
{
"epoch": 0.3181566198269421,
"grad_norm": 1.265625,
"learning_rate": 1.0188862076525386e-05,
"loss": 1.7189,
"step": 4950
},
{
"epoch": 0.3187993604730571,
"grad_norm": 1.3203125,
"learning_rate": 1.0157387923048659e-05,
"loss": 1.7378,
"step": 4960
},
{
"epoch": 0.31944210111917215,
"grad_norm": 0.84765625,
"learning_rate": 1.012591220998457e-05,
"loss": 1.6847,
"step": 4970
},
{
"epoch": 0.3200848417652872,
"grad_norm": 0.90625,
"learning_rate": 1.0094435249232037e-05,
"loss": 1.6955,
"step": 4980
},
{
"epoch": 0.3207275824114022,
"grad_norm": 1.015625,
"learning_rate": 1.0062957352702358e-05,
"loss": 1.7114,
"step": 4990
},
{
"epoch": 0.3213703230575172,
"grad_norm": 1.171875,
"learning_rate": 1.0031478832316099e-05,
"loss": 1.6695,
"step": 5000
},
{
"epoch": 0.3220130637036323,
"grad_norm": 1.1015625,
"learning_rate": 1e-05,
"loss": 1.6945,
"step": 5010
},
{
"epoch": 0.32265580434974733,
"grad_norm": 10.0625,
"learning_rate": 9.968521167683906e-06,
"loss": 1.7108,
"step": 5020
},
{
"epoch": 0.32329854499586236,
"grad_norm": 1.0234375,
"learning_rate": 9.937042647297643e-06,
"loss": 1.7139,
"step": 5030
},
{
"epoch": 0.3239412856419774,
"grad_norm": 2.203125,
"learning_rate": 9.905564750767968e-06,
"loss": 1.7593,
"step": 5040
},
{
"epoch": 0.3245840262880924,
"grad_norm": 0.953125,
"learning_rate": 9.874087790015435e-06,
"loss": 1.6597,
"step": 5050
},
{
"epoch": 0.32522676693420743,
"grad_norm": 0.98046875,
"learning_rate": 9.842612076951343e-06,
"loss": 1.682,
"step": 5060
},
{
"epoch": 0.3258695075803225,
"grad_norm": 1.03125,
"learning_rate": 9.811137923474618e-06,
"loss": 1.7427,
"step": 5070
},
{
"epoch": 0.32651224822643754,
"grad_norm": 1.0234375,
"learning_rate": 9.779665641468734e-06,
"loss": 1.7048,
"step": 5080
},
{
"epoch": 0.32715498887255257,
"grad_norm": 1.5234375,
"learning_rate": 9.748195542798617e-06,
"loss": 1.6532,
"step": 5090
},
{
"epoch": 0.3277977295186676,
"grad_norm": 0.81640625,
"learning_rate": 9.716727939307563e-06,
"loss": 1.6839,
"step": 5100
},
{
"epoch": 0.3284404701647826,
"grad_norm": 1.1875,
"learning_rate": 9.685263142814138e-06,
"loss": 1.671,
"step": 5110
},
{
"epoch": 0.32908321081089764,
"grad_norm": 1.0625,
"learning_rate": 9.653801465109102e-06,
"loss": 1.6263,
"step": 5120
},
{
"epoch": 0.3297259514570127,
"grad_norm": 0.96484375,
"learning_rate": 9.622343217952291e-06,
"loss": 1.7392,
"step": 5130
},
{
"epoch": 0.33036869210312775,
"grad_norm": 0.7734375,
"learning_rate": 9.590888713069574e-06,
"loss": 1.6135,
"step": 5140
},
{
"epoch": 0.3310114327492428,
"grad_norm": 1.4453125,
"learning_rate": 9.559438262149713e-06,
"loss": 1.6987,
"step": 5150
},
{
"epoch": 0.3316541733953578,
"grad_norm": 0.953125,
"learning_rate": 9.527992176841315e-06,
"loss": 1.662,
"step": 5160
},
{
"epoch": 0.3322969140414728,
"grad_norm": 2.921875,
"learning_rate": 9.496550768749713e-06,
"loss": 1.6787,
"step": 5170
},
{
"epoch": 0.33293965468758785,
"grad_norm": 1.140625,
"learning_rate": 9.46511434943391e-06,
"loss": 1.6947,
"step": 5180
},
{
"epoch": 0.33358239533370293,
"grad_norm": 1.203125,
"learning_rate": 9.433683230403452e-06,
"loss": 1.8179,
"step": 5190
},
{
"epoch": 0.33422513597981796,
"grad_norm": 1.09375,
"learning_rate": 9.402257723115389e-06,
"loss": 1.703,
"step": 5200
},
{
"epoch": 0.334867876625933,
"grad_norm": 1.171875,
"learning_rate": 9.37083813897114e-06,
"loss": 1.7319,
"step": 5210
},
{
"epoch": 0.335510617272048,
"grad_norm": 1.109375,
"learning_rate": 9.339424789313445e-06,
"loss": 1.6642,
"step": 5220
},
{
"epoch": 0.33615335791816303,
"grad_norm": 1.1796875,
"learning_rate": 9.308017985423262e-06,
"loss": 1.6882,
"step": 5230
},
{
"epoch": 0.33679609856427806,
"grad_norm": 1.125,
"learning_rate": 9.27661803851668e-06,
"loss": 1.6838,
"step": 5240
},
{
"epoch": 0.33743883921039314,
"grad_norm": 0.98828125,
"learning_rate": 9.245225259741846e-06,
"loss": 1.6981,
"step": 5250
},
{
"epoch": 0.33808157985650816,
"grad_norm": 1.4296875,
"learning_rate": 9.213839960175881e-06,
"loss": 1.7938,
"step": 5260
},
{
"epoch": 0.3387243205026232,
"grad_norm": 0.88671875,
"learning_rate": 9.182462450821782e-06,
"loss": 1.6507,
"step": 5270
},
{
"epoch": 0.3393670611487382,
"grad_norm": 1.0625,
"learning_rate": 9.151093042605363e-06,
"loss": 1.6994,
"step": 5280
},
{
"epoch": 0.34000980179485324,
"grad_norm": 1.125,
"learning_rate": 9.11973204637215e-06,
"loss": 1.6593,
"step": 5290
},
{
"epoch": 0.34065254244096826,
"grad_norm": 1.1875,
"learning_rate": 9.088379772884325e-06,
"loss": 1.708,
"step": 5300
},
{
"epoch": 0.34129528308708335,
"grad_norm": 0.953125,
"learning_rate": 9.057036532817622e-06,
"loss": 1.7161,
"step": 5310
},
{
"epoch": 0.34193802373319837,
"grad_norm": 1.1796875,
"learning_rate": 9.025702636758278e-06,
"loss": 1.7392,
"step": 5320
},
{
"epoch": 0.3425807643793134,
"grad_norm": 1.0625,
"learning_rate": 8.994378395199921e-06,
"loss": 1.6983,
"step": 5330
},
{
"epoch": 0.3432235050254284,
"grad_norm": 1.3671875,
"learning_rate": 8.963064118540525e-06,
"loss": 1.7665,
"step": 5340
},
{
"epoch": 0.34386624567154345,
"grad_norm": 1.3515625,
"learning_rate": 8.931760117079305e-06,
"loss": 1.7038,
"step": 5350
},
{
"epoch": 0.34450898631765847,
"grad_norm": 0.98046875,
"learning_rate": 8.900466701013672e-06,
"loss": 1.7007,
"step": 5360
},
{
"epoch": 0.34515172696377355,
"grad_norm": 1.1015625,
"learning_rate": 8.86918418043613e-06,
"loss": 1.6677,
"step": 5370
},
{
"epoch": 0.3457944676098886,
"grad_norm": 0.9375,
"learning_rate": 8.837912865331236e-06,
"loss": 1.6247,
"step": 5380
},
{
"epoch": 0.3464372082560036,
"grad_norm": 0.7734375,
"learning_rate": 8.80665306557249e-06,
"loss": 1.6865,
"step": 5390
},
{
"epoch": 0.34707994890211863,
"grad_norm": 1.15625,
"learning_rate": 8.7754050909193e-06,
"loss": 1.6453,
"step": 5400
},
{
"epoch": 0.34772268954823365,
"grad_norm": 1.1640625,
"learning_rate": 8.74416925101388e-06,
"loss": 1.7498,
"step": 5410
},
{
"epoch": 0.3483654301943487,
"grad_norm": 0.828125,
"learning_rate": 8.712945855378218e-06,
"loss": 1.7581,
"step": 5420
},
{
"epoch": 0.34900817084046376,
"grad_norm": 0.83984375,
"learning_rate": 8.681735213410974e-06,
"loss": 1.7167,
"step": 5430
},
{
"epoch": 0.3496509114865788,
"grad_norm": 1.375,
"learning_rate": 8.65053763438444e-06,
"loss": 1.6842,
"step": 5440
},
{
"epoch": 0.3502936521326938,
"grad_norm": 0.8515625,
"learning_rate": 8.619353427441452e-06,
"loss": 1.7526,
"step": 5450
},
{
"epoch": 0.35093639277880884,
"grad_norm": 1.125,
"learning_rate": 8.588182901592355e-06,
"loss": 1.7505,
"step": 5460
},
{
"epoch": 0.35157913342492386,
"grad_norm": 1.4921875,
"learning_rate": 8.557026365711912e-06,
"loss": 1.769,
"step": 5470
},
{
"epoch": 0.3522218740710389,
"grad_norm": 1.15625,
"learning_rate": 8.525884128536268e-06,
"loss": 1.7306,
"step": 5480
},
{
"epoch": 0.35286461471715397,
"grad_norm": 1.0546875,
"learning_rate": 8.494756498659865e-06,
"loss": 1.6367,
"step": 5490
},
{
"epoch": 0.353507355363269,
"grad_norm": 1.0703125,
"learning_rate": 8.46364378453242e-06,
"loss": 1.6766,
"step": 5500
},
{
"epoch": 0.354150096009384,
"grad_norm": 0.94921875,
"learning_rate": 8.43254629445582e-06,
"loss": 1.6862,
"step": 5510
},
{
"epoch": 0.35479283665549904,
"grad_norm": 0.99609375,
"learning_rate": 8.40146433658112e-06,
"loss": 1.7138,
"step": 5520
},
{
"epoch": 0.35543557730161407,
"grad_norm": 2.015625,
"learning_rate": 8.370398218905441e-06,
"loss": 1.717,
"step": 5530
},
{
"epoch": 0.3560783179477291,
"grad_norm": 1.2421875,
"learning_rate": 8.339348249268963e-06,
"loss": 1.6968,
"step": 5540
},
{
"epoch": 0.3567210585938442,
"grad_norm": 1.390625,
"learning_rate": 8.308314735351826e-06,
"loss": 1.7034,
"step": 5550
},
{
"epoch": 0.3573637992399592,
"grad_norm": 1.25,
"learning_rate": 8.277297984671134e-06,
"loss": 1.6566,
"step": 5560
},
{
"epoch": 0.3580065398860742,
"grad_norm": 1.125,
"learning_rate": 8.24629830457786e-06,
"loss": 1.689,
"step": 5570
},
{
"epoch": 0.35864928053218925,
"grad_norm": 0.89453125,
"learning_rate": 8.215316002253825e-06,
"loss": 1.7245,
"step": 5580
},
{
"epoch": 0.3592920211783043,
"grad_norm": 1.796875,
"learning_rate": 8.184351384708666e-06,
"loss": 1.6886,
"step": 5590
},
{
"epoch": 0.3599347618244193,
"grad_norm": 1.1171875,
"learning_rate": 8.153404758776757e-06,
"loss": 1.7266,
"step": 5600
},
{
"epoch": 0.3605775024705344,
"grad_norm": 1.1640625,
"learning_rate": 8.122476431114205e-06,
"loss": 1.7683,
"step": 5610
},
{
"epoch": 0.3612202431166494,
"grad_norm": 1.171875,
"learning_rate": 8.091566708195786e-06,
"loss": 1.7441,
"step": 5620
},
{
"epoch": 0.36186298376276443,
"grad_norm": 0.83984375,
"learning_rate": 8.060675896311925e-06,
"loss": 1.6591,
"step": 5630
},
{
"epoch": 0.36250572440887946,
"grad_norm": 0.96484375,
"learning_rate": 8.029804301565645e-06,
"loss": 1.7238,
"step": 5640
},
{
"epoch": 0.3631484650549945,
"grad_norm": 0.86328125,
"learning_rate": 7.998952229869556e-06,
"loss": 1.6526,
"step": 5650
},
{
"epoch": 0.3637912057011095,
"grad_norm": 1.1953125,
"learning_rate": 7.968119986942798e-06,
"loss": 1.7302,
"step": 5660
},
{
"epoch": 0.3644339463472246,
"grad_norm": 1.28125,
"learning_rate": 7.937307878308033e-06,
"loss": 1.7281,
"step": 5670
},
{
"epoch": 0.3650766869933396,
"grad_norm": 1.265625,
"learning_rate": 7.906516209288399e-06,
"loss": 1.7069,
"step": 5680
},
{
"epoch": 0.36571942763945464,
"grad_norm": 1.0234375,
"learning_rate": 7.875745285004504e-06,
"loss": 1.7153,
"step": 5690
},
{
"epoch": 0.36636216828556967,
"grad_norm": 1.21875,
"learning_rate": 7.844995410371381e-06,
"loss": 1.6505,
"step": 5700
},
{
"epoch": 0.3670049089316847,
"grad_norm": 1.65625,
"learning_rate": 7.814266890095493e-06,
"loss": 1.7055,
"step": 5710
},
{
"epoch": 0.3676476495777997,
"grad_norm": 1.0390625,
"learning_rate": 7.78356002867168e-06,
"loss": 1.7126,
"step": 5720
},
{
"epoch": 0.3682903902239148,
"grad_norm": 1.296875,
"learning_rate": 7.752875130380182e-06,
"loss": 1.7205,
"step": 5730
},
{
"epoch": 0.3689331308700298,
"grad_norm": 1.015625,
"learning_rate": 7.722212499283579e-06,
"loss": 1.7175,
"step": 5740
},
{
"epoch": 0.36957587151614485,
"grad_norm": 3.6875,
"learning_rate": 7.69157243922382e-06,
"loss": 1.7011,
"step": 5750
},
{
"epoch": 0.3702186121622599,
"grad_norm": 1.1171875,
"learning_rate": 7.660955253819178e-06,
"loss": 1.8167,
"step": 5760
},
{
"epoch": 0.3708613528083749,
"grad_norm": 1.078125,
"learning_rate": 7.630361246461276e-06,
"loss": 1.7789,
"step": 5770
},
{
"epoch": 0.3715040934544899,
"grad_norm": 0.90234375,
"learning_rate": 7.599790720312035e-06,
"loss": 1.7267,
"step": 5780
},
{
"epoch": 0.372146834100605,
"grad_norm": 1.1796875,
"learning_rate": 7.5692439783007225e-06,
"loss": 1.7034,
"step": 5790
},
{
"epoch": 0.37278957474672003,
"grad_norm": 0.88671875,
"learning_rate": 7.538721323120905e-06,
"loss": 1.7108,
"step": 5800
},
{
"epoch": 0.37343231539283506,
"grad_norm": 0.921875,
"learning_rate": 7.5082230572274805e-06,
"loss": 1.7298,
"step": 5810
},
{
"epoch": 0.3740750560389501,
"grad_norm": 1.1953125,
"learning_rate": 7.477749482833659e-06,
"loss": 1.7672,
"step": 5820
},
{
"epoch": 0.3747177966850651,
"grad_norm": 1.5078125,
"learning_rate": 7.447300901907988e-06,
"loss": 1.6882,
"step": 5830
},
{
"epoch": 0.37536053733118013,
"grad_norm": 1.203125,
"learning_rate": 7.416877616171336e-06,
"loss": 1.7687,
"step": 5840
},
{
"epoch": 0.3760032779772952,
"grad_norm": 1.1875,
"learning_rate": 7.386479927093934e-06,
"loss": 1.7094,
"step": 5850
},
{
"epoch": 0.37664601862341024,
"grad_norm": 1.125,
"learning_rate": 7.356108135892357e-06,
"loss": 1.7794,
"step": 5860
},
{
"epoch": 0.37728875926952526,
"grad_norm": 1.015625,
"learning_rate": 7.325762543526559e-06,
"loss": 1.6798,
"step": 5870
},
{
"epoch": 0.3779314999156403,
"grad_norm": 1.0,
"learning_rate": 7.29544345069688e-06,
"loss": 1.6328,
"step": 5880
},
{
"epoch": 0.3785742405617553,
"grad_norm": 1.2265625,
"learning_rate": 7.265151157841079e-06,
"loss": 1.7291,
"step": 5890
},
{
"epoch": 0.37921698120787034,
"grad_norm": 0.86328125,
"learning_rate": 7.234885965131337e-06,
"loss": 1.6752,
"step": 5900
},
{
"epoch": 0.3798597218539854,
"grad_norm": 1.09375,
"learning_rate": 7.204648172471311e-06,
"loss": 1.717,
"step": 5910
},
{
"epoch": 0.38050246250010045,
"grad_norm": 1.1015625,
"learning_rate": 7.174438079493128e-06,
"loss": 1.7098,
"step": 5920
},
{
"epoch": 0.38114520314621547,
"grad_norm": 1.1640625,
"learning_rate": 7.144255985554447e-06,
"loss": 1.6681,
"step": 5930
},
{
"epoch": 0.3817879437923305,
"grad_norm": 1.671875,
"learning_rate": 7.11410218973547e-06,
"loss": 1.6714,
"step": 5940
},
{
"epoch": 0.3824306844384455,
"grad_norm": 1.203125,
"learning_rate": 7.0839769908359944e-06,
"loss": 1.7326,
"step": 5950
},
{
"epoch": 0.38307342508456055,
"grad_norm": 1.1484375,
"learning_rate": 7.053880687372436e-06,
"loss": 1.6754,
"step": 5960
},
{
"epoch": 0.3837161657306756,
"grad_norm": 1.03125,
"learning_rate": 7.023813577574894e-06,
"loss": 1.6816,
"step": 5970
},
{
"epoch": 0.38435890637679065,
"grad_norm": 1.1640625,
"learning_rate": 6.993775959384168e-06,
"loss": 1.7969,
"step": 5980
},
{
"epoch": 0.3850016470229057,
"grad_norm": 0.9921875,
"learning_rate": 6.96376813044883e-06,
"loss": 1.6831,
"step": 5990
},
{
"epoch": 0.3856443876690207,
"grad_norm": 1.1640625,
"learning_rate": 6.9337903881222555e-06,
"loss": 1.6608,
"step": 6000
},
{
"epoch": 0.38628712831513573,
"grad_norm": 1.1015625,
"learning_rate": 6.903843029459696e-06,
"loss": 1.7346,
"step": 6010
},
{
"epoch": 0.38692986896125076,
"grad_norm": 1.1796875,
"learning_rate": 6.873926351215312e-06,
"loss": 1.7735,
"step": 6020
},
{
"epoch": 0.3875726096073658,
"grad_norm": 1.6171875,
"learning_rate": 6.844040649839264e-06,
"loss": 1.715,
"step": 6030
},
{
"epoch": 0.38821535025348086,
"grad_norm": 0.828125,
"learning_rate": 6.8141862214747355e-06,
"loss": 1.647,
"step": 6040
},
{
"epoch": 0.3888580908995959,
"grad_norm": 0.984375,
"learning_rate": 6.784363361955041e-06,
"loss": 1.72,
"step": 6050
},
{
"epoch": 0.3895008315457109,
"grad_norm": 1.1953125,
"learning_rate": 6.754572366800656e-06,
"loss": 1.6878,
"step": 6060
},
{
"epoch": 0.39014357219182594,
"grad_norm": 0.9765625,
"learning_rate": 6.724813531216316e-06,
"loss": 1.6373,
"step": 6070
},
{
"epoch": 0.39078631283794096,
"grad_norm": 1.1171875,
"learning_rate": 6.695087150088071e-06,
"loss": 1.7257,
"step": 6080
},
{
"epoch": 0.391429053484056,
"grad_norm": 1.234375,
"learning_rate": 6.665393517980394e-06,
"loss": 1.6369,
"step": 6090
},
{
"epoch": 0.39207179413017107,
"grad_norm": 1.2578125,
"learning_rate": 6.635732929133214e-06,
"loss": 1.6913,
"step": 6100
},
{
"epoch": 0.3927145347762861,
"grad_norm": 1.1796875,
"learning_rate": 6.606105677459051e-06,
"loss": 1.7101,
"step": 6110
},
{
"epoch": 0.3933572754224011,
"grad_norm": 1.2265625,
"learning_rate": 6.5765120565400675e-06,
"loss": 1.7865,
"step": 6120
},
{
"epoch": 0.39400001606851615,
"grad_norm": 1.0,
"learning_rate": 6.546952359625176e-06,
"loss": 1.7338,
"step": 6130
},
{
"epoch": 0.39464275671463117,
"grad_norm": 1.0625,
"learning_rate": 6.517426879627131e-06,
"loss": 1.7372,
"step": 6140
},
{
"epoch": 0.3952854973607462,
"grad_norm": 0.96484375,
"learning_rate": 6.487935909119623e-06,
"loss": 1.6966,
"step": 6150
},
{
"epoch": 0.3959282380068613,
"grad_norm": 0.77734375,
"learning_rate": 6.458479740334382e-06,
"loss": 1.6769,
"step": 6160
},
{
"epoch": 0.3965709786529763,
"grad_norm": 0.96484375,
"learning_rate": 6.4290586651582786e-06,
"loss": 1.7546,
"step": 6170
},
{
"epoch": 0.39721371929909133,
"grad_norm": 1.1796875,
"learning_rate": 6.3996729751304446e-06,
"loss": 1.7707,
"step": 6180
},
{
"epoch": 0.39785645994520635,
"grad_norm": 0.9375,
"learning_rate": 6.370322961439355e-06,
"loss": 1.7398,
"step": 6190
},
{
"epoch": 0.3984992005913214,
"grad_norm": 1.140625,
"learning_rate": 6.3410089149199785e-06,
"loss": 1.699,
"step": 6200
},
{
"epoch": 0.3991419412374364,
"grad_norm": 1.09375,
"learning_rate": 6.3117311260508666e-06,
"loss": 1.7004,
"step": 6210
},
{
"epoch": 0.3997846818835515,
"grad_norm": 1.125,
"learning_rate": 6.282489884951295e-06,
"loss": 1.6898,
"step": 6220
},
{
"epoch": 0.4004274225296665,
"grad_norm": 1.1484375,
"learning_rate": 6.253285481378371e-06,
"loss": 1.6982,
"step": 6230
},
{
"epoch": 0.40107016317578154,
"grad_norm": 0.93359375,
"learning_rate": 6.224118204724186e-06,
"loss": 1.7026,
"step": 6240
},
{
"epoch": 0.40171290382189656,
"grad_norm": 1.09375,
"learning_rate": 6.194988344012917e-06,
"loss": 1.6386,
"step": 6250
},
{
"epoch": 0.4023556444680116,
"grad_norm": 1.1953125,
"learning_rate": 6.165896187897994e-06,
"loss": 1.7697,
"step": 6260
},
{
"epoch": 0.4029983851141266,
"grad_norm": 1.03125,
"learning_rate": 6.136842024659211e-06,
"loss": 1.6783,
"step": 6270
},
{
"epoch": 0.4036411257602417,
"grad_norm": 1.234375,
"learning_rate": 6.107826142199895e-06,
"loss": 1.7209,
"step": 6280
},
{
"epoch": 0.4042838664063567,
"grad_norm": 1.1015625,
"learning_rate": 6.078848828044033e-06,
"loss": 1.6696,
"step": 6290
},
{
"epoch": 0.40492660705247174,
"grad_norm": 0.80859375,
"learning_rate": 6.049910369333441e-06,
"loss": 1.7255,
"step": 6300
},
{
"epoch": 0.40556934769858677,
"grad_norm": 1.453125,
"learning_rate": 6.021011052824894e-06,
"loss": 1.6588,
"step": 6310
},
{
"epoch": 0.4062120883447018,
"grad_norm": 1.3515625,
"learning_rate": 5.9921511648873164e-06,
"loss": 1.7546,
"step": 6320
},
{
"epoch": 0.4068548289908168,
"grad_norm": 0.91796875,
"learning_rate": 5.963330991498915e-06,
"loss": 1.7166,
"step": 6330
},
{
"epoch": 0.4074975696369319,
"grad_norm": 1.109375,
"learning_rate": 5.934550818244361e-06,
"loss": 1.6429,
"step": 6340
},
{
"epoch": 0.4081403102830469,
"grad_norm": 1.0,
"learning_rate": 5.9058109303119545e-06,
"loss": 1.681,
"step": 6350
},
{
"epoch": 0.40878305092916195,
"grad_norm": 0.8359375,
"learning_rate": 5.877111612490813e-06,
"loss": 1.7439,
"step": 6360
},
{
"epoch": 0.409425791575277,
"grad_norm": 1.328125,
"learning_rate": 5.848453149168013e-06,
"loss": 1.7528,
"step": 6370
},
{
"epoch": 0.410068532221392,
"grad_norm": 1.484375,
"learning_rate": 5.819835824325823e-06,
"loss": 1.689,
"step": 6380
},
{
"epoch": 0.410711272867507,
"grad_norm": 0.96875,
"learning_rate": 5.791259921538835e-06,
"loss": 1.7727,
"step": 6390
},
{
"epoch": 0.4113540135136221,
"grad_norm": 1.125,
"learning_rate": 5.762725723971212e-06,
"loss": 1.6252,
"step": 6400
},
{
"epoch": 0.41199675415973713,
"grad_norm": 1.140625,
"learning_rate": 5.734233514373822e-06,
"loss": 1.6618,
"step": 6410
},
{
"epoch": 0.41263949480585216,
"grad_norm": 0.9609375,
"learning_rate": 5.705783575081487e-06,
"loss": 1.6596,
"step": 6420
},
{
"epoch": 0.4132822354519672,
"grad_norm": 1.109375,
"learning_rate": 5.6773761880101564e-06,
"loss": 1.7205,
"step": 6430
},
{
"epoch": 0.4139249760980822,
"grad_norm": 0.92578125,
"learning_rate": 5.649011634654125e-06,
"loss": 1.6959,
"step": 6440
},
{
"epoch": 0.41456771674419723,
"grad_norm": 1.40625,
"learning_rate": 5.620690196083228e-06,
"loss": 1.6523,
"step": 6450
},
{
"epoch": 0.4152104573903123,
"grad_norm": 2.875,
"learning_rate": 5.592412152940088e-06,
"loss": 1.7141,
"step": 6460
},
{
"epoch": 0.41585319803642734,
"grad_norm": 1.0,
"learning_rate": 5.564177785437297e-06,
"loss": 1.7625,
"step": 6470
},
{
"epoch": 0.41649593868254237,
"grad_norm": 1.1015625,
"learning_rate": 5.5359873733546636e-06,
"loss": 1.7273,
"step": 6480
},
{
"epoch": 0.4171386793286574,
"grad_norm": 0.87109375,
"learning_rate": 5.507841196036434e-06,
"loss": 1.8089,
"step": 6490
},
{
"epoch": 0.4177814199747724,
"grad_norm": 1.0859375,
"learning_rate": 5.4797395323885265e-06,
"loss": 1.7406,
"step": 6500
},
{
"epoch": 0.41842416062088744,
"grad_norm": 1.0234375,
"learning_rate": 5.4516826608757465e-06,
"loss": 1.6867,
"step": 6510
},
{
"epoch": 0.4190669012670025,
"grad_norm": 1.015625,
"learning_rate": 5.423670859519079e-06,
"loss": 1.6484,
"step": 6520
},
{
"epoch": 0.41970964191311755,
"grad_norm": 0.8828125,
"learning_rate": 5.3957044058928655e-06,
"loss": 1.7048,
"step": 6530
},
{
"epoch": 0.4203523825592326,
"grad_norm": 0.89453125,
"learning_rate": 5.36778357712211e-06,
"loss": 1.7044,
"step": 6540
},
{
"epoch": 0.4209951232053476,
"grad_norm": 0.99609375,
"learning_rate": 5.339908649879704e-06,
"loss": 1.7864,
"step": 6550
},
{
"epoch": 0.4216378638514626,
"grad_norm": 1.171875,
"learning_rate": 5.312079900383693e-06,
"loss": 1.7418,
"step": 6560
},
{
"epoch": 0.42228060449757765,
"grad_norm": 1.0703125,
"learning_rate": 5.284297604394535e-06,
"loss": 1.8014,
"step": 6570
},
{
"epoch": 0.42292334514369273,
"grad_norm": 1.5546875,
"learning_rate": 5.256562037212372e-06,
"loss": 1.7288,
"step": 6580
},
{
"epoch": 0.42356608578980776,
"grad_norm": 0.765625,
"learning_rate": 5.2288734736743075e-06,
"loss": 1.71,
"step": 6590
},
{
"epoch": 0.4242088264359228,
"grad_norm": 0.73828125,
"learning_rate": 5.20123218815167e-06,
"loss": 1.7094,
"step": 6600
},
{
"epoch": 0.4248515670820378,
"grad_norm": 1.0546875,
"learning_rate": 5.1736384545473026e-06,
"loss": 1.6777,
"step": 6610
},
{
"epoch": 0.42549430772815283,
"grad_norm": 0.9296875,
"learning_rate": 5.1460925462928545e-06,
"loss": 1.7222,
"step": 6620
},
{
"epoch": 0.42613704837426786,
"grad_norm": 1.1796875,
"learning_rate": 5.118594736346048e-06,
"loss": 1.7647,
"step": 6630
},
{
"epoch": 0.42677978902038294,
"grad_norm": 1.1328125,
"learning_rate": 5.091145297188003e-06,
"loss": 1.7045,
"step": 6640
},
{
"epoch": 0.42742252966649796,
"grad_norm": 1.046875,
"learning_rate": 5.063744500820521e-06,
"loss": 1.684,
"step": 6650
},
{
"epoch": 0.428065270312613,
"grad_norm": 1.1953125,
"learning_rate": 5.0363926187633925e-06,
"loss": 1.7023,
"step": 6660
},
{
"epoch": 0.428708010958728,
"grad_norm": 1.1328125,
"learning_rate": 5.009089922051707e-06,
"loss": 1.6451,
"step": 6670
},
{
"epoch": 0.42935075160484304,
"grad_norm": 0.99609375,
"learning_rate": 4.981836681233168e-06,
"loss": 1.7367,
"step": 6680
},
{
"epoch": 0.42999349225095806,
"grad_norm": 1.328125,
"learning_rate": 4.9546331663654034e-06,
"loss": 1.6693,
"step": 6690
},
{
"epoch": 0.43063623289707315,
"grad_norm": 1.3125,
"learning_rate": 4.927479647013305e-06,
"loss": 1.7379,
"step": 6700
},
{
"epoch": 0.43127897354318817,
"grad_norm": 0.83203125,
"learning_rate": 4.900376392246354e-06,
"loss": 1.669,
"step": 6710
},
{
"epoch": 0.4319217141893032,
"grad_norm": 1.0859375,
"learning_rate": 4.873323670635929e-06,
"loss": 1.7333,
"step": 6720
},
{
"epoch": 0.4325644548354182,
"grad_norm": 0.8359375,
"learning_rate": 4.846321750252696e-06,
"loss": 1.6984,
"step": 6730
},
{
"epoch": 0.43320719548153325,
"grad_norm": 1.6796875,
"learning_rate": 4.819370898663895e-06,
"loss": 1.7067,
"step": 6740
},
{
"epoch": 0.4338499361276483,
"grad_norm": 0.93359375,
"learning_rate": 4.792471382930731e-06,
"loss": 1.6956,
"step": 6750
},
{
"epoch": 0.43449267677376335,
"grad_norm": 0.8515625,
"learning_rate": 4.765623469605706e-06,
"loss": 1.7058,
"step": 6760
},
{
"epoch": 0.4351354174198784,
"grad_norm": 0.9453125,
"learning_rate": 4.738827424729989e-06,
"loss": 1.7446,
"step": 6770
},
{
"epoch": 0.4357781580659934,
"grad_norm": 1.0546875,
"learning_rate": 4.71208351383076e-06,
"loss": 1.7698,
"step": 6780
},
{
"epoch": 0.43642089871210843,
"grad_norm": 1.1875,
"learning_rate": 4.685392001918618e-06,
"loss": 1.7285,
"step": 6790
},
{
"epoch": 0.43706363935822345,
"grad_norm": 1.1171875,
"learning_rate": 4.658753153484908e-06,
"loss": 1.7491,
"step": 6800
},
{
"epoch": 0.4377063800043385,
"grad_norm": 0.8125,
"learning_rate": 4.63216723249913e-06,
"loss": 1.6926,
"step": 6810
},
{
"epoch": 0.43834912065045356,
"grad_norm": 0.82421875,
"learning_rate": 4.605634502406321e-06,
"loss": 1.7485,
"step": 6820
},
{
"epoch": 0.4389918612965686,
"grad_norm": 1.3203125,
"learning_rate": 4.579155226124437e-06,
"loss": 1.6569,
"step": 6830
},
{
"epoch": 0.4396346019426836,
"grad_norm": 0.9140625,
"learning_rate": 4.552729666041736e-06,
"loss": 1.751,
"step": 6840
},
{
"epoch": 0.44027734258879864,
"grad_norm": 1.0859375,
"learning_rate": 4.526358084014213e-06,
"loss": 1.8229,
"step": 6850
},
{
"epoch": 0.44092008323491366,
"grad_norm": 0.98828125,
"learning_rate": 4.500040741362971e-06,
"loss": 1.6954,
"step": 6860
},
{
"epoch": 0.4415628238810287,
"grad_norm": 1.0390625,
"learning_rate": 4.473777898871647e-06,
"loss": 1.6678,
"step": 6870
},
{
"epoch": 0.44220556452714377,
"grad_norm": 1.0703125,
"learning_rate": 4.447569816783829e-06,
"loss": 1.6844,
"step": 6880
},
{
"epoch": 0.4428483051732588,
"grad_norm": 1.390625,
"learning_rate": 4.4214167548004725e-06,
"loss": 1.7419,
"step": 6890
},
{
"epoch": 0.4434910458193738,
"grad_norm": 0.94140625,
"learning_rate": 4.39531897207732e-06,
"loss": 1.7206,
"step": 6900
},
{
"epoch": 0.44413378646548884,
"grad_norm": 0.81640625,
"learning_rate": 4.369276727222349e-06,
"loss": 1.7947,
"step": 6910
},
{
"epoch": 0.44477652711160387,
"grad_norm": 1.1796875,
"learning_rate": 4.343290278293202e-06,
"loss": 1.7724,
"step": 6920
},
{
"epoch": 0.4454192677577189,
"grad_norm": 0.98828125,
"learning_rate": 4.317359882794627e-06,
"loss": 1.7104,
"step": 6930
},
{
"epoch": 0.4460620084038339,
"grad_norm": 1.0390625,
"learning_rate": 4.291485797675928e-06,
"loss": 1.7179,
"step": 6940
},
{
"epoch": 0.446704749049949,
"grad_norm": 1.1328125,
"learning_rate": 4.265668279328419e-06,
"loss": 1.7458,
"step": 6950
},
{
"epoch": 0.447347489696064,
"grad_norm": 1.0,
"learning_rate": 4.239907583582877e-06,
"loss": 1.7055,
"step": 6960
},
{
"epoch": 0.44799023034217905,
"grad_norm": 0.70703125,
"learning_rate": 4.214203965707022e-06,
"loss": 1.7475,
"step": 6970
},
{
"epoch": 0.4486329709882941,
"grad_norm": 1.03125,
"learning_rate": 4.188557680402974e-06,
"loss": 1.669,
"step": 6980
},
{
"epoch": 0.4492757116344091,
"grad_norm": 1.0625,
"learning_rate": 4.162968981804733e-06,
"loss": 1.6994,
"step": 6990
},
{
"epoch": 0.44991845228052413,
"grad_norm": 1.0625,
"learning_rate": 4.137438123475662e-06,
"loss": 1.7615,
"step": 7000
},
{
"epoch": 0.4505611929266392,
"grad_norm": 1.1953125,
"learning_rate": 4.1119653584059775e-06,
"loss": 1.6852,
"step": 7010
},
{
"epoch": 0.45120393357275423,
"grad_norm": 1.3671875,
"learning_rate": 4.086550939010228e-06,
"loss": 1.7365,
"step": 7020
},
{
"epoch": 0.45184667421886926,
"grad_norm": 1.4921875,
"learning_rate": 4.061195117124812e-06,
"loss": 1.6873,
"step": 7030
},
{
"epoch": 0.4524894148649843,
"grad_norm": 1.0859375,
"learning_rate": 4.035898144005474e-06,
"loss": 1.7313,
"step": 7040
},
{
"epoch": 0.4531321555110993,
"grad_norm": 0.94921875,
"learning_rate": 4.0106602703248165e-06,
"loss": 1.7299,
"step": 7050
},
{
"epoch": 0.45377489615721434,
"grad_norm": 0.90625,
"learning_rate": 3.985481746169798e-06,
"loss": 1.703,
"step": 7060
},
{
"epoch": 0.4544176368033294,
"grad_norm": 1.078125,
"learning_rate": 3.9603628210393e-06,
"loss": 1.679,
"step": 7070
},
{
"epoch": 0.45506037744944444,
"grad_norm": 1.125,
"learning_rate": 3.935303743841595e-06,
"loss": 1.6709,
"step": 7080
},
{
"epoch": 0.45570311809555947,
"grad_norm": 1.1875,
"learning_rate": 3.910304762891933e-06,
"loss": 1.7258,
"step": 7090
},
{
"epoch": 0.4563458587416745,
"grad_norm": 0.96484375,
"learning_rate": 3.885366125910049e-06,
"loss": 1.7946,
"step": 7100
},
{
"epoch": 0.4569885993877895,
"grad_norm": 1.109375,
"learning_rate": 3.860488080017721e-06,
"loss": 1.7382,
"step": 7110
},
{
"epoch": 0.45763134003390454,
"grad_norm": 1.4453125,
"learning_rate": 3.83567087173631e-06,
"loss": 1.6783,
"step": 7120
},
{
"epoch": 0.4582740806800196,
"grad_norm": 0.94140625,
"learning_rate": 3.8109147469843435e-06,
"loss": 1.7216,
"step": 7130
},
{
"epoch": 0.45891682132613465,
"grad_norm": 1.1640625,
"learning_rate": 3.7862199510750407e-06,
"loss": 1.7224,
"step": 7140
},
{
"epoch": 0.4595595619722497,
"grad_norm": 0.99609375,
"learning_rate": 3.761586728713912e-06,
"loss": 1.6983,
"step": 7150
},
{
"epoch": 0.4602023026183647,
"grad_norm": 0.91015625,
"learning_rate": 3.7370153239963214e-06,
"loss": 1.6992,
"step": 7160
},
{
"epoch": 0.4608450432644797,
"grad_norm": 1.0703125,
"learning_rate": 3.7125059804050746e-06,
"loss": 1.7179,
"step": 7170
},
{
"epoch": 0.46148778391059475,
"grad_norm": 1.125,
"learning_rate": 3.688058940807986e-06,
"loss": 1.6712,
"step": 7180
},
{
"epoch": 0.46213052455670983,
"grad_norm": 0.99609375,
"learning_rate": 3.663674447455513e-06,
"loss": 1.6995,
"step": 7190
},
{
"epoch": 0.46277326520282486,
"grad_norm": 1.1875,
"learning_rate": 3.6393527419783047e-06,
"loss": 1.6762,
"step": 7200
},
{
"epoch": 0.4634160058489399,
"grad_norm": 1.1015625,
"learning_rate": 3.615094065384849e-06,
"loss": 1.6916,
"step": 7210
},
{
"epoch": 0.4640587464950549,
"grad_norm": 0.83984375,
"learning_rate": 3.590898658059062e-06,
"loss": 1.7022,
"step": 7220
},
{
"epoch": 0.46470148714116993,
"grad_norm": 1.140625,
"learning_rate": 3.5667667597579193e-06,
"loss": 1.672,
"step": 7230
},
{
"epoch": 0.46534422778728496,
"grad_norm": 0.9453125,
"learning_rate": 3.542698609609059e-06,
"loss": 1.6352,
"step": 7240
},
{
"epoch": 0.46598696843340004,
"grad_norm": 1.2578125,
"learning_rate": 3.5186944461084404e-06,
"loss": 1.7051,
"step": 7250
},
{
"epoch": 0.46662970907951506,
"grad_norm": 0.8515625,
"learning_rate": 3.4947545071179644e-06,
"loss": 1.6944,
"step": 7260
},
{
"epoch": 0.4672724497256301,
"grad_norm": 1.125,
"learning_rate": 3.470879029863107e-06,
"loss": 1.732,
"step": 7270
},
{
"epoch": 0.4679151903717451,
"grad_norm": 0.8203125,
"learning_rate": 3.447068250930601e-06,
"loss": 1.7558,
"step": 7280
},
{
"epoch": 0.46855793101786014,
"grad_norm": 0.875,
"learning_rate": 3.423322406266051e-06,
"loss": 1.7348,
"step": 7290
},
{
"epoch": 0.46920067166397517,
"grad_norm": 1.09375,
"learning_rate": 3.399641731171628e-06,
"loss": 1.7695,
"step": 7300
},
{
"epoch": 0.46984341231009025,
"grad_norm": 0.8984375,
"learning_rate": 3.3760264603037196e-06,
"loss": 1.7403,
"step": 7310
},
{
"epoch": 0.4704861529562053,
"grad_norm": 0.9453125,
"learning_rate": 3.3524768276706155e-06,
"loss": 1.6421,
"step": 7320
},
{
"epoch": 0.4711288936023203,
"grad_norm": 0.92578125,
"learning_rate": 3.3289930666301695e-06,
"loss": 1.6217,
"step": 7330
},
{
"epoch": 0.4717716342484353,
"grad_norm": 1.1328125,
"learning_rate": 3.3055754098875247e-06,
"loss": 1.7421,
"step": 7340
},
{
"epoch": 0.47241437489455035,
"grad_norm": 1.1171875,
"learning_rate": 3.2822240894927616e-06,
"loss": 1.734,
"step": 7350
},
{
"epoch": 0.4730571155406654,
"grad_norm": 1.1640625,
"learning_rate": 3.2589393368386315e-06,
"loss": 1.7184,
"step": 7360
},
{
"epoch": 0.47369985618678045,
"grad_norm": 1.078125,
"learning_rate": 3.2357213826582522e-06,
"loss": 1.6888,
"step": 7370
},
{
"epoch": 0.4743425968328955,
"grad_norm": 0.984375,
"learning_rate": 3.2125704570228244e-06,
"loss": 1.752,
"step": 7380
},
{
"epoch": 0.4749853374790105,
"grad_norm": 0.921875,
"learning_rate": 3.1894867893393387e-06,
"loss": 1.743,
"step": 7390
},
{
"epoch": 0.47562807812512553,
"grad_norm": 0.83984375,
"learning_rate": 3.1664706083483345e-06,
"loss": 1.6909,
"step": 7400
},
{
"epoch": 0.47627081877124056,
"grad_norm": 1.015625,
"learning_rate": 3.14352214212159e-06,
"loss": 1.7387,
"step": 7410
},
{
"epoch": 0.4769135594173556,
"grad_norm": 0.890625,
"learning_rate": 3.1206416180599e-06,
"loss": 1.6899,
"step": 7420
},
{
"epoch": 0.47755630006347066,
"grad_norm": 1.2578125,
"learning_rate": 3.0978292628908003e-06,
"loss": 1.6898,
"step": 7430
},
{
"epoch": 0.4781990407095857,
"grad_norm": 0.984375,
"learning_rate": 3.075085302666334e-06,
"loss": 1.6989,
"step": 7440
},
{
"epoch": 0.4788417813557007,
"grad_norm": 1.125,
"learning_rate": 3.0524099627607916e-06,
"loss": 1.7251,
"step": 7450
},
{
"epoch": 0.47948452200181574,
"grad_norm": 0.72265625,
"learning_rate": 3.029803467868515e-06,
"loss": 1.6895,
"step": 7460
},
{
"epoch": 0.48012726264793076,
"grad_norm": 0.99609375,
"learning_rate": 3.007266042001623e-06,
"loss": 1.6724,
"step": 7470
},
{
"epoch": 0.4807700032940458,
"grad_norm": 1.0859375,
"learning_rate": 2.984797908487832e-06,
"loss": 1.731,
"step": 7480
},
{
"epoch": 0.48141274394016087,
"grad_norm": 0.94140625,
"learning_rate": 2.9623992899682197e-06,
"loss": 1.7454,
"step": 7490
},
{
"epoch": 0.4820554845862759,
"grad_norm": 1.0625,
"learning_rate": 2.940070408395034e-06,
"loss": 1.674,
"step": 7500
},
{
"epoch": 0.4826982252323909,
"grad_norm": 1.09375,
"learning_rate": 2.917811485029468e-06,
"loss": 1.7311,
"step": 7510
},
{
"epoch": 0.48334096587850595,
"grad_norm": 1.140625,
"learning_rate": 2.895622740439514e-06,
"loss": 1.6394,
"step": 7520
},
{
"epoch": 0.48398370652462097,
"grad_norm": 1.03125,
"learning_rate": 2.8735043944977226e-06,
"loss": 1.6737,
"step": 7530
},
{
"epoch": 0.484626447170736,
"grad_norm": 1.2734375,
"learning_rate": 2.851456666379063e-06,
"loss": 1.6369,
"step": 7540
},
{
"epoch": 0.4852691878168511,
"grad_norm": 1.0,
"learning_rate": 2.829479774558739e-06,
"loss": 1.749,
"step": 7550
},
{
"epoch": 0.4859119284629661,
"grad_norm": 1.1640625,
"learning_rate": 2.807573936810023e-06,
"loss": 1.6698,
"step": 7560
},
{
"epoch": 0.48655466910908113,
"grad_norm": 1.1875,
"learning_rate": 2.7857393702020887e-06,
"loss": 1.7104,
"step": 7570
},
{
"epoch": 0.48719740975519615,
"grad_norm": 1.140625,
"learning_rate": 2.763976291097892e-06,
"loss": 1.7477,
"step": 7580
},
{
"epoch": 0.4878401504013112,
"grad_norm": 0.7734375,
"learning_rate": 2.7422849151519827e-06,
"loss": 1.6346,
"step": 7590
},
{
"epoch": 0.4884828910474262,
"grad_norm": 1.078125,
"learning_rate": 2.7206654573084047e-06,
"loss": 1.7394,
"step": 7600
},
{
"epoch": 0.4891256316935413,
"grad_norm": 1.1796875,
"learning_rate": 2.6991181317985436e-06,
"loss": 1.6725,
"step": 7610
},
{
"epoch": 0.4897683723396563,
"grad_norm": 1.1953125,
"learning_rate": 2.67764315213902e-06,
"loss": 1.6944,
"step": 7620
},
{
"epoch": 0.49041111298577134,
"grad_norm": 0.93359375,
"learning_rate": 2.6562407311295513e-06,
"loss": 1.708,
"step": 7630
},
{
"epoch": 0.49105385363188636,
"grad_norm": 1.1875,
"learning_rate": 2.6349110808508784e-06,
"loss": 1.7259,
"step": 7640
},
{
"epoch": 0.4916965942780014,
"grad_norm": 0.9296875,
"learning_rate": 2.6136544126626206e-06,
"loss": 1.7211,
"step": 7650
},
{
"epoch": 0.4923393349241164,
"grad_norm": 1.15625,
"learning_rate": 2.592470937201217e-06,
"loss": 1.7362,
"step": 7660
},
{
"epoch": 0.4929820755702315,
"grad_norm": 0.75390625,
"learning_rate": 2.5713608643778233e-06,
"loss": 1.6407,
"step": 7670
},
{
"epoch": 0.4936248162163465,
"grad_norm": 1.0390625,
"learning_rate": 2.5503244033762364e-06,
"loss": 1.7767,
"step": 7680
},
{
"epoch": 0.49426755686246154,
"grad_norm": 1.0625,
"learning_rate": 2.529361762650807e-06,
"loss": 1.6979,
"step": 7690
},
{
"epoch": 0.49491029750857657,
"grad_norm": 1.0,
"learning_rate": 2.5084731499244095e-06,
"loss": 1.7267,
"step": 7700
},
{
"epoch": 0.4955530381546916,
"grad_norm": 0.89453125,
"learning_rate": 2.4876587721863353e-06,
"loss": 1.7194,
"step": 7710
},
{
"epoch": 0.4961957788008066,
"grad_norm": 0.8125,
"learning_rate": 2.466918835690285e-06,
"loss": 1.6665,
"step": 7720
},
{
"epoch": 0.4968385194469217,
"grad_norm": 1.3359375,
"learning_rate": 2.4462535459522973e-06,
"loss": 1.7087,
"step": 7730
},
{
"epoch": 0.4974812600930367,
"grad_norm": 1.0703125,
"learning_rate": 2.4256631077487293e-06,
"loss": 1.6859,
"step": 7740
},
{
"epoch": 0.49812400073915175,
"grad_norm": 0.984375,
"learning_rate": 2.4051477251142053e-06,
"loss": 1.7492,
"step": 7750
},
{
"epoch": 0.4987667413852668,
"grad_norm": 1.15625,
"learning_rate": 2.3847076013396307e-06,
"loss": 1.691,
"step": 7760
},
{
"epoch": 0.4994094820313818,
"grad_norm": 1.2421875,
"learning_rate": 2.364342938970138e-06,
"loss": 1.6986,
"step": 7770
},
{
"epoch": 0.5000522226774968,
"grad_norm": 0.83203125,
"learning_rate": 2.344053939803106e-06,
"loss": 1.6884,
"step": 7780
},
{
"epoch": 0.5006949633236119,
"grad_norm": 1.3203125,
"learning_rate": 2.323840804886154e-06,
"loss": 1.7066,
"step": 7790
},
{
"epoch": 0.5013377039697269,
"grad_norm": 1.0234375,
"learning_rate": 2.3037037345151403e-06,
"loss": 1.7178,
"step": 7800
},
{
"epoch": 0.5019804446158419,
"grad_norm": 1.3125,
"learning_rate": 2.2836429282321915e-06,
"loss": 1.6345,
"step": 7810
},
{
"epoch": 0.502623185261957,
"grad_norm": 0.89453125,
"learning_rate": 2.263658584823717e-06,
"loss": 1.6852,
"step": 7820
},
{
"epoch": 0.5032659259080721,
"grad_norm": 0.84375,
"learning_rate": 2.2437509023184425e-06,
"loss": 1.6761,
"step": 7830
},
{
"epoch": 0.5039086665541871,
"grad_norm": 1.1171875,
"learning_rate": 2.2239200779854363e-06,
"loss": 1.768,
"step": 7840
},
{
"epoch": 0.5045514072003021,
"grad_norm": 1.109375,
"learning_rate": 2.2041663083321828e-06,
"loss": 1.7629,
"step": 7850
},
{
"epoch": 0.5051941478464171,
"grad_norm": 1.0625,
"learning_rate": 2.184489789102596e-06,
"loss": 1.6428,
"step": 7860
},
{
"epoch": 0.5058368884925322,
"grad_norm": 0.9609375,
"learning_rate": 2.164890715275115e-06,
"loss": 1.7071,
"step": 7870
},
{
"epoch": 0.5064796291386472,
"grad_norm": 1.03125,
"learning_rate": 2.1453692810607506e-06,
"loss": 1.6629,
"step": 7880
},
{
"epoch": 0.5071223697847622,
"grad_norm": 0.95703125,
"learning_rate": 2.125925679901174e-06,
"loss": 1.7425,
"step": 7890
},
{
"epoch": 0.5077651104308772,
"grad_norm": 1.0546875,
"learning_rate": 2.1065601044667804e-06,
"loss": 1.641,
"step": 7900
},
{
"epoch": 0.5084078510769923,
"grad_norm": 1.3828125,
"learning_rate": 2.0872727466548115e-06,
"loss": 1.6154,
"step": 7910
},
{
"epoch": 0.5090505917231073,
"grad_norm": 1.2109375,
"learning_rate": 2.0680637975874195e-06,
"loss": 1.7772,
"step": 7920
},
{
"epoch": 0.5096933323692223,
"grad_norm": 1.015625,
"learning_rate": 2.0489334476097975e-06,
"loss": 1.7118,
"step": 7930
},
{
"epoch": 0.5103360730153373,
"grad_norm": 0.90234375,
"learning_rate": 2.0298818862882843e-06,
"loss": 1.7284,
"step": 7940
},
{
"epoch": 0.5109788136614525,
"grad_norm": 1.015625,
"learning_rate": 2.0109093024084848e-06,
"loss": 1.7781,
"step": 7950
},
{
"epoch": 0.5116215543075675,
"grad_norm": 1.0546875,
"learning_rate": 1.992015883973397e-06,
"loss": 1.7109,
"step": 7960
},
{
"epoch": 0.5122642949536825,
"grad_norm": 1.0546875,
"learning_rate": 1.9732018182015668e-06,
"loss": 1.7513,
"step": 7970
},
{
"epoch": 0.5129070355997976,
"grad_norm": 1.1953125,
"learning_rate": 1.954467291525203e-06,
"loss": 1.7591,
"step": 7980
},
{
"epoch": 0.5135497762459126,
"grad_norm": 0.74609375,
"learning_rate": 1.9358124895883555e-06,
"loss": 1.6807,
"step": 7990
},
{
"epoch": 0.5141925168920276,
"grad_norm": 0.8515625,
"learning_rate": 1.9172375972450652e-06,
"loss": 1.7017,
"step": 8000
},
{
"epoch": 0.5148352575381426,
"grad_norm": 0.98828125,
"learning_rate": 1.8987427985575346e-06,
"loss": 1.7274,
"step": 8010
},
{
"epoch": 0.5154779981842577,
"grad_norm": 1.171875,
"learning_rate": 1.8803282767942955e-06,
"loss": 1.7167,
"step": 8020
},
{
"epoch": 0.5161207388303727,
"grad_norm": 1.1484375,
"learning_rate": 1.8619942144284076e-06,
"loss": 1.7589,
"step": 8030
},
{
"epoch": 0.5167634794764877,
"grad_norm": 1.0859375,
"learning_rate": 1.843740793135641e-06,
"loss": 1.7583,
"step": 8040
},
{
"epoch": 0.5174062201226027,
"grad_norm": 1.2109375,
"learning_rate": 1.8255681937926762e-06,
"loss": 1.7986,
"step": 8050
},
{
"epoch": 0.5180489607687178,
"grad_norm": 1.0390625,
"learning_rate": 1.807476596475316e-06,
"loss": 1.6925,
"step": 8060
},
{
"epoch": 0.5186917014148329,
"grad_norm": 1.15625,
"learning_rate": 1.7894661804566983e-06,
"loss": 1.7238,
"step": 8070
},
{
"epoch": 0.5193344420609479,
"grad_norm": 0.9453125,
"learning_rate": 1.7715371242055136e-06,
"loss": 1.6862,
"step": 8080
},
{
"epoch": 0.519977182707063,
"grad_norm": 1.015625,
"learning_rate": 1.753689605384249e-06,
"loss": 1.6657,
"step": 8090
},
{
"epoch": 0.520619923353178,
"grad_norm": 1.0234375,
"learning_rate": 1.7359238008474223e-06,
"loss": 1.7383,
"step": 8100
},
{
"epoch": 0.521262663999293,
"grad_norm": 1.3125,
"learning_rate": 1.7182398866398254e-06,
"loss": 1.6816,
"step": 8110
},
{
"epoch": 0.521905404645408,
"grad_norm": 0.9296875,
"learning_rate": 1.700638037994783e-06,
"loss": 1.7226,
"step": 8120
},
{
"epoch": 0.522548145291523,
"grad_norm": 1.2890625,
"learning_rate": 1.683118429332421e-06,
"loss": 1.7693,
"step": 8130
},
{
"epoch": 0.5231908859376381,
"grad_norm": 1.171875,
"learning_rate": 1.6656812342579242e-06,
"loss": 1.7043,
"step": 8140
},
{
"epoch": 0.5238336265837531,
"grad_norm": 0.875,
"learning_rate": 1.648326625559834e-06,
"loss": 1.7983,
"step": 8150
},
{
"epoch": 0.5244763672298681,
"grad_norm": 0.9765625,
"learning_rate": 1.6310547752083228e-06,
"loss": 1.7757,
"step": 8160
},
{
"epoch": 0.5251191078759831,
"grad_norm": 1.2421875,
"learning_rate": 1.6138658543534968e-06,
"loss": 1.6681,
"step": 8170
},
{
"epoch": 0.5257618485220982,
"grad_norm": 1.828125,
"learning_rate": 1.5967600333236965e-06,
"loss": 1.7397,
"step": 8180
},
{
"epoch": 0.5264045891682133,
"grad_norm": 1.203125,
"learning_rate": 1.579737481623813e-06,
"loss": 1.7653,
"step": 8190
},
{
"epoch": 0.5270473298143283,
"grad_norm": 0.87890625,
"learning_rate": 1.5627983679335989e-06,
"loss": 1.7328,
"step": 8200
},
{
"epoch": 0.5276900704604434,
"grad_norm": 0.89453125,
"learning_rate": 1.5459428601060101e-06,
"loss": 1.7309,
"step": 8210
},
{
"epoch": 0.5283328111065584,
"grad_norm": 1.453125,
"learning_rate": 1.5291711251655317e-06,
"loss": 1.7638,
"step": 8220
},
{
"epoch": 0.5289755517526734,
"grad_norm": 0.7890625,
"learning_rate": 1.5124833293065322e-06,
"loss": 1.6607,
"step": 8230
},
{
"epoch": 0.5296182923987884,
"grad_norm": 0.95703125,
"learning_rate": 1.4958796378916007e-06,
"loss": 1.6668,
"step": 8240
},
{
"epoch": 0.5302610330449035,
"grad_norm": 0.94921875,
"learning_rate": 1.4793602154499354e-06,
"loss": 1.7186,
"step": 8250
},
{
"epoch": 0.5309037736910185,
"grad_norm": 0.734375,
"learning_rate": 1.4629252256756821e-06,
"loss": 1.7094,
"step": 8260
},
{
"epoch": 0.5315465143371335,
"grad_norm": 1.296875,
"learning_rate": 1.4465748314263338e-06,
"loss": 1.764,
"step": 8270
},
{
"epoch": 0.5321892549832485,
"grad_norm": 0.875,
"learning_rate": 1.43030919472111e-06,
"loss": 1.6155,
"step": 8280
},
{
"epoch": 0.5328319956293636,
"grad_norm": 0.8515625,
"learning_rate": 1.4141284767393525e-06,
"loss": 1.7376,
"step": 8290
},
{
"epoch": 0.5334747362754786,
"grad_norm": 1.1953125,
"learning_rate": 1.3980328378189178e-06,
"loss": 1.7018,
"step": 8300
},
{
"epoch": 0.5341174769215937,
"grad_norm": 0.83984375,
"learning_rate": 1.3820224374546132e-06,
"loss": 1.6741,
"step": 8310
},
{
"epoch": 0.5347602175677088,
"grad_norm": 0.86328125,
"learning_rate": 1.366097434296587e-06,
"loss": 1.6559,
"step": 8320
},
{
"epoch": 0.5354029582138238,
"grad_norm": 0.8671875,
"learning_rate": 1.3502579861487763e-06,
"loss": 1.7797,
"step": 8330
},
{
"epoch": 0.5360456988599388,
"grad_norm": 1.2421875,
"learning_rate": 1.3345042499673378e-06,
"loss": 1.6661,
"step": 8340
},
{
"epoch": 0.5366884395060538,
"grad_norm": 1.203125,
"learning_rate": 1.3188363818590855e-06,
"loss": 1.7208,
"step": 8350
},
{
"epoch": 0.5373311801521689,
"grad_norm": 1.140625,
"learning_rate": 1.3032545370799587e-06,
"loss": 1.725,
"step": 8360
},
{
"epoch": 0.5379739207982839,
"grad_norm": 1.0390625,
"learning_rate": 1.2877588700334676e-06,
"loss": 1.692,
"step": 8370
},
{
"epoch": 0.5386166614443989,
"grad_norm": 1.3359375,
"learning_rate": 1.272349534269176e-06,
"loss": 1.7045,
"step": 8380
},
{
"epoch": 0.5392594020905139,
"grad_norm": 0.94140625,
"learning_rate": 1.2570266824811717e-06,
"loss": 1.7557,
"step": 8390
},
{
"epoch": 0.539902142736629,
"grad_norm": 1.015625,
"learning_rate": 1.2417904665065573e-06,
"loss": 1.7558,
"step": 8400
},
{
"epoch": 0.540544883382744,
"grad_norm": 1.1953125,
"learning_rate": 1.226641037323939e-06,
"loss": 1.7185,
"step": 8410
},
{
"epoch": 0.541187624028859,
"grad_norm": 0.953125,
"learning_rate": 1.2115785450519434e-06,
"loss": 1.7403,
"step": 8420
},
{
"epoch": 0.5418303646749741,
"grad_norm": 0.9453125,
"learning_rate": 1.1966031389477196e-06,
"loss": 1.7509,
"step": 8430
},
{
"epoch": 0.5424731053210892,
"grad_norm": 0.97265625,
"learning_rate": 1.1817149674054618e-06,
"loss": 1.7101,
"step": 8440
},
{
"epoch": 0.5431158459672042,
"grad_norm": 1.2578125,
"learning_rate": 1.166914177954942e-06,
"loss": 1.7901,
"step": 8450
},
{
"epoch": 0.5437585866133192,
"grad_norm": 1.140625,
"learning_rate": 1.1522009172600467e-06,
"loss": 1.6784,
"step": 8460
},
{
"epoch": 0.5444013272594342,
"grad_norm": 1.4375,
"learning_rate": 1.1375753311173165e-06,
"loss": 1.6697,
"step": 8470
},
{
"epoch": 0.5450440679055493,
"grad_norm": 1.0,
"learning_rate": 1.1230375644545166e-06,
"loss": 1.7349,
"step": 8480
},
{
"epoch": 0.5456868085516643,
"grad_norm": 1.328125,
"learning_rate": 1.1085877613291863e-06,
"loss": 1.7256,
"step": 8490
},
{
"epoch": 0.5463295491977793,
"grad_norm": 1.0859375,
"learning_rate": 1.094226064927223e-06,
"loss": 1.73,
"step": 8500
},
{
"epoch": 0.5469722898438943,
"grad_norm": 0.87890625,
"learning_rate": 1.079952617561447e-06,
"loss": 1.7609,
"step": 8510
},
{
"epoch": 0.5476150304900094,
"grad_norm": 1.2265625,
"learning_rate": 1.0657675606702166e-06,
"loss": 1.6854,
"step": 8520
},
{
"epoch": 0.5482577711361244,
"grad_norm": 1.3984375,
"learning_rate": 1.0516710348159987e-06,
"loss": 1.6998,
"step": 8530
},
{
"epoch": 0.5489005117822394,
"grad_norm": 1.6640625,
"learning_rate": 1.0376631796839941e-06,
"loss": 1.6465,
"step": 8540
},
{
"epoch": 0.5495432524283546,
"grad_norm": 0.94921875,
"learning_rate": 1.0237441340807496e-06,
"loss": 1.7513,
"step": 8550
},
{
"epoch": 0.5501859930744696,
"grad_norm": 0.9453125,
"learning_rate": 1.009914035932782e-06,
"loss": 1.6838,
"step": 8560
},
{
"epoch": 0.5508287337205846,
"grad_norm": 0.890625,
"learning_rate": 9.961730222852007e-07,
"loss": 1.7282,
"step": 8570
},
{
"epoch": 0.5514714743666996,
"grad_norm": 1.046875,
"learning_rate": 9.82521229300377e-07,
"loss": 1.6842,
"step": 8580
},
{
"epoch": 0.5521142150128147,
"grad_norm": 0.9765625,
"learning_rate": 9.689587922565624e-07,
"loss": 1.6717,
"step": 8590
},
{
"epoch": 0.5527569556589297,
"grad_norm": 0.79296875,
"learning_rate": 9.554858455465688e-07,
"loss": 1.706,
"step": 8600
},
{
"epoch": 0.5533996963050447,
"grad_norm": 0.81640625,
"learning_rate": 9.421025226764302e-07,
"loss": 1.7494,
"step": 8610
},
{
"epoch": 0.5540424369511597,
"grad_norm": 0.9453125,
"learning_rate": 9.288089562640845e-07,
"loss": 1.7058,
"step": 8620
},
{
"epoch": 0.5546851775972748,
"grad_norm": 0.79296875,
"learning_rate": 9.156052780380464e-07,
"loss": 1.7548,
"step": 8630
},
{
"epoch": 0.5553279182433898,
"grad_norm": 0.99609375,
"learning_rate": 9.024916188361233e-07,
"loss": 1.6629,
"step": 8640
},
{
"epoch": 0.5559706588895048,
"grad_norm": 1.09375,
"learning_rate": 8.894681086040946e-07,
"loss": 1.797,
"step": 8650
},
{
"epoch": 0.5566133995356198,
"grad_norm": 0.890625,
"learning_rate": 8.765348763944448e-07,
"loss": 1.7439,
"step": 8660
},
{
"epoch": 0.557256140181735,
"grad_norm": 1.2890625,
"learning_rate": 8.636920503650715e-07,
"loss": 1.6643,
"step": 8670
},
{
"epoch": 0.55789888082785,
"grad_norm": 1.2421875,
"learning_rate": 8.509397577780254e-07,
"loss": 1.7223,
"step": 8680
},
{
"epoch": 0.558541621473965,
"grad_norm": 1.140625,
"learning_rate": 8.38278124998232e-07,
"loss": 1.7488,
"step": 8690
},
{
"epoch": 0.55918436212008,
"grad_norm": 0.921875,
"learning_rate": 8.25707277492268e-07,
"loss": 1.7919,
"step": 8700
},
{
"epoch": 0.5598271027661951,
"grad_norm": 1.2578125,
"learning_rate": 8.132273398270873e-07,
"loss": 1.6887,
"step": 8710
},
{
"epoch": 0.5604698434123101,
"grad_norm": 1.015625,
"learning_rate": 8.008384356688059e-07,
"loss": 1.7171,
"step": 8720
},
{
"epoch": 0.5611125840584251,
"grad_norm": 1.0078125,
"learning_rate": 7.885406877814716e-07,
"loss": 1.7252,
"step": 8730
},
{
"epoch": 0.5617553247045401,
"grad_norm": 1.0703125,
"learning_rate": 7.763342180258493e-07,
"loss": 1.6797,
"step": 8740
},
{
"epoch": 0.5623980653506552,
"grad_norm": 1.0546875,
"learning_rate": 7.642191473582017e-07,
"loss": 1.7024,
"step": 8750
},
{
"epoch": 0.5630408059967702,
"grad_norm": 0.83203125,
"learning_rate": 7.521955958291149e-07,
"loss": 1.6801,
"step": 8760
},
{
"epoch": 0.5636835466428852,
"grad_norm": 1.1484375,
"learning_rate": 7.402636825822796e-07,
"loss": 1.6948,
"step": 8770
},
{
"epoch": 0.5643262872890003,
"grad_norm": 0.9921875,
"learning_rate": 7.284235258533356e-07,
"loss": 1.7275,
"step": 8780
},
{
"epoch": 0.5649690279351154,
"grad_norm": 0.91796875,
"learning_rate": 7.166752429686841e-07,
"loss": 1.709,
"step": 8790
},
{
"epoch": 0.5656117685812304,
"grad_norm": 0.81640625,
"learning_rate": 7.050189503443328e-07,
"loss": 1.813,
"step": 8800
},
{
"epoch": 0.5662545092273454,
"grad_norm": 0.91015625,
"learning_rate": 6.934547634847377e-07,
"loss": 1.6886,
"step": 8810
},
{
"epoch": 0.5668972498734605,
"grad_norm": 0.98046875,
"learning_rate": 6.819827969816661e-07,
"loss": 1.7213,
"step": 8820
},
{
"epoch": 0.5675399905195755,
"grad_norm": 1.078125,
"learning_rate": 6.706031645130506e-07,
"loss": 1.7021,
"step": 8830
},
{
"epoch": 0.5681827311656905,
"grad_norm": 0.95703125,
"learning_rate": 6.59315978841869e-07,
"loss": 1.7319,
"step": 8840
},
{
"epoch": 0.5688254718118055,
"grad_norm": 0.89453125,
"learning_rate": 6.481213518150287e-07,
"loss": 1.7041,
"step": 8850
},
{
"epoch": 0.5694682124579206,
"grad_norm": 1.0234375,
"learning_rate": 6.370193943622549e-07,
"loss": 1.7471,
"step": 8860
},
{
"epoch": 0.5701109531040356,
"grad_norm": 0.94140625,
"learning_rate": 6.260102164949855e-07,
"loss": 1.7725,
"step": 8870
},
{
"epoch": 0.5707536937501506,
"grad_norm": 0.69921875,
"learning_rate": 6.150939273053014e-07,
"loss": 1.603,
"step": 8880
},
{
"epoch": 0.5713964343962656,
"grad_norm": 0.90625,
"learning_rate": 6.042706349648153e-07,
"loss": 1.6237,
"step": 8890
},
{
"epoch": 0.5720391750423807,
"grad_norm": 1.3515625,
"learning_rate": 5.935404467236283e-07,
"loss": 1.666,
"step": 8900
},
{
"epoch": 0.5726819156884957,
"grad_norm": 1.109375,
"learning_rate": 5.829034689092517e-07,
"loss": 1.6685,
"step": 8910
},
{
"epoch": 0.5733246563346108,
"grad_norm": 0.94140625,
"learning_rate": 5.723598069255531e-07,
"loss": 1.6965,
"step": 8920
},
{
"epoch": 0.5739673969807259,
"grad_norm": 0.890625,
"learning_rate": 5.619095652517181e-07,
"loss": 1.7085,
"step": 8930
},
{
"epoch": 0.5746101376268409,
"grad_norm": 1.96875,
"learning_rate": 5.515528474412135e-07,
"loss": 1.7548,
"step": 8940
},
{
"epoch": 0.5752528782729559,
"grad_norm": 0.9140625,
"learning_rate": 5.412897561207597e-07,
"loss": 1.6175,
"step": 8950
},
{
"epoch": 0.5758956189190709,
"grad_norm": 1.5546875,
"learning_rate": 5.311203929893105e-07,
"loss": 1.732,
"step": 8960
},
{
"epoch": 0.576538359565186,
"grad_norm": 1.140625,
"learning_rate": 5.210448588170558e-07,
"loss": 1.6451,
"step": 8970
},
{
"epoch": 0.577181100211301,
"grad_norm": 1.125,
"learning_rate": 5.110632534444094e-07,
"loss": 1.7237,
"step": 8980
},
{
"epoch": 0.577823840857416,
"grad_norm": 0.95703125,
"learning_rate": 5.011756757810293e-07,
"loss": 1.6691,
"step": 8990
},
{
"epoch": 0.578466581503531,
"grad_norm": 1.203125,
"learning_rate": 4.913822238048361e-07,
"loss": 1.7374,
"step": 9000
}
],
"logging_steps": 10,
"max_steps": 10000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 3.406468034246738e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}