bge-m3-p2-512 / checkpoint-13750 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 13750,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0036363636363636364,
"grad_norm": 8.730692897709844,
"learning_rate": 1.0827317615529767e-06,
"loss": 0.1943,
"step": 50
},
{
"epoch": 0.007272727272727273,
"grad_norm": 6.563599993841062,
"learning_rate": 1.2745743112470354e-06,
"loss": 0.1472,
"step": 100
},
{
"epoch": 0.01090909090909091,
"grad_norm": 11.109271986600454,
"learning_rate": 1.386795008860794e-06,
"loss": 0.1713,
"step": 150
},
{
"epoch": 0.014545454545454545,
"grad_norm": 5.772674964443883,
"learning_rate": 1.4664168609410934e-06,
"loss": 0.1585,
"step": 200
},
{
"epoch": 0.01818181818181818,
"grad_norm": 14.76596653457522,
"learning_rate": 1.5281763674824356e-06,
"loss": 0.1429,
"step": 250
},
{
"epoch": 0.02181818181818182,
"grad_norm": 9.568061757019173,
"learning_rate": 1.5786375585548526e-06,
"loss": 0.1316,
"step": 300
},
{
"epoch": 0.025454545454545455,
"grad_norm": 10.301645195685637,
"learning_rate": 1.621301887696672e-06,
"loss": 0.1305,
"step": 350
},
{
"epoch": 0.02909090909090909,
"grad_norm": 5.6993526158027645,
"learning_rate": 1.6582594106351517e-06,
"loss": 0.109,
"step": 400
},
{
"epoch": 0.03272727272727273,
"grad_norm": 4.915542646471226,
"learning_rate": 1.6908582561686116e-06,
"loss": 0.1269,
"step": 450
},
{
"epoch": 0.03636363636363636,
"grad_norm": 9.392642904031657,
"learning_rate": 1.720018917176494e-06,
"loss": 0.1201,
"step": 500
},
{
"epoch": 0.04,
"grad_norm": 5.047170718950901,
"learning_rate": 1.746397943764599e-06,
"loss": 0.1304,
"step": 550
},
{
"epoch": 0.04363636363636364,
"grad_norm": 6.865869458534065,
"learning_rate": 1.7704801082489109e-06,
"loss": 0.1185,
"step": 600
},
{
"epoch": 0.04727272727272727,
"grad_norm": 7.641476199550763,
"learning_rate": 1.7926335520703266e-06,
"loss": 0.1071,
"step": 650
},
{
"epoch": 0.05090909090909091,
"grad_norm": 5.401679074654614,
"learning_rate": 1.8131444373907304e-06,
"loss": 0.1153,
"step": 700
},
{
"epoch": 0.05454545454545454,
"grad_norm": 6.576489460422398,
"learning_rate": 1.8322396147902533e-06,
"loss": 0.1104,
"step": 750
},
{
"epoch": 0.05818181818181818,
"grad_norm": 7.069491006747404,
"learning_rate": 1.8501019603292104e-06,
"loss": 0.1164,
"step": 800
},
{
"epoch": 0.06181818181818182,
"grad_norm": 5.9290634450105975,
"learning_rate": 1.866881054798162e-06,
"loss": 0.1219,
"step": 850
},
{
"epoch": 0.06545454545454546,
"grad_norm": 5.426281987614212,
"learning_rate": 1.88270080586267e-06,
"loss": 0.1215,
"step": 900
},
{
"epoch": 0.06909090909090909,
"grad_norm": 7.8695827198040105,
"learning_rate": 1.8976650066475357e-06,
"loss": 0.1016,
"step": 950
},
{
"epoch": 0.07272727272727272,
"grad_norm": 16.511242359906724,
"learning_rate": 1.9118614668705526e-06,
"loss": 0.1005,
"step": 1000
},
{
"epoch": 0.07636363636363637,
"grad_norm": 5.456737119903023,
"learning_rate": 1.9253651350044893e-06,
"loss": 0.0967,
"step": 1050
},
{
"epoch": 0.08,
"grad_norm": 3.0750937470823105,
"learning_rate": 1.9382404934586575e-06,
"loss": 0.0989,
"step": 1100
},
{
"epoch": 0.08363636363636363,
"grad_norm": 8.699529648266866,
"learning_rate": 1.9505434209019962e-06,
"loss": 0.0988,
"step": 1150
},
{
"epoch": 0.08727272727272728,
"grad_norm": 6.77391900591751,
"learning_rate": 1.9623226579429693e-06,
"loss": 0.1078,
"step": 1200
},
{
"epoch": 0.09090909090909091,
"grad_norm": 6.290303703702585,
"learning_rate": 1.973620973411895e-06,
"loss": 0.1118,
"step": 1250
},
{
"epoch": 0.09454545454545454,
"grad_norm": 4.7489952229331,
"learning_rate": 1.984476101764385e-06,
"loss": 0.1014,
"step": 1300
},
{
"epoch": 0.09818181818181818,
"grad_norm": 5.92022086118128,
"learning_rate": 1.9949215034764293e-06,
"loss": 0.0865,
"step": 1350
},
{
"epoch": 0.10181818181818182,
"grad_norm": 3.9298710865494013,
"learning_rate": 1.9961212121212122e-06,
"loss": 0.1019,
"step": 1400
},
{
"epoch": 0.10545454545454545,
"grad_norm": 1.35952985911884,
"learning_rate": 1.9880404040404038e-06,
"loss": 0.0889,
"step": 1450
},
{
"epoch": 0.10909090909090909,
"grad_norm": 6.074760388330947,
"learning_rate": 1.9799595959595958e-06,
"loss": 0.088,
"step": 1500
},
{
"epoch": 0.11272727272727273,
"grad_norm": 3.281001635643186,
"learning_rate": 1.9718787878787877e-06,
"loss": 0.0939,
"step": 1550
},
{
"epoch": 0.11636363636363636,
"grad_norm": 2.834658163409011,
"learning_rate": 1.9637979797979797e-06,
"loss": 0.0941,
"step": 1600
},
{
"epoch": 0.12,
"grad_norm": 5.716408171893896,
"learning_rate": 1.9557171717171717e-06,
"loss": 0.0869,
"step": 1650
},
{
"epoch": 0.12363636363636364,
"grad_norm": 4.355380070215489,
"learning_rate": 1.9476363636363637e-06,
"loss": 0.0879,
"step": 1700
},
{
"epoch": 0.12727272727272726,
"grad_norm": 2.56506898084542,
"learning_rate": 1.939555555555555e-06,
"loss": 0.0833,
"step": 1750
},
{
"epoch": 0.13090909090909092,
"grad_norm": 3.1998963994898744,
"learning_rate": 1.9314747474747476e-06,
"loss": 0.0827,
"step": 1800
},
{
"epoch": 0.13454545454545455,
"grad_norm": 2.993946484706046,
"learning_rate": 1.923393939393939e-06,
"loss": 0.0884,
"step": 1850
},
{
"epoch": 0.13818181818181818,
"grad_norm": 4.066355592263713,
"learning_rate": 1.915313131313131e-06,
"loss": 0.0843,
"step": 1900
},
{
"epoch": 0.14181818181818182,
"grad_norm": 3.9444301109471422,
"learning_rate": 1.9072323232323231e-06,
"loss": 0.079,
"step": 1950
},
{
"epoch": 0.14545454545454545,
"grad_norm": 4.998815586950459,
"learning_rate": 1.899151515151515e-06,
"loss": 0.0742,
"step": 2000
},
{
"epoch": 0.14909090909090908,
"grad_norm": 4.98600431497313,
"learning_rate": 1.891070707070707e-06,
"loss": 0.0883,
"step": 2050
},
{
"epoch": 0.15272727272727274,
"grad_norm": 7.068049048520739,
"learning_rate": 1.8829898989898988e-06,
"loss": 0.0888,
"step": 2100
},
{
"epoch": 0.15636363636363637,
"grad_norm": 6.445827098301751,
"learning_rate": 1.8749090909090908e-06,
"loss": 0.083,
"step": 2150
},
{
"epoch": 0.16,
"grad_norm": 4.422103821997445,
"learning_rate": 1.8668282828282826e-06,
"loss": 0.0784,
"step": 2200
},
{
"epoch": 0.16363636363636364,
"grad_norm": 10.593030877025758,
"learning_rate": 1.8589090909090909e-06,
"loss": 0.0907,
"step": 2250
},
{
"epoch": 0.16727272727272727,
"grad_norm": 5.73637966279623,
"learning_rate": 1.8508282828282828e-06,
"loss": 0.0745,
"step": 2300
},
{
"epoch": 0.1709090909090909,
"grad_norm": 4.435065232821809,
"learning_rate": 1.8427474747474748e-06,
"loss": 0.0764,
"step": 2350
},
{
"epoch": 0.17454545454545456,
"grad_norm": 6.932446538015379,
"learning_rate": 1.8346666666666666e-06,
"loss": 0.082,
"step": 2400
},
{
"epoch": 0.1781818181818182,
"grad_norm": 8.365214538194435,
"learning_rate": 1.8265858585858586e-06,
"loss": 0.0792,
"step": 2450
},
{
"epoch": 0.18181818181818182,
"grad_norm": 3.8114318602084714,
"learning_rate": 1.8185050505050503e-06,
"loss": 0.0819,
"step": 2500
},
{
"epoch": 0.18545454545454546,
"grad_norm": 8.151155598162092,
"learning_rate": 1.8104242424242425e-06,
"loss": 0.0796,
"step": 2550
},
{
"epoch": 0.1890909090909091,
"grad_norm": 3.853054291632494,
"learning_rate": 1.8023434343434343e-06,
"loss": 0.0724,
"step": 2600
},
{
"epoch": 0.19272727272727272,
"grad_norm": 2.7845888134928454,
"learning_rate": 1.7942626262626262e-06,
"loss": 0.0778,
"step": 2650
},
{
"epoch": 0.19636363636363635,
"grad_norm": 6.178204576735234,
"learning_rate": 1.786181818181818e-06,
"loss": 0.0852,
"step": 2700
},
{
"epoch": 0.2,
"grad_norm": 3.2752914823154926,
"learning_rate": 1.7781010101010102e-06,
"loss": 0.0724,
"step": 2750
},
{
"epoch": 0.20363636363636364,
"grad_norm": 4.209509329574692,
"learning_rate": 1.770020202020202e-06,
"loss": 0.0793,
"step": 2800
},
{
"epoch": 0.20727272727272728,
"grad_norm": 6.139612323065608,
"learning_rate": 1.761939393939394e-06,
"loss": 0.07,
"step": 2850
},
{
"epoch": 0.2109090909090909,
"grad_norm": 2.455643448424175,
"learning_rate": 1.7538585858585857e-06,
"loss": 0.0738,
"step": 2900
},
{
"epoch": 0.21454545454545454,
"grad_norm": 5.4333976968022695,
"learning_rate": 1.7457777777777779e-06,
"loss": 0.0783,
"step": 2950
},
{
"epoch": 0.21818181818181817,
"grad_norm": 5.0280981201819,
"learning_rate": 1.7376969696969696e-06,
"loss": 0.0615,
"step": 3000
},
{
"epoch": 0.22181818181818183,
"grad_norm": 6.437028886166945,
"learning_rate": 1.7296161616161616e-06,
"loss": 0.0766,
"step": 3050
},
{
"epoch": 0.22545454545454546,
"grad_norm": 2.5493783641885575,
"learning_rate": 1.7215353535353534e-06,
"loss": 0.0667,
"step": 3100
},
{
"epoch": 0.2290909090909091,
"grad_norm": 7.176307787653395,
"learning_rate": 1.7134545454545456e-06,
"loss": 0.0814,
"step": 3150
},
{
"epoch": 0.23272727272727273,
"grad_norm": 6.4838446342779585,
"learning_rate": 1.7053737373737373e-06,
"loss": 0.0804,
"step": 3200
},
{
"epoch": 0.23636363636363636,
"grad_norm": 6.05439389409086,
"learning_rate": 1.6972929292929293e-06,
"loss": 0.0758,
"step": 3250
},
{
"epoch": 0.24,
"grad_norm": 4.8914160530492925,
"learning_rate": 1.6893737373737374e-06,
"loss": 0.0726,
"step": 3300
},
{
"epoch": 0.24363636363636362,
"grad_norm": 5.1032181254180875,
"learning_rate": 1.6812929292929292e-06,
"loss": 0.0742,
"step": 3350
},
{
"epoch": 0.24727272727272728,
"grad_norm": 5.512044376565606,
"learning_rate": 1.6732121212121211e-06,
"loss": 0.0721,
"step": 3400
},
{
"epoch": 0.2509090909090909,
"grad_norm": 6.215671409867116,
"learning_rate": 1.6651313131313131e-06,
"loss": 0.0714,
"step": 3450
},
{
"epoch": 0.2545454545454545,
"grad_norm": 4.934931799389331,
"learning_rate": 1.657050505050505e-06,
"loss": 0.0834,
"step": 3500
},
{
"epoch": 0.2581818181818182,
"grad_norm": 5.766772809824982,
"learning_rate": 1.6489696969696969e-06,
"loss": 0.0719,
"step": 3550
},
{
"epoch": 0.26181818181818184,
"grad_norm": 2.3502736155525907,
"learning_rate": 1.6408888888888888e-06,
"loss": 0.0667,
"step": 3600
},
{
"epoch": 0.26545454545454544,
"grad_norm": 5.212095666658788,
"learning_rate": 1.6328080808080806e-06,
"loss": 0.0657,
"step": 3650
},
{
"epoch": 0.2690909090909091,
"grad_norm": 2.2102868298776994,
"learning_rate": 1.6247272727272728e-06,
"loss": 0.0746,
"step": 3700
},
{
"epoch": 0.2727272727272727,
"grad_norm": 7.107367934409905,
"learning_rate": 1.6166464646464645e-06,
"loss": 0.0686,
"step": 3750
},
{
"epoch": 0.27636363636363637,
"grad_norm": 4.9640350043256065,
"learning_rate": 1.6085656565656565e-06,
"loss": 0.0715,
"step": 3800
},
{
"epoch": 0.28,
"grad_norm": 3.8508170066035556,
"learning_rate": 1.6004848484848483e-06,
"loss": 0.0755,
"step": 3850
},
{
"epoch": 0.28363636363636363,
"grad_norm": 6.406204149035612,
"learning_rate": 1.5924040404040405e-06,
"loss": 0.0722,
"step": 3900
},
{
"epoch": 0.2872727272727273,
"grad_norm": 6.4755620057100005,
"learning_rate": 1.5843232323232322e-06,
"loss": 0.0653,
"step": 3950
},
{
"epoch": 0.2909090909090909,
"grad_norm": 4.7041545767525985,
"learning_rate": 1.5762424242424242e-06,
"loss": 0.061,
"step": 4000
},
{
"epoch": 0.29454545454545455,
"grad_norm": 6.597148868978393,
"learning_rate": 1.568161616161616e-06,
"loss": 0.0631,
"step": 4050
},
{
"epoch": 0.29818181818181816,
"grad_norm": 4.24188377459276,
"learning_rate": 1.5600808080808082e-06,
"loss": 0.0767,
"step": 4100
},
{
"epoch": 0.3018181818181818,
"grad_norm": 5.185458229586008,
"learning_rate": 1.552e-06,
"loss": 0.0768,
"step": 4150
},
{
"epoch": 0.3054545454545455,
"grad_norm": 7.282950927099741,
"learning_rate": 1.543919191919192e-06,
"loss": 0.0696,
"step": 4200
},
{
"epoch": 0.3090909090909091,
"grad_norm": 6.602554878720333,
"learning_rate": 1.5358383838383837e-06,
"loss": 0.0644,
"step": 4250
},
{
"epoch": 0.31272727272727274,
"grad_norm": 5.8760748346703755,
"learning_rate": 1.5277575757575756e-06,
"loss": 0.0629,
"step": 4300
},
{
"epoch": 0.31636363636363635,
"grad_norm": 5.87511103099038,
"learning_rate": 1.5198383838383837e-06,
"loss": 0.0699,
"step": 4350
},
{
"epoch": 0.32,
"grad_norm": 5.07793513016435,
"learning_rate": 1.5117575757575757e-06,
"loss": 0.0744,
"step": 4400
},
{
"epoch": 0.3236363636363636,
"grad_norm": 3.3381013307087595,
"learning_rate": 1.5036767676767677e-06,
"loss": 0.0671,
"step": 4450
},
{
"epoch": 0.32727272727272727,
"grad_norm": 6.087362688149139,
"learning_rate": 1.4955959595959594e-06,
"loss": 0.0687,
"step": 4500
},
{
"epoch": 0.33090909090909093,
"grad_norm": 3.459475929834813,
"learning_rate": 1.4875151515151514e-06,
"loss": 0.0685,
"step": 4550
},
{
"epoch": 0.33454545454545453,
"grad_norm": 4.891090834007153,
"learning_rate": 1.4794343434343432e-06,
"loss": 0.0725,
"step": 4600
},
{
"epoch": 0.3381818181818182,
"grad_norm": 4.691041549495357,
"learning_rate": 1.4713535353535354e-06,
"loss": 0.061,
"step": 4650
},
{
"epoch": 0.3418181818181818,
"grad_norm": 5.924562513722335,
"learning_rate": 1.4632727272727271e-06,
"loss": 0.0758,
"step": 4700
},
{
"epoch": 0.34545454545454546,
"grad_norm": 4.580382327650832,
"learning_rate": 1.4551919191919191e-06,
"loss": 0.0743,
"step": 4750
},
{
"epoch": 0.3490909090909091,
"grad_norm": 4.6681099203941026,
"learning_rate": 1.4471111111111109e-06,
"loss": 0.0656,
"step": 4800
},
{
"epoch": 0.3527272727272727,
"grad_norm": 5.030610606632313,
"learning_rate": 1.439030303030303e-06,
"loss": 0.0737,
"step": 4850
},
{
"epoch": 0.3563636363636364,
"grad_norm": 7.614614950202259,
"learning_rate": 1.4309494949494948e-06,
"loss": 0.0686,
"step": 4900
},
{
"epoch": 0.36,
"grad_norm": 5.981464365403329,
"learning_rate": 1.4228686868686868e-06,
"loss": 0.0748,
"step": 4950
},
{
"epoch": 0.36363636363636365,
"grad_norm": 4.277903564607442,
"learning_rate": 1.4147878787878786e-06,
"loss": 0.0568,
"step": 5000
},
{
"epoch": 0.36727272727272725,
"grad_norm": 17.93564630692927,
"learning_rate": 1.4067070707070707e-06,
"loss": 0.0663,
"step": 5050
},
{
"epoch": 0.3709090909090909,
"grad_norm": 2.2042619733871374,
"learning_rate": 1.3986262626262625e-06,
"loss": 0.0648,
"step": 5100
},
{
"epoch": 0.37454545454545457,
"grad_norm": 4.712133353867323,
"learning_rate": 1.3905454545454545e-06,
"loss": 0.0773,
"step": 5150
},
{
"epoch": 0.3781818181818182,
"grad_norm": 5.573958863346438,
"learning_rate": 1.3824646464646462e-06,
"loss": 0.0679,
"step": 5200
},
{
"epoch": 0.38181818181818183,
"grad_norm": 4.848377798083522,
"learning_rate": 1.3743838383838384e-06,
"loss": 0.0671,
"step": 5250
},
{
"epoch": 0.38545454545454544,
"grad_norm": 5.980348193388087,
"learning_rate": 1.3663030303030302e-06,
"loss": 0.0694,
"step": 5300
},
{
"epoch": 0.3890909090909091,
"grad_norm": 2.672380009268972,
"learning_rate": 1.3582222222222222e-06,
"loss": 0.0676,
"step": 5350
},
{
"epoch": 0.3927272727272727,
"grad_norm": 2.964040453903747,
"learning_rate": 1.350141414141414e-06,
"loss": 0.0643,
"step": 5400
},
{
"epoch": 0.39636363636363636,
"grad_norm": 7.038083972721559,
"learning_rate": 1.342060606060606e-06,
"loss": 0.0632,
"step": 5450
},
{
"epoch": 0.4,
"grad_norm": 3.8224124558617496,
"learning_rate": 1.3339797979797979e-06,
"loss": 0.0731,
"step": 5500
},
{
"epoch": 0.4036363636363636,
"grad_norm": 7.048096138652444,
"learning_rate": 1.3258989898989899e-06,
"loss": 0.0594,
"step": 5550
},
{
"epoch": 0.4072727272727273,
"grad_norm": 5.745560964316516,
"learning_rate": 1.3178181818181816e-06,
"loss": 0.0532,
"step": 5600
},
{
"epoch": 0.4109090909090909,
"grad_norm": 4.188291560272238,
"learning_rate": 1.3097373737373736e-06,
"loss": 0.0633,
"step": 5650
},
{
"epoch": 0.41454545454545455,
"grad_norm": 7.49562784549071,
"learning_rate": 1.3018181818181817e-06,
"loss": 0.0681,
"step": 5700
},
{
"epoch": 0.41818181818181815,
"grad_norm": 3.5275023161774404,
"learning_rate": 1.2937373737373737e-06,
"loss": 0.0664,
"step": 5750
},
{
"epoch": 0.4218181818181818,
"grad_norm": 4.119155442422377,
"learning_rate": 1.2856565656565656e-06,
"loss": 0.0617,
"step": 5800
},
{
"epoch": 0.4254545454545455,
"grad_norm": 3.065330909676055,
"learning_rate": 1.2775757575757576e-06,
"loss": 0.055,
"step": 5850
},
{
"epoch": 0.4290909090909091,
"grad_norm": 5.499516032340102,
"learning_rate": 1.2694949494949494e-06,
"loss": 0.0666,
"step": 5900
},
{
"epoch": 0.43272727272727274,
"grad_norm": 7.319764009851145,
"learning_rate": 1.2614141414141414e-06,
"loss": 0.0679,
"step": 5950
},
{
"epoch": 0.43636363636363634,
"grad_norm": 3.842328064446604,
"learning_rate": 1.2533333333333333e-06,
"loss": 0.0627,
"step": 6000
},
{
"epoch": 0.44,
"grad_norm": 1.6429715042707909,
"learning_rate": 1.2452525252525253e-06,
"loss": 0.0566,
"step": 6050
},
{
"epoch": 0.44363636363636366,
"grad_norm": 3.3628676599920797,
"learning_rate": 1.237171717171717e-06,
"loss": 0.0678,
"step": 6100
},
{
"epoch": 0.44727272727272727,
"grad_norm": 2.2255401941396613,
"learning_rate": 1.229090909090909e-06,
"loss": 0.0536,
"step": 6150
},
{
"epoch": 0.4509090909090909,
"grad_norm": 4.667731753921009,
"learning_rate": 1.221010101010101e-06,
"loss": 0.0704,
"step": 6200
},
{
"epoch": 0.45454545454545453,
"grad_norm": 4.3551424870793465,
"learning_rate": 1.212929292929293e-06,
"loss": 0.0637,
"step": 6250
},
{
"epoch": 0.4581818181818182,
"grad_norm": 3.3583233273895865,
"learning_rate": 1.2048484848484848e-06,
"loss": 0.0627,
"step": 6300
},
{
"epoch": 0.4618181818181818,
"grad_norm": 4.2019753851553165,
"learning_rate": 1.1967676767676767e-06,
"loss": 0.0549,
"step": 6350
},
{
"epoch": 0.46545454545454545,
"grad_norm": 2.5872795563931024,
"learning_rate": 1.1886868686868687e-06,
"loss": 0.0544,
"step": 6400
},
{
"epoch": 0.4690909090909091,
"grad_norm": 7.041225020724765,
"learning_rate": 1.1806060606060607e-06,
"loss": 0.06,
"step": 6450
},
{
"epoch": 0.4727272727272727,
"grad_norm": 5.958896992660691,
"learning_rate": 1.1725252525252524e-06,
"loss": 0.0564,
"step": 6500
},
{
"epoch": 0.4763636363636364,
"grad_norm": 4.476780431745298,
"learning_rate": 1.1644444444444444e-06,
"loss": 0.053,
"step": 6550
},
{
"epoch": 0.48,
"grad_norm": 1.356776109954863,
"learning_rate": 1.1565252525252525e-06,
"loss": 0.0601,
"step": 6600
},
{
"epoch": 0.48363636363636364,
"grad_norm": 8.192732907739552,
"learning_rate": 1.1484444444444443e-06,
"loss": 0.0569,
"step": 6650
},
{
"epoch": 0.48727272727272725,
"grad_norm": 4.683769267327389,
"learning_rate": 1.1403636363636365e-06,
"loss": 0.0573,
"step": 6700
},
{
"epoch": 0.4909090909090909,
"grad_norm": 2.2927166931841096,
"learning_rate": 1.1322828282828282e-06,
"loss": 0.0555,
"step": 6750
},
{
"epoch": 0.49454545454545457,
"grad_norm": 5.921072631266765,
"learning_rate": 1.1242020202020202e-06,
"loss": 0.0751,
"step": 6800
},
{
"epoch": 0.49818181818181817,
"grad_norm": 6.639001409977233,
"learning_rate": 1.116121212121212e-06,
"loss": 0.0585,
"step": 6850
},
{
"epoch": 0.5018181818181818,
"grad_norm": 4.5171318312031525,
"learning_rate": 1.108040404040404e-06,
"loss": 0.0598,
"step": 6900
},
{
"epoch": 0.5054545454545455,
"grad_norm": 3.889837713901535,
"learning_rate": 1.099959595959596e-06,
"loss": 0.0557,
"step": 6950
},
{
"epoch": 0.509090909090909,
"grad_norm": 4.7778306090713905,
"learning_rate": 1.091878787878788e-06,
"loss": 0.069,
"step": 7000
},
{
"epoch": 0.5127272727272727,
"grad_norm": 7.318064341364163,
"learning_rate": 1.0837979797979797e-06,
"loss": 0.0589,
"step": 7050
},
{
"epoch": 0.5163636363636364,
"grad_norm": 0.8134545807541753,
"learning_rate": 1.0757171717171716e-06,
"loss": 0.0568,
"step": 7100
},
{
"epoch": 0.52,
"grad_norm": 8.47016607740995,
"learning_rate": 1.0676363636363636e-06,
"loss": 0.065,
"step": 7150
},
{
"epoch": 0.5236363636363637,
"grad_norm": 4.216730157024882,
"learning_rate": 1.0595555555555556e-06,
"loss": 0.0551,
"step": 7200
},
{
"epoch": 0.5272727272727272,
"grad_norm": 8.409317988298476,
"learning_rate": 1.0514747474747473e-06,
"loss": 0.0591,
"step": 7250
},
{
"epoch": 0.5309090909090909,
"grad_norm": 3.8221711861975938,
"learning_rate": 1.0433939393939393e-06,
"loss": 0.0652,
"step": 7300
},
{
"epoch": 0.5345454545454545,
"grad_norm": 5.941432574111638,
"learning_rate": 1.0353131313131313e-06,
"loss": 0.0555,
"step": 7350
},
{
"epoch": 0.5381818181818182,
"grad_norm": 3.7955500798877027,
"learning_rate": 1.0272323232323233e-06,
"loss": 0.0639,
"step": 7400
},
{
"epoch": 0.5418181818181819,
"grad_norm": 5.077036391729846,
"learning_rate": 1.019151515151515e-06,
"loss": 0.0574,
"step": 7450
},
{
"epoch": 0.5454545454545454,
"grad_norm": 2.967208140674572,
"learning_rate": 1.011070707070707e-06,
"loss": 0.055,
"step": 7500
},
{
"epoch": 0.5490909090909091,
"grad_norm": 6.4064840134469065,
"learning_rate": 1.002989898989899e-06,
"loss": 0.0563,
"step": 7550
},
{
"epoch": 0.5527272727272727,
"grad_norm": 8.224100322274762,
"learning_rate": 9.949090909090908e-07,
"loss": 0.0542,
"step": 7600
},
{
"epoch": 0.5563636363636364,
"grad_norm": 7.195740329809225,
"learning_rate": 9.868282828282827e-07,
"loss": 0.0518,
"step": 7650
},
{
"epoch": 0.56,
"grad_norm": 1.4648844395390421,
"learning_rate": 9.787474747474747e-07,
"loss": 0.0513,
"step": 7700
},
{
"epoch": 0.5636363636363636,
"grad_norm": 1.2171654793506612,
"learning_rate": 9.706666666666667e-07,
"loss": 0.0611,
"step": 7750
},
{
"epoch": 0.5672727272727273,
"grad_norm": 3.5164849479935687,
"learning_rate": 9.625858585858584e-07,
"loss": 0.0637,
"step": 7800
},
{
"epoch": 0.5709090909090909,
"grad_norm": 7.553161151311784,
"learning_rate": 9.545050505050504e-07,
"loss": 0.0566,
"step": 7850
},
{
"epoch": 0.5745454545454546,
"grad_norm": 5.745398629139221,
"learning_rate": 9.464242424242424e-07,
"loss": 0.065,
"step": 7900
},
{
"epoch": 0.5781818181818181,
"grad_norm": 3.814619257014882,
"learning_rate": 9.383434343434344e-07,
"loss": 0.0598,
"step": 7950
},
{
"epoch": 0.5818181818181818,
"grad_norm": 4.131828897750148,
"learning_rate": 9.302626262626262e-07,
"loss": 0.0712,
"step": 8000
},
{
"epoch": 0.5854545454545454,
"grad_norm": 3.4914242723281075,
"learning_rate": 9.221818181818182e-07,
"loss": 0.0608,
"step": 8050
},
{
"epoch": 0.5890909090909091,
"grad_norm": 5.282146490628778,
"learning_rate": 9.141010101010101e-07,
"loss": 0.0555,
"step": 8100
},
{
"epoch": 0.5927272727272728,
"grad_norm": 6.151329591511757,
"learning_rate": 9.060202020202021e-07,
"loss": 0.0597,
"step": 8150
},
{
"epoch": 0.5963636363636363,
"grad_norm": 0.5491620334300515,
"learning_rate": 8.979393939393939e-07,
"loss": 0.0525,
"step": 8200
},
{
"epoch": 0.6,
"grad_norm": 6.024173360125982,
"learning_rate": 8.898585858585859e-07,
"loss": 0.0622,
"step": 8250
},
{
"epoch": 0.6036363636363636,
"grad_norm": 7.030861805690174,
"learning_rate": 8.817777777777778e-07,
"loss": 0.0556,
"step": 8300
},
{
"epoch": 0.6072727272727273,
"grad_norm": 5.380176092336188,
"learning_rate": 8.736969696969697e-07,
"loss": 0.0542,
"step": 8350
},
{
"epoch": 0.610909090909091,
"grad_norm": 5.417266910258118,
"learning_rate": 8.656161616161616e-07,
"loss": 0.0492,
"step": 8400
},
{
"epoch": 0.6145454545454545,
"grad_norm": 4.935151132976808,
"learning_rate": 8.575353535353535e-07,
"loss": 0.0492,
"step": 8450
},
{
"epoch": 0.6181818181818182,
"grad_norm": 4.659946484892871,
"learning_rate": 8.494545454545455e-07,
"loss": 0.0571,
"step": 8500
},
{
"epoch": 0.6218181818181818,
"grad_norm": 6.771681262010034,
"learning_rate": 8.413737373737373e-07,
"loss": 0.0599,
"step": 8550
},
{
"epoch": 0.6254545454545455,
"grad_norm": 6.08702835668505,
"learning_rate": 8.332929292929293e-07,
"loss": 0.0685,
"step": 8600
},
{
"epoch": 0.6290909090909091,
"grad_norm": 6.534364094809468,
"learning_rate": 8.252121212121212e-07,
"loss": 0.0546,
"step": 8650
},
{
"epoch": 0.6327272727272727,
"grad_norm": 3.834673453780823,
"learning_rate": 8.171313131313131e-07,
"loss": 0.0597,
"step": 8700
},
{
"epoch": 0.6363636363636364,
"grad_norm": 6.698669517846524,
"learning_rate": 8.09050505050505e-07,
"loss": 0.056,
"step": 8750
},
{
"epoch": 0.64,
"grad_norm": 5.4043455793793775,
"learning_rate": 8.00969696969697e-07,
"loss": 0.0613,
"step": 8800
},
{
"epoch": 0.6436363636363637,
"grad_norm": 1.660910042565409,
"learning_rate": 7.928888888888889e-07,
"loss": 0.0598,
"step": 8850
},
{
"epoch": 0.6472727272727272,
"grad_norm": 7.795287679730981,
"learning_rate": 7.848080808080808e-07,
"loss": 0.0507,
"step": 8900
},
{
"epoch": 0.6509090909090909,
"grad_norm": 3.293884222491616,
"learning_rate": 7.767272727272727e-07,
"loss": 0.0505,
"step": 8950
},
{
"epoch": 0.6545454545454545,
"grad_norm": 4.230324246940526,
"learning_rate": 7.686464646464647e-07,
"loss": 0.048,
"step": 9000
},
{
"epoch": 0.6581818181818182,
"grad_norm": 3.140145943767063,
"learning_rate": 7.605656565656565e-07,
"loss": 0.0641,
"step": 9050
},
{
"epoch": 0.6618181818181819,
"grad_norm": 4.863206340412036,
"learning_rate": 7.526464646464646e-07,
"loss": 0.0598,
"step": 9100
},
{
"epoch": 0.6654545454545454,
"grad_norm": 3.458397202112644,
"learning_rate": 7.445656565656565e-07,
"loss": 0.0483,
"step": 9150
},
{
"epoch": 0.6690909090909091,
"grad_norm": 4.78959754830013,
"learning_rate": 7.364848484848485e-07,
"loss": 0.0536,
"step": 9200
},
{
"epoch": 0.6727272727272727,
"grad_norm": 10.740436647088291,
"learning_rate": 7.284040404040404e-07,
"loss": 0.0572,
"step": 9250
},
{
"epoch": 0.6763636363636364,
"grad_norm": 1.6489558557462445,
"learning_rate": 7.204848484848484e-07,
"loss": 0.0604,
"step": 9300
},
{
"epoch": 0.68,
"grad_norm": 7.3316901995966335,
"learning_rate": 7.124040404040403e-07,
"loss": 0.0559,
"step": 9350
},
{
"epoch": 0.6836363636363636,
"grad_norm": 1.5208003767788665,
"learning_rate": 7.043232323232323e-07,
"loss": 0.0523,
"step": 9400
},
{
"epoch": 0.6872727272727273,
"grad_norm": 3.7519776533636673,
"learning_rate": 6.962424242424242e-07,
"loss": 0.0482,
"step": 9450
},
{
"epoch": 0.6909090909090909,
"grad_norm": 1.6222324278842861,
"learning_rate": 6.881616161616161e-07,
"loss": 0.0572,
"step": 9500
},
{
"epoch": 0.6945454545454546,
"grad_norm": 7.0411497148292925,
"learning_rate": 6.80080808080808e-07,
"loss": 0.0571,
"step": 9550
},
{
"epoch": 0.6981818181818182,
"grad_norm": 2.6841788063694616,
"learning_rate": 6.72e-07,
"loss": 0.0564,
"step": 9600
},
{
"epoch": 0.7018181818181818,
"grad_norm": 6.819000782187993,
"learning_rate": 6.639191919191919e-07,
"loss": 0.0605,
"step": 9650
},
{
"epoch": 0.7054545454545454,
"grad_norm": 7.92210806178369,
"learning_rate": 6.558383838383838e-07,
"loss": 0.0548,
"step": 9700
},
{
"epoch": 0.7090909090909091,
"grad_norm": 4.811455749568285,
"learning_rate": 6.477575757575757e-07,
"loss": 0.0528,
"step": 9750
},
{
"epoch": 0.7127272727272728,
"grad_norm": 5.308321038455987,
"learning_rate": 6.396767676767676e-07,
"loss": 0.0527,
"step": 9800
},
{
"epoch": 0.7163636363636363,
"grad_norm": 5.069220520833826,
"learning_rate": 6.315959595959595e-07,
"loss": 0.0566,
"step": 9850
},
{
"epoch": 0.72,
"grad_norm": 5.151662369356777,
"learning_rate": 6.235151515151514e-07,
"loss": 0.0567,
"step": 9900
},
{
"epoch": 0.7236363636363636,
"grad_norm": 6.063613002898996,
"learning_rate": 6.154343434343434e-07,
"loss": 0.0587,
"step": 9950
},
{
"epoch": 0.7272727272727273,
"grad_norm": 1.0628386687029727,
"learning_rate": 6.073535353535353e-07,
"loss": 0.0539,
"step": 10000
},
{
"epoch": 0.730909090909091,
"grad_norm": 4.641899576056589,
"learning_rate": 5.992727272727272e-07,
"loss": 0.0516,
"step": 10050
},
{
"epoch": 0.7345454545454545,
"grad_norm": 4.650551400255244,
"learning_rate": 5.911919191919191e-07,
"loss": 0.0471,
"step": 10100
},
{
"epoch": 0.7381818181818182,
"grad_norm": 4.567296054840706,
"learning_rate": 5.831111111111111e-07,
"loss": 0.0479,
"step": 10150
},
{
"epoch": 0.7418181818181818,
"grad_norm": 5.044836996902614,
"learning_rate": 5.750303030303029e-07,
"loss": 0.0561,
"step": 10200
},
{
"epoch": 0.7454545454545455,
"grad_norm": 4.248950379851986,
"learning_rate": 5.669494949494949e-07,
"loss": 0.0601,
"step": 10250
},
{
"epoch": 0.7490909090909091,
"grad_norm": 2.028141754034167,
"learning_rate": 5.588686868686868e-07,
"loss": 0.0422,
"step": 10300
},
{
"epoch": 0.7527272727272727,
"grad_norm": 10.007581888321717,
"learning_rate": 5.507878787878788e-07,
"loss": 0.0469,
"step": 10350
},
{
"epoch": 0.7563636363636363,
"grad_norm": 4.577989595773856,
"learning_rate": 5.427070707070706e-07,
"loss": 0.05,
"step": 10400
},
{
"epoch": 0.76,
"grad_norm": 6.521133984672062,
"learning_rate": 5.346262626262626e-07,
"loss": 0.0428,
"step": 10450
},
{
"epoch": 0.7636363636363637,
"grad_norm": 3.9509494509580705,
"learning_rate": 5.265454545454545e-07,
"loss": 0.0522,
"step": 10500
},
{
"epoch": 0.7672727272727272,
"grad_norm": 6.944726977217027,
"learning_rate": 5.184646464646465e-07,
"loss": 0.0515,
"step": 10550
},
{
"epoch": 0.7709090909090909,
"grad_norm": 3.831893622894902,
"learning_rate": 5.103838383838383e-07,
"loss": 0.0541,
"step": 10600
},
{
"epoch": 0.7745454545454545,
"grad_norm": 4.8538480771451695,
"learning_rate": 5.023030303030302e-07,
"loss": 0.06,
"step": 10650
},
{
"epoch": 0.7781818181818182,
"grad_norm": 9.741124343678399,
"learning_rate": 4.942222222222222e-07,
"loss": 0.0522,
"step": 10700
},
{
"epoch": 0.7818181818181819,
"grad_norm": 1.637375802142195,
"learning_rate": 4.861414141414141e-07,
"loss": 0.0479,
"step": 10750
},
{
"epoch": 0.7854545454545454,
"grad_norm": 2.3084795257142936,
"learning_rate": 4.78060606060606e-07,
"loss": 0.0626,
"step": 10800
},
{
"epoch": 0.7890909090909091,
"grad_norm": 6.674598298862028,
"learning_rate": 4.69979797979798e-07,
"loss": 0.0476,
"step": 10850
},
{
"epoch": 0.7927272727272727,
"grad_norm": 5.520833007044753,
"learning_rate": 4.618989898989899e-07,
"loss": 0.0473,
"step": 10900
},
{
"epoch": 0.7963636363636364,
"grad_norm": 7.535471892284138,
"learning_rate": 4.5381818181818183e-07,
"loss": 0.0541,
"step": 10950
},
{
"epoch": 0.8,
"grad_norm": 2.119845815305954,
"learning_rate": 4.457373737373737e-07,
"loss": 0.0469,
"step": 11000
},
{
"epoch": 0.8036363636363636,
"grad_norm": 4.865422744810339,
"learning_rate": 4.376565656565656e-07,
"loss": 0.0542,
"step": 11050
},
{
"epoch": 0.8072727272727273,
"grad_norm": 4.381485410017174,
"learning_rate": 4.2957575757575754e-07,
"loss": 0.0595,
"step": 11100
},
{
"epoch": 0.8109090909090909,
"grad_norm": 4.763162545803381,
"learning_rate": 4.2149494949494947e-07,
"loss": 0.0542,
"step": 11150
},
{
"epoch": 0.8145454545454546,
"grad_norm": 3.986784442089026,
"learning_rate": 4.134141414141414e-07,
"loss": 0.0533,
"step": 11200
},
{
"epoch": 0.8181818181818182,
"grad_norm": 1.9368964454962925,
"learning_rate": 4.053333333333333e-07,
"loss": 0.051,
"step": 11250
},
{
"epoch": 0.8218181818181818,
"grad_norm": 5.557279558773371,
"learning_rate": 3.9725252525252523e-07,
"loss": 0.0507,
"step": 11300
},
{
"epoch": 0.8254545454545454,
"grad_norm": 5.301180819559454,
"learning_rate": 3.8917171717171715e-07,
"loss": 0.0478,
"step": 11350
},
{
"epoch": 0.8290909090909091,
"grad_norm": 6.393245840471723,
"learning_rate": 3.810909090909091e-07,
"loss": 0.0541,
"step": 11400
},
{
"epoch": 0.8327272727272728,
"grad_norm": 5.140132428255607,
"learning_rate": 3.731717171717171e-07,
"loss": 0.0574,
"step": 11450
},
{
"epoch": 0.8363636363636363,
"grad_norm": 4.020573635641951,
"learning_rate": 3.6509090909090904e-07,
"loss": 0.0488,
"step": 11500
},
{
"epoch": 0.84,
"grad_norm": 8.669170555909107,
"learning_rate": 3.5701010101010096e-07,
"loss": 0.0459,
"step": 11550
},
{
"epoch": 0.8436363636363636,
"grad_norm": 2.7227046748798425,
"learning_rate": 3.489292929292929e-07,
"loss": 0.0427,
"step": 11600
},
{
"epoch": 0.8472727272727273,
"grad_norm": 8.264092633240582,
"learning_rate": 3.408484848484848e-07,
"loss": 0.0443,
"step": 11650
},
{
"epoch": 0.850909090909091,
"grad_norm": 2.2161705174100543,
"learning_rate": 3.3276767676767673e-07,
"loss": 0.05,
"step": 11700
},
{
"epoch": 0.8545454545454545,
"grad_norm": 4.049181895573167,
"learning_rate": 3.2468686868686865e-07,
"loss": 0.045,
"step": 11750
},
{
"epoch": 0.8581818181818182,
"grad_norm": 6.448219337405561,
"learning_rate": 3.1660606060606057e-07,
"loss": 0.0445,
"step": 11800
},
{
"epoch": 0.8618181818181818,
"grad_norm": 1.6198743531280058,
"learning_rate": 3.0852525252525255e-07,
"loss": 0.0436,
"step": 11850
},
{
"epoch": 0.8654545454545455,
"grad_norm": 5.887817361300306,
"learning_rate": 3.0044444444444447e-07,
"loss": 0.058,
"step": 11900
},
{
"epoch": 0.8690909090909091,
"grad_norm": 5.420521679930149,
"learning_rate": 2.923636363636364e-07,
"loss": 0.0578,
"step": 11950
},
{
"epoch": 0.8727272727272727,
"grad_norm": 6.9211836011036105,
"learning_rate": 2.842828282828283e-07,
"loss": 0.0468,
"step": 12000
},
{
"epoch": 0.8763636363636363,
"grad_norm": 6.962499989042162,
"learning_rate": 2.7620202020202024e-07,
"loss": 0.0565,
"step": 12050
},
{
"epoch": 0.88,
"grad_norm": 2.251073369325601,
"learning_rate": 2.681212121212121e-07,
"loss": 0.051,
"step": 12100
},
{
"epoch": 0.8836363636363637,
"grad_norm": 6.051100878218682,
"learning_rate": 2.6004040404040403e-07,
"loss": 0.0494,
"step": 12150
},
{
"epoch": 0.8872727272727273,
"grad_norm": 5.559109629686928,
"learning_rate": 2.5195959595959595e-07,
"loss": 0.0508,
"step": 12200
},
{
"epoch": 0.8909090909090909,
"grad_norm": 2.7004728150520614,
"learning_rate": 2.4387878787878787e-07,
"loss": 0.0514,
"step": 12250
},
{
"epoch": 0.8945454545454545,
"grad_norm": 7.949939262110135,
"learning_rate": 2.3579797979797977e-07,
"loss": 0.0573,
"step": 12300
},
{
"epoch": 0.8981818181818182,
"grad_norm": 6.02491008010814,
"learning_rate": 2.277171717171717e-07,
"loss": 0.0526,
"step": 12350
},
{
"epoch": 0.9018181818181819,
"grad_norm": 2.893965651713638,
"learning_rate": 2.196363636363636e-07,
"loss": 0.0553,
"step": 12400
},
{
"epoch": 0.9054545454545454,
"grad_norm": 0.7646769278371894,
"learning_rate": 2.1155555555555554e-07,
"loss": 0.0485,
"step": 12450
},
{
"epoch": 0.9090909090909091,
"grad_norm": 2.4601955688086847,
"learning_rate": 2.0347474747474746e-07,
"loss": 0.0437,
"step": 12500
},
{
"epoch": 0.9127272727272727,
"grad_norm": 6.845607279107924,
"learning_rate": 1.9539393939393938e-07,
"loss": 0.0456,
"step": 12550
},
{
"epoch": 0.9163636363636364,
"grad_norm": 3.267607748623562,
"learning_rate": 1.873131313131313e-07,
"loss": 0.0451,
"step": 12600
},
{
"epoch": 0.92,
"grad_norm": 2.1934900287360666,
"learning_rate": 1.7923232323232322e-07,
"loss": 0.0461,
"step": 12650
},
{
"epoch": 0.9236363636363636,
"grad_norm": 2.090160880581652,
"learning_rate": 1.7115151515151512e-07,
"loss": 0.0505,
"step": 12700
},
{
"epoch": 0.9272727272727272,
"grad_norm": 4.966097333564902,
"learning_rate": 1.6307070707070704e-07,
"loss": 0.0537,
"step": 12750
},
{
"epoch": 0.9309090909090909,
"grad_norm": 2.1214979148493076,
"learning_rate": 1.5498989898989896e-07,
"loss": 0.0461,
"step": 12800
},
{
"epoch": 0.9345454545454546,
"grad_norm": 4.331745712735202,
"learning_rate": 1.4690909090909089e-07,
"loss": 0.0499,
"step": 12850
},
{
"epoch": 0.9381818181818182,
"grad_norm": 8.581243563077976,
"learning_rate": 1.3882828282828284e-07,
"loss": 0.0493,
"step": 12900
},
{
"epoch": 0.9418181818181818,
"grad_norm": 4.054442881590284,
"learning_rate": 1.3074747474747476e-07,
"loss": 0.0556,
"step": 12950
},
{
"epoch": 0.9454545454545454,
"grad_norm": 5.0357651442227285,
"learning_rate": 1.2266666666666665e-07,
"loss": 0.0523,
"step": 13000
},
{
"epoch": 0.9490909090909091,
"grad_norm": 4.173721876206746,
"learning_rate": 1.1458585858585858e-07,
"loss": 0.042,
"step": 13050
},
{
"epoch": 0.9527272727272728,
"grad_norm": 5.514831485888669,
"learning_rate": 1.065050505050505e-07,
"loss": 0.0492,
"step": 13100
},
{
"epoch": 0.9563636363636364,
"grad_norm": 5.89925297032685,
"learning_rate": 9.842424242424242e-08,
"loss": 0.0493,
"step": 13150
},
{
"epoch": 0.96,
"grad_norm": 6.457997696881727,
"learning_rate": 9.034343434343433e-08,
"loss": 0.0516,
"step": 13200
},
{
"epoch": 0.9636363636363636,
"grad_norm": 10.243171836638412,
"learning_rate": 8.226262626262625e-08,
"loss": 0.0529,
"step": 13250
},
{
"epoch": 0.9672727272727273,
"grad_norm": 2.4837463351233695,
"learning_rate": 7.418181818181817e-08,
"loss": 0.0418,
"step": 13300
},
{
"epoch": 0.9709090909090909,
"grad_norm": 7.115315330310798,
"learning_rate": 6.61010101010101e-08,
"loss": 0.0537,
"step": 13350
},
{
"epoch": 0.9745454545454545,
"grad_norm": 7.137360943243018,
"learning_rate": 5.818181818181818e-08,
"loss": 0.0446,
"step": 13400
},
{
"epoch": 0.9781818181818182,
"grad_norm": 5.279850881667171,
"learning_rate": 5.0101010101010105e-08,
"loss": 0.0504,
"step": 13450
},
{
"epoch": 0.9818181818181818,
"grad_norm": 4.781435096498879,
"learning_rate": 4.202020202020202e-08,
"loss": 0.0519,
"step": 13500
},
{
"epoch": 0.9854545454545455,
"grad_norm": 4.141078938486959,
"learning_rate": 3.393939393939394e-08,
"loss": 0.057,
"step": 13550
},
{
"epoch": 0.9890909090909091,
"grad_norm": 3.983226535807257,
"learning_rate": 2.5858585858585858e-08,
"loss": 0.0588,
"step": 13600
},
{
"epoch": 0.9927272727272727,
"grad_norm": 8.448308055260652,
"learning_rate": 1.7777777777777777e-08,
"loss": 0.0578,
"step": 13650
},
{
"epoch": 0.9963636363636363,
"grad_norm": 2.0151546432182283,
"learning_rate": 9.696969696969696e-09,
"loss": 0.0503,
"step": 13700
},
{
"epoch": 1.0,
"grad_norm": 2.6109640799264664,
"learning_rate": 1.6161616161616161e-09,
"loss": 0.0573,
"step": 13750
}
],
"logging_steps": 50,
"max_steps": 13750,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 3000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
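
A minimal sketch (not part of the checkpoint itself) of how this trainer_state.json could be inspected, assuming the file sits in the working directory; it relies only on the fields shown above (log_history, logging_steps, step, loss, learning_rate):

# Load the trainer state and print a brief summary of the logged loss curve.
import json

with open("trainer_state.json", encoding="utf-8") as f:
    state = json.load(f)

history = state["log_history"]  # list of dicts: epoch, grad_norm, learning_rate, loss, step
first, last = history[0], history[-1]
print(f"logged points: {len(history)} (every {state['logging_steps']} steps)")
print(f"loss at step {first['step']}: {first['loss']:.4f}")
print(f"loss at step {last['step']}: {last['loss']:.4f}")
print(f"final learning rate: {last['learning_rate']:.3e}")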