{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9998222222222222, |
|
"eval_steps": 5000, |
|
"global_step": 2812, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0035555555555555557, |
|
"grad_norm": 0.4803234934806824, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.5376, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0071111111111111115, |
|
"grad_norm": 0.4609735310077667, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.3932, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.010666666666666666, |
|
"grad_norm": 0.4800471365451813, |
|
"learning_rate": 6e-06, |
|
"loss": 1.6084, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.014222222222222223, |
|
"grad_norm": 0.513812780380249, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.5579, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.017777777777777778, |
|
"grad_norm": 0.5741678476333618, |
|
"learning_rate": 1e-05, |
|
"loss": 1.5945, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.021333333333333333, |
|
"grad_norm": 0.42774268984794617, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.6154, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.024888888888888887, |
|
"grad_norm": 0.37852978706359863, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 1.5581, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.028444444444444446, |
|
"grad_norm": 0.48577743768692017, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 1.57, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.032, |
|
"grad_norm": 0.5572775602340698, |
|
"learning_rate": 1.8e-05, |
|
"loss": 1.6154, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.035555555555555556, |
|
"grad_norm": 0.4657267928123474, |
|
"learning_rate": 2e-05, |
|
"loss": 1.5583, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.03911111111111111, |
|
"grad_norm": 0.48570355772972107, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 1.6356, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.042666666666666665, |
|
"grad_norm": 0.5374159812927246, |
|
"learning_rate": 2.4e-05, |
|
"loss": 1.5911, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.04622222222222222, |
|
"grad_norm": 0.4544757306575775, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 1.5462, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.049777777777777775, |
|
"grad_norm": 0.49096816778182983, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 1.4696, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05333333333333334, |
|
"grad_norm": 0.45160388946533203, |
|
"learning_rate": 3e-05, |
|
"loss": 1.4397, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.05688888888888889, |
|
"grad_norm": 0.4654102325439453, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 1.61, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.060444444444444446, |
|
"grad_norm": 0.5608298182487488, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 1.5799, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.064, |
|
"grad_norm": 0.5087274312973022, |
|
"learning_rate": 3.6e-05, |
|
"loss": 1.6534, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.06755555555555555, |
|
"grad_norm": 0.5886797308921814, |
|
"learning_rate": 3.8e-05, |
|
"loss": 1.5561, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.07111111111111111, |
|
"grad_norm": 0.5276679396629333, |
|
"learning_rate": 4e-05, |
|
"loss": 1.5705, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07466666666666667, |
|
"grad_norm": 0.4331255257129669, |
|
"learning_rate": 4.2e-05, |
|
"loss": 1.554, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.07822222222222222, |
|
"grad_norm": 0.5068897604942322, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 1.451, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.08177777777777778, |
|
"grad_norm": 0.48896610736846924, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 1.6522, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.08533333333333333, |
|
"grad_norm": 0.5350568890571594, |
|
"learning_rate": 4.8e-05, |
|
"loss": 1.5571, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.08888888888888889, |
|
"grad_norm": 0.6370730400085449, |
|
"learning_rate": 5e-05, |
|
"loss": 1.6112, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.09244444444444444, |
|
"grad_norm": 0.5409080386161804, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 1.5878, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.096, |
|
"grad_norm": 0.5505415797233582, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 1.6938, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.09955555555555555, |
|
"grad_norm": 0.557578980922699, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 1.6776, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.10311111111111111, |
|
"grad_norm": 0.5505691170692444, |
|
"learning_rate": 5.8e-05, |
|
"loss": 1.7429, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.10666666666666667, |
|
"grad_norm": 0.5328088998794556, |
|
"learning_rate": 6e-05, |
|
"loss": 1.6633, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.11022222222222222, |
|
"grad_norm": 0.4911518394947052, |
|
"learning_rate": 6.2e-05, |
|
"loss": 1.4746, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.11377777777777778, |
|
"grad_norm": 0.5215829610824585, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 1.4723, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.11733333333333333, |
|
"grad_norm": 0.5551868081092834, |
|
"learning_rate": 6.6e-05, |
|
"loss": 1.5322, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.12088888888888889, |
|
"grad_norm": 0.607334554195404, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 1.7652, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.12444444444444444, |
|
"grad_norm": 0.4849174916744232, |
|
"learning_rate": 7e-05, |
|
"loss": 1.5915, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.128, |
|
"grad_norm": 0.5409530997276306, |
|
"learning_rate": 7.2e-05, |
|
"loss": 1.6263, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.13155555555555556, |
|
"grad_norm": 0.5955730080604553, |
|
"learning_rate": 7.4e-05, |
|
"loss": 1.598, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.1351111111111111, |
|
"grad_norm": 0.5792020559310913, |
|
"learning_rate": 7.6e-05, |
|
"loss": 1.7498, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.13866666666666666, |
|
"grad_norm": 0.6078203916549683, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 1.5885, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.14222222222222222, |
|
"grad_norm": 0.470548540353775, |
|
"learning_rate": 8e-05, |
|
"loss": 1.4596, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.14577777777777778, |
|
"grad_norm": 0.600089967250824, |
|
"learning_rate": 8.2e-05, |
|
"loss": 1.6343, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.14933333333333335, |
|
"grad_norm": 0.5364957451820374, |
|
"learning_rate": 8.4e-05, |
|
"loss": 1.4748, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.15288888888888888, |
|
"grad_norm": 0.5835921168327332, |
|
"learning_rate": 8.6e-05, |
|
"loss": 1.6809, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.15644444444444444, |
|
"grad_norm": 0.5760766863822937, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 1.6276, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.5958523154258728, |
|
"learning_rate": 9e-05, |
|
"loss": 1.4696, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.16355555555555557, |
|
"grad_norm": 0.5406598448753357, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 1.625, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.1671111111111111, |
|
"grad_norm": 0.5989697575569153, |
|
"learning_rate": 9.4e-05, |
|
"loss": 1.6854, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.17066666666666666, |
|
"grad_norm": 0.594978392124176, |
|
"learning_rate": 9.6e-05, |
|
"loss": 1.5774, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.17422222222222222, |
|
"grad_norm": 0.5095472931861877, |
|
"learning_rate": 9.8e-05, |
|
"loss": 1.7095, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.17777777777777778, |
|
"grad_norm": 0.7304088473320007, |
|
"learning_rate": 0.0001, |
|
"loss": 1.6842, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.18133333333333335, |
|
"grad_norm": 0.6202882528305054, |
|
"learning_rate": 9.95674740484429e-05, |
|
"loss": 1.696, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.18488888888888888, |
|
"grad_norm": 0.49087047576904297, |
|
"learning_rate": 9.913494809688582e-05, |
|
"loss": 1.6507, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.18844444444444444, |
|
"grad_norm": 0.6279426217079163, |
|
"learning_rate": 9.870242214532872e-05, |
|
"loss": 1.6809, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 0.602878987789154, |
|
"learning_rate": 9.826989619377162e-05, |
|
"loss": 1.6173, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.19555555555555557, |
|
"grad_norm": 0.6200081706047058, |
|
"learning_rate": 9.783737024221454e-05, |
|
"loss": 1.6787, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.1991111111111111, |
|
"grad_norm": 0.6582353115081787, |
|
"learning_rate": 9.740484429065745e-05, |
|
"loss": 1.7095, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.20266666666666666, |
|
"grad_norm": 0.541404664516449, |
|
"learning_rate": 9.697231833910035e-05, |
|
"loss": 1.8418, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.20622222222222222, |
|
"grad_norm": 0.5303178429603577, |
|
"learning_rate": 9.653979238754325e-05, |
|
"loss": 1.6112, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.20977777777777779, |
|
"grad_norm": 0.527113676071167, |
|
"learning_rate": 9.610726643598617e-05, |
|
"loss": 1.6454, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.21333333333333335, |
|
"grad_norm": 0.5398814678192139, |
|
"learning_rate": 9.567474048442907e-05, |
|
"loss": 1.5353, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.21688888888888888, |
|
"grad_norm": 0.5357266068458557, |
|
"learning_rate": 9.524221453287197e-05, |
|
"loss": 1.6816, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.22044444444444444, |
|
"grad_norm": 0.4769757390022278, |
|
"learning_rate": 9.480968858131488e-05, |
|
"loss": 1.6625, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.224, |
|
"grad_norm": 0.5274131298065186, |
|
"learning_rate": 9.437716262975779e-05, |
|
"loss": 1.7129, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.22755555555555557, |
|
"grad_norm": 0.6454447507858276, |
|
"learning_rate": 9.394463667820069e-05, |
|
"loss": 1.5577, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.2311111111111111, |
|
"grad_norm": 0.5967287421226501, |
|
"learning_rate": 9.35121107266436e-05, |
|
"loss": 1.3905, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.23466666666666666, |
|
"grad_norm": 0.57136470079422, |
|
"learning_rate": 9.307958477508652e-05, |
|
"loss": 1.6029, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.23822222222222222, |
|
"grad_norm": 0.5300968885421753, |
|
"learning_rate": 9.264705882352942e-05, |
|
"loss": 1.7363, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.24177777777777779, |
|
"grad_norm": 0.44372138381004333, |
|
"learning_rate": 9.221453287197233e-05, |
|
"loss": 1.6467, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.24533333333333332, |
|
"grad_norm": 0.45939210057258606, |
|
"learning_rate": 9.178200692041523e-05, |
|
"loss": 1.5413, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.24888888888888888, |
|
"grad_norm": 0.5719866752624512, |
|
"learning_rate": 9.134948096885813e-05, |
|
"loss": 1.5702, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.25244444444444447, |
|
"grad_norm": 0.4535101652145386, |
|
"learning_rate": 9.091695501730105e-05, |
|
"loss": 1.5331, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.256, |
|
"grad_norm": 0.5633770227432251, |
|
"learning_rate": 9.048442906574395e-05, |
|
"loss": 1.696, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.25955555555555554, |
|
"grad_norm": 0.6633341908454895, |
|
"learning_rate": 9.005190311418685e-05, |
|
"loss": 1.5428, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.26311111111111113, |
|
"grad_norm": 0.6005223393440247, |
|
"learning_rate": 8.961937716262977e-05, |
|
"loss": 1.8278, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.26666666666666666, |
|
"grad_norm": 0.5239768624305725, |
|
"learning_rate": 8.918685121107267e-05, |
|
"loss": 1.6761, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.2702222222222222, |
|
"grad_norm": 0.4788583517074585, |
|
"learning_rate": 8.875432525951558e-05, |
|
"loss": 1.548, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.2737777777777778, |
|
"grad_norm": 0.5428124666213989, |
|
"learning_rate": 8.832179930795848e-05, |
|
"loss": 1.6972, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.2773333333333333, |
|
"grad_norm": 0.42001649737358093, |
|
"learning_rate": 8.78892733564014e-05, |
|
"loss": 1.6244, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.2808888888888889, |
|
"grad_norm": 0.5100952386856079, |
|
"learning_rate": 8.74567474048443e-05, |
|
"loss": 1.6704, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.28444444444444444, |
|
"grad_norm": 0.49174782633781433, |
|
"learning_rate": 8.70242214532872e-05, |
|
"loss": 1.6876, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.288, |
|
"grad_norm": 0.543880820274353, |
|
"learning_rate": 8.659169550173011e-05, |
|
"loss": 1.599, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.29155555555555557, |
|
"grad_norm": 0.4966542422771454, |
|
"learning_rate": 8.615916955017301e-05, |
|
"loss": 1.4723, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.2951111111111111, |
|
"grad_norm": 0.44916948676109314, |
|
"learning_rate": 8.572664359861592e-05, |
|
"loss": 1.452, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.2986666666666667, |
|
"grad_norm": 0.5791043639183044, |
|
"learning_rate": 8.529411764705883e-05, |
|
"loss": 1.7569, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.3022222222222222, |
|
"grad_norm": 0.5880780816078186, |
|
"learning_rate": 8.486159169550173e-05, |
|
"loss": 1.4779, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.30577777777777776, |
|
"grad_norm": 0.5578981041908264, |
|
"learning_rate": 8.442906574394463e-05, |
|
"loss": 1.6577, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.30933333333333335, |
|
"grad_norm": 0.6132334470748901, |
|
"learning_rate": 8.399653979238755e-05, |
|
"loss": 1.5862, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3128888888888889, |
|
"grad_norm": 0.512484073638916, |
|
"learning_rate": 8.356401384083046e-05, |
|
"loss": 1.4891, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.3164444444444444, |
|
"grad_norm": 0.4793022871017456, |
|
"learning_rate": 8.313148788927336e-05, |
|
"loss": 1.601, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5512364506721497, |
|
"learning_rate": 8.269896193771626e-05, |
|
"loss": 1.6191, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.32355555555555554, |
|
"grad_norm": 0.5528599619865417, |
|
"learning_rate": 8.226643598615918e-05, |
|
"loss": 1.6555, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.32711111111111113, |
|
"grad_norm": 0.4718135595321655, |
|
"learning_rate": 8.183391003460208e-05, |
|
"loss": 1.3796, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.33066666666666666, |
|
"grad_norm": 0.5744655728340149, |
|
"learning_rate": 8.140138408304498e-05, |
|
"loss": 1.7323, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.3342222222222222, |
|
"grad_norm": 0.5298731923103333, |
|
"learning_rate": 8.09688581314879e-05, |
|
"loss": 1.6613, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.3377777777777778, |
|
"grad_norm": 0.507189154624939, |
|
"learning_rate": 8.05363321799308e-05, |
|
"loss": 1.4932, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.3413333333333333, |
|
"grad_norm": 0.5233726501464844, |
|
"learning_rate": 8.01038062283737e-05, |
|
"loss": 1.5989, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.3448888888888889, |
|
"grad_norm": 0.5754904747009277, |
|
"learning_rate": 7.967128027681662e-05, |
|
"loss": 1.6657, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.34844444444444445, |
|
"grad_norm": 0.6196070909500122, |
|
"learning_rate": 7.923875432525953e-05, |
|
"loss": 1.6808, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.352, |
|
"grad_norm": 0.461910605430603, |
|
"learning_rate": 7.880622837370243e-05, |
|
"loss": 1.6012, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.35555555555555557, |
|
"grad_norm": 0.4632907807826996, |
|
"learning_rate": 7.837370242214533e-05, |
|
"loss": 1.5879, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.3591111111111111, |
|
"grad_norm": 0.48907166719436646, |
|
"learning_rate": 7.794117647058824e-05, |
|
"loss": 1.6109, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.3626666666666667, |
|
"grad_norm": 0.5417441129684448, |
|
"learning_rate": 7.750865051903114e-05, |
|
"loss": 1.5687, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.3662222222222222, |
|
"grad_norm": 0.519092857837677, |
|
"learning_rate": 7.707612456747404e-05, |
|
"loss": 1.6197, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.36977777777777776, |
|
"grad_norm": 0.45026785135269165, |
|
"learning_rate": 7.664359861591696e-05, |
|
"loss": 1.5194, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.37333333333333335, |
|
"grad_norm": 0.5378026366233826, |
|
"learning_rate": 7.621107266435986e-05, |
|
"loss": 1.419, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.3768888888888889, |
|
"grad_norm": 0.4889166057109833, |
|
"learning_rate": 7.577854671280276e-05, |
|
"loss": 1.6356, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.3804444444444444, |
|
"grad_norm": 0.5200234055519104, |
|
"learning_rate": 7.534602076124569e-05, |
|
"loss": 1.7118, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 0.4398309886455536, |
|
"learning_rate": 7.491349480968859e-05, |
|
"loss": 1.525, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.38755555555555554, |
|
"grad_norm": 0.43217843770980835, |
|
"learning_rate": 7.448096885813149e-05, |
|
"loss": 1.5643, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.39111111111111113, |
|
"grad_norm": 0.5189200043678284, |
|
"learning_rate": 7.40484429065744e-05, |
|
"loss": 1.6891, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.39466666666666667, |
|
"grad_norm": 0.5237637758255005, |
|
"learning_rate": 7.36159169550173e-05, |
|
"loss": 1.4445, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.3982222222222222, |
|
"grad_norm": 0.5614046454429626, |
|
"learning_rate": 7.318339100346021e-05, |
|
"loss": 1.6439, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.4017777777777778, |
|
"grad_norm": 0.4191811680793762, |
|
"learning_rate": 7.275086505190312e-05, |
|
"loss": 1.4344, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.4053333333333333, |
|
"grad_norm": 0.5043474435806274, |
|
"learning_rate": 7.231833910034602e-05, |
|
"loss": 1.7017, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.4088888888888889, |
|
"grad_norm": 0.40949752926826477, |
|
"learning_rate": 7.188581314878892e-05, |
|
"loss": 1.5903, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.41244444444444445, |
|
"grad_norm": 0.4599238634109497, |
|
"learning_rate": 7.145328719723184e-05, |
|
"loss": 1.5666, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.416, |
|
"grad_norm": 0.5736513137817383, |
|
"learning_rate": 7.102076124567474e-05, |
|
"loss": 1.5525, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.41955555555555557, |
|
"grad_norm": 0.5437818169593811, |
|
"learning_rate": 7.058823529411765e-05, |
|
"loss": 1.6668, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.4231111111111111, |
|
"grad_norm": 0.4318547248840332, |
|
"learning_rate": 7.015570934256056e-05, |
|
"loss": 1.4075, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.4266666666666667, |
|
"grad_norm": 0.44979867339134216, |
|
"learning_rate": 6.972318339100347e-05, |
|
"loss": 1.3973, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.43022222222222223, |
|
"grad_norm": 0.4653863310813904, |
|
"learning_rate": 6.929065743944637e-05, |
|
"loss": 1.4578, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.43377777777777776, |
|
"grad_norm": 0.5257763266563416, |
|
"learning_rate": 6.885813148788927e-05, |
|
"loss": 1.5667, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.43733333333333335, |
|
"grad_norm": 0.5117728114128113, |
|
"learning_rate": 6.842560553633219e-05, |
|
"loss": 1.6843, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.4408888888888889, |
|
"grad_norm": 0.4611428678035736, |
|
"learning_rate": 6.799307958477509e-05, |
|
"loss": 1.7192, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.586694061756134, |
|
"learning_rate": 6.756055363321799e-05, |
|
"loss": 1.6057, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.448, |
|
"grad_norm": 0.43433499336242676, |
|
"learning_rate": 6.71280276816609e-05, |
|
"loss": 1.6829, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.45155555555555554, |
|
"grad_norm": 0.5851064920425415, |
|
"learning_rate": 6.66955017301038e-05, |
|
"loss": 1.6039, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.45511111111111113, |
|
"grad_norm": 0.5549812912940979, |
|
"learning_rate": 6.626297577854672e-05, |
|
"loss": 1.6076, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.45866666666666667, |
|
"grad_norm": 0.4489051103591919, |
|
"learning_rate": 6.583044982698962e-05, |
|
"loss": 1.6347, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.4622222222222222, |
|
"grad_norm": 0.5636203289031982, |
|
"learning_rate": 6.539792387543253e-05, |
|
"loss": 1.3819, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.4657777777777778, |
|
"grad_norm": 0.5598219037055969, |
|
"learning_rate": 6.496539792387544e-05, |
|
"loss": 1.6914, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.4693333333333333, |
|
"grad_norm": 0.44435766339302063, |
|
"learning_rate": 6.453287197231834e-05, |
|
"loss": 1.5487, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.4728888888888889, |
|
"grad_norm": 0.4664863646030426, |
|
"learning_rate": 6.410034602076125e-05, |
|
"loss": 1.5156, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.47644444444444445, |
|
"grad_norm": 0.5192257761955261, |
|
"learning_rate": 6.366782006920415e-05, |
|
"loss": 1.5507, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.6014835238456726, |
|
"learning_rate": 6.323529411764705e-05, |
|
"loss": 1.4809, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.48355555555555557, |
|
"grad_norm": 0.5586997270584106, |
|
"learning_rate": 6.280276816608997e-05, |
|
"loss": 1.5619, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.4871111111111111, |
|
"grad_norm": 0.4059390723705292, |
|
"learning_rate": 6.237024221453287e-05, |
|
"loss": 1.4799, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.49066666666666664, |
|
"grad_norm": 0.41877785325050354, |
|
"learning_rate": 6.193771626297578e-05, |
|
"loss": 1.4187, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.49422222222222223, |
|
"grad_norm": 0.4456830620765686, |
|
"learning_rate": 6.15051903114187e-05, |
|
"loss": 1.5507, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.49777777777777776, |
|
"grad_norm": 0.45245644450187683, |
|
"learning_rate": 6.10726643598616e-05, |
|
"loss": 1.5478, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.5013333333333333, |
|
"grad_norm": 0.4327283501625061, |
|
"learning_rate": 6.06401384083045e-05, |
|
"loss": 1.5734, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.5048888888888889, |
|
"grad_norm": 0.6047600507736206, |
|
"learning_rate": 6.020761245674741e-05, |
|
"loss": 1.6916, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.5084444444444445, |
|
"grad_norm": 0.4880015254020691, |
|
"learning_rate": 5.9775086505190316e-05, |
|
"loss": 1.6346, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.512, |
|
"grad_norm": 0.4689754843711853, |
|
"learning_rate": 5.934256055363322e-05, |
|
"loss": 1.5714, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.5155555555555555, |
|
"grad_norm": 0.4588066041469574, |
|
"learning_rate": 5.8910034602076125e-05, |
|
"loss": 1.5547, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.5191111111111111, |
|
"grad_norm": 0.45055222511291504, |
|
"learning_rate": 5.847750865051903e-05, |
|
"loss": 1.4591, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.5226666666666666, |
|
"grad_norm": 0.49660903215408325, |
|
"learning_rate": 5.8044982698961933e-05, |
|
"loss": 1.7409, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.5262222222222223, |
|
"grad_norm": 0.48975858092308044, |
|
"learning_rate": 5.761245674740484e-05, |
|
"loss": 1.5158, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.5297777777777778, |
|
"grad_norm": 0.4958665668964386, |
|
"learning_rate": 5.7179930795847756e-05, |
|
"loss": 1.4612, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 0.4389738142490387, |
|
"learning_rate": 5.6747404844290664e-05, |
|
"loss": 1.6067, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.5368888888888889, |
|
"grad_norm": 0.44060131907463074, |
|
"learning_rate": 5.631487889273357e-05, |
|
"loss": 1.3391, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.5404444444444444, |
|
"grad_norm": 0.5496039390563965, |
|
"learning_rate": 5.588235294117647e-05, |
|
"loss": 1.5108, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.544, |
|
"grad_norm": 0.5372530221939087, |
|
"learning_rate": 5.544982698961938e-05, |
|
"loss": 1.4777, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.5475555555555556, |
|
"grad_norm": 0.5017176270484924, |
|
"learning_rate": 5.501730103806229e-05, |
|
"loss": 1.5007, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.5511111111111111, |
|
"grad_norm": 0.5397398471832275, |
|
"learning_rate": 5.458477508650519e-05, |
|
"loss": 1.6057, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.5546666666666666, |
|
"grad_norm": 0.5731176733970642, |
|
"learning_rate": 5.41522491349481e-05, |
|
"loss": 1.6644, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.5582222222222222, |
|
"grad_norm": 0.48925384879112244, |
|
"learning_rate": 5.3719723183391005e-05, |
|
"loss": 1.5506, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.5617777777777778, |
|
"grad_norm": 0.47176703810691833, |
|
"learning_rate": 5.3287197231833906e-05, |
|
"loss": 1.5355, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.5653333333333334, |
|
"grad_norm": 0.45709192752838135, |
|
"learning_rate": 5.285467128027683e-05, |
|
"loss": 1.4926, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.5688888888888889, |
|
"grad_norm": 0.5231002569198608, |
|
"learning_rate": 5.242214532871973e-05, |
|
"loss": 1.5645, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.5724444444444444, |
|
"grad_norm": 0.4811376929283142, |
|
"learning_rate": 5.1989619377162636e-05, |
|
"loss": 1.5169, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 0.5673519968986511, |
|
"learning_rate": 5.1557093425605544e-05, |
|
"loss": 1.4974, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.5795555555555556, |
|
"grad_norm": 0.5168330669403076, |
|
"learning_rate": 5.1124567474048445e-05, |
|
"loss": 1.4242, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.5831111111111111, |
|
"grad_norm": 0.49934643507003784, |
|
"learning_rate": 5.069204152249135e-05, |
|
"loss": 1.6342, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.5866666666666667, |
|
"grad_norm": 0.44559621810913086, |
|
"learning_rate": 5.0259515570934254e-05, |
|
"loss": 1.5204, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.5902222222222222, |
|
"grad_norm": 0.5243204832077026, |
|
"learning_rate": 4.982698961937716e-05, |
|
"loss": 1.4703, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.5937777777777777, |
|
"grad_norm": 0.5117699503898621, |
|
"learning_rate": 4.9394463667820076e-05, |
|
"loss": 1.7011, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.5973333333333334, |
|
"grad_norm": 0.42527756094932556, |
|
"learning_rate": 4.896193771626298e-05, |
|
"loss": 1.4331, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.6008888888888889, |
|
"grad_norm": 0.5092193484306335, |
|
"learning_rate": 4.8529411764705885e-05, |
|
"loss": 1.4378, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.6044444444444445, |
|
"grad_norm": 0.5100553631782532, |
|
"learning_rate": 4.809688581314879e-05, |
|
"loss": 1.5999, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.608, |
|
"grad_norm": 0.5817019939422607, |
|
"learning_rate": 4.7664359861591694e-05, |
|
"loss": 1.6094, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.6115555555555555, |
|
"grad_norm": 0.41723236441612244, |
|
"learning_rate": 4.723183391003461e-05, |
|
"loss": 1.3555, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.6151111111111112, |
|
"grad_norm": 0.5209750533103943, |
|
"learning_rate": 4.679930795847751e-05, |
|
"loss": 1.56, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.6186666666666667, |
|
"grad_norm": 0.48648136854171753, |
|
"learning_rate": 4.636678200692042e-05, |
|
"loss": 1.48, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.6222222222222222, |
|
"grad_norm": 0.5282444357872009, |
|
"learning_rate": 4.5934256055363325e-05, |
|
"loss": 1.5003, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.6257777777777778, |
|
"grad_norm": 0.4915744364261627, |
|
"learning_rate": 4.5501730103806226e-05, |
|
"loss": 1.4575, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.6293333333333333, |
|
"grad_norm": 0.568629801273346, |
|
"learning_rate": 4.506920415224914e-05, |
|
"loss": 1.4804, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.6328888888888888, |
|
"grad_norm": 0.48679119348526, |
|
"learning_rate": 4.463667820069204e-05, |
|
"loss": 1.5015, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.6364444444444445, |
|
"grad_norm": 0.5196161866188049, |
|
"learning_rate": 4.420415224913495e-05, |
|
"loss": 1.561, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.4946150779724121, |
|
"learning_rate": 4.377162629757786e-05, |
|
"loss": 1.5151, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.6435555555555555, |
|
"grad_norm": 0.40968433022499084, |
|
"learning_rate": 4.333910034602076e-05, |
|
"loss": 1.5373, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.6471111111111111, |
|
"grad_norm": 0.6429114937782288, |
|
"learning_rate": 4.290657439446367e-05, |
|
"loss": 1.4867, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.6506666666666666, |
|
"grad_norm": 0.474758118391037, |
|
"learning_rate": 4.247404844290658e-05, |
|
"loss": 1.5035, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.6542222222222223, |
|
"grad_norm": 0.51218581199646, |
|
"learning_rate": 4.204152249134948e-05, |
|
"loss": 1.5725, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.6577777777777778, |
|
"grad_norm": 0.49607232213020325, |
|
"learning_rate": 4.160899653979239e-05, |
|
"loss": 1.5352, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.6613333333333333, |
|
"grad_norm": 0.4859633147716522, |
|
"learning_rate": 4.11764705882353e-05, |
|
"loss": 1.4925, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.6648888888888889, |
|
"grad_norm": 0.4592611491680145, |
|
"learning_rate": 4.0743944636678205e-05, |
|
"loss": 1.5156, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.6684444444444444, |
|
"grad_norm": 0.47916117310523987, |
|
"learning_rate": 4.031141868512111e-05, |
|
"loss": 1.545, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.672, |
|
"grad_norm": 0.5305891633033752, |
|
"learning_rate": 3.9878892733564014e-05, |
|
"loss": 1.5384, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.6755555555555556, |
|
"grad_norm": 0.4583602547645569, |
|
"learning_rate": 3.944636678200692e-05, |
|
"loss": 1.5635, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.6791111111111111, |
|
"grad_norm": 0.5024811625480652, |
|
"learning_rate": 3.901384083044983e-05, |
|
"loss": 1.5016, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.6826666666666666, |
|
"grad_norm": 0.5373083353042603, |
|
"learning_rate": 3.858131487889274e-05, |
|
"loss": 1.5399, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.6862222222222222, |
|
"grad_norm": 0.415873646736145, |
|
"learning_rate": 3.8148788927335645e-05, |
|
"loss": 1.6384, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.6897777777777778, |
|
"grad_norm": 0.46044766902923584, |
|
"learning_rate": 3.7716262975778546e-05, |
|
"loss": 1.5588, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.6933333333333334, |
|
"grad_norm": 0.37672215700149536, |
|
"learning_rate": 3.7283737024221454e-05, |
|
"loss": 1.49, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.6968888888888889, |
|
"grad_norm": 0.40345415472984314, |
|
"learning_rate": 3.685121107266436e-05, |
|
"loss": 1.3789, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.7004444444444444, |
|
"grad_norm": 0.49566739797592163, |
|
"learning_rate": 3.641868512110726e-05, |
|
"loss": 1.3318, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.704, |
|
"grad_norm": 0.48587340116500854, |
|
"learning_rate": 3.598615916955018e-05, |
|
"loss": 1.4758, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.7075555555555556, |
|
"grad_norm": 0.596446692943573, |
|
"learning_rate": 3.5553633217993085e-05, |
|
"loss": 1.5347, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.7111111111111111, |
|
"grad_norm": 0.4951972961425781, |
|
"learning_rate": 3.5121107266435986e-05, |
|
"loss": 1.2704, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.7146666666666667, |
|
"grad_norm": 0.4018455743789673, |
|
"learning_rate": 3.4688581314878894e-05, |
|
"loss": 1.5217, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.7182222222222222, |
|
"grad_norm": 0.5961456894874573, |
|
"learning_rate": 3.4256055363321795e-05, |
|
"loss": 1.7622, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.7217777777777777, |
|
"grad_norm": 0.4609779119491577, |
|
"learning_rate": 3.382352941176471e-05, |
|
"loss": 1.5252, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.7253333333333334, |
|
"grad_norm": 0.43142765760421753, |
|
"learning_rate": 3.339100346020762e-05, |
|
"loss": 1.4461, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.7288888888888889, |
|
"grad_norm": 0.4517713785171509, |
|
"learning_rate": 3.295847750865052e-05, |
|
"loss": 1.5962, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.7324444444444445, |
|
"grad_norm": 0.48978036642074585, |
|
"learning_rate": 3.2525951557093426e-05, |
|
"loss": 1.5366, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.736, |
|
"grad_norm": 0.40700387954711914, |
|
"learning_rate": 3.2093425605536334e-05, |
|
"loss": 1.5572, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.7395555555555555, |
|
"grad_norm": 0.5392187237739563, |
|
"learning_rate": 3.166089965397924e-05, |
|
"loss": 1.5552, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.7431111111111111, |
|
"grad_norm": 0.44562795758247375, |
|
"learning_rate": 3.122837370242215e-05, |
|
"loss": 1.5194, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.7466666666666667, |
|
"grad_norm": 0.44013121724128723, |
|
"learning_rate": 3.079584775086505e-05, |
|
"loss": 1.3915, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.7502222222222222, |
|
"grad_norm": 0.4336005747318268, |
|
"learning_rate": 3.036332179930796e-05, |
|
"loss": 1.4715, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.7537777777777778, |
|
"grad_norm": 0.5743895769119263, |
|
"learning_rate": 2.9930795847750863e-05, |
|
"loss": 1.5256, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.7573333333333333, |
|
"grad_norm": 0.45934122800827026, |
|
"learning_rate": 2.9498269896193774e-05, |
|
"loss": 1.5873, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.7608888888888888, |
|
"grad_norm": 0.5065307021141052, |
|
"learning_rate": 2.9065743944636682e-05, |
|
"loss": 1.5582, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.7644444444444445, |
|
"grad_norm": 0.511725664138794, |
|
"learning_rate": 2.8633217993079586e-05, |
|
"loss": 1.4813, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 0.5018854737281799, |
|
"learning_rate": 2.820069204152249e-05, |
|
"loss": 1.5786, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.7715555555555556, |
|
"grad_norm": 0.5292341113090515, |
|
"learning_rate": 2.77681660899654e-05, |
|
"loss": 1.6471, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.7751111111111111, |
|
"grad_norm": 0.3638977110385895, |
|
"learning_rate": 2.733564013840831e-05, |
|
"loss": 1.4863, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.7786666666666666, |
|
"grad_norm": 0.49934521317481995, |
|
"learning_rate": 2.6903114186851214e-05, |
|
"loss": 1.4587, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.7822222222222223, |
|
"grad_norm": 0.5519063472747803, |
|
"learning_rate": 2.647058823529412e-05, |
|
"loss": 1.4915, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.7857777777777778, |
|
"grad_norm": 0.5289978384971619, |
|
"learning_rate": 2.6038062283737023e-05, |
|
"loss": 1.5924, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.7893333333333333, |
|
"grad_norm": 0.4768216907978058, |
|
"learning_rate": 2.560553633217993e-05, |
|
"loss": 1.4847, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.7928888888888889, |
|
"grad_norm": 0.5400416254997253, |
|
"learning_rate": 2.5173010380622842e-05, |
|
"loss": 1.5664, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.7964444444444444, |
|
"grad_norm": 0.5170167684555054, |
|
"learning_rate": 2.4740484429065743e-05, |
|
"loss": 1.7191, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.4661620259284973, |
|
"learning_rate": 2.430795847750865e-05, |
|
"loss": 1.584, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.8035555555555556, |
|
"grad_norm": 0.48751363158226013, |
|
"learning_rate": 2.387543252595156e-05, |
|
"loss": 1.6166, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.8071111111111111, |
|
"grad_norm": 0.48189136385917664, |
|
"learning_rate": 2.3442906574394467e-05, |
|
"loss": 1.412, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.8106666666666666, |
|
"grad_norm": 0.4377134144306183, |
|
"learning_rate": 2.301038062283737e-05, |
|
"loss": 1.4633, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.8142222222222222, |
|
"grad_norm": 0.4900270998477936, |
|
"learning_rate": 2.2577854671280275e-05, |
|
"loss": 1.5968, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.8177777777777778, |
|
"grad_norm": 0.5673394203186035, |
|
"learning_rate": 2.2145328719723187e-05, |
|
"loss": 1.4443, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.8213333333333334, |
|
"grad_norm": 0.5573922395706177, |
|
"learning_rate": 2.171280276816609e-05, |
|
"loss": 1.5951, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.8248888888888889, |
|
"grad_norm": 0.5815632343292236, |
|
"learning_rate": 2.1280276816609e-05, |
|
"loss": 1.54, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.8284444444444444, |
|
"grad_norm": 0.5276745557785034, |
|
"learning_rate": 2.0847750865051903e-05, |
|
"loss": 1.5903, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.832, |
|
"grad_norm": 0.4580680727958679, |
|
"learning_rate": 2.041522491349481e-05, |
|
"loss": 1.6143, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.8355555555555556, |
|
"grad_norm": 0.5015590190887451, |
|
"learning_rate": 1.998269896193772e-05, |
|
"loss": 1.4156, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.8391111111111111, |
|
"grad_norm": 0.508334219455719, |
|
"learning_rate": 1.9550173010380623e-05, |
|
"loss": 1.5272, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.8426666666666667, |
|
"grad_norm": 0.44540607929229736, |
|
"learning_rate": 1.9117647058823528e-05, |
|
"loss": 1.5522, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.8462222222222222, |
|
"grad_norm": 0.5358651876449585, |
|
"learning_rate": 1.868512110726644e-05, |
|
"loss": 1.4527, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.8497777777777777, |
|
"grad_norm": 0.5191397666931152, |
|
"learning_rate": 1.8252595155709343e-05, |
|
"loss": 1.5627, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.8533333333333334, |
|
"grad_norm": 0.4698735773563385, |
|
"learning_rate": 1.782006920415225e-05, |
|
"loss": 1.4434, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.8568888888888889, |
|
"grad_norm": 0.4450030028820038, |
|
"learning_rate": 1.7387543252595156e-05, |
|
"loss": 1.383, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.8604444444444445, |
|
"grad_norm": 0.47164052724838257, |
|
"learning_rate": 1.6955017301038063e-05, |
|
"loss": 1.5499, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.864, |
|
"grad_norm": 0.4113026559352875, |
|
"learning_rate": 1.652249134948097e-05, |
|
"loss": 1.499, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.8675555555555555, |
|
"grad_norm": 0.48519784212112427, |
|
"learning_rate": 1.6089965397923876e-05, |
|
"loss": 1.5236, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.8711111111111111, |
|
"grad_norm": 0.5262069702148438, |
|
"learning_rate": 1.5657439446366783e-05, |
|
"loss": 1.5144, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.8746666666666667, |
|
"grad_norm": 0.42513802647590637, |
|
"learning_rate": 1.522491349480969e-05, |
|
"loss": 1.4932, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.8782222222222222, |
|
"grad_norm": 0.47486934065818787, |
|
"learning_rate": 1.4792387543252596e-05, |
|
"loss": 1.4398, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.8817777777777778, |
|
"grad_norm": 0.5634934902191162, |
|
"learning_rate": 1.4359861591695503e-05, |
|
"loss": 1.65, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.8853333333333333, |
|
"grad_norm": 0.43849506974220276, |
|
"learning_rate": 1.3927335640138408e-05, |
|
"loss": 1.5201, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.4285069704055786, |
|
"learning_rate": 1.3494809688581317e-05, |
|
"loss": 1.492, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.8924444444444445, |
|
"grad_norm": 0.5707066655158997, |
|
"learning_rate": 1.3062283737024222e-05, |
|
"loss": 1.5991, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.896, |
|
"grad_norm": 0.4697231948375702, |
|
"learning_rate": 1.2629757785467128e-05, |
|
"loss": 1.5585, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.8995555555555556, |
|
"grad_norm": 0.4958859384059906, |
|
"learning_rate": 1.2197231833910034e-05, |
|
"loss": 1.5496, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.9031111111111111, |
|
"grad_norm": 0.4845017194747925, |
|
"learning_rate": 1.1764705882352942e-05, |
|
"loss": 1.592, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.9066666666666666, |
|
"grad_norm": 0.4356600344181061, |
|
"learning_rate": 1.1332179930795848e-05, |
|
"loss": 1.5873, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.9102222222222223, |
|
"grad_norm": 0.3917759954929352, |
|
"learning_rate": 1.0899653979238756e-05, |
|
"loss": 1.4984, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.9137777777777778, |
|
"grad_norm": 0.48273682594299316, |
|
"learning_rate": 1.046712802768166e-05, |
|
"loss": 1.4288, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.9173333333333333, |
|
"grad_norm": 0.5662245750427246, |
|
"learning_rate": 1.0034602076124568e-05, |
|
"loss": 1.5123, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.9208888888888889, |
|
"grad_norm": 0.510188639163971, |
|
"learning_rate": 9.602076124567474e-06, |
|
"loss": 1.4915, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.9244444444444444, |
|
"grad_norm": 0.5999191403388977, |
|
"learning_rate": 9.169550173010382e-06, |
|
"loss": 1.7374, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.928, |
|
"grad_norm": 0.5733387470245361, |
|
"learning_rate": 8.737024221453288e-06, |
|
"loss": 1.5551, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.9315555555555556, |
|
"grad_norm": 0.3804311156272888, |
|
"learning_rate": 8.304498269896194e-06, |
|
"loss": 1.5631, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.9351111111111111, |
|
"grad_norm": 0.44998374581336975, |
|
"learning_rate": 7.8719723183391e-06, |
|
"loss": 1.7566, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.9386666666666666, |
|
"grad_norm": 0.4219221770763397, |
|
"learning_rate": 7.439446366782007e-06, |
|
"loss": 1.454, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.9422222222222222, |
|
"grad_norm": 0.48252835869789124, |
|
"learning_rate": 7.006920415224914e-06, |
|
"loss": 1.5107, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.9457777777777778, |
|
"grad_norm": 0.5446690320968628, |
|
"learning_rate": 6.5743944636678194e-06, |
|
"loss": 1.5333, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.9493333333333334, |
|
"grad_norm": 0.4676448106765747, |
|
"learning_rate": 6.141868512110726e-06, |
|
"loss": 1.4822, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.9528888888888889, |
|
"grad_norm": 0.41428953409194946, |
|
"learning_rate": 5.709342560553633e-06, |
|
"loss": 1.398, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.9564444444444444, |
|
"grad_norm": 0.39139533042907715, |
|
"learning_rate": 5.2768166089965395e-06, |
|
"loss": 1.539, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.5213208794593811, |
|
"learning_rate": 4.8442906574394464e-06, |
|
"loss": 1.535, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.9635555555555556, |
|
"grad_norm": 0.6053948402404785, |
|
"learning_rate": 4.411764705882353e-06, |
|
"loss": 1.484, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.9671111111111111, |
|
"grad_norm": 0.44833922386169434, |
|
"learning_rate": 3.9792387543252595e-06, |
|
"loss": 1.3107, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.9706666666666667, |
|
"grad_norm": 0.4769100844860077, |
|
"learning_rate": 3.5467128027681665e-06, |
|
"loss": 1.5114, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.9742222222222222, |
|
"grad_norm": 0.5112429857254028, |
|
"learning_rate": 3.1141868512110726e-06, |
|
"loss": 1.5357, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.9777777777777777, |
|
"grad_norm": 0.4884449243545532, |
|
"learning_rate": 2.6816608996539796e-06, |
|
"loss": 1.4739, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.9813333333333333, |
|
"grad_norm": 0.5470872521400452, |
|
"learning_rate": 2.249134948096886e-06, |
|
"loss": 1.5557, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.9848888888888889, |
|
"grad_norm": 0.517669141292572, |
|
"learning_rate": 1.8166089965397926e-06, |
|
"loss": 1.5388, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.9884444444444445, |
|
"grad_norm": 0.46402767300605774, |
|
"learning_rate": 1.3840830449826992e-06, |
|
"loss": 1.5509, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.992, |
|
"grad_norm": 0.5298945307731628, |
|
"learning_rate": 9.515570934256056e-07, |
|
"loss": 1.533, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.9955555555555555, |
|
"grad_norm": 0.5506584644317627, |
|
"learning_rate": 5.190311418685121e-07, |
|
"loss": 1.404, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.9991111111111111, |
|
"grad_norm": 0.5148972868919373, |
|
"learning_rate": 8.65051903114187e-08, |
|
"loss": 1.558, |
|
"step": 2810 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 2812, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 5000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.1067056282035814e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |