{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9998676022772408,
  "eval_steps": 500,
  "global_step": 1888,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005295908910366742,
      "grad_norm": 11.06487524439423,
      "learning_rate": 1.0582010582010582e-07,
      "loss": 1.8074,
      "step": 1
    },
    {
      "epoch": 0.0026479544551833707,
      "grad_norm": 10.35297291588404,
      "learning_rate": 5.291005291005291e-07,
      "loss": 1.7698,
      "step": 5
    },
    {
      "epoch": 0.005295908910366741,
      "grad_norm": 4.410422081469438,
      "learning_rate": 1.0582010582010582e-06,
      "loss": 1.7238,
      "step": 10
    },
    {
      "epoch": 0.007943863365550112,
      "grad_norm": 2.369320008302558,
      "learning_rate": 1.5873015873015873e-06,
      "loss": 1.686,
      "step": 15
    },
    {
      "epoch": 0.010591817820733483,
      "grad_norm": 1.8383527182275317,
      "learning_rate": 2.1164021164021164e-06,
      "loss": 1.6501,
      "step": 20
    },
    {
      "epoch": 0.013239772275916854,
      "grad_norm": 1.2958509244773313,
      "learning_rate": 2.6455026455026455e-06,
      "loss": 1.6124,
      "step": 25
    },
    {
      "epoch": 0.015887726731100223,
      "grad_norm": 1.174964641661081,
      "learning_rate": 3.1746031746031746e-06,
      "loss": 1.6416,
      "step": 30
    },
    {
      "epoch": 0.018535681186283594,
      "grad_norm": 1.1099336321706879,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 1.6185,
      "step": 35
    },
    {
      "epoch": 0.021183635641466966,
      "grad_norm": 1.0938179405927853,
      "learning_rate": 4.232804232804233e-06,
      "loss": 1.6105,
      "step": 40
    },
    {
      "epoch": 0.023831590096650337,
      "grad_norm": 1.1671668437307012,
      "learning_rate": 4.761904761904762e-06,
      "loss": 1.6252,
      "step": 45
    },
    {
      "epoch": 0.026479544551833708,
      "grad_norm": 1.1541560199576304,
      "learning_rate": 5.291005291005291e-06,
      "loss": 1.5897,
      "step": 50
    },
    {
      "epoch": 0.02912749900701708,
      "grad_norm": 1.143774194892201,
      "learning_rate": 5.820105820105821e-06,
      "loss": 1.6112,
      "step": 55
    },
    {
      "epoch": 0.03177545346220045,
      "grad_norm": 1.1206001249958375,
      "learning_rate": 6.349206349206349e-06,
      "loss": 1.5747,
      "step": 60
    },
    {
      "epoch": 0.03442340791738382,
      "grad_norm": 1.0737292608277489,
      "learning_rate": 6.878306878306879e-06,
      "loss": 1.596,
      "step": 65
    },
    {
      "epoch": 0.03707136237256719,
      "grad_norm": 1.1075534221517305,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 1.5953,
      "step": 70
    },
    {
      "epoch": 0.03971931682775056,
      "grad_norm": 1.1099499955003087,
      "learning_rate": 7.936507936507936e-06,
      "loss": 1.6047,
      "step": 75
    },
    {
      "epoch": 0.04236727128293393,
      "grad_norm": 1.123069012889379,
      "learning_rate": 8.465608465608466e-06,
      "loss": 1.5838,
      "step": 80
    },
    {
      "epoch": 0.0450152257381173,
      "grad_norm": 1.090428544643004,
      "learning_rate": 8.994708994708995e-06,
      "loss": 1.5758,
      "step": 85
    },
    {
      "epoch": 0.047663180193300674,
      "grad_norm": 1.0534191502769323,
      "learning_rate": 9.523809523809525e-06,
      "loss": 1.6057,
      "step": 90
    },
    {
      "epoch": 0.050311134648484045,
      "grad_norm": 1.137231525194321,
      "learning_rate": 1.0052910052910054e-05,
      "loss": 1.5962,
      "step": 95
    },
    {
      "epoch": 0.052959089103667416,
      "grad_norm": 1.0968326074569497,
      "learning_rate": 1.0582010582010582e-05,
      "loss": 1.5966,
      "step": 100
    },
    {
      "epoch": 0.05560704355885079,
      "grad_norm": 1.1309290184481768,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 1.5828,
      "step": 105
    },
    {
      "epoch": 0.05825499801403416,
      "grad_norm": 1.10504410469273,
      "learning_rate": 1.1640211640211641e-05,
      "loss": 1.5675,
      "step": 110
    },
    {
      "epoch": 0.06090295246921753,
      "grad_norm": 1.0798493680750558,
      "learning_rate": 1.216931216931217e-05,
      "loss": 1.6153,
      "step": 115
    },
    {
      "epoch": 0.0635509069244009,
      "grad_norm": 1.1570911354201185,
      "learning_rate": 1.2698412698412699e-05,
      "loss": 1.6054,
      "step": 120
    },
    {
      "epoch": 0.06619886137958426,
      "grad_norm": 1.0753506577133993,
      "learning_rate": 1.322751322751323e-05,
      "loss": 1.5781,
      "step": 125
    },
    {
      "epoch": 0.06884681583476764,
      "grad_norm": 1.0993021833530066,
      "learning_rate": 1.3756613756613758e-05,
      "loss": 1.5899,
      "step": 130
    },
    {
      "epoch": 0.071494770289951,
      "grad_norm": 1.1542372309330358,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 1.5751,
      "step": 135
    },
    {
      "epoch": 0.07414272474513438,
      "grad_norm": 1.1332353266729192,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 1.5921,
      "step": 140
    },
    {
      "epoch": 0.07679067920031775,
      "grad_norm": 1.1191457024952258,
      "learning_rate": 1.5343915343915344e-05,
      "loss": 1.5918,
      "step": 145
    },
    {
      "epoch": 0.07943863365550112,
      "grad_norm": 1.2189932458288275,
      "learning_rate": 1.5873015873015872e-05,
      "loss": 1.5967,
      "step": 150
    },
    {
      "epoch": 0.08208658811068449,
      "grad_norm": 1.1599874502262428,
      "learning_rate": 1.6402116402116404e-05,
      "loss": 1.5997,
      "step": 155
    },
    {
      "epoch": 0.08473454256586786,
      "grad_norm": 1.1115171747004207,
      "learning_rate": 1.693121693121693e-05,
      "loss": 1.5613,
      "step": 160
    },
    {
      "epoch": 0.08738249702105123,
      "grad_norm": 1.0864466240123662,
      "learning_rate": 1.7460317460317463e-05,
      "loss": 1.5883,
      "step": 165
    },
    {
      "epoch": 0.0900304514762346,
      "grad_norm": 1.1860151614783536,
      "learning_rate": 1.798941798941799e-05,
      "loss": 1.5764,
      "step": 170
    },
    {
      "epoch": 0.09267840593141798,
      "grad_norm": 1.1010264235042715,
      "learning_rate": 1.851851851851852e-05,
      "loss": 1.5785,
      "step": 175
    },
    {
      "epoch": 0.09532636038660135,
      "grad_norm": 1.1498652209509064,
      "learning_rate": 1.904761904761905e-05,
      "loss": 1.5779,
      "step": 180
    },
    {
      "epoch": 0.09797431484178472,
      "grad_norm": 1.121779220254807,
      "learning_rate": 1.9576719576719577e-05,
      "loss": 1.5774,
      "step": 185
    },
    {
      "epoch": 0.10062226929696809,
      "grad_norm": 1.0944181878692356,
      "learning_rate": 1.999998290445824e-05,
      "loss": 1.5871,
      "step": 190
    },
    {
      "epoch": 0.10327022375215146,
      "grad_norm": 1.0838843974621926,
      "learning_rate": 1.9999384566633966e-05,
      "loss": 1.5812,
      "step": 195
    },
    {
      "epoch": 0.10591817820733483,
      "grad_norm": 1.1397215331428574,
      "learning_rate": 1.999793151017222e-05,
      "loss": 1.6098,
      "step": 200
    },
    {
      "epoch": 0.1085661326625182,
      "grad_norm": 1.2295255721168679,
      "learning_rate": 1.9995623859276082e-05,
      "loss": 1.5554,
      "step": 205
    },
    {
      "epoch": 0.11121408711770157,
      "grad_norm": 1.1311330807062197,
      "learning_rate": 1.999246181119692e-05,
      "loss": 1.5841,
      "step": 210
    },
    {
      "epoch": 0.11386204157288494,
      "grad_norm": 1.189685647731816,
      "learning_rate": 1.9988445636217512e-05,
      "loss": 1.5959,
      "step": 215
    },
    {
      "epoch": 0.11650999602806832,
      "grad_norm": 1.0965577845127703,
      "learning_rate": 1.9983575677628944e-05,
      "loss": 1.5535,
      "step": 220
    },
    {
      "epoch": 0.11915795048325169,
      "grad_norm": 1.1449588639602188,
      "learning_rate": 1.9977852351701272e-05,
      "loss": 1.5942,
      "step": 225
    },
    {
      "epoch": 0.12180590493843506,
      "grad_norm": 1.0900575902349108,
      "learning_rate": 1.9971276147647937e-05,
      "loss": 1.5776,
      "step": 230
    },
    {
      "epoch": 0.12445385939361843,
      "grad_norm": 1.1000072950998065,
      "learning_rate": 1.9965401487447783e-05,
      "loss": 1.5687,
      "step": 235
    },
    {
      "epoch": 0.1271018138488018,
      "grad_norm": 1.1195092642070894,
      "learning_rate": 1.9957291568257425e-05,
      "loss": 1.5822,
      "step": 240
    },
    {
      "epoch": 0.12974976830398516,
      "grad_norm": 1.1252728597253978,
      "learning_rate": 1.9948330528418044e-05,
      "loss": 1.5561,
      "step": 245
    },
    {
      "epoch": 0.13239772275916853,
      "grad_norm": 1.1266747688901304,
      "learning_rate": 1.9938519133893562e-05,
      "loss": 1.562,
      "step": 250
    },
    {
      "epoch": 0.1350456772143519,
      "grad_norm": 1.1086910563981198,
      "learning_rate": 1.992785822333376e-05,
      "loss": 1.5904,
      "step": 255
    },
    {
      "epoch": 0.13769363166953527,
      "grad_norm": 1.0777260472777783,
      "learning_rate": 1.9916348708002622e-05,
      "loss": 1.5831,
      "step": 260
    },
    {
      "epoch": 0.14034158612471864,
      "grad_norm": 1.1550395863394685,
      "learning_rate": 1.9903991571700422e-05,
      "loss": 1.5581,
      "step": 265
    },
    {
      "epoch": 0.142989540579902,
      "grad_norm": 1.0664620318538827,
      "learning_rate": 1.9890787870679628e-05,
      "loss": 1.5744,
      "step": 270
    },
    {
      "epoch": 0.14563749503508538,
      "grad_norm": 1.14251257482884,
      "learning_rate": 1.987673873355464e-05,
      "loss": 1.5714,
      "step": 275
    },
    {
      "epoch": 0.14828544949026876,
      "grad_norm": 1.197497783667311,
      "learning_rate": 1.986184536120529e-05,
      "loss": 1.5816,
      "step": 280
    },
    {
      "epoch": 0.15093340394545213,
      "grad_norm": 1.0819442869899158,
      "learning_rate": 1.984610902667423e-05,
      "loss": 1.5708,
      "step": 285
    },
    {
      "epoch": 0.1535813584006355,
      "grad_norm": 1.1346045409466028,
      "learning_rate": 1.982953107505807e-05,
      "loss": 1.5658,
      "step": 290
    },
    {
      "epoch": 0.15622931285581887,
      "grad_norm": 1.2009442681600062,
      "learning_rate": 1.981211292339245e-05,
      "loss": 1.5715,
      "step": 295
    },
    {
      "epoch": 0.15887726731100224,
      "grad_norm": 1.0818769850873184,
      "learning_rate": 1.9793856060530882e-05,
      "loss": 1.5685,
      "step": 300
    },
    {
      "epoch": 0.1615252217661856,
      "grad_norm": 1.098824738778624,
      "learning_rate": 1.9774762047017503e-05,
      "loss": 1.5531,
      "step": 305
    },
    {
      "epoch": 0.16417317622136898,
      "grad_norm": 1.1351699166294587,
      "learning_rate": 1.9754832514953674e-05,
      "loss": 1.6106,
      "step": 310
    },
    {
      "epoch": 0.16682113067655235,
      "grad_norm": 1.1817572262803626,
      "learning_rate": 1.973406916785849e-05,
      "loss": 1.5738,
      "step": 315
    },
    {
      "epoch": 0.16946908513173572,
      "grad_norm": 1.122480345238289,
      "learning_rate": 1.971247378052315e-05,
      "loss": 1.5614,
      "step": 320
    },
    {
      "epoch": 0.1721170395869191,
      "grad_norm": 1.1372734218681475,
      "learning_rate": 1.969004819885926e-05,
      "loss": 1.5674,
      "step": 325
    },
    {
      "epoch": 0.17476499404210247,
      "grad_norm": 1.107310816365505,
      "learning_rate": 1.9666794339741052e-05,
      "loss": 1.551,
      "step": 330
    },
    {
      "epoch": 0.17741294849728584,
      "grad_norm": 1.1272731404259275,
      "learning_rate": 1.9642714190841537e-05,
      "loss": 1.5304,
      "step": 335
    },
    {
      "epoch": 0.1800609029524692,
      "grad_norm": 1.0831491448717634,
      "learning_rate": 1.9617809810462586e-05,
      "loss": 1.5858,
      "step": 340
    },
    {
      "epoch": 0.18270885740765258,
      "grad_norm": 1.114095409388134,
      "learning_rate": 1.9592083327359025e-05,
      "loss": 1.5858,
      "step": 345
    },
    {
      "epoch": 0.18535681186283595,
      "grad_norm": 1.1045780203627584,
      "learning_rate": 1.9565536940556643e-05,
      "loss": 1.602,
      "step": 350
    },
    {
      "epoch": 0.18800476631801932,
      "grad_norm": 1.064137031483729,
      "learning_rate": 1.9538172919164246e-05,
      "loss": 1.567,
      "step": 355
    },
    {
      "epoch": 0.1906527207732027,
      "grad_norm": 1.1404216929199258,
      "learning_rate": 1.950999360217969e-05,
      "loss": 1.5767,
      "step": 360
    },
    {
      "epoch": 0.19330067522838607,
      "grad_norm": 1.0729534295841254,
      "learning_rate": 1.9481001398289957e-05,
      "loss": 1.5748,
      "step": 365
    },
    {
      "epoch": 0.19594862968356944,
      "grad_norm": 1.0356756901012456,
      "learning_rate": 1.9451198785665263e-05,
      "loss": 1.5623,
      "step": 370
    },
    {
      "epoch": 0.1985965841387528,
      "grad_norm": 1.0650197367365721,
      "learning_rate": 1.942058831174723e-05,
      "loss": 1.5758,
      "step": 375
    },
    {
      "epoch": 0.20124453859393618,
      "grad_norm": 1.0856587790066892,
      "learning_rate": 1.9389172593031142e-05,
      "loss": 1.5499,
      "step": 380
    },
    {
      "epoch": 0.20389249304911955,
      "grad_norm": 1.1164198411812878,
      "learning_rate": 1.9356954314842294e-05,
      "loss": 1.5774,
      "step": 385
    },
    {
      "epoch": 0.20654044750430292,
      "grad_norm": 1.1319738961510737,
      "learning_rate": 1.9323936231106456e-05,
      "loss": 1.5735,
      "step": 390
    },
    {
      "epoch": 0.2091884019594863,
      "grad_norm": 1.1255967891893999,
      "learning_rate": 1.929012116411447e-05,
      "loss": 1.5632,
      "step": 395
    },
    {
      "epoch": 0.21183635641466966,
      "grad_norm": 1.108494426707579,
      "learning_rate": 1.9255512004281028e-05,
      "loss": 1.5783,
      "step": 400
    },
    {
      "epoch": 0.21448431086985303,
      "grad_norm": 1.0770535769415948,
      "learning_rate": 1.9220111709897584e-05,
      "loss": 1.5678,
      "step": 405
    },
    {
      "epoch": 0.2171322653250364,
      "grad_norm": 1.0415829053423116,
      "learning_rate": 1.9183923306879495e-05,
      "loss": 1.5816,
      "step": 410
    },
    {
      "epoch": 0.21978021978021978,
      "grad_norm": 1.087188068573505,
      "learning_rate": 1.9146949888507392e-05,
      "loss": 1.5638,
      "step": 415
    },
    {
      "epoch": 0.22242817423540315,
      "grad_norm": 1.1116864961195356,
      "learning_rate": 1.910919461516275e-05,
      "loss": 1.5658,
      "step": 420
    },
    {
      "epoch": 0.22507612869058652,
      "grad_norm": 1.1417370823215336,
      "learning_rate": 1.9070660714057764e-05,
      "loss": 1.5425,
      "step": 425
    },
    {
      "epoch": 0.2277240831457699,
      "grad_norm": 1.117200915498827,
      "learning_rate": 1.903135147895948e-05,
      "loss": 1.5714,
      "step": 430
    },
    {
      "epoch": 0.23037203760095326,
      "grad_norm": 1.246338337601015,
      "learning_rate": 1.899127026990828e-05,
      "loss": 1.5646,
      "step": 435
    },
    {
      "epoch": 0.23301999205613663,
      "grad_norm": 1.1173376295296633,
      "learning_rate": 1.8950420512930653e-05,
      "loss": 1.5556,
      "step": 440
    },
    {
      "epoch": 0.23566794651132,
      "grad_norm": 1.1002049060932855,
      "learning_rate": 1.8908805699746347e-05,
      "loss": 1.5534,
      "step": 445
    },
    {
      "epoch": 0.23831590096650337,
      "grad_norm": 1.1410000128349649,
      "learning_rate": 1.8866429387469925e-05,
      "loss": 1.5602,
      "step": 450
    },
    {
      "epoch": 0.24096385542168675,
      "grad_norm": 1.2018225004111127,
      "learning_rate": 1.883198249024526e-05,
      "loss": 1.5863,
      "step": 455
    },
    {
      "epoch": 0.24361180987687012,
      "grad_norm": 1.0679808550327838,
      "learning_rate": 1.8788244651103884e-05,
      "loss": 1.5676,
      "step": 460
    },
    {
      "epoch": 0.2462597643320535,
      "grad_norm": 1.1076122026749544,
      "learning_rate": 1.8743755618082132e-05,
      "loss": 1.5561,
      "step": 465
    },
    {
      "epoch": 0.24890771878723686,
      "grad_norm": 1.0702127304936218,
      "learning_rate": 1.8698519193974607e-05,
      "loss": 1.5647,
      "step": 470
    },
    {
      "epoch": 0.25155567324242023,
      "grad_norm": 1.1039374045842976,
      "learning_rate": 1.8652539245460758e-05,
      "loss": 1.5669,
      "step": 475
    },
    {
      "epoch": 0.2542036276976036,
      "grad_norm": 1.066323478050412,
      "learning_rate": 1.8605819702774355e-05,
      "loss": 1.5789,
      "step": 480
    },
    {
      "epoch": 0.256851582152787,
      "grad_norm": 1.0544943779687024,
      "learning_rate": 1.8558364559367565e-05,
      "loss": 1.5836,
      "step": 485
    },
    {
      "epoch": 0.2594995366079703,
      "grad_norm": 1.1202578955380416,
      "learning_rate": 1.851017787156957e-05,
      "loss": 1.5519,
      "step": 490
    },
    {
      "epoch": 0.2621474910631537,
      "grad_norm": 1.10948092278241,
      "learning_rate": 1.846126375823987e-05,
      "loss": 1.5645,
      "step": 495
    },
    {
      "epoch": 0.26479544551833706,
      "grad_norm": 1.0939909602427282,
      "learning_rate": 1.841162640041622e-05,
      "loss": 1.5553,
      "step": 500
    },
    {
      "epoch": 0.26744339997352046,
      "grad_norm": 1.0748182421005708,
      "learning_rate": 1.836127004095722e-05,
      "loss": 1.552,
      "step": 505
    },
    {
      "epoch": 0.2700913544287038,
      "grad_norm": 1.0428546609239973,
      "learning_rate": 1.8310198984179664e-05,
      "loss": 1.5798,
      "step": 510
    },
    {
      "epoch": 0.2727393088838872,
      "grad_norm": 1.116361248332442,
      "learning_rate": 1.825841759549062e-05,
      "loss": 1.5906,
      "step": 515
    },
    {
      "epoch": 0.27538726333907054,
      "grad_norm": 1.1910997615260301,
      "learning_rate": 1.820593030101429e-05,
      "loss": 1.5497,
      "step": 520
    },
    {
      "epoch": 0.27803521779425394,
      "grad_norm": 1.1198458554793917,
      "learning_rate": 1.8152741587213663e-05,
      "loss": 1.5698,
      "step": 525
    },
    {
      "epoch": 0.2806831722494373,
      "grad_norm": 1.0441545797715532,
      "learning_rate": 1.8098856000507044e-05,
      "loss": 1.5629,
      "step": 530
    },
    {
      "epoch": 0.2833311267046207,
      "grad_norm": 1.078958367326335,
      "learning_rate": 1.804427814687942e-05,
      "loss": 1.563,
      "step": 535
    },
    {
      "epoch": 0.285979081159804,
      "grad_norm": 1.049511571157811,
      "learning_rate": 1.7989012691488773e-05,
      "loss": 1.5546,
      "step": 540
    },
    {
      "epoch": 0.2886270356149874,
      "grad_norm": 1.0830947470396424,
      "learning_rate": 1.7933064358267304e-05,
      "loss": 1.5591,
      "step": 545
    },
    {
      "epoch": 0.29127499007017077,
      "grad_norm": 1.1188147326045856,
      "learning_rate": 1.787643792951764e-05,
      "loss": 1.5987,
      "step": 550
    },
    {
      "epoch": 0.29392294452535417,
      "grad_norm": 1.1323146467371565,
      "learning_rate": 1.781913824550408e-05,
      "loss": 1.5372,
      "step": 555
    },
    {
      "epoch": 0.2965708989805375,
      "grad_norm": 1.126908360081288,
      "learning_rate": 1.7761170204038826e-05,
      "loss": 1.5776,
      "step": 560
    },
    {
      "epoch": 0.2992188534357209,
      "grad_norm": 1.080368974521426,
      "learning_rate": 1.7702538760063364e-05,
      "loss": 1.5402,
      "step": 565
    },
    {
      "epoch": 0.30186680789090425,
      "grad_norm": 1.0596514703800766,
      "learning_rate": 1.7643248925224926e-05,
      "loss": 1.554,
      "step": 570
    },
    {
      "epoch": 0.30451476234608765,
      "grad_norm": 1.0465918096539288,
      "learning_rate": 1.7583305767448088e-05,
      "loss": 1.5687,
      "step": 575
    },
    {
      "epoch": 0.307162716801271,
      "grad_norm": 1.0376883938486323,
      "learning_rate": 1.75227144105016e-05,
      "loss": 1.5305,
      "step": 580
    },
    {
      "epoch": 0.3098106712564544,
      "grad_norm": 1.074362321961027,
      "learning_rate": 1.7461480033560415e-05,
      "loss": 1.563,
      "step": 585
    },
    {
      "epoch": 0.31245862571163774,
      "grad_norm": 1.07766817131444,
      "learning_rate": 1.7399607870762973e-05,
      "loss": 1.5599,
      "step": 590
    },
    {
      "epoch": 0.31510658016682114,
      "grad_norm": 1.192806036017365,
      "learning_rate": 1.733710321076383e-05,
      "loss": 1.5761,
      "step": 595
    },
    {
      "epoch": 0.3177545346220045,
      "grad_norm": 1.0556632469495062,
      "learning_rate": 1.727397139628157e-05,
      "loss": 1.5472,
      "step": 600
    },
    {
      "epoch": 0.3204024890771879,
      "grad_norm": 1.1350269437645966,
      "learning_rate": 1.721021782364216e-05,
      "loss": 1.5819,
      "step": 605
    },
    {
      "epoch": 0.3230504435323712,
      "grad_norm": 1.0604476544784243,
      "learning_rate": 1.714584794231764e-05,
      "loss": 1.5391,
      "step": 610
    },
    {
      "epoch": 0.3256983979875546,
      "grad_norm": 1.118350982024723,
      "learning_rate": 1.7080867254460363e-05,
      "loss": 1.5588,
      "step": 615
    },
    {
      "epoch": 0.32834635244273797,
      "grad_norm": 1.0677975558889676,
      "learning_rate": 1.7015281314432665e-05,
      "loss": 1.5933,
      "step": 620
    },
    {
      "epoch": 0.33099430689792136,
      "grad_norm": 1.089736574490509,
      "learning_rate": 1.6949095728332084e-05,
      "loss": 1.5816,
      "step": 625
    },
    {
      "epoch": 0.3336422613531047,
      "grad_norm": 1.0447343101414006,
      "learning_rate": 1.68823161535122e-05,
      "loss": 1.5483,
      "step": 630
    },
    {
      "epoch": 0.3362902158082881,
      "grad_norm": 1.0665843264776165,
      "learning_rate": 1.6814948298099024e-05,
      "loss": 1.5767,
      "step": 635
    },
    {
      "epoch": 0.33893817026347145,
      "grad_norm": 1.0351650120702554,
      "learning_rate": 1.6746997920503104e-05,
      "loss": 1.5795,
      "step": 640
    },
    {
      "epoch": 0.34158612471865485,
      "grad_norm": 1.0484644916794084,
      "learning_rate": 1.667847082892732e-05,
      "loss": 1.5537,
      "step": 645
    },
    {
      "epoch": 0.3442340791738382,
      "grad_norm": 1.197623024306328,
      "learning_rate": 1.6609372880870393e-05,
      "loss": 1.5694,
      "step": 650
    },
    {
      "epoch": 0.3468820336290216,
      "grad_norm": 1.0442156663539222,
      "learning_rate": 1.6539709982626213e-05,
      "loss": 1.5609,
      "step": 655
    },
    {
      "epoch": 0.34952998808420493,
      "grad_norm": 1.1123431780366937,
      "learning_rate": 1.6469488088778996e-05,
      "loss": 1.5387,
      "step": 660
    },
    {
      "epoch": 0.35217794253938833,
      "grad_norm": 1.0433338753312666,
      "learning_rate": 1.639871320169429e-05,
      "loss": 1.569,
      "step": 665
    },
    {
      "epoch": 0.3548258969945717,
      "grad_norm": 1.0849577930136236,
      "learning_rate": 1.6327391371005917e-05,
      "loss": 1.5727,
      "step": 670
    },
    {
      "epoch": 0.3574738514497551,
      "grad_norm": 1.046861580099341,
      "learning_rate": 1.625552869309886e-05,
      "loss": 1.5343,
      "step": 675
    },
    {
      "epoch": 0.3601218059049384,
      "grad_norm": 1.0715008652245688,
      "learning_rate": 1.6183131310588173e-05,
      "loss": 1.5458,
      "step": 680
    },
    {
      "epoch": 0.3627697603601218,
      "grad_norm": 1.0874487989288704,
      "learning_rate": 1.611020541179392e-05,
      "loss": 1.5776,
      "step": 685
    },
    {
      "epoch": 0.36541771481530516,
      "grad_norm": 1.056395887760786,
      "learning_rate": 1.6036757230212216e-05,
      "loss": 1.5423,
      "step": 690
    },
    {
      "epoch": 0.36806566927048856,
      "grad_norm": 1.0608701846522224,
      "learning_rate": 1.5962793043982406e-05,
      "loss": 1.5409,
      "step": 695
    },
    {
      "epoch": 0.3707136237256719,
      "grad_norm": 1.0362877764811316,
      "learning_rate": 1.588831917535043e-05,
      "loss": 1.5468,
      "step": 700
    },
    {
      "epoch": 0.3733615781808553,
      "grad_norm": 1.027124601973276,
      "learning_rate": 1.5813341990128415e-05,
      "loss": 1.5512,
      "step": 705
    },
    {
      "epoch": 0.37600953263603865,
      "grad_norm": 1.0312792307083625,
      "learning_rate": 1.5737867897150542e-05,
      "loss": 1.5415,
      "step": 710
    },
    {
      "epoch": 0.37865748709122204,
      "grad_norm": 1.0808334145657998,
      "learning_rate": 1.5661903347725238e-05,
      "loss": 1.5719,
      "step": 715
    },
    {
      "epoch": 0.3813054415464054,
      "grad_norm": 1.0223414325603908,
      "learning_rate": 1.558545483508373e-05,
      "loss": 1.5578,
      "step": 720
    },
    {
      "epoch": 0.3839533960015888,
      "grad_norm": 1.0603081644668413,
      "learning_rate": 1.5508528893825045e-05,
      "loss": 1.5396,
      "step": 725
    },
    {
      "epoch": 0.38660135045677213,
      "grad_norm": 1.0770479780831002,
      "learning_rate": 1.5431132099357418e-05,
      "loss": 1.5593,
      "step": 730
    },
    {
      "epoch": 0.38924930491195553,
      "grad_norm": 0.9919011715354272,
      "learning_rate": 1.535327106733627e-05,
      "loss": 1.5289,
      "step": 735
    },
    {
      "epoch": 0.3918972593671389,
      "grad_norm": 1.1157987761320602,
      "learning_rate": 1.527495245309872e-05,
      "loss": 1.5404,
      "step": 740
    },
    {
      "epoch": 0.39454521382232227,
      "grad_norm": 1.024198117077415,
      "learning_rate": 1.5196182951094685e-05,
      "loss": 1.5328,
      "step": 745
    },
    {
      "epoch": 0.3971931682775056,
      "grad_norm": 0.9977432943925557,
      "learning_rate": 1.5116969294314679e-05,
      "loss": 1.5444,
      "step": 750
    },
    {
      "epoch": 0.399841122732689,
      "grad_norm": 1.0236719689047817,
      "learning_rate": 1.5037318253714288e-05,
      "loss": 1.5359,
      "step": 755
    },
    {
      "epoch": 0.40248907718787236,
      "grad_norm": 1.0162827582624117,
      "learning_rate": 1.4957236637635415e-05,
      "loss": 1.5531,
      "step": 760
    },
    {
      "epoch": 0.40513703164305576,
      "grad_norm": 1.0188868081662166,
      "learning_rate": 1.4876731291224304e-05,
      "loss": 1.5521,
      "step": 765
    },
    {
      "epoch": 0.4077849860982391,
      "grad_norm": 1.0217286340114786,
      "learning_rate": 1.4795809095846463e-05,
      "loss": 1.5549,
      "step": 770
    },
    {
      "epoch": 0.4104329405534225,
      "grad_norm": 1.028917956544073,
      "learning_rate": 1.471447696849844e-05,
      "loss": 1.5334,
      "step": 775
    },
    {
      "epoch": 0.41308089500860584,
      "grad_norm": 1.0500908897714312,
      "learning_rate": 1.4632741861216597e-05,
      "loss": 1.5266,
      "step": 780
    },
    {
      "epoch": 0.41572884946378924,
      "grad_norm": 1.005286165371403,
      "learning_rate": 1.4550610760482853e-05,
      "loss": 1.5434,
      "step": 785
    },
    {
      "epoch": 0.4183768039189726,
      "grad_norm": 1.0814951413359943,
      "learning_rate": 1.446809068662752e-05,
      "loss": 1.5444,
      "step": 790
    },
    {
      "epoch": 0.421024758374156,
      "grad_norm": 1.1011439679669661,
      "learning_rate": 1.4385188693229204e-05,
      "loss": 1.5569,
      "step": 795
    },
    {
      "epoch": 0.4236727128293393,
      "grad_norm": 1.016923604209529,
      "learning_rate": 1.4301911866511904e-05,
      "loss": 1.5557,
      "step": 800
    },
    {
      "epoch": 0.4263206672845227,
      "grad_norm": 0.9901274231469388,
      "learning_rate": 1.4218267324739281e-05,
      "loss": 1.5157,
      "step": 805
    },
    {
      "epoch": 0.42896862173970607,
      "grad_norm": 1.1652933991218297,
      "learning_rate": 1.4134262217606232e-05,
      "loss": 1.5573,
      "step": 810
    },
    {
      "epoch": 0.43161657619488947,
      "grad_norm": 1.0773884902263529,
      "learning_rate": 1.4049903725627743e-05,
      "loss": 1.552,
      "step": 815
    },
    {
      "epoch": 0.4342645306500728,
      "grad_norm": 1.0333923700883418,
      "learning_rate": 1.3965199059525114e-05,
      "loss": 1.5354,
      "step": 820
    },
    {
      "epoch": 0.4369124851052562,
      "grad_norm": 0.9949649082088061,
      "learning_rate": 1.3880155459609621e-05,
      "loss": 1.5532,
      "step": 825
    },
    {
      "epoch": 0.43956043956043955,
      "grad_norm": 1.0750837236994202,
      "learning_rate": 1.3794780195163622e-05,
      "loss": 1.5455,
      "step": 830
    },
    {
      "epoch": 0.44220839401562295,
      "grad_norm": 1.0045353502512582,
      "learning_rate": 1.370908056381921e-05,
      "loss": 1.5479,
      "step": 835
    },
    {
      "epoch": 0.4448563484708063,
      "grad_norm": 1.0458832746708828,
      "learning_rate": 1.3623063890934427e-05,
      "loss": 1.5456,
      "step": 840
    },
    {
      "epoch": 0.4475043029259897,
      "grad_norm": 0.976728693908767,
      "learning_rate": 1.3536737528967108e-05,
      "loss": 1.5607,
      "step": 845
    },
    {
      "epoch": 0.45015225738117304,
      "grad_norm": 1.0311218737493033,
      "learning_rate": 1.3450108856846427e-05,
      "loss": 1.525,
      "step": 850
    },
    {
      "epoch": 0.45280021183635644,
      "grad_norm": 0.9872604341886291,
      "learning_rate": 1.3363185279342157e-05,
      "loss": 1.5195,
      "step": 855
    },
    {
      "epoch": 0.4554481662915398,
      "grad_norm": 1.0149041346987562,
      "learning_rate": 1.3275974226431741e-05,
      "loss": 1.5246,
      "step": 860
    },
    {
      "epoch": 0.4580961207467232,
      "grad_norm": 0.9934776810831784,
      "learning_rate": 1.3188483152665184e-05,
      "loss": 1.5558,
      "step": 865
    },
    {
      "epoch": 0.4607440752019065,
      "grad_norm": 1.0720157680198927,
      "learning_rate": 1.3100719536527872e-05,
      "loss": 1.5596,
      "step": 870
    },
    {
      "epoch": 0.4633920296570899,
      "grad_norm": 1.0084638038262754,
      "learning_rate": 1.3012690879801336e-05,
      "loss": 1.552,
      "step": 875
    },
    {
      "epoch": 0.46603998411227326,
      "grad_norm": 1.0656341079347242,
      "learning_rate": 1.2924404706922004e-05,
      "loss": 1.5671,
      "step": 880
    },
    {
      "epoch": 0.46868793856745666,
      "grad_norm": 1.001854533922662,
      "learning_rate": 1.283586856433805e-05,
      "loss": 1.5455,
      "step": 885
    },
    {
      "epoch": 0.47133589302264,
      "grad_norm": 1.112510941323857,
      "learning_rate": 1.2747090019864337e-05,
      "loss": 1.5575,
      "step": 890
    },
    {
      "epoch": 0.4739838474778234,
      "grad_norm": 1.040754615982929,
      "learning_rate": 1.2658076662035549e-05,
      "loss": 1.5545,
      "step": 895
    },
    {
      "epoch": 0.47663180193300675,
      "grad_norm": 1.0210395751767485,
      "learning_rate": 1.256883609945753e-05,
      "loss": 1.5364,
      "step": 900
    },
    {
      "epoch": 0.47927975638819015,
      "grad_norm": 1.0275079409852799,
      "learning_rate": 1.247937596015695e-05,
      "loss": 1.5183,
      "step": 905
    },
    {
      "epoch": 0.4819277108433735,
      "grad_norm": 1.0640533107131247,
      "learning_rate": 1.2389703890929245e-05,
      "loss": 1.5597,
      "step": 910
    },
    {
      "epoch": 0.4845756652985569,
      "grad_norm": 1.0119600619480633,
      "learning_rate": 1.2299827556685031e-05,
      "loss": 1.5154,
      "step": 915
    },
    {
      "epoch": 0.48722361975374023,
      "grad_norm": 1.020830011977681,
      "learning_rate": 1.2209754639794897e-05,
      "loss": 1.5034,
      "step": 920
    },
    {
      "epoch": 0.48987157420892363,
      "grad_norm": 1.053661577221828,
      "learning_rate": 1.2119492839432754e-05,
      "loss": 1.535,
      "step": 925
    },
    {
      "epoch": 0.492519528664107,
      "grad_norm": 1.0735392400040396,
      "learning_rate": 1.2029049870917735e-05,
      "loss": 1.5392,
      "step": 930
    },
    {
      "epoch": 0.4951674831192904,
      "grad_norm": 1.040125386980757,
      "learning_rate": 1.1938433465054698e-05,
      "loss": 1.5088,
      "step": 935
    },
    {
      "epoch": 0.4978154375744737,
      "grad_norm": 1.0748654533713191,
      "learning_rate": 1.1847651367473429e-05,
      "loss": 1.5291,
      "step": 940
    },
    {
      "epoch": 0.5004633920296571,
      "grad_norm": 1.070960019314352,
      "learning_rate": 1.1756711337966572e-05,
      "loss": 1.5408,
      "step": 945
    },
    {
      "epoch": 0.5031113464848405,
      "grad_norm": 1.0678876377133644,
      "learning_rate": 1.1665621149826327e-05,
      "loss": 1.522,
      "step": 950
    },
    {
      "epoch": 0.5057593009400239,
      "grad_norm": 1.0532185752896248,
      "learning_rate": 1.1574388589180032e-05,
      "loss": 1.5436,
      "step": 955
    },
    {
      "epoch": 0.5084072553952071,
      "grad_norm": 1.0851511914260046,
      "learning_rate": 1.148302145432461e-05,
      "loss": 1.5522,
      "step": 960
    },
    {
      "epoch": 0.5110552098503905,
      "grad_norm": 1.02938231795702,
      "learning_rate": 1.1391527555060002e-05,
      "loss": 1.5231,
      "step": 965
    },
    {
      "epoch": 0.513703164305574,
      "grad_norm": 1.0668601829517106,
      "learning_rate": 1.1299914712021616e-05,
      "loss": 1.5545,
      "step": 970
    },
    {
      "epoch": 0.5163511187607573,
      "grad_norm": 1.0106858243798442,
      "learning_rate": 1.1208190756011815e-05,
      "loss": 1.5279,
      "step": 975
    },
    {
      "epoch": 0.5189990732159406,
      "grad_norm": 1.0566089105177372,
      "learning_rate": 1.111636352733059e-05,
      "loss": 1.5619,
      "step": 980
    },
    {
      "epoch": 0.521647027671124,
      "grad_norm": 1.0229597172767808,
      "learning_rate": 1.1024440875105383e-05,
      "loss": 1.5288,
      "step": 985
    },
    {
      "epoch": 0.5242949821263074,
      "grad_norm": 1.0470712970731715,
      "learning_rate": 1.0932430656620159e-05,
      "loss": 1.536,
      "step": 990
    },
    {
      "epoch": 0.5269429365814908,
      "grad_norm": 1.0663817640403133,
      "learning_rate": 1.0840340736643803e-05,
      "loss": 1.5583,
      "step": 995
    },
    {
      "epoch": 0.5295908910366741,
      "grad_norm": 1.0439017159315356,
      "learning_rate": 1.0748178986757852e-05,
      "loss": 1.5304,
      "step": 1000
    },
    {
      "epoch": 0.5322388454918575,
      "grad_norm": 1.0790858663927478,
      "learning_rate": 1.0655953284683656e-05,
      "loss": 1.5127,
      "step": 1005
    },
    {
      "epoch": 0.5348867999470409,
      "grad_norm": 1.0536530566954696,
      "learning_rate": 1.0563671513609013e-05,
      "loss": 1.543,
      "step": 1010
    },
    {
      "epoch": 0.5375347544022243,
      "grad_norm": 1.0170237895907281,
      "learning_rate": 1.0471341561514337e-05,
      "loss": 1.5194,
      "step": 1015
    },
    {
      "epoch": 0.5401827088574076,
      "grad_norm": 1.0473205826242136,
      "learning_rate": 1.0378971320498425e-05,
      "loss": 1.5136,
      "step": 1020
    },
    {
      "epoch": 0.542830663312591,
      "grad_norm": 1.0567217848188775,
      "learning_rate": 1.0286568686103846e-05,
      "loss": 1.5226,
      "step": 1025
    },
    {
      "epoch": 0.5454786177677744,
      "grad_norm": 1.0208955618061542,
      "learning_rate": 1.0194141556642065e-05,
      "loss": 1.5499,
      "step": 1030
    },
    {
      "epoch": 0.5481265722229578,
      "grad_norm": 1.014238754109568,
      "learning_rate": 1.0101697832518316e-05,
      "loss": 1.5291,
      "step": 1035
    },
    {
      "epoch": 0.5507745266781411,
      "grad_norm": 1.0293706768418351,
      "learning_rate": 1.0009245415556297e-05,
      "loss": 1.5252,
      "step": 1040
    },
    {
      "epoch": 0.5534224811333245,
      "grad_norm": 1.0505758556707483,
      "learning_rate": 9.916792208322743e-06,
      "loss": 1.5266,
      "step": 1045
    },
    {
      "epoch": 0.5560704355885079,
      "grad_norm": 1.0130623575273145,
      "learning_rate": 9.824346113451943e-06,
      "loss": 1.5228,
      "step": 1050
    },
    {
      "epoch": 0.5587183900436913,
      "grad_norm": 1.0351862096161806,
      "learning_rate": 9.73191503297023e-06,
      "loss": 1.5258,
      "step": 1055
    },
    {
      "epoch": 0.5613663444988746,
      "grad_norm": 1.04995407064205,
      "learning_rate": 9.639506867620574e-06,
      "loss": 1.5231,
      "step": 1060
    },
    {
      "epoch": 0.564014298954058,
      "grad_norm": 1.0436659926908782,
      "learning_rate": 9.547129516187197e-06,
      "loss": 1.518,
      "step": 1065
    },
    {
      "epoch": 0.5666622534092414,
      "grad_norm": 1.046638110053409,
      "learning_rate": 9.454790874820452e-06,
      "loss": 1.5098,
      "step": 1070
    },
    {
      "epoch": 0.5693102078644248,
      "grad_norm": 1.0241677741798514,
      "learning_rate": 9.36249883636187e-06,
      "loss": 1.5425,
      "step": 1075
    },
    {
      "epoch": 0.571958162319608,
      "grad_norm": 0.9941017772339601,
      "learning_rate": 9.27026128966949e-06,
      "loss": 1.5142,
      "step": 1080
    },
    {
      "epoch": 0.5746061167747915,
      "grad_norm": 0.9716061498004498,
      "learning_rate": 9.178086118943556e-06,
      "loss": 1.5455,
      "step": 1085
    },
    {
      "epoch": 0.5772540712299749,
      "grad_norm": 1.0256633172012475,
      "learning_rate": 9.085981203052594e-06,
      "loss": 1.5383,
      "step": 1090
    },
    {
      "epoch": 0.5799020256851583,
      "grad_norm": 1.0223919398580668,
      "learning_rate": 8.993954414859944e-06,
      "loss": 1.5331,
      "step": 1095
    },
    {
      "epoch": 0.5825499801403415,
      "grad_norm": 1.007132533021316,
      "learning_rate": 8.902013620550822e-06,
      "loss": 1.5329,
      "step": 1100
    },
    {
      "epoch": 0.5851979345955249,
      "grad_norm": 0.9942218744170546,
      "learning_rate": 8.810166678959931e-06,
      "loss": 1.5389,
      "step": 1105
    },
    {
      "epoch": 0.5878458890507083,
      "grad_norm": 1.0110745590358547,
      "learning_rate": 8.718421440899711e-06,
      "loss": 1.5628,
      "step": 1110
    },
    {
      "epoch": 0.5904938435058917,
      "grad_norm": 1.0107377987377537,
      "learning_rate": 8.626785748489284e-06,
      "loss": 1.5368,
      "step": 1115
    },
    {
      "epoch": 0.593141797961075,
      "grad_norm": 0.9913006390349152,
      "learning_rate": 8.535267434484116e-06,
      "loss": 1.5344,
      "step": 1120
    },
    {
      "epoch": 0.5957897524162584,
      "grad_norm": 0.9777988491979502,
      "learning_rate": 8.443874321606505e-06,
      "loss": 1.5227,
      "step": 1125
    },
    {
      "epoch": 0.5984377068714418,
      "grad_norm": 1.0189135477068292,
      "learning_rate": 8.352614221876932e-06,
      "loss": 1.5214,
      "step": 1130
    },
    {
      "epoch": 0.6010856613266252,
      "grad_norm": 1.0138767745745147,
      "learning_rate": 8.261494935946271e-06,
      "loss": 1.5247,
      "step": 1135
    },
    {
      "epoch": 0.6037336157818085,
      "grad_norm": 0.9761877999287727,
      "learning_rate": 8.170524252429059e-06,
      "loss": 1.5103,
      "step": 1140
    },
    {
      "epoch": 0.6063815702369919,
      "grad_norm": 1.0040567049619376,
      "learning_rate": 8.079709947237719e-06,
      "loss": 1.5022,
      "step": 1145
    },
    {
      "epoch": 0.6090295246921753,
      "grad_norm": 1.013457073795383,
      "learning_rate": 7.989059782917899e-06,
      "loss": 1.53,
      "step": 1150
    },
    {
      "epoch": 0.6116774791473587,
      "grad_norm": 0.9757606354115737,
      "learning_rate": 7.898581507984966e-06,
      "loss": 1.5178,
      "step": 1155
    },
    {
      "epoch": 0.614325433602542,
      "grad_norm": 0.9943607585307255,
      "learning_rate": 7.80828285626167e-06,
      "loss": 1.5275,
      "step": 1160
    },
    {
      "epoch": 0.6169733880577254,
      "grad_norm": 1.0023636505282656,
      "learning_rate": 7.718171546217088e-06,
      "loss": 1.5443,
      "step": 1165
    },
    {
      "epoch": 0.6196213425129088,
      "grad_norm": 1.0171146246719223,
      "learning_rate": 7.628255280306869e-06,
      "loss": 1.5192,
      "step": 1170
    },
    {
      "epoch": 0.6222692969680922,
      "grad_norm": 1.0247392112526459,
      "learning_rate": 7.538541744314854e-06,
      "loss": 1.5348,
      "step": 1175
    },
    {
      "epoch": 0.6249172514232755,
      "grad_norm": 0.990257878872753,
      "learning_rate": 7.449038606696102e-06,
      "loss": 1.5396,
      "step": 1180
    },
    {
      "epoch": 0.6275652058784589,
      "grad_norm": 0.9939434167335955,
      "learning_rate": 7.359753517921441e-06,
      "loss": 1.5167,
      "step": 1185
    },
    {
      "epoch": 0.6302131603336423,
      "grad_norm": 0.9896269105787017,
      "learning_rate": 7.2706941098234975e-06,
      "loss": 1.5367,
      "step": 1190
    },
    {
      "epoch": 0.6328611147888257,
      "grad_norm": 1.012229868356229,
      "learning_rate": 7.1818679949443625e-06,
      "loss": 1.5503,
      "step": 1195
    },
    {
      "epoch": 0.635509069244009,
      "grad_norm": 1.000043265309018,
      "learning_rate": 7.09328276588491e-06,
      "loss": 1.5315,
      "step": 1200
    },
    {
      "epoch": 0.6381570236991924,
      "grad_norm": 1.0289266588222385,
      "learning_rate": 7.004945994655762e-06,
      "loss": 1.4967,
      "step": 1205
    },
    {
      "epoch": 0.6408049781543758,
      "grad_norm": 1.0025882987592574,
      "learning_rate": 6.9168652320301034e-06,
      "loss": 1.5477,
      "step": 1210
    },
    {
      "epoch": 0.6434529326095592,
      "grad_norm": 0.9919331762414756,
      "learning_rate": 6.829048006898223e-06,
      "loss": 1.534,
      "step": 1215
    },
    {
      "epoch": 0.6461008870647424,
      "grad_norm": 1.0019076590702611,
      "learning_rate": 6.741501825623989e-06,
      "loss": 1.5117,
      "step": 1220
    },
    {
      "epoch": 0.6487488415199258,
      "grad_norm": 1.0543772866427898,
      "learning_rate": 6.6542341714032226e-06,
      "loss": 1.5244,
      "step": 1225
    },
    {
      "epoch": 0.6513967959751092,
      "grad_norm": 1.0306054721119788,
      "learning_rate": 6.567252503624049e-06,
      "loss": 1.5411,
      "step": 1230
    },
    {
      "epoch": 0.6540447504302926,
      "grad_norm": 1.0302893738157837,
      "learning_rate": 6.480564257229294e-06,
      "loss": 1.4942,
      "step": 1235
    },
    {
      "epoch": 0.6566927048854759,
      "grad_norm": 1.0109127444615766,
      "learning_rate": 6.394176842080975e-06,
      "loss": 1.502,
      "step": 1240
    },
    {
      "epoch": 0.6593406593406593,
      "grad_norm": 0.9781847599103843,
      "learning_rate": 6.308097642326909e-06,
      "loss": 1.5066,
      "step": 1245
    },
    {
      "epoch": 0.6619886137958427,
      "grad_norm": 0.9824997539943708,
      "learning_rate": 6.222334015769553e-06,
      "loss": 1.5101,
      "step": 1250
    },
    {
      "epoch": 0.6646365682510261,
      "grad_norm": 0.9987899689076005,
      "learning_rate": 6.136893293237078e-06,
      "loss": 1.5185,
      "step": 1255
    },
    {
      "epoch": 0.6672845227062094,
      "grad_norm": 1.006107504524868,
      "learning_rate": 6.051782777956738e-06,
      "loss": 1.5239,
      "step": 1260
    },
    {
      "epoch": 0.6699324771613928,
      "grad_norm": 1.0044418449699575,
      "learning_rate": 5.967009744930633e-06,
      "loss": 1.4952,
      "step": 1265
    },
    {
      "epoch": 0.6725804316165762,
      "grad_norm": 1.0039662646096303,
      "learning_rate": 5.882581440313842e-06,
      "loss": 1.5116,
      "step": 1270
    },
    {
      "epoch": 0.6752283860717596,
      "grad_norm": 1.0462978677216113,
      "learning_rate": 5.798505080795047e-06,
      "loss": 1.5258,
      "step": 1275
    },
    {
      "epoch": 0.6778763405269429,
      "grad_norm": 0.9979354408371577,
      "learning_rate": 5.7147878529796905e-06,
      "loss": 1.5315,
      "step": 1280
    },
    {
      "epoch": 0.6805242949821263,
      "grad_norm": 0.9776481970979145,
      "learning_rate": 5.631436912775652e-06,
      "loss": 1.5075,
      "step": 1285
    },
    {
      "epoch": 0.6831722494373097,
      "grad_norm": 1.0087198024702337,
      "learning_rate": 5.54845938478161e-06,
      "loss": 1.5047,
      "step": 1290
    },
    {
      "epoch": 0.6858202038924931,
      "grad_norm": 0.9903441971804162,
      "learning_rate": 5.465862361678043e-06,
      "loss": 1.5517,
      "step": 1295
    },
    {
      "epoch": 0.6884681583476764,
      "grad_norm": 1.023066931211098,
      "learning_rate": 5.383652903620952e-06,
      "loss": 1.4905,
      "step": 1300
    },
    {
      "epoch": 0.6911161128028598,
      "grad_norm": 0.9932405098752322,
      "learning_rate": 5.3018380376384075e-06,
      "loss": 1.5218,
      "step": 1305
    },
    {
      "epoch": 0.6937640672580432,
      "grad_norm": 0.9757939837419622,
      "learning_rate": 5.220424757029876e-06,
      "loss": 1.5217,
      "step": 1310
    },
    {
      "epoch": 0.6964120217132266,
      "grad_norm": 0.9861240044120464,
      "learning_rate": 5.139420020768466e-06,
      "loss": 1.5173,
      "step": 1315
    },
    {
      "epoch": 0.6990599761684099,
      "grad_norm": 1.0526302805244943,
      "learning_rate": 5.05883075290609e-06,
      "loss": 1.5295,
      "step": 1320
    },
    {
      "epoch": 0.7017079306235933,
      "grad_norm": 1.0185424558326355,
      "learning_rate": 4.978663841981604e-06,
      "loss": 1.516,
      "step": 1325
    },
    {
      "epoch": 0.7043558850787767,
      "grad_norm": 1.018775036262236,
      "learning_rate": 4.898926140432027e-06,
      "loss": 1.5074,
      "step": 1330
    },
    {
      "epoch": 0.7070038395339601,
      "grad_norm": 1.0312565088617913,
      "learning_rate": 4.819624464006783e-06,
      "loss": 1.5431,
      "step": 1335
    },
    {
      "epoch": 0.7096517939891434,
      "grad_norm": 1.0212263411238394,
      "learning_rate": 4.7407655911851205e-06,
      "loss": 1.515,
      "step": 1340
    },
    {
      "epoch": 0.7122997484443268,
      "grad_norm": 0.9933683646722576,
      "learning_rate": 4.6623562625967165e-06,
      "loss": 1.5281,
      "step": 1345
    },
    {
      "epoch": 0.7149477028995102,
      "grad_norm": 0.9624331021666575,
      "learning_rate": 4.5844031804455e-06,
      "loss": 1.5367,
      "step": 1350
    },
    {
      "epoch": 0.7175956573546936,
      "grad_norm": 1.026883861445322,
      "learning_rate": 4.506913007936756e-06,
      "loss": 1.5182,
      "step": 1355
    },
    {
      "epoch": 0.7202436118098768,
      "grad_norm": 1.002938064771753,
      "learning_rate": 4.429892368707594e-06,
      "loss": 1.5185,
      "step": 1360
    },
    {
      "epoch": 0.7228915662650602,
      "grad_norm": 0.9794563064534025,
      "learning_rate": 4.3533478462607706e-06,
      "loss": 1.5147,
      "step": 1365
    },
    {
      "epoch": 0.7255395207202436,
      "grad_norm": 0.984697237202532,
      "learning_rate": 4.2772859834019444e-06,
      "loss": 1.5085,
      "step": 1370
    },
    {
      "epoch": 0.728187475175427,
      "grad_norm": 1.0067564757025862,
      "learning_rate": 4.201713281680417e-06,
      "loss": 1.5337,
      "step": 1375
    },
    {
      "epoch": 0.7308354296306103,
      "grad_norm": 0.9812201953008316,
      "learning_rate": 4.126636200833407e-06,
      "loss": 1.5331,
      "step": 1380
    },
    {
      "epoch": 0.7334833840857937,
      "grad_norm": 1.007828581920917,
      "learning_rate": 4.0520611582338874e-06,
      "loss": 1.5258,
      "step": 1385
    },
    {
      "epoch": 0.7361313385409771,
      "grad_norm": 1.0411554611974254,
      "learning_rate": 3.977994528342049e-06,
      "loss": 1.5241,
      "step": 1390
    },
    {
      "epoch": 0.7387792929961605,
      "grad_norm": 1.0034067110278442,
      "learning_rate": 3.904442642160412e-06,
      "loss": 1.5204,
      "step": 1395
    },
    {
      "epoch": 0.7414272474513438,
      "grad_norm": 0.9595126781640712,
      "learning_rate": 3.831411786692698e-06,
      "loss": 1.5158,
      "step": 1400
    },
    {
      "epoch": 0.7440752019065272,
      "grad_norm": 0.9783084053302383,
      "learning_rate": 3.7589082044064198e-06,
      "loss": 1.5312,
      "step": 1405
    },
    {
      "epoch": 0.7467231563617106,
      "grad_norm": 0.9678174617335311,
      "learning_rate": 3.686938092699287e-06,
      "loss": 1.5302,
      "step": 1410
    },
    {
      "epoch": 0.749371110816894,
      "grad_norm": 0.9871715872104168,
      "learning_rate": 3.6155076033694848e-06,
      "loss": 1.517,
      "step": 1415
    },
    {
      "epoch": 0.7520190652720773,
      "grad_norm": 1.0044736545312436,
      "learning_rate": 3.5446228420898333e-06,
      "loss": 1.4888,
      "step": 1420
    },
    {
      "epoch": 0.7546670197272607,
      "grad_norm": 0.9716268636229163,
      "learning_rate": 3.474289867885876e-06,
      "loss": 1.5379,
      "step": 1425
    },
    {
      "epoch": 0.7573149741824441,
      "grad_norm": 0.9890097406031959,
      "learning_rate": 3.404514692617994e-06,
      "loss": 1.523,
      "step": 1430
    },
    {
      "epoch": 0.7599629286376274,
      "grad_norm": 1.0216348999301932,
      "learning_rate": 3.3353032804675157e-06,
      "loss": 1.5337,
      "step": 1435
    },
    {
      "epoch": 0.7626108830928108,
      "grad_norm": 1.0038373151987001,
      "learning_rate": 3.2666615474269202e-06,
      "loss": 1.5187,
      "step": 1440
    },
    {
      "epoch": 0.7652588375479942,
      "grad_norm": 0.9909609500438246,
      "learning_rate": 3.19859536079416e-06,
      "loss": 1.5172,
      "step": 1445
    },
    {
      "epoch": 0.7679067920031776,
      "grad_norm": 0.9689018876021506,
      "learning_rate": 3.1311105386711206e-06,
      "loss": 1.5179,
      "step": 1450
    },
    {
      "epoch": 0.7705547464583609,
      "grad_norm": 0.49237533241415254,
      "learning_rate": 3.064212849466335e-06,
      "loss": 1.5243,
      "step": 1455
    },
    {
      "epoch": 0.7732027009135443,
      "grad_norm": 1.028791952556653,
      "learning_rate": 2.997908011401902e-06,
      "loss": 1.5131,
      "step": 1460
    },
    {
      "epoch": 0.7758506553687277,
      "grad_norm": 0.988433671716438,
      "learning_rate": 2.932201692024701e-06,
      "loss": 1.4977,
      "step": 1465
    },
    {
      "epoch": 0.7784986098239111,
      "grad_norm": 1.033980515849518,
      "learning_rate": 2.8670995077219597e-06,
      "loss": 1.5317,
      "step": 1470
    },
    {
      "epoch": 0.7811465642790943,
      "grad_norm": 0.9775113095673127,
      "learning_rate": 2.8026070232411852e-06,
      "loss": 1.4968,
      "step": 1475
    },
    {
      "epoch": 0.7837945187342777,
      "grad_norm": 0.9877497208445748,
      "learning_rate": 2.7387297512144808e-06,
      "loss": 1.5224,
      "step": 1480
    },
    {
      "epoch": 0.7864424731894611,
      "grad_norm": 1.009024038211844,
      "learning_rate": 2.67547315168737e-06,
      "loss": 1.5423,
      "step": 1485
    },
    {
      "epoch": 0.7890904276446445,
      "grad_norm": 0.9732391094592455,
      "learning_rate": 2.6128426316520773e-06,
      "loss": 1.5248,
      "step": 1490
    },
    {
      "epoch": 0.7917383820998278,
      "grad_norm": 1.0039025745395442,
      "learning_rate": 2.550843544585342e-06,
      "loss": 1.5622,
      "step": 1495
    },
    {
      "epoch": 0.7943863365550112,
      "grad_norm": 0.9788028397156042,
      "learning_rate": 2.4894811899908387e-06,
      "loss": 1.5103,
      "step": 1500
    },
    {
      "epoch": 0.7970342910101946,
      "grad_norm": 0.9653871232501977,
      "learning_rate": 2.428760812946177e-06,
      "loss": 1.5425,
      "step": 1505
    },
    {
      "epoch": 0.799682245465378,
      "grad_norm": 0.9738743806340848,
      "learning_rate": 2.368687603654574e-06,
      "loss": 1.5121,
      "step": 1510
    },
    {
      "epoch": 0.8023301999205613,
      "grad_norm": 0.9969072135710934,
      "learning_rate": 2.3092666970012103e-06,
      "loss": 1.5026,
      "step": 1515
    },
    {
      "epoch": 0.8049781543757447,
      "grad_norm": 0.9655875746133022,
      "learning_rate": 2.250503172114301e-06,
      "loss": 1.5075,
      "step": 1520
    },
    {
      "epoch": 0.8076261088309281,
      "grad_norm": 0.9664980760046697,
      "learning_rate": 2.1924020519309742e-06,
      "loss": 1.5198,
      "step": 1525
    },
    {
      "epoch": 0.8102740632861115,
      "grad_norm": 0.9699897090047268,
      "learning_rate": 2.1349683027679e-06,
      "loss": 1.5223,
      "step": 1530
    },
    {
      "epoch": 0.8129220177412948,
      "grad_norm": 0.9900349045998372,
      "learning_rate": 2.078206833896792e-06,
      "loss": 1.5062,
      "step": 1535
    },
    {
      "epoch": 0.8155699721964782,
      "grad_norm": 1.0154071244795133,
      "learning_rate": 2.022122497124782e-06,
      "loss": 1.4834,
      "step": 1540
    },
    {
      "epoch": 0.8182179266516616,
      "grad_norm": 0.967706724069395,
      "learning_rate": 1.9667200863796965e-06,
      "loss": 1.5366,
      "step": 1545
    },
    {
      "epoch": 0.820865881106845,
      "grad_norm": 0.9742445082631996,
      "learning_rate": 1.9120043373002804e-06,
      "loss": 1.5226,
      "step": 1550
    },
    {
      "epoch": 0.8235138355620283,
      "grad_norm": 0.9855108838889343,
      "learning_rate": 1.8579799268314224e-06,
      "loss": 1.5561,
      "step": 1555
    },
    {
      "epoch": 0.8261617900172117,
      "grad_norm": 1.0038064244529792,
      "learning_rate": 1.8046514728243725e-06,
      "loss": 1.5247,
      "step": 1560
    },
    {
      "epoch": 0.8288097444723951,
      "grad_norm": 0.9739289064816902,
      "learning_rate": 1.7520235336420144e-06,
      "loss": 1.4929,
      "step": 1565
    },
    {
      "epoch": 0.8314576989275785,
      "grad_norm": 0.9680126539518724,
      "learning_rate": 1.7001006077692584e-06,
      "loss": 1.4823,
      "step": 1570
    },
    {
      "epoch": 0.8341056533827618,
      "grad_norm": 1.0214721547722743,
      "learning_rate": 1.648887133428485e-06,
      "loss": 1.51,
      "step": 1575
    },
    {
      "epoch": 0.8367536078379452,
      "grad_norm": 0.98797077056357,
      "learning_rate": 1.5983874882002083e-06,
      "loss": 1.5218,
      "step": 1580
    },
    {
      "epoch": 0.8394015622931286,
      "grad_norm": 0.9726850095038396,
      "learning_rate": 1.5486059886488825e-06,
      "loss": 1.5028,
      "step": 1585
    },
    {
      "epoch": 0.842049516748312,
      "grad_norm": 0.9907275183986879,
      "learning_rate": 1.4995468899539278e-06,
      "loss": 1.5141,
      "step": 1590
    },
    {
      "epoch": 0.8446974712034953,
      "grad_norm": 1.0095657865872754,
      "learning_rate": 1.4512143855460237e-06,
      "loss": 1.5251,
      "step": 1595
    },
    {
      "epoch": 0.8473454256586787,
      "grad_norm": 0.9948902717671079,
      "learning_rate": 1.403612606748659e-06,
      "loss": 1.4785,
      "step": 1600
    },
    {
      "epoch": 0.849993380113862,
      "grad_norm": 0.9845596560770083,
      "learning_rate": 1.356745622424992e-06,
      "loss": 1.5187,
      "step": 1605
    },
    {
      "epoch": 0.8526413345690455,
      "grad_norm": 0.953506811187465,
      "learning_rate": 1.3106174386300686e-06,
      "loss": 1.5425,
      "step": 1610
    },
    {
      "epoch": 0.8552892890242287,
      "grad_norm": 0.9618810193914135,
      "learning_rate": 1.2652319982683926e-06,
      "loss": 1.529,
      "step": 1615
    },
    {
      "epoch": 0.8579372434794121,
      "grad_norm": 0.9947966334717282,
      "learning_rate": 1.220593180756884e-06,
      "loss": 1.5154,
      "step": 1620
    },
    {
      "epoch": 0.8605851979345955,
      "grad_norm": 1.0030199127391055,
      "learning_rate": 1.1767048016933024e-06,
      "loss": 1.5311,
      "step": 1625
    },
    {
      "epoch": 0.8632331523897789,
      "grad_norm": 0.9719704049091674,
      "learning_rate": 1.1335706125300782e-06,
      "loss": 1.4993,
      "step": 1630
    },
    {
      "epoch": 0.8658811068449622,
      "grad_norm": 0.9971991350833769,
      "learning_rate": 1.091194300253654e-06,
      "loss": 1.5328,
      "step": 1635
    },
    {
      "epoch": 0.8685290613001456,
      "grad_norm": 0.9386404691705372,
      "learning_rate": 1.049579487069351e-06,
      "loss": 1.5276,
      "step": 1640
    },
    {
      "epoch": 0.871177015755329,
      "grad_norm": 0.956420931896452,
      "learning_rate": 1.008729730091721e-06,
      "loss": 1.5119,
      "step": 1645
    },
    {
      "epoch": 0.8738249702105124,
      "grad_norm": 0.9735098420048076,
      "learning_rate": 9.686485210405206e-07,
      "loss": 1.5079,
      "step": 1650
    },
    {
      "epoch": 0.8764729246656957,
      "grad_norm": 0.9920608419716006,
      "learning_rate": 9.293392859422401e-07,
      "loss": 1.5142,
      "step": 1655
    },
    {
      "epoch": 0.8791208791208791,
      "grad_norm": 0.9643322536422883,
      "learning_rate": 8.90805384837251e-07,
      "loss": 1.5117,
      "step": 1660
    },
    {
      "epoch": 0.8817688335760625,
      "grad_norm": 0.9788979391472972,
      "learning_rate": 8.530501114926082e-07,
      "loss": 1.5015,
      "step": 1665
    },
    {
      "epoch": 0.8844167880312459,
      "grad_norm": 0.9647617399657337,
      "learning_rate": 8.160766931205066e-07,
      "loss": 1.5293,
      "step": 1670
    },
    {
      "epoch": 0.8870647424864292,
      "grad_norm": 0.9564397438564627,
      "learning_rate": 7.798882901024196e-07,
      "loss": 1.5041,
      "step": 1675
    },
    {
      "epoch": 0.8897126969416126,
      "grad_norm": 0.9552845261427283,
      "learning_rate": 7.444879957189732e-07,
      "loss": 1.5371,
      "step": 1680
    },
    {
      "epoch": 0.892360651396796,
      "grad_norm": 0.9814428213129086,
      "learning_rate": 7.098788358855313e-07,
      "loss": 1.5187,
      "step": 1685
    },
    {
      "epoch": 0.8950086058519794,
      "grad_norm": 0.9877612943504955,
      "learning_rate": 6.760637688935457e-07,
      "loss": 1.5049,
      "step": 1690
    },
    {
      "epoch": 0.8976565603071627,
      "grad_norm": 0.9806604488270347,
      "learning_rate": 6.430456851577072e-07,
      "loss": 1.5063,
      "step": 1695
    },
    {
      "epoch": 0.9003045147623461,
      "grad_norm": 0.9576410171206939,
      "learning_rate": 6.108274069688603e-07,
      "loss": 1.5208,
      "step": 1700
    },
    {
      "epoch": 0.9029524692175295,
      "grad_norm": 0.9818044085686626,
      "learning_rate": 5.794116882527712e-07,
      "loss": 1.5124,
      "step": 1705
    },
    {
      "epoch": 0.9056004236727129,
      "grad_norm": 0.9685653401983324,
      "learning_rate": 5.488012143347399e-07,
      "loss": 1.5219,
      "step": 1710
    },
    {
      "epoch": 0.9082483781278962,
      "grad_norm": 0.9809825210830677,
      "learning_rate": 5.189986017100446e-07,
      "loss": 1.4738,
      "step": 1715
    },
    {
      "epoch": 0.9108963325830796,
      "grad_norm": 0.975406693103622,
      "learning_rate": 4.900063978203118e-07,
      "loss": 1.5399,
      "step": 1720
    },
    {
      "epoch": 0.913544287038263,
      "grad_norm": 0.986056861811607,
      "learning_rate": 4.6182708083575724e-07,
      "loss": 1.513,
      "step": 1725
    },
    {
      "epoch": 0.9161922414934464,
      "grad_norm": 0.9798958647556985,
      "learning_rate": 4.3446305944335856e-07,
      "loss": 1.5153,
      "step": 1730
    },
    {
      "epoch": 0.9188401959486296,
      "grad_norm": 0.9693236881798926,
      "learning_rate": 4.079166726409767e-07,
      "loss": 1.5199,
      "step": 1735
    },
    {
      "epoch": 0.921488150403813,
      "grad_norm": 0.9743074736324623,
      "learning_rate": 3.821901895374147e-07,
      "loss": 1.5116,
      "step": 1740
    },
    {
      "epoch": 0.9241361048589964,
      "grad_norm": 0.971024863447133,
      "learning_rate": 3.572858091584641e-07,
      "loss": 1.5119,
      "step": 1745
    },
    {
      "epoch": 0.9267840593141798,
      "grad_norm": 0.9677442463184608,
      "learning_rate": 3.3320566025894727e-07,
      "loss": 1.5257,
      "step": 1750
    },
    {
      "epoch": 0.9294320137693631,
      "grad_norm": 0.9827347277692877,
      "learning_rate": 3.0995180114074206e-07,
      "loss": 1.5172,
      "step": 1755
    },
    {
      "epoch": 0.9320799682245465,
      "grad_norm": 0.9743959370781557,
      "learning_rate": 2.875262194768513e-07,
      "loss": 1.5366,
      "step": 1760
    },
    {
      "epoch": 0.9347279226797299,
      "grad_norm": 0.9564493157330359,
      "learning_rate": 2.6593083214151215e-07,
      "loss": 1.5029,
      "step": 1765
    },
    {
      "epoch": 0.9373758771349133,
      "grad_norm": 0.9809452479317344,
      "learning_rate": 2.4516748504632815e-07,
      "loss": 1.5233,
      "step": 1770
    },
    {
      "epoch": 0.9400238315900966,
      "grad_norm": 0.9597734400844808,
      "learning_rate": 2.2523795298249996e-07,
      "loss": 1.5228,
      "step": 1775
    },
    {
      "epoch": 0.94267178604528,
      "grad_norm": 0.9477114517816689,
      "learning_rate": 2.0614393946911891e-07,
      "loss": 1.5101,
      "step": 1780
    },
    {
      "epoch": 0.9453197405004634,
      "grad_norm": 0.9698387988540002,
      "learning_rate": 1.8788707660755024e-07,
      "loss": 1.508,
      "step": 1785
    },
    {
      "epoch": 0.9479676949556468,
      "grad_norm": 0.9757478306640798,
      "learning_rate": 1.704689249419289e-07,
      "loss": 1.5049,
      "step": 1790
    },
    {
      "epoch": 0.9506156494108301,
      "grad_norm": 0.9668834988577203,
      "learning_rate": 1.5389097332577318e-07,
      "loss": 1.5293,
      "step": 1795
    },
    {
      "epoch": 0.9532636038660135,
      "grad_norm": 0.9901171627850629,
      "learning_rate": 1.381546387947097e-07,
      "loss": 1.5221,
      "step": 1800
    },
    {
      "epoch": 0.9559115583211969,
      "grad_norm": 0.9566404806968487,
      "learning_rate": 1.2326126644536362e-07,
      "loss": 1.5071,
      "step": 1805
    },
    {
      "epoch": 0.9585595127763803,
      "grad_norm": 0.9607827035134089,
      "learning_rate": 1.0921212932037517e-07,
      "loss": 1.5157,
      "step": 1810
    },
    {
      "epoch": 0.9612074672315636,
      "grad_norm": 0.9975142952632257,
      "learning_rate": 9.600842829958101e-08,
      "loss": 1.5242,
      "step": 1815
    },
    {
      "epoch": 0.963855421686747,
      "grad_norm": 0.9753226507807833,
      "learning_rate": 8.365129199737864e-08,
      "loss": 1.5068,
      "step": 1820
    },
    {
      "epoch": 0.9665033761419304,
      "grad_norm": 0.979318592057113,
      "learning_rate": 7.214177666624134e-08,
      "loss": 1.5272,
      "step": 1825
    },
    {
      "epoch": 0.9691513305971138,
      "grad_norm": 0.9696032798439096,
      "learning_rate": 6.148086610644144e-08,
      "loss": 1.508,
      "step": 1830
    },
    {
      "epoch": 0.9717992850522971,
      "grad_norm": 0.9600338608905342,
      "learning_rate": 5.166947158195768e-08,
      "loss": 1.496,
      "step": 1835
    },
    {
      "epoch": 0.9744472395074805,
      "grad_norm": 0.9384033653578022,
      "learning_rate": 4.2708431742577436e-08,
      "loss": 1.5241,
      "step": 1840
    },
    {
      "epoch": 0.9770951939626639,
      "grad_norm": 0.9567487185767485,
      "learning_rate": 3.459851255221858e-08,
      "loss": 1.5165,
      "step": 1845
    },
    {
      "epoch": 0.9797431484178473,
      "grad_norm": 0.9880298117128525,
      "learning_rate": 2.734040722345066e-08,
      "loss": 1.5055,
      "step": 1850
    },
    {
      "epoch": 0.9823911028730306,
      "grad_norm": 0.9695727389619365,
      "learning_rate": 2.0934736158245707e-08,
      "loss": 1.5072,
      "step": 1855
    },
    {
      "epoch": 0.985039057328214,
      "grad_norm": 0.9611774727863346,
      "learning_rate": 1.5382046894943936e-08,
      "loss": 1.5124,
      "step": 1860
    },
    {
      "epoch": 0.9876870117833974,
      "grad_norm": 0.9495491860194544,
      "learning_rate": 1.068281406145788e-08,
      "loss": 1.5035,
      "step": 1865
    },
    {
      "epoch": 0.9903349662385807,
      "grad_norm": 1.3371313882358693,
      "learning_rate": 6.837439334695939e-09,
      "loss": 1.486,
      "step": 1870
    },
    {
      "epoch": 0.992982920693764,
      "grad_norm": 0.9652831685373289,
      "learning_rate": 3.8462514062298505e-09,
      "loss": 1.5044,
      "step": 1875
    },
    {
      "epoch": 0.9956308751489474,
      "grad_norm": 0.9593787027270587,
      "learning_rate": 1.7095059542038272e-09,
      "loss": 1.5225,
      "step": 1880
    },
    {
      "epoch": 0.9982788296041308,
      "grad_norm": 0.9573474795742825,
      "learning_rate": 4.273856214742633e-10,
      "loss": 1.524,
      "step": 1885
    },
    {
      "epoch": 0.9998676022772408,
      "eval_loss": 1.526898980140686,
      "eval_runtime": 445.6497,
      "eval_samples_per_second": 60.011,
      "eval_steps_per_second": 3.752,
      "step": 1888
    },
    {
      "epoch": 0.9998676022772408,
      "step": 1888,
      "total_flos": 106218135748608.0,
      "train_loss": 1.5447227579809852,
      "train_runtime": 15813.94,
      "train_samples_per_second": 15.283,
      "train_steps_per_second": 0.119
    }
  ],
  "logging_steps": 5,
  "max_steps": 1888,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 106218135748608.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}