{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9993049349617714,
  "eval_steps": 500,
  "global_step": 1258,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0007943600436898023,
      "grad_norm": 11.12145892458997,
      "learning_rate": 1.5873015873015874e-07,
      "loss": 1.8009,
      "step": 1
    },
    {
      "epoch": 0.003971800218449012,
      "grad_norm": 10.234395651621842,
      "learning_rate": 7.936507936507937e-07,
      "loss": 1.7723,
      "step": 5
    },
    {
      "epoch": 0.007943600436898023,
      "grad_norm": 2.755899780672782,
      "learning_rate": 1.5873015873015873e-06,
      "loss": 1.709,
      "step": 10
    },
    {
      "epoch": 0.011915400655347037,
      "grad_norm": 1.5712351118319727,
      "learning_rate": 2.380952380952381e-06,
      "loss": 1.6524,
      "step": 15
    },
    {
      "epoch": 0.015887200873796047,
      "grad_norm": 1.1660528045797167,
      "learning_rate": 3.1746031746031746e-06,
      "loss": 1.6423,
      "step": 20
    },
    {
      "epoch": 0.01985900109224506,
      "grad_norm": 1.0216120526399777,
      "learning_rate": 3.968253968253968e-06,
      "loss": 1.6244,
      "step": 25
    },
    {
      "epoch": 0.023830801310694073,
      "grad_norm": 0.9509263430769295,
      "learning_rate": 4.761904761904762e-06,
      "loss": 1.6241,
      "step": 30
    },
    {
      "epoch": 0.027802601529143083,
      "grad_norm": 0.9135838434062775,
      "learning_rate": 5.555555555555557e-06,
      "loss": 1.6066,
      "step": 35
    },
    {
      "epoch": 0.03177440174759209,
      "grad_norm": 0.9277594740139052,
      "learning_rate": 6.349206349206349e-06,
      "loss": 1.5847,
      "step": 40
    },
    {
      "epoch": 0.035746201966041107,
      "grad_norm": 0.8992831827632032,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 1.5999,
      "step": 45
    },
    {
      "epoch": 0.03971800218449012,
      "grad_norm": 0.9718735971756692,
      "learning_rate": 7.936507936507936e-06,
      "loss": 1.6023,
      "step": 50
    },
    {
      "epoch": 0.04368980240293913,
      "grad_norm": 0.947148151716203,
      "learning_rate": 8.730158730158731e-06,
      "loss": 1.5825,
      "step": 55
    },
    {
      "epoch": 0.04766160262138815,
      "grad_norm": 0.9159573540177632,
      "learning_rate": 9.523809523809525e-06,
      "loss": 1.5978,
      "step": 60
    },
    {
      "epoch": 0.05163340283983715,
      "grad_norm": 0.9226608146834196,
      "learning_rate": 1.031746031746032e-05,
      "loss": 1.6003,
      "step": 65
    },
    {
      "epoch": 0.055605203058286166,
      "grad_norm": 0.9212318659674721,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 1.5856,
      "step": 70
    },
    {
      "epoch": 0.05957700327673518,
      "grad_norm": 0.8933451806059763,
      "learning_rate": 1.1904761904761905e-05,
      "loss": 1.5845,
      "step": 75
    },
    {
      "epoch": 0.06354880349518419,
      "grad_norm": 0.9657915647359273,
      "learning_rate": 1.2698412698412699e-05,
      "loss": 1.6089,
      "step": 80
    },
    {
      "epoch": 0.0675206037136332,
      "grad_norm": 0.9337955988860143,
      "learning_rate": 1.3492063492063494e-05,
      "loss": 1.5815,
      "step": 85
    },
    {
      "epoch": 0.07149240393208221,
      "grad_norm": 0.9873409918119863,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 1.5816,
      "step": 90
    },
    {
      "epoch": 0.07546420415053123,
      "grad_norm": 0.9015172917838395,
      "learning_rate": 1.507936507936508e-05,
      "loss": 1.5958,
      "step": 95
    },
    {
      "epoch": 0.07943600436898024,
      "grad_norm": 0.9772975467717536,
      "learning_rate": 1.5873015873015872e-05,
      "loss": 1.5911,
      "step": 100
    },
    {
      "epoch": 0.08340780458742925,
      "grad_norm": 0.9472359700179834,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.5865,
      "step": 105
    },
    {
      "epoch": 0.08737960480587827,
      "grad_norm": 0.9247784591978276,
      "learning_rate": 1.7460317460317463e-05,
      "loss": 1.5793,
      "step": 110
    },
    {
      "epoch": 0.09135140502432727,
      "grad_norm": 0.9231839444533193,
      "learning_rate": 1.8253968253968254e-05,
      "loss": 1.5822,
      "step": 115
    },
    {
      "epoch": 0.0953232052427763,
      "grad_norm": 0.9573224627010042,
      "learning_rate": 1.904761904761905e-05,
      "loss": 1.5718,
      "step": 120
    },
    {
      "epoch": 0.0992950054612253,
      "grad_norm": 1.0158728633269807,
      "learning_rate": 1.9841269841269845e-05,
      "loss": 1.5773,
      "step": 125
    },
    {
      "epoch": 0.1032668056796743,
      "grad_norm": 1.0442473658793399,
      "learning_rate": 1.999938384153589e-05,
      "loss": 1.585,
      "step": 130
    },
    {
      "epoch": 0.10723860589812333,
      "grad_norm": 0.9568516629501055,
      "learning_rate": 1.999688082790923e-05,
      "loss": 1.5868,
      "step": 135
    },
    {
      "epoch": 0.11121040611657233,
      "grad_norm": 0.9665579055787764,
      "learning_rate": 1.9992452930796544e-05,
      "loss": 1.5776,
      "step": 140
    },
    {
      "epoch": 0.11518220633502135,
      "grad_norm": 0.9323962932240065,
      "learning_rate": 1.9986101002782376e-05,
      "loss": 1.5789,
      "step": 145
    },
    {
      "epoch": 0.11915400655347036,
      "grad_norm": 0.9477001351382732,
      "learning_rate": 1.997782626692034e-05,
      "loss": 1.5814,
      "step": 150
    },
    {
      "epoch": 0.12312580677191937,
      "grad_norm": 0.9311645028761003,
      "learning_rate": 1.9967630316497663e-05,
      "loss": 1.5658,
      "step": 155
    },
    {
      "epoch": 0.12709760699036837,
      "grad_norm": 0.9585549328757471,
      "learning_rate": 1.995551511472836e-05,
      "loss": 1.5843,
      "step": 160
    },
    {
      "epoch": 0.1310694072088174,
      "grad_norm": 0.94350147619133,
      "learning_rate": 1.994148299437524e-05,
      "loss": 1.5589,
      "step": 165
    },
    {
      "epoch": 0.1350412074272664,
      "grad_norm": 0.9702241501746585,
      "learning_rate": 1.9925536657300734e-05,
      "loss": 1.5783,
      "step": 170
    },
    {
      "epoch": 0.13901300764571542,
      "grad_norm": 0.90878186459247,
      "learning_rate": 1.990767917394666e-05,
      "loss": 1.5716,
      "step": 175
    },
    {
      "epoch": 0.14298480786416443,
      "grad_norm": 0.9926126694962801,
      "learning_rate": 1.9887913982743e-05,
      "loss": 1.5705,
      "step": 180
    },
    {
      "epoch": 0.14695660808261343,
      "grad_norm": 0.8958939695984969,
      "learning_rate": 1.986624488944585e-05,
      "loss": 1.5738,
      "step": 185
    },
    {
      "epoch": 0.15092840830106247,
      "grad_norm": 0.9194318242066775,
      "learning_rate": 1.984267606640462e-05,
      "loss": 1.5729,
      "step": 190
    },
    {
      "epoch": 0.15490020851951147,
      "grad_norm": 0.9497866986358652,
      "learning_rate": 1.9817212051758667e-05,
      "loss": 1.5674,
      "step": 195
    },
    {
      "epoch": 0.15887200873796048,
      "grad_norm": 0.8988871329419085,
      "learning_rate": 1.978985774856346e-05,
      "loss": 1.5683,
      "step": 200
    },
    {
      "epoch": 0.16284380895640949,
      "grad_norm": 0.9722584116447944,
      "learning_rate": 1.9760618423846526e-05,
      "loss": 1.5737,
      "step": 205
    },
    {
      "epoch": 0.1668156091748585,
      "grad_norm": 0.9803590939025236,
      "learning_rate": 1.9729499707593284e-05,
      "loss": 1.5826,
      "step": 210
    },
    {
      "epoch": 0.17078740939330753,
      "grad_norm": 0.9344503381295605,
      "learning_rate": 1.9696507591663003e-05,
      "loss": 1.5565,
      "step": 215
    },
    {
      "epoch": 0.17475920961175653,
      "grad_norm": 0.9071381723958235,
      "learning_rate": 1.9661648428635066e-05,
      "loss": 1.5621,
      "step": 220
    },
    {
      "epoch": 0.17873100983020554,
      "grad_norm": 0.9288351049804889,
      "learning_rate": 1.962492893058582e-05,
      "loss": 1.5532,
      "step": 225
    },
    {
      "epoch": 0.18270281004865455,
      "grad_norm": 0.9288525670728277,
      "learning_rate": 1.9586356167796145e-05,
      "loss": 1.5801,
      "step": 230
    },
    {
      "epoch": 0.18667461026710355,
      "grad_norm": 0.9183899709915191,
      "learning_rate": 1.954593756739009e-05,
      "loss": 1.5801,
      "step": 235
    },
    {
      "epoch": 0.1906464104855526,
      "grad_norm": 0.9877621229108748,
      "learning_rate": 1.9512278901942467e-05,
      "loss": 1.5817,
      "step": 240
    },
    {
      "epoch": 0.1946182107040016,
      "grad_norm": 0.9364533708770005,
      "learning_rate": 1.9468557643703262e-05,
      "loss": 1.571,
      "step": 245
    },
    {
      "epoch": 0.1985900109224506,
      "grad_norm": 0.9156871855852468,
      "learning_rate": 1.942301322976593e-05,
      "loss": 1.5693,
      "step": 250
    },
    {
      "epoch": 0.2025618111408996,
      "grad_norm": 0.91239458392149,
      "learning_rate": 1.9375654429634866e-05,
      "loss": 1.556,
      "step": 255
    },
    {
      "epoch": 0.2065336113593486,
      "grad_norm": 0.904505667076919,
      "learning_rate": 1.9326490362171625e-05,
      "loss": 1.5763,
      "step": 260
    },
    {
      "epoch": 0.21050541157779765,
      "grad_norm": 0.901682231936739,
      "learning_rate": 1.9275530493839118e-05,
      "loss": 1.5706,
      "step": 265
    },
    {
      "epoch": 0.21447721179624665,
      "grad_norm": 0.9063579769395875,
      "learning_rate": 1.9222784636878853e-05,
      "loss": 1.567,
      "step": 270
    },
    {
      "epoch": 0.21844901201469566,
      "grad_norm": 0.9214787619294059,
      "learning_rate": 1.91682629474216e-05,
      "loss": 1.5742,
      "step": 275
    },
    {
      "epoch": 0.22242081223314467,
      "grad_norm": 0.9524031793810314,
      "learning_rate": 1.9111975923531858e-05,
      "loss": 1.5653,
      "step": 280
    },
    {
      "epoch": 0.22639261245159367,
      "grad_norm": 0.9437628509942092,
      "learning_rate": 1.905393440318645e-05,
      "loss": 1.555,
      "step": 285
    },
    {
      "epoch": 0.2303644126700427,
      "grad_norm": 0.9556933869893008,
      "learning_rate": 1.8994149562187702e-05,
      "loss": 1.5625,
      "step": 290
    },
    {
      "epoch": 0.2343362128884917,
      "grad_norm": 0.9430484585606292,
      "learning_rate": 1.8932632912011565e-05,
      "loss": 1.5567,
      "step": 295
    },
    {
      "epoch": 0.23830801310694072,
      "grad_norm": 0.9741243595635902,
      "learning_rate": 1.886939629759107e-05,
      "loss": 1.5553,
      "step": 300
    },
    {
      "epoch": 0.24227981332538973,
      "grad_norm": 0.9052975360645378,
      "learning_rate": 1.8804451895035645e-05,
      "loss": 1.5793,
      "step": 305
    },
    {
      "epoch": 0.24625161354383873,
      "grad_norm": 0.9742325579737205,
      "learning_rate": 1.873781220928659e-05,
      "loss": 1.5605,
      "step": 310
    },
    {
      "epoch": 0.25022341376228774,
      "grad_norm": 0.9031213199856675,
      "learning_rate": 1.866949007170929e-05,
      "loss": 1.5649,
      "step": 315
    },
    {
      "epoch": 0.25419521398073675,
      "grad_norm": 0.8702720949465248,
      "learning_rate": 1.859949863762256e-05,
      "loss": 1.5749,
      "step": 320
    },
    {
      "epoch": 0.2581670141991858,
      "grad_norm": 0.961271121572224,
      "learning_rate": 1.852785138376558e-05,
      "loss": 1.5752,
      "step": 325
    },
    {
      "epoch": 0.2621388144176348,
      "grad_norm": 0.8718585392590509,
      "learning_rate": 1.8454562105703e-05,
      "loss": 1.5577,
      "step": 330
    },
    {
      "epoch": 0.2661106146360838,
      "grad_norm": 0.8690540556968002,
      "learning_rate": 1.8379644915168623e-05,
      "loss": 1.55,
      "step": 335
    },
    {
      "epoch": 0.2700824148545328,
      "grad_norm": 0.9097212286930987,
      "learning_rate": 1.83031142373482e-05,
      "loss": 1.5737,
      "step": 340
    },
    {
      "epoch": 0.27405421507298183,
      "grad_norm": 0.8712230897009824,
      "learning_rate": 1.822498480810189e-05,
      "loss": 1.5739,
      "step": 345
    },
    {
      "epoch": 0.27802601529143084,
      "grad_norm": 0.948552271738661,
      "learning_rate": 1.8145271671126892e-05,
      "loss": 1.5657,
      "step": 350
    },
    {
      "epoch": 0.28199781550987985,
      "grad_norm": 0.9284269691863282,
      "learning_rate": 1.8063990175060807e-05,
      "loss": 1.5601,
      "step": 355
    },
    {
      "epoch": 0.28596961572832885,
      "grad_norm": 0.8854698701217475,
      "learning_rate": 1.798115597052629e-05,
      "loss": 1.5595,
      "step": 360
    },
    {
      "epoch": 0.28994141594677786,
      "grad_norm": 0.9222629630617156,
      "learning_rate": 1.7896785007117526e-05,
      "loss": 1.5682,
      "step": 365
    },
    {
      "epoch": 0.29391321616522686,
      "grad_norm": 0.9259111426870978,
      "learning_rate": 1.781089353032918e-05,
      "loss": 1.5614,
      "step": 370
    },
    {
      "epoch": 0.2978850163836759,
      "grad_norm": 0.9425926440581255,
      "learning_rate": 1.7723498078428355e-05,
      "loss": 1.5726,
      "step": 375
    },
    {
      "epoch": 0.30185681660212493,
      "grad_norm": 0.8788362482569841,
      "learning_rate": 1.7634615479270157e-05,
      "loss": 1.542,
      "step": 380
    },
    {
      "epoch": 0.30582861682057394,
      "grad_norm": 0.8993764823284865,
      "learning_rate": 1.754426284705753e-05,
      "loss": 1.5517,
      "step": 385
    },
    {
      "epoch": 0.30980041703902295,
      "grad_norm": 0.9114957468679018,
      "learning_rate": 1.7452457579045948e-05,
      "loss": 1.5566,
      "step": 390
    },
    {
      "epoch": 0.31377221725747195,
      "grad_norm": 0.9303896756930642,
      "learning_rate": 1.7359217352193587e-05,
      "loss": 1.5733,
      "step": 395
    },
    {
      "epoch": 0.31774401747592096,
      "grad_norm": 0.909449512224505,
      "learning_rate": 1.726456011975767e-05,
      "loss": 1.5492,
      "step": 400
    },
    {
      "epoch": 0.32171581769436997,
      "grad_norm": 0.9015167969914157,
      "learning_rate": 1.716850410783758e-05,
      "loss": 1.5696,
      "step": 405
    },
    {
      "epoch": 0.32568761791281897,
      "grad_norm": 0.9132981601959252,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 1.5507,
      "step": 410
    },
    {
      "epoch": 0.329659418131268,
      "grad_norm": 0.8960449583705886,
      "learning_rate": 1.6972269993045004e-05,
      "loss": 1.5903,
      "step": 415
    },
    {
      "epoch": 0.333631218349717,
      "grad_norm": 0.8844544873628968,
      "learning_rate": 1.6872129674738866e-05,
      "loss": 1.5593,
      "step": 420
    },
    {
      "epoch": 0.33760301856816605,
      "grad_norm": 0.9059334124525383,
      "learning_rate": 1.6770666138805904e-05,
      "loss": 1.5829,
      "step": 425
    },
    {
      "epoch": 0.34157481878661505,
      "grad_norm": 0.8586072355217647,
      "learning_rate": 1.666789892188841e-05,
      "loss": 1.5577,
      "step": 430
    },
    {
      "epoch": 0.34554661900506406,
      "grad_norm": 0.929783164434847,
      "learning_rate": 1.6563847811650376e-05,
      "loss": 1.5684,
      "step": 435
    },
    {
      "epoch": 0.34951841922351307,
      "grad_norm": 0.8725442050763763,
      "learning_rate": 1.64585328429674e-05,
      "loss": 1.5448,
      "step": 440
    },
    {
      "epoch": 0.3534902194419621,
      "grad_norm": 0.8749100355256942,
      "learning_rate": 1.635197429406901e-05,
      "loss": 1.5726,
      "step": 445
    },
    {
      "epoch": 0.3574620196604111,
      "grad_norm": 0.948191551262984,
      "learning_rate": 1.6244192682634143e-05,
      "loss": 1.5464,
      "step": 450
    },
    {
      "epoch": 0.3614338198788601,
      "grad_norm": 0.9305834084339885,
      "learning_rate": 1.6135208761840457e-05,
      "loss": 1.559,
      "step": 455
    },
    {
      "epoch": 0.3654056200973091,
      "grad_norm": 0.9397991327586414,
      "learning_rate": 1.602504351636838e-05,
      "loss": 1.5534,
      "step": 460
    },
    {
      "epoch": 0.3693774203157581,
      "grad_norm": 0.9213324585480839,
      "learning_rate": 1.591371815836051e-05,
      "loss": 1.5542,
      "step": 465
    },
    {
      "epoch": 0.3733492205342071,
      "grad_norm": 0.8853761110334394,
      "learning_rate": 1.580125412333728e-05,
      "loss": 1.5402,
      "step": 470
    },
    {
      "epoch": 0.37732102075265617,
      "grad_norm": 0.8691498605561269,
      "learning_rate": 1.5687673066069568e-05,
      "loss": 1.552,
      "step": 475
    },
    {
      "epoch": 0.3812928209711052,
      "grad_norm": 0.8998572635366405,
      "learning_rate": 1.5572996856409094e-05,
      "loss": 1.5638,
      "step": 480
    },
    {
      "epoch": 0.3852646211895542,
      "grad_norm": 0.8941950276870779,
      "learning_rate": 1.5457247575077445e-05,
      "loss": 1.5406,
      "step": 485
    },
    {
      "epoch": 0.3892364214080032,
      "grad_norm": 0.9097426424762942,
      "learning_rate": 1.534044750941444e-05,
      "loss": 1.5472,
      "step": 490
    },
    {
      "epoch": 0.3932082216264522,
      "grad_norm": 0.8791345662658966,
      "learning_rate": 1.5222619149086746e-05,
      "loss": 1.5413,
      "step": 495
    },
    {
      "epoch": 0.3971800218449012,
      "grad_norm": 0.8776919953132802,
      "learning_rate": 1.5103785181757533e-05,
      "loss": 1.5396,
      "step": 500
    },
    {
      "epoch": 0.4011518220633502,
      "grad_norm": 0.8664994594605832,
      "learning_rate": 1.4983968488718005e-05,
      "loss": 1.5426,
      "step": 505
    },
    {
      "epoch": 0.4051236222817992,
      "grad_norm": 0.8931546183056348,
      "learning_rate": 1.4863192140481624e-05,
      "loss": 1.5537,
      "step": 510
    },
    {
      "epoch": 0.4090954225002482,
      "grad_norm": 0.8442200510246032,
      "learning_rate": 1.4741479392341941e-05,
      "loss": 1.5586,
      "step": 515
    },
    {
      "epoch": 0.4130672227186972,
      "grad_norm": 0.9413240109314772,
      "learning_rate": 1.4618853679894813e-05,
      "loss": 1.5202,
      "step": 520
    },
    {
      "epoch": 0.4170390229371463,
      "grad_norm": 0.8865446672373302,
      "learning_rate": 1.4495338614525927e-05,
      "loss": 1.5503,
      "step": 525
    },
    {
      "epoch": 0.4210108231555953,
      "grad_norm": 0.9668394693388267,
      "learning_rate": 1.437095797886445e-05,
      "loss": 1.5488,
      "step": 530
    },
    {
      "epoch": 0.4249826233740443,
      "grad_norm": 0.9214606309227755,
      "learning_rate": 1.4245735722203736e-05,
      "loss": 1.54,
      "step": 535
    },
    {
      "epoch": 0.4289544235924933,
      "grad_norm": 0.9555320905995948,
      "learning_rate": 1.4119695955889925e-05,
      "loss": 1.5492,
      "step": 540
    },
    {
      "epoch": 0.4329262238109423,
      "grad_norm": 0.8802626029124945,
      "learning_rate": 1.3992862948679332e-05,
      "loss": 1.549,
      "step": 545
    },
    {
      "epoch": 0.4368980240293913,
      "grad_norm": 0.8902159443351422,
      "learning_rate": 1.3865261122065551e-05,
      "loss": 1.5482,
      "step": 550
    },
    {
      "epoch": 0.4408698242478403,
      "grad_norm": 0.8427883741871806,
      "learning_rate": 1.3736915045577122e-05,
      "loss": 1.5487,
      "step": 555
    },
    {
      "epoch": 0.44484162446628933,
      "grad_norm": 0.8251277406307889,
      "learning_rate": 1.3607849432046717e-05,
      "loss": 1.5478,
      "step": 560
    },
    {
      "epoch": 0.44881342468473834,
      "grad_norm": 0.8371689103015839,
      "learning_rate": 1.3478089132852717e-05,
      "loss": 1.5597,
      "step": 565
    },
    {
      "epoch": 0.45278522490318734,
      "grad_norm": 0.8217449927915415,
      "learning_rate": 1.3347659133134118e-05,
      "loss": 1.5141,
      "step": 570
    },
    {
      "epoch": 0.4567570251216364,
      "grad_norm": 0.8372002721470211,
      "learning_rate": 1.3216584546979702e-05,
      "loss": 1.5338,
      "step": 575
    },
    {
      "epoch": 0.4607288253400854,
      "grad_norm": 0.9501487795845303,
      "learning_rate": 1.3084890612592325e-05,
      "loss": 1.5633,
      "step": 580
    },
    {
      "epoch": 0.4647006255585344,
      "grad_norm": 0.8558411230061426,
      "learning_rate": 1.2952602687429364e-05,
      "loss": 1.5623,
      "step": 585
    },
    {
      "epoch": 0.4686724257769834,
      "grad_norm": 0.830027301501606,
      "learning_rate": 1.2819746243320176e-05,
      "loss": 1.5512,
      "step": 590
    },
    {
      "epoch": 0.47264422599543243,
      "grad_norm": 0.903709450047426,
      "learning_rate": 1.2686346861561538e-05,
      "loss": 1.5608,
      "step": 595
    },
    {
      "epoch": 0.47661602621388144,
      "grad_norm": 0.961449581540646,
      "learning_rate": 1.2552430227992005e-05,
      "loss": 1.542,
      "step": 600
    },
    {
      "epoch": 0.48058782643233044,
      "grad_norm": 0.877769489004286,
      "learning_rate": 1.2418022128046144e-05,
      "loss": 1.539,
      "step": 605
    },
    {
      "epoch": 0.48455962665077945,
      "grad_norm": 0.9130539885425439,
      "learning_rate": 1.2283148441789586e-05,
      "loss": 1.5276,
      "step": 610
    },
    {
      "epoch": 0.48853142686922846,
      "grad_norm": 0.882980412032183,
      "learning_rate": 1.2147835138935868e-05,
      "loss": 1.5204,
      "step": 615
    },
    {
      "epoch": 0.49250322708767746,
      "grad_norm": 0.8366128423326633,
      "learning_rate": 1.2012108273846011e-05,
      "loss": 1.5362,
      "step": 620
    },
    {
      "epoch": 0.4964750273061265,
      "grad_norm": 0.8538967577380823,
      "learning_rate": 1.1875993980511772e-05,
      "loss": 1.5187,
      "step": 625
    },
    {
      "epoch": 0.5004468275245755,
      "grad_norm": 0.8942558682507143,
      "learning_rate": 1.1739518467523614e-05,
      "loss": 1.5386,
      "step": 630
    },
    {
      "epoch": 0.5044186277430245,
      "grad_norm": 0.9187709823388034,
      "learning_rate": 1.1602708013024255e-05,
      "loss": 1.5245,
      "step": 635
    },
    {
      "epoch": 0.5083904279614735,
      "grad_norm": 0.8442877528307388,
      "learning_rate": 1.146558895964888e-05,
      "loss": 1.5596,
      "step": 640
    },
    {
      "epoch": 0.5123622281799225,
      "grad_norm": 0.898548400584606,
      "learning_rate": 1.1328187709452884e-05,
      "loss": 1.5406,
      "step": 645
    },
    {
      "epoch": 0.5163340283983716,
      "grad_norm": 0.8811961105698817,
      "learning_rate": 1.119053071882822e-05,
      "loss": 1.5356,
      "step": 650
    },
    {
      "epoch": 0.5203058286168206,
      "grad_norm": 0.8804203720770784,
      "learning_rate": 1.1052644493409255e-05,
      "loss": 1.5528,
      "step": 655
    },
    {
      "epoch": 0.5242776288352696,
      "grad_norm": 0.8797158963008079,
      "learning_rate": 1.091455558296914e-05,
      "loss": 1.5372,
      "step": 660
    },
    {
      "epoch": 0.5282494290537186,
      "grad_norm": 0.8350754715847598,
      "learning_rate": 1.077629057630771e-05,
      "loss": 1.5452,
      "step": 665
    },
    {
      "epoch": 0.5322212292721676,
      "grad_norm": 0.8903077066951119,
      "learning_rate": 1.0637876096131852e-05,
      "loss": 1.5283,
      "step": 670
    },
    {
      "epoch": 0.5361930294906166,
      "grad_norm": 0.8576752803021083,
      "learning_rate": 1.049933879392933e-05,
      "loss": 1.5433,
      "step": 675
    },
    {
      "epoch": 0.5401648297090657,
      "grad_norm": 0.8467022082112134,
      "learning_rate": 1.0360705344837105e-05,
      "loss": 1.5133,
      "step": 680
    },
    {
      "epoch": 0.5441366299275147,
      "grad_norm": 0.8817844142301194,
      "learning_rate": 1.0222002442505057e-05,
      "loss": 1.5411,
      "step": 685
    },
    {
      "epoch": 0.5481084301459637,
      "grad_norm": 0.8151311907804857,
      "learning_rate": 1.0083256793956186e-05,
      "loss": 1.5326,
      "step": 690
    },
    {
      "epoch": 0.5520802303644127,
      "grad_norm": 0.8251603017546312,
      "learning_rate": 9.944495114444205e-06,
      "loss": 1.5082,
      "step": 695
    },
    {
      "epoch": 0.5560520305828617,
      "grad_norm": 0.8869207810981268,
      "learning_rate": 9.805744122309557e-06,
      "loss": 1.5481,
      "step": 700
    },
    {
      "epoch": 0.5600238308013107,
      "grad_norm": 0.8719890884427858,
      "learning_rate": 9.667030533834862e-06,
      "loss": 1.5223,
      "step": 705
    },
    {
      "epoch": 0.5639956310197597,
      "grad_norm": 0.8664009171759752,
      "learning_rate": 9.528381058100707e-06,
      "loss": 1.5289,
      "step": 710
    },
    {
      "epoch": 0.5679674312382087,
      "grad_norm": 0.8819223345245485,
      "learning_rate": 9.389822391842886e-06,
      "loss": 1.5248,
      "step": 715
    },
    {
      "epoch": 0.5719392314566577,
      "grad_norm": 0.8462371220192982,
      "learning_rate": 9.251381214311974e-06,
      "loss": 1.5263,
      "step": 720
    },
    {
      "epoch": 0.5759110316751067,
      "grad_norm": 0.8556000311877207,
      "learning_rate": 9.113084182136267e-06,
      "loss": 1.5464,
      "step": 725
    },
    {
      "epoch": 0.5798828318935557,
      "grad_norm": 0.8854678303214264,
      "learning_rate": 8.974957924189108e-06,
      "loss": 1.5388,
      "step": 730
    },
    {
      "epoch": 0.5838546321120047,
      "grad_norm": 0.8605998763470911,
      "learning_rate": 8.837029036461537e-06,
      "loss": 1.5391,
      "step": 735
    },
    {
      "epoch": 0.5878264323304537,
      "grad_norm": 0.8176529738699428,
      "learning_rate": 8.699324076941278e-06,
      "loss": 1.5578,
      "step": 740
    },
    {
      "epoch": 0.5917982325489027,
      "grad_norm": 0.8500747154468798,
      "learning_rate": 8.561869560499017e-06,
      "loss": 1.5387,
      "step": 745
    },
    {
      "epoch": 0.5957700327673519,
      "grad_norm": 0.8074215452026124,
      "learning_rate": 8.42469195378304e-06,
      "loss": 1.5312,
      "step": 750
    },
    {
      "epoch": 0.5997418329858009,
      "grad_norm": 0.8452784103185206,
      "learning_rate": 8.287817670123101e-06,
      "loss": 1.5258,
      "step": 755
    },
    {
      "epoch": 0.6037136332042499,
      "grad_norm": 0.8486586156355476,
      "learning_rate": 8.151273064444583e-06,
      "loss": 1.5192,
      "step": 760
    },
    {
      "epoch": 0.6076854334226989,
      "grad_norm": 0.8146588435190393,
      "learning_rate": 8.015084428193895e-06,
      "loss": 1.5045,
      "step": 765
    },
    {
      "epoch": 0.6116572336411479,
      "grad_norm": 0.8320002856800587,
      "learning_rate": 7.879277984276106e-06,
      "loss": 1.5371,
      "step": 770
    },
    {
      "epoch": 0.6156290338595969,
      "grad_norm": 0.8314805091840901,
      "learning_rate": 7.74387988200576e-06,
      "loss": 1.5364,
      "step": 775
    },
    {
      "epoch": 0.6196008340780459,
      "grad_norm": 0.8340274339801351,
      "learning_rate": 7.608916192071856e-06,
      "loss": 1.5326,
      "step": 780
    },
    {
      "epoch": 0.6235726342964949,
      "grad_norm": 0.8445705617901014,
      "learning_rate": 7.474412901517998e-06,
      "loss": 1.543,
      "step": 785
    },
    {
      "epoch": 0.6275444345149439,
      "grad_norm": 0.8631402798545373,
      "learning_rate": 7.340395908738622e-06,
      "loss": 1.5258,
      "step": 790
    },
    {
      "epoch": 0.6315162347333929,
      "grad_norm": 0.839562146660397,
      "learning_rate": 7.206891018492308e-06,
      "loss": 1.5464,
      "step": 795
    },
    {
      "epoch": 0.6354880349518419,
      "grad_norm": 0.879806527044698,
      "learning_rate": 7.073923936933091e-06,
      "loss": 1.541,
      "step": 800
    },
    {
      "epoch": 0.6394598351702909,
      "grad_norm": 0.8536606816656509,
      "learning_rate": 6.941520266660819e-06,
      "loss": 1.5206,
      "step": 805
    },
    {
      "epoch": 0.6434316353887399,
      "grad_norm": 0.8318733759254066,
      "learning_rate": 6.809705501791379e-06,
      "loss": 1.5407,
      "step": 810
    },
    {
      "epoch": 0.6474034356071889,
      "grad_norm": 0.8459590871404428,
      "learning_rate": 6.678505023047871e-06,
      "loss": 1.5216,
      "step": 815
    },
    {
      "epoch": 0.6513752358256379,
      "grad_norm": 0.8295606496141018,
      "learning_rate": 6.5479440928735695e-06,
      "loss": 1.5388,
      "step": 820
    },
    {
      "epoch": 0.655347036044087,
      "grad_norm": 0.8553791665518854,
      "learning_rate": 6.418047850567699e-06,
      "loss": 1.5045,
      "step": 825
    },
    {
      "epoch": 0.659318836262536,
      "grad_norm": 0.8249727222480653,
      "learning_rate": 6.2888413074448954e-06,
      "loss": 1.5061,
      "step": 830
    },
    {
      "epoch": 0.663290636480985,
      "grad_norm": 0.8486443716521056,
      "learning_rate": 6.160349342019299e-06,
      "loss": 1.5202,
      "step": 835
    },
    {
      "epoch": 0.667262436699434,
      "grad_norm": 0.8425578204636239,
      "learning_rate": 6.032596695214272e-06,
      "loss": 1.524,
      "step": 840
    },
    {
      "epoch": 0.671234236917883,
      "grad_norm": 0.8607932133751491,
      "learning_rate": 5.905607965598542e-06,
      "loss": 1.5054,
      "step": 845
    },
    {
      "epoch": 0.6752060371363321,
      "grad_norm": 0.87671463806252,
      "learning_rate": 5.779407604649794e-06,
      "loss": 1.5256,
      "step": 850
    },
    {
      "epoch": 0.6791778373547811,
      "grad_norm": 0.8240359232861163,
      "learning_rate": 5.6540199120465985e-06,
      "loss": 1.5311,
      "step": 855
    },
    {
      "epoch": 0.6831496375732301,
      "grad_norm": 0.8880311823283904,
      "learning_rate": 5.529469030989511e-06,
      "loss": 1.5073,
      "step": 860
    },
    {
      "epoch": 0.6871214377916791,
      "grad_norm": 0.8623990682016829,
      "learning_rate": 5.405778943552373e-06,
      "loss": 1.5379,
      "step": 865
    },
    {
      "epoch": 0.6910932380101281,
      "grad_norm": 0.8262496371808098,
      "learning_rate": 5.282973466064567e-06,
      "loss": 1.5145,
      "step": 870
    },
    {
      "epoch": 0.6950650382285771,
      "grad_norm": 0.8482342550959973,
      "learning_rate": 5.161076244525254e-06,
      "loss": 1.5275,
      "step": 875
    },
    {
      "epoch": 0.6990368384470261,
      "grad_norm": 0.8328878311342953,
      "learning_rate": 5.040110750050356e-06,
      "loss": 1.528,
      "step": 880
    },
    {
      "epoch": 0.7030086386654751,
      "grad_norm": 0.8164657341322779,
      "learning_rate": 4.9201002743532175e-06,
      "loss": 1.5092,
      "step": 885
    },
    {
      "epoch": 0.7069804388839241,
      "grad_norm": 0.8490381133150996,
      "learning_rate": 4.801067925259857e-06,
      "loss": 1.5454,
      "step": 890
    },
    {
      "epoch": 0.7109522391023732,
      "grad_norm": 0.8440725903075691,
      "learning_rate": 4.683036622259562e-06,
      "loss": 1.5224,
      "step": 895
    },
    {
      "epoch": 0.7149240393208222,
      "grad_norm": 0.8366180827607501,
      "learning_rate": 4.566029092091798e-06,
      "loss": 1.5413,
      "step": 900
    },
    {
      "epoch": 0.7188958395392712,
      "grad_norm": 0.8427766816018408,
      "learning_rate": 4.450067864370187e-06,
      "loss": 1.5173,
      "step": 905
    },
    {
      "epoch": 0.7228676397577202,
      "grad_norm": 0.8028067037799445,
      "learning_rate": 4.335175267244495e-06,
      "loss": 1.5275,
      "step": 910
    },
    {
      "epoch": 0.7268394399761692,
      "grad_norm": 0.795885144548523,
      "learning_rate": 4.221373423101356e-06,
      "loss": 1.5166,
      "step": 915
    },
    {
      "epoch": 0.7308112401946182,
      "grad_norm": 0.8378592839978902,
      "learning_rate": 4.108684244304668e-06,
      "loss": 1.5444,
      "step": 920
    },
    {
      "epoch": 0.7347830404130672,
      "grad_norm": 0.8299609184798702,
      "learning_rate": 3.997129428976374e-06,
      "loss": 1.5373,
      "step": 925
    },
    {
      "epoch": 0.7387548406315162,
      "grad_norm": 0.8200382060222382,
      "learning_rate": 3.886730456818546e-06,
      "loss": 1.5203,
      "step": 930
    },
    {
      "epoch": 0.7427266408499652,
      "grad_norm": 0.8198010231353143,
      "learning_rate": 3.7775085849774973e-06,
      "loss": 1.5291,
      "step": 935
    },
    {
      "epoch": 0.7466984410684142,
      "grad_norm": 0.8232628031861067,
      "learning_rate": 3.6694848439507347e-06,
      "loss": 1.5334,
      "step": 940
    },
    {
      "epoch": 0.7506702412868632,
      "grad_norm": 0.81406141716096,
      "learning_rate": 3.562680033537598e-06,
      "loss": 1.5091,
      "step": 945
    },
    {
      "epoch": 0.7546420415053123,
      "grad_norm": 0.8087303425914779,
      "learning_rate": 3.457114718834278e-06,
      "loss": 1.5313,
      "step": 950
    },
    {
      "epoch": 0.7586138417237613,
      "grad_norm": 0.8083510625412864,
      "learning_rate": 3.3528092262740284e-06,
      "loss": 1.5234,
      "step": 955
    },
    {
      "epoch": 0.7625856419422103,
      "grad_norm": 0.8366676453174993,
      "learning_rate": 3.2497836397133663e-06,
      "loss": 1.538,
      "step": 960
    },
    {
      "epoch": 0.7665574421606594,
      "grad_norm": 0.8393538795255264,
      "learning_rate": 3.148057796564944e-06,
      "loss": 1.5216,
      "step": 965
    },
    {
      "epoch": 0.7705292423791084,
      "grad_norm": 0.8520167906125237,
      "learning_rate": 3.0476512839778894e-06,
      "loss": 1.5293,
      "step": 970
    },
    {
      "epoch": 0.7745010425975574,
      "grad_norm": 0.8371019471875751,
      "learning_rate": 2.948583435066329e-06,
      "loss": 1.513,
      "step": 975
    },
    {
      "epoch": 0.7784728428160064,
      "grad_norm": 0.8298639126272734,
      "learning_rate": 2.850873325186826e-06,
      "loss": 1.5268,
      "step": 980
    },
    {
      "epoch": 0.7824446430344554,
      "grad_norm": 0.8374513555969967,
      "learning_rate": 2.7545397682654693e-06,
      "loss": 1.5097,
      "step": 985
    },
    {
      "epoch": 0.7864164432529044,
      "grad_norm": 0.8200846092055774,
      "learning_rate": 2.6596013131752498e-06,
      "loss": 1.543,
      "step": 990
    },
    {
      "epoch": 0.7903882434713534,
      "grad_norm": 0.8148429888845868,
      "learning_rate": 2.566076240164536e-06,
      "loss": 1.5522,
      "step": 995
    },
    {
      "epoch": 0.7943600436898024,
      "grad_norm": 0.8131435969188188,
      "learning_rate": 2.47398255733722e-06,
      "loss": 1.5245,
      "step": 1000
    },
    {
      "epoch": 0.7983318439082514,
      "grad_norm": 0.8284789559376622,
      "learning_rate": 2.383337997185299e-06,
      "loss": 1.5427,
      "step": 1005
    },
    {
      "epoch": 0.8023036441267004,
      "grad_norm": 0.8321861504525199,
      "learning_rate": 2.2941600131744978e-06,
      "loss": 1.5072,
      "step": 1010
    },
    {
      "epoch": 0.8062754443451494,
      "grad_norm": 0.8065047141388554,
      "learning_rate": 2.2064657763836474e-06,
      "loss": 1.5179,
      "step": 1015
    },
    {
      "epoch": 0.8102472445635984,
      "grad_norm": 0.7992276385422232,
      "learning_rate": 2.120272172198412e-06,
      "loss": 1.5272,
      "step": 1020
    },
    {
      "epoch": 0.8142190447820474,
      "grad_norm": 0.8229954286323785,
      "learning_rate": 2.0355957970600406e-06,
      "loss": 1.5027,
      "step": 1025
    },
    {
      "epoch": 0.8181908450004964,
      "grad_norm": 0.8275865130237602,
      "learning_rate": 1.952452955269738e-06,
      "loss": 1.5267,
      "step": 1030
    },
    {
      "epoch": 0.8221626452189454,
      "grad_norm": 0.8081049582664315,
      "learning_rate": 1.870859655849304e-06,
      "loss": 1.5426,
      "step": 1035
    },
    {
      "epoch": 0.8261344454373944,
      "grad_norm": 0.823108756311465,
      "learning_rate": 1.7908316094586343e-06,
      "loss": 1.5388,
      "step": 1040
    },
    {
      "epoch": 0.8301062456558436,
      "grad_norm": 0.8355851750630425,
      "learning_rate": 1.7123842253706302e-06,
      "loss": 1.5014,
      "step": 1045
    },
    {
      "epoch": 0.8340780458742926,
      "grad_norm": 0.8374873929351686,
      "learning_rate": 1.6355326085041944e-06,
      "loss": 1.5008,
      "step": 1050
    },
    {
      "epoch": 0.8380498460927416,
      "grad_norm": 0.8230242479079336,
      "learning_rate": 1.5602915565157895e-06,
      "loss": 1.5249,
      "step": 1055
    },
    {
      "epoch": 0.8420216463111906,
      "grad_norm": 0.8028112832237307,
      "learning_rate": 1.4866755569501757e-06,
      "loss": 1.5134,
      "step": 1060
    },
    {
      "epoch": 0.8459934465296396,
      "grad_norm": 0.7982837438136364,
      "learning_rate": 1.4146987844508509e-06,
      "loss": 1.5127,
      "step": 1065
    },
    {
      "epoch": 0.8499652467480886,
      "grad_norm": 0.799526922604763,
      "learning_rate": 1.344375098030759e-06,
      "loss": 1.5145,
      "step": 1070
    },
    {
      "epoch": 0.8539370469665376,
      "grad_norm": 0.8028491606704994,
      "learning_rate": 1.2757180384037505e-06,
      "loss": 1.5601,
      "step": 1075
    },
    {
      "epoch": 0.8579088471849866,
      "grad_norm": 0.8062739981966481,
      "learning_rate": 1.2087408253773326e-06,
      "loss": 1.5104,
      "step": 1080
    },
    {
      "epoch": 0.8618806474034356,
      "grad_norm": 0.810426514136181,
      "learning_rate": 1.1434563553072332e-06,
      "loss": 1.5271,
      "step": 1085
    },
    {
      "epoch": 0.8658524476218846,
      "grad_norm": 0.8106217030883788,
      "learning_rate": 1.0798771986142186e-06,
      "loss": 1.5275,
      "step": 1090
    },
    {
      "epoch": 0.8698242478403336,
      "grad_norm": 0.8239831789081276,
      "learning_rate": 1.018015597363673e-06,
      "loss": 1.5435,
      "step": 1095
    },
    {
      "epoch": 0.8737960480587826,
      "grad_norm": 0.8032988356076526,
      "learning_rate": 9.578834629084199e-07,
      "loss": 1.5006,
      "step": 1100
    },
    {
      "epoch": 0.8777678482772316,
      "grad_norm": 0.8107500288685499,
      "learning_rate": 8.994923735952033e-07,
      "loss": 1.5175,
      "step": 1105
    },
    {
      "epoch": 0.8817396484956807,
      "grad_norm": 0.7947044768084438,
      "learning_rate": 8.428535725353016e-07,
      "loss": 1.5134,
      "step": 1110
    },
    {
      "epoch": 0.8857114487141297,
      "grad_norm": 0.8184180092012469,
      "learning_rate": 7.879779654396724e-07,
      "loss": 1.5308,
      "step": 1115
    },
    {
      "epoch": 0.8896832489325787,
      "grad_norm": 0.7812672670695122,
      "learning_rate": 7.348761185190889e-07,
      "loss": 1.5286,
      "step": 1120
    },
    {
      "epoch": 0.8936550491510277,
      "grad_norm": 0.8031429398431277,
      "learning_rate": 6.835582564496257e-07,
      "loss": 1.5189,
      "step": 1125
    },
    {
      "epoch": 0.8976268493694767,
      "grad_norm": 0.8262986271960238,
      "learning_rate": 6.340342604039151e-07,
      "loss": 1.5137,
      "step": 1130
    },
    {
      "epoch": 0.9015986495879257,
      "grad_norm": 0.8091884297906192,
      "learning_rate": 5.86313666148538e-07,
      "loss": 1.5287,
      "step": 1135
    },
    {
      "epoch": 0.9055704498063747,
      "grad_norm": 0.8083089870588875,
      "learning_rate": 5.40405662207939e-07,
      "loss": 1.5211,
      "step": 1140
    },
    {
      "epoch": 0.9095422500248238,
      "grad_norm": 0.8080847975437143,
      "learning_rate": 4.963190880951807e-07,
      "loss": 1.4942,
      "step": 1145
    },
    {
      "epoch": 0.9135140502432728,
      "grad_norm": 0.8112755205962424,
      "learning_rate": 4.540624326099108e-07,
      "loss": 1.5361,
      "step": 1150
    },
    {
      "epoch": 0.9174858504617218,
      "grad_norm": 0.8137159093665213,
      "learning_rate": 4.136438322038594e-07,
      "loss": 1.5305,
      "step": 1155
    },
    {
      "epoch": 0.9214576506801708,
      "grad_norm": 0.8099723012257717,
      "learning_rate": 3.7507106941418127e-07,
      "loss": 1.5136,
      "step": 1160
    },
    {
      "epoch": 0.9254294508986198,
      "grad_norm": 0.7874754581462157,
      "learning_rate": 3.3835157136493434e-07,
      "loss": 1.5192,
      "step": 1165
    },
    {
      "epoch": 0.9294012511170688,
      "grad_norm": 0.8097635833636108,
      "learning_rate": 3.0349240833699944e-07,
      "loss": 1.53,
      "step": 1170
    },
    {
      "epoch": 0.9333730513355178,
      "grad_norm": 0.7839597922407979,
      "learning_rate": 2.7050029240671573e-07,
      "loss": 1.5406,
      "step": 1175
    },
    {
      "epoch": 0.9373448515539669,
      "grad_norm": 0.8036152094174389,
      "learning_rate": 2.3938157615347455e-07,
      "loss": 1.5142,
      "step": 1180
    },
    {
      "epoch": 0.9413166517724159,
      "grad_norm": 0.7969437729501767,
      "learning_rate": 2.1014225143654145e-07,
      "loss": 1.5168,
      "step": 1185
    },
    {
      "epoch": 0.9452884519908649,
      "grad_norm": 0.7986549378127612,
      "learning_rate": 1.8278794824133417e-07,
      "loss": 1.5233,
      "step": 1190
    },
    {
      "epoch": 0.9492602522093139,
      "grad_norm": 0.807465267893701,
      "learning_rate": 1.5732393359537912e-07,
      "loss": 1.5193,
      "step": 1195
    },
    {
      "epoch": 0.9532320524277629,
      "grad_norm": 0.8289613150037851,
      "learning_rate": 1.3375511055415346e-07,
      "loss": 1.5306,
      "step": 1200
    },
    {
      "epoch": 0.9572038526462119,
      "grad_norm": 0.8200430426978503,
      "learning_rate": 1.1208601725700374e-07,
      "loss": 1.5185,
      "step": 1205
    },
    {
      "epoch": 0.9611756528646609,
      "grad_norm": 0.8207956989586505,
      "learning_rate": 9.232082605334369e-08,
      "loss": 1.5261,
      "step": 1210
    },
    {
      "epoch": 0.9651474530831099,
      "grad_norm": 0.8115966572361262,
      "learning_rate": 7.446334269926803e-08,
      "loss": 1.5243,
      "step": 1215
    },
    {
      "epoch": 0.9691192533015589,
      "grad_norm": 0.7861487020593475,
      "learning_rate": 5.851700562476304e-08,
      "loss": 1.5168,
      "step": 1220
    },
    {
      "epoch": 0.9730910535200079,
      "grad_norm": 0.7965492838019634,
      "learning_rate": 4.448488527164174e-08,
      "loss": 1.5238,
      "step": 1225
    },
    {
      "epoch": 0.9770628537384569,
      "grad_norm": 0.7940697296490887,
      "learning_rate": 3.236968350233793e-08,
      "loss": 1.5131,
      "step": 1230
    },
    {
      "epoch": 0.9810346539569059,
      "grad_norm": 0.8173719106280355,
      "learning_rate": 2.217373307966009e-08,
      "loss": 1.5142,
      "step": 1235
    },
    {
      "epoch": 0.9850064541753549,
      "grad_norm": 0.7895572135274385,
      "learning_rate": 1.389899721762844e-08,
      "loss": 1.5156,
      "step": 1240
    },
    {
      "epoch": 0.988978254393804,
      "grad_norm": 0.7929243131364883,
      "learning_rate": 7.54706920345738e-09,
      "loss": 1.5024,
      "step": 1245
    },
    {
      "epoch": 0.992950054612253,
      "grad_norm": 0.7966487036007772,
      "learning_rate": 3.119172090771949e-09,
      "loss": 1.5061,
      "step": 1250
    },
    {
      "epoch": 0.9969218548307021,
      "grad_norm": 0.779081898680904,
      "learning_rate": 6.161584641128926e-10,
      "loss": 1.526,
      "step": 1255
    },
    {
      "epoch": 0.9993049349617714,
      "eval_loss": 1.5326507091522217,
      "eval_runtime": 268.2952,
      "eval_samples_per_second": 99.681,
      "eval_steps_per_second": 4.156,
      "step": 1258
    },
    {
      "epoch": 0.9993049349617714,
      "step": 1258,
      "total_flos": 106161864966144.0,
      "train_loss": 1.5477431688475496,
      "train_runtime": 10888.6684,
      "train_samples_per_second": 22.196,
      "train_steps_per_second": 0.116
    }
  ],
  "logging_steps": 5,
  "max_steps": 1258,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 106161864966144.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}