{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 185568,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.9865278496292465e-05, "loss": 3.1195, "step": 500 },
    { "epoch": 0.02, "learning_rate": 4.973055699258493e-05, "loss": 2.3468, "step": 1000 },
    { "epoch": 0.02, "learning_rate": 4.95958354888774e-05, "loss": 2.1174, "step": 1500 },
    { "epoch": 0.03, "learning_rate": 4.946111398516986e-05, "loss": 1.9833, "step": 2000 },
    { "epoch": 0.04, "learning_rate": 4.9326392481462324e-05, "loss": 1.8805, "step": 2500 },
    { "epoch": 0.05, "learning_rate": 4.9191670977754786e-05, "loss": 1.803, "step": 3000 },
    { "epoch": 0.06, "learning_rate": 4.9056949474047256e-05, "loss": 1.741, "step": 3500 },
    { "epoch": 0.06, "learning_rate": 4.892222797033972e-05, "loss": 1.7125, "step": 4000 },
    { "epoch": 0.07, "learning_rate": 4.878750646663218e-05, "loss": 1.659, "step": 4500 },
    { "epoch": 0.08, "learning_rate": 4.8652784962924645e-05, "loss": 1.6171, "step": 5000 },
    { "epoch": 0.09, "learning_rate": 4.8518063459217114e-05, "loss": 1.5846, "step": 5500 },
    { "epoch": 0.1, "learning_rate": 4.838334195550957e-05, "loss": 1.57, "step": 6000 },
    { "epoch": 0.11, "learning_rate": 4.824862045180203e-05, "loss": 1.5535, "step": 6500 },
    { "epoch": 0.11, "learning_rate": 4.8113898948094496e-05, "loss": 1.5426, "step": 7000 },
    { "epoch": 0.12, "learning_rate": 4.7979177444386966e-05, "loss": 1.5053, "step": 7500 },
    { "epoch": 0.13, "learning_rate": 4.784445594067943e-05, "loss": 1.4924, "step": 8000 },
    { "epoch": 0.14, "learning_rate": 4.770973443697189e-05, "loss": 1.4624, "step": 8500 },
    { "epoch": 0.15, "learning_rate": 4.7575012933264354e-05, "loss": 1.465, "step": 9000 },
    { "epoch": 0.15, "learning_rate": 4.7440291429556824e-05, "loss": 1.4493, "step": 9500 },
    { "epoch": 0.16, "learning_rate": 4.730556992584929e-05, "loss": 1.4349, "step": 10000 },
    { "epoch": 0.17, "learning_rate": 4.717084842214175e-05, "loss": 1.4282, "step": 10500 },
    { "epoch": 0.18, "learning_rate": 4.703612691843421e-05, "loss": 1.397, "step": 11000 },
    { "epoch": 0.19, "learning_rate": 4.690140541472668e-05, "loss": 1.4094, "step": 11500 },
    { "epoch": 0.19, "learning_rate": 4.6766683911019145e-05, "loss": 1.3788, "step": 12000 },
    { "epoch": 0.2, "learning_rate": 4.663196240731161e-05, "loss": 1.3869, "step": 12500 },
    { "epoch": 0.21, "learning_rate": 4.649724090360407e-05, "loss": 1.3738, "step": 13000 },
    { "epoch": 0.22, "learning_rate": 4.6362519399896534e-05, "loss": 1.3786, "step": 13500 },
    { "epoch": 0.23, "learning_rate": 4.6227797896189e-05, "loss": 1.3482, "step": 14000 },
    { "epoch": 0.23, "learning_rate": 4.6093076392481466e-05, "loss": 1.339, "step": 14500 },
    { "epoch": 0.24, "learning_rate": 4.595835488877393e-05, "loss": 1.3486, "step": 15000 },
    { "epoch": 0.25, "learning_rate": 4.582363338506639e-05, "loss": 1.32, "step": 15500 },
    { "epoch": 0.26, "learning_rate": 4.568891188135886e-05, "loss": 1.3085, "step": 16000 },
    { "epoch": 0.27, "learning_rate": 4.5554190377651324e-05, "loss": 1.318, "step": 16500 },
    { "epoch": 0.27, "learning_rate": 4.541946887394379e-05, "loss": 1.2964, "step": 17000 },
    { "epoch": 0.28, "learning_rate": 4.528474737023625e-05, "loss": 1.3008, "step": 17500 },
    { "epoch": 0.29, "learning_rate": 4.515002586652871e-05, "loss": 1.2903, "step": 18000 },
    { "epoch": 0.3, "learning_rate": 4.5015304362821176e-05, "loss": 1.2753, "step": 18500 },
    { "epoch": 0.31, "learning_rate": 4.488058285911364e-05, "loss": 1.2971, "step": 19000 },
    { "epoch": 0.32, "learning_rate": 4.47458613554061e-05, "loss": 1.2741, "step": 19500 },
    { "epoch": 0.32, "learning_rate": 4.461113985169857e-05, "loss": 1.2801, "step": 20000 },
    { "epoch": 0.33, "learning_rate": 4.4476418347991034e-05, "loss": 1.2865, "step": 20500 },
    { "epoch": 0.34, "learning_rate": 4.43416968442835e-05, "loss": 1.2644, "step": 21000 },
    { "epoch": 0.35, "learning_rate": 4.420697534057596e-05, "loss": 1.2686, "step": 21500 },
    { "epoch": 0.36, "learning_rate": 4.407225383686843e-05, "loss": 1.2518, "step": 22000 },
    { "epoch": 0.36, "learning_rate": 4.393753233316089e-05, "loss": 1.2541, "step": 22500 },
    { "epoch": 0.37, "learning_rate": 4.3802810829453355e-05, "loss": 1.2372, "step": 23000 },
    { "epoch": 0.38, "learning_rate": 4.366808932574582e-05, "loss": 1.2392, "step": 23500 },
    { "epoch": 0.39, "learning_rate": 4.353336782203829e-05, "loss": 1.2406, "step": 24000 },
    { "epoch": 0.4, "learning_rate": 4.339864631833075e-05, "loss": 1.243, "step": 24500 },
    { "epoch": 0.4, "learning_rate": 4.3263924814623213e-05, "loss": 1.2331, "step": 25000 },
    { "epoch": 0.41, "learning_rate": 4.3129203310915676e-05, "loss": 1.2337, "step": 25500 },
    { "epoch": 0.42, "learning_rate": 4.299448180720814e-05, "loss": 1.2269, "step": 26000 },
    { "epoch": 0.43, "learning_rate": 4.285976030350061e-05, "loss": 1.2175, "step": 26500 },
    { "epoch": 0.44, "learning_rate": 4.272503879979307e-05, "loss": 1.2061, "step": 27000 },
    { "epoch": 0.44, "learning_rate": 4.2590317296085535e-05, "loss": 1.2246, "step": 27500 },
    { "epoch": 0.45, "learning_rate": 4.2455595792378e-05, "loss": 1.2102, "step": 28000 },
    { "epoch": 0.46, "learning_rate": 4.232087428867047e-05, "loss": 1.2054, "step": 28500 },
    { "epoch": 0.47, "learning_rate": 4.218615278496293e-05, "loss": 1.2038, "step": 29000 },
    { "epoch": 0.48, "learning_rate": 4.205143128125539e-05, "loss": 1.207, "step": 29500 },
    { "epoch": 0.48, "learning_rate": 4.1916709777547856e-05, "loss": 1.1969, "step": 30000 },
    { "epoch": 0.49, "learning_rate": 4.178198827384032e-05, "loss": 1.1912, "step": 30500 },
    { "epoch": 0.5, "learning_rate": 4.164726677013278e-05, "loss": 1.1835, "step": 31000 },
    { "epoch": 0.51, "learning_rate": 4.1512545266425244e-05, "loss": 1.1881, "step": 31500 },
    { "epoch": 0.52, "learning_rate": 4.137782376271771e-05, "loss": 1.1942, "step": 32000 },
    { "epoch": 0.53, "learning_rate": 4.124310225901018e-05, "loss": 1.1888, "step": 32500 },
    { "epoch": 0.53, "learning_rate": 4.110838075530264e-05, "loss": 1.1791, "step": 33000 },
    { "epoch": 0.54, "learning_rate": 4.09736592515951e-05, "loss": 1.167, "step": 33500 },
    { "epoch": 0.55, "learning_rate": 4.0838937747887565e-05, "loss": 1.1637, "step": 34000 },
    { "epoch": 0.56, "learning_rate": 4.0704216244180035e-05, "loss": 1.1595, "step": 34500 },
    { "epoch": 0.57, "learning_rate": 4.05694947404725e-05, "loss": 1.1715, "step": 35000 },
    { "epoch": 0.57, "learning_rate": 4.043477323676496e-05, "loss": 1.1551, "step": 35500 },
    { "epoch": 0.58, "learning_rate": 4.0300051733057424e-05, "loss": 1.1677, "step": 36000 },
    { "epoch": 0.59, "learning_rate": 4.016533022934989e-05, "loss": 1.1316, "step": 36500 },
    { "epoch": 0.6, "learning_rate": 4.0030608725642356e-05, "loss": 1.1413, "step": 37000 },
    { "epoch": 0.61, "learning_rate": 3.989588722193482e-05, "loss": 1.1546, "step": 37500 },
    { "epoch": 0.61, "learning_rate": 3.976116571822728e-05, "loss": 1.1584, "step": 38000 },
    { "epoch": 0.62, "learning_rate": 3.9626444214519745e-05, "loss": 1.1517, "step": 38500 },
    { "epoch": 0.63, "learning_rate": 3.9491722710812214e-05, "loss": 1.1307, "step": 39000 },
    { "epoch": 0.64, "learning_rate": 3.935700120710468e-05, "loss": 1.1478, "step": 39500 },
    { "epoch": 0.65, "learning_rate": 3.922227970339714e-05, "loss": 1.1346, "step": 40000 },
    { "epoch": 0.65, "learning_rate": 3.90875581996896e-05, "loss": 1.1425, "step": 40500 },
    { "epoch": 0.66, "learning_rate": 3.895283669598207e-05, "loss": 1.1365, "step": 41000 },
    { "epoch": 0.67, "learning_rate": 3.8818115192274536e-05, "loss": 1.1413, "step": 41500 },
    { "epoch": 0.68, "learning_rate": 3.868339368856699e-05, "loss": 1.1331, "step": 42000 },
    { "epoch": 0.69, "learning_rate": 3.8548672184859454e-05, "loss": 1.1312, "step": 42500 },
    { "epoch": 0.7, "learning_rate": 3.8413950681151924e-05, "loss": 1.1317, "step": 43000 },
    { "epoch": 0.7, "learning_rate": 3.827922917744439e-05, "loss": 1.1213, "step": 43500 },
    { "epoch": 0.71, "learning_rate": 3.814450767373685e-05, "loss": 1.1305, "step": 44000 },
    { "epoch": 0.72, "learning_rate": 3.800978617002931e-05, "loss": 1.1299, "step": 44500 },
    { "epoch": 0.73, "learning_rate": 3.787506466632178e-05, "loss": 1.1151, "step": 45000 },
    { "epoch": 0.74, "learning_rate": 3.7740343162614245e-05, "loss": 1.1168, "step": 45500 },
    { "epoch": 0.74, "learning_rate": 3.760562165890671e-05, "loss": 1.1149, "step": 46000 },
    { "epoch": 0.75, "learning_rate": 3.747090015519917e-05, "loss": 1.1119, "step": 46500 },
    { "epoch": 0.76, "learning_rate": 3.733617865149164e-05, "loss": 1.1265, "step": 47000 },
    { "epoch": 0.77, "learning_rate": 3.7201457147784103e-05, "loss": 1.1231, "step": 47500 },
    { "epoch": 0.78, "learning_rate": 3.7066735644076566e-05, "loss": 1.1267, "step": 48000 },
    { "epoch": 0.78, "learning_rate": 3.693201414036903e-05, "loss": 1.1143, "step": 48500 },
    { "epoch": 0.79, "learning_rate": 3.67972926366615e-05, "loss": 1.0906, "step": 49000 },
    { "epoch": 0.8, "learning_rate": 3.666257113295396e-05, "loss": 1.1066, "step": 49500 },
    { "epoch": 0.81, "learning_rate": 3.6527849629246425e-05, "loss": 1.1026, "step": 50000 },
    { "epoch": 0.82, "learning_rate": 3.639312812553889e-05, "loss": 1.0802, "step": 50500 },
    { "epoch": 0.82, "learning_rate": 3.625840662183135e-05, "loss": 1.1051, "step": 51000 },
    { "epoch": 0.83, "learning_rate": 3.612368511812382e-05, "loss": 1.0943, "step": 51500 },
    { "epoch": 0.84, "learning_rate": 3.598896361441628e-05, "loss": 1.0868, "step": 52000 },
    { "epoch": 0.85, "learning_rate": 3.5854242110708746e-05, "loss": 1.0907, "step": 52500 },
    { "epoch": 0.86, "learning_rate": 3.571952060700121e-05, "loss": 1.0774, "step": 53000 },
    { "epoch": 0.86, "learning_rate": 3.558479910329368e-05, "loss": 1.0752, "step": 53500 },
    { "epoch": 0.87, "learning_rate": 3.5450077599586134e-05, "loss": 1.088, "step": 54000 },
    { "epoch": 0.88, "learning_rate": 3.53153560958786e-05, "loss": 1.0812, "step": 54500 },
    { "epoch": 0.89, "learning_rate": 3.518063459217106e-05, "loss": 1.0844, "step": 55000 },
    { "epoch": 0.9, "learning_rate": 3.504591308846353e-05, "loss": 1.0718, "step": 55500 },
    { "epoch": 0.91, "learning_rate": 3.491119158475599e-05, "loss": 1.0805, "step": 56000 },
    { "epoch": 0.91, "learning_rate": 3.4776470081048455e-05, "loss": 1.0649, "step": 56500 },
    { "epoch": 0.92, "learning_rate": 3.464174857734092e-05, "loss": 1.0796, "step": 57000 },
    { "epoch": 0.93, "learning_rate": 3.450702707363339e-05, "loss": 1.0731, "step": 57500 },
    { "epoch": 0.94, "learning_rate": 3.437230556992585e-05, "loss": 1.0601, "step": 58000 },
    { "epoch": 0.95, "learning_rate": 3.4237584066218314e-05, "loss": 1.0622, "step": 58500 },
    { "epoch": 0.95, "learning_rate": 3.4102862562510777e-05, "loss": 1.0632, "step": 59000 },
    { "epoch": 0.96, "learning_rate": 3.3968141058803246e-05, "loss": 1.0682, "step": 59500 },
    { "epoch": 0.97, "learning_rate": 3.383341955509571e-05, "loss": 1.0501, "step": 60000 },
    { "epoch": 0.98, "learning_rate": 3.369869805138817e-05, "loss": 1.0816, "step": 60500 },
    { "epoch": 0.99, "learning_rate": 3.3563976547680635e-05, "loss": 1.0739, "step": 61000 },
    { "epoch": 0.99, "learning_rate": 3.3429255043973104e-05, "loss": 1.0651, "step": 61500 },
    { "epoch": 1.0, "learning_rate": 3.329453354026557e-05, "loss": 1.0508, "step": 62000 },
    { "epoch": 1.01, "learning_rate": 3.315981203655803e-05, "loss": 0.9982, "step": 62500 },
    { "epoch": 1.02, "learning_rate": 3.302509053285049e-05, "loss": 0.9883, "step": 63000 },
    { "epoch": 1.03, "learning_rate": 3.2890369029142956e-05, "loss": 0.9885, "step": 63500 },
    { "epoch": 1.03, "learning_rate": 3.2755647525435426e-05, "loss": 0.987, "step": 64000 },
    { "epoch": 1.04, "learning_rate": 3.262092602172789e-05, "loss": 0.981, "step": 64500 },
    { "epoch": 1.05, "learning_rate": 3.248620451802035e-05, "loss": 0.9875, "step": 65000 },
    { "epoch": 1.06, "learning_rate": 3.2351483014312814e-05, "loss": 0.9876, "step": 65500 },
    { "epoch": 1.07, "learning_rate": 3.221676151060528e-05, "loss": 0.9906, "step": 66000 },
    { "epoch": 1.08, "learning_rate": 3.208204000689774e-05, "loss": 0.996, "step": 66500 },
    { "epoch": 1.08, "learning_rate": 3.19473185031902e-05, "loss": 0.9807, "step": 67000 },
    { "epoch": 1.09, "learning_rate": 3.1812596999482666e-05, "loss": 0.9892, "step": 67500 },
    { "epoch": 1.1, "learning_rate": 3.1677875495775135e-05, "loss": 0.9615, "step": 68000 },
    { "epoch": 1.11, "learning_rate": 3.15431539920676e-05, "loss": 0.9805, "step": 68500 },
    { "epoch": 1.12, "learning_rate": 3.140843248836006e-05, "loss": 0.9958, "step": 69000 },
    { "epoch": 1.12, "learning_rate": 3.1273710984652524e-05, "loss": 0.9971, "step": 69500 },
    { "epoch": 1.13, "learning_rate": 3.1138989480944993e-05, "loss": 0.971, "step": 70000 },
    { "epoch": 1.14, "learning_rate": 3.1004267977237456e-05, "loss": 0.9822, "step": 70500 },
    { "epoch": 1.15, "learning_rate": 3.086954647352992e-05, "loss": 0.9825, "step": 71000 },
    { "epoch": 1.16, "learning_rate": 3.073482496982238e-05, "loss": 0.9795, "step": 71500 },
    { "epoch": 1.16, "learning_rate": 3.060010346611485e-05, "loss": 0.9778, "step": 72000 },
    { "epoch": 1.17, "learning_rate": 3.0465381962407315e-05, "loss": 0.9758, "step": 72500 },
    { "epoch": 1.18, "learning_rate": 3.0330660458699777e-05, "loss": 0.9662, "step": 73000 },
    { "epoch": 1.19, "learning_rate": 3.0195938954992244e-05, "loss": 0.966, "step": 73500 },
    { "epoch": 1.2, "learning_rate": 3.0061217451284707e-05, "loss": 0.9717, "step": 74000 },
    { "epoch": 1.2, "learning_rate": 2.992649594757717e-05, "loss": 0.9594, "step": 74500 },
    { "epoch": 1.21, "learning_rate": 2.9791774443869636e-05, "loss": 0.9808, "step": 75000 },
    { "epoch": 1.22, "learning_rate": 2.96570529401621e-05, "loss": 0.9743, "step": 75500 },
    { "epoch": 1.23, "learning_rate": 2.9522331436454565e-05, "loss": 0.9745, "step": 76000 },
    { "epoch": 1.24, "learning_rate": 2.9387609932747028e-05, "loss": 0.9836, "step": 76500 },
    { "epoch": 1.24, "learning_rate": 2.9252888429039494e-05, "loss": 0.98, "step": 77000 },
    { "epoch": 1.25, "learning_rate": 2.9118166925331957e-05, "loss": 0.9708, "step": 77500 },
    { "epoch": 1.26, "learning_rate": 2.8983445421624416e-05, "loss": 0.9793, "step": 78000 },
    { "epoch": 1.27, "learning_rate": 2.8848723917916883e-05, "loss": 0.9525, "step": 78500 },
    { "epoch": 1.28, "learning_rate": 2.8714002414209345e-05, "loss": 0.9679, "step": 79000 },
    { "epoch": 1.29, "learning_rate": 2.857928091050181e-05, "loss": 0.9749, "step": 79500 },
    { "epoch": 1.29, "learning_rate": 2.8444559406794275e-05, "loss": 0.9701, "step": 80000 },
    { "epoch": 1.3, "learning_rate": 2.8309837903086737e-05, "loss": 0.9681, "step": 80500 },
    { "epoch": 1.31, "learning_rate": 2.8175116399379204e-05, "loss": 0.9745, "step": 81000 },
    { "epoch": 1.32, "learning_rate": 2.8040394895671667e-05, "loss": 0.968, "step": 81500 },
    { "epoch": 1.33, "learning_rate": 2.7905673391964133e-05, "loss": 0.9673, "step": 82000 },
    { "epoch": 1.33, "learning_rate": 2.7770951888256596e-05, "loss": 0.9543, "step": 82500 },
    { "epoch": 1.34, "learning_rate": 2.7636230384549062e-05, "loss": 0.9582, "step": 83000 },
    { "epoch": 1.35, "learning_rate": 2.7501508880841525e-05, "loss": 0.9659, "step": 83500 },
    { "epoch": 1.36, "learning_rate": 2.736678737713399e-05, "loss": 0.9692, "step": 84000 },
    { "epoch": 1.37, "learning_rate": 2.7232065873426454e-05, "loss": 0.9556, "step": 84500 },
    { "epoch": 1.37, "learning_rate": 2.709734436971892e-05, "loss": 0.9622, "step": 85000 },
    { "epoch": 1.38, "learning_rate": 2.6962622866011383e-05, "loss": 0.9626, "step": 85500 },
    { "epoch": 1.39, "learning_rate": 2.682790136230385e-05, "loss": 0.9571, "step": 86000 },
    { "epoch": 1.4, "learning_rate": 2.6693179858596312e-05, "loss": 0.9588, "step": 86500 },
    { "epoch": 1.41, "learning_rate": 2.6558458354888775e-05, "loss": 0.9544, "step": 87000 },
    { "epoch": 1.41, "learning_rate": 2.642373685118124e-05, "loss": 0.9631, "step": 87500 },
    { "epoch": 1.42, "learning_rate": 2.6289015347473704e-05, "loss": 0.9521, "step": 88000 },
    { "epoch": 1.43, "learning_rate": 2.615429384376617e-05, "loss": 0.9605, "step": 88500 },
    { "epoch": 1.44, "learning_rate": 2.6019572340058633e-05, "loss": 0.9685, "step": 89000 },
    { "epoch": 1.45, "learning_rate": 2.58848508363511e-05, "loss": 0.9613, "step": 89500 },
    { "epoch": 1.45, "learning_rate": 2.575012933264356e-05, "loss": 0.9532, "step": 90000 },
    { "epoch": 1.46, "learning_rate": 2.5615407828936022e-05, "loss": 0.9633, "step": 90500 },
    { "epoch": 1.47, "learning_rate": 2.5480686325228488e-05, "loss": 0.9481, "step": 91000 },
    { "epoch": 1.48, "learning_rate": 2.534596482152095e-05, "loss": 0.9613, "step": 91500 },
    { "epoch": 1.49, "learning_rate": 2.5211243317813417e-05, "loss": 0.9468, "step": 92000 },
    { "epoch": 1.5, "learning_rate": 2.507652181410588e-05, "loss": 0.9594, "step": 92500 },
    { "epoch": 1.5, "learning_rate": 2.4941800310398343e-05, "loss": 0.9495, "step": 93000 },
    { "epoch": 1.51, "learning_rate": 2.480707880669081e-05, "loss": 0.9541, "step": 93500 },
    { "epoch": 1.52, "learning_rate": 2.4672357302983272e-05, "loss": 0.9411, "step": 94000 },
    { "epoch": 1.53, "learning_rate": 2.453763579927574e-05, "loss": 0.9481, "step": 94500 },
    { "epoch": 1.54, "learning_rate": 2.44029142955682e-05, "loss": 0.9534, "step": 95000 },
    { "epoch": 1.54, "learning_rate": 2.4268192791860667e-05, "loss": 0.9563, "step": 95500 },
    { "epoch": 1.55, "learning_rate": 2.413347128815313e-05, "loss": 0.9487, "step": 96000 },
    { "epoch": 1.56, "learning_rate": 2.3998749784445597e-05, "loss": 0.9463, "step": 96500 },
    { "epoch": 1.57, "learning_rate": 2.386402828073806e-05, "loss": 0.9479, "step": 97000 },
    { "epoch": 1.58, "learning_rate": 2.3729306777030526e-05, "loss": 0.9388, "step": 97500 },
    { "epoch": 1.58, "learning_rate": 2.359458527332299e-05, "loss": 0.9305, "step": 98000 },
    { "epoch": 1.59, "learning_rate": 2.3459863769615455e-05, "loss": 0.9405, "step": 98500 },
    { "epoch": 1.6, "learning_rate": 2.3325142265907914e-05, "loss": 0.936, "step": 99000 },
    { "epoch": 1.61, "learning_rate": 2.319042076220038e-05, "loss": 0.9516, "step": 99500 },
    { "epoch": 1.62, "learning_rate": 2.3055699258492843e-05, "loss": 0.9472, "step": 100000 },
    { "epoch": 1.62, "learning_rate": 2.292097775478531e-05, "loss": 0.9376, "step": 100500 },
    { "epoch": 1.63, "learning_rate": 2.2786256251077773e-05, "loss": 0.9483, "step": 101000 },
    { "epoch": 1.64, "learning_rate": 2.265153474737024e-05, "loss": 0.94, "step": 101500 },
    { "epoch": 1.65, "learning_rate": 2.25168132436627e-05, "loss": 0.9361, "step": 102000 },
    { "epoch": 1.66, "learning_rate": 2.2382091739955165e-05, "loss": 0.947, "step": 102500 },
    { "epoch": 1.67, "learning_rate": 2.224737023624763e-05, "loss": 0.9327, "step": 103000 },
    { "epoch": 1.67, "learning_rate": 2.2112648732540094e-05, "loss": 0.9388, "step": 103500 },
    { "epoch": 1.68, "learning_rate": 2.197792722883256e-05, "loss": 0.9282, "step": 104000 },
    { "epoch": 1.69, "learning_rate": 2.1843205725125023e-05, "loss": 0.9314, "step": 104500 },
    { "epoch": 1.7, "learning_rate": 2.1708484221417486e-05, "loss": 0.9222, "step": 105000 },
    { "epoch": 1.71, "learning_rate": 2.157376271770995e-05, "loss": 0.9232, "step": 105500 },
    { "epoch": 1.71, "learning_rate": 2.1439041214002415e-05, "loss": 0.9311, "step": 106000 },
    { "epoch": 1.72, "learning_rate": 2.1304319710294878e-05, "loss": 0.9316, "step": 106500 },
    { "epoch": 1.73, "learning_rate": 2.1169598206587344e-05, "loss": 0.9242, "step": 107000 },
    { "epoch": 1.74, "learning_rate": 2.1034876702879807e-05, "loss": 0.9236, "step": 107500 },
    { "epoch": 1.75, "learning_rate": 2.0900155199172273e-05, "loss": 0.93, "step": 108000 },
    { "epoch": 1.75, "learning_rate": 2.0765433695464736e-05, "loss": 0.9386, "step": 108500 },
    { "epoch": 1.76, "learning_rate": 2.0630712191757202e-05, "loss": 0.9246, "step": 109000 },
    { "epoch": 1.77, "learning_rate": 2.0495990688049665e-05, "loss": 0.93, "step": 109500 },
    { "epoch": 1.78, "learning_rate": 2.036126918434213e-05, "loss": 0.9226, "step": 110000 },
    { "epoch": 1.79, "learning_rate": 2.0226547680634594e-05, "loss": 0.9188, "step": 110500 },
    { "epoch": 1.79, "learning_rate": 2.0091826176927057e-05, "loss": 0.9269, "step": 111000 },
    { "epoch": 1.8, "learning_rate": 1.995710467321952e-05, "loss": 0.9303, "step": 111500 },
    { "epoch": 1.81, "learning_rate": 1.9822383169511986e-05, "loss": 0.9347, "step": 112000 },
    { "epoch": 1.82, "learning_rate": 1.968766166580445e-05, "loss": 0.9217, "step": 112500 },
    { "epoch": 1.83, "learning_rate": 1.9552940162096915e-05, "loss": 0.9387, "step": 113000 },
    { "epoch": 1.83, "learning_rate": 1.9418218658389378e-05, "loss": 0.9241, "step": 113500 },
    { "epoch": 1.84, "learning_rate": 1.9283497154681844e-05, "loss": 0.9269, "step": 114000 },
    { "epoch": 1.85, "learning_rate": 1.9148775650974307e-05, "loss": 0.9132, "step": 114500 },
    { "epoch": 1.86, "learning_rate": 1.901405414726677e-05, "loss": 0.9005, "step": 115000 },
    { "epoch": 1.87, "learning_rate": 1.8879332643559236e-05, "loss": 0.9172, "step": 115500 },
    { "epoch": 1.88, "learning_rate": 1.87446111398517e-05, "loss": 0.9289, "step": 116000 },
    { "epoch": 1.88, "learning_rate": 1.8609889636144165e-05, "loss": 0.9153, "step": 116500 },
    { "epoch": 1.89, "learning_rate": 1.8475168132436625e-05, "loss": 0.9163, "step": 117000 },
    { "epoch": 1.9, "learning_rate": 1.834044662872909e-05, "loss": 0.9115, "step": 117500 },
    { "epoch": 1.91, "learning_rate": 1.8205725125021554e-05, "loss": 0.9271, "step": 118000 },
    { "epoch": 1.92, "learning_rate": 1.807100362131402e-05, "loss": 0.9104, "step": 118500 },
    { "epoch": 1.92, "learning_rate": 1.7936282117606483e-05, "loss": 0.9099, "step": 119000 },
    { "epoch": 1.93, "learning_rate": 1.780156061389895e-05, "loss": 0.9189, "step": 119500 },
    { "epoch": 1.94, "learning_rate": 1.7666839110191412e-05, "loss": 0.9156, "step": 120000 },
    { "epoch": 1.95, "learning_rate": 1.753211760648388e-05, "loss": 0.9096, "step": 120500 },
    { "epoch": 1.96, "learning_rate": 1.739739610277634e-05, "loss": 0.9118, "step": 121000 },
    { "epoch": 1.96, "learning_rate": 1.7262674599068808e-05, "loss": 0.9193, "step": 121500 },
    { "epoch": 1.97, "learning_rate": 1.712795309536127e-05, "loss": 0.9009, "step": 122000 },
    { "epoch": 1.98, "learning_rate": 1.6993231591653737e-05, "loss": 0.9068, "step": 122500 },
    { "epoch": 1.99, "learning_rate": 1.6858510087946196e-05, "loss": 0.9101, "step": 123000 },
    { "epoch": 2.0, "learning_rate": 1.6723788584238663e-05, "loss": 0.9142, "step": 123500 },
    { "epoch": 2.0, "learning_rate": 1.6589067080531125e-05, "loss": 0.8746, "step": 124000 },
    { "epoch": 2.01, "learning_rate": 1.645434557682359e-05, "loss": 0.8355, "step": 124500 },
    { "epoch": 2.02, "learning_rate": 1.6319624073116054e-05, "loss": 0.8432, "step": 125000 },
    { "epoch": 2.03, "learning_rate": 1.618490256940852e-05, "loss": 0.8385, "step": 125500 },
    { "epoch": 2.04, "learning_rate": 1.6050181065700984e-05, "loss": 0.8273, "step": 126000 },
    { "epoch": 2.05, "learning_rate": 1.591545956199345e-05, "loss": 0.8178, "step": 126500 },
    { "epoch": 2.05, "learning_rate": 1.5780738058285913e-05, "loss": 0.831, "step": 127000 },
    { "epoch": 2.06, "learning_rate": 1.5646016554578376e-05, "loss": 0.8338, "step": 127500 },
    { "epoch": 2.07, "learning_rate": 1.5511295050870842e-05, "loss": 0.8318, "step": 128000 },
    { "epoch": 2.08, "learning_rate": 1.5376573547163305e-05, "loss": 0.8405, "step": 128500 },
    { "epoch": 2.09, "learning_rate": 1.5241852043455768e-05, "loss": 0.8325, "step": 129000 },
    { "epoch": 2.09, "learning_rate": 1.5107130539748232e-05, "loss": 0.8221, "step": 129500 },
    { "epoch": 2.1, "learning_rate": 1.4972409036040697e-05, "loss": 0.844, "step": 130000 },
    { "epoch": 2.11, "learning_rate": 1.4837687532333161e-05, "loss": 0.8285, "step": 130500 },
    { "epoch": 2.12, "learning_rate": 1.4702966028625626e-05, "loss": 0.8404, "step": 131000 },
    { "epoch": 2.13, "learning_rate": 1.456824452491809e-05, "loss": 0.8232, "step": 131500 },
    { "epoch": 2.13, "learning_rate": 1.4433523021210555e-05, "loss": 0.83, "step": 132000 },
    { "epoch": 2.14, "learning_rate": 1.429880151750302e-05, "loss": 0.8246, "step": 132500 },
    { "epoch": 2.15, "learning_rate": 1.4164080013795482e-05, "loss": 0.8369, "step": 133000 },
    { "epoch": 2.16, "learning_rate": 1.4029358510087947e-05, "loss": 0.8344, "step": 133500 },
    { "epoch": 2.17, "learning_rate": 1.3894637006380412e-05, "loss": 0.8266, "step": 134000 },
    { "epoch": 2.17, "learning_rate": 1.3759915502672876e-05, "loss": 0.842, "step": 134500 },
    { "epoch": 2.18, "learning_rate": 1.3625193998965339e-05, "loss": 0.8304, "step": 135000 },
    { "epoch": 2.19, "learning_rate": 1.3490472495257803e-05, "loss": 0.8235, "step": 135500 },
    { "epoch": 2.2, "learning_rate": 1.3355750991550266e-05, "loss": 0.8343, "step": 136000 },
    { "epoch": 2.21, "learning_rate": 1.3221029487842731e-05, "loss": 0.8277, "step": 136500 },
    { "epoch": 2.21, "learning_rate": 1.3086307984135195e-05, "loss": 0.8305, "step": 137000 },
    { "epoch": 2.22, "learning_rate": 1.295158648042766e-05, "loss": 0.8429, "step": 137500 },
    { "epoch": 2.23, "learning_rate": 1.2816864976720125e-05, "loss": 0.8168, "step": 138000 },
    { "epoch": 2.24, "learning_rate": 1.268214347301259e-05, "loss": 0.83, "step": 138500 },
    { "epoch": 2.25, "learning_rate": 1.2547421969305054e-05, "loss": 0.8351, "step": 139000 },
    { "epoch": 2.26, "learning_rate": 1.2412700465597517e-05, "loss": 0.8232, "step": 139500 },
    { "epoch": 2.26, "learning_rate": 1.2277978961889981e-05, "loss": 0.8317, "step": 140000 },
    { "epoch": 2.27, "learning_rate": 1.2143257458182446e-05, "loss": 0.8188, "step": 140500 },
    { "epoch": 2.28, "learning_rate": 1.200853595447491e-05, "loss": 0.8267, "step": 141000 },
    { "epoch": 2.29, "learning_rate": 1.1873814450767375e-05, "loss": 0.8215, "step": 141500 },
    { "epoch": 2.3, "learning_rate": 1.173909294705984e-05, "loss": 0.8201, "step": 142000 },
    { "epoch": 2.3, "learning_rate": 1.1604371443352302e-05, "loss": 0.8155, "step": 142500 },
    { "epoch": 2.31, "learning_rate": 1.1469649939644767e-05, "loss": 0.8119, "step": 143000 },
    { "epoch": 2.32, "learning_rate": 1.1334928435937231e-05, "loss": 0.8207, "step": 143500 },
    { "epoch": 2.33, "learning_rate": 1.1200206932229696e-05, "loss": 0.8314, "step": 144000 },
    { "epoch": 2.34, "learning_rate": 1.106548542852216e-05, "loss": 0.8212, "step": 144500 },
    { "epoch": 2.34, "learning_rate": 1.0930763924814625e-05, "loss": 0.8173, "step": 145000 },
    { "epoch": 2.35, "learning_rate": 1.0796042421107088e-05, "loss": 0.8195, "step": 145500 },
    { "epoch": 2.36, "learning_rate": 1.0661320917399552e-05, "loss": 0.8237, "step": 146000 },
    { "epoch": 2.37, "learning_rate": 1.0526599413692017e-05, "loss": 0.8198, "step": 146500 },
    { "epoch": 2.38, "learning_rate": 1.039187790998448e-05, "loss": 0.8151, "step": 147000 },
    { "epoch": 2.38, "learning_rate": 1.0257156406276944e-05, "loss": 0.8398, "step": 147500 },
    { "epoch": 2.39, "learning_rate": 1.0122434902569409e-05, "loss": 0.8272, "step": 148000 },
    { "epoch": 2.4, "learning_rate": 9.987713398861872e-06, "loss": 0.8155, "step": 148500 },
    { "epoch": 2.41, "learning_rate": 9.852991895154336e-06, "loss": 0.8135, "step": 149000 },
    { "epoch": 2.42, "learning_rate": 9.718270391446801e-06, "loss": 0.8246, "step": 149500 },
    { "epoch": 2.42, "learning_rate": 9.583548887739266e-06, "loss": 0.8085, "step": 150000 },
    { "epoch": 2.43, "learning_rate": 9.44882738403173e-06, "loss": 0.8156, "step": 150500 },
    { "epoch": 2.44, "learning_rate": 9.314105880324195e-06, "loss": 0.8135, "step": 151000 },
    { "epoch": 2.45, "learning_rate": 9.179384376616658e-06, "loss": 0.8184, "step": 151500 },
    { "epoch": 2.46, "learning_rate": 9.044662872909122e-06, "loss": 0.8198, "step": 152000 },
    { "epoch": 2.47, "learning_rate": 8.909941369201587e-06, "loss": 0.8167, "step": 152500 },
    { "epoch": 2.47, "learning_rate": 8.775219865494051e-06, "loss": 0.8109, "step": 153000 },
    { "epoch": 2.48, "learning_rate": 8.640498361786516e-06, "loss": 0.7983, "step": 153500 },
    { "epoch": 2.49, "learning_rate": 8.50577685807898e-06, "loss": 0.8107, "step": 154000 },
    { "epoch": 2.5, "learning_rate": 8.371055354371443e-06, "loss": 0.8026, "step": 154500 },
    { "epoch": 2.51, "learning_rate": 8.236333850663908e-06, "loss": 0.8, "step": 155000 },
    { "epoch": 2.51, "learning_rate": 8.101612346956372e-06, "loss": 0.8089, "step": 155500 },
    { "epoch": 2.52, "learning_rate": 7.966890843248837e-06, "loss": 0.8055, "step": 156000 },
    { "epoch": 2.53, "learning_rate": 7.832169339541301e-06, "loss": 0.8187, "step": 156500 },
    { "epoch": 2.54, "learning_rate": 7.697447835833766e-06, "loss": 0.8103, "step": 157000 },
    { "epoch": 2.55, "learning_rate": 7.562726332126228e-06, "loss": 0.8111, "step": 157500 },
    { "epoch": 2.55, "learning_rate": 7.428004828418693e-06, "loss": 0.8168, "step": 158000 },
    { "epoch": 2.56, "learning_rate": 7.293283324711157e-06, "loss": 0.815, "step": 158500 },
    { "epoch": 2.57, "learning_rate": 7.158561821003622e-06, "loss": 0.8166, "step": 159000 },
    { "epoch": 2.58, "learning_rate": 7.023840317296086e-06, "loss": 0.8057, "step": 159500 },
    { "epoch": 2.59, "learning_rate": 6.889118813588551e-06, "loss": 0.8001, "step": 160000 },
    { "epoch": 2.59, "learning_rate": 6.754397309881014e-06, "loss": 0.806, "step": 160500 },
    { "epoch": 2.6, "learning_rate": 6.619675806173478e-06, "loss": 0.8116, "step": 161000 },
    { "epoch": 2.61, "learning_rate": 6.484954302465943e-06, "loss": 0.8093, "step": 161500 },
    { "epoch": 2.62, "learning_rate": 6.3502327987584074e-06, "loss": 0.815, "step": 162000 },
    { "epoch": 2.63, "learning_rate": 6.215511295050871e-06, "loss": 0.7953, "step": 162500 },
    { "epoch": 2.64, "learning_rate": 6.080789791343335e-06, "loss": 0.801, "step": 163000 },
    { "epoch": 2.64, "learning_rate": 5.946068287635799e-06, "loss": 0.8272, "step": 163500 },
    { "epoch": 2.65, "learning_rate": 5.811346783928263e-06, "loss": 0.8055, "step": 164000 },
    { "epoch": 2.66, "learning_rate": 5.676625280220728e-06, "loss": 0.7992, "step": 164500 },
    { "epoch": 2.67, "learning_rate": 5.541903776513192e-06, "loss": 0.8054, "step": 165000 },
    { "epoch": 2.68, "learning_rate": 5.407182272805656e-06, "loss": 0.8029, "step": 165500 },
    { "epoch": 2.68, "learning_rate": 5.2724607690981205e-06, "loss": 0.804, "step": 166000 },
    { "epoch": 2.69, "learning_rate": 5.137739265390585e-06, "loss": 0.8052, "step": 166500 },
    { "epoch": 2.7, "learning_rate": 5.003017761683049e-06, "loss": 0.7955, "step": 167000 },
    { "epoch": 2.71, "learning_rate": 4.868296257975513e-06, "loss": 0.7952, "step": 167500 },
    { "epoch": 2.72, "learning_rate": 4.733574754267978e-06, "loss": 0.8058, "step": 168000 },
    { "epoch": 2.72, "learning_rate": 4.598853250560442e-06, "loss": 0.8031, "step": 168500 },
    { "epoch": 2.73, "learning_rate": 4.464131746852906e-06, "loss": 0.8181, "step": 169000 },
    { "epoch": 2.74, "learning_rate": 4.32941024314537e-06, "loss": 0.8076, "step": 169500 },
    { "epoch": 2.75, "learning_rate": 4.194688739437834e-06, "loss": 0.8172, "step": 170000 },
    { "epoch": 2.76, "learning_rate": 4.059967235730298e-06, "loss": 0.7994, "step": 170500 },
    { "epoch": 2.76, "learning_rate": 3.925245732022763e-06, "loss": 0.801, "step": 171000 },
    { "epoch": 2.77, "learning_rate": 3.7905242283152265e-06, "loss": 0.806, "step": 171500 },
    { "epoch": 2.78, "learning_rate": 3.655802724607691e-06, "loss": 0.8023, "step": 172000 },
    { "epoch": 2.79, "learning_rate": 3.5210812209001556e-06, "loss": 0.8102, "step": 172500 },
    { "epoch": 2.8, "learning_rate": 3.3863597171926193e-06, "loss": 0.8119, "step": 173000 },
    { "epoch": 2.8, "learning_rate": 3.251638213485084e-06, "loss": 0.807, "step": 173500 },
    { "epoch": 2.81, "learning_rate": 3.116916709777548e-06, "loss": 0.7956, "step": 174000 },
    { "epoch": 2.82, "learning_rate": 2.982195206070012e-06, "loss": 0.8035, "step": 174500 },
    { "epoch": 2.83, "learning_rate": 2.8474737023624763e-06, "loss": 0.8076, "step": 175000 },
    { "epoch": 2.84, "learning_rate": 2.712752198654941e-06, "loss": 0.7995, "step": 175500 },
    { "epoch": 2.85, "learning_rate": 2.578030694947405e-06, "loss": 0.7979, "step": 176000 },
    { "epoch": 2.85, "learning_rate": 2.443309191239869e-06, "loss": 0.7933, "step": 176500 },
    { "epoch": 2.86, "learning_rate": 2.3085876875323332e-06, "loss": 0.8008, "step": 177000 },
    { "epoch": 2.87, "learning_rate": 2.1738661838247974e-06, "loss": 0.7969, "step": 177500 },
    { "epoch": 2.88, "learning_rate": 2.0391446801172615e-06, "loss": 0.8077, "step": 178000 },
    { "epoch": 2.89, "learning_rate": 1.904423176409726e-06, "loss": 0.7988, "step": 178500 },
    { "epoch": 2.89, "learning_rate": 1.7697016727021902e-06, "loss": 0.8078, "step": 179000 },
    { "epoch": 2.9, "learning_rate": 1.6349801689946541e-06, "loss": 0.7973, "step": 179500 },
    { "epoch": 2.91, "learning_rate": 1.5002586652871185e-06, "loss": 0.7927, "step": 180000 },
    { "epoch": 2.92, "learning_rate": 1.3655371615795828e-06, "loss": 0.8139, "step": 180500 },
    { "epoch": 2.93, "learning_rate": 1.230815657872047e-06, "loss": 0.7866, "step": 181000 },
    { "epoch": 2.93, "learning_rate": 1.0960941541645111e-06, "loss": 0.7991, "step": 181500 },
    { "epoch": 2.94, "learning_rate": 9.613726504569755e-07, "loss": 0.8022, "step": 182000 },
    { "epoch": 2.95, "learning_rate": 8.266511467494397e-07, "loss": 0.7863, "step": 182500 },
    { "epoch": 2.96, "learning_rate": 6.919296430419037e-07, "loss": 0.7973, "step": 183000 },
    { "epoch": 2.97, "learning_rate": 5.572081393343681e-07, "loss": 0.7922, "step": 183500 },
    { "epoch": 2.97, "learning_rate": 4.224866356268322e-07, "loss": 0.7922, "step": 184000 },
    { "epoch": 2.98, "learning_rate": 2.8776513191929646e-07, "loss": 0.7878, "step": 184500 },
    { "epoch": 2.99, "learning_rate": 1.5304362821176065e-07, "loss": 0.8012, "step": 185000 },
    { "epoch": 3.0, "learning_rate": 1.8322124504224867e-08, "loss": 0.798, "step": 185500 },
    { "epoch": 3.0, "step": 185568, "total_flos": 2.7769538443832525e+17, "train_loss": 1.0130458773570052, "train_runtime": 63769.0674, "train_samples_per_second": 29.1, "train_steps_per_second": 2.91 }
  ],
  "max_steps": 185568,
  "num_train_epochs": 3,
  "total_flos": 2.7769538443832525e+17,
  "trial_name": null,
  "trial_params": null
}