{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.6920639267619961,
  "global_step": 86500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.997283134350093e-05,
      "loss": 1.6457,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.994566268700186e-05,
      "loss": 1.6172,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.991849403050279e-05,
      "loss": 1.6618,
      "step": 1500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.989132537400373e-05,
      "loss": 1.6384,
      "step": 2000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.986415671750466e-05,
      "loss": 1.6445,
      "step": 2500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.983698806100559e-05,
      "loss": 1.644,
      "step": 3000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.980981940450652e-05,
      "loss": 1.6763,
      "step": 3500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9782650748007454e-05,
      "loss": 1.6557,
      "step": 4000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9755482091508384e-05,
      "loss": 1.6375,
      "step": 4500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.972831343500931e-05,
      "loss": 1.6605,
      "step": 5000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.970114477851024e-05,
      "loss": 1.6571,
      "step": 5500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.967397612201118e-05,
      "loss": 1.6556,
      "step": 6000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.964680746551211e-05,
      "loss": 1.6593,
      "step": 6500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.961963880901304e-05,
      "loss": 1.6578,
      "step": 7000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.959247015251397e-05,
      "loss": 1.6519,
      "step": 7500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9565301496014905e-05,
      "loss": 1.6479,
      "step": 8000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.9538132839515835e-05,
      "loss": 1.6705,
      "step": 8500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.9510964183016765e-05,
      "loss": 1.6807,
      "step": 9000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9483795526517694e-05,
      "loss": 1.6667,
      "step": 9500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.945662687001863e-05,
      "loss": 1.6604,
      "step": 10000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.942945821351956e-05,
      "loss": 1.6885,
      "step": 10500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.940228955702049e-05,
      "loss": 1.6648,
      "step": 11000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.937512090052142e-05,
      "loss": 1.6483,
      "step": 11500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9347952244022357e-05,
      "loss": 1.6835,
      "step": 12000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.9320783587523286e-05,
      "loss": 1.6797,
      "step": 12500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.9293614931024216e-05,
      "loss": 1.6698,
      "step": 13000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.9266446274525146e-05,
      "loss": 1.663,
      "step": 13500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.923927761802608e-05,
      "loss": 1.6552,
      "step": 14000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.921210896152701e-05,
      "loss": 1.6769,
      "step": 14500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.918494030502794e-05,
      "loss": 1.6656,
      "step": 15000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.915777164852887e-05,
      "loss": 1.6515,
      "step": 15500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.913060299202981e-05,
      "loss": 1.6606,
      "step": 16000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.910343433553074e-05,
      "loss": 1.6586,
      "step": 16500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.907626567903167e-05,
      "loss": 1.6945,
      "step": 17000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.90490970225326e-05,
      "loss": 1.7064,
      "step": 17500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.9021928366033534e-05,
      "loss": 1.6779,
      "step": 18000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.899475970953446e-05,
      "loss": 1.6548,
      "step": 18500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.896759105303539e-05,
      "loss": 1.6983,
      "step": 19000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.894042239653632e-05,
      "loss": 1.6528,
      "step": 19500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.891325374003726e-05,
      "loss": 1.6845,
      "step": 20000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.888608508353818e-05,
      "loss": 1.672,
      "step": 20500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.885891642703912e-05,
      "loss": 1.6565,
      "step": 21000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.883174777054005e-05,
      "loss": 1.6549,
      "step": 21500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.8804579114040985e-05,
      "loss": 1.6702,
      "step": 22000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.877741045754191e-05,
      "loss": 1.6959,
      "step": 22500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.8750241801042845e-05,
      "loss": 1.6585,
      "step": 23000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.8723073144543774e-05,
      "loss": 1.678,
      "step": 23500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.869590448804471e-05,
      "loss": 1.6927,
      "step": 24000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.8668735831545634e-05,
      "loss": 1.6876,
      "step": 24500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.864156717504657e-05,
      "loss": 1.6907,
      "step": 25000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.86143985185475e-05,
      "loss": 1.6822,
      "step": 25500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.8587229862048436e-05,
      "loss": 1.7038,
      "step": 26000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.856006120554936e-05,
      "loss": 1.6671,
      "step": 26500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.8532892549050296e-05,
      "loss": 1.6546,
      "step": 27000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.8505723892551226e-05,
      "loss": 1.6896,
      "step": 27500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.847855523605216e-05,
      "loss": 1.6644,
      "step": 28000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.8451386579553085e-05,
      "loss": 1.6728,
      "step": 28500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.842421792305402e-05,
      "loss": 1.687,
      "step": 29000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.839704926655495e-05,
      "loss": 1.6557,
      "step": 29500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.836988061005588e-05,
      "loss": 1.7109,
      "step": 30000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.834271195355681e-05,
      "loss": 1.7037,
      "step": 30500
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.831554329705775e-05,
      "loss": 1.6809,
      "step": 31000
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.828837464055868e-05,
      "loss": 1.6785,
      "step": 31500
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.826120598405961e-05,
      "loss": 1.6842,
      "step": 32000
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.8234037327560536e-05,
      "loss": 1.6569,
      "step": 32500
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.820686867106147e-05,
      "loss": 1.662,
      "step": 33000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.81797000145624e-05,
      "loss": 1.6857,
      "step": 33500
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.815253135806333e-05,
      "loss": 1.7066,
      "step": 34000
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.812536270156426e-05,
      "loss": 1.6824,
      "step": 34500
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.80981940450652e-05,
      "loss": 1.6847,
      "step": 35000
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.807102538856613e-05,
      "loss": 1.6629,
      "step": 35500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.804385673206706e-05,
      "loss": 1.6498,
      "step": 36000
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.801668807556799e-05,
      "loss": 1.6833,
      "step": 36500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.7989519419068924e-05,
      "loss": 1.6693,
      "step": 37000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.7962350762569854e-05,
      "loss": 1.6912,
      "step": 37500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.7935182106070784e-05,
      "loss": 1.6591,
      "step": 38000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.7908013449571714e-05,
      "loss": 1.6655,
      "step": 38500
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.788084479307265e-05,
      "loss": 1.6699,
      "step": 39000
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.785367613657358e-05,
      "loss": 1.6544,
      "step": 39500
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.782650748007451e-05,
      "loss": 1.6876,
      "step": 40000
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.779933882357544e-05,
      "loss": 1.6916,
      "step": 40500
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.7772170167076376e-05,
      "loss": 1.6976,
      "step": 41000
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.7745001510577305e-05,
      "loss": 1.6905,
      "step": 41500
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.7717832854078235e-05,
      "loss": 1.6886,
      "step": 42000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.7690664197579165e-05,
      "loss": 1.6585,
      "step": 42500
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.76634955410801e-05,
      "loss": 1.6877,
      "step": 43000
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.763632688458103e-05,
      "loss": 1.6786,
      "step": 43500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.760915822808196e-05,
      "loss": 1.676,
      "step": 44000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.758198957158289e-05,
      "loss": 1.6702,
      "step": 44500
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.755482091508383e-05,
      "loss": 1.6733,
      "step": 45000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.752765225858476e-05,
      "loss": 1.6813,
      "step": 45500
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.750048360208569e-05,
      "loss": 1.6726,
      "step": 46000
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.7473314945586616e-05,
      "loss": 1.6844,
      "step": 46500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.744614628908755e-05,
      "loss": 1.6694,
      "step": 47000
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.741897763258848e-05,
      "loss": 1.6795,
      "step": 47500
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.739180897608941e-05,
      "loss": 1.6512,
      "step": 48000
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.736464031959034e-05,
      "loss": 1.7015,
      "step": 48500
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.733747166309128e-05,
      "loss": 1.6633,
      "step": 49000
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.73103030065922e-05,
      "loss": 1.648,
      "step": 49500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.728313435009314e-05,
      "loss": 1.6985,
      "step": 50000
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.725596569359407e-05,
      "loss": 1.6694,
      "step": 50500
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.7228797037095004e-05,
      "loss": 1.709,
      "step": 51000
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.720162838059593e-05,
      "loss": 1.5724,
      "step": 51500
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.7174459724096864e-05,
      "loss": 1.5297,
      "step": 52000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.7147291067597793e-05,
      "loss": 1.5361,
      "step": 52500
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.712012241109873e-05,
      "loss": 1.5141,
      "step": 53000
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.709295375459965e-05,
      "loss": 1.5395,
      "step": 53500
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.706578509810059e-05,
      "loss": 1.5198,
      "step": 54000
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.703861644160152e-05,
      "loss": 1.5742,
      "step": 54500
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.701144778510245e-05,
      "loss": 1.5446,
      "step": 55000
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.698427912860338e-05,
      "loss": 1.5587,
      "step": 55500
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.695711047210431e-05,
      "loss": 1.5487,
      "step": 56000
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.6929941815605245e-05,
      "loss": 1.5434,
      "step": 56500
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.6902773159106175e-05,
      "loss": 1.5424,
      "step": 57000
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.6875604502607104e-05,
      "loss": 1.5343,
      "step": 57500
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.6848435846108034e-05,
      "loss": 1.5479,
      "step": 58000
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.682126718960897e-05,
      "loss": 1.5453,
      "step": 58500
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.67940985331099e-05,
      "loss": 1.5283,
      "step": 59000
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.676692987661083e-05,
      "loss": 1.5689,
      "step": 59500
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.673976122011176e-05,
      "loss": 1.5636,
      "step": 60000
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.6712592563612696e-05,
      "loss": 1.5483,
      "step": 60500
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.6685423907113626e-05,
      "loss": 1.5425,
      "step": 61000
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.6658255250614556e-05,
      "loss": 1.5662,
      "step": 61500
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.6631086594115485e-05,
      "loss": 1.576,
      "step": 62000
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.660391793761642e-05,
      "loss": 1.5613,
      "step": 62500
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.657674928111735e-05,
      "loss": 1.5404,
      "step": 63000
    },
    {
      "epoch": 1.24,
      "learning_rate": 4.654958062461828e-05,
      "loss": 1.5614,
      "step": 63500
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.652241196811921e-05,
      "loss": 1.551,
      "step": 64000
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.649524331162015e-05,
      "loss": 1.5603,
      "step": 64500
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.646807465512108e-05,
      "loss": 1.5794,
      "step": 65000
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.644090599862201e-05,
      "loss": 1.5551,
      "step": 65500
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.641373734212294e-05,
      "loss": 1.5965,
      "step": 66000
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.638656868562387e-05,
      "loss": 1.5662,
      "step": 66500
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.63594000291248e-05,
      "loss": 1.6079,
      "step": 67000
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.633223137262573e-05,
      "loss": 1.5807,
      "step": 67500
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.630506271612666e-05,
      "loss": 1.5592,
      "step": 68000
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.62778940596276e-05,
      "loss": 1.5741,
      "step": 68500
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.625072540312852e-05,
      "loss": 1.5827,
      "step": 69000
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.622355674662946e-05,
      "loss": 1.5656,
      "step": 69500
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.619638809013039e-05,
      "loss": 1.5764,
      "step": 70000
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.6169219433631325e-05,
      "loss": 1.556,
      "step": 70500
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.614205077713225e-05,
      "loss": 1.582,
      "step": 71000
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.6114882120633184e-05,
      "loss": 1.5789,
      "step": 71500
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.6087713464134114e-05,
      "loss": 1.5719,
      "step": 72000
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.606054480763505e-05,
      "loss": 1.581,
      "step": 72500
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.603337615113597e-05,
      "loss": 1.5885,
      "step": 73000
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.600620749463691e-05,
      "loss": 1.5516,
      "step": 73500
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.597903883813784e-05,
      "loss": 1.59,
      "step": 74000
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.5951870181638776e-05,
      "loss": 1.5968,
      "step": 74500
    },
    {
      "epoch": 1.47,
      "learning_rate": 4.59247015251397e-05,
      "loss": 1.5865,
      "step": 75000
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.5897532868640636e-05,
      "loss": 1.5682,
      "step": 75500
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.5870364212141565e-05,
      "loss": 1.5711,
      "step": 76000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.58431955556425e-05,
      "loss": 1.5595,
      "step": 76500
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.5816026899143425e-05,
      "loss": 1.5898,
      "step": 77000
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.578885824264436e-05,
      "loss": 1.6049,
      "step": 77500
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.576168958614529e-05,
      "loss": 1.613,
      "step": 78000
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.573452092964622e-05,
      "loss": 1.5761,
      "step": 78500
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.570735227314715e-05,
      "loss": 1.5901,
      "step": 79000
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.568018361664809e-05,
      "loss": 1.6102,
      "step": 79500
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.565301496014902e-05,
      "loss": 1.5958,
      "step": 80000
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.5625846303649946e-05,
      "loss": 1.5903,
      "step": 80500
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.5598677647150876e-05,
      "loss": 1.5798,
      "step": 81000
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.557150899065181e-05,
      "loss": 1.5832,
      "step": 81500
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.554434033415274e-05,
      "loss": 1.6358,
      "step": 82000
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.551717167765367e-05,
      "loss": 1.5969,
      "step": 82500
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.54900030211546e-05,
      "loss": 1.6018,
      "step": 83000
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.546283436465554e-05,
      "loss": 1.5919,
      "step": 83500
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.543566570815647e-05,
      "loss": 1.5925,
      "step": 84000
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.54084970516574e-05,
      "loss": 1.6047,
      "step": 84500
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.538132839515833e-05,
      "loss": 1.6178,
      "step": 85000
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.5354159738659264e-05,
      "loss": 1.5892,
      "step": 85500
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.5326991082160194e-05,
      "loss": 1.6217,
      "step": 86000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.5299822425661123e-05,
      "loss": 1.5807,
      "step": 86500
    }
  ],
  "max_steps": 920178,
  "num_train_epochs": 18,
  "total_flos": 1930009185792000.0,
  "trial_name": null,
  "trial_params": null
}