|
{ |
|
"best_metric": 4.342620849609375, |
|
"best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/det-adj-noun/lstm/4/checkpoints/checkpoint-228960", |
|
"epoch": 0.025000606015738065, |
|
"eval_steps": 10, |
|
"global_step": 228960, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999998362119627e-05, |
|
"loss": 10.8193, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999161405248948e-05, |
|
"loss": 7.5516, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.998322810497896e-05, |
|
"loss": 7.0608, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.997484215746844e-05, |
|
"loss": 6.9965, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.996645620995792e-05, |
|
"loss": 6.9474, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99580702624474e-05, |
|
"loss": 6.9067, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994968431493688e-05, |
|
"loss": 6.7482, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994129836742636e-05, |
|
"loss": 6.6243, |
|
"step": 3584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.993291241991584e-05, |
|
"loss": 6.5246, |
|
"step": 4096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.992452647240532e-05, |
|
"loss": 6.4571, |
|
"step": 4608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99161405248948e-05, |
|
"loss": 6.4095, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.990775457738428e-05, |
|
"loss": 6.3562, |
|
"step": 5632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989936862987376e-05, |
|
"loss": 6.2988, |
|
"step": 6144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989098268236324e-05, |
|
"loss": 6.2415, |
|
"step": 6656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.988261311365645e-05, |
|
"loss": 6.1819, |
|
"step": 7168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.987422716614593e-05, |
|
"loss": 6.122, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.986584121863541e-05, |
|
"loss": 6.074, |
|
"step": 8192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.985745527112489e-05, |
|
"loss": 6.0331, |
|
"step": 8704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984906932361437e-05, |
|
"loss": 5.9837, |
|
"step": 9216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984068337610385e-05, |
|
"loss": 5.9423, |
|
"step": 9728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.983229742859333e-05, |
|
"loss": 5.907, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.982391148108281e-05, |
|
"loss": 5.86, |
|
"step": 10752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.981552553357229e-05, |
|
"loss": 5.838, |
|
"step": 11264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.980713958606178e-05, |
|
"loss": 5.7998, |
|
"step": 11776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.979875363855125e-05, |
|
"loss": 5.7797, |
|
"step": 12288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9790384069844466e-05, |
|
"loss": 5.7431, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9781998122333946e-05, |
|
"loss": 5.7159, |
|
"step": 13312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9773612174823426e-05, |
|
"loss": 5.6918, |
|
"step": 13824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.97652262273129e-05, |
|
"loss": 5.674, |
|
"step": 14336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.975684027980238e-05, |
|
"loss": 5.6498, |
|
"step": 14848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.974845433229186e-05, |
|
"loss": 5.6279, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.974008476358507e-05, |
|
"loss": 5.617, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.973169881607455e-05, |
|
"loss": 5.58, |
|
"step": 16384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.972331286856403e-05, |
|
"loss": 5.573, |
|
"step": 16896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9714926921053515e-05, |
|
"loss": 5.5536, |
|
"step": 17408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9706557352346724e-05, |
|
"loss": 5.5374, |
|
"step": 17920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9698171404836204e-05, |
|
"loss": 5.5226, |
|
"step": 18432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9689785457325684e-05, |
|
"loss": 5.4817, |
|
"step": 18944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9681399509815164e-05, |
|
"loss": 5.4874, |
|
"step": 19456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.967302994110837e-05, |
|
"loss": 5.4534, |
|
"step": 19968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.966464399359785e-05, |
|
"loss": 5.4561, |
|
"step": 20480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.965625804608733e-05, |
|
"loss": 5.4345, |
|
"step": 20992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.964787209857681e-05, |
|
"loss": 5.4366, |
|
"step": 21504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963948615106629e-05, |
|
"loss": 5.4068, |
|
"step": 22016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96311165823595e-05, |
|
"loss": 5.3918, |
|
"step": 22528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.962273063484898e-05, |
|
"loss": 5.3965, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.961434468733847e-05, |
|
"loss": 5.3847, |
|
"step": 23552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.960595873982795e-05, |
|
"loss": 5.3722, |
|
"step": 24064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.959758917112116e-05, |
|
"loss": 5.3542, |
|
"step": 24576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958920322361064e-05, |
|
"loss": 5.332, |
|
"step": 25088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958081727610012e-05, |
|
"loss": 5.3432, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.95724313285896e-05, |
|
"loss": 5.3239, |
|
"step": 26112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.956404538107908e-05, |
|
"loss": 5.3225, |
|
"step": 26624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.955565943356856e-05, |
|
"loss": 5.2913, |
|
"step": 27136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.954727348605804e-05, |
|
"loss": 5.2958, |
|
"step": 27648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.953888753854752e-05, |
|
"loss": 5.273, |
|
"step": 28160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9530501591037e-05, |
|
"loss": 5.2939, |
|
"step": 28672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.952211564352648e-05, |
|
"loss": 5.258, |
|
"step": 29184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9513746074819686e-05, |
|
"loss": 5.2597, |
|
"step": 29696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9505360127309166e-05, |
|
"loss": 5.2452, |
|
"step": 30208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9496974179798646e-05, |
|
"loss": 5.2317, |
|
"step": 30720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948858823228813e-05, |
|
"loss": 5.2339, |
|
"step": 31232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948020228477761e-05, |
|
"loss": 5.2216, |
|
"step": 31744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9471816337267086e-05, |
|
"loss": 5.1945, |
|
"step": 32256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9463430389756566e-05, |
|
"loss": 5.2007, |
|
"step": 32768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9455044442246046e-05, |
|
"loss": 5.2104, |
|
"step": 33280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9446658494735526e-05, |
|
"loss": 5.1875, |
|
"step": 33792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9438272547225006e-05, |
|
"loss": 5.1765, |
|
"step": 34304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9429902978518215e-05, |
|
"loss": 5.159, |
|
"step": 34816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9421517031007695e-05, |
|
"loss": 5.1617, |
|
"step": 35328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9413131083497175e-05, |
|
"loss": 5.1777, |
|
"step": 35840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9404745135986655e-05, |
|
"loss": 5.1614, |
|
"step": 36352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.939637556727987e-05, |
|
"loss": 5.1471, |
|
"step": 36864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.938798961976935e-05, |
|
"loss": 5.15, |
|
"step": 37376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937960367225883e-05, |
|
"loss": 5.1461, |
|
"step": 37888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937121772474831e-05, |
|
"loss": 5.1278, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.936283177723779e-05, |
|
"loss": 5.1132, |
|
"step": 38912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.935444582972727e-05, |
|
"loss": 5.1184, |
|
"step": 39424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.934605988221675e-05, |
|
"loss": 5.0984, |
|
"step": 39936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.933769031350996e-05, |
|
"loss": 5.0998, |
|
"step": 40448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932930436599944e-05, |
|
"loss": 5.1044, |
|
"step": 40960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932091841848892e-05, |
|
"loss": 5.0902, |
|
"step": 41472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.93125324709784e-05, |
|
"loss": 5.0931, |
|
"step": 41984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.930416290227161e-05, |
|
"loss": 5.0813, |
|
"step": 42496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9295793333564825e-05, |
|
"loss": 5.0611, |
|
"step": 43008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9287423764858034e-05, |
|
"loss": 5.0642, |
|
"step": 43520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9279037817347514e-05, |
|
"loss": 5.0609, |
|
"step": 44032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9270651869836994e-05, |
|
"loss": 5.0672, |
|
"step": 44544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9262265922326474e-05, |
|
"loss": 5.0484, |
|
"step": 45056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9253879974815954e-05, |
|
"loss": 5.0472, |
|
"step": 45568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9245494027305433e-05, |
|
"loss": 5.0452, |
|
"step": 46080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9237108079794913e-05, |
|
"loss": 5.0372, |
|
"step": 46592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922872213228439e-05, |
|
"loss": 5.0347, |
|
"step": 47104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922033618477387e-05, |
|
"loss": 5.0237, |
|
"step": 47616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.921195023726335e-05, |
|
"loss": 5.0093, |
|
"step": 48128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.920356428975283e-05, |
|
"loss": 5.0178, |
|
"step": 48640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.919517834224231e-05, |
|
"loss": 5.0058, |
|
"step": 49152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.918679239473179e-05, |
|
"loss": 5.0017, |
|
"step": 49664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917840644722127e-05, |
|
"loss": 4.9931, |
|
"step": 50176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917003687851449e-05, |
|
"loss": 4.9923, |
|
"step": 50688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.916165093100397e-05, |
|
"loss": 4.9966, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.915326498349345e-05, |
|
"loss": 4.9846, |
|
"step": 51712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.914487903598292e-05, |
|
"loss": 4.9739, |
|
"step": 52224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.91364930884724e-05, |
|
"loss": 4.9696, |
|
"step": 52736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.912810714096188e-05, |
|
"loss": 4.9615, |
|
"step": 53248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.911972119345136e-05, |
|
"loss": 4.9481, |
|
"step": 53760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.911135162474457e-05, |
|
"loss": 4.9444, |
|
"step": 54272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.910296567723405e-05, |
|
"loss": 4.9483, |
|
"step": 54784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.909457972972353e-05, |
|
"loss": 4.9445, |
|
"step": 55296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908619378221301e-05, |
|
"loss": 4.9323, |
|
"step": 55808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.907780783470249e-05, |
|
"loss": 4.9296, |
|
"step": 56320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906943826599571e-05, |
|
"loss": 4.9301, |
|
"step": 56832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906105231848519e-05, |
|
"loss": 4.9332, |
|
"step": 57344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905266637097467e-05, |
|
"loss": 4.9281, |
|
"step": 57856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9044280423464147e-05, |
|
"loss": 4.9207, |
|
"step": 58368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9035910854757356e-05, |
|
"loss": 4.9198, |
|
"step": 58880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9027541286050565e-05, |
|
"loss": 4.9169, |
|
"step": 59392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9019155338540045e-05, |
|
"loss": 4.9064, |
|
"step": 59904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9010769391029525e-05, |
|
"loss": 4.8998, |
|
"step": 60416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9002383443519005e-05, |
|
"loss": 4.9036, |
|
"step": 60928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8993997496008485e-05, |
|
"loss": 4.9031, |
|
"step": 61440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8985611548497965e-05, |
|
"loss": 4.8806, |
|
"step": 61952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8977225600987445e-05, |
|
"loss": 4.8813, |
|
"step": 62464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8968839653476924e-05, |
|
"loss": 4.8789, |
|
"step": 62976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.896047008477014e-05, |
|
"loss": 4.871, |
|
"step": 63488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.895208413725962e-05, |
|
"loss": 4.8733, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.89436981897491e-05, |
|
"loss": 4.8763, |
|
"step": 64512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.893531224223858e-05, |
|
"loss": 4.8616, |
|
"step": 65024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.892692629472806e-05, |
|
"loss": 4.8619, |
|
"step": 65536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.891854034721754e-05, |
|
"loss": 4.8506, |
|
"step": 66048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.891015439970702e-05, |
|
"loss": 4.8671, |
|
"step": 66560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.89017684521965e-05, |
|
"loss": 4.8578, |
|
"step": 67072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.889338250468598e-05, |
|
"loss": 4.8493, |
|
"step": 67584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.888501293597919e-05, |
|
"loss": 4.8461, |
|
"step": 68096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.887662698846867e-05, |
|
"loss": 4.8456, |
|
"step": 68608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.886824104095815e-05, |
|
"loss": 4.8457, |
|
"step": 69120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885985509344763e-05, |
|
"loss": 4.8345, |
|
"step": 69632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8851485524740845e-05, |
|
"loss": 4.8332, |
|
"step": 70144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8843099577230325e-05, |
|
"loss": 4.8331, |
|
"step": 70656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8834713629719805e-05, |
|
"loss": 4.8297, |
|
"step": 71168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.882632768220928e-05, |
|
"loss": 4.8287, |
|
"step": 71680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.881794173469876e-05, |
|
"loss": 4.809, |
|
"step": 72192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880955578718824e-05, |
|
"loss": 4.8195, |
|
"step": 72704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880116983967772e-05, |
|
"loss": 4.8045, |
|
"step": 73216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.879281664977466e-05, |
|
"loss": 4.7945, |
|
"step": 73728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.878443070226414e-05, |
|
"loss": 4.8131, |
|
"step": 74240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877604475475362e-05, |
|
"loss": 4.8018, |
|
"step": 74752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.87676588072431e-05, |
|
"loss": 4.8021, |
|
"step": 75264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875927285973258e-05, |
|
"loss": 4.8048, |
|
"step": 75776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875088691222206e-05, |
|
"loss": 4.7824, |
|
"step": 76288 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.7586212158203125, |
|
"eval_runtime": 284.5791, |
|
"eval_samples_per_second": 1340.896, |
|
"eval_steps_per_second": 41.904, |
|
"step": 76320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.874250096471154e-05, |
|
"loss": 4.7858, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.873411501720102e-05, |
|
"loss": 4.7817, |
|
"step": 77312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.872574544849423e-05, |
|
"loss": 4.7995, |
|
"step": 77824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.871735950098371e-05, |
|
"loss": 4.7787, |
|
"step": 78336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.870897355347319e-05, |
|
"loss": 4.7864, |
|
"step": 78848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.870058760596267e-05, |
|
"loss": 4.7659, |
|
"step": 79360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.869220165845215e-05, |
|
"loss": 4.7714, |
|
"step": 79872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.868381571094163e-05, |
|
"loss": 4.7606, |
|
"step": 80384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.867544614223484e-05, |
|
"loss": 4.7651, |
|
"step": 80896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.866706019472432e-05, |
|
"loss": 4.7658, |
|
"step": 81408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.86586742472138e-05, |
|
"loss": 4.7685, |
|
"step": 81920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865028829970329e-05, |
|
"loss": 4.768, |
|
"step": 82432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8641918730996496e-05, |
|
"loss": 4.7485, |
|
"step": 82944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8633532783485976e-05, |
|
"loss": 4.7474, |
|
"step": 83456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8625146835975456e-05, |
|
"loss": 4.7442, |
|
"step": 83968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8616760888464936e-05, |
|
"loss": 4.7355, |
|
"step": 84480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8608374940954416e-05, |
|
"loss": 4.7497, |
|
"step": 84992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8599988993443896e-05, |
|
"loss": 4.7337, |
|
"step": 85504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8591603045933376e-05, |
|
"loss": 4.7324, |
|
"step": 86016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8583217098422856e-05, |
|
"loss": 4.7522, |
|
"step": 86528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8574847529716065e-05, |
|
"loss": 4.7305, |
|
"step": 87040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8566461582205545e-05, |
|
"loss": 4.7309, |
|
"step": 87552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8558075634695025e-05, |
|
"loss": 4.7267, |
|
"step": 88064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8549689687184505e-05, |
|
"loss": 4.733, |
|
"step": 88576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8541303739673985e-05, |
|
"loss": 4.7209, |
|
"step": 89088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8532917792163465e-05, |
|
"loss": 4.7152, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8524531844652945e-05, |
|
"loss": 4.7121, |
|
"step": 90112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8516145897142425e-05, |
|
"loss": 4.7203, |
|
"step": 90624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.850779270723937e-05, |
|
"loss": 4.7009, |
|
"step": 91136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849940675972885e-05, |
|
"loss": 4.7072, |
|
"step": 91648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849103719102206e-05, |
|
"loss": 4.7081, |
|
"step": 92160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.848265124351154e-05, |
|
"loss": 4.7029, |
|
"step": 92672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.847426529600102e-05, |
|
"loss": 4.7069, |
|
"step": 93184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.84658793484905e-05, |
|
"loss": 4.6969, |
|
"step": 93696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.845750977978371e-05, |
|
"loss": 4.6981, |
|
"step": 94208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8449123832273194e-05, |
|
"loss": 4.6981, |
|
"step": 94720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8440737884762674e-05, |
|
"loss": 4.6748, |
|
"step": 95232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8432351937252154e-05, |
|
"loss": 4.6861, |
|
"step": 95744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8423965989741634e-05, |
|
"loss": 4.6701, |
|
"step": 96256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8415580042231114e-05, |
|
"loss": 4.6805, |
|
"step": 96768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.840719409472059e-05, |
|
"loss": 4.6811, |
|
"step": 97280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839880814721007e-05, |
|
"loss": 4.684, |
|
"step": 97792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839042219969955e-05, |
|
"loss": 4.6743, |
|
"step": 98304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.838205263099276e-05, |
|
"loss": 4.6728, |
|
"step": 98816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8373666683482236e-05, |
|
"loss": 4.68, |
|
"step": 99328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8365280735971716e-05, |
|
"loss": 4.6741, |
|
"step": 99840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8356894788461196e-05, |
|
"loss": 4.6687, |
|
"step": 100352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834852521975441e-05, |
|
"loss": 4.6621, |
|
"step": 100864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834013927224389e-05, |
|
"loss": 4.6491, |
|
"step": 101376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.833175332473337e-05, |
|
"loss": 4.6673, |
|
"step": 101888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.832336737722285e-05, |
|
"loss": 4.6602, |
|
"step": 102400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.831498142971233e-05, |
|
"loss": 4.6592, |
|
"step": 102912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.830659548220181e-05, |
|
"loss": 4.6472, |
|
"step": 103424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.829820953469129e-05, |
|
"loss": 4.6551, |
|
"step": 103936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.82898399659845e-05, |
|
"loss": 4.639, |
|
"step": 104448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.828145401847398e-05, |
|
"loss": 4.6596, |
|
"step": 104960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.827306807096346e-05, |
|
"loss": 4.6403, |
|
"step": 105472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.826468212345294e-05, |
|
"loss": 4.6485, |
|
"step": 105984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.825629617594242e-05, |
|
"loss": 4.6394, |
|
"step": 106496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.824792660723563e-05, |
|
"loss": 4.6318, |
|
"step": 107008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823954065972511e-05, |
|
"loss": 4.6338, |
|
"step": 107520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.82311547122146e-05, |
|
"loss": 4.6388, |
|
"step": 108032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8222768764704077e-05, |
|
"loss": 4.6132, |
|
"step": 108544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8214382817193557e-05, |
|
"loss": 4.6228, |
|
"step": 109056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8205996869683037e-05, |
|
"loss": 4.6354, |
|
"step": 109568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8197610922172516e-05, |
|
"loss": 4.6222, |
|
"step": 110080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8189224974661996e-05, |
|
"loss": 4.6159, |
|
"step": 110592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8180855405955206e-05, |
|
"loss": 4.6117, |
|
"step": 111104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8172485837248415e-05, |
|
"loss": 4.611, |
|
"step": 111616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8164099889737895e-05, |
|
"loss": 4.6349, |
|
"step": 112128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8155713942227375e-05, |
|
"loss": 4.6248, |
|
"step": 112640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8147327994716854e-05, |
|
"loss": 4.6156, |
|
"step": 113152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8138942047206334e-05, |
|
"loss": 4.6214, |
|
"step": 113664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8130556099695814e-05, |
|
"loss": 4.6215, |
|
"step": 114176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8122170152185294e-05, |
|
"loss": 4.6148, |
|
"step": 114688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8113784204674774e-05, |
|
"loss": 4.5982, |
|
"step": 115200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8105398257164254e-05, |
|
"loss": 4.6119, |
|
"step": 115712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8097012309653734e-05, |
|
"loss": 4.6027, |
|
"step": 116224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8088626362143214e-05, |
|
"loss": 4.5993, |
|
"step": 116736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8080240414632694e-05, |
|
"loss": 4.6105, |
|
"step": 117248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80718708459259e-05, |
|
"loss": 4.601, |
|
"step": 117760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.806348489841538e-05, |
|
"loss": 4.6082, |
|
"step": 118272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80551153297086e-05, |
|
"loss": 4.6023, |
|
"step": 118784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.804672938219807e-05, |
|
"loss": 4.5807, |
|
"step": 119296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803834343468755e-05, |
|
"loss": 4.596, |
|
"step": 119808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802995748717703e-05, |
|
"loss": 4.5893, |
|
"step": 120320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802157153966652e-05, |
|
"loss": 4.6019, |
|
"step": 120832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8013185592156e-05, |
|
"loss": 4.5912, |
|
"step": 121344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.800479964464548e-05, |
|
"loss": 4.5936, |
|
"step": 121856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.799641369713496e-05, |
|
"loss": 4.5934, |
|
"step": 122368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.798802774962444e-05, |
|
"loss": 4.5871, |
|
"step": 122880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797965818091765e-05, |
|
"loss": 4.5908, |
|
"step": 123392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797127223340713e-05, |
|
"loss": 4.5842, |
|
"step": 123904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.796288628589661e-05, |
|
"loss": 4.5716, |
|
"step": 124416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.795451671718982e-05, |
|
"loss": 4.5831, |
|
"step": 124928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.79461307696793e-05, |
|
"loss": 4.5824, |
|
"step": 125440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.793774482216878e-05, |
|
"loss": 4.5725, |
|
"step": 125952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792935887465826e-05, |
|
"loss": 4.5788, |
|
"step": 126464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792097292714774e-05, |
|
"loss": 4.5766, |
|
"step": 126976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7912586979637217e-05, |
|
"loss": 4.5869, |
|
"step": 127488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.790421741093043e-05, |
|
"loss": 4.5712, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.789583146341991e-05, |
|
"loss": 4.5642, |
|
"step": 128512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.788744551590939e-05, |
|
"loss": 4.5657, |
|
"step": 129024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.787905956839887e-05, |
|
"loss": 4.5637, |
|
"step": 129536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.787067362088835e-05, |
|
"loss": 4.5453, |
|
"step": 130048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.786228767337783e-05, |
|
"loss": 4.5597, |
|
"step": 130560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.785391810467104e-05, |
|
"loss": 4.558, |
|
"step": 131072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.784553215716052e-05, |
|
"loss": 4.5568, |
|
"step": 131584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.783714620965e-05, |
|
"loss": 4.5518, |
|
"step": 132096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782876026213948e-05, |
|
"loss": 4.5443, |
|
"step": 132608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782037431462896e-05, |
|
"loss": 4.5504, |
|
"step": 133120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7811988367118434e-05, |
|
"loss": 4.5619, |
|
"step": 133632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.780360241960792e-05, |
|
"loss": 4.555, |
|
"step": 134144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.77952164720974e-05, |
|
"loss": 4.555, |
|
"step": 134656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.778683052458688e-05, |
|
"loss": 4.5506, |
|
"step": 135168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.777844457707636e-05, |
|
"loss": 4.5615, |
|
"step": 135680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.777007500836957e-05, |
|
"loss": 4.5449, |
|
"step": 136192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.776168906085905e-05, |
|
"loss": 4.5401, |
|
"step": 136704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.775331949215226e-05, |
|
"loss": 4.548, |
|
"step": 137216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.774493354464174e-05, |
|
"loss": 4.5506, |
|
"step": 137728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.773654759713122e-05, |
|
"loss": 4.5301, |
|
"step": 138240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.77281616496207e-05, |
|
"loss": 4.5322, |
|
"step": 138752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771977570211018e-05, |
|
"loss": 4.5385, |
|
"step": 139264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771138975459966e-05, |
|
"loss": 4.5306, |
|
"step": 139776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.770300380708914e-05, |
|
"loss": 4.5345, |
|
"step": 140288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.769461785957862e-05, |
|
"loss": 4.5406, |
|
"step": 140800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7686231912068105e-05, |
|
"loss": 4.5222, |
|
"step": 141312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7677845964557585e-05, |
|
"loss": 4.5323, |
|
"step": 141824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7669460017047065e-05, |
|
"loss": 4.5197, |
|
"step": 142336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7661074069536545e-05, |
|
"loss": 4.5439, |
|
"step": 142848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7652704500829754e-05, |
|
"loss": 4.5354, |
|
"step": 143360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7644318553319234e-05, |
|
"loss": 4.5236, |
|
"step": 143872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7635932605808714e-05, |
|
"loss": 4.5264, |
|
"step": 144384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7627546658298194e-05, |
|
"loss": 4.5246, |
|
"step": 144896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7619160710787674e-05, |
|
"loss": 4.5323, |
|
"step": 145408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761077476327715e-05, |
|
"loss": 4.5264, |
|
"step": 145920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.760238881576663e-05, |
|
"loss": 4.5218, |
|
"step": 146432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.759400286825611e-05, |
|
"loss": 4.5206, |
|
"step": 146944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.758563329954932e-05, |
|
"loss": 4.5192, |
|
"step": 147456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.75772473520388e-05, |
|
"loss": 4.5218, |
|
"step": 147968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756887778333202e-05, |
|
"loss": 4.5086, |
|
"step": 148480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.75604918358215e-05, |
|
"loss": 4.5199, |
|
"step": 148992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.755210588831097e-05, |
|
"loss": 4.507, |
|
"step": 149504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.754371994080045e-05, |
|
"loss": 4.4974, |
|
"step": 150016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.753535037209367e-05, |
|
"loss": 4.5191, |
|
"step": 150528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.752696442458315e-05, |
|
"loss": 4.5092, |
|
"step": 151040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751857847707262e-05, |
|
"loss": 4.5107, |
|
"step": 151552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.75101925295621e-05, |
|
"loss": 4.5138, |
|
"step": 152064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.750180658205158e-05, |
|
"loss": 4.4996, |
|
"step": 152576 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.477122783660889, |
|
"eval_runtime": 284.0973, |
|
"eval_samples_per_second": 1343.17, |
|
"eval_steps_per_second": 41.975, |
|
"step": 152640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.749342063454106e-05, |
|
"loss": 4.5034, |
|
"step": 153088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.748503468703054e-05, |
|
"loss": 4.4938, |
|
"step": 153600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.747664873952003e-05, |
|
"loss": 4.5195, |
|
"step": 154112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.746826279200951e-05, |
|
"loss": 4.4957, |
|
"step": 154624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745987684449899e-05, |
|
"loss": 4.5104, |
|
"step": 155136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745149089698847e-05, |
|
"loss": 4.492, |
|
"step": 155648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.744310494947795e-05, |
|
"loss": 4.4931, |
|
"step": 156160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.743471900196743e-05, |
|
"loss": 4.4856, |
|
"step": 156672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.742633305445691e-05, |
|
"loss": 4.4989, |
|
"step": 157184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.741794710694639e-05, |
|
"loss": 4.4967, |
|
"step": 157696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.740956115943586e-05, |
|
"loss": 4.4993, |
|
"step": 158208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.740117521192534e-05, |
|
"loss": 4.502, |
|
"step": 158720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7392805643218556e-05, |
|
"loss": 4.4836, |
|
"step": 159232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7384419695708036e-05, |
|
"loss": 4.4876, |
|
"step": 159744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.737603374819751e-05, |
|
"loss": 4.482, |
|
"step": 160256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.736764780068699e-05, |
|
"loss": 4.4787, |
|
"step": 160768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7359278231980205e-05, |
|
"loss": 4.4878, |
|
"step": 161280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7350892284469685e-05, |
|
"loss": 4.4798, |
|
"step": 161792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7342506336959165e-05, |
|
"loss": 4.4774, |
|
"step": 162304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7334120389448645e-05, |
|
"loss": 4.4946, |
|
"step": 162816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7325734441938125e-05, |
|
"loss": 4.4796, |
|
"step": 163328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7317348494427605e-05, |
|
"loss": 4.4803, |
|
"step": 163840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7308962546917085e-05, |
|
"loss": 4.4793, |
|
"step": 164352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7300576599406565e-05, |
|
"loss": 4.4877, |
|
"step": 164864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7292207030699774e-05, |
|
"loss": 4.4737, |
|
"step": 165376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7283821083189254e-05, |
|
"loss": 4.4718, |
|
"step": 165888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.727545151448246e-05, |
|
"loss": 4.4699, |
|
"step": 166400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.726706556697194e-05, |
|
"loss": 4.4722, |
|
"step": 166912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.725867961946143e-05, |
|
"loss": 4.4626, |
|
"step": 167424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.725029367195091e-05, |
|
"loss": 4.4676, |
|
"step": 167936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.724190772444039e-05, |
|
"loss": 4.4688, |
|
"step": 168448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.723352177692987e-05, |
|
"loss": 4.4666, |
|
"step": 168960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.722513582941935e-05, |
|
"loss": 4.4743, |
|
"step": 169472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.721674988190883e-05, |
|
"loss": 4.4613, |
|
"step": 169984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720838031320204e-05, |
|
"loss": 4.464, |
|
"step": 170496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.719999436569152e-05, |
|
"loss": 4.4636, |
|
"step": 171008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7191608418181e-05, |
|
"loss": 4.4486, |
|
"step": 171520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.718322247067048e-05, |
|
"loss": 4.4528, |
|
"step": 172032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.717483652315996e-05, |
|
"loss": 4.4447, |
|
"step": 172544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.716645057564944e-05, |
|
"loss": 4.4516, |
|
"step": 173056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.715808100694265e-05, |
|
"loss": 4.4593, |
|
"step": 173568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714969505943213e-05, |
|
"loss": 4.4614, |
|
"step": 174080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7141309111921614e-05, |
|
"loss": 4.4513, |
|
"step": 174592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7132923164411094e-05, |
|
"loss": 4.451, |
|
"step": 175104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7124537216900574e-05, |
|
"loss": 4.4623, |
|
"step": 175616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.711615126939005e-05, |
|
"loss": 4.4477, |
|
"step": 176128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.710778170068326e-05, |
|
"loss": 4.4524, |
|
"step": 176640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.709939575317274e-05, |
|
"loss": 4.4473, |
|
"step": 177152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.709100980566222e-05, |
|
"loss": 4.4301, |
|
"step": 177664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7082623858151696e-05, |
|
"loss": 4.4518, |
|
"step": 178176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7074237910641176e-05, |
|
"loss": 4.4431, |
|
"step": 178688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7065851963130656e-05, |
|
"loss": 4.4462, |
|
"step": 179200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7057466015620136e-05, |
|
"loss": 4.4387, |
|
"step": 179712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7049080068109616e-05, |
|
"loss": 4.4435, |
|
"step": 180224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704071049940283e-05, |
|
"loss": 4.426, |
|
"step": 180736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.703232455189231e-05, |
|
"loss": 4.4539, |
|
"step": 181248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.702393860438179e-05, |
|
"loss": 4.4293, |
|
"step": 181760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.701555265687127e-05, |
|
"loss": 4.4453, |
|
"step": 182272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.700716670936075e-05, |
|
"loss": 4.4311, |
|
"step": 182784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699878076185023e-05, |
|
"loss": 4.4313, |
|
"step": 183296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699039481433971e-05, |
|
"loss": 4.4273, |
|
"step": 183808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.698202524563292e-05, |
|
"loss": 4.4378, |
|
"step": 184320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.69736392981224e-05, |
|
"loss": 4.4115, |
|
"step": 184832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.696525335061188e-05, |
|
"loss": 4.4247, |
|
"step": 185344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.695686740310136e-05, |
|
"loss": 4.4331, |
|
"step": 185856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694848145559084e-05, |
|
"loss": 4.4248, |
|
"step": 186368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694011188688405e-05, |
|
"loss": 4.4185, |
|
"step": 186880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.693172593937353e-05, |
|
"loss": 4.4139, |
|
"step": 187392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6923339991863017e-05, |
|
"loss": 4.416, |
|
"step": 187904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6914970423156226e-05, |
|
"loss": 4.4412, |
|
"step": 188416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6906584475645706e-05, |
|
"loss": 4.4277, |
|
"step": 188928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6898198528135186e-05, |
|
"loss": 4.4208, |
|
"step": 189440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6889812580624665e-05, |
|
"loss": 4.4292, |
|
"step": 189952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6881426633114145e-05, |
|
"loss": 4.4301, |
|
"step": 190464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6873040685603625e-05, |
|
"loss": 4.425, |
|
"step": 190976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6864654738093105e-05, |
|
"loss": 4.4071, |
|
"step": 191488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6856268790582585e-05, |
|
"loss": 4.4241, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.684788284307206e-05, |
|
"loss": 4.4192, |
|
"step": 192512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683949689556154e-05, |
|
"loss": 4.4086, |
|
"step": 193024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683111094805102e-05, |
|
"loss": 4.4253, |
|
"step": 193536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.68227250005405e-05, |
|
"loss": 4.4154, |
|
"step": 194048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6814355431833714e-05, |
|
"loss": 4.4202, |
|
"step": 194560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6805969484323194e-05, |
|
"loss": 4.4221, |
|
"step": 195072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6797583536812674e-05, |
|
"loss": 4.4018, |
|
"step": 195584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6789197589302154e-05, |
|
"loss": 4.4128, |
|
"step": 196096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6780811641791634e-05, |
|
"loss": 4.4072, |
|
"step": 196608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.677245845188858e-05, |
|
"loss": 4.417, |
|
"step": 197120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.676407250437806e-05, |
|
"loss": 4.4162, |
|
"step": 197632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.675568655686753e-05, |
|
"loss": 4.4116, |
|
"step": 198144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.674730060935701e-05, |
|
"loss": 4.414, |
|
"step": 198656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.673891466184649e-05, |
|
"loss": 4.41, |
|
"step": 199168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.673052871433597e-05, |
|
"loss": 4.4087, |
|
"step": 199680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.672214276682545e-05, |
|
"loss": 4.404, |
|
"step": 200192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.671377319811867e-05, |
|
"loss": 4.4042, |
|
"step": 200704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.670538725060815e-05, |
|
"loss": 4.4092, |
|
"step": 201216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.669700130309763e-05, |
|
"loss": 4.4033, |
|
"step": 201728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668861535558711e-05, |
|
"loss": 4.3992, |
|
"step": 202240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668022940807659e-05, |
|
"loss": 4.407, |
|
"step": 202752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.66718598393698e-05, |
|
"loss": 4.4057, |
|
"step": 203264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.666347389185928e-05, |
|
"loss": 4.418, |
|
"step": 203776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.665508794434876e-05, |
|
"loss": 4.3983, |
|
"step": 204288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.664670199683824e-05, |
|
"loss": 4.3957, |
|
"step": 204800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.663831604932772e-05, |
|
"loss": 4.3978, |
|
"step": 205312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6629930101817197e-05, |
|
"loss": 4.3955, |
|
"step": 205824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6621544154306677e-05, |
|
"loss": 4.3766, |
|
"step": 206336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.661317458559989e-05, |
|
"loss": 4.3916, |
|
"step": 206848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.660478863808937e-05, |
|
"loss": 4.3944, |
|
"step": 207360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.659640269057885e-05, |
|
"loss": 4.3881, |
|
"step": 207872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.658801674306833e-05, |
|
"loss": 4.3926, |
|
"step": 208384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657964717436154e-05, |
|
"loss": 4.3813, |
|
"step": 208896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657126122685102e-05, |
|
"loss": 4.3825, |
|
"step": 209408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.65628752793405e-05, |
|
"loss": 4.4015, |
|
"step": 209920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.655448933182998e-05, |
|
"loss": 4.3884, |
|
"step": 210432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.654610338431946e-05, |
|
"loss": 4.397, |
|
"step": 210944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.653773381561267e-05, |
|
"loss": 4.3886, |
|
"step": 211456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652934786810215e-05, |
|
"loss": 4.402, |
|
"step": 211968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652096192059163e-05, |
|
"loss": 4.387, |
|
"step": 212480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.651257597308111e-05, |
|
"loss": 4.3835, |
|
"step": 212992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6504206404374326e-05, |
|
"loss": 4.3867, |
|
"step": 213504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6495820456863806e-05, |
|
"loss": 4.3922, |
|
"step": 214016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6487434509353286e-05, |
|
"loss": 4.3747, |
|
"step": 214528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6479048561842766e-05, |
|
"loss": 4.3755, |
|
"step": 215040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6470662614332246e-05, |
|
"loss": 4.3776, |
|
"step": 215552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6462293045625455e-05, |
|
"loss": 4.3811, |
|
"step": 216064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6453907098114935e-05, |
|
"loss": 4.3804, |
|
"step": 216576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6445521150604415e-05, |
|
"loss": 4.3877, |
|
"step": 217088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6437151581897624e-05, |
|
"loss": 4.3674, |
|
"step": 217600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6428765634387104e-05, |
|
"loss": 4.3772, |
|
"step": 218112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6420379686876584e-05, |
|
"loss": 4.3676, |
|
"step": 218624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6411993739366064e-05, |
|
"loss": 4.3908, |
|
"step": 219136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6403607791855544e-05, |
|
"loss": 4.3882, |
|
"step": 219648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6395221844345024e-05, |
|
"loss": 4.3741, |
|
"step": 220160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6386835896834504e-05, |
|
"loss": 4.3715, |
|
"step": 220672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6378449949323984e-05, |
|
"loss": 4.3762, |
|
"step": 221184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6370064001813464e-05, |
|
"loss": 4.3806, |
|
"step": 221696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6361678054302944e-05, |
|
"loss": 4.3811, |
|
"step": 222208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6353292106792424e-05, |
|
"loss": 4.3708, |
|
"step": 222720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6344906159281903e-05, |
|
"loss": 4.3737, |
|
"step": 223232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.633653659057511e-05, |
|
"loss": 4.3724, |
|
"step": 223744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.632815064306459e-05, |
|
"loss": 4.378, |
|
"step": 224256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.631976469555407e-05, |
|
"loss": 4.363, |
|
"step": 224768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.631137874804355e-05, |
|
"loss": 4.3717, |
|
"step": 225280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.630299280053303e-05, |
|
"loss": 4.366, |
|
"step": 225792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.629462323182625e-05, |
|
"loss": 4.3498, |
|
"step": 226304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.628623728431573e-05, |
|
"loss": 4.3764, |
|
"step": 226816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.627785133680521e-05, |
|
"loss": 4.3654, |
|
"step": 227328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626946538929469e-05, |
|
"loss": 4.368, |
|
"step": 227840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.62610958205879e-05, |
|
"loss": 4.372, |
|
"step": 228352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.625270987307738e-05, |
|
"loss": 4.3559, |
|
"step": 228864 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.342620849609375, |
|
"eval_runtime": 328.6628, |
|
"eval_samples_per_second": 1161.041, |
|
"eval_steps_per_second": 36.283, |
|
"step": 228960 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 9.40925261274002e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |