{
  "best_metric": 4.106700420379639,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-case/lstm/4/checkpoints/checkpoint-610560",
  "epoch": 0.025000606015738065,
  "eval_steps": 10,
  "global_step": 610560,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 4.999998362119627e-05, "loss": 10.8203, "step": 1},
    {"epoch": 0.0, "learning_rate": 4.999161405248948e-05, "loss": 7.5554, "step": 512},
    {"epoch": 0.0, "learning_rate": 4.998322810497896e-05, "loss": 7.0617, "step": 1024},
    {"epoch": 0.0, "learning_rate": 4.997484215746844e-05, "loss": 6.9928, "step": 1536},
    {"epoch": 0.0, "learning_rate": 4.996645620995792e-05, "loss": 6.9463, "step": 2048},
    {"epoch": 0.0, "learning_rate": 4.99580702624474e-05, "loss": 6.8938, "step": 2560},
    {"epoch": 0.0, "learning_rate": 4.994968431493688e-05, "loss": 6.7206, "step": 3072},
    {"epoch": 0.0, "learning_rate": 4.994131474623009e-05, "loss": 6.6102, "step": 3584},
    {"epoch": 0.0, "learning_rate": 4.993292879871958e-05, "loss": 6.5164, "step": 4096},
    {"epoch": 0.0, "learning_rate": 4.992454285120906e-05, "loss": 6.447, "step": 4608},
    {"epoch": 0.0, "learning_rate": 4.991615690369854e-05, "loss": 6.3857, "step": 5120},
    {"epoch": 0.0, "learning_rate": 4.990777095618801e-05, "loss": 6.328, "step": 5632},
    {"epoch": 0.0, "learning_rate": 4.989938500867749e-05, "loss": 6.2684, "step": 6144},
    {"epoch": 0.0, "learning_rate": 4.989099906116697e-05, "loss": 6.2061, "step": 6656},
    {"epoch": 0.0, "learning_rate": 4.988261311365645e-05, "loss": 6.1519, "step": 7168},
    {"epoch": 0.0, "learning_rate": 4.987422716614593e-05, "loss": 6.0943, "step": 7680},
    {"epoch": 0.0, "learning_rate": 4.986584121863541e-05, "loss": 6.0496, "step": 8192},
    {"epoch": 0.0, "learning_rate": 4.985745527112489e-05, "loss": 6.0038, "step": 8704},
    {"epoch": 0.0, "learning_rate": 4.984906932361437e-05, "loss": 5.9633, "step": 9216},
    {"epoch": 0.0, "learning_rate": 4.984068337610385e-05, "loss": 5.9267, "step": 9728},
    {"epoch": 0.0, "learning_rate": 4.983229742859333e-05, "loss": 5.8922, "step": 10240},
    {"epoch": 0.0, "learning_rate": 4.982391148108281e-05, "loss": 5.8559, "step": 10752},
    {"epoch": 0.0, "learning_rate": 4.981552553357229e-05, "loss": 5.8247, "step": 11264},
    {"epoch": 0.0, "learning_rate": 4.9807155964865506e-05, "loss": 5.796, "step": 11776},
    {"epoch": 0.0, "learning_rate": 4.9798770017354986e-05, "loss": 5.7777, "step": 12288},
    {"epoch": 0.0, "learning_rate": 4.9790384069844466e-05, "loss": 5.7387, "step": 12800},
    {"epoch": 0.0, "learning_rate": 4.9781998122333946e-05, "loss": 5.7171, "step": 13312},
    {"epoch": 0.0, "learning_rate": 4.9773612174823426e-05, "loss": 5.6922, "step": 13824},
    {"epoch": 0.0, "learning_rate": 4.97652262273129e-05, "loss": 5.6726, "step": 14336},
    {"epoch": 0.0, "learning_rate": 4.975684027980238e-05, "loss": 5.6463, "step": 14848},
    {"epoch": 0.01, "learning_rate": 4.974845433229186e-05, "loss": 5.6222, "step": 15360},
    {"epoch": 0.01, "learning_rate": 4.974006838478134e-05, "loss": 5.6155, "step": 15872},
    {"epoch": 0.01, "learning_rate": 4.973168243727082e-05, "loss": 5.5884, "step": 16384},
    {"epoch": 0.01, "learning_rate": 4.97232964897603e-05, "loss": 5.5783, "step": 16896},
    {"epoch": 0.01, "learning_rate": 4.9714926921053515e-05, "loss": 5.5648, "step": 17408},
    {"epoch": 0.01, "learning_rate": 4.9706557352346724e-05, "loss": 5.5395, "step": 17920},
    {"epoch": 0.01, "learning_rate": 4.9698171404836204e-05, "loss": 5.5286, "step": 18432},
    {"epoch": 0.01, "learning_rate": 4.9689785457325684e-05, "loss": 5.4887, "step": 18944},
    {"epoch": 0.01, "learning_rate": 4.9681399509815164e-05, "loss": 5.4899, "step": 19456},
    {"epoch": 0.01, "learning_rate": 4.967302994110837e-05, "loss": 5.4549, "step": 19968},
    {"epoch": 0.01, "learning_rate": 4.966464399359785e-05, "loss": 5.4551, "step": 20480},
    {"epoch": 0.01, "learning_rate": 4.965625804608733e-05, "loss": 5.4362, "step": 20992},
    {"epoch": 0.01, "learning_rate": 4.964787209857681e-05, "loss": 5.435, "step": 21504},
    {"epoch": 0.01, "learning_rate": 4.963948615106629e-05, "loss": 5.4091, "step": 22016},
    {"epoch": 0.01, "learning_rate": 4.96311165823595e-05, "loss": 5.4015, "step": 22528},
    {"epoch": 0.01, "learning_rate": 4.962273063484898e-05, "loss": 5.3932, "step": 23040},
    {"epoch": 0.01, "learning_rate": 4.961434468733847e-05, "loss": 5.3755, "step": 23552},
    {"epoch": 0.01, "learning_rate": 4.960595873982795e-05, "loss": 5.3797, "step": 24064},
    {"epoch": 0.01, "learning_rate": 4.959757279231743e-05, "loss": 5.3497, "step": 24576},
    {"epoch": 0.01, "learning_rate": 4.958920322361064e-05, "loss": 5.3351, "step": 25088},
    {"epoch": 0.01, "learning_rate": 4.958081727610012e-05, "loss": 5.3448, "step": 25600},
    {"epoch": 0.01, "learning_rate": 4.95724313285896e-05, "loss": 5.3373, "step": 26112},
    {"epoch": 0.01, "learning_rate": 4.956404538107908e-05, "loss": 5.3124, "step": 26624},
    {"epoch": 0.01, "learning_rate": 4.955565943356856e-05, "loss": 5.301, "step": 27136},
    {"epoch": 0.01, "learning_rate": 4.954728986486177e-05, "loss": 5.2988, "step": 27648},
    {"epoch": 0.01, "learning_rate": 4.953890391735125e-05, "loss": 5.283, "step": 28160},
    {"epoch": 0.01, "learning_rate": 4.9530517969840727e-05, "loss": 5.2966, "step": 28672},
    {"epoch": 0.01, "learning_rate": 4.9522132022330207e-05, "loss": 5.2574, "step": 29184},
    {"epoch": 0.01, "learning_rate": 4.951376245362342e-05, "loss": 5.2572, "step": 29696},
    {"epoch": 0.01, "learning_rate": 4.95053765061129e-05, "loss": 5.2491, "step": 30208},
    {"epoch": 0.01, "learning_rate": 4.949700693740611e-05, "loss": 5.2377, "step": 30720},
    {"epoch": 0.01, "learning_rate": 4.948862098989559e-05, "loss": 5.2392, "step": 31232},
    {"epoch": 0.01, "learning_rate": 4.948023504238507e-05, "loss": 5.2173, "step": 31744},
    {"epoch": 0.01, "learning_rate": 4.947184909487455e-05, "loss": 5.2118, "step": 32256},
    {"epoch": 0.01, "learning_rate": 4.946346314736403e-05, "loss": 5.2034, "step": 32768},
    {"epoch": 0.01, "learning_rate": 4.945507719985351e-05, "loss": 5.2166, "step": 33280},
    {"epoch": 0.01, "learning_rate": 4.944669125234299e-05, "loss": 5.1889, "step": 33792},
    {"epoch": 0.01, "learning_rate": 4.943830530483247e-05, "loss": 5.1878, "step": 34304},
    {"epoch": 0.01, "learning_rate": 4.942991935732195e-05, "loss": 5.1617, "step": 34816},
    {"epoch": 0.01, "learning_rate": 4.942154978861516e-05, "loss": 5.1689, "step": 35328},
    {"epoch": 0.01, "learning_rate": 4.941316384110464e-05, "loss": 5.1778, "step": 35840},
    {"epoch": 0.01, "learning_rate": 4.9404794272397856e-05, "loss": 5.1681, "step": 36352},
    {"epoch": 0.01, "learning_rate": 4.9396408324887336e-05, "loss": 5.1465, "step": 36864},
    {"epoch": 0.01, "learning_rate": 4.9388022377376816e-05, "loss": 5.1595, "step": 37376},
    {"epoch": 0.01, "learning_rate": 4.9379636429866296e-05, "loss": 5.1548, "step": 37888},
    {"epoch": 0.01, "learning_rate": 4.9371250482355776e-05, "loss": 5.1311, "step": 38400},
    {"epoch": 0.01, "learning_rate": 4.9362864534845256e-05, "loss": 5.1289, "step": 38912},
    {"epoch": 0.01, "learning_rate": 4.935447858733473e-05, "loss": 5.1113, "step": 39424},
    {"epoch": 0.01, "learning_rate": 4.934609263982421e-05, "loss": 5.0985, "step": 39936},
    {"epoch": 0.01, "learning_rate": 4.933770669231369e-05, "loss": 5.0977, "step": 40448},
    {"epoch": 0.01, "learning_rate": 4.9329337123606905e-05, "loss": 5.1003, "step": 40960},
    {"epoch": 0.01, "learning_rate": 4.932095117609638e-05, "loss": 5.0981, "step": 41472},
    {"epoch": 0.01, "learning_rate": 4.931256522858586e-05, "loss": 5.1038, "step": 41984},
    {"epoch": 0.01, "learning_rate": 4.9304195659879074e-05, "loss": 5.0892, "step": 42496},
    {"epoch": 0.01, "learning_rate": 4.9295809712368554e-05, "loss": 5.0694, "step": 43008},
    {"epoch": 0.01, "learning_rate": 4.9287423764858034e-05, "loss": 5.0711, "step": 43520},
    {"epoch": 0.01, "learning_rate": 4.9279037817347514e-05, "loss": 5.0705, "step": 44032},
    {"epoch": 0.01, "learning_rate": 4.9270651869836994e-05, "loss": 5.0586, "step": 44544},
    {"epoch": 0.01, "learning_rate": 4.9262265922326474e-05, "loss": 5.0501, "step": 45056},
    {"epoch": 0.01, "learning_rate": 4.9253879974815954e-05, "loss": 5.0399, "step": 45568},
    {"epoch": 0.02, "learning_rate": 4.9245494027305433e-05, "loss": 5.0393, "step": 46080},
    {"epoch": 0.02, "learning_rate": 4.9237108079794913e-05, "loss": 5.0403, "step": 46592},
    {"epoch": 0.02, "learning_rate": 4.922873851108812e-05, "loss": 5.0274, "step": 47104},
    {"epoch": 0.02, "learning_rate": 4.922036894238133e-05, "loss": 5.0294, "step": 47616},
    {"epoch": 0.02, "learning_rate": 4.921198299487081e-05, "loss": 5.0181, "step": 48128},
    {"epoch": 0.02, "learning_rate": 4.920359704736029e-05, "loss": 5.0141, "step": 48640},
    {"epoch": 0.02, "learning_rate": 4.919521109984978e-05, "loss": 4.9998, "step": 49152},
    {"epoch": 0.02, "learning_rate": 4.918684153114299e-05, "loss": 4.9976, "step": 49664},
    {"epoch": 0.02, "learning_rate": 4.917845558363247e-05, "loss": 4.9913, "step": 50176},
    {"epoch": 0.02, "learning_rate": 4.917006963612195e-05, "loss": 4.9944, "step": 50688},
    {"epoch": 0.02, "learning_rate": 4.916168368861143e-05, "loss": 4.9953, "step": 51200},
    {"epoch": 0.02, "learning_rate": 4.915329774110091e-05, "loss": 4.9728, "step": 51712},
    {"epoch": 0.02, "learning_rate": 4.914491179359039e-05, "loss": 4.978, "step": 52224},
    {"epoch": 0.02, "learning_rate": 4.913652584607987e-05, "loss": 4.9667, "step": 52736},
    {"epoch": 0.02, "learning_rate": 4.9128156277373076e-05, "loss": 4.9644, "step": 53248},
    {"epoch": 0.02, "learning_rate": 4.9119770329862556e-05, "loss": 4.9573, "step": 53760},
    {"epoch": 0.02, "learning_rate": 4.9111384382352036e-05, "loss": 4.9506, "step": 54272},
    {"epoch": 0.02, "learning_rate": 4.9102998434841516e-05, "loss": 4.9497, "step": 54784},
    {"epoch": 0.02, "learning_rate": 4.9094612487330996e-05, "loss": 4.9416, "step": 55296},
    {"epoch": 0.02, "learning_rate": 4.9086226539820476e-05, "loss": 4.939, "step": 55808},
    {"epoch": 0.02, "learning_rate": 4.907784059230996e-05, "loss": 4.9309, "step": 56320},
    {"epoch": 0.02, "learning_rate": 4.906945464479944e-05, "loss": 4.9213, "step": 56832},
    {"epoch": 0.02, "learning_rate": 4.906106869728892e-05, "loss": 4.9316, "step": 57344},
    {"epoch": 0.02, "learning_rate": 4.9052682749778396e-05, "loss": 4.9214, "step": 57856},
    {"epoch": 0.02, "learning_rate": 4.9044296802267876e-05, "loss": 4.9222, "step": 58368},
    {"epoch": 0.02, "learning_rate": 4.903592723356109e-05, "loss": 4.9173, "step": 58880},
    {"epoch": 0.02, "learning_rate": 4.9027541286050565e-05, "loss": 4.9176, "step": 59392},
    {"epoch": 0.02, "learning_rate": 4.901917171734378e-05, "loss": 4.9086, "step": 59904},
    {"epoch": 0.02, "learning_rate": 4.901078576983326e-05, "loss": 4.8933, "step": 60416},
    {"epoch": 0.02, "learning_rate": 4.900239982232274e-05, "loss": 4.901, "step": 60928},
    {"epoch": 0.02, "learning_rate": 4.8994013874812214e-05, "loss": 4.9055, "step": 61440},
    {"epoch": 0.02, "learning_rate": 4.89856279273017e-05, "loss": 4.8914, "step": 61952},
    {"epoch": 0.02, "learning_rate": 4.897724197979118e-05, "loss": 4.8845, "step": 62464},
    {"epoch": 0.02, "learning_rate": 4.896885603228066e-05, "loss": 4.882, "step": 62976},
    {"epoch": 0.02, "learning_rate": 4.896047008477014e-05, "loss": 4.8806, "step": 63488},
    {"epoch": 0.02, "learning_rate": 4.895210051606335e-05, "loss": 4.8705, "step": 64000},
    {"epoch": 0.02, "learning_rate": 4.894371456855283e-05, "loss": 4.8736, "step": 64512},
    {"epoch": 0.02, "learning_rate": 4.893532862104231e-05, "loss": 4.8628, "step": 65024},
    {"epoch": 0.02, "learning_rate": 4.892695905233552e-05, "loss": 4.8634, "step": 65536},
    {"epoch": 0.02, "learning_rate": 4.8918573104825e-05, "loss": 4.8545, "step": 66048},
    {"epoch": 0.02, "learning_rate": 4.891018715731448e-05, "loss": 4.8728, "step": 66560},
    {"epoch": 0.02, "learning_rate": 4.890180120980396e-05, "loss": 4.855, "step": 67072},
    {"epoch": 0.02, "learning_rate": 4.889343164109717e-05, "loss": 4.846, "step": 67584},
    {"epoch": 0.02, "learning_rate": 4.888504569358665e-05, "loss": 4.8419, "step": 68096},
    {"epoch": 0.02, "learning_rate": 4.8876659746076134e-05, "loss": 4.8434, "step": 68608},
    {"epoch": 0.02, "learning_rate": 4.8868273798565614e-05, "loss": 4.8446, "step": 69120},
    {"epoch": 0.02, "learning_rate": 4.8859887851055094e-05, "loss": 4.8441, "step": 69632},
    {"epoch": 0.02, "learning_rate": 4.8851501903544574e-05, "loss": 4.8329, "step": 70144},
    {"epoch": 0.02, "learning_rate": 4.8843115956034054e-05, "loss": 4.8338, "step": 70656},
    {"epoch": 0.02, "learning_rate": 4.8834730008523534e-05, "loss": 4.8293, "step": 71168},
    {"epoch": 0.02, "learning_rate": 4.8826344061013014e-05, "loss": 4.8271, "step": 71680},
    {"epoch": 0.02, "learning_rate": 4.881797449230622e-05, "loss": 4.8143, "step": 72192},
    {"epoch": 0.02, "learning_rate": 4.88095885447957e-05, "loss": 4.8265, "step": 72704},
    {"epoch": 0.02, "learning_rate": 4.880120259728518e-05, "loss": 4.8126, "step": 73216},
    {"epoch": 0.02, "learning_rate": 4.879281664977466e-05, "loss": 4.7996, "step": 73728},
    {"epoch": 0.02, "learning_rate": 4.87844634598716e-05, "loss": 4.8145, "step": 74240},
    {"epoch": 0.02, "learning_rate": 4.877607751236109e-05, "loss": 4.8026, "step": 74752},
    {"epoch": 0.02, "learning_rate": 4.876769156485057e-05, "loss": 4.8028, "step": 75264},
    {"epoch": 0.02, "learning_rate": 4.875930561734005e-05, "loss": 4.8086, "step": 75776},
    {"epoch": 0.02, "learning_rate": 4.875093604863326e-05, "loss": 4.7898, "step": 76288},
    {"epoch": 0.03, "eval_loss": 4.756908416748047, "eval_runtime": 298.6182, "eval_samples_per_second": 1277.856, "eval_steps_per_second": 39.934, "step": 76320},
    {"epoch": 1.0, "learning_rate": 4.874255010112274e-05, "loss": 4.7827, "step": 76800},
    {"epoch": 1.0, "learning_rate": 4.873416415361222e-05, "loss": 4.7867, "step": 77312},
    {"epoch": 1.0, "learning_rate": 4.87257782061017e-05, "loss": 4.788, "step": 77824},
    {"epoch": 1.0, "learning_rate": 4.8717408637394906e-05, "loss": 4.7828, "step": 78336},
    {"epoch": 1.0, "learning_rate": 4.8709022689884386e-05, "loss": 4.7875, "step": 78848},
    {"epoch": 1.0, "learning_rate": 4.8700636742373866e-05, "loss": 4.7709, "step": 79360},
    {"epoch": 1.0, "learning_rate": 4.8692250794863346e-05, "loss": 4.7771, "step": 79872},
    {"epoch": 1.0, "learning_rate": 4.8683881226156555e-05, "loss": 4.7643, "step": 80384},
    {"epoch": 1.0, "learning_rate": 4.867549527864604e-05, "loss": 4.7734, "step": 80896},
    {"epoch": 1.0, "learning_rate": 4.866710933113552e-05, "loss": 4.7597, "step": 81408},
    {"epoch": 1.0, "learning_rate": 4.8658723383625e-05, "loss": 4.7591, "step": 81920},
    {"epoch": 1.0, "learning_rate": 4.865033743611448e-05, "loss": 4.7594, "step": 82432},
    {"epoch": 1.0, "learning_rate": 4.864195148860396e-05, "loss": 4.7523, "step": 82944},
    {"epoch": 1.0, "learning_rate": 4.863356554109344e-05, "loss": 4.7478, "step": 83456},
    {"epoch": 1.0, "learning_rate": 4.862517959358292e-05, "loss": 4.7445, "step": 83968},
    {"epoch": 1.0, "learning_rate": 4.86167936460724e-05, "loss": 4.7309, "step": 84480},
    {"epoch": 1.0, "learning_rate": 4.8608407698561874e-05, "loss": 4.7425, "step": 84992},
    {"epoch": 1.0, "learning_rate": 4.8600021751051354e-05, "loss": 4.7357, "step": 85504},
    {"epoch": 1.0, "learning_rate": 4.8591635803540834e-05, "loss": 4.728, "step": 86016},
    {"epoch": 1.0, "learning_rate": 4.8583249856030314e-05, "loss": 4.7474, "step": 86528},
    {"epoch": 1.0, "learning_rate": 4.8574863908519794e-05, "loss": 4.7299, "step": 87040},
    {"epoch": 1.0, "learning_rate": 4.8566477961009274e-05, "loss": 4.7319, "step": 87552},
    {"epoch": 1.0, "learning_rate": 4.855810839230249e-05, "loss": 4.7253, "step": 88064},
    {"epoch": 1.0, "learning_rate": 4.854972244479197e-05, "loss": 4.7435, "step": 88576},
    {"epoch": 1.0, "learning_rate": 4.854133649728145e-05, "loss": 4.7118, "step": 89088},
    {"epoch": 1.0, "learning_rate": 4.853295054977093e-05, "loss": 4.7135, "step": 89600},
    {"epoch": 1.0, "learning_rate": 4.852456460226041e-05, "loss": 4.7168, "step": 90112},
    {"epoch": 1.0, "learning_rate": 4.851617865474989e-05, "loss": 4.7186, "step": 90624},
    {"epoch": 1.0, "learning_rate": 4.850779270723937e-05, "loss": 4.703, "step": 91136},
    {"epoch": 1.01, "learning_rate": 4.849942313853258e-05, "loss": 4.7032, "step": 91648},
    {"epoch": 1.01, "learning_rate": 4.849105356982579e-05, "loss": 4.7127, "step": 92160},
    {"epoch": 1.01, "learning_rate": 4.848266762231527e-05, "loss": 4.7096, "step": 92672},
    {"epoch": 1.01, "learning_rate": 4.847428167480475e-05, "loss": 4.7108, "step": 93184},
    {"epoch": 1.01, "learning_rate": 4.8465912106097964e-05, "loss": 4.7054, "step": 93696},
    {"epoch": 1.01, "learning_rate": 4.8457526158587444e-05, "loss": 4.6966, "step": 94208},
    {"epoch": 1.01, "learning_rate": 4.8449140211076924e-05, "loss": 4.7052, "step": 94720},
    {"epoch": 1.01, "learning_rate": 4.8440754263566404e-05, "loss": 4.6758, "step": 95232},
    {"epoch": 1.01, "learning_rate": 4.8432368316055884e-05, "loss": 4.6963, "step": 95744},
    {"epoch": 1.01, "learning_rate": 4.8423982368545363e-05, "loss": 4.6715, "step": 96256},
    {"epoch": 1.01, "learning_rate": 4.8415596421034843e-05, "loss": 4.6844, "step": 96768},
    {"epoch": 1.01, "learning_rate": 4.840721047352432e-05, "loss": 4.6757, "step": 97280},
    {"epoch": 1.01, "learning_rate": 4.83988245260138e-05, "loss": 4.6873, "step": 97792},
    {"epoch": 1.01, "learning_rate": 4.839043857850328e-05, "loss": 4.6754, "step": 98304},
    {"epoch": 1.01, "learning_rate": 4.838205263099276e-05, "loss": 4.6738, "step": 98816},
    {"epoch": 1.01, "learning_rate": 4.8373666683482236e-05, "loss": 4.676, "step": 99328},
    {"epoch": 1.01, "learning_rate": 4.8365280735971716e-05, "loss": 4.6671, "step": 99840},
    {"epoch": 1.01, "learning_rate": 4.835691116726493e-05, "loss": 4.682, "step": 100352},
    {"epoch": 1.01, "learning_rate": 4.834852521975441e-05, "loss": 4.6564, "step": 100864},
    {"epoch": 1.01, "learning_rate": 4.834013927224389e-05, "loss": 4.6505, "step": 101376},
    {"epoch": 1.01, "learning_rate": 4.833175332473337e-05, "loss": 4.67, "step": 101888},
    {"epoch": 1.01, "learning_rate": 4.832336737722285e-05, "loss": 4.6691, "step": 102400},
    {"epoch": 1.01, "learning_rate": 4.831498142971233e-05, "loss": 4.6536, "step": 102912},
    {"epoch": 1.01, "learning_rate": 4.830659548220181e-05, "loss": 4.6484, "step": 103424},
    {"epoch": 1.01, "learning_rate": 4.829822591349502e-05, "loss": 4.6545, "step": 103936},
    {"epoch": 1.01, "learning_rate": 4.82898399659845e-05, "loss": 4.6417, "step": 104448},
    {"epoch": 1.01, "learning_rate": 4.828145401847398e-05, "loss": 4.6647, "step": 104960},
    {"epoch": 1.01, "learning_rate": 4.827306807096346e-05, "loss": 4.6385, "step": 105472},
    {"epoch": 1.01, "learning_rate": 4.826468212345294e-05, "loss": 4.646, "step": 105984},
    {"epoch": 1.01, "learning_rate": 4.825629617594242e-05, "loss": 4.6395, "step": 106496},
    {"epoch": 1.01, "learning_rate": 4.82479102284319e-05, "loss": 4.6333, "step": 107008},
    {"epoch": 1.01, "learning_rate": 4.823952428092138e-05, "loss": 4.6439, "step": 107520},
    {"epoch": 1.01, "learning_rate": 4.823113833341086e-05, "loss": 4.6299, "step": 108032},
    {"epoch": 1.01, "learning_rate": 4.822275238590034e-05, "loss": 4.6259, "step": 108544},
    {"epoch": 1.01, "learning_rate": 4.8214382817193557e-05, "loss": 4.6282, "step": 109056},
    {"epoch": 1.01, "learning_rate": 4.8205996869683037e-05, "loss": 4.6424, "step": 109568},
    {"epoch": 1.01, "learning_rate": 4.8197627300976246e-05, "loss": 4.6241, "step": 110080},
    {"epoch": 1.01, "learning_rate": 4.8189241353465726e-05, "loss": 4.6216, "step": 110592},
    {"epoch": 1.01, "learning_rate": 4.8180855405955206e-05, "loss": 4.6148, "step": 111104},
    {"epoch": 1.01, "learning_rate": 4.8172469458444685e-05, "loss": 4.6161, "step": 111616},
    {"epoch": 1.01, "learning_rate": 4.8164083510934165e-05, "loss": 4.6353, "step": 112128},
    {"epoch": 1.01, "learning_rate": 4.8155697563423645e-05, "loss": 4.632, "step": 112640},
    {"epoch": 1.01, "learning_rate": 4.8147327994716854e-05, "loss": 4.6147, "step": 113152},
    {"epoch": 1.01, "learning_rate": 4.8138958426010064e-05, "loss": 4.627, "step": 113664},
    {"epoch": 1.01, "learning_rate": 4.813057247849955e-05, "loss": 4.6293, "step": 114176},
    {"epoch": 1.01, "learning_rate": 4.812218653098903e-05, "loss": 4.6164, "step": 114688},
    {"epoch": 1.01, "learning_rate": 4.811380058347851e-05, "loss": 4.617, "step": 115200},
    {"epoch": 1.01, "learning_rate": 4.810541463596799e-05, "loss": 4.6064, "step": 115712},
    {"epoch": 1.01, "learning_rate": 4.809702868845747e-05, "loss": 4.5952, "step": 116224},
    {"epoch": 1.01, "learning_rate": 4.808864274094695e-05, "loss": 4.5979, "step": 116736},
    {"epoch": 1.01, "learning_rate": 4.808025679343642e-05, "loss": 4.6045, "step": 117248},
    {"epoch": 1.01, "learning_rate": 4.807188722472964e-05, "loss": 4.6056, "step": 117760},
    {"epoch": 1.01, "learning_rate": 4.806350127721912e-05, "loss": 4.6232, "step": 118272},
    {"epoch": 1.01, "learning_rate": 4.80551153297086e-05, "loss": 4.6089, "step": 118784},
    {"epoch": 1.01, "learning_rate": 4.804672938219807e-05, "loss": 4.5892, "step": 119296},
    {"epoch": 1.01, "learning_rate": 4.803834343468755e-05, "loss": 4.5962, "step": 119808},
    {"epoch": 1.01, "learning_rate": 4.802995748717703e-05, "loss": 4.6017, "step": 120320},
    {"epoch": 1.01, "learning_rate": 4.802157153966652e-05, "loss": 4.5964, "step": 120832},
    {"epoch": 1.01, "learning_rate": 4.8013185592156e-05, "loss": 4.5884, "step": 121344},
    {"epoch": 1.01, "learning_rate": 4.800479964464548e-05, "loss": 4.5873, "step": 121856},
    {"epoch": 1.02, "learning_rate": 4.799643007593869e-05, "loss": 4.5864, "step": 122368},
    {"epoch": 1.02, "learning_rate": 4.798804412842817e-05, "loss": 4.5927, "step": 122880},
    {"epoch": 1.02, "learning_rate": 4.797965818091765e-05, "loss": 4.5817, "step": 123392},
    {"epoch": 1.02, "learning_rate": 4.797127223340713e-05, "loss": 4.5869, "step": 123904},
    {"epoch": 1.02, "learning_rate": 4.796288628589661e-05, "loss": 4.5824, "step": 124416},
    {"epoch": 1.02, "learning_rate": 4.795450033838609e-05, "loss": 4.5824, "step": 124928},
    {"epoch": 1.02, "learning_rate": 4.794611439087557e-05, "loss": 4.5724, "step": 125440},
    {"epoch": 1.02, "learning_rate": 4.793772844336505e-05, "loss": 4.5773, "step": 125952},
    {"epoch": 1.02, "learning_rate": 4.792934249585453e-05, "loss": 4.5723, "step": 126464},
    {"epoch": 1.02, "learning_rate": 4.792095654834401e-05, "loss": 4.5749, "step": 126976},
    {"epoch": 1.02, "learning_rate": 4.791257060083349e-05, "loss": 4.5789, "step": 127488},
    {"epoch": 1.02, "learning_rate": 4.790418465332297e-05, "loss": 4.5645, "step": 128000},
    {"epoch": 1.02, "learning_rate": 4.789581508461618e-05, "loss": 4.5674, "step": 128512},
    {"epoch": 1.02, "learning_rate": 4.788744551590939e-05, "loss": 4.5644, "step": 129024},
    {"epoch": 1.02, "learning_rate": 4.787905956839887e-05, "loss": 4.5626, "step": 129536},
    {"epoch": 1.02, "learning_rate": 4.787067362088835e-05, "loss": 4.5564, "step": 130048},
    {"epoch": 1.02, "learning_rate": 4.786228767337783e-05, "loss": 4.5581, "step": 130560},
    {"epoch": 1.02, "learning_rate": 4.785390172586731e-05, "loss": 4.5614, "step": 131072},
    {"epoch": 1.02, "learning_rate": 4.784553215716052e-05, "loss": 4.5513, "step": 131584},
    {"epoch": 1.02, "learning_rate": 4.783714620965e-05, "loss": 4.5545, "step": 132096},
    {"epoch": 1.02, "learning_rate": 4.782876026213948e-05, "loss": 4.5494, "step": 132608},
    {"epoch": 1.02, "learning_rate": 4.782037431462896e-05, "loss": 4.5409, "step": 133120},
    {"epoch": 1.02, "learning_rate": 4.7811988367118434e-05, "loss": 4.5633, "step": 133632},
    {"epoch": 1.02, "learning_rate": 4.780361879841165e-05, "loss": 4.5465, "step": 134144},
    {"epoch": 1.02, "learning_rate": 4.779523285090114e-05, "loss": 4.5533, "step": 134656},
    {"epoch": 1.02, "learning_rate": 4.7786863282194346e-05, "loss": 4.5497, "step": 135168},
    {"epoch": 1.02, "learning_rate": 4.7778477334683826e-05, "loss": 4.5589, "step": 135680},
    {"epoch": 1.02, "learning_rate": 4.7770091387173306e-05, "loss": 4.5495, "step": 136192},
    {"epoch": 1.02, "learning_rate": 4.7761705439662786e-05, "loss": 4.5353, "step": 136704},
    {"epoch": 1.02, "learning_rate": 4.775331949215226e-05, "loss": 4.5462, "step": 137216},
    {"epoch": 1.02, "learning_rate": 4.774493354464174e-05, "loss": 4.5517, "step": 137728},
    {"epoch": 1.02, "learning_rate": 4.773654759713122e-05, "loss": 4.5364, "step": 138240},
    {"epoch": 1.02, "learning_rate": 4.77281616496207e-05, "loss": 4.5401, "step": 138752},
    {"epoch": 1.02, "learning_rate": 4.771979208091391e-05, "loss": 4.5343, "step": 139264},
    {"epoch": 1.02, "learning_rate": 4.771140613340339e-05, "loss": 4.5426, "step": 139776},
    {"epoch": 1.02, "learning_rate": 4.7703020185892875e-05, "loss": 4.5301, "step": 140288},
    {"epoch": 1.02, "learning_rate": 4.7694634238382355e-05, "loss": 4.5373, "step": 140800},
    {"epoch": 1.02, "learning_rate": 4.7686264669675564e-05, "loss": 4.5279, "step": 141312},
    {"epoch": 1.02, "learning_rate": 4.7677878722165044e-05, "loss": 4.5341, "step": 141824},
    {"epoch": 1.02, "learning_rate": 4.7669492774654524e-05, "loss": 4.5227, "step": 142336},
    {"epoch": 1.02, "learning_rate": 4.7661106827144004e-05, "loss": 4.5453, "step": 142848},
    {"epoch": 1.02, "learning_rate": 4.7652720879633484e-05, "loss": 4.5349, "step": 143360},
    {"epoch": 1.02, "learning_rate": 4.7644334932122964e-05, "loss": 4.52, "step": 143872},
    {"epoch": 1.02, "learning_rate": 4.7635948984612444e-05, "loss": 4.5239, "step": 144384},
    {"epoch": 1.02, "learning_rate": 4.762757941590565e-05, "loss": 4.5253, "step": 144896},
    {"epoch": 1.02, "learning_rate": 4.761919346839513e-05, "loss": 4.5294, "step": 145408},
    {"epoch": 1.02, "learning_rate": 4.761080752088461e-05, "loss": 4.5374, "step": 145920},
    {"epoch": 1.02, "learning_rate": 4.760242157337409e-05, "loss": 4.5186, "step": 146432},
    {"epoch": 1.02, "learning_rate": 4.759403562586357e-05, "loss": 4.5237, "step": 146944},
    {"epoch": 1.02, "learning_rate": 4.758564967835306e-05, "loss": 4.5207, "step": 147456},
    {"epoch": 1.02, "learning_rate": 4.757728010964627e-05, "loss": 4.5229, "step": 147968},
    {"epoch": 1.02, "learning_rate": 4.756889416213575e-05, "loss": 4.5139, "step": 148480},
    {"epoch": 1.02, "learning_rate": 4.756050821462523e-05, "loss": 4.5228, "step": 148992},
    {"epoch": 1.02, "learning_rate": 4.755212226711471e-05, "loss": 4.513, "step": 149504},
    {"epoch": 1.02, "learning_rate": 4.7543769077211646e-05, "loss": 4.5074, "step": 150016},
    {"epoch": 1.02, "learning_rate": 4.7535383129701126e-05, "loss": 4.5209, "step": 150528},
    {"epoch": 1.02, "learning_rate": 4.7526997182190606e-05, "loss": 4.5128, "step": 151040},
    {"epoch": 1.02, "learning_rate": 4.7518611234680086e-05, "loss": 4.5113, "step": 151552},
    {"epoch": 1.02, "learning_rate": 4.7510225287169566e-05, "loss": 4.5144, "step": 152064},
    {"epoch": 1.02, "learning_rate": 4.7501839339659046e-05, "loss": 4.5057, "step": 152576},
    {"epoch": 1.03, "eval_loss": 4.476215839385986, "eval_runtime": 296.0227, "eval_samples_per_second": 1289.06, "eval_steps_per_second": 40.284, "step": 152640},
    {"epoch": 0.0, "learning_rate": 4.7493453392148526e-05, "loss": 4.4964, "step": 153088},
    {"epoch": 0.0, "learning_rate": 4.748506744463801e-05, "loss": 4.4979, "step": 153600},
    {"epoch": 0.0, "learning_rate": 4.747668149712749e-05, "loss": 4.51, "step": 154112},
    {"epoch": 0.0, "learning_rate": 4.746829554961697e-05, "loss": 4.4999, "step": 154624},
    {"epoch": 0.0, "learning_rate": 4.7459909602106446e-05, "loss": 4.51, "step": 155136},
    {"epoch": 0.0, "learning_rate": 4.7451523654595926e-05, "loss": 4.4954, "step": 155648},
    {"epoch": 0.0, "learning_rate": 4.7443137707085406e-05, "loss": 4.4992, "step": 156160},
    {"epoch": 0.0, "learning_rate": 4.7434751759574886e-05, "loss": 4.4932, "step": 156672},
    {"epoch": 0.0, "learning_rate": 4.7426365812064366e-05, "loss": 4.5048, "step": 157184},
    {"epoch": 0.0, "learning_rate": 4.7417996243357575e-05, "loss": 4.4918, "step": 157696},
    {"epoch": 0.0, "learning_rate": 4.7409610295847055e-05, "loss": 4.4915, "step": 158208},
    {"epoch": 0.0, "learning_rate": 4.7401224348336535e-05, "loss": 4.4914, "step": 158720},
    {"epoch": 0.0, "learning_rate": 4.7392838400826015e-05, "loss": 4.4894, "step": 159232},
    {"epoch": 0.0, "learning_rate": 4.7384452453315495e-05, "loss": 4.4841, "step": 159744},
    {"epoch": 0.0, "learning_rate": 4.7376066505804975e-05, "loss": 4.4854, "step": 160256},
    {"epoch": 0.0, "learning_rate": 4.736768055829446e-05, "loss": 4.4751, "step": 160768},
    {"epoch": 0.0, "learning_rate": 4.735929461078394e-05, "loss": 4.4805, "step": 161280},
    {"epoch": 0.0, "learning_rate": 4.735090866327342e-05, "loss": 4.4761, "step": 161792},
    {"epoch": 0.0, "learning_rate": 4.73425227157629e-05, "loss": 4.4729, "step": 162304},
    {"epoch": 0.0, "learning_rate": 4.733413676825238e-05, "loss": 4.4989, "step": 162816},
    {"epoch": 0.0, "learning_rate": 4.732576719954559e-05, "loss": 4.479, "step": 163328},
    {"epoch": 0.0, "learning_rate": 4.731738125203507e-05, "loss": 4.4748, "step": 163840},
    {"epoch": 0.0, "learning_rate": 4.730899530452455e-05, "loss": 4.4789, "step": 164352},
    {"epoch": 0.0, "learning_rate": 4.730060935701403e-05, "loss": 4.4933, "step": 164864},
    {"epoch": 0.0, "learning_rate": 4.729223978830724e-05, "loss": 4.4671, "step": 165376},
    {"epoch": 0.0, "learning_rate": 4.728385384079672e-05, "loss": 4.4709, "step": 165888},
    {"epoch": 0.0, "learning_rate": 4.72754678932862e-05, "loss": 4.4707, "step": 166400},
    {"epoch": 0.0, "learning_rate": 4.726708194577568e-05, "loss": 4.4775, "step": 166912},
    {"epoch": 0.0, "learning_rate": 4.725869599826516e-05, "loss": 4.4592, "step": 167424},
    {"epoch": 0.01, "learning_rate": 4.725031005075464e-05, "loss": 4.4656, "step": 167936},
    {"epoch": 0.01, "learning_rate": 4.724192410324412e-05, "loss": 4.4678, "step": 168448},
    {"epoch": 0.01, "learning_rate": 4.72335381557336e-05, "loss": 4.4745, "step": 168960},
    {"epoch": 0.01, "learning_rate": 4.722515220822308e-05, "loss": 4.4796, "step": 169472},
    {"epoch": 0.01, "learning_rate": 4.721678263951629e-05, "loss": 4.4692, "step": 169984},
    {"epoch": 0.01, "learning_rate": 4.720839669200577e-05, "loss": 4.4626, "step": 170496},
    {"epoch": 0.01, "learning_rate": 4.720001074449525e-05, "loss": 4.4718, "step": 171008},
    {"epoch": 0.01, "learning_rate": 4.719162479698473e-05, "loss": 4.4509, "step": 171520},
    {"epoch": 0.01, "learning_rate": 4.718323884947421e-05, "loss": 4.4637, "step": 172032},
    {"epoch": 0.01, "learning_rate": 4.717485290196369e-05, "loss": 4.4459, "step": 172544},
    {"epoch": 0.01, "learning_rate": 4.71664833332569e-05, "loss": 4.4526, "step": 173056},
    {"epoch": 0.01, "learning_rate": 4.7158097385746384e-05, "loss": 4.4548, "step": 173568},
    {"epoch": 0.01, "learning_rate": 4.7149711438235864e-05, "loss": 4.4638, "step": 174080},
    {"epoch": 0.01, "learning_rate": 4.7141325490725343e-05, "loss": 4.4538, "step": 174592},
    {"epoch": 0.01, "learning_rate": 4.7132939543214823e-05, "loss": 4.4517, "step": 175104},
    {"epoch": 0.01, "learning_rate": 4.7124553595704303e-05, "loss": 4.4531, "step": 175616},
    {"epoch": 0.01, "learning_rate": 4.711616764819378e-05, "loss": 4.4486, "step": 176128},
    {"epoch": 0.01, "learning_rate": 4.710778170068326e-05, "loss": 4.4614, "step": 176640},
    {"epoch": 0.01, "learning_rate": 4.70994285107802e-05, "loss": 4.4412, "step": 177152},
    {"epoch": 0.01, "learning_rate": 4.709104256326968e-05, "loss": 4.431, "step": 177664},
    {"epoch": 0.01, "learning_rate": 4.708265661575916e-05, "loss": 4.4559, "step": 178176},
    {"epoch": 0.01, "learning_rate": 4.707427066824864e-05, "loss": 4.4506, "step": 178688},
    {"epoch": 0.01, "learning_rate": 4.706588472073812e-05, "loss": 4.4405, "step": 179200},
    {"epoch": 0.01, "learning_rate": 4.70574987732276e-05, "loss": 4.44, "step": 179712},
    {"epoch": 0.01, "learning_rate": 4.704912920452082e-05, "loss": 4.4428, "step": 180224},
    {"epoch": 0.01, "learning_rate": 4.70407432570103e-05, "loss": 4.4318, "step": 180736},
    {"epoch": 0.01, "learning_rate": 4.703235730949978e-05, "loss": 4.4518, "step": 181248},
    {"epoch": 0.01, "learning_rate": 4.702397136198926e-05, "loss": 4.432, "step": 181760},
    {"epoch": 0.01, "learning_rate": 4.701558541447874e-05, "loss": 4.4392, "step": 182272},
    {"epoch": 0.01, "learning_rate": 4.700719946696822e-05, "loss": 4.4324, "step": 182784},
    {"epoch": 0.01, "learning_rate": 4.69988135194577e-05, "loss": 4.4331, "step": 183296},
    {"epoch": 0.01, "learning_rate": 4.6990443950750906e-05, "loss": 4.4335, "step": 183808},
    {"epoch": 0.01, "learning_rate": 4.6982058003240386e-05, "loss": 4.4316, "step": 184320},
    {"epoch": 0.01, "learning_rate": 4.6973672055729866e-05, "loss": 4.4238, "step": 184832},
    {"epoch": 0.01, "learning_rate": 4.6965302487023075e-05, "loss": 4.4269, "step": 185344},
    {"epoch": 0.01, "learning_rate": 4.6956916539512555e-05, "loss": 4.4403, "step": 185856},
    {"epoch": 0.01, "learning_rate": 4.6948530592002035e-05, "loss": 4.4267, "step": 186368},
    {"epoch": 0.01, "learning_rate": 4.6940144644491515e-05, "loss": 4.4232, "step": 186880},
    {"epoch": 0.01, "learning_rate": 4.6931758696980995e-05, "loss": 4.4191, "step": 187392},
    {"epoch": 0.01, "learning_rate": 4.6923372749470475e-05, "loss": 4.4201, "step": 187904},
    {"epoch": 0.01, "learning_rate": 4.6914986801959955e-05, "loss": 4.4415, "step": 188416},
    {"epoch": 0.01, "learning_rate": 4.690661723325317e-05, "loss": 4.4354, "step": 188928},
    {"epoch": 0.01, "learning_rate": 4.6898231285742644e-05, "loss": 4.4231, "step": 189440},
    {"epoch": 0.01, "learning_rate": 4.6889845338232124e-05, "loss": 4.4328, "step": 189952},
    {"epoch": 0.01, "learning_rate": 4.6881459390721604e-05, "loss": 4.4375, "step": 190464},
    {"epoch": 0.01, "learning_rate": 4.6873073443211084e-05, "loss": 4.4212, "step": 190976},
    {"epoch": 0.01, "learning_rate": 4.6864687495700564e-05, "loss": 4.4259, "step": 191488},
    {"epoch": 0.01, "learning_rate": 4.6856301548190044e-05, "loss": 4.4203, "step": 192000},
    {"epoch": 0.01, "learning_rate": 4.684793197948325e-05, "loss": 4.4105, "step": 192512},
    {"epoch": 0.01, "learning_rate": 4.683954603197274e-05, "loss": 4.4167, "step": 193024},
    {"epoch": 0.01, "learning_rate": 4.683116008446222e-05, "loss": 4.4128, "step": 193536},
    {"epoch": 0.01, "learning_rate": 4.68227741369517e-05, "loss": 4.42, "step": 194048},
    {"epoch": 0.01, "learning_rate": 4.681438818944118e-05, "loss": 4.4327, "step": 194560},
    {"epoch": 0.01, "learning_rate": 4.680600224193066e-05, "loss": 4.429, "step": 195072},
    {"epoch": 0.01, "learning_rate": 4.679761629442014e-05, "loss": 4.4023, "step": 195584},
    {"epoch": 0.01, "learning_rate": 4.678923034690962e-05, "loss": 4.4198, "step": 196096},
    {"epoch": 0.01, "learning_rate": 4.678086077820283e-05, "loss": 4.416, "step": 196608},
    {"epoch": 0.01, "learning_rate": 4.677247483069231e-05, "loss": 4.4188, "step": 197120},
    {"epoch": 0.01, "learning_rate": 4.676408888318179e-05, "loss": 4.4118, "step": 197632},
    {"epoch": 0.01, "learning_rate": 4.6755719314475e-05, "loss": 4.4044, "step": 198144},
    {"epoch": 0.02, "learning_rate": 4.674733336696448e-05, "loss": 4.4119, "step": 198656},
    {"epoch": 0.02, "learning_rate": 4.673894741945396e-05, "loss": 4.4094, "step": 199168},
    {"epoch": 0.02, "learning_rate": 4.673056147194344e-05, "loss": 4.4049, "step": 199680},
    {"epoch": 0.02, "learning_rate": 4.672219190323665e-05, "loss": 4.4104, "step": 200192},
    {"epoch": 0.02, "learning_rate": 4.671380595572613e-05, "loss": 4.4042, "step": 200704},
    {"epoch": 0.02, "learning_rate": 4.670542000821561e-05, "loss": 4.4095, "step": 201216},
    {"epoch": 0.02, "learning_rate": 4.669703406070509e-05, "loss": 4.3983, "step": 201728},
    {"epoch": 0.02, "learning_rate": 4.668864811319457e-05, "loss": 4.4009, "step": 202240},
    {"epoch": 0.02, "learning_rate": 4.668026216568405e-05, "loss": 4.3992, "step": 202752},
    {"epoch": 0.02, "learning_rate": 4.667187621817353e-05, "loss": 4.4075, "step": 203264},
    {"epoch": 0.02, "learning_rate": 4.666350664946674e-05, "loss": 4.4067, "step": 203776},
    {"epoch": 0.02, "learning_rate": 4.665512070195622e-05, "loss": 4.3931, "step": 204288},
    {"epoch": 0.02, "learning_rate": 4.66467347544457e-05, "loss": 4.3971, "step": 204800},
    {"epoch": 0.02, "learning_rate": 4.6638348806935175e-05, "loss": 4.3958, "step": 205312},
    {"epoch": 0.02, "learning_rate": 4.662996285942466e-05, "loss": 4.3972, "step": 205824},
    {"epoch": 0.02, "learning_rate": 4.662157691191414e-05, "loss": 4.3863, "step": 206336},
    {"epoch": 0.02, "learning_rate": 4.661320734320736e-05, "loss": 4.3885, "step": 206848},
    {"epoch": 0.02, "learning_rate": 4.660482139569683e-05, "loss": 4.3991, "step": 207360},
    {"epoch": 0.02, "learning_rate": 4.659643544818631e-05, "loss": 4.391, "step": 207872},
    {"epoch": 0.02, "learning_rate": 4.658804950067579e-05, "loss": 4.3873, "step": 208384},
    {"epoch": 0.02, "learning_rate": 4.657966355316527e-05, "loss": 4.3856, "step": 208896},
    {"epoch": 0.02, "learning_rate": 4.657127760565475e-05, "loss": 4.3794, "step": 209408},
    {"epoch": 0.02, "learning_rate": 4.656289165814423e-05, "loss": 4.3972, "step": 209920},
    {"epoch": 0.02, "learning_rate": 4.655452208943744e-05, "loss": 4.3897, "step": 210432},
    {"epoch": 0.02, "learning_rate": 4.654615252073065e-05, "loss": 4.3851, "step": 210944},
    {"epoch": 0.02, "learning_rate": 4.653776657322013e-05, "loss": 4.3887, "step": 211456},
    {"epoch": 0.02, "learning_rate": 4.6529380625709615e-05, "loss": 4.4018, "step": 211968},
    {"epoch": 0.02, "learning_rate": 4.6520994678199095e-05, "loss": 4.3901, "step": 212480},
    {"epoch": 0.02, "learning_rate": 4.6512608730688575e-05, "loss": 4.3772, "step": 212992},
    {"epoch": 0.02, "learning_rate": 4.6504222783178055e-05, "loss": 4.3875, "step": 213504},
    {"epoch": 0.02, "learning_rate": 4.6495836835667535e-05, "loss": 4.3912, "step": 214016},
    {"epoch": 0.02, "learning_rate": 4.6487450888157015e-05, "loss": 4.3814, "step": 214528},
    {"epoch": 0.02, "learning_rate": 4.6479081319450224e-05, "loss": 4.3844, "step": 215040},
    {"epoch": 0.02, "learning_rate": 4.6470695371939704e-05, "loss": 4.3795, "step": 215552},
    {"epoch": 0.02, "learning_rate": 4.646232580323291e-05, "loss": 4.3883, "step": 216064},
    {"epoch": 0.02, "learning_rate": 4.645393985572239e-05, "loss": 4.3748, "step": 216576},
    {"epoch": 0.02, "learning_rate": 4.644555390821187e-05, "loss": 4.3865, "step": 217088},
    {"epoch": 0.02, "learning_rate": 4.643716796070135e-05, "loss": 4.3691, "step": 217600},
    {"epoch": 0.02, "learning_rate": 4.642878201319083e-05, "loss": 4.3849, "step": 218112},
    {"epoch": 0.02, "learning_rate": 4.642039606568031e-05, "loss": 4.368, "step": 218624},
    {"epoch": 0.02, "learning_rate": 4.64120101181698e-05, "loss": 4.3928, "step": 219136},
    {"epoch": 0.02, "learning_rate": 4.640362417065928e-05, "loss": 4.3853, "step": 219648},
    {"epoch": 0.02, "learning_rate": 4.639523822314876e-05, "loss": 4.3749, "step": 220160},
    {"epoch": 0.02, "learning_rate": 4.638686865444197e-05, "loss": 4.3695, "step": 220672},
    {"epoch": 0.02, "learning_rate": 4.637848270693145e-05, "loss": 4.3771, "step": 221184},
    {"epoch": 0.02, "learning_rate": 4.637009675942093e-05, "loss": 4.3769, "step": 221696},
    {"epoch": 0.02, "learning_rate": 4.636171081191041e-05, "loss": 4.3875, "step": 222208},
    {"epoch": 0.02, "learning_rate": 4.635332486439989e-05, "loss": 4.3679, "step": 222720},
    {"epoch": 0.02, "learning_rate": 4.634493891688936e-05, "loss": 4.3778, "step": 223232},
    {"epoch": 0.02, "learning_rate": 4.633655296937884e-05, "loss": 4.3738, "step": 223744},
    {"epoch": 0.02, "learning_rate": 4.632816702186832e-05, "loss": 4.3809, "step": 224256},
    {"epoch": 0.02, "learning_rate": 4.63197810743578e-05, "loss": 4.3641, "step": 224768},
    {"epoch": 0.02, "learning_rate": 4.631141150565102e-05, "loss": 4.377, "step": 225280},
    {"epoch": 0.02, "learning_rate": 4.630304193694423e-05, "loss": 4.3699, "step": 225792},
    {"epoch": 0.02, "learning_rate": 4.629465598943371e-05, "loss": 4.3633, "step": 226304},
    {"epoch": 0.02, "learning_rate": 4.628627004192319e-05, "loss": 4.3757, "step": 226816},
    {"epoch": 0.02, "learning_rate": 4.6277884094412666e-05, "loss": 4.3724, "step": 227328},
    {"epoch": 0.02, "learning_rate": 4.6269498146902146e-05, "loss": 4.3624, "step": 227840},
    {"epoch": 0.02, "learning_rate": 4.6261112199391626e-05, "loss": 4.374, "step": 228352},
    {"epoch": 0.02, "learning_rate": 4.6252726251881106e-05, "loss": 4.3637, "step": 228864},
    {"epoch": 0.03, "eval_loss": 4.342019557952881, "eval_runtime": 293.1681, "eval_samples_per_second": 1301.612, "eval_steps_per_second": 40.676, "step": 228960},
    {"epoch": 1.0, "learning_rate": 4.6244356683174315e-05, "loss": 4.3607, "step": 229376},
    {"epoch": 1.0, "learning_rate": 4.6235970735663795e-05, "loss": 4.3545, "step": 229888},
    {"epoch": 1.0, "learning_rate": 4.6227584788153275e-05, "loss": 4.3654, "step": 230400},
    {"epoch": 1.0, "learning_rate": 4.6219198840642755e-05, "loss": 4.3622, "step": 230912},
    {"epoch": 1.0, "learning_rate": 4.6210812893132235e-05, "loss": 4.3764, "step": 231424},
    {"epoch": 1.0, "learning_rate": 4.6202426945621715e-05, "loss": 4.3528, "step": 231936},
    {"epoch": 1.0, "learning_rate": 4.61940409981112e-05, "loss": 4.3607, "step": 232448},
    {"epoch": 1.0, "learning_rate": 4.618565505060068e-05, "loss": 4.3528, "step": 232960},
    {"epoch": 1.0, "learning_rate": 4.617728548189389e-05, "loss": 4.3715, "step": 233472},
    {"epoch": 1.0, "learning_rate": 4.616889953438337e-05, "loss": 4.3513, "step": 233984},
    {"epoch": 1.0, "learning_rate": 4.616051358687285e-05, "loss": 4.3529, "step": 234496},
    {"epoch": 1.0, "learning_rate": 4.615212763936233e-05, "loss": 4.3591, "step": 235008},
    {"epoch": 1.0, "learning_rate": 4.614375807065554e-05, "loss": 4.3498, "step": 235520},
    {"epoch": 1.0, "learning_rate": 4.613537212314502e-05, "loss": 4.3495, "step": 236032},
    {"epoch": 1.0, "learning_rate": 4.61269861756345e-05, "loss": 4.3491, "step": 236544},
    {"epoch": 1.0, "learning_rate": 4.611860022812398e-05, "loss": 4.3439, "step": 237056},
    {"epoch": 1.0, "learning_rate": 4.611021428061346e-05, "loss": 4.3416, "step": 237568},
    {"epoch": 1.0, "learning_rate": 4.610182833310294e-05, "loss": 4.3449, "step": 238080},
    {"epoch": 1.0, "learning_rate": 4.609344238559242e-05, "loss": 4.3396, "step": 238592},
    {"epoch": 1.0, "learning_rate": 4.60850564380819e-05, "loss": 4.3674, "step": 239104},
    {"epoch": 1.0, "learning_rate": 4.6076686869375116e-05, "loss": 4.3454, "step": 239616},
    {"epoch": 1.0, "learning_rate": 4.6068300921864595e-05, "loss": 4.345, "step": 240128},
    {"epoch": 1.0, "learning_rate": 4.6059931353157805e-05, "loss": 4.3488, "step": 240640},
    {"epoch": 1.0, "learning_rate": 4.6051545405647285e-05, "loss": 4.3596, "step": 241152},
    {"epoch": 1.0, "learning_rate": 4.6043159458136764e-05, "loss": 4.3388, "step": 241664},
    {"epoch": 1.0, "learning_rate": 4.6034773510626244e-05, "loss": 4.3421, "step": 242176},
    {"epoch": 1.0, "learning_rate": 4.6026403941919454e-05, "loss": 4.3403, "step": 242688},
    {"epoch": 1.0, "learning_rate": 4.6018017994408933e-05, "loss": 4.3454, "step": 243200},
    {"epoch": 1.0, "learning_rate": 4.6009632046898413e-05, "loss": 4.3334, "step": 243712},
    {"epoch": 1.01, "learning_rate": 4.6001246099387893e-05, "loss": 4.3387, "step": 244224},
    {"epoch": 1.01, "learning_rate": 4.599286015187737e-05, "loss": 4.3426, "step": 244736},
    {"epoch": 1.01, "learning_rate": 4.598447420436685e-05, "loss": 4.3464, "step": 245248},
    {"epoch": 1.01, "learning_rate": 4.597610463566007e-05, "loss": 4.3523, "step": 245760},
    {"epoch": 1.01, "learning_rate": 4.596771868814955e-05, "loss": 4.3442, "step": 246272},
    {"epoch": 1.01, "learning_rate": 4.595933274063903e-05, "loss": 4.3373, "step": 246784},
    {"epoch": 1.01, "learning_rate": 4.59509467931285e-05, "loss": 4.3432, "step": 247296},
    {"epoch": 1.01, "learning_rate": 4.594256084561798e-05, "loss": 4.3287, "step": 247808},
    {"epoch": 1.01, "learning_rate": 4.593417489810746e-05, "loss": 4.3363, "step": 248320},
    {"epoch": 1.01, "learning_rate": 4.592578895059694e-05, "loss": 4.3226, "step": 248832},
    {"epoch": 1.01, "learning_rate": 4.591740300308642e-05, "loss": 4.327, "step": 249344},
    {"epoch": 1.01, "learning_rate": 4.590903343437963e-05, "loss": 4.3311, "step": 249856},
    {"epoch": 1.01, "learning_rate": 4.590064748686911e-05, "loss": 4.3443, "step": 250368},
    {"epoch": 1.01, "learning_rate": 4.589226153935859e-05, "loss": 4.3356, "step": 250880},
    {"epoch": 1.01, "learning_rate": 4.588387559184808e-05, "loss": 4.3273, "step": 251392},
    {"epoch": 1.01, "learning_rate": 4.587550602314129e-05, "loss": 4.3315, "step": 251904},
    {"epoch": 1.01, "learning_rate": 4.5867136454434496e-05, "loss": 4.3272, "step": 252416},
    {"epoch": 1.01, "learning_rate": 4.5858750506923976e-05, "loss": 4.339, "step": 252928},
    {"epoch": 1.01, "learning_rate": 4.5850364559413456e-05, "loss": 4.3211, "step": 253440},
    {"epoch": 1.01, "learning_rate": 4.5841978611902936e-05, "loss": 4.3117, "step": 253952},
    {"epoch": 1.01, "learning_rate": 4.5833592664392416e-05, "loss": 4.335, "step": 254464},
    {"epoch": 1.01, "learning_rate": 4.5825206716881896e-05, "loss": 4.3308, "step": 254976},
    {"epoch": 1.01, "learning_rate": 4.5816837148175105e-05, "loss": 4.317, "step": 255488},
    {"epoch": 1.01, "learning_rate": 4.5808451200664585e-05, "loss": 4.3255, "step": 256000},
    {"epoch": 1.01, "learning_rate": 4.5800065253154065e-05, "loss": 4.3269, "step": 256512},
    {"epoch": 1.01, "learning_rate": 4.5791679305643545e-05, "loss": 4.3115, "step": 257024},
    {"epoch": 1.01, "learning_rate": 4.5783293358133025e-05, "loss": 4.3325, "step": 257536},
    {"epoch": 1.01, "learning_rate": 4.577490741062251e-05, "loss": 4.3115, "step": 258048},
    {"epoch": 1.01, "learning_rate": 4.576653784191572e-05, "loss": 4.3266, "step": 258560},
    {"epoch": 1.01, "learning_rate": 4.57581518944052e-05, "loss": 4.3122, "step": 259072},
    {"epoch": 1.01, "learning_rate": 4.574976594689468e-05, "loss": 4.3237, "step": 259584},
    {"epoch": 1.01, "learning_rate": 4.574137999938416e-05, "loss": 4.312, "step": 260096},
    {"epoch": 1.01, "learning_rate": 4.573299405187364e-05, "loss": 4.3162, "step": 260608},
    {"epoch": 1.01, "learning_rate": 4.572460810436312e-05, "loss": 4.3046, "step": 261120},
    {"epoch": 1.01, "learning_rate": 4.57162221568526e-05, "loss": 4.3144, "step": 261632},
    {"epoch": 1.01, "learning_rate": 4.570783620934208e-05, "loss": 4.3297, "step": 262144},
    {"epoch": 1.01, "learning_rate": 4.569946664063529e-05, "loss": 4.3113, "step": 262656},
    {"epoch": 1.01, "learning_rate": 4.569108069312477e-05, "loss": 4.3122, "step": 263168},
    {"epoch": 1.01, "learning_rate": 4.568269474561425e-05, "loss": 4.3029, "step": 263680},
    {"epoch": 1.01, "learning_rate": 4.5674325176907465e-05, "loss": 4.3089, "step": 264192},
    {"epoch": 1.01, "learning_rate": 4.5665955608200674e-05, "loss": 4.3284, "step": 264704},
    {"epoch": 1.01, "learning_rate": 4.5657569660690154e-05, "loss": 4.3221, "step": 265216},
    {"epoch": 1.01, "learning_rate": 4.5649183713179634e-05,
|
"loss": 4.3152, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5640797765669114e-05, |
|
"loss": 4.3222, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.563242819696232e-05, |
|
"loss": 4.3225, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.56240422494518e-05, |
|
"loss": 4.3118, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.561565630194128e-05, |
|
"loss": 4.3145, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.560727035443076e-05, |
|
"loss": 4.3123, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559888440692024e-05, |
|
"loss": 4.3034, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559049845940972e-05, |
|
"loss": 4.3051, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.55821125118992e-05, |
|
"loss": 4.3031, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.557372656438868e-05, |
|
"loss": 4.31, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.55653569956819e-05, |
|
"loss": 4.3226, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.555697104817138e-05, |
|
"loss": 4.3194, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554858510066086e-05, |
|
"loss": 4.2941, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554019915315033e-05, |
|
"loss": 4.3174, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.553181320563981e-05, |
|
"loss": 4.3088, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.552342725812929e-05, |
|
"loss": 4.3092, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.551504131061877e-05, |
|
"loss": 4.3038, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.550667174191198e-05, |
|
"loss": 4.2965, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.549828579440146e-05, |
|
"loss": 4.3069, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.548989984689094e-05, |
|
"loss": 4.3016, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.548151389938042e-05, |
|
"loss": 4.3005, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.54731279518699e-05, |
|
"loss": 4.3034, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.546474200435939e-05, |
|
"loss": 4.2988, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5456372435652596e-05, |
|
"loss": 4.3089, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5447986488142076e-05, |
|
"loss": 4.2961, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5439600540631556e-05, |
|
"loss": 4.2949, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5431214593121036e-05, |
|
"loss": 4.2935, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5422828645610516e-05, |
|
"loss": 4.3071, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5414442698099996e-05, |
|
"loss": 4.3, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5406056750589476e-05, |
|
"loss": 4.2911, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5397670803078956e-05, |
|
"loss": 4.2907, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5389301234372165e-05, |
|
"loss": 4.2963, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5380915286861645e-05, |
|
"loss": 4.2986, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5372529339351125e-05, |
|
"loss": 4.2881, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.536415977064434e-05, |
|
"loss": 4.2844, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.535577382313382e-05, |
|
"loss": 4.2982, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.53473878756233e-05, |
|
"loss": 4.2866, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533900192811278e-05, |
|
"loss": 4.2877, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533061598060226e-05, |
|
"loss": 4.2837, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.532223003309174e-05, |
|
"loss": 4.2853, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.531384408558122e-05, |
|
"loss": 4.2975, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5305458138070694e-05, |
|
"loss": 4.2845, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5297072190560174e-05, |
|
"loss": 4.2862, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528871900065712e-05, |
|
"loss": 4.2921, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.52803330531466e-05, |
|
"loss": 4.2997, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.527194710563608e-05, |
|
"loss": 4.2982, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.526356115812556e-05, |
|
"loss": 4.2771, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.525517521061504e-05, |
|
"loss": 4.2894, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.524678926310452e-05, |
|
"loss": 4.2892, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5238403315594e-05, |
|
"loss": 4.2877, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5230033746887215e-05, |
|
"loss": 4.2908, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5221647799376694e-05, |
|
"loss": 4.2796, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.521326185186617e-05, |
|
"loss": 4.2908, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.520487590435565e-05, |
|
"loss": 4.2777, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.519648995684513e-05, |
|
"loss": 4.2888, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.518810400933461e-05, |
|
"loss": 4.2738, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5179734440627817e-05, |
|
"loss": 4.2902, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5171348493117297e-05, |
|
"loss": 4.2718, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5162962545606777e-05, |
|
"loss": 4.3017, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5154576598096256e-05, |
|
"loss": 4.2832, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.514619065058574e-05, |
|
"loss": 4.2839, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.513780470307522e-05, |
|
"loss": 4.2733, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.51294187555647e-05, |
|
"loss": 4.2856, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512103280805418e-05, |
|
"loss": 4.2824, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.511266323934739e-05, |
|
"loss": 4.293, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.510427729183687e-05, |
|
"loss": 4.2757, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.509589134432635e-05, |
|
"loss": 4.284, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.508752177561956e-05, |
|
"loss": 4.2806, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.507913582810904e-05, |
|
"loss": 4.2924, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.507074988059852e-05, |
|
"loss": 4.2686, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5062363933088e-05, |
|
"loss": 4.2843, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.505399436438121e-05, |
|
"loss": 4.2766, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.50456084168707e-05, |
|
"loss": 4.2751, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.503722246936018e-05, |
|
"loss": 4.2836, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502883652184966e-05, |
|
"loss": 4.2806, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502045057433914e-05, |
|
"loss": 4.2721, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.501206462682862e-05, |
|
"loss": 4.2812, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.50036786793181e-05, |
|
"loss": 4.2767, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.258913040161133, |
|
"eval_runtime": 293.9356, |
|
"eval_samples_per_second": 1298.213, |
|
"eval_steps_per_second": 40.57, |
|
"step": 305280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4995309110611306e-05, |
|
"loss": 4.271, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4986923163100786e-05, |
|
"loss": 4.261, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4978537215590266e-05, |
|
"loss": 4.276, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4970151268079746e-05, |
|
"loss": 4.2723, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4961765320569226e-05, |
|
"loss": 4.2868, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4953379373058706e-05, |
|
"loss": 4.2678, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.494499342554818e-05, |
|
"loss": 4.2673, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4936607478037665e-05, |
|
"loss": 4.2653, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4928221530527145e-05, |
|
"loss": 4.2806, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4919835583016625e-05, |
|
"loss": 4.2678, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4911449635506105e-05, |
|
"loss": 4.264, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4903063687995585e-05, |
|
"loss": 4.2678, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4894694119288794e-05, |
|
"loss": 4.2617, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4886308171778274e-05, |
|
"loss": 4.2644, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4877922224267754e-05, |
|
"loss": 4.2674, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4869536276757234e-05, |
|
"loss": 4.2494, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4861150329246714e-05, |
|
"loss": 4.2585, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4852764381736194e-05, |
|
"loss": 4.2578, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4844378434225674e-05, |
|
"loss": 4.2528, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4835992486715154e-05, |
|
"loss": 4.2768, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4827606539204634e-05, |
|
"loss": 4.2656, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481923697049785e-05, |
|
"loss": 4.2578, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481085102298733e-05, |
|
"loss": 4.2633, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.480246507547681e-05, |
|
"loss": 4.2751, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.479407912796629e-05, |
|
"loss": 4.254, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.478569318045577e-05, |
|
"loss": 4.2588, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.477730723294524e-05, |
|
"loss": 4.2559, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476892128543472e-05, |
|
"loss": 4.258, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.47605353379242e-05, |
|
"loss": 4.2497, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.475218214802115e-05, |
|
"loss": 4.2589, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.474379620051063e-05, |
|
"loss": 4.2591, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.473541025300011e-05, |
|
"loss": 4.2629, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.472702430548959e-05, |
|
"loss": 4.2691, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.471863835797907e-05, |
|
"loss": 4.2605, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.471025241046855e-05, |
|
"loss": 4.2527, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.470186646295803e-05, |
|
"loss": 4.2598, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.469348051544751e-05, |
|
"loss": 4.2512, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4685110946740717e-05, |
|
"loss": 4.2521, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4676724999230197e-05, |
|
"loss": 4.2426, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4668339051719676e-05, |
|
"loss": 4.2464, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4659953104209156e-05, |
|
"loss": 4.2502, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4651583535502366e-05, |
|
"loss": 4.2612, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4643197587991846e-05, |
|
"loss": 4.2516, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4634811640481325e-05, |
|
"loss": 4.2485, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4626425692970805e-05, |
|
"loss": 4.2508, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4618039745460285e-05, |
|
"loss": 4.248, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.46096701767535e-05, |
|
"loss": 4.2582, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460128422924298e-05, |
|
"loss": 4.242, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.459289828173246e-05, |
|
"loss": 4.2303, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.458451233422194e-05, |
|
"loss": 4.2539, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.457614276551515e-05, |
|
"loss": 4.2521, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.456775681800463e-05, |
|
"loss": 4.237, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.455937087049411e-05, |
|
"loss": 4.2447, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.455098492298359e-05, |
|
"loss": 4.249, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.454259897547307e-05, |
|
"loss": 4.2321, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.453422940676628e-05, |
|
"loss": 4.2534, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.452584345925576e-05, |
|
"loss": 4.2351, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.451745751174524e-05, |
|
"loss": 4.2466, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450907156423472e-05, |
|
"loss": 4.2353, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4500685616724206e-05, |
|
"loss": 4.2441, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4492316048017415e-05, |
|
"loss": 4.2367, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4483930100506895e-05, |
|
"loss": 4.242, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4475544152996375e-05, |
|
"loss": 4.2262, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4467158205485855e-05, |
|
"loss": 4.2404, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4458772257975335e-05, |
|
"loss": 4.2489, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4450386310464815e-05, |
|
"loss": 4.2369, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4442000362954295e-05, |
|
"loss": 4.2325, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4433614415443775e-05, |
|
"loss": 4.2262, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4425228467933254e-05, |
|
"loss": 4.2318, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.441684252042273e-05, |
|
"loss": 4.2483, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.440845657291221e-05, |
|
"loss": 4.2471, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.440007062540169e-05, |
|
"loss": 4.2405, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4391684677891174e-05, |
|
"loss": 4.2463, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.438331510918438e-05, |
|
"loss": 4.2485, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.43749455404776e-05, |
|
"loss": 4.2336, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.436655959296708e-05, |
|
"loss": 4.2356, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.435819002426029e-05, |
|
"loss": 4.2407, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.434980407674977e-05, |
|
"loss": 4.2285, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.434141812923925e-05, |
|
"loss": 4.2274, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.433303218172873e-05, |
|
"loss": 4.231, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.43246462342182e-05, |
|
"loss": 4.2327, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.431626028670768e-05, |
|
"loss": 4.2489, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.430787433919716e-05, |
|
"loss": 4.2474, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429948839168664e-05, |
|
"loss": 4.2216, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429110244417613e-05, |
|
"loss": 4.2394, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.428271649666561e-05, |
|
"loss": 4.2358, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.427433054915509e-05, |
|
"loss": 4.2357, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.426594460164457e-05, |
|
"loss": 4.2323, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.425757503293778e-05, |
|
"loss": 4.2224, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424918908542726e-05, |
|
"loss": 4.2309, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424080313791674e-05, |
|
"loss": 4.2294, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.423241719040622e-05, |
|
"loss": 4.2283, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.42240312428957e-05, |
|
"loss": 4.2266, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4215661674188906e-05, |
|
"loss": 4.231, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4207275726678386e-05, |
|
"loss": 4.233, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4198889779167866e-05, |
|
"loss": 4.2309, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4190503831657346e-05, |
|
"loss": 4.2195, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4182117884146826e-05, |
|
"loss": 4.2255, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.417374831544004e-05, |
|
"loss": 4.231, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.416536236792952e-05, |
|
"loss": 4.2326, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4156976420419e-05, |
|
"loss": 4.2192, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414859047290848e-05, |
|
"loss": 4.2226, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414020452539796e-05, |
|
"loss": 4.2267, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.413183495669117e-05, |
|
"loss": 4.2262, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.412346538798438e-05, |
|
"loss": 4.2196, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.411507944047386e-05, |
|
"loss": 4.2157, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.410669349296334e-05, |
|
"loss": 4.2235, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409830754545282e-05, |
|
"loss": 4.2176, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.40899215979423e-05, |
|
"loss": 4.2161, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.408153565043178e-05, |
|
"loss": 4.2146, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.407314970292126e-05, |
|
"loss": 4.2151, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.406476375541074e-05, |
|
"loss": 4.2273, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4056394186703955e-05, |
|
"loss": 4.2179, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4048008239193435e-05, |
|
"loss": 4.2144, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4039638670486644e-05, |
|
"loss": 4.2235, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4031252722976124e-05, |
|
"loss": 4.2325, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4022866775465604e-05, |
|
"loss": 4.229, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4014480827955084e-05, |
|
"loss": 4.2089, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.400609488044456e-05, |
|
"loss": 4.2173, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.399770893293404e-05, |
|
"loss": 4.2214, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398932298542352e-05, |
|
"loss": 4.2164, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398095341671673e-05, |
|
"loss": 4.2243, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.397256746920621e-05, |
|
"loss": 4.211, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.396418152169569e-05, |
|
"loss": 4.2196, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.395579557418517e-05, |
|
"loss": 4.2168, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.394740962667465e-05, |
|
"loss": 4.2198, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393902367916413e-05, |
|
"loss": 4.2063, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393065411045734e-05, |
|
"loss": 4.2193, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.392226816294682e-05, |
|
"loss": 4.2123, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39138822154363e-05, |
|
"loss": 4.2262, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.390549626792578e-05, |
|
"loss": 4.2143, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.389711032041526e-05, |
|
"loss": 4.221, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.388872437290474e-05, |
|
"loss": 4.206, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.388035480419795e-05, |
|
"loss": 4.2219, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.387196885668744e-05, |
|
"loss": 4.2172, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.386358290917692e-05, |
|
"loss": 4.2235, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.38551969616664e-05, |
|
"loss": 4.2068, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.384681101415588e-05, |
|
"loss": 4.2227, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383842506664536e-05, |
|
"loss": 4.211, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383003911913484e-05, |
|
"loss": 4.2268, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3821669550428046e-05, |
|
"loss": 4.2077, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3813283602917526e-05, |
|
"loss": 4.2185, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3804897655407006e-05, |
|
"loss": 4.2112, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3796511707896486e-05, |
|
"loss": 4.2068, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3788125760385966e-05, |
|
"loss": 4.2182, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3779739812875446e-05, |
|
"loss": 4.2147, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.377135386536492e-05, |
|
"loss": 4.2068, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3762984296658135e-05, |
|
"loss": 4.2136, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.375459834914762e-05, |
|
"loss": 4.215, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.2025299072265625, |
|
"eval_runtime": 295.0207, |
|
"eval_samples_per_second": 1293.438, |
|
"eval_steps_per_second": 40.421, |
|
"step": 381600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.37462124016371e-05, |
|
"loss": 4.2057, |
|
"step": 381952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3737826454126575e-05, |
|
"loss": 4.199, |
|
"step": 382464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3729440506616055e-05, |
|
"loss": 4.2105, |
|
"step": 382976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3721054559105535e-05, |
|
"loss": 4.2082, |
|
"step": 383488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3712668611595015e-05, |
|
"loss": 4.2192, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3704282664084495e-05, |
|
"loss": 4.2062, |
|
"step": 384512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3695896716573975e-05, |
|
"loss": 4.203, |
|
"step": 385024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3687510769063455e-05, |
|
"loss": 4.2031, |
|
"step": 385536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3679124821552935e-05, |
|
"loss": 4.2163, |
|
"step": 386048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3670738874042415e-05, |
|
"loss": 4.2032, |
|
"step": 386560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3662352926531895e-05, |
|
"loss": 4.2038, |
|
"step": 387072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3653966979021375e-05, |
|
"loss": 4.2022, |
|
"step": 387584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3645597410314584e-05, |
|
"loss": 4.2015, |
|
"step": 388096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.363721146280407e-05, |
|
"loss": 4.2026, |
|
"step": 388608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.362882551529355e-05, |
|
"loss": 4.2039, |
|
"step": 389120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.362043956778303e-05, |
|
"loss": 4.1886, |
|
"step": 389632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.361205362027251e-05, |
|
"loss": 4.1932, |
|
"step": 390144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.360366767276199e-05, |
|
"loss": 4.198, |
|
"step": 390656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3595281725251463e-05, |
|
"loss": 4.1887, |
|
"step": 391168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.358689577774094e-05, |
|
"loss": 4.2128, |
|
"step": 391680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.357850983023042e-05, |
|
"loss": 4.2054, |
|
"step": 392192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.35701238827199e-05, |
|
"loss": 4.1999, |
|
"step": 392704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.356173793520938e-05, |
|
"loss": 4.1956, |
|
"step": 393216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.355335198769886e-05, |
|
"loss": 4.2112, |
|
"step": 393728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.354498241899207e-05, |
|
"loss": 4.1965, |
|
"step": 394240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.353659647148155e-05, |
|
"loss": 4.1988, |
|
"step": 394752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.352821052397104e-05, |
|
"loss": 4.1903, |
|
"step": 395264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.351984095526425e-05, |
|
"loss": 4.1968, |
|
"step": 395776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.351145500775373e-05, |
|
"loss": 4.1937, |
|
"step": 396288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.350306906024321e-05, |
|
"loss": 4.1933, |
|
"step": 396800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.349468311273269e-05, |
|
"loss": 4.1983, |
|
"step": 397312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.348629716522217e-05, |
|
"loss": 4.202, |
|
"step": 397824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.347792759651538e-05, |
|
"loss": 4.205, |
|
"step": 398336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.346954164900486e-05, |
|
"loss": 4.2025, |
|
"step": 398848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.346115570149434e-05, |
|
"loss": 4.1906, |
|
"step": 399360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.345276975398382e-05, |
|
"loss": 4.2012, |
|
"step": 399872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3444400185277026e-05, |
|
"loss": 4.1934, |
|
"step": 400384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3436014237766506e-05, |
|
"loss": 4.1836, |
|
"step": 400896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.342762829025599e-05, |
|
"loss": 4.1882, |
|
"step": 401408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.341924234274547e-05, |
|
"loss": 4.1857, |
|
"step": 401920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.341085639523495e-05, |
|
"loss": 4.1894, |
|
"step": 402432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.340247044772443e-05, |
|
"loss": 4.2037, |
|
"step": 402944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.339408450021391e-05, |
|
"loss": 4.1889, |
|
"step": 403456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.338569855270339e-05, |
|
"loss": 4.1902, |
|
"step": 403968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.33773289839966e-05, |
|
"loss": 4.1968, |
|
"step": 404480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.336894303648608e-05, |
|
"loss": 4.1877, |
|
"step": 404992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.336055708897556e-05, |
|
"loss": 4.1941, |
|
"step": 405504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.335217114146504e-05, |
|
"loss": 4.1835, |
|
"step": 406016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.334378519395452e-05, |
|
"loss": 4.1707, |
|
"step": 406528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.333541562524773e-05, |
|
"loss": 4.1988, |
|
"step": 407040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.332702967773721e-05, |
|
"loss": 4.1894, |
|
"step": 407552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.331864373022669e-05, |
|
"loss": 4.1842, |
|
"step": 408064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.331025778271618e-05, |
|
"loss": 4.1843, |
|
"step": 408576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3301888214009386e-05, |
|
"loss": 4.1865, |
|
"step": 409088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3293502266498866e-05, |
|
"loss": 4.1725, |
|
"step": 409600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3285132697792075e-05, |
|
"loss": 4.1911, |
|
"step": 410112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3276746750281555e-05, |
|
"loss": 4.1819, |
|
"step": 410624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3268360802771035e-05, |
|
"loss": 4.1873, |
|
"step": 411136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3259974855260515e-05, |
|
"loss": 4.1806, |
|
"step": 411648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3251588907749995e-05, |
|
"loss": 4.1836, |
|
"step": 412160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.324320296023947e-05, |
|
"loss": 4.178, |
|
"step": 412672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.323481701272895e-05, |
|
"loss": 4.1869, |
|
"step": 413184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.322643106521843e-05, |
|
"loss": 4.1723, |
|
"step": 413696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3218061496511644e-05, |
|
"loss": 4.1811, |
|
"step": 414208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3209675549001124e-05, |
|
"loss": 4.1904, |
|
"step": 414720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3201289601490604e-05, |
|
"loss": 4.1814, |
|
"step": 415232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3192903653980084e-05, |
|
"loss": 4.1754, |
|
"step": 415744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.31845340852733e-05, |
|
"loss": 4.1732, |
|
"step": 416256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.317614813776277e-05, |
|
"loss": 4.1728, |
|
"step": 416768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.316776219025225e-05, |
|
"loss": 4.1883, |
|
"step": 417280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.315937624274173e-05, |
|
"loss": 4.192, |
|
"step": 417792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.315100667403494e-05, |
|
"loss": 4.1832, |
|
"step": 418304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.314262072652442e-05, |
|
"loss": 4.1898, |
|
"step": 418816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.31342347790139e-05, |
|
"loss": 4.1912, |
|
"step": 419328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.312584883150338e-05, |
|
"loss": 4.1848, |
|
"step": 419840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.31174792627966e-05, |
|
"loss": 4.1767, |
|
"step": 420352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3109109694089814e-05, |
|
"loss": 4.18, |
|
"step": 420864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3100723746579294e-05, |
|
"loss": 4.1777, |
|
"step": 421376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3092337799068773e-05, |
|
"loss": 4.1688, |
|
"step": 421888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.308395185155825e-05, |
|
"loss": 4.1797, |
|
"step": 422400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.307556590404773e-05, |
|
"loss": 4.1733, |
|
"step": 422912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3067179956537207e-05, |
|
"loss": 4.1901, |
|
"step": 423424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3058794009026687e-05, |
|
"loss": 4.1935, |
|
"step": 423936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3050408061516166e-05, |
|
"loss": 4.164, |
|
"step": 424448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3042022114005646e-05, |
|
"loss": 4.1863, |
|
"step": 424960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3033636166495126e-05, |
|
"loss": 4.1761, |
|
"step": 425472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3025250218984606e-05, |
|
"loss": 4.1803, |
|
"step": 425984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3016880650277815e-05, |
|
"loss": 4.1762, |
|
"step": 426496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.30084947027673e-05, |
|
"loss": 4.1686, |
|
"step": 427008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.300010875525678e-05, |
|
"loss": 4.177, |
|
"step": 427520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.299172280774626e-05, |
|
"loss": 4.1733, |
|
"step": 428032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.298333686023574e-05, |
|
"loss": 4.1743, |
|
"step": 428544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.297495091272522e-05, |
|
"loss": 4.1707, |
|
"step": 429056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.29665649652147e-05, |
|
"loss": 4.1784, |
|
"step": 429568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.295817901770418e-05, |
|
"loss": 4.1778, |
|
"step": 430080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.294980944899739e-05, |
|
"loss": 4.1754, |
|
"step": 430592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.294142350148687e-05, |
|
"loss": 4.1652, |
|
"step": 431104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.293303755397635e-05, |
|
"loss": 4.1721, |
|
"step": 431616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.292465160646583e-05, |
|
"loss": 4.1739, |
|
"step": 432128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2916265658955304e-05, |
|
"loss": 4.1802, |
|
"step": 432640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.290789609024852e-05, |
|
"loss": 4.1655, |
|
"step": 433152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2899510142738e-05, |
|
"loss": 4.1709, |
|
"step": 433664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2891124195227487e-05, |
|
"loss": 4.1672, |
|
"step": 434176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.288273824771696e-05, |
|
"loss": 4.175, |
|
"step": 434688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.287435230020644e-05, |
|
"loss": 4.1623, |
|
"step": 435200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.286596635269592e-05, |
|
"loss": 4.1667, |
|
"step": 435712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.285759678398913e-05, |
|
"loss": 4.1679, |
|
"step": 436224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.284921083647861e-05, |
|
"loss": 4.1665, |
|
"step": 436736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.284082488896809e-05, |
|
"loss": 4.1616, |
|
"step": 437248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.283243894145757e-05, |
|
"loss": 4.1667, |
|
"step": 437760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.282406937275078e-05, |
|
"loss": 4.1618, |
|
"step": 438272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.281568342524026e-05, |
|
"loss": 4.1686, |
|
"step": 438784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.280729747772974e-05, |
|
"loss": 4.1702, |
|
"step": 439296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2798911530219224e-05, |
|
"loss": 4.1582, |
|
"step": 439808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2790541961512434e-05, |
|
"loss": 4.1711, |
|
"step": 440320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.278217239280565e-05, |
|
"loss": 4.1806, |
|
"step": 440832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.277378644529513e-05, |
|
"loss": 4.1781, |
|
"step": 441344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.276541687658834e-05, |
|
"loss": 4.1569, |
|
"step": 441856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.275703092907782e-05, |
|
"loss": 4.165, |
|
"step": 442368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.27486449815673e-05, |
|
"loss": 4.1691, |
|
"step": 442880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.274025903405678e-05, |
|
"loss": 4.1625, |
|
"step": 443392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.273188946534999e-05, |
|
"loss": 4.1723, |
|
"step": 443904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.272350351783947e-05, |
|
"loss": 4.1584, |
|
"step": 444416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.271511757032895e-05, |
|
"loss": 4.1696, |
|
"step": 444928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.270673162281843e-05, |
|
"loss": 4.1629, |
|
"step": 445440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.269834567530791e-05, |
|
"loss": 4.1714, |
|
"step": 445952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.268995972779739e-05, |
|
"loss": 4.157, |
|
"step": 446464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.268157378028687e-05, |
|
"loss": 4.1625, |
|
"step": 446976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.267318783277635e-05, |
|
"loss": 4.166, |
|
"step": 447488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.266480188526583e-05, |
|
"loss": 4.1733, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2656432316559036e-05, |
|
"loss": 4.1664, |
|
"step": 448512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2648046369048516e-05, |
|
"loss": 4.1661, |
|
"step": 449024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2639660421537996e-05, |
|
"loss": 4.1561, |
|
"step": 449536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2631274474027476e-05, |
|
"loss": 4.1715, |
|
"step": 450048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2622888526516956e-05, |
|
"loss": 4.1653, |
|
"step": 450560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2614502579006436e-05, |
|
"loss": 4.1737, |
|
"step": 451072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2606133010299645e-05, |
|
"loss": 4.1601, |
|
"step": 451584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.259774706278913e-05, |
|
"loss": 4.1706, |
|
"step": 452096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.258936111527861e-05, |
|
"loss": 4.1599, |
|
"step": 452608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.258097516776809e-05, |
|
"loss": 4.1809, |
|
"step": 453120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.257258922025757e-05, |
|
"loss": 4.155, |
|
"step": 453632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.256420327274705e-05, |
|
"loss": 4.1712, |
|
"step": 454144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.255581732523653e-05, |
|
"loss": 4.1609, |
|
"step": 454656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.254743137772601e-05, |
|
"loss": 4.1597, |
|
"step": 455168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.253906180901922e-05, |
|
"loss": 4.1672, |
|
"step": 455680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.25306758615087e-05, |
|
"loss": 4.1612, |
|
"step": 456192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.252228991399818e-05, |
|
"loss": 4.1651, |
|
"step": 456704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.251390396648766e-05, |
|
"loss": 4.1585, |
|
"step": 457216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.250553439778087e-05, |
|
"loss": 4.1682, |
|
"step": 457728 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.161040306091309, |
|
"eval_runtime": 311.0389, |
|
"eval_samples_per_second": 1226.827, |
|
"eval_steps_per_second": 38.339, |
|
"step": 457920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2497164829074085e-05, |
|
"loss": 4.1598, |
|
"step": 458240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2488778881563565e-05, |
|
"loss": 4.149, |
|
"step": 458752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2480392934053045e-05, |
|
"loss": 4.1581, |
|
"step": 459264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2472006986542525e-05, |
|
"loss": 4.1583, |
|
"step": 459776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2463621039032005e-05, |
|
"loss": 4.1681, |
|
"step": 460288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2455235091521485e-05, |
|
"loss": 4.1609, |
|
"step": 460800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2446849144010965e-05, |
|
"loss": 4.1482, |
|
"step": 461312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.243846319650044e-05, |
|
"loss": 4.1589, |
|
"step": 461824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2430093627793654e-05, |
|
"loss": 4.1652, |
|
"step": 462336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2421707680283134e-05, |
|
"loss": 4.1563, |
|
"step": 462848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2413321732772614e-05, |
|
"loss": 4.1555, |
|
"step": 463360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.240493578526209e-05, |
|
"loss": 4.1531, |
|
"step": 463872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.23965662165553e-05, |
|
"loss": 4.1571, |
|
"step": 464384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.238818026904478e-05, |
|
"loss": 4.1519, |
|
"step": 464896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.237979432153426e-05, |
|
"loss": 4.158, |
|
"step": 465408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.237140837402374e-05, |
|
"loss": 4.141, |
|
"step": 465920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.236302242651322e-05, |
|
"loss": 4.1447, |
|
"step": 466432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.23546364790027e-05, |
|
"loss": 4.1485, |
|
"step": 466944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.234626691029591e-05, |
|
"loss": 4.1436, |
|
"step": 467456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.233788096278539e-05, |
|
"loss": 4.1638, |
|
"step": 467968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.232949501527487e-05, |
|
"loss": 4.1608, |
|
"step": 468480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.232110906776435e-05, |
|
"loss": 4.1528, |
|
"step": 468992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.231272312025383e-05, |
|
"loss": 4.1476, |
|
"step": 469504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.230433717274331e-05, |
|
"loss": 4.1603, |
|
"step": 470016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.229596760403652e-05, |
|
"loss": 4.1483, |
|
"step": 470528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2287581656526e-05, |
|
"loss": 4.153, |
|
"step": 471040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.227919570901549e-05, |
|
"loss": 4.1443, |
|
"step": 471552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.227080976150497e-05, |
|
"loss": 4.152, |
|
"step": 472064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.226242381399445e-05, |
|
"loss": 4.1491, |
|
"step": 472576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.225403786648393e-05, |
|
"loss": 4.1461, |
|
"step": 473088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.224565191897341e-05, |
|
"loss": 4.1508, |
|
"step": 473600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.223726597146289e-05, |
|
"loss": 4.1568, |
|
"step": 474112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2228896402756096e-05, |
|
"loss": 4.1548, |
|
"step": 474624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2220510455245576e-05, |
|
"loss": 4.1565, |
|
"step": 475136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2212124507735056e-05, |
|
"loss": 4.1439, |
|
"step": 475648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2203738560224536e-05, |
|
"loss": 4.1591, |
|
"step": 476160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2195352612714016e-05, |
|
"loss": 4.1477, |
|
"step": 476672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2186966665203496e-05, |
|
"loss": 4.1363, |
|
"step": 477184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2178580717692976e-05, |
|
"loss": 4.1425, |
|
"step": 477696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2170211148986185e-05, |
|
"loss": 4.1385, |
|
"step": 478208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.216182520147567e-05, |
|
"loss": 4.1454, |
|
"step": 478720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.215343925396515e-05, |
|
"loss": 4.1558, |
|
"step": 479232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2145053306454625e-05, |
|
"loss": 4.1422, |
|
"step": 479744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2136667358944105e-05, |
|
"loss": 4.1454, |
|
"step": 480256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2128281411433585e-05, |
|
"loss": 4.1511, |
|
"step": 480768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.21199118427268e-05, |
|
"loss": 4.1431, |
|
"step": 481280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2111525895216274e-05, |
|
"loss": 4.1505, |
|
"step": 481792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2103139947705754e-05, |
|
"loss": 4.1364, |
|
"step": 482304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2094754000195234e-05, |
|
"loss": 4.1269, |
|
"step": 482816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2086368052684714e-05, |
|
"loss": 4.1534, |
|
"step": 483328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.207799848397792e-05, |
|
"loss": 4.1481, |
|
"step": 483840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.206961253646741e-05, |
|
"loss": 4.1356, |
|
"step": 484352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.206122658895689e-05, |
|
"loss": 4.1461, |
|
"step": 484864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.205284064144637e-05, |
|
"loss": 4.1393, |
|
"step": 485376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.204447107273958e-05, |
|
"loss": 4.1298, |
|
"step": 485888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.203608512522906e-05, |
|
"loss": 4.1417, |
|
"step": 486400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.202769917771854e-05, |
|
"loss": 4.1399, |
|
"step": 486912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.201931323020802e-05, |
|
"loss": 4.1426, |
|
"step": 487424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.20109272826975e-05, |
|
"loss": 4.1356, |
|
"step": 487936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.200254133518698e-05, |
|
"loss": 4.1398, |
|
"step": 488448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.199415538767646e-05, |
|
"loss": 4.1344, |
|
"step": 488960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.198578581896967e-05, |
|
"loss": 4.1453, |
|
"step": 489472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.197739987145915e-05, |
|
"loss": 4.1279, |
|
"step": 489984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.196901392394863e-05, |
|
"loss": 4.1351, |
|
"step": 490496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.196062797643811e-05, |
|
"loss": 4.1451, |
|
"step": 491008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.195224202892759e-05, |
|
"loss": 4.1384, |
|
"step": 491520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1943856081417074e-05, |
|
"loss": 4.1335, |
|
"step": 492032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1935470133906554e-05, |
|
"loss": 4.13, |
|
"step": 492544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1927084186396034e-05, |
|
"loss": 4.1274, |
|
"step": 493056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1918698238885514e-05, |
|
"loss": 4.1402, |
|
"step": 493568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.191032867017872e-05, |
|
"loss": 4.1503, |
|
"step": 494080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.19019427226682e-05, |
|
"loss": 4.1402, |
|
"step": 494592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.189357315396141e-05, |
|
"loss": 4.1465, |
|
"step": 495104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.188518720645089e-05, |
|
"loss": 4.1453, |
|
"step": 495616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.187680125894037e-05, |
|
"loss": 4.1469, |
|
"step": 496128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.186841531142985e-05, |
|
"loss": 4.1373, |
|
"step": 496640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.186002936391933e-05, |
|
"loss": 4.1354, |
|
"step": 497152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.185165979521254e-05, |
|
"loss": 4.1359, |
|
"step": 497664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.184327384770203e-05, |
|
"loss": 4.122, |
|
"step": 498176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.183488790019151e-05, |
|
"loss": 4.1391, |
|
"step": 498688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.182650195268099e-05, |
|
"loss": 4.1328, |
|
"step": 499200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.181811600517046e-05, |
|
"loss": 4.1469, |
|
"step": 499712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.180973005765994e-05, |
|
"loss": 4.1477, |
|
"step": 500224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.180136048895316e-05, |
|
"loss": 4.1214, |
|
"step": 500736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.179297454144264e-05, |
|
"loss": 4.1483, |
|
"step": 501248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.178458859393211e-05, |
|
"loss": 4.1338, |
|
"step": 501760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.177620264642159e-05, |
|
"loss": 4.1379, |
|
"step": 502272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.176781669891107e-05, |
|
"loss": 4.134, |
|
"step": 502784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.175943075140055e-05, |
|
"loss": 4.1285, |
|
"step": 503296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.175104480389003e-05, |
|
"loss": 4.1358, |
|
"step": 503808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.174265885637951e-05, |
|
"loss": 4.1311, |
|
"step": 504320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1734289287672726e-05, |
|
"loss": 4.133, |
|
"step": 504832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1725903340162206e-05, |
|
"loss": 4.1311, |
|
"step": 505344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1717533771455415e-05, |
|
"loss": 4.1362, |
|
"step": 505856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1709147823944895e-05, |
|
"loss": 4.1406, |
|
"step": 506368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1700761876434375e-05, |
|
"loss": 4.1273, |
|
"step": 506880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1692375928923855e-05, |
|
"loss": 4.1278, |
|
"step": 507392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1683989981413334e-05, |
|
"loss": 4.13, |
|
"step": 507904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1675604033902814e-05, |
|
"loss": 4.1294, |
|
"step": 508416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1667218086392294e-05, |
|
"loss": 4.1419, |
|
"step": 508928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1658832138881774e-05, |
|
"loss": 4.1278, |
|
"step": 509440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1650462570174983e-05, |
|
"loss": 4.1278, |
|
"step": 509952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.164207662266446e-05, |
|
"loss": 4.1261, |
|
"step": 510464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.163369067515395e-05, |
|
"loss": 4.1379, |
|
"step": 510976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.162530472764343e-05, |
|
"loss": 4.1173, |
|
"step": 511488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.161691878013291e-05, |
|
"loss": 4.1253, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.160854921142612e-05, |
|
"loss": 4.1265, |
|
"step": 512512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.16001632639156e-05, |
|
"loss": 4.1267, |
|
"step": 513024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.159177731640508e-05, |
|
"loss": 4.12, |
|
"step": 513536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.158339136889456e-05, |
|
"loss": 4.1255, |
|
"step": 514048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.157502180018777e-05, |
|
"loss": 4.1195, |
|
"step": 514560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.156663585267725e-05, |
|
"loss": 4.1286, |
|
"step": 515072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.155824990516673e-05, |
|
"loss": 4.1293, |
|
"step": 515584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.154986395765621e-05, |
|
"loss": 4.1208, |
|
"step": 516096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.154149438894942e-05, |
|
"loss": 4.1266, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1533108441438904e-05, |
|
"loss": 4.1392, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.152473887273211e-05, |
|
"loss": 4.1414, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.151635292522159e-05, |
|
"loss": 4.1189, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.150796697771107e-05, |
|
"loss": 4.123, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.149958103020055e-05, |
|
"loss": 4.1304, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.149119508269003e-05, |
|
"loss": 4.1196, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.148284189278697e-05, |
|
"loss": 4.1358, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.147445594527645e-05, |
|
"loss": 4.1115, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.146606999776593e-05, |
|
"loss": 4.1339, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.145768405025541e-05, |
|
"loss": 4.1203, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.144929810274489e-05, |
|
"loss": 4.1327, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.144091215523437e-05, |
|
"loss": 4.1169, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.143252620772386e-05, |
|
"loss": 4.1237, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.142414026021334e-05, |
|
"loss": 4.1217, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.141575431270282e-05, |
|
"loss": 4.1342, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.14073683651923e-05, |
|
"loss": 4.1227, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.139898241768177e-05, |
|
"loss": 4.1295, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.139059647017125e-05, |
|
"loss": 4.1179, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.138221052266073e-05, |
|
"loss": 4.1347, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1373840953953946e-05, |
|
"loss": 4.1245, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.136545500644342e-05, |
|
"loss": 4.1303, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1357085437736635e-05, |
|
"loss": 4.1238, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1348699490226115e-05, |
|
"loss": 4.13, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1340313542715595e-05, |
|
"loss": 4.1222, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1331927595205075e-05, |
|
"loss": 4.1385, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1323541647694555e-05, |
|
"loss": 4.1172, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1315155700184035e-05, |
|
"loss": 4.1316, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1306769752673515e-05, |
|
"loss": 4.1223, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1298383805162995e-05, |
|
"loss": 4.1233, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1289997857652475e-05, |
|
"loss": 4.1218, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1281628288945684e-05, |
|
"loss": 4.1221, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1273242341435164e-05, |
|
"loss": 4.125, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.126487277272837e-05, |
|
"loss": 4.1218, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.125648682521785e-05, |
|
"loss": 4.1327, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.130150318145752, |
|
"eval_runtime": 308.9439, |
|
"eval_samples_per_second": 1235.147, |
|
"eval_steps_per_second": 38.599, |
|
"step": 534240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.124810087770733e-05, |
|
"loss": 4.1213, |
|
"step": 534528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.123971493019681e-05, |
|
"loss": 4.1128, |
|
"step": 535040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.123132898268629e-05, |
|
"loss": 4.1199, |
|
"step": 535552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.122294303517577e-05, |
|
"loss": 4.1176, |
|
"step": 536064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.121455708766526e-05, |
|
"loss": 4.1285, |
|
"step": 536576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.120617114015474e-05, |
|
"loss": 4.125, |
|
"step": 537088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.119778519264422e-05, |
|
"loss": 4.118, |
|
"step": 537600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.11893992451337e-05, |
|
"loss": 4.1157, |
|
"step": 538112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.118101329762318e-05, |
|
"loss": 4.1276, |
|
"step": 538624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.117262735011266e-05, |
|
"loss": 4.1198, |
|
"step": 539136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.116424140260213e-05, |
|
"loss": 4.1164, |
|
"step": 539648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.115585545509161e-05, |
|
"loss": 4.1137, |
|
"step": 540160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.114748588638483e-05, |
|
"loss": 4.1199, |
|
"step": 540672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.11390999388743e-05, |
|
"loss": 4.1147, |
|
"step": 541184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113071399136378e-05, |
|
"loss": 4.1185, |
|
"step": 541696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.112232804385326e-05, |
|
"loss": 4.1016, |
|
"step": 542208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.111394209634274e-05, |
|
"loss": 4.1092, |
|
"step": 542720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.110555614883223e-05, |
|
"loss": 4.1114, |
|
"step": 543232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.109717020132171e-05, |
|
"loss": 4.1065, |
|
"step": 543744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.108878425381119e-05, |
|
"loss": 4.1243, |
|
"step": 544256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.108039830630067e-05, |
|
"loss": 4.1241, |
|
"step": 544768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.107202873759388e-05, |
|
"loss": 4.1152, |
|
"step": 545280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.106364279008336e-05, |
|
"loss": 4.1094, |
|
"step": 545792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.105525684257284e-05, |
|
"loss": 4.1237, |
|
"step": 546304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.104687089506232e-05, |
|
"loss": 4.1122, |
|
"step": 546816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.10384849475518e-05, |
|
"loss": 4.115, |
|
"step": 547328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.103009900004128e-05, |
|
"loss": 4.1052, |
|
"step": 547840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1021729431334486e-05, |
|
"loss": 4.1164, |
|
"step": 548352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1013343483823966e-05, |
|
"loss": 4.1158, |
|
"step": 548864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1004957536313446e-05, |
|
"loss": 4.1041, |
|
"step": 549376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0996571588802926e-05, |
|
"loss": 4.112, |
|
"step": 549888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0988185641292406e-05, |
|
"loss": 4.1202, |
|
"step": 550400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.097979969378189e-05, |
|
"loss": 4.1142, |
|
"step": 550912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.097141374627137e-05, |
|
"loss": 4.1252, |
|
"step": 551424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0963027798760846e-05, |
|
"loss": 4.1086, |
|
"step": 551936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0954641851250326e-05, |
|
"loss": 4.1184, |
|
"step": 552448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.094628866134727e-05, |
|
"loss": 4.113, |
|
"step": 552960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.093790271383675e-05, |
|
"loss": 4.1014, |
|
"step": 553472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.092951676632623e-05, |
|
"loss": 4.1077, |
|
"step": 553984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.092113081881571e-05, |
|
"loss": 4.1017, |
|
"step": 554496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.091274487130519e-05, |
|
"loss": 4.1056, |
|
"step": 555008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0904358923794664e-05, |
|
"loss": 4.1193, |
|
"step": 555520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.089597297628415e-05, |
|
"loss": 4.1083, |
|
"step": 556032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.088758702877363e-05, |
|
"loss": 4.1076, |
|
"step": 556544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.087920108126311e-05, |
|
"loss": 4.1149, |
|
"step": 557056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.087083151255632e-05, |
|
"loss": 4.1077, |
|
"step": 557568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.08624455650458e-05, |
|
"loss": 4.109, |
|
"step": 558080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0854075996339015e-05, |
|
"loss": 4.1056, |
|
"step": 558592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0845690048828495e-05, |
|
"loss": 4.0837, |
|
"step": 559104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.083730410131797e-05, |
|
"loss": 4.1236, |
|
"step": 559616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.082891815380745e-05, |
|
"loss": 4.1059, |
|
"step": 560128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.082053220629693e-05, |
|
"loss": 4.1072, |
|
"step": 560640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.081214625878641e-05, |
|
"loss": 4.1053, |
|
"step": 561152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.080376031127589e-05, |
|
"loss": 4.1048, |
|
"step": 561664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.079537436376537e-05, |
|
"loss": 4.0954, |
|
"step": 562176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0787004795058584e-05, |
|
"loss": 4.1012, |
|
"step": 562688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0778618847548064e-05, |
|
"loss": 4.1054, |
|
"step": 563200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0770232900037544e-05, |
|
"loss": 4.1068, |
|
"step": 563712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0761846952527024e-05, |
|
"loss": 4.1084, |
|
"step": 564224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0753461005016504e-05, |
|
"loss": 4.0983, |
|
"step": 564736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0745075057505984e-05, |
|
"loss": 4.0992, |
|
"step": 565248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0736689109995464e-05, |
|
"loss": 4.1147, |
|
"step": 565760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.072831954128867e-05, |
|
"loss": 4.0881, |
|
"step": 566272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.071993359377815e-05, |
|
"loss": 4.099, |
|
"step": 566784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.071154764626763e-05, |
|
"loss": 4.1079, |
|
"step": 567296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.070316169875711e-05, |
|
"loss": 4.1042, |
|
"step": 567808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.069477575124659e-05, |
|
"loss": 4.0975, |
|
"step": 568320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.068638980373607e-05, |
|
"loss": 4.0996, |
|
"step": 568832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.067800385622555e-05, |
|
"loss": 4.0884, |
|
"step": 569344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.066961790871503e-05, |
|
"loss": 4.1038, |
|
"step": 569856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.066123196120451e-05, |
|
"loss": 4.1163, |
|
"step": 570368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.065287877130146e-05, |
|
"loss": 4.1061, |
|
"step": 570880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.064449282379094e-05, |
|
"loss": 4.1118, |
|
"step": 571392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.063610687628042e-05, |
|
"loss": 4.1116, |
|
"step": 571904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.06277209287699e-05, |
|
"loss": 4.1156, |
|
"step": 572416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0619351360063106e-05, |
|
"loss": 4.0988, |
|
"step": 572928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0610965412552586e-05, |
|
"loss": 4.1028, |
|
"step": 573440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0602579465042066e-05, |
|
"loss": 4.1025, |
|
"step": 573952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0594193517531546e-05, |
|
"loss": 4.0846, |
|
"step": 574464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0585807570021026e-05, |
|
"loss": 4.1028, |
|
"step": 574976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0577421622510506e-05, |
|
"loss": 4.099, |
|
"step": 575488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0569035674999986e-05, |
|
"loss": 4.1082, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0560649727489466e-05, |
|
"loss": 4.1172, |
|
"step": 576512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.055228015878268e-05, |
|
"loss": 4.0922, |
|
"step": 577024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0543894211272155e-05, |
|
"loss": 4.1076, |
|
"step": 577536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0535508263761635e-05, |
|
"loss": 4.1003, |
|
"step": 578048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0527122316251115e-05, |
|
"loss": 4.1049, |
|
"step": 578560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0518736368740595e-05, |
|
"loss": 4.0973, |
|
"step": 579072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0510350421230075e-05, |
|
"loss": 4.0975, |
|
"step": 579584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0501964473719555e-05, |
|
"loss": 4.0978, |
|
"step": 580096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0493578526209035e-05, |
|
"loss": 4.0974, |
|
"step": 580608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0485208957502244e-05, |
|
"loss": 4.0982, |
|
"step": 581120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0476823009991724e-05, |
|
"loss": 4.0952, |
|
"step": 581632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0468437062481204e-05, |
|
"loss": 4.1064, |
|
"step": 582144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.046005111497069e-05, |
|
"loss": 4.1046, |
|
"step": 582656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.045166516746017e-05, |
|
"loss": 4.0935, |
|
"step": 583168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.044327921994965e-05, |
|
"loss": 4.0916, |
|
"step": 583680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.043489327243913e-05, |
|
"loss": 4.0956, |
|
"step": 584192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.042650732492861e-05, |
|
"loss": 4.1011, |
|
"step": 584704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.041812137741809e-05, |
|
"loss": 4.1034, |
|
"step": 585216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.04097518087113e-05, |
|
"loss": 4.0976, |
|
"step": 585728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.040136586120078e-05, |
|
"loss": 4.0923, |
|
"step": 586240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.039297991369026e-05, |
|
"loss": 4.094, |
|
"step": 586752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.038459396617974e-05, |
|
"loss": 4.1091, |
|
"step": 587264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.037620801866921e-05, |
|
"loss": 4.0798, |
|
"step": 587776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.036783844996243e-05, |
|
"loss": 4.0944, |
|
"step": 588288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035945250245191e-05, |
|
"loss": 4.0883, |
|
"step": 588800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035106655494139e-05, |
|
"loss": 4.0964, |
|
"step": 589312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.034268060743087e-05, |
|
"loss": 4.0863, |
|
"step": 589824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0334311038724084e-05, |
|
"loss": 4.099, |
|
"step": 590336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0325925091213564e-05, |
|
"loss": 4.085, |
|
"step": 590848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0317539143703044e-05, |
|
"loss": 4.097, |
|
"step": 591360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.030915319619252e-05, |
|
"loss": 4.0978, |
|
"step": 591872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0300767248682e-05, |
|
"loss": 4.0865, |
|
"step": 592384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.029239767997521e-05, |
|
"loss": 4.096, |
|
"step": 592896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0284011732464686e-05, |
|
"loss": 4.1093, |
|
"step": 593408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0275625784954166e-05, |
|
"loss": 4.0983, |
|
"step": 593920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0267239837443646e-05, |
|
"loss": 4.0958, |
|
"step": 594432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.025887026873686e-05, |
|
"loss": 4.0822, |
|
"step": 594944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.025048432122634e-05, |
|
"loss": 4.0986, |
|
"step": 595456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.024209837371582e-05, |
|
"loss": 4.096, |
|
"step": 595968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.023372880500904e-05, |
|
"loss": 4.0984, |
|
"step": 596480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.022534285749852e-05, |
|
"loss": 4.0796, |
|
"step": 596992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.021695690998799e-05, |
|
"loss": 4.1014, |
|
"step": 597504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.020857096247747e-05, |
|
"loss": 4.09, |
|
"step": 598016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.020018501496695e-05, |
|
"loss": 4.0975, |
|
"step": 598528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.019179906745643e-05, |
|
"loss": 4.0861, |
|
"step": 599040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.018341311994591e-05, |
|
"loss": 4.0939, |
|
"step": 599552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.017502717243539e-05, |
|
"loss": 4.088, |
|
"step": 600064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.01666576037286e-05, |
|
"loss": 4.1037, |
|
"step": 600576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.015827165621808e-05, |
|
"loss": 4.0935, |
|
"step": 601088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0149902087511296e-05, |
|
"loss": 4.096, |
|
"step": 601600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0141516140000776e-05, |
|
"loss": 4.0834, |
|
"step": 602112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0133130192490256e-05, |
|
"loss": 4.1038, |
|
"step": 602624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0124744244979736e-05, |
|
"loss": 4.093, |
|
"step": 603136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0116358297469216e-05, |
|
"loss": 4.0984, |
|
"step": 603648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0107972349958696e-05, |
|
"loss": 4.0942, |
|
"step": 604160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0099586402448175e-05, |
|
"loss": 4.0984, |
|
"step": 604672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0091216833741385e-05, |
|
"loss": 4.0889, |
|
"step": 605184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0082830886230865e-05, |
|
"loss": 4.1062, |
|
"step": 605696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0074444938720344e-05, |
|
"loss": 4.0867, |
|
"step": 606208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0066058991209824e-05, |
|
"loss": 4.1008, |
|
"step": 606720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0057673043699304e-05, |
|
"loss": 4.0962, |
|
"step": 607232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0049287096188784e-05, |
|
"loss": 4.0896, |
|
"step": 607744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0040901148678264e-05, |
|
"loss": 4.0891, |
|
"step": 608256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.003253157997148e-05, |
|
"loss": 4.0915, |
|
"step": 608768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.002414563246096e-05, |
|
"loss": 4.0954, |
|
"step": 609280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.001575968495044e-05, |
|
"loss": 4.091, |
|
"step": 609792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.000737373743992e-05, |
|
"loss": 4.0996, |
|
"step": 610304 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.106700420379639, |
|
"eval_runtime": 292.0792, |
|
"eval_samples_per_second": 1306.464, |
|
"eval_steps_per_second": 40.828, |
|
"step": 610560 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 2.5050369276340938e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|