{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 710,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2.938151902453357e-07,
      "loss": 1.5844,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.876303804906714e-07,
      "loss": 1.1717,
      "step": 2
    },
    {
      "epoch": 0.02,
      "learning_rate": 8.814455707360072e-07,
      "loss": 1.1709,
      "step": 3
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.1752607609813429e-06,
      "loss": 1.5208,
      "step": 4
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.4690759512266787e-06,
      "loss": 1.2788,
      "step": 5
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.7628911414720144e-06,
      "loss": 1.39,
      "step": 6
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.05670633171735e-06,
      "loss": 1.4615,
      "step": 7
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.3505215219626858e-06,
      "loss": 1.2082,
      "step": 8
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.6443367122080216e-06,
      "loss": 1.3854,
      "step": 9
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.9381519024533573e-06,
      "loss": 1.1711,
      "step": 10
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.2319670926986927e-06,
      "loss": 1.3714,
      "step": 11
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.525782282944029e-06,
      "loss": 1.6743,
      "step": 12
    },
    {
      "epoch": 0.09,
      "learning_rate": 3.819597473189365e-06,
      "loss": 1.2937,
      "step": 13
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.1134126634347e-06,
      "loss": 1.2628,
      "step": 14
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.407227853680035e-06,
      "loss": 1.3822,
      "step": 15
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.7010430439253716e-06,
      "loss": 1.3788,
      "step": 16
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.994858234170707e-06,
      "loss": 1.5392,
      "step": 17
    },
    {
      "epoch": 0.13,
      "learning_rate": 5.288673424416043e-06,
      "loss": 1.3841,
      "step": 18
    },
    {
      "epoch": 0.13,
      "learning_rate": 5.582488614661379e-06,
      "loss": 1.3883,
      "step": 19
    },
    {
      "epoch": 0.14,
      "learning_rate": 5.876303804906715e-06,
      "loss": 1.444,
      "step": 20
    },
    {
      "epoch": 0.15,
      "learning_rate": 6.17011899515205e-06,
      "loss": 1.0966,
      "step": 21
    },
    {
      "epoch": 0.15,
      "learning_rate": 6.463934185397385e-06,
      "loss": 1.4783,
      "step": 22
    },
    {
      "epoch": 0.16,
      "learning_rate": 6.757749375642722e-06,
      "loss": 1.3966,
      "step": 23
    },
    {
      "epoch": 0.17,
      "learning_rate": 7.051564565888058e-06,
      "loss": 1.602,
      "step": 24
    },
    {
      "epoch": 0.18,
      "learning_rate": 7.345379756133393e-06,
      "loss": 1.4453,
      "step": 25
    },
    {
      "epoch": 0.18,
      "learning_rate": 7.63919494637873e-06,
      "loss": 1.2995,
      "step": 26
    },
    {
      "epoch": 0.19,
      "learning_rate": 7.933010136624066e-06,
      "loss": 1.318,
      "step": 27
    },
    {
      "epoch": 0.2,
      "learning_rate": 8.2268253268694e-06,
      "loss": 1.1796,
      "step": 28
    },
    {
      "epoch": 0.2,
      "learning_rate": 8.520640517114736e-06,
      "loss": 1.31,
      "step": 29
    },
    {
      "epoch": 0.21,
      "learning_rate": 8.81445570736007e-06,
      "loss": 1.4279,
      "step": 30
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.108270897605407e-06,
      "loss": 1.5326,
      "step": 31
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.402086087850743e-06,
      "loss": 1.2816,
      "step": 32
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.695901278096078e-06,
      "loss": 1.4097,
      "step": 33
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.989716468341414e-06,
      "loss": 1.3857,
      "step": 34
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.028353165858675e-05,
      "loss": 1.5109,
      "step": 35
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.0577346848832086e-05,
      "loss": 1.5855,
      "step": 36
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.0871162039077422e-05,
      "loss": 1.4847,
      "step": 37
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.1164977229322759e-05,
      "loss": 1.3497,
      "step": 38
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.1458792419568093e-05,
      "loss": 1.3788,
      "step": 39
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.175260760981343e-05,
      "loss": 1.4044,
      "step": 40
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.2046422800058766e-05,
      "loss": 1.3204,
      "step": 41
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.23402379903041e-05,
      "loss": 1.4074,
      "step": 42
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.2634053180549435e-05,
      "loss": 1.3244,
      "step": 43
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.292786837079477e-05,
      "loss": 1.4781,
      "step": 44
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.3221683561040107e-05,
      "loss": 1.166,
      "step": 45
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.3515498751285443e-05,
      "loss": 1.5956,
      "step": 46
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.380931394153078e-05,
      "loss": 1.3736,
      "step": 47
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.4103129131776116e-05,
      "loss": 1.5388,
      "step": 48
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.439694432202145e-05,
      "loss": 1.4948,
      "step": 49
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.4690759512266786e-05,
      "loss": 1.3604,
      "step": 50
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.4984574702512122e-05,
      "loss": 1.5697,
      "step": 51
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.527838989275746e-05,
      "loss": 1.3266,
      "step": 52
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.5572205083002795e-05,
      "loss": 1.2487,
      "step": 53
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.586602027324813e-05,
      "loss": 1.3963,
      "step": 54
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.6159835463493464e-05,
      "loss": 1.3293,
      "step": 55
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.64536506537388e-05,
      "loss": 1.5669,
      "step": 56
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.6747465843984136e-05,
      "loss": 1.3252,
      "step": 57
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.7041281034229472e-05,
      "loss": 1.3639,
      "step": 58
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.733509622447481e-05,
      "loss": 1.3889,
      "step": 59
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.762891141472014e-05,
      "loss": 1.3824,
      "step": 60
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7922726604965478e-05,
      "loss": 1.4756,
      "step": 61
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8216541795210814e-05,
      "loss": 1.2453,
      "step": 62
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.851035698545615e-05,
      "loss": 1.454,
      "step": 63
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8804172175701486e-05,
      "loss": 1.26,
      "step": 64
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9097987365946822e-05,
      "loss": 1.2652,
      "step": 65
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9391802556192155e-05,
      "loss": 1.424,
      "step": 66
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.968561774643749e-05,
      "loss": 1.4516,
      "step": 67
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9979432936682828e-05,
      "loss": 1.4799,
      "step": 68
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.0273248126928164e-05,
      "loss": 1.3161,
      "step": 69
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.05670633171735e-05,
      "loss": 1.4528,
      "step": 70
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.0860878507418836e-05,
      "loss": 1.54,
      "step": 71
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.1154693697664172e-05,
      "loss": 1.3282,
      "step": 72
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.144850888790951e-05,
      "loss": 1.2715,
      "step": 73
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.1742324078154845e-05,
      "loss": 1.616,
      "step": 74
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.203613926840018e-05,
      "loss": 1.451,
      "step": 75
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.2329954458645517e-05,
      "loss": 1.3391,
      "step": 76
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.2623769648890853e-05,
      "loss": 1.3733,
      "step": 77
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.2917584839136186e-05,
      "loss": 1.2611,
      "step": 78
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.3211400029381523e-05,
      "loss": 1.2367,
      "step": 79
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.350521521962686e-05,
      "loss": 1.3784,
      "step": 80
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.3799030409872195e-05,
      "loss": 1.2371,
      "step": 81
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.409284560011753e-05,
      "loss": 1.326,
      "step": 82
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.4386660790362864e-05,
      "loss": 1.5421,
      "step": 83
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.46804759806082e-05,
      "loss": 1.5432,
      "step": 84
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.4974291170853536e-05,
      "loss": 1.4241,
      "step": 85
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.526810636109887e-05,
      "loss": 1.4539,
      "step": 86
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.556192155134421e-05,
      "loss": 1.3261,
      "step": 87
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.585573674158954e-05,
      "loss": 1.3152,
      "step": 88
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.614955193183488e-05,
      "loss": 1.4292,
      "step": 89
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.6443367122080214e-05,
      "loss": 1.1705,
      "step": 90
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.6737182312325554e-05,
      "loss": 1.1443,
      "step": 91
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.7030997502570886e-05,
      "loss": 1.4355,
      "step": 92
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.732481269281622e-05,
      "loss": 1.3192,
      "step": 93
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.761862788306156e-05,
      "loss": 1.4356,
      "step": 94
    },
    {
      "epoch": 0.67,
      "learning_rate": 2.791244307330689e-05,
      "loss": 1.1797,
      "step": 95
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.820625826355223e-05,
      "loss": 1.1635,
      "step": 96
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.8500073453797564e-05,
      "loss": 1.2257,
      "step": 97
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.87938886440429e-05,
      "loss": 1.322,
      "step": 98
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.9087703834288236e-05,
      "loss": 1.3281,
      "step": 99
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.9381519024533573e-05,
      "loss": 1.0323,
      "step": 100
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.967533421477891e-05,
      "loss": 1.1299,
      "step": 101
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.9969149405024245e-05,
      "loss": 1.2833,
      "step": 102
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.0262964595269578e-05,
      "loss": 1.3501,
      "step": 103
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.055677978551492e-05,
      "loss": 1.1508,
      "step": 104
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.085059497576025e-05,
      "loss": 1.418,
      "step": 105
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.114441016600559e-05,
      "loss": 1.2717,
      "step": 106
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.143822535625092e-05,
      "loss": 1.3274,
      "step": 107
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.173204054649626e-05,
      "loss": 1.3633,
      "step": 108
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.202585573674159e-05,
      "loss": 1.2618,
      "step": 109
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.231967092698693e-05,
      "loss": 1.4994,
      "step": 110
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.2613486117232264e-05,
      "loss": 1.4325,
      "step": 111
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.29073013074776e-05,
      "loss": 1.3578,
      "step": 112
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.3201116497722936e-05,
      "loss": 1.2844,
      "step": 113
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.349493168796827e-05,
      "loss": 1.1555,
      "step": 114
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.378874687821361e-05,
      "loss": 1.3242,
      "step": 115
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.4082562068458945e-05,
      "loss": 1.1545,
      "step": 116
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.437637725870428e-05,
      "loss": 1.1827,
      "step": 117
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.467019244894962e-05,
      "loss": 1.5443,
      "step": 118
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.4964007639194954e-05,
      "loss": 1.1105,
      "step": 119
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.525782282944028e-05,
      "loss": 1.3643,
      "step": 120
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5551638019685626e-05,
      "loss": 1.335,
      "step": 121
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5845453209930955e-05,
      "loss": 1.2855,
      "step": 122
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.61392684001763e-05,
      "loss": 1.5569,
      "step": 123
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.643308359042163e-05,
      "loss": 1.3648,
      "step": 124
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.672689878066697e-05,
      "loss": 1.1981,
      "step": 125
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.70207139709123e-05,
      "loss": 1.281,
      "step": 126
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.7314529161157636e-05,
      "loss": 1.2883,
      "step": 127
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.760834435140297e-05,
      "loss": 1.4385,
      "step": 128
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.790215954164831e-05,
      "loss": 1.4013,
      "step": 129
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.8195974731893645e-05,
      "loss": 1.3303,
      "step": 130
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.848978992213898e-05,
      "loss": 1.3222,
      "step": 131
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.878360511238431e-05,
      "loss": 1.3686,
      "step": 132
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.9077420302629654e-05,
      "loss": 1.3684,
      "step": 133
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.937123549287498e-05,
      "loss": 1.3496,
      "step": 134
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.9665050683120326e-05,
      "loss": 1.2483,
      "step": 135
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.9958865873365655e-05,
      "loss": 1.3186,
      "step": 136
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.025268106361099e-05,
      "loss": 1.3585,
      "step": 137
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.054649625385633e-05,
      "loss": 1.1332,
      "step": 138
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.0840311444101664e-05,
      "loss": 1.3868,
      "step": 139
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.1134126634347e-05,
      "loss": 1.5011,
      "step": 140
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.1427941824592336e-05,
      "loss": 1.2125,
      "step": 141
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.172175701483767e-05,
      "loss": 1.2079,
      "step": 142
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.201557220508301e-05,
      "loss": 1.5137,
      "step": 143
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.2309387395328345e-05,
      "loss": 1.0962,
      "step": 144
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.260320258557368e-05,
      "loss": 1.0864,
      "step": 145
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.289701777581902e-05,
      "loss": 1.433,
      "step": 146
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.3190832966064354e-05,
      "loss": 1.1769,
      "step": 147
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.348464815630969e-05,
      "loss": 1.3221,
      "step": 148
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.377846334655502e-05,
      "loss": 1.3977,
      "step": 149
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.407227853680036e-05,
      "loss": 1.1165,
      "step": 150
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.436609372704569e-05,
      "loss": 1.2971,
      "step": 151
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.4659908917291035e-05,
      "loss": 1.0846,
      "step": 152
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.4953724107536364e-05,
      "loss": 1.2692,
      "step": 153
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.524753929778171e-05,
      "loss": 1.6022,
      "step": 154
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.5541354488027036e-05,
      "loss": 1.2185,
      "step": 155
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.583516967827237e-05,
      "loss": 1.1814,
      "step": 156
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.612898486851771e-05,
      "loss": 1.2761,
      "step": 157
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.6422800058763045e-05,
      "loss": 1.3127,
      "step": 158
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.671661524900838e-05,
      "loss": 1.4407,
      "step": 159
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.701043043925372e-05,
      "loss": 1.3185,
      "step": 160
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.730424562949905e-05,
      "loss": 1.2999,
      "step": 161
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.759806081974439e-05,
      "loss": 1.364,
      "step": 162
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.789187600998972e-05,
      "loss": 1.0288,
      "step": 163
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.818569120023506e-05,
      "loss": 1.4062,
      "step": 164
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.847950639048039e-05,
      "loss": 1.3316,
      "step": 165
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.877332158072573e-05,
      "loss": 1.5267,
      "step": 166
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.9067136770971064e-05,
      "loss": 1.3439,
      "step": 167
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.93609519612164e-05,
      "loss": 1.2067,
      "step": 168
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.9654767151461736e-05,
      "loss": 1.2325,
      "step": 169
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.994858234170707e-05,
      "loss": 1.082,
      "step": 170
    },
    {
      "epoch": 1.2,
      "learning_rate": 5.024239753195241e-05,
      "loss": 1.2253,
      "step": 171
    },
    {
      "epoch": 1.21,
      "learning_rate": 5.053621272219774e-05,
      "loss": 1.3254,
      "step": 172
    },
    {
      "epoch": 1.22,
      "learning_rate": 5.083002791244308e-05,
      "loss": 1.4549,
      "step": 173
    },
    {
      "epoch": 1.23,
      "learning_rate": 5.112384310268842e-05,
      "loss": 1.1586,
      "step": 174
    },
    {
      "epoch": 1.23,
      "learning_rate": 5.1417658292933754e-05,
      "loss": 1.3143,
      "step": 175
    },
    {
      "epoch": 1.24,
      "learning_rate": 5.171147348317908e-05,
      "loss": 1.2859,
      "step": 176
    },
    {
      "epoch": 1.25,
      "learning_rate": 5.2005288673424426e-05,
      "loss": 1.4026,
      "step": 177
    },
    {
      "epoch": 1.25,
      "learning_rate": 5.229910386366976e-05,
      "loss": 1.5041,
      "step": 178
    },
    {
      "epoch": 1.26,
      "learning_rate": 5.259291905391509e-05,
      "loss": 1.3742,
      "step": 179
    },
    {
      "epoch": 1.27,
      "learning_rate": 5.288673424416043e-05,
      "loss": 1.2539,
      "step": 180
    },
    {
      "epoch": 1.27,
      "learning_rate": 5.318054943440577e-05,
      "loss": 1.3051,
      "step": 181
    },
    {
      "epoch": 1.28,
      "learning_rate": 5.347436462465111e-05,
      "loss": 1.291,
      "step": 182
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.3768179814896436e-05,
      "loss": 1.2192,
      "step": 183
    },
    {
      "epoch": 1.3,
      "learning_rate": 5.406199500514177e-05,
      "loss": 1.312,
      "step": 184
    },
    {
      "epoch": 1.3,
      "learning_rate": 5.4355810195387116e-05,
      "loss": 1.2242,
      "step": 185
    },
    {
      "epoch": 1.31,
      "learning_rate": 5.464962538563244e-05,
      "loss": 1.4149,
      "step": 186
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.494344057587778e-05,
      "loss": 1.0762,
      "step": 187
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.523725576612312e-05,
      "loss": 1.5351,
      "step": 188
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.553107095636845e-05,
      "loss": 1.2344,
      "step": 189
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.582488614661378e-05,
      "loss": 1.4752,
      "step": 190
    },
    {
      "epoch": 1.35,
      "learning_rate": 5.6118701336859126e-05,
      "loss": 1.4017,
      "step": 191
    },
    {
      "epoch": 1.35,
      "learning_rate": 5.641251652710446e-05,
      "loss": 1.2617,
      "step": 192
    },
    {
      "epoch": 1.36,
      "learning_rate": 5.670633171734979e-05,
      "loss": 1.4561,
      "step": 193
    },
    {
      "epoch": 1.37,
      "learning_rate": 5.700014690759513e-05,
      "loss": 1.2679,
      "step": 194
    },
    {
      "epoch": 1.37,
      "learning_rate": 5.729396209784047e-05,
      "loss": 1.171,
      "step": 195
    },
    {
      "epoch": 1.38,
      "learning_rate": 5.75877772880858e-05,
      "loss": 1.277,
      "step": 196
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.7881592478331136e-05,
      "loss": 1.1648,
      "step": 197
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.817540766857647e-05,
      "loss": 1.4565,
      "step": 198
    },
    {
      "epoch": 1.4,
      "learning_rate": 5.8469222858821816e-05,
      "loss": 1.246,
      "step": 199
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.8763038049067145e-05,
      "loss": 1.2742,
      "step": 200
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.905685323931248e-05,
      "loss": 1.2846,
      "step": 201
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.935066842955782e-05,
      "loss": 1.3021,
      "step": 202
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.964448361980315e-05,
      "loss": 1.4153,
      "step": 203
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.993829881004849e-05,
      "loss": 1.1717,
      "step": 204
    },
    {
      "epoch": 1.44,
      "learning_rate": 6.0232114000293826e-05,
      "loss": 1.3415,
      "step": 205
    },
    {
      "epoch": 1.45,
      "learning_rate": 6.0525929190539155e-05,
      "loss": 1.19,
      "step": 206
    },
    {
      "epoch": 1.46,
      "learning_rate": 6.081974438078449e-05,
      "loss": 1.1805,
      "step": 207
    },
    {
      "epoch": 1.46,
      "learning_rate": 6.111355957102983e-05,
      "loss": 1.3612,
      "step": 208
    },
    {
      "epoch": 1.47,
      "learning_rate": 6.140737476127517e-05,
      "loss": 1.3753,
      "step": 209
    },
    {
      "epoch": 1.48,
      "learning_rate": 6.17011899515205e-05,
      "loss": 1.3976,
      "step": 210
    },
    {
      "epoch": 1.49,
      "learning_rate": 6.199500514176584e-05,
      "loss": 1.2598,
      "step": 211
    },
    {
      "epoch": 1.49,
      "learning_rate": 6.228882033201118e-05,
      "loss": 1.3799,
      "step": 212
    },
    {
      "epoch": 1.5,
      "learning_rate": 6.25826355222565e-05,
      "loss": 1.4741,
      "step": 213
    },
    {
      "epoch": 1.51,
      "learning_rate": 6.287645071250184e-05,
      "loss": 1.2601,
      "step": 214
    },
    {
      "epoch": 1.51,
      "learning_rate": 6.317026590274719e-05,
      "loss": 1.1886,
      "step": 215
    },
    {
      "epoch": 1.52,
      "learning_rate": 6.346408109299252e-05,
      "loss": 1.5535,
      "step": 216
    },
    {
      "epoch": 1.53,
      "learning_rate": 6.375789628323785e-05,
      "loss": 1.3573,
      "step": 217
    },
    {
      "epoch": 1.54,
      "learning_rate": 6.405171147348318e-05,
      "loss": 1.2504,
      "step": 218
    },
    {
      "epoch": 1.54,
      "learning_rate": 6.434552666372853e-05,
      "loss": 1.3212,
      "step": 219
    },
    {
      "epoch": 1.55,
      "learning_rate": 6.463934185397386e-05,
      "loss": 1.177,
      "step": 220
    },
    {
      "epoch": 1.56,
      "learning_rate": 6.493315704421919e-05,
      "loss": 1.1466,
      "step": 221
    },
    {
      "epoch": 1.56,
      "learning_rate": 6.522697223446453e-05,
      "loss": 1.3115,
      "step": 222
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.552078742470986e-05,
      "loss": 1.163,
      "step": 223
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.58146026149552e-05,
      "loss": 1.2517,
      "step": 224
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.610841780520054e-05,
      "loss": 1.4645,
      "step": 225
    },
    {
      "epoch": 1.59,
      "learning_rate": 6.640223299544587e-05,
      "loss": 1.5033,
      "step": 226
    },
    {
      "epoch": 1.6,
      "learning_rate": 6.669604818569121e-05,
      "loss": 1.367,
      "step": 227
    },
    {
      "epoch": 1.61,
      "learning_rate": 6.698986337593655e-05,
      "loss": 1.3936,
      "step": 228
    },
    {
      "epoch": 1.61,
      "learning_rate": 6.728367856618188e-05,
      "loss": 1.2716,
      "step": 229
    },
    {
      "epoch": 1.62,
      "learning_rate": 6.757749375642722e-05,
      "loss": 1.2563,
      "step": 230
    },
    {
      "epoch": 1.63,
      "learning_rate": 6.787130894667255e-05,
      "loss": 1.3766,
      "step": 231
    },
    {
      "epoch": 1.63,
      "learning_rate": 6.816512413691789e-05,
      "loss": 1.1206,
      "step": 232
    },
    {
      "epoch": 1.64,
      "learning_rate": 6.845893932716323e-05,
      "loss": 1.0943,
      "step": 233
    },
    {
      "epoch": 1.65,
      "learning_rate": 6.875275451740856e-05,
      "loss": 1.3942,
      "step": 234
    },
    {
      "epoch": 1.65,
      "learning_rate": 6.90465697076539e-05,
      "loss": 1.2673,
      "step": 235
    },
    {
      "epoch": 1.66,
      "learning_rate": 6.934038489789923e-05,
      "loss": 1.3732,
      "step": 236
    },
    {
      "epoch": 1.67,
      "learning_rate": 6.963420008814456e-05,
      "loss": 1.1018,
      "step": 237
    },
    {
      "epoch": 1.68,
      "learning_rate": 6.992801527838991e-05,
      "loss": 1.1131,
      "step": 238
    },
    {
      "epoch": 1.68,
      "learning_rate": 7.022183046863524e-05,
      "loss": 1.178,
      "step": 239
    },
    {
      "epoch": 1.69,
      "learning_rate": 7.051564565888057e-05,
      "loss": 1.2849,
      "step": 240
    },
    {
      "epoch": 1.7,
      "learning_rate": 7.08094608491259e-05,
      "loss": 1.2847,
      "step": 241
    },
    {
      "epoch": 1.7,
      "learning_rate": 7.110327603937125e-05,
      "loss": 0.9812,
      "step": 242
    },
    {
      "epoch": 1.71,
      "learning_rate": 7.139709122961659e-05,
      "loss": 1.0997,
      "step": 243
    },
    {
      "epoch": 1.72,
      "learning_rate": 7.169090641986191e-05,
      "loss": 1.2575,
      "step": 244
    },
    {
      "epoch": 1.73,
      "learning_rate": 7.198472161010725e-05,
      "loss": 1.3092,
      "step": 245
    },
    {
      "epoch": 1.73,
      "learning_rate": 7.22785368003526e-05,
      "loss": 1.1123,
      "step": 246
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.257235199059792e-05,
      "loss": 1.3787,
      "step": 247
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.286616718084326e-05,
      "loss": 1.2164,
      "step": 248
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.315998237108859e-05,
      "loss": 1.2863,
      "step": 249
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.345379756133394e-05,
      "loss": 1.3277,
      "step": 250
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.374761275157926e-05,
      "loss": 1.2248,
      "step": 251
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.40414279418246e-05,
      "loss": 1.4587,
      "step": 252
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.433524313206994e-05,
      "loss": 1.3788,
      "step": 253
    },
    {
      "epoch": 1.79,
      "learning_rate": 7.462905832231527e-05,
      "loss": 1.3173,
      "step": 254
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.492287351256061e-05,
      "loss": 1.2555,
      "step": 255
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.521668870280595e-05,
      "loss": 1.1032,
      "step": 256
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.551050389305128e-05,
      "loss": 1.271,
      "step": 257
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.580431908329662e-05,
      "loss": 1.1156,
      "step": 258
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.609813427354195e-05,
      "loss": 1.1455,
      "step": 259
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.639194946378729e-05,
      "loss": 1.4856,
      "step": 260
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.668576465403263e-05,
      "loss": 1.0758,
      "step": 261
    },
    {
      "epoch": 1.85,
      "learning_rate": 7.697957984427796e-05,
      "loss": 1.3166,
      "step": 262
    },
    {
      "epoch": 1.85,
      "learning_rate": 7.72733950345233e-05,
      "loss": 1.2848,
      "step": 263
    },
    {
      "epoch": 1.86,
      "learning_rate": 7.756721022476862e-05,
      "loss": 1.2372,
      "step": 264
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.786102541501397e-05,
      "loss": 1.4906,
      "step": 265
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.815484060525931e-05,
      "loss": 1.3265,
      "step": 266
    },
    {
      "epoch": 1.88,
      "learning_rate": 7.844865579550464e-05,
      "loss": 1.1439,
      "step": 267
    },
    {
      "epoch": 1.89,
      "learning_rate": 7.874247098574997e-05,
      "loss": 1.2426,
      "step": 268
    },
    {
      "epoch": 1.89,
      "learning_rate": 7.903628617599532e-05,
      "loss": 1.2351,
      "step": 269
    },
    {
      "epoch": 1.9,
      "learning_rate": 7.933010136624065e-05,
      "loss": 1.3957,
      "step": 270
    },
    {
      "epoch": 1.91,
      "learning_rate": 7.962391655648597e-05,
      "loss": 1.3555,
      "step": 271
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.991773174673131e-05,
      "loss": 1.2983,
      "step": 272
    },
    {
      "epoch": 1.92,
      "learning_rate": 8.021154693697666e-05,
      "loss": 1.2816,
      "step": 273
    },
    {
      "epoch": 1.93,
      "learning_rate": 8.050536212722198e-05,
      "loss": 1.3042,
      "step": 274
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.079917731746732e-05,
      "loss": 1.3328,
      "step": 275
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.109299250771266e-05,
      "loss": 1.2964,
      "step": 276
    },
    {
      "epoch": 1.95,
      "learning_rate": 8.1386807697958e-05,
      "loss": 1.1836,
      "step": 277
    },
    {
      "epoch": 1.96,
      "learning_rate": 8.168062288820333e-05,
      "loss": 1.2704,
      "step": 278
    },
    {
      "epoch": 1.96,
      "learning_rate": 8.197443807844866e-05,
      "loss": 1.3228,
      "step": 279
    },
    {
      "epoch": 1.97,
      "learning_rate": 8.2268253268694e-05,
      "loss": 1.0871,
      "step": 280
    },
    {
      "epoch": 1.98,
      "learning_rate": 8.256206845893934e-05,
      "loss": 1.3571,
      "step": 281
    },
    {
      "epoch": 1.99,
      "learning_rate": 8.285588364918467e-05,
      "loss": 1.4434,
      "step": 282
    },
    {
      "epoch": 1.99,
      "learning_rate": 8.314969883943001e-05,
      "loss": 1.1603,
      "step": 283
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.344351402967535e-05,
      "loss": 1.1378,
      "step": 284
    },
    {
      "epoch": 2.01,
      "learning_rate": 8.373732921992068e-05,
      "loss": 1.4653,
      "step": 285
    },
    {
      "epoch": 2.01,
      "learning_rate": 8.403114441016602e-05,
      "loss": 1.0566,
      "step": 286
    },
    {
      "epoch": 2.02,
      "learning_rate": 8.432495960041135e-05,
      "loss": 1.0505,
      "step": 287
    },
    {
      "epoch": 2.03,
      "learning_rate": 8.461877479065669e-05,
      "loss": 1.3879,
      "step": 288
    },
    {
      "epoch": 2.04,
      "learning_rate": 8.491258998090203e-05,
      "loss": 1.1117,
      "step": 289
    },
    {
      "epoch": 2.04,
      "learning_rate": 8.520640517114736e-05,
      "loss": 1.2697,
      "step": 290
    },
    {
      "epoch": 2.05,
      "learning_rate": 8.55002203613927e-05,
      "loss": 1.3577,
      "step": 291
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.579403555163803e-05,
      "loss": 1.0753,
      "step": 292
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.608785074188337e-05,
      "loss": 1.2443,
      "step": 293
    },
    {
      "epoch": 2.07,
      "learning_rate": 8.638166593212871e-05,
      "loss": 1.047,
      "step": 294
    },
    {
      "epoch": 2.08,
      "learning_rate": 8.667548112237403e-05,
      "loss": 1.2097,
      "step": 295
    },
    {
      "epoch": 2.08,
      "learning_rate": 8.696929631261938e-05,
      "loss": 1.5352,
      "step": 296
    },
    {
      "epoch": 2.09,
      "learning_rate": 8.726311150286472e-05,
      "loss": 1.1819,
      "step": 297
    },
    {
      "epoch": 2.1,
      "learning_rate": 8.755692669311004e-05,
      "loss": 1.1436,
      "step": 298
    },
    {
      "epoch": 2.11,
      "learning_rate": 8.785074188335537e-05,
      "loss": 1.1968,
      "step": 299
    },
    {
      "epoch": 2.11,
      "learning_rate": 8.814455707360072e-05,
      "loss": 1.2693,
      "step": 300
    },
    {
      "epoch": 2.12,
      "learning_rate": 8.843837226384606e-05,
      "loss": 1.3791,
      "step": 301
    },
    {
      "epoch": 2.13,
      "learning_rate": 8.873218745409138e-05,
      "loss": 1.2777,
      "step": 302
    },
    {
      "epoch": 2.13,
      "learning_rate": 8.902600264433672e-05,
      "loss": 1.248,
      "step": 303
    },
    {
      "epoch": 2.14,
      "learning_rate": 8.931981783458207e-05,
      "loss": 1.2936,
      "step": 304
    },
    {
      "epoch": 2.15,
      "learning_rate": 8.961363302482739e-05,
      "loss": 0.9981,
      "step": 305
    },
    {
      "epoch": 2.15,
      "learning_rate": 8.990744821507273e-05,
      "loss": 1.3413,
      "step": 306
    },
    {
      "epoch": 2.16,
      "learning_rate": 9.020126340531806e-05,
      "loss": 1.2833,
      "step": 307
    },
    {
      "epoch": 2.17,
      "learning_rate": 9.049507859556341e-05,
      "loss": 1.4853,
      "step": 308
    },
    {
      "epoch": 2.18,
      "learning_rate": 9.078889378580874e-05,
      "loss": 1.2807,
      "step": 309
    },
    {
      "epoch": 2.18,
      "learning_rate": 9.108270897605407e-05,
      "loss": 1.1716,
      "step": 310
    },
    {
      "epoch": 2.19,
      "learning_rate": 9.137652416629941e-05,
      "loss": 1.1964,
      "step": 311
    },
    {
      "epoch": 2.2,
      "learning_rate": 9.167033935654475e-05,
      "loss": 1.0411,
      "step": 312
    },
    {
      "epoch": 2.2,
      "learning_rate": 9.196415454679008e-05,
      "loss": 1.1827,
      "step": 313
    },
    {
      "epoch": 2.21,
      "learning_rate": 9.225796973703542e-05,
      "loss": 1.2726,
      "step": 314
    },
    {
      "epoch": 2.22,
      "learning_rate": 9.255178492728075e-05,
      "loss": 1.4049,
      "step": 315
    },
    {
      "epoch": 2.23,
      "learning_rate": 9.284560011752609e-05,
      "loss": 1.09,
      "step": 316
    },
    {
      "epoch": 2.23,
      "learning_rate": 9.313941530777143e-05,
      "loss": 1.2616,
      "step": 317
    },
    {
      "epoch": 2.24,
      "learning_rate": 9.343323049801676e-05,
      "loss": 1.2126,
      "step": 318
    },
    {
      "epoch": 2.25,
      "learning_rate": 9.37270456882621e-05,
      "loss": 1.3458,
      "step": 319
    },
    {
      "epoch": 2.25,
      "learning_rate": 9.402086087850743e-05,
      "loss": 1.4486,
      "step": 320
    },
    {
      "epoch": 2.26,
      "learning_rate": 9.431467606875277e-05,
      "loss": 1.2901,
      "step": 321
    },
    {
      "epoch": 2.27,
      "learning_rate": 9.46084912589981e-05,
      "loss": 1.2061,
      "step": 322
    },
    {
      "epoch": 2.27,
      "learning_rate": 9.490230644924344e-05,
      "loss": 1.2587,
      "step": 323
    },
    {
      "epoch": 2.28,
      "learning_rate": 9.519612163948878e-05,
      "loss": 1.2236,
      "step": 324
    },
    {
      "epoch": 2.29,
      "learning_rate": 9.548993682973412e-05,
      "loss": 1.1454,
      "step": 325
    },
    {
      "epoch": 2.3,
      "learning_rate": 9.578375201997944e-05,
      "loss": 1.2289,
      "step": 326
    },
    {
      "epoch": 2.3,
      "learning_rate": 9.607756721022479e-05,
      "loss": 1.1934,
      "step": 327
    },
    {
      "epoch": 2.31,
      "learning_rate": 9.637138240047012e-05,
      "loss": 1.3799,
      "step": 328
    },
    {
      "epoch": 2.32,
      "learning_rate": 9.666519759071545e-05,
      "loss": 1.0258,
      "step": 329
    },
    {
      "epoch": 2.32,
      "learning_rate": 9.695901278096078e-05,
      "loss": 1.4939,
      "step": 330
    },
    {
      "epoch": 2.33,
      "learning_rate": 9.725282797120613e-05,
      "loss": 1.186,
      "step": 331
    },
    {
      "epoch": 2.34,
      "learning_rate": 9.754664316145146e-05,
      "loss": 1.4199,
      "step": 332
    },
    {
      "epoch": 2.35,
      "learning_rate": 9.784045835169679e-05,
      "loss": 1.3008,
      "step": 333
    },
    {
      "epoch": 2.35,
      "learning_rate": 9.813427354194213e-05,
      "loss": 1.1806,
      "step": 334
    },
    {
      "epoch": 2.36,
      "learning_rate": 9.842808873218748e-05,
      "loss": 1.3952,
      "step": 335
    },
    {
      "epoch": 2.37,
      "learning_rate": 9.87219039224328e-05,
      "loss": 1.2403,
      "step": 336
    },
    {
      "epoch": 2.37,
      "learning_rate": 9.901571911267814e-05,
      "loss": 1.1104,
      "step": 337
    },
    {
      "epoch": 2.38,
      "learning_rate": 9.930953430292347e-05,
      "loss": 1.2103,
      "step": 338
    },
    {
      "epoch": 2.39,
      "learning_rate": 9.960334949316881e-05,
      "loss": 1.0564,
      "step": 339
    },
    {
      "epoch": 2.39,
      "learning_rate": 9.989716468341415e-05,
      "loss": 1.3984,
      "step": 340
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.99999963334151e-05,
      "loss": 1.2017,
      "step": 341
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.999997637330952e-05,
      "loss": 1.2251,
      "step": 342
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.99999390565975e-05,
      "loss": 1.2006,
      "step": 343
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.999988438329202e-05,
      "loss": 1.236,
      "step": 344
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.999981235341205e-05,
      "loss": 1.3684,
      "step": 345
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.999972296698259e-05,
      "loss": 1.1272,
      "step": 346
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.999961622403467e-05,
      "loss": 1.2259,
      "step": 347
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.999949212460534e-05,
      "loss": 1.1502,
      "step": 348
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.999935066873766e-05,
      "loss": 1.1134,
      "step": 349
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.999919185648079e-05,
      "loss": 1.306,
      "step": 350
    },
    {
      "epoch": 2.47,
      "learning_rate": 9.99990156878898e-05,
      "loss": 1.3023,
      "step": 351
    },
    {
      "epoch": 2.48,
      "learning_rate": 9.999882216302589e-05,
      "loss": 1.3383,
      "step": 352
    },
    {
      "epoch": 2.49,
      "learning_rate": 9.99986112819562e-05,
      "loss": 1.2076,
      "step": 353
    },
    {
      "epoch": 2.49,
      "learning_rate": 9.999838304475396e-05,
      "loss": 1.3298,
      "step": 354
    },
    {
      "epoch": 2.5,
      "learning_rate": 9.99981374514984e-05,
      "loss": 1.4117,
      "step": 355
    },
    {
      "epoch": 2.51,
      "learning_rate": 9.999787450227475e-05,
      "loss": 1.2157,
      "step": 356
    },
    {
      "epoch": 2.51,
      "learning_rate": 9.999759419717429e-05,
      "loss": 1.1052,
      "step": 357
    },
    {
      "epoch": 2.52,
      "learning_rate": 9.999729653629435e-05,
      "loss": 1.4931,
      "step": 358
    },
    {
      "epoch": 2.53,
      "learning_rate": 9.999698151973824e-05,
      "loss": 1.2367,
      "step": 359
    },
    {
      "epoch": 2.54,
      "learning_rate": 9.999664914761532e-05,
      "loss": 1.1763,
      "step": 360
    },
    {
      "epoch": 2.54,
      "learning_rate": 9.999629942004095e-05,
      "loss": 1.2708,
      "step": 361
    },
    {
      "epoch": 2.55,
      "learning_rate": 9.999593233713654e-05,
      "loss": 1.114,
      "step": 362
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.999554789902954e-05,
      "loss": 1.066,
      "step": 363
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.999514610585336e-05,
      "loss": 1.223,
      "step": 364
    },
    {
      "epoch": 2.57,
      "learning_rate": 9.99947269577475e-05,
      "loss": 1.1002,
      "step": 365
    },
    {
      "epoch": 2.58,
      "learning_rate": 9.999429045485746e-05,
      "loss": 1.1679,
      "step": 366
    },
    {
      "epoch": 2.58,
      "learning_rate": 9.999383659733476e-05,
      "loss": 1.3512,
      "step": 367
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.999336538533694e-05,
      "loss": 1.445,
      "step": 368
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.99928768190276e-05,
      "loss": 1.2979,
      "step": 369
    },
    {
      "epoch": 2.61,
      "learning_rate": 9.99923708985763e-05,
      "loss": 1.3093,
      "step": 370
    },
    {
      "epoch": 2.61,
      "learning_rate": 9.999184762415869e-05,
      "loss": 1.2169,
      "step": 371
    },
    {
      "epoch": 2.62,
      "learning_rate": 9.99913069959564e-05,
      "loss": 1.1727,
      "step": 372
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.99907490141571e-05,
      "loss": 1.2972,
      "step": 373
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.999017367895449e-05,
      "loss": 1.0836,
      "step": 374
    },
    {
      "epoch": 2.64,
      "learning_rate": 9.99895809905483e-05,
      "loss": 1.0422,
      "step": 375
    },
    {
      "epoch": 2.65,
      "learning_rate": 9.998897094914424e-05,
      "loss": 1.3384,
      "step": 376
    },
    {
      "epoch": 2.65,
      "learning_rate": 9.99883435549541e-05,
      "loss": 1.19,
      "step": 377
    },
    {
      "epoch": 2.66,
      "learning_rate": 9.998769880819565e-05,
      "loss": 1.2808,
      "step": 378
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.998703670909273e-05,
      "loss": 1.0036,
      "step": 379
    },
    {
      "epoch": 2.68,
      "learning_rate": 9.998635725787515e-05,
      "loss": 1.0307,
      "step": 380
    },
    {
      "epoch": 2.68,
      "learning_rate": 9.998566045477877e-05,
      "loss": 1.1171,
      "step": 381
    },
    {
      "epoch": 2.69,
      "learning_rate": 9.99849463000455e-05,
      "loss": 1.2063,
      "step": 382
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.998421479392322e-05,
      "loss": 1.2263,
      "step": 383
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.998346593666587e-05,
      "loss": 0.8941,
      "step": 384
    },
    {
      "epoch": 2.71,
      "learning_rate": 9.99826997285334e-05,
      "loss": 1.0527,
      "step": 385
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.998191616979178e-05,
      "loss": 1.2203,
      "step": 386
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.9981115260713e-05,
      "loss": 1.2373,
      "step": 387
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.998029700157512e-05,
      "loss": 1.0372,
      "step": 388
    },
    {
      "epoch": 2.74,
      "learning_rate": 9.997946139266214e-05,
      "loss": 1.3097,
      "step": 389
    },
    {
      "epoch": 2.75,
      "learning_rate": 9.997860843426416e-05,
      "loss": 1.1405,
      "step": 390
    },
    {
      "epoch": 2.75,
      "learning_rate": 9.997773812667726e-05,
      "loss": 1.2198,
      "step": 391
    },
    {
      "epoch": 2.76,
      "learning_rate": 9.997685047020353e-05,
      "loss": 1.2622,
      "step": 392
    },
    {
      "epoch": 2.77,
      "learning_rate": 9.997594546515112e-05,
      "loss": 1.1772,
      "step": 393
    },
    {
      "epoch": 2.77,
      "learning_rate": 9.997502311183421e-05,
      "loss": 1.3354,
      "step": 394
    },
    {
      "epoch": 2.78,
      "learning_rate": 9.997408341057295e-05,
      "loss": 1.2733,
      "step": 395
    },
    {
      "epoch": 2.79,
      "learning_rate": 9.997312636169354e-05,
      "loss": 1.2603,
      "step": 396
    },
    {
      "epoch": 2.8,
      "learning_rate": 9.99721519655282e-05,
      "loss": 1.182,
      "step": 397
    },
    {
      "epoch": 2.8,
      "learning_rate": 9.997116022241519e-05,
      "loss": 1.001,
      "step": 398
    },
    {
      "epoch": 2.81,
      "learning_rate": 9.997015113269878e-05,
      "loss": 1.1661,
      "step": 399
    },
    {
      "epoch": 2.82,
      "learning_rate": 9.996912469672925e-05,
      "loss": 1.0415,
      "step": 400
    },
    {
      "epoch": 2.82,
      "learning_rate": 9.99680809148629e-05,
      "loss": 1.0874,
      "step": 401
    },
    {
      "epoch": 2.83,
      "learning_rate": 9.996701978746207e-05,
      "loss": 1.3411,
      "step": 402
    },
    {
      "epoch": 2.84,
      "learning_rate": 9.996594131489511e-05,
      "loss": 1.0126,
      "step": 403
    },
    {
      "epoch": 2.85,
      "learning_rate": 9.99648454975364e-05,
      "loss": 1.2103,
      "step": 404
    },
    {
      "epoch": 2.85,
      "learning_rate": 9.996373233576631e-05,
      "loss": 1.1725,
      "step": 405
    },
    {
      "epoch": 2.86,
      "learning_rate": 9.996260182997126e-05,
      "loss": 1.1493,
      "step": 406
    },
    {
      "epoch": 2.87,
      "learning_rate": 9.996145398054372e-05,
      "loss": 1.3638,
      "step": 407
    },
    {
      "epoch": 2.87,
      "learning_rate": 9.99602887878821e-05,
      "loss": 1.2561,
      "step": 408
    },
    {
      "epoch": 2.88,
      "learning_rate": 9.995910625239092e-05,
      "loss": 1.0412,
      "step": 409
    },
    {
      "epoch": 2.89,
      "learning_rate": 9.995790637448064e-05,
      "loss": 1.1855,
      "step": 410
    },
    {
      "epoch": 2.89,
      "learning_rate": 9.995668915456778e-05,
      "loss": 1.1156,
      "step": 411
    },
    {
      "epoch": 2.9,
      "learning_rate": 9.99554545930749e-05,
      "loss": 1.317,
      "step": 412
    },
    {
      "epoch": 2.91,
      "learning_rate": 9.995420269043053e-05,
      "loss": 1.3018,
      "step": 413
    },
    {
      "epoch": 2.92,
      "learning_rate": 9.995293344706927e-05,
      "loss": 1.2392,
      "step": 414
    },
    {
      "epoch": 2.92,
      "learning_rate": 9.99516468634317e-05,
      "loss": 1.2128,
      "step": 415
    },
    {
      "epoch": 2.93,
      "learning_rate": 9.995034293996442e-05,
      "loss": 1.173,
      "step": 416
    },
    {
      "epoch": 2.94,
      "learning_rate": 9.99490216771201e-05,
      "loss": 1.2461,
      "step": 417
    },
    {
      "epoch": 2.94,
      "learning_rate": 9.994768307535736e-05,
      "loss": 1.1709,
      "step": 418
    },
    {
      "epoch": 2.95,
      "learning_rate": 9.99463271351409e-05,
      "loss": 1.0691,
      "step": 419
    },
    {
      "epoch": 2.96,
      "learning_rate": 9.99449538569414e-05,
      "loss": 1.1749,
      "step": 420
    },
    {
      "epoch": 2.96,
      "learning_rate": 9.994356324123555e-05,
      "loss": 1.2748,
      "step": 421
    },
    {
      "epoch": 2.97,
      "learning_rate": 9.994215528850611e-05,
      "loss": 0.9984,
      "step": 422
    },
    {
      "epoch": 2.98,
      "learning_rate": 9.994072999924178e-05,
      "loss": 1.288,
      "step": 423
    },
    {
      "epoch": 2.99,
      "learning_rate": 9.993928737393738e-05,
      "loss": 1.3438,
      "step": 424
    },
    {
      "epoch": 2.99,
      "learning_rate": 9.993782741309366e-05,
      "loss": 1.0476,
      "step": 425
    },
    {
      "epoch": 3.0,
      "learning_rate": 9.993635011721743e-05,
      "loss": 1.0269,
      "step": 426
    },
    {
      "epoch": 3.01,
      "learning_rate": 9.993485548682149e-05,
      "loss": 1.3591,
      "step": 427
    },
    {
      "epoch": 3.01,
      "learning_rate": 9.993334352242469e-05,
      "loss": 0.9882,
      "step": 428
    },
    {
      "epoch": 3.02,
      "learning_rate": 9.993181422455189e-05,
      "loss": 0.9844,
      "step": 429
    },
    {
      "epoch": 3.03,
      "learning_rate": 9.993026759373394e-05,
      "loss": 1.2867,
      "step": 430
    },
    {
      "epoch": 3.04,
      "learning_rate": 9.992870363050772e-05,
      "loss": 0.9716,
      "step": 431
    },
    {
      "epoch": 3.04,
      "learning_rate": 9.992712233541616e-05,
      "loss": 1.1783,
      "step": 432
    },
    {
      "epoch": 3.05,
      "learning_rate": 9.992552370900816e-05,
      "loss": 1.3025,
      "step": 433
    },
    {
      "epoch": 3.06,
      "learning_rate": 9.992390775183865e-05,
      "loss": 0.9909,
      "step": 434
    },
    {
      "epoch": 3.06,
      "learning_rate": 9.99222744644686e-05,
      "loss": 1.1289,
      "step": 435
    },
    {
      "epoch": 3.07,
      "learning_rate": 9.992062384746496e-05,
      "loss": 0.9763,
      "step": 436
    },
    {
      "epoch": 3.08,
      "learning_rate": 9.991895590140072e-05,
      "loss": 1.0838,
      "step": 437
    },
    {
      "epoch": 3.08,
      "learning_rate": 9.991727062685488e-05,
      "loss": 1.3713,
      "step": 438
    },
    {
      "epoch": 3.09,
      "learning_rate": 9.991556802441244e-05,
      "loss": 1.1156,
      "step": 439
    },
    {
      "epoch": 3.1,
      "learning_rate": 9.991384809466445e-05,
      "loss": 1.0658,
      "step": 440
    },
    {
      "epoch": 3.11,
      "learning_rate": 9.991211083820792e-05,
      "loss": 1.0693,
      "step": 441
    },
    {
      "epoch": 3.11,
      "learning_rate": 9.991035625564593e-05,
      "loss": 1.1844,
      "step": 442
    },
    {
      "epoch": 3.12,
      "learning_rate": 9.990858434758756e-05,
      "loss": 1.2371,
      "step": 443
    },
    {
      "epoch": 3.13,
      "learning_rate": 9.990679511464788e-05,
      "loss": 1.2049,
      "step": 444
    },
    {
      "epoch": 3.13,
      "learning_rate": 9.990498855744802e-05,
      "loss": 1.1487,
      "step": 445
    },
    {
      "epoch": 3.14,
      "learning_rate": 9.990316467661506e-05,
      "loss": 1.1855,
      "step": 446
    },
    {
      "epoch": 3.15,
      "learning_rate": 9.990132347278213e-05,
      "loss": 0.9415,
      "step": 447
    },
    {
      "epoch": 3.15,
      "learning_rate": 9.989946494658839e-05,
      "loss": 1.2081,
      "step": 448
    },
    {
      "epoch": 3.16,
      "learning_rate": 9.989758909867899e-05,
      "loss": 1.1945,
      "step": 449
    },
    {
      "epoch": 3.17,
      "learning_rate": 9.989569592970509e-05,
      "loss": 1.386,
      "step": 450
    },
    {
      "epoch": 3.18,
      "learning_rate": 9.989378544032388e-05,
      "loss": 1.1616,
      "step": 451
    },
    {
      "epoch": 3.18,
      "learning_rate": 9.989185763119853e-05,
      "loss": 1.0811,
      "step": 452
    },
    {
      "epoch": 3.19,
      "learning_rate": 9.988991250299826e-05,
      "loss": 1.1135,
      "step": 453
    },
    {
      "epoch": 3.2,
      "learning_rate": 9.98879500563983e-05,
      "loss": 0.9678,
      "step": 454
    },
    {
      "epoch": 3.2,
      "learning_rate": 9.988597029207986e-05,
      "loss": 1.1091,
      "step": 455
    },
    {
      "epoch": 3.21,
      "learning_rate": 9.98839732107302e-05,
      "loss": 1.171,
      "step": 456
    },
    {
      "epoch": 3.22,
      "learning_rate": 9.988195881304254e-05,
      "loss": 1.2926,
      "step": 457
    },
    {
      "epoch": 3.23,
      "learning_rate": 9.987992709971617e-05,
      "loss": 0.9554,
      "step": 458
    },
    {
      "epoch": 3.23,
      "learning_rate": 9.987787807145634e-05,
      "loss": 1.1523,
      "step": 459
    },
    {
      "epoch": 3.24,
      "learning_rate": 9.987581172897436e-05,
      "loss": 1.0537,
      "step": 460
    },
    {
      "epoch": 3.25,
      "learning_rate": 9.987372807298752e-05,
      "loss": 1.2286,
      "step": 461
    },
    {
      "epoch": 3.25,
      "learning_rate": 9.987162710421909e-05,
      "loss": 1.3111,
      "step": 462
    },
    {
      "epoch": 3.26,
      "learning_rate": 9.986950882339843e-05,
      "loss": 1.095,
      "step": 463
    },
    {
      "epoch": 3.27,
      "learning_rate": 9.986737323126084e-05,
      "loss": 1.1133,
      "step": 464
    },
    {
      "epoch": 3.27,
      "learning_rate": 9.986522032854766e-05,
      "loss": 1.1405,
      "step": 465
    },
    {
      "epoch": 3.28,
      "learning_rate": 9.986305011600621e-05,
      "loss": 1.0994,
      "step": 466
    },
    {
      "epoch": 3.29,
      "learning_rate": 9.986086259438987e-05,
      "loss": 1.0086,
      "step": 467
    },
    {
      "epoch": 3.3,
      "learning_rate": 9.985865776445799e-05,
      "loss": 1.0992,
      "step": 468
    },
    {
      "epoch": 3.3,
      "learning_rate": 9.985643562697595e-05,
      "loss": 1.1195,
      "step": 469
    },
    {
      "epoch": 3.31,
      "learning_rate": 9.98541961827151e-05,
      "loss": 1.2934,
      "step": 470
    },
    {
      "epoch": 3.32,
      "learning_rate": 9.985193943245284e-05,
      "loss": 0.932,
      "step": 471
    },
    {
      "epoch": 3.32,
      "learning_rate": 9.984966537697254e-05,
      "loss": 1.4145,
      "step": 472
    },
    {
      "epoch": 3.33,
      "learning_rate": 9.984737401706364e-05,
      "loss": 1.0835,
      "step": 473
    },
    {
      "epoch": 3.34,
      "learning_rate": 9.98450653535215e-05,
      "loss": 1.2985,
      "step": 474
    },
    {
      "epoch": 3.35,
      "learning_rate": 9.984273938714756e-05,
      "loss": 1.1208,
      "step": 475
    },
    {
      "epoch": 3.35,
      "learning_rate": 9.984039611874922e-05,
      "loss": 1.0568,
      "step": 476
    },
    {
      "epoch": 3.36,
      "learning_rate": 9.983803554913991e-05,
      "loss": 1.2661,
      "step": 477
    },
    {
      "epoch": 3.37,
      "learning_rate": 9.983565767913908e-05,
      "loss": 1.1787,
      "step": 478
    },
    {
      "epoch": 3.37,
      "learning_rate": 9.983326250957215e-05,
      "loss": 1.0023,
      "step": 479
    },
    {
      "epoch": 3.38,
      "learning_rate": 9.983085004127055e-05,
      "loss": 1.073,
      "step": 480
    },
    {
      "epoch": 3.39,
      "learning_rate": 9.982842027507174e-05,
      "loss": 0.8713,
      "step": 481
    },
    {
      "epoch": 3.39,
      "learning_rate": 9.982597321181916e-05,
      "loss": 1.2682,
      "step": 482
    },
    {
      "epoch": 3.4,
      "learning_rate": 9.982350885236227e-05,
      "loss": 1.1112,
      "step": 483
    },
    {
      "epoch": 3.41,
      "learning_rate": 9.982102719755652e-05,
      "loss": 1.1175,
      "step": 484
    },
    {
      "epoch": 3.42,
      "learning_rate": 9.981852824826341e-05,
      "loss": 1.0672,
      "step": 485
    },
    {
      "epoch": 3.42,
      "learning_rate": 9.981601200535036e-05,
      "loss": 1.121,
      "step": 486
    },
    {
      "epoch": 3.43,
      "learning_rate": 9.981347846969086e-05,
      "loss": 1.2847,
      "step": 487
    },
    {
      "epoch": 3.44,
      "learning_rate": 9.981092764216437e-05,
      "loss": 1.0444,
      "step": 488
    },
    {
      "epoch": 3.44,
      "learning_rate": 9.980835952365639e-05,
      "loss": 1.0468,
      "step": 489
    },
    {
      "epoch": 3.45,
      "learning_rate": 9.980577411505836e-05,
      "loss": 1.0451,
      "step": 490
    },
    {
      "epoch": 3.46,
      "learning_rate": 9.98031714172678e-05,
      "loss": 1.0078,
      "step": 491
    },
    {
      "epoch": 3.46,
      "learning_rate": 9.980055143118815e-05,
      "loss": 1.189,
      "step": 492
    },
    {
      "epoch": 3.47,
      "learning_rate": 9.979791415772892e-05,
      "loss": 1.1625,
      "step": 493
    },
    {
      "epoch": 3.48,
      "learning_rate": 9.979525959780558e-05,
      "loss": 1.2144,
      "step": 494
    },
    {
      "epoch": 3.49,
      "learning_rate": 9.979258775233962e-05,
      "loss": 1.0964,
      "step": 495
    },
    {
      "epoch": 3.49,
      "learning_rate": 9.978989862225853e-05,
      "loss": 1.2053,
      "step": 496
    },
    {
      "epoch": 3.5,
      "learning_rate": 9.978719220849577e-05,
      "loss": 1.2922,
      "step": 497
    },
    {
      "epoch": 3.51,
      "learning_rate": 9.978446851199085e-05,
      "loss": 1.1391,
      "step": 498
    },
    {
      "epoch": 3.51,
      "learning_rate": 9.978172753368924e-05,
      "loss": 0.9737,
      "step": 499
    },
    {
      "epoch": 3.52,
      "learning_rate": 9.977896927454242e-05,
      "loss": 1.3689,
      "step": 500
    },
    {
      "epoch": 3.53,
      "learning_rate": 9.977619373550789e-05,
      "loss": 1.0774,
      "step": 501
    },
    {
      "epoch": 3.54,
      "learning_rate": 9.977340091754909e-05,
      "loss": 1.0158,
      "step": 502
    },
    {
      "epoch": 3.54,
      "learning_rate": 9.977059082163554e-05,
      "loss": 1.1729,
      "step": 503
    },
    {
      "epoch": 3.55,
      "learning_rate": 9.976776344874268e-05,
      "loss": 1.0058,
      "step": 504
    },
    {
      "epoch": 3.56,
      "learning_rate": 9.9764918799852e-05,
      "loss": 0.9511,
      "step": 505
    },
    {
      "epoch": 3.56,
      "learning_rate": 9.976205687595098e-05,
      "loss": 1.0977,
      "step": 506
    },
    {
      "epoch": 3.57,
      "learning_rate": 9.975917767803307e-05,
      "loss": 0.9736,
      "step": 507
    },
    {
      "epoch": 3.58,
      "learning_rate": 9.975628120709774e-05,
      "loss": 1.0244,
      "step": 508
    },
    {
      "epoch": 3.58,
      "learning_rate": 9.975336746415044e-05,
      "loss": 1.1551,
      "step": 509
    },
    {
      "epoch": 3.59,
      "learning_rate": 9.975043645020264e-05,
      "loss": 1.3528,
      "step": 510
    },
    {
      "epoch": 3.6,
      "learning_rate": 9.974748816627176e-05,
      "loss": 1.1664,
      "step": 511
    },
    {
      "epoch": 3.61,
      "learning_rate": 9.974452261338128e-05,
      "loss": 1.1727,
      "step": 512
    },
    {
      "epoch": 3.61,
      "learning_rate": 9.974153979256062e-05,
      "loss": 1.1234,
      "step": 513
    },
    {
      "epoch": 3.62,
      "learning_rate": 9.973853970484521e-05,
      "loss": 1.0327,
      "step": 514
    },
    {
      "epoch": 3.63,
      "learning_rate": 9.973552235127648e-05,
      "loss": 1.1534,
      "step": 515
    },
    {
      "epoch": 3.63,
      "learning_rate": 9.973248773290186e-05,
      "loss": 1.0124,
      "step": 516
    },
    {
      "epoch": 3.64,
      "learning_rate": 9.972943585077478e-05,
      "loss": 0.9568,
      "step": 517
    },
    {
      "epoch": 3.65,
      "learning_rate": 9.972636670595461e-05,
      "loss": 1.2397,
      "step": 518
    },
    {
      "epoch": 3.65,
      "learning_rate": 9.972328029950676e-05,
      "loss": 1.036,
      "step": 519
    },
    {
      "epoch": 3.66,
      "learning_rate": 9.972017663250264e-05,
      "loss": 1.1054,
      "step": 520
    },
    {
      "epoch": 3.67,
      "learning_rate": 9.971705570601958e-05,
      "loss": 0.8758,
      "step": 521
    },
    {
      "epoch": 3.68,
      "learning_rate": 9.971391752114104e-05,
      "loss": 0.9002,
      "step": 522
    },
    {
      "epoch": 3.68,
      "learning_rate": 9.971076207895632e-05,
      "loss": 1.0288,
      "step": 523
    },
    {
      "epoch": 3.69,
      "learning_rate": 9.97075893805608e-05,
      "loss": 1.078,
      "step": 524
    },
    {
      "epoch": 3.7,
      "learning_rate": 9.970439942705582e-05,
      "loss": 1.1148,
      "step": 525
    },
    {
      "epoch": 3.7,
      "learning_rate": 9.970119221954871e-05,
      "loss": 0.7688,
      "step": 526
    },
    {
      "epoch": 3.71,
      "learning_rate": 9.969796775915281e-05,
      "loss": 0.9705,
      "step": 527
    },
    {
      "epoch": 3.72,
      "learning_rate": 9.969472604698742e-05,
      "loss": 1.1566,
      "step": 528
    },
    {
      "epoch": 3.73,
      "learning_rate": 9.969146708417786e-05,
      "loss": 1.1484,
      "step": 529
    },
    {
      "epoch": 3.73,
      "learning_rate": 9.968819087185539e-05,
      "loss": 0.9396,
      "step": 530
    },
    {
      "epoch": 3.74,
      "learning_rate": 9.968489741115734e-05,
      "loss": 1.1973,
      "step": 531
    },
    {
      "epoch": 3.75,
      "learning_rate": 9.968158670322692e-05,
      "loss": 1.0191,
      "step": 532
    },
    {
      "epoch": 3.75,
      "learning_rate": 9.967825874921341e-05,
      "loss": 1.1284,
      "step": 533
    },
    {
      "epoch": 3.76,
      "learning_rate": 9.967491355027207e-05,
      "loss": 1.1671,
      "step": 534
    },
    {
      "epoch": 3.77,
      "learning_rate": 9.96715511075641e-05,
      "loss": 1.0915,
      "step": 535
    },
    {
      "epoch": 3.77,
      "learning_rate": 9.96681714222567e-05,
      "loss": 1.1303,
      "step": 536
    },
    {
      "epoch": 3.78,
      "learning_rate": 9.966477449552312e-05,
      "loss": 1.1181,
      "step": 537
    },
    {
      "epoch": 3.79,
      "learning_rate": 9.96613603285425e-05,
      "loss": 1.163,
      "step": 538
    },
    {
      "epoch": 3.8,
      "learning_rate": 9.96579289225e-05,
      "loss": 1.0522,
      "step": 539
    },
    {
      "epoch": 3.8,
      "learning_rate": 9.96544802785868e-05,
|
"loss": 0.8393, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 9.965101439800002e-05, |
|
"loss": 1.028, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 9.96475312819428e-05, |
|
"loss": 0.9271, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 9.964403093162422e-05, |
|
"loss": 0.9996, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 9.964051334825937e-05, |
|
"loss": 1.1694, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 9.96369785330693e-05, |
|
"loss": 0.9082, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 9.96334264872811e-05, |
|
"loss": 1.0761, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 9.962985721212777e-05, |
|
"loss": 0.9997, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 9.962627070884832e-05, |
|
"loss": 0.9916, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 9.962266697868776e-05, |
|
"loss": 1.2202, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 9.961904602289705e-05, |
|
"loss": 1.1519, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 9.961540784273311e-05, |
|
"loss": 0.9058, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 9.961175243945893e-05, |
|
"loss": 1.1024, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 9.960807981434338e-05, |
|
"loss": 0.9425, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 9.960438996866134e-05, |
|
"loss": 1.1816, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 9.960068290369371e-05, |
|
"loss": 1.2259, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 9.959695862072731e-05, |
|
"loss": 1.1489, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 9.959321712105496e-05, |
|
"loss": 1.1185, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 9.958945840597545e-05, |
|
"loss": 1.0252, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 9.958568247679355e-05, |
|
"loss": 1.1161, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 9.958188933482003e-05, |
|
"loss": 1.0131, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 9.957807898137157e-05, |
|
"loss": 0.9445, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 9.957425141777093e-05, |
|
"loss": 1.039, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 9.957040664534672e-05, |
|
"loss": 1.19, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 9.95665446654336e-05, |
|
"loss": 0.8734, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 9.956266547937222e-05, |
|
"loss": 1.1848, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 9.955876908850913e-05, |
|
"loss": 1.1969, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 9.955485549419692e-05, |
|
"loss": 0.903, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 9.95509246977941e-05, |
|
"loss": 0.8832, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 9.954697670066522e-05, |
|
"loss": 1.194, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 9.954301150418071e-05, |
|
"loss": 0.9009, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 9.953902910971703e-05, |
|
"loss": 0.8919, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 9.95350295186566e-05, |
|
"loss": 1.1544, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 9.953101273238782e-05, |
|
"loss": 0.8454, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 9.952697875230506e-05, |
|
"loss": 1.0843, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 9.95229275798086e-05, |
|
"loss": 1.2351, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 9.951885921630475e-05, |
|
"loss": 0.8698, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 9.951477366320579e-05, |
|
"loss": 0.9866, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 9.951067092192993e-05, |
|
"loss": 0.8646, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 9.950655099390136e-05, |
|
"loss": 0.9214, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 9.950241388055024e-05, |
|
"loss": 1.1716, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 9.949825958331272e-05, |
|
"loss": 1.0097, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 9.949408810363088e-05, |
|
"loss": 0.9652, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 9.948989944295274e-05, |
|
"loss": 0.9114, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 9.948569360273236e-05, |
|
"loss": 1.0803, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 9.948147058442973e-05, |
|
"loss": 1.1052, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 9.947723038951076e-05, |
|
"loss": 1.0975, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 9.947297301944738e-05, |
|
"loss": 1.0353, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 9.946869847571745e-05, |
|
"loss": 1.0835, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 9.946440675980482e-05, |
|
"loss": 0.8705, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 9.946009787319925e-05, |
|
"loss": 1.118, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 9.945577181739654e-05, |
|
"loss": 1.096, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 9.945142859389835e-05, |
|
"loss": 1.2616, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 9.944706820421242e-05, |
|
"loss": 1.03, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 9.944269064985232e-05, |
|
"loss": 0.9491, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 9.943829593233767e-05, |
|
"loss": 1.0014, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 9.943388405319401e-05, |
|
"loss": 0.8753, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 9.942945501395284e-05, |
|
"loss": 1.0214, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 9.942500881615164e-05, |
|
"loss": 1.0443, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 9.942054546133384e-05, |
|
"loss": 1.1553, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 9.941606495104877e-05, |
|
"loss": 0.8192, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 9.94115672868518e-05, |
|
"loss": 1.014, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 9.940705247030418e-05, |
|
"loss": 0.8881, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 9.940252050297318e-05, |
|
"loss": 1.1146, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 9.939797138643198e-05, |
|
"loss": 1.1543, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 9.939340512225972e-05, |
|
"loss": 0.8623, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 9.938882171204151e-05, |
|
"loss": 0.9957, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 9.938422115736839e-05, |
|
"loss": 1.0105, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 9.937960345983736e-05, |
|
"loss": 0.93, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 9.937496862105138e-05, |
|
"loss": 0.8828, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 9.937031664261933e-05, |
|
"loss": 1.0038, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 9.93656475261561e-05, |
|
"loss": 1.0304, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 9.936096127328247e-05, |
|
"loss": 1.1919, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 9.93562578856252e-05, |
|
"loss": 0.8084, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 9.935153736481696e-05, |
|
"loss": 1.3357, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 9.934679971249641e-05, |
|
"loss": 0.9653, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 9.934204493030816e-05, |
|
"loss": 1.1869, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 9.933727301990272e-05, |
|
"loss": 0.9646, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 9.93324839829366e-05, |
|
"loss": 0.9412, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 9.932767782107221e-05, |
|
"loss": 1.0985, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 9.932285453597794e-05, |
|
"loss": 1.1193, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 9.931801412932808e-05, |
|
"loss": 0.8925, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 9.931315660280294e-05, |
|
"loss": 0.9306, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 9.930828195808868e-05, |
|
"loss": 0.7386, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 9.930339019687746e-05, |
|
"loss": 1.1414, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 9.929848132086737e-05, |
|
"loss": 1.0167, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 9.929355533176244e-05, |
|
"loss": 1.0053, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 9.928861223127264e-05, |
|
"loss": 0.9431, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 9.928365202111389e-05, |
|
"loss": 1.0089, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 9.927867470300803e-05, |
|
"loss": 1.1917, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 9.927368027868283e-05, |
|
"loss": 0.9679, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 9.926866874987203e-05, |
|
"loss": 0.8943, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 9.92636401183153e-05, |
|
"loss": 0.9199, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 9.925859438575824e-05, |
|
"loss": 0.8748, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 9.925353155395238e-05, |
|
"loss": 1.0573, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 9.924845162465522e-05, |
|
"loss": 1.0292, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 9.924335459963011e-05, |
|
"loss": 1.0957, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 9.923824048064644e-05, |
|
"loss": 0.9933, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 9.923310926947947e-05, |
|
"loss": 1.0641, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 9.922796096791041e-05, |
|
"loss": 1.1815, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 9.922279557772639e-05, |
|
"loss": 1.09, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 9.921761310072051e-05, |
|
"loss": 0.8597, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 9.921241353869175e-05, |
|
"loss": 1.2314, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 9.920719689344506e-05, |
|
"loss": 0.9624, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 9.920196316679129e-05, |
|
"loss": 0.8595, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 9.919671236054726e-05, |
|
"loss": 1.0533, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 9.919144447653569e-05, |
|
"loss": 0.9057, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 9.918615951658521e-05, |
|
"loss": 0.828, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 9.918085748253044e-05, |
|
"loss": 0.9885, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 9.917553837621183e-05, |
|
"loss": 0.8416, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 9.917020219947586e-05, |
|
"loss": 0.8906, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 9.916484895417488e-05, |
|
"loss": 0.9593, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 9.915947864216716e-05, |
|
"loss": 1.2514, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 9.915409126531691e-05, |
|
"loss": 1.082, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 9.91486868254943e-05, |
|
"loss": 1.0438, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 9.914326532457534e-05, |
|
"loss": 1.0188, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 9.9137826764442e-05, |
|
"loss": 0.8931, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 9.913237114698223e-05, |
|
"loss": 0.9998, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 9.912689847408983e-05, |
|
"loss": 0.9381, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 9.912140874766453e-05, |
|
"loss": 0.8725, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 9.9115901969612e-05, |
|
"loss": 1.1549, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 9.911037814184382e-05, |
|
"loss": 0.8704, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 9.910483726627749e-05, |
|
"loss": 0.9152, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 9.90992793448364e-05, |
|
"loss": 0.7531, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 9.909370437944993e-05, |
|
"loss": 0.7568, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 9.908811237205331e-05, |
|
"loss": 0.9515, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 9.90825033245877e-05, |
|
"loss": 0.9485, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 9.907687723900019e-05, |
|
"loss": 1.0008, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 9.907123411724377e-05, |
|
"loss": 0.6774, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 9.906557396127734e-05, |
|
"loss": 0.8914, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 9.905989677306575e-05, |
|
"loss": 1.0804, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 9.905420255457971e-05, |
|
"loss": 1.0676, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 9.904849130779588e-05, |
|
"loss": 0.8497, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 9.904276303469683e-05, |
|
"loss": 1.0927, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 9.903701773727101e-05, |
|
"loss": 0.9029, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 9.903125541751279e-05, |
|
"loss": 1.05, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 9.902547607742247e-05, |
|
"loss": 1.084, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 9.901967971900627e-05, |
|
"loss": 1.0038, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 9.901386634427625e-05, |
|
"loss": 0.9117, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 9.900803595525044e-05, |
|
"loss": 0.9534, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 9.900218855395276e-05, |
|
"loss": 1.0565, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 9.899632414241302e-05, |
|
"loss": 0.9274, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 9.899044272266696e-05, |
|
"loss": 0.7033, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 9.89845442967562e-05, |
|
"loss": 0.8854, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 9.897862886672829e-05, |
|
"loss": 0.8273, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 9.897269643463663e-05, |
|
"loss": 0.8897, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 9.896674700254061e-05, |
|
"loss": 1.037, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 9.896078057250542e-05, |
|
"loss": 0.8056, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 9.895479714660226e-05, |
|
"loss": 0.9452, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 9.894879672690811e-05, |
|
"loss": 0.8404, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 9.894277931550593e-05, |
|
"loss": 0.8433, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 9.893674491448456e-05, |
|
"loss": 1.1066, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 9.893069352593876e-05, |
|
"loss": 1.0541, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 9.892462515196914e-05, |
|
"loss": 0.7835, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 9.89185397946822e-05, |
|
"loss": 1.0211, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 9.89124374561904e-05, |
|
"loss": 0.7885, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 9.890631813861207e-05, |
|
"loss": 1.0477, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 9.89001818440714e-05, |
|
"loss": 1.1342, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 9.88940285746985e-05, |
|
"loss": 1.0765, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 9.888785833262935e-05, |
|
"loss": 1.0354, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 9.888167112000589e-05, |
|
"loss": 0.8909, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 9.887546693897585e-05, |
|
"loss": 0.9852, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 9.886924579169293e-05, |
|
"loss": 0.8719, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 9.886300768031666e-05, |
|
"loss": 0.8452, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 9.885675260701254e-05, |
|
"loss": 0.9104, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 9.885048057395187e-05, |
|
"loss": 1.1023, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 9.884419158331188e-05, |
|
"loss": 0.7713, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 9.883788563727569e-05, |
|
"loss": 1.088, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 9.883156273803231e-05, |
|
"loss": 0.9891, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 9.88252228877766e-05, |
|
"loss": 0.7652, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 9.881886608870932e-05, |
|
"loss": 0.749, |
|
"step": 710 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 710, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"total_flos": 5.578854905151488e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|