|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.997173544375354, |
|
"eval_steps": 500, |
|
"global_step": 2210, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 14.2562, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.9850746268656714e-08, |
|
"loss": 14.1082, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.955223880597014e-08, |
|
"loss": 13.89, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.4925373134328355e-07, |
|
"loss": 14.336, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.08955223880597e-07, |
|
"loss": 14.191, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.388059701492537e-07, |
|
"loss": 13.9705, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.985074626865671e-07, |
|
"loss": 14.1539, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.5820895522388055e-07, |
|
"loss": 14.1077, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.17910447761194e-07, |
|
"loss": 13.8072, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.776119402985074e-07, |
|
"loss": 13.9236, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.373134328358208e-07, |
|
"loss": 13.6593, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.970149253731342e-07, |
|
"loss": 13.1699, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.567164179104477e-07, |
|
"loss": 13.1314, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.164179104477611e-07, |
|
"loss": 12.9618, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.761194029850746e-07, |
|
"loss": 12.5965, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.35820895522388e-07, |
|
"loss": 12.3889, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.955223880597015e-07, |
|
"loss": 12.0161, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.552238805970149e-07, |
|
"loss": 12.0092, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.0149253731343285e-06, |
|
"loss": 11.6872, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.0746268656716416e-06, |
|
"loss": 11.2193, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.134328358208955e-06, |
|
"loss": 11.3883, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1940298507462684e-06, |
|
"loss": 10.9494, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.253731343283582e-06, |
|
"loss": 10.4602, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.3134328358208954e-06, |
|
"loss": 10.4576, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.3731343283582088e-06, |
|
"loss": 10.056, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.4328358208955222e-06, |
|
"loss": 9.7397, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.4925373134328358e-06, |
|
"loss": 9.5213, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.5522388059701492e-06, |
|
"loss": 8.9522, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.6119402985074626e-06, |
|
"loss": 8.5001, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.671641791044776e-06, |
|
"loss": 8.3404, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.7313432835820893e-06, |
|
"loss": 8.119, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.791044776119403e-06, |
|
"loss": 7.4038, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.8507462686567163e-06, |
|
"loss": 6.4731, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9104477611940297e-06, |
|
"loss": 6.4348, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.970149253731343e-06, |
|
"loss": 5.866, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.999998925453213e-06, |
|
"loss": 5.221, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.999990329092774e-06, |
|
"loss": 5.816, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9999731364457933e-06, |
|
"loss": 5.0727, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9999473476600653e-06, |
|
"loss": 4.4573, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.99991296295728e-06, |
|
"loss": 5.2337, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9998699826330215e-06, |
|
"loss": 4.1417, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9998184070567638e-06, |
|
"loss": 4.2764, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9997582366718695e-06, |
|
"loss": 4.6434, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9996894719955866e-06, |
|
"loss": 4.3801, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.99961211361904e-06, |
|
"loss": 4.1709, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.999526162207233e-06, |
|
"loss": 4.4757, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9994316184990337e-06, |
|
"loss": 5.1113, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.999328483307176e-06, |
|
"loss": 4.6985, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9992167575182478e-06, |
|
"loss": 4.2607, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.999096442092685e-06, |
|
"loss": 4.3804, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9989675380647637e-06, |
|
"loss": 4.0617, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.998830046542591e-06, |
|
"loss": 3.7293, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.998683968708094e-06, |
|
"loss": 4.2439, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9985293058170123e-06, |
|
"loss": 4.1583, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9983660591988855e-06, |
|
"loss": 4.3666, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9981942302570413e-06, |
|
"loss": 4.1329, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9980138204685853e-06, |
|
"loss": 4.1193, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9978248313843862e-06, |
|
"loss": 4.3299, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9976272646290646e-06, |
|
"loss": 3.8453, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.997421121900977e-06, |
|
"loss": 4.1314, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.997206404972203e-06, |
|
"loss": 3.7143, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9969831156885276e-06, |
|
"loss": 4.4762, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.996751255969429e-06, |
|
"loss": 4.4241, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9965108278080595e-06, |
|
"loss": 4.4316, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.996261833271227e-06, |
|
"loss": 4.3614, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9960042744993815e-06, |
|
"loss": 3.9243, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9957381537065932e-06, |
|
"loss": 3.5642, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.995463473180535e-06, |
|
"loss": 4.712, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.995180235282462e-06, |
|
"loss": 4.5578, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9948884424471923e-06, |
|
"loss": 4.9125, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9945880971830843e-06, |
|
"loss": 3.5687, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.994279202072018e-06, |
|
"loss": 3.819, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9939617597693683e-06, |
|
"loss": 4.4124, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.993635773003988e-06, |
|
"loss": 4.0662, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9933012445781795e-06, |
|
"loss": 3.9198, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9929581773676725e-06, |
|
"loss": 4.0843, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9926065743216e-06, |
|
"loss": 3.912, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9922464384624714e-06, |
|
"loss": 3.8158, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9918777728861474e-06, |
|
"loss": 4.4136, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9915005807618145e-06, |
|
"loss": 4.123, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.991114865331955e-06, |
|
"loss": 4.809, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9907206299123214e-06, |
|
"loss": 3.9801, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9903178778919072e-06, |
|
"loss": 4.453, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9899066127329178e-06, |
|
"loss": 4.3087, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.98948683797074e-06, |
|
"loss": 4.5307, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9890585572139136e-06, |
|
"loss": 4.1205, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.988621774144098e-06, |
|
"loss": 4.3706, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9881764925160414e-06, |
|
"loss": 4.2244, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9877227161575497e-06, |
|
"loss": 3.8475, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9872604489694526e-06, |
|
"loss": 3.928, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9867896949255693e-06, |
|
"loss": 4.3231, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9863104580726754e-06, |
|
"loss": 3.53, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9858227425304686e-06, |
|
"loss": 3.9642, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9853265524915316e-06, |
|
"loss": 3.924, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9848218922212978e-06, |
|
"loss": 4.1848, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9843087660580133e-06, |
|
"loss": 3.9171, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9837871784126997e-06, |
|
"loss": 3.4938, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.983257133769118e-06, |
|
"loss": 3.7065, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.982718636683727e-06, |
|
"loss": 3.7765, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9821716917856482e-06, |
|
"loss": 3.9975, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9816163037766205e-06, |
|
"loss": 3.6078, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.981052477430966e-06, |
|
"loss": 3.4676, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.980480217595543e-06, |
|
"loss": 3.7417, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.980190926653637e-06, |
|
"loss": 4.7603, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.979606025830001e-06, |
|
"loss": 3.8721, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.97901270395083e-06, |
|
"loss": 3.6418, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.978410966116538e-06, |
|
"loss": 3.9267, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9778008174998856e-06, |
|
"loss": 3.5786, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9771822633459368e-06, |
|
"loss": 3.4906, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.976555308972011e-06, |
|
"loss": 3.8799, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.97591995976764e-06, |
|
"loss": 3.8256, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.97527622119452e-06, |
|
"loss": 3.8061, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9746240987864664e-06, |
|
"loss": 3.8942, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.973963598149364e-06, |
|
"loss": 4.1381, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.973294724961121e-06, |
|
"loss": 3.8467, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.972617484971618e-06, |
|
"loss": 4.4283, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9719318840026603e-06, |
|
"loss": 4.024, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.971237927947928e-06, |
|
"loss": 4.3655, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9705356227729238e-06, |
|
"loss": 4.1166, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9698249745149214e-06, |
|
"loss": 3.9481, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9691059892829174e-06, |
|
"loss": 3.7715, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.968378673257574e-06, |
|
"loss": 3.7323, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9676430326911686e-06, |
|
"loss": 3.6433, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.966899073907539e-06, |
|
"loss": 3.6378, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9661468033020314e-06, |
|
"loss": 3.4342, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9653862273414402e-06, |
|
"loss": 4.1278, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.964617352563959e-06, |
|
"loss": 3.81, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.963840185579119e-06, |
|
"loss": 3.7042, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9630547330677346e-06, |
|
"loss": 3.6386, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9622610017818463e-06, |
|
"loss": 3.7616, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9614589985446616e-06, |
|
"loss": 4.4132, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.9606487302504967e-06, |
|
"loss": 3.2473, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.96024049888095e-06, |
|
"loss": 3.999, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9594178460835627e-06, |
|
"loss": 3.1134, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.958586945775697e-06, |
|
"loss": 3.7284, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.9577478051000774e-06, |
|
"loss": 4.1856, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.9569004312702694e-06, |
|
"loss": 3.8734, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.95604483157061e-06, |
|
"loss": 3.663, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9551810133561524e-06, |
|
"loss": 3.5877, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9543089840525953e-06, |
|
"loss": 4.1249, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.9534287511562268e-06, |
|
"loss": 3.5625, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.952540322233853e-06, |
|
"loss": 4.2327, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.951643704922737e-06, |
|
"loss": 4.0032, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9507389069305344e-06, |
|
"loss": 4.327, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9498259360352218e-06, |
|
"loss": 3.7673, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9489048000850346e-06, |
|
"loss": 3.5553, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9479755069983987e-06, |
|
"loss": 4.2058, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.947038064763861e-06, |
|
"loss": 3.6355, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9460924814400205e-06, |
|
"loss": 4.2922, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9451387651554616e-06, |
|
"loss": 3.8831, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.944176924108682e-06, |
|
"loss": 3.5195, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.943206966568023e-06, |
|
"loss": 3.8046, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.942228900871598e-06, |
|
"loss": 3.4737, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.941242735427222e-06, |
|
"loss": 3.6585, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.940248478712337e-06, |
|
"loss": 3.3485, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9392461392739412e-06, |
|
"loss": 3.5068, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.938235725728515e-06, |
|
"loss": 3.7612, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9372172467619476e-06, |
|
"loss": 3.5368, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.9361907111294597e-06, |
|
"loss": 3.905, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.9351561276555314e-06, |
|
"loss": 3.7021, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.9341135052338247e-06, |
|
"loss": 3.5583, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.9330628528271074e-06, |
|
"loss": 4.1436, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9320041794671763e-06, |
|
"loss": 3.5964, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9309374942547785e-06, |
|
"loss": 3.7826, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.9298628063595345e-06, |
|
"loss": 3.5185, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.928780125019859e-06, |
|
"loss": 3.8345, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.9276894595428805e-06, |
|
"loss": 3.246, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.9265908193043632e-06, |
|
"loss": 3.5096, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.9254842137486245e-06, |
|
"loss": 3.7819, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.9243696523884546e-06, |
|
"loss": 3.6428, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.9232471448050358e-06, |
|
"loss": 3.7838, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.922116700647857e-06, |
|
"loss": 3.8994, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.9209783296346356e-06, |
|
"loss": 3.1471, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.9198320415512286e-06, |
|
"loss": 3.2651, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.918677846251552e-06, |
|
"loss": 4.1139, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.917515753657496e-06, |
|
"loss": 2.8653, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9163457737588374e-06, |
|
"loss": 3.6462, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.915167916613156e-06, |
|
"loss": 4.4759, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9139821923457483e-06, |
|
"loss": 2.948, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9127886111495373e-06, |
|
"loss": 3.0247, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.9115871832849887e-06, |
|
"loss": 3.3062, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.91037791908002e-06, |
|
"loss": 3.601, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.9091608289299143e-06, |
|
"loss": 3.5491, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.9079359232972276e-06, |
|
"loss": 3.5064, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.9067032127117022e-06, |
|
"loss": 3.694, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.9054627077701735e-06, |
|
"loss": 3.3931, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.9042144191364805e-06, |
|
"loss": 3.4121, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.9029583575413744e-06, |
|
"loss": 3.7823, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.9016945337824244e-06, |
|
"loss": 3.2065, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9004229587239266e-06, |
|
"loss": 3.607, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.8991436432968111e-06, |
|
"loss": 3.3631, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8978565984985458e-06, |
|
"loss": 3.6406, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8965618353930431e-06, |
|
"loss": 3.8469, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.895259365110566e-06, |
|
"loss": 3.0934, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.8939491988476298e-06, |
|
"loss": 3.3083, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.8926313478669092e-06, |
|
"loss": 3.3458, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.8913058234971375e-06, |
|
"loss": 3.5751, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.8899726371330122e-06, |
|
"loss": 3.4553, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.8886318002350965e-06, |
|
"loss": 3.8738, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.8872833243297196e-06, |
|
"loss": 2.9535, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.8859272210088796e-06, |
|
"loss": 4.2909, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.8845635019301413e-06, |
|
"loss": 3.2678, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.883192178816538e-06, |
|
"loss": 3.3449, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.881813263456471e-06, |
|
"loss": 3.3589, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.8804267677036053e-06, |
|
"loss": 3.1426, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.8790327034767718e-06, |
|
"loss": 3.3746, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.877631082759862e-06, |
|
"loss": 3.7385, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8762219176017251e-06, |
|
"loss": 3.5787, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.874805220116066e-06, |
|
"loss": 3.6242, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.87338100248134e-06, |
|
"loss": 3.6493, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.8719492769406488e-06, |
|
"loss": 3.0777, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.8705100558016338e-06, |
|
"loss": 3.1073, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.869063351436372e-06, |
|
"loss": 3.5807, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.8676091762812695e-06, |
|
"loss": 3.359, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.8661475428369531e-06, |
|
"loss": 3.511, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.8646784636681642e-06, |
|
"loss": 3.362, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.8632019514036506e-06, |
|
"loss": 3.3515, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.8617180187360573e-06, |
|
"loss": 3.7303, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.8602266784218184e-06, |
|
"loss": 3.2562, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.858727943281046e-06, |
|
"loss": 3.0296, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.8572218261974224e-06, |
|
"loss": 3.5499, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.855708340118086e-06, |
|
"loss": 2.928, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.8541874980535229e-06, |
|
"loss": 3.8264, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.8526593130774535e-06, |
|
"loss": 3.9517, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.8511237983267212e-06, |
|
"loss": 3.3977, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.8495809670011782e-06, |
|
"loss": 3.111, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.8480308323635728e-06, |
|
"loss": 3.3572, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.8464734077394354e-06, |
|
"loss": 3.5381, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.8449087065169643e-06, |
|
"loss": 3.3103, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.843336742146909e-06, |
|
"loss": 3.5118, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.8417575281424567e-06, |
|
"loss": 3.3761, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.8401710780791147e-06, |
|
"loss": 3.4191, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.8385774055945942e-06, |
|
"loss": 3.688, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.8369765243886932e-06, |
|
"loss": 3.3699, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.8353684482231779e-06, |
|
"loss": 3.5255, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.8337531909216662e-06, |
|
"loss": 3.4657, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.8321307663695063e-06, |
|
"loss": 3.3744, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.8305011885136594e-06, |
|
"loss": 3.3628, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.8288644713625798e-06, |
|
"loss": 3.3277, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.827220628986093e-06, |
|
"loss": 3.0904, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.8255696755152753e-06, |
|
"loss": 3.1305, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.8239116251423335e-06, |
|
"loss": 3.5075, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.822246492120481e-06, |
|
"loss": 3.3155, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.8205742907638178e-06, |
|
"loss": 3.0176, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.8188950354472041e-06, |
|
"loss": 3.4693, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.8172087406061396e-06, |
|
"loss": 3.3606, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.8155154207366384e-06, |
|
"loss": 3.4945, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.8138150903951036e-06, |
|
"loss": 3.2044, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.8121077641982034e-06, |
|
"loss": 3.2363, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.8103934568227456e-06, |
|
"loss": 3.2894, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.80867218300555e-06, |
|
"loss": 3.8915, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.8069439575433216e-06, |
|
"loss": 3.615, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.8052087952925266e-06, |
|
"loss": 3.2595, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.8034667111692605e-06, |
|
"loss": 2.9635, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.8017177201491224e-06, |
|
"loss": 3.6671, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.7999618372670863e-06, |
|
"loss": 2.8625, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.79819907761737e-06, |
|
"loss": 2.7803, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.7964294563533074e-06, |
|
"loss": 3.5145, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.7946529886872166e-06, |
|
"loss": 3.5473, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.792869689890271e-06, |
|
"loss": 3.0734, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.791079575292366e-06, |
|
"loss": 3.8972, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.7892826602819882e-06, |
|
"loss": 2.9234, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.7874789603060836e-06, |
|
"loss": 3.6336, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.785668490869924e-06, |
|
"loss": 3.2648, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.7838512675369737e-06, |
|
"loss": 3.4194, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.7820273059287567e-06, |
|
"loss": 3.3257, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.7801966217247206e-06, |
|
"loss": 2.9887, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.7783592306621044e-06, |
|
"loss": 3.1913, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7765151485358004e-06, |
|
"loss": 3.5827, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7746643911982204e-06, |
|
"loss": 3.0704, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.7728069745591583e-06, |
|
"loss": 3.0789, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.770942914585655e-06, |
|
"loss": 3.6528, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.769072227301858e-06, |
|
"loss": 3.2085, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.767194928788888e-06, |
|
"loss": 3.3669, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.765311035184696e-06, |
|
"loss": 3.6074, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.763420562683928e-06, |
|
"loss": 3.5051, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.7615235275377851e-06, |
|
"loss": 3.611, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.759619946053882e-06, |
|
"loss": 3.0341, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.7577098345961094e-06, |
|
"loss": 3.3202, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.755793209584491e-06, |
|
"loss": 3.5234, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.7538700874950447e-06, |
|
"loss": 2.9705, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.7519404848596379e-06, |
|
"loss": 3.5456, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.750004418265849e-06, |
|
"loss": 3.0676, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.7480619043568217e-06, |
|
"loss": 3.8065, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.746112959831124e-06, |
|
"loss": 3.3873, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.744157601442603e-06, |
|
"loss": 3.2863, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.742195846000243e-06, |
|
"loss": 3.4774, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.7402277103680185e-06, |
|
"loss": 3.0919, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.7382532114647508e-06, |
|
"loss": 2.8205, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.7362723662639627e-06, |
|
"loss": 3.2246, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.734285191793732e-06, |
|
"loss": 3.1851, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.7322917051365447e-06, |
|
"loss": 3.6528, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.730291923429149e-06, |
|
"loss": 3.6397, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.7282858638624082e-06, |
|
"loss": 3.5481, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.7262735436811514e-06, |
|
"loss": 3.6148, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.7242549801840273e-06, |
|
"loss": 3.2745, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.7222301907233537e-06, |
|
"loss": 3.1826, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.7201991927049689e-06, |
|
"loss": 2.9794, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.7181620035880832e-06, |
|
"loss": 3.9056, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.7161186408851274e-06, |
|
"loss": 2.904, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.714069122161602e-06, |
|
"loss": 3.3681, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.7120134650359285e-06, |
|
"loss": 2.6666, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.709951687179295e-06, |
|
"loss": 3.0994, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.7078838063155061e-06, |
|
"loss": 3.3996, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.7058098402208305e-06, |
|
"loss": 3.2222, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.703729806723847e-06, |
|
"loss": 3.2388, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.7016437237052928e-06, |
|
"loss": 3.265, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.6995516090979084e-06, |
|
"loss": 3.4802, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.6974534808862847e-06, |
|
"loss": 2.8957, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.6953493571067074e-06, |
|
"loss": 3.2822, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.6932392558470022e-06, |
|
"loss": 3.2971, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.6911231952463796e-06, |
|
"loss": 3.3349, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.6890011934952788e-06, |
|
"loss": 3.1385, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.6868732688352112e-06, |
|
"loss": 3.4216, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.684739439558604e-06, |
|
"loss": 3.5044, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.682599724008643e-06, |
|
"loss": 3.2023, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.6804541405791127e-06, |
|
"loss": 3.3691, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.6783027077142426e-06, |
|
"loss": 2.8764, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.6761454439085446e-06, |
|
"loss": 3.4093, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.6739823677066557e-06, |
|
"loss": 3.0168, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.6718134977031788e-06, |
|
"loss": 2.9064, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.669638852542522e-06, |
|
"loss": 3.2264, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.6674584509187393e-06, |
|
"loss": 3.0279, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.665272311575369e-06, |
|
"loss": 3.4402, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.6630804533052727e-06, |
|
"loss": 2.971, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.660882894950475e-06, |
|
"loss": 3.7801, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.6586796554019994e-06, |
|
"loss": 3.072, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.6564707535997078e-06, |
|
"loss": 2.8806, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.6542562085321365e-06, |
|
"loss": 3.5786, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.652036039236334e-06, |
|
"loss": 3.0554, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.6498102647976956e-06, |
|
"loss": 3.0103, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.6475789043498018e-06, |
|
"loss": 3.3522, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.6453419770742514e-06, |
|
"loss": 2.9305, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.6430995022004987e-06, |
|
"loss": 3.0927, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.6408514990056863e-06, |
|
"loss": 3.7041, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.638597986814481e-06, |
|
"loss": 2.8467, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.6363389849989062e-06, |
|
"loss": 2.8837, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.6340745129781772e-06, |
|
"loss": 2.7715, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.6318045902185325e-06, |
|
"loss": 3.0874, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.6295292362330666e-06, |
|
"loss": 3.3405, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.6272484705815645e-06, |
|
"loss": 3.2075, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.6249623128703304e-06, |
|
"loss": 3.222, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.6226707827520207e-06, |
|
"loss": 3.502, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.620373899925476e-06, |
|
"loss": 3.5456, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.6180716841355505e-06, |
|
"loss": 3.0917, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.615764155172941e-06, |
|
"loss": 3.16, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.6134513328740206e-06, |
|
"loss": 3.1644, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.6111332371206646e-06, |
|
"loss": 3.1943, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.6088098878400808e-06, |
|
"loss": 2.992, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.6064813050046384e-06, |
|
"loss": 3.0377, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.6041475086316964e-06, |
|
"loss": 3.2359, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.6018085187834314e-06, |
|
"loss": 3.4935, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.5994643555666645e-06, |
|
"loss": 2.8595, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.5971150391326894e-06, |
|
"loss": 3.622, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.5947605896770985e-06, |
|
"loss": 3.3836, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.5924010274396101e-06, |
|
"loss": 3.0779, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.590036372703893e-06, |
|
"loss": 3.5189, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.5876666457973937e-06, |
|
"loss": 2.9973, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.585291867091161e-06, |
|
"loss": 3.5169, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.5829120569996702e-06, |
|
"loss": 2.7568, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.5805272359806485e-06, |
|
"loss": 3.4689, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.5781374245348992e-06, |
|
"loss": 3.1464, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.575742643206125e-06, |
|
"loss": 2.9425, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.573342912580751e-06, |
|
"loss": 2.8022, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.5709382532877498e-06, |
|
"loss": 3.4381, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.5685286859984607e-06, |
|
"loss": 3.142, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.5661142314264154e-06, |
|
"loss": 3.2907, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.5636949103271575e-06, |
|
"loss": 2.8657, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.561270743498066e-06, |
|
"loss": 3.5265, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.5588417517781748e-06, |
|
"loss": 3.0299, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.5564079560479949e-06, |
|
"loss": 3.0853, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.5539693772293336e-06, |
|
"loss": 2.9567, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.5515260362851167e-06, |
|
"loss": 3.0872, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.5490779542192056e-06, |
|
"loss": 2.9966, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.5466251520762193e-06, |
|
"loss": 2.666, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.5441676509413508e-06, |
|
"loss": 3.217, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.5417054719401895e-06, |
|
"loss": 2.9665, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.5392386362385355e-06, |
|
"loss": 3.0145, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.5367671650422206e-06, |
|
"loss": 3.088, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.534291079596925e-06, |
|
"loss": 3.0815, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.5318104011879943e-06, |
|
"loss": 3.2028, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.5293251511402576e-06, |
|
"loss": 3.2383, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.5268353508178423e-06, |
|
"loss": 3.5033, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.5243410216239935e-06, |
|
"loss": 3.1143, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.521842185000886e-06, |
|
"loss": 3.1468, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.5193388624294437e-06, |
|
"loss": 2.8792, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.5168310754291524e-06, |
|
"loss": 2.6661, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.5143188455578768e-06, |
|
"loss": 2.8124, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.5118021944116732e-06, |
|
"loss": 2.9561, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.5092811436246053e-06, |
|
"loss": 2.9348, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.5067557148685577e-06, |
|
"loss": 3.246, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.5042259298530495e-06, |
|
"loss": 2.9901, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.501691810325048e-06, |
|
"loss": 3.4768, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.4991533780687812e-06, |
|
"loss": 3.331, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.4966106549055518e-06, |
|
"loss": 3.1482, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.4940636626935473e-06, |
|
"loss": 3.1415, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.4915124233276549e-06, |
|
"loss": 2.8092, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.4889569587392707e-06, |
|
"loss": 2.7413, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.4863972908961136e-06, |
|
"loss": 2.9891, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.4838334418020345e-06, |
|
"loss": 3.3329, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.4812654334968278e-06, |
|
"loss": 3.3335, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.4786932880560424e-06, |
|
"loss": 3.1025, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.4761170275907913e-06, |
|
"loss": 2.8553, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.473536674247562e-06, |
|
"loss": 3.0792, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.4709522502080253e-06, |
|
"loss": 3.1111, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.4683637776888461e-06, |
|
"loss": 3.1688, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.465771278941491e-06, |
|
"loss": 2.9591, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.4631747762520374e-06, |
|
"loss": 3.6505, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.4605742919409827e-06, |
|
"loss": 3.1481, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.4579698483630508e-06, |
|
"loss": 3.1243, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.4553614679070018e-06, |
|
"loss": 2.8568, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.4527491729954384e-06, |
|
"loss": 2.6829, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.4501329860846134e-06, |
|
"loss": 3.2537, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.4475129296642364e-06, |
|
"loss": 3.4175, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.444889026257281e-06, |
|
"loss": 3.3713, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.4422612984197914e-06, |
|
"loss": 3.226, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.4396297687406864e-06, |
|
"loss": 3.1959, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.436994459841569e-06, |
|
"loss": 3.0436, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.4343553943765283e-06, |
|
"loss": 2.9748, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.431712595031947e-06, |
|
"loss": 3.0803, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.4290660845263044e-06, |
|
"loss": 2.8028, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.426415885609984e-06, |
|
"loss": 3.1046, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.4237620210650748e-06, |
|
"loss": 3.0997, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.4211045137051778e-06, |
|
"loss": 3.3444, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.4184433863752086e-06, |
|
"loss": 3.2665, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.4157786619512015e-06, |
|
"loss": 2.909, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.413110363340113e-06, |
|
"loss": 3.1522, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.4104385134796239e-06, |
|
"loss": 3.2937, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.407763135337944e-06, |
|
"loss": 3.4302, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.405084251913612e-06, |
|
"loss": 3.0326, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.4024018862353004e-06, |
|
"loss": 3.2939, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.3997160613616166e-06, |
|
"loss": 2.7915, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.3970268003809037e-06, |
|
"loss": 3.1943, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.3943341264110431e-06, |
|
"loss": 3.2806, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.391638062599256e-06, |
|
"loss": 2.8522, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.3889386321219036e-06, |
|
"loss": 2.8003, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.3862358581842881e-06, |
|
"loss": 2.8271, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.3835297640204535e-06, |
|
"loss": 2.9389, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.3808203728929853e-06, |
|
"loss": 2.7254, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.378107708092812e-06, |
|
"loss": 3.0289, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.3753917929390017e-06, |
|
"loss": 2.9601, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.3726726507785663e-06, |
|
"loss": 3.5084, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.3699503049862563e-06, |
|
"loss": 2.795, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.3672247789643632e-06, |
|
"loss": 2.8747, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.3644960961425157e-06, |
|
"loss": 2.8853, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.36176427997748e-06, |
|
"loss": 3.3706, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.3590293539529581e-06, |
|
"loss": 3.1291, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.3562913415793848e-06, |
|
"loss": 3.1716, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.3535502663937272e-06, |
|
"loss": 2.9143, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.35080615195928e-06, |
|
"loss": 3.3883, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.3480590218654665e-06, |
|
"loss": 3.0929, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.3453088997276314e-06, |
|
"loss": 2.8438, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.3425558091868419e-06, |
|
"loss": 3.0209, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.3397997739096818e-06, |
|
"loss": 3.3254, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.3370408175880492e-06, |
|
"loss": 2.9318, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.3342789639389529e-06, |
|
"loss": 2.8639, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.3315142367043075e-06, |
|
"loss": 3.164, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.3287466596507303e-06, |
|
"loss": 3.1302, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.3259762565693372e-06, |
|
"loss": 2.9031, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.3232030512755363e-06, |
|
"loss": 3.4944, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.3204270676088267e-06, |
|
"loss": 2.643, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.3176483294325892e-06, |
|
"loss": 3.0969, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.3148668606338855e-06, |
|
"loss": 2.9564, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.3120826851232485e-06, |
|
"loss": 3.1574, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.309295826834481e-06, |
|
"loss": 3.4807, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.3065063097244461e-06, |
|
"loss": 2.8635, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.3037141577728652e-06, |
|
"loss": 3.0288, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.300919394982108e-06, |
|
"loss": 3.0146, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.2981220453769887e-06, |
|
"loss": 3.2221, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.295322133004559e-06, |
|
"loss": 3.0042, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.2925196819339e-06, |
|
"loss": 3.0275, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.2897147162559178e-06, |
|
"loss": 2.608, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.286907260083134e-06, |
|
"loss": 2.8112, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.2840973375494795e-06, |
|
"loss": 3.309, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.2812849728100873e-06, |
|
"loss": 3.368, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.2784701900410851e-06, |
|
"loss": 2.4191, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.2756530134393848e-06, |
|
"loss": 3.2218, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.2728334672224788e-06, |
|
"loss": 3.4522, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.2700115756282285e-06, |
|
"loss": 3.3719, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.2671873629146573e-06, |
|
"loss": 2.7217, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.2643608533597416e-06, |
|
"loss": 3.0249, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.2615320712612026e-06, |
|
"loss": 2.663, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.2587010409362967e-06, |
|
"loss": 2.7607, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.2558677867216072e-06, |
|
"loss": 2.6782, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.2530323329728352e-06, |
|
"loss": 3.0509, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.2501947040645883e-06, |
|
"loss": 2.6735, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.2473549243901744e-06, |
|
"loss": 3.5474, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.2445130183613897e-06, |
|
"loss": 3.4528, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.241669010408308e-06, |
|
"loss": 3.6589, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.2388229249790736e-06, |
|
"loss": 2.7694, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.235974786539689e-06, |
|
"loss": 2.9747, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.2331246195738053e-06, |
|
"loss": 3.3257, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2302724485825104e-06, |
|
"loss": 2.9527, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.227418298084122e-06, |
|
"loss": 2.7044, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2245621926139716e-06, |
|
"loss": 3.0088, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.221704156724198e-06, |
|
"loss": 3.4115, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.218844214983535e-06, |
|
"loss": 2.9359, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.215982391977098e-06, |
|
"loss": 2.8188, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2131187123061766e-06, |
|
"loss": 3.1646, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2102532005880189e-06, |
|
"loss": 3.059, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2073858814556235e-06, |
|
"loss": 3.1625, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.204516779557525e-06, |
|
"loss": 2.9633, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.201645919557585e-06, |
|
"loss": 2.8562, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.1987733261347764e-06, |
|
"loss": 3.0493, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.1958990239829745e-06, |
|
"loss": 2.8915, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1930230378107434e-06, |
|
"loss": 2.7766, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1901453923411234e-06, |
|
"loss": 3.037, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1872661123114182e-06, |
|
"loss": 2.7826, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.184385222472984e-06, |
|
"loss": 3.0884, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1815027475910146e-06, |
|
"loss": 3.2076, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1786187124443297e-06, |
|
"loss": 3.1764, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1757331418251616e-06, |
|
"loss": 2.907, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1728460605389417e-06, |
|
"loss": 2.9013, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1699574934040884e-06, |
|
"loss": 3.0044, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1670674652517925e-06, |
|
"loss": 3.0849, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1641760009258047e-06, |
|
"loss": 2.9643, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1612831252822207e-06, |
|
"loss": 3.2874, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1583888631892692e-06, |
|
"loss": 3.2334, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.1554932395270973e-06, |
|
"loss": 3.2864, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.1525962791875561e-06, |
|
"loss": 2.9315, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.1496980070739882e-06, |
|
"loss": 2.9479, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.146798448101011e-06, |
|
"loss": 2.9524, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.1438976271943065e-06, |
|
"loss": 2.8668, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.140995569290403e-06, |
|
"loss": 2.9794, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.1380922993364634e-06, |
|
"loss": 2.8266, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.1351878422900689e-06, |
|
"loss": 2.7969, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.1322822231190064e-06, |
|
"loss": 3.1351, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.1293754668010525e-06, |
|
"loss": 2.9592, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.1264675983237588e-06, |
|
"loss": 3.0088, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.1235586426842381e-06, |
|
"loss": 2.709, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.1206486248889476e-06, |
|
"loss": 3.3371, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.1177375699534772e-06, |
|
"loss": 3.0443, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.1148255029023305e-06, |
|
"loss": 2.7235, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.1119124487687126e-06, |
|
"loss": 2.9919, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.1089984325943138e-06, |
|
"loss": 2.7299, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.1060834794290942e-06, |
|
"loss": 2.7834, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.1031676143310689e-06, |
|
"loss": 2.8806, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.100250862366092e-06, |
|
"loss": 2.5645, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.0973332486076421e-06, |
|
"loss": 3.1799, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.0944147981366052e-06, |
|
"loss": 3.0639, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.0914955360410607e-06, |
|
"loss": 3.1427, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.0885754874160647e-06, |
|
"loss": 2.6835, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.0856546773634346e-06, |
|
"loss": 3.0022, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.082733130991533e-06, |
|
"loss": 2.8408, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.0798108734150533e-06, |
|
"loss": 2.8128, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.0768879297548016e-06, |
|
"loss": 2.7853, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.073964325137482e-06, |
|
"loss": 2.8805, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.0710400846954809e-06, |
|
"loss": 2.7104, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.0681152335666492e-06, |
|
"loss": 3.4712, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.0651897968940899e-06, |
|
"loss": 2.9869, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.062263799825937e-06, |
|
"loss": 3.0022, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.0593372675151436e-06, |
|
"loss": 3.1016, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.0564102251192628e-06, |
|
"loss": 3.3125, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.0534826978002333e-06, |
|
"loss": 3.152, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.0505547107241625e-06, |
|
"loss": 2.9954, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.0476262890611093e-06, |
|
"loss": 2.6699, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.0446974579848693e-06, |
|
"loss": 3.0823, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.0417682426727565e-06, |
|
"loss": 3.1029, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.0388386683053895e-06, |
|
"loss": 2.7887, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.0359087600664712e-06, |
|
"loss": 2.5965, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.0329785431425773e-06, |
|
"loss": 3.0214, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.030048042722935e-06, |
|
"loss": 2.865, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.0271172839992092e-06, |
|
"loss": 2.5204, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.0241862921652859e-06, |
|
"loss": 3.1104, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.0212550924170538e-06, |
|
"loss": 3.2602, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.01832370995219e-06, |
|
"loss": 2.8817, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.0153921699699416e-06, |
|
"loss": 2.7408, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0124604976709098e-06, |
|
"loss": 3.2223, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0095287182568338e-06, |
|
"loss": 2.8304, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0065968569303732e-06, |
|
"loss": 3.3916, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0036649388948915e-06, |
|
"loss": 2.8845, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.0007329893542401e-06, |
|
"loss": 2.8152, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.978010335125418e-07, |
|
"loss": 3.1776, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.948690965739719e-07, |
|
"loss": 2.9452, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.919372037425448e-07, |
|
"loss": 3.102, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.89005380221895e-07, |
|
"loss": 2.8247, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.860736512150612e-07, |
|
"loss": 2.8144, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.831420419242695e-07, |
|
"loss": 2.6794, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.802105775507176e-07, |
|
"loss": 2.8484, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.772792832943572e-07, |
|
"loss": 3.0658, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.743481843536763e-07, |
|
"loss": 2.7053, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.71417305925486e-07, |
|
"loss": 2.9634, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.684866732046999e-07, |
|
"loss": 2.821, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.65556311384121e-07, |
|
"loss": 3.0826, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 9.626262456542221e-07, |
|
"loss": 3.1534, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 9.596965012029322e-07, |
|
"loss": 3.2123, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 9.567671032154167e-07, |
|
"loss": 2.8473, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.538380768738643e-07, |
|
"loss": 2.6558, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.509094473572684e-07, |
|
"loss": 3.3537, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.479812398412102e-07, |
|
"loss": 3.2921, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.450534794976447e-07, |
|
"loss": 3.1407, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.421261914946818e-07, |
|
"loss": 2.6678, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.391994009963715e-07, |
|
"loss": 3.0473, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.362731331624865e-07, |
|
"loss": 3.26, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.333474131483075e-07, |
|
"loss": 2.8314, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.304222661044046e-07, |
|
"loss": 3.0672, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.274977171764235e-07, |
|
"loss": 2.8473, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.245737915048683e-07, |
|
"loss": 2.77, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.216505142248842e-07, |
|
"loss": 3.0094, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.187279104660441e-07, |
|
"loss": 3.1822, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.158060053521299e-07, |
|
"loss": 2.8612, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.128848240009182e-07, |
|
"loss": 3.2639, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.099643915239636e-07, |
|
"loss": 3.0926, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.070447330263837e-07, |
|
"loss": 3.1956, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.041258736066419e-07, |
|
"loss": 3.1529, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.012078383563331e-07, |
|
"loss": 3.1493, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.98290652359967e-07, |
|
"loss": 3.1245, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.953743406947524e-07, |
|
"loss": 2.7367, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.924589284303833e-07, |
|
"loss": 2.876, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.895444406288204e-07, |
|
"loss": 3.2604, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.86630902344079e-07, |
|
"loss": 2.721, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 8.837183386220105e-07, |
|
"loss": 3.1033, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 8.808067745000901e-07, |
|
"loss": 3.1502, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 8.778962350071989e-07, |
|
"loss": 2.9382, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 8.749867451634103e-07, |
|
"loss": 3.2997, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 8.720783299797749e-07, |
|
"loss": 2.8682, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 8.691710144581045e-07, |
|
"loss": 2.9769, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 8.662648235907583e-07, |
|
"loss": 3.2094, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 8.633597823604271e-07, |
|
"loss": 3.0341, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 8.604559157399196e-07, |
|
"loss": 3.3464, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 8.575532486919463e-07, |
|
"loss": 2.9869, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 8.546518061689066e-07, |
|
"loss": 3.138, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 8.517516131126726e-07, |
|
"loss": 2.9878, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 8.488526944543763e-07, |
|
"loss": 3.3184, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 8.459550751141941e-07, |
|
"loss": 3.0895, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 8.430587800011329e-07, |
|
"loss": 3.0961, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 8.401638340128166e-07, |
|
"loss": 2.6911, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 8.372702620352703e-07, |
|
"loss": 2.5421, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 8.343780889427091e-07, |
|
"loss": 2.7481, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 8.314873395973217e-07, |
|
"loss": 3.1733, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 8.285980388490585e-07, |
|
"loss": 2.985, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 8.257102115354163e-07, |
|
"loss": 2.997, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 8.228238824812266e-07, |
|
"loss": 2.8608, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 8.19939076498441e-07, |
|
"loss": 2.8035, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 8.170558183859181e-07, |
|
"loss": 2.9794, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.141741329292104e-07, |
|
"loss": 2.8045, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.112940449003515e-07, |
|
"loss": 2.868, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.084155790576427e-07, |
|
"loss": 3.1646, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.055387601454407e-07, |
|
"loss": 3.0302, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.026636128939442e-07, |
|
"loss": 2.9926, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.997901620189815e-07, |
|
"loss": 2.7346, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.96918432221799e-07, |
|
"loss": 2.6431, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 7.940484481888477e-07, |
|
"loss": 2.8179, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 7.911802345915711e-07, |
|
"loss": 3.3702, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 7.883138160861938e-07, |
|
"loss": 2.8974, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 7.854492173135087e-07, |
|
"loss": 2.8441, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 7.825864628986659e-07, |
|
"loss": 2.8335, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 7.797255774509606e-07, |
|
"loss": 3.2566, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 7.768665855636218e-07, |
|
"loss": 2.4111, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 7.740095118136007e-07, |
|
"loss": 2.8494, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 7.711543807613591e-07, |
|
"loss": 2.8341, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 7.683012169506597e-07, |
|
"loss": 2.634, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 7.654500449083525e-07, |
|
"loss": 2.96, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.626008891441671e-07, |
|
"loss": 3.1442, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.597537741504994e-07, |
|
"loss": 2.9849, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.569087244022025e-07, |
|
"loss": 2.635, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.540657643563755e-07, |
|
"loss": 2.9934, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 7.512249184521543e-07, |
|
"loss": 2.6708, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 7.483862111105002e-07, |
|
"loss": 3.8165, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 7.45549666733991e-07, |
|
"loss": 3.1894, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 7.42715309706611e-07, |
|
"loss": 2.7983, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 7.398831643935406e-07, |
|
"loss": 2.6877, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 7.370532551409485e-07, |
|
"loss": 3.1716, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 7.342256062757799e-07, |
|
"loss": 3.3096, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 7.314002421055505e-07, |
|
"loss": 2.7958, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 7.285771869181347e-07, |
|
"loss": 2.8987, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 7.257564649815591e-07, |
|
"loss": 2.7978, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 7.229381005437916e-07, |
|
"loss": 3.0605, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 7.20122117832535e-07, |
|
"loss": 3.1028, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 7.173085410550179e-07, |
|
"loss": 2.877, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 7.144973943977859e-07, |
|
"loss": 2.8236, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 7.116887020264956e-07, |
|
"loss": 2.9203, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 7.088824880857043e-07, |
|
"loss": 3.4529, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 7.060787766986646e-07, |
|
"loss": 3.0048, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 7.032775919671161e-07, |
|
"loss": 3.2652, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 7.004789579710786e-07, |
|
"loss": 2.9996, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 6.976828987686439e-07, |
|
"loss": 3.4158, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 6.948894383957708e-07, |
|
"loss": 3.1706, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 6.920986008660776e-07, |
|
"loss": 2.8606, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 6.89310410170635e-07, |
|
"loss": 2.836, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 6.865248902777614e-07, |
|
"loss": 2.9352, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 6.83742065132815e-07, |
|
"loss": 3.1582, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 6.8096195865799e-07, |
|
"loss": 2.963, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 6.78184594752109e-07, |
|
"loss": 2.8691, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 6.75409997290419e-07, |
|
"loss": 3.1777, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 6.726381901243856e-07, |
|
"loss": 3.0099, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 6.698691970814877e-07, |
|
"loss": 2.714, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 6.671030419650135e-07, |
|
"loss": 3.0297, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 6.643397485538542e-07, |
|
"loss": 2.8318, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 6.615793406023024e-07, |
|
"loss": 2.8463, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 6.588218418398447e-07, |
|
"loss": 2.9608, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 6.5606727597096e-07, |
|
"loss": 2.9318, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 6.533156666749149e-07, |
|
"loss": 3.0022, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 6.505670376055601e-07, |
|
"loss": 3.1164, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 6.478214123911267e-07, |
|
"loss": 2.3793, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 6.450788146340247e-07, |
|
"loss": 3.0168, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 6.423392679106384e-07, |
|
"loss": 2.9942, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 6.396027957711233e-07, |
|
"loss": 2.969, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 6.368694217392063e-07, |
|
"loss": 2.9078, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 6.341391693119806e-07, |
|
"loss": 2.9066, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 6.314120619597058e-07, |
|
"loss": 2.9284, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 6.286881231256046e-07, |
|
"loss": 2.9018, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 6.25967376225662e-07, |
|
"loss": 3.28, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 6.232498446484246e-07, |
|
"loss": 2.714, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 6.205355517547979e-07, |
|
"loss": 2.8722, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 6.178245208778476e-07, |
|
"loss": 3.2644, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 6.151167753225968e-07, |
|
"loss": 2.6016, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 6.124123383658275e-07, |
|
"loss": 2.742, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 6.097112332558798e-07, |
|
"loss": 3.1899, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 6.070134832124514e-07, |
|
"loss": 2.819, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 6.04319111426399e-07, |
|
"loss": 2.8713, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 6.016281410595389e-07, |
|
"loss": 2.8341, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 5.989405952444466e-07, |
|
"loss": 2.9872, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 5.962564970842599e-07, |
|
"loss": 2.4304, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 5.935758696524786e-07, |
|
"loss": 3.0583, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 5.908987359927673e-07, |
|
"loss": 2.7353, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 5.882251191187561e-07, |
|
"loss": 2.5321, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 5.855550420138447e-07, |
|
"loss": 2.7257, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 5.828885276310024e-07, |
|
"loss": 2.5636, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 5.802255988925728e-07, |
|
"loss": 3.05, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 5.775662786900757e-07, |
|
"loss": 3.0127, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 5.749105898840108e-07, |
|
"loss": 3.2594, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 5.722585553036605e-07, |
|
"loss": 2.7708, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 5.696101977468952e-07, |
|
"loss": 2.5777, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 5.669655399799746e-07, |
|
"loss": 2.6788, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 5.643246047373549e-07, |
|
"loss": 3.1388, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 5.616874147214918e-07, |
|
"loss": 2.9707, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 5.590539926026459e-07, |
|
"loss": 2.9439, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 5.564243610186863e-07, |
|
"loss": 2.5704, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 5.537985425748987e-07, |
|
"loss": 2.927, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 5.511765598437894e-07, |
|
"loss": 3.0026, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 5.485584353648911e-07, |
|
"loss": 2.8803, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 5.4594419164457e-07, |
|
"loss": 2.9977, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 5.433338511558308e-07, |
|
"loss": 2.8187, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 5.407274363381256e-07, |
|
"loss": 2.9185, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 5.3812496959716e-07, |
|
"loss": 3.0245, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 5.355264733047e-07, |
|
"loss": 2.7075, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 5.329319697983803e-07, |
|
"loss": 2.9338, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 5.303414813815119e-07, |
|
"loss": 2.7982, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 5.277550303228914e-07, |
|
"loss": 2.9188, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 5.251726388566079e-07, |
|
"loss": 3.248, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 5.225943291818536e-07, |
|
"loss": 3.2395, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 5.200201234627309e-07, |
|
"loss": 3.1639, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 5.174500438280638e-07, |
|
"loss": 3.0188, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 5.148841123712074e-07, |
|
"loss": 3.0131, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 5.123223511498569e-07, |
|
"loss": 3.1215, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 5.097647821858592e-07, |
|
"loss": 2.92, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 5.072114274650217e-07, |
|
"loss": 3.1004, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 5.04662308936926e-07, |
|
"loss": 2.9878, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 5.021174485147373e-07, |
|
"loss": 3.3471, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.995768680750167e-07, |
|
"loss": 2.8552, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.970405894575326e-07, |
|
"loss": 3.0868, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.945086344650735e-07, |
|
"loss": 3.0844, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.919810248632607e-07, |
|
"loss": 3.0211, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.894577823803609e-07, |
|
"loss": 2.5786, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.869389287070993e-07, |
|
"loss": 2.6768, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.844244854964728e-07, |
|
"loss": 2.5954, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.819144743635648e-07, |
|
"loss": 3.0756, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.794089168853592e-07, |
|
"loss": 3.2461, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.769078346005547e-07, |
|
"loss": 2.5552, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.7441124900937825e-07, |
|
"loss": 2.9708, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.7191918157340303e-07, |
|
"loss": 2.966, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.69431653715362e-07, |
|
"loss": 2.8693, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.6694868681896416e-07, |
|
"loss": 2.6341, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.644703022287111e-07, |
|
"loss": 2.8153, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.619965212497124e-07, |
|
"loss": 2.5606, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.5952736514750423e-07, |
|
"loss": 2.9817, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.5706285514786514e-07, |
|
"loss": 2.9009, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.5460301243663445e-07, |
|
"loss": 3.0052, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.521478581595288e-07, |
|
"loss": 3.0823, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.4969741342196253e-07, |
|
"loss": 3.1085, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.472516992888642e-07, |
|
"loss": 3.1711, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.4481073678449666e-07, |
|
"loss": 2.5299, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.423745468922764e-07, |
|
"loss": 2.8673, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.3994315055459164e-07, |
|
"loss": 2.9013, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.375165686726243e-07, |
|
"loss": 2.8833, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.3509482210616963e-07, |
|
"loss": 2.8531, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.3267793167345646e-07, |
|
"loss": 3.0665, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.302659181509677e-07, |
|
"loss": 3.0484, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.2785880227326375e-07, |
|
"loss": 2.7242, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.254566047328024e-07, |
|
"loss": 3.2122, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.230593461797619e-07, |
|
"loss": 2.8401, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.206670472218633e-07, |
|
"loss": 2.9026, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.18279728424192e-07, |
|
"loss": 2.8899, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.158974103090235e-07, |
|
"loss": 3.1934, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.135201133556446e-07, |
|
"loss": 3.2364, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.111478580001796e-07, |
|
"loss": 2.9236, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.087806646354114e-07, |
|
"loss": 2.705, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.0641855361060975e-07, |
|
"loss": 3.035, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.0406154523135426e-07, |
|
"loss": 2.8299, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.017096597593602e-07, |
|
"loss": 2.6854, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.993629174123051e-07, |
|
"loss": 2.9821, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.970213383636527e-07, |
|
"loss": 3.0786, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.946849427424828e-07, |
|
"loss": 2.7093, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.9235375063331556e-07, |
|
"loss": 3.0842, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.9002778207594053e-07, |
|
"loss": 2.6559, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.8770705706524286e-07, |
|
"loss": 2.8457, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.8539159555103273e-07, |
|
"loss": 2.7101, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.830814174378735e-07, |
|
"loss": 3.0098, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.807765425849101e-07, |
|
"loss": 3.128, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.784769908056992e-07, |
|
"loss": 2.8657, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.761827818680372e-07, |
|
"loss": 3.124, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.738939354937928e-07, |
|
"loss": 3.0589, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.7161047135873533e-07, |
|
"loss": 2.7311, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.693324090923672e-07, |
|
"loss": 2.7837, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.6705976827775297e-07, |
|
"loss": 2.6855, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.6479256845135365e-07, |
|
"loss": 2.8892, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.6253082910285706e-07, |
|
"loss": 2.6166, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.602745696750107e-07, |
|
"loss": 2.8943, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.58023809563455e-07, |
|
"loss": 2.9001, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.5577856811655535e-07, |
|
"loss": 2.8965, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.5353886463523753e-07, |
|
"loss": 2.8494, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.513047183728207e-07, |
|
"loss": 3.0546, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.4907614853485234e-07, |
|
"loss": 2.7969, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.468531742789419e-07, |
|
"loss": 2.5205, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.4463581471459824e-07, |
|
"loss": 3.0961, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.4242408890306383e-07, |
|
"loss": 2.8262, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.402180158571516e-07, |
|
"loss": 2.9059, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.38017614541081e-07, |
|
"loss": 3.1977, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.3582290387031465e-07, |
|
"loss": 3.2206, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.336339027113971e-07, |
|
"loss": 2.9875, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.314506298817917e-07, |
|
"loss": 2.8175, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.2927310414971897e-07, |
|
"loss": 2.889, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.271013442339944e-07, |
|
"loss": 2.7773, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.249353688038695e-07, |
|
"loss": 3.0413, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.2277519647886973e-07, |
|
"loss": 3.1636, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.2062084582863504e-07, |
|
"loss": 2.6525, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.184723353727602e-07, |
|
"loss": 3.2312, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.163296835806347e-07, |
|
"loss": 2.7514, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.1419290887128604e-07, |
|
"loss": 2.9884, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.1206202961321914e-07, |
|
"loss": 2.9558, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.0993706412426045e-07, |
|
"loss": 2.8835, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.078180306713981e-07, |
|
"loss": 2.8117, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.057049474706274e-07, |
|
"loss": 2.8151, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.035978326867925e-07, |
|
"loss": 3.0091, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.014967044334313e-07, |
|
"loss": 3.3044, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.9940158077261926e-07, |
|
"loss": 2.96, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.973124797148131e-07, |
|
"loss": 2.9509, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.952294192186981e-07, |
|
"loss": 2.8689, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 2.931524171910327e-07, |
|
"loss": 3.3529, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 2.9108149148649405e-07, |
|
"loss": 3.04, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 2.890166599075247e-07, |
|
"loss": 2.5053, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 2.8695794020418043e-07, |
|
"loss": 2.9255, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 2.849053500739769e-07, |
|
"loss": 3.0302, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 2.8285890716173764e-07, |
|
"loss": 2.7595, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 2.8081862905944276e-07, |
|
"loss": 2.98, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 2.7878453330607656e-07, |
|
"loss": 2.7559, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 2.767566373874781e-07, |
|
"loss": 2.8781, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 2.747349587361908e-07, |
|
"loss": 2.8463, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 2.7271951473131163e-07, |
|
"loss": 3.1037, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 2.7071032269834195e-07, |
|
"loss": 2.9907, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 2.687073999090395e-07, |
|
"loss": 2.7051, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 2.6671076358126874e-07, |
|
"loss": 3.0174, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 2.64720430878854e-07, |
|
"loss": 2.8523, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 2.627364189114312e-07, |
|
"loss": 3.0586, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 2.607587447342998e-07, |
|
"loss": 2.822, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.587874253482788e-07, |
|
"loss": 2.598, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.5682247769955814e-07, |
|
"loss": 2.8567, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.548639186795546e-07, |
|
"loss": 2.8933, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.529117651247649e-07, |
|
"loss": 2.8712, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.5096603381662286e-07, |
|
"loss": 3.0959, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.490267414813544e-07, |
|
"loss": 3.0927, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.470939047898331e-07, |
|
"loss": 2.6929, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 2.4516754035743794e-07, |
|
"loss": 3.072, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 2.4324766474390934e-07, |
|
"loss": 2.9567, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.4133429445320796e-07, |
|
"loss": 2.8914, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.3942744593337214e-07, |
|
"loss": 2.7085, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.3752713557637704e-07, |
|
"loss": 2.7944, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.3563337971799224e-07, |
|
"loss": 2.7055, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.3374619463764378e-07, |
|
"loss": 2.9498, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.3186559655827188e-07, |
|
"loss": 3.3654, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.2999160164619314e-07, |
|
"loss": 3.0295, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.281242260109607e-07, |
|
"loss": 2.9428, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.2626348570522548e-07, |
|
"loss": 2.9547, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 2.24409396724599e-07, |
|
"loss": 2.7902, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 2.2256197500751582e-07, |
|
"loss": 3.1329, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.207212364350961e-07, |
|
"loss": 2.8246, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.1888719683100853e-07, |
|
"loss": 2.9648, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.1705987196133601e-07, |
|
"loss": 3.1123, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.152392775344384e-07, |
|
"loss": 3.0232, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.1342542920081853e-07, |
|
"loss": 2.8359, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.116183425529875e-07, |
|
"loss": 2.8829, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.0981803312532953e-07, |
|
"loss": 2.6053, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.0802451639397046e-07, |
|
"loss": 2.4774, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.062378077766429e-07, |
|
"loss": 2.9414, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.0445792263255502e-07, |
|
"loss": 2.9821, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.0268487626225706e-07, |
|
"loss": 3.3888, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 2.009186839075111e-07, |
|
"loss": 2.516, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.991593607511597e-07, |
|
"loss": 2.9193, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.9740692191699516e-07, |
|
"loss": 3.0875, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.9566138246962959e-07, |
|
"loss": 3.0756, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.9392275741436502e-07, |
|
"loss": 3.0378, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.9219106169706533e-07, |
|
"loss": 2.5343, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.9046631020402704e-07, |
|
"loss": 2.7928, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.8874851776185197e-07, |
|
"loss": 2.9793, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.8703769913731826e-07, |
|
"loss": 2.6558, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.8533386903725578e-07, |
|
"loss": 3.0308, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.836370421084177e-07, |
|
"loss": 3.4049, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.819472329373557e-07, |
|
"loss": 2.8375, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.802644560502944e-07, |
|
"loss": 3.0175, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.7858872591300535e-07, |
|
"loss": 2.4958, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 1.7692005693068456e-07, |
|
"loss": 2.8912, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 1.7525846344782757e-07, |
|
"loss": 3.0004, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.736039597481064e-07, |
|
"loss": 2.603, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.7195656005424598e-07, |
|
"loss": 3.417, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.703162785279033e-07, |
|
"loss": 3.0827, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.6868312926954486e-07, |
|
"loss": 2.6225, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.6705712631832535e-07, |
|
"loss": 2.8418, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.6543828365196742e-07, |
|
"loss": 3.3666, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.6382661518664065e-07, |
|
"loss": 3.0181, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.622221347768432e-07, |
|
"loss": 2.8163, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.6062485621528177e-07, |
|
"loss": 2.9662, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.590347932327537e-07, |
|
"loss": 2.5777, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.574519594980276e-07, |
|
"loss": 3.0153, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.5587636861772768e-07, |
|
"loss": 2.7245, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.5430803413621563e-07, |
|
"loss": 2.8319, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.5274696953547428e-07, |
|
"loss": 3.284, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.511931882349925e-07, |
|
"loss": 2.7292, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.4964670359164812e-07, |
|
"loss": 2.9275, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.481075288995951e-07, |
|
"loss": 3.1254, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.4657567739014808e-07, |
|
"loss": 2.9749, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.4505116223166925e-07, |
|
"loss": 2.8288, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.4353399652945386e-07, |
|
"loss": 2.8366, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.4202419332561944e-07, |
|
"loss": 2.8199, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.4052176559899254e-07, |
|
"loss": 2.8539, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.3902672626499723e-07, |
|
"loss": 3.0421, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.3753908817554461e-07, |
|
"loss": 3.4148, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.360588641189211e-07, |
|
"loss": 2.8371, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.345860668196801e-07, |
|
"loss": 3.0241, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.3312070893853177e-07, |
|
"loss": 2.8555, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.3166280307223442e-07, |
|
"loss": 2.8055, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.3021236175348516e-07, |
|
"loss": 3.0729, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.2876939745081417e-07, |
|
"loss": 3.0171, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.2733392256847597e-07, |
|
"loss": 2.8916, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.2590594944634337e-07, |
|
"loss": 2.6823, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.2448549035980138e-07, |
|
"loss": 3.4575, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.2307255751964086e-07, |
|
"loss": 2.9535, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.2166716307195525e-07, |
|
"loss": 3.0804, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.2026931909803472e-07, |
|
"loss": 2.9216, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.1887903761426288e-07, |
|
"loss": 2.6407, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.1749633057201313e-07, |
|
"loss": 2.966, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.1612120985754636e-07, |
|
"loss": 2.8072, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.147536872919087e-07, |
|
"loss": 2.6742, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.1339377463082967e-07, |
|
"loss": 3.0176, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.120414835646214e-07, |
|
"loss": 2.7162, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.1069682571807737e-07, |
|
"loss": 2.6895, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.0935981265037375e-07, |
|
"loss": 2.947, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.0803045585496906e-07, |
|
"loss": 2.7927, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.0670876675950613e-07, |
|
"loss": 2.9494, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.053947567257124e-07, |
|
"loss": 2.7982, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.0408843704930426e-07, |
|
"loss": 2.8976, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.0278981895988848e-07, |
|
"loss": 2.971, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.0149891362086627e-07, |
|
"loss": 2.7668, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.002157321293372e-07, |
|
"loss": 2.8169, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 9.894028551600352e-08, |
|
"loss": 3.0686, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 9.7672584745076e-08, |
|
"loss": 2.9252, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 9.641264071417899e-08, |
|
"loss": 3.0884, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 9.516046425425739e-08, |
|
"loss": 2.6837, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 9.391606612948256e-08, |
|
"loss": 2.6525, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 9.267945703716096e-08, |
|
"loss": 3.1154, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 9.145064760764166e-08, |
|
"loss": 2.949, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 9.022964840422453e-08, |
|
"loss": 2.5715, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 8.901646992307054e-08, |
|
"loss": 3.3559, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 8.781112259310986e-08, |
|
"loss": 2.5052, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 8.661361677595391e-08, |
|
"loss": 3.1308, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 8.542396276580543e-08, |
|
"loss": 2.8567, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 8.424217078937035e-08, |
|
"loss": 2.6532, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 8.306825100576886e-08, |
|
"loss": 2.9247, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 8.190221350644966e-08, |
|
"loss": 3.0211, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 8.074406831510206e-08, |
|
"loss": 2.7514, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 7.959382538757043e-08, |
|
"loss": 3.3172, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 7.845149461176814e-08, |
|
"loss": 2.7682, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 7.731708580759267e-08, |
|
"loss": 3.3666, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 7.619060872684157e-08, |
|
"loss": 2.9903, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 7.507207305312835e-08, |
|
"loss": 2.9921, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 7.396148840179894e-08, |
|
"loss": 2.6736, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 7.285886431984978e-08, |
|
"loss": 3.33, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 7.1764210285845e-08, |
|
"loss": 3.1014, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 7.067753570983526e-08, |
|
"loss": 2.7043, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 6.959884993327725e-08, |
|
"loss": 3.1317, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 6.85281622289523e-08, |
|
"loss": 3.0594, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 6.746548180088785e-08, |
|
"loss": 2.6525, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.64108177842777e-08, |
|
"loss": 2.6303, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.536417924540371e-08, |
|
"loss": 2.9763, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.432557518155768e-08, |
|
"loss": 2.8125, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.329501452096397e-08, |
|
"loss": 3.151, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.227250612270297e-08, |
|
"loss": 2.8432, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 6.125805877663493e-08, |
|
"loss": 2.8693, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 6.025168120332447e-08, |
|
"loss": 2.6283, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 5.925338205396491e-08, |
|
"loss": 2.8447, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 5.8263169910304774e-08, |
|
"loss": 2.9181, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 5.7281053284574e-08, |
|
"loss": 2.8213, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 5.630704061940994e-08, |
|
"loss": 2.8951, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 5.53411402877858e-08, |
|
"loss": 2.9127, |
|
"step": 1988 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 5.4383360592937575e-08, |
|
"loss": 2.876, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 5.343370976829364e-08, |
|
"loss": 2.5429, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 5.249219597740373e-08, |
|
"loss": 3.3362, |
|
"step": 1994 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 5.15588273138684e-08, |
|
"loss": 2.6231, |
|
"step": 1996 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 5.063361180126968e-08, |
|
"loss": 3.0893, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 4.971655739310199e-08, |
|
"loss": 2.8763, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.88076719727043e-08, |
|
"loss": 3.0002, |
|
"step": 2002 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.790696335319144e-08, |
|
"loss": 2.9231, |
|
"step": 2004 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 4.701443927738802e-08, |
|
"loss": 3.2316, |
|
"step": 2006 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 4.6130107417760575e-08, |
|
"loss": 3.0944, |
|
"step": 2008 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 4.5253975376352896e-08, |
|
"loss": 3.0625, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.438605068471979e-08, |
|
"loss": 3.0791, |
|
"step": 2012 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.352634080386286e-08, |
|
"loss": 2.6873, |
|
"step": 2014 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.2674853124165964e-08, |
|
"loss": 2.9121, |
|
"step": 2016 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.1831594965331954e-08, |
|
"loss": 2.7681, |
|
"step": 2018 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 4.0996573576319695e-08, |
|
"loss": 2.6219, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 4.016979613528182e-08, |
|
"loss": 3.0929, |
|
"step": 2022 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 3.935126974950298e-08, |
|
"loss": 2.8858, |
|
"step": 2024 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 3.854100145533823e-08, |
|
"loss": 2.8813, |
|
"step": 2026 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.773899821815363e-08, |
|
"loss": 3.0035, |
|
"step": 2028 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.694526693226541e-08, |
|
"loss": 2.7125, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.615981442088112e-08, |
|
"loss": 2.5067, |
|
"step": 2032 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 3.538264743604091e-08, |
|
"loss": 3.2893, |
|
"step": 2034 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 3.461377265855947e-08, |
|
"loss": 2.7156, |
|
"step": 2036 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.38531966979686e-08, |
|
"loss": 3.154, |
|
"step": 2038 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.310092609246051e-08, |
|
"loss": 2.4774, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 3.235696730883142e-08, |
|
"loss": 2.7025, |
|
"step": 2042 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 3.16213267424259e-08, |
|
"loss": 2.996, |
|
"step": 2044 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 3.089401071708242e-08, |
|
"loss": 3.0237, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 3.017502548507822e-08, |
|
"loss": 2.9128, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.9464377227076288e-08, |
|
"loss": 3.0615, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.8762072052071707e-08, |
|
"loss": 2.9308, |
|
"step": 2052 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.806811599733938e-08, |
|
"loss": 2.8216, |
|
"step": 2054 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.738251502838207e-08, |
|
"loss": 3.1345, |
|
"step": 2056 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.6705275038879095e-08, |
|
"loss": 2.8343, |
|
"step": 2058 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.6036401850636048e-08, |
|
"loss": 3.019, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.5375901213533836e-08, |
|
"loss": 2.7687, |
|
"step": 2062 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.4723778805480157e-08, |
|
"loss": 2.7443, |
|
"step": 2064 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.4080040232360322e-08, |
|
"loss": 2.9871, |
|
"step": 2066 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.3444691027989185e-08, |
|
"loss": 2.9659, |
|
"step": 2068 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.281773665406328e-08, |
|
"loss": 3.0672, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.21991825001141e-08, |
|
"loss": 3.3005, |
|
"step": 2072 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.1589033883462004e-08, |
|
"loss": 2.7875, |
|
"step": 2074 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.0987296049169935e-08, |
|
"loss": 2.6553, |
|
"step": 2076 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.0393974169998774e-08, |
|
"loss": 3.1247, |
|
"step": 2078 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.9809073346362836e-08, |
|
"loss": 3.0646, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.9232598606285565e-08, |
|
"loss": 2.5444, |
|
"step": 2082 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.8664554905357233e-08, |
|
"loss": 2.9547, |
|
"step": 2084 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.8104947126691418e-08, |
|
"loss": 2.9829, |
|
"step": 2086 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.7553780080883374e-08, |
|
"loss": 2.8674, |
|
"step": 2088 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.7011058505968733e-08, |
|
"loss": 2.8069, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.6476787067382868e-08, |
|
"loss": 2.9211, |
|
"step": 2092 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.595097035792059e-08, |
|
"loss": 3.0826, |
|
"step": 2094 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.5433612897696734e-08, |
|
"loss": 2.875, |
|
"step": 2096 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.49247191341072e-08, |
|
"loss": 2.4676, |
|
"step": 2098 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.442429344179097e-08, |
|
"loss": 3.0209, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.3932340122592256e-08, |
|
"loss": 2.6976, |
|
"step": 2102 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.3448863405523647e-08, |
|
"loss": 2.9361, |
|
"step": 2104 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.2973867446729792e-08, |
|
"loss": 3.0679, |
|
"step": 2106 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.2507356329451435e-08, |
|
"loss": 3.0505, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.2049334063990669e-08, |
|
"loss": 2.9075, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.1599804587676288e-08, |
|
"loss": 2.7679, |
|
"step": 2112 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.115877176482971e-08, |
|
"loss": 2.8252, |
|
"step": 2114 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.0726239386732228e-08, |
|
"loss": 2.8929, |
|
"step": 2116 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.0302211171592023e-08, |
|
"loss": 3.2954, |
|
"step": 2118 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 9.886690764512317e-09, |
|
"loss": 2.6366, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 9.479681737460165e-09, |
|
"loss": 3.0659, |
|
"step": 2122 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 9.081187589235706e-09, |
|
"loss": 2.9652, |
|
"step": 2124 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 8.691211745441962e-09, |
|
"loss": 3.0253, |
|
"step": 2126 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 8.30975755845531e-09, |
|
"loss": 3.0266, |
|
"step": 2128 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 7.93682830739706e-09, |
|
"loss": 2.5579, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 7.572427198104913e-09, |
|
"loss": 2.8221, |
|
"step": 2132 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 7.2165573631054465e-09, |
|
"loss": 3.2047, |
|
"step": 2134 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 6.869221861587338e-09, |
|
"loss": 2.5362, |
|
"step": 2136 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 6.5304236793750635e-09, |
|
"loss": 2.6583, |
|
"step": 2138 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 6.200165728903028e-09, |
|
"loss": 3.0776, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 5.878450849190475e-09, |
|
"loss": 2.7981, |
|
"step": 2142 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 5.565281805817612e-09, |
|
"loss": 3.1398, |
|
"step": 2144 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 5.260661290901302e-09, |
|
"loss": 3.0285, |
|
"step": 2146 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 4.9645919230719705e-09, |
|
"loss": 2.759, |
|
"step": 2148 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 4.677076247451617e-09, |
|
"loss": 2.658, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 4.398116735630952e-09, |
|
"loss": 2.9268, |
|
"step": 2152 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 4.127715785649299e-09, |
|
"loss": 2.9271, |
|
"step": 2154 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 3.865875721973055e-09, |
|
"loss": 2.883, |
|
"step": 2156 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 3.6125987954762627e-09, |
|
"loss": 2.7262, |
|
"step": 2158 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 3.36788718342107e-09, |
|
"loss": 2.605, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 3.131742989438857e-09, |
|
"loss": 2.6757, |
|
"step": 2162 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.9041682435123614e-09, |
|
"loss": 2.8687, |
|
"step": 2164 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.685164901958359e-09, |
|
"loss": 3.5604, |
|
"step": 2166 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.4747348474105645e-09, |
|
"loss": 2.8295, |
|
"step": 2168 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 2.272879888803425e-09, |
|
"loss": 2.9471, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 2.079601761356797e-09, |
|
"loss": 2.7717, |
|
"step": 2172 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.894902126560849e-09, |
|
"loss": 2.8575, |
|
"step": 2174 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.7187825721620696e-09, |
|
"loss": 3.1, |
|
"step": 2176 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.5512446121490607e-09, |
|
"loss": 3.0135, |
|
"step": 2178 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.3922896867403221e-09, |
|
"loss": 2.5842, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.2419191623709302e-09, |
|
"loss": 3.0003, |
|
"step": 2182 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.100134331681657e-09, |
|
"loss": 2.673, |
|
"step": 2184 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 9.669364135072023e-10, |
|
"loss": 2.9576, |
|
"step": 2186 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 8.423265528660905e-10, |
|
"loss": 2.7433, |
|
"step": 2188 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 7.26305820950901e-10, |
|
"loss": 2.8567, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 6.188752151186083e-10, |
|
"loss": 2.8922, |
|
"step": 2192 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 5.200356588824784e-10, |
|
"loss": 2.8813, |
|
"step": 2194 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 4.2978800190385245e-10, |
|
"loss": 2.6901, |
|
"step": 2196 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 3.481330199849308e-10, |
|
"loss": 2.8516, |
|
"step": 2198 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.750714150621114e-10, |
|
"loss": 2.5775, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.1060381519999448e-10, |
|
"loss": 3.1494, |
|
"step": 2202 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.5473077458594276e-10, |
|
"loss": 2.6535, |
|
"step": 2204 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 1.0745277352519622e-10, |
|
"loss": 2.9943, |
|
"step": 2206 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 6.87702184369865e-11, |
|
"loss": 3.0382, |
|
"step": 2208 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 3.868344185087302e-11, |
|
"loss": 2.9406, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"step": 2210, |
|
"total_flos": 4.82041270370304e+16, |
|
"train_loss": 3.4191648677463444, |
|
"train_runtime": 16036.3326, |
|
"train_samples_per_second": 8.823, |
|
"train_steps_per_second": 0.138 |
|
} |
|
], |
|
"logging_steps": 2, |
|
"max_steps": 2210, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 8000, |
|
"total_flos": 4.82041270370304e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
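
Note: the object above is the trainer state that the Hugging Face transformers Trainer writes out alongside a checkpoint. Below is a minimal sketch of how such a file can be consumed, assuming it is saved as "trainer_state.json" (the filename and path are conventional assumptions, not taken from this log):

# Minimal sketch for reading this trainer state. Assumes the JSON above is
# saved as "trainer_state.json" (hypothetical path; adjust to your checkpoint).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry "loss" and "learning_rate"; the final record is the
# end-of-training summary ("train_loss", "train_runtime", ...).
logs = [e for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]

tail = [e["loss"] for e in logs[-20:]]
print(f"logged records: {len(logs)} (steps {logs[0]['step']}..{logs[-1]['step']})")
print(f"mean loss over last {len(tail)} logs: {sum(tail) / len(tail):.4f}")
print(f"reported train_loss: {summary.get('train_loss')}")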