|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9987878787878788, |
|
"eval_steps": 123, |
|
"global_step": 618, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0016161616161616162, |
|
"grad_norm": 32.0, |
|
"learning_rate": 1.6129032258064518e-07, |
|
"loss": 3.549, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0032323232323232323, |
|
"grad_norm": 31.5, |
|
"learning_rate": 3.2258064516129035e-07, |
|
"loss": 3.6934, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0048484848484848485, |
|
"grad_norm": 31.875, |
|
"learning_rate": 4.838709677419355e-07, |
|
"loss": 3.4569, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.006464646464646465, |
|
"grad_norm": 31.0, |
|
"learning_rate": 6.451612903225807e-07, |
|
"loss": 3.6778, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.00808080808080808, |
|
"grad_norm": 30.625, |
|
"learning_rate": 8.064516129032258e-07, |
|
"loss": 3.4522, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.009696969696969697, |
|
"grad_norm": 30.75, |
|
"learning_rate": 9.67741935483871e-07, |
|
"loss": 3.5121, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.011313131313131313, |
|
"grad_norm": 31.0, |
|
"learning_rate": 1.1290322580645162e-06, |
|
"loss": 3.4922, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01292929292929293, |
|
"grad_norm": 30.5, |
|
"learning_rate": 1.2903225806451614e-06, |
|
"loss": 3.4436, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.014545454545454545, |
|
"grad_norm": 30.625, |
|
"learning_rate": 1.4516129032258066e-06, |
|
"loss": 3.3278, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01616161616161616, |
|
"grad_norm": 33.0, |
|
"learning_rate": 1.6129032258064516e-06, |
|
"loss": 3.5215, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.017777777777777778, |
|
"grad_norm": 29.25, |
|
"learning_rate": 1.774193548387097e-06, |
|
"loss": 3.2032, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.019393939393939394, |
|
"grad_norm": 27.875, |
|
"learning_rate": 1.935483870967742e-06, |
|
"loss": 3.3045, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02101010101010101, |
|
"grad_norm": 27.625, |
|
"learning_rate": 2.096774193548387e-06, |
|
"loss": 3.137, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.022626262626262626, |
|
"grad_norm": 25.625, |
|
"learning_rate": 2.2580645161290324e-06, |
|
"loss": 3.4479, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.024242424242424242, |
|
"grad_norm": 24.25, |
|
"learning_rate": 2.4193548387096776e-06, |
|
"loss": 3.1882, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02585858585858586, |
|
"grad_norm": 22.5, |
|
"learning_rate": 2.580645161290323e-06, |
|
"loss": 3.2887, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.027474747474747475, |
|
"grad_norm": 20.75, |
|
"learning_rate": 2.7419354838709676e-06, |
|
"loss": 3.1212, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02909090909090909, |
|
"grad_norm": 19.625, |
|
"learning_rate": 2.903225806451613e-06, |
|
"loss": 3.0184, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.030707070707070707, |
|
"grad_norm": 18.375, |
|
"learning_rate": 3.0645161290322584e-06, |
|
"loss": 3.2399, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03232323232323232, |
|
"grad_norm": 18.375, |
|
"learning_rate": 3.225806451612903e-06, |
|
"loss": 2.9449, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03393939393939394, |
|
"grad_norm": 17.125, |
|
"learning_rate": 3.3870967741935484e-06, |
|
"loss": 2.806, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.035555555555555556, |
|
"grad_norm": 16.25, |
|
"learning_rate": 3.548387096774194e-06, |
|
"loss": 2.6531, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.037171717171717175, |
|
"grad_norm": 14.3125, |
|
"learning_rate": 3.7096774193548392e-06, |
|
"loss": 2.7287, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03878787878787879, |
|
"grad_norm": 14.625, |
|
"learning_rate": 3.870967741935484e-06, |
|
"loss": 2.4542, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04040404040404041, |
|
"grad_norm": 13.375, |
|
"learning_rate": 4.032258064516129e-06, |
|
"loss": 2.2326, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04202020202020202, |
|
"grad_norm": 12.5625, |
|
"learning_rate": 4.193548387096774e-06, |
|
"loss": 2.5558, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04363636363636364, |
|
"grad_norm": 11.1875, |
|
"learning_rate": 4.35483870967742e-06, |
|
"loss": 2.2653, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.04525252525252525, |
|
"grad_norm": 10.625, |
|
"learning_rate": 4.516129032258065e-06, |
|
"loss": 2.2043, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.04686868686868687, |
|
"grad_norm": 10.125, |
|
"learning_rate": 4.67741935483871e-06, |
|
"loss": 2.1746, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.048484848484848485, |
|
"grad_norm": 9.8125, |
|
"learning_rate": 4.838709677419355e-06, |
|
"loss": 2.1539, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.050101010101010104, |
|
"grad_norm": 8.1875, |
|
"learning_rate": 5e-06, |
|
"loss": 2.0672, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.05171717171717172, |
|
"grad_norm": 7.78125, |
|
"learning_rate": 5.161290322580646e-06, |
|
"loss": 1.7951, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05333333333333334, |
|
"grad_norm": 7.3125, |
|
"learning_rate": 5.322580645161291e-06, |
|
"loss": 1.862, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.05494949494949495, |
|
"grad_norm": 7.03125, |
|
"learning_rate": 5.483870967741935e-06, |
|
"loss": 1.6652, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.05656565656565657, |
|
"grad_norm": 6.3125, |
|
"learning_rate": 5.645161290322582e-06, |
|
"loss": 1.8771, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05818181818181818, |
|
"grad_norm": 6.84375, |
|
"learning_rate": 5.806451612903226e-06, |
|
"loss": 1.5247, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.0597979797979798, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 5.967741935483872e-06, |
|
"loss": 1.5542, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.061414141414141414, |
|
"grad_norm": 5.5625, |
|
"learning_rate": 6.129032258064517e-06, |
|
"loss": 1.7111, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.06303030303030303, |
|
"grad_norm": 5.25, |
|
"learning_rate": 6.290322580645162e-06, |
|
"loss": 1.6834, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.06464646464646465, |
|
"grad_norm": 4.90625, |
|
"learning_rate": 6.451612903225806e-06, |
|
"loss": 1.4801, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06626262626262626, |
|
"grad_norm": 5.1875, |
|
"learning_rate": 6.612903225806452e-06, |
|
"loss": 1.5409, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.06787878787878789, |
|
"grad_norm": 4.6875, |
|
"learning_rate": 6.774193548387097e-06, |
|
"loss": 1.4271, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.0694949494949495, |
|
"grad_norm": 4.8125, |
|
"learning_rate": 6.935483870967743e-06, |
|
"loss": 1.3182, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07111111111111111, |
|
"grad_norm": 4.59375, |
|
"learning_rate": 7.096774193548388e-06, |
|
"loss": 1.5283, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.07272727272727272, |
|
"grad_norm": 4.3125, |
|
"learning_rate": 7.258064516129033e-06, |
|
"loss": 1.6482, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.07434343434343435, |
|
"grad_norm": 4.71875, |
|
"learning_rate": 7.4193548387096784e-06, |
|
"loss": 1.3221, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.07595959595959596, |
|
"grad_norm": 4.21875, |
|
"learning_rate": 7.580645161290323e-06, |
|
"loss": 1.5536, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.07757575757575758, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 7.741935483870968e-06, |
|
"loss": 1.6106, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.07919191919191919, |
|
"grad_norm": 4.375, |
|
"learning_rate": 7.903225806451613e-06, |
|
"loss": 1.5371, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.08080808080808081, |
|
"grad_norm": 4.46875, |
|
"learning_rate": 8.064516129032258e-06, |
|
"loss": 1.4707, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08242424242424243, |
|
"grad_norm": 4.09375, |
|
"learning_rate": 8.225806451612904e-06, |
|
"loss": 1.3272, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.08404040404040404, |
|
"grad_norm": 4.1875, |
|
"learning_rate": 8.387096774193549e-06, |
|
"loss": 1.3188, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.08565656565656565, |
|
"grad_norm": 4.1875, |
|
"learning_rate": 8.548387096774194e-06, |
|
"loss": 1.3743, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.08727272727272728, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 8.70967741935484e-06, |
|
"loss": 1.2833, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.08888888888888889, |
|
"grad_norm": 4.625, |
|
"learning_rate": 8.870967741935484e-06, |
|
"loss": 1.4198, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.0905050505050505, |
|
"grad_norm": 4.75, |
|
"learning_rate": 9.03225806451613e-06, |
|
"loss": 1.0967, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.09212121212121212, |
|
"grad_norm": 4.84375, |
|
"learning_rate": 9.193548387096775e-06, |
|
"loss": 1.6383, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.09373737373737374, |
|
"grad_norm": 4.15625, |
|
"learning_rate": 9.35483870967742e-06, |
|
"loss": 1.5197, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.09535353535353536, |
|
"grad_norm": 4.25, |
|
"learning_rate": 9.516129032258065e-06, |
|
"loss": 0.993, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.09696969696969697, |
|
"grad_norm": 4.28125, |
|
"learning_rate": 9.67741935483871e-06, |
|
"loss": 1.2466, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.09858585858585858, |
|
"grad_norm": 4.125, |
|
"learning_rate": 9.838709677419356e-06, |
|
"loss": 1.4099, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.10020202020202021, |
|
"grad_norm": 3.8125, |
|
"learning_rate": 1e-05, |
|
"loss": 1.1905, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.10181818181818182, |
|
"grad_norm": 3.546875, |
|
"learning_rate": 9.999920184173449e-06, |
|
"loss": 1.2693, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.10343434343434343, |
|
"grad_norm": 3.828125, |
|
"learning_rate": 9.999680739242022e-06, |
|
"loss": 1.3024, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.10505050505050505, |
|
"grad_norm": 3.984375, |
|
"learning_rate": 9.999281672850317e-06, |
|
"loss": 1.2561, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.10666666666666667, |
|
"grad_norm": 4.09375, |
|
"learning_rate": 9.99872299773906e-06, |
|
"loss": 1.0563, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.10828282828282829, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 9.998004731744696e-06, |
|
"loss": 1.0479, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.1098989898989899, |
|
"grad_norm": 3.421875, |
|
"learning_rate": 9.997126897798826e-06, |
|
"loss": 1.0836, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.11151515151515151, |
|
"grad_norm": 3.390625, |
|
"learning_rate": 9.996089523927461e-06, |
|
"loss": 1.0356, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.11313131313131314, |
|
"grad_norm": 3.71875, |
|
"learning_rate": 9.994892643250147e-06, |
|
"loss": 1.4207, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.11474747474747475, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 9.993536293978892e-06, |
|
"loss": 0.9769, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.11636363636363636, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 9.99202051941695e-06, |
|
"loss": 1.1503, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.11797979797979798, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 9.99034536795744e-06, |
|
"loss": 1.135, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.1195959595959596, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 9.9885108930818e-06, |
|
"loss": 1.0679, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 3.390625, |
|
"learning_rate": 9.986517153358086e-06, |
|
"loss": 0.907, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.12282828282828283, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 9.984364212439089e-06, |
|
"loss": 1.1378, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.12444444444444444, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 9.982052139060312e-06, |
|
"loss": 0.9528, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.12606060606060607, |
|
"grad_norm": 3.328125, |
|
"learning_rate": 9.979581007037776e-06, |
|
"loss": 1.1584, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.12767676767676767, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 9.976950895265657e-06, |
|
"loss": 1.1824, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1292929292929293, |
|
"grad_norm": 3.78125, |
|
"learning_rate": 9.974161887713775e-06, |
|
"loss": 1.2958, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.13090909090909092, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 9.971214073424906e-06, |
|
"loss": 1.6086, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.13252525252525252, |
|
"grad_norm": 3.421875, |
|
"learning_rate": 9.968107546511942e-06, |
|
"loss": 1.2882, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.13414141414141414, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 9.964842406154892e-06, |
|
"loss": 1.0021, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.13575757575757577, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 9.961418756597703e-06, |
|
"loss": 0.8922, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.13737373737373737, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 9.957836707144943e-06, |
|
"loss": 1.0149, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.138989898989899, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 9.95409637215831e-06, |
|
"loss": 1.1325, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1406060606060606, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 9.950197871052974e-06, |
|
"loss": 0.7177, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.14222222222222222, |
|
"grad_norm": 3.125, |
|
"learning_rate": 9.94614132829377e-06, |
|
"loss": 1.1854, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.14383838383838385, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 9.941926873391223e-06, |
|
"loss": 0.8619, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.14545454545454545, |
|
"grad_norm": 3.375, |
|
"learning_rate": 9.937554640897414e-06, |
|
"loss": 1.186, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.14707070707070707, |
|
"grad_norm": 3.421875, |
|
"learning_rate": 9.933024770401682e-06, |
|
"loss": 1.0314, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.1486868686868687, |
|
"grad_norm": 3.25, |
|
"learning_rate": 9.928337406526172e-06, |
|
"loss": 1.0889, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.1503030303030303, |
|
"grad_norm": 2.875, |
|
"learning_rate": 9.923492698921214e-06, |
|
"loss": 0.769, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.15191919191919193, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 9.918490802260538e-06, |
|
"loss": 1.27, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.15353535353535352, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 9.913331876236358e-06, |
|
"loss": 1.2402, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.15515151515151515, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 9.90801608555425e-06, |
|
"loss": 1.0972, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.15676767676767678, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 9.902543599927903e-06, |
|
"loss": 0.8044, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.15838383838383838, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 9.896914594073703e-06, |
|
"loss": 0.951, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 9.891129247705153e-06, |
|
"loss": 1.0259, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.16161616161616163, |
|
"grad_norm": 3.59375, |
|
"learning_rate": 9.885187745527132e-06, |
|
"loss": 0.9311, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.16323232323232323, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 9.879090277230005e-06, |
|
"loss": 1.0281, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.16484848484848486, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 9.87283703748356e-06, |
|
"loss": 1.2046, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.16646464646464645, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 9.866428225930798e-06, |
|
"loss": 0.9979, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.16808080808080808, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 9.859864047181551e-06, |
|
"loss": 1.057, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1696969696969697, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 9.853144710805966e-06, |
|
"loss": 1.1412, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1713131313131313, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 9.846270431327793e-06, |
|
"loss": 1.0381, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.17292929292929293, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 9.839241428217553e-06, |
|
"loss": 0.8583, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.17454545454545456, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 9.832057925885526e-06, |
|
"loss": 1.044, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.17616161616161616, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 9.824720153674578e-06, |
|
"loss": 0.9641, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.17777777777777778, |
|
"grad_norm": 3.328125, |
|
"learning_rate": 9.817228345852853e-06, |
|
"loss": 1.0769, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.17939393939393938, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 9.809582741606283e-06, |
|
"loss": 0.9801, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.181010101010101, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 9.801783585030959e-06, |
|
"loss": 0.968, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.18262626262626264, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 9.79383112512533e-06, |
|
"loss": 1.5455, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.18424242424242424, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 9.785725615782262e-06, |
|
"loss": 0.7543, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.18585858585858586, |
|
"grad_norm": 3.328125, |
|
"learning_rate": 9.777467315780926e-06, |
|
"loss": 1.1363, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.1874747474747475, |
|
"grad_norm": 3.296875, |
|
"learning_rate": 9.769056488778538e-06, |
|
"loss": 0.9436, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.1890909090909091, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 9.760493403301941e-06, |
|
"loss": 1.1983, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.1907070707070707, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 9.751778332739033e-06, |
|
"loss": 0.7469, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.1923232323232323, |
|
"grad_norm": 3.375, |
|
"learning_rate": 9.74291155533004e-06, |
|
"loss": 1.2048, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.19393939393939394, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 9.733893354158628e-06, |
|
"loss": 1.0123, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.19555555555555557, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 9.724724017142869e-06, |
|
"loss": 0.9488, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.19717171717171716, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 9.715403837026046e-06, |
|
"loss": 1.0733, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1987878787878788, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 9.705933111367314e-06, |
|
"loss": 1.0328, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.1987878787878788, |
|
"eval_loss": 0.9794487953186035, |
|
"eval_runtime": 34.0835, |
|
"eval_samples_per_second": 32.274, |
|
"eval_steps_per_second": 4.049, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.20040404040404042, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 9.69631214253219e-06, |
|
"loss": 1.2002, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.20202020202020202, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 9.68654123768291e-06, |
|
"loss": 0.8144, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.20363636363636364, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 9.676620708768608e-06, |
|
"loss": 0.9787, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.20525252525252524, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 9.666550872515367e-06, |
|
"loss": 1.0589, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.20686868686868687, |
|
"grad_norm": 3.6875, |
|
"learning_rate": 9.656332050416118e-06, |
|
"loss": 1.1565, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.2084848484848485, |
|
"grad_norm": 3.640625, |
|
"learning_rate": 9.645964568720345e-06, |
|
"loss": 0.9859, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.2101010101010101, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 9.635448758423703e-06, |
|
"loss": 0.8587, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.21171717171717172, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 9.624784955257423e-06, |
|
"loss": 1.0057, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.21333333333333335, |
|
"grad_norm": 3.328125, |
|
"learning_rate": 9.613973499677613e-06, |
|
"loss": 1.1439, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.21494949494949495, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 9.60301473685438e-06, |
|
"loss": 0.9306, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.21656565656565657, |
|
"grad_norm": 3.125, |
|
"learning_rate": 9.591909016660806e-06, |
|
"loss": 0.9439, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.21818181818181817, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 9.580656693661787e-06, |
|
"loss": 0.9083, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.2197979797979798, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 9.569258127102708e-06, |
|
"loss": 0.8975, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.22141414141414142, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 9.55771368089797e-06, |
|
"loss": 1.0099, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.22303030303030302, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 9.546023723619387e-06, |
|
"loss": 0.8804, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.22464646464646465, |
|
"grad_norm": 2.75, |
|
"learning_rate": 9.534188628484391e-06, |
|
"loss": 1.2507, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.22626262626262628, |
|
"grad_norm": 2.875, |
|
"learning_rate": 9.522208773344147e-06, |
|
"loss": 1.1579, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.22787878787878788, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 9.510084540671471e-06, |
|
"loss": 1.0875, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2294949494949495, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 9.497816317548625e-06, |
|
"loss": 1.2898, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.2311111111111111, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 9.485404495654953e-06, |
|
"loss": 0.8103, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.23272727272727273, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 9.472849471254386e-06, |
|
"loss": 0.7465, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.23434343434343435, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 9.460151645182784e-06, |
|
"loss": 1.2381, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.23595959595959595, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 9.447311422835141e-06, |
|
"loss": 0.9853, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.23757575757575758, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 9.43432921415264e-06, |
|
"loss": 1.1108, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.2391919191919192, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 9.421205433609568e-06, |
|
"loss": 0.7897, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.2408080808080808, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 9.407940500200082e-06, |
|
"loss": 0.9302, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 9.39453483742483e-06, |
|
"loss": 0.8399, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.24404040404040403, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 9.380988873277436e-06, |
|
"loss": 1.0561, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.24565656565656566, |
|
"grad_norm": 3.125, |
|
"learning_rate": 9.367303040230828e-06, |
|
"loss": 1.0688, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.24727272727272728, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 9.35347777522344e-06, |
|
"loss": 1.1062, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.24888888888888888, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 9.33951351964525e-06, |
|
"loss": 0.8516, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2505050505050505, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 9.3254107193237e-06, |
|
"loss": 0.9327, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.25212121212121213, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 9.311169824509454e-06, |
|
"loss": 0.711, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.25373737373737376, |
|
"grad_norm": 2.875, |
|
"learning_rate": 9.296791289862031e-06, |
|
"loss": 0.7404, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.25535353535353533, |
|
"grad_norm": 3.25, |
|
"learning_rate": 9.28227557443528e-06, |
|
"loss": 1.0565, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.25696969696969696, |
|
"grad_norm": 3.390625, |
|
"learning_rate": 9.267623141662734e-06, |
|
"loss": 1.0831, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2585858585858586, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 9.252834459342801e-06, |
|
"loss": 1.2091, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2602020202020202, |
|
"grad_norm": 2.75, |
|
"learning_rate": 9.237909999623847e-06, |
|
"loss": 0.9261, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.26181818181818184, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 9.222850238989104e-06, |
|
"loss": 0.86, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2634343434343434, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 9.207655658241469e-06, |
|
"loss": 0.7762, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.26505050505050504, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 9.192326742488153e-06, |
|
"loss": 0.9594, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.26666666666666666, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 9.176863981125185e-06, |
|
"loss": 0.756, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2682828282828283, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 9.161267867821802e-06, |
|
"loss": 1.1111, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.2698989898989899, |
|
"grad_norm": 2.875, |
|
"learning_rate": 9.14553890050467e-06, |
|
"loss": 0.955, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.27151515151515154, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 9.129677581342e-06, |
|
"loss": 1.0545, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.2731313131313131, |
|
"grad_norm": 3.375, |
|
"learning_rate": 9.113684416727511e-06, |
|
"loss": 1.0048, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.27474747474747474, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 9.097559917264268e-06, |
|
"loss": 1.0471, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.27636363636363637, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 9.081304597748366e-06, |
|
"loss": 0.837, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.277979797979798, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 9.064918977152517e-06, |
|
"loss": 0.9935, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.2795959595959596, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 9.048403578609454e-06, |
|
"loss": 1.0329, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.2812121212121212, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 9.031758929395259e-06, |
|
"loss": 1.1304, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.2828282828282828, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 9.014985560912499e-06, |
|
"loss": 0.7198, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.28444444444444444, |
|
"grad_norm": 3.0, |
|
"learning_rate": 8.998084008673284e-06, |
|
"loss": 0.8479, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.28606060606060607, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 8.981054812282162e-06, |
|
"loss": 0.9019, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.2876767676767677, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 8.963898515418885e-06, |
|
"loss": 0.8804, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.28929292929292927, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 8.946615665821059e-06, |
|
"loss": 0.9184, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.2909090909090909, |
|
"grad_norm": 3.0, |
|
"learning_rate": 8.929206815266653e-06, |
|
"loss": 0.8425, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.2925252525252525, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 8.911672519556386e-06, |
|
"loss": 0.9117, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.29414141414141415, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 8.89401333849598e-06, |
|
"loss": 0.835, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.2957575757575758, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 8.87622983587829e-06, |
|
"loss": 1.2808, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.2973737373737374, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 8.8583225794653e-06, |
|
"loss": 1.1745, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.298989898989899, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 8.840292140969995e-06, |
|
"loss": 0.7905, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.3006060606060606, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 8.82213909603812e-06, |
|
"loss": 1.1929, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.3022222222222222, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 8.803864024229786e-06, |
|
"loss": 1.0457, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.30383838383838385, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 8.78546750900098e-06, |
|
"loss": 0.8142, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.3054545454545455, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 8.766950137684929e-06, |
|
"loss": 0.7804, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.30707070707070705, |
|
"grad_norm": 3.4375, |
|
"learning_rate": 8.748312501473351e-06, |
|
"loss": 0.97, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3086868686868687, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 8.72955519539758e-06, |
|
"loss": 0.7875, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.3103030303030303, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 8.710678818309576e-06, |
|
"loss": 0.8975, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.31191919191919193, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 8.691683972862792e-06, |
|
"loss": 0.9239, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.31353535353535356, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 8.672571265492944e-06, |
|
"loss": 1.4731, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3151515151515151, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 8.653341306398645e-06, |
|
"loss": 1.0463, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.31676767676767675, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 8.63399470952193e-06, |
|
"loss": 0.9826, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.3183838383838384, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 8.614532092528645e-06, |
|
"loss": 0.9094, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 3.125, |
|
"learning_rate": 8.594954076788736e-06, |
|
"loss": 1.0665, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.32161616161616163, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 8.575261287356407e-06, |
|
"loss": 0.9407, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.32323232323232326, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 8.555454352950161e-06, |
|
"loss": 0.8439, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.32484848484848483, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 1.0384, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.32646464646464646, |
|
"grad_norm": 3.734375, |
|
"learning_rate": 8.515500582290914e-06, |
|
"loss": 0.8061, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.3280808080808081, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 8.495355021615204e-06, |
|
"loss": 0.972, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.3296969696969697, |
|
"grad_norm": 3.125, |
|
"learning_rate": 8.475097867079437e-06, |
|
"loss": 1.0756, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.33131313131313134, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 8.454729765420228e-06, |
|
"loss": 0.8197, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.3329292929292929, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 8.434251366916323e-06, |
|
"loss": 0.7967, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.33454545454545453, |
|
"grad_norm": 3.125, |
|
"learning_rate": 8.413663325367845e-06, |
|
"loss": 0.8979, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.33616161616161616, |
|
"grad_norm": 2.875, |
|
"learning_rate": 8.392966298075413e-06, |
|
"loss": 0.7354, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.3377777777777778, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 8.372160945819164e-06, |
|
"loss": 0.713, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.3393939393939394, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 8.351247932837655e-06, |
|
"loss": 0.8644, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.341010101010101, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 8.330227926806652e-06, |
|
"loss": 0.7115, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.3426262626262626, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 8.309101598817812e-06, |
|
"loss": 0.9497, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.34424242424242424, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 8.287869623357275e-06, |
|
"loss": 1.0047, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.34585858585858587, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 8.266532678284103e-06, |
|
"loss": 0.964, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.3474747474747475, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 8.245091444808663e-06, |
|
"loss": 0.9629, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.3490909090909091, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 8.223546607470863e-06, |
|
"loss": 0.8935, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.3507070707070707, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 8.201898854118301e-06, |
|
"loss": 0.8338, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.3523232323232323, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 8.18014887588431e-06, |
|
"loss": 0.9086, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.35393939393939394, |
|
"grad_norm": 3.375, |
|
"learning_rate": 8.158297367165885e-06, |
|
"loss": 1.0212, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.35555555555555557, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 8.13634502560152e-06, |
|
"loss": 1.0524, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3571717171717172, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 8.114292552048925e-06, |
|
"loss": 0.7959, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.35878787878787877, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 8.092140650562665e-06, |
|
"loss": 0.9641, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.3604040404040404, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 8.069890028371672e-06, |
|
"loss": 1.0189, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.362020202020202, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 8.047541395856661e-06, |
|
"loss": 0.9189, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 8.025095466527468e-06, |
|
"loss": 1.0918, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.3652525252525253, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 8.002552957000254e-06, |
|
"loss": 0.9643, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.36686868686868684, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 7.979914586974628e-06, |
|
"loss": 1.1684, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.36848484848484847, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 7.957181079210676e-06, |
|
"loss": 0.7611, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.3701010101010101, |
|
"grad_norm": 3.390625, |
|
"learning_rate": 7.934353159505885e-06, |
|
"loss": 0.9484, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.3717171717171717, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 7.911431556671967e-06, |
|
"loss": 0.6935, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.37333333333333335, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 7.888417002511592e-06, |
|
"loss": 0.8097, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.374949494949495, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 7.865310231795026e-06, |
|
"loss": 0.6958, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.37656565656565655, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 7.842111982236669e-06, |
|
"loss": 0.9087, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.3781818181818182, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 7.818822994471504e-06, |
|
"loss": 0.7489, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.3797979797979798, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 7.79544401203146e-06, |
|
"loss": 0.9611, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.3814141414141414, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 7.771975781321655e-06, |
|
"loss": 0.9513, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.38303030303030305, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 7.748419051596586e-06, |
|
"loss": 0.986, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.3846464646464646, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 7.72477457493619e-06, |
|
"loss": 0.6716, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.38626262626262625, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 7.701043106221847e-06, |
|
"loss": 0.6937, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.3878787878787879, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 7.677225403112277e-06, |
|
"loss": 0.8809, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.3894949494949495, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 7.653322226019341e-06, |
|
"loss": 0.8851, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.39111111111111113, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 7.629334338083774e-06, |
|
"loss": 0.9655, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.3927272727272727, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 7.605262505150819e-06, |
|
"loss": 0.9703, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.39434343434343433, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 7.58110749574577e-06, |
|
"loss": 0.5432, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.39595959595959596, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 7.556870081049444e-06, |
|
"loss": 0.8646, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.3975757575757576, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 7.532551034873558e-06, |
|
"loss": 0.8533, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.3975757575757576, |
|
"eval_loss": 0.8802760243415833, |
|
"eval_runtime": 34.0458, |
|
"eval_samples_per_second": 32.309, |
|
"eval_steps_per_second": 4.053, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.3991919191919192, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 7.50815113363602e-06, |
|
"loss": 0.8146, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.40080808080808084, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 7.483671156336142e-06, |
|
"loss": 0.9311, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.4024242424242424, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 7.459111884529775e-06, |
|
"loss": 0.8824, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.40404040404040403, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 7.43447410230435e-06, |
|
"loss": 0.8129, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.40565656565656566, |
|
"grad_norm": 2.875, |
|
"learning_rate": 7.409758596253849e-06, |
|
"loss": 0.7964, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.4072727272727273, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 7.384966155453686e-06, |
|
"loss": 0.9389, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.4088888888888889, |
|
"grad_norm": 3.53125, |
|
"learning_rate": 7.360097571435527e-06, |
|
"loss": 0.8737, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.4105050505050505, |
|
"grad_norm": 3.53125, |
|
"learning_rate": 7.335153638162005e-06, |
|
"loss": 0.9417, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.4121212121212121, |
|
"grad_norm": 3.0, |
|
"learning_rate": 7.310135152001381e-06, |
|
"loss": 1.1766, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.41373737373737374, |
|
"grad_norm": 3.546875, |
|
"learning_rate": 7.285042911702116e-06, |
|
"loss": 1.043, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.41535353535353536, |
|
"grad_norm": 3.640625, |
|
"learning_rate": 7.259877718367372e-06, |
|
"loss": 1.183, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.416969696969697, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 7.234640375429427e-06, |
|
"loss": 0.9197, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.41858585858585856, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 7.209331688624039e-06, |
|
"loss": 0.9518, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.4202020202020202, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 7.183952465964711e-06, |
|
"loss": 0.6607, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4218181818181818, |
|
"grad_norm": 3.125, |
|
"learning_rate": 7.158503517716894e-06, |
|
"loss": 0.8139, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.42343434343434344, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 7.132985656372126e-06, |
|
"loss": 0.81, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.42505050505050507, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 7.1073996966220835e-06, |
|
"loss": 0.8823, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.4266666666666667, |
|
"grad_norm": 3.0, |
|
"learning_rate": 7.0817464553325764e-06, |
|
"loss": 1.0918, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.42828282828282827, |
|
"grad_norm": 3.0, |
|
"learning_rate": 7.0560267515174685e-06, |
|
"loss": 0.8481, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.4298989898989899, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 7.030241406312528e-06, |
|
"loss": 0.9778, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.4315151515151515, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 7.004391242949209e-06, |
|
"loss": 1.1517, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.43313131313131314, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 6.978477086728375e-06, |
|
"loss": 0.8944, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.43474747474747477, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 6.952499764993945e-06, |
|
"loss": 0.8877, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.43636363636363634, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 6.926460107106483e-06, |
|
"loss": 0.9571, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.43797979797979797, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 6.900358944416712e-06, |
|
"loss": 0.8107, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.4395959595959596, |
|
"grad_norm": 3.0, |
|
"learning_rate": 6.874197110238986e-06, |
|
"loss": 0.9484, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.4412121212121212, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 6.847975439824669e-06, |
|
"loss": 1.1559, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.44282828282828285, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 6.8216947703354815e-06, |
|
"loss": 0.9301, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 6.795355940816768e-06, |
|
"loss": 1.1554, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.44606060606060605, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 6.7689597921707065e-06, |
|
"loss": 1.1886, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.4476767676767677, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 6.742507167129465e-06, |
|
"loss": 0.7815, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.4492929292929293, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 6.715998910228296e-06, |
|
"loss": 0.7686, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.4509090909090909, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 6.689435867778578e-06, |
|
"loss": 0.8208, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.45252525252525255, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 6.6628188878407806e-06, |
|
"loss": 0.9298, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.4541414141414141, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 6.636148820197409e-06, |
|
"loss": 0.6451, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.45575757575757575, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 6.609426516325859e-06, |
|
"loss": 0.8862, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.4573737373737374, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 6.58265282937124e-06, |
|
"loss": 0.7027, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.458989898989899, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 6.555828614119132e-06, |
|
"loss": 1.0375, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.46060606060606063, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 6.528954726968302e-06, |
|
"loss": 0.7963, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.4622222222222222, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 6.502032025903356e-06, |
|
"loss": 0.9763, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.4638383838383838, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 6.475061370467346e-06, |
|
"loss": 0.7831, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.46545454545454545, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 6.4480436217343366e-06, |
|
"loss": 1.2064, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.4670707070707071, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 6.420979642281909e-06, |
|
"loss": 0.8201, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.4686868686868687, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 6.393870296163616e-06, |
|
"loss": 0.7735, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.4703030303030303, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 6.366716448881407e-06, |
|
"loss": 0.7052, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.4719191919191919, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 6.339518967357985e-06, |
|
"loss": 0.8898, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.47353535353535353, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 6.312278719909138e-06, |
|
"loss": 0.9402, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.47515151515151516, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 6.284996576216014e-06, |
|
"loss": 1.2848, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.4767676767676768, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 6.257673407297352e-06, |
|
"loss": 0.9033, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.4783838383838384, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 6.230310085481677e-06, |
|
"loss": 0.7711, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.21875, |
|
"learning_rate": 6.2029074843794445e-06, |
|
"loss": 1.0262, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.4816161616161616, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 6.175466478855161e-06, |
|
"loss": 0.8698, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.48323232323232324, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 6.147987944999441e-06, |
|
"loss": 1.09, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 6.1204727601010396e-06, |
|
"loss": 0.7683, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.4864646464646465, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 6.092921802618849e-06, |
|
"loss": 0.6135, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.48808080808080806, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 6.065335952153846e-06, |
|
"loss": 0.7247, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.4896969696969697, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 6.037716089421011e-06, |
|
"loss": 0.6764, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.4913131313131313, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 6.010063096221215e-06, |
|
"loss": 0.9848, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.49292929292929294, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 5.982377855413063e-06, |
|
"loss": 0.7826, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.49454545454545457, |
|
"grad_norm": 3.125, |
|
"learning_rate": 5.954661250884704e-06, |
|
"loss": 0.8002, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.49616161616161614, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 5.926914167525618e-06, |
|
"loss": 0.9716, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.49777777777777776, |
|
"grad_norm": 3.125, |
|
"learning_rate": 5.899137491198364e-06, |
|
"loss": 0.6486, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.4993939393939394, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 5.871332108710292e-06, |
|
"loss": 0.7897, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.501010101010101, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 5.843498907785236e-06, |
|
"loss": 0.8302, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5026262626262626, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 5.815638777035175e-06, |
|
"loss": 1.2278, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5042424242424243, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 5.78775260593185e-06, |
|
"loss": 1.0661, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5058585858585859, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 5.759841284778379e-06, |
|
"loss": 1.0882, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5074747474747475, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 5.731905704680834e-06, |
|
"loss": 0.6883, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.509090909090909, |
|
"grad_norm": 3.125, |
|
"learning_rate": 5.703946757519777e-06, |
|
"loss": 0.7921, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5107070707070707, |
|
"grad_norm": 3.125, |
|
"learning_rate": 5.6759653359218e-06, |
|
"loss": 1.0397, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5123232323232323, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 5.647962333231021e-06, |
|
"loss": 0.6039, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.5139393939393939, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 5.6199386434805615e-06, |
|
"loss": 0.7038, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.5155555555555555, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 5.591895161364006e-06, |
|
"loss": 0.8654, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5171717171717172, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 5.563832782206835e-06, |
|
"loss": 1.0567, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5187878787878788, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 5.535752401937846e-06, |
|
"loss": 0.4897, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5204040404040404, |
|
"grad_norm": 4.375, |
|
"learning_rate": 5.507654917060541e-06, |
|
"loss": 1.007, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.522020202020202, |
|
"grad_norm": 3.0, |
|
"learning_rate": 5.4795412246245126e-06, |
|
"loss": 0.9425, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5236363636363637, |
|
"grad_norm": 3.75, |
|
"learning_rate": 5.451412222196801e-06, |
|
"loss": 1.1415, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5252525252525253, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 5.4232688078332384e-06, |
|
"loss": 0.9817, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.5268686868686868, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 5.395111880049775e-06, |
|
"loss": 0.5226, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.5284848484848484, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 5.366942337793798e-06, |
|
"loss": 1.1519, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.5301010101010101, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 5.338761080415425e-06, |
|
"loss": 0.7671, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.5317171717171717, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 5.310569007638795e-06, |
|
"loss": 1.0666, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 5.28236701953335e-06, |
|
"loss": 0.9614, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.534949494949495, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 5.254156016485082e-06, |
|
"loss": 0.725, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.5365656565656566, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 5.225936899167803e-06, |
|
"loss": 0.8421, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.5381818181818182, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 5.197710568514381e-06, |
|
"loss": 1.0131, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.5397979797979798, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 5.169477925687981e-06, |
|
"loss": 0.8607, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.5414141414141415, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 5.141239872053294e-06, |
|
"loss": 0.6512, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.5430303030303031, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 5.112997309147753e-06, |
|
"loss": 1.2432, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.5446464646464646, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 5.084751138652764e-06, |
|
"loss": 0.7812, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.5462626262626262, |
|
"grad_norm": 3.125, |
|
"learning_rate": 5.0565022623649e-06, |
|
"loss": 0.7546, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.5478787878787879, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 5.028251582167126e-06, |
|
"loss": 0.7784, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.5494949494949495, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 5e-06, |
|
"loss": 0.8359, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.5511111111111111, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 4.971748417832876e-06, |
|
"loss": 0.7377, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.5527272727272727, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 4.943497737635103e-06, |
|
"loss": 1.0, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.5543434343434344, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 4.915248861347239e-06, |
|
"loss": 0.7804, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.555959595959596, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 4.887002690852249e-06, |
|
"loss": 0.5882, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.5575757575757576, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 4.858760127946707e-06, |
|
"loss": 0.6654, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.5591919191919192, |
|
"grad_norm": 3.25, |
|
"learning_rate": 4.830522074312019e-06, |
|
"loss": 1.0637, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.5608080808080808, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 4.80228943148562e-06, |
|
"loss": 0.9217, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.5624242424242424, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 4.774063100832199e-06, |
|
"loss": 0.8512, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.564040404040404, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 4.745843983514919e-06, |
|
"loss": 0.6813, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.5656565656565656, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 4.717632980466652e-06, |
|
"loss": 0.6373, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.5672727272727273, |
|
"grad_norm": 3.125, |
|
"learning_rate": 4.6894309923612055e-06, |
|
"loss": 0.9283, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.5688888888888889, |
|
"grad_norm": 2.875, |
|
"learning_rate": 4.661238919584578e-06, |
|
"loss": 0.762, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.5705050505050505, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 4.633057662206205e-06, |
|
"loss": 1.0961, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.5721212121212121, |
|
"grad_norm": 3.25, |
|
"learning_rate": 4.6048881199502265e-06, |
|
"loss": 1.1472, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.5737373737373738, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 4.576731192166762e-06, |
|
"loss": 0.9696, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.5753535353535354, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 4.548587777803198e-06, |
|
"loss": 0.7152, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.576969696969697, |
|
"grad_norm": 2.75, |
|
"learning_rate": 4.520458775375488e-06, |
|
"loss": 0.8455, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.5785858585858585, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 4.49234508293946e-06, |
|
"loss": 0.7451, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.5802020202020202, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 4.464247598062156e-06, |
|
"loss": 0.7156, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.5818181818181818, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 4.436167217793167e-06, |
|
"loss": 0.6658, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.5834343434343434, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 4.408104838635996e-06, |
|
"loss": 0.7804, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.585050505050505, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 4.38006135651944e-06, |
|
"loss": 0.9036, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.5866666666666667, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 4.3520376667689815e-06, |
|
"loss": 0.7542, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.5882828282828283, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 4.3240346640782014e-06, |
|
"loss": 0.7773, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.5898989898989899, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 4.2960532424802235e-06, |
|
"loss": 0.985, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.5915151515151515, |
|
"grad_norm": 3.0, |
|
"learning_rate": 4.268094295319167e-06, |
|
"loss": 1.15, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.5931313131313132, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 4.240158715221621e-06, |
|
"loss": 1.2425, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.5947474747474748, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 4.212247394068151e-06, |
|
"loss": 0.8483, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.5963636363636363, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 4.184361222964827e-06, |
|
"loss": 0.5802, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.5963636363636363, |
|
"eval_loss": 0.8491820096969604, |
|
"eval_runtime": 34.0102, |
|
"eval_samples_per_second": 32.343, |
|
"eval_steps_per_second": 4.058, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.597979797979798, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 4.1565010922147644e-06, |
|
"loss": 1.0333, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5995959595959596, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 4.1286678912897095e-06, |
|
"loss": 0.7689, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6012121212121212, |
|
"grad_norm": 2.75, |
|
"learning_rate": 4.100862508801639e-06, |
|
"loss": 0.7953, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6028282828282828, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 4.0730858324743845e-06, |
|
"loss": 0.8048, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6044444444444445, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 4.045338749115299e-06, |
|
"loss": 0.9929, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 4.017622144586938e-06, |
|
"loss": 0.986, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6076767676767677, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 3.989936903778785e-06, |
|
"loss": 0.8808, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6092929292929293, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 3.962283910578991e-06, |
|
"loss": 0.7161, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.610909090909091, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 3.934664047846157e-06, |
|
"loss": 0.8097, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6125252525252525, |
|
"grad_norm": 3.0, |
|
"learning_rate": 3.907078197381153e-06, |
|
"loss": 1.0983, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6141414141414141, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 3.879527239898962e-06, |
|
"loss": 0.9825, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6157575757575757, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 3.85201205500056e-06, |
|
"loss": 0.8145, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6173737373737374, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 3.8245335211448404e-06, |
|
"loss": 0.5102, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.618989898989899, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 3.797092515620557e-06, |
|
"loss": 0.7706, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6206060606060606, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 3.769689914518326e-06, |
|
"loss": 0.7184, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6222222222222222, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 3.7423265927026473e-06, |
|
"loss": 0.4871, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6238383838383839, |
|
"grad_norm": 3.125, |
|
"learning_rate": 3.715003423783986e-06, |
|
"loss": 0.8992, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6254545454545455, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 3.6877212800908625e-06, |
|
"loss": 0.9216, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6270707070707071, |
|
"grad_norm": 3.125, |
|
"learning_rate": 3.660481032642016e-06, |
|
"loss": 0.8574, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.6286868686868687, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 3.633283551118595e-06, |
|
"loss": 0.8686, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.6303030303030303, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 3.6061297038363853e-06, |
|
"loss": 1.0315, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6319191919191919, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 3.579020357718092e-06, |
|
"loss": 0.9183, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.6335353535353535, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 3.5519563782656642e-06, |
|
"loss": 0.9727, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.6351515151515151, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 3.524938629532656e-06, |
|
"loss": 0.6831, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.6367676767676768, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 3.497967974096647e-06, |
|
"loss": 0.7253, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.6383838383838384, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 3.4710452730316978e-06, |
|
"loss": 0.8514, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 3.4441713858808684e-06, |
|
"loss": 0.8365, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.6416161616161616, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 3.4173471706287607e-06, |
|
"loss": 0.7557, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.6432323232323233, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 3.3905734836741415e-06, |
|
"loss": 0.9645, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.6448484848484849, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 3.3638511798025927e-06, |
|
"loss": 0.8765, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.6464646464646465, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 3.3371811121592203e-06, |
|
"loss": 0.7781, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.648080808080808, |
|
"grad_norm": 3.59375, |
|
"learning_rate": 3.3105641322214238e-06, |
|
"loss": 0.9172, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.6496969696969697, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 3.2840010897717045e-06, |
|
"loss": 1.0091, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.6513131313131313, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 3.257492832870537e-06, |
|
"loss": 1.0656, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.6529292929292929, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 3.2310402078292956e-06, |
|
"loss": 1.0486, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.6545454545454545, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 3.2046440591832323e-06, |
|
"loss": 0.6996, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.6561616161616162, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 3.178305229664519e-06, |
|
"loss": 0.8488, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.6577777777777778, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 3.152024560175332e-06, |
|
"loss": 0.983, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.6593939393939394, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 3.125802889761016e-06, |
|
"loss": 0.6526, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.661010101010101, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 3.099641055583289e-06, |
|
"loss": 0.5882, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.6626262626262627, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 3.073539892893519e-06, |
|
"loss": 0.8811, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.6642424242424242, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 3.047500235006056e-06, |
|
"loss": 0.7688, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.6658585858585858, |
|
"grad_norm": 3.34375, |
|
"learning_rate": 3.021522913271627e-06, |
|
"loss": 0.7487, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.6674747474747474, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 2.995608757050793e-06, |
|
"loss": 0.9598, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.6690909090909091, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 2.969758593687475e-06, |
|
"loss": 0.7067, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.6707070707070707, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 2.9439732484825323e-06, |
|
"loss": 0.7588, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.6723232323232323, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 2.9182535446674244e-06, |
|
"loss": 0.7737, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.673939393939394, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 2.8926003033779194e-06, |
|
"loss": 0.7131, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.6755555555555556, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 2.8670143436278757e-06, |
|
"loss": 1.3731, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.6771717171717172, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 2.8414964822831063e-06, |
|
"loss": 0.8109, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.6787878787878788, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 2.8160475340352913e-06, |
|
"loss": 0.6116, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.6804040404040405, |
|
"grad_norm": 3.125, |
|
"learning_rate": 2.790668311375962e-06, |
|
"loss": 0.9945, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.682020202020202, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 2.765359624570574e-06, |
|
"loss": 0.7128, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.6836363636363636, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 2.7401222816326316e-06, |
|
"loss": 0.8836, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.6852525252525252, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 2.714957088297886e-06, |
|
"loss": 0.5784, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.6868686868686869, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 2.6898648479986187e-06, |
|
"loss": 0.7598, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.6884848484848485, |
|
"grad_norm": 2.703125, |
|
"learning_rate": 2.664846361837997e-06, |
|
"loss": 0.9088, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.6901010101010101, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 2.639902428564475e-06, |
|
"loss": 0.8364, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.6917171717171717, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 2.6150338445463146e-06, |
|
"loss": 0.726, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.6933333333333334, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 2.5902414037461544e-06, |
|
"loss": 0.7127, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.694949494949495, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 2.565525897695651e-06, |
|
"loss": 1.1283, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.6965656565656566, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 2.540888115470226e-06, |
|
"loss": 0.8096, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.6981818181818182, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 2.51632884366386e-06, |
|
"loss": 0.8525, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.6997979797979798, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 2.4918488663639824e-06, |
|
"loss": 0.7879, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7014141414141414, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 2.4674489651264433e-06, |
|
"loss": 0.8376, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.703030303030303, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 2.4431299189505563e-06, |
|
"loss": 0.9727, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7046464646464646, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 2.418892504254231e-06, |
|
"loss": 0.76, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7062626262626263, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 2.394737494849184e-06, |
|
"loss": 0.6697, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7078787878787879, |
|
"grad_norm": 3.375, |
|
"learning_rate": 2.3706656619162278e-06, |
|
"loss": 0.9635, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7094949494949495, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 2.34667777398066e-06, |
|
"loss": 0.8525, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7111111111111111, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 2.322774596887726e-06, |
|
"loss": 0.6937, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7127272727272728, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 2.298956893778154e-06, |
|
"loss": 0.7107, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7143434343434344, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 2.275225425063813e-06, |
|
"loss": 0.8223, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7159595959595959, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 2.251580948403416e-06, |
|
"loss": 0.8025, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7175757575757575, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 2.2280242186783473e-06, |
|
"loss": 0.6423, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.7191919191919192, |
|
"grad_norm": 3.0, |
|
"learning_rate": 2.204555987968541e-06, |
|
"loss": 0.8039, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.7208080808080808, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 2.1811770055284968e-06, |
|
"loss": 0.6769, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.7224242424242424, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 2.157888017763333e-06, |
|
"loss": 1.0327, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.724040404040404, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 2.134689768204975e-06, |
|
"loss": 0.6075, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.7256565656565657, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 2.1115829974884097e-06, |
|
"loss": 0.8592, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 2.0885684433280336e-06, |
|
"loss": 0.7873, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.7288888888888889, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 2.065646840494115e-06, |
|
"loss": 0.8771, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.7305050505050505, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 2.042818920789326e-06, |
|
"loss": 0.6399, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.7321212121212122, |
|
"grad_norm": 3.125, |
|
"learning_rate": 2.020085413025375e-06, |
|
"loss": 0.7613, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.7337373737373737, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 1.9974470429997482e-06, |
|
"loss": 1.0072, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.7353535353535353, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 1.974904533472532e-06, |
|
"loss": 0.8303, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.7369696969696969, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 1.9524586041433393e-06, |
|
"loss": 0.8394, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.7385858585858586, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 1.9301099716283293e-06, |
|
"loss": 1.0079, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.7402020202020202, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 1.907859349437336e-06, |
|
"loss": 1.0984, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.7418181818181818, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 1.8857074479510761e-06, |
|
"loss": 0.739, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.7434343434343434, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 1.8636549743984815e-06, |
|
"loss": 0.8348, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.7450505050505051, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 1.8417026328341158e-06, |
|
"loss": 0.5835, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.7466666666666667, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 1.8198511241156902e-06, |
|
"loss": 0.6675, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.7482828282828283, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 1.7981011458816988e-06, |
|
"loss": 1.1584, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.74989898989899, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 1.776453392529139e-06, |
|
"loss": 0.6752, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.7515151515151515, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 1.7549085551913358e-06, |
|
"loss": 0.7936, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.7531313131313131, |
|
"grad_norm": 2.875, |
|
"learning_rate": 1.7334673217158976e-06, |
|
"loss": 0.9359, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.7547474747474747, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 1.7121303766427266e-06, |
|
"loss": 0.8628, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.7563636363636363, |
|
"grad_norm": 3.375, |
|
"learning_rate": 1.6908984011821883e-06, |
|
"loss": 0.7501, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.757979797979798, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 1.669772073193352e-06, |
|
"loss": 0.9188, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.7595959595959596, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 1.6487520671623469e-06, |
|
"loss": 0.8584, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.7612121212121212, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 1.6278390541808364e-06, |
|
"loss": 0.9493, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.7628282828282829, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 1.6070337019245896e-06, |
|
"loss": 0.8221, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.7644444444444445, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 1.5863366746321578e-06, |
|
"loss": 0.9834, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.7660606060606061, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 1.5657486330836786e-06, |
|
"loss": 1.0277, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.7676767676767676, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 1.5452702345797738e-06, |
|
"loss": 1.0368, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.7692929292929293, |
|
"grad_norm": 3.296875, |
|
"learning_rate": 1.5249021329205638e-06, |
|
"loss": 0.9004, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.7709090909090909, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 1.5046449783847965e-06, |
|
"loss": 0.6375, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.7725252525252525, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 1.4844994177090871e-06, |
|
"loss": 0.632, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.7741414141414141, |
|
"grad_norm": 3.125, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 1.017, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.7757575757575758, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 1.4445456470498392e-06, |
|
"loss": 0.7848, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.7773737373737374, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 1.4247387126435957e-06, |
|
"loss": 0.6678, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.778989898989899, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 1.4050459232112652e-06, |
|
"loss": 1.006, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.7806060606060606, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 1.3854679074713557e-06, |
|
"loss": 0.8044, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.7822222222222223, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 1.3660052904780707e-06, |
|
"loss": 0.876, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.7838383838383839, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 1.3466586936013548e-06, |
|
"loss": 1.0428, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.7854545454545454, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 1.3274287345070564e-06, |
|
"loss": 0.8647, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.787070707070707, |
|
"grad_norm": 2.703125, |
|
"learning_rate": 1.3083160271372092e-06, |
|
"loss": 0.6243, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.7886868686868687, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 1.2893211816904243e-06, |
|
"loss": 0.7497, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.7903030303030303, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 1.2704448046024192e-06, |
|
"loss": 0.6755, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.7919191919191919, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 1.2516874985266508e-06, |
|
"loss": 0.8875, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.7935353535353535, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 1.2330498623150722e-06, |
|
"loss": 0.8863, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.7951515151515152, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 1.2145324909990202e-06, |
|
"loss": 0.7408, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.7951515151515152, |
|
"eval_loss": 0.8412577509880066, |
|
"eval_runtime": 34.0436, |
|
"eval_samples_per_second": 32.311, |
|
"eval_steps_per_second": 4.054, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.7967676767676768, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 1.1961359757702151e-06, |
|
"loss": 0.7413, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.7983838383838384, |
|
"grad_norm": 2.875, |
|
"learning_rate": 1.1778609039618804e-06, |
|
"loss": 0.9246, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 1.1597078590300054e-06, |
|
"loss": 1.1691, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8016161616161617, |
|
"grad_norm": 3.375, |
|
"learning_rate": 1.1416774205347015e-06, |
|
"loss": 1.0407, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8032323232323232, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 1.1237701641217097e-06, |
|
"loss": 0.7975, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8048484848484848, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 1.1059866615040205e-06, |
|
"loss": 0.8686, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.8064646464646464, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 1.0883274804436155e-06, |
|
"loss": 0.9299, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.8080808080808081, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 1.0707931847333487e-06, |
|
"loss": 0.891, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.8096969696969697, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 1.053384334178944e-06, |
|
"loss": 0.8845, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.8113131313131313, |
|
"grad_norm": 3.0, |
|
"learning_rate": 1.036101484581117e-06, |
|
"loss": 0.6887, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.812929292929293, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 1.0189451877178386e-06, |
|
"loss": 0.5841, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.8145454545454546, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 1.0019159913267156e-06, |
|
"loss": 0.743, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.8161616161616162, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 9.850144390875022e-07, |
|
"loss": 1.0117, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.8177777777777778, |
|
"grad_norm": 3.84375, |
|
"learning_rate": 9.68241070604743e-07, |
|
"loss": 0.8481, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.8193939393939393, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 9.51596421390547e-07, |
|
"loss": 1.4214, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.821010101010101, |
|
"grad_norm": 3.125, |
|
"learning_rate": 9.350810228474855e-07, |
|
"loss": 0.9121, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.8226262626262626, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 9.186954022516343e-07, |
|
"loss": 0.9282, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.8242424242424242, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 9.024400827357344e-07, |
|
"loss": 1.0226, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.8258585858585858, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 8.863155832724895e-07, |
|
"loss": 0.8357, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.8274747474747475, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 8.703224186580012e-07, |
|
"loss": 0.8131, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.8290909090909091, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 8.544610994953317e-07, |
|
"loss": 0.5461, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.8307070707070707, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 8.387321321781977e-07, |
|
"loss": 0.8584, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.8323232323232324, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 8.23136018874815e-07, |
|
"loss": 0.6722, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.833939393939394, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 8.07673257511849e-07, |
|
"loss": 0.7455, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.8355555555555556, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 7.923443417585324e-07, |
|
"loss": 0.864, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.8371717171717171, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 7.771497610108981e-07, |
|
"loss": 0.742, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.8387878787878787, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 7.620900003761561e-07, |
|
"loss": 1.2428, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.8404040404040404, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 7.471655406572003e-07, |
|
"loss": 1.0067, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.842020202020202, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 7.323768583372675e-07, |
|
"loss": 0.6513, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.8436363636363636, |
|
"grad_norm": 3.640625, |
|
"learning_rate": 7.177244255647209e-07, |
|
"loss": 0.8851, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.8452525252525253, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 7.032087101379703e-07, |
|
"loss": 0.8868, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.8468686868686869, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 6.888301754905469e-07, |
|
"loss": 0.9079, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 6.745892806763015e-07, |
|
"loss": 0.4854, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.8501010101010101, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 6.604864803547511e-07, |
|
"loss": 0.5723, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.8517171717171718, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 6.465222247765618e-07, |
|
"loss": 0.6301, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.8533333333333334, |
|
"grad_norm": 3.296875, |
|
"learning_rate": 6.326969597691724e-07, |
|
"loss": 0.9427, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.8549494949494949, |
|
"grad_norm": 3.703125, |
|
"learning_rate": 6.190111267225651e-07, |
|
"loss": 1.0654, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.8565656565656565, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 6.054651625751717e-07, |
|
"loss": 0.7778, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.8581818181818182, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 5.920594997999202e-07, |
|
"loss": 0.907, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.8597979797979798, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 5.787945663904332e-07, |
|
"loss": 0.7575, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.8614141414141414, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 5.65670785847362e-07, |
|
"loss": 0.82, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.863030303030303, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 5.526885771648599e-07, |
|
"loss": 0.7037, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.8646464646464647, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 5.398483548172162e-07, |
|
"loss": 0.9207, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.8662626262626263, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 5.271505287456153e-07, |
|
"loss": 0.6755, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.8678787878787879, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 5.145955043450484e-07, |
|
"loss": 0.6199, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.8694949494949495, |
|
"grad_norm": 3.0, |
|
"learning_rate": 5.021836824513759e-07, |
|
"loss": 1.014, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.8711111111111111, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 4.899154593285294e-07, |
|
"loss": 0.7607, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.8727272727272727, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 4.777912266558532e-07, |
|
"loss": 0.8517, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.8743434343434343, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 4.658113715156098e-07, |
|
"loss": 1.0105, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.8759595959595959, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 4.5397627638061604e-07, |
|
"loss": 0.7295, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.8775757575757576, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.422863191020299e-07, |
|
"loss": 1.0043, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.8791919191919192, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 4.307418728972934e-07, |
|
"loss": 0.7609, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.8808080808080808, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 4.193433063382135e-07, |
|
"loss": 0.9825, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.8824242424242424, |
|
"grad_norm": 3.0, |
|
"learning_rate": 4.080909833391944e-07, |
|
"loss": 0.9199, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.8840404040404041, |
|
"grad_norm": 2.75, |
|
"learning_rate": 3.9698526314562114e-07, |
|
"loss": 0.826, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.8856565656565657, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 3.8602650032238675e-07, |
|
"loss": 0.9195, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.8872727272727273, |
|
"grad_norm": 3.125, |
|
"learning_rate": 3.752150447425773e-07, |
|
"loss": 0.892, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 3.6455124157629805e-07, |
|
"loss": 0.7029, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.8905050505050505, |
|
"grad_norm": 3.125, |
|
"learning_rate": 3.5403543127965514e-07, |
|
"loss": 0.7938, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.8921212121212121, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 3.436679495838835e-07, |
|
"loss": 1.0719, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.8937373737373737, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 3.334491274846324e-07, |
|
"loss": 0.9874, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.8953535353535353, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 3.233792912313943e-07, |
|
"loss": 0.7999, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.896969696969697, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 3.134587623170909e-07, |
|
"loss": 0.8419, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.8985858585858586, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 3.0368785746780925e-07, |
|
"loss": 0.7711, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.9002020202020202, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 2.940668886326864e-07, |
|
"loss": 0.8609, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.9018181818181819, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 2.8459616297395464e-07, |
|
"loss": 0.768, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.9034343434343435, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 2.7527598285713387e-07, |
|
"loss": 0.924, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.9050505050505051, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 2.6610664584137413e-07, |
|
"loss": 0.5691, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.9066666666666666, |
|
"grad_norm": 3.0, |
|
"learning_rate": 2.570884446699612e-07, |
|
"loss": 0.8907, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.9082828282828282, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 2.482216672609677e-07, |
|
"loss": 1.0101, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.9098989898989899, |
|
"grad_norm": 3.125, |
|
"learning_rate": 2.3950659669806033e-07, |
|
"loss": 0.8517, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.9115151515151515, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 2.3094351122146307e-07, |
|
"loss": 0.7141, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.9131313131313131, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 2.2253268421907503e-07, |
|
"loss": 0.7385, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.9147474747474748, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 2.142743842177386e-07, |
|
"loss": 1.092, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.9163636363636364, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 2.0616887487467096e-07, |
|
"loss": 1.0305, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.917979797979798, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 1.98216414969043e-07, |
|
"loss": 0.7856, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.9195959595959596, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 1.9041725839371805e-07, |
|
"loss": 0.8109, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.9212121212121213, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 1.8277165414714858e-07, |
|
"loss": 0.8354, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.9228282828282828, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 1.752798463254235e-07, |
|
"loss": 0.7052, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.9244444444444444, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 1.6794207411447548e-07, |
|
"loss": 0.9284, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.926060606060606, |
|
"grad_norm": 3.015625, |
|
"learning_rate": 1.6075857178244613e-07, |
|
"loss": 0.9869, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.9276767676767677, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 1.5372956867220678e-07, |
|
"loss": 0.8913, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.9292929292929293, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 1.4685528919403446e-07, |
|
"loss": 0.7928, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.9309090909090909, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 1.4013595281844872e-07, |
|
"loss": 0.8047, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.9325252525252525, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 1.335717740692033e-07, |
|
"loss": 0.6207, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.9341414141414142, |
|
"grad_norm": 3.703125, |
|
"learning_rate": 1.2716296251644e-07, |
|
"loss": 0.8742, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.9357575757575758, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 1.2090972276999513e-07, |
|
"loss": 1.1864, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.9373737373737374, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 1.1481225447286803e-07, |
|
"loss": 0.9662, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.938989898989899, |
|
"grad_norm": 3.125, |
|
"learning_rate": 1.0887075229484789e-07, |
|
"loss": 1.1476, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.9406060606060606, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 1.0308540592629756e-07, |
|
"loss": 0.974, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.9422222222222222, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 9.745640007209844e-08, |
|
"loss": 0.7653, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.9438383838383838, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 9.198391444575072e-08, |
|
"loss": 0.9482, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.9454545454545454, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 8.666812376364187e-08, |
|
"loss": 0.6927, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.9470707070707071, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 8.150919773946165e-08, |
|
"loss": 0.9748, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.9486868686868687, |
|
"grad_norm": 3.140625, |
|
"learning_rate": 7.650730107878812e-08, |
|
"loss": 0.8464, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.9503030303030303, |
|
"grad_norm": 3.609375, |
|
"learning_rate": 7.166259347382854e-08, |
|
"loss": 0.8212, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.9519191919191919, |
|
"grad_norm": 3.390625, |
|
"learning_rate": 6.697522959831837e-08, |
|
"loss": 0.8733, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.9535353535353536, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 6.244535910258697e-08, |
|
"loss": 0.7409, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.9551515151515152, |
|
"grad_norm": 3.25, |
|
"learning_rate": 5.8073126608778064e-08, |
|
"loss": 0.799, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.9567676767676768, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 5.3858671706230605e-08, |
|
"loss": 0.6747, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.9583838383838383, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 4.98021289470263e-08, |
|
"loss": 0.7479, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 4.590362784169022e-08, |
|
"loss": 0.8435, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.9616161616161616, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 4.2163292855056936e-08, |
|
"loss": 0.7787, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.9632323232323232, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 3.858124340229863e-08, |
|
"loss": 0.6817, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.9648484848484848, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 3.515759384510986e-08, |
|
"loss": 0.8463, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.9664646464646465, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 3.1892453488058803e-08, |
|
"loss": 0.906, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.9680808080808081, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 2.87859265750956e-08, |
|
"loss": 0.8429, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 2.5838112286226123e-08, |
|
"loss": 0.8432, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.9713131313131314, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 2.304910473434341e-08, |
|
"loss": 0.9289, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.972929292929293, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 2.0418992962224495e-08, |
|
"loss": 0.9302, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.9745454545454545, |
|
"grad_norm": 2.921875, |
|
"learning_rate": 1.7947860939688255e-08, |
|
"loss": 0.7842, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.9761616161616161, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 1.563578756091144e-08, |
|
"loss": 0.9495, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.9777777777777777, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 1.3482846641914572e-08, |
|
"loss": 0.9672, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.9793939393939394, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 1.1489106918200487e-08, |
|
"loss": 0.7065, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.981010101010101, |
|
"grad_norm": 3.25, |
|
"learning_rate": 9.654632042562229e-09, |
|
"loss": 0.8556, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.9826262626262626, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 7.979480583052423e-09, |
|
"loss": 1.3685, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.9842424242424243, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 6.4637060211092395e-09, |
|
"loss": 0.6013, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.9858585858585859, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 5.107356749853298e-09, |
|
"loss": 0.8808, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.9874747474747475, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 3.910476072539471e-09, |
|
"loss": 0.7069, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.9890909090909091, |
|
"grad_norm": 2.75, |
|
"learning_rate": 2.8731022011757593e-09, |
|
"loss": 0.7037, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.9907070707070708, |
|
"grad_norm": 3.3125, |
|
"learning_rate": 1.9952682553042722e-09, |
|
"loss": 0.9838, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.9923232323232323, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 1.2770022609409628e-09, |
|
"loss": 0.838, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.9939393939393939, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 7.18327149683562e-10, |
|
"loss": 0.996, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.9939393939393939, |
|
"eval_loss": 0.8402793407440186, |
|
"eval_runtime": 34.0764, |
|
"eval_samples_per_second": 32.28, |
|
"eval_steps_per_second": 4.05, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.9955555555555555, |
|
"grad_norm": 2.875, |
|
"learning_rate": 3.1926075797827914e-10, |
|
"loss": 1.1182, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.9971717171717172, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 7.9815826551366e-11, |
|
"loss": 0.8098, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.9987878787878788, |
|
"grad_norm": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.7642, |
|
"step": 618 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 618, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.318968240081961e+17, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|