|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.499047619047619, |
|
"eval_steps": 500, |
|
"global_step": 1182, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0012698412698412698, |
|
"grad_norm": 11.4375, |
|
"learning_rate": 7e-08, |
|
"loss": 2.3377, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0025396825396825397, |
|
"grad_norm": 12.4375, |
|
"learning_rate": 1.4e-07, |
|
"loss": 2.4221, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0038095238095238095, |
|
"grad_norm": 10.375, |
|
"learning_rate": 2.1e-07, |
|
"loss": 2.1995, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.005079365079365079, |
|
"grad_norm": 11.25, |
|
"learning_rate": 2.8e-07, |
|
"loss": 2.3242, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.006349206349206349, |
|
"grad_norm": 11.0625, |
|
"learning_rate": 3.5000000000000004e-07, |
|
"loss": 2.3848, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.007619047619047619, |
|
"grad_norm": 10.875, |
|
"learning_rate": 4.2e-07, |
|
"loss": 2.2233, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.008888888888888889, |
|
"grad_norm": 11.125, |
|
"learning_rate": 4.900000000000001e-07, |
|
"loss": 2.3011, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.010158730158730159, |
|
"grad_norm": 12.25, |
|
"learning_rate": 5.6e-07, |
|
"loss": 2.4318, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.011428571428571429, |
|
"grad_norm": 10.8125, |
|
"learning_rate": 6.3e-07, |
|
"loss": 2.2978, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.012698412698412698, |
|
"grad_norm": 11.3125, |
|
"learning_rate": 7.000000000000001e-07, |
|
"loss": 2.4126, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.013968253968253968, |
|
"grad_norm": 10.75, |
|
"learning_rate": 7.7e-07, |
|
"loss": 2.2333, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.015238095238095238, |
|
"grad_norm": 11.3125, |
|
"learning_rate": 8.4e-07, |
|
"loss": 2.3186, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01650793650793651, |
|
"grad_norm": 11.0, |
|
"learning_rate": 9.1e-07, |
|
"loss": 2.3917, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.017777777777777778, |
|
"grad_norm": 10.8125, |
|
"learning_rate": 9.800000000000001e-07, |
|
"loss": 2.3092, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01904761904761905, |
|
"grad_norm": 10.25, |
|
"learning_rate": 1.05e-06, |
|
"loss": 2.1844, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.020317460317460317, |
|
"grad_norm": 9.625, |
|
"learning_rate": 1.12e-06, |
|
"loss": 2.2563, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02158730158730159, |
|
"grad_norm": 10.3125, |
|
"learning_rate": 1.19e-06, |
|
"loss": 2.3583, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.022857142857142857, |
|
"grad_norm": 10.1875, |
|
"learning_rate": 1.26e-06, |
|
"loss": 2.2967, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02412698412698413, |
|
"grad_norm": 9.6875, |
|
"learning_rate": 1.33e-06, |
|
"loss": 2.2498, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.025396825396825397, |
|
"grad_norm": 9.4375, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 2.2613, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02666666666666667, |
|
"grad_norm": 9.875, |
|
"learning_rate": 1.47e-06, |
|
"loss": 2.2819, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.027936507936507936, |
|
"grad_norm": 10.3125, |
|
"learning_rate": 1.54e-06, |
|
"loss": 2.2904, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.029206349206349208, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 1.61e-06, |
|
"loss": 2.1933, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.030476190476190476, |
|
"grad_norm": 8.9375, |
|
"learning_rate": 1.68e-06, |
|
"loss": 2.225, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.031746031746031744, |
|
"grad_norm": 8.75, |
|
"learning_rate": 1.75e-06, |
|
"loss": 2.2458, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03301587301587302, |
|
"grad_norm": 8.5, |
|
"learning_rate": 1.82e-06, |
|
"loss": 2.1693, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03428571428571429, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 1.8900000000000001e-06, |
|
"loss": 2.2753, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.035555555555555556, |
|
"grad_norm": 9.5, |
|
"learning_rate": 1.9600000000000003e-06, |
|
"loss": 2.3356, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.036825396825396824, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 2.0299999999999996e-06, |
|
"loss": 2.2706, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.0380952380952381, |
|
"grad_norm": 7.96875, |
|
"learning_rate": 2.1e-06, |
|
"loss": 2.1023, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03936507936507937, |
|
"grad_norm": 8.375, |
|
"learning_rate": 2.17e-06, |
|
"loss": 2.2069, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.040634920634920635, |
|
"grad_norm": 6.9375, |
|
"learning_rate": 2.24e-06, |
|
"loss": 2.1079, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0419047619047619, |
|
"grad_norm": 7.375, |
|
"learning_rate": 2.31e-06, |
|
"loss": 2.2311, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04317460317460318, |
|
"grad_norm": 6.28125, |
|
"learning_rate": 2.38e-06, |
|
"loss": 2.1627, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.044444444444444446, |
|
"grad_norm": 6.125, |
|
"learning_rate": 2.45e-06, |
|
"loss": 2.1843, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.045714285714285714, |
|
"grad_norm": 6.0, |
|
"learning_rate": 2.52e-06, |
|
"loss": 2.1101, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.04698412698412698, |
|
"grad_norm": 5.375, |
|
"learning_rate": 2.5899999999999998e-06, |
|
"loss": 2.0643, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.04825396825396826, |
|
"grad_norm": 5.0625, |
|
"learning_rate": 2.66e-06, |
|
"loss": 2.0844, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.049523809523809526, |
|
"grad_norm": 4.71875, |
|
"learning_rate": 2.73e-06, |
|
"loss": 2.0624, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.050793650793650794, |
|
"grad_norm": 4.6875, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 2.1232, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05206349206349206, |
|
"grad_norm": 4.71875, |
|
"learning_rate": 2.8699999999999996e-06, |
|
"loss": 2.124, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05333333333333334, |
|
"grad_norm": 4.53125, |
|
"learning_rate": 2.94e-06, |
|
"loss": 1.9933, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.054603174603174605, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 3.01e-06, |
|
"loss": 2.0451, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.05587301587301587, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 3.08e-06, |
|
"loss": 2.1581, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.05714285714285714, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 3.15e-06, |
|
"loss": 2.1967, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.058412698412698416, |
|
"grad_norm": 4.3125, |
|
"learning_rate": 3.22e-06, |
|
"loss": 2.0406, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.059682539682539684, |
|
"grad_norm": 4.09375, |
|
"learning_rate": 3.29e-06, |
|
"loss": 1.9816, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06095238095238095, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 3.36e-06, |
|
"loss": 1.9638, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06222222222222222, |
|
"grad_norm": 3.796875, |
|
"learning_rate": 3.4299999999999998e-06, |
|
"loss": 1.9013, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06349206349206349, |
|
"grad_norm": 3.78125, |
|
"learning_rate": 3.5e-06, |
|
"loss": 2.0392, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06476190476190476, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 3.57e-06, |
|
"loss": 1.8965, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.06603174603174604, |
|
"grad_norm": 3.328125, |
|
"learning_rate": 3.64e-06, |
|
"loss": 1.8598, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.0673015873015873, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 3.71e-06, |
|
"loss": 1.9478, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.06857142857142857, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 3.7800000000000002e-06, |
|
"loss": 1.8397, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.06984126984126984, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 3.85e-06, |
|
"loss": 1.8759, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07111111111111111, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 3.920000000000001e-06, |
|
"loss": 1.9694, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07238095238095238, |
|
"grad_norm": 2.875, |
|
"learning_rate": 3.99e-06, |
|
"loss": 1.8433, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07365079365079365, |
|
"grad_norm": 3.09375, |
|
"learning_rate": 4.059999999999999e-06, |
|
"loss": 1.9218, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07492063492063492, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 4.129999999999999e-06, |
|
"loss": 1.8318, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.0761904761904762, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 4.2e-06, |
|
"loss": 1.898, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07746031746031747, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 4.27e-06, |
|
"loss": 1.8301, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.07873015873015873, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 4.34e-06, |
|
"loss": 1.8442, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 4.41e-06, |
|
"loss": 1.8299, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.08126984126984127, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 4.48e-06, |
|
"loss": 1.8178, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08253968253968254, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.5500000000000005e-06, |
|
"loss": 1.7625, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0838095238095238, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 4.62e-06, |
|
"loss": 1.7792, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08507936507936507, |
|
"grad_norm": 2.75, |
|
"learning_rate": 4.69e-06, |
|
"loss": 1.9265, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.08634920634920636, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 4.76e-06, |
|
"loss": 1.8957, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.08761904761904762, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 4.8299999999999995e-06, |
|
"loss": 1.7738, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.08888888888888889, |
|
"grad_norm": 2.625, |
|
"learning_rate": 4.9e-06, |
|
"loss": 1.7845, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.09015873015873016, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.97e-06, |
|
"loss": 1.6498, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.09142857142857143, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 5.04e-06, |
|
"loss": 1.7736, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.0926984126984127, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 5.11e-06, |
|
"loss": 1.7067, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09396825396825396, |
|
"grad_norm": 2.75, |
|
"learning_rate": 5.1799999999999995e-06, |
|
"loss": 1.8757, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09523809523809523, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 5.25e-06, |
|
"loss": 1.7789, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.09650793650793651, |
|
"grad_norm": 2.5, |
|
"learning_rate": 5.32e-06, |
|
"loss": 1.7109, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.09777777777777778, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 5.39e-06, |
|
"loss": 1.8135, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.09904761904761905, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 5.46e-06, |
|
"loss": 1.8264, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.10031746031746032, |
|
"grad_norm": 2.703125, |
|
"learning_rate": 5.53e-06, |
|
"loss": 1.822, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.10158730158730159, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 1.7601, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.10285714285714286, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 5.67e-06, |
|
"loss": 1.8076, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.10412698412698412, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 5.739999999999999e-06, |
|
"loss": 1.7772, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.10539682539682539, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 5.8099999999999994e-06, |
|
"loss": 1.8925, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.10666666666666667, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 5.88e-06, |
|
"loss": 1.7806, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.10793650793650794, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 5.95e-06, |
|
"loss": 1.6426, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.10920634920634921, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 6.02e-06, |
|
"loss": 1.7752, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.11047619047619048, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 6.09e-06, |
|
"loss": 1.7094, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.11174603174603175, |
|
"grad_norm": 2.5, |
|
"learning_rate": 6.16e-06, |
|
"loss": 1.671, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11301587301587301, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 6.23e-06, |
|
"loss": 1.6576, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11428571428571428, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 6.3e-06, |
|
"loss": 1.6634, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.11555555555555555, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 6.37e-06, |
|
"loss": 1.6374, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.11682539682539683, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 6.44e-06, |
|
"loss": 1.6832, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.1180952380952381, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 6.51e-06, |
|
"loss": 1.6738, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.11936507936507937, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 6.58e-06, |
|
"loss": 1.6874, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.12063492063492064, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 6.65e-06, |
|
"loss": 1.6468, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.1219047619047619, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 6.72e-06, |
|
"loss": 1.6261, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.12317460317460317, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 6.789999999999999e-06, |
|
"loss": 1.5797, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.12444444444444444, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 6.8599999999999995e-06, |
|
"loss": 1.6279, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.12571428571428572, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 6.93e-06, |
|
"loss": 1.6165, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.12698412698412698, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 7e-06, |
|
"loss": 1.7051, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.12825396825396826, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 6.999998140878257e-06, |
|
"loss": 1.6242, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.1295238095238095, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 6.999992563515e-06, |
|
"loss": 1.591, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1307936507936508, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 6.999983267916156e-06, |
|
"loss": 1.6395, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.13206349206349208, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 6.9999702540916e-06, |
|
"loss": 1.6801, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.13333333333333333, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 6.999953522055158e-06, |
|
"loss": 1.6198, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1346031746031746, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 6.999933071824603e-06, |
|
"loss": 1.6125, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.13587301587301587, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 6.9999089034216635e-06, |
|
"loss": 1.6474, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.13714285714285715, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 6.999881016872011e-06, |
|
"loss": 1.5838, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.1384126984126984, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 6.9998494122052754e-06, |
|
"loss": 1.5673, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.13968253968253969, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 6.9998140894550295e-06, |
|
"loss": 1.5612, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.14095238095238094, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 6.999775048658799e-06, |
|
"loss": 1.5632, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.14222222222222222, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 6.999732289858059e-06, |
|
"loss": 1.6273, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.1434920634920635, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 6.999685813098235e-06, |
|
"loss": 1.5568, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.14476190476190476, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 6.999635618428701e-06, |
|
"loss": 1.5606, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.14603174603174604, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 6.999581705902782e-06, |
|
"loss": 1.5716, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.1473015873015873, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 6.999524075577753e-06, |
|
"loss": 1.5832, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.14857142857142858, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 6.9994627275148364e-06, |
|
"loss": 1.6203, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.14984126984126983, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 6.999397661779208e-06, |
|
"loss": 1.5982, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.1511111111111111, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 6.999328878439989e-06, |
|
"loss": 1.4827, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.1523809523809524, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 6.99925637757025e-06, |
|
"loss": 1.5523, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.15365079365079365, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 6.9991801592470155e-06, |
|
"loss": 1.4812, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.15492063492063493, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 6.999100223551257e-06, |
|
"loss": 1.5039, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.15619047619047619, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 6.9990165705678915e-06, |
|
"loss": 1.5991, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.15746031746031747, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 6.9989292003857905e-06, |
|
"loss": 1.5819, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.15873015873015872, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 6.998838113097772e-06, |
|
"loss": 1.5072, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 6.998743308800602e-06, |
|
"loss": 1.539, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.16126984126984126, |
|
"grad_norm": 2.25, |
|
"learning_rate": 6.998644787594997e-06, |
|
"loss": 1.6346, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.16253968253968254, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 6.998542549585622e-06, |
|
"loss": 1.4874, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.16380952380952382, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 6.998436594881089e-06, |
|
"loss": 1.4606, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.16507936507936508, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 6.99832692359396e-06, |
|
"loss": 1.4556, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.16634920634920636, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 6.998213535840745e-06, |
|
"loss": 1.5251, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.1676190476190476, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 6.998096431741903e-06, |
|
"loss": 1.6041, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.1688888888888889, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 6.997975611421838e-06, |
|
"loss": 1.516, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.17015873015873015, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 6.997851075008906e-06, |
|
"loss": 1.401, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.17142857142857143, |
|
"grad_norm": 1.984375, |
|
"learning_rate": 6.997722822635408e-06, |
|
"loss": 1.3963, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.1726984126984127, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 6.997590854437593e-06, |
|
"loss": 1.4488, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.17396825396825397, |
|
"grad_norm": 1.9375, |
|
"learning_rate": 6.9974551705556605e-06, |
|
"loss": 1.4161, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.17523809523809525, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 6.997315771133752e-06, |
|
"loss": 1.4458, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1765079365079365, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 6.997172656319962e-06, |
|
"loss": 1.5626, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.17777777777777778, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 6.997025826266326e-06, |
|
"loss": 1.4079, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.17904761904761904, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 6.996875281128833e-06, |
|
"loss": 1.5113, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.18031746031746032, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 6.996721021067415e-06, |
|
"loss": 1.4129, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.18158730158730158, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 6.996563046245947e-06, |
|
"loss": 1.416, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.18285714285714286, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 6.99640135683226e-06, |
|
"loss": 1.4688, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.18412698412698414, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 6.9962359529981225e-06, |
|
"loss": 1.4409, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.1853968253968254, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 6.996066834919252e-06, |
|
"loss": 1.4301, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.18666666666666668, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 6.995894002775314e-06, |
|
"loss": 1.3702, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.18793650793650793, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 6.995717456749914e-06, |
|
"loss": 1.5495, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.1892063492063492, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 6.99553719703061e-06, |
|
"loss": 1.4909, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.19047619047619047, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 6.9953532238089014e-06, |
|
"loss": 1.4744, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.19174603174603175, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 6.995165537280231e-06, |
|
"loss": 1.3965, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.19301587301587303, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 6.994974137643991e-06, |
|
"loss": 1.4505, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.19428571428571428, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 6.994779025103515e-06, |
|
"loss": 1.4028, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.19555555555555557, |
|
"grad_norm": 1.625, |
|
"learning_rate": 6.994580199866081e-06, |
|
"loss": 1.4503, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.19682539682539682, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 6.994377662142914e-06, |
|
"loss": 1.3517, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.1980952380952381, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 6.9941714121491785e-06, |
|
"loss": 1.4896, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.19936507936507936, |
|
"grad_norm": 1.5, |
|
"learning_rate": 6.993961450103987e-06, |
|
"loss": 1.4458, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.20063492063492064, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 6.993747776230393e-06, |
|
"loss": 1.4939, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2019047619047619, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 6.9935303907553945e-06, |
|
"loss": 1.3731, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.20317460317460317, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 6.993309293909931e-06, |
|
"loss": 1.4482, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.20444444444444446, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 6.993084485928888e-06, |
|
"loss": 1.4789, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2057142857142857, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 6.992855967051091e-06, |
|
"loss": 1.3584, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.206984126984127, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 6.9926237375193055e-06, |
|
"loss": 1.3995, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.20825396825396825, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 6.992387797580246e-06, |
|
"loss": 1.4282, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.20952380952380953, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 6.992148147484561e-06, |
|
"loss": 1.4075, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.21079365079365078, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 6.991904787486846e-06, |
|
"loss": 1.4205, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.21206349206349207, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 6.991657717845635e-06, |
|
"loss": 1.365, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.21333333333333335, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.991406938823403e-06, |
|
"loss": 1.4606, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.2146031746031746, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 6.991152450686569e-06, |
|
"loss": 1.3828, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.21587301587301588, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 6.9908942537054875e-06, |
|
"loss": 1.4357, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.21714285714285714, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 6.990632348154456e-06, |
|
"loss": 1.3723, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.21841269841269842, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 6.990366734311711e-06, |
|
"loss": 1.4549, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.21968253968253967, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 6.9900974124594295e-06, |
|
"loss": 1.3897, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.22095238095238096, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 6.9898243828837265e-06, |
|
"loss": 1.381, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 6.989547645874657e-06, |
|
"loss": 1.3959, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.2234920634920635, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.989267201726213e-06, |
|
"loss": 1.3529, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.22476190476190477, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 6.988983050736326e-06, |
|
"loss": 1.3437, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.22603174603174603, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 6.988695193206866e-06, |
|
"loss": 1.3084, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.2273015873015873, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 6.98840362944364e-06, |
|
"loss": 1.2895, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.22857142857142856, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 6.9881083597563915e-06, |
|
"loss": 1.3205, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.22984126984126985, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 6.987809384458803e-06, |
|
"loss": 1.3976, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.2311111111111111, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 6.987506703868491e-06, |
|
"loss": 1.3126, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.23238095238095238, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 6.987200318307011e-06, |
|
"loss": 1.4032, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.23365079365079366, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 6.986890228099852e-06, |
|
"loss": 1.3045, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.23492063492063492, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 6.986576433576441e-06, |
|
"loss": 1.3976, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.2361904761904762, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 6.9862589350701396e-06, |
|
"loss": 1.3839, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.23746031746031745, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 6.985937732918243e-06, |
|
"loss": 1.3107, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.23873015873015874, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 6.985612827461983e-06, |
|
"loss": 1.4188, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 6.9852842190465244e-06, |
|
"loss": 1.2768, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.24126984126984127, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 6.984951908020966e-06, |
|
"loss": 1.2394, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.24253968253968253, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 6.984615894738339e-06, |
|
"loss": 1.3525, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.2438095238095238, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 6.984276179555611e-06, |
|
"loss": 1.3494, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.2450793650793651, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 6.983932762833678e-06, |
|
"loss": 1.3324, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.24634920634920635, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 6.983585644937373e-06, |
|
"loss": 1.2826, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.24761904761904763, |
|
"grad_norm": 1.375, |
|
"learning_rate": 6.983234826235456e-06, |
|
"loss": 1.3029, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.24888888888888888, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 6.982880307100624e-06, |
|
"loss": 1.3598, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.25015873015873014, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 6.982522087909498e-06, |
|
"loss": 1.3622, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.25142857142857145, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 6.9821601690426384e-06, |
|
"loss": 1.2689, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.2526984126984127, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 6.981794550884529e-06, |
|
"loss": 1.3869, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.25396825396825395, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 6.981425233823588e-06, |
|
"loss": 1.3919, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.25523809523809526, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 6.98105221825216e-06, |
|
"loss": 1.3206, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.2565079365079365, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 6.98067550456652e-06, |
|
"loss": 1.386, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.2577777777777778, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 6.980295093166873e-06, |
|
"loss": 1.409, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.259047619047619, |
|
"grad_norm": 1.375, |
|
"learning_rate": 6.97991098445735e-06, |
|
"loss": 1.3044, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.26031746031746034, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.979523178846011e-06, |
|
"loss": 1.29, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.2615873015873016, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 6.979131676744844e-06, |
|
"loss": 1.4239, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.26285714285714284, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 6.978736478569762e-06, |
|
"loss": 1.4212, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.26412698412698415, |
|
"grad_norm": 1.5, |
|
"learning_rate": 6.978337584740607e-06, |
|
"loss": 1.3177, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.2653968253968254, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 6.977934995681146e-06, |
|
"loss": 1.3258, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.26666666666666666, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 6.977528711819072e-06, |
|
"loss": 1.4282, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.2679365079365079, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 6.977118733586e-06, |
|
"loss": 1.4156, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.2692063492063492, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 6.976705061417477e-06, |
|
"loss": 1.4307, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.2704761904761905, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 6.976287695752965e-06, |
|
"loss": 1.3837, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.27174603174603174, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 6.975866637035859e-06, |
|
"loss": 1.3319, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.273015873015873, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 6.975441885713471e-06, |
|
"loss": 1.3346, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.2742857142857143, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 6.975013442237037e-06, |
|
"loss": 1.2907, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.27555555555555555, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 6.974581307061718e-06, |
|
"loss": 1.3801, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.2768253968253968, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 6.974145480646593e-06, |
|
"loss": 1.3904, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.2780952380952381, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 6.973705963454666e-06, |
|
"loss": 1.3587, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.27936507936507937, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 6.973262755952861e-06, |
|
"loss": 1.3843, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2806349206349206, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 6.9728158586120195e-06, |
|
"loss": 1.3899, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.2819047619047619, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 6.9723652719069074e-06, |
|
"loss": 1.3172, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.2831746031746032, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 6.971910996316207e-06, |
|
"loss": 1.3525, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.28444444444444444, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 6.97145303232252e-06, |
|
"loss": 1.2543, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.2857142857142857, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 6.970991380412367e-06, |
|
"loss": 1.3654, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.286984126984127, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 6.9705260410761876e-06, |
|
"loss": 1.3518, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.28825396825396826, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.970057014808337e-06, |
|
"loss": 1.406, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.2895238095238095, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 6.9695843021070855e-06, |
|
"loss": 1.2749, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.29079365079365077, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 6.969107903474625e-06, |
|
"loss": 1.3623, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.2920634920634921, |
|
"grad_norm": 1.375, |
|
"learning_rate": 6.9686278194170586e-06, |
|
"loss": 1.3651, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.29333333333333333, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 6.968144050444407e-06, |
|
"loss": 1.3215, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.2946031746031746, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 6.967656597070603e-06, |
|
"loss": 1.3236, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.2958730158730159, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 6.9671654598134965e-06, |
|
"loss": 1.2878, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.29714285714285715, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 6.96667063919485e-06, |
|
"loss": 1.257, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.2984126984126984, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 6.966172135740339e-06, |
|
"loss": 1.3465, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.29968253968253966, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 6.96566994997955e-06, |
|
"loss": 1.3257, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.30095238095238097, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 6.965164082445983e-06, |
|
"loss": 1.367, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.3022222222222222, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 6.96465453367705e-06, |
|
"loss": 1.3181, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.3034920634920635, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 6.964141304214072e-06, |
|
"loss": 1.3274, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.3047619047619048, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 6.963624394602281e-06, |
|
"loss": 1.2907, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.30603174603174604, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 6.963103805390821e-06, |
|
"loss": 1.3028, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.3073015873015873, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.9625795371327375e-06, |
|
"loss": 1.3416, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.30857142857142855, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 6.962051590384995e-06, |
|
"loss": 1.3399, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.30984126984126986, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 6.961519965708457e-06, |
|
"loss": 1.2766, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.3111111111111111, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 6.9609846636679e-06, |
|
"loss": 1.1575, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.31238095238095237, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 6.960445684832004e-06, |
|
"loss": 1.276, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.3136507936507936, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.959903029773356e-06, |
|
"loss": 1.2703, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.31492063492063493, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 6.9593566990684474e-06, |
|
"loss": 1.3219, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.3161904761904762, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 6.9588066932976785e-06, |
|
"loss": 1.314, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.31746031746031744, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 6.958253013045348e-06, |
|
"loss": 1.2881, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.31873015873015875, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.957695658899663e-06, |
|
"loss": 1.336, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 6.95713463145273e-06, |
|
"loss": 1.3237, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.32126984126984126, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 6.956569931300559e-06, |
|
"loss": 1.2288, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.3225396825396825, |
|
"grad_norm": 1.375, |
|
"learning_rate": 6.956001559043064e-06, |
|
"loss": 1.3324, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.3238095238095238, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 6.955429515284058e-06, |
|
"loss": 1.329, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.3250793650793651, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 6.954853800631254e-06, |
|
"loss": 1.2696, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.32634920634920633, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.954274415696267e-06, |
|
"loss": 1.3572, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.32761904761904764, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 6.953691361094606e-06, |
|
"loss": 1.2582, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.3288888888888889, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 6.953104637445686e-06, |
|
"loss": 1.2598, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.33015873015873015, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 6.952514245372815e-06, |
|
"loss": 1.274, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.3314285714285714, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 6.951920185503199e-06, |
|
"loss": 1.2684, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.3326984126984127, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 6.951322458467938e-06, |
|
"loss": 1.2921, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.33396825396825397, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 6.950721064902034e-06, |
|
"loss": 1.3409, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.3352380952380952, |
|
"grad_norm": 1.375, |
|
"learning_rate": 6.95011600544438e-06, |
|
"loss": 1.2827, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.33650793650793653, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 6.9495072807377634e-06, |
|
"loss": 1.3019, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.3377777777777778, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 6.948894891428866e-06, |
|
"loss": 1.2564, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.33904761904761904, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 6.948278838168263e-06, |
|
"loss": 1.2545, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.3403174603174603, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 6.947659121610421e-06, |
|
"loss": 1.29, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.3415873015873016, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 6.947035742413701e-06, |
|
"loss": 1.1939, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.34285714285714286, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 6.9464087012403534e-06, |
|
"loss": 1.2794, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.3441269841269841, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 6.945777998756516e-06, |
|
"loss": 1.2046, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.3453968253968254, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 6.94514363563222e-06, |
|
"loss": 1.2897, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.3466666666666667, |
|
"grad_norm": 1.375, |
|
"learning_rate": 6.944505612541386e-06, |
|
"loss": 1.3137, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.34793650793650793, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 6.94386393016182e-06, |
|
"loss": 1.2758, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.3492063492063492, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 6.943218589175216e-06, |
|
"loss": 1.2078, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.3504761904761905, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 6.942569590267157e-06, |
|
"loss": 1.2597, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.35174603174603175, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 6.9419169341271085e-06, |
|
"loss": 1.2439, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.353015873015873, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 6.9412606214484245e-06, |
|
"loss": 1.2509, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.35428571428571426, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 6.9406006529283425e-06, |
|
"loss": 1.2124, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.35555555555555557, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 6.939937029267983e-06, |
|
"loss": 1.2922, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.3568253968253968, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 6.93926975117235e-06, |
|
"loss": 1.3179, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.3580952380952381, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 6.93859881935033e-06, |
|
"loss": 1.2535, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.3593650793650794, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 6.937924234514692e-06, |
|
"loss": 1.2865, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.36063492063492064, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 6.9372459973820815e-06, |
|
"loss": 1.2395, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.3619047619047619, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 6.936564108673031e-06, |
|
"loss": 1.2963, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.36317460317460315, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 6.935878569111948e-06, |
|
"loss": 1.2609, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.36444444444444446, |
|
"grad_norm": 1.25, |
|
"learning_rate": 6.935189379427116e-06, |
|
"loss": 1.1527, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.3657142857142857, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 6.934496540350704e-06, |
|
"loss": 1.3306, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.36698412698412697, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 6.933800052618749e-06, |
|
"loss": 1.1579, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.3682539682539683, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.933099916971171e-06, |
|
"loss": 1.2648, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.36952380952380953, |
|
"grad_norm": 1.375, |
|
"learning_rate": 6.932396134151762e-06, |
|
"loss": 1.2618, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.3707936507936508, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 6.9316887049081885e-06, |
|
"loss": 1.2435, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.37206349206349204, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 6.930977629991993e-06, |
|
"loss": 1.196, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.37333333333333335, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 6.93026291015859e-06, |
|
"loss": 1.1568, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.3746031746031746, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 6.929544546167265e-06, |
|
"loss": 1.2868, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.37587301587301586, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 6.928822538781175e-06, |
|
"loss": 1.1996, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.37714285714285717, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 6.92809688876735e-06, |
|
"loss": 1.2809, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.3784126984126984, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 6.9273675968966874e-06, |
|
"loss": 1.204, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.3796825396825397, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 6.926634663943954e-06, |
|
"loss": 1.1716, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.38095238095238093, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 6.925898090687786e-06, |
|
"loss": 1.2552, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.38222222222222224, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 6.9251578779106855e-06, |
|
"loss": 1.179, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.3834920634920635, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 6.9244140263990194e-06, |
|
"loss": 1.1177, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.38476190476190475, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 6.9236665369430255e-06, |
|
"loss": 1.2488, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.38603174603174606, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 6.9229154103368015e-06, |
|
"loss": 1.208, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.3873015873015873, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 6.92216064737831e-06, |
|
"loss": 1.1659, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.38857142857142857, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 6.9214022488693786e-06, |
|
"loss": 1.2404, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.3898412698412698, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 6.920640215615697e-06, |
|
"loss": 1.2515, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.39111111111111113, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 6.919874548426813e-06, |
|
"loss": 1.2701, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.3923809523809524, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 6.919105248116138e-06, |
|
"loss": 1.253, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.39365079365079364, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 6.918332315500942e-06, |
|
"loss": 1.1332, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.3949206349206349, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 6.917555751402356e-06, |
|
"loss": 1.2004, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.3961904761904762, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 6.916775556645364e-06, |
|
"loss": 1.2051, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.39746031746031746, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 6.915991732058812e-06, |
|
"loss": 1.249, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.3987301587301587, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 6.915204278475399e-06, |
|
"loss": 1.2079, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.25, |
|
"learning_rate": 6.914413196731681e-06, |
|
"loss": 1.2628, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.4012698412698413, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 6.913618487668069e-06, |
|
"loss": 1.3653, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.40253968253968253, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 6.912820152128825e-06, |
|
"loss": 1.1615, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.4038095238095238, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 6.912018190962065e-06, |
|
"loss": 1.1839, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.4050793650793651, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 6.911212605019757e-06, |
|
"loss": 1.1813, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.40634920634920635, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 6.910403395157719e-06, |
|
"loss": 1.2426, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.4076190476190476, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 6.909590562235621e-06, |
|
"loss": 1.2174, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.4088888888888889, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 6.908774107116979e-06, |
|
"loss": 1.2622, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.41015873015873017, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 6.907954030669158e-06, |
|
"loss": 1.2048, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.4114285714285714, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 6.907130333763371e-06, |
|
"loss": 1.3177, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.4126984126984127, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 6.906303017274677e-06, |
|
"loss": 1.2328, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.413968253968254, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 6.90547208208198e-06, |
|
"loss": 1.2359, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.41523809523809524, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 6.904637529068028e-06, |
|
"loss": 1.1834, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.4165079365079365, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 6.9037993591194145e-06, |
|
"loss": 1.1702, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.4177777777777778, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 6.902957573126571e-06, |
|
"loss": 1.3178, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.41904761904761906, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 6.902112171983775e-06, |
|
"loss": 1.2117, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.4203174603174603, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 6.901263156589144e-06, |
|
"loss": 1.2687, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.42158730158730157, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 6.90041052784463e-06, |
|
"loss": 1.2933, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.4228571428571429, |
|
"grad_norm": 1.125, |
|
"learning_rate": 6.899554286656032e-06, |
|
"loss": 1.2205, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.42412698412698413, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 6.89869443393298e-06, |
|
"loss": 1.2044, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.4253968253968254, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 6.897830970588943e-06, |
|
"loss": 1.2005, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.4266666666666667, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 6.896963897541227e-06, |
|
"loss": 1.2374, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.42793650793650795, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 6.896093215710971e-06, |
|
"loss": 1.2029, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.4292063492063492, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.89521892602315e-06, |
|
"loss": 1.1452, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.43047619047619046, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 6.894341029406567e-06, |
|
"loss": 1.2151, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.43174603174603177, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 6.893459526793863e-06, |
|
"loss": 1.225, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.433015873015873, |
|
"grad_norm": 1.125, |
|
"learning_rate": 6.8925744191215055e-06, |
|
"loss": 1.2256, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.4342857142857143, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.8916857073297935e-06, |
|
"loss": 1.1827, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.43555555555555553, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 6.890793392362855e-06, |
|
"loss": 1.2185, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.43682539682539684, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.889897475168645e-06, |
|
"loss": 1.1634, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.4380952380952381, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.888997956698947e-06, |
|
"loss": 1.2444, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.43936507936507935, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.888094837909369e-06, |
|
"loss": 1.1364, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.44063492063492066, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 6.887188119759343e-06, |
|
"loss": 1.2109, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.4419047619047619, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 6.886277803212125e-06, |
|
"loss": 1.2345, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.44317460317460317, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.885363889234797e-06, |
|
"loss": 1.1564, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.884446378798258e-06, |
|
"loss": 1.1894, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.44571428571428573, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.8835252728772335e-06, |
|
"loss": 1.194, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.446984126984127, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 6.882600572450261e-06, |
|
"loss": 1.2596, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.44825396825396824, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 6.881672278499705e-06, |
|
"loss": 1.1671, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.44952380952380955, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 6.880740392011738e-06, |
|
"loss": 1.0679, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.4507936507936508, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 6.879804913976361e-06, |
|
"loss": 1.2133, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.45206349206349206, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 6.87886584538738e-06, |
|
"loss": 1.2645, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.4533333333333333, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.87792318724242e-06, |
|
"loss": 1.2126, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.4546031746031746, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 6.87697694054292e-06, |
|
"loss": 1.2352, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.4558730158730159, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.8760271062941286e-06, |
|
"loss": 1.1688, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.45714285714285713, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.875073685505108e-06, |
|
"loss": 1.2012, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.45841269841269844, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 6.874116679188731e-06, |
|
"loss": 1.2021, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.4596825396825397, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 6.873156088361677e-06, |
|
"loss": 1.2262, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.46095238095238095, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.872191914044435e-06, |
|
"loss": 1.1552, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.4622222222222222, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 6.8712241572613e-06, |
|
"loss": 1.2148, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.4634920634920635, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.870252819040374e-06, |
|
"loss": 1.2414, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.46476190476190476, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.869277900413564e-06, |
|
"loss": 1.1179, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.466031746031746, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 6.868299402416579e-06, |
|
"loss": 1.0943, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.46730158730158733, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.867317326088932e-06, |
|
"loss": 1.1381, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.4685714285714286, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.866331672473937e-06, |
|
"loss": 1.174, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.46984126984126984, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.865342442618709e-06, |
|
"loss": 1.2216, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.4711111111111111, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.86434963757416e-06, |
|
"loss": 1.1455, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.4723809523809524, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 6.863353258395003e-06, |
|
"loss": 1.1913, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.47365079365079366, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.8623533061397456e-06, |
|
"loss": 1.2139, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.4749206349206349, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 6.861349781870693e-06, |
|
"loss": 1.1511, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.47619047619047616, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.8603426866539436e-06, |
|
"loss": 1.15, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.4774603174603175, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.859332021559393e-06, |
|
"loss": 1.1681, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.47873015873015873, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 6.858317787660723e-06, |
|
"loss": 1.2131, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.857299986035413e-06, |
|
"loss": 1.1533, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.4812698412698413, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 6.856278617764729e-06, |
|
"loss": 1.1488, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.48253968253968255, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 6.855253683933727e-06, |
|
"loss": 1.12, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4838095238095238, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 6.85422518563125e-06, |
|
"loss": 1.2726, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.48507936507936505, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.85319312394993e-06, |
|
"loss": 1.2218, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.48634920634920636, |
|
"grad_norm": 1.125, |
|
"learning_rate": 6.852157499986183e-06, |
|
"loss": 1.1984, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.4876190476190476, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.85111831484021e-06, |
|
"loss": 1.1689, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.4888888888888889, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.8500755696159925e-06, |
|
"loss": 1.1334, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.4901587301587302, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.849029265421299e-06, |
|
"loss": 1.0929, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.49142857142857144, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.847979403367674e-06, |
|
"loss": 1.2601, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.4926984126984127, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.846925984570446e-06, |
|
"loss": 1.2579, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.49396825396825395, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.8458690101487195e-06, |
|
"loss": 1.1605, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.49523809523809526, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.844808481225377e-06, |
|
"loss": 1.1947, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.4965079365079365, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.8437443989270756e-06, |
|
"loss": 1.0904, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.49777777777777776, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 6.84267676438425e-06, |
|
"loss": 1.1382, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.4990476190476191, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 6.8416055787311076e-06, |
|
"loss": 1.252, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5003174603174603, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.840530843105628e-06, |
|
"loss": 1.1412, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.5015873015873016, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.839452558649561e-06, |
|
"loss": 1.1867, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5028571428571429, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.83837072650843e-06, |
|
"loss": 1.1628, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.5041269841269841, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 6.837285347831523e-06, |
|
"loss": 1.1583, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5053968253968254, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.836196423771898e-06, |
|
"loss": 1.2381, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5066666666666667, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.835103955486381e-06, |
|
"loss": 1.2314, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.5079365079365079, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.83400794413556e-06, |
|
"loss": 1.2734, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.5092063492063492, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.832908390883789e-06, |
|
"loss": 1.2223, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.5104761904761905, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.831805296899183e-06, |
|
"loss": 1.1663, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.5117460317460317, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.8306986633536214e-06, |
|
"loss": 1.1652, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.513015873015873, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.829588491422743e-06, |
|
"loss": 1.121, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.5142857142857142, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.8284747822859415e-06, |
|
"loss": 1.2003, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.5155555555555555, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.827357537126373e-06, |
|
"loss": 1.2256, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.5168253968253969, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 6.826236757130951e-06, |
|
"loss": 1.1817, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.518095238095238, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.8251124434903385e-06, |
|
"loss": 1.1242, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.5193650793650794, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.823984597398959e-06, |
|
"loss": 1.1524, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.5206349206349207, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.822853220054984e-06, |
|
"loss": 1.2177, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5219047619047619, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 6.821718312660339e-06, |
|
"loss": 1.3058, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.5231746031746032, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 6.820579876420699e-06, |
|
"loss": 1.1484, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.5244444444444445, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.819437912545487e-06, |
|
"loss": 1.1845, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.5257142857142857, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 6.818292422247875e-06, |
|
"loss": 1.1738, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.526984126984127, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.8171434067447804e-06, |
|
"loss": 1.2016, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.5282539682539683, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.815990867256866e-06, |
|
"loss": 1.1561, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.5295238095238095, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 6.814834805008538e-06, |
|
"loss": 1.1634, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.5307936507936508, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.813675221227945e-06, |
|
"loss": 1.1828, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.532063492063492, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.812512117146979e-06, |
|
"loss": 1.1899, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 6.811345494001267e-06, |
|
"loss": 1.2415, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5346031746031746, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 6.810175353030178e-06, |
|
"loss": 1.2068, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.5358730158730158, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.809001695476819e-06, |
|
"loss": 1.2132, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.5371428571428571, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.807824522588029e-06, |
|
"loss": 1.235, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.5384126984126985, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.806643835614384e-06, |
|
"loss": 1.1771, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.5396825396825397, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.805459635810196e-06, |
|
"loss": 1.1837, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.540952380952381, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.804271924433503e-06, |
|
"loss": 1.2026, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.5422222222222223, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.8030807027460774e-06, |
|
"loss": 1.194, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.5434920634920635, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 6.80188597201342e-06, |
|
"loss": 1.2045, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.5447619047619048, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.800687733504759e-06, |
|
"loss": 1.1725, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.546031746031746, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.79948598849305e-06, |
|
"loss": 1.2031, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.5473015873015873, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.798280738254972e-06, |
|
"loss": 1.2332, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.5485714285714286, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.797071984070929e-06, |
|
"loss": 1.1543, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.5498412698412698, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.7958597272250504e-06, |
|
"loss": 1.2151, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.5511111111111111, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.794643969005179e-06, |
|
"loss": 1.236, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.5523809523809524, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.793424710702885e-06, |
|
"loss": 1.2502, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.5536507936507936, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.792201953613454e-06, |
|
"loss": 1.1059, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.5549206349206349, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.790975699035886e-06, |
|
"loss": 1.1807, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.5561904761904762, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.789745948272902e-06, |
|
"loss": 1.1415, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.5574603174603174, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.788512702630932e-06, |
|
"loss": 1.1708, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.5587301587301587, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.787275963420123e-06, |
|
"loss": 1.147, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.786035731954329e-06, |
|
"loss": 1.1925, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.5612698412698413, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.7847920095511185e-06, |
|
"loss": 1.1599, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.5625396825396826, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.783544797531766e-06, |
|
"loss": 1.1751, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.5638095238095238, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.782294097221253e-06, |
|
"loss": 1.2241, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.5650793650793651, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.781039909948268e-06, |
|
"loss": 1.2607, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.5663492063492064, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.779782237045204e-06, |
|
"loss": 1.1699, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.5676190476190476, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.778521079848157e-06, |
|
"loss": 1.2567, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.5688888888888889, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.777256439696922e-06, |
|
"loss": 1.168, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.5701587301587302, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 6.775988317934998e-06, |
|
"loss": 1.2496, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.5714285714285714, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.774716715909579e-06, |
|
"loss": 1.2178, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.5726984126984127, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.773441634971561e-06, |
|
"loss": 1.1594, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.573968253968254, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.772163076475531e-06, |
|
"loss": 1.1178, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.5752380952380952, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.770881041779771e-06, |
|
"loss": 1.1934, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.5765079365079365, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.769595532246259e-06, |
|
"loss": 1.1575, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.5777777777777777, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.768306549240664e-06, |
|
"loss": 1.1751, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.579047619047619, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.767014094132342e-06, |
|
"loss": 1.2337, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.5803174603174603, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 6.7657181682943415e-06, |
|
"loss": 1.1441, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.5815873015873015, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.764418773103393e-06, |
|
"loss": 1.2132, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.5828571428571429, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.763115909939919e-06, |
|
"loss": 1.171, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.5841269841269842, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.761809580188021e-06, |
|
"loss": 1.1907, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.5853968253968254, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.760499785235486e-06, |
|
"loss": 1.055, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.5866666666666667, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.759186526473782e-06, |
|
"loss": 1.2006, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.587936507936508, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 6.757869805298055e-06, |
|
"loss": 1.1229, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.5892063492063492, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.756549623107134e-06, |
|
"loss": 1.1498, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.5904761904761905, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.755225981303518e-06, |
|
"loss": 1.1409, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.5917460317460318, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.7538988812933854e-06, |
|
"loss": 1.121, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.593015873015873, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.752568324486589e-06, |
|
"loss": 1.2086, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.5942857142857143, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.751234312296654e-06, |
|
"loss": 1.1454, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.5955555555555555, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.749896846140772e-06, |
|
"loss": 1.242, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.5968253968253968, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.74855592743981e-06, |
|
"loss": 1.1985, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5980952380952381, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.7472115576182975e-06, |
|
"loss": 1.1654, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.5993650793650793, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.745863738104435e-06, |
|
"loss": 1.2013, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.6006349206349206, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.744512470330087e-06, |
|
"loss": 1.2208, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.6019047619047619, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.743157755730776e-06, |
|
"loss": 1.183, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.6031746031746031, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.741799595745694e-06, |
|
"loss": 1.1281, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.6044444444444445, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 6.740437991817686e-06, |
|
"loss": 1.2161, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.6057142857142858, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.739072945393262e-06, |
|
"loss": 1.1631, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.606984126984127, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.737704457922585e-06, |
|
"loss": 1.1185, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.6082539682539683, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.736332530859474e-06, |
|
"loss": 1.1549, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.6095238095238096, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.734957165661405e-06, |
|
"loss": 1.248, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.6107936507936508, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.733578363789503e-06, |
|
"loss": 1.213, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.6120634920634921, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.732196126708547e-06, |
|
"loss": 1.2044, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.6133333333333333, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.7308104558869616e-06, |
|
"loss": 1.1523, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.6146031746031746, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.729421352796823e-06, |
|
"loss": 1.2592, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.6158730158730159, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 6.728028818913852e-06, |
|
"loss": 1.1671, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.6171428571428571, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.726632855717414e-06, |
|
"loss": 1.1276, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.6184126984126984, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.725233464690519e-06, |
|
"loss": 1.1712, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.6196825396825397, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.723830647319816e-06, |
|
"loss": 1.1428, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.6209523809523809, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.722424405095596e-06, |
|
"loss": 1.1649, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.6222222222222222, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.721014739511787e-06, |
|
"loss": 1.1549, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.6234920634920635, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 6.719601652065957e-06, |
|
"loss": 1.0888, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.6247619047619047, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 6.7181851442593045e-06, |
|
"loss": 1.1915, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.626031746031746, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.716765217596666e-06, |
|
"loss": 1.0786, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.6273015873015872, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.715341873586506e-06, |
|
"loss": 1.209, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.6285714285714286, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 6.713915113740924e-06, |
|
"loss": 1.2102, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.6298412698412699, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 6.712484939575643e-06, |
|
"loss": 1.2003, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.6311111111111111, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.7110513526100184e-06, |
|
"loss": 1.2036, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.6323809523809524, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 6.7096143543670264e-06, |
|
"loss": 1.1949, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.6336507936507937, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.708173946373274e-06, |
|
"loss": 1.1559, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.6349206349206349, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.706730130158981e-06, |
|
"loss": 1.2509, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.6361904761904762, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.705282907257997e-06, |
|
"loss": 1.1922, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.6374603174603175, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.703832279207786e-06, |
|
"loss": 1.1691, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.6387301587301587, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.7023782475494296e-06, |
|
"loss": 1.1493, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.700920813827626e-06, |
|
"loss": 1.1078, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.6412698412698413, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.699459979590689e-06, |
|
"loss": 1.1494, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.6425396825396825, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.697995746390542e-06, |
|
"loss": 1.2077, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.6438095238095238, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.696528115782721e-06, |
|
"loss": 1.1758, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.645079365079365, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.695057089326372e-06, |
|
"loss": 1.1779, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.6463492063492063, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.693582668584248e-06, |
|
"loss": 1.2281, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.6476190476190476, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.6921048551227065e-06, |
|
"loss": 1.1934, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.6488888888888888, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.690623650511711e-06, |
|
"loss": 1.24, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.6501587301587302, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.689139056324826e-06, |
|
"loss": 1.1979, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.6514285714285715, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.68765107413922e-06, |
|
"loss": 1.2034, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.6526984126984127, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.686159705535657e-06, |
|
"loss": 1.1913, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.653968253968254, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.684664952098503e-06, |
|
"loss": 1.2158, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.6552380952380953, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.683166815415712e-06, |
|
"loss": 1.1612, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.6565079365079365, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 6.681665297078842e-06, |
|
"loss": 1.1911, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.6577777777777778, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.6801603986830395e-06, |
|
"loss": 1.1902, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.659047619047619, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.6786521218270374e-06, |
|
"loss": 1.1424, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.6603174603174603, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.6771404681131634e-06, |
|
"loss": 1.1362, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.6615873015873016, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.675625439147331e-06, |
|
"loss": 1.1517, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.6628571428571428, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.67410703653904e-06, |
|
"loss": 1.1232, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.6641269841269841, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.672585261901371e-06, |
|
"loss": 1.1583, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.6653968253968254, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 6.671060116850993e-06, |
|
"loss": 1.1405, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.669531603008148e-06, |
|
"loss": 1.1955, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.6679365079365079, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.6679997219966635e-06, |
|
"loss": 1.1239, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.6692063492063492, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 6.66646447544394e-06, |
|
"loss": 1.2009, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.6704761904761904, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.664925864980956e-06, |
|
"loss": 1.1367, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.6717460317460318, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.663383892242261e-06, |
|
"loss": 1.1926, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.6730158730158731, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.661838558865978e-06, |
|
"loss": 1.1656, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.6742857142857143, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.6602898664938014e-06, |
|
"loss": 1.2526, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.6755555555555556, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.658737816770992e-06, |
|
"loss": 1.2039, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.6768253968253968, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.657182411346377e-06, |
|
"loss": 1.1791, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.6780952380952381, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 6.655623651872352e-06, |
|
"loss": 1.1603, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.6793650793650794, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.654061540004871e-06, |
|
"loss": 1.1164, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.6806349206349206, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.652496077403453e-06, |
|
"loss": 1.2685, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.6819047619047619, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.650927265731175e-06, |
|
"loss": 1.1049, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.6831746031746032, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.649355106654672e-06, |
|
"loss": 1.1492, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.6844444444444444, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.647779601844137e-06, |
|
"loss": 1.2699, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.6857142857142857, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.646200752973315e-06, |
|
"loss": 1.1014, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.686984126984127, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.6446185617195034e-06, |
|
"loss": 1.2045, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.6882539682539682, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.643033029763553e-06, |
|
"loss": 1.1571, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.6895238095238095, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.641444158789861e-06, |
|
"loss": 1.1805, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.6907936507936508, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.639851950486374e-06, |
|
"loss": 1.28, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.692063492063492, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.638256406544582e-06, |
|
"loss": 1.2255, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.6933333333333334, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.636657528659518e-06, |
|
"loss": 1.1858, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.6946031746031746, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.6350553185297625e-06, |
|
"loss": 1.1587, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.6958730158730159, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.633449777857429e-06, |
|
"loss": 1.1482, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.6971428571428572, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.631840908348171e-06, |
|
"loss": 1.232, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.6984126984126984, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.6302287117111825e-06, |
|
"loss": 1.0773, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.6996825396825397, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.628613189659187e-06, |
|
"loss": 1.1754, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.700952380952381, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.626994343908444e-06, |
|
"loss": 1.1066, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.7022222222222222, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.625372176178743e-06, |
|
"loss": 1.2023, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.7034920634920635, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.6237466881934e-06, |
|
"loss": 1.1735, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.7047619047619048, |
|
"grad_norm": 0.875, |
|
"learning_rate": 6.622117881679265e-06, |
|
"loss": 1.0454, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.706031746031746, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.620485758366705e-06, |
|
"loss": 1.1948, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.7073015873015873, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.6188503199896175e-06, |
|
"loss": 1.1723, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.7085714285714285, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.617211568285419e-06, |
|
"loss": 1.2298, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.7098412698412698, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.615569504995044e-06, |
|
"loss": 1.1308, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.7111111111111111, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.6139241318629486e-06, |
|
"loss": 1.1852, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.7123809523809523, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.612275450637103e-06, |
|
"loss": 1.1247, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.7136507936507936, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.610623463068995e-06, |
|
"loss": 1.1734, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.714920634920635, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.608968170913618e-06, |
|
"loss": 1.1356, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.7161904761904762, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.607309575929483e-06, |
|
"loss": 1.1212, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.7174603174603175, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.6056476798786075e-06, |
|
"loss": 1.1004, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.7187301587301588, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.603982484526515e-06, |
|
"loss": 1.1515, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.602313991642234e-06, |
|
"loss": 1.1864, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.7212698412698413, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.600642202998296e-06, |
|
"loss": 1.2114, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.7225396825396826, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.598967120370735e-06, |
|
"loss": 1.2265, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.7238095238095238, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.597288745539086e-06, |
|
"loss": 1.2089, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.7250793650793651, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.595607080286378e-06, |
|
"loss": 1.1903, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.7263492063492063, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.593922126399136e-06, |
|
"loss": 1.1926, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.7276190476190476, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.59223388566738e-06, |
|
"loss": 1.1675, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.7288888888888889, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 6.5905423598846224e-06, |
|
"loss": 1.1888, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.7301587301587301, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.588847550847864e-06, |
|
"loss": 1.1543, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.7314285714285714, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.587149460357595e-06, |
|
"loss": 1.198, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.7326984126984127, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.5854480902177885e-06, |
|
"loss": 1.1214, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.7339682539682539, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.583743442235906e-06, |
|
"loss": 1.1976, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.7352380952380952, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.582035518222888e-06, |
|
"loss": 1.1888, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.7365079365079366, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.580324319993159e-06, |
|
"loss": 1.1821, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.7377777777777778, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.578609849364617e-06, |
|
"loss": 1.1231, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.7390476190476191, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.57689210815864e-06, |
|
"loss": 1.2334, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.7403174603174603, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.5751710982000795e-06, |
|
"loss": 1.1552, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.7415873015873016, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.573446821317259e-06, |
|
"loss": 1.1631, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.7428571428571429, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.571719279341972e-06, |
|
"loss": 1.153, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.7441269841269841, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.569988474109486e-06, |
|
"loss": 1.1754, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.7453968253968254, |
|
"grad_norm": 0.84765625, |
|
"learning_rate": 6.568254407458526e-06, |
|
"loss": 1.1606, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.7466666666666667, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.56651708123129e-06, |
|
"loss": 1.1695, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.7479365079365079, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.564776497273436e-06, |
|
"loss": 1.2078, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.7492063492063492, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 6.56303265743408e-06, |
|
"loss": 1.125, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.7504761904761905, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.5612855635658015e-06, |
|
"loss": 1.1365, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.7517460317460317, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.559535217524634e-06, |
|
"loss": 1.2071, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.753015873015873, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 6.557781621170068e-06, |
|
"loss": 1.1188, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.7542857142857143, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 6.556024776365044e-06, |
|
"loss": 1.1334, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.7555555555555555, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.554264684975958e-06, |
|
"loss": 1.2258, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.7568253968253968, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.552501348872649e-06, |
|
"loss": 1.1216, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.758095238095238, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.550734769928409e-06, |
|
"loss": 1.1999, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.7593650793650794, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.548964950019971e-06, |
|
"loss": 1.2001, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.7606349206349207, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 6.547191891027514e-06, |
|
"loss": 1.1323, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.7619047619047619, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.545415594834656e-06, |
|
"loss": 1.0832, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.7631746031746032, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 6.5436360633284535e-06, |
|
"loss": 1.145, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.7644444444444445, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.541853298399402e-06, |
|
"loss": 1.1393, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.7657142857142857, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.540067301941432e-06, |
|
"loss": 1.1743, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.766984126984127, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.538278075851906e-06, |
|
"loss": 1.1077, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.7682539682539683, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.536485622031617e-06, |
|
"loss": 1.1973, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.7695238095238095, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 6.534689942384788e-06, |
|
"loss": 1.2011, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.7707936507936508, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.532891038819069e-06, |
|
"loss": 1.2117, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.7720634920634921, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.531088913245536e-06, |
|
"loss": 1.1794, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.7733333333333333, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.529283567578683e-06, |
|
"loss": 1.1103, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.7746031746031746, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.527475003736433e-06, |
|
"loss": 1.1573, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.7758730158730158, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.52566322364012e-06, |
|
"loss": 1.2098, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.7771428571428571, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.523848229214499e-06, |
|
"loss": 1.1054, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.7784126984126984, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.52203002238774e-06, |
|
"loss": 1.1287, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.7796825396825396, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.520208605091422e-06, |
|
"loss": 1.1954, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.780952380952381, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.518383979260542e-06, |
|
"loss": 1.1641, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.7822222222222223, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.516556146833496e-06, |
|
"loss": 1.1232, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.7834920634920635, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.514725109752094e-06, |
|
"loss": 1.0694, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.7847619047619048, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.512890869961548e-06, |
|
"loss": 1.2341, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.7860317460317461, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.51105342941047e-06, |
|
"loss": 1.1597, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.7873015873015873, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.509212790050879e-06, |
|
"loss": 1.2067, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.7885714285714286, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.507368953838183e-06, |
|
"loss": 1.1783, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.7898412698412698, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.505521922731195e-06, |
|
"loss": 1.1478, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.7911111111111111, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.503671698692115e-06, |
|
"loss": 1.1335, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.7923809523809524, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.501818283686542e-06, |
|
"loss": 1.1269, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.7936507936507936, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.499961679683458e-06, |
|
"loss": 1.165, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.7949206349206349, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.498101888655238e-06, |
|
"loss": 1.2347, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.7961904761904762, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.4962389125776395e-06, |
|
"loss": 1.1218, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.7974603174603174, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.494372753429806e-06, |
|
"loss": 1.1768, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.7987301587301587, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 6.492503413194261e-06, |
|
"loss": 1.1636, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.490630893856908e-06, |
|
"loss": 1.1189, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.8012698412698412, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.488755197407028e-06, |
|
"loss": 1.1812, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.8025396825396826, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 6.486876325837276e-06, |
|
"loss": 1.1498, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.8038095238095239, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.4849942811436835e-06, |
|
"loss": 1.1018, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.8050793650793651, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.483109065325648e-06, |
|
"loss": 1.1646, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.8063492063492064, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.481220680385941e-06, |
|
"loss": 1.2117, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.8076190476190476, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.479329128330696e-06, |
|
"loss": 1.1717, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.8088888888888889, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.477434411169414e-06, |
|
"loss": 1.1423, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.8101587301587302, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.475536530914958e-06, |
|
"loss": 1.204, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.8114285714285714, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.473635489583552e-06, |
|
"loss": 1.1459, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.8126984126984127, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 6.471731289194776e-06, |
|
"loss": 1.1183, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.813968253968254, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.469823931771567e-06, |
|
"loss": 1.1436, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.8152380952380952, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.467913419340218e-06, |
|
"loss": 1.2367, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.8165079365079365, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.465999753930371e-06, |
|
"loss": 1.0817, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.8177777777777778, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.4640829375750175e-06, |
|
"loss": 1.168, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.819047619047619, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.462162972310498e-06, |
|
"loss": 1.1851, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.8203174603174603, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 6.4602398601765e-06, |
|
"loss": 1.1486, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.8215873015873015, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.458313603216049e-06, |
|
"loss": 1.1594, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.8228571428571428, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.456384203475516e-06, |
|
"loss": 1.1407, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.8241269841269842, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.454451663004607e-06, |
|
"loss": 1.1415, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.8253968253968254, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 6.4525159838563695e-06, |
|
"loss": 1.084, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.8266666666666667, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.45057716808718e-06, |
|
"loss": 1.1939, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.827936507936508, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.448635217756751e-06, |
|
"loss": 1.1927, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.8292063492063492, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 6.446690134928123e-06, |
|
"loss": 1.1945, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.8304761904761905, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.444741921667665e-06, |
|
"loss": 1.1275, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.8317460317460318, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.442790580045072e-06, |
|
"loss": 1.1588, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.833015873015873, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.440836112133362e-06, |
|
"loss": 1.2396, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.8342857142857143, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.438878520008875e-06, |
|
"loss": 1.12, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.8355555555555556, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.436917805751268e-06, |
|
"loss": 1.1548, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.8368253968253968, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.434953971443517e-06, |
|
"loss": 1.0921, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.8380952380952381, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 6.432987019171911e-06, |
|
"loss": 1.102, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.8393650793650793, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.431016951026052e-06, |
|
"loss": 1.2026, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.8406349206349206, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 6.429043769098855e-06, |
|
"loss": 1.1753, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.8419047619047619, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.427067475486537e-06, |
|
"loss": 1.1094, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.8431746031746031, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.425088072288624e-06, |
|
"loss": 1.1124, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.8444444444444444, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.423105561607948e-06, |
|
"loss": 1.2325, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.8457142857142858, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.421119945550639e-06, |
|
"loss": 1.175, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.846984126984127, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 6.419131226226125e-06, |
|
"loss": 1.0951, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.8482539682539683, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.4171394057471335e-06, |
|
"loss": 1.1837, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.8495238095238096, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.415144486229686e-06, |
|
"loss": 1.1108, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.8507936507936508, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.413146469793095e-06, |
|
"loss": 1.2185, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.8520634920634921, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.411145358559964e-06, |
|
"loss": 1.1745, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.8533333333333334, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.409141154656184e-06, |
|
"loss": 1.1682, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.8546031746031746, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.407133860210932e-06, |
|
"loss": 1.1913, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.8558730158730159, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.405123477356667e-06, |
|
"loss": 1.1316, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.8571428571428571, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 6.403110008229131e-06, |
|
"loss": 1.1423, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.8584126984126984, |
|
"grad_norm": 0.875, |
|
"learning_rate": 6.401093454967343e-06, |
|
"loss": 1.1023, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.8596825396825397, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.3990738197136e-06, |
|
"loss": 1.2511, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.8609523809523809, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.39705110461347e-06, |
|
"loss": 1.1134, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.8622222222222222, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.395025311815797e-06, |
|
"loss": 1.2409, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.8634920634920635, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.392996443472691e-06, |
|
"loss": 1.1694, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.8647619047619047, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.390964501739533e-06, |
|
"loss": 1.1082, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.866031746031746, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.388929488774966e-06, |
|
"loss": 1.1303, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.8673015873015874, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.386891406740896e-06, |
|
"loss": 1.1679, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.8685714285714285, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.384850257802492e-06, |
|
"loss": 1.1124, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.8698412698412699, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.382806044128178e-06, |
|
"loss": 1.1452, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.8711111111111111, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.3807587678896345e-06, |
|
"loss": 1.1504, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.8723809523809524, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.378708431261799e-06, |
|
"loss": 1.1171, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.8736507936507937, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.376655036422854e-06, |
|
"loss": 1.0953, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.8749206349206349, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.374598585554238e-06, |
|
"loss": 1.1465, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.8761904761904762, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.372539080840629e-06, |
|
"loss": 1.1303, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.8774603174603175, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.370476524469955e-06, |
|
"loss": 1.1897, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.8787301587301587, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.368410918633383e-06, |
|
"loss": 1.165, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.3663422655253195e-06, |
|
"loss": 1.1836, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.8812698412698413, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.36427056734341e-06, |
|
"loss": 1.1677, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.8825396825396825, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.362195826288534e-06, |
|
"loss": 1.1874, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.8838095238095238, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.3601180445648025e-06, |
|
"loss": 1.1586, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.8850793650793651, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 6.358037224379559e-06, |
|
"loss": 1.1884, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.8863492063492063, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.355953367943375e-06, |
|
"loss": 1.1844, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.8876190476190476, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.353866477470043e-06, |
|
"loss": 1.1958, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.351776555176585e-06, |
|
"loss": 1.1412, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.8901587301587301, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.34968360328324e-06, |
|
"loss": 1.0831, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.8914285714285715, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.347587624013467e-06, |
|
"loss": 1.2741, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.8926984126984127, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.34548861959394e-06, |
|
"loss": 1.184, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.893968253968254, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 6.343386592254547e-06, |
|
"loss": 1.1357, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.8952380952380953, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.341281544228389e-06, |
|
"loss": 1.147, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.8965079365079365, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.339173477751774e-06, |
|
"loss": 1.1581, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.8977777777777778, |
|
"grad_norm": 0.875, |
|
"learning_rate": 6.337062395064218e-06, |
|
"loss": 1.0782, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.8990476190476191, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.334948298408441e-06, |
|
"loss": 1.0997, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.9003174603174603, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.332831190030364e-06, |
|
"loss": 1.1336, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.9015873015873016, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.33071107217911e-06, |
|
"loss": 1.2151, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.9028571428571428, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.328587947106995e-06, |
|
"loss": 1.2093, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.9041269841269841, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.326461817069534e-06, |
|
"loss": 1.2357, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.9053968253968254, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.324332684325431e-06, |
|
"loss": 1.1374, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.9066666666666666, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.3222005511365826e-06, |
|
"loss": 1.1561, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.9079365079365079, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.3200654197680716e-06, |
|
"loss": 1.1071, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.9092063492063492, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 6.317927292488165e-06, |
|
"loss": 1.167, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.9104761904761904, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.3157861715683156e-06, |
|
"loss": 1.1746, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.9117460317460317, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.313642059283153e-06, |
|
"loss": 1.1708, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.9130158730158731, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.311494957910487e-06, |
|
"loss": 1.1456, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.9142857142857143, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.309344869731301e-06, |
|
"loss": 1.1039, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.9155555555555556, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.307191797029753e-06, |
|
"loss": 1.2034, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.9168253968253969, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.305035742093172e-06, |
|
"loss": 1.2167, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.9180952380952381, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.3028767072120536e-06, |
|
"loss": 1.1112, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.9193650793650794, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.3007146946800605e-06, |
|
"loss": 1.1532, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.9206349206349206, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.298549706794017e-06, |
|
"loss": 1.128, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.9219047619047619, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.29638174585391e-06, |
|
"loss": 1.132, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.9231746031746032, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.294210814162884e-06, |
|
"loss": 1.1487, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.9244444444444444, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.29203691402724e-06, |
|
"loss": 1.1577, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.9257142857142857, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.289860047756434e-06, |
|
"loss": 1.128, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.926984126984127, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.287680217663068e-06, |
|
"loss": 1.1734, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.9282539682539682, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 6.285497426062898e-06, |
|
"loss": 1.1226, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.9295238095238095, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.2833116752748255e-06, |
|
"loss": 1.0581, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.9307936507936508, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.281122967620892e-06, |
|
"loss": 1.1644, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.932063492063492, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.2789313054262826e-06, |
|
"loss": 1.1771, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.9333333333333333, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 6.276736691019323e-06, |
|
"loss": 1.1277, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.9346031746031747, |
|
"grad_norm": 0.875, |
|
"learning_rate": 6.274539126731473e-06, |
|
"loss": 1.0941, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.9358730158730159, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.2723386148973244e-06, |
|
"loss": 1.1645, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.9371428571428572, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 6.270135157854606e-06, |
|
"loss": 1.1453, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.9384126984126984, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 6.267928757944169e-06, |
|
"loss": 1.1461, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.9396825396825397, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.2657194175099964e-06, |
|
"loss": 1.0925, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.940952380952381, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.263507138899191e-06, |
|
"loss": 1.1974, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.9422222222222222, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.26129192446198e-06, |
|
"loss": 1.1136, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.9434920634920635, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.259073776551707e-06, |
|
"loss": 1.1404, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.9447619047619048, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.2568526975248335e-06, |
|
"loss": 1.1954, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.946031746031746, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.254628689740935e-06, |
|
"loss": 1.1038, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.9473015873015873, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.252401755562697e-06, |
|
"loss": 1.1808, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.9485714285714286, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.2501718973559155e-06, |
|
"loss": 1.1993, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.9498412698412698, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 6.247939117489492e-06, |
|
"loss": 1.2538, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.9511111111111111, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.245703418335432e-06, |
|
"loss": 1.1869, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 6.243464802268843e-06, |
|
"loss": 1.0689, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.9536507936507936, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.2412232716679286e-06, |
|
"loss": 1.1005, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.954920634920635, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.238978828913992e-06, |
|
"loss": 1.092, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.9561904761904761, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.236731476391429e-06, |
|
"loss": 1.0574, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.9574603174603175, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.234481216487727e-06, |
|
"loss": 1.1339, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.9587301587301588, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.2322280515934605e-06, |
|
"loss": 1.1577, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.229971984102291e-06, |
|
"loss": 1.2549, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.9612698412698413, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.227713016410963e-06, |
|
"loss": 1.1451, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.9625396825396826, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.225451150919305e-06, |
|
"loss": 1.191, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.9638095238095238, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 6.22318639003022e-06, |
|
"loss": 1.1361, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.9650793650793651, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.2209187361496904e-06, |
|
"loss": 1.122, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.9663492063492064, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.2186481916867684e-06, |
|
"loss": 1.0857, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.9676190476190476, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 6.216374759053579e-06, |
|
"loss": 1.1183, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.9688888888888889, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.214098440665316e-06, |
|
"loss": 1.234, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.9701587301587301, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.211819238940239e-06, |
|
"loss": 1.168, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.9714285714285714, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.209537156299669e-06, |
|
"loss": 1.1411, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.9726984126984127, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.207252195167988e-06, |
|
"loss": 1.1865, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.9739682539682539, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.204964357972639e-06, |
|
"loss": 1.0926, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.9752380952380952, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 6.202673647144115e-06, |
|
"loss": 1.2023, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.9765079365079365, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 6.2003800651159655e-06, |
|
"loss": 1.1062, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.9777777777777777, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.198083614324791e-06, |
|
"loss": 1.2015, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.979047619047619, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.195784297210236e-06, |
|
"loss": 1.1814, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.9803174603174604, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.193482116214995e-06, |
|
"loss": 1.155, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.9815873015873016, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 6.191177073784799e-06, |
|
"loss": 1.103, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.9828571428571429, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.188869172368424e-06, |
|
"loss": 1.0483, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.9841269841269841, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.18655841441768e-06, |
|
"loss": 1.1273, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.9853968253968254, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.184244802387415e-06, |
|
"loss": 1.1635, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.9866666666666667, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.181928338735503e-06, |
|
"loss": 1.1616, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.9879365079365079, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.179609025922857e-06, |
|
"loss": 1.1586, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.9892063492063492, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.177286866413406e-06, |
|
"loss": 1.1268, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.9904761904761905, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.174961862674111e-06, |
|
"loss": 1.1264, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.9917460317460317, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.172634017174953e-06, |
|
"loss": 1.2188, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.993015873015873, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.1703033323889286e-06, |
|
"loss": 1.0856, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.9942857142857143, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.167969810792053e-06, |
|
"loss": 1.142, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.9955555555555555, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 6.165633454863357e-06, |
|
"loss": 1.1106, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.9968253968253968, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 6.163294267084881e-06, |
|
"loss": 1.0554, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.9980952380952381, |
|
"grad_norm": 0.875, |
|
"learning_rate": 6.160952249941671e-06, |
|
"loss": 1.1311, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.9993650793650793, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.158607405921784e-06, |
|
"loss": 1.0738, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 6.156259737516275e-06, |
|
"loss": 0.5108, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.0006349206349205, |
|
"grad_norm": 0.6875, |
|
"learning_rate": 6.153909247219202e-06, |
|
"loss": 0.6055, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.0006349206349205, |
|
"eval_loss": 1.1893014907836914, |
|
"eval_runtime": 99.4944, |
|
"eval_samples_per_second": 43.57, |
|
"eval_steps_per_second": 5.448, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.0012698412698413, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.151555937527625e-06, |
|
"loss": 1.1263, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.0025396825396826, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.149199810941588e-06, |
|
"loss": 1.1561, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.0038095238095237, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.146840869964141e-06, |
|
"loss": 1.0985, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.005079365079365, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.144479117101314e-06, |
|
"loss": 1.1212, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.0063492063492063, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.142114554862129e-06, |
|
"loss": 1.0649, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.0076190476190476, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.139747185758589e-06, |
|
"loss": 1.0868, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.008888888888889, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 6.137377012305684e-06, |
|
"loss": 1.105, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.0101587301587303, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.135004037021378e-06, |
|
"loss": 1.0531, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.0114285714285713, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.1326282624266155e-06, |
|
"loss": 1.1147, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.0126984126984127, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.130249691045313e-06, |
|
"loss": 1.1736, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.013968253968254, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.127868325404357e-06, |
|
"loss": 1.1453, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.0152380952380953, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.1254841680336046e-06, |
|
"loss": 1.1401, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.0165079365079366, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 6.123097221465878e-06, |
|
"loss": 1.1181, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.0177777777777777, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 6.120707488236962e-06, |
|
"loss": 1.1782, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.019047619047619, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.118314970885604e-06, |
|
"loss": 1.1901, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.0203174603174603, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.115919671953506e-06, |
|
"loss": 1.1247, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.0215873015873016, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 6.113521593985326e-06, |
|
"loss": 1.2278, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.022857142857143, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 6.111120739528676e-06, |
|
"loss": 1.1343, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.0241269841269842, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 6.108717111134116e-06, |
|
"loss": 1.2013, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.0253968253968253, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.106310711355153e-06, |
|
"loss": 1.1276, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.0266666666666666, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.10390154274824e-06, |
|
"loss": 1.1422, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.027936507936508, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.101489607872768e-06, |
|
"loss": 1.0979, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.0292063492063492, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 6.099074909291069e-06, |
|
"loss": 1.0752, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.0304761904761905, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 6.096657449568412e-06, |
|
"loss": 1.1214, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.0317460317460316, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.094237231272998e-06, |
|
"loss": 1.1449, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.033015873015873, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.091814256975957e-06, |
|
"loss": 1.153, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.0342857142857143, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.089388529251351e-06, |
|
"loss": 1.159, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.0355555555555556, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 6.086960050676163e-06, |
|
"loss": 1.1684, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.0368253968253969, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.084528823830299e-06, |
|
"loss": 1.1217, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.0380952380952382, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 6.082094851296589e-06, |
|
"loss": 1.1292, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.0393650793650793, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.079658135660774e-06, |
|
"loss": 1.1013, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.0406349206349206, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.077218679511512e-06, |
|
"loss": 1.1311, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.041904761904762, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.074776485440372e-06, |
|
"loss": 1.197, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.0431746031746032, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 6.072331556041833e-06, |
|
"loss": 1.054, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.0444444444444445, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 6.0698838939132786e-06, |
|
"loss": 1.1083, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.0457142857142858, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 6.0674335016549945e-06, |
|
"loss": 1.1574, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.046984126984127, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.064980381870168e-06, |
|
"loss": 1.1341, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.0482539682539682, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 6.062524537164885e-06, |
|
"loss": 1.1482, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.0495238095238095, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 6.060065970148123e-06, |
|
"loss": 1.0957, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.0507936507936508, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.057604683431756e-06, |
|
"loss": 1.1756, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.0520634920634921, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.055140679630543e-06, |
|
"loss": 1.199, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.0533333333333332, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 6.052673961362132e-06, |
|
"loss": 1.1214, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.0546031746031745, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 6.050204531247056e-06, |
|
"loss": 1.1826, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.0558730158730159, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.047732391908725e-06, |
|
"loss": 1.0719, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.0571428571428572, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 6.04525754597343e-06, |
|
"loss": 1.1604, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.0584126984126985, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.042779996070335e-06, |
|
"loss": 1.1072, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.0596825396825398, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.040299744831482e-06, |
|
"loss": 1.1305, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.0609523809523809, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 6.0378167948917755e-06, |
|
"loss": 1.0534, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.0622222222222222, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.035331148888992e-06, |
|
"loss": 1.1919, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.0634920634920635, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 6.032842809463771e-06, |
|
"loss": 1.1266, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.0647619047619048, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.030351779259612e-06, |
|
"loss": 1.1957, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.066031746031746, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.027858060922874e-06, |
|
"loss": 1.1445, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.0673015873015874, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 6.025361657102773e-06, |
|
"loss": 1.0895, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.0685714285714285, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 6.022862570451376e-06, |
|
"loss": 1.2063, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.0698412698412698, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.020360803623601e-06, |
|
"loss": 1.0889, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.0711111111111111, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.017856359277215e-06, |
|
"loss": 1.138, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.0723809523809524, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 6.015349240072824e-06, |
|
"loss": 1.1788, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.0736507936507937, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 6.012839448673882e-06, |
|
"loss": 1.1004, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.0749206349206348, |
|
"grad_norm": 1.0, |
|
"learning_rate": 6.010326987746679e-06, |
|
"loss": 1.1197, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.0761904761904761, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 6.007811859960339e-06, |
|
"loss": 1.0873, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.0774603174603175, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 6.005294067986824e-06, |
|
"loss": 1.1637, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.0787301587301588, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 6.002773614500922e-06, |
|
"loss": 1.1315, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 6.000250502180251e-06, |
|
"loss": 1.1133, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.0812698412698412, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.9977247337052515e-06, |
|
"loss": 1.1943, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.0825396825396825, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 5.995196311759189e-06, |
|
"loss": 1.133, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.0838095238095238, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.992665239028144e-06, |
|
"loss": 1.1914, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.085079365079365, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.990131518201016e-06, |
|
"loss": 1.1349, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.0863492063492064, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.987595151969516e-06, |
|
"loss": 1.1804, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.0876190476190477, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 5.985056143028167e-06, |
|
"loss": 1.1553, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.0888888888888888, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.982514494074297e-06, |
|
"loss": 1.0719, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.09015873015873, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.979970207808042e-06, |
|
"loss": 1.1532, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.0914285714285714, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 5.9774232869323365e-06, |
|
"loss": 1.0941, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.0926984126984127, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.974873734152916e-06, |
|
"loss": 1.1314, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.093968253968254, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.972321552178312e-06, |
|
"loss": 1.165, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.0952380952380953, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.969766743719847e-06, |
|
"loss": 1.1825, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.0965079365079364, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.967209311491636e-06, |
|
"loss": 1.1202, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.0977777777777777, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.964649258210583e-06, |
|
"loss": 1.139, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.099047619047619, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 5.962086586596369e-06, |
|
"loss": 1.0856, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.1003174603174604, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.959521299371465e-06, |
|
"loss": 1.1494, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.1015873015873017, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 5.956953399261118e-06, |
|
"loss": 1.0633, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.1028571428571428, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 5.9543828889933484e-06, |
|
"loss": 1.1601, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.104126984126984, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.951809771298954e-06, |
|
"loss": 1.1452, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.1053968253968254, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.949234048911497e-06, |
|
"loss": 1.0889, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.1066666666666667, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.94665572456731e-06, |
|
"loss": 1.1215, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.107936507936508, |
|
"grad_norm": 1.0, |
|
"learning_rate": 5.94407480100549e-06, |
|
"loss": 1.1259, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.1092063492063493, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.941491280967898e-06, |
|
"loss": 1.0588, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.1104761904761904, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.938905167199145e-06, |
|
"loss": 1.1346, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.1117460317460317, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.936316462446605e-06, |
|
"loss": 1.0855, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.113015873015873, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 5.933725169460404e-06, |
|
"loss": 1.1648, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.1142857142857143, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.931131290993411e-06, |
|
"loss": 1.1728, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.1155555555555556, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.928534829801252e-06, |
|
"loss": 1.082, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.116825396825397, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.9259357886422866e-06, |
|
"loss": 1.1417, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.118095238095238, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.923334170277623e-06, |
|
"loss": 1.1807, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.1193650793650793, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 5.9207299774711025e-06, |
|
"loss": 1.1927, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.1206349206349207, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.918123212989304e-06, |
|
"loss": 1.1026, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.121904761904762, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.915513879601538e-06, |
|
"loss": 1.1803, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.1231746031746033, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.912901980079843e-06, |
|
"loss": 1.1667, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.1244444444444444, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 5.910287517198983e-06, |
|
"loss": 1.1347, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.1257142857142857, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 5.9076704937364475e-06, |
|
"loss": 1.1578, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.126984126984127, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.9050509124724456e-06, |
|
"loss": 1.1774, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.1282539682539683, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 5.902428776189903e-06, |
|
"loss": 1.1567, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.1295238095238096, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.899804087674461e-06, |
|
"loss": 1.1014, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.1307936507936507, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.897176849714469e-06, |
|
"loss": 1.1083, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.132063492063492, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.8945470651009875e-06, |
|
"loss": 1.1097, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.1333333333333333, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 5.891914736627785e-06, |
|
"loss": 1.096, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.1346031746031746, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.8892798670913275e-06, |
|
"loss": 1.1589, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.135873015873016, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 5.886642459290782e-06, |
|
"loss": 1.1019, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.1371428571428572, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.884002516028012e-06, |
|
"loss": 1.1257, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.1384126984126983, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 5.881360040107578e-06, |
|
"loss": 1.1422, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.1396825396825396, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.878715034336726e-06, |
|
"loss": 1.1903, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.140952380952381, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 5.8760675015253935e-06, |
|
"loss": 1.1277, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.1422222222222222, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 5.8734174444862e-06, |
|
"loss": 1.1103, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.1434920634920636, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.8707648660344454e-06, |
|
"loss": 1.1382, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.1447619047619049, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 5.868109768988114e-06, |
|
"loss": 1.1512, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.146031746031746, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.865452156167861e-06, |
|
"loss": 1.1509, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.1473015873015873, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.862792030397015e-06, |
|
"loss": 1.1152, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.1485714285714286, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 5.8601293945015745e-06, |
|
"loss": 1.142, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.1498412698412699, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.857464251310206e-06, |
|
"loss": 1.0839, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.1511111111111112, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.854796603654236e-06, |
|
"loss": 1.2105, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.1523809523809523, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.852126454367657e-06, |
|
"loss": 1.1147, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.1536507936507936, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 5.8494538062871145e-06, |
|
"loss": 1.1292, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.154920634920635, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 5.8467786622519105e-06, |
|
"loss": 1.1353, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.1561904761904762, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.844101025103999e-06, |
|
"loss": 1.0897, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.1574603174603175, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.841420897687981e-06, |
|
"loss": 1.1639, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.1587301587301586, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.838738282851105e-06, |
|
"loss": 1.1634, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 5.836053183443262e-06, |
|
"loss": 1.1569, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.1612698412698412, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 5.833365602316978e-06, |
|
"loss": 1.2137, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.1625396825396825, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 5.830675542327421e-06, |
|
"loss": 1.111, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.1638095238095238, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 5.827983006332391e-06, |
|
"loss": 1.0904, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.1650793650793652, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.825287997192318e-06, |
|
"loss": 1.1414, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.1663492063492065, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 5.822590517770259e-06, |
|
"loss": 1.1094, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.1676190476190476, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 5.819890570931895e-06, |
|
"loss": 1.128, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.1688888888888889, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.817188159545529e-06, |
|
"loss": 1.1724, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.1701587301587302, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.814483286482081e-06, |
|
"loss": 1.0582, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.1714285714285715, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 5.811775954615088e-06, |
|
"loss": 1.0837, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.1726984126984128, |
|
"grad_norm": 0.89453125, |
|
"learning_rate": 5.809066166820699e-06, |
|
"loss": 1.0938, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.1739682539682539, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.806353925977671e-06, |
|
"loss": 1.1441, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.1752380952380952, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 5.803639234967367e-06, |
|
"loss": 1.1908, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.1765079365079365, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.800922096673753e-06, |
|
"loss": 1.1109, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.1777777777777778, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 5.798202513983397e-06, |
|
"loss": 1.1131, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.1790476190476191, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 5.79548048978546e-06, |
|
"loss": 1.1502, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.1803174603174602, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.7927560269717e-06, |
|
"loss": 1.1954, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.1815873015873015, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.790029128436464e-06, |
|
"loss": 1.1165, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.1828571428571428, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.787299797076686e-06, |
|
"loss": 1.1306, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.1841269841269841, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.784568035791887e-06, |
|
"loss": 1.1082, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.1853968253968254, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.781833847484169e-06, |
|
"loss": 1.1388, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.1866666666666668, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 5.7790972350582095e-06, |
|
"loss": 1.1739, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.1879365079365078, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 5.776358201421263e-06, |
|
"loss": 1.1154, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.1892063492063492, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.773616749483157e-06, |
|
"loss": 1.1866, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.1904761904761905, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.770872882156288e-06, |
|
"loss": 1.2014, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.1917460317460318, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 5.768126602355618e-06, |
|
"loss": 1.0875, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.193015873015873, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.765377912998671e-06, |
|
"loss": 1.1382, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.1942857142857144, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 5.762626817005533e-06, |
|
"loss": 1.0788, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.1955555555555555, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 5.759873317298842e-06, |
|
"loss": 1.1058, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.1968253968253968, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.757117416803798e-06, |
|
"loss": 1.1358, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.198095238095238, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.754359118448141e-06, |
|
"loss": 1.1381, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.1993650793650794, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 5.751598425162169e-06, |
|
"loss": 1.2095, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.2006349206349207, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 5.7488353398787165e-06, |
|
"loss": 1.1923, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.2019047619047618, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.746069865533161e-06, |
|
"loss": 1.0759, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.2031746031746031, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 5.743302005063419e-06, |
|
"loss": 1.0627, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.2044444444444444, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.740531761409945e-06, |
|
"loss": 1.1598, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.2057142857142857, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 5.7377591375157175e-06, |
|
"loss": 1.1227, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.206984126984127, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.73498413632625e-06, |
|
"loss": 1.1589, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.2082539682539681, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 5.73220676078958e-06, |
|
"loss": 1.0832, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.2095238095238094, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.7294270138562675e-06, |
|
"loss": 1.2069, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.2107936507936508, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.7266448984793905e-06, |
|
"loss": 1.2382, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.212063492063492, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.723860417614543e-06, |
|
"loss": 1.1499, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.2133333333333334, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 5.721073574219835e-06, |
|
"loss": 1.1811, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.2146031746031747, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 5.718284371255883e-06, |
|
"loss": 1.0129, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.215873015873016, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.715492811685813e-06, |
|
"loss": 1.1847, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.217142857142857, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 5.712698898475249e-06, |
|
"loss": 1.0698, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.2184126984126984, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.709902634592324e-06, |
|
"loss": 1.0597, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.2196825396825397, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.707104023007664e-06, |
|
"loss": 1.0922, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.220952380952381, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.7043030666943846e-06, |
|
"loss": 1.1159, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.2222222222222223, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.701499768628099e-06, |
|
"loss": 1.1535, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.2234920634920634, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.6986941317869065e-06, |
|
"loss": 1.0798, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.2247619047619047, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 5.695886159151388e-06, |
|
"loss": 1.1546, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.226031746031746, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 5.69307585370461e-06, |
|
"loss": 1.0339, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.2273015873015873, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.690263218432114e-06, |
|
"loss": 1.1029, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.2285714285714286, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.687448256321919e-06, |
|
"loss": 1.0723, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.2298412698412697, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.684630970364515e-06, |
|
"loss": 1.0463, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.231111111111111, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 5.6818113635528595e-06, |
|
"loss": 1.1948, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.2323809523809524, |
|
"grad_norm": 1.0, |
|
"learning_rate": 5.6789894388823764e-06, |
|
"loss": 1.1977, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.2336507936507937, |
|
"grad_norm": 0.875, |
|
"learning_rate": 5.676165199350954e-06, |
|
"loss": 1.0944, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.234920634920635, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 5.6733386479589375e-06, |
|
"loss": 1.1806, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.2361904761904763, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 5.670509787709128e-06, |
|
"loss": 1.169, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.2374603174603174, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 5.667678621606781e-06, |
|
"loss": 1.161, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.2387301587301587, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 5.664845152659599e-06, |
|
"loss": 1.1933, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 5.6620093838777345e-06, |
|
"loss": 1.1191, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.2412698412698413, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.659171318273782e-06, |
|
"loss": 1.1189, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.2425396825396826, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.656330958862774e-06, |
|
"loss": 1.0603, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.243809523809524, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.653488308662181e-06, |
|
"loss": 1.1747, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.245079365079365, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 5.650643370691909e-06, |
|
"loss": 1.1333, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.2463492063492063, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.647796147974291e-06, |
|
"loss": 1.166, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.2476190476190476, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.644946643534091e-06, |
|
"loss": 1.1551, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.248888888888889, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.642094860398495e-06, |
|
"loss": 1.1008, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.2501587301587302, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.639240801597108e-06, |
|
"loss": 1.0894, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.2514285714285713, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.636384470161957e-06, |
|
"loss": 1.1374, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.2526984126984126, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.633525869127478e-06, |
|
"loss": 1.1105, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.253968253968254, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.630665001530522e-06, |
|
"loss": 1.0807, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.2552380952380953, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.627801870410348e-06, |
|
"loss": 1.1284, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.2565079365079366, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.624936478808617e-06, |
|
"loss": 1.1448, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.2577777777777777, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 5.622068829769393e-06, |
|
"loss": 1.0997, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.259047619047619, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.61919892633914e-06, |
|
"loss": 1.1034, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.2603174603174603, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.616326771566714e-06, |
|
"loss": 1.1347, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.2615873015873016, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.613452368503362e-06, |
|
"loss": 1.0823, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.262857142857143, |
|
"grad_norm": 0.87890625, |
|
"learning_rate": 5.610575720202723e-06, |
|
"loss": 1.0923, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.2641269841269842, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 5.60769682972082e-06, |
|
"loss": 1.0534, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.2653968253968255, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.604815700116055e-06, |
|
"loss": 1.1278, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.2666666666666666, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 5.601932334449214e-06, |
|
"loss": 1.037, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.267936507936508, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 5.599046735783456e-06, |
|
"loss": 1.087, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.2692063492063492, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 5.596158907184309e-06, |
|
"loss": 1.0948, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.2704761904761905, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.593268851719676e-06, |
|
"loss": 1.1269, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.2717460317460318, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 5.590376572459821e-06, |
|
"loss": 1.1055, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.273015873015873, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 5.587482072477371e-06, |
|
"loss": 1.0811, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.2742857142857142, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.584585354847317e-06, |
|
"loss": 1.0866, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.2755555555555556, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.581686422646998e-06, |
|
"loss": 1.0588, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.2768253968253969, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.578785278956114e-06, |
|
"loss": 1.0968, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.2780952380952382, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.575881926856708e-06, |
|
"loss": 1.0973, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.2793650793650793, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.572976369433172e-06, |
|
"loss": 1.1661, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.2806349206349206, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 5.5700686097722384e-06, |
|
"loss": 1.1793, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.2819047619047619, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.5671586509629845e-06, |
|
"loss": 1.1664, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.2831746031746032, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.564246496096818e-06, |
|
"loss": 1.1553, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.2844444444444445, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.5613321482674834e-06, |
|
"loss": 1.0554, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.2857142857142856, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.558415610571052e-06, |
|
"loss": 1.1176, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.2869841269841271, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.555496886105924e-06, |
|
"loss": 1.1676, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.2882539682539682, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.552575977972822e-06, |
|
"loss": 1.1585, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.2895238095238095, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.549652889274788e-06, |
|
"loss": 1.127, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.2907936507936508, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 5.546727623117179e-06, |
|
"loss": 1.2112, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.2920634920634921, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.543800182607669e-06, |
|
"loss": 1.1219, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.2933333333333334, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.54087057085624e-06, |
|
"loss": 1.2062, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.2946031746031745, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 5.537938790975179e-06, |
|
"loss": 1.1131, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.2958730158730158, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.535004846079079e-06, |
|
"loss": 1.0721, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.2971428571428572, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 5.532068739284832e-06, |
|
"loss": 1.1498, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.2984126984126985, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.5291304737116254e-06, |
|
"loss": 1.1301, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.2996825396825398, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.526190052480942e-06, |
|
"loss": 1.1593, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.3009523809523809, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.523247478716555e-06, |
|
"loss": 1.0929, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.3022222222222222, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.5203027555445205e-06, |
|
"loss": 1.1383, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.3034920634920635, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.517355886093182e-06, |
|
"loss": 1.1199, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.3047619047619048, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.514406873493163e-06, |
|
"loss": 1.0592, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.306031746031746, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 5.511455720877359e-06, |
|
"loss": 1.0237, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.3073015873015872, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.508502431380948e-06, |
|
"loss": 1.1415, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.3085714285714285, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.5055470081413684e-06, |
|
"loss": 1.1732, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.3098412698412698, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.5025894542983315e-06, |
|
"loss": 1.1345, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.3111111111111111, |
|
"grad_norm": 1.0, |
|
"learning_rate": 5.499629772993809e-06, |
|
"loss": 1.0832, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.3123809523809524, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 5.496667967372035e-06, |
|
"loss": 1.2329, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.3136507936507935, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.4937040405795e-06, |
|
"loss": 1.1604, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.314920634920635, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 5.490737995764946e-06, |
|
"loss": 1.133, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.3161904761904761, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 5.487769836079367e-06, |
|
"loss": 1.0956, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.3174603174603174, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.484799564676002e-06, |
|
"loss": 1.0835, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.3187301587301588, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 5.481827184710336e-06, |
|
"loss": 1.0909, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.478852699340092e-06, |
|
"loss": 1.0682, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.3212698412698414, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.47587611172523e-06, |
|
"loss": 1.1619, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.3225396825396825, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 5.4728974250279445e-06, |
|
"loss": 1.1885, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.3238095238095238, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.4699166424126575e-06, |
|
"loss": 1.1089, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.325079365079365, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.4669337670460205e-06, |
|
"loss": 1.1159, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.3263492063492064, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 5.463948802096907e-06, |
|
"loss": 1.153, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.3276190476190477, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 5.460961750736408e-06, |
|
"loss": 1.0732, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.3288888888888888, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.457972616137836e-06, |
|
"loss": 1.0756, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.33015873015873, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.454981401476713e-06, |
|
"loss": 1.1332, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.3314285714285714, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 5.451988109930771e-06, |
|
"loss": 1.1398, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.3326984126984127, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 5.44899274467995e-06, |
|
"loss": 1.1576, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.333968253968254, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.4459953089063925e-06, |
|
"loss": 1.0711, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.3352380952380951, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 5.442995805794438e-06, |
|
"loss": 1.1551, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.3365079365079366, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 5.439994238530627e-06, |
|
"loss": 1.0791, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.3377777777777777, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 5.4369906103036895e-06, |
|
"loss": 1.2093, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.339047619047619, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 5.433984924304546e-06, |
|
"loss": 1.1535, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.3403174603174604, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.430977183726302e-06, |
|
"loss": 1.113, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.3415873015873017, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.427967391764247e-06, |
|
"loss": 1.1166, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.342857142857143, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.42495555161585e-06, |
|
"loss": 1.2128, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.344126984126984, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.421941666480755e-06, |
|
"loss": 1.0608, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.3453968253968254, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 5.418925739560777e-06, |
|
"loss": 1.1802, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.3466666666666667, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 5.415907774059904e-06, |
|
"loss": 1.0853, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.347936507936508, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 5.412887773184288e-06, |
|
"loss": 1.1111, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.3492063492063493, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 5.409865740142242e-06, |
|
"loss": 1.0664, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.3504761904761904, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.406841678144237e-06, |
|
"loss": 1.0606, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.3517460317460317, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.403815590402905e-06, |
|
"loss": 1.1376, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.353015873015873, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.400787480133023e-06, |
|
"loss": 1.1475, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.3542857142857143, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.397757350551521e-06, |
|
"loss": 1.021, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.3555555555555556, |
|
"grad_norm": 1.0, |
|
"learning_rate": 5.394725204877474e-06, |
|
"loss": 1.141, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.3568253968253967, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.391691046332097e-06, |
|
"loss": 1.1207, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.358095238095238, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 5.3886548781387455e-06, |
|
"loss": 1.1566, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.3593650793650793, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.385616703522907e-06, |
|
"loss": 1.1329, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.3606349206349206, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.382576525712205e-06, |
|
"loss": 1.1949, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.361904761904762, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.379534347936386e-06, |
|
"loss": 1.1444, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.363174603174603, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 5.376490173427324e-06, |
|
"loss": 1.1498, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.3644444444444446, |
|
"grad_norm": 1.0, |
|
"learning_rate": 5.373444005419014e-06, |
|
"loss": 1.0996, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.3657142857142857, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 5.37039584714757e-06, |
|
"loss": 1.1395, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.366984126984127, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 5.367345701851217e-06, |
|
"loss": 1.1675, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.3682539682539683, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.364293572770295e-06, |
|
"loss": 1.1776, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.3695238095238096, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.3612394631472475e-06, |
|
"loss": 1.1552, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.370793650793651, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 5.358183376226626e-06, |
|
"loss": 1.0682, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.372063492063492, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 5.355125315255079e-06, |
|
"loss": 1.117, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.3733333333333333, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.352065283481355e-06, |
|
"loss": 1.2372, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.3746031746031746, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 5.349003284156292e-06, |
|
"loss": 1.1573, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.375873015873016, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.3459393205328255e-06, |
|
"loss": 1.1628, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.3771428571428572, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.342873395865971e-06, |
|
"loss": 1.1204, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.3784126984126983, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 5.3398055134128295e-06, |
|
"loss": 1.124, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.3796825396825396, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.336735676432583e-06, |
|
"loss": 1.0391, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.380952380952381, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 5.333663888186488e-06, |
|
"loss": 1.1251, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.3822222222222222, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.330590151937877e-06, |
|
"loss": 1.1557, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.3834920634920636, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.327514470952147e-06, |
|
"loss": 1.1114, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.3847619047619046, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.324436848496766e-06, |
|
"loss": 1.1198, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.3860317460317462, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.321357287841262e-06, |
|
"loss": 1.0892, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.3873015873015873, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 5.3182757922572226e-06, |
|
"loss": 1.1187, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.3885714285714286, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 5.315192365018292e-06, |
|
"loss": 1.0835, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.3898412698412699, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.312107009400163e-06, |
|
"loss": 1.1224, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.3911111111111112, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.309019728680581e-06, |
|
"loss": 1.1558, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.3923809523809525, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.3059305261393355e-06, |
|
"loss": 1.2182, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.3936507936507936, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 5.3028394050582555e-06, |
|
"loss": 1.1034, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.394920634920635, |
|
"grad_norm": 1.0, |
|
"learning_rate": 5.29974636872121e-06, |
|
"loss": 1.1412, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.3961904761904762, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 5.296651420414104e-06, |
|
"loss": 1.1102, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.3974603174603175, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.293554563424871e-06, |
|
"loss": 1.1589, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.3987301587301588, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 5.2904558010434745e-06, |
|
"loss": 1.1025, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.125, |
|
"learning_rate": 5.2873551365619e-06, |
|
"loss": 1.1318, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.4012698412698412, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 5.284252573274153e-06, |
|
"loss": 1.125, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.4025396825396825, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 5.281148114476263e-06, |
|
"loss": 1.0316, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.4038095238095238, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 5.278041763466265e-06, |
|
"loss": 1.1106, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.4050793650793652, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 5.274933523544206e-06, |
|
"loss": 1.2083, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.4063492063492062, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 5.2718233980121435e-06, |
|
"loss": 1.0879, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.4076190476190475, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 5.268711390174135e-06, |
|
"loss": 1.2132, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.4088888888888889, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 5.265597503336238e-06, |
|
"loss": 1.1204, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.4101587301587302, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.262481740806509e-06, |
|
"loss": 1.048, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.4114285714285715, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.259364105894991e-06, |
|
"loss": 1.1192, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.4126984126984126, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.2562446019137225e-06, |
|
"loss": 1.1135, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.413968253968254, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 5.253123232176724e-06, |
|
"loss": 1.1061, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.4152380952380952, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.25e-06, |
|
"loss": 1.0388, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.4165079365079365, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.246874908701532e-06, |
|
"loss": 1.0881, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.4177777777777778, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 5.2437479616012775e-06, |
|
"loss": 1.0944, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.4190476190476191, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.240619162021165e-06, |
|
"loss": 1.119, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.4203174603174604, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 5.237488513285092e-06, |
|
"loss": 1.1366, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.4215873015873015, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.234356018718919e-06, |
|
"loss": 1.0363, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.4228571428571428, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.231221681650469e-06, |
|
"loss": 1.1057, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.4241269841269841, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.228085505409519e-06, |
|
"loss": 1.1738, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 1.4253968253968254, |
|
"grad_norm": 0.875, |
|
"learning_rate": 5.224947493327806e-06, |
|
"loss": 1.0843, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.4266666666666667, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 5.221807648739012e-06, |
|
"loss": 1.2114, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.4279365079365078, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.218665974978767e-06, |
|
"loss": 1.1338, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.4292063492063491, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.215522475384645e-06, |
|
"loss": 1.0466, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 1.4304761904761905, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 5.21237715329616e-06, |
|
"loss": 1.1675, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.4317460317460318, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 5.209230012054761e-06, |
|
"loss": 1.2035, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 1.433015873015873, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.20608105500383e-06, |
|
"loss": 1.0926, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.4342857142857142, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.202930285488679e-06, |
|
"loss": 1.0956, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 1.4355555555555555, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.1997777068565426e-06, |
|
"loss": 1.1305, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.4368253968253968, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.196623322456582e-06, |
|
"loss": 1.1132, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 1.438095238095238, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.193467135639873e-06, |
|
"loss": 1.0628, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.4393650793650794, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.1903091497594075e-06, |
|
"loss": 1.2005, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.4406349206349207, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.187149368170087e-06, |
|
"loss": 1.1237, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.441904761904762, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.1839877942287245e-06, |
|
"loss": 1.099, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 1.443174603174603, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 5.180824431294034e-06, |
|
"loss": 1.1586, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.4444444444444444, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.17765928272663e-06, |
|
"loss": 1.1243, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 1.4457142857142857, |
|
"grad_norm": 0.875, |
|
"learning_rate": 5.1744923518890255e-06, |
|
"loss": 1.0623, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.446984126984127, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.171323642145625e-06, |
|
"loss": 1.111, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 1.4482539682539683, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.168153156862726e-06, |
|
"loss": 1.0733, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.4495238095238094, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.164980899408509e-06, |
|
"loss": 1.0906, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 1.4507936507936507, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 5.1618068731530395e-06, |
|
"loss": 1.1173, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.452063492063492, |
|
"grad_norm": 1.0, |
|
"learning_rate": 5.15863108146826e-06, |
|
"loss": 1.1727, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.4533333333333334, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 5.155453527727989e-06, |
|
"loss": 1.1193, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.4546031746031747, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.15227421530792e-06, |
|
"loss": 1.1275, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 1.4558730158730158, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.14909314758561e-06, |
|
"loss": 1.1938, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.457142857142857, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 5.145910327940484e-06, |
|
"loss": 1.1307, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 1.4584126984126984, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 5.142725759753826e-06, |
|
"loss": 1.2017, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.4596825396825397, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 5.139539446408781e-06, |
|
"loss": 1.1306, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 1.460952380952381, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 5.136351391290346e-06, |
|
"loss": 1.102, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.462222222222222, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 5.133161597785365e-06, |
|
"loss": 1.1168, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 1.4634920634920636, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 5.129970069282535e-06, |
|
"loss": 1.1488, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.4647619047619047, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 5.126776809172392e-06, |
|
"loss": 1.0992, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.466031746031746, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 5.123581820847313e-06, |
|
"loss": 1.1016, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.4673015873015873, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.1203851077015106e-06, |
|
"loss": 1.12, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 1.4685714285714286, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 5.1171866731310285e-06, |
|
"loss": 1.087, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.46984126984127, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 5.113986520533743e-06, |
|
"loss": 1.1197, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 1.471111111111111, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 5.110784653309353e-06, |
|
"loss": 1.1768, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.4723809523809523, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 5.1075810748593765e-06, |
|
"loss": 1.1136, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 1.4736507936507937, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 5.104375788587154e-06, |
|
"loss": 1.1602, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.474920634920635, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 5.1011687978978394e-06, |
|
"loss": 1.1295, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 1.4761904761904763, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 5.097960106198393e-06, |
|
"loss": 1.2086, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.4774603174603174, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.094749716897588e-06, |
|
"loss": 1.1108, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.4787301587301587, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.091537633405996e-06, |
|
"loss": 1.1007, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 5.088323859135995e-06, |
|
"loss": 1.0905, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 1.4812698412698413, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 5.0851083975017515e-06, |
|
"loss": 1.1302, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.4825396825396826, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.081891251919227e-06, |
|
"loss": 1.104, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 1.4838095238095237, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 5.078672425806178e-06, |
|
"loss": 1.0844, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.485079365079365, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 5.075451922582138e-06, |
|
"loss": 1.0839, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 1.4863492063492063, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 5.072229745668424e-06, |
|
"loss": 1.1437, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.4876190476190476, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 5.069005898488137e-06, |
|
"loss": 1.0498, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 1.488888888888889, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 5.065780384466147e-06, |
|
"loss": 1.1558, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.4901587301587302, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 5.062553207029093e-06, |
|
"loss": 1.1752, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.4914285714285715, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 5.059324369605386e-06, |
|
"loss": 1.1394, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.4926984126984126, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.056093875625199e-06, |
|
"loss": 1.1602, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 1.493968253968254, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 5.052861728520462e-06, |
|
"loss": 1.1289, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.4952380952380953, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 5.049627931724868e-06, |
|
"loss": 1.1, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 1.4965079365079366, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 5.046392488673853e-06, |
|
"loss": 1.1183, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.4977777777777779, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 5.043155402804611e-06, |
|
"loss": 1.2041, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 1.499047619047619, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 5.039916677556075e-06, |
|
"loss": 1.1673, |
|
"step": 1182 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3148, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 394, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.6179854689516913e+18, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|