{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.2106135986733,
  "global_step": 741,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.347826086956522e-07, |
|
"loss": 2.7034, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.695652173913044e-07, |
|
"loss": 2.73, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3043478260869566e-06, |
|
"loss": 2.7564, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.7391304347826088e-06, |
|
"loss": 2.6514, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.173913043478261e-06, |
|
"loss": 2.5382, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.6086956521739132e-06, |
|
"loss": 2.5815, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.043478260869566e-06, |
|
"loss": 2.6067, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.4782608695652175e-06, |
|
"loss": 2.6704, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 3.91304347826087e-06, |
|
"loss": 2.6642, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.347826086956522e-06, |
|
"loss": 2.5732, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.782608695652174e-06, |
|
"loss": 2.6243, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.2173913043478265e-06, |
|
"loss": 2.5127, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.652173913043479e-06, |
|
"loss": 2.6569, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.086956521739132e-06, |
|
"loss": 2.5531, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.521739130434783e-06, |
|
"loss": 2.6155, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 6.956521739130435e-06, |
|
"loss": 2.6498, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.391304347826087e-06, |
|
"loss": 2.5353, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.82608695652174e-06, |
|
"loss": 2.605, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.260869565217392e-06, |
|
"loss": 2.6735, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 8.695652173913044e-06, |
|
"loss": 2.5645, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.130434782608697e-06, |
|
"loss": 2.5904, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.565217391304349e-06, |
|
"loss": 2.6846, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1e-05, |
|
"loss": 2.5589, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.999953315763929e-06, |
|
"loss": 2.5329, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.999813263927483e-06, |
|
"loss": 2.5547, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.999579847105947e-06, |
|
"loss": 2.5863, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.999253069658074e-06, |
|
"loss": 2.7017, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.99883293768601e-06, |
|
"loss": 2.6934, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.998319459035168e-06, |
|
"loss": 2.5498, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.997712643294093e-06, |
|
"loss": 2.5468, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.997012501794273e-06, |
|
"loss": 2.6303, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.996219047609943e-06, |
|
"loss": 2.4883, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.995332295557818e-06, |
|
"loss": 2.5467, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.994352262196839e-06, |
|
"loss": 2.6186, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.993278965827844e-06, |
|
"loss": 2.5559, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 9.992112426493247e-06, |
|
"loss": 2.4594, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.990852665976648e-06, |
|
"loss": 2.5372, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.989499707802424e-06, |
|
"loss": 2.5247, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.988053577235306e-06, |
|
"loss": 2.4697, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.986514301279894e-06, |
|
"loss": 2.4605, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.984881908680157e-06, |
|
"loss": 2.5399, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.983156429918895e-06, |
|
"loss": 2.4888, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.981337897217171e-06, |
|
"loss": 2.4991, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.979426344533712e-06, |
|
"loss": 2.4208, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.977421807564264e-06, |
|
"loss": 2.5066, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.97532432374094e-06, |
|
"loss": 2.4321, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 9.973133932231514e-06, |
|
"loss": 2.5499, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.970850673938684e-06, |
|
"loss": 2.5242, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.96847459149932e-06, |
|
"loss": 2.6053, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 9.966005729283658e-06, |
|
"loss": 2.5961, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.963444133394478e-06, |
|
"loss": 2.5773, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.960789851666237e-06, |
|
"loss": 2.521, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.958042933664186e-06, |
|
"loss": 2.5786, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.955203430683425e-06, |
|
"loss": 2.4393, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.952271395747969e-06, |
|
"loss": 2.4537, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.949246883609743e-06, |
|
"loss": 2.5576, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.94612995074756e-06, |
|
"loss": 2.4571, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.942920655366075e-06, |
|
"loss": 2.4928, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.939619057394687e-06, |
|
"loss": 2.573, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.936225218486428e-06, |
|
"loss": 2.5714, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.93273920201681e-06, |
|
"loss": 2.4764, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.929161073082636e-06, |
|
"loss": 2.5248, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.925490898500796e-06, |
|
"loss": 2.5659, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.921728746807008e-06, |
|
"loss": 2.4022, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.917874688254542e-06, |
|
"loss": 2.493, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.913928794812909e-06, |
|
"loss": 2.4454, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.90989114016652e-06, |
|
"loss": 2.5217, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.905761799713302e-06, |
|
"loss": 2.66, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.901540850563295e-06, |
|
"loss": 2.4788, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.89722837153722e-06, |
|
"loss": 2.4752, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.892824443164987e-06, |
|
"loss": 2.5233, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.88832914768421e-06, |
|
"loss": 2.4721, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.883742569038663e-06, |
|
"loss": 2.5893, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.879064792876717e-06, |
|
"loss": 2.4385, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.874295906549728e-06, |
|
"loss": 2.3598, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.869435999110428e-06, |
|
"loss": 2.3664, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.864485161311242e-06, |
|
"loss": 2.5441, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.859443485602603e-06, |
|
"loss": 2.536, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.85431106613122e-06, |
|
"loss": 2.5167, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.849087998738328e-06, |
|
"loss": 2.4948, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.84377438095789e-06, |
|
"loss": 2.377, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.838370312014783e-06, |
|
"loss": 2.5684, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.832875892822937e-06, |
|
"loss": 2.4302, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.827291225983458e-06, |
|
"loss": 2.4731, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.821616415782708e-06, |
|
"loss": 2.4375, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 9.815851568190358e-06, |
|
"loss": 2.3958, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.80999679085741e-06, |
|
"loss": 2.3743, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.80405219311419e-06, |
|
"loss": 2.4446, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.798017885968295e-06, |
|
"loss": 2.4291, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 9.791893982102537e-06, |
|
"loss": 2.5374, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.785680595872824e-06, |
|
"loss": 2.4543, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.77937784330603e-06, |
|
"loss": 2.3586, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.772985842097832e-06, |
|
"loss": 2.5718, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.766504711610507e-06, |
|
"loss": 2.3728, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.759934572870706e-06, |
|
"loss": 2.5059, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 9.753275548567192e-06, |
|
"loss": 2.4287, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 9.74652776304855e-06, |
|
"loss": 2.3691, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 9.739691342320866e-06, |
|
"loss": 2.3754, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 9.732766414045368e-06, |
|
"loss": 2.5768, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 9.725753107536053e-06, |
|
"loss": 2.5617, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 9.718651553757266e-06, |
|
"loss": 2.4548, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 9.711461885321247e-06, |
|
"loss": 2.3576, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 9.704184236485672e-06, |
|
"loss": 2.4765, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 9.696818743151128e-06, |
|
"loss": 2.5565, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 9.68936554285859e-06, |
|
"loss": 2.4757, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 9.68182477478684e-06, |
|
"loss": 2.5012, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 9.67419657974988e-06, |
|
"loss": 2.4415, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 9.66648110019429e-06, |
|
"loss": 2.5461, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.658678480196579e-06, |
|
"loss": 2.4411, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.650788865460487e-06, |
|
"loss": 2.4137, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 9.642812403314272e-06, |
|
"loss": 2.4774, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 9.634749242707948e-06, |
|
"loss": 2.3834, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 9.626599534210514e-06, |
|
"loss": 2.3706, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 9.618363430007134e-06, |
|
"loss": 2.4467, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 9.610041083896304e-06, |
|
"loss": 2.3918, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 9.60163265128697e-06, |
|
"loss": 2.4193, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.593138289195634e-06, |
|
"loss": 2.4662, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.584558156243418e-06, |
|
"loss": 2.4145, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.575892412653102e-06, |
|
"loss": 2.4683, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.567141220246136e-06, |
|
"loss": 2.4461, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.55830474243961e-06, |
|
"loss": 2.4211, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.549383144243213e-06, |
|
"loss": 2.6434, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.540376592256142e-06, |
|
"loss": 2.3675, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.531285254663997e-06, |
|
"loss": 2.4487, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.522109301235637e-06, |
|
"loss": 2.4202, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.512848903320017e-06, |
|
"loss": 2.5347, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.503504233842973e-06, |
|
"loss": 2.5286, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.494075467304007e-06, |
|
"loss": 2.5092, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.484562779773027e-06, |
|
"loss": 2.4088, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.474966348887055e-06, |
|
"loss": 2.4655, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 9.465286353846905e-06, |
|
"loss": 2.4513, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 9.455522975413846e-06, |
|
"loss": 2.4779, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 9.445676395906226e-06, |
|
"loss": 2.4917, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 9.435746799196061e-06, |
|
"loss": 2.4344, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 9.425734370705606e-06, |
|
"loss": 2.3951, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 9.415639297403891e-06, |
|
"loss": 2.5753, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 9.40546176780323e-06, |
|
"loss": 2.4163, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.395201971955701e-06, |
|
"loss": 2.406, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 9.384860101449598e-06, |
|
"loss": 2.4533, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 9.374436349405847e-06, |
|
"loss": 2.4783, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 9.36393091047441e-06, |
|
"loss": 2.4423, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 9.353343980830644e-06, |
|
"loss": 2.4606, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 9.342675758171638e-06, |
|
"loss": 2.3636, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.331926441712522e-06, |
|
"loss": 2.3702, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 9.32109623218275e-06, |
|
"loss": 2.4619, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 9.310185331822338e-06, |
|
"loss": 2.2859, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 9.299193944378112e-06, |
|
"loss": 2.3135, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 9.28812227509988e-06, |
|
"loss": 2.3796, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 9.27697053073661e-06, |
|
"loss": 2.5387, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 9.26573891953257e-06, |
|
"loss": 2.4443, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.254427651223434e-06, |
|
"loss": 2.3509, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.243036937032373e-06, |
|
"loss": 2.3877, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 9.2315669896661e-06, |
|
"loss": 2.4596, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 9.220018023310908e-06, |
|
"loss": 2.5048, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.208390253628667e-06, |
|
"loss": 2.3846, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.196683897752794e-06, |
|
"loss": 2.3357, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.184899174284201e-06, |
|
"loss": 2.6323, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.173036303287215e-06, |
|
"loss": 2.4945, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.16109550628546e-06, |
|
"loss": 2.3997, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.149077006257734e-06, |
|
"loss": 2.4204, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.136981027633834e-06, |
|
"loss": 2.4147, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.124807796290366e-06, |
|
"loss": 2.3096, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.112557539546535e-06, |
|
"loss": 2.3793, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.100230486159893e-06, |
|
"loss": 2.5293, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 9.087826866322065e-06, |
|
"loss": 2.3883, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.075346911654456e-06, |
|
"loss": 2.321, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 9.062790855203932e-06, |
|
"loss": 2.3753, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.050158931438451e-06, |
|
"loss": 2.3658, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 9.037451376242696e-06, |
|
"loss": 2.3558, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 9.024668426913671e-06, |
|
"loss": 2.3781, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 9.011810322156269e-06, |
|
"loss": 2.3236, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 8.998877302078803e-06, |
|
"loss": 2.3662, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.985869608188545e-06, |
|
"loss": 2.4065, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.97278748338719e-06, |
|
"loss": 2.56, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 8.95963117196634e-06, |
|
"loss": 2.4793, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 8.946400919602933e-06, |
|
"loss": 2.4959, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 8.933096973354665e-06, |
|
"loss": 2.3332, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 8.919719581655357e-06, |
|
"loss": 2.4093, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 8.906268994310339e-06, |
|
"loss": 2.4014, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 8.892745462491763e-06, |
|
"loss": 2.3916, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 8.879149238733932e-06, |
|
"loss": 2.3817, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 8.865480576928578e-06, |
|
"loss": 2.5024, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 8.851739732320109e-06, |
|
"loss": 2.3461, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 8.83792696150086e-06, |
|
"loss": 2.3118, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 8.824042522406295e-06, |
|
"loss": 2.51, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.810086674310184e-06, |
|
"loss": 2.3644, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.796059677819773e-06, |
|
"loss": 2.434, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.781961794870903e-06, |
|
"loss": 2.3205, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.767793288723137e-06, |
|
"loss": 2.3496, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.753554423954828e-06, |
|
"loss": 2.545, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 8.739245466458187e-06, |
|
"loss": 2.3487, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 8.72486668343431e-06, |
|
"loss": 2.4724, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 8.7104183433882e-06, |
|
"loss": 2.3646, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 8.695900716123744e-06, |
|
"loss": 2.3395, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 8.681314072738678e-06, |
|
"loss": 2.5131, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 8.666658685619523e-06, |
|
"loss": 2.4721, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.651934828436497e-06, |
|
"loss": 2.3247, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.637142776138415e-06, |
|
"loss": 2.4546, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.622282804947537e-06, |
|
"loss": 2.3452, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.607355192354425e-06, |
|
"loss": 2.3957, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 8.592360217112759e-06, |
|
"loss": 2.3971, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 8.57729815923412e-06, |
|
"loss": 2.2656, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 8.562169299982776e-06, |
|
"loss": 2.4263, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 8.546973921870421e-06, |
|
"loss": 2.4916, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 8.531712308650904e-06, |
|
"loss": 2.469, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 8.516384745314926e-06, |
|
"loss": 2.3268, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.50099151808472e-06, |
|
"loss": 2.3219, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.485532914408712e-06, |
|
"loss": 2.3158, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 8.470009222956138e-06, |
|
"loss": 2.3291, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 8.45442073361167e-06, |
|
"loss": 2.4595, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 8.438767737469995e-06, |
|
"loss": 2.3873, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 8.42305052683038e-06, |
|
"loss": 2.3981, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 8.407269395191216e-06, |
|
"loss": 2.3886, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 8.391424637244528e-06, |
|
"loss": 2.3819, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 8.375516548870489e-06, |
|
"loss": 2.3791, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 8.359545427131876e-06, |
|
"loss": 2.3164, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.343511570268541e-06, |
|
"loss": 2.3921, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.327415277691824e-06, |
|
"loss": 2.3502, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 8.311256849978974e-06, |
|
"loss": 2.2597, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 8.295036588867533e-06, |
|
"loss": 2.3938, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 8.278754797249702e-06, |
|
"loss": 2.4966, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.262411779166681e-06, |
|
"loss": 2.3562, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.246007839802997e-06, |
|
"loss": 2.3949, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 8.229543285480797e-06, |
|
"loss": 2.3203, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 8.213018423654144e-06, |
|
"loss": 2.3606, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.196433562903252e-06, |
|
"loss": 2.3521, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.179789012928747e-06, |
|
"loss": 2.4479, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.163085084545867e-06, |
|
"loss": 2.4099, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.146322089678668e-06, |
|
"loss": 2.4377, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 8.129500341354192e-06, |
|
"loss": 2.4691, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 8.11262015369663e-06, |
|
"loss": 2.2523, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 8.095681841921441e-06, |
|
"loss": 2.4388, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 8.07868572232949e-06, |
|
"loss": 2.408, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 8.061632112301122e-06, |
|
"loss": 2.5245, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 8.044521330290235e-06, |
|
"loss": 2.3994, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 8.027353695818345e-06, |
|
"loss": 2.2924, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 8.010129529468614e-06, |
|
"loss": 2.2718, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 7.992849152879857e-06, |
|
"loss": 2.4062, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 7.97551288874055e-06, |
|
"loss": 2.3462, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 7.95812106078279e-06, |
|
"loss": 2.3439, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 7.940673993776258e-06, |
|
"loss": 2.4067, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 7.923172013522153e-06, |
|
"loss": 2.4119, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 7.905615446847107e-06, |
|
"loss": 2.3155, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 7.888004621597079e-06, |
|
"loss": 2.5228, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 7.87033986663124e-06, |
|
"loss": 2.4585, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 7.852621511815825e-06, |
|
"loss": 2.4616, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 7.834849888017979e-06, |
|
"loss": 2.4858, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 7.817025327099574e-06, |
|
"loss": 2.4023, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 7.799148161911013e-06, |
|
"loss": 2.279, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 7.781218726285014e-06, |
|
"loss": 2.3382, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 7.763237355030384e-06, |
|
"loss": 2.2992, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 7.745204383925753e-06, |
|
"loss": 2.3759, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 7.727120149713313e-06, |
|
"loss": 2.3034, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 7.708984990092528e-06, |
|
"loss": 2.3704, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 7.690799243713825e-06, |
|
"loss": 2.2834, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 7.672563250172278e-06, |
|
"loss": 2.3202, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 7.654277350001255e-06, |
|
"loss": 2.3212, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 7.635941884666072e-06, |
|
"loss": 2.3898, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 7.617557196557601e-06, |
|
"loss": 2.4211, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.599123628985894e-06, |
|
"loss": 2.4041, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.580641526173758e-06, |
|
"loss": 2.3458, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.5621112332503325e-06, |
|
"loss": 2.3498, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.543533096244644e-06, |
|
"loss": 2.3767, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.524907462079149e-06, |
|
"loss": 2.529, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 7.506234678563248e-06, |
|
"loss": 2.3742, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 7.487515094386792e-06, |
|
"loss": 2.3069, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.468749059113578e-06, |
|
"loss": 2.3369, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.449936923174813e-06, |
|
"loss": 2.3661, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.431079037862575e-06, |
|
"loss": 2.3691, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.412175755323254e-06, |
|
"loss": 2.361, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.39322742855097e-06, |
|
"loss": 2.3486, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.374234411380987e-06, |
|
"loss": 2.3527, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.355197058483103e-06, |
|
"loss": 2.3877, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 7.336115725355033e-06, |
|
"loss": 2.4065, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 7.316990768315757e-06, |
|
"loss": 2.2512, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 7.297822544498879e-06, |
|
"loss": 2.3204, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 7.2786114118459564e-06, |
|
"loss": 2.2819, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 7.259357729099805e-06, |
|
"loss": 2.3463, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 7.240061855797818e-06, |
|
"loss": 2.4142, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 7.220724152265234e-06, |
|
"loss": 2.4426, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 7.201344979608423e-06, |
|
"loss": 2.3442, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 7.181924699708127e-06, |
|
"loss": 2.4645, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 7.162463675212726e-06, |
|
"loss": 2.2494, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 7.142962269531439e-06, |
|
"loss": 2.3921, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 7.12342084682756e-06, |
|
"loss": 2.3148, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 7.1038397720116445e-06, |
|
"loss": 2.3584, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.084219410734701e-06, |
|
"loss": 2.2597, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.064560129381359e-06, |
|
"loss": 2.3235, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.0448622950630305e-06, |
|
"loss": 2.345, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 7.025126275611058e-06, |
|
"loss": 2.4172, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 7.0053524395698345e-06, |
|
"loss": 2.3613, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.985541156189932e-06, |
|
"loss": 2.3586, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 6.965692795421206e-06, |
|
"loss": 2.4496, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 6.945807727905876e-06, |
|
"loss": 2.3308, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 6.925886324971619e-06, |
|
"loss": 2.415, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 6.905928958624627e-06, |
|
"loss": 2.3855, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 6.885936001542658e-06, |
|
"loss": 2.4297, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 6.865907827068085e-06, |
|
"loss": 2.3936, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 6.845844809200918e-06, |
|
"loss": 2.3937, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 6.82574732259182e-06, |
|
"loss": 2.3423, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 6.805615742535117e-06, |
|
"loss": 2.317, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 6.785450444961783e-06, |
|
"loss": 2.4252, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 6.765251806432423e-06, |
|
"loss": 2.2925, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 6.7450202041302404e-06, |
|
"loss": 2.3396, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 6.724756015853994e-06, |
|
"loss": 2.4506, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 6.704459620010945e-06, |
|
"loss": 2.3618, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 6.684131395609784e-06, |
|
"loss": 2.2474, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.663771722253567e-06, |
|
"loss": 2.2596, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 6.643380980132608e-06, |
|
"loss": 2.3431, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 6.622959550017397e-06, |
|
"loss": 2.2327, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 6.602507813251478e-06, |
|
"loss": 2.2748, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.5820261517443365e-06, |
|
"loss": 2.2585, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.561514947964258e-06, |
|
"loss": 2.3711, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.540974584931199e-06, |
|
"loss": 2.3061, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.520405446209615e-06, |
|
"loss": 2.2768, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.4998079159013236e-06, |
|
"loss": 2.3563, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.479182378638308e-06, |
|
"loss": 2.3836, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.458529219575551e-06, |
|
"loss": 2.3632, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.437848824383832e-06, |
|
"loss": 2.3514, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.417141579242532e-06, |
|
"loss": 2.5659, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.396407870832419e-06, |
|
"loss": 2.346, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.375648086328431e-06, |
|
"loss": 2.3057, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.354862613392436e-06, |
|
"loss": 2.3792, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.334051840166006e-06, |
|
"loss": 2.3504, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.313216155263161e-06, |
|
"loss": 2.356, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 6.292355947763114e-06, |
|
"loss": 2.3169, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 6.271471607203006e-06, |
|
"loss": 2.315, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 6.25056352357063e-06, |
|
"loss": 2.3571, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 6.2296320872971515e-06, |
|
"loss": 2.2367, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 6.208677689249816e-06, |
|
"loss": 2.4162, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 6.187700720724648e-06, |
|
"loss": 2.3897, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 6.16670157343915e-06, |
|
"loss": 2.3532, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 6.14568063952498e-06, |
|
"loss": 2.345, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 6.124638311520634e-06, |
|
"loss": 2.3358, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 6.103574982364118e-06, |
|
"loss": 2.3113, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 6.082491045385601e-06, |
|
"loss": 2.2876, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 6.061386894300082e-06, |
|
"loss": 2.3047, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 6.0402629232000275e-06, |
|
"loss": 2.2679, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 6.01911952654802e-06, |
|
"loss": 2.2512, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 5.997957099169388e-06, |
|
"loss": 2.3577, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 5.976776036244833e-06, |
|
"loss": 2.3674, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 5.955576733303053e-06, |
|
"loss": 2.1205, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 5.9343595862133515e-06, |
|
"loss": 2.2911, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 5.91312499117825e-06, |
|
"loss": 2.247, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 5.891873344726089e-06, |
|
"loss": 2.3012, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 5.87060504370362e-06, |
|
"loss": 2.2817, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 5.849320485268597e-06, |
|
"loss": 2.3684, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 5.828020066882361e-06, |
|
"loss": 2.4574, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 5.806704186302413e-06, |
|
"loss": 2.2969, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 5.7853732415749985e-06, |
|
"loss": 2.3386, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 5.764027631027659e-06, |
|
"loss": 2.2298, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 5.7426677532618e-06, |
|
"loss": 2.4084, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 5.721294007145256e-06, |
|
"loss": 2.3175, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 5.69990679180483e-06, |
|
"loss": 2.4387, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 5.678506506618845e-06, |
|
"loss": 2.4227, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 5.657093551209687e-06, |
|
"loss": 2.235, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 5.635668325436343e-06, |
|
"loss": 2.2519, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 5.614231229386933e-06, |
|
"loss": 2.3856, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 5.592782663371237e-06, |
|
"loss": 2.3068, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 5.571323027913221e-06, |
|
"loss": 2.2521, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 5.549852723743564e-06, |
|
"loss": 2.1542, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 5.528372151792161e-06, |
|
"loss": 2.3091, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 5.506881713180652e-06, |
|
"loss": 2.4098, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 5.485381809214921e-06, |
|
"loss": 2.3544, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 5.463872841377601e-06, |
|
"loss": 2.4103, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 5.44235521132059e-06, |
|
"loss": 2.3074, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 5.420829320857532e-06, |
|
"loss": 2.3675, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 5.39929557195633e-06, |
|
"loss": 2.3678, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 5.377754366731633e-06, |
|
"loss": 2.2966, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 5.35620610743732e-06, |
|
"loss": 2.3583, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 5.334651196459003e-06, |
|
"loss": 2.2363, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 5.3130900363065055e-06, |
|
"loss": 2.2912, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.291523029606339e-06, |
|
"loss": 2.3355, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.269950579094199e-06, |
|
"loss": 2.3922, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.248373087607434e-06, |
|
"loss": 2.2934, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.22679095807753e-06, |
|
"loss": 2.2972, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.2052045935225725e-06, |
|
"loss": 2.3647, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.183614397039741e-06, |
|
"loss": 2.2244, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.162020771797768e-06, |
|
"loss": 2.4884, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.1404241210294095e-06, |
|
"loss": 2.3397, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 5.118824848023926e-06, |
|
"loss": 2.261, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 5.097223356119538e-06, |
|
"loss": 2.4023, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 5.07562004869591e-06, |
|
"loss": 2.3712, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 5.054015329166596e-06, |
|
"loss": 2.298, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.032409600971533e-06, |
|
"loss": 2.3549, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.010803267569483e-06, |
|
"loss": 2.4179, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.989196732430518e-06, |
|
"loss": 2.361, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.967590399028468e-06, |
|
"loss": 2.3038, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.9459846708334044e-06, |
|
"loss": 2.3168, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.924379951304094e-06, |
|
"loss": 2.2561, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.902776643880461e-06, |
|
"loss": 2.3989, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 4.881175151976075e-06, |
|
"loss": 2.1956, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.859575878970592e-06, |
|
"loss": 2.4416, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.837979228202234e-06, |
|
"loss": 2.2633, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.81638560296026e-06, |
|
"loss": 2.2302, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.794795406477429e-06, |
|
"loss": 2.2946, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 4.773209041922472e-06, |
|
"loss": 2.365, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.7516269123925665e-06, |
|
"loss": 2.2991, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 4.730049420905801e-06, |
|
"loss": 2.3378, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 4.708476970393662e-06, |
|
"loss": 2.3307, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 4.686909963693498e-06, |
|
"loss": 2.3909, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.6653488035409975e-06, |
|
"loss": 2.3368, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.643793892562682e-06, |
|
"loss": 2.2934, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.622245633268371e-06, |
|
"loss": 2.2731, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.60070442804367e-06, |
|
"loss": 2.3552, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.5791706791424694e-06, |
|
"loss": 2.3491, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.557644788679413e-06, |
|
"loss": 2.2096, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.536127158622401e-06, |
|
"loss": 2.2307, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.514618190785081e-06, |
|
"loss": 2.2926, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.493118286819348e-06, |
|
"loss": 2.2682, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.47162784820784e-06, |
|
"loss": 2.3075, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.450147276256439e-06, |
|
"loss": 2.3367, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.42867697208678e-06, |
|
"loss": 2.276, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.407217336628765e-06, |
|
"loss": 2.3126, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.385768770613069e-06, |
|
"loss": 2.1602, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.3643316745636574e-06, |
|
"loss": 2.3406, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.342906448790315e-06, |
|
"loss": 2.2879, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 4.321493493381157e-06, |
|
"loss": 2.333, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.300093208195171e-06, |
|
"loss": 2.3582, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 4.278705992854745e-06, |
|
"loss": 2.3028, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.257332246738201e-06, |
|
"loss": 2.3173, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 4.235972368972343e-06, |
|
"loss": 2.3594, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 4.214626758425003e-06, |
|
"loss": 2.267, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 4.193295813697587e-06, |
|
"loss": 2.2458, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.171979933117641e-06, |
|
"loss": 2.2125, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.150679514731405e-06, |
|
"loss": 2.1231, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 4.12939495629638e-06, |
|
"loss": 2.2268, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.108126655273912e-06, |
|
"loss": 2.2652, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 4.086875008821752e-06, |
|
"loss": 2.2586, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.06564041378665e-06, |
|
"loss": 2.3749, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.04442326669695e-06, |
|
"loss": 2.2593, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.023223963755168e-06, |
|
"loss": 2.3175, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.002042900830613e-06, |
|
"loss": 2.3238, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 3.980880473451982e-06, |
|
"loss": 2.4295, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 3.959737076799974e-06, |
|
"loss": 2.2343, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.93861310569992e-06, |
|
"loss": 2.339, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.917508954614401e-06, |
|
"loss": 2.3416, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.896425017635884e-06, |
|
"loss": 2.3733, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 3.875361688479367e-06, |
|
"loss": 2.3012, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 3.854319360475022e-06, |
|
"loss": 2.296, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.833298426560851e-06, |
|
"loss": 2.3398, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 3.8122992792753534e-06, |
|
"loss": 2.2432, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.7913223107501847e-06, |
|
"loss": 2.2624, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.7703679127028497e-06, |
|
"loss": 2.3152, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.7494364764293722e-06, |
|
"loss": 2.2709, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.728528392796995e-06, |
|
"loss": 2.4161, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.707644052236887e-06, |
|
"loss": 2.3116, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.6867838447368414e-06, |
|
"loss": 2.159, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.6659481598339952e-06, |
|
"loss": 2.2846, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.6451373866075657e-06, |
|
"loss": 2.337, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.624351913671571e-06, |
|
"loss": 2.3568, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.6035921291675815e-06, |
|
"loss": 2.3092, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.5828584207574698e-06, |
|
"loss": 2.3266, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.5621511756161686e-06, |
|
"loss": 2.3007, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.54147078042445e-06, |
|
"loss": 2.3106, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.520817621361693e-06, |
|
"loss": 2.2919, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.500192084098677e-06, |
|
"loss": 2.3618, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.4795945537903852e-06, |
|
"loss": 2.2895, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.459025415068804e-06, |
|
"loss": 2.3116, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.4384850520357416e-06, |
|
"loss": 2.2887, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.4179738482556648e-06, |
|
"loss": 2.2725, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.3974921867485238e-06, |
|
"loss": 2.3094, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 3.377040449982604e-06, |
|
"loss": 2.3289, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.356619019867394e-06, |
|
"loss": 2.2469, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.336228277746435e-06, |
|
"loss": 2.3668, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.3158686043902166e-06, |
|
"loss": 2.4661, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 3.2955403799890567e-06, |
|
"loss": 2.2158, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.2752439841460063e-06, |
|
"loss": 2.1988, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.254979795869761e-06, |
|
"loss": 2.3208, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.234748193567579e-06, |
|
"loss": 2.2562, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 3.214549555038218e-06, |
|
"loss": 2.2584, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 3.194384257464884e-06, |
|
"loss": 2.1852, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.1742526774081822e-06, |
|
"loss": 2.2364, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.154155190799084e-06, |
|
"loss": 2.3356, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 3.1340921729319173e-06, |
|
"loss": 2.2179, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3.1140639984573428e-06, |
|
"loss": 2.2551, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 3.094071041375375e-06, |
|
"loss": 2.4366, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 3.0741136750283816e-06, |
|
"loss": 2.4265, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 3.054192272094125e-06, |
|
"loss": 2.3015, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.0343072045787956e-06, |
|
"loss": 2.3268, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 3.0144588438100693e-06, |
|
"loss": 2.2294, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 2.994647560430167e-06, |
|
"loss": 2.3442, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 2.974873724388945e-06, |
|
"loss": 2.2479, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 2.955137704936971e-06, |
|
"loss": 2.1724, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 2.9354398706186427e-06, |
|
"loss": 2.4248, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 2.915780589265301e-06, |
|
"loss": 2.3454, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 2.896160227988357e-06, |
|
"loss": 2.3336, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 2.876579153172441e-06, |
|
"loss": 2.3406, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 2.8570377304685627e-06, |
|
"loss": 2.2548, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 2.8375363247872756e-06, |
|
"loss": 2.3472, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 2.8180753002918735e-06, |
|
"loss": 2.3438, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 2.7986550203915807e-06, |
|
"loss": 2.3328, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 2.779275847734766e-06, |
|
"loss": 2.3456, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 2.7599381442021833e-06, |
|
"loss": 2.3872, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 2.7406422709001956e-06, |
|
"loss": 2.3587, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 2.721388588154045e-06, |
|
"loss": 2.3089, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 2.7021774555011214e-06, |
|
"loss": 2.303, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 2.6830092316842448e-06, |
|
"loss": 2.2772, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 2.6638842746449672e-06, |
|
"loss": 2.3411, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 2.6448029415168964e-06, |
|
"loss": 2.2603, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 2.6257655886190147e-06, |
|
"loss": 2.2749, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.6067725714490307e-06, |
|
"loss": 2.2677, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.5878242446767466e-06, |
|
"loss": 2.3006, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.5689209621374257e-06, |
|
"loss": 2.2082, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.5500630768251895e-06, |
|
"loss": 2.2563, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.5312509408864248e-06, |
|
"loss": 2.3765, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.5124849056132094e-06, |
|
"loss": 2.3231, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.493765321436755e-06, |
|
"loss": 2.2649, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.475092537920853e-06, |
|
"loss": 2.3013, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.456466903755357e-06, |
|
"loss": 2.3028, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.4378887667496696e-06, |
|
"loss": 2.3475, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.4193584738262426e-06, |
|
"loss": 2.2435, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.400876371014107e-06, |
|
"loss": 2.2967, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.3824428034424e-06, |
|
"loss": 2.2928, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.3640581153339293e-06, |
|
"loss": 2.3718, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.3457226499987456e-06, |
|
"loss": 2.2232, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.3274367498277246e-06, |
|
"loss": 2.3189, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.3092007562861756e-06, |
|
"loss": 2.3843, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.291015009907474e-06, |
|
"loss": 2.2295, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.2728798502866887e-06, |
|
"loss": 2.2856, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.2547956160742473e-06, |
|
"loss": 2.2164, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.2367626449696168e-06, |
|
"loss": 2.2529, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.2187812737149856e-06, |
|
"loss": 2.2924, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.2008518380889892e-06, |
|
"loss": 2.3298, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.182974672900428e-06, |
|
"loss": 2.2495, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.1651501119820212e-06, |
|
"loss": 2.3201, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.1473784881841753e-06, |
|
"loss": 2.2512, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.129660133368761e-06, |
|
"loss": 2.3506, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.1119953784029207e-06, |
|
"loss": 2.3026, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.0943845531528932e-06, |
|
"loss": 2.3814, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.0768279864778475e-06, |
|
"loss": 2.2394, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.059326006223743e-06, |
|
"loss": 2.3631, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.0418789392172113e-06, |
|
"loss": 2.2676, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.0244871112594523e-06, |
|
"loss": 2.3143, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.007150847120145e-06, |
|
"loss": 2.164, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.98987047053139e-06, |
|
"loss": 2.332, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.972646304181656e-06, |
|
"loss": 2.2384, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.9554786697097668e-06, |
|
"loss": 2.3065, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.9383678876988797e-06, |
|
"loss": 2.3406, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.921314277670509e-06, |
|
"loss": 2.2071, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.9043181580785597e-06, |
|
"loss": 2.4211, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.8873798463033742e-06, |
|
"loss": 2.2528, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.870499658645809e-06, |
|
"loss": 2.2526, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.8536779103213336e-06, |
|
"loss": 2.1434, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.8369149154541333e-06, |
|
"loss": 2.2012, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.8202109870712542e-06, |
|
"loss": 2.2621, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.8035664370967493e-06, |
|
"loss": 2.327, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.7869815763458576e-06, |
|
"loss": 2.2459, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.7704567145192036e-06, |
|
"loss": 2.3164, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.753992160197006e-06, |
|
"loss": 2.2619, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.73758822083332e-06, |
|
"loss": 2.3484, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.721245202750299e-06, |
|
"loss": 2.2519, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.7049634111324687e-06, |
|
"loss": 2.3655, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.6887431500210272e-06, |
|
"loss": 2.4345, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.6725847223081776e-06, |
|
"loss": 2.3152, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.6564884297314593e-06, |
|
"loss": 2.3824, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.6404545728681232e-06, |
|
"loss": 2.3621, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.624483451129512e-06, |
|
"loss": 2.2405, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 1.6085753627554728e-06, |
|
"loss": 2.2798, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.5927306048087855e-06, |
|
"loss": 2.2497, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.5769494731696206e-06, |
|
"loss": 2.25, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.5612322625300064e-06, |
|
"loss": 2.4691, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.5455792663883329e-06, |
|
"loss": 2.3415, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.529990777043866e-06, |
|
"loss": 2.23, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.5144670855912908e-06, |
|
"loss": 2.2422, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.499008481915281e-06, |
|
"loss": 2.2791, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.483615254685075e-06, |
|
"loss": 2.3216, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.4682876913490973e-06, |
|
"loss": 2.4383, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.4530260781295813e-06, |
|
"loss": 2.3001, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.437830700017226e-06, |
|
"loss": 2.3399, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.4227018407658822e-06, |
|
"loss": 2.324, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.4076397828872441e-06, |
|
"loss": 2.2255, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.392644807645575e-06, |
|
"loss": 2.4436, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.3777171950524648e-06, |
|
"loss": 2.2523, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 1.3628572238615878e-06, |
|
"loss": 2.3668, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.3480651715635035e-06, |
|
"loss": 2.3376, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 1.333341314380479e-06, |
|
"loss": 2.2861, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 1.3186859272613222e-06, |
|
"loss": 2.2756, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 1.3040992838762562e-06, |
|
"loss": 2.1302, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.2895816566118014e-06, |
|
"loss": 2.4016, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 1.275133316565691e-06, |
|
"loss": 2.2167, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 1.2607545335418154e-06, |
|
"loss": 2.2321, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 1.2464455760451733e-06, |
|
"loss": 2.3397, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 1.2322067112768632e-06, |
|
"loss": 2.3304, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.2180382051290974e-06, |
|
"loss": 2.3833, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 1.2039403221802297e-06, |
|
"loss": 2.4011, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.189913325689816e-06, |
|
"loss": 2.2326, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.175957477593706e-06, |
|
"loss": 2.2893, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.1620730384991407e-06, |
|
"loss": 2.2846, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.1482602676798933e-06, |
|
"loss": 2.2759, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.1345194230714235e-06, |
|
"loss": 2.2681, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 1.120850761266068e-06, |
|
"loss": 2.2402, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.107254537508239e-06, |
|
"loss": 2.2913, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.0937310056896643e-06, |
|
"loss": 2.3218, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.080280418344643e-06, |
|
"loss": 2.2079, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.0669030266453367e-06, |
|
"loss": 2.2507, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.053599080397068e-06, |
|
"loss": 2.3211, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 1.0403688280336626e-06, |
|
"loss": 2.3367, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.027212516612814e-06, |
|
"loss": 2.2297, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 1.014130391811457e-06, |
|
"loss": 2.3259, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 1.001122697921197e-06, |
|
"loss": 2.2548, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 9.881896778437328e-07, |
|
"loss": 2.2746, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 9.753315730863284e-07, |
|
"loss": 2.3727, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 9.625486237573046e-07, |
|
"loss": 2.2859, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 9.498410685615511e-07, |
|
"loss": 2.2621, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 9.372091447960685e-07, |
|
"loss": 2.3176, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 9.24653088345544e-07, |
|
"loss": 2.4216, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 9.121731336779377e-07, |
|
"loss": 2.261, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 8.99769513840108e-07, |
|
"loss": 2.3639, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 8.874424604534643e-07, |
|
"loss": 2.2011, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 8.751922037096328e-07, |
|
"loss": 2.3312, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 8.630189723661663e-07, |
|
"loss": 2.2176, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 8.509229937422664e-07, |
|
"loss": 2.1899, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 8.389044937145397e-07, |
|
"loss": 2.2521, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 8.269636967127864e-07, |
|
"loss": 2.4843, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 8.151008257158e-07, |
|
"loss": 2.1477, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 8.033161022472063e-07, |
|
"loss": 2.3578, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 7.916097463713335e-07, |
|
"loss": 2.3254, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 7.799819766890926e-07, |
|
"loss": 2.3148, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 7.684330103339016e-07, |
|
"loss": 2.2215, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 7.569630629676294e-07, |
|
"loss": 2.2728, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 7.455723487765664e-07, |
|
"loss": 2.3129, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 7.342610804674316e-07, |
|
"loss": 2.3301, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 7.230294692633922e-07, |
|
"loss": 2.2177, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 7.118777249001213e-07, |
|
"loss": 2.2484, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 7.008060556218893e-07, |
|
"loss": 2.3895, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 6.898146681776629e-07, |
|
"loss": 2.2685, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 6.789037678172522e-07, |
|
"loss": 2.2515, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 6.680735582874781e-07, |
|
"loss": 2.2984, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 6.573242418283632e-07, |
|
"loss": 2.3604, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 6.466560191693566e-07, |
|
"loss": 2.2527, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 6.360690895255916e-07, |
|
"loss": 2.187, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 6.255636505941548e-07, |
|
"loss": 2.3555, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 6.151398985504043e-07, |
|
"loss": 2.3284, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 6.047980280443e-07, |
|
"loss": 2.2503, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 5.945382321967696e-07, |
|
"loss": 2.2688, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 5.84360702596109e-07, |
|
"loss": 2.2249, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 5.742656292943943e-07, |
|
"loss": 2.2609, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 5.642532008039392e-07, |
|
"loss": 2.2697, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 5.543236040937744e-07, |
|
"loss": 2.3562, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 5.444770245861553e-07, |
|
"loss": 2.1866, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 5.347136461530966e-07, |
|
"loss": 2.2249, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 5.250336511129462e-07, |
|
"loss": 2.2293, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 5.15437220226972e-07, |
|
"loss": 2.2144, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 5.059245326959927e-07, |
|
"loss": 2.2897, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 4.964957661570285e-07, |
|
"loss": 2.2324, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.871510966799847e-07, |
|
"loss": 2.3129, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 4.778906987643633e-07, |
|
"loss": 2.3131, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 4.6871474533600413e-07, |
|
"loss": 2.2208, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 4.5962340774385936e-07, |
|
"loss": 2.3414, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 4.506168557567886e-07, |
|
"loss": 2.2689, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 4.4169525756039164e-07, |
|
"loss": 2.2039, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 4.328587797538658e-07, |
|
"loss": 2.311, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 4.2410758734689915e-07, |
|
"loss": 2.1353, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 4.1544184375658326e-07, |
|
"loss": 2.2208, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 4.0686171080436767e-07, |
|
"loss": 2.2835, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 3.983673487130313e-07, |
|
"loss": 2.2034, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 3.8995891610369707e-07, |
|
"loss": 2.2443, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 3.8163656999286647e-07, |
|
"loss": 2.2348, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 3.734004657894874e-07, |
|
"loss": 2.2864, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.6525075729205274e-07, |
|
"loss": 2.22, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 3.5718759668572913e-07, |
|
"loss": 2.2842, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.4921113453951385e-07, |
|
"loss": 2.2445, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 3.4132151980342255e-07, |
|
"loss": 2.3406, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 3.335188998057115e-07, |
|
"loss": 2.1633, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 3.2580342025012204e-07, |
|
"loss": 2.3446, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 3.1817522521316034e-07, |
|
"loss": 2.2757, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 3.106344571414116e-07, |
|
"loss": 2.3542, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 3.0318125684887233e-07, |
|
"loss": 2.2571, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 2.958157635143294e-07, |
|
"loss": 2.3593, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 2.8853811467875413e-07, |
|
"loss": 2.2509, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 2.813484462427357e-07, |
|
"loss": 2.2306, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 2.7424689246394685e-07, |
|
"loss": 2.3121, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 2.672335859546332e-07, |
|
"loss": 2.3072, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 2.6030865767913527e-07, |
|
"loss": 2.171, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 2.534722369514503e-07, |
|
"loss": 2.2241, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 2.467244514328082e-07, |
|
"loss": 2.3027, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 2.400654271292946e-07, |
|
"loss": 2.2732, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 2.334952883894942e-07, |
|
"loss": 2.2501, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 2.270141579021695e-07, |
|
"loss": 2.4042, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 2.2062215669397201e-07, |
|
"loss": 2.2304, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 2.1431940412717843e-07, |
|
"loss": 2.2965, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 2.081060178974642e-07, |
|
"loss": 2.2902, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 2.019821140317052e-07, |
|
"loss": 2.3338, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.9594780688581172e-07, |
|
"loss": 2.2142, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 1.900032091425902e-07, |
|
"loss": 2.4194, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.8414843180964316e-07, |
|
"loss": 2.2878, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.7838358421729375e-07, |
|
"loss": 2.2304, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 1.7270877401654283e-07, |
|
"loss": 2.3066, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.6712410717706406e-07, |
|
"loss": 2.2243, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.616296879852175e-07, |
|
"loss": 2.2178, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.562256190421102e-07, |
|
"loss": 2.2442, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.5091200126167328e-07, |
|
"loss": 2.2656, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.4568893386878057e-07, |
|
"loss": 2.2923, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.405565143973986e-07, |
|
"loss": 2.486, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.3551483868875836e-07, |
|
"loss": 2.2223, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.30564000889572e-07, |
|
"loss": 2.267, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.257040934502729e-07, |
|
"loss": 2.2644, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.209352071232861e-07, |
|
"loss": 2.2235, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.162574309613379e-07, |
|
"loss": 2.2914, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.1167085231579111e-07, |
|
"loss": 2.1447, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.0717555683501413e-07, |
|
"loss": 2.1958, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.027716284627811e-07, |
|
"loss": 2.3153, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 9.845914943670432e-08, |
|
"loss": 2.2076, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 9.423820028669983e-08, |
|
"loss": 2.2937, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 9.010885983348094e-08, |
|
"loss": 2.3275, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 8.607120518709156e-08, |
|
"loss": 2.2495, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 8.212531174545957e-08, |
|
"loss": 2.3742, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 7.827125319299301e-08, |
|
"loss": 2.3173, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 7.450910149920499e-08, |
|
"loss": 2.3841, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 7.083892691736428e-08, |
|
"loss": 2.3203, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 6.726079798319185e-08, |
|
"loss": 2.372, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 6.377478151357308e-08, |
|
"loss": 2.2576, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 6.038094260531425e-08, |
|
"loss": 2.2702, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 5.707934463392628e-08, |
|
"loss": 2.2772, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 5.387004925244077e-08, |
|
"loss": 2.2446, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 5.0753116390258594e-08, |
|
"loss": 2.3236, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 4.772860425203252e-08, |
|
"loss": 2.2049, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 4.479656931657694e-08, |
|
"loss": 2.3853, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.195706633581709e-08, |
|
"loss": 2.3235, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.9210148333763135e-08, |
|
"loss": 2.2853, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 3.655586660552324e-08, |
|
"loss": 2.2421, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 3.39942707163432e-08, |
|
"loss": 2.3028, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 3.152540850068164e-08, |
|
"loss": 2.3869, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.9149326061317373e-08, |
|
"loss": 2.3606, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 2.686606776848788e-08, |
|
"loss": 2.1782, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 2.4675676259059976e-08, |
|
"loss": 2.3123, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 2.2578192435736555e-08, |
|
"loss": 2.2398, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 2.0573655466289423e-08, |
|
"loss": 2.3347, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 1.866210278282876e-08, |
|
"loss": 2.2827, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 1.684357008110593e-08, |
|
"loss": 2.2904, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 1.5118091319843985e-08, |
|
"loss": 2.2559, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 1.3485698720107077e-08, |
|
"loss": 2.3674, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 1.1946422764695376e-08, |
|
"loss": 2.257, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 1.0500292197577756e-08, |
|
"loss": 2.343, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 9.147334023354437e-09, |
|
"loss": 2.1531, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 7.887573506752954e-09, |
|
"loss": 2.3412, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 6.7210341721563044e-09, |
|
"loss": 2.1971, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 5.647737803163855e-09, |
|
"loss": 2.1668, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 4.667704442183341e-09, |
|
"loss": 2.2644, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 3.780952390058379e-09, |
|
"loss": 2.2436, |
|
"step": 741 |
|
} |
|
], |
|
"max_steps": 750, |
|
"num_train_epochs": 10, |
|
"total_flos": 1.195119998926848e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |