|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.0, |
|
"eval_steps": 500, |
|
"global_step": 940, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.896551724137932e-06, |
|
"loss": 2.1876, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.3793103448275863e-05, |
|
"loss": 2.2886, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.0689655172413793e-05, |
|
"loss": 2.1823, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.7586206896551727e-05, |
|
"loss": 2.3409, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.4482758620689657e-05, |
|
"loss": 2.2662, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.1379310344827587e-05, |
|
"loss": 1.9962, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.827586206896552e-05, |
|
"loss": 1.9879, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 5.517241379310345e-05, |
|
"loss": 1.7576, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 6.206896551724138e-05, |
|
"loss": 1.683, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 6.896551724137931e-05, |
|
"loss": 1.5743, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.586206896551724e-05, |
|
"loss": 1.4835, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 8.275862068965517e-05, |
|
"loss": 1.5786, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.96551724137931e-05, |
|
"loss": 1.4103, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.655172413793105e-05, |
|
"loss": 1.3709, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00010344827586206898, |
|
"loss": 1.4257, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0001103448275862069, |
|
"loss": 1.3711, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00011724137931034482, |
|
"loss": 1.2682, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00012413793103448277, |
|
"loss": 1.3635, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00013103448275862068, |
|
"loss": 1.2354, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00013793103448275863, |
|
"loss": 1.25, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00014482758620689657, |
|
"loss": 1.1641, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00015172413793103449, |
|
"loss": 1.2721, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00015862068965517243, |
|
"loss": 1.1605, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00016551724137931035, |
|
"loss": 1.228, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00017241379310344826, |
|
"loss": 1.2069, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0001793103448275862, |
|
"loss": 1.2013, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00018620689655172415, |
|
"loss": 1.2213, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0001931034482758621, |
|
"loss": 1.1444, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0002, |
|
"loss": 1.263, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00019999940538951178, |
|
"loss": 1.1873, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001999976215651183, |
|
"loss": 1.1814, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001999946485480332, |
|
"loss": 1.1387, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00019999048637361222, |
|
"loss": 1.1123, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001999851350913528, |
|
"loss": 1.0635, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00019997859476489355, |
|
"loss": 1.0912, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00019997086547201336, |
|
"loss": 1.0614, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00019996194730463062, |
|
"loss": 1.1857, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00019995184036880205, |
|
"loss": 1.1534, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00019994054478472144, |
|
"loss": 1.1395, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00019992806068671822, |
|
"loss": 1.0477, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00019991438822325596, |
|
"loss": 1.1387, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00019989952755693048, |
|
"loss": 1.0391, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00019988347886446784, |
|
"loss": 1.0089, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00019986624233672253, |
|
"loss": 1.1454, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00019984781817867492, |
|
"loss": 1.109, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.000199828206609429, |
|
"loss": 1.1386, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00019980740786220967, |
|
"loss": 1.0947, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00019978542218435997, |
|
"loss": 1.0971, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00019976224983733816, |
|
"loss": 1.0616, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0001997378910967147, |
|
"loss": 1.0902, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00019971234625216884, |
|
"loss": 1.0984, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001996856156074852, |
|
"loss": 1.0847, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00019965769948055025, |
|
"loss": 1.0508, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00019962859820334838, |
|
"loss": 1.0532, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00019959831212195813, |
|
"loss": 1.0539, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00019956684159654794, |
|
"loss": 1.1491, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00019953418700137184, |
|
"loss": 1.0944, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00019950034872476518, |
|
"loss": 1.1604, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0001994653271691398, |
|
"loss": 1.043, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00019942912275097943, |
|
"loss": 1.1162, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00019939173590083456, |
|
"loss": 0.9745, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001993531670633175, |
|
"loss": 0.984, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00019931341669709695, |
|
"loss": 1.0841, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00019927248527489258, |
|
"loss": 1.037, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00019923037328346946, |
|
"loss": 1.1158, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00019918708122363218, |
|
"loss": 0.9966, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001991426096102191, |
|
"loss": 1.0151, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00019909695897209588, |
|
"loss": 1.0946, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00019905012985214953, |
|
"loss": 1.1756, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00019900212280728178, |
|
"loss": 1.0429, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0001989529384084024, |
|
"loss": 1.1326, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00019890257724042268, |
|
"loss": 1.1368, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00019885103990224812, |
|
"loss": 0.9641, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0001987983270067716, |
|
"loss": 0.9642, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0001987444391808659, |
|
"loss": 1.0429, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0001986893770653764, |
|
"loss": 1.1404, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00019863314131511324, |
|
"loss": 1.0498, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001985757325988438, |
|
"loss": 1.0293, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00019851715159928466, |
|
"loss": 1.0084, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0001984573990130932, |
|
"loss": 1.0712, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00019839647555085983, |
|
"loss": 1.1065, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00019833438193709912, |
|
"loss": 1.0316, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0001982711189102413, |
|
"loss": 1.0076, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001982066872226236, |
|
"loss": 1.0379, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00019814108764048115, |
|
"loss": 1.0316, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00019807432094393792, |
|
"loss": 1.0108, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001980063879269975, |
|
"loss": 0.9731, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00019793728939753363, |
|
"loss": 1.0154, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0001978670261772804, |
|
"loss": 1.0085, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00019779559910182282, |
|
"loss": 0.9388, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00019772300902058668, |
|
"loss": 1.0841, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.0001976492567968284, |
|
"loss": 1.0179, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00019757434330762495, |
|
"loss": 0.9289, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00019749826944386322, |
|
"loss": 1.0816, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00019742103611022959, |
|
"loss": 0.9418, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00019734264422519902, |
|
"loss": 1.0085, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019726309472102428, |
|
"loss": 1.0727, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019718238854372477, |
|
"loss": 1.0057, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019710052665307523, |
|
"loss": 1.0178, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019701751002259448, |
|
"loss": 1.0333, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019693333963953372, |
|
"loss": 1.0743, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019684801650486473, |
|
"loss": 1.0868, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001967615416332682, |
|
"loss": 0.9358, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001966739160531214, |
|
"loss": 0.9894, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00019658514080648614, |
|
"loss": 0.9653, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001964952169490962, |
|
"loss": 0.9682, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00019640414555034506, |
|
"loss": 1.012, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00019631192769327285, |
|
"loss": 1.0601, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00019621856447455364, |
|
"loss": 1.0882, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00019612405700448245, |
|
"loss": 1.0279, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00019602840640696193, |
|
"loss": 0.9825, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00019593161381948907, |
|
"loss": 1.004, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0001958336803931416, |
|
"loss": 1.0466, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00019573460729256433, |
|
"loss": 0.9698, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00019563439569595542, |
|
"loss": 1.0157, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019553304679505217, |
|
"loss": 1.0221, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019543056179511695, |
|
"loss": 1.051, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019532694191492294, |
|
"loss": 1.093, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019522218838673944, |
|
"loss": 1.0751, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0001951163024563174, |
|
"loss": 1.0015, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00019500928538287448, |
|
"loss": 1.0284, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001949011384390802, |
|
"loss": 0.9958, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001947918629110407, |
|
"loss": 1.1206, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00019468146009828346, |
|
"loss": 0.9973, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001945699313137419, |
|
"loss": 0.9493, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00019445727788373974, |
|
"loss": 1.1093, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.0001943435011479752, |
|
"loss": 1.0352, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00019422860245950504, |
|
"loss": 0.9579, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00019411258318472862, |
|
"loss": 1.0053, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00019399544470337146, |
|
"loss": 1.0048, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00019387718840846896, |
|
"loss": 1.0891, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00019375781570634976, |
|
"loss": 1.1069, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00019363732801661913, |
|
"loss": 1.0058, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.0001935157267721419, |
|
"loss": 1.0393, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00019339301341902564, |
|
"loss": 1.0531, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.0001932691894166032, |
|
"loss": 0.9608, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00019314425623741566, |
|
"loss": 1.0752, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00019301821536719452, |
|
"loss": 0.9667, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00019289106830484436, |
|
"loss": 1.0902, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00019276281656242463, |
|
"loss": 0.9993, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00019263346166513196, |
|
"loss": 0.9524, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00019250300515128194, |
|
"loss": 0.9226, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00019237144857229082, |
|
"loss": 1.0367, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00019223879349265695, |
|
"loss": 0.9414, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00019210504148994248, |
|
"loss": 0.9305, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00019197019415475422, |
|
"loss": 0.8975, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00019183425309072494, |
|
"loss": 0.9615, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0001916972199144943, |
|
"loss": 1.0114, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00019155909625568967, |
|
"loss": 0.9631, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00019141988375690646, |
|
"loss": 0.9684, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00019127958407368895, |
|
"loss": 1.0255, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.0001911381988745104, |
|
"loss": 1.1141, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00019099572984075325, |
|
"loss": 1.078, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00019085217866668917, |
|
"loss": 0.9364, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00019070754705945877, |
|
"loss": 0.9256, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.0001905618367390515, |
|
"loss": 0.8849, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00019041504943828508, |
|
"loss": 1.1235, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00019026718690278485, |
|
"loss": 1.1084, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0001901182508909631, |
|
"loss": 0.9836, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0001899682431739981, |
|
"loss": 1.0317, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.00018981716553581314, |
|
"loss": 0.9848, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.00018966501977305512, |
|
"loss": 0.9545, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.00018951180769507343, |
|
"loss": 0.9357, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.00018935753112389825, |
|
"loss": 1.0412, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.00018920219189421883, |
|
"loss": 0.957, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.00018904579185336195, |
|
"loss": 1.0153, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.0001888883328612697, |
|
"loss": 1.0068, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.0001887298167904774, |
|
"loss": 0.9777, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.00018857024552609145, |
|
"loss": 0.9894, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.00018840962096576682, |
|
"loss": 1.0518, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.00018824794501968444, |
|
"loss": 1.0353, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.00018808521961052857, |
|
"loss": 1.0303, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 0.00018792144667346391, |
|
"loss": 0.889, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.00018775662815611262, |
|
"loss": 0.9982, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.00018759076601853106, |
|
"loss": 0.9959, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.00018742386223318653, |
|
"loss": 0.8984, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.00018725591878493387, |
|
"loss": 0.9193, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.00018708693767099184, |
|
"loss": 0.9917, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.00018691692090091927, |
|
"loss": 1.046, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.0001867458704965912, |
|
"loss": 0.9424, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.000186573788492175, |
|
"loss": 0.9681, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.0001864006769341059, |
|
"loss": 0.9762, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.0001862265378810629, |
|
"loss": 0.9811, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.00018605137340394414, |
|
"loss": 1.0102, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.00018587518558584233, |
|
"loss": 0.9727, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.00018569797652201992, |
|
"loss": 1.018, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.0001855197483198843, |
|
"loss": 0.9156, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.00018534050309896265, |
|
"loss": 0.9272, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.0001851602429908767, |
|
"loss": 0.6938, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.00018497897013931754, |
|
"loss": 0.7238, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.00018479668670001983, |
|
"loss": 0.7324, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.00018461339484073658, |
|
"loss": 0.7076, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.00018442909674121295, |
|
"loss": 0.6871, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.00018424379459316067, |
|
"loss": 0.7156, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.00018405749060023167, |
|
"loss": 0.7082, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.00018387018697799218, |
|
"loss": 0.7174, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.00018368188595389615, |
|
"loss": 0.655, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.00018349258976725884, |
|
"loss": 0.6597, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.00018330230066923022, |
|
"loss": 0.6928, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.00018311102092276815, |
|
"loss": 0.6567, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.0001829187528026115, |
|
"loss": 0.7052, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.00018272549859525312, |
|
"loss": 0.7075, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.00018253126059891256, |
|
"loss": 0.7101, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.00018233604112350877, |
|
"loss": 0.6877, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.00018213984249063278, |
|
"loss": 0.6454, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.00018194266703351982, |
|
"loss": 0.6476, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.00018174451709702187, |
|
"loss": 0.7597, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.00018154539503757944, |
|
"loss": 0.7288, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.0001813453032231939, |
|
"loss": 0.6766, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.00018114424403339904, |
|
"loss": 0.6553, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.00018094221985923294, |
|
"loss": 0.6661, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.00018073923310320947, |
|
"loss": 0.8169, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.00018053528617928966, |
|
"loss": 0.7249, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.00018033038151285319, |
|
"loss": 0.6402, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.00018012452154066928, |
|
"loss": 0.7054, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.0001799177087108679, |
|
"loss": 0.6768, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.00017970994548291062, |
|
"loss": 0.6639, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.00017950123432756133, |
|
"loss": 0.7245, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.00017929157772685685, |
|
"loss": 0.6367, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.00017908097817407746, |
|
"loss": 0.6771, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.00017886943817371726, |
|
"loss": 0.7223, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.00017865696024145424, |
|
"loss": 0.6609, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.00017844354690412062, |
|
"loss": 0.6434, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.00017822920069967248, |
|
"loss": 0.8253, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.0001780139241771599, |
|
"loss": 0.7738, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.00017779771989669647, |
|
"loss": 0.6607, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.00017758059042942878, |
|
"loss": 0.7007, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.00017736253835750604, |
|
"loss": 0.7111, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.00017714356627404924, |
|
"loss": 0.6154, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.0001769236767831203, |
|
"loss": 0.7109, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.00017670287249969116, |
|
"loss": 0.6744, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.00017648115604961272, |
|
"loss": 0.7274, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00017625853006958351, |
|
"loss": 0.7171, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00017603499720711836, |
|
"loss": 0.6993, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00017581056012051696, |
|
"loss": 0.6791, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001755852214788322, |
|
"loss": 0.7034, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.00017535898396183852, |
|
"loss": 0.6822, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00017513185025999989, |
|
"loss": 0.7047, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.000174903823074438, |
|
"loss": 0.6997, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00017467490511689993, |
|
"loss": 0.7198, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00017444509910972603, |
|
"loss": 0.7137, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00017421440778581755, |
|
"loss": 0.7461, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00017398283388860413, |
|
"loss": 0.7134, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.0001737503801720111, |
|
"loss": 0.697, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00017351704940042688, |
|
"loss": 0.7388, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.0001732828443486699, |
|
"loss": 0.713, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.00017304776780195576, |
|
"loss": 0.7775, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00017281182255586406, |
|
"loss": 0.7171, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00017257501141630516, |
|
"loss": 0.6963, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.0001723373371994869, |
|
"loss": 0.7245, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00017209880273188075, |
|
"loss": 0.6883, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00017185941085018879, |
|
"loss": 0.6677, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.0001716191644013094, |
|
"loss": 0.687, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.0001713780662423038, |
|
"loss": 0.6464, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00017113611924036182, |
|
"loss": 0.6727, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.000170893326272768, |
|
"loss": 0.7104, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00017064969022686724, |
|
"loss": 0.6962, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.0001704052140000305, |
|
"loss": 0.7072, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.0001701599004996203, |
|
"loss": 0.7634, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00016991375264295634, |
|
"loss": 0.7518, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00016966677335728046, |
|
"loss": 0.6743, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.0001694189655797222, |
|
"loss": 0.7077, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00016917033225726368, |
|
"loss": 0.7159, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00016892087634670438, |
|
"loss": 0.7228, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00016867060081462643, |
|
"loss": 0.7195, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.0001684195086373589, |
|
"loss": 0.7129, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00016816760280094267, |
|
"loss": 0.7258, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.0001679148863010948, |
|
"loss": 0.672, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00016766136214317286, |
|
"loss": 0.7452, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00016740703334213936, |
|
"loss": 0.7328, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00016715190292252577, |
|
"loss": 0.7458, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.0001668959739183965, |
|
"loss": 0.7344, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00016663924937331296, |
|
"loss": 0.6737, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00016638173234029732, |
|
"loss": 0.686, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00016612342588179617, |
|
"loss": 0.6873, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00016586433306964402, |
|
"loss": 0.6912, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00016560445698502702, |
|
"loss": 0.6801, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00016534380071844606, |
|
"loss": 0.7085, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00016508236736968016, |
|
"loss": 0.6999, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00016482016004774948, |
|
"loss": 0.6793, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.0001645571818708785, |
|
"loss": 0.6892, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00016429343596645896, |
|
"loss": 0.7138, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.0001640289254710124, |
|
"loss": 0.65, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00016376365353015313, |
|
"loss": 0.7034, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00016349762329855068, |
|
"loss": 0.7561, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00016323083793989243, |
|
"loss": 0.6998, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00016296330062684578, |
|
"loss": 0.7439, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00016269501454102065, |
|
"loss": 0.6734, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00016242598287293135, |
|
"loss": 0.7231, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00016215620882195894, |
|
"loss": 0.6827, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.0001618856955963131, |
|
"loss": 0.7203, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.00016161444641299376, |
|
"loss": 0.6595, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00016134246449775314, |
|
"loss": 0.6964, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00016106975308505722, |
|
"loss": 0.6844, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00016079631541804736, |
|
"loss": 0.6621, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00016052215474850158, |
|
"loss": 0.6574, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00016024727433679617, |
|
"loss": 0.7115, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00015997167745186657, |
|
"loss": 0.689, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00015969536737116879, |
|
"loss": 0.6422, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00015941834738064024, |
|
"loss": 0.7619, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00015914062077466076, |
|
"loss": 0.7776, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.0001588621908560134, |
|
"loss": 0.7157, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00015858306093584522, |
|
"loss": 0.6986, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00015830323433362772, |
|
"loss": 0.6644, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00015802271437711753, |
|
"loss": 0.6756, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00015774150440231685, |
|
"loss": 0.6343, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00015745960775343372, |
|
"loss": 0.6833, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00015717702778284218, |
|
"loss": 0.686, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.0001568937678510425, |
|
"loss": 0.7058, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00015660983132662125, |
|
"loss": 0.6421, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00015632522158621112, |
|
"loss": 0.7206, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 0.00015603994201445083, |
|
"loss": 0.6923, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 0.0001557539960039449, |
|
"loss": 0.7092, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 0.00015546738695522332, |
|
"loss": 0.7465, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 0.00015518011827670095, |
|
"loss": 0.701, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 0.00015489219338463714, |
|
"loss": 0.7157, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 0.00015460361570309518, |
|
"loss": 0.6585, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 0.00015431438866390137, |
|
"loss": 0.6455, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 0.00015402451570660437, |
|
"loss": 0.7563, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 0.00015373400027843408, |
|
"loss": 0.6688, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 0.00015344284583426103, |
|
"loss": 0.6508, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 0.0001531510558365549, |
|
"loss": 0.6647, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 0.00015285863375534353, |
|
"loss": 0.6401, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 0.00015256558306817164, |
|
"loss": 0.6478, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 0.00015227190726005953, |
|
"loss": 0.7038, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 0.0001519776098234615, |
|
"loss": 0.6994, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 0.00015168269425822433, |
|
"loss": 0.7038, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 0.00015138716407154588, |
|
"loss": 0.6898, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 0.00015109102277793306, |
|
"loss": 0.6907, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 0.00015079427389916026, |
|
"loss": 0.6597, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 0.0001504969209642274, |
|
"loss": 0.8138, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 0.000150198967509318, |
|
"loss": 0.6845, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 0.000149900417077757, |
|
"loss": 0.7397, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 0.00014960127321996875, |
|
"loss": 0.6613, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 0.00014930153949343477, |
|
"loss": 0.6948, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 0.00014900121946265143, |
|
"loss": 0.7035, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 0.00014870031669908748, |
|
"loss": 0.6901, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 0.00014839883478114172, |
|
"loss": 0.6756, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 0.00014809677729410046, |
|
"loss": 0.6576, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 0.00014779414783009455, |
|
"loss": 0.6971, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 0.00014749094998805714, |
|
"loss": 0.6282, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 0.00014718718737368055, |
|
"loss": 0.7039, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 0.0001468828635993735, |
|
"loss": 0.6784, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 0.00014657798228421816, |
|
"loss": 0.7283, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 0.00014627254705392706, |
|
"loss": 0.7035, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 0.00014596656154080003, |
|
"loss": 0.6997, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 0.00014566002938368105, |
|
"loss": 0.7096, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 0.00014535295422791474, |
|
"loss": 0.7273, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 0.0001450453397253033, |
|
"loss": 0.7222, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 0.00014473718953406296, |
|
"loss": 0.6742, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 0.00014442850731878038, |
|
"loss": 0.6749, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 0.0001441192967503692, |
|
"loss": 0.7543, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 0.00014380956150602644, |
|
"loss": 0.6966, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 0.00014349930526918854, |
|
"loss": 0.6952, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 0.00014318853172948774, |
|
"loss": 0.6639, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 0.00014287724458270822, |
|
"loss": 0.6729, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 0.00014256544753074194, |
|
"loss": 0.647, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 0.00014225314428154494, |
|
"loss": 0.711, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 0.0001419403385490929, |
|
"loss": 0.6997, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 0.00014162703405333726, |
|
"loss": 0.7596, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 0.00014131323452016073, |
|
"loss": 0.6618, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 0.00014099894368133325, |
|
"loss": 0.7145, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 0.0001406841652744674, |
|
"loss": 0.7416, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 0.00014036890304297396, |
|
"loss": 0.6452, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 0.0001400531607360176, |
|
"loss": 0.6843, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 0.000139736942108472, |
|
"loss": 0.7051, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 0.00013942025092087548, |
|
"loss": 0.7183, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 0.00013910309093938597, |
|
"loss": 0.6799, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 0.00013878546593573663, |
|
"loss": 0.6735, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 0.00013846737968719052, |
|
"loss": 0.632, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 0.00013814883597649606, |
|
"loss": 0.6892, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 0.00013782983859184192, |
|
"loss": 0.6802, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 0.00013751039132681186, |
|
"loss": 0.6761, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 0.00013719049798033978, |
|
"loss": 0.6824, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 0.00013687016235666447, |
|
"loss": 0.72, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 0.0001365493882652844, |
|
"loss": 0.6919, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 0.00013622817952091225, |
|
"loss": 0.3804, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 0.00013590653994342987, |
|
"loss": 0.4047, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 0.00013558447335784254, |
|
"loss": 0.4268, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 0.0001352619835942337, |
|
"loss": 0.389, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 0.0001349390744877192, |
|
"loss": 0.4203, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 0.00013461574987840192, |
|
"loss": 0.4585, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 0.00013429201361132595, |
|
"loss": 0.4049, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 0.00013396786953643084, |
|
"loss": 0.4205, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 0.00013364332150850596, |
|
"loss": 0.3821, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 0.0001333183733871445, |
|
"loss": 0.3748, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 0.0001329930290366977, |
|
"loss": 0.3675, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 0.00013266729232622884, |
|
"loss": 0.3359, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 0.00013234116712946715, |
|
"loss": 0.3514, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 0.00013201465732476194, |
|
"loss": 0.3909, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 0.0001316877667950363, |
|
"loss": 0.3805, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 0.00013136049942774096, |
|
"loss": 0.4049, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 0.0001310328591148081, |
|
"loss": 0.4273, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 0.00013070484975260502, |
|
"loss": 0.356, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 0.00013037647524188792, |
|
"loss": 0.4221, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 0.00013004773948775532, |
|
"loss": 0.4015, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 0.00012971864639960175, |
|
"loss": 0.3346, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 0.0001293891998910713, |
|
"loss": 0.3899, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 0.00012905940388001092, |
|
"loss": 0.3619, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 0.00012872926228842398, |
|
"loss": 0.3837, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 0.00012839877904242348, |
|
"loss": 0.3598, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 0.0001280679580721856, |
|
"loss": 0.4221, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 0.0001277368033119026, |
|
"loss": 0.4154, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 0.00012740531869973647, |
|
"loss": 0.4163, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 0.00012707350817777163, |
|
"loss": 0.3702, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 0.00012674137569196857, |
|
"loss": 0.4351, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 0.00012640892519211628, |
|
"loss": 0.3819, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 0.000126076160631786, |
|
"loss": 0.3664, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 0.00012574308596828364, |
|
"loss": 0.3746, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 0.00012540970516260292, |
|
"loss": 0.4059, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 0.00012507602217937835, |
|
"loss": 0.4267, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 0.00012474204098683795, |
|
"loss": 0.4026, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 0.00012440776555675616, |
|
"loss": 0.386, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 0.00012407319986440648, |
|
"loss": 0.4094, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 0.00012373834788851427, |
|
"loss": 0.401, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 0.00012340321361120954, |
|
"loss": 0.3938, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 0.00012306780101797936, |
|
"loss": 0.4015, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 0.00012273211409762066, |
|
"loss": 0.4115, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 0.00012239615684219273, |
|
"loss": 0.4172, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 0.0001220599332469697, |
|
"loss": 0.3897, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 0.00012172344731039311, |
|
"loss": 0.4019, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 0.00012138670303402429, |
|
"loss": 0.3838, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 0.00012104970442249679, |
|
"loss": 0.3961, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 0.00012071245548346883, |
|
"loss": 0.3596, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 0.00012037496022757552, |
|
"loss": 0.4054, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 0.00012003722266838124, |
|
"loss": 0.3964, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 0.00011969924682233187, |
|
"loss": 0.3895, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 0.0001193610367087071, |
|
"loss": 0.3695, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 0.0001190225963495725, |
|
"loss": 0.3914, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 0.00011868392976973188, |
|
"loss": 0.3781, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 0.0001183450409966792, |
|
"loss": 0.412, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 0.00011800593406055082, |
|
"loss": 0.4239, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 0.00011766661299407763, |
|
"loss": 0.4357, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 0.00011732708183253685, |
|
"loss": 0.3945, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 0.00011698734461370428, |
|
"loss": 0.4635, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 0.00011664740537780626, |
|
"loss": 0.4222, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 0.00011630726816747143, |
|
"loss": 0.4205, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 0.00011596693702768288, |
|
"loss": 0.3976, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 0.00011562641600572993, |
|
"loss": 0.4068, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 0.00011528570915115988, |
|
"loss": 0.3564, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 0.00011494482051573027, |
|
"loss": 0.4185, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 0.00011460375415336015, |
|
"loss": 0.3978, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 0.00011426251412008233, |
|
"loss": 0.3957, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 0.0001139211044739948, |
|
"loss": 0.4026, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 0.00011357952927521269, |
|
"loss": 0.381, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 0.00011323779258581997, |
|
"loss": 0.3935, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 0.00011289589846982096, |
|
"loss": 0.4046, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 0.00011255385099309228, |
|
"loss": 0.363, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 0.0001122116542233342, |
|
"loss": 0.3909, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 0.00011186931223002255, |
|
"loss": 0.4179, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 0.00011152682908436007, |
|
"loss": 0.4005, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 0.00011118420885922822, |
|
"loss": 0.431, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 0.00011084145562913856, |
|
"loss": 0.414, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 0.0001104985734701844, |
|
"loss": 0.4168, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 0.0001101555664599923, |
|
"loss": 0.374, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 0.00010981243867767357, |
|
"loss": 0.3913, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 0.00010946919420377581, |
|
"loss": 0.418, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 0.00010912583712023425, |
|
"loss": 0.4128, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 0.0001087823715103234, |
|
"loss": 0.4182, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 0.0001084388014586083, |
|
"loss": 0.3988, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 0.00010809513105089607, |
|
"loss": 0.4154, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 0.00010775136437418736, |
|
"loss": 0.412, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 0.00010740750551662752, |
|
"loss": 0.3769, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 0.00010706355856745824, |
|
"loss": 0.3595, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 0.0001067195276169688, |
|
"loss": 0.425, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 0.00010637541675644742, |
|
"loss": 0.3763, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 0.00010603123007813263, |
|
"loss": 0.4234, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 0.00010568697167516461, |
|
"loss": 0.4064, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 0.00010534264564153651, |
|
"loss": 0.3525, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 0.00010499825607204574, |
|
"loss": 0.4296, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 0.0001046538070622453, |
|
"loss": 0.387, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 0.00010430930270839507, |
|
"loss": 0.4323, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 0.00010396474710741311, |
|
"loss": 0.4123, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 0.00010362014435682688, |
|
"loss": 0.39, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 0.00010327549855472455, |
|
"loss": 0.4337, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 0.00010293081379970637, |
|
"loss": 0.3811, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 0.00010258609419083567, |
|
"loss": 0.3828, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 0.00010224134382759041, |
|
"loss": 0.4279, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 0.00010189656680981415, |
|
"loss": 0.4081, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 0.00010155176723766757, |
|
"loss": 0.3947, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 0.00010120694921157952, |
|
"loss": 0.3972, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 0.00010086211683219824, |
|
"loss": 0.4335, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 0.0001005172742003428, |
|
"loss": 0.4024, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 0.000100172425416954, |
|
"loss": 0.4127, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.9827574583046e-05, |
|
"loss": 0.3778, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.948272579965723e-05, |
|
"loss": 0.3849, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.913788316780177e-05, |
|
"loss": 0.3924, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.879305078842052e-05, |
|
"loss": 0.3981, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.844823276233243e-05, |
|
"loss": 0.4112, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.810343319018588e-05, |
|
"loss": 0.3772, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.775865617240964e-05, |
|
"loss": 0.4012, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.741390580916434e-05, |
|
"loss": 0.4257, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.706918620029364e-05, |
|
"loss": 0.4266, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.672450144527546e-05, |
|
"loss": 0.4075, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.637985564317316e-05, |
|
"loss": 0.3916, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.603525289258689e-05, |
|
"loss": 0.4133, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.569069729160494e-05, |
|
"loss": 0.415, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 9.534619293775471e-05, |
|
"loss": 0.4424, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 9.500174392795431e-05, |
|
"loss": 0.3833, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.465735435846348e-05, |
|
"loss": 0.4125, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.43130283248354e-05, |
|
"loss": 0.415, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.396876992186738e-05, |
|
"loss": 0.4123, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.362458324355257e-05, |
|
"loss": 0.4073, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.328047238303121e-05, |
|
"loss": 0.4492, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.293644143254177e-05, |
|
"loss": 0.3842, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.259249448337253e-05, |
|
"loss": 0.3957, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.224863562581265e-05, |
|
"loss": 0.4234, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.190486894910394e-05, |
|
"loss": 0.394, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.156119854139173e-05, |
|
"loss": 0.438, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.121762848967666e-05, |
|
"loss": 0.4308, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.087416287976577e-05, |
|
"loss": 0.3848, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.053080579622422e-05, |
|
"loss": 0.4004, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.018756132232647e-05, |
|
"loss": 0.4204, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 8.984443354000774e-05, |
|
"loss": 0.4189, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.950142652981563e-05, |
|
"loss": 0.3693, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.915854437086146e-05, |
|
"loss": 0.393, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.881579114077181e-05, |
|
"loss": 0.3968, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.847317091563994e-05, |
|
"loss": 0.3969, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.813068776997747e-05, |
|
"loss": 0.3808, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 8.778834577666584e-05, |
|
"loss": 0.376, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 8.744614900690777e-05, |
|
"loss": 0.4335, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 8.710410153017905e-05, |
|
"loss": 0.4409, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 8.676220741418005e-05, |
|
"loss": 0.3661, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 8.642047072478734e-05, |
|
"loss": 0.4364, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 8.607889552600524e-05, |
|
"loss": 0.4184, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 8.573748587991769e-05, |
|
"loss": 0.4287, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 8.539624584663986e-05, |
|
"loss": 0.3787, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 8.505517948426976e-05, |
|
"loss": 0.3736, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 8.471429084884012e-05, |
|
"loss": 0.3949, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 8.437358399427011e-05, |
|
"loss": 0.3967, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 8.403306297231714e-05, |
|
"loss": 0.3834, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 8.369273183252858e-05, |
|
"loss": 0.3839, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 8.335259462219374e-05, |
|
"loss": 0.3951, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 8.301265538629573e-05, |
|
"loss": 0.3814, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 8.267291816746319e-05, |
|
"loss": 0.4207, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 8.233338700592244e-05, |
|
"loss": 0.4081, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 8.199406593944916e-05, |
|
"loss": 0.4028, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 8.165495900332083e-05, |
|
"loss": 0.3982, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 8.131607023026814e-05, |
|
"loss": 0.3836, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 8.097740365042748e-05, |
|
"loss": 0.4145, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.063896329129292e-05, |
|
"loss": 0.3982, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.030075317766814e-05, |
|
"loss": 0.3979, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.996277733161881e-05, |
|
"loss": 0.414, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.962503977242449e-05, |
|
"loss": 0.3386, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.928754451653119e-05, |
|
"loss": 0.379, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.895029557750324e-05, |
|
"loss": 0.4198, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 7.861329696597576e-05, |
|
"loss": 0.3939, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 7.827655268960692e-05, |
|
"loss": 0.3767, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 7.794006675303031e-05, |
|
"loss": 0.3972, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 7.760384315780731e-05, |
|
"loss": 0.3748, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 7.726788590237933e-05, |
|
"loss": 0.3914, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 7.693219898202067e-05, |
|
"loss": 0.3768, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 7.659678638879048e-05, |
|
"loss": 0.3819, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 7.626165211148575e-05, |
|
"loss": 0.4186, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 7.592680013559354e-05, |
|
"loss": 0.3752, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.559223444324385e-05, |
|
"loss": 0.4123, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.525795901316207e-05, |
|
"loss": 0.408, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.492397782062169e-05, |
|
"loss": 0.3921, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.459029483739711e-05, |
|
"loss": 0.3909, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 7.425691403171639e-05, |
|
"loss": 0.3834, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 7.392383936821403e-05, |
|
"loss": 0.3637, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 7.359107480788372e-05, |
|
"loss": 0.4098, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 7.325862430803148e-05, |
|
"loss": 0.3947, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 7.292649182222837e-05, |
|
"loss": 0.4422, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 7.259468130026356e-05, |
|
"loss": 0.2145, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 7.22631966880974e-05, |
|
"loss": 0.226, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 7.193204192781443e-05, |
|
"loss": 0.2171, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 7.160122095757653e-05, |
|
"loss": 0.2361, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 7.127073771157606e-05, |
|
"loss": 0.2122, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 7.094059611998909e-05, |
|
"loss": 0.2158, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 7.061080010892873e-05, |
|
"loss": 0.2161, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 7.028135360039827e-05, |
|
"loss": 0.2232, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 6.995226051224475e-05, |
|
"loss": 0.2088, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 6.96235247581121e-05, |
|
"loss": 0.2123, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 6.9295150247395e-05, |
|
"loss": 0.2266, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 6.896714088519193e-05, |
|
"loss": 0.1801, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 6.863950057225904e-05, |
|
"loss": 0.2187, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 6.831223320496371e-05, |
|
"loss": 0.21, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 6.798534267523806e-05, |
|
"loss": 0.2233, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 6.765883287053287e-05, |
|
"loss": 0.2025, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 6.733270767377118e-05, |
|
"loss": 0.219, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 6.700697096330233e-05, |
|
"loss": 0.2202, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 6.668162661285552e-05, |
|
"loss": 0.1953, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 6.63566784914941e-05, |
|
"loss": 0.2078, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 6.603213046356918e-05, |
|
"loss": 0.1914, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 6.570798638867407e-05, |
|
"loss": 0.2202, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 6.53842501215981e-05, |
|
"loss": 0.2239, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 6.506092551228079e-05, |
|
"loss": 0.2178, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 6.473801640576633e-05, |
|
"loss": 0.2101, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 6.441552664215748e-05, |
|
"loss": 0.2105, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 6.409346005657018e-05, |
|
"loss": 0.201, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 6.377182047908776e-05, |
|
"loss": 0.2115, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 6.345061173471564e-05, |
|
"loss": 0.2137, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 6.312983764333555e-05, |
|
"loss": 0.2025, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 6.280950201966025e-05, |
|
"loss": 0.2072, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 6.248960867318816e-05, |
|
"loss": 0.2125, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 6.21701614081581e-05, |
|
"loss": 0.2134, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 6.185116402350397e-05, |
|
"loss": 0.1979, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 6.153262031280951e-05, |
|
"loss": 0.2166, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 6.121453406426341e-05, |
|
"loss": 0.1843, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 6.089690906061405e-05, |
|
"loss": 0.2279, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 6.057974907912457e-05, |
|
"loss": 0.2293, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 6.0263057891528e-05, |
|
"loss": 0.1968, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 5.994683926398241e-05, |
|
"loss": 0.2056, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 5.9631096957026065e-05, |
|
"loss": 0.2134, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 5.931583472553265e-05, |
|
"loss": 0.2124, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 5.900105631866676e-05, |
|
"loss": 0.1952, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 5.86867654798393e-05, |
|
"loss": 0.2294, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 5.8372965946662815e-05, |
|
"loss": 0.2078, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 5.805966145090709e-05, |
|
"loss": 0.2053, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 5.7746855718455085e-05, |
|
"loss": 0.1753, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 5.7434552469258085e-05, |
|
"loss": 0.1954, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 5.712275541729184e-05, |
|
"loss": 0.245, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 5.6811468270512266e-05, |
|
"loss": 0.2103, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 5.650069473081148e-05, |
|
"loss": 0.2288, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 5.619043849397358e-05, |
|
"loss": 0.1956, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 5.588070324963082e-05, |
|
"loss": 0.2034, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 5.557149268121965e-05, |
|
"loss": 0.2061, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 5.5262810465937045e-05, |
|
"loss": 0.2272, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 5.49546602746967e-05, |
|
"loss": 0.2099, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 5.464704577208526e-05, |
|
"loss": 0.2174, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 5.433997061631897e-05, |
|
"loss": 0.1958, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 5.4033438459199994e-05, |
|
"loss": 0.2127, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 5.3727452946072965e-05, |
|
"loss": 0.1877, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 5.3422017715781856e-05, |
|
"loss": 0.2127, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 5.311713640062653e-05, |
|
"loss": 0.2122, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 5.2812812626319495e-05, |
|
"loss": 0.1962, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 5.2509050011942885e-05, |
|
"loss": 0.1989, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 5.2205852169905434e-05, |
|
"loss": 0.2066, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 5.1903222705899553e-05, |
|
"loss": 0.216, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 5.160116521885827e-05, |
|
"loss": 0.1945, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 5.129968330091253e-05, |
|
"loss": 0.22, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 5.099878053734859e-05, |
|
"loss": 0.2085, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 5.069846050656525e-05, |
|
"loss": 0.2149, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 5.039872678003129e-05, |
|
"loss": 0.2147, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 5.0099582922243017e-05, |
|
"loss": 0.2025, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.980103249068203e-05, |
|
"loss": 0.209, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.950307903577259e-05, |
|
"loss": 0.2066, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.920572610083977e-05, |
|
"loss": 0.1999, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.8908977222066966e-05, |
|
"loss": 0.1888, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.861283592845416e-05, |
|
"loss": 0.203, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.831730574177568e-05, |
|
"loss": 0.1905, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.8022390176538555e-05, |
|
"loss": 0.1772, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.772809273994048e-05, |
|
"loss": 0.2002, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.7434416931828384e-05, |
|
"loss": 0.2037, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.714136624465654e-05, |
|
"loss": 0.2121, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.684894416344511e-05, |
|
"loss": 0.2193, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.655715416573898e-05, |
|
"loss": 0.2119, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.626599972156593e-05, |
|
"loss": 0.2147, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.5975484293395696e-05, |
|
"loss": 0.1987, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.568561133609865e-05, |
|
"loss": 0.23, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.5396384296904815e-05, |
|
"loss": 0.2032, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.510780661536288e-05, |
|
"loss": 0.1954, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.481988172329913e-05, |
|
"loss": 0.2175, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.453261304477672e-05, |
|
"loss": 0.1893, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.4246003996055083e-05, |
|
"loss": 0.2012, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.396005798554919e-05, |
|
"loss": 0.1918, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.367477841378889e-05, |
|
"loss": 0.1682, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.3390168673378775e-05, |
|
"loss": 0.2308, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.310623214895754e-05, |
|
"loss": 0.1955, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.282297221715785e-05, |
|
"loss": 0.1907, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.2540392246566285e-05, |
|
"loss": 0.216, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.225849559768316e-05, |
|
"loss": 0.214, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.197728562288251e-05, |
|
"loss": 0.1959, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.169676566637232e-05, |
|
"loss": 0.221, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.1416939064154783e-05, |
|
"loss": 0.2136, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.113780914398659e-05, |
|
"loss": 0.1989, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.0859379225339265e-05, |
|
"loss": 0.2084, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.0581652619359775e-05, |
|
"loss": 0.2125, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.030463262883121e-05, |
|
"loss": 0.2134, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.0028322548133436e-05, |
|
"loss": 0.1939, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.975272566320386e-05, |
|
"loss": 0.1979, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.9477845251498414e-05, |
|
"loss": 0.2245, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.920368458195268e-05, |
|
"loss": 0.2091, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.893024691494279e-05, |
|
"loss": 0.2384, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.86575355022469e-05, |
|
"loss": 0.1949, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.8385553587006266e-05, |
|
"loss": 0.1908, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.811430440368694e-05, |
|
"loss": 0.1898, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.7843791178041054e-05, |
|
"loss": 0.21, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.7574017127068653e-05, |
|
"loss": 0.2023, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.730498545897938e-05, |
|
"loss": 0.1823, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.703669937315423e-05, |
|
"loss": 0.2009, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.676916206010761e-05, |
|
"loss": 0.2006, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.65023767014493e-05, |
|
"loss": 0.2102, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.6236346469846895e-05, |
|
"loss": 0.2274, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.597107452898763e-05, |
|
"loss": 0.2016, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.570656403354107e-05, |
|
"loss": 0.1788, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.5442818129121494e-05, |
|
"loss": 0.203, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.5179839952250524e-05, |
|
"loss": 0.203, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.491763263031987e-05, |
|
"loss": 0.2054, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.465619928155397e-05, |
|
"loss": 0.2175, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.4395543014972985e-05, |
|
"loss": 0.1948, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.413566693035598e-05, |
|
"loss": 0.2305, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.387657411820388e-05, |
|
"loss": 0.208, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.3618267659702686e-05, |
|
"loss": 0.1987, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.336075062668707e-05, |
|
"loss": 0.2087, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.310402608160356e-05, |
|
"loss": 0.1867, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.284809707747426e-05, |
|
"loss": 0.193, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.2592966657860625e-05, |
|
"loss": 0.2181, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.2338637856827145e-05, |
|
"loss": 0.2251, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.208511369890523e-05, |
|
"loss": 0.2046, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.1832397199057326e-05, |
|
"loss": 0.1922, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.1580491362641085e-05, |
|
"loss": 0.2165, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.13293991853736e-05, |
|
"loss": 0.1802, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.107912365329566e-05, |
|
"loss": 0.1901, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.082966774273637e-05, |
|
"loss": 0.213, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.0581034420277776e-05, |
|
"loss": 0.2196, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.0333226642719548e-05, |
|
"loss": 0.2127, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.0086247357043705e-05, |
|
"loss": 0.1955, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 2.9840099500379702e-05, |
|
"loss": 0.193, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 2.9594785999969542e-05, |
|
"loss": 0.1934, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.9350309773132765e-05, |
|
"loss": 0.2202, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.9106673727232015e-05, |
|
"loss": 0.1906, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 2.8863880759638184e-05, |
|
"loss": 0.2021, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 2.8621933757696228e-05, |
|
"loss": 0.1927, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 2.8380835598690602e-05, |
|
"loss": 0.1975, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 2.8140589149811214e-05, |
|
"loss": 0.2174, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 2.790119726811925e-05, |
|
"loss": 0.1967, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 2.766266280051316e-05, |
|
"loss": 0.1885, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 2.7424988583694832e-05, |
|
"loss": 0.2141, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 2.718817744413594e-05, |
|
"loss": 0.2245, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 2.695223219804427e-05, |
|
"loss": 0.2199, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 2.671715565133015e-05, |
|
"loss": 0.2163, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 2.6482950599573163e-05, |
|
"loss": 0.1899, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 2.6249619827988915e-05, |
|
"loss": 0.1866, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 2.6017166111395884e-05, |
|
"loss": 0.1954, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 2.5785592214182476e-05, |
|
"loss": 0.2127, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 2.555490089027399e-05, |
|
"loss": 0.2049, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.5325094883100108e-05, |
|
"loss": 0.2079, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.509617692556201e-05, |
|
"loss": 0.2061, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.4868149740000113e-05, |
|
"loss": 0.2302, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.464101603816148e-05, |
|
"loss": 0.1819, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.4414778521167814e-05, |
|
"loss": 0.189, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.418943987948309e-05, |
|
"loss": 0.2262, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 2.3965002792881664e-05, |
|
"loss": 0.2094, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 2.3741469930416495e-05, |
|
"loss": 0.1811, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.351884395038727e-05, |
|
"loss": 0.1906, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.3297127500308845e-05, |
|
"loss": 0.196, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.3076323216879714e-05, |
|
"loss": 0.1987, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.2856433725950767e-05, |
|
"loss": 0.1889, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.2637461642493963e-05, |
|
"loss": 0.2074, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.2419409570571248e-05, |
|
"loss": 0.2005, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.2202280103303553e-05, |
|
"loss": 0.1966, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 2.198607582284009e-05, |
|
"loss": 0.2121, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 2.1770799300327538e-05, |
|
"loss": 0.2034, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.1556453095879424e-05, |
|
"loss": 0.2022, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.134303975854576e-05, |
|
"loss": 0.2034, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.113056182628277e-05, |
|
"loss": 0.1977, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.0919021825922536e-05, |
|
"loss": 0.1987, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.070842227314317e-05, |
|
"loss": 0.1859, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.049876567243869e-05, |
|
"loss": 0.1722, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.0290054517089408e-05, |
|
"loss": 0.2052, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.0082291289132106e-05, |
|
"loss": 0.1345, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.987547845933072e-05, |
|
"loss": 0.1436, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.9669618487146813e-05, |
|
"loss": 0.1271, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.9464713820710334e-05, |
|
"loss": 0.1301, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.926076689679054e-05, |
|
"loss": 0.1289, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.9057780140767044e-05, |
|
"loss": 0.1329, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.8855755966600962e-05, |
|
"loss": 0.1352, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.8654696776806125e-05, |
|
"loss": 0.1274, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.8454604962420595e-05, |
|
"loss": 0.1184, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.825548290297817e-05, |
|
"loss": 0.1267, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.805733296648018e-05, |
|
"loss": 0.1272, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.786015750936725e-05, |
|
"loss": 0.1241, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.766395887649124e-05, |
|
"loss": 0.124, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.7468739401087486e-05, |
|
"loss": 0.1202, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 1.727450140474689e-05, |
|
"loss": 0.1194, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.7081247197388516e-05, |
|
"loss": 0.1261, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.688897907723186e-05, |
|
"loss": 0.1311, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.669769933076981e-05, |
|
"loss": 0.1217, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.6507410232741206e-05, |
|
"loss": 0.1228, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.631811404610387e-05, |
|
"loss": 0.1211, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.612981302200782e-05, |
|
"loss": 0.1238, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.5942509399768336e-05, |
|
"loss": 0.1278, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.5756205406839364e-05, |
|
"loss": 0.1278, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.557090325878705e-05, |
|
"loss": 0.1291, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.5386605159263424e-05, |
|
"loss": 0.1329, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.520331329998017e-05, |
|
"loss": 0.128, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.50210298606825e-05, |
|
"loss": 0.129, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.4839757009123301e-05, |
|
"loss": 0.1235, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.465949690103735e-05, |
|
"loss": 0.1157, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.448025168011572e-05, |
|
"loss": 0.1201, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.430202347798012e-05, |
|
"loss": 0.1132, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.4124814414157706e-05, |
|
"loss": 0.1335, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.394862659605589e-05, |
|
"loss": 0.113, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.377346211893712e-05, |
|
"loss": 0.1235, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.3599323065894098e-05, |
|
"loss": 0.1154, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.3426211507825026e-05, |
|
"loss": 0.1252, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.325412950340882e-05, |
|
"loss": 0.1206, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.3083079099080764e-05, |
|
"loss": 0.1281, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.2913062329008164e-05, |
|
"loss": 0.1279, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.2744081215066128e-05, |
|
"loss": 0.1254, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.25761377668135e-05, |
|
"loss": 0.1273, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.240923398146897e-05, |
|
"loss": 0.1165, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.2243371843887375e-05, |
|
"loss": 0.1161, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.207855332653609e-05, |
|
"loss": 0.1232, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.1914780389471446e-05, |
|
"loss": 0.1292, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.1752054980315597e-05, |
|
"loss": 0.1286, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.159037903423319e-05, |
|
"loss": 0.1284, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.1429754473908538e-05, |
|
"loss": 0.1233, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.1270183209522611e-05, |
|
"loss": 0.1252, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.111166713873032e-05, |
|
"loss": 0.1158, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.0954208146638067e-05, |
|
"loss": 0.1246, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.0797808105781181e-05, |
|
"loss": 0.1287, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.0642468876101786e-05, |
|
"loss": 0.1275, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.0488192304926559e-05, |
|
"loss": 0.119, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.0334980226944879e-05, |
|
"loss": 0.1237, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.0182834464186874e-05, |
|
"loss": 0.1294, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.0031756826001914e-05, |
|
"loss": 0.1249, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 9.881749109036931e-06, |
|
"loss": 0.1317, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 9.732813097215176e-06, |
|
"loss": 0.1169, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 9.584950561714944e-06, |
|
"loss": 0.1273, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 9.438163260948495e-06, |
|
"loss": 0.1264, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 9.292452940541229e-06, |
|
"loss": 0.1166, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 9.147821333310858e-06, |
|
"loss": 0.1342, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 9.004270159246764e-06, |
|
"loss": 0.1256, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 8.86180112548961e-06, |
|
"loss": 0.125, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 8.720415926311053e-06, |
|
"loss": 0.1257, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 8.580116243093562e-06, |
|
"loss": 0.1273, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 8.440903744310357e-06, |
|
"loss": 0.1264, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 8.302780085505679e-06, |
|
"loss": 0.1157, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 8.165746909275084e-06, |
|
"loss": 0.1276, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 8.029805845245819e-06, |
|
"loss": 0.1327, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 7.894958510057527e-06, |
|
"loss": 0.1287, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 7.761206507343044e-06, |
|
"loss": 0.123, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 7.628551427709218e-06, |
|
"loss": 0.1272, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 7.496994848718075e-06, |
|
"loss": 0.1268, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 7.366538334868045e-06, |
|
"loss": 0.1303, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 7.23718343757539e-06, |
|
"loss": 0.1186, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 7.108931695155663e-06, |
|
"loss": 0.1284, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 6.981784632805466e-06, |
|
"loss": 0.1328, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 6.855743762584355e-06, |
|
"loss": 0.1284, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 6.730810583396807e-06, |
|
"loss": 0.1213, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 6.606986580974384e-06, |
|
"loss": 0.1164, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 6.4842732278580845e-06, |
|
"loss": 0.1223, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.362671983380886e-06, |
|
"loss": 0.1207, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 6.242184293650233e-06, |
|
"loss": 0.1259, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.122811591531064e-06, |
|
"loss": 0.1221, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.004555296628556e-06, |
|
"loss": 0.1256, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 5.887416815271407e-06, |
|
"loss": 0.1136, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 5.771397540494961e-06, |
|
"loss": 0.1111, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 5.656498852024828e-06, |
|
"loss": 0.1175, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 5.542722116260257e-06, |
|
"loss": 0.1187, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 5.430068686258094e-06, |
|
"loss": 0.111, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 5.3185399017165465e-06, |
|
"loss": 0.1276, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 5.208137088959308e-06, |
|
"loss": 0.1277, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 5.09886156091981e-06, |
|
"loss": 0.1189, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 4.990714617125536e-06, |
|
"loss": 0.1243, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 4.88369754368263e-06, |
|
"loss": 0.131, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 4.777811613260574e-06, |
|
"loss": 0.1235, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.673058085077053e-06, |
|
"loss": 0.1324, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.569438204883036e-06, |
|
"loss": 0.1274, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 4.466953204947843e-06, |
|
"loss": 0.1305, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 4.36560430404459e-06, |
|
"loss": 0.1341, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.265392707435678e-06, |
|
"loss": 0.1228, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.166319606858438e-06, |
|
"loss": 0.1261, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.0683861805109524e-06, |
|
"loss": 0.1248, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 3.97159359303807e-06, |
|
"loss": 0.1264, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 3.8759429955175564e-06, |
|
"loss": 0.1241, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 3.781435525446364e-06, |
|
"loss": 0.1189, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 3.688072306727164e-06, |
|
"loss": 0.1211, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.595854449654945e-06, |
|
"loss": 0.125, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.5047830509038037e-06, |
|
"loss": 0.1261, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 3.414859193513886e-06, |
|
"loss": 0.1318, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 3.3260839468786e-06, |
|
"loss": 0.1187, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.2384583667318114e-06, |
|
"loss": 0.1249, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.15198349513528e-06, |
|
"loss": 0.1298, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 3.0666603604663e-06, |
|
"loss": 0.13, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.9824899774055113e-06, |
|
"loss": 0.1189, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.899473346924775e-06, |
|
"loss": 0.1195, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.817611456275249e-06, |
|
"loss": 0.1191, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.736905278975721e-06, |
|
"loss": 0.1145, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.657355774800985e-06, |
|
"loss": 0.1215, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.5789638897704247e-06, |
|
"loss": 0.1224, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.501730556136783e-06, |
|
"loss": 0.1225, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.4256566923750555e-06, |
|
"loss": 0.1302, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.3507432031716016e-06, |
|
"loss": 0.1211, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.27699097941334e-06, |
|
"loss": 0.1404, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.2044008981771947e-06, |
|
"loss": 0.1164, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.1329738227196196e-06, |
|
"loss": 0.122, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.062710602466411e-06, |
|
"loss": 0.1225, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.993612073002493e-06, |
|
"loss": 0.1152, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.9256790560620798e-06, |
|
"loss": 0.116, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.8589123595188718e-06, |
|
"loss": 0.1332, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.7933127773764214e-06, |
|
"loss": 0.1254, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.728881089758727e-06, |
|
"loss": 0.1182, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.6656180629009088e-06, |
|
"loss": 0.1335, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.6035244491401702e-06, |
|
"loss": 0.119, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.5426009869067948e-06, |
|
"loss": 0.1227, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.4828484007153753e-06, |
|
"loss": 0.1295, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.4242674011561875e-06, |
|
"loss": 0.1073, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.3668586848867737e-06, |
|
"loss": 0.1244, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.3106229346236398e-06, |
|
"loss": 0.1261, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.255560819134105e-06, |
|
"loss": 0.1293, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.2016729932284198e-06, |
|
"loss": 0.122, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.1489600977518833e-06, |
|
"loss": 0.1316, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.0974227595773378e-06, |
|
"loss": 0.1292, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.0470615915975978e-06, |
|
"loss": 0.1167, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 9.97877192718255e-07, |
|
"loss": 0.1186, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 9.498701478504845e-07, |
|
"loss": 0.1239, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 9.030410279041279e-07, |
|
"loss": 0.1213, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 8.573903897809099e-07, |
|
"loss": 0.1263, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 8.129187763678103e-07, |
|
"loss": 0.1319, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 7.696267165305693e-07, |
|
"loss": 0.1263, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 7.275147251074366e-07, |
|
"loss": 0.12, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 6.865833029030543e-07, |
|
"loss": 0.1254, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 6.468329366824955e-07, |
|
"loss": 0.1478, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 6.082640991654343e-07, |
|
"loss": 0.1222, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 5.708772490205849e-07, |
|
"loss": 0.1241, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 5.346728308602056e-07, |
|
"loss": 0.1255, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 4.99651275234847e-07, |
|
"loss": 0.1216, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 4.6581299862817894e-07, |
|
"loss": 0.1223, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 4.33158403452083e-07, |
|
"loss": 0.1347, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 4.016878780418787e-07, |
|
"loss": 0.1196, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 3.71401796651627e-07, |
|
"loss": 0.1098, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 3.423005194497786e-07, |
|
"loss": 0.1406, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 3.1438439251481046e-07, |
|
"loss": 0.1219, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 2.876537478311847e-07, |
|
"loss": 0.1094, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.621089032853075e-07, |
|
"loss": 0.1261, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.377501626618428e-07, |
|
"loss": 0.1271, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.1457781564004909e-07, |
|
"loss": 0.1261, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.9259213779033725e-07, |
|
"loss": 0.1165, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.7179339057098453e-07, |
|
"loss": 0.1322, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.521818213250703e-07, |
|
"loss": 0.1276, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.337576632774784e-07, |
|
"loss": 0.1207, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.1652113553217714e-07, |
|
"loss": 0.1265, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.0047244306955473e-07, |
|
"loss": 0.1208, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 8.561177674403232e-08, |
|
"loss": 0.1207, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 7.193931328177695e-08, |
|
"loss": 0.1234, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 5.9455215278581e-08, |
|
"loss": 0.1291, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 4.815963119796374e-08, |
|
"loss": 0.125, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 3.805269536939493e-08, |
|
"loss": 0.1228, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.9134527986651728e-08, |
|
"loss": 0.144, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.1405235106453093e-08, |
|
"loss": 0.1341, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.4864908647194143e-08, |
|
"loss": 0.1113, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 9.513626387780417e-09, |
|
"loss": 0.124, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 5.351451966806309e-09, |
|
"loss": 0.1142, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 2.3784348817113e-09, |
|
"loss": 0.1143, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 5.946104882359471e-10, |
|
"loss": 0.1341, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.1388, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"step": 940, |
|
"total_flos": 8329575720960.0, |
|
"train_loss": 0.5075884050432038, |
|
"train_runtime": 2014.6169, |
|
"train_samples_per_second": 7.443, |
|
"train_steps_per_second": 0.467 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 940, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 50000, |
|
"total_flos": 8329575720960.0, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |