UCCIX-Llama2-13B-Instruct-191224/uccix_v2_instruct_191224_lr1e-4/checkpoint-624/trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.9920127795527156,
  "eval_steps": 500,
  "global_step": 624,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006389776357827476,
      "grad_norm": 2.055291493195234,
      "learning_rate": 3.125e-06,
      "loss": 1.695,
      "step": 1
    },
    {
      "epoch": 0.012779552715654952,
      "grad_norm": 2.0685233500522586,
      "learning_rate": 6.25e-06,
      "loss": 1.6748,
      "step": 2
    },
    {
      "epoch": 0.025559105431309903,
      "grad_norm": 2.325735299422439,
      "learning_rate": 1.25e-05,
      "loss": 1.6964,
      "step": 4
    },
    {
      "epoch": 0.038338658146964855,
      "grad_norm": 0.4729866673863026,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.4325,
      "step": 6
    },
    {
      "epoch": 0.051118210862619806,
      "grad_norm": 0.482620239981458,
      "learning_rate": 2.5e-05,
      "loss": 1.3874,
      "step": 8
    },
    {
      "epoch": 0.06389776357827476,
      "grad_norm": 1.6728433474079003,
      "learning_rate": 3.125e-05,
      "loss": 1.4689,
      "step": 10
    },
    {
      "epoch": 0.07667731629392971,
      "grad_norm": 0.3405987431283081,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.3127,
      "step": 12
    },
    {
      "epoch": 0.08945686900958466,
      "grad_norm": 0.2323496464888272,
      "learning_rate": 4.375e-05,
      "loss": 1.2639,
      "step": 14
    },
    {
      "epoch": 0.10223642172523961,
      "grad_norm": 0.18809974511784008,
      "learning_rate": 5e-05,
      "loss": 1.2401,
      "step": 16
    },
    {
      "epoch": 0.11501597444089456,
      "grad_norm": 0.18997340619225084,
      "learning_rate": 5.6250000000000005e-05,
      "loss": 1.2084,
      "step": 18
    },
    {
      "epoch": 0.12779552715654952,
      "grad_norm": 0.15504216343509883,
      "learning_rate": 6.25e-05,
      "loss": 1.1855,
      "step": 20
    },
    {
      "epoch": 0.14057507987220447,
      "grad_norm": 0.12848416587626313,
      "learning_rate": 6.875e-05,
      "loss": 1.146,
      "step": 22
    },
    {
      "epoch": 0.15335463258785942,
      "grad_norm": 0.09889252813730416,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.1357,
      "step": 24
    },
    {
      "epoch": 0.16613418530351437,
      "grad_norm": 0.09024188902019939,
      "learning_rate": 8.125000000000001e-05,
      "loss": 1.1096,
      "step": 26
    },
    {
      "epoch": 0.17891373801916932,
      "grad_norm": 0.08133676595279006,
      "learning_rate": 8.75e-05,
      "loss": 1.0913,
      "step": 28
    },
    {
      "epoch": 0.19169329073482427,
      "grad_norm": 0.0978463769637292,
      "learning_rate": 9.375e-05,
      "loss": 1.0679,
      "step": 30
    },
    {
      "epoch": 0.20447284345047922,
      "grad_norm": 0.07943889170723487,
      "learning_rate": 0.0001,
      "loss": 1.075,
      "step": 32
    },
    {
      "epoch": 0.21725239616613418,
      "grad_norm": 0.08240884428512509,
      "learning_rate": 9.99971838728789e-05,
      "loss": 1.075,
      "step": 34
    },
    {
      "epoch": 0.23003194888178913,
      "grad_norm": 0.08253986997481327,
      "learning_rate": 9.998873580873848e-05,
      "loss": 1.0652,
      "step": 36
    },
    {
      "epoch": 0.24281150159744408,
      "grad_norm": 0.07954648039103362,
      "learning_rate": 9.997465675921163e-05,
      "loss": 1.0519,
      "step": 38
    },
    {
      "epoch": 0.25559105431309903,
      "grad_norm": 0.0776223200815433,
      "learning_rate": 9.995494831023409e-05,
      "loss": 1.0094,
      "step": 40
    },
    {
      "epoch": 0.268370607028754,
      "grad_norm": 0.08000844411167178,
      "learning_rate": 9.992961268186573e-05,
      "loss": 1.0074,
      "step": 42
    },
    {
      "epoch": 0.28115015974440893,
      "grad_norm": 0.0689657212250583,
      "learning_rate": 9.989865272804063e-05,
      "loss": 1.0087,
      "step": 44
    },
    {
      "epoch": 0.2939297124600639,
      "grad_norm": 0.0722150479128947,
      "learning_rate": 9.986207193624536e-05,
      "loss": 1.0067,
      "step": 46
    },
    {
      "epoch": 0.30670926517571884,
      "grad_norm": 0.06646168454668608,
      "learning_rate": 9.981987442712633e-05,
      "loss": 0.9837,
      "step": 48
    },
    {
      "epoch": 0.3194888178913738,
      "grad_norm": 0.06815852582234988,
      "learning_rate": 9.977206495402554e-05,
      "loss": 1.0024,
      "step": 50
    },
    {
      "epoch": 0.33226837060702874,
      "grad_norm": 0.07469571057420442,
      "learning_rate": 9.971864890244513e-05,
      "loss": 0.9606,
      "step": 52
    },
    {
      "epoch": 0.3450479233226837,
      "grad_norm": 0.07160841663430713,
      "learning_rate": 9.965963228944078e-05,
      "loss": 0.9681,
      "step": 54
    },
    {
      "epoch": 0.35782747603833864,
      "grad_norm": 0.06954866095292117,
      "learning_rate": 9.959502176294383e-05,
      "loss": 0.951,
      "step": 56
    },
    {
      "epoch": 0.3706070287539936,
      "grad_norm": 0.06598684065212063,
      "learning_rate": 9.95248246010126e-05,
      "loss": 0.9501,
      "step": 58
    },
    {
      "epoch": 0.38338658146964855,
      "grad_norm": 0.12103302407814338,
      "learning_rate": 9.944904871101228e-05,
      "loss": 0.9713,
      "step": 60
    },
    {
      "epoch": 0.3961661341853035,
      "grad_norm": 0.07330981053456032,
      "learning_rate": 9.936770262872443e-05,
      "loss": 0.9283,
      "step": 62
    },
    {
      "epoch": 0.40894568690095845,
      "grad_norm": 0.06537535724415816,
      "learning_rate": 9.928079551738543e-05,
      "loss": 0.9118,
      "step": 64
    },
    {
      "epoch": 0.4217252396166134,
      "grad_norm": 0.07457609795137939,
      "learning_rate": 9.918833716665419e-05,
      "loss": 0.9279,
      "step": 66
    },
    {
      "epoch": 0.43450479233226835,
      "grad_norm": 0.07491122165043795,
      "learning_rate": 9.909033799150946e-05,
      "loss": 0.935,
      "step": 68
    },
    {
      "epoch": 0.4472843450479233,
      "grad_norm": 0.06781283989008571,
      "learning_rate": 9.898680903107666e-05,
      "loss": 0.9361,
      "step": 70
    },
    {
      "epoch": 0.46006389776357826,
      "grad_norm": 0.07160916695151898,
      "learning_rate": 9.887776194738432e-05,
      "loss": 0.9159,
      "step": 72
    },
    {
      "epoch": 0.4728434504792332,
      "grad_norm": 0.0681941013678725,
      "learning_rate": 9.876320902405042e-05,
      "loss": 0.8779,
      "step": 74
    },
    {
      "epoch": 0.48562300319488816,
      "grad_norm": 0.07482319269062407,
      "learning_rate": 9.864316316489873e-05,
      "loss": 0.8825,
      "step": 76
    },
    {
      "epoch": 0.4984025559105431,
      "grad_norm": 0.08697975313543096,
      "learning_rate": 9.851763789250525e-05,
      "loss": 0.922,
      "step": 78
    },
    {
      "epoch": 0.5111821086261981,
      "grad_norm": 0.09978612068745818,
      "learning_rate": 9.838664734667495e-05,
      "loss": 0.8894,
      "step": 80
    },
    {
      "epoch": 0.5239616613418531,
      "grad_norm": 0.09384667638421956,
      "learning_rate": 9.825020628284896e-05,
      "loss": 0.8593,
      "step": 82
    },
    {
      "epoch": 0.536741214057508,
      "grad_norm": 0.06932081799385038,
      "learning_rate": 9.810833007044247e-05,
      "loss": 0.8662,
      "step": 84
    },
    {
      "epoch": 0.549520766773163,
      "grad_norm": 0.10358699944795004,
      "learning_rate": 9.796103469111351e-05,
      "loss": 0.8723,
      "step": 86
    },
    {
      "epoch": 0.5623003194888179,
      "grad_norm": 0.07169243369499742,
      "learning_rate": 9.780833673696254e-05,
      "loss": 0.8482,
      "step": 88
    },
    {
      "epoch": 0.5750798722044729,
      "grad_norm": 0.1050406308556227,
      "learning_rate": 9.76502534086636e-05,
      "loss": 0.8496,
      "step": 90
    },
    {
      "epoch": 0.5878594249201278,
      "grad_norm": 0.07201905690967678,
      "learning_rate": 9.74868025135266e-05,
      "loss": 0.8291,
      "step": 92
    },
    {
      "epoch": 0.6006389776357828,
      "grad_norm": 1.2625349021090781,
      "learning_rate": 9.731800246349148e-05,
      "loss": 0.8503,
      "step": 94
    },
    {
      "epoch": 0.6134185303514377,
      "grad_norm": 0.17981258022070712,
      "learning_rate": 9.714387227305422e-05,
      "loss": 0.8231,
      "step": 96
    },
    {
      "epoch": 0.6261980830670927,
      "grad_norm": 0.07561478832740967,
      "learning_rate": 9.696443155712486e-05,
      "loss": 0.8119,
      "step": 98
    },
    {
      "epoch": 0.6389776357827476,
      "grad_norm": 0.08195686915168865,
      "learning_rate": 9.67797005288181e-05,
      "loss": 0.7926,
      "step": 100
    },
    {
      "epoch": 0.6517571884984026,
      "grad_norm": 0.0890476280007116,
      "learning_rate": 9.65896999971763e-05,
      "loss": 0.8039,
      "step": 102
    },
    {
      "epoch": 0.6645367412140575,
      "grad_norm": 0.07738578891457887,
      "learning_rate": 9.639445136482548e-05,
      "loss": 0.7721,
      "step": 104
    },
    {
      "epoch": 0.6773162939297125,
      "grad_norm": 0.0743037172920425,
      "learning_rate": 9.619397662556435e-05,
      "loss": 0.794,
      "step": 106
    },
    {
      "epoch": 0.6900958466453674,
      "grad_norm": 0.08803835897602165,
      "learning_rate": 9.598829836188694e-05,
      "loss": 0.7721,
      "step": 108
    },
    {
      "epoch": 0.7028753993610224,
      "grad_norm": 0.07702819696223887,
      "learning_rate": 9.577743974243874e-05,
      "loss": 0.7765,
      "step": 110
    },
    {
      "epoch": 0.7156549520766773,
      "grad_norm": 0.07473535070111323,
      "learning_rate": 9.55614245194068e-05,
      "loss": 0.7598,
      "step": 112
    },
    {
      "epoch": 0.7284345047923323,
      "grad_norm": 0.08433756541496004,
      "learning_rate": 9.534027702584425e-05,
      "loss": 0.7727,
      "step": 114
    },
    {
      "epoch": 0.7412140575079872,
      "grad_norm": 0.07483257658817612,
      "learning_rate": 9.511402217292926e-05,
      "loss": 0.7465,
      "step": 116
    },
    {
      "epoch": 0.7539936102236422,
      "grad_norm": 0.0880318685591304,
      "learning_rate": 9.488268544715896e-05,
      "loss": 0.7321,
      "step": 118
    },
    {
      "epoch": 0.7667731629392971,
      "grad_norm": 0.07719604899450865,
      "learning_rate": 9.464629290747842e-05,
      "loss": 0.7624,
      "step": 120
    },
    {
      "epoch": 0.7795527156549521,
      "grad_norm": 0.0733176421376437,
      "learning_rate": 9.440487118234535e-05,
      "loss": 0.6975,
      "step": 122
    },
    {
      "epoch": 0.792332268370607,
      "grad_norm": 0.07051701385784455,
      "learning_rate": 9.415844746673047e-05,
      "loss": 0.7127,
      "step": 124
    },
    {
      "epoch": 0.805111821086262,
      "grad_norm": 0.0729787562181977,
      "learning_rate": 9.390704951905411e-05,
      "loss": 0.7503,
      "step": 126
    },
    {
      "epoch": 0.8178913738019169,
      "grad_norm": 0.07128874732953779,
      "learning_rate": 9.365070565805941e-05,
      "loss": 0.6941,
      "step": 128
    },
    {
      "epoch": 0.8306709265175719,
      "grad_norm": 0.07804844381711577,
      "learning_rate": 9.338944475962237e-05,
      "loss": 0.7197,
      "step": 130
    },
    {
      "epoch": 0.8434504792332268,
      "grad_norm": 0.08207580744924538,
      "learning_rate": 9.312329625349902e-05,
      "loss": 0.7134,
      "step": 132
    },
    {
      "epoch": 0.8562300319488818,
      "grad_norm": 0.10268159904999394,
      "learning_rate": 9.285229012001047e-05,
      "loss": 0.705,
      "step": 134
    },
    {
      "epoch": 0.8690095846645367,
      "grad_norm": 0.07097527094154266,
      "learning_rate": 9.257645688666556e-05,
      "loss": 0.7036,
      "step": 136
    },
    {
      "epoch": 0.8817891373801917,
      "grad_norm": 0.07284443178958877,
      "learning_rate": 9.22958276247223e-05,
      "loss": 0.7313,
      "step": 138
    },
    {
      "epoch": 0.8945686900958466,
      "grad_norm": 0.07294697279525543,
      "learning_rate": 9.201043394568773e-05,
      "loss": 0.6847,
      "step": 140
    },
    {
      "epoch": 0.9073482428115016,
      "grad_norm": 0.0725032039002937,
      "learning_rate": 9.172030799775699e-05,
      "loss": 0.6877,
      "step": 142
    },
    {
      "epoch": 0.9201277955271565,
      "grad_norm": 0.06708836437156662,
      "learning_rate": 9.142548246219212e-05,
      "loss": 0.6837,
      "step": 144
    },
    {
      "epoch": 0.9329073482428115,
      "grad_norm": 0.07361178534656698,
      "learning_rate": 9.112599054964057e-05,
      "loss": 0.6522,
      "step": 146
    },
    {
      "epoch": 0.9456869009584664,
      "grad_norm": 0.06961060997060975,
      "learning_rate": 9.082186599639428e-05,
      "loss": 0.6732,
      "step": 148
    },
    {
      "epoch": 0.9584664536741214,
      "grad_norm": 0.06369267112915664,
      "learning_rate": 9.051314306058933e-05,
      "loss": 0.6615,
      "step": 150
    },
    {
      "epoch": 0.9712460063897763,
      "grad_norm": 0.06667729772792583,
      "learning_rate": 9.019985651834703e-05,
      "loss": 0.6742,
      "step": 152
    },
    {
      "epoch": 0.9840255591054313,
      "grad_norm": 0.07052786453330319,
      "learning_rate": 8.988204165985649e-05,
      "loss": 0.6365,
      "step": 154
    },
    {
      "epoch": 0.9968051118210862,
      "grad_norm": 0.06352217971127558,
      "learning_rate": 8.955973428539944e-05,
      "loss": 0.6531,
      "step": 156
    },
    {
      "epoch": 1.011182108626198,
      "grad_norm": 0.0907023898699884,
      "learning_rate": 8.923297070131737e-05,
      "loss": 0.6986,
      "step": 158
    },
    {
      "epoch": 1.023961661341853,
      "grad_norm": 0.06588723514264389,
      "learning_rate": 8.890178771592199e-05,
      "loss": 0.4221,
      "step": 160
    },
    {
      "epoch": 1.036741214057508,
      "grad_norm": 0.07457104912562523,
      "learning_rate": 8.856622263534875e-05,
      "loss": 0.4375,
      "step": 162
    },
    {
      "epoch": 1.049520766773163,
      "grad_norm": 0.08716030746078077,
      "learning_rate": 8.822631325935463e-05,
      "loss": 0.4633,
      "step": 164
    },
    {
      "epoch": 1.0623003194888179,
      "grad_norm": 0.07564657660605784,
      "learning_rate": 8.788209787706015e-05,
      "loss": 0.4149,
      "step": 166
    },
    {
      "epoch": 1.0750798722044728,
      "grad_norm": 0.2601478494309565,
      "learning_rate": 8.753361526263621e-05,
      "loss": 0.4644,
      "step": 168
    },
    {
      "epoch": 1.0878594249201279,
      "grad_norm": 0.07236244361689621,
      "learning_rate": 8.718090467093654e-05,
      "loss": 0.445,
      "step": 170
    },
    {
      "epoch": 1.1006389776357828,
      "grad_norm": 0.07360308087849284,
      "learning_rate": 8.682400583307562e-05,
      "loss": 0.4189,
      "step": 172
    },
    {
      "epoch": 1.1134185303514377,
      "grad_norm": 0.06934965586236702,
      "learning_rate": 8.646295895195333e-05,
      "loss": 0.4168,
      "step": 174
    },
    {
      "epoch": 1.1261980830670926,
      "grad_norm": 0.06652725595095291,
      "learning_rate": 8.609780469772623e-05,
      "loss": 0.4332,
      "step": 176
    },
    {
      "epoch": 1.1389776357827477,
      "grad_norm": 0.06493423808775205,
      "learning_rate": 8.572858420322627e-05,
      "loss": 0.4126,
      "step": 178
    },
    {
      "epoch": 1.1517571884984026,
      "grad_norm": 0.07224306242862681,
      "learning_rate": 8.535533905932738e-05,
      "loss": 0.4639,
      "step": 180
    },
    {
      "epoch": 1.1645367412140575,
      "grad_norm": 0.06325420247080109,
      "learning_rate": 8.497811131026046e-05,
      "loss": 0.4097,
      "step": 182
    },
    {
      "epoch": 1.1773162939297124,
      "grad_norm": 0.05960690196531746,
      "learning_rate": 8.459694344887732e-05,
      "loss": 0.4258,
      "step": 184
    },
    {
      "epoch": 1.1900958466453675,
      "grad_norm": 0.06526403248406679,
      "learning_rate": 8.421187841186402e-05,
      "loss": 0.4453,
      "step": 186
    },
    {
      "epoch": 1.2028753993610224,
      "grad_norm": 0.06754636177295095,
      "learning_rate": 8.382295957490436e-05,
      "loss": 0.4277,
      "step": 188
    },
    {
      "epoch": 1.2156549520766773,
      "grad_norm": 0.11883404710840821,
      "learning_rate": 8.343023074779368e-05,
      "loss": 0.4386,
      "step": 190
    },
    {
      "epoch": 1.2284345047923322,
      "grad_norm": 0.07793571463351197,
      "learning_rate": 8.303373616950408e-05,
      "loss": 0.4072,
      "step": 192
    },
    {
      "epoch": 1.2412140575079873,
      "grad_norm": 0.06518657342856102,
      "learning_rate": 8.263352050320094e-05,
      "loss": 0.4396,
      "step": 194
    },
    {
      "epoch": 1.2539936102236422,
      "grad_norm": 0.05974282037032855,
      "learning_rate": 8.222962883121196e-05,
      "loss": 0.4016,
      "step": 196
    },
    {
      "epoch": 1.266773162939297,
      "grad_norm": 0.0693639502217822,
      "learning_rate": 8.182210664994878e-05,
      "loss": 0.3808,
      "step": 198
    },
    {
      "epoch": 1.279552715654952,
      "grad_norm": 0.06127831754623801,
      "learning_rate": 8.141099986478212e-05,
      "loss": 0.3961,
      "step": 200
    },
    {
      "epoch": 1.292332268370607,
      "grad_norm": 0.06755312065722066,
      "learning_rate": 8.099635478487064e-05,
      "loss": 0.3894,
      "step": 202
    },
    {
      "epoch": 1.305111821086262,
      "grad_norm": 0.0584212869146413,
      "learning_rate": 8.057821811794458e-05,
      "loss": 0.414,
      "step": 204
    },
    {
      "epoch": 1.317891373801917,
      "grad_norm": 0.05983512956529008,
      "learning_rate": 8.015663696504422e-05,
      "loss": 0.3634,
      "step": 206
    },
    {
      "epoch": 1.330670926517572,
      "grad_norm": 0.05778218969166584,
      "learning_rate": 7.973165881521434e-05,
      "loss": 0.4233,
      "step": 208
    },
    {
      "epoch": 1.343450479233227,
      "grad_norm": 0.058310021079803646,
      "learning_rate": 7.930333154015466e-05,
      "loss": 0.4061,
      "step": 210
    },
    {
      "epoch": 1.3562300319488818,
      "grad_norm": 0.0642143238679532,
      "learning_rate": 7.88717033888274e-05,
      "loss": 0.4083,
      "step": 212
    },
    {
      "epoch": 1.3690095846645367,
      "grad_norm": 0.05656381877721736,
      "learning_rate": 7.843682298202235e-05,
      "loss": 0.4033,
      "step": 214
    },
    {
      "epoch": 1.3817891373801916,
      "grad_norm": 0.05518190162844295,
      "learning_rate": 7.799873930687978e-05,
      "loss": 0.3953,
      "step": 216
    },
    {
      "epoch": 1.3945686900958467,
      "grad_norm": 0.05903661851778338,
      "learning_rate": 7.755750171137246e-05,
      "loss": 0.4096,
      "step": 218
    },
    {
      "epoch": 1.4073482428115016,
      "grad_norm": 0.05833074145436464,
      "learning_rate": 7.711315989874677e-05,
      "loss": 0.4151,
      "step": 220
    },
    {
      "epoch": 1.4201277955271565,
      "grad_norm": 0.05919878363690307,
      "learning_rate": 7.666576392192389e-05,
      "loss": 0.39,
      "step": 222
    },
    {
      "epoch": 1.4329073482428116,
      "grad_norm": 0.05913664327254173,
      "learning_rate": 7.621536417786159e-05,
      "loss": 0.4005,
      "step": 224
    },
    {
      "epoch": 1.4456869009584665,
      "grad_norm": 0.0640842931075253,
      "learning_rate": 7.576201140187727e-05,
      "loss": 0.4165,
      "step": 226
    },
    {
      "epoch": 1.4584664536741214,
      "grad_norm": 0.062131879810909965,
      "learning_rate": 7.530575666193283e-05,
      "loss": 0.3891,
      "step": 228
    },
    {
      "epoch": 1.4712460063897763,
      "grad_norm": 0.06992276137309804,
      "learning_rate": 7.484665135288213e-05,
      "loss": 0.3971,
      "step": 230
    },
    {
      "epoch": 1.4840255591054312,
      "grad_norm": 0.06078790664861669,
      "learning_rate": 7.438474719068173e-05,
      "loss": 0.3961,
      "step": 232
    },
    {
      "epoch": 1.4968051118210863,
      "grad_norm": 0.06922648734675908,
      "learning_rate": 7.392009620656513e-05,
      "loss": 0.4331,
      "step": 234
    },
    {
      "epoch": 1.5095846645367412,
      "grad_norm": 0.05766139832102871,
      "learning_rate": 7.345275074118185e-05,
      "loss": 0.4182,
      "step": 236
    },
    {
      "epoch": 1.5223642172523961,
      "grad_norm": 0.06292873231888371,
      "learning_rate": 7.298276343870151e-05,
      "loss": 0.4061,
      "step": 238
    },
    {
      "epoch": 1.5351437699680512,
      "grad_norm": 0.06000860844713537,
      "learning_rate": 7.251018724088367e-05,
      "loss": 0.4023,
      "step": 240
    },
    {
      "epoch": 1.547923322683706,
      "grad_norm": 0.0585777107714916,
      "learning_rate": 7.203507538111423e-05,
      "loss": 0.3855,
      "step": 242
    },
    {
      "epoch": 1.560702875399361,
      "grad_norm": 0.0571671995255021,
      "learning_rate": 7.155748137840892e-05,
      "loss": 0.3951,
      "step": 244
    },
    {
      "epoch": 1.573482428115016,
      "grad_norm": 0.053447175708899994,
      "learning_rate": 7.107745903138472e-05,
      "loss": 0.3745,
      "step": 246
    },
    {
      "epoch": 1.5862619808306708,
      "grad_norm": 0.055736902711725635,
      "learning_rate": 7.059506241219965e-05,
      "loss": 0.3911,
      "step": 248
    },
    {
      "epoch": 1.599041533546326,
      "grad_norm": 0.05715355824554817,
      "learning_rate": 7.011034586046176e-05,
      "loss": 0.4043,
      "step": 250
    },
    {
      "epoch": 1.6118210862619808,
      "grad_norm": 0.06030447320081754,
      "learning_rate": 6.962336397710819e-05,
      "loss": 0.3899,
      "step": 252
    },
    {
      "epoch": 1.6246006389776357,
      "grad_norm": 0.061239135474291606,
      "learning_rate": 6.91341716182545e-05,
      "loss": 0.4246,
      "step": 254
    },
    {
      "epoch": 1.6373801916932909,
      "grad_norm": 0.05695235071864785,
      "learning_rate": 6.864282388901544e-05,
      "loss": 0.3953,
      "step": 256
    },
    {
      "epoch": 1.6501597444089455,
      "grad_norm": 0.05308868251491366,
      "learning_rate": 6.814937613729766e-05,
      "loss": 0.4103,
      "step": 258
    },
    {
      "epoch": 1.6629392971246006,
      "grad_norm": 0.054046791633493914,
      "learning_rate": 6.765388394756504e-05,
      "loss": 0.4059,
      "step": 260
    },
    {
      "epoch": 1.6757188498402555,
      "grad_norm": 0.05148697040730548,
      "learning_rate": 6.715640313457733e-05,
      "loss": 0.3767,
      "step": 262
    },
    {
      "epoch": 1.6884984025559104,
      "grad_norm": 0.05318569591896447,
      "learning_rate": 6.665698973710288e-05,
      "loss": 0.3708,
      "step": 264
    },
    {
      "epoch": 1.7012779552715656,
      "grad_norm": 0.05196719070381999,
      "learning_rate": 6.615570001160626e-05,
      "loss": 0.4042,
      "step": 266
    },
    {
      "epoch": 1.7140575079872205,
      "grad_norm": 0.05632881769869459,
      "learning_rate": 6.565259042591113e-05,
      "loss": 0.3987,
      "step": 268
    },
    {
      "epoch": 1.7268370607028753,
      "grad_norm": 0.05470059818193366,
      "learning_rate": 6.514771765283942e-05,
      "loss": 0.3973,
      "step": 270
    },
    {
      "epoch": 1.7396166134185305,
      "grad_norm": 0.056351811449582394,
      "learning_rate": 6.464113856382752e-05,
      "loss": 0.3864,
      "step": 272
    },
    {
      "epoch": 1.7523961661341851,
      "grad_norm": 0.05831258279981057,
      "learning_rate": 6.413291022251989e-05,
      "loss": 0.4041,
      "step": 274
    },
    {
      "epoch": 1.7651757188498403,
      "grad_norm": 0.053467450310740065,
      "learning_rate": 6.362308987834115e-05,
      "loss": 0.3814,
      "step": 276
    },
    {
      "epoch": 1.7779552715654952,
      "grad_norm": 0.051287152623381335,
      "learning_rate": 6.311173496004723e-05,
      "loss": 0.395,
      "step": 278
    },
    {
      "epoch": 1.79073482428115,
      "grad_norm": 0.05429714498773308,
      "learning_rate": 6.259890306925627e-05,
      "loss": 0.3821,
      "step": 280
    },
    {
      "epoch": 1.8035143769968052,
      "grad_norm": 0.057523653580626326,
      "learning_rate": 6.208465197396013e-05,
      "loss": 0.3984,
      "step": 282
    },
    {
      "epoch": 1.81629392971246,
      "grad_norm": 0.05724842136937287,
      "learning_rate": 6.156903960201709e-05,
      "loss": 0.4181,
      "step": 284
    },
    {
      "epoch": 1.829073482428115,
      "grad_norm": 0.052227309043480996,
      "learning_rate": 6.105212403462651e-05,
      "loss": 0.4049,
      "step": 286
    },
    {
      "epoch": 1.84185303514377,
      "grad_norm": 0.04967908325326877,
      "learning_rate": 6.0533963499786314e-05,
      "loss": 0.4117,
      "step": 288
    },
    {
      "epoch": 1.854632587859425,
      "grad_norm": 0.05539898234566285,
      "learning_rate": 6.001461636573397e-05,
      "loss": 0.4006,
      "step": 290
    },
    {
      "epoch": 1.8674121405750799,
      "grad_norm": 0.05795414669880149,
      "learning_rate": 5.949414113437142e-05,
      "loss": 0.386,
      "step": 292
    },
    {
      "epoch": 1.880191693290735,
      "grad_norm": 0.050446841270231885,
      "learning_rate": 5.897259643467527e-05,
      "loss": 0.3842,
      "step": 294
    },
    {
      "epoch": 1.8929712460063897,
      "grad_norm": 0.052453051506198604,
      "learning_rate": 5.8450041016092464e-05,
      "loss": 0.3525,
      "step": 296
    },
    {
      "epoch": 1.9057507987220448,
      "grad_norm": 0.052803823491155276,
      "learning_rate": 5.792653374192245e-05,
      "loss": 0.3963,
      "step": 298
    },
    {
      "epoch": 1.9185303514376997,
      "grad_norm": 0.05180901601155745,
      "learning_rate": 5.7402133582686576e-05,
      "loss": 0.3798,
      "step": 300
    },
    {
      "epoch": 1.9313099041533546,
      "grad_norm": 0.05166645429890597,
      "learning_rate": 5.6876899609485256e-05,
      "loss": 0.3838,
      "step": 302
    },
    {
      "epoch": 1.9440894568690097,
      "grad_norm": 0.05306354741968808,
      "learning_rate": 5.6350890987343944e-05,
      "loss": 0.4165,
      "step": 304
    },
    {
      "epoch": 1.9568690095846646,
      "grad_norm": 0.0860975722690725,
      "learning_rate": 5.582416696854853e-05,
      "loss": 0.3737,
      "step": 306
    },
    {
      "epoch": 1.9696485623003195,
      "grad_norm": 0.05323286133666828,
      "learning_rate": 5.5296786885970805e-05,
      "loss": 0.3889,
      "step": 308
    },
    {
      "epoch": 1.9824281150159746,
      "grad_norm": 0.05299665331057226,
      "learning_rate": 5.476881014638491e-05,
      "loss": 0.3896,
      "step": 310
    },
    {
      "epoch": 1.9952076677316293,
      "grad_norm": 0.05157945275339266,
      "learning_rate": 5.4240296223775465e-05,
      "loss": 0.3637,
      "step": 312
    },
    {
      "epoch": 2.009584664536741,
      "grad_norm": 0.09139947660133817,
      "learning_rate": 5.3711304652638126e-05,
      "loss": 0.3775,
      "step": 314
    },
    {
      "epoch": 2.022364217252396,
      "grad_norm": 0.10130414532724454,
      "learning_rate": 5.318189502127332e-05,
      "loss": 0.2112,
      "step": 316
    },
    {
      "epoch": 2.0351437699680512,
      "grad_norm": 0.0633333619180165,
      "learning_rate": 5.265212696507387e-05,
      "loss": 0.2004,
      "step": 318
    },
    {
      "epoch": 2.047923322683706,
      "grad_norm": 0.0668276114954086,
      "learning_rate": 5.212206015980742e-05,
      "loss": 0.2019,
      "step": 320
    },
    {
      "epoch": 2.060702875399361,
      "grad_norm": 0.05942503367303514,
      "learning_rate": 5.159175431489424e-05,
      "loss": 0.1978,
      "step": 322
    },
    {
      "epoch": 2.073482428115016,
      "grad_norm": 0.07284145764738766,
      "learning_rate": 5.1061269166681183e-05,
      "loss": 0.1935,
      "step": 324
    },
    {
      "epoch": 2.086261980830671,
      "grad_norm": 0.052260140697323494,
      "learning_rate": 5.053066447171282e-05,
      "loss": 0.1854,
      "step": 326
    },
    {
      "epoch": 2.099041533546326,
      "grad_norm": 0.05754923159453662,
      "learning_rate": 5e-05,
      "loss": 0.1965,
      "step": 328
    },
    {
      "epoch": 2.1118210862619806,
      "grad_norm": 0.05500397186780569,
      "learning_rate": 4.94693355282872e-05,
      "loss": 0.1827,
      "step": 330
    },
    {
      "epoch": 2.1246006389776357,
      "grad_norm": 0.061606661346763424,
      "learning_rate": 4.893873083331882e-05,
      "loss": 0.2008,
      "step": 332
    },
    {
      "epoch": 2.137380191693291,
      "grad_norm": 0.05678242709297541,
      "learning_rate": 4.840824568510579e-05,
      "loss": 0.1853,
      "step": 334
    },
    {
      "epoch": 2.1501597444089455,
      "grad_norm": 0.054080318070508115,
      "learning_rate": 4.78779398401926e-05,
      "loss": 0.1952,
      "step": 336
    },
    {
      "epoch": 2.1629392971246006,
      "grad_norm": 0.057204881343756786,
      "learning_rate": 4.734787303492615e-05,
      "loss": 0.1778,
      "step": 338
    },
    {
      "epoch": 2.1757188498402558,
      "grad_norm": 0.6941487667655994,
      "learning_rate": 4.6818104978726685e-05,
      "loss": 0.219,
      "step": 340
    },
    {
      "epoch": 2.1884984025559104,
      "grad_norm": 0.06999590590614403,
      "learning_rate": 4.628869534736187e-05,
      "loss": 0.181,
      "step": 342
    },
    {
      "epoch": 2.2012779552715656,
      "grad_norm": 0.07558854262088241,
      "learning_rate": 4.575970377622456e-05,
      "loss": 0.2349,
      "step": 344
    },
    {
      "epoch": 2.2140575079872207,
      "grad_norm": 0.07120027160683609,
      "learning_rate": 4.52311898536151e-05,
      "loss": 0.1993,
      "step": 346
    },
    {
      "epoch": 2.2268370607028753,
      "grad_norm": 0.05697032990090494,
      "learning_rate": 4.47032131140292e-05,
      "loss": 0.1739,
      "step": 348
    },
    {
      "epoch": 2.2396166134185305,
      "grad_norm": 0.06092977319118132,
      "learning_rate": 4.4175833031451473e-05,
      "loss": 0.188,
      "step": 350
    },
    {
      "epoch": 2.252396166134185,
      "grad_norm": 0.05900721095602371,
      "learning_rate": 4.364910901265606e-05,
      "loss": 0.1778,
      "step": 352
    },
    {
      "epoch": 2.2651757188498403,
      "grad_norm": 0.08992850669862418,
      "learning_rate": 4.3123100390514756e-05,
      "loss": 0.1838,
      "step": 354
    },
    {
      "epoch": 2.2779552715654954,
      "grad_norm": 0.059213794143429914,
      "learning_rate": 4.2597866417313436e-05,
      "loss": 0.1902,
      "step": 356
    },
    {
      "epoch": 2.29073482428115,
      "grad_norm": 0.051525349318871976,
      "learning_rate": 4.207346625807756e-05,
      "loss": 0.1784,
      "step": 358
    },
    {
      "epoch": 2.303514376996805,
      "grad_norm": 0.055922862481655594,
      "learning_rate": 4.1549958983907555e-05,
      "loss": 0.1827,
      "step": 360
    },
    {
      "epoch": 2.31629392971246,
      "grad_norm": 0.054189632126131766,
      "learning_rate": 4.102740356532473e-05,
      "loss": 0.186,
      "step": 362
    },
    {
      "epoch": 2.329073482428115,
      "grad_norm": 0.06298745746452741,
      "learning_rate": 4.050585886562858e-05,
      "loss": 0.1854,
      "step": 364
    },
    {
      "epoch": 2.34185303514377,
      "grad_norm": 0.06476475169367538,
      "learning_rate": 3.998538363426605e-05,
      "loss": 0.1794,
      "step": 366
    },
    {
      "epoch": 2.3546325878594248,
      "grad_norm": 0.05187178001518817,
      "learning_rate": 3.94660365002137e-05,
      "loss": 0.1817,
      "step": 368
    },
    {
      "epoch": 2.36741214057508,
      "grad_norm": 0.05110076217610542,
      "learning_rate": 3.894787596537352e-05,
      "loss": 0.1757,
      "step": 370
    },
    {
      "epoch": 2.380191693290735,
      "grad_norm": 0.061027606854849537,
      "learning_rate": 3.843096039798293e-05,
      "loss": 0.1888,
      "step": 372
    },
    {
      "epoch": 2.3929712460063897,
      "grad_norm": 0.05689282057128392,
      "learning_rate": 3.791534802603988e-05,
      "loss": 0.1972,
      "step": 374
    },
    {
      "epoch": 2.405750798722045,
      "grad_norm": 0.05144327012401281,
      "learning_rate": 3.740109693074375e-05,
      "loss": 0.1975,
      "step": 376
    },
    {
      "epoch": 2.4185303514377,
      "grad_norm": 0.07243681779987425,
      "learning_rate": 3.68882650399528e-05,
      "loss": 0.1865,
      "step": 378
    },
    {
      "epoch": 2.4313099041533546,
      "grad_norm": 0.11601839655528177,
      "learning_rate": 3.637691012165886e-05,
      "loss": 0.1977,
      "step": 380
    },
    {
      "epoch": 2.4440894568690097,
      "grad_norm": 0.05323975029748036,
      "learning_rate": 3.586708977748012e-05,
      "loss": 0.1873,
      "step": 382
    },
    {
      "epoch": 2.4568690095846644,
      "grad_norm": 0.0499469664737551,
      "learning_rate": 3.5358861436172485e-05,
      "loss": 0.1832,
      "step": 384
    },
    {
      "epoch": 2.4696485623003195,
      "grad_norm": 0.05043024991533826,
      "learning_rate": 3.485228234716058e-05,
      "loss": 0.1821,
      "step": 386
    },
    {
      "epoch": 2.4824281150159746,
      "grad_norm": 0.054685112352780986,
      "learning_rate": 3.434740957408889e-05,
      "loss": 0.1816,
      "step": 388
    },
    {
      "epoch": 2.4952076677316293,
      "grad_norm": 0.057237969167094144,
      "learning_rate": 3.3844299988393755e-05,
      "loss": 0.1909,
      "step": 390
    },
    {
      "epoch": 2.5079872204472844,
      "grad_norm": 0.05134273506646416,
      "learning_rate": 3.334301026289712e-05,
      "loss": 0.1782,
      "step": 392
    },
    {
      "epoch": 2.520766773162939,
      "grad_norm": 0.049993934417102925,
      "learning_rate": 3.284359686542269e-05,
      "loss": 0.1928,
      "step": 394
    },
    {
      "epoch": 2.533546325878594,
      "grad_norm": 0.06457823051474779,
      "learning_rate": 3.234611605243496e-05,
      "loss": 0.196,
      "step": 396
    },
    {
      "epoch": 2.5463258785942493,
      "grad_norm": 0.051805062617152425,
      "learning_rate": 3.1850623862702344e-05,
      "loss": 0.1881,
      "step": 398
    },
    {
      "epoch": 2.559105431309904,
      "grad_norm": 0.049188541484928724,
      "learning_rate": 3.135717611098458e-05,
      "loss": 0.1806,
      "step": 400
    },
    {
      "epoch": 2.571884984025559,
      "grad_norm": 0.05687592017078177,
      "learning_rate": 3.086582838174551e-05,
      "loss": 0.1784,
      "step": 402
    },
    {
      "epoch": 2.584664536741214,
      "grad_norm": 0.05098573657706369,
      "learning_rate": 3.0376636022891812e-05,
      "loss": 0.1932,
      "step": 404
    },
    {
      "epoch": 2.597444089456869,
      "grad_norm": 0.052376381772842893,
      "learning_rate": 2.9889654139538246e-05,
      "loss": 0.1889,
      "step": 406
    },
    {
      "epoch": 2.610223642172524,
      "grad_norm": 0.05031660077056393,
      "learning_rate": 2.9404937587800375e-05,
      "loss": 0.1769,
      "step": 408
    },
    {
      "epoch": 2.623003194888179,
      "grad_norm": 0.04930354808056054,
      "learning_rate": 2.8922540968615286e-05,
      "loss": 0.1685,
      "step": 410
    },
    {
      "epoch": 2.635782747603834,
      "grad_norm": 0.06709139465230578,
      "learning_rate": 2.8442518621591086e-05,
      "loss": 0.1785,
      "step": 412
    },
    {
      "epoch": 2.648562300319489,
      "grad_norm": 0.0503489735828908,
      "learning_rate": 2.7964924618885778e-05,
      "loss": 0.1689,
      "step": 414
    },
    {
      "epoch": 2.661341853035144,
      "grad_norm": 0.05047783892143097,
      "learning_rate": 2.748981275911633e-05,
      "loss": 0.1808,
      "step": 416
    },
    {
      "epoch": 2.6741214057507987,
      "grad_norm": 0.04955419672921838,
      "learning_rate": 2.701723656129851e-05,
      "loss": 0.1727,
      "step": 418
    },
    {
      "epoch": 2.686900958466454,
      "grad_norm": 0.04769759775271665,
      "learning_rate": 2.6547249258818164e-05,
      "loss": 0.1708,
      "step": 420
    },
    {
      "epoch": 2.6996805111821085,
      "grad_norm": 0.050324696099913684,
      "learning_rate": 2.607990379343489e-05,
      "loss": 0.1817,
      "step": 422
    },
    {
      "epoch": 2.7124600638977636,
      "grad_norm": 0.05249496210993974,
      "learning_rate": 2.5615252809318284e-05,
      "loss": 0.1836,
      "step": 424
    },
    {
      "epoch": 2.7252396166134183,
      "grad_norm": 0.0472378188955872,
      "learning_rate": 2.5153348647117857e-05,
      "loss": 0.1736,
      "step": 426
    },
    {
      "epoch": 2.7380191693290734,
      "grad_norm": 0.049243154928981264,
      "learning_rate": 2.469424333806718e-05,
      "loss": 0.1675,
      "step": 428
    },
    {
      "epoch": 2.7507987220447285,
      "grad_norm": 0.05096273109137321,
      "learning_rate": 2.4237988598122752e-05,
      "loss": 0.1658,
      "step": 430
    },
    {
      "epoch": 2.763578274760383,
      "grad_norm": 0.0514806212844811,
      "learning_rate": 2.3784635822138424e-05,
      "loss": 0.1922,
      "step": 432
    },
    {
      "epoch": 2.7763578274760383,
      "grad_norm": 0.05006269553229606,
      "learning_rate": 2.333423607807613e-05,
      "loss": 0.1887,
      "step": 434
    },
    {
      "epoch": 2.7891373801916934,
      "grad_norm": 0.04935551516167026,
      "learning_rate": 2.288684010125325e-05,
      "loss": 0.1763,
      "step": 436
    },
    {
      "epoch": 2.801916932907348,
      "grad_norm": 0.05353903496894845,
      "learning_rate": 2.2442498288627556e-05,
      "loss": 0.1944,
      "step": 438
    },
    {
      "epoch": 2.8146964856230032,
      "grad_norm": 0.04697149845887787,
      "learning_rate": 2.2001260693120233e-05,
      "loss": 0.1672,
      "step": 440
    },
    {
      "epoch": 2.8274760383386583,
      "grad_norm": 0.054384654770629585,
      "learning_rate": 2.156317701797766e-05,
      "loss": 0.1807,
      "step": 442
    },
    {
      "epoch": 2.840255591054313,
      "grad_norm": 0.04684823569442938,
      "learning_rate": 2.1128296611172593e-05,
      "loss": 0.171,
      "step": 444
    },
    {
      "epoch": 2.853035143769968,
      "grad_norm": 0.0498371244165766,
      "learning_rate": 2.0696668459845355e-05,
      "loss": 0.1827,
      "step": 446
    },
    {
      "epoch": 2.8658146964856233,
      "grad_norm": 0.04969475724913098,
      "learning_rate": 2.026834118478567e-05,
      "loss": 0.1749,
      "step": 448
    },
    {
      "epoch": 2.878594249201278,
      "grad_norm": 0.051902756416916496,
      "learning_rate": 1.98433630349558e-05,
      "loss": 0.1891,
      "step": 450
    },
    {
      "epoch": 2.891373801916933,
      "grad_norm": 0.05102564026340021,
      "learning_rate": 1.9421781882055444e-05,
      "loss": 0.1849,
      "step": 452
    },
    {
      "epoch": 2.9041533546325877,
      "grad_norm": 0.05200929870376942,
      "learning_rate": 1.9003645215129355e-05,
      "loss": 0.1891,
      "step": 454
    },
    {
      "epoch": 2.916932907348243,
      "grad_norm": 0.05083154953396676,
      "learning_rate": 1.858900013521788e-05,
      "loss": 0.179,
      "step": 456
    },
    {
      "epoch": 2.9297124600638975,
      "grad_norm": 0.049127219472404525,
      "learning_rate": 1.817789335005121e-05,
      "loss": 0.17,
      "step": 458
    },
    {
      "epoch": 2.9424920127795526,
      "grad_norm": 0.049677004679461886,
      "learning_rate": 1.777037116878804e-05,
      "loss": 0.1831,
      "step": 460
    },
    {
      "epoch": 2.9552715654952078,
      "grad_norm": 0.054496479788018075,
      "learning_rate": 1.7366479496799077e-05,
      "loss": 0.1843,
      "step": 462
    },
    {
      "epoch": 2.9680511182108624,
      "grad_norm": 0.04820092295738451,
      "learning_rate": 1.6966263830495936e-05,
      "loss": 0.1685,
      "step": 464
    },
    {
      "epoch": 2.9808306709265175,
      "grad_norm": 0.04915420841884947,
      "learning_rate": 1.656976925220633e-05,
      "loss": 0.1875,
      "step": 466
    },
    {
      "epoch": 2.9936102236421727,
      "grad_norm": 0.07661474807504913,
      "learning_rate": 1.6177040425095662e-05,
      "loss": 0.1891,
      "step": 468
    },
    {
      "epoch": 3.0079872204472844,
      "grad_norm": 0.07655695803476695,
      "learning_rate": 1.5788121588135975e-05,
      "loss": 0.1837,
      "step": 470
    },
    {
      "epoch": 3.0207667731629395,
      "grad_norm": 0.060916330302725,
      "learning_rate": 1.5403056551122697e-05,
      "loss": 0.0872,
      "step": 472
    },
    {
      "epoch": 3.033546325878594,
      "grad_norm": 0.052542395235648506,
      "learning_rate": 1.5021888689739549e-05,
      "loss": 0.0778,
      "step": 474
    },
    {
      "epoch": 3.0463258785942493,
      "grad_norm": 0.20368087770560855,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 0.102,
      "step": 476
    },
    {
      "epoch": 3.059105431309904,
      "grad_norm": 0.10396707161226072,
      "learning_rate": 1.427141579677374e-05,
      "loss": 0.083,
      "step": 478
    },
    {
      "epoch": 3.071884984025559,
      "grad_norm": 0.04599720220665865,
      "learning_rate": 1.3902195302273779e-05,
      "loss": 0.0757,
      "step": 480
    },
    {
      "epoch": 3.084664536741214,
      "grad_norm": 0.056109340867354925,
      "learning_rate": 1.3537041048046695e-05,
      "loss": 0.081,
      "step": 482
    },
    {
      "epoch": 3.097444089456869,
      "grad_norm": 0.048015102375770044,
      "learning_rate": 1.3175994166924394e-05,
      "loss": 0.0802,
      "step": 484
    },
    {
      "epoch": 3.110223642172524,
      "grad_norm": 0.04645228076024571,
      "learning_rate": 1.2819095329063469e-05,
      "loss": 0.0787,
      "step": 486
    },
    {
      "epoch": 3.123003194888179,
      "grad_norm": 0.04637085498651796,
      "learning_rate": 1.246638473736378e-05,
      "loss": 0.0839,
      "step": 488
    },
    {
      "epoch": 3.135782747603834,
      "grad_norm": 0.05039074009256794,
      "learning_rate": 1.2117902122939861e-05,
      "loss": 0.0812,
      "step": 490
    },
    {
      "epoch": 3.148562300319489,
      "grad_norm": 0.05079569512274489,
      "learning_rate": 1.1773686740645384e-05,
      "loss": 0.0797,
      "step": 492
    },
    {
      "epoch": 3.1613418530351436,
      "grad_norm": 0.04286375870307716,
      "learning_rate": 1.1433777364651271e-05,
      "loss": 0.0737,
      "step": 494
    },
    {
      "epoch": 3.1741214057507987,
      "grad_norm": 0.03982951021947898,
      "learning_rate": 1.1098212284078036e-05,
      "loss": 0.0722,
      "step": 496
    },
    {
      "epoch": 3.186900958466454,
      "grad_norm": 0.0446624849328897,
      "learning_rate": 1.076702929868264e-05,
      "loss": 0.079,
      "step": 498
    },
    {
      "epoch": 3.1996805111821085,
      "grad_norm": 0.04376807908723891,
      "learning_rate": 1.0440265714600572e-05,
      "loss": 0.0837,
      "step": 500
    },
    {
      "epoch": 3.2124600638977636,
      "grad_norm": 0.04087367539850916,
      "learning_rate": 1.0117958340143507e-05,
      "loss": 0.076,
      "step": 502
    },
    {
      "epoch": 3.2252396166134187,
      "grad_norm": 0.04066584417219993,
      "learning_rate": 9.800143481652979e-06,
      "loss": 0.0701,
      "step": 504
    },
    {
      "epoch": 3.2380191693290734,
      "grad_norm": 0.08215263649470263,
      "learning_rate": 9.48685693941067e-06,
      "loss": 0.0776,
      "step": 506
    },
    {
      "epoch": 3.2507987220447285,
      "grad_norm": 0.0437601284673361,
      "learning_rate": 9.17813400360572e-06,
      "loss": 0.0764,
      "step": 508
    },
    {
      "epoch": 3.263578274760383,
      "grad_norm": 0.04382435518426366,
      "learning_rate": 8.874009450359427e-06,
      "loss": 0.0826,
      "step": 510
    },
    {
      "epoch": 3.2763578274760383,
      "grad_norm": 0.04095610913441161,
      "learning_rate": 8.574517537807897e-06,
      "loss": 0.0753,
      "step": 512
    },
    {
      "epoch": 3.2891373801916934,
      "grad_norm": 0.040525949300480126,
      "learning_rate": 8.279692002243027e-06,
      "loss": 0.0764,
      "step": 514
    },
    {
      "epoch": 3.301916932907348,
      "grad_norm": 0.043675586209021296,
      "learning_rate": 7.989566054312287e-06,
      "loss": 0.0817,
      "step": 516
    },
    {
      "epoch": 3.3146964856230032,
      "grad_norm": 0.04319420448553361,
      "learning_rate": 7.704172375277691e-06,
      "loss": 0.0759,
      "step": 518
    },
    {
      "epoch": 3.3274760383386583,
      "grad_norm": 0.044446852802239034,
      "learning_rate": 7.423543113334436e-06,
      "loss": 0.0813,
      "step": 520
    },
    {
      "epoch": 3.340255591054313,
      "grad_norm": 0.09121973616663154,
      "learning_rate": 7.14770987998954e-06,
      "loss": 0.0838,
      "step": 522
    },
    {
      "epoch": 3.353035143769968,
      "grad_norm": 0.05879997473879583,
      "learning_rate": 6.876703746500984e-06,
      "loss": 0.0738,
      "step": 524
    },
    {
      "epoch": 3.365814696485623,
      "grad_norm": 0.04667273388126841,
      "learning_rate": 6.610555240377652e-06,
      "loss": 0.0787,
      "step": 526
    },
    {
      "epoch": 3.378594249201278,
      "grad_norm": 0.042105033545020404,
      "learning_rate": 6.349294341940593e-06,
      "loss": 0.0801,
      "step": 528
    },
    {
      "epoch": 3.391373801916933,
      "grad_norm": 0.0407975465413022,
      "learning_rate": 6.092950480945897e-06,
      "loss": 0.0735,
      "step": 530
    },
    {
      "epoch": 3.4041533546325877,
      "grad_norm": 0.04234912863253251,
      "learning_rate": 5.841552533269534e-06,
      "loss": 0.0772,
      "step": 532
    },
    {
      "epoch": 3.416932907348243,
      "grad_norm": 0.04032120711392374,
      "learning_rate": 5.595128817654638e-06,
      "loss": 0.0749,
      "step": 534
    },
    {
      "epoch": 3.4297124600638975,
      "grad_norm": 0.041050930036482094,
      "learning_rate": 5.353707092521582e-06,
      "loss": 0.0769,
      "step": 536
    },
    {
      "epoch": 3.4424920127795526,
      "grad_norm": 0.043382176933190755,
      "learning_rate": 5.117314552841052e-06,
      "loss": 0.0767,
      "step": 538
    },
    {
      "epoch": 3.4552715654952078,
      "grad_norm": 0.039240502138117625,
      "learning_rate": 4.885977827070748e-06,
      "loss": 0.0721,
      "step": 540
    },
    {
      "epoch": 3.4680511182108624,
      "grad_norm": 0.040812347040587296,
      "learning_rate": 4.659722974155767e-06,
      "loss": 0.1114,
      "step": 542
    },
    {
      "epoch": 3.4808306709265175,
      "grad_norm": 0.0423787622918925,
      "learning_rate": 4.43857548059321e-06,
      "loss": 0.0778,
      "step": 544
    },
    {
      "epoch": 3.4936102236421727,
      "grad_norm": 0.042228923687598445,
      "learning_rate": 4.2225602575612755e-06,
      "loss": 0.0814,
      "step": 546
    },
    {
      "epoch": 3.5063897763578273,
      "grad_norm": 0.0407267289339222,
      "learning_rate": 4.011701638113063e-06,
      "loss": 0.0782,
      "step": 548
    },
    {
      "epoch": 3.5191693290734825,
      "grad_norm": 0.0389855165359938,
      "learning_rate": 3.8060233744356633e-06,
      "loss": 0.0789,
      "step": 550
    },
    {
      "epoch": 3.5319488817891376,
      "grad_norm": 0.040904703617676376,
      "learning_rate": 3.605548635174533e-06,
      "loss": 0.078,
      "step": 552
    },
    {
      "epoch": 3.5447284345047922,
      "grad_norm": 0.04093280012624571,
      "learning_rate": 3.410300002823691e-06,
      "loss": 0.0777,
      "step": 554
    },
    {
      "epoch": 3.5575079872204474,
      "grad_norm": 0.042904856841507744,
      "learning_rate": 3.220299471181898e-06,
      "loss": 0.0757,
      "step": 556
    },
    {
      "epoch": 3.5702875399361025,
      "grad_norm": 0.0436449067886704,
      "learning_rate": 3.035568442875136e-06,
      "loss": 0.0798,
      "step": 558
    },
    {
      "epoch": 3.583067092651757,
      "grad_norm": 0.035664776931118955,
      "learning_rate": 2.85612772694579e-06,
      "loss": 0.0632,
      "step": 560
    },
    {
      "epoch": 3.5958466453674123,
      "grad_norm": 0.03847526723825484,
      "learning_rate": 2.6819975365085237e-06,
      "loss": 0.0744,
      "step": 562
    },
    {
      "epoch": 3.608626198083067,
      "grad_norm": 0.039939236612970476,
      "learning_rate": 2.5131974864734066e-06,
      "loss": 0.0794,
      "step": 564
    },
    {
      "epoch": 3.621405750798722,
      "grad_norm": 0.040388305870748,
      "learning_rate": 2.349746591336405e-06,
      "loss": 0.0718,
      "step": 566
    },
    {
      "epoch": 3.6341853035143767,
      "grad_norm": 0.04232813426430434,
      "learning_rate": 2.191663263037458e-06,
      "loss": 0.0769,
      "step": 568
    },
    {
      "epoch": 3.646964856230032,
      "grad_norm": 0.04213845492527589,
      "learning_rate": 2.0389653088865036e-06,
      "loss": 0.0728,
      "step": 570
    },
    {
      "epoch": 3.659744408945687,
      "grad_norm": 0.04098999517730541,
      "learning_rate": 1.8916699295575324e-06,
      "loss": 0.0724,
      "step": 572
    },
    {
      "epoch": 3.6725239616613417,
      "grad_norm": 0.037533240934183365,
      "learning_rate": 1.7497937171510547e-06,
      "loss": 0.0709,
      "step": 574
    },
    {
      "epoch": 3.6853035143769968,
      "grad_norm": 0.039040304607963414,
      "learning_rate": 1.6133526533250565e-06,
      "loss": 0.0756,
      "step": 576
    },
    {
      "epoch": 3.698083067092652,
      "grad_norm": 0.04065729024121047,
      "learning_rate": 1.4823621074947503e-06,
      "loss": 0.0774,
      "step": 578
    },
    {
      "epoch": 3.7108626198083066,
      "grad_norm": 0.04252602887603373,
      "learning_rate": 1.3568368351012717e-06,
      "loss": 0.0824,
      "step": 580
    },
    {
      "epoch": 3.7236421725239617,
      "grad_norm": 0.04343672134882273,
      "learning_rate": 1.236790975949592e-06,
      "loss": 0.074,
      "step": 582
    },
    {
      "epoch": 3.736421725239617,
      "grad_norm": 0.0403766223584342,
      "learning_rate": 1.1222380526156928e-06,
      "loss": 0.0755,
      "step": 584
    },
    {
      "epoch": 3.7492012779552715,
      "grad_norm": 0.04234625541762105,
      "learning_rate": 1.0131909689233442e-06,
      "loss": 0.0814,
      "step": 586
    },
    {
      "epoch": 3.7619808306709266,
      "grad_norm": 0.03861507912847567,
      "learning_rate": 9.096620084905472e-07,
      "loss": 0.0664,
      "step": 588
    },
    {
      "epoch": 3.7747603833865817,
      "grad_norm": 0.041892733843973705,
      "learning_rate": 8.11662833345822e-07,
      "loss": 0.0832,
      "step": 590
    },
    {
      "epoch": 3.7875399361022364,
      "grad_norm": 0.05413889236863839,
      "learning_rate": 7.192044826145771e-07,
      "loss": 0.0921,
      "step": 592
    },
    {
      "epoch": 3.8003194888178915,
      "grad_norm": 0.04010280150213322,
      "learning_rate": 6.322973712755697e-07,
      "loss": 0.0752,
      "step": 594
    },
    {
      "epoch": 3.813099041533546,
      "grad_norm": 0.04321423418162425,
      "learning_rate": 5.509512889877333e-07,
      "loss": 0.0781,
      "step": 596
    },
    {
      "epoch": 3.8258785942492013,
      "grad_norm": 0.04049679761598481,
      "learning_rate": 4.7517539898741524e-07,
      "loss": 0.0694,
      "step": 598
    },
    {
      "epoch": 3.838658146964856,
      "grad_norm": 0.04258434666712487,
      "learning_rate": 4.049782370561583e-07,
      "loss": 0.0756,
      "step": 600
    },
    {
      "epoch": 3.851437699680511,
      "grad_norm": 0.03927978960342531,
      "learning_rate": 3.4036771055923066e-07,
      "loss": 0.075,
      "step": 602
    },
    {
      "epoch": 3.864217252396166,
      "grad_norm": 0.04093422273122725,
      "learning_rate": 2.813510975548772e-07,
      "loss": 0.0793,
      "step": 604
    },
    {
      "epoch": 3.876996805111821,
      "grad_norm": 0.0433141394014271,
      "learning_rate": 2.2793504597447002e-07,
      "loss": 0.0796,
      "step": 606
    },
    {
      "epoch": 3.889776357827476,
      "grad_norm": 0.04198937065288365,
      "learning_rate": 1.8012557287367392e-07,
      "loss": 0.0753,
      "step": 608
    },
    {
      "epoch": 3.902555910543131,
      "grad_norm": 0.043002763720086865,
      "learning_rate": 1.379280637546443e-07,
      "loss": 0.0917,
      "step": 610
    },
    {
      "epoch": 3.915335463258786,
      "grad_norm": 0.042376326172823574,
      "learning_rate": 1.0134727195937333e-07,
      "loss": 0.0747,
      "step": 612
    },
    {
      "epoch": 3.928115015974441,
      "grad_norm": 0.040600294489722695,
      "learning_rate": 7.038731813426291e-08,
      "loss": 0.0714,
      "step": 614
    },
    {
      "epoch": 3.940894568690096,
      "grad_norm": 0.0395711578920217,
      "learning_rate": 4.5051689765929214e-08,
      "loss": 0.0801,
      "step": 616
    },
    {
      "epoch": 3.9536741214057507,
      "grad_norm": 0.03943811103107873,
      "learning_rate": 2.534324078837802e-08,
      "loss": 0.074,
      "step": 618
    },
    {
      "epoch": 3.966453674121406,
      "grad_norm": 0.041905340316324986,
      "learning_rate": 1.1264191261528557e-08,
      "loss": 0.0771,
      "step": 620
    },
    {
      "epoch": 3.979233226837061,
      "grad_norm": 0.06686218682304546,
      "learning_rate": 2.8161271211024633e-09,
      "loss": 0.0811,
      "step": 622
    },
    {
      "epoch": 3.9920127795527156,
      "grad_norm": 0.03876030687059135,
      "learning_rate": 0.0,
      "loss": 0.076,
      "step": 624
    }
  ],
  "logging_steps": 2,
  "max_steps": 624,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.5262683933881926e+19,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}