UCCIX-Llama2-13B-Instruct-191224 / uccix_v2_instruct_191224_lr1e-4 / checkpoint-312 / trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9952076677316293,
  "eval_steps": 500,
  "global_step": 312,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006389776357827476,
      "grad_norm": 2.055291493195234,
      "learning_rate": 3.125e-06,
      "loss": 1.695,
      "step": 1
    },
    {
      "epoch": 0.012779552715654952,
      "grad_norm": 2.0685233500522586,
      "learning_rate": 6.25e-06,
      "loss": 1.6748,
      "step": 2
    },
    {
      "epoch": 0.025559105431309903,
      "grad_norm": 2.325735299422439,
      "learning_rate": 1.25e-05,
      "loss": 1.6964,
      "step": 4
    },
    {
      "epoch": 0.038338658146964855,
      "grad_norm": 0.4729866673863026,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.4325,
      "step": 6
    },
    {
      "epoch": 0.051118210862619806,
      "grad_norm": 0.482620239981458,
      "learning_rate": 2.5e-05,
      "loss": 1.3874,
      "step": 8
    },
    {
      "epoch": 0.06389776357827476,
      "grad_norm": 1.6728433474079003,
      "learning_rate": 3.125e-05,
      "loss": 1.4689,
      "step": 10
    },
    {
      "epoch": 0.07667731629392971,
      "grad_norm": 0.3405987431283081,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.3127,
      "step": 12
    },
    {
      "epoch": 0.08945686900958466,
      "grad_norm": 0.2323496464888272,
      "learning_rate": 4.375e-05,
      "loss": 1.2639,
      "step": 14
    },
    {
      "epoch": 0.10223642172523961,
      "grad_norm": 0.18809974511784008,
      "learning_rate": 5e-05,
      "loss": 1.2401,
      "step": 16
    },
    {
      "epoch": 0.11501597444089456,
      "grad_norm": 0.18997340619225084,
      "learning_rate": 5.6250000000000005e-05,
      "loss": 1.2084,
      "step": 18
    },
    {
      "epoch": 0.12779552715654952,
      "grad_norm": 0.15504216343509883,
      "learning_rate": 6.25e-05,
      "loss": 1.1855,
      "step": 20
    },
    {
      "epoch": 0.14057507987220447,
      "grad_norm": 0.12848416587626313,
      "learning_rate": 6.875e-05,
      "loss": 1.146,
      "step": 22
    },
    {
      "epoch": 0.15335463258785942,
      "grad_norm": 0.09889252813730416,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.1357,
      "step": 24
    },
    {
      "epoch": 0.16613418530351437,
      "grad_norm": 0.09024188902019939,
      "learning_rate": 8.125000000000001e-05,
      "loss": 1.1096,
      "step": 26
    },
    {
      "epoch": 0.17891373801916932,
      "grad_norm": 0.08133676595279006,
      "learning_rate": 8.75e-05,
      "loss": 1.0913,
      "step": 28
    },
    {
      "epoch": 0.19169329073482427,
      "grad_norm": 0.0978463769637292,
      "learning_rate": 9.375e-05,
      "loss": 1.0679,
      "step": 30
    },
    {
      "epoch": 0.20447284345047922,
      "grad_norm": 0.07943889170723487,
      "learning_rate": 0.0001,
      "loss": 1.075,
      "step": 32
    },
    {
      "epoch": 0.21725239616613418,
      "grad_norm": 0.08240884428512509,
      "learning_rate": 9.99971838728789e-05,
      "loss": 1.075,
      "step": 34
    },
    {
      "epoch": 0.23003194888178913,
      "grad_norm": 0.08253986997481327,
      "learning_rate": 9.998873580873848e-05,
      "loss": 1.0652,
      "step": 36
    },
    {
      "epoch": 0.24281150159744408,
      "grad_norm": 0.07954648039103362,
      "learning_rate": 9.997465675921163e-05,
      "loss": 1.0519,
      "step": 38
    },
    {
      "epoch": 0.25559105431309903,
      "grad_norm": 0.0776223200815433,
      "learning_rate": 9.995494831023409e-05,
      "loss": 1.0094,
      "step": 40
    },
    {
      "epoch": 0.268370607028754,
      "grad_norm": 0.08000844411167178,
      "learning_rate": 9.992961268186573e-05,
      "loss": 1.0074,
      "step": 42
    },
    {
      "epoch": 0.28115015974440893,
      "grad_norm": 0.0689657212250583,
      "learning_rate": 9.989865272804063e-05,
      "loss": 1.0087,
      "step": 44
    },
    {
      "epoch": 0.2939297124600639,
      "grad_norm": 0.0722150479128947,
      "learning_rate": 9.986207193624536e-05,
      "loss": 1.0067,
      "step": 46
    },
    {
      "epoch": 0.30670926517571884,
      "grad_norm": 0.06646168454668608,
      "learning_rate": 9.981987442712633e-05,
      "loss": 0.9837,
      "step": 48
    },
    {
      "epoch": 0.3194888178913738,
      "grad_norm": 0.06815852582234988,
      "learning_rate": 9.977206495402554e-05,
      "loss": 1.0024,
      "step": 50
    },
    {
      "epoch": 0.33226837060702874,
      "grad_norm": 0.07469571057420442,
      "learning_rate": 9.971864890244513e-05,
      "loss": 0.9606,
      "step": 52
    },
    {
      "epoch": 0.3450479233226837,
      "grad_norm": 0.07160841663430713,
      "learning_rate": 9.965963228944078e-05,
      "loss": 0.9681,
      "step": 54
    },
    {
      "epoch": 0.35782747603833864,
      "grad_norm": 0.06954866095292117,
      "learning_rate": 9.959502176294383e-05,
      "loss": 0.951,
      "step": 56
    },
    {
      "epoch": 0.3706070287539936,
      "grad_norm": 0.06598684065212063,
      "learning_rate": 9.95248246010126e-05,
      "loss": 0.9501,
      "step": 58
    },
    {
      "epoch": 0.38338658146964855,
      "grad_norm": 0.12103302407814338,
      "learning_rate": 9.944904871101228e-05,
      "loss": 0.9713,
      "step": 60
    },
    {
      "epoch": 0.3961661341853035,
      "grad_norm": 0.07330981053456032,
      "learning_rate": 9.936770262872443e-05,
      "loss": 0.9283,
      "step": 62
    },
    {
      "epoch": 0.40894568690095845,
      "grad_norm": 0.06537535724415816,
      "learning_rate": 9.928079551738543e-05,
      "loss": 0.9118,
      "step": 64
    },
    {
      "epoch": 0.4217252396166134,
      "grad_norm": 0.07457609795137939,
      "learning_rate": 9.918833716665419e-05,
      "loss": 0.9279,
      "step": 66
    },
    {
      "epoch": 0.43450479233226835,
      "grad_norm": 0.07491122165043795,
      "learning_rate": 9.909033799150946e-05,
      "loss": 0.935,
      "step": 68
    },
    {
      "epoch": 0.4472843450479233,
      "grad_norm": 0.06781283989008571,
      "learning_rate": 9.898680903107666e-05,
      "loss": 0.9361,
      "step": 70
    },
    {
      "epoch": 0.46006389776357826,
      "grad_norm": 0.07160916695151898,
      "learning_rate": 9.887776194738432e-05,
      "loss": 0.9159,
      "step": 72
    },
    {
      "epoch": 0.4728434504792332,
      "grad_norm": 0.0681941013678725,
      "learning_rate": 9.876320902405042e-05,
      "loss": 0.8779,
      "step": 74
    },
    {
      "epoch": 0.48562300319488816,
      "grad_norm": 0.07482319269062407,
      "learning_rate": 9.864316316489873e-05,
      "loss": 0.8825,
      "step": 76
    },
    {
      "epoch": 0.4984025559105431,
      "grad_norm": 0.08697975313543096,
      "learning_rate": 9.851763789250525e-05,
      "loss": 0.922,
      "step": 78
    },
    {
      "epoch": 0.5111821086261981,
      "grad_norm": 0.09978612068745818,
      "learning_rate": 9.838664734667495e-05,
      "loss": 0.8894,
      "step": 80
    },
    {
      "epoch": 0.5239616613418531,
      "grad_norm": 0.09384667638421956,
      "learning_rate": 9.825020628284896e-05,
      "loss": 0.8593,
      "step": 82
    },
    {
      "epoch": 0.536741214057508,
      "grad_norm": 0.06932081799385038,
      "learning_rate": 9.810833007044247e-05,
      "loss": 0.8662,
      "step": 84
    },
    {
      "epoch": 0.549520766773163,
      "grad_norm": 0.10358699944795004,
      "learning_rate": 9.796103469111351e-05,
      "loss": 0.8723,
      "step": 86
    },
    {
      "epoch": 0.5623003194888179,
      "grad_norm": 0.07169243369499742,
      "learning_rate": 9.780833673696254e-05,
      "loss": 0.8482,
      "step": 88
    },
    {
      "epoch": 0.5750798722044729,
      "grad_norm": 0.1050406308556227,
      "learning_rate": 9.76502534086636e-05,
      "loss": 0.8496,
      "step": 90
    },
    {
      "epoch": 0.5878594249201278,
      "grad_norm": 0.07201905690967678,
      "learning_rate": 9.74868025135266e-05,
      "loss": 0.8291,
      "step": 92
    },
    {
      "epoch": 0.6006389776357828,
      "grad_norm": 1.2625349021090781,
      "learning_rate": 9.731800246349148e-05,
      "loss": 0.8503,
      "step": 94
    },
    {
      "epoch": 0.6134185303514377,
      "grad_norm": 0.17981258022070712,
      "learning_rate": 9.714387227305422e-05,
      "loss": 0.8231,
      "step": 96
    },
    {
      "epoch": 0.6261980830670927,
      "grad_norm": 0.07561478832740967,
      "learning_rate": 9.696443155712486e-05,
      "loss": 0.8119,
      "step": 98
    },
    {
      "epoch": 0.6389776357827476,
      "grad_norm": 0.08195686915168865,
      "learning_rate": 9.67797005288181e-05,
      "loss": 0.7926,
      "step": 100
    },
    {
      "epoch": 0.6517571884984026,
      "grad_norm": 0.0890476280007116,
      "learning_rate": 9.65896999971763e-05,
      "loss": 0.8039,
      "step": 102
    },
    {
      "epoch": 0.6645367412140575,
      "grad_norm": 0.07738578891457887,
      "learning_rate": 9.639445136482548e-05,
      "loss": 0.7721,
      "step": 104
    },
    {
      "epoch": 0.6773162939297125,
      "grad_norm": 0.0743037172920425,
      "learning_rate": 9.619397662556435e-05,
      "loss": 0.794,
      "step": 106
    },
    {
      "epoch": 0.6900958466453674,
      "grad_norm": 0.08803835897602165,
      "learning_rate": 9.598829836188694e-05,
      "loss": 0.7721,
      "step": 108
    },
    {
      "epoch": 0.7028753993610224,
      "grad_norm": 0.07702819696223887,
      "learning_rate": 9.577743974243874e-05,
      "loss": 0.7765,
      "step": 110
    },
    {
      "epoch": 0.7156549520766773,
      "grad_norm": 0.07473535070111323,
      "learning_rate": 9.55614245194068e-05,
      "loss": 0.7598,
      "step": 112
    },
    {
      "epoch": 0.7284345047923323,
      "grad_norm": 0.08433756541496004,
      "learning_rate": 9.534027702584425e-05,
      "loss": 0.7727,
      "step": 114
    },
    {
      "epoch": 0.7412140575079872,
      "grad_norm": 0.07483257658817612,
      "learning_rate": 9.511402217292926e-05,
      "loss": 0.7465,
      "step": 116
    },
    {
      "epoch": 0.7539936102236422,
      "grad_norm": 0.0880318685591304,
      "learning_rate": 9.488268544715896e-05,
      "loss": 0.7321,
      "step": 118
    },
    {
      "epoch": 0.7667731629392971,
      "grad_norm": 0.07719604899450865,
      "learning_rate": 9.464629290747842e-05,
      "loss": 0.7624,
      "step": 120
    },
    {
      "epoch": 0.7795527156549521,
      "grad_norm": 0.0733176421376437,
      "learning_rate": 9.440487118234535e-05,
      "loss": 0.6975,
      "step": 122
    },
    {
      "epoch": 0.792332268370607,
      "grad_norm": 0.07051701385784455,
      "learning_rate": 9.415844746673047e-05,
      "loss": 0.7127,
      "step": 124
    },
    {
      "epoch": 0.805111821086262,
      "grad_norm": 0.0729787562181977,
      "learning_rate": 9.390704951905411e-05,
      "loss": 0.7503,
      "step": 126
    },
    {
      "epoch": 0.8178913738019169,
      "grad_norm": 0.07128874732953779,
      "learning_rate": 9.365070565805941e-05,
      "loss": 0.6941,
      "step": 128
    },
    {
      "epoch": 0.8306709265175719,
      "grad_norm": 0.07804844381711577,
      "learning_rate": 9.338944475962237e-05,
      "loss": 0.7197,
      "step": 130
    },
    {
      "epoch": 0.8434504792332268,
      "grad_norm": 0.08207580744924538,
      "learning_rate": 9.312329625349902e-05,
      "loss": 0.7134,
      "step": 132
    },
    {
      "epoch": 0.8562300319488818,
      "grad_norm": 0.10268159904999394,
      "learning_rate": 9.285229012001047e-05,
      "loss": 0.705,
      "step": 134
    },
    {
      "epoch": 0.8690095846645367,
      "grad_norm": 0.07097527094154266,
      "learning_rate": 9.257645688666556e-05,
      "loss": 0.7036,
      "step": 136
    },
    {
      "epoch": 0.8817891373801917,
      "grad_norm": 0.07284443178958877,
      "learning_rate": 9.22958276247223e-05,
      "loss": 0.7313,
      "step": 138
    },
    {
      "epoch": 0.8945686900958466,
      "grad_norm": 0.07294697279525543,
      "learning_rate": 9.201043394568773e-05,
      "loss": 0.6847,
      "step": 140
    },
    {
      "epoch": 0.9073482428115016,
      "grad_norm": 0.0725032039002937,
      "learning_rate": 9.172030799775699e-05,
      "loss": 0.6877,
      "step": 142
    },
    {
      "epoch": 0.9201277955271565,
      "grad_norm": 0.06708836437156662,
      "learning_rate": 9.142548246219212e-05,
      "loss": 0.6837,
      "step": 144
    },
    {
      "epoch": 0.9329073482428115,
      "grad_norm": 0.07361178534656698,
      "learning_rate": 9.112599054964057e-05,
      "loss": 0.6522,
      "step": 146
    },
    {
      "epoch": 0.9456869009584664,
      "grad_norm": 0.06961060997060975,
      "learning_rate": 9.082186599639428e-05,
      "loss": 0.6732,
      "step": 148
    },
    {
      "epoch": 0.9584664536741214,
      "grad_norm": 0.06369267112915664,
      "learning_rate": 9.051314306058933e-05,
      "loss": 0.6615,
      "step": 150
    },
    {
      "epoch": 0.9712460063897763,
      "grad_norm": 0.06667729772792583,
      "learning_rate": 9.019985651834703e-05,
      "loss": 0.6742,
      "step": 152
    },
    {
      "epoch": 0.9840255591054313,
      "grad_norm": 0.07052786453330319,
      "learning_rate": 8.988204165985649e-05,
      "loss": 0.6365,
      "step": 154
    },
    {
      "epoch": 0.9968051118210862,
      "grad_norm": 0.06352217971127558,
      "learning_rate": 8.955973428539944e-05,
      "loss": 0.6531,
      "step": 156
    },
    {
      "epoch": 1.011182108626198,
      "grad_norm": 0.0907023898699884,
      "learning_rate": 8.923297070131737e-05,
      "loss": 0.6986,
      "step": 158
    },
    {
      "epoch": 1.023961661341853,
      "grad_norm": 0.06588723514264389,
      "learning_rate": 8.890178771592199e-05,
      "loss": 0.4221,
      "step": 160
    },
    {
      "epoch": 1.036741214057508,
      "grad_norm": 0.07457104912562523,
      "learning_rate": 8.856622263534875e-05,
      "loss": 0.4375,
      "step": 162
    },
    {
      "epoch": 1.049520766773163,
      "grad_norm": 0.08716030746078077,
      "learning_rate": 8.822631325935463e-05,
      "loss": 0.4633,
      "step": 164
    },
    {
      "epoch": 1.0623003194888179,
      "grad_norm": 0.07564657660605784,
      "learning_rate": 8.788209787706015e-05,
      "loss": 0.4149,
      "step": 166
    },
    {
      "epoch": 1.0750798722044728,
      "grad_norm": 0.2601478494309565,
      "learning_rate": 8.753361526263621e-05,
      "loss": 0.4644,
      "step": 168
    },
    {
      "epoch": 1.0878594249201279,
      "grad_norm": 0.07236244361689621,
      "learning_rate": 8.718090467093654e-05,
      "loss": 0.445,
      "step": 170
    },
    {
      "epoch": 1.1006389776357828,
      "grad_norm": 0.07360308087849284,
      "learning_rate": 8.682400583307562e-05,
      "loss": 0.4189,
      "step": 172
    },
    {
      "epoch": 1.1134185303514377,
      "grad_norm": 0.06934965586236702,
      "learning_rate": 8.646295895195333e-05,
      "loss": 0.4168,
      "step": 174
    },
    {
      "epoch": 1.1261980830670926,
      "grad_norm": 0.06652725595095291,
      "learning_rate": 8.609780469772623e-05,
      "loss": 0.4332,
      "step": 176
    },
    {
      "epoch": 1.1389776357827477,
      "grad_norm": 0.06493423808775205,
      "learning_rate": 8.572858420322627e-05,
      "loss": 0.4126,
      "step": 178
    },
    {
      "epoch": 1.1517571884984026,
      "grad_norm": 0.07224306242862681,
      "learning_rate": 8.535533905932738e-05,
      "loss": 0.4639,
      "step": 180
    },
    {
      "epoch": 1.1645367412140575,
      "grad_norm": 0.06325420247080109,
      "learning_rate": 8.497811131026046e-05,
      "loss": 0.4097,
      "step": 182
    },
    {
      "epoch": 1.1773162939297124,
      "grad_norm": 0.05960690196531746,
      "learning_rate": 8.459694344887732e-05,
      "loss": 0.4258,
      "step": 184
    },
    {
      "epoch": 1.1900958466453675,
      "grad_norm": 0.06526403248406679,
      "learning_rate": 8.421187841186402e-05,
      "loss": 0.4453,
      "step": 186
    },
    {
      "epoch": 1.2028753993610224,
      "grad_norm": 0.06754636177295095,
      "learning_rate": 8.382295957490436e-05,
      "loss": 0.4277,
      "step": 188
    },
    {
      "epoch": 1.2156549520766773,
      "grad_norm": 0.11883404710840821,
      "learning_rate": 8.343023074779368e-05,
      "loss": 0.4386,
      "step": 190
    },
    {
      "epoch": 1.2284345047923322,
      "grad_norm": 0.07793571463351197,
      "learning_rate": 8.303373616950408e-05,
      "loss": 0.4072,
      "step": 192
    },
    {
      "epoch": 1.2412140575079873,
      "grad_norm": 0.06518657342856102,
      "learning_rate": 8.263352050320094e-05,
      "loss": 0.4396,
      "step": 194
    },
    {
      "epoch": 1.2539936102236422,
      "grad_norm": 0.05974282037032855,
      "learning_rate": 8.222962883121196e-05,
      "loss": 0.4016,
      "step": 196
    },
    {
      "epoch": 1.266773162939297,
      "grad_norm": 0.0693639502217822,
      "learning_rate": 8.182210664994878e-05,
      "loss": 0.3808,
      "step": 198
    },
    {
      "epoch": 1.279552715654952,
      "grad_norm": 0.06127831754623801,
      "learning_rate": 8.141099986478212e-05,
      "loss": 0.3961,
      "step": 200
    },
    {
      "epoch": 1.292332268370607,
      "grad_norm": 0.06755312065722066,
      "learning_rate": 8.099635478487064e-05,
      "loss": 0.3894,
      "step": 202
    },
    {
      "epoch": 1.305111821086262,
      "grad_norm": 0.0584212869146413,
      "learning_rate": 8.057821811794458e-05,
      "loss": 0.414,
      "step": 204
    },
    {
      "epoch": 1.317891373801917,
      "grad_norm": 0.05983512956529008,
      "learning_rate": 8.015663696504422e-05,
      "loss": 0.3634,
      "step": 206
    },
    {
      "epoch": 1.330670926517572,
      "grad_norm": 0.05778218969166584,
      "learning_rate": 7.973165881521434e-05,
      "loss": 0.4233,
      "step": 208
    },
    {
      "epoch": 1.343450479233227,
      "grad_norm": 0.058310021079803646,
      "learning_rate": 7.930333154015466e-05,
      "loss": 0.4061,
      "step": 210
    },
    {
      "epoch": 1.3562300319488818,
      "grad_norm": 0.0642143238679532,
      "learning_rate": 7.88717033888274e-05,
      "loss": 0.4083,
      "step": 212
    },
    {
      "epoch": 1.3690095846645367,
      "grad_norm": 0.05656381877721736,
      "learning_rate": 7.843682298202235e-05,
      "loss": 0.4033,
      "step": 214
    },
    {
      "epoch": 1.3817891373801916,
      "grad_norm": 0.05518190162844295,
      "learning_rate": 7.799873930687978e-05,
      "loss": 0.3953,
      "step": 216
    },
    {
      "epoch": 1.3945686900958467,
      "grad_norm": 0.05903661851778338,
      "learning_rate": 7.755750171137246e-05,
      "loss": 0.4096,
      "step": 218
    },
    {
      "epoch": 1.4073482428115016,
      "grad_norm": 0.05833074145436464,
      "learning_rate": 7.711315989874677e-05,
      "loss": 0.4151,
      "step": 220
    },
    {
      "epoch": 1.4201277955271565,
      "grad_norm": 0.05919878363690307,
      "learning_rate": 7.666576392192389e-05,
      "loss": 0.39,
      "step": 222
    },
    {
      "epoch": 1.4329073482428116,
      "grad_norm": 0.05913664327254173,
      "learning_rate": 7.621536417786159e-05,
      "loss": 0.4005,
      "step": 224
    },
    {
      "epoch": 1.4456869009584665,
      "grad_norm": 0.0640842931075253,
      "learning_rate": 7.576201140187727e-05,
      "loss": 0.4165,
      "step": 226
    },
    {
      "epoch": 1.4584664536741214,
      "grad_norm": 0.062131879810909965,
      "learning_rate": 7.530575666193283e-05,
      "loss": 0.3891,
      "step": 228
    },
    {
      "epoch": 1.4712460063897763,
      "grad_norm": 0.06992276137309804,
      "learning_rate": 7.484665135288213e-05,
      "loss": 0.3971,
      "step": 230
    },
    {
      "epoch": 1.4840255591054312,
      "grad_norm": 0.06078790664861669,
      "learning_rate": 7.438474719068173e-05,
      "loss": 0.3961,
      "step": 232
    },
    {
      "epoch": 1.4968051118210863,
      "grad_norm": 0.06922648734675908,
      "learning_rate": 7.392009620656513e-05,
      "loss": 0.4331,
      "step": 234
    },
    {
      "epoch": 1.5095846645367412,
      "grad_norm": 0.05766139832102871,
      "learning_rate": 7.345275074118185e-05,
      "loss": 0.4182,
      "step": 236
    },
    {
      "epoch": 1.5223642172523961,
      "grad_norm": 0.06292873231888371,
      "learning_rate": 7.298276343870151e-05,
      "loss": 0.4061,
      "step": 238
    },
    {
      "epoch": 1.5351437699680512,
      "grad_norm": 0.06000860844713537,
      "learning_rate": 7.251018724088367e-05,
      "loss": 0.4023,
      "step": 240
    },
    {
      "epoch": 1.547923322683706,
      "grad_norm": 0.0585777107714916,
      "learning_rate": 7.203507538111423e-05,
      "loss": 0.3855,
      "step": 242
    },
    {
      "epoch": 1.560702875399361,
      "grad_norm": 0.0571671995255021,
      "learning_rate": 7.155748137840892e-05,
      "loss": 0.3951,
      "step": 244
    },
    {
      "epoch": 1.573482428115016,
      "grad_norm": 0.053447175708899994,
      "learning_rate": 7.107745903138472e-05,
      "loss": 0.3745,
      "step": 246
    },
    {
      "epoch": 1.5862619808306708,
      "grad_norm": 0.055736902711725635,
      "learning_rate": 7.059506241219965e-05,
      "loss": 0.3911,
      "step": 248
    },
    {
      "epoch": 1.599041533546326,
      "grad_norm": 0.05715355824554817,
      "learning_rate": 7.011034586046176e-05,
      "loss": 0.4043,
      "step": 250
    },
    {
      "epoch": 1.6118210862619808,
      "grad_norm": 0.06030447320081754,
      "learning_rate": 6.962336397710819e-05,
      "loss": 0.3899,
      "step": 252
    },
    {
      "epoch": 1.6246006389776357,
      "grad_norm": 0.061239135474291606,
      "learning_rate": 6.91341716182545e-05,
      "loss": 0.4246,
      "step": 254
    },
    {
      "epoch": 1.6373801916932909,
      "grad_norm": 0.05695235071864785,
      "learning_rate": 6.864282388901544e-05,
      "loss": 0.3953,
      "step": 256
    },
    {
      "epoch": 1.6501597444089455,
      "grad_norm": 0.05308868251491366,
      "learning_rate": 6.814937613729766e-05,
      "loss": 0.4103,
      "step": 258
    },
    {
      "epoch": 1.6629392971246006,
      "grad_norm": 0.054046791633493914,
      "learning_rate": 6.765388394756504e-05,
      "loss": 0.4059,
      "step": 260
    },
    {
      "epoch": 1.6757188498402555,
      "grad_norm": 0.05148697040730548,
      "learning_rate": 6.715640313457733e-05,
      "loss": 0.3767,
      "step": 262
    },
    {
      "epoch": 1.6884984025559104,
      "grad_norm": 0.05318569591896447,
      "learning_rate": 6.665698973710288e-05,
      "loss": 0.3708,
      "step": 264
    },
    {
      "epoch": 1.7012779552715656,
      "grad_norm": 0.05196719070381999,
      "learning_rate": 6.615570001160626e-05,
      "loss": 0.4042,
      "step": 266
    },
    {
      "epoch": 1.7140575079872205,
      "grad_norm": 0.05632881769869459,
      "learning_rate": 6.565259042591113e-05,
      "loss": 0.3987,
      "step": 268
    },
    {
      "epoch": 1.7268370607028753,
      "grad_norm": 0.05470059818193366,
      "learning_rate": 6.514771765283942e-05,
      "loss": 0.3973,
      "step": 270
    },
    {
      "epoch": 1.7396166134185305,
      "grad_norm": 0.056351811449582394,
      "learning_rate": 6.464113856382752e-05,
      "loss": 0.3864,
      "step": 272
    },
    {
      "epoch": 1.7523961661341851,
      "grad_norm": 0.05831258279981057,
      "learning_rate": 6.413291022251989e-05,
      "loss": 0.4041,
      "step": 274
    },
    {
      "epoch": 1.7651757188498403,
      "grad_norm": 0.053467450310740065,
      "learning_rate": 6.362308987834115e-05,
      "loss": 0.3814,
      "step": 276
    },
    {
      "epoch": 1.7779552715654952,
      "grad_norm": 0.051287152623381335,
      "learning_rate": 6.311173496004723e-05,
      "loss": 0.395,
      "step": 278
    },
    {
      "epoch": 1.79073482428115,
      "grad_norm": 0.05429714498773308,
      "learning_rate": 6.259890306925627e-05,
      "loss": 0.3821,
      "step": 280
    },
    {
      "epoch": 1.8035143769968052,
      "grad_norm": 0.057523653580626326,
      "learning_rate": 6.208465197396013e-05,
      "loss": 0.3984,
      "step": 282
    },
    {
      "epoch": 1.81629392971246,
      "grad_norm": 0.05724842136937287,
      "learning_rate": 6.156903960201709e-05,
      "loss": 0.4181,
      "step": 284
    },
    {
      "epoch": 1.829073482428115,
      "grad_norm": 0.052227309043480996,
      "learning_rate": 6.105212403462651e-05,
      "loss": 0.4049,
      "step": 286
    },
    {
      "epoch": 1.84185303514377,
      "grad_norm": 0.04967908325326877,
      "learning_rate": 6.0533963499786314e-05,
      "loss": 0.4117,
      "step": 288
    },
    {
      "epoch": 1.854632587859425,
      "grad_norm": 0.05539898234566285,
      "learning_rate": 6.001461636573397e-05,
      "loss": 0.4006,
      "step": 290
    },
    {
      "epoch": 1.8674121405750799,
      "grad_norm": 0.05795414669880149,
      "learning_rate": 5.949414113437142e-05,
      "loss": 0.386,
      "step": 292
    },
    {
      "epoch": 1.880191693290735,
      "grad_norm": 0.050446841270231885,
      "learning_rate": 5.897259643467527e-05,
      "loss": 0.3842,
      "step": 294
    },
    {
      "epoch": 1.8929712460063897,
      "grad_norm": 0.052453051506198604,
      "learning_rate": 5.8450041016092464e-05,
      "loss": 0.3525,
      "step": 296
    },
    {
      "epoch": 1.9057507987220448,
      "grad_norm": 0.052803823491155276,
      "learning_rate": 5.792653374192245e-05,
      "loss": 0.3963,
      "step": 298
    },
    {
      "epoch": 1.9185303514376997,
      "grad_norm": 0.05180901601155745,
      "learning_rate": 5.7402133582686576e-05,
      "loss": 0.3798,
      "step": 300
    },
    {
      "epoch": 1.9313099041533546,
      "grad_norm": 0.05166645429890597,
      "learning_rate": 5.6876899609485256e-05,
      "loss": 0.3838,
      "step": 302
    },
    {
      "epoch": 1.9440894568690097,
      "grad_norm": 0.05306354741968808,
      "learning_rate": 5.6350890987343944e-05,
      "loss": 0.4165,
      "step": 304
    },
    {
      "epoch": 1.9568690095846646,
      "grad_norm": 0.0860975722690725,
      "learning_rate": 5.582416696854853e-05,
      "loss": 0.3737,
      "step": 306
    },
    {
      "epoch": 1.9696485623003195,
      "grad_norm": 0.05323286133666828,
      "learning_rate": 5.5296786885970805e-05,
      "loss": 0.3889,
      "step": 308
    },
    {
      "epoch": 1.9824281150159746,
      "grad_norm": 0.05299665331057226,
      "learning_rate": 5.476881014638491e-05,
      "loss": 0.3896,
      "step": 310
    },
    {
      "epoch": 1.9952076677316293,
      "grad_norm": 0.05157945275339266,
      "learning_rate": 5.4240296223775465e-05,
      "loss": 0.3637,
      "step": 312
    }
  ],
  "logging_steps": 2,
  "max_steps": 624,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.2651584501604942e+19,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
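
A minimal sketch for inspecting this state file (an illustrative assumption, not part of the checkpoint itself): it reads log_history with the standard json module and plots the training loss alongside the warmup-then-decay learning-rate schedule recorded above; the file path and matplotlib dependency are assumptions.

# Plot loss and learning-rate curves from a Transformers trainer_state.json.
import json

import matplotlib.pyplot as plt

# Path assumed; adjust to wherever the checkpoint lives locally.
with open("checkpoint-312/trainer_state.json") as f:
    state = json.load(f)

# In this file every log entry carries "loss", "learning_rate", and "step";
# the filter guards against eval-only entries in other trainer states.
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]

fig, loss_ax = plt.subplots()
loss_ax.plot(steps, [e["loss"] for e in logs], label="train loss")
loss_ax.set_xlabel("step")
loss_ax.set_ylabel("loss")

lr_ax = loss_ax.twinx()  # second y-axis so both curves share the step axis
lr_ax.plot(steps, [e["learning_rate"] for e in logs], color="tab:orange")
lr_ax.set_ylabel("learning rate")

fig.tight_layout()
plt.show()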