{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 104972, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.004763174941889266, |
|
"grad_norm": 23.845611572265625, |
|
"learning_rate": 4.976184125290554e-05, |
|
"loss": 1.6947, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.009526349883778531, |
|
"grad_norm": 11.257529258728027, |
|
"learning_rate": 4.952368250581108e-05, |
|
"loss": 1.2773, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.014289524825667798, |
|
"grad_norm": 19.1054630279541, |
|
"learning_rate": 4.9285523758716614e-05, |
|
"loss": 1.1464, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.019052699767557062, |
|
"grad_norm": 8.174609184265137, |
|
"learning_rate": 4.904736501162215e-05, |
|
"loss": 1.0952, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.02381587470944633, |
|
"grad_norm": 22.265609741210938, |
|
"learning_rate": 4.880920626452769e-05, |
|
"loss": 1.0419, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.028579049651335595, |
|
"grad_norm": 3.621635913848877, |
|
"learning_rate": 4.857104751743322e-05, |
|
"loss": 0.9673, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.03334222459322486, |
|
"grad_norm": 9.05103874206543, |
|
"learning_rate": 4.8332888770338755e-05, |
|
"loss": 0.9227, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.038105399535114125, |
|
"grad_norm": 6.0216474533081055, |
|
"learning_rate": 4.809473002324429e-05, |
|
"loss": 0.917, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.042868574477003395, |
|
"grad_norm": 12.011100769042969, |
|
"learning_rate": 4.785657127614983e-05, |
|
"loss": 0.8644, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.04763174941889266, |
|
"grad_norm": 10.062934875488281, |
|
"learning_rate": 4.761841252905537e-05, |
|
"loss": 0.8573, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.05239492436078192, |
|
"grad_norm": 4.252622127532959, |
|
"learning_rate": 4.7380253781960904e-05, |
|
"loss": 0.8198, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.05715809930267119, |
|
"grad_norm": 11.673833847045898, |
|
"learning_rate": 4.714209503486644e-05, |
|
"loss": 0.7742, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.061921274244560454, |
|
"grad_norm": 8.625577926635742, |
|
"learning_rate": 4.690393628777198e-05, |
|
"loss": 0.7513, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.06668444918644972, |
|
"grad_norm": 11.9224853515625, |
|
"learning_rate": 4.666577754067752e-05, |
|
"loss": 0.7376, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.07144762412833898, |
|
"grad_norm": 3.3534774780273438, |
|
"learning_rate": 4.642761879358305e-05, |
|
"loss": 0.733, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.07621079907022825, |
|
"grad_norm": 6.773929595947266, |
|
"learning_rate": 4.618946004648859e-05, |
|
"loss": 0.6743, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.08097397401211752, |
|
"grad_norm": 12.177743911743164, |
|
"learning_rate": 4.595130129939413e-05, |
|
"loss": 0.6837, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.08573714895400679, |
|
"grad_norm": 10.937847137451172, |
|
"learning_rate": 4.5713142552299664e-05, |
|
"loss": 0.6745, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.09050032389589605, |
|
"grad_norm": 9.08661937713623, |
|
"learning_rate": 4.54749838052052e-05, |
|
"loss": 0.6366, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.09526349883778532, |
|
"grad_norm": 14.092183113098145, |
|
"learning_rate": 4.523682505811074e-05, |
|
"loss": 0.6218, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.10002667377967459, |
|
"grad_norm": 9.430176734924316, |
|
"learning_rate": 4.4998666311016275e-05, |
|
"loss": 0.6286, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.10478984872156384, |
|
"grad_norm": 6.812849998474121, |
|
"learning_rate": 4.476050756392181e-05, |
|
"loss": 0.6121, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.10955302366345311, |
|
"grad_norm": 6.120726108551025, |
|
"learning_rate": 4.452234881682735e-05, |
|
"loss": 0.581, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.11431619860534238, |
|
"grad_norm": 13.171046257019043, |
|
"learning_rate": 4.4284190069732887e-05, |
|
"loss": 0.5797, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.11907937354723164, |
|
"grad_norm": 9.89405632019043, |
|
"learning_rate": 4.404603132263842e-05, |
|
"loss": 0.5461, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.12384254848912091, |
|
"grad_norm": 6.620090484619141, |
|
"learning_rate": 4.3807872575543954e-05, |
|
"loss": 0.5583, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.12860572343101018, |
|
"grad_norm": 17.443872451782227, |
|
"learning_rate": 4.356971382844949e-05, |
|
"loss": 0.562, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.13336889837289945, |
|
"grad_norm": 4.9741997718811035, |
|
"learning_rate": 4.333155508135503e-05, |
|
"loss": 0.5448, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.13813207331478872, |
|
"grad_norm": 13.39028549194336, |
|
"learning_rate": 4.3093396334260565e-05, |
|
"loss": 0.5166, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.14289524825667796, |
|
"grad_norm": 7.8550333976745605, |
|
"learning_rate": 4.28552375871661e-05, |
|
"loss": 0.5071, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.14765842319856723, |
|
"grad_norm": 13.109572410583496, |
|
"learning_rate": 4.261707884007164e-05, |
|
"loss": 0.489, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.1524215981404565, |
|
"grad_norm": 8.581804275512695, |
|
"learning_rate": 4.237892009297718e-05, |
|
"loss": 0.4992, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.15718477308234577, |
|
"grad_norm": 0.8759533166885376, |
|
"learning_rate": 4.2140761345882714e-05, |
|
"loss": 0.5041, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.16194794802423504, |
|
"grad_norm": 4.165910243988037, |
|
"learning_rate": 4.190260259878825e-05, |
|
"loss": 0.4923, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.1667111229661243, |
|
"grad_norm": 4.400519847869873, |
|
"learning_rate": 4.166444385169379e-05, |
|
"loss": 0.4895, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.17147429790801358, |
|
"grad_norm": 3.9758849143981934, |
|
"learning_rate": 4.142628510459932e-05, |
|
"loss": 0.4845, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.17623747284990282, |
|
"grad_norm": 11.77698802947998, |
|
"learning_rate": 4.1188126357504856e-05, |
|
"loss": 0.4523, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.1810006477917921, |
|
"grad_norm": 8.056093215942383, |
|
"learning_rate": 4.094996761041039e-05, |
|
"loss": 0.4815, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.18576382273368136, |
|
"grad_norm": 3.4172158241271973, |
|
"learning_rate": 4.071180886331593e-05, |
|
"loss": 0.478, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.19052699767557063, |
|
"grad_norm": 4.890318393707275, |
|
"learning_rate": 4.0473650116221474e-05, |
|
"loss": 0.4492, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.1952901726174599, |
|
"grad_norm": 9.595198631286621, |
|
"learning_rate": 4.023549136912701e-05, |
|
"loss": 0.4507, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.20005334755934917, |
|
"grad_norm": 8.482044219970703, |
|
"learning_rate": 3.999733262203255e-05, |
|
"loss": 0.4436, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.2048165225012384, |
|
"grad_norm": 5.411190509796143, |
|
"learning_rate": 3.9759173874938085e-05, |
|
"loss": 0.4406, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.20957969744312768, |
|
"grad_norm": 13.22948932647705, |
|
"learning_rate": 3.952101512784362e-05, |
|
"loss": 0.4222, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.21434287238501695, |
|
"grad_norm": 11.69010066986084, |
|
"learning_rate": 3.928285638074915e-05, |
|
"loss": 0.4119, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.21910604732690622, |
|
"grad_norm": 11.298418998718262, |
|
"learning_rate": 3.904469763365469e-05, |
|
"loss": 0.4058, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.2238692222687955, |
|
"grad_norm": 8.810882568359375, |
|
"learning_rate": 3.880653888656023e-05, |
|
"loss": 0.4201, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.22863239721068476, |
|
"grad_norm": 15.669658660888672, |
|
"learning_rate": 3.8568380139465764e-05, |
|
"loss": 0.4098, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.23339557215257403, |
|
"grad_norm": 3.749591112136841, |
|
"learning_rate": 3.83302213923713e-05, |
|
"loss": 0.3972, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.23815874709446327, |
|
"grad_norm": 8.824658393859863, |
|
"learning_rate": 3.809206264527684e-05, |
|
"loss": 0.3722, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.24292192203635254, |
|
"grad_norm": 13.041702270507812, |
|
"learning_rate": 3.7853903898182375e-05, |
|
"loss": 0.4254, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.24768509697824181, |
|
"grad_norm": 15.908291816711426, |
|
"learning_rate": 3.761574515108791e-05, |
|
"loss": 0.3736, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.2524482719201311, |
|
"grad_norm": 10.208578109741211, |
|
"learning_rate": 3.737758640399345e-05, |
|
"loss": 0.412, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.25721144686202035, |
|
"grad_norm": 5.080774784088135, |
|
"learning_rate": 3.713942765689899e-05, |
|
"loss": 0.3799, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.2619746218039096, |
|
"grad_norm": 8.108014106750488, |
|
"learning_rate": 3.6901268909804524e-05, |
|
"loss": 0.3557, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.2667377967457989, |
|
"grad_norm": 13.029576301574707, |
|
"learning_rate": 3.6663110162710054e-05, |
|
"loss": 0.3785, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.27150097168768816, |
|
"grad_norm": 9.865415573120117, |
|
"learning_rate": 3.642495141561559e-05, |
|
"loss": 0.4013, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.27626414662957743, |
|
"grad_norm": 4.568774700164795, |
|
"learning_rate": 3.618679266852113e-05, |
|
"loss": 0.3663, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.2810273215714667, |
|
"grad_norm": 3.7032947540283203, |
|
"learning_rate": 3.5948633921426665e-05, |
|
"loss": 0.3728, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.2857904965133559, |
|
"grad_norm": 5.560993194580078, |
|
"learning_rate": 3.57104751743322e-05, |
|
"loss": 0.364, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.2905536714552452, |
|
"grad_norm": 12.633008003234863, |
|
"learning_rate": 3.547231642723774e-05, |
|
"loss": 0.3738, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.29531684639713446, |
|
"grad_norm": 16.694622039794922, |
|
"learning_rate": 3.523415768014328e-05, |
|
"loss": 0.363, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.30008002133902373, |
|
"grad_norm": 6.716195583343506, |
|
"learning_rate": 3.4995998933048814e-05, |
|
"loss": 0.3547, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.304843196280913, |
|
"grad_norm": 5.371184349060059, |
|
"learning_rate": 3.475784018595435e-05, |
|
"loss": 0.3392, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.30960637122280227, |
|
"grad_norm": 15.001012802124023, |
|
"learning_rate": 3.451968143885989e-05, |
|
"loss": 0.3419, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.31436954616469154, |
|
"grad_norm": 14.817428588867188, |
|
"learning_rate": 3.4281522691765425e-05, |
|
"loss": 0.3454, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.3191327211065808, |
|
"grad_norm": 8.50633716583252, |
|
"learning_rate": 3.404336394467096e-05, |
|
"loss": 0.3322, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.3238958960484701, |
|
"grad_norm": 10.963394165039062, |
|
"learning_rate": 3.38052051975765e-05, |
|
"loss": 0.3507, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.32865907099035935, |
|
"grad_norm": 6.286830902099609, |
|
"learning_rate": 3.356704645048204e-05, |
|
"loss": 0.3662, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.3334222459322486, |
|
"grad_norm": 7.931427955627441, |
|
"learning_rate": 3.3328887703387574e-05, |
|
"loss": 0.3223, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.3381854208741379, |
|
"grad_norm": 0.015142062678933144, |
|
"learning_rate": 3.309072895629311e-05, |
|
"loss": 0.3492, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.34294859581602716, |
|
"grad_norm": 6.33823823928833, |
|
"learning_rate": 3.285257020919865e-05, |
|
"loss": 0.3443, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.34771177075791637, |
|
"grad_norm": 14.21090030670166, |
|
"learning_rate": 3.2614411462104185e-05, |
|
"loss": 0.3208, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.35247494569980564, |
|
"grad_norm": 6.757291793823242, |
|
"learning_rate": 3.237625271500972e-05, |
|
"loss": 0.3283, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.3572381206416949, |
|
"grad_norm": 25.391630172729492, |
|
"learning_rate": 3.213809396791525e-05, |
|
"loss": 0.3207, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.3620012955835842, |
|
"grad_norm": 4.002015113830566, |
|
"learning_rate": 3.189993522082079e-05, |
|
"loss": 0.3001, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.36676447052547345, |
|
"grad_norm": 8.16422176361084, |
|
"learning_rate": 3.166177647372633e-05, |
|
"loss": 0.2973, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.3715276454673627, |
|
"grad_norm": 4.861073017120361, |
|
"learning_rate": 3.1423617726631864e-05, |
|
"loss": 0.3104, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.376290820409252, |
|
"grad_norm": 9.573932647705078, |
|
"learning_rate": 3.11854589795374e-05, |
|
"loss": 0.2984, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.38105399535114126, |
|
"grad_norm": 10.641077041625977, |
|
"learning_rate": 3.094730023244294e-05, |
|
"loss": 0.314, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.38581717029303053, |
|
"grad_norm": 19.601299285888672, |
|
"learning_rate": 3.0709141485348475e-05, |
|
"loss": 0.292, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.3905803452349198, |
|
"grad_norm": 1.557869553565979, |
|
"learning_rate": 3.0470982738254012e-05, |
|
"loss": 0.3244, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.39534352017680907, |
|
"grad_norm": 9.880559921264648, |
|
"learning_rate": 3.0232823991159546e-05, |
|
"loss": 0.3053, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.40010669511869834, |
|
"grad_norm": 16.77007293701172, |
|
"learning_rate": 2.9994665244065083e-05, |
|
"loss": 0.2935, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.4048698700605876, |
|
"grad_norm": 15.44863510131836, |
|
"learning_rate": 2.975650649697062e-05, |
|
"loss": 0.2885, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.4096330450024768, |
|
"grad_norm": 2.127357006072998, |
|
"learning_rate": 2.9518347749876158e-05, |
|
"loss": 0.2896, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.4143962199443661, |
|
"grad_norm": 4.906562328338623, |
|
"learning_rate": 2.9280189002781695e-05, |
|
"loss": 0.2961, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.41915939488625537, |
|
"grad_norm": 5.996885299682617, |
|
"learning_rate": 2.9042030255687232e-05, |
|
"loss": 0.283, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.42392256982814464, |
|
"grad_norm": 4.349148750305176, |
|
"learning_rate": 2.8803871508592766e-05, |
|
"loss": 0.285, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.4286857447700339, |
|
"grad_norm": 11.325324058532715, |
|
"learning_rate": 2.8565712761498303e-05, |
|
"loss": 0.2991, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.4334489197119232, |
|
"grad_norm": 29.92687225341797, |
|
"learning_rate": 2.832755401440384e-05, |
|
"loss": 0.2758, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.43821209465381245, |
|
"grad_norm": 6.005437850952148, |
|
"learning_rate": 2.8089395267309377e-05, |
|
"loss": 0.285, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.4429752695957017, |
|
"grad_norm": 7.799108505249023, |
|
"learning_rate": 2.7851236520214917e-05, |
|
"loss": 0.2851, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.447738444537591, |
|
"grad_norm": 7.533286094665527, |
|
"learning_rate": 2.7613077773120455e-05, |
|
"loss": 0.2713, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.45250161947948025, |
|
"grad_norm": 16.269254684448242, |
|
"learning_rate": 2.737491902602599e-05, |
|
"loss": 0.2954, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.4572647944213695, |
|
"grad_norm": 12.095314025878906, |
|
"learning_rate": 2.713676027893153e-05, |
|
"loss": 0.2733, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.4620279693632588, |
|
"grad_norm": 9.375202178955078, |
|
"learning_rate": 2.6898601531837066e-05, |
|
"loss": 0.266, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.46679114430514806, |
|
"grad_norm": 18.472373962402344, |
|
"learning_rate": 2.66604427847426e-05, |
|
"loss": 0.2616, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.4715543192470373, |
|
"grad_norm": 10.298583984375, |
|
"learning_rate": 2.6422284037648137e-05, |
|
"loss": 0.273, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.47631749418892655, |
|
"grad_norm": 5.778076648712158, |
|
"learning_rate": 2.6184125290553674e-05, |
|
"loss": 0.2754, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.4810806691308158, |
|
"grad_norm": 3.6435136795043945, |
|
"learning_rate": 2.594596654345921e-05, |
|
"loss": 0.2664, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.4858438440727051, |
|
"grad_norm": 21.803163528442383, |
|
"learning_rate": 2.5707807796364748e-05, |
|
"loss": 0.2621, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.49060701901459436, |
|
"grad_norm": 11.081718444824219, |
|
"learning_rate": 2.5469649049270282e-05, |
|
"loss": 0.2806, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.49537019395648363, |
|
"grad_norm": 3.4980528354644775, |
|
"learning_rate": 2.523149030217582e-05, |
|
"loss": 0.278, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.5001333688983729, |
|
"grad_norm": 7.148010730743408, |
|
"learning_rate": 2.4993331555081356e-05, |
|
"loss": 0.2592, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.5048965438402622, |
|
"grad_norm": 3.838651657104492, |
|
"learning_rate": 2.4755172807986893e-05, |
|
"loss": 0.2636, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.5096597187821514, |
|
"grad_norm": 12.54211711883545, |
|
"learning_rate": 2.451701406089243e-05, |
|
"loss": 0.2483, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.5144228937240407, |
|
"grad_norm": 17.774137496948242, |
|
"learning_rate": 2.4278855313797964e-05, |
|
"loss": 0.2442, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.51918606866593, |
|
"grad_norm": 10.924212455749512, |
|
"learning_rate": 2.40406965667035e-05, |
|
"loss": 0.2367, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.5239492436078192, |
|
"grad_norm": 0.054358381778001785, |
|
"learning_rate": 2.380253781960904e-05, |
|
"loss": 0.2239, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.5287124185497085, |
|
"grad_norm": 1.8560116291046143, |
|
"learning_rate": 2.3564379072514575e-05, |
|
"loss": 0.2621, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.5334755934915978, |
|
"grad_norm": 0.23067010939121246, |
|
"learning_rate": 2.3326220325420116e-05, |
|
"loss": 0.245, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.5382387684334871, |
|
"grad_norm": 9.185586929321289, |
|
"learning_rate": 2.308806157832565e-05, |
|
"loss": 0.2578, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.5430019433753763, |
|
"grad_norm": 5.946952819824219, |
|
"learning_rate": 2.2849902831231187e-05, |
|
"loss": 0.2053, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.5477651183172656, |
|
"grad_norm": 21.640161514282227, |
|
"learning_rate": 2.2611744084136724e-05, |
|
"loss": 0.2419, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.5525282932591549, |
|
"grad_norm": 4.966391563415527, |
|
"learning_rate": 2.237358533704226e-05, |
|
"loss": 0.2472, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.5572914682010441, |
|
"grad_norm": 6.430874347686768, |
|
"learning_rate": 2.2135426589947798e-05, |
|
"loss": 0.2176, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.5620546431429334, |
|
"grad_norm": 1.1396760940551758, |
|
"learning_rate": 2.1897267842853332e-05, |
|
"loss": 0.2297, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.5668178180848226, |
|
"grad_norm": 0.023226283490657806, |
|
"learning_rate": 2.165910909575887e-05, |
|
"loss": 0.2286, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.5715809930267118, |
|
"grad_norm": 7.5434770584106445, |
|
"learning_rate": 2.1420950348664406e-05, |
|
"loss": 0.2453, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.5763441679686011, |
|
"grad_norm": 20.513235092163086, |
|
"learning_rate": 2.1182791601569943e-05, |
|
"loss": 0.2237, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.5811073429104904, |
|
"grad_norm": 0.2174549102783203, |
|
"learning_rate": 2.094463285447548e-05, |
|
"loss": 0.2206, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.5858705178523796, |
|
"grad_norm": 5.196407794952393, |
|
"learning_rate": 2.0706474107381018e-05, |
|
"loss": 0.2249, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.5906336927942689, |
|
"grad_norm": 6.904590129852295, |
|
"learning_rate": 2.046831536028655e-05, |
|
"loss": 0.2158, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.5953968677361582, |
|
"grad_norm": 4.865005970001221, |
|
"learning_rate": 2.0230156613192092e-05, |
|
"loss": 0.2369, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.6001600426780475, |
|
"grad_norm": 8.591822624206543, |
|
"learning_rate": 1.999199786609763e-05, |
|
"loss": 0.2186, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.6049232176199367, |
|
"grad_norm": 11.895712852478027, |
|
"learning_rate": 1.9753839119003166e-05, |
|
"loss": 0.2175, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.609686392561826, |
|
"grad_norm": 29.44073486328125, |
|
"learning_rate": 1.95156803719087e-05, |
|
"loss": 0.239, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.6144495675037153, |
|
"grad_norm": 0.14603924751281738, |
|
"learning_rate": 1.9277521624814237e-05, |
|
"loss": 0.2342, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.6192127424456045, |
|
"grad_norm": 8.721657752990723, |
|
"learning_rate": 1.9039362877719774e-05, |
|
"loss": 0.2072, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.6239759173874938, |
|
"grad_norm": 7.2579755783081055, |
|
"learning_rate": 1.880120413062531e-05, |
|
"loss": 0.2176, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.6287390923293831, |
|
"grad_norm": 10.267131805419922, |
|
"learning_rate": 1.8563045383530848e-05, |
|
"loss": 0.2118, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.6335022672712723, |
|
"grad_norm": 12.264985084533691, |
|
"learning_rate": 1.8324886636436382e-05, |
|
"loss": 0.2215, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.6382654422131616, |
|
"grad_norm": 15.631693840026855, |
|
"learning_rate": 1.808672788934192e-05, |
|
"loss": 0.2213, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.6430286171550509, |
|
"grad_norm": 9.132320404052734, |
|
"learning_rate": 1.7848569142247456e-05, |
|
"loss": 0.2007, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.6477917920969402, |
|
"grad_norm": 2.836796283721924, |
|
"learning_rate": 1.7610410395152993e-05, |
|
"loss": 0.2197, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.6525549670388294, |
|
"grad_norm": 1.20395028591156, |
|
"learning_rate": 1.737225164805853e-05, |
|
"loss": 0.2092, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.6573181419807187, |
|
"grad_norm": 4.42557954788208, |
|
"learning_rate": 1.7134092900964068e-05, |
|
"loss": 0.2228, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.662081316922608, |
|
"grad_norm": 7.136639595031738, |
|
"learning_rate": 1.6895934153869605e-05, |
|
"loss": 0.2159, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.6668444918644972, |
|
"grad_norm": 12.695110321044922, |
|
"learning_rate": 1.6657775406775142e-05, |
|
"loss": 0.1984, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.6716076668063865, |
|
"grad_norm": 11.841693878173828, |
|
"learning_rate": 1.641961665968068e-05, |
|
"loss": 0.1913, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.6763708417482758, |
|
"grad_norm": 0.7839029431343079, |
|
"learning_rate": 1.6181457912586216e-05, |
|
"loss": 0.1911, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.681134016690165, |
|
"grad_norm": 6.188957691192627, |
|
"learning_rate": 1.594329916549175e-05, |
|
"loss": 0.2164, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.6858971916320543, |
|
"grad_norm": 11.414396286010742, |
|
"learning_rate": 1.5705140418397287e-05, |
|
"loss": 0.2026, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.6906603665739435, |
|
"grad_norm": 21.34324836730957, |
|
"learning_rate": 1.5466981671302824e-05, |
|
"loss": 0.1877, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.6954235415158327, |
|
"grad_norm": 10.534087181091309, |
|
"learning_rate": 1.5228822924208361e-05, |
|
"loss": 0.1889, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.700186716457722, |
|
"grad_norm": 0.4472333788871765, |
|
"learning_rate": 1.4990664177113897e-05, |
|
"loss": 0.1902, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.7049498913996113, |
|
"grad_norm": 27.742639541625977, |
|
"learning_rate": 1.4752505430019434e-05, |
|
"loss": 0.1929, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.7097130663415006, |
|
"grad_norm": 12.095074653625488, |
|
"learning_rate": 1.451434668292497e-05, |
|
"loss": 0.1754, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.7144762412833898, |
|
"grad_norm": 10.153932571411133, |
|
"learning_rate": 1.4276187935830506e-05, |
|
"loss": 0.1866, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.7192394162252791, |
|
"grad_norm": 9.065908432006836, |
|
"learning_rate": 1.4038029188736043e-05, |
|
"loss": 0.2004, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.7240025911671684, |
|
"grad_norm": 1.7107542753219604, |
|
"learning_rate": 1.3799870441641582e-05, |
|
"loss": 0.175, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.7287657661090576, |
|
"grad_norm": 1.4907644987106323, |
|
"learning_rate": 1.356171169454712e-05, |
|
"loss": 0.1865, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.7335289410509469, |
|
"grad_norm": 0.11667291074991226, |
|
"learning_rate": 1.3323552947452655e-05, |
|
"loss": 0.1694, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.7382921159928362, |
|
"grad_norm": 29.062976837158203, |
|
"learning_rate": 1.3085394200358192e-05, |
|
"loss": 0.1821, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.7430552909347254, |
|
"grad_norm": 7.0561113357543945, |
|
"learning_rate": 1.2847235453263729e-05, |
|
"loss": 0.1692, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.7478184658766147, |
|
"grad_norm": 7.916496753692627, |
|
"learning_rate": 1.2609076706169264e-05, |
|
"loss": 0.1711, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.752581640818504, |
|
"grad_norm": 0.009064608253538609, |
|
"learning_rate": 1.2370917959074802e-05, |
|
"loss": 0.1795, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.7573448157603933, |
|
"grad_norm": 16.94624900817871, |
|
"learning_rate": 1.2132759211980339e-05, |
|
"loss": 0.1732, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.7621079907022825, |
|
"grad_norm": 24.09770965576172, |
|
"learning_rate": 1.1894600464885874e-05, |
|
"loss": 0.1599, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.7668711656441718, |
|
"grad_norm": 17.340219497680664, |
|
"learning_rate": 1.1656441717791411e-05, |
|
"loss": 0.1702, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.7716343405860611, |
|
"grad_norm": 0.015018216334283352, |
|
"learning_rate": 1.1418282970696948e-05, |
|
"loss": 0.1679, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.7763975155279503, |
|
"grad_norm": 16.005643844604492, |
|
"learning_rate": 1.1180124223602485e-05, |
|
"loss": 0.1832, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.7811606904698396, |
|
"grad_norm": 0.0013979446375742555, |
|
"learning_rate": 1.0941965476508023e-05, |
|
"loss": 0.1505, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.7859238654117289, |
|
"grad_norm": 1.049574375152588, |
|
"learning_rate": 1.0703806729413558e-05, |
|
"loss": 0.1663, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.7906870403536181, |
|
"grad_norm": 5.492803573608398, |
|
"learning_rate": 1.0465647982319095e-05, |
|
"loss": 0.1561, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.7954502152955074, |
|
"grad_norm": 14.530741691589355, |
|
"learning_rate": 1.022748923522463e-05, |
|
"loss": 0.1876, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.8002133902373967, |
|
"grad_norm": 0.0616171695291996, |
|
"learning_rate": 9.98933048813017e-06, |
|
"loss": 0.1524, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.804976565179286, |
|
"grad_norm": 15.610015869140625, |
|
"learning_rate": 9.751171741035706e-06, |
|
"loss": 0.1588, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.8097397401211752, |
|
"grad_norm": 7.8352885246276855, |
|
"learning_rate": 9.513012993941242e-06, |
|
"loss": 0.1779, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.8145029150630644, |
|
"grad_norm": 21.532123565673828, |
|
"learning_rate": 9.274854246846779e-06, |
|
"loss": 0.1436, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 0.8192660900049537, |
|
"grad_norm": 0.025519462302327156, |
|
"learning_rate": 9.036695499752314e-06, |
|
"loss": 0.1472, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 0.8240292649468429, |
|
"grad_norm": 0.09057486802339554, |
|
"learning_rate": 8.798536752657852e-06, |
|
"loss": 0.1525, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 0.8287924398887322, |
|
"grad_norm": 0.9066371917724609, |
|
"learning_rate": 8.560378005563389e-06, |
|
"loss": 0.1614, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 0.8335556148306215, |
|
"grad_norm": 2.612293004989624, |
|
"learning_rate": 8.322219258468926e-06, |
|
"loss": 0.1492, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 0.8383187897725107, |
|
"grad_norm": 6.420555114746094, |
|
"learning_rate": 8.084060511374463e-06, |
|
"loss": 0.1518, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 0.8430819647144, |
|
"grad_norm": 0.04058153182268143, |
|
"learning_rate": 7.845901764279998e-06, |
|
"loss": 0.1581, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 0.8478451396562893, |
|
"grad_norm": 17.142908096313477, |
|
"learning_rate": 7.6077430171855355e-06, |
|
"loss": 0.166, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 0.8526083145981785, |
|
"grad_norm": 17.988386154174805, |
|
"learning_rate": 7.369584270091072e-06, |
|
"loss": 0.1613, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 0.8573714895400678, |
|
"grad_norm": 0.28535395860671997, |
|
"learning_rate": 7.131425522996608e-06, |
|
"loss": 0.1448, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 0.8621346644819571, |
|
"grad_norm": 0.02261945605278015, |
|
"learning_rate": 6.893266775902146e-06, |
|
"loss": 0.1397, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 0.8668978394238464, |
|
"grad_norm": 15.836788177490234, |
|
"learning_rate": 6.655108028807683e-06, |
|
"loss": 0.146, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 0.8716610143657356, |
|
"grad_norm": 0.6049064993858337, |
|
"learning_rate": 6.416949281713219e-06, |
|
"loss": 0.1405, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 0.8764241893076249, |
|
"grad_norm": 0.08131851255893707, |
|
"learning_rate": 6.178790534618756e-06, |
|
"loss": 0.1466, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 0.8811873642495142, |
|
"grad_norm": 16.24254608154297, |
|
"learning_rate": 5.940631787524292e-06, |
|
"loss": 0.1398, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 0.8859505391914034, |
|
"grad_norm": 8.061177504714578e-05, |
|
"learning_rate": 5.702473040429829e-06, |
|
"loss": 0.124, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 0.8907137141332927, |
|
"grad_norm": 2.3429534435272217, |
|
"learning_rate": 5.464314293335366e-06, |
|
"loss": 0.1448, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 0.895476889075182, |
|
"grad_norm": 21.22195053100586, |
|
"learning_rate": 5.2261555462409025e-06, |
|
"loss": 0.1304, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 0.9002400640170712, |
|
"grad_norm": 22.701601028442383, |
|
"learning_rate": 4.9879967991464396e-06, |
|
"loss": 0.1211, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 0.9050032389589605, |
|
"grad_norm": 15.280872344970703, |
|
"learning_rate": 4.749838052051976e-06, |
|
"loss": 0.1514, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 0.9097664139008498, |
|
"grad_norm": 0.01960950717329979, |
|
"learning_rate": 4.511679304957513e-06, |
|
"loss": 0.1115, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 0.914529588842739, |
|
"grad_norm": 0.0017501560505479574, |
|
"learning_rate": 4.273520557863049e-06, |
|
"loss": 0.1571, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 0.9192927637846283, |
|
"grad_norm": 0.05184149742126465, |
|
"learning_rate": 4.035361810768586e-06, |
|
"loss": 0.1499, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 0.9240559387265176, |
|
"grad_norm": 6.8865180015563965, |
|
"learning_rate": 3.7972030636741226e-06, |
|
"loss": 0.1272, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 0.9288191136684069, |
|
"grad_norm": 0.10639504343271255, |
|
"learning_rate": 3.5590443165796593e-06, |
|
"loss": 0.1263, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 0.9335822886102961, |
|
"grad_norm": 0.9005939960479736, |
|
"learning_rate": 3.3208855694851965e-06, |
|
"loss": 0.133, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 0.9383454635521854, |
|
"grad_norm": 1.1748387813568115, |
|
"learning_rate": 3.082726822390733e-06, |
|
"loss": 0.1325, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 0.9431086384940746, |
|
"grad_norm": 28.960805892944336, |
|
"learning_rate": 2.8445680752962694e-06, |
|
"loss": 0.1319, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 0.9478718134359638, |
|
"grad_norm": 8.118181228637695, |
|
"learning_rate": 2.606409328201806e-06, |
|
"loss": 0.1531, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 0.9526349883778531, |
|
"grad_norm": 0.004353045951575041, |
|
"learning_rate": 2.3682505811073433e-06, |
|
"loss": 0.1275, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 0.9573981633197424, |
|
"grad_norm": 2.1794090270996094, |
|
"learning_rate": 2.1300918340128795e-06, |
|
"loss": 0.1428, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 0.9621613382616316, |
|
"grad_norm": 8.74696159362793, |
|
"learning_rate": 1.8919330869184164e-06, |
|
"loss": 0.1135, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 0.9669245132035209, |
|
"grad_norm": 31.47783851623535, |
|
"learning_rate": 1.6537743398239533e-06, |
|
"loss": 0.1391, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 0.9716876881454102, |
|
"grad_norm": 1.8234331607818604, |
|
"learning_rate": 1.4156155927294898e-06, |
|
"loss": 0.12, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 0.9764508630872994, |
|
"grad_norm": 5.736881732940674, |
|
"learning_rate": 1.1774568456350265e-06, |
|
"loss": 0.1253, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 0.9812140380291887, |
|
"grad_norm": 0.919281005859375, |
|
"learning_rate": 9.392980985405633e-07, |
|
"loss": 0.1216, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 0.985977212971078, |
|
"grad_norm": 6.265872001647949, |
|
"learning_rate": 7.011393514460999e-07, |
|
"loss": 0.1245, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 0.9907403879129673, |
|
"grad_norm": 19.050594329833984, |
|
"learning_rate": 4.629806043516366e-07, |
|
"loss": 0.1298, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 0.9955035628548565, |
|
"grad_norm": 9.125242233276367, |
|
"learning_rate": 2.2482185725717335e-07, |
|
"loss": 0.1177, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 104972, |
|
"total_flos": 1.5105749507715635e+17, |
|
"train_loss": 0.05896880445613715, |
|
"train_runtime": 27901.1518, |
|
"train_samples_per_second": 30.098, |
|
"train_steps_per_second": 3.762 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 104972, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.5105749507715635e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |