{
  "best_metric": 0.9356,
  "best_model_checkpoint": "checkpoint/vit-large/checkpoint-11970",
  "epoch": 18.0,
  "eval_steps": 500,
  "global_step": 11970,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "grad_norm": 2.7020020484924316, "learning_rate": 9.998496240601504e-06, "loss": 4.6119, "step": 10 },
    { "epoch": 0.03, "grad_norm": 2.892003059387207, "learning_rate": 9.996992481203008e-06, "loss": 4.5669, "step": 20 },
    { "epoch": 0.05, "grad_norm": 2.7313311100006104, "learning_rate": 9.995488721804511e-06, "loss": 4.5263, "step": 30 },
    { "epoch": 0.06, "grad_norm": 2.659421443939209, "learning_rate": 9.993984962406017e-06, "loss": 4.4729, "step": 40 },
    { "epoch": 0.08, "grad_norm": 2.846480131149292, "learning_rate": 9.992481203007518e-06, "loss": 4.3985, "step": 50 },
    { "epoch": 0.09, "grad_norm": 3.0880706310272217, "learning_rate": 9.990977443609024e-06, "loss": 4.3247, "step": 60 },
    { "epoch": 0.11, "grad_norm": 3.202064037322998, "learning_rate": 9.989473684210527e-06, "loss": 4.233, "step": 70 },
    { "epoch": 0.12, "grad_norm": 4.243076324462891, "learning_rate": 9.98796992481203e-06, "loss": 4.1598, "step": 80 },
    { "epoch": 0.14, "grad_norm": 3.47965669631958, "learning_rate": 9.986466165413534e-06, "loss": 4.0929, "step": 90 },
    { "epoch": 0.15, "grad_norm": 3.431342363357544, "learning_rate": 9.984962406015038e-06, "loss": 3.9735, "step": 100 },
    { "epoch": 0.17, "grad_norm": 3.915376901626587, "learning_rate": 9.983458646616541e-06, "loss": 3.9051, "step": 110 },
    { "epoch": 0.18, "grad_norm": 3.6235947608947754, "learning_rate": 9.981954887218046e-06, "loss": 3.7795, "step": 120 },
    { "epoch": 0.2, "grad_norm": 4.0705485343933105, "learning_rate": 9.98045112781955e-06, "loss": 3.6517, "step": 130 },
    { "epoch": 0.21, "grad_norm": 3.916447162628174, "learning_rate": 9.978947368421053e-06, "loss": 3.5197, "step": 140 },
    { "epoch": 0.23, "grad_norm": 4.508981227874756, "learning_rate": 9.977443609022557e-06, "loss": 3.4313, "step": 150 },
    { "epoch": 0.24, "grad_norm": 4.508138179779053, "learning_rate": 9.97593984962406e-06, "loss": 3.3823, "step": 160 },
    { "epoch": 0.26, "grad_norm": 5.037680625915527, "learning_rate": 9.974436090225564e-06, "loss": 3.2748, "step": 170 },
    { "epoch": 0.27, "grad_norm": 10.304414749145508, "learning_rate": 9.97293233082707e-06, "loss": 3.1054, "step": 180 },
    { "epoch": 0.29, "grad_norm": 5.1311421394348145, "learning_rate": 9.971428571428571e-06, "loss": 3.0703, "step": 190 },
    { "epoch": 0.3, "grad_norm": 4.8540120124816895, "learning_rate": 9.969924812030076e-06, "loss": 2.9789, "step": 200 },
    { "epoch": 0.32, "grad_norm": 4.647185802459717, "learning_rate": 9.96842105263158e-06, "loss": 2.8568, "step": 210 },
    { "epoch": 0.33, "grad_norm": 6.943347454071045, "learning_rate": 9.966917293233083e-06, "loss": 2.8175, "step": 220 },
    { "epoch": 0.35, "grad_norm": 17.15534210205078, "learning_rate": 9.965413533834587e-06, "loss": 2.7049, "step": 230 },
    { "epoch": 0.36, "grad_norm": 5.327253341674805, "learning_rate": 9.963909774436092e-06, "loss": 2.6185, "step": 240 },
    { "epoch": 0.38, "grad_norm": 5.56367301940918, "learning_rate": 9.962406015037594e-06, "loss": 2.4704, "step": 250 },
    { "epoch": 0.39, "grad_norm": 6.038745880126953, "learning_rate": 9.960902255639099e-06, "loss": 2.4862, "step": 260 },
    { "epoch": 0.41, "grad_norm": 5.661726474761963, "learning_rate": 9.959398496240603e-06, "loss": 2.4405, "step": 270 },
    { "epoch": 0.42, "grad_norm": 5.565981864929199, "learning_rate": 9.957894736842106e-06, "loss": 2.3698, "step": 280 },
    { "epoch": 0.44, "grad_norm": 7.349733829498291, "learning_rate": 9.95639097744361e-06, "loss": 2.3338, "step": 290 },
    { "epoch": 0.45, "grad_norm": 6.526618957519531, "learning_rate": 9.954887218045113e-06, "loss": 2.2445, "step": 300 },
    { "epoch": 0.47, "grad_norm": 5.558746337890625, "learning_rate": 9.953383458646617e-06, "loss": 2.202, "step": 310 },
    { "epoch": 0.48, "grad_norm": 6.4157633781433105, "learning_rate": 9.951879699248122e-06, "loss": 2.1578, "step": 320 },
    { "epoch": 0.5, "grad_norm": 5.635522842407227, "learning_rate": 9.950375939849625e-06, "loss": 2.099, "step": 330 },
    { "epoch": 0.51, "grad_norm": 8.216004371643066, "learning_rate": 9.948872180451129e-06, "loss": 2.0472, "step": 340 },
    { "epoch": 0.53, "grad_norm": 7.348927021026611, "learning_rate": 9.947368421052632e-06, "loss": 2.0272, "step": 350 },
    { "epoch": 0.54, "grad_norm": 6.218992710113525, "learning_rate": 9.945864661654136e-06, "loss": 1.9022, "step": 360 },
    { "epoch": 0.56, "grad_norm": 12.379638671875, "learning_rate": 9.94436090225564e-06, "loss": 1.9707, "step": 370 },
    { "epoch": 0.57, "grad_norm": 7.454248905181885, "learning_rate": 9.942857142857145e-06, "loss": 1.9612, "step": 380 },
    { "epoch": 0.59, "grad_norm": 8.975961685180664, "learning_rate": 9.941353383458647e-06, "loss": 1.8519, "step": 390 },
    { "epoch": 0.6, "grad_norm": 11.839798927307129, "learning_rate": 9.939849624060152e-06, "loss": 1.8246, "step": 400 },
    { "epoch": 0.62, "grad_norm": 21.542709350585938, "learning_rate": 9.938345864661655e-06, "loss": 1.7591, "step": 410 },
    { "epoch": 0.63, "grad_norm": 6.640402793884277, "learning_rate": 9.936842105263159e-06, "loss": 1.7803, "step": 420 },
    { "epoch": 0.65, "grad_norm": 7.312070369720459, "learning_rate": 9.935338345864662e-06, "loss": 1.6758, "step": 430 },
    { "epoch": 0.66, "grad_norm": 6.633362770080566, "learning_rate": 9.933834586466168e-06, "loss": 1.7095, "step": 440 },
    { "epoch": 0.68, "grad_norm": 9.548731803894043, "learning_rate": 9.93233082706767e-06, "loss": 1.6934, "step": 450 },
    { "epoch": 0.69, "grad_norm": 9.280405044555664, "learning_rate": 9.930827067669175e-06, "loss": 1.74, "step": 460 },
    { "epoch": 0.71, "grad_norm": 4.683008670806885, "learning_rate": 9.929323308270678e-06, "loss": 1.5803, "step": 470 },
    { "epoch": 0.72, "grad_norm": 5.887816905975342, "learning_rate": 9.927819548872182e-06, "loss": 1.6039, "step": 480 },
    { "epoch": 0.74, "grad_norm": 7.674352645874023, "learning_rate": 9.926315789473685e-06, "loss": 1.6009, "step": 490 },
    { "epoch": 0.75, "grad_norm": 5.968412399291992, "learning_rate": 9.924812030075189e-06, "loss": 1.5391, "step": 500 },
    { "epoch": 0.77, "grad_norm": 5.869007587432861, "learning_rate": 9.923308270676692e-06, "loss": 1.5226, "step": 510 },
    { "epoch": 0.78, "grad_norm": 7.70728874206543, "learning_rate": 9.921804511278196e-06, "loss": 1.6173, "step": 520 },
    { "epoch": 0.8, "grad_norm": 7.701499938964844, "learning_rate": 9.920300751879701e-06, "loss": 1.5042, "step": 530 },
    { "epoch": 0.81, "grad_norm": 6.880636692047119, "learning_rate": 9.918796992481203e-06, "loss": 1.456, "step": 540 },
    { "epoch": 0.83, "grad_norm": 7.021149635314941, "learning_rate": 9.917293233082708e-06, "loss": 1.5698, "step": 550 },
    { "epoch": 0.84, "grad_norm": 7.1271138191223145, "learning_rate": 9.915789473684211e-06, "loss": 1.3838, "step": 560 },
    { "epoch": 0.86, "grad_norm": 7.8424482345581055, "learning_rate": 9.914285714285715e-06, "loss": 1.3997, "step": 570 },
    { "epoch": 0.87, "grad_norm": 8.099345207214355, "learning_rate": 9.912781954887218e-06, "loss": 1.4359, "step": 580 },
    { "epoch": 0.89, "grad_norm": 7.261110782623291, "learning_rate": 9.911278195488722e-06, "loss": 1.454, "step": 590 },
    { "epoch": 0.9, "grad_norm": 6.30597448348999, "learning_rate": 9.909774436090226e-06, "loss": 1.3907, "step": 600 },
    { "epoch": 0.92, "grad_norm": 7.466990947723389, "learning_rate": 9.90827067669173e-06, "loss": 1.329, "step": 610 },
    { "epoch": 0.93, "grad_norm": 7.6469316482543945, "learning_rate": 9.906766917293234e-06, "loss": 1.2848, "step": 620 },
    { "epoch": 0.95, "grad_norm": 7.2480244636535645, "learning_rate": 9.905263157894738e-06, "loss": 1.4246, "step": 630 },
    { "epoch": 0.96, "grad_norm": 7.738135814666748, "learning_rate": 9.903759398496241e-06, "loss": 1.3669, "step": 640 },
    { "epoch": 0.98, "grad_norm": 8.298168182373047, "learning_rate": 9.902255639097745e-06, "loss": 1.2883, "step": 650 },
    { "epoch": 0.99, "grad_norm": 6.747694969177246, "learning_rate": 9.900751879699248e-06, "loss": 1.2884, "step": 660 },
    { "epoch": 1.0, "eval_accuracy": 0.8834, "eval_loss": 0.8751662373542786, "eval_runtime": 85.9745, "eval_samples_per_second": 116.314, "eval_steps_per_second": 0.465, "step": 665 },
    { "epoch": 1.01, "grad_norm": 7.908563137054443, "learning_rate": 9.899248120300754e-06, "loss": 1.2429, "step": 670 },
    { "epoch": 1.02, "grad_norm": 7.439415454864502, "learning_rate": 9.897744360902255e-06, "loss": 1.2716, "step": 680 },
    { "epoch": 1.04, "grad_norm": 7.2306599617004395, "learning_rate": 9.89624060150376e-06, "loss": 1.1676, "step": 690 },
    { "epoch": 1.05, "grad_norm": 6.287716388702393, "learning_rate": 9.894736842105264e-06, "loss": 1.2212, "step": 700 },
    { "epoch": 1.07, "grad_norm": 10.363208770751953, "learning_rate": 9.893233082706768e-06, "loss": 1.259, "step": 710 },
    { "epoch": 1.08, "grad_norm": 6.556034564971924, "learning_rate": 9.891729323308271e-06, "loss": 1.1383, "step": 720 },
    { "epoch": 1.1, "grad_norm": 5.640949726104736, "learning_rate": 9.890225563909776e-06, "loss": 1.2454, "step": 730 },
    { "epoch": 1.11, "grad_norm": 7.140594959259033, "learning_rate": 9.888721804511278e-06, "loss": 1.1363, "step": 740 },
    { "epoch": 1.13, "grad_norm": 7.780942440032959, "learning_rate": 9.887218045112783e-06, "loss": 1.1806, "step": 750 },
    { "epoch": 1.14, "grad_norm": 6.301698684692383, "learning_rate": 9.885714285714287e-06, "loss": 1.1416, "step": 760 },
    { "epoch": 1.16, "grad_norm": 7.256317138671875, "learning_rate": 9.88421052631579e-06, "loss": 1.1947, "step": 770 },
    { "epoch": 1.17, "grad_norm": 9.818164825439453, "learning_rate": 9.882706766917294e-06, "loss": 1.1865, "step": 780 },
    { "epoch": 1.19, "grad_norm": 11.159587860107422, "learning_rate": 9.881203007518797e-06, "loss": 1.0911, "step": 790 },
    { "epoch": 1.2, "grad_norm": 12.432153701782227, "learning_rate": 9.879699248120301e-06, "loss": 1.1486, "step": 800 },
    { "epoch": 1.22, "grad_norm": 9.031283378601074, "learning_rate": 9.878195488721806e-06, "loss": 1.0838, "step": 810 },
    { "epoch": 1.23, "grad_norm": 7.850508689880371, "learning_rate": 9.87669172932331e-06, "loss": 1.0953, "step": 820 },
    { "epoch": 1.25, "grad_norm": 6.112914562225342, "learning_rate": 9.875187969924813e-06, "loss": 1.1159, "step": 830 },
    { "epoch": 1.26, "grad_norm": 6.945899486541748, "learning_rate": 9.873684210526317e-06, "loss": 0.9613, "step": 840 },
    { "epoch": 1.28, "grad_norm": 7.430254936218262, "learning_rate": 9.87218045112782e-06, "loss": 1.086, "step": 850 },
    { "epoch": 1.29, "grad_norm": 6.00394868850708, "learning_rate": 9.870676691729324e-06, "loss": 1.0873, "step": 860 },
    { "epoch": 1.31, "grad_norm": 5.864083290100098, "learning_rate": 9.869172932330829e-06, "loss": 1.055, "step": 870 },
    { "epoch": 1.32, "grad_norm": 8.37553882598877, "learning_rate": 9.86766917293233e-06, "loss": 1.0894, "step": 880 },
    { "epoch": 1.34, "grad_norm": 8.302824974060059, "learning_rate": 9.866165413533836e-06, "loss": 1.1037, "step": 890 },
    { "epoch": 1.35, "grad_norm": 6.968749046325684, "learning_rate": 9.86466165413534e-06, "loss": 1.0071, "step": 900 },
    { "epoch": 1.37, "grad_norm": 5.924696922302246, "learning_rate": 9.863157894736843e-06, "loss": 1.0498, "step": 910 },
    { "epoch": 1.38, "grad_norm": 7.433680534362793, "learning_rate": 9.861654135338347e-06, "loss": 1.0456, "step": 920 },
    { "epoch": 1.4, "grad_norm": 7.614802360534668, "learning_rate": 9.86015037593985e-06, "loss": 0.9659, "step": 930 },
    { "epoch": 1.41, "grad_norm": 9.205560684204102, "learning_rate": 9.858646616541354e-06, "loss": 0.9692, "step": 940 },
    { "epoch": 1.43, "grad_norm": 6.741930961608887, "learning_rate": 9.857142857142859e-06, "loss": 1.0157, "step": 950 },
    { "epoch": 1.44, "grad_norm": 8.176901817321777, "learning_rate": 9.855639097744362e-06, "loss": 1.0014, "step": 960 },
    { "epoch": 1.46, "grad_norm": 5.335792541503906, "learning_rate": 9.854135338345866e-06, "loss": 0.9361, "step": 970 },
    { "epoch": 1.47, "grad_norm": 6.488396644592285, "learning_rate": 9.85263157894737e-06, "loss": 1.0858, "step": 980 },
    { "epoch": 1.49, "grad_norm": 6.725528240203857, "learning_rate": 9.851127819548873e-06, "loss": 1.0027, "step": 990 },
    { "epoch": 1.5, "grad_norm": 10.740559577941895, "learning_rate": 9.849624060150376e-06, "loss": 1.0121, "step": 1000 },
    { "epoch": 1.52, "grad_norm": 8.048620223999023, "learning_rate": 9.84812030075188e-06, "loss": 0.967, "step": 1010 },
    { "epoch": 1.53, "grad_norm": 7.225861072540283, "learning_rate": 9.846616541353383e-06, "loss": 1.0329, "step": 1020 },
    { "epoch": 1.55, "grad_norm": 7.12366247177124, "learning_rate": 9.845112781954887e-06, "loss": 1.0117, "step": 1030 },
    { "epoch": 1.56, "grad_norm": 7.5486273765563965, "learning_rate": 9.843609022556392e-06, "loss": 0.969, "step": 1040 },
    { "epoch": 1.58, "grad_norm": 9.591785430908203, "learning_rate": 9.842105263157896e-06, "loss": 0.9415, "step": 1050 },
    { "epoch": 1.59, "grad_norm": 7.203570365905762, "learning_rate": 9.8406015037594e-06, "loss": 1.0409, "step": 1060 },
    { "epoch": 1.61, "grad_norm": 9.412242889404297, "learning_rate": 9.839097744360903e-06, "loss": 1.0352, "step": 1070 },
    { "epoch": 1.62, "grad_norm": 9.688934326171875, "learning_rate": 9.837593984962406e-06, "loss": 1.0524, "step": 1080 },
    { "epoch": 1.64, "grad_norm": 6.9523844718933105, "learning_rate": 9.83609022556391e-06, "loss": 0.9546, "step": 1090 },
    { "epoch": 1.65, "grad_norm": 8.400866508483887, "learning_rate": 9.834586466165415e-06, "loss": 0.9361, "step": 1100 },
    { "epoch": 1.67, "grad_norm": 8.09070110321045, "learning_rate": 9.833082706766917e-06, "loss": 0.9797, "step": 1110 },
    { "epoch": 1.68, "grad_norm": 6.927423000335693, "learning_rate": 9.831578947368422e-06, "loss": 0.9439, "step": 1120 },
    { "epoch": 1.7, "grad_norm": 7.291294574737549, "learning_rate": 9.830075187969926e-06, "loss": 0.9753, "step": 1130 },
    { "epoch": 1.71, "grad_norm": 7.919008731842041, "learning_rate": 9.828571428571429e-06, "loss": 0.9697, "step": 1140 },
    { "epoch": 1.73, "grad_norm": 8.660476684570312, "learning_rate": 9.827067669172933e-06, "loss": 0.8882, "step": 1150 },
    { "epoch": 1.74, "grad_norm": 8.102679252624512, "learning_rate": 9.825563909774438e-06, "loss": 0.8959, "step": 1160 },
    { "epoch": 1.76, "grad_norm": 5.902896404266357, "learning_rate": 9.82406015037594e-06, "loss": 0.887, "step": 1170 },
    { "epoch": 1.77, "grad_norm": 6.6904778480529785, "learning_rate": 9.822556390977445e-06, "loss": 0.9352, "step": 1180 },
    { "epoch": 1.79, "grad_norm": 6.770270824432373, "learning_rate": 9.821052631578948e-06, "loss": 0.8916, "step": 1190 },
    { "epoch": 1.8, "grad_norm": 8.353099822998047, "learning_rate": 9.819548872180452e-06, "loss": 0.9523, "step": 1200 },
    { "epoch": 1.82, "grad_norm": 6.385773658752441, "learning_rate": 9.818045112781955e-06, "loss": 0.9175, "step": 1210 },
    { "epoch": 1.83, "grad_norm": 13.28996467590332, "learning_rate": 9.816541353383459e-06, "loss": 0.9375, "step": 1220 },
    { "epoch": 1.85, "grad_norm": 9.252169609069824, "learning_rate": 9.815037593984962e-06, "loss": 1.0589, "step": 1230 },
    { "epoch": 1.86, "grad_norm": 6.009567737579346, "learning_rate": 9.813533834586468e-06, "loss": 0.9608, "step": 1240 },
    { "epoch": 1.88, "grad_norm": 4.863635063171387, "learning_rate": 9.812030075187971e-06, "loss": 0.9724, "step": 1250 },
    { "epoch": 1.89, "grad_norm": 10.548372268676758, "learning_rate": 9.810526315789475e-06, "loss": 0.9651, "step": 1260 },
    { "epoch": 1.91, "grad_norm": 8.277862548828125, "learning_rate": 9.809022556390978e-06, "loss": 0.8868, "step": 1270 },
    { "epoch": 1.92, "grad_norm": 6.657036304473877, "learning_rate": 9.807518796992482e-06, "loss": 0.9133, "step": 1280 },
    { "epoch": 1.94, "grad_norm": 7.065949440002441, "learning_rate": 9.806015037593985e-06, "loss": 0.9602, "step": 1290 },
    { "epoch": 1.95, "grad_norm": 9.187036514282227, "learning_rate": 9.80451127819549e-06, "loss": 0.9626, "step": 1300 },
    { "epoch": 1.97, "grad_norm": 7.014963150024414, "learning_rate": 9.803007518796992e-06, "loss": 0.8653, "step": 1310 },
    { "epoch": 1.98, "grad_norm": 8.665754318237305, "learning_rate": 9.801503759398498e-06, "loss": 0.8795, "step": 1320 },
    { "epoch": 2.0, "grad_norm": 8.92686939239502, "learning_rate": 9.800000000000001e-06, "loss": 0.7958, "step": 1330 },
    { "epoch": 2.0, "eval_accuracy": 0.9142, "eval_loss": 0.4723776876926422, "eval_runtime": 84.6568, "eval_samples_per_second": 118.124, "eval_steps_per_second": 0.472, "step": 1330 },
    { "epoch": 2.02, "grad_norm": 6.881080627441406, "learning_rate": 9.798496240601505e-06, "loss": 0.9416, "step": 1340 },
    { "epoch": 2.03, "grad_norm": 5.449582099914551, "learning_rate": 9.796992481203008e-06, "loss": 0.8897, "step": 1350 },
    { "epoch": 2.05, "grad_norm": 6.535237789154053, "learning_rate": 9.795488721804513e-06, "loss": 0.8241, "step": 1360 },
    { "epoch": 2.06, "grad_norm": 7.858671188354492, "learning_rate": 9.793984962406015e-06, "loss": 0.7834, "step": 1370 },
    { "epoch": 2.08, "grad_norm": 6.7106852531433105, "learning_rate": 9.79248120300752e-06, "loss": 0.836, "step": 1380 },
    { "epoch": 2.09, "grad_norm": 6.440729141235352, "learning_rate": 9.790977443609024e-06, "loss": 0.775, "step": 1390 },
    { "epoch": 2.11, "grad_norm": 8.935519218444824, "learning_rate": 9.789473684210527e-06, "loss": 0.7826, "step": 1400 },
    { "epoch": 2.12, "grad_norm": 7.244870662689209, "learning_rate": 9.787969924812031e-06, "loss": 0.8917, "step": 1410 },
    { "epoch": 2.14, "grad_norm": 9.125387191772461, "learning_rate": 9.786466165413534e-06, "loss": 0.8167, "step": 1420 },
    { "epoch": 2.15, "grad_norm": 6.8634114265441895, "learning_rate": 9.784962406015038e-06, "loss": 0.8052, "step": 1430 },
    { "epoch": 2.17, "grad_norm": 7.519056797027588, "learning_rate": 9.783458646616543e-06, "loss": 0.8204, "step": 1440 },
    { "epoch": 2.18, "grad_norm": 7.861953258514404, "learning_rate": 9.781954887218047e-06, "loss": 0.8404, "step": 1450 },
    { "epoch": 2.2, "grad_norm": 10.940001487731934, "learning_rate": 9.78045112781955e-06, "loss": 0.7934, "step": 1460 },
    { "epoch": 2.21, "grad_norm": 5.963690757751465, "learning_rate": 9.778947368421054e-06, "loss": 0.7549, "step": 1470 },
    { "epoch": 2.23, "grad_norm": 6.734865665435791, "learning_rate": 9.777443609022557e-06, "loss": 0.8112, "step": 1480 },
    { "epoch": 2.24, "grad_norm": 7.971401214599609, "learning_rate": 9.77593984962406e-06, "loss": 0.8323, "step": 1490 },
    { "epoch": 2.26, "grad_norm": 9.728713989257812, "learning_rate": 9.774436090225564e-06, "loss": 0.7441, "step": 1500 },
    { "epoch": 2.27, "grad_norm": 9.506553649902344, "learning_rate": 9.772932330827068e-06, "loss": 0.8018, "step": 1510 },
    { "epoch": 2.29, "grad_norm": 7.3224663734436035, "learning_rate": 9.771428571428571e-06, "loss": 0.7869, "step": 1520 },
    { "epoch": 2.3, "grad_norm": 7.251104831695557, "learning_rate": 9.769924812030077e-06, "loss": 0.8472, "step": 1530 },
    { "epoch": 2.32, "grad_norm": 7.3015055656433105, "learning_rate": 9.76842105263158e-06, "loss": 0.7224, "step": 1540 },
    { "epoch": 2.33, "grad_norm": 9.096901893615723, "learning_rate": 9.766917293233084e-06, "loss": 0.8195, "step": 1550 },
    { "epoch": 2.35, "grad_norm": 7.366261959075928, "learning_rate": 9.765413533834587e-06, "loss": 0.8322, "step": 1560 },
    { "epoch": 2.36, "grad_norm": 9.955854415893555, "learning_rate": 9.76390977443609e-06, "loss": 0.8315, "step": 1570 },
    { "epoch": 2.38, "grad_norm": 6.852784633636475, "learning_rate": 9.762406015037594e-06, "loss": 0.7113, "step": 1580 },
    { "epoch": 2.39, "grad_norm": 9.9766263961792, "learning_rate": 9.7609022556391e-06, "loss": 0.8024, "step": 1590 },
    { "epoch": 2.41, "grad_norm": 9.095175743103027, "learning_rate": 9.759398496240601e-06, "loss": 0.8774, "step": 1600 },
    { "epoch": 2.42, "grad_norm": 5.913175106048584, "learning_rate": 9.757894736842106e-06, "loss": 0.7608, "step": 1610 },
    { "epoch": 2.44, "grad_norm": 9.544361114501953, "learning_rate": 9.75639097744361e-06, "loss": 0.7984, "step": 1620 },
    { "epoch": 2.45, "grad_norm": 6.991225242614746, "learning_rate": 9.754887218045113e-06, "loss": 0.7942, "step": 1630 },
    { "epoch": 2.47, "grad_norm": 7.531531810760498, "learning_rate": 9.753383458646617e-06, "loss": 0.8005, "step": 1640 },
    { "epoch": 2.48, "grad_norm": 5.945763111114502, "learning_rate": 9.751879699248122e-06, "loss": 0.7673, "step": 1650 },
    { "epoch": 2.5, "grad_norm": 8.382121086120605, "learning_rate": 9.750375939849624e-06, "loss": 0.7966, "step": 1660 },
    { "epoch": 2.51, "grad_norm": 5.387685775756836, "learning_rate": 9.74887218045113e-06, "loss": 0.7892, "step": 1670 },
    { "epoch": 2.53, "grad_norm": 7.867427349090576, "learning_rate": 9.747368421052633e-06, "loss": 0.8002, "step": 1680 },
    { "epoch": 2.54, "grad_norm": 7.549880027770996, "learning_rate": 9.745864661654136e-06, "loss": 0.8879, "step": 1690 },
    { "epoch": 2.56, "grad_norm": 7.67978572845459, "learning_rate": 9.74436090225564e-06, "loss": 0.7849, "step": 1700 },
    { "epoch": 2.57, "grad_norm": 8.076873779296875, "learning_rate": 9.742857142857143e-06, "loss": 0.6503, "step": 1710 },
    { "epoch": 2.59, "grad_norm": 10.748533248901367, "learning_rate": 9.741353383458647e-06, "loss": 0.7868, "step": 1720 },
    { "epoch": 2.6, "grad_norm": 8.428750991821289, "learning_rate": 9.739849624060152e-06, "loss": 0.8195, "step": 1730 },
    { "epoch": 2.62, "grad_norm": 7.678562164306641, "learning_rate": 9.738345864661655e-06, "loss": 0.8428, "step": 1740 },
    { "epoch": 2.63, "grad_norm": 7.171645164489746, "learning_rate": 9.736842105263159e-06, "loss": 0.809, "step": 1750 },
    { "epoch": 2.65, "grad_norm": 7.041049003601074, "learning_rate": 9.735338345864663e-06, "loss": 0.7417, "step": 1760 },
    { "epoch": 2.66, "grad_norm": 9.66743278503418, "learning_rate": 9.733834586466166e-06, "loss": 0.7952, "step": 1770 },
    { "epoch": 2.68, "grad_norm": 9.864920616149902, "learning_rate": 9.73233082706767e-06, "loss": 0.8427, "step": 1780 },
    { "epoch": 2.69, "grad_norm": 10.242929458618164, "learning_rate": 9.730827067669175e-06, "loss": 0.7599, "step": 1790 },
    { "epoch": 2.71, "grad_norm": 8.138999938964844, "learning_rate": 9.729323308270677e-06, "loss": 0.7517, "step": 1800 },
    { "epoch": 2.72, "grad_norm": 7.668764114379883, "learning_rate": 9.727819548872182e-06, "loss": 0.7766, "step": 1810 },
    { "epoch": 2.74, "grad_norm": 6.978646278381348, "learning_rate": 9.726315789473685e-06, "loss": 0.7323, "step": 1820 },
    { "epoch": 2.75, "grad_norm": 8.794787406921387, "learning_rate": 9.724812030075189e-06, "loss": 0.8644, "step": 1830 },
    { "epoch": 2.77, "grad_norm": 10.154306411743164, "learning_rate": 9.723308270676692e-06, "loss": 0.8235, "step": 1840 },
    { "epoch": 2.78, "grad_norm": 9.513362884521484, "learning_rate": 9.721804511278196e-06, "loss": 0.792, "step": 1850 },
    { "epoch": 2.8, "grad_norm": 4.891651630401611, "learning_rate": 9.7203007518797e-06, "loss": 0.7343, "step": 1860 },
    { "epoch": 2.81, "grad_norm": 6.595260143280029, "learning_rate": 9.718796992481205e-06, "loss": 0.6451, "step": 1870 },
    { "epoch": 2.83, "grad_norm": 11.115670204162598, "learning_rate": 9.717293233082708e-06, "loss": 0.7841, "step": 1880 },
    { "epoch": 2.84, "grad_norm": 7.82785701751709, "learning_rate": 9.715789473684212e-06, "loss": 0.8396, "step": 1890 },
    { "epoch": 2.86, "grad_norm": 5.41937780380249, "learning_rate": 9.714285714285715e-06, "loss": 0.772, "step": 1900 },
    { "epoch": 2.87, "grad_norm": 8.092954635620117, "learning_rate": 9.712781954887219e-06, "loss": 0.7124, "step": 1910 },
    { "epoch": 2.89, "grad_norm": 4.913546562194824, "learning_rate": 9.711278195488722e-06, "loss": 0.7824, "step": 1920 },
    { "epoch": 2.9, "grad_norm": 6.090660572052002, "learning_rate": 9.709774436090227e-06, "loss": 0.7911, "step": 1930 },
    { "epoch": 2.92, "grad_norm": 5.547027111053467, "learning_rate": 9.70827067669173e-06, "loss": 0.7818, "step": 1940 },
    { "epoch": 2.93, "grad_norm": 8.583475112915039, "learning_rate": 9.706766917293234e-06, "loss": 0.7272, "step": 1950 },
    { "epoch": 2.95, "grad_norm": 8.129578590393066, "learning_rate": 9.705263157894738e-06, "loss": 0.7352, "step": 1960 },
    { "epoch": 2.96, "grad_norm": 9.513014793395996, "learning_rate": 9.703759398496242e-06, "loss": 0.8126, "step": 1970 },
    { "epoch": 2.98, "grad_norm": 5.819597244262695, "learning_rate": 9.702255639097745e-06, "loss": 0.7599, "step": 1980 },
    { "epoch": 2.99, "grad_norm": 7.391184329986572, "learning_rate": 9.700751879699249e-06, "loss": 0.743, "step": 1990 },
    { "epoch": 3.0, "eval_accuracy": 0.9207, "eval_loss": 0.3750178813934326, "eval_runtime": 84.5377, "eval_samples_per_second": 118.29, "eval_steps_per_second": 0.473, "step": 1995 },
    { "epoch": 3.01, "grad_norm": 6.347775459289551, "learning_rate": 9.699248120300752e-06, "loss": 0.6685, "step": 2000 },
    { "epoch": 3.02, "grad_norm": 6.109332084655762, "learning_rate": 9.697744360902256e-06, "loss": 0.7398, "step": 2010 },
    { "epoch": 3.04, "grad_norm": 4.770040512084961, "learning_rate": 9.69624060150376e-06, "loss": 0.7587, "step": 2020 },
    { "epoch": 3.05, "grad_norm": 7.322962760925293, "learning_rate": 9.694736842105263e-06, "loss": 0.7917, "step": 2030 },
    { "epoch": 3.07, "grad_norm": 9.097600936889648, "learning_rate": 9.693233082706768e-06, "loss": 0.6967, "step": 2040 },
    { "epoch": 3.08, "grad_norm": 10.507075309753418, "learning_rate": 9.691729323308271e-06, "loss": 0.7229, "step": 2050 },
    { "epoch": 3.1, "grad_norm": 6.249164581298828, "learning_rate": 9.690225563909775e-06, "loss": 0.6491, "step": 2060 },
    { "epoch": 3.11, "grad_norm": 7.525278568267822, "learning_rate": 9.688721804511278e-06, "loss": 0.6476, "step": 2070 },
    { "epoch": 3.13, "grad_norm": 9.564391136169434, "learning_rate": 9.687218045112784e-06, "loss": 0.7133, "step": 2080 },
    { "epoch": 3.14, "grad_norm": 6.4053955078125, "learning_rate": 9.685714285714285e-06, "loss": 0.8012, "step": 2090 },
    { "epoch": 3.16, "grad_norm": 6.6741251945495605, "learning_rate": 9.68421052631579e-06, "loss": 0.7507, "step": 2100 },
    { "epoch": 3.17, "grad_norm": 12.398811340332031, "learning_rate": 9.682706766917294e-06, "loss": 0.7138, "step": 2110 },
    { "epoch": 3.19, "grad_norm": 8.505881309509277, "learning_rate": 9.681203007518798e-06, "loss": 0.7136, "step": 2120 },
    { "epoch": 3.2, "grad_norm": 5.51025915145874, "learning_rate": 9.679699248120301e-06, "loss": 0.6037, "step": 2130 },
    { "epoch": 3.22, "grad_norm": 6.39398193359375, "learning_rate": 9.678195488721805e-06, "loss": 0.6799, "step": 2140 },
    { "epoch": 3.23, "grad_norm": 6.508944511413574, "learning_rate": 9.676691729323308e-06, "loss": 0.6587, "step": 2150 },
    { "epoch": 3.25, "grad_norm": 7.027959823608398, "learning_rate": 9.675187969924813e-06, "loss": 0.7096, "step": 2160 },
    { "epoch": 3.26, "grad_norm": 8.690469741821289, "learning_rate": 9.673684210526317e-06, "loss": 0.6169, "step": 2170 },
    { "epoch": 3.28, "grad_norm": 9.489269256591797, "learning_rate": 9.67218045112782e-06, "loss": 0.6643, "step": 2180 },
    { "epoch": 3.29, "grad_norm": 12.934528350830078, "learning_rate": 9.670676691729324e-06, "loss": 0.7485, "step": 2190 },
    { "epoch": 3.31, "grad_norm": 9.072815895080566, "learning_rate": 9.669172932330828e-06, "loss": 0.704, "step": 2200 },
    { "epoch": 3.32, "grad_norm": 7.934593200683594, "learning_rate": 9.667669172932331e-06, "loss": 0.7134, "step": 2210 },
    { "epoch": 3.34, "grad_norm": 6.846796989440918, "learning_rate": 9.666165413533836e-06, "loss": 0.7372, "step": 2220 },
    { "epoch": 3.35, "grad_norm": 6.8362250328063965, "learning_rate": 9.664661654135338e-06, "loss": 0.6854, "step": 2230 },
    { "epoch": 3.37, "grad_norm": 8.184903144836426, "learning_rate": 9.663157894736843e-06, "loss": 0.6662, "step": 2240 },
    { "epoch": 3.38, "grad_norm": 8.972626686096191, "learning_rate": 9.661654135338347e-06, "loss": 0.7895, "step": 2250 },
    { "epoch": 3.4, "grad_norm": 6.524502754211426, "learning_rate": 9.66015037593985e-06, "loss": 0.8453, "step": 2260 },
    { "epoch": 3.41, "grad_norm": 6.215096950531006, "learning_rate": 9.658646616541354e-06, "loss": 0.716, "step": 2270 },
    { "epoch": 3.43, "grad_norm": 6.9516401290893555, "learning_rate": 9.657142857142859e-06, "loss": 0.7453, "step": 2280 },
    { "epoch": 3.44, "grad_norm": 9.131119728088379, "learning_rate": 9.655639097744361e-06, "loss": 0.6474, "step": 2290 },
    { "epoch": 3.46, "grad_norm": 7.063914775848389, "learning_rate": 9.654135338345866e-06, "loss": 0.6535, "step": 2300 },
    { "epoch": 3.47, "grad_norm": 9.410021781921387, "learning_rate": 9.65263157894737e-06, "loss": 0.708, "step": 2310 },
    { "epoch": 3.49, "grad_norm": 7.179042816162109, "learning_rate": 9.651127819548873e-06, "loss": 0.7098, "step": 2320 },
    { "epoch": 3.5, "grad_norm": 8.097248077392578, "learning_rate": 9.649624060150377e-06, "loss": 0.7052, "step": 2330 },
    { "epoch": 3.52, "grad_norm": 4.258429050445557, "learning_rate": 9.64812030075188e-06, "loss": 0.6803, "step": 2340 },
    { "epoch": 3.53, "grad_norm": 7.451633930206299, "learning_rate": 9.646616541353384e-06, "loss": 0.6569, "step": 2350 },
    { "epoch": 3.55, "grad_norm": 9.551535606384277, "learning_rate": 9.645112781954889e-06, "loss": 0.6411, "step": 2360 },
    { "epoch": 3.56, "grad_norm": 6.456385612487793, "learning_rate": 9.643609022556392e-06, "loss": 0.5938, "step": 2370 },
    { "epoch": 3.58, "grad_norm": 5.947078704833984, "learning_rate": 9.642105263157896e-06, "loss": 0.6175, "step": 2380 },
    { "epoch": 3.59, "grad_norm": 5.279054164886475, "learning_rate": 9.6406015037594e-06, "loss": 0.6788, "step": 2390 },
    { "epoch": 3.61, "grad_norm": 7.593316555023193, "learning_rate": 9.639097744360903e-06, "loss": 0.6255, "step": 2400 },
    { "epoch": 3.62, "grad_norm": 7.478080749511719, "learning_rate": 9.637593984962407e-06, "loss": 0.7908, "step": 2410 },
    { "epoch": 3.64, "grad_norm": 9.64027214050293, "learning_rate": 9.636090225563912e-06, "loss": 0.7241, "step": 2420 },
    { "epoch": 3.65, "grad_norm": 14.006696701049805, "learning_rate": 9.634586466165414e-06, "loss": 0.686, "step": 2430 },
    { "epoch": 3.67, "grad_norm": 5.0339789390563965, "learning_rate": 9.633082706766919e-06, "loss": 0.7082, "step": 2440 },
    { "epoch": 3.68, "grad_norm": 8.148447036743164, "learning_rate": 9.631578947368422e-06, "loss": 0.6859, "step": 2450 },
    { "epoch": 3.7, "grad_norm": 7.614720344543457, "learning_rate": 9.630075187969926e-06, "loss": 0.7117, "step": 2460 },
    { "epoch": 3.71, "grad_norm": 9.017003059387207, "learning_rate": 9.62857142857143e-06, "loss": 0.6505, "step": 2470 },
    { "epoch": 3.73, "grad_norm": 6.4466986656188965, "learning_rate": 9.627067669172933e-06, "loss": 0.71, "step": 2480 },
    { "epoch": 3.74, "grad_norm": 7.973327159881592, "learning_rate": 9.625563909774436e-06, "loss": 0.6466, "step": 2490 },
    { "epoch": 3.76, "grad_norm": 6.712606906890869, "learning_rate": 9.62406015037594e-06, "loss": 0.6767, "step": 2500 },
    { "epoch": 3.77, "grad_norm": 8.149372100830078, "learning_rate": 9.622556390977445e-06, "loss": 0.783, "step": 2510 },
    { "epoch": 3.79, "grad_norm": 8.645270347595215, "learning_rate": 9.621052631578947e-06, "loss": 0.6978, "step": 2520 },
    { "epoch": 3.8, "grad_norm": 9.347142219543457, "learning_rate": 9.619548872180452e-06, "loss": 0.583, "step": 2530 },
    { "epoch": 3.82, "grad_norm": 7.905392169952393, "learning_rate": 9.618045112781956e-06, "loss": 0.6884, "step": 2540 },
    { "epoch": 3.83, "grad_norm": 8.783331871032715, "learning_rate": 9.61654135338346e-06, "loss": 0.691, "step": 2550 },
    { "epoch": 3.85, "grad_norm": 8.456209182739258, "learning_rate": 9.615037593984963e-06, "loss": 0.7281, "step": 2560 },
    { "epoch": 3.86, "grad_norm": 6.667693138122559, "learning_rate": 9.613533834586468e-06, "loss": 0.6912, "step": 2570 },
    { "epoch": 3.88, "grad_norm": 8.541569709777832, "learning_rate": 9.61203007518797e-06, "loss": 0.6853, "step": 2580 },
    { "epoch": 3.89, "grad_norm": 4.732927322387695, "learning_rate": 9.610526315789475e-06, "loss": 0.6647, "step": 2590 },
    { "epoch": 3.91, "grad_norm": 7.604156017303467, "learning_rate": 9.609022556390978e-06, "loss": 0.7526, "step": 2600 },
    { "epoch": 3.92, "grad_norm": 8.218050956726074, "learning_rate": 9.607518796992482e-06, "loss": 0.6828, "step": 2610 },
    { "epoch": 3.94, "grad_norm": 5.613206386566162, "learning_rate": 9.606015037593985e-06, "loss": 0.6964, "step": 2620 },
    { "epoch": 3.95, "grad_norm": 9.644120216369629, "learning_rate": 9.604511278195489e-06, "loss": 0.6912, "step": 2630 },
    { "epoch": 3.97, "grad_norm": 8.14504337310791, "learning_rate": 9.603007518796993e-06, "loss": 0.7527, "step": 2640 },
    { "epoch": 3.98, "grad_norm": 6.1560468673706055, "learning_rate": 9.601503759398498e-06, "loss": 0.6145, "step": 2650 },
    { "epoch": 4.0, "grad_norm": 20.564706802368164, "learning_rate": 9.600000000000001e-06, "loss": 0.6935, "step": 2660 },
    { "epoch": 4.0, "eval_accuracy": 0.9236, "eval_loss": 0.319810152053833, "eval_runtime": 84.5508, "eval_samples_per_second": 118.272, "eval_steps_per_second": 0.473, "step": 2660 },
    { "epoch": 4.02, "grad_norm": 9.448854446411133, "learning_rate": 9.598496240601505e-06, "loss": 0.631, "step": 2670 },
    { "epoch": 4.03, "grad_norm": 10.194000244140625, "learning_rate": 9.596992481203008e-06, "loss": 0.6046, "step": 2680 },
    { "epoch": 4.05, "grad_norm": 8.277205467224121, "learning_rate": 9.595488721804512e-06, "loss": 0.5853, "step": 2690 },
    { "epoch": 4.06, "grad_norm": 7.616865158081055, "learning_rate": 9.593984962406015e-06, "loss": 0.6168, "step": 2700 },
    { "epoch": 4.08, "grad_norm": 5.158208847045898, "learning_rate": 9.59248120300752e-06, "loss": 0.5893, "step": 2710 },
    { "epoch": 4.09, "grad_norm": 10.609253883361816, "learning_rate": 9.590977443609022e-06, "loss": 0.5819, "step": 2720 },
    { "epoch": 4.11, "grad_norm": 7.288332462310791, "learning_rate": 9.589473684210528e-06, "loss": 0.6384, "step": 2730 },
    { "epoch": 4.12, "grad_norm": 6.625866889953613, "learning_rate": 9.587969924812031e-06, "loss": 0.6841, "step": 2740 },
    { "epoch": 4.14, "grad_norm": 8.38702392578125, "learning_rate": 9.586466165413535e-06, "loss": 0.5815, "step": 2750 },
    { "epoch": 4.15, "grad_norm": 6.58852481842041, "learning_rate": 9.584962406015038e-06, "loss": 0.7091, "step": 2760 },
    { "epoch": 4.17, "grad_norm": 5.776881217956543, "learning_rate": 9.583458646616542e-06, "loss": 0.6426, "step": 2770 },
    { "epoch": 4.18, "grad_norm": 7.806540489196777, "learning_rate": 9.581954887218045e-06, "loss": 0.6459, "step": 2780 },
    { "epoch": 4.2, "grad_norm": 7.378940582275391, "learning_rate": 9.58045112781955e-06, "loss": 0.6553, "step": 2790 },
    { "epoch": 4.21, "grad_norm": 8.37366008758545, "learning_rate": 9.578947368421054e-06, "loss": 0.6024, "step": 2800 },
    { "epoch": 4.23, "grad_norm": 5.783264636993408, "learning_rate": 9.577443609022557e-06, "loss": 0.5959, "step": 2810 },
    { "epoch": 4.24, "grad_norm": 6.4687676429748535, "learning_rate": 9.575939849624061e-06, "loss": 0.5669, "step": 2820 },
    { "epoch": 4.26, "grad_norm": 10.575803756713867, "learning_rate": 9.574436090225564e-06, "loss": 0.6461, "step": 2830 },
    { "epoch": 4.27, "grad_norm": 9.703124046325684, "learning_rate": 9.572932330827068e-06, "loss": 0.5784, "step": 2840 },
    { "epoch": 4.29, "grad_norm": 9.454757690429688, "learning_rate": 9.571428571428573e-06, "loss": 0.7381, "step": 2850 },
    { "epoch": 4.3, "grad_norm": 7.034806728363037, "learning_rate": 9.569924812030075e-06, "loss": 0.6277, "step": 2860 },
    { "epoch": 4.32, "grad_norm": 10.1060791015625, "learning_rate": 9.56842105263158e-06, "loss": 0.6973, "step": 2870 },
    { "epoch": 4.33, "grad_norm": 7.225138187408447, "learning_rate": 9.566917293233084e-06, "loss": 0.587, "step": 2880 },
    { "epoch": 4.35, "grad_norm": 6.221525192260742, "learning_rate": 9.565413533834587e-06, "loss": 0.6006, "step": 2890 },
    { "epoch": 4.36, "grad_norm": 6.329552173614502, "learning_rate": 9.56390977443609e-06, "loss": 0.6474, "step": 2900 },
    { "epoch": 4.38, "grad_norm": 8.411649703979492, "learning_rate": 9.562406015037596e-06, "loss": 0.5376, "step": 2910 },
    { "epoch": 4.39, "grad_norm": 8.27790355682373, "learning_rate": 9.560902255639098e-06, "loss": 0.6324, "step": 2920 },
    { "epoch": 4.41, "grad_norm": 6.995235443115234, "learning_rate": 9.559398496240603e-06, "loss": 0.6178, "step": 2930 },
    { "epoch": 4.42, "grad_norm": 8.169748306274414, "learning_rate": 9.557894736842107e-06, "loss": 0.5546, "step": 2940 },
    { "epoch": 4.44, "grad_norm": 7.832982063293457, "learning_rate": 9.55639097744361e-06, "loss": 0.5873, "step": 2950 },
    { "epoch": 4.45, "grad_norm": 7.024545192718506, "learning_rate": 9.554887218045114e-06, "loss": 0.5919, "step": 2960 },
    { "epoch": 4.47, "grad_norm": 8.610920906066895, "learning_rate": 9.553383458646617e-06, "loss": 0.6324, "step": 2970 },
    { "epoch": 4.48, "grad_norm": 8.49885368347168, "learning_rate": 9.55187969924812e-06, "loss": 0.6392, "step": 2980 },
    { "epoch": 4.5, "grad_norm": 6.013737678527832, "learning_rate": 9.550375939849624e-06, "loss": 0.6267, "step": 2990 },
    { "epoch": 4.51, "grad_norm": 9.457529067993164, "learning_rate": 9.54887218045113e-06, "loss": 0.633, "step": 3000 },
    { "epoch": 4.53, "grad_norm": 7.126248359680176, "learning_rate": 9.547368421052631e-06, "loss": 0.5527, "step": 3010 },
    { "epoch": 4.54, "grad_norm": 8.481447219848633, "learning_rate": 9.545864661654136e-06, "loss": 0.7163, "step": 3020 },
    { "epoch": 4.56, "grad_norm": 10.300518035888672, "learning_rate": 9.54436090225564e-06, "loss": 0.5921, "step": 3030 },
    { "epoch": 4.57, "grad_norm": 8.265804290771484, "learning_rate": 9.542857142857143e-06, "loss": 0.5952, "step": 3040 },
    { "epoch": 4.59, "grad_norm": 5.028606414794922, "learning_rate": 9.541353383458647e-06, "loss": 0.6269, "step": 3050 },
    { "epoch": 4.6, "grad_norm": 8.997878074645996, "learning_rate": 9.53984962406015e-06, "loss": 0.5761, "step": 3060 },
    { "epoch": 4.62, "grad_norm": 9.585675239562988, "learning_rate": 9.538345864661654e-06, "loss": 0.5851, "step": 3070 },
    { "epoch": 4.63, "grad_norm": 5.83755350112915, "learning_rate": 9.53684210526316e-06, "loss": 0.6105, "step": 3080 },
    { "epoch": 4.65, "grad_norm": 5.208207607269287, "learning_rate": 9.535338345864663e-06, "loss": 0.5786, "step": 3090 },
    { "epoch": 4.66, "grad_norm": 9.895461082458496, "learning_rate": 9.533834586466166e-06, "loss": 0.623, "step": 3100 },
    { "epoch": 4.68, "grad_norm": 8.958138465881348, "learning_rate": 9.53233082706767e-06, "loss": 0.5708, "step": 3110 },
    { "epoch": 4.69, "grad_norm": 10.452126502990723, "learning_rate": 9.530827067669173e-06, "loss": 0.5694, "step": 3120 },
    { "epoch": 4.71, "grad_norm": 7.20021915435791, "learning_rate": 9.529323308270677e-06, "loss": 0.6267, "step": 3130 },
    { "epoch": 4.72, "grad_norm": 7.995909690856934, "learning_rate": 9.527819548872182e-06, "loss": 0.6989, "step": 3140 },
    { "epoch": 4.74, "grad_norm": 6.9314985275268555, "learning_rate": 9.526315789473684e-06, "loss": 0.6093, "step": 3150 },
    { "epoch": 4.75, "grad_norm": 10.158616065979004, "learning_rate": 9.524812030075189e-06, "loss": 0.5696, "step": 3160 },
    { "epoch": 4.77, "grad_norm": 7.637181758880615, "learning_rate": 9.523308270676693e-06, "loss": 0.7494, "step": 3170 },
    { "epoch": 4.78, "grad_norm": 7.443474769592285, "learning_rate": 9.521804511278196e-06, "loss": 0.7636, "step": 3180 },
    { "epoch": 4.8, "grad_norm": 6.130582809448242, "learning_rate": 9.5203007518797e-06, "loss": 0.7397, "step": 3190 },
    { "epoch": 4.81, "grad_norm": 8.699774742126465, "learning_rate": 9.518796992481205e-06, "loss": 0.5461, "step": 3200 },
    { "epoch": 4.83, "grad_norm": 8.060851097106934, "learning_rate": 9.517293233082707e-06, "loss": 0.5424, "step": 3210 },
    { "epoch": 4.84, "grad_norm": 6.084632396697998, "learning_rate": 9.515789473684212e-06, "loss": 0.6181, "step": 3220 },
    { "epoch": 4.86, "grad_norm": 8.804571151733398, "learning_rate": 9.514285714285715e-06, "loss": 0.6696, "step": 3230 },
    { "epoch": 4.87, "grad_norm": 8.552626609802246, "learning_rate": 9.512781954887219e-06, "loss": 0.6237, "step": 3240 },
    { "epoch": 4.89, "grad_norm": 8.930567741394043, "learning_rate": 9.511278195488722e-06, "loss": 0.6427, "step": 3250 },
    { "epoch": 4.9, "grad_norm": 8.916244506835938, "learning_rate": 9.509774436090226e-06, "loss": 0.608, "step": 3260 },
    { "epoch": 4.92, "grad_norm": 12.679169654846191, "learning_rate": 9.50827067669173e-06, "loss": 0.622, "step": 3270 },
    { "epoch": 4.93, "grad_norm": 9.21071720123291, "learning_rate": 9.506766917293235e-06, "loss": 0.6153, "step": 3280 },
    { "epoch": 4.95, "grad_norm": 8.040297508239746, "learning_rate": 9.505263157894738e-06, "loss": 0.654, "step": 3290 },
    { "epoch": 4.96, "grad_norm": 6.395382404327393, "learning_rate": 9.503759398496242e-06, "loss": 0.6729, "step": 3300 },
    { "epoch": 4.98, "grad_norm": 8.437057495117188, "learning_rate": 9.502255639097745e-06, "loss": 0.6457, "step": 3310 },
    { "epoch": 4.99, "grad_norm": 7.987279415130615, "learning_rate": 9.500751879699249e-06, "loss": 0.6159, "step": 3320 },
    { "epoch": 5.0, "eval_accuracy": 0.9289, "eval_loss": 0.2945062816143036, "eval_runtime": 84.9367, "eval_samples_per_second": 117.735, "eval_steps_per_second": 0.471, "step": 3325 },
    { "epoch": 5.01, "grad_norm": 6.12150239944458, "learning_rate": 9.499248120300752e-06, "loss": 0.4871, "step": 3330 },
    { "epoch": 5.02, "grad_norm": 8.20666217803955, "learning_rate": 9.497744360902257e-06, "loss": 0.6011, "step": 3340 },
    { "epoch": 5.04, "grad_norm": 8.818642616271973, "learning_rate": 9.49624060150376e-06, "loss": 0.5681, "step": 3350 },
    { "epoch": 5.05, "grad_norm": 5.606151103973389, "learning_rate": 9.494736842105265e-06, "loss": 0.5494, "step": 3360 },
    { "epoch": 5.07, "grad_norm": 6.230663299560547, "learning_rate": 9.493233082706768e-06, "loss": 0.6138, "step": 3370 },
    { "epoch": 5.08, "grad_norm": 6.923035621643066, "learning_rate": 9.491729323308272e-06, "loss": 0.6398, "step": 3380 },
    { "epoch": 5.1, "grad_norm": 8.464035034179688, "learning_rate": 9.490225563909775e-06, "loss": 0.533, "step": 3390 },
    { "epoch": 5.11, "grad_norm": 5.440852165222168, "learning_rate": 9.488721804511279e-06, "loss": 0.5901, "step": 3400 },
    { "epoch": 5.13, "grad_norm": 6.880829334259033, "learning_rate": 9.487218045112782e-06, "loss": 0.5699, "step": 3410 },
    { "epoch": 5.14, "grad_norm": 6.773617267608643, "learning_rate": 9.485714285714287e-06, "loss": 0.5164, "step": 3420 },
    { "epoch": 5.16, "grad_norm": 6.794729232788086, "learning_rate": 9.484210526315791e-06, "loss": 0.4939, "step": 3430 },
    { "epoch": 5.17, "grad_norm": 8.347722053527832, "learning_rate": 9.482706766917294e-06, "loss": 0.6138, "step": 3440 },
    { "epoch": 5.19, "grad_norm": 6.279055595397949, "learning_rate": 9.481203007518798e-06, "loss": 0.4792, "step": 3450 },
    { "epoch": 5.2, "grad_norm": 7.93798828125, "learning_rate": 9.479699248120301e-06, "loss": 0.5945, "step": 3460 },
    { "epoch": 5.22, "grad_norm": 6.767178535461426, "learning_rate": 9.478195488721805e-06, "loss": 0.5878, "step": 3470 },
    { "epoch": 5.23, "grad_norm": 6.87293004989624, "learning_rate": 9.476691729323308e-06, "loss": 0.566, "step": 3480 },
    { "epoch": 5.25, "grad_norm": 2.407437562942505, "learning_rate": 9.475187969924814e-06, "loss": 0.5014, "step": 3490 },
    { "epoch": 5.26, "grad_norm": 8.233712196350098, "learning_rate": 9.473684210526315e-06, "loss": 0.5879, "step": 3500 },
    { "epoch": 5.28, "grad_norm": 7.905375003814697, "learning_rate": 9.47218045112782e-06, "loss": 0.6127, "step": 3510 },
    { "epoch": 5.29, "grad_norm": 5.8037238121032715, "learning_rate": 9.470676691729324e-06, "loss": 0.6048, "step": 3520 },
    { "epoch": 5.31, "grad_norm": 9.2665433883667, "learning_rate": 9.469172932330828e-06, "loss": 0.6233, "step": 3530 },
    { "epoch": 5.32, "grad_norm": 5.650614261627197, "learning_rate": 9.467669172932331e-06, "loss": 0.5703, "step": 3540 },
    { "epoch": 5.34, "grad_norm": 5.246155738830566, "learning_rate": 9.466165413533835e-06, "loss": 0.5108, "step": 3550 },
    { "epoch": 5.35, "grad_norm": 8.701322555541992, "learning_rate": 9.464661654135338e-06, "loss": 0.5783, "step": 3560 },
    { "epoch": 5.37, "grad_norm": 5.870892524719238, "learning_rate": 9.463157894736844e-06, "loss": 0.5546, "step": 3570 },
    { "epoch": 5.38, "grad_norm": 8.061163902282715, "learning_rate": 9.461654135338347e-06, "loss": 0.5973, "step": 3580 },
    { "epoch": 5.4, "grad_norm": 4.166900157928467, "learning_rate": 9.46015037593985e-06, "loss": 0.6042, "step": 3590 },
    { "epoch": 5.41, "grad_norm": 5.8524346351623535, "learning_rate": 9.458646616541354e-06, "loss": 0.5307, "step": 3600 },
    { "epoch": 5.43, "grad_norm": 7.229081153869629, "learning_rate": 9.457142857142858e-06, "loss": 0.6533, "step": 3610 },
    { "epoch": 5.44, "grad_norm": 5.403026580810547, "learning_rate": 9.455639097744361e-06, "loss": 0.5059, "step": 3620 },
    { "epoch": 5.46, "grad_norm": 7.53814697265625, "learning_rate": 9.454135338345866e-06, "loss": 0.6576, "step": 3630 },
    { "epoch": 5.47, "grad_norm": 8.08530330657959, "learning_rate": 9.452631578947368e-06, "loss": 0.5881, "step": 3640 },
    { "epoch": 5.49, "grad_norm": 7.80808687210083, "learning_rate": 9.451127819548873e-06, "loss": 0.5725, "step": 3650 },
    { "epoch": 5.5, "grad_norm": 4.368475914001465, "learning_rate": 9.449624060150377e-06, "loss": 0.5538, "step": 3660 },
    { "epoch": 5.52, "grad_norm": 7.806415557861328, "learning_rate": 9.44812030075188e-06, "loss": 0.6739, "step": 3670 },
    { "epoch": 5.53, "grad_norm": 8.047362327575684, "learning_rate": 9.446616541353384e-06, "loss": 0.5229, "step": 3680 },
    { "epoch": 5.55, "grad_norm": 5.89243221282959, "learning_rate": 9.445112781954887e-06, "loss": 0.599, "step": 3690 },
    { "epoch": 5.56, "grad_norm": 6.812804222106934, "learning_rate": 9.443609022556391e-06, "loss": 0.6213, "step": 3700 },
    { "epoch": 5.58, "grad_norm": 7.177376747131348, "learning_rate": 9.442105263157896e-06, "loss": 0.5568, "step": 3710 },
    { "epoch": 5.59, "grad_norm": 4.684938907623291, "learning_rate": 9.4406015037594e-06, "loss": 0.5266, "step": 3720 },
    { "epoch": 5.61, "grad_norm": 9.440715789794922, "learning_rate": 9.439097744360903e-06, "loss": 0.6431, "step": 3730 },
    { "epoch": 5.62, "grad_norm": 6.564364910125732, "learning_rate": 9.437593984962407e-06, "loss": 0.4702, "step": 3740 },
    { "epoch": 5.64, "grad_norm": 7.499642372131348, "learning_rate": 9.43609022556391e-06, "loss": 0.5249, "step": 3750 },
    { "epoch": 5.65, "grad_norm": 5.27653169631958, "learning_rate": 9.434586466165414e-06, "loss": 0.4517, "step": 3760 },
    { "epoch": 5.67, "grad_norm": 4.881272792816162, "learning_rate": 9.433082706766919e-06, "loss": 0.4682, "step": 3770 },
    { "epoch": 5.68, "grad_norm": 9.005290985107422, "learning_rate": 9.43157894736842e-06, "loss": 0.5642, "step": 3780 },
    { "epoch": 5.7, "grad_norm": 6.421182155609131, "learning_rate": 9.430075187969926e-06, "loss": 0.4855, "step": 3790 },
    { "epoch": 5.71, "grad_norm": 7.035130023956299, "learning_rate": 9.42857142857143e-06, "loss": 0.6362, "step": 3800 },
    { "epoch": 5.73, "grad_norm": 5.77438497543335, "learning_rate": 9.427067669172933e-06, "loss": 0.5876, "step": 3810 },
    { "epoch": 5.74, "grad_norm": 8.328043937683105, "learning_rate": 9.425563909774437e-06, "loss": 0.4768, "step": 3820 },
    { "epoch": 5.76, "grad_norm": 5.7907586097717285, "learning_rate": 9.424060150375942e-06, "loss": 0.5941, "step": 3830 },
    { "epoch": 5.77, "grad_norm": 8.730267524719238, "learning_rate": 9.422556390977444e-06, "loss": 0.6426, "step": 3840 },
    { "epoch": 5.79, "grad_norm": 8.710532188415527, "learning_rate": 9.421052631578949e-06, "loss": 0.5909, "step": 3850 },
    { "epoch": 5.8, "grad_norm": 8.74202823638916, "learning_rate": 9.419548872180452e-06, "loss": 0.5735, "step": 3860 },
    { "epoch": 5.82, "grad_norm": 7.489967346191406, "learning_rate": 9.418045112781956e-06, "loss": 0.6072, "step": 3870 },
    { "epoch": 5.83, "grad_norm": 6.942547798156738, "learning_rate": 9.41654135338346e-06, "loss": 0.5513, "step": 3880 },
    { "epoch": 5.85, "grad_norm": 5.517817497253418, "learning_rate": 9.415037593984963e-06, "loss": 0.5317, "step": 3890 },
    { "epoch": 5.86, "grad_norm": 6.26224946975708, "learning_rate": 9.413533834586466e-06, "loss": 0.6014, "step": 3900 },
    { "epoch": 5.88, "grad_norm": 4.848892688751221, "learning_rate": 9.412030075187972e-06, "loss": 0.5628, "step": 3910 },
    { "epoch": 5.89, "grad_norm": 7.279343128204346, "learning_rate": 9.410526315789475e-06, "loss": 0.631, "step": 3920 },
    { "epoch": 5.91, "grad_norm": 5.791496753692627, "learning_rate": 9.409022556390979e-06, "loss": 0.5312, "step": 3930 },
    { "epoch": 5.92, "grad_norm": 4.935235977172852, "learning_rate": 9.407518796992482e-06, "loss": 0.4694, "step": 3940 },
    { "epoch": 5.94, "grad_norm": 5.741876125335693, "learning_rate": 9.406015037593986e-06, "loss": 0.6073, "step": 3950 },
    { "epoch": 5.95, "grad_norm": 5.398350715637207, "learning_rate": 9.40451127819549e-06, "loss": 0.6009, "step": 3960 },
    { "epoch": 5.97, "grad_norm": 6.093377590179443, "learning_rate": 9.403007518796994e-06, "loss": 0.5845, "step": 3970 },
    { "epoch": 5.98, "grad_norm": 8.6488676071167, "learning_rate": 9.401503759398496e-06, "loss": 0.5932, "step": 3980 },
    {
      "epoch": 6.0,
|
"grad_norm": 3.0173494815826416, |
|
"learning_rate": 9.4e-06, |
|
"loss": 0.4423, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.925, |
|
"eval_loss": 0.2876322865486145, |
|
"eval_runtime": 85.2695, |
|
"eval_samples_per_second": 117.275, |
|
"eval_steps_per_second": 0.469, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"grad_norm": 4.103921890258789, |
|
"learning_rate": 9.398496240601505e-06, |
|
"loss": 0.5346, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"grad_norm": 4.625704288482666, |
|
"learning_rate": 9.396992481203009e-06, |
|
"loss": 0.5522, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"grad_norm": 7.251491546630859, |
|
"learning_rate": 9.395488721804512e-06, |
|
"loss": 0.6501, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"grad_norm": 8.962389945983887, |
|
"learning_rate": 9.393984962406016e-06, |
|
"loss": 0.5402, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"grad_norm": 6.207771301269531, |
|
"learning_rate": 9.392481203007519e-06, |
|
"loss": 0.5545, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"grad_norm": 5.253688812255859, |
|
"learning_rate": 9.390977443609023e-06, |
|
"loss": 0.5084, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"grad_norm": 7.277046203613281, |
|
"learning_rate": 9.389473684210528e-06, |
|
"loss": 0.5115, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"grad_norm": 5.671750068664551, |
|
"learning_rate": 9.38796992481203e-06, |
|
"loss": 0.5508, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"grad_norm": 3.9672038555145264, |
|
"learning_rate": 9.386466165413535e-06, |
|
"loss": 0.494, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"grad_norm": 6.129919052124023, |
|
"learning_rate": 9.384962406015038e-06, |
|
"loss": 0.5898, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"grad_norm": 6.198451519012451, |
|
"learning_rate": 9.383458646616542e-06, |
|
"loss": 0.5378, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"grad_norm": 9.286908149719238, |
|
"learning_rate": 9.381954887218045e-06, |
|
"loss": 0.5656, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"grad_norm": 6.862420082092285, |
|
"learning_rate": 9.38045112781955e-06, |
|
"loss": 0.5457, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"grad_norm": 5.948605537414551, |
|
"learning_rate": 9.378947368421052e-06, |
|
"loss": 0.5564, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"grad_norm": 7.1652116775512695, |
|
"learning_rate": 9.377443609022558e-06, |
|
"loss": 0.5624, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"grad_norm": 7.091752052307129, |
|
"learning_rate": 9.375939849624061e-06, |
|
"loss": 0.532, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"grad_norm": 4.2119903564453125, |
|
"learning_rate": 9.374436090225565e-06, |
|
"loss": 0.385, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"grad_norm": 9.477155685424805, |
|
"learning_rate": 9.372932330827068e-06, |
|
"loss": 0.5019, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"grad_norm": 7.294814109802246, |
|
"learning_rate": 9.371428571428572e-06, |
|
"loss": 0.556, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"grad_norm": 8.124314308166504, |
|
"learning_rate": 9.369924812030075e-06, |
|
"loss": 0.548, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"grad_norm": 6.1076483726501465, |
|
"learning_rate": 9.36842105263158e-06, |
|
"loss": 0.5228, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"grad_norm": 7.360411643981934, |
|
"learning_rate": 9.366917293233084e-06, |
|
"loss": 0.6335, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"grad_norm": 6.000509738922119, |
|
"learning_rate": 9.365413533834588e-06, |
|
"loss": 0.5727, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"grad_norm": 8.538400650024414, |
|
"learning_rate": 9.363909774436091e-06, |
|
"loss": 0.5135, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"grad_norm": 6.543038845062256, |
|
"learning_rate": 9.362406015037595e-06, |
|
"loss": 0.4859, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"grad_norm": 7.515405178070068, |
|
"learning_rate": 9.360902255639098e-06, |
|
"loss": 0.6172, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"grad_norm": 4.859574317932129, |
|
"learning_rate": 9.359398496240603e-06, |
|
"loss": 0.407, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"grad_norm": 5.684931755065918, |
|
"learning_rate": 9.357894736842105e-06, |
|
"loss": 0.5748, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"grad_norm": 5.063128471374512, |
|
"learning_rate": 9.35639097744361e-06, |
|
"loss": 0.5363, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"grad_norm": 9.34011173248291, |
|
"learning_rate": 9.354887218045114e-06, |
|
"loss": 0.5072, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"grad_norm": 6.302648544311523, |
|
"learning_rate": 9.353383458646617e-06, |
|
"loss": 0.4891, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"grad_norm": 6.268799781799316, |
|
"learning_rate": 9.351879699248121e-06, |
|
"loss": 0.6215, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"grad_norm": 5.54179573059082, |
|
"learning_rate": 9.350375939849624e-06, |
|
"loss": 0.5827, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"grad_norm": 6.042153835296631, |
|
"learning_rate": 9.348872180451128e-06, |
|
"loss": 0.5205, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"grad_norm": 7.558413982391357, |
|
"learning_rate": 9.347368421052633e-06, |
|
"loss": 0.5357, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"grad_norm": 7.838019847869873, |
|
"learning_rate": 9.345864661654137e-06, |
|
"loss": 0.5719, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"grad_norm": 10.056818008422852, |
|
"learning_rate": 9.34436090225564e-06, |
|
"loss": 0.542, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"grad_norm": 7.325047492980957, |
|
"learning_rate": 9.342857142857144e-06, |
|
"loss": 0.5564, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"grad_norm": 8.13595199584961, |
|
"learning_rate": 9.341353383458647e-06, |
|
"loss": 0.5106, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"grad_norm": 7.225549221038818, |
|
"learning_rate": 9.33984962406015e-06, |
|
"loss": 0.5714, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"grad_norm": 9.103632926940918, |
|
"learning_rate": 9.338345864661656e-06, |
|
"loss": 0.559, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"grad_norm": 5.579386234283447, |
|
"learning_rate": 9.336842105263158e-06, |
|
"loss": 0.5789, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"grad_norm": 9.875541687011719, |
|
"learning_rate": 9.335338345864663e-06, |
|
"loss": 0.5435, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"grad_norm": 10.33945083618164, |
|
"learning_rate": 9.333834586466166e-06, |
|
"loss": 0.4678, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"grad_norm": 8.180964469909668, |
|
"learning_rate": 9.33233082706767e-06, |
|
"loss": 0.5476, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"grad_norm": 8.327938079833984, |
|
"learning_rate": 9.330827067669174e-06, |
|
"loss": 0.5605, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"grad_norm": 6.9362311363220215, |
|
"learning_rate": 9.329323308270679e-06, |
|
"loss": 0.6099, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"grad_norm": 4.627447128295898, |
|
"learning_rate": 9.32781954887218e-06, |
|
"loss": 0.4521, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"grad_norm": 5.713562488555908, |
|
"learning_rate": 9.326315789473684e-06, |
|
"loss": 0.4532, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"grad_norm": 6.893897533416748, |
|
"learning_rate": 9.32481203007519e-06, |
|
"loss": 0.5298, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"grad_norm": 6.630578994750977, |
|
"learning_rate": 9.323308270676693e-06, |
|
"loss": 0.5226, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"grad_norm": 7.725119113922119, |
|
"learning_rate": 9.321804511278196e-06, |
|
"loss": 0.4581, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"grad_norm": 8.243720054626465, |
|
"learning_rate": 9.3203007518797e-06, |
|
"loss": 0.4821, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"grad_norm": 7.254865646362305, |
|
"learning_rate": 9.318796992481203e-06, |
|
"loss": 0.4623, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"grad_norm": 6.510406494140625, |
|
"learning_rate": 9.317293233082707e-06, |
|
"loss": 0.5073, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"grad_norm": 5.531012058258057, |
|
"learning_rate": 9.315789473684212e-06, |
|
"loss": 0.4842, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"grad_norm": 9.50185489654541, |
|
"learning_rate": 9.314285714285714e-06, |
|
"loss": 0.5485, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"grad_norm": 5.591551780700684, |
|
"learning_rate": 9.312781954887219e-06, |
|
"loss": 0.5397, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"grad_norm": 3.7708208560943604, |
|
"learning_rate": 9.311278195488723e-06, |
|
"loss": 0.4746, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"grad_norm": 5.826446533203125, |
|
"learning_rate": 9.309774436090226e-06, |
|
"loss": 0.503, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"grad_norm": 14.129280090332031, |
|
"learning_rate": 9.30827067669173e-06, |
|
"loss": 0.5729, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"grad_norm": 5.19706392288208, |
|
"learning_rate": 9.306766917293233e-06, |
|
"loss": 0.4967, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"grad_norm": 6.513811111450195, |
|
"learning_rate": 9.305263157894737e-06, |
|
"loss": 0.587, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"grad_norm": 7.2199506759643555, |
|
"learning_rate": 9.303759398496242e-06, |
|
"loss": 0.5733, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"grad_norm": 6.173489570617676, |
|
"learning_rate": 9.302255639097745e-06, |
|
"loss": 0.6391, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"grad_norm": 4.977587699890137, |
|
"learning_rate": 9.300751879699249e-06, |
|
"loss": 0.5506, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.9302, |
|
"eval_loss": 0.2616922855377197, |
|
"eval_runtime": 85.0513, |
|
"eval_samples_per_second": 117.576, |
|
"eval_steps_per_second": 0.47, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"grad_norm": 7.2303266525268555, |
|
"learning_rate": 9.299248120300753e-06, |
|
"loss": 0.4889, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"grad_norm": 5.8191914558410645, |
|
"learning_rate": 9.297744360902256e-06, |
|
"loss": 0.6557, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"grad_norm": 7.453029632568359, |
|
"learning_rate": 9.29624060150376e-06, |
|
"loss": 0.5975, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"grad_norm": 9.434555053710938, |
|
"learning_rate": 9.294736842105265e-06, |
|
"loss": 0.4769, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"grad_norm": 9.096846580505371, |
|
"learning_rate": 9.293233082706767e-06, |
|
"loss": 0.5521, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"grad_norm": 6.146598815917969, |
|
"learning_rate": 9.291729323308272e-06, |
|
"loss": 0.5191, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"grad_norm": 5.138683319091797, |
|
"learning_rate": 9.290225563909775e-06, |
|
"loss": 0.4883, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"grad_norm": 5.840444564819336, |
|
"learning_rate": 9.288721804511279e-06, |
|
"loss": 0.4901, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"grad_norm": 5.589585781097412, |
|
"learning_rate": 9.287218045112782e-06, |
|
"loss": 0.4574, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"grad_norm": 7.447097301483154, |
|
"learning_rate": 9.285714285714288e-06, |
|
"loss": 0.4672, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"grad_norm": 6.8820295333862305, |
|
"learning_rate": 9.28421052631579e-06, |
|
"loss": 0.5641, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"grad_norm": 5.46907901763916, |
|
"learning_rate": 9.282706766917295e-06, |
|
"loss": 0.4948, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"grad_norm": 5.4233527183532715, |
|
"learning_rate": 9.281203007518798e-06, |
|
"loss": 0.5507, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"grad_norm": 6.316089153289795, |
|
"learning_rate": 9.279699248120302e-06, |
|
"loss": 0.5791, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"grad_norm": 3.7618801593780518, |
|
"learning_rate": 9.278195488721805e-06, |
|
"loss": 0.4846, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"grad_norm": 6.426711082458496, |
|
"learning_rate": 9.276691729323309e-06, |
|
"loss": 0.4886, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"grad_norm": 6.98826265335083, |
|
"learning_rate": 9.275187969924812e-06, |
|
"loss": 0.4627, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"grad_norm": 6.147061824798584, |
|
"learning_rate": 9.273684210526317e-06, |
|
"loss": 0.4577, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"grad_norm": 7.308942794799805, |
|
"learning_rate": 9.272180451127821e-06, |
|
"loss": 0.4893, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"grad_norm": 8.406046867370605, |
|
"learning_rate": 9.270676691729324e-06, |
|
"loss": 0.4968, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"grad_norm": 4.631737232208252, |
|
"learning_rate": 9.269172932330828e-06, |
|
"loss": 0.4654, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"grad_norm": 3.802255868911743, |
|
"learning_rate": 9.267669172932331e-06, |
|
"loss": 0.5018, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"grad_norm": 7.958065986633301, |
|
"learning_rate": 9.266165413533835e-06, |
|
"loss": 0.5358, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"grad_norm": 4.825588703155518, |
|
"learning_rate": 9.26466165413534e-06, |
|
"loss": 0.5201, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"grad_norm": 4.964457035064697, |
|
"learning_rate": 9.263157894736842e-06, |
|
"loss": 0.4819, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"grad_norm": 11.642394065856934, |
|
"learning_rate": 9.261654135338347e-06, |
|
"loss": 0.5989, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"grad_norm": 9.31828498840332, |
|
"learning_rate": 9.26015037593985e-06, |
|
"loss": 0.5455, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"grad_norm": 7.8129963874816895, |
|
"learning_rate": 9.258646616541354e-06, |
|
"loss": 0.5542, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"grad_norm": 7.043788909912109, |
|
"learning_rate": 9.257142857142858e-06, |
|
"loss": 0.5153, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"grad_norm": 8.311758995056152, |
|
"learning_rate": 9.255639097744363e-06, |
|
"loss": 0.4802, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"grad_norm": 10.970717430114746, |
|
"learning_rate": 9.254135338345865e-06, |
|
"loss": 0.4495, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"grad_norm": 5.547107219696045, |
|
"learning_rate": 9.252631578947368e-06, |
|
"loss": 0.4824, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"grad_norm": 5.658668518066406, |
|
"learning_rate": 9.251127819548874e-06, |
|
"loss": 0.5722, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"grad_norm": 4.896615982055664, |
|
"learning_rate": 9.249624060150375e-06, |
|
"loss": 0.4936, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"grad_norm": 7.777392864227295, |
|
"learning_rate": 9.24812030075188e-06, |
|
"loss": 0.5078, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"grad_norm": 11.22333812713623, |
|
"learning_rate": 9.246616541353384e-06, |
|
"loss": 0.575, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"grad_norm": 6.031052589416504, |
|
"learning_rate": 9.245112781954888e-06, |
|
"loss": 0.4742, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"grad_norm": 11.427336692810059, |
|
"learning_rate": 9.243609022556391e-06, |
|
"loss": 0.5519, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"grad_norm": 6.76407527923584, |
|
"learning_rate": 9.242105263157896e-06, |
|
"loss": 0.5202, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"grad_norm": 7.091256618499756, |
|
"learning_rate": 9.240601503759398e-06, |
|
"loss": 0.5255, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"grad_norm": 6.818326473236084, |
|
"learning_rate": 9.239097744360903e-06, |
|
"loss": 0.442, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"grad_norm": 7.494906902313232, |
|
"learning_rate": 9.237593984962407e-06, |
|
"loss": 0.5126, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"grad_norm": 5.982577800750732, |
|
"learning_rate": 9.23609022556391e-06, |
|
"loss": 0.5056, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"grad_norm": 4.815781116485596, |
|
"learning_rate": 9.234586466165414e-06, |
|
"loss": 0.4896, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"grad_norm": 10.674721717834473, |
|
"learning_rate": 9.233082706766918e-06, |
|
"loss": 0.5209, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"grad_norm": 5.937568187713623, |
|
"learning_rate": 9.231578947368421e-06, |
|
"loss": 0.4772, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"grad_norm": 5.146367073059082, |
|
"learning_rate": 9.230075187969926e-06, |
|
"loss": 0.5032, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"grad_norm": 8.03272819519043, |
|
"learning_rate": 9.22857142857143e-06, |
|
"loss": 0.5072, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"grad_norm": 5.361180782318115, |
|
"learning_rate": 9.227067669172933e-06, |
|
"loss": 0.5057, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"grad_norm": 5.487973213195801, |
|
"learning_rate": 9.225563909774437e-06, |
|
"loss": 0.5253, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"grad_norm": 6.845251560211182, |
|
"learning_rate": 9.22406015037594e-06, |
|
"loss": 0.5436, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"grad_norm": 4.931974411010742, |
|
"learning_rate": 9.222556390977444e-06, |
|
"loss": 0.4227, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"grad_norm": 7.382147312164307, |
|
"learning_rate": 9.221052631578949e-06, |
|
"loss": 0.5022, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"grad_norm": 8.380685806274414, |
|
"learning_rate": 9.219548872180451e-06, |
|
"loss": 0.5259, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"grad_norm": 6.625802993774414, |
|
"learning_rate": 9.218045112781956e-06, |
|
"loss": 0.5213, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"grad_norm": 6.7276692390441895, |
|
"learning_rate": 9.21654135338346e-06, |
|
"loss": 0.4831, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"grad_norm": 5.930064678192139, |
|
"learning_rate": 9.215037593984963e-06, |
|
"loss": 0.4278, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"grad_norm": 5.958808422088623, |
|
"learning_rate": 9.213533834586467e-06, |
|
"loss": 0.5039, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"grad_norm": 8.592114448547363, |
|
"learning_rate": 9.21203007518797e-06, |
|
"loss": 0.48, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"grad_norm": 7.286666393280029, |
|
"learning_rate": 9.210526315789474e-06, |
|
"loss": 0.5513, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"grad_norm": 3.8542234897613525, |
|
"learning_rate": 9.209022556390979e-06, |
|
"loss": 0.4452, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"grad_norm": 8.812358856201172, |
|
"learning_rate": 9.207518796992482e-06, |
|
"loss": 0.4796, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"grad_norm": 7.729457378387451, |
|
"learning_rate": 9.206015037593986e-06, |
|
"loss": 0.5283, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"grad_norm": 5.370766639709473, |
|
"learning_rate": 9.20451127819549e-06, |
|
"loss": 0.557, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"grad_norm": 7.00390625, |
|
"learning_rate": 9.203007518796993e-06, |
|
"loss": 0.5266, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"grad_norm": 5.945902347564697, |
|
"learning_rate": 9.201503759398496e-06, |
|
"loss": 0.505, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 39.0330924987793, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 0.5673, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy": 0.9324, |
|
"eval_loss": 0.2575855553150177, |
|
"eval_runtime": 85.1441, |
|
"eval_samples_per_second": 117.448, |
|
"eval_steps_per_second": 0.47, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"grad_norm": 7.23183012008667, |
|
"learning_rate": 9.198496240601504e-06, |
|
"loss": 0.4209, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"grad_norm": 5.357458591461182, |
|
"learning_rate": 9.196992481203009e-06, |
|
"loss": 0.4701, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"grad_norm": 9.471860885620117, |
|
"learning_rate": 9.195488721804512e-06, |
|
"loss": 0.4924, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"grad_norm": 7.7437214851379395, |
|
"learning_rate": 9.193984962406016e-06, |
|
"loss": 0.4997, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"grad_norm": 6.697991371154785, |
|
"learning_rate": 9.19248120300752e-06, |
|
"loss": 0.4365, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"grad_norm": 9.78630542755127, |
|
"learning_rate": 9.190977443609025e-06, |
|
"loss": 0.5572, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"grad_norm": 6.197582244873047, |
|
"learning_rate": 9.189473684210526e-06, |
|
"loss": 0.4581, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"grad_norm": 5.565506458282471, |
|
"learning_rate": 9.187969924812032e-06, |
|
"loss": 0.5219, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"grad_norm": 5.3856353759765625, |
|
"learning_rate": 9.186466165413535e-06, |
|
"loss": 0.5744, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"grad_norm": 6.119697093963623, |
|
"learning_rate": 9.184962406015039e-06, |
|
"loss": 0.3964, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"grad_norm": 5.344573497772217, |
|
"learning_rate": 9.183458646616542e-06, |
|
"loss": 0.4816, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"grad_norm": 5.624027729034424, |
|
"learning_rate": 9.181954887218046e-06, |
|
"loss": 0.4341, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"grad_norm": 4.847854137420654, |
|
"learning_rate": 9.180451127819549e-06, |
|
"loss": 0.5471, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"grad_norm": 6.051102638244629, |
|
"learning_rate": 9.178947368421053e-06, |
|
"loss": 0.5593, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"grad_norm": 8.345075607299805, |
|
"learning_rate": 9.177443609022558e-06, |
|
"loss": 0.4132, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"grad_norm": 5.369195938110352, |
|
"learning_rate": 9.17593984962406e-06, |
|
"loss": 0.4844, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"grad_norm": 6.0165228843688965, |
|
"learning_rate": 9.174436090225565e-06, |
|
"loss": 0.4712, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"grad_norm": 4.54939079284668, |
|
"learning_rate": 9.172932330827068e-06, |
|
"loss": 0.4956, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"grad_norm": 5.021441459655762, |
|
"learning_rate": 9.171428571428572e-06, |
|
"loss": 0.4428, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"grad_norm": 7.0452446937561035, |
|
"learning_rate": 9.169924812030075e-06, |
|
"loss": 0.5036, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"grad_norm": 8.039512634277344, |
|
"learning_rate": 9.168421052631579e-06, |
|
"loss": 0.478, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"grad_norm": 6.608015537261963, |
|
"learning_rate": 9.166917293233083e-06, |
|
"loss": 0.4338, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"grad_norm": 6.659971714019775, |
|
"learning_rate": 9.165413533834588e-06, |
|
"loss": 0.5033, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"grad_norm": 5.663388252258301, |
|
"learning_rate": 9.163909774436091e-06, |
|
"loss": 0.4779, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"grad_norm": 5.978389739990234, |
|
"learning_rate": 9.162406015037595e-06, |
|
"loss": 0.4615, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"grad_norm": 5.943080425262451, |
|
"learning_rate": 9.160902255639098e-06, |
|
"loss": 0.4282, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"grad_norm": 3.245058536529541, |
|
"learning_rate": 9.159398496240602e-06, |
|
"loss": 0.4423, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"grad_norm": 5.73254919052124, |
|
"learning_rate": 9.157894736842105e-06, |
|
"loss": 0.4777, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"grad_norm": 6.487976551055908, |
|
"learning_rate": 9.15639097744361e-06, |
|
"loss": 0.5132, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"grad_norm": 4.1268463134765625, |
|
"learning_rate": 9.154887218045112e-06, |
|
"loss": 0.3942, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"grad_norm": 7.593535900115967, |
|
"learning_rate": 9.153383458646618e-06, |
|
"loss": 0.5109, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"grad_norm": 4.127936840057373, |
|
"learning_rate": 9.151879699248121e-06, |
|
"loss": 0.4554, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"grad_norm": 12.721508026123047, |
|
"learning_rate": 9.150375939849625e-06, |
|
"loss": 0.5204, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"grad_norm": 7.618612289428711, |
|
"learning_rate": 9.148872180451128e-06, |
|
"loss": 0.5095, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"grad_norm": 5.089692115783691, |
|
"learning_rate": 9.147368421052633e-06, |
|
"loss": 0.478, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"grad_norm": 6.890159606933594, |
|
"learning_rate": 9.145864661654135e-06, |
|
"loss": 0.4634, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"grad_norm": 9.275102615356445, |
|
"learning_rate": 9.14436090225564e-06, |
|
"loss": 0.528, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"grad_norm": 4.839653015136719, |
|
"learning_rate": 9.142857142857144e-06, |
|
"loss": 0.4558, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"grad_norm": 7.7605791091918945, |
|
"learning_rate": 9.141353383458647e-06, |
|
"loss": 0.5086, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"grad_norm": 6.753016948699951, |
|
"learning_rate": 9.139849624060151e-06, |
|
"loss": 0.4953, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"grad_norm": 10.297369003295898, |
|
"learning_rate": 9.138345864661654e-06, |
|
"loss": 0.454, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"grad_norm": 5.704922676086426, |
|
"learning_rate": 9.136842105263158e-06, |
|
"loss": 0.4504, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"grad_norm": 5.4977030754089355, |
|
"learning_rate": 9.135338345864663e-06, |
|
"loss": 0.5203, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"grad_norm": 7.531189918518066, |
|
"learning_rate": 9.133834586466167e-06, |
|
"loss": 0.4534, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"grad_norm": 6.660569667816162, |
|
"learning_rate": 9.13233082706767e-06, |
|
"loss": 0.577, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"grad_norm": 6.752721309661865, |
|
"learning_rate": 9.130827067669174e-06, |
|
"loss": 0.4574, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"grad_norm": 7.526786804199219, |
|
"learning_rate": 9.129323308270677e-06, |
|
"loss": 0.4511, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"grad_norm": 6.5963239669799805, |
|
"learning_rate": 9.12781954887218e-06, |
|
"loss": 0.4917, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"grad_norm": 4.203681468963623, |
|
"learning_rate": 9.126315789473686e-06, |
|
"loss": 0.5218, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"grad_norm": 4.367255210876465, |
|
"learning_rate": 9.124812030075188e-06, |
|
"loss": 0.4162, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"grad_norm": 3.8670730590820312, |
|
"learning_rate": 9.123308270676693e-06, |
|
"loss": 0.3913, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"grad_norm": 9.634267807006836, |
|
"learning_rate": 9.121804511278197e-06, |
|
"loss": 0.5062, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"grad_norm": 2.509295701980591, |
|
"learning_rate": 9.1203007518797e-06, |
|
"loss": 0.4422, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"grad_norm": 7.6244659423828125, |
|
"learning_rate": 9.118796992481204e-06, |
|
"loss": 0.4566, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"grad_norm": 6.837118625640869, |
|
"learning_rate": 9.117293233082709e-06, |
|
"loss": 0.5095, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"grad_norm": 4.819979667663574, |
|
"learning_rate": 9.11578947368421e-06, |
|
"loss": 0.4471, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"grad_norm": 6.350512504577637, |
|
"learning_rate": 9.114285714285716e-06, |
|
"loss": 0.4751, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"grad_norm": 3.4793074131011963, |
|
"learning_rate": 9.11278195488722e-06, |
|
"loss": 0.4089, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"grad_norm": 5.1062774658203125, |
|
"learning_rate": 9.111278195488723e-06, |
|
"loss": 0.5624, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"grad_norm": 8.126543998718262, |
|
"learning_rate": 9.109774436090226e-06, |
|
"loss": 0.5146, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"grad_norm": 7.661808967590332, |
|
"learning_rate": 9.10827067669173e-06, |
|
"loss": 0.4602, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"grad_norm": 6.820888996124268, |
|
"learning_rate": 9.106766917293233e-06, |
|
"loss": 0.4827, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"grad_norm": 24.169485092163086, |
|
"learning_rate": 9.105263157894739e-06, |
|
"loss": 0.5302, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"grad_norm": 5.068043231964111, |
|
"learning_rate": 9.103759398496242e-06, |
|
"loss": 0.5453, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"grad_norm": 5.819450378417969, |
|
"learning_rate": 9.102255639097744e-06, |
|
"loss": 0.4811, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"grad_norm": 4.129781723022461, |
|
"learning_rate": 9.10075187969925e-06, |
|
"loss": 0.4613, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy": 0.9311, |
|
"eval_loss": 0.25862327218055725, |
|
"eval_runtime": 84.8621, |
|
"eval_samples_per_second": 117.838, |
|
"eval_steps_per_second": 0.471, |
|
"step": 5985 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"grad_norm": 6.844424247741699, |
|
"learning_rate": 9.099248120300753e-06, |
|
"loss": 0.4154, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"grad_norm": 5.792689323425293, |
|
"learning_rate": 9.097744360902256e-06, |
|
"loss": 0.4345, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"grad_norm": 5.300471305847168, |
|
"learning_rate": 9.09624060150376e-06, |
|
"loss": 0.3986, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"grad_norm": 11.9564208984375, |
|
"learning_rate": 9.094736842105263e-06, |
|
"loss": 0.4255, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"grad_norm": 6.798022270202637, |
|
"learning_rate": 9.093233082706767e-06, |
|
"loss": 0.4296, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"grad_norm": 7.78212308883667, |
|
"learning_rate": 9.091729323308272e-06, |
|
"loss": 0.4373, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"grad_norm": 6.719583988189697, |
|
"learning_rate": 9.090225563909776e-06, |
|
"loss": 0.4943, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"grad_norm": 8.298834800720215, |
|
"learning_rate": 9.088721804511279e-06, |
|
"loss": 0.3937, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"grad_norm": 4.731727600097656, |
|
"learning_rate": 9.087218045112783e-06, |
|
"loss": 0.4711, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"grad_norm": 6.207810878753662, |
|
"learning_rate": 9.085714285714286e-06, |
|
"loss": 0.4565, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"grad_norm": 6.939966678619385, |
|
"learning_rate": 9.08421052631579e-06, |
|
"loss": 0.5276, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"grad_norm": 9.00831127166748, |
|
"learning_rate": 9.082706766917295e-06, |
|
"loss": 0.5558, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"grad_norm": 4.730199813842773, |
|
"learning_rate": 9.081203007518797e-06, |
|
"loss": 0.4826, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"grad_norm": 4.198337078094482, |
|
"learning_rate": 9.079699248120302e-06, |
|
"loss": 0.3978, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"grad_norm": 5.722704887390137, |
|
"learning_rate": 9.078195488721805e-06, |
|
"loss": 0.4622, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"grad_norm": 8.497228622436523, |
|
"learning_rate": 9.076691729323309e-06, |
|
"loss": 0.4726, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"grad_norm": 7.046009063720703, |
|
"learning_rate": 9.075187969924812e-06, |
|
"loss": 0.527, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"grad_norm": 7.972896099090576, |
|
"learning_rate": 9.073684210526316e-06, |
|
"loss": 0.3072, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"grad_norm": 8.850788116455078, |
|
"learning_rate": 9.07218045112782e-06, |
|
"loss": 0.4753, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"grad_norm": 6.064061641693115, |
|
"learning_rate": 9.070676691729325e-06, |
|
"loss": 0.4009, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"grad_norm": 6.12713098526001, |
|
"learning_rate": 9.069172932330828e-06, |
|
"loss": 0.4786, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"grad_norm": 8.643204689025879, |
|
"learning_rate": 9.067669172932332e-06, |
|
"loss": 0.5134, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"grad_norm": 7.257277488708496, |
|
"learning_rate": 9.066165413533835e-06, |
|
"loss": 0.4474, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"grad_norm": 7.62333869934082, |
|
"learning_rate": 9.064661654135339e-06, |
|
"loss": 0.4579, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"grad_norm": 9.346735954284668, |
|
"learning_rate": 9.063157894736842e-06, |
|
"loss": 0.4714, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"grad_norm": 3.8007750511169434, |
|
"learning_rate": 9.061654135338347e-06, |
|
"loss": 0.4334, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"grad_norm": 6.266302108764648, |
|
"learning_rate": 9.06015037593985e-06, |
|
"loss": 0.4704, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"grad_norm": 6.959786891937256, |
|
"learning_rate": 9.058646616541355e-06, |
|
"loss": 0.5353, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"grad_norm": 6.572616100311279, |
|
"learning_rate": 9.057142857142858e-06, |
|
"loss": 0.4629, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"grad_norm": 5.961916446685791, |
|
"learning_rate": 9.055639097744362e-06, |
|
"loss": 0.5119, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"grad_norm": 6.547915935516357, |
|
"learning_rate": 9.054135338345865e-06, |
|
"loss": 0.4576, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"grad_norm": 6.359402179718018, |
|
"learning_rate": 9.05263157894737e-06, |
|
"loss": 0.3692, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"grad_norm": 7.048614501953125, |
|
"learning_rate": 9.051127819548872e-06, |
|
"loss": 0.3911, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"grad_norm": 5.198198318481445, |
|
"learning_rate": 9.049624060150377e-06, |
|
"loss": 0.475, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"grad_norm": 5.797221660614014, |
|
"learning_rate": 9.04812030075188e-06, |
|
"loss": 0.3771, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"grad_norm": 5.751585483551025, |
|
"learning_rate": 9.046616541353384e-06, |
|
"loss": 0.497, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"grad_norm": 9.54306697845459, |
|
"learning_rate": 9.045112781954888e-06, |
|
"loss": 0.3976, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"grad_norm": 8.968032836914062, |
|
"learning_rate": 9.043609022556391e-06, |
|
"loss": 0.4252, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"grad_norm": 13.218304634094238, |
|
"learning_rate": 9.042105263157895e-06, |
|
"loss": 0.5191, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"grad_norm": 4.405686855316162, |
|
"learning_rate": 9.0406015037594e-06, |
|
"loss": 0.4149, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"grad_norm": 4.863158702850342, |
|
"learning_rate": 9.039097744360904e-06, |
|
"loss": 0.4875, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"grad_norm": 6.247385501861572, |
|
"learning_rate": 9.037593984962407e-06, |
|
"loss": 0.4106, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"grad_norm": 6.554888725280762, |
|
"learning_rate": 9.03609022556391e-06, |
|
"loss": 0.4645, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"grad_norm": 7.249465465545654, |
|
"learning_rate": 9.034586466165414e-06, |
|
"loss": 0.4291, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"grad_norm": 6.810882568359375, |
|
"learning_rate": 9.033082706766918e-06, |
|
"loss": 0.4637, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"grad_norm": 2.65733003616333, |
|
"learning_rate": 9.031578947368423e-06, |
|
"loss": 0.4585, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"grad_norm": 8.63343334197998, |
|
"learning_rate": 9.030075187969925e-06, |
|
"loss": 0.5187, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"grad_norm": 5.558303356170654, |
|
"learning_rate": 9.028571428571428e-06, |
|
"loss": 0.4819, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"grad_norm": 8.711833000183105, |
|
"learning_rate": 9.027067669172933e-06, |
|
"loss": 0.4585, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"grad_norm": 7.882017135620117, |
|
"learning_rate": 9.025563909774437e-06, |
|
"loss": 0.5154, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"grad_norm": 5.849830150604248, |
|
"learning_rate": 9.02406015037594e-06, |
|
"loss": 0.5093, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"grad_norm": 7.546263217926025, |
|
"learning_rate": 9.022556390977444e-06, |
|
"loss": 0.3969, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"grad_norm": 7.114614963531494, |
|
"learning_rate": 9.021052631578948e-06, |
|
"loss": 0.4485, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"grad_norm": 7.6169209480285645, |
|
"learning_rate": 9.019548872180451e-06, |
|
"loss": 0.425, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"grad_norm": 5.7843403816223145, |
|
"learning_rate": 9.018045112781956e-06, |
|
"loss": 0.4171, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"grad_norm": 6.0503082275390625, |
|
"learning_rate": 9.016541353383458e-06, |
|
"loss": 0.483, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"grad_norm": 7.677584648132324, |
|
"learning_rate": 9.015037593984963e-06, |
|
"loss": 0.4747, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"grad_norm": 5.793139934539795, |
|
"learning_rate": 9.013533834586467e-06, |
|
"loss": 0.3621, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"grad_norm": 6.399969577789307, |
|
"learning_rate": 9.01203007518797e-06, |
|
"loss": 0.4373, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"grad_norm": 10.296338081359863, |
|
"learning_rate": 9.010526315789474e-06, |
|
"loss": 0.417, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"grad_norm": 6.193917274475098, |
|
"learning_rate": 9.009022556390979e-06, |
|
"loss": 0.4419, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"grad_norm": 3.921016216278076, |
|
"learning_rate": 9.007518796992481e-06, |
|
"loss": 0.3981, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"grad_norm": 6.30132532119751, |
|
"learning_rate": 9.006015037593986e-06, |
|
"loss": 0.4699, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"grad_norm": 8.901771545410156, |
|
"learning_rate": 9.00451127819549e-06, |
|
"loss": 0.4308, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"grad_norm": 5.031552314758301, |
|
"learning_rate": 9.003007518796993e-06, |
|
"loss": 0.48, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"grad_norm": 5.636510372161865, |
|
"learning_rate": 9.001503759398497e-06, |
|
"loss": 0.403, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 3.5294342041015625, |
|
"learning_rate": 9e-06, |
|
"loss": 0.4179, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_accuracy": 0.9285, |
|
"eval_loss": 0.2555387318134308, |
|
"eval_runtime": 84.7099, |
|
"eval_samples_per_second": 118.05, |
|
"eval_steps_per_second": 0.472, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 10.02, |
|
"grad_norm": 6.522907257080078, |
|
"learning_rate": 8.998496240601504e-06, |
|
"loss": 0.4525, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"grad_norm": 6.142210006713867, |
|
"learning_rate": 8.996992481203009e-06, |
|
"loss": 0.3998, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"grad_norm": 7.781100749969482, |
|
"learning_rate": 8.995488721804512e-06, |
|
"loss": 0.4122, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"grad_norm": 5.448252201080322, |
|
"learning_rate": 8.993984962406016e-06, |
|
"loss": 0.461, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"grad_norm": 7.063671588897705, |
|
"learning_rate": 8.99248120300752e-06, |
|
"loss": 0.432, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"grad_norm": 6.696626663208008, |
|
"learning_rate": 8.990977443609023e-06, |
|
"loss": 0.4208, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"grad_norm": 6.5666656494140625, |
|
"learning_rate": 8.989473684210527e-06, |
|
"loss": 0.4527, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 10.12, |
|
"grad_norm": 8.801324844360352, |
|
"learning_rate": 8.987969924812032e-06, |
|
"loss": 0.442, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"grad_norm": 6.743152141571045, |
|
"learning_rate": 8.986466165413534e-06, |
|
"loss": 0.4424, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"grad_norm": 5.408703327178955, |
|
"learning_rate": 8.984962406015039e-06, |
|
"loss": 0.4548, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"grad_norm": 8.466784477233887, |
|
"learning_rate": 8.983458646616542e-06, |
|
"loss": 0.4355, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"grad_norm": 5.309767723083496, |
|
"learning_rate": 8.981954887218046e-06, |
|
"loss": 0.4023, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"grad_norm": 3.3604421615600586, |
|
"learning_rate": 8.98045112781955e-06, |
|
"loss": 0.4367, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"grad_norm": 6.275347709655762, |
|
"learning_rate": 8.978947368421055e-06, |
|
"loss": 0.4764, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"grad_norm": 6.770579814910889, |
|
"learning_rate": 8.977443609022556e-06, |
|
"loss": 0.4414, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 10.24, |
|
"grad_norm": 8.56733512878418, |
|
"learning_rate": 8.975939849624062e-06, |
|
"loss": 0.4426, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"grad_norm": 6.006712436676025, |
|
"learning_rate": 8.974436090225565e-06, |
|
"loss": 0.3702, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"grad_norm": 4.649052143096924, |
|
"learning_rate": 8.972932330827069e-06, |
|
"loss": 0.5371, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"grad_norm": 9.080769538879395, |
|
"learning_rate": 8.971428571428572e-06, |
|
"loss": 0.4866, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"grad_norm": 5.778624534606934, |
|
"learning_rate": 8.969924812030076e-06, |
|
"loss": 0.381, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"grad_norm": 7.814187049865723, |
|
"learning_rate": 8.96842105263158e-06, |
|
"loss": 0.5162, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"grad_norm": 5.049838542938232, |
|
"learning_rate": 8.966917293233084e-06, |
|
"loss": 0.4879, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"grad_norm": 8.096096992492676, |
|
"learning_rate": 8.965413533834588e-06, |
|
"loss": 0.4726, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"grad_norm": 7.028320789337158, |
|
"learning_rate": 8.963909774436091e-06, |
|
"loss": 0.4424, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"grad_norm": 4.826821804046631, |
|
"learning_rate": 8.962406015037595e-06, |
|
"loss": 0.4552, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"grad_norm": 8.392495155334473, |
|
"learning_rate": 8.960902255639098e-06, |
|
"loss": 0.4378, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"grad_norm": 4.868290424346924, |
|
"learning_rate": 8.959398496240602e-06, |
|
"loss": 0.4151, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 10.42, |
|
"grad_norm": 6.117234230041504, |
|
"learning_rate": 8.957894736842107e-06, |
|
"loss": 0.5149, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"grad_norm": 9.33238697052002, |
|
"learning_rate": 8.956390977443609e-06, |
|
"loss": 0.3984, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"grad_norm": 9.559886932373047, |
|
"learning_rate": 8.954887218045113e-06, |
|
"loss": 0.4171, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"grad_norm": 4.344634056091309, |
|
"learning_rate": 8.953383458646618e-06, |
|
"loss": 0.4419, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"grad_norm": 5.508487701416016, |
|
"learning_rate": 8.951879699248121e-06, |
|
"loss": 0.4605, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"grad_norm": 5.529686450958252, |
|
"learning_rate": 8.950375939849625e-06, |
|
"loss": 0.4004, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"grad_norm": 5.424170970916748, |
|
"learning_rate": 8.948872180451128e-06, |
|
"loss": 0.4351, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"grad_norm": 6.121506690979004, |
|
"learning_rate": 8.947368421052632e-06, |
|
"loss": 0.4198, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 10.54, |
|
"grad_norm": 4.664872169494629, |
|
"learning_rate": 8.945864661654135e-06, |
|
"loss": 0.3917, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 10.56, |
|
"grad_norm": 5.378602027893066, |
|
"learning_rate": 8.94436090225564e-06, |
|
"loss": 0.47, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"grad_norm": 8.281057357788086, |
|
"learning_rate": 8.942857142857142e-06, |
|
"loss": 0.3779, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"grad_norm": 5.378328800201416, |
|
"learning_rate": 8.941353383458648e-06, |
|
"loss": 0.4878, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"grad_norm": 4.809008598327637, |
|
"learning_rate": 8.939849624060151e-06, |
|
"loss": 0.4409, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 10.62, |
|
"grad_norm": 6.703794002532959, |
|
"learning_rate": 8.938345864661655e-06, |
|
"loss": 0.45, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"grad_norm": 10.097111701965332, |
|
"learning_rate": 8.936842105263158e-06, |
|
"loss": 0.4442, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"grad_norm": 5.404522895812988, |
|
"learning_rate": 8.935338345864662e-06, |
|
"loss": 0.4289, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"grad_norm": 2.983161449432373, |
|
"learning_rate": 8.933834586466165e-06, |
|
"loss": 0.4072, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"grad_norm": 6.501340389251709, |
|
"learning_rate": 8.93233082706767e-06, |
|
"loss": 0.4344, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"grad_norm": 7.439212322235107, |
|
"learning_rate": 8.930827067669174e-06, |
|
"loss": 0.4208, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"grad_norm": 6.9180192947387695, |
|
"learning_rate": 8.929323308270677e-06, |
|
"loss": 0.4901, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 10.72, |
|
"grad_norm": 4.9598212242126465, |
|
"learning_rate": 8.927819548872181e-06, |
|
"loss": 0.4551, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"grad_norm": 7.020519256591797, |
|
"learning_rate": 8.926315789473685e-06, |
|
"loss": 0.4469, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"grad_norm": 6.747496604919434, |
|
"learning_rate": 8.924812030075188e-06, |
|
"loss": 0.3626, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 10.77, |
|
"grad_norm": 2.869495153427124, |
|
"learning_rate": 8.923308270676693e-06, |
|
"loss": 0.3794, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"grad_norm": 7.156761169433594, |
|
"learning_rate": 8.921804511278195e-06, |
|
"loss": 0.4909, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"grad_norm": 9.461006164550781, |
|
"learning_rate": 8.9203007518797e-06, |
|
"loss": 0.4487, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 10.81, |
|
"grad_norm": 5.75421142578125, |
|
"learning_rate": 8.918796992481204e-06, |
|
"loss": 0.4953, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"grad_norm": 4.186371326446533, |
|
"learning_rate": 8.917293233082707e-06, |
|
"loss": 0.3788, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 10.84, |
|
"grad_norm": 6.402685165405273, |
|
"learning_rate": 8.915789473684211e-06, |
|
"loss": 0.5009, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 10.86, |
|
"grad_norm": 10.709757804870605, |
|
"learning_rate": 8.914285714285716e-06, |
|
"loss": 0.5308, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"grad_norm": 8.926152229309082, |
|
"learning_rate": 8.912781954887218e-06, |
|
"loss": 0.4461, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"grad_norm": 6.41901969909668, |
|
"learning_rate": 8.911278195488723e-06, |
|
"loss": 0.4188, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"grad_norm": 4.931794166564941, |
|
"learning_rate": 8.909774436090227e-06, |
|
"loss": 0.4089, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"grad_norm": 7.75593376159668, |
|
"learning_rate": 8.90827067669173e-06, |
|
"loss": 0.5261, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"grad_norm": 9.013036727905273, |
|
"learning_rate": 8.906766917293234e-06, |
|
"loss": 0.3274, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"grad_norm": 6.653579235076904, |
|
"learning_rate": 8.905263157894737e-06, |
|
"loss": 0.4646, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"grad_norm": 5.304203987121582, |
|
"learning_rate": 8.90375939849624e-06, |
|
"loss": 0.4399, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"grad_norm": 40.1646842956543, |
|
"learning_rate": 8.902255639097746e-06, |
|
"loss": 0.3007, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"grad_norm": 5.538785934448242, |
|
"learning_rate": 8.90075187969925e-06, |
|
"loss": 0.4438, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_accuracy": 0.9316, |
|
"eval_loss": 0.25541195273399353, |
|
"eval_runtime": 84.8107, |
|
"eval_samples_per_second": 117.91, |
|
"eval_steps_per_second": 0.472, |
|
"step": 7315 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"grad_norm": 8.498943328857422, |
|
"learning_rate": 8.899248120300753e-06, |
|
"loss": 0.4009, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 11.02, |
|
"grad_norm": 6.2147040367126465, |
|
"learning_rate": 8.897744360902256e-06, |
|
"loss": 0.4283, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"grad_norm": 5.028774261474609, |
|
"learning_rate": 8.89624060150376e-06, |
|
"loss": 0.4145, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 11.05, |
|
"grad_norm": 7.040588855743408, |
|
"learning_rate": 8.894736842105264e-06, |
|
"loss": 0.3753, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"grad_norm": 4.658559322357178, |
|
"learning_rate": 8.893233082706769e-06, |
|
"loss": 0.4139, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 11.08, |
|
"grad_norm": 7.867548942565918, |
|
"learning_rate": 8.89172932330827e-06, |
|
"loss": 0.4854, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"grad_norm": 8.354945182800293, |
|
"learning_rate": 8.890225563909776e-06, |
|
"loss": 0.4186, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"grad_norm": 6.198273658752441, |
|
"learning_rate": 8.88872180451128e-06, |
|
"loss": 0.4486, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"grad_norm": 7.401607990264893, |
|
"learning_rate": 8.887218045112783e-06, |
|
"loss": 0.4015, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 11.14, |
|
"grad_norm": 5.412950038909912, |
|
"learning_rate": 8.885714285714286e-06, |
|
"loss": 0.3654, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"grad_norm": 3.7357654571533203, |
|
"learning_rate": 8.884210526315792e-06, |
|
"loss": 0.4165, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 11.17, |
|
"grad_norm": 7.468185901641846, |
|
"learning_rate": 8.882706766917293e-06, |
|
"loss": 0.3241, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"grad_norm": 5.967494487762451, |
|
"learning_rate": 8.881203007518799e-06, |
|
"loss": 0.4364, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"grad_norm": 8.94781494140625, |
|
"learning_rate": 8.879699248120302e-06, |
|
"loss": 0.4927, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 11.22, |
|
"grad_norm": 7.6195969581604, |
|
"learning_rate": 8.878195488721804e-06, |
|
"loss": 0.3722, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 11.23, |
|
"grad_norm": 9.522473335266113, |
|
"learning_rate": 8.876691729323309e-06, |
|
"loss": 0.3974, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"grad_norm": 9.590860366821289, |
|
"learning_rate": 8.875187969924813e-06, |
|
"loss": 0.3842, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"grad_norm": 6.479350566864014, |
|
"learning_rate": 8.873684210526316e-06, |
|
"loss": 0.4135, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"grad_norm": 8.100231170654297, |
|
"learning_rate": 8.87218045112782e-06, |
|
"loss": 0.4262, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 11.29, |
|
"grad_norm": 9.401702880859375, |
|
"learning_rate": 8.870676691729325e-06, |
|
"loss": 0.3899, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 11.31, |
|
"grad_norm": 7.8885626792907715, |
|
"learning_rate": 8.869172932330827e-06, |
|
"loss": 0.4738, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"grad_norm": 7.2377753257751465, |
|
"learning_rate": 8.867669172932332e-06, |
|
"loss": 0.3686, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 11.34, |
|
"grad_norm": 5.0235209465026855, |
|
"learning_rate": 8.866165413533835e-06, |
|
"loss": 0.3939, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"grad_norm": 6.832250595092773, |
|
"learning_rate": 8.864661654135339e-06, |
|
"loss": 0.4485, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"grad_norm": 8.186062812805176, |
|
"learning_rate": 8.863157894736842e-06, |
|
"loss": 0.4242, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 11.38, |
|
"grad_norm": 5.467780113220215, |
|
"learning_rate": 8.861654135338346e-06, |
|
"loss": 0.4599, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 11.4, |
|
"grad_norm": 6.155720233917236, |
|
"learning_rate": 8.86015037593985e-06, |
|
"loss": 0.5285, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"grad_norm": 6.44677734375, |
|
"learning_rate": 8.858646616541355e-06, |
|
"loss": 0.4931, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"grad_norm": 15.308818817138672, |
|
"learning_rate": 8.857142857142858e-06, |
|
"loss": 0.3899, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 11.44, |
|
"grad_norm": 6.691050052642822, |
|
"learning_rate": 8.855639097744362e-06, |
|
"loss": 0.4313, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"grad_norm": 5.215397357940674, |
|
"learning_rate": 8.854135338345865e-06, |
|
"loss": 0.3528, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 11.47, |
|
"grad_norm": 7.355811595916748, |
|
"learning_rate": 8.852631578947369e-06, |
|
"loss": 0.4402, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"grad_norm": 4.864825248718262, |
|
"learning_rate": 8.851127819548872e-06, |
|
"loss": 0.3485, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"grad_norm": 7.4907755851745605, |
|
"learning_rate": 8.849624060150378e-06, |
|
"loss": 0.4522, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 11.52, |
|
"grad_norm": 6.480433464050293, |
|
"learning_rate": 8.84812030075188e-06, |
|
"loss": 0.4655, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"grad_norm": 5.072092056274414, |
|
"learning_rate": 8.846616541353385e-06, |
|
"loss": 0.3735, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"grad_norm": 10.207109451293945, |
|
"learning_rate": 8.845112781954888e-06, |
|
"loss": 0.3884, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"grad_norm": 5.795559883117676, |
|
"learning_rate": 8.843609022556392e-06, |
|
"loss": 0.4115, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"grad_norm": 7.781355381011963, |
|
"learning_rate": 8.842105263157895e-06, |
|
"loss": 0.4617, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 11.59, |
|
"grad_norm": 6.770030975341797, |
|
"learning_rate": 8.8406015037594e-06, |
|
"loss": 0.4218, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"grad_norm": 7.052707672119141, |
|
"learning_rate": 8.839097744360902e-06, |
|
"loss": 0.3897, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"grad_norm": 8.882899284362793, |
|
"learning_rate": 8.837593984962407e-06, |
|
"loss": 0.4546, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 11.64, |
|
"grad_norm": 7.858944892883301, |
|
"learning_rate": 8.836090225563911e-06, |
|
"loss": 0.4287, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 11.65, |
|
"grad_norm": 6.74614953994751, |
|
"learning_rate": 8.834586466165414e-06, |
|
"loss": 0.3326, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"grad_norm": 8.970141410827637, |
|
"learning_rate": 8.833082706766918e-06, |
|
"loss": 0.4863, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"grad_norm": 6.568352699279785, |
|
"learning_rate": 8.831578947368421e-06, |
|
"loss": 0.4248, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"grad_norm": 6.05830717086792, |
|
"learning_rate": 8.830075187969925e-06, |
|
"loss": 0.4829, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"grad_norm": 7.666469097137451, |
|
"learning_rate": 8.82857142857143e-06, |
|
"loss": 0.5319, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"grad_norm": 5.955508708953857, |
|
"learning_rate": 8.827067669172934e-06, |
|
"loss": 0.4309, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"grad_norm": 6.9883270263671875, |
|
"learning_rate": 8.825563909774437e-06, |
|
"loss": 0.4157, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"grad_norm": 6.703571319580078, |
|
"learning_rate": 8.82406015037594e-06, |
|
"loss": 0.3219, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"grad_norm": 7.131542682647705, |
|
"learning_rate": 8.822556390977444e-06, |
|
"loss": 0.4492, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"grad_norm": 5.014946460723877, |
|
"learning_rate": 8.821052631578948e-06, |
|
"loss": 0.4189, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"grad_norm": 4.254874229431152, |
|
"learning_rate": 8.819548872180453e-06, |
|
"loss": 0.484, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 11.82, |
|
"grad_norm": 4.319407939910889, |
|
"learning_rate": 8.818045112781955e-06, |
|
"loss": 0.3861, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"grad_norm": 7.9686408042907715, |
|
"learning_rate": 8.81654135338346e-06, |
|
"loss": 0.4264, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 11.85, |
|
"grad_norm": 5.5855326652526855, |
|
"learning_rate": 8.815037593984964e-06, |
|
"loss": 0.4532, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"grad_norm": 6.914451599121094, |
|
"learning_rate": 8.813533834586467e-06, |
|
"loss": 0.4355, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"grad_norm": 7.542539596557617, |
|
"learning_rate": 8.81203007518797e-06, |
|
"loss": 0.3671, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"grad_norm": 7.947263717651367, |
|
"learning_rate": 8.810526315789474e-06, |
|
"loss": 0.373, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"grad_norm": 7.884321689605713, |
|
"learning_rate": 8.809022556390978e-06, |
|
"loss": 0.4827, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 11.92, |
|
"grad_norm": 5.361155986785889, |
|
"learning_rate": 8.807518796992483e-06, |
|
"loss": 0.4485, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 11.94, |
|
"grad_norm": 7.507490158081055, |
|
"learning_rate": 8.806015037593986e-06, |
|
"loss": 0.4446, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"grad_norm": 7.053649425506592, |
|
"learning_rate": 8.804511278195488e-06, |
|
"loss": 0.4112, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 11.97, |
|
"grad_norm": 8.394134521484375, |
|
"learning_rate": 8.803007518796993e-06, |
|
"loss": 0.4221, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"grad_norm": 9.852388381958008, |
|
"learning_rate": 8.801503759398497e-06, |
|
"loss": 0.4178, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"grad_norm": 17.5406551361084, |
|
"learning_rate": 8.8e-06, |
|
"loss": 0.4869, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy": 0.9298, |
|
"eval_loss": 0.2563527822494507, |
|
"eval_runtime": 84.9156, |
|
"eval_samples_per_second": 117.764, |
|
"eval_steps_per_second": 0.471, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 12.02, |
|
"grad_norm": 4.651547908782959, |
|
"learning_rate": 8.798496240601504e-06, |
|
"loss": 0.4307, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"grad_norm": 5.47507905960083, |
|
"learning_rate": 8.796992481203007e-06, |
|
"loss": 0.3957, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 12.05, |
|
"grad_norm": 7.309173583984375, |
|
"learning_rate": 8.795488721804511e-06, |
|
"loss": 0.3848, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"grad_norm": 5.4073591232299805, |
|
"learning_rate": 8.793984962406016e-06, |
|
"loss": 0.347, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"grad_norm": 9.495542526245117, |
|
"learning_rate": 8.79248120300752e-06, |
|
"loss": 0.3963, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"grad_norm": 7.175304412841797, |
|
"learning_rate": 8.790977443609023e-06, |
|
"loss": 0.4028, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 12.11, |
|
"grad_norm": 5.7672624588012695, |
|
"learning_rate": 8.789473684210527e-06, |
|
"loss": 0.4336, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"grad_norm": 5.373271942138672, |
|
"learning_rate": 8.78796992481203e-06, |
|
"loss": 0.4214, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 12.14, |
|
"grad_norm": 7.81503963470459, |
|
"learning_rate": 8.786466165413534e-06, |
|
"loss": 0.3362, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"grad_norm": 5.352240085601807, |
|
"learning_rate": 8.784962406015039e-06, |
|
"loss": 0.438, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"grad_norm": 4.825592994689941, |
|
"learning_rate": 8.783458646616541e-06, |
|
"loss": 0.3996, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 12.18, |
|
"grad_norm": 4.875209808349609, |
|
"learning_rate": 8.781954887218046e-06, |
|
"loss": 0.4056, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 12.2, |
|
"grad_norm": 6.405061721801758, |
|
"learning_rate": 8.78045112781955e-06, |
|
"loss": 0.404, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"grad_norm": 5.762337684631348, |
|
"learning_rate": 8.778947368421053e-06, |
|
"loss": 0.3609, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 12.23, |
|
"grad_norm": 8.700191497802734, |
|
"learning_rate": 8.777443609022557e-06, |
|
"loss": 0.4316, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"grad_norm": 5.509273052215576, |
|
"learning_rate": 8.775939849624062e-06, |
|
"loss": 0.3814, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"grad_norm": 6.949098587036133, |
|
"learning_rate": 8.774436090225564e-06, |
|
"loss": 0.3588, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"grad_norm": 5.564908981323242, |
|
"learning_rate": 8.772932330827069e-06, |
|
"loss": 0.353, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"grad_norm": 6.935297012329102, |
|
"learning_rate": 8.771428571428572e-06, |
|
"loss": 0.4212, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"grad_norm": 4.811358451843262, |
|
"learning_rate": 8.769924812030076e-06, |
|
"loss": 0.3865, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 12.32, |
|
"grad_norm": 6.4804368019104, |
|
"learning_rate": 8.76842105263158e-06, |
|
"loss": 0.3656, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"grad_norm": 3.9236013889312744, |
|
"learning_rate": 8.766917293233083e-06, |
|
"loss": 0.4885, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 12.35, |
|
"grad_norm": 7.50891637802124, |
|
"learning_rate": 8.765413533834586e-06, |
|
"loss": 0.3962, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 12.36, |
|
"grad_norm": 4.313982963562012, |
|
"learning_rate": 8.763909774436092e-06, |
|
"loss": 0.4023, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 12.38, |
|
"grad_norm": 4.385167121887207, |
|
"learning_rate": 8.762406015037595e-06, |
|
"loss": 0.4841, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"grad_norm": 5.977277755737305, |
|
"learning_rate": 8.760902255639099e-06, |
|
"loss": 0.399, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"grad_norm": 8.858118057250977, |
|
"learning_rate": 8.759398496240602e-06, |
|
"loss": 0.451, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"grad_norm": 6.294662952423096, |
|
"learning_rate": 8.757894736842106e-06, |
|
"loss": 0.416, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 12.44, |
|
"grad_norm": 4.536668300628662, |
|
"learning_rate": 8.75639097744361e-06, |
|
"loss": 0.407, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"grad_norm": 5.644812107086182, |
|
"learning_rate": 8.754887218045114e-06, |
|
"loss": 0.3685, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 12.47, |
|
"grad_norm": 5.488842010498047, |
|
"learning_rate": 8.753383458646616e-06, |
|
"loss": 0.4136, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 12.48, |
|
"grad_norm": 4.548142910003662, |
|
"learning_rate": 8.751879699248122e-06, |
|
"loss": 0.4502, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"grad_norm": 3.41457200050354, |
|
"learning_rate": 8.750375939849625e-06, |
|
"loss": 0.3598, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"grad_norm": 6.259812831878662, |
|
"learning_rate": 8.748872180451129e-06, |
|
"loss": 0.3843, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"grad_norm": 5.301551342010498, |
|
"learning_rate": 8.747368421052632e-06, |
|
"loss": 0.4038, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"grad_norm": 14.684255599975586, |
|
"learning_rate": 8.745864661654137e-06, |
|
"loss": 0.4115, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 12.56, |
|
"grad_norm": 6.711531162261963, |
|
"learning_rate": 8.744360902255639e-06, |
|
"loss": 0.4112, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 12.57, |
|
"grad_norm": 8.990388870239258, |
|
"learning_rate": 8.742857142857144e-06, |
|
"loss": 0.3547, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"grad_norm": 4.513948440551758, |
|
"learning_rate": 8.741353383458648e-06, |
|
"loss": 0.3776, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"grad_norm": 6.088433742523193, |
|
"learning_rate": 8.739849624060151e-06, |
|
"loss": 0.4116, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 12.62, |
|
"grad_norm": 7.882970809936523, |
|
"learning_rate": 8.738345864661655e-06, |
|
"loss": 0.3712, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"grad_norm": 6.829627990722656, |
|
"learning_rate": 8.736842105263158e-06, |
|
"loss": 0.3842, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 12.65, |
|
"grad_norm": 6.185722351074219, |
|
"learning_rate": 8.735338345864662e-06, |
|
"loss": 0.424, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"grad_norm": 4.945958137512207, |
|
"learning_rate": 8.733834586466167e-06, |
|
"loss": 0.377, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 12.68, |
|
"grad_norm": 6.356648921966553, |
|
"learning_rate": 8.73233082706767e-06, |
|
"loss": 0.4256, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"grad_norm": 6.276622295379639, |
|
"learning_rate": 8.730827067669172e-06, |
|
"loss": 0.3733, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"grad_norm": 7.50572395324707, |
|
"learning_rate": 8.729323308270678e-06, |
|
"loss": 0.4407, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 12.72, |
|
"grad_norm": 7.089003086090088, |
|
"learning_rate": 8.727819548872181e-06, |
|
"loss": 0.3948, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"grad_norm": 6.90725564956665, |
|
"learning_rate": 8.726315789473685e-06, |
|
"loss": 0.4511, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"grad_norm": 4.369374752044678, |
|
"learning_rate": 8.724812030075188e-06, |
|
"loss": 0.3559, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"grad_norm": 2.895493507385254, |
|
"learning_rate": 8.723308270676692e-06, |
|
"loss": 0.349, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 12.78, |
|
"grad_norm": 8.638984680175781, |
|
"learning_rate": 8.721804511278195e-06, |
|
"loss": 0.3406, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 12.8, |
|
"grad_norm": 7.664207458496094, |
|
"learning_rate": 8.7203007518797e-06, |
|
"loss": 0.3619, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"grad_norm": 4.544347286224365, |
|
"learning_rate": 8.718796992481204e-06, |
|
"loss": 0.3109, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"grad_norm": 6.640614032745361, |
|
"learning_rate": 8.717293233082708e-06, |
|
"loss": 0.4116, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"grad_norm": 7.840051174163818, |
|
"learning_rate": 8.715789473684211e-06, |
|
"loss": 0.4027, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"grad_norm": 10.355204582214355, |
|
"learning_rate": 8.714285714285715e-06, |
|
"loss": 0.4013, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"grad_norm": 7.472030162811279, |
|
"learning_rate": 8.712781954887218e-06, |
|
"loss": 0.4119, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"grad_norm": 7.9360246658325195, |
|
"learning_rate": 8.711278195488723e-06, |
|
"loss": 0.3472, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 12.9, |
|
"grad_norm": 5.889431953430176, |
|
"learning_rate": 8.709774436090225e-06, |
|
"loss": 0.4009, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 12.92, |
|
"grad_norm": 5.548401355743408, |
|
"learning_rate": 8.70827067669173e-06, |
|
"loss": 0.4261, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"grad_norm": 5.590747833251953, |
|
"learning_rate": 8.706766917293234e-06, |
|
"loss": 0.4437, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 12.95, |
|
"grad_norm": 6.401696681976318, |
|
"learning_rate": 8.705263157894737e-06, |
|
"loss": 0.3746, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"grad_norm": 9.315383911132812, |
|
"learning_rate": 8.703759398496241e-06, |
|
"loss": 0.3803, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 12.98, |
|
"grad_norm": 3.9589388370513916, |
|
"learning_rate": 8.702255639097746e-06, |
|
"loss": 0.4448, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"grad_norm": 4.445014953613281, |
|
"learning_rate": 8.700751879699248e-06, |
|
"loss": 0.4289, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.9288, |
|
"eval_loss": 0.2712935507297516, |
|
"eval_runtime": 84.8607, |
|
"eval_samples_per_second": 117.84, |
|
"eval_steps_per_second": 0.471, |
|
"step": 8645 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"grad_norm": 5.444362163543701, |
|
"learning_rate": 8.699248120300753e-06, |
|
"loss": 0.3994, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 13.02, |
|
"grad_norm": 5.8952178955078125, |
|
"learning_rate": 8.697744360902257e-06, |
|
"loss": 0.3819, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 13.04, |
|
"grad_norm": 5.363025188446045, |
|
"learning_rate": 8.69624060150376e-06, |
|
"loss": 0.4251, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"grad_norm": 6.1266961097717285, |
|
"learning_rate": 8.694736842105264e-06, |
|
"loss": 0.4236, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 13.07, |
|
"grad_norm": 6.096094131469727, |
|
"learning_rate": 8.693233082706767e-06, |
|
"loss": 0.4411, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 13.08, |
|
"grad_norm": 6.0483293533325195, |
|
"learning_rate": 8.69172932330827e-06, |
|
"loss": 0.3538, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 13.1, |
|
"grad_norm": 8.619955062866211, |
|
"learning_rate": 8.690225563909776e-06, |
|
"loss": 0.4698, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 13.11, |
|
"grad_norm": 5.028072834014893, |
|
"learning_rate": 8.68872180451128e-06, |
|
"loss": 0.3883, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 13.13, |
|
"grad_norm": 7.43666934967041, |
|
"learning_rate": 8.687218045112783e-06, |
|
"loss": 0.3552, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 13.14, |
|
"grad_norm": 9.520151138305664, |
|
"learning_rate": 8.685714285714287e-06, |
|
"loss": 0.4079, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 13.16, |
|
"grad_norm": 7.852067947387695, |
|
"learning_rate": 8.68421052631579e-06, |
|
"loss": 0.3607, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 13.17, |
|
"grad_norm": 5.92877721786499, |
|
"learning_rate": 8.682706766917294e-06, |
|
"loss": 0.3739, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"grad_norm": 4.25166130065918, |
|
"learning_rate": 8.681203007518799e-06, |
|
"loss": 0.4621, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 13.2, |
|
"grad_norm": 7.073912143707275, |
|
"learning_rate": 8.6796992481203e-06, |
|
"loss": 0.4465, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 13.22, |
|
"grad_norm": 7.39524507522583, |
|
"learning_rate": 8.678195488721806e-06, |
|
"loss": 0.4303, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 13.23, |
|
"grad_norm": 6.938388824462891, |
|
"learning_rate": 8.67669172932331e-06, |
|
"loss": 0.3535, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 13.25, |
|
"grad_norm": 5.0067524909973145, |
|
"learning_rate": 8.675187969924813e-06, |
|
"loss": 0.4399, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"grad_norm": 6.340808391571045, |
|
"learning_rate": 8.673684210526316e-06, |
|
"loss": 0.4199, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 13.28, |
|
"grad_norm": 4.246801853179932, |
|
"learning_rate": 8.67218045112782e-06, |
|
"loss": 0.4039, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 13.29, |
|
"grad_norm": 4.85552453994751, |
|
"learning_rate": 8.670676691729323e-06, |
|
"loss": 0.3753, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 13.31, |
|
"grad_norm": 6.020550727844238, |
|
"learning_rate": 8.669172932330829e-06, |
|
"loss": 0.4039, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 13.32, |
|
"grad_norm": 3.4875411987304688, |
|
"learning_rate": 8.667669172932332e-06, |
|
"loss": 0.3829, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"grad_norm": 6.239095211029053, |
|
"learning_rate": 8.666165413533836e-06, |
|
"loss": 0.3511, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 13.35, |
|
"grad_norm": 4.244966983795166, |
|
"learning_rate": 8.66466165413534e-06, |
|
"loss": 0.4268, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"grad_norm": 12.684317588806152, |
|
"learning_rate": 8.663157894736843e-06, |
|
"loss": 0.3471, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 13.38, |
|
"grad_norm": 8.664961814880371, |
|
"learning_rate": 8.661654135338346e-06, |
|
"loss": 0.3822, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"grad_norm": 5.7766804695129395, |
|
"learning_rate": 8.660150375939851e-06, |
|
"loss": 0.4199, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 13.41, |
|
"grad_norm": 4.019351959228516, |
|
"learning_rate": 8.658646616541353e-06, |
|
"loss": 0.4137, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"grad_norm": 6.156152248382568, |
|
"learning_rate": 8.657142857142858e-06, |
|
"loss": 0.4606, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 13.44, |
|
"grad_norm": 5.74890661239624, |
|
"learning_rate": 8.655639097744362e-06, |
|
"loss": 0.3221, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 13.46, |
|
"grad_norm": 6.321985721588135, |
|
"learning_rate": 8.654135338345866e-06, |
|
"loss": 0.415, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"grad_norm": 8.88508129119873, |
|
"learning_rate": 8.652631578947369e-06, |
|
"loss": 0.3714, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"grad_norm": 6.999327659606934, |
|
"learning_rate": 8.651127819548873e-06, |
|
"loss": 0.3576, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 13.5, |
|
"grad_norm": 7.313613414764404, |
|
"learning_rate": 8.649624060150376e-06, |
|
"loss": 0.4096, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"grad_norm": 6.363276958465576, |
|
"learning_rate": 8.64812030075188e-06, |
|
"loss": 0.3356, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 13.53, |
|
"grad_norm": 7.81085729598999, |
|
"learning_rate": 8.646616541353385e-06, |
|
"loss": 0.4216, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 13.55, |
|
"grad_norm": 8.093158721923828, |
|
"learning_rate": 8.645112781954887e-06, |
|
"loss": 0.5105, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"grad_norm": 3.801630735397339, |
|
"learning_rate": 8.643609022556392e-06, |
|
"loss": 0.439, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 13.58, |
|
"grad_norm": 5.564939975738525, |
|
"learning_rate": 8.642105263157895e-06, |
|
"loss": 0.3854, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 13.59, |
|
"grad_norm": 9.847439765930176, |
|
"learning_rate": 8.640601503759399e-06, |
|
"loss": 0.4034, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 13.61, |
|
"grad_norm": 9.21834659576416, |
|
"learning_rate": 8.639097744360902e-06, |
|
"loss": 0.4448, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 13.62, |
|
"grad_norm": 4.98524808883667, |
|
"learning_rate": 8.637593984962408e-06, |
|
"loss": 0.3646, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 13.64, |
|
"grad_norm": 6.707414150238037, |
|
"learning_rate": 8.63609022556391e-06, |
|
"loss": 0.3618, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 13.65, |
|
"grad_norm": 5.5840840339660645, |
|
"learning_rate": 8.634586466165415e-06, |
|
"loss": 0.3628, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"grad_norm": 4.939608097076416, |
|
"learning_rate": 8.633082706766918e-06, |
|
"loss": 0.3785, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 13.68, |
|
"grad_norm": 7.449197769165039, |
|
"learning_rate": 8.631578947368422e-06, |
|
"loss": 0.4354, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"grad_norm": 9.470358848571777, |
|
"learning_rate": 8.630075187969925e-06, |
|
"loss": 0.4075, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 13.71, |
|
"grad_norm": 7.6183085441589355, |
|
"learning_rate": 8.628571428571429e-06, |
|
"loss": 0.4067, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 13.73, |
|
"grad_norm": 3.0916943550109863, |
|
"learning_rate": 8.627067669172932e-06, |
|
"loss": 0.365, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 13.74, |
|
"grad_norm": 4.251070499420166, |
|
"learning_rate": 8.625563909774437e-06, |
|
"loss": 0.4255, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 13.76, |
|
"grad_norm": 6.8059282302856445, |
|
"learning_rate": 8.624060150375941e-06, |
|
"loss": 0.32, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"grad_norm": 7.302189826965332, |
|
"learning_rate": 8.622556390977444e-06, |
|
"loss": 0.4131, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"grad_norm": 6.402463436126709, |
|
"learning_rate": 8.621052631578948e-06, |
|
"loss": 0.3848, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 13.8, |
|
"grad_norm": 4.343325138092041, |
|
"learning_rate": 8.619548872180452e-06, |
|
"loss": 0.4014, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 13.82, |
|
"grad_norm": 9.013459205627441, |
|
"learning_rate": 8.618045112781955e-06, |
|
"loss": 0.3739, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 13.83, |
|
"grad_norm": 7.037381172180176, |
|
"learning_rate": 8.61654135338346e-06, |
|
"loss": 0.4189, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"grad_norm": 4.7024760246276855, |
|
"learning_rate": 8.615037593984962e-06, |
|
"loss": 0.38, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 13.86, |
|
"grad_norm": 4.808414936065674, |
|
"learning_rate": 8.613533834586467e-06, |
|
"loss": 0.4414, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 13.88, |
|
"grad_norm": 8.237750053405762, |
|
"learning_rate": 8.61203007518797e-06, |
|
"loss": 0.4215, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"grad_norm": 7.862570285797119, |
|
"learning_rate": 8.610526315789474e-06, |
|
"loss": 0.4727, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 13.91, |
|
"grad_norm": 7.045783519744873, |
|
"learning_rate": 8.609022556390978e-06, |
|
"loss": 0.4109, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 13.92, |
|
"grad_norm": 5.3544135093688965, |
|
"learning_rate": 8.607518796992483e-06, |
|
"loss": 0.3824, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 13.94, |
|
"grad_norm": 14.21022891998291, |
|
"learning_rate": 8.606015037593985e-06, |
|
"loss": 0.3659, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 13.95, |
|
"grad_norm": 7.408153533935547, |
|
"learning_rate": 8.60451127819549e-06, |
|
"loss": 0.3508, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"grad_norm": 3.206442356109619, |
|
"learning_rate": 8.603007518796994e-06, |
|
"loss": 0.4451, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 13.98, |
|
"grad_norm": 4.974185466766357, |
|
"learning_rate": 8.601503759398497e-06, |
|
"loss": 0.3888, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"grad_norm": 15.39065170288086, |
|
"learning_rate": 8.6e-06, |
|
"loss": 0.4003, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy": 0.932, |
|
"eval_loss": 0.2616865336894989, |
|
"eval_runtime": 84.8808, |
|
"eval_samples_per_second": 117.812, |
|
"eval_steps_per_second": 0.471, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 14.02, |
|
"grad_norm": 5.1268534660339355, |
|
"learning_rate": 8.598496240601504e-06, |
|
"loss": 0.4195, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"grad_norm": 6.874084949493408, |
|
"learning_rate": 8.596992481203008e-06, |
|
"loss": 0.3901, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 14.05, |
|
"grad_norm": 11.405204772949219, |
|
"learning_rate": 8.595488721804513e-06, |
|
"loss": 0.3234, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"grad_norm": 4.844882965087891, |
|
"learning_rate": 8.593984962406016e-06, |
|
"loss": 0.3437, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 14.08, |
|
"grad_norm": 7.187948226928711, |
|
"learning_rate": 8.59248120300752e-06, |
|
"loss": 0.3895, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 14.09, |
|
"grad_norm": 3.7594106197357178, |
|
"learning_rate": 8.590977443609023e-06, |
|
"loss": 0.3329, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"grad_norm": 4.245199203491211, |
|
"learning_rate": 8.589473684210527e-06, |
|
"loss": 0.3644, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 14.12, |
|
"grad_norm": 6.302145004272461, |
|
"learning_rate": 8.58796992481203e-06, |
|
"loss": 0.4615, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 14.14, |
|
"grad_norm": 6.26497220993042, |
|
"learning_rate": 8.586466165413536e-06, |
|
"loss": 0.3983, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 14.15, |
|
"grad_norm": 7.315799236297607, |
|
"learning_rate": 8.584962406015038e-06, |
|
"loss": 0.3474, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"grad_norm": 8.584407806396484, |
|
"learning_rate": 8.583458646616543e-06, |
|
"loss": 0.3858, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"grad_norm": 6.192986488342285, |
|
"learning_rate": 8.581954887218046e-06, |
|
"loss": 0.4653, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 14.2, |
|
"grad_norm": 6.261072635650635, |
|
"learning_rate": 8.58045112781955e-06, |
|
"loss": 0.3686, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 14.21, |
|
"grad_norm": 6.7162017822265625, |
|
"learning_rate": 8.578947368421053e-06, |
|
"loss": 0.4143, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"grad_norm": 5.550053119659424, |
|
"learning_rate": 8.577443609022557e-06, |
|
"loss": 0.4262, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 14.24, |
|
"grad_norm": 6.601341247558594, |
|
"learning_rate": 8.57593984962406e-06, |
|
"loss": 0.3615, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"grad_norm": 6.859097957611084, |
|
"learning_rate": 8.574436090225564e-06, |
|
"loss": 0.3584, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"grad_norm": 3.824615478515625, |
|
"learning_rate": 8.572932330827069e-06, |
|
"loss": 0.3941, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"grad_norm": 6.923836708068848, |
|
"learning_rate": 8.571428571428571e-06, |
|
"loss": 0.4016, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"grad_norm": 6.395806789398193, |
|
"learning_rate": 8.569924812030076e-06, |
|
"loss": 0.368, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 14.32, |
|
"grad_norm": 6.522418022155762, |
|
"learning_rate": 8.56842105263158e-06, |
|
"loss": 0.377, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"grad_norm": 7.502889633178711, |
|
"learning_rate": 8.566917293233083e-06, |
|
"loss": 0.4179, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"grad_norm": 6.025669574737549, |
|
"learning_rate": 8.565413533834587e-06, |
|
"loss": 0.3395, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 14.36, |
|
"grad_norm": 7.751435279846191, |
|
"learning_rate": 8.563909774436092e-06, |
|
"loss": 0.3531, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 14.38, |
|
"grad_norm": 6.964672088623047, |
|
"learning_rate": 8.562406015037594e-06, |
|
"loss": 0.3905, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 14.39, |
|
"grad_norm": 2.502666473388672, |
|
"learning_rate": 8.560902255639099e-06, |
|
"loss": 0.3172, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"grad_norm": 7.133659839630127, |
|
"learning_rate": 8.559398496240602e-06, |
|
"loss": 0.442, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 14.42, |
|
"grad_norm": 4.32753324508667, |
|
"learning_rate": 8.557894736842106e-06, |
|
"loss": 0.3458, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"grad_norm": 6.268803119659424, |
|
"learning_rate": 8.55639097744361e-06, |
|
"loss": 0.3431, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 14.45, |
|
"grad_norm": 7.018800735473633, |
|
"learning_rate": 8.554887218045113e-06, |
|
"loss": 0.4369, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"grad_norm": 7.435917377471924, |
|
"learning_rate": 8.553383458646617e-06, |
|
"loss": 0.3861, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"grad_norm": 7.8388752937316895, |
|
"learning_rate": 8.551879699248122e-06, |
|
"loss": 0.4239, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"grad_norm": 7.880455493927002, |
|
"learning_rate": 8.550375939849625e-06, |
|
"loss": 0.3829, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 14.51, |
|
"grad_norm": 5.568830490112305, |
|
"learning_rate": 8.548872180451129e-06, |
|
"loss": 0.4011, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"grad_norm": 5.6487274169921875, |
|
"learning_rate": 8.547368421052632e-06, |
|
"loss": 0.4091, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"grad_norm": 4.51718807220459, |
|
"learning_rate": 8.545864661654136e-06, |
|
"loss": 0.4087, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"grad_norm": 7.874798774719238, |
|
"learning_rate": 8.54436090225564e-06, |
|
"loss": 0.3494, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 14.57, |
|
"grad_norm": 4.497681617736816, |
|
"learning_rate": 8.542857142857145e-06, |
|
"loss": 0.3783, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"grad_norm": 5.411101818084717, |
|
"learning_rate": 8.541353383458646e-06, |
|
"loss": 0.3395, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 14.6, |
|
"grad_norm": 5.714541435241699, |
|
"learning_rate": 8.539849624060152e-06, |
|
"loss": 0.4507, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 14.62, |
|
"grad_norm": 7.042336940765381, |
|
"learning_rate": 8.538345864661655e-06, |
|
"loss": 0.3363, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"grad_norm": 6.70949649810791, |
|
"learning_rate": 8.536842105263159e-06, |
|
"loss": 0.3559, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 14.65, |
|
"grad_norm": 5.84644889831543, |
|
"learning_rate": 8.535338345864662e-06, |
|
"loss": 0.3936, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"grad_norm": 5.070087432861328, |
|
"learning_rate": 8.533834586466166e-06, |
|
"loss": 0.4014, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"grad_norm": 5.353463172912598, |
|
"learning_rate": 8.53233082706767e-06, |
|
"loss": 0.4091, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"grad_norm": 9.118497848510742, |
|
"learning_rate": 8.530827067669174e-06, |
|
"loss": 0.3922, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 14.71, |
|
"grad_norm": 7.667191982269287, |
|
"learning_rate": 8.529323308270678e-06, |
|
"loss": 0.4087, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 14.72, |
|
"grad_norm": 7.210267066955566, |
|
"learning_rate": 8.527819548872181e-06, |
|
"loss": 0.3458, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"grad_norm": 5.242373466491699, |
|
"learning_rate": 8.526315789473685e-06, |
|
"loss": 0.3293, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 14.75, |
|
"grad_norm": 7.6933393478393555, |
|
"learning_rate": 8.524812030075188e-06, |
|
"loss": 0.4406, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 14.77, |
|
"grad_norm": 5.179628372192383, |
|
"learning_rate": 8.523308270676692e-06, |
|
"loss": 0.3827, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"grad_norm": 9.525907516479492, |
|
"learning_rate": 8.521804511278197e-06, |
|
"loss": 0.4095, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"grad_norm": 6.132147789001465, |
|
"learning_rate": 8.520300751879699e-06, |
|
"loss": 0.3778, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 14.81, |
|
"grad_norm": 8.04976749420166, |
|
"learning_rate": 8.518796992481204e-06, |
|
"loss": 0.3996, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"grad_norm": 9.131913185119629, |
|
"learning_rate": 8.517293233082708e-06, |
|
"loss": 0.4103, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 14.84, |
|
"grad_norm": 5.724211692810059, |
|
"learning_rate": 8.515789473684211e-06, |
|
"loss": 0.4166, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"grad_norm": 4.537842750549316, |
|
"learning_rate": 8.514285714285715e-06, |
|
"loss": 0.4357, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 14.87, |
|
"grad_norm": 9.75554370880127, |
|
"learning_rate": 8.51278195488722e-06, |
|
"loss": 0.351, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"grad_norm": 5.427340030670166, |
|
"learning_rate": 8.511278195488722e-06, |
|
"loss": 0.3488, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"grad_norm": 4.465277671813965, |
|
"learning_rate": 8.509774436090227e-06, |
|
"loss": 0.32, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 14.92, |
|
"grad_norm": 4.331689834594727, |
|
"learning_rate": 8.50827067669173e-06, |
|
"loss": 0.4442, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 14.93, |
|
"grad_norm": 5.798705577850342, |
|
"learning_rate": 8.506766917293232e-06, |
|
"loss": 0.4003, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"grad_norm": 9.353456497192383, |
|
"learning_rate": 8.505263157894738e-06, |
|
"loss": 0.3969, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 14.96, |
|
"grad_norm": 7.088143825531006, |
|
"learning_rate": 8.503759398496241e-06, |
|
"loss": 0.3749, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 14.98, |
|
"grad_norm": 6.825297832489014, |
|
"learning_rate": 8.502255639097745e-06, |
|
"loss": 0.3532, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 14.99, |
|
"grad_norm": 3.3977503776550293, |
|
"learning_rate": 8.500751879699248e-06, |
|
"loss": 0.3227, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_accuracy": 0.9335, |
|
"eval_loss": 0.2566547989845276, |
|
"eval_runtime": 84.4818, |
|
"eval_samples_per_second": 118.369, |
|
"eval_steps_per_second": 0.473, |
|
"step": 9975 |
|
}, |
|
{ |
|
"epoch": 15.01, |
|
"grad_norm": 3.752779483795166, |
|
"learning_rate": 8.499248120300753e-06, |
|
"loss": 0.2741, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 15.02, |
|
"grad_norm": 5.626667499542236, |
|
"learning_rate": 8.497744360902255e-06, |
|
"loss": 0.3584, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 15.04, |
|
"grad_norm": 5.8750834465026855, |
|
"learning_rate": 8.49624060150376e-06, |
|
"loss": 0.3563, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 15.05, |
|
"grad_norm": 6.653073787689209, |
|
"learning_rate": 8.494736842105264e-06, |
|
"loss": 0.3808, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 15.07, |
|
"grad_norm": 6.017663955688477, |
|
"learning_rate": 8.493233082706767e-06, |
|
"loss": 0.3165, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 15.08, |
|
"grad_norm": 4.427550792694092, |
|
"learning_rate": 8.491729323308271e-06, |
|
"loss": 0.4417, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 15.1, |
|
"grad_norm": 7.968047142028809, |
|
"learning_rate": 8.490225563909775e-06, |
|
"loss": 0.3807, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"grad_norm": 6.646710395812988, |
|
"learning_rate": 8.488721804511278e-06, |
|
"loss": 0.3014, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 15.13, |
|
"grad_norm": 5.180790424346924, |
|
"learning_rate": 8.487218045112783e-06, |
|
"loss": 0.3737, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 15.14, |
|
"grad_norm": 8.142125129699707, |
|
"learning_rate": 8.485714285714287e-06, |
|
"loss": 0.3953, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 15.16, |
|
"grad_norm": 5.258510589599609, |
|
"learning_rate": 8.48421052631579e-06, |
|
"loss": 0.2742, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 15.17, |
|
"grad_norm": 7.299388408660889, |
|
"learning_rate": 8.482706766917294e-06, |
|
"loss": 0.388, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 15.19, |
|
"grad_norm": 9.05027961730957, |
|
"learning_rate": 8.481203007518797e-06, |
|
"loss": 0.3364, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"grad_norm": 8.713417053222656, |
|
"learning_rate": 8.4796992481203e-06, |
|
"loss": 0.2847, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 15.22, |
|
"grad_norm": 4.598002910614014, |
|
"learning_rate": 8.478195488721806e-06, |
|
"loss": 0.3536, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 15.23, |
|
"grad_norm": 8.883098602294922, |
|
"learning_rate": 8.476691729323308e-06, |
|
"loss": 0.3917, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 15.25, |
|
"grad_norm": 9.343592643737793, |
|
"learning_rate": 8.475187969924813e-06, |
|
"loss": 0.3912, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"grad_norm": 11.456267356872559, |
|
"learning_rate": 8.473684210526317e-06, |
|
"loss": 0.3171, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 15.28, |
|
"grad_norm": 7.874906539916992, |
|
"learning_rate": 8.47218045112782e-06, |
|
"loss": 0.4443, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 15.29, |
|
"grad_norm": 7.280247211456299, |
|
"learning_rate": 8.470676691729324e-06, |
|
"loss": 0.4121, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"grad_norm": 7.839987754821777, |
|
"learning_rate": 8.469172932330829e-06, |
|
"loss": 0.4284, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"grad_norm": 5.39335298538208, |
|
"learning_rate": 8.46766917293233e-06, |
|
"loss": 0.4311, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 15.34, |
|
"grad_norm": 8.08191204071045, |
|
"learning_rate": 8.466165413533836e-06, |
|
"loss": 0.3789, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 15.35, |
|
"grad_norm": 3.493443250656128, |
|
"learning_rate": 8.46466165413534e-06, |
|
"loss": 0.3526, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"grad_norm": 7.41270637512207, |
|
"learning_rate": 8.463157894736843e-06, |
|
"loss": 0.4283, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 15.38, |
|
"grad_norm": 6.4891486167907715, |
|
"learning_rate": 8.461654135338346e-06, |
|
"loss": 0.298, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 15.4, |
|
"grad_norm": 6.028573989868164, |
|
"learning_rate": 8.46015037593985e-06, |
|
"loss": 0.3996, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"grad_norm": 3.365438938140869, |
|
"learning_rate": 8.458646616541353e-06, |
|
"loss": 0.334, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 15.43, |
|
"grad_norm": 5.2097015380859375, |
|
"learning_rate": 8.457142857142859e-06, |
|
"loss": 0.3555, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"grad_norm": 4.656721591949463, |
|
"learning_rate": 8.455639097744362e-06, |
|
"loss": 0.398, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 15.46, |
|
"grad_norm": 11.353671073913574, |
|
"learning_rate": 8.454135338345866e-06, |
|
"loss": 0.3341, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"grad_norm": 6.971073627471924, |
|
"learning_rate": 8.45263157894737e-06, |
|
"loss": 0.3569, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 15.49, |
|
"grad_norm": 5.809013843536377, |
|
"learning_rate": 8.451127819548873e-06, |
|
"loss": 0.3642, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"grad_norm": 7.3322834968566895, |
|
"learning_rate": 8.449624060150376e-06, |
|
"loss": 0.3814, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 15.52, |
|
"grad_norm": 9.322681427001953, |
|
"learning_rate": 8.448120300751882e-06, |
|
"loss": 0.3455, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"grad_norm": 7.197205066680908, |
|
"learning_rate": 8.446616541353383e-06, |
|
"loss": 0.3652, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 15.55, |
|
"grad_norm": 7.1502766609191895, |
|
"learning_rate": 8.445112781954889e-06, |
|
"loss": 0.3882, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 15.56, |
|
"grad_norm": 4.826005935668945, |
|
"learning_rate": 8.443609022556392e-06, |
|
"loss": 0.3878, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 15.58, |
|
"grad_norm": 8.432343482971191, |
|
"learning_rate": 8.442105263157896e-06, |
|
"loss": 0.3778, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 15.59, |
|
"grad_norm": 6.166329383850098, |
|
"learning_rate": 8.440601503759399e-06, |
|
"loss": 0.3675, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 15.61, |
|
"grad_norm": 9.84304428100586, |
|
"learning_rate": 8.439097744360903e-06, |
|
"loss": 0.385, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"grad_norm": 4.937039375305176, |
|
"learning_rate": 8.437593984962406e-06, |
|
"loss": 0.3558, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 15.64, |
|
"grad_norm": 5.817636966705322, |
|
"learning_rate": 8.436090225563911e-06, |
|
"loss": 0.3605, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 15.65, |
|
"grad_norm": 6.982740879058838, |
|
"learning_rate": 8.434586466165415e-06, |
|
"loss": 0.2966, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 15.67, |
|
"grad_norm": 5.2945098876953125, |
|
"learning_rate": 8.433082706766918e-06, |
|
"loss": 0.3279, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"grad_norm": 7.526950359344482, |
|
"learning_rate": 8.431578947368422e-06, |
|
"loss": 0.414, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"grad_norm": 8.325518608093262, |
|
"learning_rate": 8.430075187969925e-06, |
|
"loss": 0.314, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"grad_norm": 2.876897096633911, |
|
"learning_rate": 8.428571428571429e-06, |
|
"loss": 0.354, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 15.73, |
|
"grad_norm": 6.776325702667236, |
|
"learning_rate": 8.427067669172932e-06, |
|
"loss": 0.3462, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"grad_norm": 8.158499717712402, |
|
"learning_rate": 8.425563909774438e-06, |
|
"loss": 0.3307, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 15.76, |
|
"grad_norm": 10.01845645904541, |
|
"learning_rate": 8.42406015037594e-06, |
|
"loss": 0.3196, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 15.77, |
|
"grad_norm": 4.778624534606934, |
|
"learning_rate": 8.422556390977445e-06, |
|
"loss": 0.3412, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"grad_norm": 5.8504157066345215, |
|
"learning_rate": 8.421052631578948e-06, |
|
"loss": 0.4183, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 15.8, |
|
"grad_norm": 5.351130962371826, |
|
"learning_rate": 8.419548872180452e-06, |
|
"loss": 0.2639, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"grad_norm": 7.211291313171387, |
|
"learning_rate": 8.418045112781955e-06, |
|
"loss": 0.3021, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"grad_norm": 6.899810791015625, |
|
"learning_rate": 8.416541353383459e-06, |
|
"loss": 0.4069, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 15.85, |
|
"grad_norm": 4.64746618270874, |
|
"learning_rate": 8.415037593984962e-06, |
|
"loss": 0.2798, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 15.86, |
|
"grad_norm": 9.508644104003906, |
|
"learning_rate": 8.413533834586468e-06, |
|
"loss": 0.3791, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 15.88, |
|
"grad_norm": 6.956771373748779, |
|
"learning_rate": 8.412030075187971e-06, |
|
"loss": 0.4188, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"grad_norm": 6.4203667640686035, |
|
"learning_rate": 8.410526315789475e-06, |
|
"loss": 0.3749, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 15.91, |
|
"grad_norm": 8.310030937194824, |
|
"learning_rate": 8.409022556390978e-06, |
|
"loss": 0.4605, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 15.92, |
|
"grad_norm": 8.788355827331543, |
|
"learning_rate": 8.407518796992482e-06, |
|
"loss": 0.3811, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 15.94, |
|
"grad_norm": 5.931136131286621, |
|
"learning_rate": 8.406015037593985e-06, |
|
"loss": 0.4129, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"grad_norm": 7.846260070800781, |
|
"learning_rate": 8.40451127819549e-06, |
|
"loss": 0.3889, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 15.97, |
|
"grad_norm": 6.834481239318848, |
|
"learning_rate": 8.403007518796992e-06, |
|
"loss": 0.4303, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 15.98, |
|
"grad_norm": 3.6619720458984375, |
|
"learning_rate": 8.401503759398497e-06, |
|
"loss": 0.3483, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"grad_norm": 12.63433837890625, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 0.386, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_accuracy": 0.931, |
|
"eval_loss": 0.25709524750709534, |
|
"eval_runtime": 84.9959, |
|
"eval_samples_per_second": 117.653, |
|
"eval_steps_per_second": 0.471, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 16.02, |
|
"grad_norm": 7.7511796951293945, |
|
"learning_rate": 8.398496240601504e-06, |
|
"loss": 0.3916, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 16.03, |
|
"grad_norm": 4.640677452087402, |
|
"learning_rate": 8.396992481203008e-06, |
|
"loss": 0.395, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 16.05, |
|
"grad_norm": 7.270589828491211, |
|
"learning_rate": 8.395488721804511e-06, |
|
"loss": 0.4299, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 16.06, |
|
"grad_norm": 5.698379993438721, |
|
"learning_rate": 8.393984962406015e-06, |
|
"loss": 0.3792, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 16.08, |
|
"grad_norm": 29.69732093811035, |
|
"learning_rate": 8.39248120300752e-06, |
|
"loss": 0.3564, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 16.09, |
|
"grad_norm": 5.11942720413208, |
|
"learning_rate": 8.390977443609024e-06, |
|
"loss": 0.3284, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 16.11, |
|
"grad_norm": 6.748551368713379, |
|
"learning_rate": 8.389473684210527e-06, |
|
"loss": 0.3582, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 16.12, |
|
"grad_norm": 6.748464584350586, |
|
"learning_rate": 8.38796992481203e-06, |
|
"loss": 0.3707, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 16.14, |
|
"grad_norm": 6.992805480957031, |
|
"learning_rate": 8.386466165413534e-06, |
|
"loss": 0.4388, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 16.15, |
|
"grad_norm": 4.689752578735352, |
|
"learning_rate": 8.384962406015038e-06, |
|
"loss": 0.3394, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"grad_norm": 7.47608757019043, |
|
"learning_rate": 8.383458646616543e-06, |
|
"loss": 0.3662, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 16.18, |
|
"grad_norm": 8.161937713623047, |
|
"learning_rate": 8.381954887218045e-06, |
|
"loss": 0.3729, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 16.2, |
|
"grad_norm": 4.649080276489258, |
|
"learning_rate": 8.38045112781955e-06, |
|
"loss": 0.4012, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"grad_norm": 5.3081512451171875, |
|
"learning_rate": 8.378947368421054e-06, |
|
"loss": 0.3275, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 16.23, |
|
"grad_norm": 8.424674034118652, |
|
"learning_rate": 8.377443609022557e-06, |
|
"loss": 0.3169, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 16.24, |
|
"grad_norm": 7.213728427886963, |
|
"learning_rate": 8.37593984962406e-06, |
|
"loss": 0.4158, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 16.26, |
|
"grad_norm": 4.228058815002441, |
|
"learning_rate": 8.374436090225566e-06, |
|
"loss": 0.2967, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 16.27, |
|
"grad_norm": 9.091151237487793, |
|
"learning_rate": 8.372932330827068e-06, |
|
"loss": 0.3631, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 16.29, |
|
"grad_norm": 7.325952053070068, |
|
"learning_rate": 8.371428571428573e-06, |
|
"loss": 0.4045, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 16.3, |
|
"grad_norm": 8.557323455810547, |
|
"learning_rate": 8.369924812030076e-06, |
|
"loss": 0.3657, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 16.32, |
|
"grad_norm": 8.98796558380127, |
|
"learning_rate": 8.36842105263158e-06, |
|
"loss": 0.3812, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"grad_norm": 14.74909496307373, |
|
"learning_rate": 8.366917293233083e-06, |
|
"loss": 0.3966, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 16.35, |
|
"grad_norm": 6.872273921966553, |
|
"learning_rate": 8.365413533834587e-06, |
|
"loss": 0.3797, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 16.36, |
|
"grad_norm": 8.392032623291016, |
|
"learning_rate": 8.36390977443609e-06, |
|
"loss": 0.3864, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 16.38, |
|
"grad_norm": 8.61768627166748, |
|
"learning_rate": 8.362406015037596e-06, |
|
"loss": 0.3434, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 16.39, |
|
"grad_norm": 6.622392177581787, |
|
"learning_rate": 8.3609022556391e-06, |
|
"loss": 0.3751, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 16.41, |
|
"grad_norm": 7.108027458190918, |
|
"learning_rate": 8.359398496240603e-06, |
|
"loss": 0.3542, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 16.42, |
|
"grad_norm": 7.145939826965332, |
|
"learning_rate": 8.357894736842106e-06, |
|
"loss": 0.3586, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 16.44, |
|
"grad_norm": 16.715761184692383, |
|
"learning_rate": 8.35639097744361e-06, |
|
"loss": 0.4365, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 16.45, |
|
"grad_norm": 3.3526134490966797, |
|
"learning_rate": 8.354887218045113e-06, |
|
"loss": 0.38, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 16.47, |
|
"grad_norm": 4.425145149230957, |
|
"learning_rate": 8.353383458646617e-06, |
|
"loss": 0.2944, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 16.48, |
|
"grad_norm": 4.0147552490234375, |
|
"learning_rate": 8.35187969924812e-06, |
|
"loss": 0.3836, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"grad_norm": 6.398830413818359, |
|
"learning_rate": 8.350375939849624e-06, |
|
"loss": 0.3402, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 16.51, |
|
"grad_norm": 5.556189060211182, |
|
"learning_rate": 8.348872180451129e-06, |
|
"loss": 0.3728, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 16.53, |
|
"grad_norm": 4.928891658782959, |
|
"learning_rate": 8.347368421052633e-06, |
|
"loss": 0.2993, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 16.54, |
|
"grad_norm": 8.429544448852539, |
|
"learning_rate": 8.345864661654136e-06, |
|
"loss": 0.3896, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 16.56, |
|
"grad_norm": 9.285510063171387, |
|
"learning_rate": 8.34436090225564e-06, |
|
"loss": 0.3428, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 16.57, |
|
"grad_norm": 5.118491172790527, |
|
"learning_rate": 8.342857142857143e-06, |
|
"loss": 0.3583, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 16.59, |
|
"grad_norm": 9.391587257385254, |
|
"learning_rate": 8.341353383458647e-06, |
|
"loss": 0.312, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 16.6, |
|
"grad_norm": 22.18227767944336, |
|
"learning_rate": 8.339849624060152e-06, |
|
"loss": 0.3549, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 16.62, |
|
"grad_norm": 5.834057331085205, |
|
"learning_rate": 8.338345864661654e-06, |
|
"loss": 0.4242, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 16.63, |
|
"grad_norm": 5.136647701263428, |
|
"learning_rate": 8.336842105263159e-06, |
|
"loss": 0.3623, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 16.65, |
|
"grad_norm": 5.481499195098877, |
|
"learning_rate": 8.335338345864662e-06, |
|
"loss": 0.3745, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 16.66, |
|
"grad_norm": 3.6383814811706543, |
|
"learning_rate": 8.333834586466166e-06, |
|
"loss": 0.3526, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 16.68, |
|
"grad_norm": 4.198364734649658, |
|
"learning_rate": 8.33233082706767e-06, |
|
"loss": 0.3247, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 16.69, |
|
"grad_norm": 6.066871166229248, |
|
"learning_rate": 8.330827067669175e-06, |
|
"loss": 0.346, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 16.71, |
|
"grad_norm": 4.822031497955322, |
|
"learning_rate": 8.329323308270676e-06, |
|
"loss": 0.3421, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 16.72, |
|
"grad_norm": 8.0927152633667, |
|
"learning_rate": 8.327819548872182e-06, |
|
"loss": 0.3212, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 16.74, |
|
"grad_norm": 5.737279415130615, |
|
"learning_rate": 8.326315789473685e-06, |
|
"loss": 0.338, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 16.75, |
|
"grad_norm": 6.6013994216918945, |
|
"learning_rate": 8.324812030075189e-06, |
|
"loss": 0.4221, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 16.77, |
|
"grad_norm": 3.22804856300354, |
|
"learning_rate": 8.323308270676692e-06, |
|
"loss": 0.2824, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 16.78, |
|
"grad_norm": 7.42767333984375, |
|
"learning_rate": 8.321804511278196e-06, |
|
"loss": 0.3654, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 16.8, |
|
"grad_norm": 6.393558979034424, |
|
"learning_rate": 8.3203007518797e-06, |
|
"loss": 0.3939, |
|
"step": 11170 |
|
}, |
|
{ |
|
"epoch": 16.81, |
|
"grad_norm": 6.1491241455078125, |
|
"learning_rate": 8.318796992481204e-06, |
|
"loss": 0.3454, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 16.83, |
|
"grad_norm": 4.844079971313477, |
|
"learning_rate": 8.317293233082708e-06, |
|
"loss": 0.3744, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"grad_norm": 7.577675819396973, |
|
"learning_rate": 8.315789473684212e-06, |
|
"loss": 0.4259, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 16.86, |
|
"grad_norm": 5.073234558105469, |
|
"learning_rate": 8.314285714285715e-06, |
|
"loss": 0.343, |
|
"step": 11210 |
|
}, |
|
{ |
|
"epoch": 16.87, |
|
"grad_norm": 4.934657096862793, |
|
"learning_rate": 8.312781954887219e-06, |
|
"loss": 0.3834, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 16.89, |
|
"grad_norm": 4.744530200958252, |
|
"learning_rate": 8.311278195488722e-06, |
|
"loss": 0.2937, |
|
"step": 11230 |
|
}, |
|
{ |
|
"epoch": 16.9, |
|
"grad_norm": 7.569250583648682, |
|
"learning_rate": 8.309774436090227e-06, |
|
"loss": 0.3719, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 16.92, |
|
"grad_norm": 7.076653480529785, |
|
"learning_rate": 8.308270676691729e-06, |
|
"loss": 0.3489, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 16.93, |
|
"grad_norm": 7.1391520500183105, |
|
"learning_rate": 8.306766917293234e-06, |
|
"loss": 0.3325, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 16.95, |
|
"grad_norm": 4.94738245010376, |
|
"learning_rate": 8.305263157894738e-06, |
|
"loss": 0.3421, |
|
"step": 11270 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"grad_norm": 6.052053451538086, |
|
"learning_rate": 8.303759398496241e-06, |
|
"loss": 0.4267, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 16.98, |
|
"grad_norm": 6.822144985198975, |
|
"learning_rate": 8.302255639097745e-06, |
|
"loss": 0.4932, |
|
"step": 11290 |
|
}, |
|
{ |
|
"epoch": 16.99, |
|
"grad_norm": 7.0719218254089355, |
|
"learning_rate": 8.300751879699248e-06, |
|
"loss": 0.3688, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_accuracy": 0.9346, |
|
"eval_loss": 0.25758126378059387, |
|
"eval_runtime": 84.7562, |
|
"eval_samples_per_second": 117.986, |
|
"eval_steps_per_second": 0.472, |
|
"step": 11305 |
|
}, |
|
{ |
|
"epoch": 17.01, |
|
"grad_norm": 4.741504669189453, |
|
"learning_rate": 8.299248120300752e-06, |
|
"loss": 0.3837, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 17.02, |
|
"grad_norm": 6.912674427032471, |
|
"learning_rate": 8.297744360902257e-06, |
|
"loss": 0.2938, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 17.04, |
|
"grad_norm": 5.881788730621338, |
|
"learning_rate": 8.29624060150376e-06, |
|
"loss": 0.3984, |
|
"step": 11330 |
|
}, |
|
{ |
|
"epoch": 17.05, |
|
"grad_norm": 5.705871105194092, |
|
"learning_rate": 8.294736842105264e-06, |
|
"loss": 0.3793, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 17.07, |
|
"grad_norm": 5.036585330963135, |
|
"learning_rate": 8.293233082706768e-06, |
|
"loss": 0.365, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 17.08, |
|
"grad_norm": 5.040714740753174, |
|
"learning_rate": 8.291729323308271e-06, |
|
"loss": 0.3662, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 17.1, |
|
"grad_norm": 7.933087348937988, |
|
"learning_rate": 8.290225563909775e-06, |
|
"loss": 0.3204, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 17.11, |
|
"grad_norm": 8.096324920654297, |
|
"learning_rate": 8.28872180451128e-06, |
|
"loss": 0.3383, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 17.13, |
|
"grad_norm": 5.693844318389893, |
|
"learning_rate": 8.287218045112782e-06, |
|
"loss": 0.3455, |
|
"step": 11390 |
|
}, |
|
{ |
|
"epoch": 17.14, |
|
"grad_norm": 5.274537086486816, |
|
"learning_rate": 8.285714285714287e-06, |
|
"loss": 0.3695, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 17.16, |
|
"grad_norm": 5.249573230743408, |
|
"learning_rate": 8.28421052631579e-06, |
|
"loss": 0.3801, |
|
"step": 11410 |
|
}, |
|
{ |
|
"epoch": 17.17, |
|
"grad_norm": 6.644190311431885, |
|
"learning_rate": 8.282706766917294e-06, |
|
"loss": 0.3355, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 17.19, |
|
"grad_norm": 8.79143238067627, |
|
"learning_rate": 8.281203007518798e-06, |
|
"loss": 0.3857, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 17.2, |
|
"grad_norm": 4.0490851402282715, |
|
"learning_rate": 8.279699248120301e-06, |
|
"loss": 0.3224, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"grad_norm": 8.35059928894043, |
|
"learning_rate": 8.278195488721805e-06, |
|
"loss": 0.372, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 17.23, |
|
"grad_norm": 5.733313083648682, |
|
"learning_rate": 8.276691729323308e-06, |
|
"loss": 0.3511, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 17.25, |
|
"grad_norm": 4.974298477172852, |
|
"learning_rate": 8.275187969924813e-06, |
|
"loss": 0.332, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"grad_norm": 6.859165191650391, |
|
"learning_rate": 8.273684210526317e-06, |
|
"loss": 0.3053, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 17.28, |
|
"grad_norm": 7.4038591384887695, |
|
"learning_rate": 8.27218045112782e-06, |
|
"loss": 0.3419, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 17.29, |
|
"grad_norm": 4.045393943786621, |
|
"learning_rate": 8.270676691729324e-06, |
|
"loss": 0.3656, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 17.31, |
|
"grad_norm": 2.965898036956787, |
|
"learning_rate": 8.269172932330827e-06, |
|
"loss": 0.4222, |
|
"step": 11510 |
|
}, |
|
{ |
|
"epoch": 17.32, |
|
"grad_norm": 6.6445746421813965, |
|
"learning_rate": 8.267669172932331e-06, |
|
"loss": 0.4351, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"grad_norm": 11.220673561096191, |
|
"learning_rate": 8.266165413533836e-06, |
|
"loss": 0.3738, |
|
"step": 11530 |
|
}, |
|
{ |
|
"epoch": 17.35, |
|
"grad_norm": 3.7990376949310303, |
|
"learning_rate": 8.264661654135338e-06, |
|
"loss": 0.2779, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 17.37, |
|
"grad_norm": 6.193857669830322, |
|
"learning_rate": 8.263157894736843e-06, |
|
"loss": 0.3864, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 17.38, |
|
"grad_norm": 7.089908123016357, |
|
"learning_rate": 8.261654135338347e-06, |
|
"loss": 0.3243, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 17.4, |
|
"grad_norm": 10.148313522338867, |
|
"learning_rate": 8.26015037593985e-06, |
|
"loss": 0.3848, |
|
"step": 11570 |
|
}, |
|
{ |
|
"epoch": 17.41, |
|
"grad_norm": 7.47261905670166, |
|
"learning_rate": 8.258646616541354e-06, |
|
"loss": 0.3958, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 17.43, |
|
"grad_norm": 8.237654685974121, |
|
"learning_rate": 8.257142857142857e-06, |
|
"loss": 0.4104, |
|
"step": 11590 |
|
}, |
|
{ |
|
"epoch": 17.44, |
|
"grad_norm": 7.028960227966309, |
|
"learning_rate": 8.25563909774436e-06, |
|
"loss": 0.3515, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 17.46, |
|
"grad_norm": 6.804955959320068, |
|
"learning_rate": 8.254135338345866e-06, |
|
"loss": 0.3555, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"grad_norm": 8.740710258483887, |
|
"learning_rate": 8.25263157894737e-06, |
|
"loss": 0.328, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 17.49, |
|
"grad_norm": 6.383413314819336, |
|
"learning_rate": 8.251127819548873e-06, |
|
"loss": 0.3352, |
|
"step": 11630 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"grad_norm": 8.289705276489258, |
|
"learning_rate": 8.249624060150377e-06, |
|
"loss": 0.4067, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 17.52, |
|
"grad_norm": 6.0566911697387695, |
|
"learning_rate": 8.24812030075188e-06, |
|
"loss": 0.3556, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 17.53, |
|
"grad_norm": 9.813027381896973, |
|
"learning_rate": 8.246616541353384e-06, |
|
"loss": 0.3856, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 17.55, |
|
"grad_norm": 6.0970988273620605, |
|
"learning_rate": 8.245112781954889e-06, |
|
"loss": 0.396, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 17.56, |
|
"grad_norm": 4.837037086486816, |
|
"learning_rate": 8.24360902255639e-06, |
|
"loss": 0.3194, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 17.58, |
|
"grad_norm": 10.178328514099121, |
|
"learning_rate": 8.242105263157896e-06, |
|
"loss": 0.4108, |
|
"step": 11690 |
|
}, |
|
{ |
|
"epoch": 17.59, |
|
"grad_norm": 3.696746826171875, |
|
"learning_rate": 8.2406015037594e-06, |
|
"loss": 0.3198, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 17.61, |
|
"grad_norm": 6.4773993492126465, |
|
"learning_rate": 8.239097744360903e-06, |
|
"loss": 0.2889, |
|
"step": 11710 |
|
}, |
|
{ |
|
"epoch": 17.62, |
|
"grad_norm": 9.036526679992676, |
|
"learning_rate": 8.237593984962406e-06, |
|
"loss": 0.4616, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 17.64, |
|
"grad_norm": 5.1061320304870605, |
|
"learning_rate": 8.236090225563912e-06, |
|
"loss": 0.3941, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 17.65, |
|
"grad_norm": 5.17496919631958, |
|
"learning_rate": 8.234586466165413e-06, |
|
"loss": 0.3883, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 17.67, |
|
"grad_norm": 4.007594585418701, |
|
"learning_rate": 8.233082706766919e-06, |
|
"loss": 0.3097, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 17.68, |
|
"grad_norm": 3.903956174850464, |
|
"learning_rate": 8.231578947368422e-06, |
|
"loss": 0.3473, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 17.7, |
|
"grad_norm": 9.270066261291504, |
|
"learning_rate": 8.230075187969926e-06, |
|
"loss": 0.3255, |
|
"step": 11770 |
|
}, |
|
{ |
|
"epoch": 17.71, |
|
"grad_norm": 4.118042469024658, |
|
"learning_rate": 8.22857142857143e-06, |
|
"loss": 0.335, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 17.73, |
|
"grad_norm": 5.715611457824707, |
|
"learning_rate": 8.227067669172933e-06, |
|
"loss": 0.422, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"grad_norm": 5.848507881164551, |
|
"learning_rate": 8.225563909774436e-06, |
|
"loss": 0.3465, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 17.76, |
|
"grad_norm": 5.273082733154297, |
|
"learning_rate": 8.224060150375941e-06, |
|
"loss": 0.3479, |
|
"step": 11810 |
|
}, |
|
{ |
|
"epoch": 17.77, |
|
"grad_norm": 4.500287055969238, |
|
"learning_rate": 8.222556390977445e-06, |
|
"loss": 0.3813, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 17.79, |
|
"grad_norm": 7.676726341247559, |
|
"learning_rate": 8.221052631578948e-06, |
|
"loss": 0.3973, |
|
"step": 11830 |
|
}, |
|
{ |
|
"epoch": 17.8, |
|
"grad_norm": 6.1550211906433105, |
|
"learning_rate": 8.219548872180452e-06, |
|
"loss": 0.3209, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 17.82, |
|
"grad_norm": 3.796853542327881, |
|
"learning_rate": 8.218045112781955e-06, |
|
"loss": 0.3241, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 17.83, |
|
"grad_norm": 7.188779354095459, |
|
"learning_rate": 8.216541353383459e-06, |
|
"loss": 0.3923, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"grad_norm": 5.088048934936523, |
|
"learning_rate": 8.215037593984964e-06, |
|
"loss": 0.3528, |
|
"step": 11870 |
|
}, |
|
{ |
|
"epoch": 17.86, |
|
"grad_norm": 6.49263334274292, |
|
"learning_rate": 8.213533834586466e-06, |
|
"loss": 0.3724, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 17.88, |
|
"grad_norm": 8.370095252990723, |
|
"learning_rate": 8.212030075187971e-06, |
|
"loss": 0.3697, |
|
"step": 11890 |
|
}, |
|
{ |
|
"epoch": 17.89, |
|
"grad_norm": 7.578341007232666, |
|
"learning_rate": 8.210526315789475e-06, |
|
"loss": 0.3317, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 17.91, |
|
"grad_norm": 5.1709723472595215, |
|
"learning_rate": 8.209022556390978e-06, |
|
"loss": 0.3304, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 17.92, |
|
"grad_norm": 4.586398124694824, |
|
"learning_rate": 8.207518796992482e-06, |
|
"loss": 0.3942, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 17.94, |
|
"grad_norm": 3.6240298748016357, |
|
"learning_rate": 8.206015037593985e-06, |
|
"loss": 0.2839, |
|
"step": 11930 |
|
}, |
|
{ |
|
"epoch": 17.95, |
|
"grad_norm": 7.672499179840088, |
|
"learning_rate": 8.204511278195489e-06, |
|
"loss": 0.3539, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 17.97, |
|
"grad_norm": 5.807362079620361, |
|
"learning_rate": 8.203007518796992e-06, |
|
"loss": 0.3006, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 17.98, |
|
"grad_norm": 5.875560283660889, |
|
"learning_rate": 8.201503759398498e-06, |
|
"loss": 0.3857, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"grad_norm": 47.7349967956543, |
|
"learning_rate": 8.2e-06, |
|
"loss": 0.3985, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_accuracy": 0.9356, |
|
"eval_loss": 0.25322866439819336, |
|
"eval_runtime": 84.6682, |
|
"eval_samples_per_second": 118.108, |
|
"eval_steps_per_second": 0.472, |
|
"step": 11970 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 66500, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 100, |
|
"save_steps": 500, |
|
"total_flos": 2.0962905086398464e+20, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|