{
  "best_metric": 0.357177734375,
  "best_model_checkpoint": "./results/checkpoint-7906",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 11859,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 6.6436,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 6.098,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 134.1601780588041,
      "learning_rate": 4.2e-06,
      "loss": 4.8583,
      "step": 30
    },
    {
      "epoch": 0.01,
      "grad_norm": 133.05228107823655,
      "learning_rate": 1.02e-05,
      "loss": 4.0425,
      "step": 40
    },
    {
      "epoch": 0.01,
      "grad_norm": 195.91524058669003,
      "learning_rate": 1.6199999999999997e-05,
      "loss": 1.9412,
      "step": 50
    },
    {
      "epoch": 0.02,
      "grad_norm": 181.65188510965584,
      "learning_rate": 2.2199999999999998e-05,
      "loss": 2.3468,
      "step": 60
    },
    {
      "epoch": 0.02,
      "grad_norm": 60.78222784908158,
      "learning_rate": 2.8199999999999998e-05,
      "loss": 1.3293,
      "step": 70
    },
    {
      "epoch": 0.02,
      "grad_norm": 67.96261738087682,
      "learning_rate": 3.42e-05,
      "loss": 1.4593,
      "step": 80
    },
    {
      "epoch": 0.02,
      "grad_norm": 86.79690686353207,
      "learning_rate": 4.02e-05,
      "loss": 1.734,
      "step": 90
    },
    {
      "epoch": 0.03,
      "grad_norm": 129.6077267827689,
      "learning_rate": 4.62e-05,
      "loss": 1.8397,
      "step": 100
    },
    {
      "epoch": 0.03,
      "grad_norm": 128.778169813118,
      "learning_rate": 5.2199999999999995e-05,
      "loss": 2.128,
      "step": 110
    },
    {
      "epoch": 0.03,
      "grad_norm": 66.30008183585016,
      "learning_rate": 5.82e-05,
      "loss": 1.1456,
      "step": 120
    },
    {
      "epoch": 0.03,
      "grad_norm": 18.090731070809323,
      "learning_rate": 6.419999999999999e-05,
      "loss": 0.6283,
      "step": 130
    },
    {
      "epoch": 0.04,
      "grad_norm": 74.2338848286518,
      "learning_rate": 7.02e-05,
      "loss": 1.0037,
      "step": 140
    },
    {
      "epoch": 0.04,
      "grad_norm": 31.89756478099821,
      "learning_rate": 7.62e-05,
      "loss": 0.7777,
      "step": 150
    },
    {
      "epoch": 0.04,
      "grad_norm": 72.26210302069988,
      "learning_rate": 8.22e-05,
      "loss": 1.4065,
      "step": 160
    },
    {
      "epoch": 0.04,
      "grad_norm": 40.07021523514231,
      "learning_rate": 8.819999999999999e-05,
      "loss": 1.3171,
      "step": 170
    },
    {
      "epoch": 0.05,
      "grad_norm": 42.58218330850966,
      "learning_rate": 9.419999999999999e-05,
      "loss": 1.2867,
      "step": 180
    },
    {
      "epoch": 0.05,
      "grad_norm": 23.51267950170972,
      "learning_rate": 0.0001002,
      "loss": 1.2002,
      "step": 190
    },
    {
      "epoch": 0.05,
      "grad_norm": 63.83972633293598,
      "learning_rate": 0.00010619999999999998,
      "loss": 0.9739,
      "step": 200
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.632685004324832,
      "learning_rate": 0.00011219999999999999,
      "loss": 0.6737,
      "step": 210
    },
    {
      "epoch": 0.06,
      "grad_norm": 23.885371404594686,
      "learning_rate": 0.0001182,
      "loss": 0.6572,
      "step": 220
    },
    {
      "epoch": 0.06,
      "grad_norm": 17.49905029172192,
      "learning_rate": 0.00012419999999999998,
      "loss": 0.7263,
      "step": 230
    },
    {
      "epoch": 0.06,
      "grad_norm": 14.325576955018139,
      "learning_rate": 0.0001302,
      "loss": 0.5531,
      "step": 240
    },
    {
      "epoch": 0.06,
      "grad_norm": 8.680218121561142,
      "learning_rate": 0.0001362,
      "loss": 0.694,
      "step": 250
    },
    {
      "epoch": 0.07,
      "grad_norm": 30.394743554491985,
      "learning_rate": 0.0001422,
      "loss": 0.9192,
      "step": 260
    },
    {
      "epoch": 0.07,
      "grad_norm": 14.645189915067395,
      "learning_rate": 0.0001482,
      "loss": 0.8989,
      "step": 270
    },
    {
      "epoch": 0.07,
      "grad_norm": 40.455222802195145,
      "learning_rate": 0.00015419999999999998,
      "loss": 1.8282,
      "step": 280
    },
    {
      "epoch": 0.07,
      "grad_norm": 40.2149893166574,
      "learning_rate": 0.0001602,
      "loss": 1.455,
      "step": 290
    },
    {
      "epoch": 0.08,
      "grad_norm": 12.86712929272992,
      "learning_rate": 0.0001662,
      "loss": 1.3317,
      "step": 300
    },
    {
      "epoch": 0.08,
      "grad_norm": 38.867456553692456,
      "learning_rate": 0.00017219999999999998,
      "loss": 0.938,
      "step": 310
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.2591410314567515,
      "learning_rate": 0.00017819999999999997,
      "loss": 1.2697,
      "step": 320
    },
    {
      "epoch": 0.08,
      "grad_norm": 25.615561417103653,
      "learning_rate": 0.00018419999999999998,
      "loss": 1.0933,
      "step": 330
    },
    {
      "epoch": 0.09,
      "grad_norm": 25.14845233927464,
      "learning_rate": 0.0001902,
      "loss": 1.0341,
      "step": 340
    },
    {
      "epoch": 0.09,
      "grad_norm": 42.51003360957394,
      "learning_rate": 0.0001962,
      "loss": 0.8678,
      "step": 350
    },
    {
      "epoch": 0.09,
      "grad_norm": 12.128840825001165,
      "learning_rate": 0.0002022,
      "loss": 0.7396,
      "step": 360
    },
    {
      "epoch": 0.09,
      "grad_norm": 9.964848825295658,
      "learning_rate": 0.00020819999999999996,
      "loss": 0.7442,
      "step": 370
    },
    {
      "epoch": 0.1,
      "grad_norm": 39.59298428319672,
      "learning_rate": 0.00021419999999999998,
      "loss": 1.0503,
      "step": 380
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.7185329297433249,
      "learning_rate": 0.00022019999999999999,
      "loss": 1.2561,
      "step": 390
    },
    {
      "epoch": 0.1,
      "grad_norm": 9.015303486193389,
      "learning_rate": 0.00022619999999999997,
      "loss": 1.6315,
      "step": 400
    },
    {
      "epoch": 0.1,
      "grad_norm": 39.887835051307775,
      "learning_rate": 0.00023219999999999998,
      "loss": 1.0392,
      "step": 410
    },
    {
      "epoch": 0.11,
      "grad_norm": 17.789030039861967,
      "learning_rate": 0.0002382,
      "loss": 1.0087,
      "step": 420
    },
    {
      "epoch": 0.11,
      "grad_norm": 21.119668675796365,
      "learning_rate": 0.00024419999999999997,
      "loss": 1.1151,
      "step": 430
    },
    {
      "epoch": 0.11,
      "grad_norm": 25.79842512878894,
      "learning_rate": 0.00025019999999999996,
      "loss": 1.088,
      "step": 440
    },
    {
      "epoch": 0.11,
      "grad_norm": 15.848194192167474,
      "learning_rate": 0.0002562,
      "loss": 0.7838,
      "step": 450
    },
    {
      "epoch": 0.12,
      "grad_norm": 15.398937771226997,
      "learning_rate": 0.0002622,
      "loss": 1.0873,
      "step": 460
    },
    {
      "epoch": 0.12,
      "grad_norm": 20.90332391672475,
      "learning_rate": 0.00026819999999999996,
      "loss": 0.9293,
      "step": 470
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.3634772721877364,
      "learning_rate": 0.0002742,
      "loss": 0.8292,
      "step": 480
    },
    {
      "epoch": 0.12,
      "grad_norm": 25.53471193923469,
      "learning_rate": 0.0002802,
      "loss": 0.8938,
      "step": 490
    },
    {
      "epoch": 0.13,
      "grad_norm": 23.763159586392472,
      "learning_rate": 0.00028619999999999996,
      "loss": 1.0155,
      "step": 500
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.7779129544247447,
      "learning_rate": 0.00029219999999999995,
      "loss": 1.1806,
      "step": 510
    },
    {
      "epoch": 0.13,
      "grad_norm": 13.642477580459792,
      "learning_rate": 0.0002982,
      "loss": 0.911,
      "step": 520
    },
    {
      "epoch": 0.13,
      "grad_norm": 27.669745330451978,
      "learning_rate": 0.0002998151245708249,
      "loss": 1.1182,
      "step": 530
    },
    {
      "epoch": 0.14,
      "grad_norm": 19.858690106717045,
      "learning_rate": 0.00029955101681486044,
      "loss": 1.1516,
      "step": 540
    },
    {
      "epoch": 0.14,
      "grad_norm": 15.566308581477797,
      "learning_rate": 0.00029928690905889604,
      "loss": 1.0132,
      "step": 550
    },
    {
      "epoch": 0.14,
      "grad_norm": 3.539866709492637,
      "learning_rate": 0.0002990228013029316,
      "loss": 0.9881,
      "step": 560
    },
    {
      "epoch": 0.14,
      "grad_norm": 33.14180454779081,
      "learning_rate": 0.00029875869354696713,
      "loss": 1.4354,
      "step": 570
    },
    {
      "epoch": 0.15,
      "grad_norm": 18.209134483516564,
      "learning_rate": 0.0002984945857910027,
      "loss": 1.2814,
      "step": 580
    },
    {
      "epoch": 0.15,
      "grad_norm": 3.5280953085313502,
      "learning_rate": 0.0002982304780350383,
      "loss": 0.6949,
      "step": 590
    },
    {
      "epoch": 0.15,
      "grad_norm": 13.662246550170599,
      "learning_rate": 0.00029796637027907383,
      "loss": 0.7094,
      "step": 600
    },
    {
      "epoch": 0.15,
      "grad_norm": 14.747851518871128,
      "learning_rate": 0.0002977022625231094,
      "loss": 0.7794,
      "step": 610
    },
    {
      "epoch": 0.16,
      "grad_norm": 8.859442783782644,
      "learning_rate": 0.000297438154767145,
      "loss": 0.6411,
      "step": 620
    },
    {
      "epoch": 0.16,
      "grad_norm": 12.96927823289309,
      "learning_rate": 0.0002971740470111805,
      "loss": 0.7762,
      "step": 630
    },
    {
      "epoch": 0.16,
      "grad_norm": 38.807838563909925,
      "learning_rate": 0.0002969099392552161,
      "loss": 0.8307,
      "step": 640
    },
    {
      "epoch": 0.16,
      "grad_norm": 6.078824122582168,
      "learning_rate": 0.00029664583149925167,
      "loss": 0.7072,
      "step": 650
    },
    {
      "epoch": 0.17,
      "grad_norm": 7.516433721876162,
      "learning_rate": 0.00029638172374328727,
      "loss": 0.6187,
      "step": 660
    },
    {
      "epoch": 0.17,
      "grad_norm": 17.95247613200742,
      "learning_rate": 0.0002961176159873228,
      "loss": 0.7584,
      "step": 670
    },
    {
      "epoch": 0.17,
      "grad_norm": 8.72122601802889,
      "learning_rate": 0.00029585350823135837,
      "loss": 0.697,
      "step": 680
    },
    {
      "epoch": 0.17,
      "grad_norm": 3.3859041275387476,
      "learning_rate": 0.0002955894004753939,
      "loss": 0.6976,
      "step": 690
    },
    {
      "epoch": 0.18,
      "grad_norm": 39.987188376655034,
      "learning_rate": 0.00029532529271942946,
      "loss": 0.8641,
      "step": 700
    },
    {
      "epoch": 0.18,
      "grad_norm": 20.0944364520948,
      "learning_rate": 0.00029506118496346506,
      "loss": 1.1189,
      "step": 710
    },
    {
      "epoch": 0.18,
      "grad_norm": 11.747621641446118,
      "learning_rate": 0.0002947970772075006,
      "loss": 1.1795,
      "step": 720
    },
    {
      "epoch": 0.18,
      "grad_norm": 6.473321247415091,
      "learning_rate": 0.0002945329694515362,
      "loss": 0.8382,
      "step": 730
    },
    {
      "epoch": 0.19,
      "grad_norm": 10.69289455466116,
      "learning_rate": 0.00029426886169557176,
      "loss": 0.7501,
      "step": 740
    },
    {
      "epoch": 0.19,
      "grad_norm": 4.623860952912251,
      "learning_rate": 0.00029400475393960736,
      "loss": 0.6535,
      "step": 750
    },
    {
      "epoch": 0.19,
      "grad_norm": 7.418967068846029,
      "learning_rate": 0.0002937406461836429,
      "loss": 0.7884,
      "step": 760
    },
    {
      "epoch": 0.19,
      "grad_norm": 5.599297021151376,
      "learning_rate": 0.00029347653842767845,
      "loss": 0.6215,
      "step": 770
    },
    {
      "epoch": 0.2,
      "grad_norm": 3.1212965208019026,
      "learning_rate": 0.000293212430671714,
      "loss": 0.7527,
      "step": 780
    },
    {
      "epoch": 0.2,
      "grad_norm": 10.25996637792451,
      "learning_rate": 0.0002929483229157496,
      "loss": 0.761,
      "step": 790
    },
    {
      "epoch": 0.2,
      "grad_norm": 21.335470985081095,
      "learning_rate": 0.00029268421515978515,
      "loss": 1.0277,
      "step": 800
    },
    {
      "epoch": 0.2,
      "grad_norm": 23.380006601416916,
      "learning_rate": 0.00029242010740382075,
      "loss": 0.8902,
      "step": 810
    },
    {
      "epoch": 0.21,
      "grad_norm": 17.392278392164588,
      "learning_rate": 0.0002921559996478563,
      "loss": 0.7655,
      "step": 820
    },
    {
      "epoch": 0.21,
      "grad_norm": 5.304718429892404,
      "learning_rate": 0.0002918918918918919,
      "loss": 0.7682,
      "step": 830
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.8327331801754974,
      "learning_rate": 0.00029162778413592745,
      "loss": 0.6086,
      "step": 840
    },
    {
      "epoch": 0.22,
      "grad_norm": 32.97517046309539,
      "learning_rate": 0.000291363676379963,
      "loss": 0.7071,
      "step": 850
    },
    {
      "epoch": 0.22,
      "grad_norm": 10.785667285051565,
      "learning_rate": 0.0002910995686239986,
      "loss": 0.8056,
      "step": 860
    },
    {
      "epoch": 0.22,
      "grad_norm": 11.498089410707394,
      "learning_rate": 0.00029083546086803414,
      "loss": 0.7692,
      "step": 870
    },
    {
      "epoch": 0.22,
      "grad_norm": 6.145717003985533,
      "learning_rate": 0.0002905713531120697,
      "loss": 0.7532,
      "step": 880
    },
    {
      "epoch": 0.23,
      "grad_norm": 4.931492489393408,
      "learning_rate": 0.00029030724535610524,
      "loss": 0.6855,
      "step": 890
    },
    {
      "epoch": 0.23,
      "grad_norm": 12.008189140846019,
      "learning_rate": 0.00029004313760014084,
      "loss": 0.5884,
      "step": 900
    },
    {
      "epoch": 0.23,
      "grad_norm": 28.461967837050068,
      "learning_rate": 0.0002897790298441764,
      "loss": 0.7168,
      "step": 910
    },
    {
      "epoch": 0.23,
      "grad_norm": 5.19188899701212,
      "learning_rate": 0.000289514922088212,
      "loss": 0.5666,
      "step": 920
    },
    {
      "epoch": 0.24,
      "grad_norm": 25.557101759286965,
      "learning_rate": 0.00028925081433224753,
      "loss": 0.672,
      "step": 930
    },
    {
      "epoch": 0.24,
      "grad_norm": 14.361742645303556,
      "learning_rate": 0.00028898670657628313,
      "loss": 0.8772,
      "step": 940
    },
    {
      "epoch": 0.24,
      "grad_norm": 27.123395529551424,
      "learning_rate": 0.0002887225988203187,
      "loss": 1.2076,
      "step": 950
    },
    {
      "epoch": 0.24,
      "grad_norm": 5.700071862540531,
      "learning_rate": 0.00028845849106435423,
      "loss": 1.7946,
      "step": 960
    },
    {
      "epoch": 0.25,
      "grad_norm": 23.71993783255748,
      "learning_rate": 0.0002881943833083898,
      "loss": 0.72,
      "step": 970
    },
    {
      "epoch": 0.25,
      "grad_norm": 13.997692229274783,
      "learning_rate": 0.0002879302755524254,
      "loss": 0.7971,
      "step": 980
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.935951663120656,
      "learning_rate": 0.0002876661677964609,
      "loss": 0.6953,
      "step": 990
    },
    {
      "epoch": 0.25,
      "grad_norm": 18.097171459954673,
      "learning_rate": 0.00028740206004049647,
      "loss": 0.67,
      "step": 1000
    },
    {
      "epoch": 0.26,
      "grad_norm": 4.44313735700682,
      "learning_rate": 0.00028713795228453207,
      "loss": 0.7554,
      "step": 1010
    },
    {
      "epoch": 0.26,
      "grad_norm": 4.311122698753216,
      "learning_rate": 0.0002868738445285676,
      "loss": 0.6804,
      "step": 1020
    },
    {
      "epoch": 0.26,
      "grad_norm": 3.703908413146344,
      "learning_rate": 0.0002866097367726032,
      "loss": 0.7243,
      "step": 1030
    },
    {
      "epoch": 0.26,
      "grad_norm": 12.117182879812908,
      "learning_rate": 0.00028634562901663877,
      "loss": 0.7858,
      "step": 1040
    },
    {
      "epoch": 0.27,
      "grad_norm": 73.12653578751251,
      "learning_rate": 0.0002860815212606743,
      "loss": 0.779,
      "step": 1050
    },
    {
      "epoch": 0.27,
      "grad_norm": 13.873978056302795,
      "learning_rate": 0.0002858174135047099,
      "loss": 0.8547,
      "step": 1060
    },
    {
      "epoch": 0.27,
      "grad_norm": 27.29586810442331,
      "learning_rate": 0.00028555330574874546,
      "loss": 0.9191,
      "step": 1070
    },
    {
      "epoch": 0.27,
      "grad_norm": 8.436221863931042,
      "learning_rate": 0.000285289197992781,
      "loss": 0.7295,
      "step": 1080
    },
    {
      "epoch": 0.28,
      "grad_norm": 9.590273107484888,
      "learning_rate": 0.0002850250902368166,
      "loss": 0.6445,
      "step": 1090
    },
    {
      "epoch": 0.28,
      "grad_norm": 4.040340894332966,
      "learning_rate": 0.00028476098248085216,
      "loss": 0.7118,
      "step": 1100
    },
    {
      "epoch": 0.28,
      "grad_norm": 11.765987496408746,
      "learning_rate": 0.0002844968747248877,
      "loss": 0.6206,
      "step": 1110
    },
    {
      "epoch": 0.28,
      "grad_norm": 3.067285304810526,
      "learning_rate": 0.0002842327669689233,
      "loss": 0.6261,
      "step": 1120
    },
    {
      "epoch": 0.29,
      "grad_norm": 9.574834164933826,
      "learning_rate": 0.00028396865921295885,
      "loss": 0.5988,
      "step": 1130
    },
    {
      "epoch": 0.29,
      "grad_norm": 8.639025152901535,
      "learning_rate": 0.00028370455145699445,
      "loss": 0.6358,
      "step": 1140
    },
    {
      "epoch": 0.29,
      "grad_norm": 8.33927811216473,
      "learning_rate": 0.00028344044370103,
      "loss": 0.6151,
      "step": 1150
    },
    {
      "epoch": 0.29,
      "grad_norm": 6.710721165097775,
      "learning_rate": 0.00028317633594506555,
      "loss": 0.5678,
      "step": 1160
    },
    {
      "epoch": 0.3,
      "grad_norm": 7.834141261057005,
      "learning_rate": 0.0002829122281891011,
      "loss": 0.619,
      "step": 1170
    },
    {
      "epoch": 0.3,
      "grad_norm": 7.21024200850564,
      "learning_rate": 0.0002826481204331367,
      "loss": 0.5213,
      "step": 1180
    },
    {
      "epoch": 0.3,
      "grad_norm": 14.05609595603206,
      "learning_rate": 0.00028238401267717224,
      "loss": 0.6939,
      "step": 1190
    },
    {
      "epoch": 0.3,
      "grad_norm": 12.306991788875578,
      "learning_rate": 0.00028211990492120785,
      "loss": 0.6109,
      "step": 1200
    },
    {
      "epoch": 0.31,
      "grad_norm": 8.95186713005476,
      "learning_rate": 0.0002818557971652434,
      "loss": 0.7076,
      "step": 1210
    },
    {
      "epoch": 0.31,
      "grad_norm": 11.767871616866502,
      "learning_rate": 0.000281591689409279,
      "loss": 0.5459,
      "step": 1220
    },
    {
      "epoch": 0.31,
      "grad_norm": 3.2642817789157834,
      "learning_rate": 0.00028132758165331454,
      "loss": 0.7554,
      "step": 1230
    },
    {
      "epoch": 0.31,
      "grad_norm": 11.178081059149552,
      "learning_rate": 0.0002810634738973501,
      "loss": 0.6658,
      "step": 1240
    },
    {
      "epoch": 0.32,
      "grad_norm": 3.247946181517104,
      "learning_rate": 0.00028079936614138564,
      "loss": 0.7952,
      "step": 1250
    },
    {
      "epoch": 0.32,
      "grad_norm": 26.230589608874784,
      "learning_rate": 0.00028053525838542124,
      "loss": 0.646,
      "step": 1260
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.595936896088998,
      "learning_rate": 0.0002802711506294568,
      "loss": 0.6389,
      "step": 1270
    },
    {
      "epoch": 0.32,
      "grad_norm": 7.805550894077348,
      "learning_rate": 0.00028000704287349233,
      "loss": 0.6685,
      "step": 1280
    },
    {
      "epoch": 0.33,
      "grad_norm": 3.854245170446724,
      "learning_rate": 0.00027974293511752793,
      "loss": 0.6371,
      "step": 1290
    },
    {
      "epoch": 0.33,
      "grad_norm": 10.383564421649856,
      "learning_rate": 0.0002794788273615635,
      "loss": 0.6386,
      "step": 1300
    },
    {
      "epoch": 0.33,
      "grad_norm": 9.361896888396116,
      "learning_rate": 0.0002792147196055991,
      "loss": 0.5856,
      "step": 1310
    },
    {
      "epoch": 0.33,
      "grad_norm": 4.331278217830638,
      "learning_rate": 0.00027895061184963463,
      "loss": 0.5423,
      "step": 1320
    },
    {
      "epoch": 0.34,
      "grad_norm": 14.006733190567504,
      "learning_rate": 0.00027868650409367023,
      "loss": 0.6314,
      "step": 1330
    },
    {
      "epoch": 0.34,
      "grad_norm": 22.004723578235705,
      "learning_rate": 0.0002784223963377058,
      "loss": 0.6539,
      "step": 1340
    },
    {
      "epoch": 0.34,
      "grad_norm": 14.007983160388825,
      "learning_rate": 0.0002781582885817413,
      "loss": 0.638,
      "step": 1350
    },
    {
      "epoch": 0.34,
      "grad_norm": 6.926185711207916,
      "learning_rate": 0.00027789418082577687,
      "loss": 0.6127,
      "step": 1360
    },
    {
      "epoch": 0.35,
      "grad_norm": 3.136580587101956,
      "learning_rate": 0.00027763007306981247,
      "loss": 0.5212,
      "step": 1370
    },
    {
      "epoch": 0.35,
      "grad_norm": 8.2192146030344,
      "learning_rate": 0.000277365965313848,
      "loss": 0.8935,
      "step": 1380
    },
    {
      "epoch": 0.35,
      "grad_norm": 15.497566564721284,
      "learning_rate": 0.00027710185755788357,
      "loss": 0.7205,
      "step": 1390
    },
    {
      "epoch": 0.35,
      "grad_norm": 3.8681942113478143,
      "learning_rate": 0.00027683774980191917,
      "loss": 0.6125,
      "step": 1400
    },
    {
      "epoch": 0.36,
      "grad_norm": 12.058902856382517,
      "learning_rate": 0.0002765736420459547,
      "loss": 0.5452,
      "step": 1410
    },
    {
      "epoch": 0.36,
      "grad_norm": 18.525701145994237,
      "learning_rate": 0.0002763095342899903,
      "loss": 0.5644,
      "step": 1420
    },
    {
      "epoch": 0.36,
      "grad_norm": 3.3200261698006592,
      "learning_rate": 0.00027604542653402586,
      "loss": 0.4896,
      "step": 1430
    },
    {
      "epoch": 0.36,
      "grad_norm": 6.419398855450636,
      "learning_rate": 0.0002757813187780614,
      "loss": 0.6601,
      "step": 1440
    },
    {
      "epoch": 0.37,
      "grad_norm": 6.999656150453379,
      "learning_rate": 0.00027551721102209696,
      "loss": 0.6464,
      "step": 1450
    },
    {
      "epoch": 0.37,
      "grad_norm": 7.417685550026515,
      "learning_rate": 0.00027525310326613256,
      "loss": 0.6317,
      "step": 1460
    },
    {
      "epoch": 0.37,
      "grad_norm": 14.045184127020653,
      "learning_rate": 0.0002749889955101681,
      "loss": 0.7528,
      "step": 1470
    },
    {
      "epoch": 0.37,
      "grad_norm": 5.390939440920372,
      "learning_rate": 0.0002747248877542037,
      "loss": 0.7923,
      "step": 1480
    },
    {
      "epoch": 0.38,
      "grad_norm": 4.379366418554961,
      "learning_rate": 0.00027446077999823925,
      "loss": 0.5519,
      "step": 1490
    },
    {
      "epoch": 0.38,
      "grad_norm": 24.109342420534976,
      "learning_rate": 0.00027419667224227486,
      "loss": 0.665,
      "step": 1500
    },
    {
      "epoch": 0.38,
      "grad_norm": 16.033689274285177,
      "learning_rate": 0.0002739325644863104,
      "loss": 0.6613,
      "step": 1510
    },
    {
      "epoch": 0.38,
      "grad_norm": 7.417873320686352,
      "learning_rate": 0.00027366845673034595,
      "loss": 0.5534,
      "step": 1520
    },
    {
      "epoch": 0.39,
      "grad_norm": 9.774105476099281,
      "learning_rate": 0.00027340434897438155,
      "loss": 0.5153,
      "step": 1530
    },
    {
      "epoch": 0.39,
      "grad_norm": 5.156967697263397,
      "learning_rate": 0.0002731402412184171,
      "loss": 0.6953,
      "step": 1540
    },
    {
      "epoch": 0.39,
      "grad_norm": 8.541534147261922,
      "learning_rate": 0.00027287613346245265,
      "loss": 0.627,
      "step": 1550
    },
    {
      "epoch": 0.39,
      "grad_norm": 3.7595278692002374,
      "learning_rate": 0.0002726120257064882,
      "loss": 0.6443,
      "step": 1560
    },
    {
      "epoch": 0.4,
      "grad_norm": 8.442783531090978,
      "learning_rate": 0.0002723479179505238,
      "loss": 0.4679,
      "step": 1570
    },
    {
      "epoch": 0.4,
      "grad_norm": 4.415292857030091,
      "learning_rate": 0.00027208381019455934,
      "loss": 0.6031,
      "step": 1580
    },
    {
      "epoch": 0.4,
      "grad_norm": 7.168910593006703,
      "learning_rate": 0.00027181970243859494,
      "loss": 0.5918,
      "step": 1590
    },
    {
      "epoch": 0.4,
      "grad_norm": 12.97684498547781,
      "learning_rate": 0.0002715555946826305,
      "loss": 0.7227,
      "step": 1600
    },
    {
      "epoch": 0.41,
      "grad_norm": 5.009332020345263,
      "learning_rate": 0.0002712914869266661,
      "loss": 0.6532,
      "step": 1610
    },
    {
      "epoch": 0.41,
      "grad_norm": 15.075405052422697,
      "learning_rate": 0.00027102737917070164,
      "loss": 0.5465,
      "step": 1620
    },
    {
      "epoch": 0.41,
      "grad_norm": 13.557703015855992,
      "learning_rate": 0.0002707632714147372,
      "loss": 0.5378,
      "step": 1630
    },
    {
      "epoch": 0.41,
      "grad_norm": 3.020360863839114,
      "learning_rate": 0.00027049916365877273,
      "loss": 0.3787,
      "step": 1640
    },
    {
      "epoch": 0.42,
      "grad_norm": 8.106509945189808,
      "learning_rate": 0.00027023505590280833,
      "loss": 0.876,
      "step": 1650
    },
    {
      "epoch": 0.42,
      "grad_norm": 3.868486395804283,
      "learning_rate": 0.0002699709481468439,
      "loss": 0.7369,
      "step": 1660
    },
    {
      "epoch": 0.42,
      "grad_norm": 8.651700120327037,
      "learning_rate": 0.00026970684039087943,
      "loss": 0.5749,
      "step": 1670
    },
    {
      "epoch": 0.42,
      "grad_norm": 7.8253064792153335,
      "learning_rate": 0.00026944273263491503,
      "loss": 0.5524,
      "step": 1680
    },
    {
      "epoch": 0.43,
      "grad_norm": 18.10581395417861,
      "learning_rate": 0.0002691786248789506,
      "loss": 0.6028,
      "step": 1690
    },
    {
      "epoch": 0.43,
      "grad_norm": 10.549476686940514,
      "learning_rate": 0.0002689145171229862,
      "loss": 0.5602,
      "step": 1700
    },
    {
      "epoch": 0.43,
      "grad_norm": 10.059658671180737,
      "learning_rate": 0.0002686504093670217,
      "loss": 0.4547,
      "step": 1710
    },
    {
      "epoch": 0.44,
      "grad_norm": 5.638791804793982,
      "learning_rate": 0.00026838630161105727,
      "loss": 0.4888,
      "step": 1720
    },
    {
      "epoch": 0.44,
      "grad_norm": 19.886486264160368,
      "learning_rate": 0.00026812219385509287,
      "loss": 0.5952,
      "step": 1730
    },
    {
      "epoch": 0.44,
      "grad_norm": 3.801635078783133,
      "learning_rate": 0.0002678580860991284,
      "loss": 0.5706,
      "step": 1740
    },
    {
      "epoch": 0.44,
      "grad_norm": 14.710153578070637,
      "learning_rate": 0.00026759397834316397,
      "loss": 0.3925,
      "step": 1750
    },
    {
      "epoch": 0.45,
      "grad_norm": 12.069819724878684,
      "learning_rate": 0.00026732987058719957,
      "loss": 0.5302,
      "step": 1760
    },
    {
      "epoch": 0.45,
      "grad_norm": 5.456332856299659,
      "learning_rate": 0.0002670657628312351,
      "loss": 0.4644,
      "step": 1770
    },
    {
      "epoch": 0.45,
      "grad_norm": 9.691992871490271,
      "learning_rate": 0.00026680165507527066,
      "loss": 0.8107,
      "step": 1780
    },
    {
      "epoch": 0.45,
      "grad_norm": 6.993002963331337,
      "learning_rate": 0.00026653754731930626,
      "loss": 0.7395,
      "step": 1790
    },
    {
      "epoch": 0.46,
      "grad_norm": 16.442630137693712,
      "learning_rate": 0.0002662734395633418,
      "loss": 0.5488,
      "step": 1800
    },
    {
      "epoch": 0.46,
      "grad_norm": 8.58882149775824,
      "learning_rate": 0.0002660093318073774,
      "loss": 0.5637,
      "step": 1810
    },
    {
      "epoch": 0.46,
      "grad_norm": 10.186850399825746,
      "learning_rate": 0.00026574522405141296,
      "loss": 0.5963,
      "step": 1820
    },
    {
      "epoch": 0.46,
      "grad_norm": 3.50607476489273,
      "learning_rate": 0.0002654811162954485,
      "loss": 0.5356,
      "step": 1830
    },
    {
      "epoch": 0.47,
      "grad_norm": 4.555998790501263,
      "learning_rate": 0.00026521700853948405,
      "loss": 0.5437,
      "step": 1840
    },
    {
      "epoch": 0.47,
      "grad_norm": 6.011844232153095,
      "learning_rate": 0.00026495290078351965,
      "loss": 0.4832,
      "step": 1850
    },
    {
      "epoch": 0.47,
      "grad_norm": 7.805639693834114,
      "learning_rate": 0.0002646887930275552,
      "loss": 0.6261,
      "step": 1860
    },
    {
      "epoch": 0.47,
      "grad_norm": 12.964148309393078,
      "learning_rate": 0.0002644246852715908,
      "loss": 0.6932,
      "step": 1870
    },
    {
      "epoch": 0.48,
      "grad_norm": 4.582112479892344,
      "learning_rate": 0.00026416057751562635,
      "loss": 0.4207,
      "step": 1880
    },
    {
      "epoch": 0.48,
      "grad_norm": 4.670421832838283,
      "learning_rate": 0.00026389646975966195,
      "loss": 0.55,
      "step": 1890
    },
    {
      "epoch": 0.48,
      "grad_norm": 3.0688458539036425,
      "learning_rate": 0.0002636323620036975,
      "loss": 0.5457,
      "step": 1900
    },
    {
      "epoch": 0.48,
      "grad_norm": 5.925993593066434,
      "learning_rate": 0.00026336825424773305,
      "loss": 0.5954,
      "step": 1910
    },
    {
      "epoch": 0.49,
      "grad_norm": 2.8754276870701614,
      "learning_rate": 0.0002631041464917686,
      "loss": 0.4933,
      "step": 1920
    },
    {
      "epoch": 0.49,
      "grad_norm": 7.765372679005643,
      "learning_rate": 0.0002628400387358042,
      "loss": 0.4824,
      "step": 1930
    },
    {
      "epoch": 0.49,
      "grad_norm": 19.509189680845587,
      "learning_rate": 0.00026257593097983974,
      "loss": 0.6568,
      "step": 1940
    },
    {
      "epoch": 0.49,
      "grad_norm": 5.062175321616727,
      "learning_rate": 0.0002623118232238753,
      "loss": 0.6939,
      "step": 1950
    },
    {
      "epoch": 0.5,
      "grad_norm": 4.159611499070369,
      "learning_rate": 0.0002620477154679109,
      "loss": 0.6518,
      "step": 1960
    },
    {
      "epoch": 0.5,
      "grad_norm": 6.979648103703146,
      "learning_rate": 0.00026178360771194644,
      "loss": 0.5731,
      "step": 1970
    },
    {
      "epoch": 0.5,
      "grad_norm": 14.908967039411184,
      "learning_rate": 0.00026151949995598204,
      "loss": 0.6586,
      "step": 1980
    },
    {
      "epoch": 0.5,
      "grad_norm": 8.997627786755453,
      "learning_rate": 0.0002612553922000176,
      "loss": 0.5739,
      "step": 1990
    },
    {
      "epoch": 0.51,
      "grad_norm": 8.893499066551819,
      "learning_rate": 0.0002609912844440532,
      "loss": 0.6826,
      "step": 2000
    },
    {
      "epoch": 0.51,
      "grad_norm": 4.580638745855676,
      "learning_rate": 0.00026072717668808873,
      "loss": 0.5423,
      "step": 2010
    },
    {
      "epoch": 0.51,
      "grad_norm": 10.35996751327857,
      "learning_rate": 0.0002604630689321243,
      "loss": 0.665,
      "step": 2020
    },
    {
      "epoch": 0.51,
      "grad_norm": 9.354381241634792,
      "learning_rate": 0.00026019896117615983,
      "loss": 0.5428,
      "step": 2030
    },
    {
      "epoch": 0.52,
      "grad_norm": 3.92752738546648,
      "learning_rate": 0.00025993485342019543,
      "loss": 0.4283,
      "step": 2040
    },
    {
      "epoch": 0.52,
      "grad_norm": 4.551392879437307,
      "learning_rate": 0.000259670745664231,
      "loss": 0.437,
      "step": 2050
    },
    {
      "epoch": 0.52,
      "grad_norm": 4.462155992824948,
      "learning_rate": 0.0002594066379082665,
      "loss": 0.5491,
      "step": 2060
    },
    {
      "epoch": 0.52,
      "grad_norm": 4.8080515656559095,
      "learning_rate": 0.0002591425301523021,
      "loss": 0.5937,
      "step": 2070
    },
    {
      "epoch": 0.53,
      "grad_norm": 11.009135060561086,
      "learning_rate": 0.00025887842239633767,
      "loss": 0.5445,
      "step": 2080
    },
    {
      "epoch": 0.53,
      "grad_norm": 7.376854781509573,
      "learning_rate": 0.00025861431464037327,
      "loss": 0.7632,
      "step": 2090
    },
    {
      "epoch": 0.53,
      "grad_norm": 6.627380987442955,
      "learning_rate": 0.0002583502068844088,
      "loss": 0.5898,
      "step": 2100
    },
    {
      "epoch": 0.53,
      "grad_norm": 16.95687109435049,
      "learning_rate": 0.00025808609912844437,
      "loss": 0.5336,
      "step": 2110
    },
    {
      "epoch": 0.54,
      "grad_norm": 14.68013043671797,
      "learning_rate": 0.00025782199137247997,
      "loss": 0.5598,
      "step": 2120
    },
    {
      "epoch": 0.54,
      "grad_norm": 18.86018463552324,
      "learning_rate": 0.0002575578836165155,
      "loss": 0.6714,
      "step": 2130
    },
    {
      "epoch": 0.54,
      "grad_norm": 5.724843407945365,
      "learning_rate": 0.00025729377586055106,
      "loss": 0.5705,
      "step": 2140
    },
    {
      "epoch": 0.54,
      "grad_norm": 5.817247590572551,
      "learning_rate": 0.00025702966810458666,
      "loss": 0.4574,
      "step": 2150
    },
    {
      "epoch": 0.55,
      "grad_norm": 4.917842170261981,
      "learning_rate": 0.0002567655603486222,
      "loss": 0.587,
      "step": 2160
    },
    {
      "epoch": 0.55,
      "grad_norm": 7.627238820448371,
      "learning_rate": 0.0002565014525926578,
      "loss": 0.67,
      "step": 2170
    },
    {
      "epoch": 0.55,
      "grad_norm": 3.530085992123676,
      "learning_rate": 0.00025623734483669336,
      "loss": 0.6785,
      "step": 2180
    },
    {
      "epoch": 0.55,
      "grad_norm": 4.75173224313274,
      "learning_rate": 0.0002559732370807289,
      "loss": 0.4789,
      "step": 2190
    },
    {
      "epoch": 0.56,
      "grad_norm": 7.833713937208152,
      "learning_rate": 0.0002557091293247645,
      "loss": 0.6027,
      "step": 2200
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.208140165958884,
      "learning_rate": 0.00025544502156880005,
      "loss": 0.5169,
      "step": 2210
    },
    {
      "epoch": 0.56,
      "grad_norm": 9.736988531153909,
      "learning_rate": 0.0002551809138128356,
      "loss": 0.475,
      "step": 2220
    },
    {
      "epoch": 0.56,
      "grad_norm": 8.128800674222159,
      "learning_rate": 0.00025491680605687115,
      "loss": 0.5872,
      "step": 2230
    },
    {
      "epoch": 0.57,
      "grad_norm": 11.849855989751077,
      "learning_rate": 0.00025465269830090675,
      "loss": 0.6424,
      "step": 2240
    },
    {
      "epoch": 0.57,
      "grad_norm": 7.780041585488714,
      "learning_rate": 0.0002543885905449423,
      "loss": 0.5252,
      "step": 2250
    },
    {
      "epoch": 0.57,
      "grad_norm": 13.849384257077089,
      "learning_rate": 0.0002541244827889779,
      "loss": 0.518,
      "step": 2260
    },
    {
      "epoch": 0.57,
      "grad_norm": 3.305548999479876,
      "learning_rate": 0.00025386037503301345,
      "loss": 0.4546,
      "step": 2270
    },
    {
      "epoch": 0.58,
      "grad_norm": 6.507299601068285,
      "learning_rate": 0.00025359626727704905,
      "loss": 0.5043,
      "step": 2280
    },
    {
      "epoch": 0.58,
      "grad_norm": 4.388606168597159,
      "learning_rate": 0.0002533321595210846,
      "loss": 0.3738,
      "step": 2290
    },
    {
      "epoch": 0.58,
      "grad_norm": 3.7450663596711564,
      "learning_rate": 0.00025306805176512014,
      "loss": 0.5475,
      "step": 2300
    },
    {
      "epoch": 0.58,
      "grad_norm": 10.644110820061742,
      "learning_rate": 0.0002528039440091557,
      "loss": 0.5164,
      "step": 2310
    },
    {
      "epoch": 0.59,
      "grad_norm": 4.553493826310751,
      "learning_rate": 0.0002525398362531913,
      "loss": 0.6376,
      "step": 2320
    },
    {
      "epoch": 0.59,
      "grad_norm": 7.538284810132889,
      "learning_rate": 0.00025227572849722684,
      "loss": 0.546,
      "step": 2330
    },
    {
      "epoch": 0.59,
      "grad_norm": 3.255730794622848,
      "learning_rate": 0.0002520116207412624,
      "loss": 0.4938,
      "step": 2340
    },
    {
      "epoch": 0.59,
      "grad_norm": 8.06756441932208,
      "learning_rate": 0.000251747512985298,
      "loss": 0.6297,
      "step": 2350
    },
    {
      "epoch": 0.6,
      "grad_norm": 13.962411434452775,
      "learning_rate": 0.00025148340522933353,
      "loss": 0.4572,
      "step": 2360
    },
    {
      "epoch": 0.6,
      "grad_norm": 8.884013956961077,
      "learning_rate": 0.00025121929747336913,
      "loss": 0.518,
      "step": 2370
    },
    {
      "epoch": 0.6,
      "grad_norm": 12.741167916460428,
      "learning_rate": 0.0002509551897174047,
      "loss": 0.6492,
      "step": 2380
    },
    {
      "epoch": 0.6,
      "grad_norm": 7.5518542127821195,
      "learning_rate": 0.00025069108196144023,
      "loss": 0.4654,
      "step": 2390
    },
    {
      "epoch": 0.61,
      "grad_norm": 6.505009953324827,
      "learning_rate": 0.00025042697420547583,
      "loss": 0.4609,
      "step": 2400
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.971887581229028,
      "learning_rate": 0.0002501628664495114,
      "loss": 0.52,
      "step": 2410
    },
    {
      "epoch": 0.61,
      "grad_norm": 6.265527884403745,
      "learning_rate": 0.0002498987586935469,
      "loss": 0.6493,
      "step": 2420
    },
    {
      "epoch": 0.61,
      "grad_norm": 5.334607656416119,
      "learning_rate": 0.0002496346509375825,
      "loss": 0.6016,
      "step": 2430
    },
    {
      "epoch": 0.62,
      "grad_norm": 22.377156430759506,
      "learning_rate": 0.00024937054318161807,
      "loss": 0.6253,
      "step": 2440
    },
    {
      "epoch": 0.62,
      "grad_norm": 4.514787513790615,
      "learning_rate": 0.0002491064354256537,
      "loss": 0.4283,
      "step": 2450
    },
    {
      "epoch": 0.62,
      "grad_norm": 8.537546355671793,
      "learning_rate": 0.0002488423276696892,
      "loss": 0.4697,
      "step": 2460
    },
    {
      "epoch": 0.62,
      "grad_norm": 2.6373042514427474,
      "learning_rate": 0.00024857821991372477,
      "loss": 0.5043,
      "step": 2470
    },
    {
      "epoch": 0.63,
      "grad_norm": 2.732099378394687,
      "learning_rate": 0.00024831411215776037,
      "loss": 0.3868,
      "step": 2480
    },
    {
      "epoch": 0.63,
      "grad_norm": 5.2724796251734904,
      "learning_rate": 0.0002480500044017959,
      "loss": 0.4179,
      "step": 2490
    },
    {
      "epoch": 0.63,
      "grad_norm": 29.76737431870085,
      "learning_rate": 0.00024778589664583146,
      "loss": 0.7539,
      "step": 2500
    },
    {
      "epoch": 0.63,
      "grad_norm": 6.237457685623285,
      "learning_rate": 0.000247521788889867,
      "loss": 0.5763,
      "step": 2510
    },
    {
      "epoch": 0.64,
      "grad_norm": 3.630218202492239,
      "learning_rate": 0.0002472576811339026,
      "loss": 0.5585,
      "step": 2520
    },
    {
      "epoch": 0.64,
      "grad_norm": 9.10703033449879,
      "learning_rate": 0.00024699357337793816,
      "loss": 0.5287,
      "step": 2530
    },
    {
      "epoch": 0.64,
      "grad_norm": 12.936475845702056,
      "learning_rate": 0.00024672946562197376,
      "loss": 0.5337,
      "step": 2540
    },
    {
      "epoch": 0.65,
      "grad_norm": 13.42630275904361,
      "learning_rate": 0.0002464653578660093,
      "loss": 0.7086,
      "step": 2550
    },
    {
      "epoch": 0.65,
      "grad_norm": 4.118383950588261,
      "learning_rate": 0.0002462012501100449,
      "loss": 0.509,
      "step": 2560
    },
    {
      "epoch": 0.65,
      "grad_norm": 9.663275601075963,
      "learning_rate": 0.00024593714235408046,
      "loss": 0.3979,
      "step": 2570
    },
    {
      "epoch": 0.65,
      "grad_norm": 13.98420224056408,
      "learning_rate": 0.000245673034598116,
      "loss": 0.4386,
      "step": 2580
    },
    {
      "epoch": 0.66,
      "grad_norm": 7.55640251867126,
      "learning_rate": 0.00024540892684215155,
      "loss": 0.2599,
      "step": 2590
    },
    {
      "epoch": 0.66,
      "grad_norm": 19.14016970813088,
      "learning_rate": 0.00024514481908618715,
      "loss": 0.8847,
      "step": 2600
    },
    {
      "epoch": 0.66,
      "grad_norm": 6.560194655648171,
      "learning_rate": 0.0002448807113302227,
      "loss": 0.6883,
      "step": 2610
    },
    {
      "epoch": 0.66,
      "grad_norm": 5.905497694480768,
      "learning_rate": 0.00024461660357425824,
      "loss": 0.5749,
      "step": 2620
    },
    {
      "epoch": 0.67,
      "grad_norm": 6.9729602942440145,
      "learning_rate": 0.00024435249581829385,
      "loss": 0.5574,
      "step": 2630
    },
    {
      "epoch": 0.67,
      "grad_norm": 23.12193222317225,
      "learning_rate": 0.0002440883880623294,
      "loss": 0.568,
      "step": 2640
    },
    {
      "epoch": 0.67,
      "grad_norm": 12.26142704553814,
      "learning_rate": 0.000243824280306365,
      "loss": 0.4193,
      "step": 2650
    },
    {
      "epoch": 0.67,
      "grad_norm": 9.466163231555143,
      "learning_rate": 0.00024356017255040054,
      "loss": 0.5367,
      "step": 2660
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.909453729777092,
      "learning_rate": 0.00024329606479443612,
      "loss": 0.5698,
      "step": 2670
    },
    {
      "epoch": 0.68,
      "grad_norm": 2.3942741383189237,
      "learning_rate": 0.00024303195703847166,
      "loss": 0.4005,
      "step": 2680
    },
    {
      "epoch": 0.68,
      "grad_norm": 14.989998129309498,
      "learning_rate": 0.00024276784928250726,
      "loss": 0.457,
      "step": 2690
    },
    {
      "epoch": 0.68,
      "grad_norm": 10.2061024523183,
      "learning_rate": 0.0002425037415265428,
      "loss": 0.6318,
      "step": 2700
    },
    {
      "epoch": 0.69,
      "grad_norm": 7.2232790663472715,
      "learning_rate": 0.00024223963377057839,
      "loss": 0.435,
      "step": 2710
    },
    {
      "epoch": 0.69,
      "grad_norm": 5.638300708728188,
      "learning_rate": 0.00024197552601461393,
      "loss": 0.5269,
      "step": 2720
    },
    {
      "epoch": 0.69,
      "grad_norm": 3.589974429151266,
      "learning_rate": 0.00024171141825864948,
      "loss": 0.5127,
      "step": 2730
    },
    {
      "epoch": 0.69,
      "grad_norm": 11.244784211013016,
      "learning_rate": 0.00024144731050268508,
      "loss": 0.3722,
      "step": 2740
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.6011074716409204,
      "learning_rate": 0.00024118320274672063,
      "loss": 0.4151,
      "step": 2750
    },
    {
      "epoch": 0.7,
      "grad_norm": 13.474185150099283,
      "learning_rate": 0.0002409190949907562,
      "loss": 0.5032,
      "step": 2760
    },
    {
      "epoch": 0.7,
      "grad_norm": 3.9228510791340483,
      "learning_rate": 0.00024065498723479175,
      "loss": 0.4312,
      "step": 2770
    },
    {
      "epoch": 0.7,
      "grad_norm": 3.2931543769646483,
      "learning_rate": 0.00024039087947882735,
      "loss": 0.4744,
      "step": 2780
    },
    {
      "epoch": 0.71,
      "grad_norm": 10.662618625971549,
      "learning_rate": 0.0002401267717228629,
      "loss": 0.515,
      "step": 2790
    },
    {
      "epoch": 0.71,
      "grad_norm": 9.151796952250688,
      "learning_rate": 0.00023986266396689847,
      "loss": 0.5096,
      "step": 2800
    },
    {
      "epoch": 0.71,
      "grad_norm": 8.031392721473967,
      "learning_rate": 0.00023959855621093405,
      "loss": 0.3746,
      "step": 2810
    },
    {
      "epoch": 0.71,
      "grad_norm": 8.656245662846494,
      "learning_rate": 0.00023933444845496962,
      "loss": 0.6719,
      "step": 2820
    },
    {
      "epoch": 0.72,
      "grad_norm": 6.7056302688614196,
      "learning_rate": 0.00023907034069900517,
      "loss": 0.5397,
      "step": 2830
    },
    {
      "epoch": 0.72,
      "grad_norm": 11.202168572804897,
      "learning_rate": 0.00023880623294304074,
      "loss": 0.4804,
      "step": 2840
    },
    {
      "epoch": 0.72,
      "grad_norm": 4.560007457362698,
      "learning_rate": 0.00023854212518707632,
      "loss": 0.6195,
      "step": 2850
    },
    {
      "epoch": 0.72,
      "grad_norm": 3.3492079377472574,
      "learning_rate": 0.00023827801743111186,
      "loss": 0.4512,
      "step": 2860
    },
    {
      "epoch": 0.73,
      "grad_norm": 9.9189295158775,
      "learning_rate": 0.00023801390967514744,
      "loss": 0.4923,
      "step": 2870
    },
    {
      "epoch": 0.73,
      "grad_norm": 4.341286959201107,
      "learning_rate": 0.00023774980191918298,
      "loss": 0.388,
      "step": 2880
    },
    {
      "epoch": 0.73,
      "grad_norm": 10.293197096548353,
      "learning_rate": 0.00023748569416321859,
      "loss": 0.461,
      "step": 2890
    },
    {
      "epoch": 0.73,
      "grad_norm": 6.323508263958569,
      "learning_rate": 0.00023722158640725413,
      "loss": 0.4534,
      "step": 2900
    },
    {
      "epoch": 0.74,
      "grad_norm": 5.563996619300781,
      "learning_rate": 0.0002369574786512897,
      "loss": 0.6514,
      "step": 2910
    },
    {
      "epoch": 0.74,
      "grad_norm": 5.430706263067635,
      "learning_rate": 0.00023669337089532525,
      "loss": 0.4485,
      "step": 2920
    },
    {
      "epoch": 0.74,
      "grad_norm": 12.991699951667346,
      "learning_rate": 0.00023642926313936086,
      "loss": 0.5424,
      "step": 2930
    },
    {
      "epoch": 0.74,
      "grad_norm": 3.942064030959408,
      "learning_rate": 0.0002361651553833964,
      "loss": 0.6151,
      "step": 2940
    },
    {
      "epoch": 0.75,
      "grad_norm": 7.856411254695191,
      "learning_rate": 0.00023590104762743198,
      "loss": 0.4963,
      "step": 2950
    },
    {
      "epoch": 0.75,
      "grad_norm": 4.680294296841557,
      "learning_rate": 0.00023563693987146752,
      "loss": 0.3779,
      "step": 2960
    },
    {
      "epoch": 0.75,
      "grad_norm": 8.48135962742209,
      "learning_rate": 0.00023537283211550313,
      "loss": 0.4061,
      "step": 2970
    },
    {
      "epoch": 0.75,
      "grad_norm": 9.326470246659994,
      "learning_rate": 0.00023510872435953867,
      "loss": 0.4668,
      "step": 2980
    },
    {
      "epoch": 0.76,
      "grad_norm": 3.91107122608516,
      "learning_rate": 0.00023484461660357425,
      "loss": 0.6635,
      "step": 2990
    },
    {
      "epoch": 0.76,
      "grad_norm": 3.128915816791306,
      "learning_rate": 0.0002345805088476098,
      "loss": 0.6607,
      "step": 3000
    },
    {
      "epoch": 0.76,
      "grad_norm": 14.26004929646375,
      "learning_rate": 0.00023431640109164537,
      "loss": 0.3594,
      "step": 3010
    },
    {
      "epoch": 0.76,
      "grad_norm": 3.5966328597923884,
      "learning_rate": 0.00023405229333568094,
      "loss": 0.518,
      "step": 3020
    },
    {
      "epoch": 0.77,
      "grad_norm": 6.770970776458943,
      "learning_rate": 0.0002337881855797165,
      "loss": 0.5155,
      "step": 3030
    },
    {
      "epoch": 0.77,
      "grad_norm": 7.395416602742529,
      "learning_rate": 0.00023352407782375206,
      "loss": 0.4847,
      "step": 3040
    },
    {
      "epoch": 0.77,
      "grad_norm": 4.664220480982938,
      "learning_rate": 0.00023325997006778764,
      "loss": 0.5428,
      "step": 3050
    },
    {
      "epoch": 0.77,
      "grad_norm": 3.039233661639251,
      "learning_rate": 0.0002329958623118232,
      "loss": 0.3664,
      "step": 3060
    },
    {
      "epoch": 0.78,
      "grad_norm": 16.818377752518863,
      "learning_rate": 0.00023273175455585876,
      "loss": 0.4977,
      "step": 3070
    },
    {
      "epoch": 0.78,
      "grad_norm": 10.947186587998635,
      "learning_rate": 0.00023246764679989436,
      "loss": 0.4966,
      "step": 3080
    },
    {
      "epoch": 0.78,
      "grad_norm": 4.4808526444203745,
      "learning_rate": 0.0002322035390439299,
      "loss": 0.3452,
      "step": 3090
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.817395491877276,
      "learning_rate": 0.00023193943128796548,
      "loss": 0.4869,
      "step": 3100
    },
    {
      "epoch": 0.79,
      "grad_norm": 9.532075035626836,
      "learning_rate": 0.00023167532353200103,
      "loss": 0.6532,
      "step": 3110
    },
    {
      "epoch": 0.79,
      "grad_norm": 9.092901799047013,
      "learning_rate": 0.00023141121577603663,
      "loss": 0.4978,
      "step": 3120
    },
    {
      "epoch": 0.79,
      "grad_norm": 7.522284576872321,
      "learning_rate": 0.00023114710802007218,
      "loss": 0.5905,
      "step": 3130
    },
    {
      "epoch": 0.79,
      "grad_norm": 11.398355036614378,
      "learning_rate": 0.00023088300026410772,
      "loss": 0.5277,
      "step": 3140
    },
    {
      "epoch": 0.8,
      "grad_norm": 5.104585056324945,
      "learning_rate": 0.0002306188925081433,
      "loss": 0.356,
      "step": 3150
    },
    {
      "epoch": 0.8,
      "grad_norm": 8.034537089837142,
      "learning_rate": 0.00023035478475217885,
      "loss": 0.5848,
      "step": 3160
    },
    {
      "epoch": 0.8,
      "grad_norm": 8.857463981250751,
      "learning_rate": 0.00023009067699621445,
      "loss": 0.4997,
      "step": 3170
    },
    {
      "epoch": 0.8,
      "grad_norm": 12.727805428786185,
      "learning_rate": 0.00022982656924025,
      "loss": 0.5272,
      "step": 3180
    },
    {
      "epoch": 0.81,
      "grad_norm": 5.582360842526427,
      "learning_rate": 0.00022956246148428557,
      "loss": 0.4024,
      "step": 3190
    },
    {
      "epoch": 0.81,
      "grad_norm": 4.7027802507330385,
      "learning_rate": 0.00022929835372832112,
      "loss": 0.5709,
      "step": 3200
    },
    {
      "epoch": 0.81,
      "grad_norm": 13.650501833151731,
      "learning_rate": 0.00022903424597235672,
      "loss": 0.6356,
      "step": 3210
    },
    {
      "epoch": 0.81,
      "grad_norm": 3.538410769120061,
      "learning_rate": 0.00022877013821639226,
      "loss": 0.5364,
      "step": 3220
    },
    {
      "epoch": 0.82,
      "grad_norm": 12.421316460582023,
      "learning_rate": 0.00022850603046042784,
      "loss": 0.4789,
      "step": 3230
    },
    {
      "epoch": 0.82,
      "grad_norm": 16.382444105613263,
      "learning_rate": 0.00022824192270446338,
      "loss": 0.4113,
      "step": 3240
    },
    {
      "epoch": 0.82,
      "grad_norm": 9.389514021127093,
      "learning_rate": 0.00022797781494849899,
      "loss": 0.3535,
      "step": 3250
    },
    {
      "epoch": 0.82,
      "grad_norm": 10.79789184703559,
      "learning_rate": 0.00022771370719253453,
      "loss": 0.5936,
      "step": 3260
    },
    {
      "epoch": 0.83,
      "grad_norm": 3.942735153547533,
      "learning_rate": 0.00022744959943657008,
      "loss": 0.3831,
      "step": 3270
    },
    {
      "epoch": 0.83,
      "grad_norm": 6.62777327463562,
      "learning_rate": 0.00022718549168060568,
      "loss": 0.6384,
      "step": 3280
    },
    {
      "epoch": 0.83,
      "grad_norm": 6.950217620162037,
      "learning_rate": 0.00022692138392464123,
      "loss": 0.4969,
      "step": 3290
    },
    {
      "epoch": 0.83,
      "grad_norm": 16.399583992719105,
      "learning_rate": 0.0002266572761686768,
      "loss": 0.5946,
      "step": 3300
    },
    {
      "epoch": 0.84,
      "grad_norm": 5.234209657306621,
      "learning_rate": 0.00022639316841271235,
      "loss": 0.4933,
      "step": 3310
    },
    {
      "epoch": 0.84,
      "grad_norm": 3.6348460554697257,
      "learning_rate": 0.00022612906065674795,
      "loss": 0.4455,
      "step": 3320
    },
    {
      "epoch": 0.84,
      "grad_norm": 3.5205466182331855,
      "learning_rate": 0.0002258649529007835,
      "loss": 0.4379,
      "step": 3330
    },
    {
      "epoch": 0.84,
      "grad_norm": 8.930738370574772,
      "learning_rate": 0.00022560084514481907,
      "loss": 0.3278,
      "step": 3340
    },
    {
      "epoch": 0.85,
      "grad_norm": 3.035596470101852,
      "learning_rate": 0.00022533673738885462,
      "loss": 0.4408,
      "step": 3350
    },
    {
      "epoch": 0.85,
      "grad_norm": 4.63755703750273,
      "learning_rate": 0.00022507262963289022,
      "loss": 0.47,
      "step": 3360
    },
    {
      "epoch": 0.85,
      "grad_norm": 3.706132430365636,
      "learning_rate": 0.00022480852187692577,
      "loss": 0.4023,
      "step": 3370
    },
    {
      "epoch": 0.86,
      "grad_norm": 5.585769097917519,
      "learning_rate": 0.00022454441412096134,
      "loss": 0.5224,
      "step": 3380
    },
    {
      "epoch": 0.86,
      "grad_norm": 18.509301972265284,
      "learning_rate": 0.0002242803063649969,
      "loss": 0.5628,
      "step": 3390
    },
    {
      "epoch": 0.86,
      "grad_norm": 13.854335524550581,
      "learning_rate": 0.00022401619860903244,
      "loss": 0.4414,
      "step": 3400
    },
    {
      "epoch": 0.86,
      "grad_norm": 11.18763451721793,
      "learning_rate": 0.00022375209085306804,
      "loss": 0.5034,
      "step": 3410
    },
    {
      "epoch": 0.87,
      "grad_norm": 6.642950105893305,
      "learning_rate": 0.00022348798309710359,
      "loss": 0.6943,
      "step": 3420
    },
    {
      "epoch": 0.87,
      "grad_norm": 2.3505931032203895,
      "learning_rate": 0.00022322387534113916,
      "loss": 0.3146,
      "step": 3430
    },
    {
      "epoch": 0.87,
      "grad_norm": 6.8894968720438055,
      "learning_rate": 0.00022295976758517473,
      "loss": 0.4646,
      "step": 3440
    },
    {
      "epoch": 0.87,
      "grad_norm": 10.970096250118766,
      "learning_rate": 0.0002226956598292103,
      "loss": 0.4324,
      "step": 3450
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.871263672015693,
      "learning_rate": 0.00022243155207324585,
      "loss": 0.319,
      "step": 3460
    },
    {
      "epoch": 0.88,
      "grad_norm": 9.668433980826158,
      "learning_rate": 0.00022216744431728143,
      "loss": 0.5684,
      "step": 3470
    },
    {
      "epoch": 0.88,
      "grad_norm": 4.257915637753227,
      "learning_rate": 0.000221903336561317,
      "loss": 0.4304,
      "step": 3480
    },
    {
      "epoch": 0.88,
      "grad_norm": 7.170590886198052,
      "learning_rate": 0.00022163922880535258,
      "loss": 0.471,
      "step": 3490
    },
    {
      "epoch": 0.89,
      "grad_norm": 5.015283264716949,
      "learning_rate": 0.00022137512104938812,
      "loss": 0.4489,
      "step": 3500
    },
    {
      "epoch": 0.89,
      "grad_norm": 3.901924055363354,
      "learning_rate": 0.0002211110132934237,
      "loss": 0.4896,
      "step": 3510
    },
    {
      "epoch": 0.89,
      "grad_norm": 6.9521037069629354,
      "learning_rate": 0.00022084690553745927,
      "loss": 0.3511,
      "step": 3520
    },
    {
      "epoch": 0.89,
      "grad_norm": 5.022012302982127,
      "learning_rate": 0.00022058279778149485,
      "loss": 0.4645,
      "step": 3530
    },
    {
      "epoch": 0.9,
      "grad_norm": 11.693923281594277,
      "learning_rate": 0.0002203186900255304,
      "loss": 0.4974,
      "step": 3540
    },
    {
      "epoch": 0.9,
      "grad_norm": 6.93642332608164,
      "learning_rate": 0.00022005458226956594,
      "loss": 0.6888,
      "step": 3550
    },
    {
      "epoch": 0.9,
      "grad_norm": 4.162350094500878,
      "learning_rate": 0.00021979047451360154,
      "loss": 0.4518,
      "step": 3560
    },
    {
      "epoch": 0.9,
      "grad_norm": 7.619365411105759,
      "learning_rate": 0.0002195263667576371,
      "loss": 0.5504,
      "step": 3570
    },
    {
      "epoch": 0.91,
      "grad_norm": 9.025064819537734,
      "learning_rate": 0.00021926225900167266,
      "loss": 0.4704,
      "step": 3580
    },
    {
      "epoch": 0.91,
      "grad_norm": 3.8536752316347793,
      "learning_rate": 0.0002189981512457082,
      "loss": 0.3768,
      "step": 3590
    },
    {
      "epoch": 0.91,
      "grad_norm": 6.003788040997863,
      "learning_rate": 0.0002187340434897438,
      "loss": 0.5497,
      "step": 3600
    },
    {
      "epoch": 0.91,
      "grad_norm": 3.3294110305384472,
      "learning_rate": 0.00021846993573377936,
      "loss": 0.3457,
      "step": 3610
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.7516165448430834,
      "learning_rate": 0.00021820582797781493,
      "loss": 0.4296,
      "step": 3620
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.635906796431671,
      "learning_rate": 0.00021794172022185048,
      "loss": 0.3416,
      "step": 3630
    },
    {
      "epoch": 0.92,
      "grad_norm": 4.679756445448465,
      "learning_rate": 0.00021767761246588608,
      "loss": 0.61,
      "step": 3640
    },
    {
      "epoch": 0.92,
      "grad_norm": 6.295507977784215,
      "learning_rate": 0.00021741350470992163,
      "loss": 0.466,
      "step": 3650
    },
    {
      "epoch": 0.93,
      "grad_norm": 9.383640244060453,
      "learning_rate": 0.0002171493969539572,
      "loss": 0.5255,
      "step": 3660
    },
    {
      "epoch": 0.93,
      "grad_norm": 3.9598013665801877,
      "learning_rate": 0.00021688528919799275,
      "loss": 0.4679,
      "step": 3670
    },
    {
      "epoch": 0.93,
      "grad_norm": 3.194966826534859,
      "learning_rate": 0.00021662118144202832,
      "loss": 0.4265,
      "step": 3680
    },
    {
      "epoch": 0.93,
      "grad_norm": 20.31059851916358,
      "learning_rate": 0.0002163570736860639,
      "loss": 0.465,
      "step": 3690
    },
    {
      "epoch": 0.94,
      "grad_norm": 4.318095175840534,
      "learning_rate": 0.00021609296593009945,
      "loss": 0.3438,
      "step": 3700
    },
    {
      "epoch": 0.94,
      "grad_norm": 7.192871617361268,
      "learning_rate": 0.00021582885817413502,
      "loss": 0.4376,
      "step": 3710
    },
    {
      "epoch": 0.94,
      "grad_norm": 3.3160131090019607,
      "learning_rate": 0.0002155647504181706,
      "loss": 0.2692,
      "step": 3720
    },
    {
      "epoch": 0.94,
      "grad_norm": 7.911077873322694,
      "learning_rate": 0.00021530064266220617,
      "loss": 0.5164,
      "step": 3730
    },
    {
      "epoch": 0.95,
      "grad_norm": 6.800288114027628,
      "learning_rate": 0.00021503653490624172,
      "loss": 0.5284,
      "step": 3740
    },
    {
      "epoch": 0.95,
      "grad_norm": 8.935626744814929,
      "learning_rate": 0.00021477242715027732,
      "loss": 0.4553,
      "step": 3750
    },
    {
      "epoch": 0.95,
      "grad_norm": 16.092871581798644,
      "learning_rate": 0.00021450831939431286,
      "loss": 0.53,
      "step": 3760
    },
    {
      "epoch": 0.95,
      "grad_norm": 7.057776607002434,
      "learning_rate": 0.00021424421163834844,
      "loss": 0.6253,
      "step": 3770
    },
    {
      "epoch": 0.96,
      "grad_norm": 10.551995863164397,
      "learning_rate": 0.00021398010388238399,
      "loss": 0.562,
      "step": 3780
    },
    {
      "epoch": 0.96,
      "grad_norm": 6.456350239855379,
      "learning_rate": 0.0002137159961264196,
      "loss": 0.5066,
      "step": 3790
    },
    {
      "epoch": 0.96,
      "grad_norm": 13.67557205240678,
      "learning_rate": 0.00021345188837045513,
      "loss": 0.4223,
      "step": 3800
    },
    {
      "epoch": 0.96,
      "grad_norm": 8.439422356201387,
      "learning_rate": 0.00021318778061449068,
      "loss": 0.5762,
      "step": 3810
    },
    {
      "epoch": 0.97,
      "grad_norm": 7.289284921464211,
      "learning_rate": 0.00021292367285852626,
      "loss": 0.4309,
      "step": 3820
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.5246952543345638,
      "learning_rate": 0.0002126595651025618,
      "loss": 0.3514,
      "step": 3830
    },
    {
      "epoch": 0.97,
      "grad_norm": 6.90597694006036,
      "learning_rate": 0.0002123954573465974,
      "loss": 0.4376,
      "step": 3840
    },
    {
      "epoch": 0.97,
      "grad_norm": 6.264944109699735,
      "learning_rate": 0.00021213134959063295,
      "loss": 0.4439,
      "step": 3850
    },
    {
      "epoch": 0.98,
      "grad_norm": 5.2460797473723435,
      "learning_rate": 0.00021186724183466852,
      "loss": 0.4195,
      "step": 3860
    },
    {
      "epoch": 0.98,
      "grad_norm": 5.676528855829685,
      "learning_rate": 0.00021160313407870407,
      "loss": 0.4114,
      "step": 3870
    },
    {
      "epoch": 0.98,
      "grad_norm": 31.036901944651785,
      "learning_rate": 0.00021133902632273967,
      "loss": 0.6423,
      "step": 3880
    },
    {
      "epoch": 0.98,
      "grad_norm": 4.181578204077942,
      "learning_rate": 0.00021107491856677522,
      "loss": 0.4257,
      "step": 3890
    },
    {
      "epoch": 0.99,
      "grad_norm": 5.478353976688533,
      "learning_rate": 0.0002108108108108108,
      "loss": 0.5036,
      "step": 3900
    },
    {
      "epoch": 0.99,
      "grad_norm": 4.960466961810618,
      "learning_rate": 0.00021054670305484634,
      "loss": 0.4279,
      "step": 3910
    },
    {
      "epoch": 0.99,
      "grad_norm": 3.552220995181842,
      "learning_rate": 0.00021028259529888194,
      "loss": 0.2732,
      "step": 3920
    },
    {
      "epoch": 0.99,
      "grad_norm": 3.502280377970437,
      "learning_rate": 0.0002100184875429175,
      "loss": 0.5032,
      "step": 3930
    },
    {
      "epoch": 1.0,
      "grad_norm": 4.145692369646056,
      "learning_rate": 0.00020975437978695306,
      "loss": 0.3472,
      "step": 3940
    },
    {
      "epoch": 1.0,
      "grad_norm": 7.556295887410977,
      "learning_rate": 0.00020949027203098864,
      "loss": 0.4946,
      "step": 3950
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7051721568042928,
      "eval_f1": 0.6853729055052904,
      "eval_loss": 0.54150390625,
      "eval_precision": 0.7030204500613235,
      "eval_recall": 0.7681594809856258,
      "eval_runtime": 465.1347,
      "eval_samples_per_second": 14.424,
      "eval_steps_per_second": 2.406,
      "step": 3953
    },
    {
      "epoch": 1.0,
      "grad_norm": 14.750715824311628,
      "learning_rate": 0.00020922616427502419,
      "loss": 0.4315,
      "step": 3960
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.8211901469385654,
      "learning_rate": 0.00020896205651905976,
      "loss": 0.3911,
      "step": 3970
    },
    {
      "epoch": 1.01,
      "grad_norm": 8.203098593169805,
      "learning_rate": 0.0002086979487630953,
      "loss": 0.3039,
      "step": 3980
    },
    {
      "epoch": 1.01,
      "grad_norm": 10.102765895975901,
      "learning_rate": 0.0002084338410071309,
      "loss": 0.4994,
      "step": 3990
    },
    {
      "epoch": 1.01,
      "grad_norm": 6.616945269045702,
      "learning_rate": 0.00020816973325116646,
      "loss": 0.2976,
      "step": 4000
    },
    {
      "epoch": 1.01,
      "grad_norm": 3.384475721456983,
      "learning_rate": 0.00020790562549520203,
      "loss": 0.3492,
      "step": 4010
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.9550298854471444,
      "learning_rate": 0.00020764151773923758,
      "loss": 0.3062,
      "step": 4020
    },
    {
      "epoch": 1.02,
      "grad_norm": 1.0433509380121029,
      "learning_rate": 0.00020737740998327318,
      "loss": 0.3137,
      "step": 4030
    },
    {
      "epoch": 1.02,
|
"grad_norm": 4.748780651660099, |
|
"learning_rate": 0.00020711330222730873, |
|
"loss": 0.3892, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 3.1310767766394516, |
|
"learning_rate": 0.00020687560524694076, |
|
"loss": 0.3759, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 5.015625322347851, |
|
"learning_rate": 0.0002066114974909763, |
|
"loss": 0.3816, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 3.1539686658190753, |
|
"learning_rate": 0.00020634738973501188, |
|
"loss": 0.3234, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 3.2157031129713913, |
|
"learning_rate": 0.00020608328197904742, |
|
"loss": 0.2868, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 14.193121008750008, |
|
"learning_rate": 0.00020581917422308297, |
|
"loss": 0.5939, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 2.536030144737745, |
|
"learning_rate": 0.00020555506646711857, |
|
"loss": 0.2593, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 6.890194212277803, |
|
"learning_rate": 0.00020529095871115412, |
|
"loss": 0.2522, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 14.02265911651655, |
|
"learning_rate": 0.0002050268509551897, |
|
"loss": 0.5109, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 8.357623765347387, |
|
"learning_rate": 0.00020476274319922527, |
|
"loss": 0.3223, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 7.4581890825932495, |
|
"learning_rate": 0.00020449863544326084, |
|
"loss": 0.2692, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 3.2098633368648017, |
|
"learning_rate": 0.0002042345276872964, |
|
"loss": 0.4877, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 7.526325975589617, |
|
"learning_rate": 0.00020397041993133196, |
|
"loss": 0.3352, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 4.145980382522719, |
|
"learning_rate": 0.00020370631217536754, |
|
"loss": 0.2768, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 2.633811660630051, |
|
"learning_rate": 0.0002034422044194031, |
|
"loss": 0.2366, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.7060398651558935, |
|
"learning_rate": 0.00020317809666343866, |
|
"loss": 0.3208, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 13.154069609969191, |
|
"learning_rate": 0.00020291398890747423, |
|
"loss": 0.3124, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 3.2490397915258002, |
|
"learning_rate": 0.0002026498811515098, |
|
"loss": 0.3983, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 4.5074853084169275, |
|
"learning_rate": 0.00020238577339554538, |
|
"loss": 0.3634, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 2.432611350975554, |
|
"learning_rate": 0.00020212166563958093, |
|
"loss": 0.24, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 2.5110530289918245, |
|
"learning_rate": 0.00020185755788361648, |
|
"loss": 0.3825, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.6088717012760947, |
|
"learning_rate": 0.00020159345012765208, |
|
"loss": 0.2321, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 7.368818898360172, |
|
"learning_rate": 0.00020132934237168762, |
|
"loss": 0.4175, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 3.0566385971464793, |
|
"learning_rate": 0.0002010652346157232, |
|
"loss": 0.366, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 10.829763632525873, |
|
"learning_rate": 0.00020080112685975875, |
|
"loss": 0.2711, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 2.2340116043827645, |
|
"learning_rate": 0.00020053701910379435, |
|
"loss": 0.344, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 5.149576390393793, |
|
"learning_rate": 0.0002002729113478299, |
|
"loss": 0.3403, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 3.9124613694962838, |
|
"learning_rate": 0.00020000880359186547, |
|
"loss": 0.2991, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 12.488201847486222, |
|
"learning_rate": 0.00019974469583590101, |
|
"loss": 0.4423, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 3.884982728237774, |
|
"learning_rate": 0.00019948058807993662, |
|
"loss": 0.1908, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 10.049768614062495, |
|
"learning_rate": 0.00019921648032397216, |
|
"loss": 0.2456, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 10.859679307035771, |
|
"learning_rate": 0.00019895237256800774, |
|
"loss": 0.3236, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 9.167951613915271, |
|
"learning_rate": 0.00019868826481204328, |
|
"loss": 0.6773, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 11.674904564813565, |
|
"learning_rate": 0.00019842415705607886, |
|
"loss": 0.3841, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 8.491068804945932, |
|
"learning_rate": 0.00019816004930011443, |
|
"loss": 0.4128, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 13.741327205705838, |
|
"learning_rate": 0.00019789594154414998, |
|
"loss": 0.4301, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.77802489618066, |
|
"learning_rate": 0.00019763183378818555, |
|
"loss": 0.3121, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 2.893664716507649, |
|
"learning_rate": 0.00019736772603222113, |
|
"loss": 0.3624, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 4.519451727534314, |
|
"learning_rate": 0.0001971036182762567, |
|
"loss": 0.2001, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.2537621584939747, |
|
"learning_rate": 0.00019683951052029225, |
|
"loss": 0.4367, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 14.867532748144109, |
|
"learning_rate": 0.00019657540276432785, |
|
"loss": 0.354, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 4.670985068926129, |
|
"learning_rate": 0.0001963112950083634, |
|
"loss": 0.2072, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 7.179932982364767, |
|
"learning_rate": 0.00019604718725239897, |
|
"loss": 0.4212, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 7.238353935521112, |
|
"learning_rate": 0.00019578307949643452, |
|
"loss": 0.4491, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 3.0658773658616707, |
|
"learning_rate": 0.00019551897174047012, |
|
"loss": 0.4347, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 9.72293023841812, |
|
"learning_rate": 0.00019525486398450567, |
|
"loss": 0.4087, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 8.518136419487966, |
|
"learning_rate": 0.00019499075622854122, |
|
"loss": 0.2174, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 5.886714661286067, |
|
"learning_rate": 0.0001947266484725768, |
|
"loss": 0.3281, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 2.445400132301571, |
|
"learning_rate": 0.00019446254071661234, |
|
"loss": 0.3407, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 5.494881015581473, |
|
"learning_rate": 0.00019419843296064794, |
|
"loss": 0.3496, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 3.151665702679958, |
|
"learning_rate": 0.00019393432520468348, |
|
"loss": 0.2611, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 13.532449081098884, |
|
"learning_rate": 0.00019367021744871906, |
|
"loss": 0.3941, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 4.235985317755349, |
|
"learning_rate": 0.0001934061096927546, |
|
"loss": 0.3201, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 6.597392212444202, |
|
"learning_rate": 0.0001931420019367902, |
|
"loss": 0.4564, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 3.140435218562986, |
|
"learning_rate": 0.00019287789418082575, |
|
"loss": 0.2876, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 8.68166804572153, |
|
"learning_rate": 0.00019261378642486133, |
|
"loss": 0.4184, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 5.13946024002739, |
|
"learning_rate": 0.00019234967866889688, |
|
"loss": 0.2847, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 3.145968384230681, |
|
"learning_rate": 0.00019208557091293248, |
|
"loss": 0.2438, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 6.403929411620646, |
|
"learning_rate": 0.00019182146315696802, |
|
"loss": 0.3279, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 8.64016161912627, |
|
"learning_rate": 0.00019155735540100357, |
|
"loss": 0.3164, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 9.668585643160457, |
|
"learning_rate": 0.00019129324764503917, |
|
"loss": 0.3796, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 14.943998906205717, |
|
"learning_rate": 0.00019102913988907472, |
|
"loss": 0.3244, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 5.449038152539126, |
|
"learning_rate": 0.0001907650321331103, |
|
"loss": 0.4199, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 6.588596643185838, |
|
"learning_rate": 0.00019050092437714584, |
|
"loss": 0.317, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.2478841853180118, |
|
"learning_rate": 0.00019023681662118144, |
|
"loss": 0.2229, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 15.5335226123234, |
|
"learning_rate": 0.000189972708865217, |
|
"loss": 0.6227, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 8.419431089407398, |
|
"learning_rate": 0.00018970860110925256, |
|
"loss": 0.6332, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 3.236499078980196, |
|
"learning_rate": 0.0001894444933532881, |
|
"loss": 0.3134, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 2.575405335243286, |
|
"learning_rate": 0.0001891803855973237, |
|
"loss": 0.2478, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 4.5276012861681565, |
|
"learning_rate": 0.00018891627784135926, |
|
"loss": 0.3107, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 3.915244686315227, |
|
"learning_rate": 0.00018865217008539483, |
|
"loss": 0.2537, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.6314608277899836, |
|
"learning_rate": 0.00018838806232943038, |
|
"loss": 0.3013, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 4.4281299113190515, |
|
"learning_rate": 0.00018812395457346598, |
|
"loss": 0.4033, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.3344593707818075, |
|
"learning_rate": 0.00018785984681750153, |
|
"loss": 0.276, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 5.637765503351757, |
|
"learning_rate": 0.00018759573906153708, |
|
"loss": 0.4755, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 6.702704562048585, |
|
"learning_rate": 0.00018733163130557265, |
|
"loss": 0.386, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 3.1625393449402934, |
|
"learning_rate": 0.00018706752354960822, |
|
"loss": 0.357, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 4.4188940831370935, |
|
"learning_rate": 0.0001868034157936438, |
|
"loss": 0.3086, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 8.673434660245706, |
|
"learning_rate": 0.00018653930803767935, |
|
"loss": 0.1807, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 3.738887993235016, |
|
"learning_rate": 0.00018627520028171492, |
|
"loss": 0.3299, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 9.319508688160127, |
|
"learning_rate": 0.0001860110925257505, |
|
"loss": 0.4157, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 6.892943360966015, |
|
"learning_rate": 0.00018574698476978607, |
|
"loss": 0.2339, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 12.942098135606841, |
|
"learning_rate": 0.00018548287701382162, |
|
"loss": 0.5181, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 14.39700673839823, |
|
"learning_rate": 0.0001852187692578572, |
|
"loss": 0.2945, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 3.9132635629855486, |
|
"learning_rate": 0.00018495466150189276, |
|
"loss": 0.3401, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 3.0002697966747713, |
|
"learning_rate": 0.00018469055374592834, |
|
"loss": 0.2648, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.21238117057248854, |
|
"learning_rate": 0.00018442644598996389, |
|
"loss": 0.2277, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 16.868097673871954, |
|
"learning_rate": 0.00018416233823399943, |
|
"loss": 0.2122, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 7.430973727847297, |
|
"learning_rate": 0.00018389823047803503, |
|
"loss": 0.2273, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 13.39392838076716, |
|
"learning_rate": 0.00018363412272207058, |
|
"loss": 0.4154, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 4.789150745954093, |
|
"learning_rate": 0.00018337001496610615, |
|
"loss": 0.1768, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 11.174877422869125, |
|
"learning_rate": 0.0001831059072101417, |
|
"loss": 0.5684, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 4.6018630455438325, |
|
"learning_rate": 0.0001828417994541773, |
|
"loss": 0.3407, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 9.953101867756693, |
|
"learning_rate": 0.00018257769169821285, |
|
"loss": 0.2901, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 7.407680005230561, |
|
"learning_rate": 0.00018231358394224842, |
|
"loss": 0.4594, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 2.6390957223682454, |
|
"learning_rate": 0.00018204947618628397, |
|
"loss": 0.4566, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 6.740560085736122, |
|
"learning_rate": 0.00018178536843031957, |
|
"loss": 0.3352, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 28.89110470090379, |
|
"learning_rate": 0.00018152126067435512, |
|
"loss": 0.4236, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 5.894457266781238, |
|
"learning_rate": 0.0001812571529183907, |
|
"loss": 0.3826, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 4.90265790457071, |
|
"learning_rate": 0.00018099304516242624, |
|
"loss": 0.2422, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 4.915714947527184, |
|
"learning_rate": 0.00018072893740646182, |
|
"loss": 0.4215, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 10.593603092345282, |
|
"learning_rate": 0.0001804648296504974, |
|
"loss": 0.2984, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 8.610545572953026, |
|
"learning_rate": 0.00018020072189453294, |
|
"loss": 0.3861, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 3.275694600127651, |
|
"learning_rate": 0.0001799366141385685, |
|
"loss": 0.1484, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 2.9917701980802534, |
|
"learning_rate": 0.00017967250638260409, |
|
"loss": 0.354, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 4.752595829803958, |
|
"learning_rate": 0.00017940839862663966, |
|
"loss": 0.2423, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 6.424507759307914, |
|
"learning_rate": 0.0001791442908706752, |
|
"loss": 0.2912, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 5.150914763339286, |
|
"learning_rate": 0.0001788801831147108, |
|
"loss": 0.3863, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 7.950047718285756, |
|
"learning_rate": 0.00017861607535874636, |
|
"loss": 0.3273, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 2.0619701931886967, |
|
"learning_rate": 0.00017835196760278193, |
|
"loss": 0.3099, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 11.802245354515275, |
|
"learning_rate": 0.00017808785984681748, |
|
"loss": 0.4913, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 10.007925086910582, |
|
"learning_rate": 0.00017782375209085308, |
|
"loss": 0.3709, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 11.125023955683686, |
|
"learning_rate": 0.00017755964433488862, |
|
"loss": 0.3352, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 4.89797690004093, |
|
"learning_rate": 0.0001772955365789242, |
|
"loss": 0.3096, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 12.084564328245744, |
|
"learning_rate": 0.00017703142882295975, |
|
"loss": 0.2493, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 10.445125880603614, |
|
"learning_rate": 0.0001767673210669953, |
|
"loss": 0.3003, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 3.6377729302644695, |
|
"learning_rate": 0.0001765032133110309, |
|
"loss": 0.2035, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 4.028090302576488, |
|
"learning_rate": 0.00017623910555506644, |
|
"loss": 0.2136, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 7.747464129470143, |
|
"learning_rate": 0.00017600140857469847, |
|
"loss": 0.4821, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.9907365174614449, |
|
"learning_rate": 0.00017573730081873402, |
|
"loss": 0.4276, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 11.291404090563887, |
|
"learning_rate": 0.0001754731930627696, |
|
"loss": 0.4383, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 5.156181022153768, |
|
"learning_rate": 0.00017520908530680514, |
|
"loss": 0.2204, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 4.088807891623482, |
|
"learning_rate": 0.00017494497755084074, |
|
"loss": 0.4143, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 3.9398518664541005, |
|
"learning_rate": 0.0001746808697948763, |
|
"loss": 0.2843, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 6.861595569917812, |
|
"learning_rate": 0.00017441676203891186, |
|
"loss": 0.3798, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 3.2732346010657967, |
|
"learning_rate": 0.0001741526542829474, |
|
"loss": 0.2078, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 3.4383072436268267, |
|
"learning_rate": 0.000173888546526983, |
|
"loss": 0.3779, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 5.914009405172411, |
|
"learning_rate": 0.00017362443877101856, |
|
"loss": 0.2341, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 2.6422195515529956, |
|
"learning_rate": 0.0001733603310150541, |
|
"loss": 0.2333, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.3857533289452737, |
|
"learning_rate": 0.0001730962232590897, |
|
"loss": 0.1961, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 2.059953351439511, |
|
"learning_rate": 0.00017283211550312525, |
|
"loss": 0.4881, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 10.597653032886594, |
|
"learning_rate": 0.00017256800774716083, |
|
"loss": 0.424, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 5.171578842871781, |
|
"learning_rate": 0.00017230389999119638, |
|
"loss": 0.3054, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 2.8072490729454778, |
|
"learning_rate": 0.00017203979223523198, |
|
"loss": 0.3361, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 10.67098938868372, |
|
"learning_rate": 0.00017177568447926752, |
|
"loss": 0.199, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 7.583380258309324, |
|
"learning_rate": 0.0001715115767233031, |
|
"loss": 0.2661, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 9.05363459550282, |
|
"learning_rate": 0.00017124746896733865, |
|
"loss": 0.3187, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 3.7695240656878144, |
|
"learning_rate": 0.00017098336121137425, |
|
"loss": 0.3067, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 5.025416113484515, |
|
"learning_rate": 0.0001707192534554098, |
|
"loss": 0.4757, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 5.730906882565963, |
|
"learning_rate": 0.00017045514569944537, |
|
"loss": 0.279, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 7.933729801673655, |
|
"learning_rate": 0.00017019103794348091, |
|
"loss": 0.2164, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 9.069156267463432, |
|
"learning_rate": 0.00016992693018751652, |
|
"loss": 0.2898, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 3.052632966251752, |
|
"learning_rate": 0.00016966282243155206, |
|
"loss": 0.3861, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 4.351753715632027, |
|
"learning_rate": 0.0001693987146755876, |
|
"loss": 0.3692, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 6.595474867843094, |
|
"learning_rate": 0.00016913460691962318, |
|
"loss": 0.4144, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 8.875854869087618, |
|
"learning_rate": 0.00016887049916365876, |
|
"loss": 0.3301, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.1474446238717586, |
|
"learning_rate": 0.00016860639140769433, |
|
"loss": 0.2172, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 22.776417456462937, |
|
"learning_rate": 0.00016836869442732634, |
|
"loss": 0.6439, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 6.498663232395253, |
|
"learning_rate": 0.0001681045866713619, |
|
"loss": 0.3548, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 2.540809246554936, |
|
"learning_rate": 0.00016784047891539746, |
|
"loss": 0.3104, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 11.23205251986895, |
|
"learning_rate": 0.00016757637115943303, |
|
"loss": 0.4779, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 15.50340937459547, |
|
"learning_rate": 0.0001673122634034686, |
|
"loss": 0.3544, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 4.649484749641056, |
|
"learning_rate": 0.00016704815564750418, |
|
"loss": 0.3832, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 6.479502493786179, |
|
"learning_rate": 0.00016678404789153973, |
|
"loss": 0.3119, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.2456553372360117, |
|
"learning_rate": 0.0001665199401355753, |
|
"loss": 0.2147, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 4.1843629031152485, |
|
"learning_rate": 0.00016625583237961088, |
|
"loss": 0.3104, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 10.240618772565934, |
|
"learning_rate": 0.00016599172462364645, |
|
"loss": 0.3794, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 6.420744423415216, |
|
"learning_rate": 0.000165727616867682, |
|
"loss": 0.3225, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 3.7783835399126193, |
|
"learning_rate": 0.00016546350911171754, |
|
"loss": 0.265, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 8.771413780307158, |
|
"learning_rate": 0.00016519940135575315, |
|
"loss": 0.3462, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 17.099197597109754, |
|
"learning_rate": 0.0001649352935997887, |
|
"loss": 0.3665, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 11.598065804290718, |
|
"learning_rate": 0.00016467118584382427, |
|
"loss": 0.3195, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 4.230585182301318, |
|
"learning_rate": 0.00016440707808785981, |
|
"loss": 0.6228, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 4.557975044284719, |
|
"learning_rate": 0.00016414297033189542, |
|
"loss": 0.2, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 7.237677686409225, |
|
"learning_rate": 0.00016387886257593096, |
|
"loss": 0.2912, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 3.0272821140167574, |
|
"learning_rate": 0.00016361475481996654, |
|
"loss": 0.3627, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 4.132070094762826, |
|
"learning_rate": 0.00016335064706400208, |
|
"loss": 0.2754, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 2.735196686309314, |
|
"learning_rate": 0.00016308653930803768, |
|
"loss": 0.289, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 5.303597924748066, |
|
"learning_rate": 0.00016282243155207323, |
|
"loss": 0.4207, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 4.900482527326059, |
|
"learning_rate": 0.0001625583237961088, |
|
"loss": 0.3014, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 3.510216989082974, |
|
"learning_rate": 0.00016229421604014435, |
|
"loss": 0.3637, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 8.996542111519144, |
|
"learning_rate": 0.00016203010828417993, |
|
"loss": 0.355, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 2.9960128892632762, |
|
"learning_rate": 0.0001617660005282155, |
|
"loss": 0.3615, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 11.410429494498963, |
|
"learning_rate": 0.00016150189277225105, |
|
"loss": 0.4041, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 5.884751081527833, |
|
"learning_rate": 0.00016123778501628662, |
|
"loss": 0.2194, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 6.892881901273992, |
|
"learning_rate": 0.0001609736772603222, |
|
"loss": 0.2815, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 16.378051622703484, |
|
"learning_rate": 0.00016070956950435777, |
|
"loss": 0.44, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 3.747758726063269, |
|
"learning_rate": 0.00016044546174839332, |
|
"loss": 0.3339, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 2.4881745488949276, |
|
"learning_rate": 0.00016018135399242892, |
|
"loss": 0.2822, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 6.925114288101384, |
|
"learning_rate": 0.00015991724623646447, |
|
"loss": 0.1522, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 11.426697298806934, |
|
"learning_rate": 0.00015965313848050004, |
|
"loss": 0.5291, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 3.0859117775460683, |
|
"learning_rate": 0.0001593890307245356, |
|
"loss": 0.179, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 10.481580739953248, |
|
"learning_rate": 0.0001591249229685712, |
|
"loss": 0.2707, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 12.399421336771333, |
|
"learning_rate": 0.00015886081521260674, |
|
"loss": 0.4472, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 9.775123704587289, |
|
"learning_rate": 0.00015859670745664228, |
|
"loss": 0.2163, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 8.003344532407036, |
|
"learning_rate": 0.00015833259970067786, |
|
"loss": 0.3938, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 4.063169363933348, |
|
"learning_rate": 0.0001580684919447134, |
|
"loss": 0.3104, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 5.021103491372309, |
|
"learning_rate": 0.000157804384188749, |
|
"loss": 0.2433, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 8.127815435528477, |
|
"learning_rate": 0.00015754027643278455, |
|
"loss": 0.4188, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 18.575206074912845, |
|
"learning_rate": 0.00015727616867682013, |
|
"loss": 0.3035, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.6707897237666264, |
|
"learning_rate": 0.00015701206092085567, |
|
"loss": 0.2629, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 7.0249289338922525, |
|
"learning_rate": 0.00015674795316489128, |
|
"loss": 0.4149, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 24.315251449678733, |
|
"learning_rate": 0.00015648384540892682, |
|
"loss": 0.5126, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 6.3420314053766145, |
|
"learning_rate": 0.0001562197376529624, |
|
"loss": 0.2902, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 14.2218215768708, |
|
"learning_rate": 0.00015595562989699794, |
|
"loss": 0.4114, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 3.800340107636826, |
|
"learning_rate": 0.00015569152214103355, |
|
"loss": 0.284, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 3.7299155241996056, |
|
"learning_rate": 0.0001554274143850691, |
|
"loss": 0.3159, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 12.518196707309734, |
|
"learning_rate": 0.00015516330662910464, |
|
"loss": 0.2959, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 2.332368023026213, |
|
"learning_rate": 0.00015489919887314024, |
|
"loss": 0.3974, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 11.232975643160248, |
|
"learning_rate": 0.0001546350911171758, |
|
"loss": 0.2524, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 11.74121783142979, |
|
"learning_rate": 0.00015437098336121136, |
|
"loss": 0.2416, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 7.504719616139981, |
|
"learning_rate": 0.0001541068756052469, |
|
"loss": 0.3549, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 10.41647084870661, |
|
"learning_rate": 0.0001538427678492825, |
|
"loss": 0.471, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 16.158188481314436, |
|
"learning_rate": 0.00015357866009331806, |
|
"loss": 0.3437, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 16.32739961007586, |
|
"learning_rate": 0.00015331455233735363, |
|
"loss": 0.3165, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 5.184342314118148, |
|
"learning_rate": 0.00015305044458138918, |
|
"loss": 0.2971, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 7.207726323071327, |
|
"learning_rate": 0.00015278633682542478, |
|
"loss": 0.3692, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 23.297237421564507, |
|
"learning_rate": 0.00015252222906946033, |
|
"loss": 0.4152, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 3.4114255435535004, |
|
"learning_rate": 0.0001522581213134959, |
|
"loss": 0.2974, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 10.690999505219285, |
|
"learning_rate": 0.00015199401355753145, |
|
"loss": 0.2765, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 7.335840951773753, |
|
"learning_rate": 0.00015172990580156705, |
|
"loss": 0.3455, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 15.604891813878552, |
|
"learning_rate": 0.0001514657980456026, |
|
"loss": 0.4385, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 9.628438285923433, |
|
"learning_rate": 0.00015120169028963814, |
|
"loss": 0.4003, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 10.306314054776461, |
|
"learning_rate": 0.00015093758253367372, |
|
"loss": 0.424, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 6.260496743722033, |
|
"learning_rate": 0.0001506734747777093, |
|
"loss": 0.2472, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 7.080198000715199, |
|
"learning_rate": 0.00015040936702174487, |
|
"loss": 0.2117, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 10.181287285553235, |
|
"learning_rate": 0.00015014525926578041, |
|
"loss": 0.2691, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 9.118200463227126, |
|
"learning_rate": 0.000149881151509816, |
|
"loss": 0.3818, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 9.655859369876493, |
|
"learning_rate": 0.00014961704375385156, |
|
"loss": 0.364, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 7.16565316656793, |
|
"learning_rate": 0.0001493529359978871, |
|
"loss": 0.3077, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 8.303193350888552, |
|
"learning_rate": 0.00014908882824192268, |
|
"loss": 0.3348, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 3.3438569678761842, |
|
"learning_rate": 0.00014882472048595826, |
|
"loss": 0.2974, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 4.002626485525544, |
|
"learning_rate": 0.00014856061272999383, |
|
"loss": 0.3356, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 3.4996572519866604, |
|
"learning_rate": 0.00014829650497402938, |
|
"loss": 0.3667, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 3.0521944804779153, |
|
"learning_rate": 0.00014803239721806495, |
|
"loss": 0.3562, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 9.402961094189349, |
|
"learning_rate": 0.00014776828946210053, |
|
"loss": 0.3747, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 4.300432959190631, |
|
"learning_rate": 0.0001475041817061361, |
|
"loss": 0.2226, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.2533536238959868, |
|
"learning_rate": 0.00014724007395017165, |
|
"loss": 0.2996, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 17.023102286523546, |
|
"learning_rate": 0.00014697596619420722, |
|
"loss": 0.2937, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 10.496957218839603, |
|
"learning_rate": 0.0001467118584382428, |
|
"loss": 0.2281, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 4.9920070855130945, |
|
"learning_rate": 0.00014644775068227837, |
|
"loss": 0.2676, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.9563652862881009, |
|
"learning_rate": 0.00014618364292631392, |
|
"loss": 0.2081, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 20.739397647977388, |
|
"learning_rate": 0.0001459195351703495, |
|
"loss": 0.4464, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 3.583675688967886, |
|
"learning_rate": 0.00014565542741438504, |
|
"loss": 0.3721, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 9.510915177884517, |
|
"learning_rate": 0.00014539131965842061, |
|
"loss": 0.2997, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.3261929823378318, |
|
"learning_rate": 0.0001451272119024562, |
|
"loss": 0.2083, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 6.204559478601404, |
|
"learning_rate": 0.00014486310414649176, |
|
"loss": 0.4097, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 18.013955291169616, |
|
"learning_rate": 0.0001445989963905273, |
|
"loss": 0.3774, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 6.610762645480038, |
|
"learning_rate": 0.00014433488863456288, |
|
"loss": 0.3484, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 10.177813470182098, |
|
"learning_rate": 0.00014407078087859846, |
|
"loss": 0.3181, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 17.392831094348985, |
|
"learning_rate": 0.00014380667312263403, |
|
"loss": 0.4373, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 4.860262017846033, |
|
"learning_rate": 0.00014354256536666958, |
|
"loss": 0.4033, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 5.639817435634789, |
|
"learning_rate": 0.00014327845761070515, |
|
"loss": 0.3144, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 5.547212157278192, |
|
"learning_rate": 0.00014301434985474073, |
|
"loss": 0.3565, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 4.216430670026243, |
|
"learning_rate": 0.0001427502420987763, |
|
"loss": 0.2645, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 7.470166734375579, |
|
"learning_rate": 0.00014248613434281185, |
|
"loss": 0.2985, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 6.079270967627962, |
|
"learning_rate": 0.00014222202658684742, |
|
"loss": 0.3225, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 3.82678101506829, |
|
"learning_rate": 0.00014195791883088297, |
|
"loss": 0.1106, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.1436834838991876, |
|
"learning_rate": 0.00014169381107491854, |
|
"loss": 0.2378, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 5.032168769837695, |
|
"learning_rate": 0.00014142970331895412, |
|
"loss": 0.312, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 9.845380454289769, |
|
"learning_rate": 0.0001411655955629897, |
|
"loss": 0.2321, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.437202119786478, |
|
"learning_rate": 0.00014090148780702524, |
|
"loss": 0.3306, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 8.556663714292608, |
|
"learning_rate": 0.00014063738005106081, |
|
"loss": 0.3701, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 5.9124287744903565, |
|
"learning_rate": 0.0001403732722950964, |
|
"loss": 0.2823, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 6.707735033607613, |
|
"learning_rate": 0.00014010916453913196, |
|
"loss": 0.3947, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 3.9719620881655064, |
|
"learning_rate": 0.00013984505678316754, |
|
"loss": 0.2768, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 10.072632760821824, |
|
"learning_rate": 0.00013958094902720308, |
|
"loss": 0.5599, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 6.358318051518618, |
|
"learning_rate": 0.00013931684127123866, |
|
"loss": 0.3604, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 4.577029869748708, |
|
"learning_rate": 0.0001390527335152742, |
|
"loss": 0.3292, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 4.421913815301032, |
|
"learning_rate": 0.00013878862575930978, |
|
"loss": 0.2617, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 2.246052725873391, |
|
"learning_rate": 0.00013852451800334535, |
|
"loss": 0.4331, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 16.779819219746976, |
|
"learning_rate": 0.00013826041024738093, |
|
"loss": 0.2813, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 13.472895007673747, |
|
"learning_rate": 0.00013799630249141648, |
|
"loss": 0.3333, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 4.090768431286489, |
|
"learning_rate": 0.00013773219473545205, |
|
"loss": 0.3463, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 6.296496050367258, |
|
"learning_rate": 0.00013746808697948762, |
|
"loss": 0.3922, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 7.573282179736087, |
|
"learning_rate": 0.0001372039792235232, |
|
"loss": 0.5174, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 9.476190995295713, |
|
"learning_rate": 0.00013693987146755875, |
|
"loss": 0.3445, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 7.503897944936768, |
|
"learning_rate": 0.00013667576371159432, |
|
"loss": 0.2797, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 4.384492334204901, |
|
"learning_rate": 0.0001364116559556299, |
|
"loss": 0.2803, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 4.550543480911678, |
|
"learning_rate": 0.00013614754819966547, |
|
"loss": 0.3624, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 11.551640844627658, |
|
"learning_rate": 0.00013588344044370101, |
|
"loss": 0.2134, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 8.935569639750181, |
|
"learning_rate": 0.0001356193326877366, |
|
"loss": 0.3754, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 7.051912650212629, |
|
"learning_rate": 0.00013535522493177214, |
|
"loss": 0.2394, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.5676283972219329, |
|
"learning_rate": 0.0001350911171758077, |
|
"loss": 0.1677, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 4.399054721547771, |
|
"learning_rate": 0.00013482700941984328, |
|
"loss": 0.2642, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 2.742555406471177, |
|
"learning_rate": 0.00013456290166387886, |
|
"loss": 0.3408, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 12.128611034295828, |
|
"learning_rate": 0.0001342987939079144, |
|
"loss": 0.3314, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 2.3202295788243634, |
|
"learning_rate": 0.00013403468615194998, |
|
"loss": 0.3683, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 8.281040697781666, |
|
"learning_rate": 0.00013377057839598555, |
|
"loss": 0.3436, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.179128585007064, |
|
"learning_rate": 0.00013350647064002113, |
|
"loss": 0.2195, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 13.569603018038574, |
|
"learning_rate": 0.00013324236288405668, |
|
"loss": 0.399, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 14.599350326434276, |
|
"learning_rate": 0.00013297825512809225, |
|
"loss": 0.3329, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 4.986639468091575, |
|
"learning_rate": 0.00013271414737212782, |
|
"loss": 0.203, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 14.03862023758173, |
|
"learning_rate": 0.0001324500396161634, |
|
"loss": 0.2006, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 5.793862534328231, |
|
"learning_rate": 0.00013218593186019895, |
|
"loss": 0.3644, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 4.283836370554853, |
|
"learning_rate": 0.00013192182410423452, |
|
"loss": 0.3263, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 2.4170345327733065, |
|
"learning_rate": 0.00013165771634827007, |
|
"loss": 0.3041, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 6.851470265914908, |
|
"learning_rate": 0.00013139360859230564, |
|
"loss": 0.3416, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 2.770427591596483, |
|
"learning_rate": 0.00013112950083634121, |
|
"loss": 0.3061, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 9.024461006147305, |
|
"learning_rate": 0.0001308653930803768, |
|
"loss": 0.293, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.902704756256012, |
|
"learning_rate": 0.00013060128532441234, |
|
"loss": 0.3741, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 19.847226252906548, |
|
"learning_rate": 0.0001303371775684479, |
|
"loss": 0.4751, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 4.801241012685623, |
|
"learning_rate": 0.00013007306981248348, |
|
"loss": 0.3473, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 3.293307817958716, |
|
"learning_rate": 0.00012980896205651906, |
|
"loss": 0.3203, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 13.580777822195827, |
|
"learning_rate": 0.0001295448543005546, |
|
"loss": 0.4492, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 7.084767692974912, |
|
"learning_rate": 0.00012928074654459018, |
|
"loss": 0.2711, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 3.806799718919279, |
|
"learning_rate": 0.00012901663878862575, |
|
"loss": 0.249, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 3.9048368014810007, |
|
"learning_rate": 0.00012875253103266133, |
|
"loss": 0.3136, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 15.265715601719572, |
|
"learning_rate": 0.00012848842327669688, |
|
"loss": 0.4343, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 3.149093780791876, |
|
"learning_rate": 0.00012822431552073245, |
|
"loss": 0.2987, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 6.493197182696941, |
|
"learning_rate": 0.000127960207764768, |
|
"loss": 0.249, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 5.249477830603063, |
|
"learning_rate": 0.00012769610000880357, |
|
"loss": 0.2025, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.7424442549499177, |
|
"learning_rate": 0.00012743199225283915, |
|
"loss": 0.1231, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 8.217226055770746, |
|
"learning_rate": 0.00012716788449687472, |
|
"loss": 0.3337, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 3.0621751429301365, |
|
"learning_rate": 0.00012690377674091027, |
|
"loss": 0.336, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 13.086797386499311, |
|
"learning_rate": 0.00012663966898494584, |
|
"loss": 0.2337, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 4.579111196044498, |
|
"learning_rate": 0.00012637556122898142, |
|
"loss": 0.3374, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 3.7540516028809328, |
|
"learning_rate": 0.000126111453473017, |
|
"loss": 0.1698, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 7.778067854467908, |
|
"learning_rate": 0.00012584734571705256, |
|
"loss": 0.3176, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 11.28249883211765, |
|
"learning_rate": 0.0001255832379610881, |
|
"loss": 0.1805, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 5.472404912586062, |
|
"learning_rate": 0.00012531913020512368, |
|
"loss": 0.1059, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 2.8872730254681165, |
|
"learning_rate": 0.00012505502244915926, |
|
"loss": 0.3318, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 7.7794265745835744, |
|
"learning_rate": 0.0001247909146931948, |
|
"loss": 0.2772, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 2.5333644033252645, |
|
"learning_rate": 0.00012452680693723038, |
|
"loss": 0.4262, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 16.826885464636717, |
|
"learning_rate": 0.00012426269918126593, |
|
"loss": 0.2253, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 2.622835603930756, |
|
"learning_rate": 0.0001239985914253015, |
|
"loss": 0.2077, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 9.764683064619582, |
|
"learning_rate": 0.00012373448366933708, |
|
"loss": 0.3954, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 6.870091906882638, |
|
"learning_rate": 0.00012347037591337265, |
|
"loss": 0.2326, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 3.118790740070018, |
|
"learning_rate": 0.00012320626815740822, |
|
"loss": 0.2159, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 6.244474003280159, |
|
"learning_rate": 0.00012294216040144377, |
|
"loss": 0.4341, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 2.653487270102085, |
|
"learning_rate": 0.00012267805264547935, |
|
"loss": 0.5084, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 9.14669567974351, |
|
"learning_rate": 0.00012241394488951492, |
|
"loss": 0.4201, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 4.792042813792595, |
|
"learning_rate": 0.0001221498371335505, |
|
"loss": 0.2149, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 10.51542484693792, |
|
"learning_rate": 0.00012188572937758605, |
|
"loss": 0.3921, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 13.762497244565239, |
|
"learning_rate": 0.00012162162162162162, |
|
"loss": 0.2177, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 5.475664304577624, |
|
"learning_rate": 0.00012135751386565719, |
|
"loss": 0.304, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 16.69877452415643, |
|
"learning_rate": 0.00012109340610969274, |
|
"loss": 0.372, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 12.221634614551201, |
|
"learning_rate": 0.00012082929835372831, |
|
"loss": 0.2879, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 10.172009412255388, |
|
"learning_rate": 0.00012056519059776387, |
|
"loss": 0.2582, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 3.509769033707379, |
|
"learning_rate": 0.00012030108284179945, |
|
"loss": 0.2183, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 8.321430562793568, |
|
"learning_rate": 0.000120036975085835, |
|
"loss": 0.3057, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 2.0566355491459625, |
|
"learning_rate": 0.00011977286732987058, |
|
"loss": 0.3616, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 6.649560281056566, |
|
"learning_rate": 0.00011950875957390614, |
|
"loss": 0.2277, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 3.823681392264618, |
|
"learning_rate": 0.00011924465181794172, |
|
"loss": 0.2506, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 13.977585296138626, |
|
"learning_rate": 0.00011898054406197728, |
|
"loss": 0.2668, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 8.18817069677191, |
|
"learning_rate": 0.00011871643630601285, |
|
"loss": 0.3338, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 5.13086843253833, |
|
"learning_rate": 0.00011845232855004841, |
|
"loss": 0.1987, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.47912661312829385, |
|
"learning_rate": 0.00011818822079408399, |
|
"loss": 0.1843, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 2.810984382614806, |
|
"learning_rate": 0.00011792411303811955, |
|
"loss": 0.2258, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 20.186757822536684, |
|
"learning_rate": 0.0001176600052821551, |
|
"loss": 0.3983, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 4.6582476219915, |
|
"learning_rate": 0.00011739589752619067, |
|
"loss": 0.3271, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 2.3936916683319542, |
|
"learning_rate": 0.00011713178977022624, |
|
"loss": 0.3525, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 3.0701950430849614, |
|
"learning_rate": 0.0001168676820142618, |
|
"loss": 0.2089, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 18.441688716432523, |
|
"learning_rate": 0.00011660357425829738, |
|
"loss": 0.3196, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 6.3186094662020835, |
|
"learning_rate": 0.00011633946650233294, |
|
"loss": 0.3754, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 2.6445611437174326, |
|
"learning_rate": 0.00011607535874636851, |
|
"loss": 0.2543, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 19.763658077394638, |
|
"learning_rate": 0.00011581125099040407, |
|
"loss": 0.2198, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 9.959144520673648, |
|
"learning_rate": 0.00011554714323443965, |
|
"loss": 0.2783, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 7.784092477796405, |
|
"learning_rate": 0.0001152830354784752, |
|
"loss": 0.3151, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 5.998190142436887, |
|
"learning_rate": 0.00011501892772251078, |
|
"loss": 0.1617, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 7.252694593147776, |
|
"learning_rate": 0.00011475481996654634, |
|
"loss": 0.2864, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 12.244038586138855, |
|
"learning_rate": 0.00011449071221058192, |
|
"loss": 0.1699, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 17.43389471787065, |
|
"learning_rate": 0.00011422660445461748, |
|
"loss": 0.2885, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 2.8670922708223348, |
|
"learning_rate": 0.00011396249669865304, |
|
"loss": 0.2298, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 6.967344925020078, |
|
"learning_rate": 0.0001136983889426886, |
|
"loss": 0.2609, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 15.255839592868696, |
|
"learning_rate": 0.00011346069196232061, |
|
"loss": 0.3349, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 10.997459901332054, |
|
"learning_rate": 0.00011319658420635618, |
|
"loss": 0.274, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 4.936816842767314, |
|
"learning_rate": 0.00011293247645039175, |
|
"loss": 0.2767, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 8.722905002127161, |
|
"learning_rate": 0.00011266836869442731, |
|
"loss": 0.2499, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 10.999166638602913, |
|
"learning_rate": 0.00011240426093846288, |
|
"loss": 0.2229, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.8032525020782211, |
|
"learning_rate": 0.00011214015318249844, |
|
"loss": 0.1764, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 27.547154782714138, |
|
"learning_rate": 0.00011187604542653402, |
|
"loss": 0.2496, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 20.106686389398114, |
|
"learning_rate": 0.00011161193767056958, |
|
"loss": 0.2334, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 5.083256278071224, |
|
"learning_rate": 0.00011134782991460515, |
|
"loss": 0.3343, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 10.199029959820093, |
|
"learning_rate": 0.00011108372215864071, |
|
"loss": 0.3351, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 20.144031653830446, |
|
"learning_rate": 0.00011081961440267629, |
|
"loss": 0.1828, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 3.0893760301832116, |
|
"learning_rate": 0.00011055550664671185, |
|
"loss": 0.3685, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 4.585526910204328, |
|
"learning_rate": 0.00011029139889074742, |
|
"loss": 0.2942, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 8.846752410262999, |
|
"learning_rate": 0.00011002729113478297, |
|
"loss": 0.3366, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 3.2964170047983092, |
|
"learning_rate": 0.00010976318337881854, |
|
"loss": 0.2765, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 10.119994789567933, |
|
"learning_rate": 0.0001094990756228541, |
|
"loss": 0.3336, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 10.143299496577857, |
|
"learning_rate": 0.00010923496786688968, |
|
"loss": 0.2018, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 12.03543638740937, |
|
"learning_rate": 0.00010897086011092524, |
|
"loss": 0.3145, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 2.465157864032654, |
|
"learning_rate": 0.00010870675235496081, |
|
"loss": 0.2603, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 4.532296356702725, |
|
"learning_rate": 0.00010844264459899638, |
|
"loss": 0.5217, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 5.290637566404521, |
|
"learning_rate": 0.00010817853684303195, |
|
"loss": 0.3109, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 10.579357609290867, |
|
"learning_rate": 0.00010791442908706751, |
|
"loss": 0.3611, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 6.867193048451062, |
|
"learning_rate": 0.00010765032133110308, |
|
"loss": 0.4212, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 5.3338729699400655, |
|
"learning_rate": 0.00010738621357513866, |
|
"loss": 0.222, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 3.936161017952145, |
|
"learning_rate": 0.00010712210581917422, |
|
"loss": 0.3205, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 20.529054885815693, |
|
"learning_rate": 0.0001068579980632098, |
|
"loss": 0.3484, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 6.440117544284218, |
|
"learning_rate": 0.00010659389030724534, |
|
"loss": 0.2661, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 6.764316456522104, |
|
"learning_rate": 0.0001063297825512809, |
|
"loss": 0.1981, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 5.807864772560368, |
|
"learning_rate": 0.00010606567479531648, |
|
"loss": 0.3118, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 14.28570131675949, |
|
"learning_rate": 0.00010580156703935204, |
|
"loss": 0.1987, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 14.070614046363564, |
|
"learning_rate": 0.00010553745928338761, |
|
"loss": 0.1938, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 5.519446863351419, |
|
"learning_rate": 0.00010527335152742317, |
|
"loss": 0.3154, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.8391712624832315, |
|
"eval_f1": 0.8067493314121663, |
|
"eval_loss": 0.357177734375, |
|
"eval_precision": 0.7895852602851988, |
|
"eval_recall": 0.841805198689739, |
|
"eval_runtime": 464.0655, |
|
"eval_samples_per_second": 14.457, |
|
"eval_steps_per_second": 2.411, |
|
"step": 7906 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 4.570371337454835, |
|
"learning_rate": 0.00010500924377145875, |
|
"loss": 0.3369, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 4.855296400806096, |
|
"learning_rate": 0.00010474513601549432, |
|
"loss": 0.2167, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.8449716786359939, |
|
"learning_rate": 0.00010448102825952988, |
|
"loss": 0.0707, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 39.85705463948524, |
|
"learning_rate": 0.00010421692050356545, |
|
"loss": 0.1918, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 6.2797592787806, |
|
"learning_rate": 0.00010395281274760101, |
|
"loss": 0.0892, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.9610650197953539, |
|
"learning_rate": 0.00010368870499163659, |
|
"loss": 0.0893, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.9959374584504933, |
|
"learning_rate": 0.00010342459723567215, |
|
"loss": 0.1205, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.4738182783177045, |
|
"learning_rate": 0.00010316048947970772, |
|
"loss": 0.1759, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 3.1119985220385673, |
|
"learning_rate": 0.00010289638172374327, |
|
"loss": 0.1134, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.1231176982192115, |
|
"learning_rate": 0.00010263227396777885, |
|
"loss": 0.1927, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 1.1678565739543172, |
|
"learning_rate": 0.0001023681662118144, |
|
"loss": 0.2502, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 9.265883828901494, |
|
"learning_rate": 0.00010210405845584998, |
|
"loss": 0.2104, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 7.243874798770191, |
|
"learning_rate": 0.00010183995069988554, |
|
"loss": 0.1726, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.40591160969289597, |
|
"learning_rate": 0.00010157584294392111, |
|
"loss": 0.0678, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 3.927362068225369, |
|
"learning_rate": 0.00010131173518795668, |
|
"loss": 0.0875, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 5.009131341604889, |
|
"learning_rate": 0.00010104762743199225, |
|
"loss": 0.227, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.7148744429646117, |
|
"learning_rate": 0.00010078351967602781, |
|
"loss": 0.1711, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 3.20496942985561, |
|
"learning_rate": 0.00010051941192006338, |
|
"loss": 0.1087, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.9231245687182146, |
|
"learning_rate": 0.00010025530416409895, |
|
"loss": 0.1996, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 22.72875318987384, |
|
"learning_rate": 9.999119640813452e-05, |
|
"loss": 0.1851, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 2.00881260240084, |
|
"learning_rate": 9.972708865217008e-05, |
|
"loss": 0.0927, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 4.209720716655473, |
|
"learning_rate": 9.946298089620564e-05, |
|
"loss": 0.0849, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 1.9529919774532654, |
|
"learning_rate": 9.91988731402412e-05, |
|
"loss": 0.0215, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 2.107865291879719, |
|
"learning_rate": 9.893476538427678e-05, |
|
"loss": 0.2368, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.268600282889506, |
|
"learning_rate": 9.867065762831234e-05, |
|
"loss": 0.037, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.01050701287250132, |
|
"learning_rate": 9.840654987234791e-05, |
|
"loss": 0.112, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 1.4766594187021542, |
|
"learning_rate": 9.814244211638347e-05, |
|
"loss": 0.1619, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 7.977577908902681, |
|
"learning_rate": 9.787833436041905e-05, |
|
"loss": 0.0851, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 7.716767884791129, |
|
"learning_rate": 9.76142266044546e-05, |
|
"loss": 0.128, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.8920174800911701, |
|
"learning_rate": 9.735011884849018e-05, |
|
"loss": 0.1015, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.3498389647596418, |
|
"learning_rate": 9.708601109252574e-05, |
|
"loss": 0.1739, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 15.60644366259143, |
|
"learning_rate": 9.682190333656132e-05, |
|
"loss": 0.2107, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.8701698283820544, |
|
"learning_rate": 9.655779558059688e-05, |
|
"loss": 0.1222, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.9143304968078294, |
|
"learning_rate": 9.629368782463245e-05, |
|
"loss": 0.061, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 3.6884628782455313, |
|
"learning_rate": 9.602958006866801e-05, |
|
"loss": 0.1234, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 15.137930438366073, |
|
"learning_rate": 9.576547231270357e-05, |
|
"loss": 0.1065, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 20.497763755991276, |
|
"learning_rate": 9.550136455673913e-05, |
|
"loss": 0.1955, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.6297291192661263, |
|
"learning_rate": 9.52372568007747e-05, |
|
"loss": 0.1042, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 7.5522736122013265, |
|
"learning_rate": 9.497314904481027e-05, |
|
"loss": 0.1371, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 2.1954233455079364, |
|
"learning_rate": 9.470904128884584e-05, |
|
"loss": 0.1168, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.8568853988431717, |
|
"learning_rate": 9.44449335328814e-05, |
|
"loss": 0.0678, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 8.058061442543563, |
|
"learning_rate": 9.418082577691698e-05, |
|
"loss": 0.0824, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 4.20800373044439, |
|
"learning_rate": 9.391671802095254e-05, |
|
"loss": 0.0912, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 2.0416560806313, |
|
"learning_rate": 9.365261026498811e-05, |
|
"loss": 0.1513, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 27.525859053820138, |
|
"learning_rate": 9.338850250902367e-05, |
|
"loss": 0.1325, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 3.3353720644746128, |
|
"learning_rate": 9.312439475305925e-05, |
|
"loss": 0.1358, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.1884532660672202, |
|
"learning_rate": 9.28602869970948e-05, |
|
"loss": 0.1026, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.13764298399856836, |
|
"learning_rate": 9.259617924113038e-05, |
|
"loss": 0.1127, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 8.022496581506283, |
|
"learning_rate": 9.233207148516593e-05, |
|
"loss": 0.1256, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.21143244051880208, |
|
"learning_rate": 9.20679637292015e-05, |
|
"loss": 0.139, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.6104497392849312, |
|
"learning_rate": 9.180385597323706e-05, |
|
"loss": 0.1127, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.3076077939010853, |
|
"learning_rate": 9.153974821727264e-05, |
|
"loss": 0.0933, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 2.8183509445857418, |
|
"learning_rate": 9.12756404613082e-05, |
|
"loss": 0.1079, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 4.096549653818764, |
|
"learning_rate": 9.101153270534377e-05, |
|
"loss": 0.07, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 2.7215206673289605, |
|
"learning_rate": 9.074742494937933e-05, |
|
"loss": 0.309, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 15.718577759407667, |
|
"learning_rate": 9.04833171934149e-05, |
|
"loss": 0.2524, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 3.9256241045762397, |
|
"learning_rate": 9.021920943745048e-05, |
|
"loss": 0.1415, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 16.08379613121137, |
|
"learning_rate": 8.995510168148604e-05, |
|
"loss": 0.3066, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 6.760712726710112, |
|
"learning_rate": 8.969099392552162e-05, |
|
"loss": 0.1154, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.3985228336827005, |
|
"learning_rate": 8.942688616955718e-05, |
|
"loss": 0.0986, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.1802485482692591, |
|
"learning_rate": 8.916277841359275e-05, |
|
"loss": 0.0779, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 2.6563191930790624, |
|
"learning_rate": 8.889867065762831e-05, |
|
"loss": 0.1167, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.6652412854559228, |
|
"learning_rate": 8.863456290166386e-05, |
|
"loss": 0.0646, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.431258079582006, |
|
"learning_rate": 8.837045514569943e-05, |
|
"loss": 0.1383, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.24832586396252196, |
|
"learning_rate": 8.810634738973499e-05, |
|
"loss": 0.0864, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.1699998051681302, |
|
"learning_rate": 8.784223963377057e-05, |
|
"loss": 0.2314, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 11.474808327106688, |
|
"learning_rate": 8.760454265340257e-05, |
|
"loss": 0.2557, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 3.9295228929342434, |
|
"learning_rate": 8.734043489743814e-05, |
|
"loss": 0.1622, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.3840033482411934, |
|
"learning_rate": 8.70763271414737e-05, |
|
"loss": 0.1265, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.0243901637701598, |
|
"learning_rate": 8.681221938550928e-05, |
|
"loss": 0.0309, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 2.413148766104575, |
|
"learning_rate": 8.654811162954485e-05, |
|
"loss": 0.0593, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.06719997457490859, |
|
"learning_rate": 8.628400387358041e-05, |
|
"loss": 0.0646, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.8835322138559412, |
|
"learning_rate": 8.601989611761599e-05, |
|
"loss": 0.1953, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 15.041935138560776, |
|
"learning_rate": 8.575578836165155e-05, |
|
"loss": 0.1152, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.1218005591033131, |
|
"learning_rate": 8.549168060568712e-05, |
|
"loss": 0.1248, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.3773948299156011, |
|
"learning_rate": 8.522757284972268e-05, |
|
"loss": 0.2706, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 1.5557269416973856, |
|
"learning_rate": 8.496346509375826e-05, |
|
"loss": 0.1381, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 7.345987263229923, |
|
"learning_rate": 8.46993573377938e-05, |
|
"loss": 0.0965, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 1.3490779839965095, |
|
"learning_rate": 8.443524958182938e-05, |
|
"loss": 0.1977, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 29.747030469606557, |
|
"learning_rate": 8.417114182586494e-05, |
|
"loss": 0.2724, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 12.08882292534392, |
|
"learning_rate": 8.390703406990051e-05, |
|
"loss": 0.2033, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 5.193636589499085, |
|
"learning_rate": 8.364292631393607e-05, |
|
"loss": 0.1297, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.3725218267089253, |
|
"learning_rate": 8.337881855797165e-05, |
|
"loss": 0.0448, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 1.8560258938012986, |
|
"learning_rate": 8.311471080200721e-05, |
|
"loss": 0.0929, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.05501975523877587, |
|
"learning_rate": 8.285060304604278e-05, |
|
"loss": 0.0303, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 16.01099719850488, |
|
"learning_rate": 8.258649529007834e-05, |
|
"loss": 0.1764, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 22.398232395548956, |
|
"learning_rate": 8.232238753411392e-05, |
|
"loss": 0.3596, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 8.781541772827923, |
|
"learning_rate": 8.205827977814948e-05, |
|
"loss": 0.3248, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 2.2963466234719823, |
|
"learning_rate": 8.179417202218505e-05, |
|
"loss": 0.1504, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 7.267265645265003, |
|
"learning_rate": 8.153006426622061e-05, |
|
"loss": 0.1733, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.1250143324457247, |
|
"learning_rate": 8.126595651025617e-05, |
|
"loss": 0.1134, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 3.24929393121602, |
|
"learning_rate": 8.100184875429174e-05, |
|
"loss": 0.1758, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 1.6685242506781202, |
|
"learning_rate": 8.073774099832731e-05, |
|
"loss": 0.0951, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 13.17646145414902, |
|
"learning_rate": 8.047363324236287e-05, |
|
"loss": 0.2249, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 10.766085447366857, |
|
"learning_rate": 8.020952548639844e-05, |
|
"loss": 0.0767, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.437347110388571, |
|
"learning_rate": 7.9945417730434e-05, |
|
"loss": 0.058, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 3.1214332634621527, |
|
"learning_rate": 7.968130997446958e-05, |
|
"loss": 0.2229, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 5.876581897211659, |
|
"learning_rate": 7.941720221850514e-05, |
|
"loss": 0.193, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 4.501289763988718, |
|
"learning_rate": 7.915309446254071e-05, |
|
"loss": 0.1414, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 2.6224887226366147, |
|
"learning_rate": 7.888898670657628e-05, |
|
"loss": 0.1095, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 8.347439493123973, |
|
"learning_rate": 7.862487895061185e-05, |
|
"loss": 0.1348, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.4864953010659459, |
|
"learning_rate": 7.836077119464741e-05, |
|
"loss": 0.05, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 5.550479008890894, |
|
"learning_rate": 7.809666343868298e-05, |
|
"loss": 0.2035, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 5.654461158579552, |
|
"learning_rate": 7.783255568271854e-05, |
|
"loss": 0.1046, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 2.4307183255414384, |
|
"learning_rate": 7.75684479267541e-05, |
|
"loss": 0.1701, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 13.608080017284609, |
|
"learning_rate": 7.730434017078967e-05, |
|
"loss": 0.272, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 3.678000896802301, |
|
"learning_rate": 7.704023241482524e-05, |
|
"loss": 0.2433, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.370415506731004, |
|
"learning_rate": 7.67761246588608e-05, |
|
"loss": 0.2123, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 1.9083728841668193, |
|
"learning_rate": 7.651201690289638e-05, |
|
"loss": 0.2176, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 2.324331069283942, |
|
"learning_rate": 7.624790914693194e-05, |
|
"loss": 0.0449, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 3.3540280241688483, |
|
"learning_rate": 7.598380139096751e-05, |
|
"loss": 0.0776, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.8081383403403553, |
|
"learning_rate": 7.571969363500307e-05, |
|
"loss": 0.2757, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 6.340708587827813, |
|
"learning_rate": 7.545558587903864e-05, |
|
"loss": 0.1071, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.4771124752811318, |
|
"learning_rate": 7.51914781230742e-05, |
|
"loss": 0.2146, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 12.328840457379812, |
|
"learning_rate": 7.492737036710977e-05, |
|
"loss": 0.1453, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 7.86559890554053, |
|
"learning_rate": 7.466326261114534e-05, |
|
"loss": 0.1442, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 2.183074026280236, |
|
"learning_rate": 7.43991548551809e-05, |
|
"loss": 0.0975, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 7.588202181180001, |
|
"learning_rate": 7.413504709921648e-05, |
|
"loss": 0.1359, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 11.591111775618687, |
|
"learning_rate": 7.387093934325204e-05, |
|
"loss": 0.2605, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.4037183503782767, |
|
"learning_rate": 7.360683158728761e-05, |
|
"loss": 0.0965, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 7.767795756329233, |
|
"learning_rate": 7.334272383132317e-05, |
|
"loss": 0.2464, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.8595308365752063, |
|
"learning_rate": 7.307861607535873e-05, |
|
"loss": 0.2457, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 18.84334961938705, |
|
"learning_rate": 7.28145083193943e-05, |
|
"loss": 0.1245, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 3.9422947009381484, |
|
"learning_rate": 7.255040056342987e-05, |
|
"loss": 0.1193, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 21.866163000139704, |
|
"learning_rate": 7.228629280746544e-05, |
|
"loss": 0.1464, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.5927815780630423, |
|
"learning_rate": 7.202218505150101e-05, |
|
"loss": 0.0346, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 5.703801860227664, |
|
"learning_rate": 7.175807729553658e-05, |
|
"loss": 0.0592, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.36254942866638484, |
|
"learning_rate": 7.149396953957214e-05, |
|
"loss": 0.1765, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 4.418908234330049, |
|
"learning_rate": 7.12298617836077e-05, |
|
"loss": 0.0874, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.9506347199912738, |
|
"learning_rate": 7.096575402764327e-05, |
|
"loss": 0.065, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.2935718801839875, |
|
"learning_rate": 7.070164627167885e-05, |
|
"loss": 0.1703, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.1124870355542493, |
|
"learning_rate": 7.04375385157144e-05, |
|
"loss": 0.1708, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.3688631854532768, |
|
"learning_rate": 7.017343075974998e-05, |
|
"loss": 0.1014, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.8352408086564822, |
|
"learning_rate": 6.990932300378554e-05, |
|
"loss": 0.0566, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.1542874366516385, |
|
"learning_rate": 6.96452152478211e-05, |
|
"loss": 0.248, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.1544892653823733, |
|
"learning_rate": 6.938110749185668e-05, |
|
"loss": 0.2333, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 21.56280917070406, |
|
"learning_rate": 6.911699973589224e-05, |
|
"loss": 0.1926, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.2520688419834509, |
|
"learning_rate": 6.885289197992781e-05, |
|
"loss": 0.0806, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 1.4781905563970963, |
|
"learning_rate": 6.858878422396337e-05, |
|
"loss": 0.1101, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 8.616392641941344, |
|
"learning_rate": 6.832467646799895e-05, |
|
"loss": 0.0729, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 17.42189647348218, |
|
"learning_rate": 6.80605687120345e-05, |
|
"loss": 0.2795, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 8.297361946902777, |
|
"learning_rate": 6.779646095607007e-05, |
|
"loss": 0.162, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 5.375951555212969, |
|
"learning_rate": 6.753235320010564e-05, |
|
"loss": 0.1466, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.540354092448967, |
|
"learning_rate": 6.72682454441412e-05, |
|
"loss": 0.1, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 7.226674986298907, |
|
"learning_rate": 6.700413768817678e-05, |
|
"loss": 0.0885, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.662689789979408, |
|
"learning_rate": 6.674002993221234e-05, |
|
"loss": 0.3726, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 9.919696238986528, |
|
"learning_rate": 6.647592217624791e-05, |
|
"loss": 0.0673, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 19.61501206459386, |
|
"learning_rate": 6.621181442028347e-05, |
|
"loss": 0.1964, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 4.034768337943533, |
|
"learning_rate": 6.594770666431903e-05, |
|
"loss": 0.1425, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 3.10466655515715, |
|
"learning_rate": 6.56835989083546e-05, |
|
"loss": 0.0877, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 3.8377626611279765, |
|
"learning_rate": 6.541949115239017e-05, |
|
"loss": 0.2897, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 5.694959836933135, |
|
"learning_rate": 6.515538339642574e-05, |
|
"loss": 0.1375, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.7072358352528014, |
|
"learning_rate": 6.48912756404613e-05, |
|
"loss": 0.1536, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 21.957339715071846, |
|
"learning_rate": 6.462716788449688e-05, |
|
"loss": 0.2325, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.9973069500028089, |
|
"learning_rate": 6.436306012853244e-05, |
|
"loss": 0.0966, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 5.206448503424523, |
|
"learning_rate": 6.4098952372568e-05, |
|
"loss": 0.2192, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 5.719525784887322, |
|
"learning_rate": 6.383484461660357e-05, |
|
"loss": 0.1817, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.553404969881463, |
|
"learning_rate": 6.357073686063913e-05, |
|
"loss": 0.1441, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.103013880353259, |
|
"learning_rate": 6.33066291046747e-05, |
|
"loss": 0.0399, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 1.6971411697797874, |
|
"learning_rate": 6.304252134871027e-05, |
|
"loss": 0.2328, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 2.849892129001678, |
|
"learning_rate": 6.277841359274584e-05, |
|
"loss": 0.1212, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 4.163269713173385, |
|
"learning_rate": 6.25143058367814e-05, |
|
"loss": 0.1184, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 3.7794331352302883, |
|
"learning_rate": 6.225019808081696e-05, |
|
"loss": 0.0563, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 18.627705595135957, |
|
"learning_rate": 6.198609032485254e-05, |
|
"loss": 0.1419, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 2.2853968833910283, |
|
"learning_rate": 6.17219825688881e-05, |
|
"loss": 0.0906, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 2.2661103776889737, |
|
"learning_rate": 6.145787481292367e-05, |
|
"loss": 0.1242, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 9.592938650565387, |
|
"learning_rate": 6.119376705695923e-05, |
|
"loss": 0.1942, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 1.3300849423184784, |
|
"learning_rate": 6.0929659300994806e-05, |
|
"loss": 0.044, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 1.2053997379213532, |
|
"learning_rate": 6.066555154503037e-05, |
|
"loss": 0.0928, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 7.19395583303236, |
|
"learning_rate": 6.0401443789065934e-05, |
|
"loss": 0.1125, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 8.269855434350298, |
|
"learning_rate": 6.01373360331015e-05, |
|
"loss": 0.2177, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 10.269506918035669, |
|
"learning_rate": 5.987322827713707e-05, |
|
"loss": 0.278, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 8.66184316610978, |
|
"learning_rate": 5.9609120521172636e-05, |
|
"loss": 0.103, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 7.945938685825119, |
|
"learning_rate": 5.9345012765208204e-05, |
|
"loss": 0.1133, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 4.933574221157829, |
|
"learning_rate": 5.908090500924377e-05, |
|
"loss": 0.1305, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.7879011912280413, |
|
"learning_rate": 5.881679725327933e-05, |
|
"loss": 0.0597, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 5.3794371918406965, |
|
"learning_rate": 5.85526894973149e-05, |
|
"loss": 0.1443, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.5710056874707381, |
|
"learning_rate": 5.828858174135047e-05, |
|
"loss": 0.2218, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.32058848381222327, |
|
"learning_rate": 5.8024473985386034e-05, |
|
"loss": 0.1602, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 6.793952549838277, |
|
"learning_rate": 5.77603662294216e-05, |
|
"loss": 0.1137, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.9176053157133593, |
|
"learning_rate": 5.749625847345717e-05, |
|
"loss": 0.1521, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 4.959980234017326, |
|
"learning_rate": 5.723215071749273e-05, |
|
"loss": 0.0889, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 3.0075759280667156, |
|
"learning_rate": 5.69680429615283e-05, |
|
"loss": 0.118, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 3.6838990961744704, |
|
"learning_rate": 5.6703935205563865e-05, |
|
"loss": 0.088, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 6.310064678724968, |
|
"learning_rate": 5.643982744959943e-05, |
|
"loss": 0.3464, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 6.959426556418945, |
|
"learning_rate": 5.6175719693635e-05, |
|
"loss": 0.0988, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 2.178761973866902, |
|
"learning_rate": 5.591161193767057e-05, |
|
"loss": 0.0488, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.9189330144782912, |
|
"learning_rate": 5.5647504181706134e-05, |
|
"loss": 0.1747, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.6121902809356623, |
|
"learning_rate": 5.5383396425741695e-05, |
|
"loss": 0.1073, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 13.509716413826606, |
|
"learning_rate": 5.511928866977726e-05, |
|
"loss": 0.1049, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 4.845712579474245, |
|
"learning_rate": 5.488159168940927e-05, |
|
"loss": 0.144, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.4574259294233924, |
|
"learning_rate": 5.461748393344484e-05, |
|
"loss": 0.0477, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 11.276614742678678, |
|
"learning_rate": 5.435337617748041e-05, |
|
"loss": 0.076, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 4.287269515433337, |
|
"learning_rate": 5.4089268421515975e-05, |
|
"loss": 0.1456, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.7870008322276636, |
|
"learning_rate": 5.382516066555154e-05, |
|
"loss": 0.1846, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 1.4576468257971762, |
|
"learning_rate": 5.356105290958711e-05, |
|
"loss": 0.045, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.314871396726202, |
|
"learning_rate": 5.329694515362267e-05, |
|
"loss": 0.3069, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 1.7718785896414182, |
|
"learning_rate": 5.303283739765824e-05, |
|
"loss": 0.0325, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 8.812536626766736, |
|
"learning_rate": 5.2768729641693805e-05, |
|
"loss": 0.0756, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.869108315773467, |
|
"learning_rate": 5.250462188572937e-05, |
|
"loss": 0.0774, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 11.637977359161589, |
|
"learning_rate": 5.224051412976494e-05, |
|
"loss": 0.1496, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 2.3582743400795256, |
|
"learning_rate": 5.197640637380051e-05, |
|
"loss": 0.1161, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 5.422991695341771, |
|
"learning_rate": 5.1712298617836075e-05, |
|
"loss": 0.0715, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 1.9971699482282776, |
|
"learning_rate": 5.1448190861871635e-05, |
|
"loss": 0.1128, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 7.354984493390003, |
|
"learning_rate": 5.11840831059072e-05, |
|
"loss": 0.0326, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.5444593746342109, |
|
"learning_rate": 5.091997534994277e-05, |
|
"loss": 0.0599, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 10.570928022813604, |
|
"learning_rate": 5.065586759397834e-05, |
|
"loss": 0.083, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 1.0267858564413939, |
|
"learning_rate": 5.0391759838013905e-05, |
|
"loss": 0.2821, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.2826701064869344, |
|
"learning_rate": 5.012765208204947e-05, |
|
"loss": 0.1695, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.30108004892294266, |
|
"learning_rate": 4.986354432608504e-05, |
|
"loss": 0.0506, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.9049715361211421, |
|
"learning_rate": 4.95994365701206e-05, |
|
"loss": 0.0949, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 2.2007249390683805, |
|
"learning_rate": 4.933532881415617e-05, |
|
"loss": 0.1289, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.5888459296090317, |
|
"learning_rate": 4.9071221058191736e-05, |
|
"loss": 0.0511, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.9691912006046467, |
|
"learning_rate": 4.88071133022273e-05, |
|
"loss": 0.0447, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 6.5134129085276955, |
|
"learning_rate": 4.854300554626287e-05, |
|
"loss": 0.102, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 4.027136926862574, |
|
"learning_rate": 4.827889779029844e-05, |
|
"loss": 0.1908, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 19.250884994066908, |
|
"learning_rate": 4.8014790034334005e-05, |
|
"loss": 0.144, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 1.8893908721884611, |
|
"learning_rate": 4.7750682278369566e-05, |
|
"loss": 0.1049, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.11371244448898017, |
|
"learning_rate": 4.748657452240513e-05, |
|
"loss": 0.0542, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 3.9529994334004073, |
|
"learning_rate": 4.72224667664407e-05, |
|
"loss": 0.2115, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 22.695910694426306, |
|
"learning_rate": 4.695835901047627e-05, |
|
"loss": 0.1871, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.6508865368070988, |
|
"learning_rate": 4.6694251254511836e-05, |
|
"loss": 0.0771, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 12.688863533018672, |
|
"learning_rate": 4.64301434985474e-05, |
|
"loss": 0.1765, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 11.248140452930153, |
|
"learning_rate": 4.6166035742582964e-05, |
|
"loss": 0.144, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 1.5056240274213692, |
|
"learning_rate": 4.590192798661853e-05, |
|
"loss": 0.1073, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 1.1525606056414703, |
|
"learning_rate": 4.56378202306541e-05, |
|
"loss": 0.0754, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 4.077649798147871, |
|
"learning_rate": 4.5373712474689666e-05, |
|
"loss": 0.1682, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 7.123075586108915, |
|
"learning_rate": 4.510960471872524e-05, |
|
"loss": 0.0769, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.07024333898162324, |
|
"learning_rate": 4.484549696276081e-05, |
|
"loss": 0.0514, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 1.1302876456089308, |
|
"learning_rate": 4.4581389206796375e-05, |
|
"loss": 0.0357, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.7017409405729192, |
|
"learning_rate": 4.431728145083193e-05, |
|
"loss": 0.1135, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.957512424380371, |
|
"learning_rate": 4.4053173694867496e-05, |
|
"loss": 0.1427, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 2.7624371766373486, |
|
"learning_rate": 4.378906593890307e-05, |
|
"loss": 0.0967, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 2.0410211937203258, |
|
"learning_rate": 4.352495818293864e-05, |
|
"loss": 0.4155, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 7.167808785589543, |
|
"learning_rate": 4.3260850426974205e-05, |
|
"loss": 0.162, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 3.6838180745732223, |
|
"learning_rate": 4.299674267100977e-05, |
|
"loss": 0.1394, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.20084241971641661, |
|
"learning_rate": 4.273263491504534e-05, |
|
"loss": 0.0629, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.9933652116745235, |
|
"learning_rate": 4.24685271590809e-05, |
|
"loss": 0.0847, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 2.124410357695191, |
|
"learning_rate": 4.220441940311647e-05, |
|
"loss": 0.1249, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 1.0881424374659778, |
|
"learning_rate": 4.1940311647152036e-05, |
|
"loss": 0.0532, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.9462406945243451, |
|
"learning_rate": 4.16762038911876e-05, |
|
"loss": 0.1408, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 8.446099459905815, |
|
"learning_rate": 4.141209613522317e-05, |
|
"loss": 0.0914, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 2.8034177016932733, |
|
"learning_rate": 4.114798837925874e-05, |
|
"loss": 0.2553, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 3.138858126459365, |
|
"learning_rate": 4.0883880623294306e-05, |
|
"loss": 0.0592, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 1.6359838426779154, |
|
"learning_rate": 4.0619772867329866e-05, |
|
"loss": 0.0668, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 1.644243083157715, |
|
"learning_rate": 4.0355665111365434e-05, |
|
"loss": 0.0462, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.7635382174566347, |
|
"learning_rate": 4.0091557355401e-05, |
|
"loss": 0.0579, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 6.1535950795966405, |
|
"learning_rate": 3.982744959943657e-05, |
|
"loss": 0.3327, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 13.05703208638941, |
|
"learning_rate": 3.9563341843472136e-05, |
|
"loss": 0.115, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 1.4184167895624646, |
|
"learning_rate": 3.92992340875077e-05, |
|
"loss": 0.0391, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.42191460797965785, |
|
"learning_rate": 3.9035126331543264e-05, |
|
"loss": 0.1597, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 14.6318480422782, |
|
"learning_rate": 3.877101857557883e-05, |
|
"loss": 0.0733, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 5.671967203531022, |
|
"learning_rate": 3.85069108196144e-05, |
|
"loss": 0.1082, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.769743830367795, |
|
"learning_rate": 3.8242803063649966e-05, |
|
"loss": 0.0929, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 9.473401117829091, |
|
"learning_rate": 3.7978695307685534e-05, |
|
"loss": 0.0671, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 3.10818851060489, |
|
"learning_rate": 3.77145875517211e-05, |
|
"loss": 0.1062, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.19493193116698393, |
|
"learning_rate": 3.745047979575666e-05, |
|
"loss": 0.075, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.24854901716368594, |
|
"learning_rate": 3.718637203979223e-05, |
|
"loss": 0.2781, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 5.057687803468933, |
|
"learning_rate": 3.6922264283827803e-05, |
|
"loss": 0.1657, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 4.141377021767007, |
|
"learning_rate": 3.6658156527863364e-05, |
|
"loss": 0.1454, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.9507559207300316, |
|
"learning_rate": 3.639404877189893e-05, |
|
"loss": 0.0966, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 1.5757066011596175, |
|
"learning_rate": 3.61299410159345e-05, |
|
"loss": 0.0875, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 13.83734841586774, |
|
"learning_rate": 3.586583325997006e-05, |
|
"loss": 0.108, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.15044361157293365, |
|
"learning_rate": 3.5601725504005634e-05, |
|
"loss": 0.042, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 2.7291444270290097, |
|
"learning_rate": 3.53376177480412e-05, |
|
"loss": 0.0845, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.8376508635937641, |
|
"learning_rate": 3.507350999207677e-05, |
|
"loss": 0.1072, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 2.662374821524742, |
|
"learning_rate": 3.480940223611233e-05, |
|
"loss": 0.044, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 4.382422265786289, |
|
"learning_rate": 3.45452944801479e-05, |
|
"loss": 0.0257, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 3.7729275642209026, |
|
"learning_rate": 3.4281186724183464e-05, |
|
"loss": 0.0982, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 2.6167807277164736, |
|
"learning_rate": 3.401707896821903e-05, |
|
"loss": 0.1444, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 2.3390979840123256, |
|
"learning_rate": 3.37529712122546e-05, |
|
"loss": 0.2813, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 11.095709715607736, |
|
"learning_rate": 3.3488863456290166e-05, |
|
"loss": 0.1907, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.47254607817072725, |
|
"learning_rate": 3.322475570032573e-05, |
|
"loss": 0.0919, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 2.943883990242144, |
|
"learning_rate": 3.2960647944361295e-05, |
|
"loss": 0.0797, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 1.4137256867290577, |
|
"learning_rate": 3.269654018839686e-05, |
|
"loss": 0.1045, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 3.797400283682247, |
|
"learning_rate": 3.243243243243243e-05, |
|
"loss": 0.2231, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.18773776950023852, |
|
"learning_rate": 3.2168324676468e-05, |
|
"loss": 0.0312, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.40299221520593376, |
|
"learning_rate": 3.1904216920503564e-05, |
|
"loss": 0.1194, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 1.3289196326435833, |
|
"learning_rate": 3.164010916453913e-05, |
|
"loss": 0.0647, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.7116763771003195, |
|
"learning_rate": 3.137600140857469e-05, |
|
"loss": 0.0626, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 8.269537704530125, |
|
"learning_rate": 3.111189365261026e-05, |
|
"loss": 0.0452, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 2.5441648707031814, |
|
"learning_rate": 3.084778589664583e-05, |
|
"loss": 0.2587, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 2.2940876118032927, |
|
"learning_rate": 3.0583678140681395e-05, |
|
"loss": 0.1419, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 7.204826101361029, |
|
"learning_rate": 3.0319570384716962e-05, |
|
"loss": 0.0269, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.9970746450068529, |
|
"learning_rate": 3.005546262875253e-05, |
|
"loss": 0.1342, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.24876466316797904, |
|
"learning_rate": 2.9791354872788097e-05, |
|
"loss": 0.0512, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 6.37439302168123, |
|
"learning_rate": 2.952724711682366e-05, |
|
"loss": 0.0765, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 4.446435610481913, |
|
"learning_rate": 2.926313936085923e-05, |
|
"loss": 0.1737, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 13.127859094837586, |
|
"learning_rate": 2.8999031604894796e-05, |
|
"loss": 0.1934, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 12.34255516208749, |
|
"learning_rate": 2.873492384893036e-05, |
|
"loss": 0.0914, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 4.932453233603026, |
|
"learning_rate": 2.8470816092965927e-05, |
|
"loss": 0.1355, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 1.3836301473079415, |
|
"learning_rate": 2.8206708337001495e-05, |
|
"loss": 0.0612, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 17.59760420746046, |
|
"learning_rate": 2.7942600581037062e-05, |
|
"loss": 0.09, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.14360702340771692, |
|
"learning_rate": 2.7678492825072626e-05, |
|
"loss": 0.0667, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 3.131777089264597, |
|
"learning_rate": 2.7414385069108194e-05, |
|
"loss": 0.1506, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.36724519212978596, |
|
"learning_rate": 2.715027731314376e-05, |
|
"loss": 0.0541, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 6.116694134514987, |
|
"learning_rate": 2.6886169557179325e-05, |
|
"loss": 0.0685, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 28.441147635297522, |
|
"learning_rate": 2.6622061801214893e-05, |
|
"loss": 0.182, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 6.207440857908955, |
|
"learning_rate": 2.635795404525046e-05, |
|
"loss": 0.0841, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 1.2660646664649682, |
|
"learning_rate": 2.6093846289286024e-05, |
|
"loss": 0.1044, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.6092081949682532, |
|
"learning_rate": 2.582973853332159e-05, |
|
"loss": 0.1218, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.26754241443153093, |
|
"learning_rate": 2.556563077735716e-05, |
|
"loss": 0.0755, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 4.614645486521455, |
|
"learning_rate": 2.5301523021392726e-05, |
|
"loss": 0.2079, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 10.500071506881186, |
|
"learning_rate": 2.503741526542829e-05, |
|
"loss": 0.12, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 2.4157532308157768, |
|
"learning_rate": 2.4773307509463858e-05, |
|
"loss": 0.0486, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 3.090458090289476, |
|
"learning_rate": 2.450919975349943e-05, |
|
"loss": 0.0922, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.725234178573422, |
|
"learning_rate": 2.424509199753499e-05, |
|
"loss": 0.1187, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 2.883964200616445, |
|
"learning_rate": 2.3980984241570557e-05, |
|
"loss": 0.0991, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.8016329522459724, |
|
"learning_rate": 2.3716876485606127e-05, |
|
"loss": 0.2729, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.19691039993042744, |
|
"learning_rate": 2.3452768729641688e-05, |
|
"loss": 0.0165, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.7931561296721391, |
|
"learning_rate": 2.318866097367726e-05, |
|
"loss": 0.0438, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.3666105715233941, |
|
"learning_rate": 2.2924553217712826e-05, |
|
"loss": 0.0767, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 7.346452519095264, |
|
"learning_rate": 2.2660445461748394e-05, |
|
"loss": 0.0461, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 2.0799370688756174, |
|
"learning_rate": 2.2396337705783958e-05, |
|
"loss": 0.1117, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.6935165088282154, |
|
"learning_rate": 2.2132229949819525e-05, |
|
"loss": 0.1418, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 11.225662105081213, |
|
"learning_rate": 2.1868122193855093e-05, |
|
"loss": 0.1905, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 1.286056290531877, |
|
"learning_rate": 2.1604014437890657e-05, |
|
"loss": 0.1624, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.18686178060435013, |
|
"learning_rate": 2.1339906681926224e-05, |
|
"loss": 0.0206, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 11.939093895301632, |
|
"learning_rate": 2.107579892596179e-05, |
|
"loss": 0.1333, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.12096721036928222, |
|
"learning_rate": 2.081169116999736e-05, |
|
"loss": 0.031, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.8451614998694545, |
|
"learning_rate": 2.057399418962937e-05, |
|
"loss": 0.1967, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.6001248999495915, |
|
"learning_rate": 2.0309886433664933e-05, |
|
"loss": 0.0261, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 7.194025556223317, |
|
"learning_rate": 2.00457786777005e-05, |
|
"loss": 0.1301, |
|
"step": 11130 |
|
}, |
|
    {
      "epoch": 2.82,
      "grad_norm": 7.208076215728769,
      "learning_rate": 1.9781670921736068e-05,
      "loss": 0.0994,
      "step": 11140
    },
    {
      "epoch": 2.82,
      "grad_norm": 0.9648443648687712,
      "learning_rate": 1.9517563165771632e-05,
      "loss": 0.1464,
      "step": 11150
    },
    {
      "epoch": 2.82,
      "grad_norm": 1.9057615164332242,
      "learning_rate": 1.92534554098072e-05,
      "loss": 0.1518,
      "step": 11160
    },
    {
      "epoch": 2.83,
      "grad_norm": 1.227054647742425,
      "learning_rate": 1.8989347653842767e-05,
      "loss": 0.1274,
      "step": 11170
    },
    {
      "epoch": 2.83,
      "grad_norm": 0.4994864050705437,
      "learning_rate": 1.872523989787833e-05,
      "loss": 0.0346,
      "step": 11180
    },
    {
      "epoch": 2.83,
      "grad_norm": 0.6845515892006273,
      "learning_rate": 1.8461132141913902e-05,
      "loss": 0.0569,
      "step": 11190
    },
    {
      "epoch": 2.83,
      "grad_norm": 7.306151141443391,
      "learning_rate": 1.8197024385949466e-05,
      "loss": 0.1802,
      "step": 11200
    },
    {
      "epoch": 2.84,
      "grad_norm": 0.579042452582513,
      "learning_rate": 1.793291662998503e-05,
      "loss": 0.2344,
      "step": 11210
    },
    {
      "epoch": 2.84,
      "grad_norm": 10.380965944821517,
      "learning_rate": 1.76688088740206e-05,
      "loss": 0.1569,
      "step": 11220
    },
    {
      "epoch": 2.84,
      "grad_norm": 10.372858906621042,
      "learning_rate": 1.7404701118056165e-05,
      "loss": 0.0742,
      "step": 11230
    },
    {
      "epoch": 2.84,
      "grad_norm": 6.759873407800237,
      "learning_rate": 1.7140593362091732e-05,
      "loss": 0.1816,
      "step": 11240
    },
    {
      "epoch": 2.85,
      "grad_norm": 4.903482124866322,
      "learning_rate": 1.68764856061273e-05,
      "loss": 0.1935,
      "step": 11250
    },
    {
      "epoch": 2.85,
      "grad_norm": 0.4418667018871049,
      "learning_rate": 1.6612377850162864e-05,
      "loss": 0.2005,
      "step": 11260
    },
    {
      "epoch": 2.85,
      "grad_norm": 0.8868905220885367,
      "learning_rate": 1.634827009419843e-05,
      "loss": 0.062,
      "step": 11270
    },
    {
      "epoch": 2.85,
      "grad_norm": 9.782640698777652,
      "learning_rate": 1.6084162338234e-05,
      "loss": 0.2171,
      "step": 11280
    },
    {
      "epoch": 2.86,
      "grad_norm": 5.24892181652971,
      "learning_rate": 1.5820054582269566e-05,
      "loss": 0.082,
      "step": 11290
    },
    {
      "epoch": 2.86,
      "grad_norm": 2.8228753879855804,
      "learning_rate": 1.555594682630513e-05,
      "loss": 0.0686,
      "step": 11300
    },
    {
      "epoch": 2.86,
      "grad_norm": 7.80524687342384,
      "learning_rate": 1.5291839070340697e-05,
      "loss": 0.1381,
      "step": 11310
    },
    {
      "epoch": 2.86,
      "grad_norm": 11.936039568158414,
      "learning_rate": 1.5027731314376265e-05,
      "loss": 0.1963,
      "step": 11320
    },
    {
      "epoch": 2.87,
      "grad_norm": 10.237736806471208,
      "learning_rate": 1.476362355841183e-05,
      "loss": 0.1184,
      "step": 11330
    },
    {
      "epoch": 2.87,
      "grad_norm": 10.116058015555941,
      "learning_rate": 1.4499515802447398e-05,
      "loss": 0.1078,
      "step": 11340
    },
    {
      "epoch": 2.87,
      "grad_norm": 2.0749867720444426,
      "learning_rate": 1.4235408046482964e-05,
      "loss": 0.0955,
      "step": 11350
    },
    {
      "epoch": 2.87,
      "grad_norm": 7.283841186751001,
      "learning_rate": 1.3971300290518531e-05,
      "loss": 0.1663,
      "step": 11360
    },
    {
      "epoch": 2.88,
      "grad_norm": 1.3864174609556974,
      "learning_rate": 1.3707192534554097e-05,
      "loss": 0.0397,
      "step": 11370
    },
    {
      "epoch": 2.88,
      "grad_norm": 6.971133001085124,
      "learning_rate": 1.3443084778589663e-05,
      "loss": 0.0954,
      "step": 11380
    },
    {
      "epoch": 2.88,
      "grad_norm": 21.56014161964891,
      "learning_rate": 1.317897702262523e-05,
      "loss": 0.1602,
      "step": 11390
    },
    {
      "epoch": 2.88,
      "grad_norm": 2.356408551684661,
      "learning_rate": 1.2914869266660796e-05,
      "loss": 0.052,
      "step": 11400
    },
    {
      "epoch": 2.89,
      "grad_norm": 3.9611241524261565,
      "learning_rate": 1.2650761510696363e-05,
      "loss": 0.1536,
      "step": 11410
    },
    {
      "epoch": 2.89,
      "grad_norm": 7.450833839158113,
      "learning_rate": 1.2386653754731929e-05,
      "loss": 0.0765,
      "step": 11420
    },
    {
      "epoch": 2.89,
      "grad_norm": 1.7570334390289082,
      "learning_rate": 1.2122545998767495e-05,
      "loss": 0.0384,
      "step": 11430
    },
    {
      "epoch": 2.89,
      "grad_norm": 4.083706888762959,
      "learning_rate": 1.1858438242803064e-05,
      "loss": 0.0996,
      "step": 11440
    },
    {
      "epoch": 2.9,
      "grad_norm": 2.9197630989003147,
      "learning_rate": 1.159433048683863e-05,
      "loss": 0.1304,
      "step": 11450
    },
    {
      "epoch": 2.9,
      "grad_norm": 1.0271103895558773,
      "learning_rate": 1.1330222730874197e-05,
      "loss": 0.1985,
      "step": 11460
    },
    {
      "epoch": 2.9,
      "grad_norm": 6.322376901933602,
      "learning_rate": 1.1066114974909763e-05,
      "loss": 0.0701,
      "step": 11470
    },
    {
      "epoch": 2.9,
      "grad_norm": 0.7574464728810889,
      "learning_rate": 1.0802007218945328e-05,
      "loss": 0.0632,
      "step": 11480
    },
    {
      "epoch": 2.91,
      "grad_norm": 15.947801577922908,
      "learning_rate": 1.0537899462980896e-05,
      "loss": 0.1954,
      "step": 11490
    },
    {
      "epoch": 2.91,
      "grad_norm": 1.5012380137106978,
      "learning_rate": 1.0273791707016462e-05,
      "loss": 0.1728,
      "step": 11500
    },
    {
      "epoch": 2.91,
      "grad_norm": 5.590052436169391,
      "learning_rate": 1.0009683951052029e-05,
      "loss": 0.0982,
      "step": 11510
    },
    {
      "epoch": 2.91,
      "grad_norm": 0.508898054335977,
      "learning_rate": 9.745576195087595e-06,
      "loss": 0.039,
      "step": 11520
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.9269851244005981,
      "learning_rate": 9.48146843912316e-06,
      "loss": 0.0665,
      "step": 11530
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.10790991945570794,
      "learning_rate": 9.217360683158728e-06,
      "loss": 0.0484,
      "step": 11540
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.3788796784544578,
      "learning_rate": 8.953252927194295e-06,
      "loss": 0.095,
      "step": 11550
    },
    {
      "epoch": 2.92,
      "grad_norm": 3.277637154263989,
      "learning_rate": 8.689145171229861e-06,
      "loss": 0.0394,
      "step": 11560
    },
    {
      "epoch": 2.93,
      "grad_norm": 0.5254078523987122,
      "learning_rate": 8.425037415265427e-06,
      "loss": 0.0651,
      "step": 11570
    },
    {
      "epoch": 2.93,
      "grad_norm": 4.139641683707518,
      "learning_rate": 8.160929659300994e-06,
      "loss": 0.173,
      "step": 11580
    },
    {
      "epoch": 2.93,
      "grad_norm": 3.7160176647076018,
      "learning_rate": 7.896821903336562e-06,
      "loss": 0.1085,
      "step": 11590
    },
    {
      "epoch": 2.93,
      "grad_norm": 4.628653376951481,
      "learning_rate": 7.632714147372127e-06,
      "loss": 0.2646,
      "step": 11600
    },
    {
      "epoch": 2.94,
      "grad_norm": 4.630616633787641,
      "learning_rate": 7.368606391407693e-06,
      "loss": 0.1046,
      "step": 11610
    },
    {
      "epoch": 2.94,
      "grad_norm": 0.3096562218736908,
      "learning_rate": 7.10449863544326e-06,
      "loss": 0.1456,
      "step": 11620
    },
    {
      "epoch": 2.94,
      "grad_norm": 21.08824128593129,
      "learning_rate": 6.840390879478826e-06,
      "loss": 0.1656,
      "step": 11630
    },
    {
      "epoch": 2.94,
      "grad_norm": 5.7194402635098305,
      "learning_rate": 6.576283123514394e-06,
      "loss": 0.0433,
      "step": 11640
    },
    {
      "epoch": 2.95,
      "grad_norm": 1.1220445555277994,
      "learning_rate": 6.31217536754996e-06,
      "loss": 0.1068,
      "step": 11650
    },
    {
      "epoch": 2.95,
      "grad_norm": 2.4230288425705027,
      "learning_rate": 6.048067611585527e-06,
      "loss": 0.0179,
      "step": 11660
    },
    {
      "epoch": 2.95,
      "grad_norm": 5.344051738078155,
      "learning_rate": 5.783959855621093e-06,
      "loss": 0.0465,
      "step": 11670
    },
    {
      "epoch": 2.95,
      "grad_norm": 1.4671004615480219,
      "learning_rate": 5.519852099656659e-06,
      "loss": 0.0869,
      "step": 11680
    },
    {
      "epoch": 2.96,
      "grad_norm": 2.3656127093170958,
      "learning_rate": 5.255744343692226e-06,
      "loss": 0.1712,
      "step": 11690
    },
    {
      "epoch": 2.96,
      "grad_norm": 33.16154086138305,
      "learning_rate": 4.991636587727792e-06,
      "loss": 0.2911,
      "step": 11700
    },
    {
      "epoch": 2.96,
      "grad_norm": 2.0260483559873532,
      "learning_rate": 4.72752883176336e-06,
      "loss": 0.0819,
      "step": 11710
    },
    {
      "epoch": 2.96,
      "grad_norm": 1.0503134350842027,
      "learning_rate": 4.4634210757989255e-06,
      "loss": 0.0243,
      "step": 11720
    },
    {
      "epoch": 2.97,
      "grad_norm": 1.4059795773621981,
      "learning_rate": 4.199313319834492e-06,
      "loss": 0.0404,
      "step": 11730
    },
    {
      "epoch": 2.97,
      "grad_norm": 0.6346516212919995,
      "learning_rate": 3.935205563870059e-06,
      "loss": 0.0822,
      "step": 11740
    },
    {
      "epoch": 2.97,
      "grad_norm": 1.7317518527949531,
      "learning_rate": 3.6710978079056253e-06,
      "loss": 0.066,
      "step": 11750
    },
    {
      "epoch": 2.97,
      "grad_norm": 22.93625083972818,
      "learning_rate": 3.4069900519411914e-06,
      "loss": 0.1876,
      "step": 11760
    },
    {
      "epoch": 2.98,
      "grad_norm": 0.49771630803031175,
      "learning_rate": 3.1428822959767584e-06,
      "loss": 0.1169,
      "step": 11770
    },
    {
      "epoch": 2.98,
      "grad_norm": 1.4296662652804666,
      "learning_rate": 2.878774540012325e-06,
      "loss": 0.0554,
      "step": 11780
    },
    {
      "epoch": 2.98,
      "grad_norm": 0.29480468277518473,
      "learning_rate": 2.614666784047891e-06,
      "loss": 0.0501,
      "step": 11790
    },
    {
      "epoch": 2.99,
      "grad_norm": 8.666677806901344,
      "learning_rate": 2.3505590280834578e-06,
      "loss": 0.1742,
      "step": 11800
    },
    {
      "epoch": 2.99,
      "grad_norm": 1.787911757147805,
      "learning_rate": 2.0864512721190243e-06,
      "loss": 0.073,
      "step": 11810
    },
    {
      "epoch": 2.99,
      "grad_norm": 15.07480824018013,
      "learning_rate": 1.822343516154591e-06,
      "loss": 0.1233,
      "step": 11820
    },
    {
      "epoch": 2.99,
      "grad_norm": 2.4945345457744,
      "learning_rate": 1.5582357601901575e-06,
      "loss": 0.2263,
      "step": 11830
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.27772193620873387,
      "learning_rate": 1.294128004225724e-06,
      "loss": 0.1931,
      "step": 11840
    },
    {
      "epoch": 3.0,
      "grad_norm": 2.517522622984517,
      "learning_rate": 1.0300202482612905e-06,
      "loss": 0.0399,
      "step": 11850
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.8785213891787151,
      "eval_f1": 0.8438316361697416,
      "eval_loss": 0.41259765625,
      "eval_precision": 0.8355454067673094,
      "eval_recall": 0.8536051899559176,
      "eval_runtime": 462.5898,
      "eval_samples_per_second": 14.503,
      "eval_steps_per_second": 2.419,
      "step": 11859
    }
  ],
  "logging_steps": 10,
  "max_steps": 11859,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.229067174683607e+17,
  "train_batch_size": 6,
  "trial_name": null,
  "trial_params": null
}