{
  "best_metric": 0.7328859060402685,
  "best_model_checkpoint": "vivit-b-16x2-kinetics400-finetuned-crema-d/checkpoint-2976",
  "epoch": 3.248991935483871,
  "eval_steps": 500,
  "global_step": 2976,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 10.041251182556152, "learning_rate": 1.6778523489932886e-06, "loss": 2.0618, "step": 10 },
    { "epoch": 0.01, "grad_norm": 12.547879219055176, "learning_rate": 3.3557046979865773e-06, "loss": 1.9746, "step": 20 },
    { "epoch": 0.01, "grad_norm": 9.306560516357422, "learning_rate": 5.033557046979865e-06, "loss": 1.8185, "step": 30 },
    { "epoch": 0.01, "grad_norm": 7.815144062042236, "learning_rate": 6.7114093959731546e-06, "loss": 1.8203, "step": 40 },
    { "epoch": 0.02, "grad_norm": 10.477940559387207, "learning_rate": 8.389261744966444e-06, "loss": 1.8039, "step": 50 },
    { "epoch": 0.02, "grad_norm": 8.257948875427246, "learning_rate": 1.006711409395973e-05, "loss": 1.7131, "step": 60 },
    { "epoch": 0.02, "grad_norm": 7.9578776359558105, "learning_rate": 1.174496644295302e-05, "loss": 1.701, "step": 70 },
    { "epoch": 0.03, "grad_norm": 11.580317497253418, "learning_rate": 1.3422818791946309e-05, "loss": 1.5872, "step": 80 },
    { "epoch": 0.03, "grad_norm": 12.274490356445312, "learning_rate": 1.51006711409396e-05, "loss": 1.5683, "step": 90 },
    { "epoch": 0.03, "grad_norm": 9.367303848266602, "learning_rate": 1.6778523489932888e-05, "loss": 1.557, "step": 100 },
    { "epoch": 0.04, "grad_norm": 7.54719877243042, "learning_rate": 1.8456375838926178e-05, "loss": 1.4923, "step": 110 },
    { "epoch": 0.04, "grad_norm": 8.119046211242676, "learning_rate": 2.013422818791946e-05, "loss": 1.4376, "step": 120 },
    { "epoch": 0.04, "grad_norm": 8.935946464538574, "learning_rate": 2.181208053691275e-05, "loss": 1.5429, "step": 130 },
    { "epoch": 0.05, "grad_norm": 7.0674147605896, "learning_rate": 2.348993288590604e-05, "loss": 1.3362, "step": 140 },
    { "epoch": 0.05, "grad_norm": 11.671470642089844, "learning_rate": 2.516778523489933e-05, "loss": 1.371, "step": 150 },
    { "epoch": 0.05, "grad_norm": 11.36544132232666, "learning_rate": 2.6845637583892618e-05, "loss": 1.3582, "step": 160 },
    { "epoch": 0.06, "grad_norm": 11.540151596069336, "learning_rate": 2.8523489932885905e-05, "loss": 1.3666, "step": 170 },
    { "epoch": 0.06, "grad_norm": 12.664009094238281, "learning_rate": 3.02013422818792e-05, "loss": 1.5347, "step": 180 },
    { "epoch": 0.06, "grad_norm": 8.551016807556152, "learning_rate": 3.1879194630872485e-05, "loss": 1.3923, "step": 190 },
    { "epoch": 0.07, "grad_norm": 10.388668060302734, "learning_rate": 3.3557046979865775e-05, "loss": 1.3822, "step": 200 },
    { "epoch": 0.07, "grad_norm": 10.217473983764648, "learning_rate": 3.523489932885906e-05, "loss": 1.2387, "step": 210 },
    { "epoch": 0.07, "grad_norm": 10.830105781555176, "learning_rate": 3.6912751677852356e-05, "loss": 1.2035, "step": 220 },
    { "epoch": 0.08, "grad_norm": 14.143545150756836, "learning_rate": 3.859060402684564e-05, "loss": 1.2766, "step": 230 },
    { "epoch": 0.08, "grad_norm": 12.051302909851074, "learning_rate": 4.026845637583892e-05, "loss": 1.2969, "step": 240 },
    { "epoch": 0.08, "grad_norm": 7.3714823722839355, "learning_rate": 4.194630872483222e-05, "loss": 1.1517, "step": 250 },
    { "epoch": 0.09, "grad_norm": 9.341689109802246, "learning_rate": 4.36241610738255e-05, "loss": 1.2486, "step": 260 },
    { "epoch": 0.09, "grad_norm": 9.874135971069336, "learning_rate": 4.530201342281879e-05, "loss": 1.1304, "step": 270 },
    { "epoch": 0.09, "grad_norm": 10.551310539245605, "learning_rate": 4.697986577181208e-05, "loss": 1.1465, "step": 280 },
    { "epoch": 0.1, "grad_norm": 14.167583465576172, "learning_rate": 4.865771812080537e-05, "loss": 1.2225, "step": 290 },
    { "epoch": 0.1, "grad_norm": 7.046817302703857, "learning_rate": 4.996265870052278e-05, "loss": 1.1917, "step": 300 },
    { "epoch": 0.1, "grad_norm": 8.558070182800293, "learning_rate": 4.977595220313667e-05, "loss": 1.3288, "step": 310 },
    { "epoch": 0.11, "grad_norm": 7.99010705947876, "learning_rate": 4.958924570575056e-05, "loss": 1.1893, "step": 320 },
    { "epoch": 0.11, "grad_norm": 9.63879108428955, "learning_rate": 4.9402539208364454e-05, "loss": 1.1212, "step": 330 },
    { "epoch": 0.11, "grad_norm": 11.649455070495605, "learning_rate": 4.9215832710978346e-05, "loss": 1.0572, "step": 340 },
    { "epoch": 0.12, "grad_norm": 13.81701946258545, "learning_rate": 4.902912621359224e-05, "loss": 1.1869, "step": 350 },
    { "epoch": 0.12, "grad_norm": 13.85696029663086, "learning_rate": 4.884241971620613e-05, "loss": 1.5038, "step": 360 },
    { "epoch": 0.12, "grad_norm": 8.543469429016113, "learning_rate": 4.8655713218820016e-05, "loss": 1.0457, "step": 370 },
    { "epoch": 0.13, "grad_norm": 10.270995140075684, "learning_rate": 4.846900672143391e-05, "loss": 0.9543, "step": 380 },
    { "epoch": 0.13, "grad_norm": 8.127942085266113, "learning_rate": 4.82823002240478e-05, "loss": 0.9389, "step": 390 },
    { "epoch": 0.13, "grad_norm": 10.777777671813965, "learning_rate": 4.809559372666169e-05, "loss": 1.0655, "step": 400 },
    { "epoch": 0.14, "grad_norm": 8.693421363830566, "learning_rate": 4.790888722927558e-05, "loss": 1.0275, "step": 410 },
    { "epoch": 0.14, "grad_norm": 10.789031028747559, "learning_rate": 4.772218073188947e-05, "loss": 1.2218, "step": 420 },
    { "epoch": 0.14, "grad_norm": 6.10308313369751, "learning_rate": 4.753547423450336e-05, "loss": 0.9795, "step": 430 },
    { "epoch": 0.15, "grad_norm": 11.924713134765625, "learning_rate": 4.7348767737117256e-05, "loss": 1.0568, "step": 440 },
    { "epoch": 0.15, "grad_norm": 5.780080795288086, "learning_rate": 4.716206123973114e-05, "loss": 0.9916, "step": 450 },
    { "epoch": 0.15, "grad_norm": 7.771975994110107, "learning_rate": 4.697535474234503e-05, "loss": 0.9661, "step": 460 },
    { "epoch": 0.16, "grad_norm": 9.780990600585938, "learning_rate": 4.6788648244958926e-05, "loss": 0.884, "step": 470 },
    { "epoch": 0.16, "grad_norm": 6.3119354248046875, "learning_rate": 4.660194174757282e-05, "loss": 1.3266, "step": 480 },
    { "epoch": 0.16, "grad_norm": 6.907146453857422, "learning_rate": 4.6415235250186703e-05, "loss": 1.0728, "step": 490 },
    { "epoch": 0.17, "grad_norm": 6.732804775238037, "learning_rate": 4.6228528752800596e-05, "loss": 1.1793, "step": 500 },
    { "epoch": 0.17, "grad_norm": 5.275418281555176, "learning_rate": 4.604182225541449e-05, "loss": 1.0455, "step": 510 },
    { "epoch": 0.17, "grad_norm": 8.184633255004883, "learning_rate": 4.585511575802838e-05, "loss": 1.2988, "step": 520 },
    { "epoch": 0.18, "grad_norm": 9.226140022277832, "learning_rate": 4.566840926064227e-05, "loss": 0.9656, "step": 530 },
    { "epoch": 0.18, "grad_norm": 9.135764122009277, "learning_rate": 4.5481702763256165e-05, "loss": 0.9574, "step": 540 },
    { "epoch": 0.18, "grad_norm": 9.493541717529297, "learning_rate": 4.529499626587006e-05, "loss": 1.177, "step": 550 },
    { "epoch": 0.19, "grad_norm": 10.075041770935059, "learning_rate": 4.510828976848395e-05, "loss": 1.0754, "step": 560 },
    { "epoch": 0.19, "grad_norm": 9.099665641784668, "learning_rate": 4.492158327109784e-05, "loss": 1.179, "step": 570 },
    { "epoch": 0.19, "grad_norm": 18.290414810180664, "learning_rate": 4.473487677371173e-05, "loss": 1.14, "step": 580 },
    { "epoch": 0.2, "grad_norm": 6.768624305725098, "learning_rate": 4.454817027632562e-05, "loss": 1.0689, "step": 590 },
    { "epoch": 0.2, "grad_norm": 11.730013847351074, "learning_rate": 4.436146377893951e-05, "loss": 0.8808, "step": 600 },
    { "epoch": 0.2, "grad_norm": 7.4615159034729, "learning_rate": 4.4174757281553404e-05, "loss": 1.093, "step": 610 },
    { "epoch": 0.21, "grad_norm": 11.849823951721191, "learning_rate": 4.398805078416729e-05, "loss": 0.9596, "step": 620 },
    { "epoch": 0.21, "grad_norm": 8.06859302520752, "learning_rate": 4.380134428678118e-05, "loss": 0.9528, "step": 630 },
    { "epoch": 0.22, "grad_norm": 11.52597713470459, "learning_rate": 4.3614637789395075e-05, "loss": 0.8866, "step": 640 },
    { "epoch": 0.22, "grad_norm": 12.404598236083984, "learning_rate": 4.342793129200897e-05, "loss": 0.7092, "step": 650 },
    { "epoch": 0.22, "grad_norm": 6.062406539916992, "learning_rate": 4.324122479462285e-05, "loss": 0.7407, "step": 660 },
    { "epoch": 0.23, "grad_norm": 11.590093612670898, "learning_rate": 4.3054518297236745e-05, "loss": 1.1617, "step": 670 },
    { "epoch": 0.23, "grad_norm": 8.003750801086426, "learning_rate": 4.286781179985064e-05, "loss": 0.9215, "step": 680 },
    { "epoch": 0.23, "grad_norm": 6.014645099639893, "learning_rate": 4.268110530246453e-05, "loss": 0.8719, "step": 690 },
    { "epoch": 0.24, "grad_norm": 7.771167755126953, "learning_rate": 4.2494398805078415e-05, "loss": 0.865, "step": 700 },
    { "epoch": 0.24, "grad_norm": 6.302040100097656, "learning_rate": 4.230769230769231e-05, "loss": 0.7946, "step": 710 },
    { "epoch": 0.24, "grad_norm": 14.395988464355469, "learning_rate": 4.21209858103062e-05, "loss": 1.0984, "step": 720 },
    { "epoch": 0.25, "grad_norm": 7.767952919006348, "learning_rate": 4.193427931292009e-05, "loss": 0.9101, "step": 730 },
    { "epoch": 0.25, "grad_norm": 11.00684642791748, "learning_rate": 4.1747572815533984e-05, "loss": 0.9023, "step": 740 },
    { "epoch": 0.25, "eval_accuracy": 0.6013422818791946, "eval_loss": 1.0584642887115479, "eval_runtime": 1869.6092, "eval_samples_per_second": 0.398, "eval_steps_per_second": 0.05, "step": 745 },
    { "epoch": 1.0, "grad_norm": 8.297401428222656, "learning_rate": 4.1560866318147876e-05, "loss": 0.6975, "step": 750 },
    { "epoch": 1.01, "grad_norm": 8.592920303344727, "learning_rate": 4.137415982076177e-05, "loss": 0.6746, "step": 760 },
    { "epoch": 1.01, "grad_norm": 12.73000717163086, "learning_rate": 4.118745332337566e-05, "loss": 0.7792, "step": 770 },
    { "epoch": 1.01, "grad_norm": 12.524226188659668, "learning_rate": 4.1000746825989546e-05, "loss": 1.1009, "step": 780 },
    { "epoch": 1.02, "grad_norm": 14.503805160522461, "learning_rate": 4.081404032860344e-05, "loss": 0.8554, "step": 790 },
    { "epoch": 1.02, "grad_norm": 6.50175666809082, "learning_rate": 4.062733383121733e-05, "loss": 0.9718, "step": 800 },
    { "epoch": 1.02, "grad_norm": 6.452315807342529, "learning_rate": 4.0440627333831223e-05, "loss": 0.9421, "step": 810 },
    { "epoch": 1.03, "grad_norm": 17.507164001464844, "learning_rate": 4.025392083644511e-05, "loss": 0.9593, "step": 820 },
    { "epoch": 1.03, "grad_norm": 6.719306945800781, "learning_rate": 4.0067214339059e-05, "loss": 0.8568, "step": 830 },
    { "epoch": 1.03, "grad_norm": 8.009960174560547, "learning_rate": 3.9880507841672894e-05, "loss": 0.796, "step": 840 },
    { "epoch": 1.04, "grad_norm": 6.025060653686523, "learning_rate": 3.9693801344286786e-05, "loss": 0.8486, "step": 850 },
    { "epoch": 1.04, "grad_norm": 8.915096282958984, "learning_rate": 3.950709484690067e-05, "loss": 0.8042, "step": 860 },
    { "epoch": 1.04, "grad_norm": 12.27297592163086, "learning_rate": 3.9320388349514564e-05, "loss": 0.8384, "step": 870 },
    { "epoch": 1.05, "grad_norm": 5.073918342590332, "learning_rate": 3.9133681852128456e-05, "loss": 0.8608, "step": 880 },
    { "epoch": 1.05, "grad_norm": 10.088605880737305, "learning_rate": 3.894697535474235e-05, "loss": 0.8858, "step": 890 },
    { "epoch": 1.05, "grad_norm": 5.86115837097168, "learning_rate": 3.8760268857356234e-05, "loss": 0.6226, "step": 900 },
    { "epoch": 1.06, "grad_norm": 11.275440216064453, "learning_rate": 3.8573562359970126e-05, "loss": 0.8814, "step": 910 },
    { "epoch": 1.06, "grad_norm": 5.241692543029785, "learning_rate": 3.838685586258402e-05, "loss": 0.8163, "step": 920 },
    { "epoch": 1.06, "grad_norm": 8.397137641906738, "learning_rate": 3.820014936519791e-05, "loss": 0.7511, "step": 930 },
    { "epoch": 1.07, "grad_norm": 9.3834810256958, "learning_rate": 3.8013442867811796e-05, "loss": 0.7023, "step": 940 },
    { "epoch": 1.07, "grad_norm": 11.98423957824707, "learning_rate": 3.782673637042569e-05, "loss": 0.7885, "step": 950 },
    { "epoch": 1.07, "grad_norm": 11.273078918457031, "learning_rate": 3.764002987303958e-05, "loss": 0.6545, "step": 960 },
    { "epoch": 1.08, "grad_norm": 14.190473556518555, "learning_rate": 3.745332337565347e-05, "loss": 1.0184, "step": 970 },
    { "epoch": 1.08, "grad_norm": 4.971963405609131, "learning_rate": 3.7266616878267365e-05, "loss": 0.7963, "step": 980 },
    { "epoch": 1.08, "grad_norm": 17.721282958984375, "learning_rate": 3.707991038088126e-05, "loss": 0.9323, "step": 990 },
    { "epoch": 1.09, "grad_norm": 10.512914657592773, "learning_rate": 3.689320388349515e-05, "loss": 0.9391, "step": 1000 },
    { "epoch": 1.09, "grad_norm": 11.076910018920898, "learning_rate": 3.670649738610904e-05, "loss": 0.8941, "step": 1010 },
    { "epoch": 1.09, "grad_norm": 5.027378082275391, "learning_rate": 3.651979088872293e-05, "loss": 0.8346, "step": 1020 },
    { "epoch": 1.1, "grad_norm": 12.940291404724121, "learning_rate": 3.633308439133682e-05, "loss": 0.7896, "step": 1030 },
    { "epoch": 1.1, "grad_norm": 13.178154945373535, "learning_rate": 3.614637789395071e-05, "loss": 0.6817, "step": 1040 },
    { "epoch": 1.1, "grad_norm": 13.505011558532715, "learning_rate": 3.5959671396564605e-05, "loss": 0.7627, "step": 1050 },
    { "epoch": 1.11, "grad_norm": 4.09556245803833, "learning_rate": 3.577296489917849e-05, "loss": 0.8155, "step": 1060 },
    { "epoch": 1.11, "grad_norm": 12.600196838378906, "learning_rate": 3.558625840179238e-05, "loss": 0.89, "step": 1070 },
    { "epoch": 1.11, "grad_norm": 8.050426483154297, "learning_rate": 3.5399551904406275e-05, "loss": 0.8544, "step": 1080 },
    { "epoch": 1.12, "grad_norm": 13.491654396057129, "learning_rate": 3.521284540702017e-05, "loss": 0.6694, "step": 1090 },
    { "epoch": 1.12, "grad_norm": 6.23304557800293, "learning_rate": 3.502613890963405e-05, "loss": 0.8363, "step": 1100 },
    { "epoch": 1.12, "grad_norm": 12.755766868591309, "learning_rate": 3.4839432412247945e-05, "loss": 0.6294, "step": 1110 },
    { "epoch": 1.13, "grad_norm": 7.085630416870117, "learning_rate": 3.465272591486184e-05, "loss": 0.6517, "step": 1120 },
    { "epoch": 1.13, "grad_norm": 7.111999988555908, "learning_rate": 3.446601941747573e-05, "loss": 0.6923, "step": 1130 },
    { "epoch": 1.13, "grad_norm": 9.9430513381958, "learning_rate": 3.427931292008962e-05, "loss": 0.7989, "step": 1140 },
    { "epoch": 1.14, "grad_norm": 7.441895484924316, "learning_rate": 3.409260642270351e-05, "loss": 0.8628, "step": 1150 },
    { "epoch": 1.14, "grad_norm": 11.109057426452637, "learning_rate": 3.39058999253174e-05, "loss": 0.9556, "step": 1160 },
    { "epoch": 1.14, "grad_norm": 3.8683066368103027, "learning_rate": 3.371919342793129e-05, "loss": 0.7598, "step": 1170 },
    { "epoch": 1.15, "grad_norm": 9.229301452636719, "learning_rate": 3.3532486930545184e-05, "loss": 0.7694, "step": 1180 },
    { "epoch": 1.15, "grad_norm": 6.90978479385376, "learning_rate": 3.334578043315908e-05, "loss": 0.6816, "step": 1190 },
    { "epoch": 1.15, "grad_norm": 10.782751083374023, "learning_rate": 3.315907393577297e-05, "loss": 0.7015, "step": 1200 },
    { "epoch": 1.16, "grad_norm": 12.352110862731934, "learning_rate": 3.297236743838686e-05, "loss": 0.6476, "step": 1210 },
    { "epoch": 1.16, "grad_norm": 12.289131164550781, "learning_rate": 3.2785660941000754e-05, "loss": 0.6243, "step": 1220 },
    { "epoch": 1.16, "grad_norm": 10.164794921875, "learning_rate": 3.259895444361464e-05, "loss": 0.7136, "step": 1230 },
    { "epoch": 1.17, "grad_norm": 11.630318641662598, "learning_rate": 3.241224794622853e-05, "loss": 0.8095, "step": 1240 },
    { "epoch": 1.17, "grad_norm": 8.73193359375, "learning_rate": 3.2225541448842424e-05, "loss": 0.7918, "step": 1250 },
    { "epoch": 1.17, "grad_norm": 10.5596284866333, "learning_rate": 3.2038834951456316e-05, "loss": 0.9087, "step": 1260 },
    { "epoch": 1.18, "grad_norm": 10.88122844696045, "learning_rate": 3.18521284540702e-05, "loss": 0.5758, "step": 1270 },
    { "epoch": 1.18, "grad_norm": 7.157528877258301, "learning_rate": 3.1665421956684094e-05, "loss": 0.645, "step": 1280 },
    { "epoch": 1.18, "grad_norm": 6.619960784912109, "learning_rate": 3.1478715459297986e-05, "loss": 0.6412, "step": 1290 },
    { "epoch": 1.19, "grad_norm": 9.903483390808105, "learning_rate": 3.129200896191188e-05, "loss": 0.5866, "step": 1300 },
    { "epoch": 1.19, "grad_norm": 8.340232849121094, "learning_rate": 3.1105302464525764e-05, "loss": 0.6971, "step": 1310 },
    { "epoch": 1.19, "grad_norm": 4.8595170974731445, "learning_rate": 3.0918595967139656e-05, "loss": 0.6824, "step": 1320 },
    { "epoch": 1.2, "grad_norm": 13.451203346252441, "learning_rate": 3.073188946975355e-05, "loss": 0.5937, "step": 1330 },
    { "epoch": 1.2, "grad_norm": 8.88749885559082, "learning_rate": 3.054518297236744e-05, "loss": 0.7642, "step": 1340 },
    { "epoch": 1.2, "grad_norm": 10.660982131958008, "learning_rate": 3.035847647498133e-05, "loss": 0.7327, "step": 1350 },
    { "epoch": 1.21, "grad_norm": 12.592216491699219, "learning_rate": 3.0171769977595222e-05, "loss": 0.6769, "step": 1360 },
    { "epoch": 1.21, "grad_norm": 3.5256502628326416, "learning_rate": 2.9985063480209115e-05, "loss": 0.6646, "step": 1370 },
    { "epoch": 1.21, "grad_norm": 6.979560852050781, "learning_rate": 2.9798356982823007e-05, "loss": 0.7394, "step": 1380 },
    { "epoch": 1.22, "grad_norm": 8.0765962600708, "learning_rate": 2.9611650485436892e-05, "loss": 0.597, "step": 1390 },
    { "epoch": 1.22, "grad_norm": 6.306230545043945, "learning_rate": 2.9424943988050785e-05, "loss": 0.557, "step": 1400 },
    { "epoch": 1.22, "grad_norm": 7.691712856292725, "learning_rate": 2.9238237490664677e-05, "loss": 0.778, "step": 1410 },
    { "epoch": 1.23, "grad_norm": 10.76540756225586, "learning_rate": 2.905153099327857e-05, "loss": 0.7148, "step": 1420 },
    { "epoch": 1.23, "grad_norm": 8.003518104553223, "learning_rate": 2.8864824495892455e-05, "loss": 0.799, "step": 1430 },
    { "epoch": 1.23, "grad_norm": 8.381891250610352, "learning_rate": 2.8678117998506347e-05, "loss": 0.7274, "step": 1440 },
    { "epoch": 1.24, "grad_norm": 11.402746200561523, "learning_rate": 2.849141150112024e-05, "loss": 0.767, "step": 1450 },
    { "epoch": 1.24, "grad_norm": 12.372224807739258, "learning_rate": 2.8304705003734132e-05, "loss": 0.4786, "step": 1460 },
    { "epoch": 1.24, "grad_norm": 6.330965518951416, "learning_rate": 2.811799850634802e-05, "loss": 0.6594, "step": 1470 },
    { "epoch": 1.25, "grad_norm": 10.601953506469727, "learning_rate": 2.7931292008961913e-05, "loss": 0.5358, "step": 1480 },
    { "epoch": 1.25, "grad_norm": 55.654109954833984, "learning_rate": 2.7744585511575805e-05, "loss": 0.9428, "step": 1490 },
    { "epoch": 1.25, "eval_accuracy": 0.6510067114093959, "eval_loss": 0.897102415561676, "eval_runtime": 1724.9592, "eval_samples_per_second": 0.432, "eval_steps_per_second": 0.054, "step": 1490 },
    { "epoch": 2.0, "grad_norm": 14.913216590881348, "learning_rate": 2.7557879014189698e-05, "loss": 0.6672, "step": 1500 },
    { "epoch": 2.01, "grad_norm": 8.555597305297852, "learning_rate": 2.7371172516803583e-05, "loss": 0.517, "step": 1510 },
    { "epoch": 2.01, "grad_norm": 13.102532386779785, "learning_rate": 2.7184466019417475e-05, "loss": 0.4576, "step": 1520 },
    { "epoch": 2.01, "grad_norm": 8.147948265075684, "learning_rate": 2.6997759522031368e-05, "loss": 0.5292, "step": 1530 },
    { "epoch": 2.02, "grad_norm": 9.257566452026367, "learning_rate": 2.681105302464526e-05, "loss": 0.5036, "step": 1540 },
    { "epoch": 2.02, "grad_norm": 11.560401916503906, "learning_rate": 2.662434652725915e-05, "loss": 0.6044, "step": 1550 },
    { "epoch": 2.02, "grad_norm": 11.875014305114746, "learning_rate": 2.643764002987304e-05, "loss": 0.5306, "step": 1560 },
    { "epoch": 2.03, "grad_norm": 13.541814804077148, "learning_rate": 2.6250933532486934e-05, "loss": 0.6254, "step": 1570 },
    { "epoch": 2.03, "grad_norm": 9.984253883361816, "learning_rate": 2.6064227035100826e-05, "loss": 0.5974, "step": 1580 },
    { "epoch": 2.03, "grad_norm": 10.722140312194824, "learning_rate": 2.587752053771471e-05, "loss": 0.7041, "step": 1590 },
    { "epoch": 2.04, "grad_norm": 10.557269096374512, "learning_rate": 2.5690814040328604e-05, "loss": 0.5909, "step": 1600 },
    { "epoch": 2.04, "grad_norm": 11.491299629211426, "learning_rate": 2.5504107542942496e-05, "loss": 0.4946, "step": 1610 },
    { "epoch": 2.04, "grad_norm": 8.835367202758789, "learning_rate": 2.531740104555639e-05, "loss": 0.6644, "step": 1620 },
    { "epoch": 2.05, "grad_norm": 11.745617866516113, "learning_rate": 2.5130694548170274e-05, "loss": 0.6128, "step": 1630 },
    { "epoch": 2.05, "grad_norm": 13.023053169250488, "learning_rate": 2.4943988050784166e-05, "loss": 0.6555, "step": 1640 },
    { "epoch": 2.05, "grad_norm": 6.082352161407471, "learning_rate": 2.475728155339806e-05, "loss": 0.5754, "step": 1650 },
    { "epoch": 2.06, "grad_norm": 4.05959415435791, "learning_rate": 2.4570575056011947e-05, "loss": 0.4325, "step": 1660 },
    { "epoch": 2.06, "grad_norm": 14.252350807189941, "learning_rate": 2.4383868558625843e-05, "loss": 0.4956, "step": 1670 },
    { "epoch": 2.06, "grad_norm": 6.615436553955078, "learning_rate": 2.4197162061239732e-05, "loss": 0.5474, "step": 1680 },
    { "epoch": 2.07, "grad_norm": 7.025275230407715, "learning_rate": 2.4010455563853624e-05, "loss": 0.6072, "step": 1690 },
    { "epoch": 2.07, "grad_norm": 15.101826667785645, "learning_rate": 2.3823749066467517e-05, "loss": 0.6705, "step": 1700 },
    { "epoch": 2.07, "grad_norm": 11.996399879455566, "learning_rate": 2.3637042569081406e-05, "loss": 0.5326, "step": 1710 },
    { "epoch": 2.08, "grad_norm": 11.746566772460938, "learning_rate": 2.3450336071695298e-05, "loss": 0.6512, "step": 1720 },
    { "epoch": 2.08, "grad_norm": 7.299741744995117, "learning_rate": 2.3263629574309187e-05, "loss": 0.4751, "step": 1730 },
    { "epoch": 2.08, "grad_norm": 10.497876167297363, "learning_rate": 2.307692307692308e-05, "loss": 0.7083, "step": 1740 },
    { "epoch": 2.09, "grad_norm": 6.633430480957031, "learning_rate": 2.2890216579536968e-05, "loss": 0.4854, "step": 1750 },
    { "epoch": 2.09, "grad_norm": 8.004325866699219, "learning_rate": 2.270351008215086e-05, "loss": 0.5795, "step": 1760 },
    { "epoch": 2.09, "grad_norm": 9.713132858276367, "learning_rate": 2.251680358476475e-05, "loss": 0.5735, "step": 1770 },
    { "epoch": 2.1, "grad_norm": 6.221426963806152, "learning_rate": 2.233009708737864e-05, "loss": 0.5405, "step": 1780 },
    { "epoch": 2.1, "grad_norm": 6.872443675994873, "learning_rate": 2.2143390589992534e-05, "loss": 0.7661, "step": 1790 },
    { "epoch": 2.1, "grad_norm": 6.894876956939697, "learning_rate": 2.1956684092606426e-05, "loss": 0.4521, "step": 1800 },
    { "epoch": 2.11, "grad_norm": 3.6292059421539307, "learning_rate": 2.1769977595220315e-05, "loss": 0.5084, "step": 1810 },
    { "epoch": 2.11, "grad_norm": 13.099087715148926, "learning_rate": 2.1583271097834207e-05, "loss": 0.644, "step": 1820 },
    { "epoch": 2.11, "grad_norm": 13.508889198303223, "learning_rate": 2.1396564600448096e-05, "loss": 0.6271, "step": 1830 },
    { "epoch": 2.12, "grad_norm": 9.162331581115723, "learning_rate": 2.120985810306199e-05, "loss": 0.5586, "step": 1840 },
    { "epoch": 2.12, "grad_norm": 9.340535163879395, "learning_rate": 2.1023151605675877e-05, "loss": 0.6222, "step": 1850 },
    { "epoch": 2.12, "grad_norm": 10.59788990020752, "learning_rate": 2.083644510828977e-05, "loss": 0.6439, "step": 1860 },
    { "epoch": 2.13, "grad_norm": 13.132092475891113, "learning_rate": 2.064973861090366e-05, "loss": 0.5401, "step": 1870 },
    { "epoch": 2.13, "grad_norm": 11.530851364135742, "learning_rate": 2.046303211351755e-05, "loss": 0.6462, "step": 1880 },
    { "epoch": 2.13, "grad_norm": 15.348837852478027, "learning_rate": 2.0276325616131443e-05, "loss": 0.6606, "step": 1890 },
    { "epoch": 2.14, "grad_norm": 16.934356689453125, "learning_rate": 2.0089619118745336e-05, "loss": 0.6352, "step": 1900 },
    { "epoch": 2.14, "grad_norm": 9.712738037109375, "learning_rate": 1.9902912621359225e-05, "loss": 0.8095, "step": 1910 },
    { "epoch": 2.14, "grad_norm": 10.83343505859375, "learning_rate": 1.9716206123973117e-05, "loss": 0.5243, "step": 1920 },
    { "epoch": 2.15, "grad_norm": 10.138250350952148, "learning_rate": 1.9529499626587006e-05, "loss": 0.4842, "step": 1930 },
    { "epoch": 2.15, "grad_norm": 9.77036190032959, "learning_rate": 1.9342793129200898e-05, "loss": 0.6791, "step": 1940 },
    { "epoch": 2.15, "grad_norm": 9.89406967163086, "learning_rate": 1.9156086631814787e-05, "loss": 0.5933, "step": 1950 },
    { "epoch": 2.16, "grad_norm": 7.667896747589111, "learning_rate": 1.896938013442868e-05, "loss": 0.558, "step": 1960 },
    { "epoch": 2.16, "grad_norm": 21.039913177490234, "learning_rate": 1.8782673637042568e-05, "loss": 0.5028, "step": 1970 },
    { "epoch": 2.16, "grad_norm": 3.437727689743042, "learning_rate": 1.859596713965646e-05, "loss": 0.4429, "step": 1980 },
    { "epoch": 2.17, "grad_norm": 12.366796493530273, "learning_rate": 1.8409260642270353e-05, "loss": 0.6008, "step": 1990 },
    { "epoch": 2.17, "grad_norm": 17.259078979492188, "learning_rate": 1.8222554144884245e-05, "loss": 0.5279, "step": 2000 },
    { "epoch": 2.17, "grad_norm": 8.58683967590332, "learning_rate": 1.8035847647498134e-05, "loss": 0.4038, "step": 2010 },
    { "epoch": 2.18, "grad_norm": 6.0911970138549805, "learning_rate": 1.7849141150112026e-05, "loss": 0.5495, "step": 2020 },
    { "epoch": 2.18, "grad_norm": 9.944643020629883, "learning_rate": 1.7662434652725915e-05, "loss": 0.5382, "step": 2030 },
    { "epoch": 2.18, "grad_norm": 13.859691619873047, "learning_rate": 1.7475728155339808e-05, "loss": 0.6927, "step": 2040 },
    { "epoch": 2.19, "grad_norm": 7.948896408081055, "learning_rate": 1.7289021657953697e-05, "loss": 0.3874, "step": 2050 },
    { "epoch": 2.19, "grad_norm": 6.207942485809326, "learning_rate": 1.710231516056759e-05, "loss": 0.4023, "step": 2060 },
    { "epoch": 2.19, "grad_norm": 5.350401401519775, "learning_rate": 1.6915608663181478e-05, "loss": 0.5397, "step": 2070 },
    { "epoch": 2.2, "grad_norm": 10.37016487121582, "learning_rate": 1.672890216579537e-05, "loss": 0.5941, "step": 2080 },
    { "epoch": 2.2, "grad_norm": 9.93877124786377, "learning_rate": 1.654219566840926e-05, "loss": 0.4433, "step": 2090 },
    { "epoch": 2.2, "grad_norm": 14.147184371948242, "learning_rate": 1.635548917102315e-05, "loss": 0.6687, "step": 2100 },
    { "epoch": 2.21, "grad_norm": 2.5294337272644043, "learning_rate": 1.6168782673637044e-05, "loss": 0.5144, "step": 2110 },
    { "epoch": 2.21, "grad_norm": 14.119583129882812, "learning_rate": 1.5982076176250936e-05, "loss": 0.4195, "step": 2120 },
    { "epoch": 2.22, "grad_norm": 5.246395587921143, "learning_rate": 1.5795369678864825e-05, "loss": 0.5291, "step": 2130 },
    { "epoch": 2.22, "grad_norm": 5.959507465362549, "learning_rate": 1.5608663181478717e-05, "loss": 0.4513, "step": 2140 },
    { "epoch": 2.22, "grad_norm": 12.30712604522705, "learning_rate": 1.5421956684092606e-05, "loss": 0.5737, "step": 2150 },
    { "epoch": 2.23, "grad_norm": 13.365155220031738, "learning_rate": 1.5235250186706498e-05, "loss": 0.6143, "step": 2160 },
    { "epoch": 2.23, "grad_norm": 15.408020973205566, "learning_rate": 1.5048543689320387e-05, "loss": 0.4525, "step": 2170 },
    { "epoch": 2.23, "grad_norm": 8.355168342590332, "learning_rate": 1.486183719193428e-05, "loss": 0.6192, "step": 2180 },
    { "epoch": 2.24, "grad_norm": 14.046072006225586, "learning_rate": 1.467513069454817e-05, "loss": 0.5334, "step": 2190 },
    { "epoch": 2.24, "grad_norm": 6.0648908615112305, "learning_rate": 1.4488424197162062e-05, "loss": 0.6275, "step": 2200 },
    { "epoch": 2.24, "grad_norm": 9.870409965515137, "learning_rate": 1.4301717699775951e-05, "loss": 0.5459, "step": 2210 },
    { "epoch": 2.25, "grad_norm": 10.847134590148926, "learning_rate": 1.4115011202389844e-05, "loss": 0.3645, "step": 2220 },
    { "epoch": 2.25, "grad_norm": 7.336729049682617, "learning_rate": 1.3928304705003734e-05, "loss": 0.4575, "step": 2230 },
    { "epoch": 2.25, "eval_accuracy": 0.7181208053691275, "eval_loss": 0.7427847981452942, "eval_runtime": 1683.7503, "eval_samples_per_second": 0.442, "eval_steps_per_second": 0.056, "step": 2235 },
    { "epoch": 3.0, "grad_norm": 8.262899398803711, "learning_rate": 1.3741598207617627e-05, "loss": 0.6098, "step": 2240 },
    { "epoch": 3.01, "grad_norm": 11.888318061828613, "learning_rate": 1.3554891710231516e-05, "loss": 0.6722, "step": 2250 },
    { "epoch": 3.01, "grad_norm": 4.9109320640563965, "learning_rate": 1.3368185212845408e-05, "loss": 0.302, "step": 2260 },
    { "epoch": 3.01, "grad_norm": 3.660041570663452, "learning_rate": 1.3181478715459297e-05, "loss": 0.4001, "step": 2270 },
    { "epoch": 3.02, "grad_norm": 5.666093349456787, "learning_rate": 1.2994772218073189e-05, "loss": 0.4406, "step": 2280 },
    { "epoch": 3.02, "grad_norm": 5.51093053817749, "learning_rate": 1.2808065720687081e-05, "loss": 0.434, "step": 2290 },
    { "epoch": 3.02, "grad_norm": 11.179670333862305, "learning_rate": 1.2621359223300972e-05, "loss": 0.4668, "step": 2300 },
    { "epoch": 3.03, "grad_norm": 8.03001880645752, "learning_rate": 1.2434652725914863e-05, "loss": 0.4569, "step": 2310 },
    { "epoch": 3.03, "grad_norm": 8.949047088623047, "learning_rate": 1.2247946228528753e-05, "loss": 0.4284, "step": 2320 },
    { "epoch": 3.03, "grad_norm": 10.540315628051758, "learning_rate": 1.2061239731142644e-05, "loss": 0.5279, "step": 2330 },
    { "epoch": 3.04, "grad_norm": 14.215402603149414, "learning_rate": 1.1874533233756534e-05, "loss": 0.4004, "step": 2340 },
    { "epoch": 3.04, "grad_norm": 18.047544479370117, "learning_rate": 1.1687826736370427e-05, "loss": 0.4778, "step": 2350 },
    { "epoch": 3.04, "grad_norm": 9.015625953674316, "learning_rate": 1.1501120238984317e-05, "loss": 0.5168, "step": 2360 },
    { "epoch": 3.05, "grad_norm": 11.635021209716797, "learning_rate": 1.1314413741598208e-05, "loss": 0.4268, "step": 2370 },
    { "epoch": 3.05, "grad_norm": 7.7300004959106445, "learning_rate": 1.1127707244212099e-05, "loss": 0.4186, "step": 2380 },
    { "epoch": 3.05, "grad_norm": 8.837210655212402, "learning_rate": 1.094100074682599e-05, "loss": 0.555, "step": 2390 },
    { "epoch": 3.06, "grad_norm": 6.158858299255371, "learning_rate": 1.0754294249439881e-05, "loss": 0.5783, "step": 2400 },
    { "epoch": 3.06, "grad_norm": 7.499691009521484, "learning_rate": 1.0567587752053772e-05, "loss": 0.4923, "step": 2410 },
    { "epoch": 3.06, "grad_norm": 12.26675796508789, "learning_rate": 1.0380881254667663e-05, "loss": 0.3842, "step": 2420 },
    { "epoch": 3.07, "grad_norm": 11.513700485229492, "learning_rate": 1.0194174757281553e-05, "loss": 0.3955, "step": 2430 },
    { "epoch": 3.07, "grad_norm": 5.970608234405518, "learning_rate": 1.0007468259895444e-05, "loss": 0.3256, "step": 2440 },
    { "epoch": 3.07, "grad_norm": 10.886767387390137, "learning_rate": 9.820761762509336e-06, "loss": 0.3808, "step": 2450 },
    { "epoch": 3.08, "grad_norm": 5.535740375518799, "learning_rate": 9.634055265123227e-06, "loss": 0.4004, "step": 2460 },
    { "epoch": 3.08, "grad_norm": 7.606982231140137, "learning_rate": 9.447348767737117e-06, "loss": 0.492, "step": 2470 },
    { "epoch": 3.08, "grad_norm": 4.145596981048584, "learning_rate": 9.260642270351008e-06, "loss": 0.3662, "step": 2480 },
    { "epoch": 3.09, "grad_norm": 5.0407490730285645, "learning_rate": 9.073935772964899e-06, "loss": 0.2492, "step": 2490 },
    { "epoch": 3.09, "grad_norm": 7.193727016448975, "learning_rate": 8.88722927557879e-06, "loss": 0.3642, "step": 2500 },
    { "epoch": 3.09, "grad_norm": 6.277304172515869, "learning_rate": 8.700522778192682e-06, "loss": 0.4605, "step": 2510 },
    { "epoch": 3.1, "grad_norm": 10.555391311645508, "learning_rate": 8.513816280806572e-06, "loss": 0.3938, "step": 2520 },
    { "epoch": 3.1, "grad_norm": 5.211946964263916, "learning_rate": 8.327109783420463e-06, "loss": 0.3817, "step": 2530 },
    { "epoch": 3.1, "grad_norm": 11.565357208251953, "learning_rate": 8.140403286034353e-06, "loss": 0.3402, "step": 2540 },
    { "epoch": 3.11, "grad_norm": 7.738372802734375, "learning_rate": 7.953696788648244e-06, "loss": 0.555, "step": 2550 },
    { "epoch": 3.11, "grad_norm": 5.373858451843262, "learning_rate": 7.766990291262136e-06, "loss": 0.4478, "step": 2560 },
    { "epoch": 3.11, "grad_norm": 11.583991050720215, "learning_rate": 7.580283793876028e-06, "loss": 0.3699, "step": 2570 },
    { "epoch": 3.12, "grad_norm": 7.290119171142578, "learning_rate": 7.393577296489919e-06, "loss": 0.4119, "step": 2580 },
    { "epoch": 3.12, "grad_norm": 13.199901580810547, "learning_rate": 7.20687079910381e-06, "loss": 0.3973, "step": 2590 },
    { "epoch": 3.12, "grad_norm": 5.801746368408203, "learning_rate": 7.0201643017177005e-06, "loss": 0.278, "step": 2600 },
    { "epoch": 3.13, "grad_norm": 7.40468692779541, "learning_rate": 6.833457804331592e-06, "loss": 0.4457, "step": 2610 },
    { "epoch": 3.13, "grad_norm": 7.056453227996826, "learning_rate": 6.6467513069454825e-06, "loss": 0.3933, "step": 2620 },
    { "epoch": 3.13, "grad_norm": 1.5562771558761597, "learning_rate": 6.460044809559373e-06, "loss": 0.3659, "step": 2630 },
    { "epoch": 3.14, "grad_norm": 12.124537467956543, "learning_rate": 6.273338312173265e-06, "loss": 0.417, "step": 2640 },
    { "epoch": 3.14, "grad_norm": 8.535222053527832, "learning_rate": 6.086631814787154e-06, "loss": 0.4438, "step": 2650 },
    { "epoch": 3.14, "grad_norm": 14.003257751464844, "learning_rate": 5.899925317401046e-06, "loss": 0.4157, "step": 2660 },
    { "epoch": 3.15, "grad_norm": 8.428786277770996, "learning_rate": 5.7132188200149364e-06, "loss": 0.3332, "step": 2670 },
    { "epoch": 3.15, "grad_norm": 7.349361896514893, "learning_rate": 5.526512322628828e-06, "loss": 0.354, "step": 2680 },
    { "epoch": 3.15, "grad_norm": 9.289456367492676, "learning_rate": 5.3398058252427185e-06, "loss": 0.4407, "step": 2690 },
    { "epoch": 3.16, "grad_norm": 6.27097225189209, "learning_rate": 5.153099327856609e-06, "loss": 0.2596, "step": 2700 },
    { "epoch": 3.16, "grad_norm": 15.719304084777832, "learning_rate": 4.966392830470501e-06, "loss": 0.5207, "step": 2710 },
    { "epoch": 3.16, "grad_norm": 7.534648895263672, "learning_rate": 4.779686333084392e-06, "loss": 0.3938, "step": 2720 },
    { "epoch": 3.17, "grad_norm": 8.981090545654297, "learning_rate": 4.592979835698283e-06, "loss": 0.248, "step": 2730 },
    { "epoch": 3.17, "grad_norm": 11.375850677490234, "learning_rate": 4.406273338312174e-06, "loss": 0.2799, "step": 2740 },
    { "epoch": 3.17, "grad_norm": 12.491207122802734, "learning_rate": 4.219566840926065e-06, "loss": 0.4839, "step": 2750 },
    { "epoch": 3.18, "grad_norm": 5.428218364715576, "learning_rate": 4.032860343539955e-06, "loss": 0.4289, "step": 2760 },
    { "epoch": 3.18, "grad_norm": 7.776615142822266, "learning_rate": 3.846153846153847e-06, "loss": 0.4563, "step": 2770 },
    { "epoch": 3.18, "grad_norm": 4.943689823150635, "learning_rate": 3.6594473487677374e-06, "loss": 0.3819, "step": 2780 },
    { "epoch": 3.19, "grad_norm": 10.917070388793945, "learning_rate": 3.4727408513816284e-06, "loss": 0.4038, "step": 2790 },
    { "epoch": 3.19, "grad_norm": 7.394994258880615, "learning_rate": 3.2860343539955195e-06, "loss": 0.4088, "step": 2800 },
    { "epoch": 3.19, "grad_norm": 6.575070381164551, "learning_rate": 3.09932785660941e-06, "loss": 0.6557, "step": 2810 },
    { "epoch": 3.2, "grad_norm": 14.510526657104492, "learning_rate": 2.912621359223301e-06, "loss": 0.2805, "step": 2820 },
    { "epoch": 3.2, "grad_norm": 12.12006950378418, "learning_rate": 2.725914861837192e-06, "loss": 0.4065, "step": 2830 },
    { "epoch": 3.2, "grad_norm": 5.202164173126221, "learning_rate": 2.539208364451083e-06, "loss": 0.3511, "step": 2840 },
    { "epoch": 3.21, "grad_norm": 13.489916801452637, "learning_rate": 2.352501867064974e-06, "loss": 0.3357, "step": 2850 },
    { "epoch": 3.21, "grad_norm": 12.779403686523438, "learning_rate": 2.165795369678865e-06, "loss": 0.367, "step": 2860 },
    { "epoch": 3.21, "grad_norm": 13.837797164916992, "learning_rate": 1.979088872292756e-06, "loss": 0.4565, "step": 2870 },
    { "epoch": 3.22, "grad_norm": 4.743872165679932, "learning_rate": 1.7923823749066467e-06, "loss": 0.3372, "step": 2880 },
    { "epoch": 3.22, "grad_norm": 8.704170227050781, "learning_rate": 1.6056758775205377e-06, "loss": 0.3423, "step": 2890 },
    { "epoch": 3.22, "grad_norm": 2.67067551612854, "learning_rate": 1.4189693801344288e-06, "loss": 0.3444, "step": 2900 },
    { "epoch": 3.23, "grad_norm": 11.893070220947266, "learning_rate": 1.2322628827483198e-06, "loss": 0.2581, "step": 2910 },
    { "epoch": 3.23, "grad_norm": 4.046337127685547, "learning_rate": 1.0455563853622106e-06, "loss": 0.3214, "step": 2920 },
    { "epoch": 3.23, "grad_norm": 11.184314727783203, "learning_rate": 8.588498879761017e-07, "loss": 0.3076, "step": 2930 },
    { "epoch": 3.24, "grad_norm": 12.178153991699219, "learning_rate": 6.721433905899926e-07, "loss": 0.2403, "step": 2940 },
    { "epoch": 3.24, "grad_norm": 1.377287745475769, "learning_rate": 4.854368932038835e-07, "loss": 0.2376, "step": 2950 },
    { "epoch": 3.24, "grad_norm": 14.243566513061523, "learning_rate": 2.987303958177745e-07, "loss": 0.3316, "step": 2960 },
    { "epoch": 3.25, "grad_norm": 4.011444091796875, "learning_rate": 1.1202389843166542e-07, "loss": 0.3262, "step": 2970 },
    { "epoch": 3.25, "eval_accuracy": 0.7328859060402685, "eval_loss": 0.7344102263450623, "eval_runtime": 1687.1353, "eval_samples_per_second": 0.442, "eval_steps_per_second": 0.056, "step": 2976 },
    { "epoch": 3.25, "step": 2976, "total_flos": 6.0945535892058145e+19, "train_loss": 0.7349759022234589, "train_runtime": 150955.4956, "train_samples_per_second": 0.158, "train_steps_per_second": 0.02 },
    { "epoch": 3.25, "eval_accuracy": 0.7043010752688172, "eval_loss": 0.8327615261077881, "eval_runtime": 1693.8047, "eval_samples_per_second": 0.439, "eval_steps_per_second": 0.055, "step": 2976 },
    { "epoch": 3.25, "eval_accuracy": 0.7043010752688172, "eval_loss": 0.8327615261077881, "eval_runtime": 1683.6481, "eval_samples_per_second": 0.442, "eval_steps_per_second": 0.055, "step": 2976 }
  ],
  "logging_steps": 10,
  "max_steps": 2976,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 500,
  "total_flos": 6.0945535892058145e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}