{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1624,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006157635467980296,
      "grad_norm": 39.11833832570066,
      "learning_rate": 1.8404907975460124e-08,
      "loss": 1.0171,
      "step": 1
    },
    {
      "epoch": 0.006157635467980296,
      "grad_norm": 97.51191996179428,
      "learning_rate": 1.8404907975460122e-07,
      "loss": 1.0435,
      "step": 10
    },
    {
      "epoch": 0.012315270935960592,
      "grad_norm": 33.104108639558106,
      "learning_rate": 3.6809815950920245e-07,
      "loss": 1.0398,
      "step": 20
    },
    {
      "epoch": 0.01847290640394089,
      "grad_norm": 25.392368172808187,
      "learning_rate": 5.521472392638037e-07,
      "loss": 1.0188,
      "step": 30
    },
    {
      "epoch": 0.024630541871921183,
      "grad_norm": 2.5252784333814122,
      "learning_rate": 7.361963190184049e-07,
      "loss": 0.9958,
      "step": 40
    },
    {
      "epoch": 0.03078817733990148,
      "grad_norm": 1.4934120741666812,
      "learning_rate": 9.202453987730062e-07,
      "loss": 0.9789,
      "step": 50
    },
    {
      "epoch": 0.03694581280788178,
      "grad_norm": 10.681983660964159,
      "learning_rate": 1.1042944785276073e-06,
      "loss": 0.9309,
      "step": 60
    },
    {
      "epoch": 0.04310344827586207,
      "grad_norm": 1.1792371247881757,
      "learning_rate": 1.2883435582822088e-06,
      "loss": 0.9487,
      "step": 70
    },
    {
      "epoch": 0.04926108374384237,
      "grad_norm": 3.5967827346653807,
      "learning_rate": 1.4723926380368098e-06,
      "loss": 0.9392,
      "step": 80
    },
    {
      "epoch": 0.05541871921182266,
      "grad_norm": 1.3027564465976336,
      "learning_rate": 1.6564417177914112e-06,
      "loss": 0.9278,
      "step": 90
    },
    {
      "epoch": 0.06157635467980296,
      "grad_norm": 1.868580833085548,
      "learning_rate": 1.8404907975460124e-06,
      "loss": 0.9329,
      "step": 100
    },
    {
      "epoch": 0.06773399014778325,
      "grad_norm": 8.034382889984798,
      "learning_rate": 2.0245398773006137e-06,
      "loss": 0.9353,
      "step": 110
    },
    {
      "epoch": 0.07389162561576355,
      "grad_norm": 2.971665740965309,
      "learning_rate": 2.2085889570552147e-06,
      "loss": 0.9192,
      "step": 120
    },
    {
      "epoch": 0.08004926108374384,
      "grad_norm": 3.681564626469473,
      "learning_rate": 2.3926380368098157e-06,
      "loss": 0.9046,
      "step": 130
    },
    {
      "epoch": 0.08620689655172414,
      "grad_norm": 4.346804226811641,
      "learning_rate": 2.5766871165644175e-06,
      "loss": 0.9297,
      "step": 140
    },
    {
      "epoch": 0.09236453201970443,
      "grad_norm": 1.8357015303994761,
      "learning_rate": 2.7607361963190186e-06,
      "loss": 0.906,
      "step": 150
    },
    {
      "epoch": 0.09852216748768473,
      "grad_norm": 1.1670278503111207,
      "learning_rate": 2.9447852760736196e-06,
      "loss": 0.9123,
      "step": 160
    },
    {
      "epoch": 0.10467980295566502,
      "grad_norm": 1.7533219735333505,
      "learning_rate": 2.9998300784514776e-06,
      "loss": 0.9075,
      "step": 170
    },
    {
      "epoch": 0.11083743842364532,
      "grad_norm": 1.9794621815752764,
      "learning_rate": 2.9989979023179235e-06,
      "loss": 0.9104,
      "step": 180
    },
    {
      "epoch": 0.11699507389162561,
      "grad_norm": 0.9849983349999529,
      "learning_rate": 2.9974726458049776e-06,
      "loss": 0.8953,
      "step": 190
    },
    {
      "epoch": 0.12315270935960591,
      "grad_norm": 1.0588037960942727,
      "learning_rate": 2.9952550141340154e-06,
      "loss": 0.8935,
      "step": 200
    },
    {
      "epoch": 0.12931034482758622,
      "grad_norm": 1.2914329429720137,
      "learning_rate": 2.9923460326547038e-06,
      "loss": 0.898,
      "step": 210
    },
    {
      "epoch": 0.1354679802955665,
      "grad_norm": 1.462714450533501,
      "learning_rate": 2.988747046370918e-06,
      "loss": 0.8964,
      "step": 220
    },
    {
      "epoch": 0.1416256157635468,
      "grad_norm": 1.5479755472032886,
      "learning_rate": 2.984459719318862e-06,
      "loss": 0.901,
      "step": 230
    },
    {
      "epoch": 0.1477832512315271,
      "grad_norm": 1.0726588199016611,
      "learning_rate": 2.9794860337976802e-06,
      "loss": 0.8898,
      "step": 240
    },
    {
      "epoch": 0.1539408866995074,
      "grad_norm": 1.2064222174518724,
      "learning_rate": 2.9738282894529177e-06,
      "loss": 0.8906,
      "step": 250
    },
    {
      "epoch": 0.16009852216748768,
      "grad_norm": 1.0779645832395361,
      "learning_rate": 2.96748910221325e-06,
      "loss": 0.8956,
      "step": 260
    },
    {
      "epoch": 0.16625615763546797,
      "grad_norm": 1.350106799679849,
      "learning_rate": 2.9604714030809755e-06,
      "loss": 0.8908,
      "step": 270
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 1.4528864294030424,
      "learning_rate": 2.9527784367768305e-06,
      "loss": 0.8918,
      "step": 280
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 1.093974116558019,
      "learning_rate": 2.9444137602397515e-06,
      "loss": 0.9045,
      "step": 290
    },
    {
      "epoch": 0.18472906403940886,
      "grad_norm": 1.1335514527922086,
      "learning_rate": 2.935381240982281e-06,
      "loss": 0.8919,
      "step": 300
    },
    {
      "epoch": 0.19088669950738915,
      "grad_norm": 1.1578136051328392,
      "learning_rate": 2.9256850553023724e-06,
      "loss": 0.8932,
      "step": 310
    },
    {
      "epoch": 0.19704433497536947,
      "grad_norm": 1.2524948494126782,
      "learning_rate": 2.9153296863524315e-06,
      "loss": 0.8779,
      "step": 320
    },
    {
      "epoch": 0.20320197044334976,
      "grad_norm": 0.9562337520138312,
      "learning_rate": 2.9043199220664704e-06,
      "loss": 0.8848,
      "step": 330
    },
    {
      "epoch": 0.20935960591133004,
      "grad_norm": 1.014920165575569,
      "learning_rate": 2.8926608529463473e-06,
      "loss": 0.891,
      "step": 340
    },
    {
      "epoch": 0.21551724137931033,
      "grad_norm": 1.2764221494335906,
      "learning_rate": 2.880357869708111e-06,
      "loss": 0.9045,
      "step": 350
    },
    {
      "epoch": 0.22167487684729065,
      "grad_norm": 0.9543614414538709,
      "learning_rate": 2.8674166607895357e-06,
      "loss": 0.886,
      "step": 360
    },
    {
      "epoch": 0.22783251231527094,
      "grad_norm": 1.0200791854817908,
      "learning_rate": 2.85384320972e-06,
      "loss": 0.8798,
      "step": 370
    },
    {
      "epoch": 0.23399014778325122,
      "grad_norm": 0.9423219737843089,
      "learning_rate": 2.839643792353928e-06,
      "loss": 0.888,
      "step": 380
    },
    {
      "epoch": 0.24014778325123154,
      "grad_norm": 1.807370938563018,
      "learning_rate": 2.824824973969069e-06,
      "loss": 0.9058,
      "step": 390
    },
    {
      "epoch": 0.24630541871921183,
      "grad_norm": 0.9624203166584838,
      "learning_rate": 2.8093936062309614e-06,
      "loss": 0.88,
      "step": 400
    },
    {
      "epoch": 0.2524630541871921,
      "grad_norm": 1.0446763066450997,
      "learning_rate": 2.7933568240249776e-06,
      "loss": 0.8916,
      "step": 410
    },
    {
      "epoch": 0.25862068965517243,
      "grad_norm": 1.031965401372882,
      "learning_rate": 2.776722042157421e-06,
      "loss": 0.8937,
      "step": 420
    },
    {
      "epoch": 0.2647783251231527,
      "grad_norm": 1.0397880014843512,
      "learning_rate": 2.7594969519271988e-06,
      "loss": 0.8802,
      "step": 430
    },
    {
      "epoch": 0.270935960591133,
      "grad_norm": 0.9409991870080029,
      "learning_rate": 2.7416895175696533e-06,
      "loss": 0.8952,
      "step": 440
    },
    {
      "epoch": 0.2770935960591133,
      "grad_norm": 0.9068233549186512,
      "learning_rate": 2.723307972574199e-06,
      "loss": 0.8951,
      "step": 450
    },
    {
      "epoch": 0.2832512315270936,
      "grad_norm": 1.0260952139368371,
      "learning_rate": 2.7043608158774645e-06,
      "loss": 0.8848,
      "step": 460
    },
    {
      "epoch": 0.2894088669950739,
      "grad_norm": 0.8928699960345075,
      "learning_rate": 2.684856807933706e-06,
      "loss": 0.8844,
      "step": 470
    },
    {
      "epoch": 0.2955665024630542,
      "grad_norm": 0.9088682466856005,
      "learning_rate": 2.664804966664298e-06,
      "loss": 0.8935,
      "step": 480
    },
    {
      "epoch": 0.3017241379310345,
      "grad_norm": 0.94527180620086,
      "learning_rate": 2.6442145632881894e-06,
      "loss": 0.8908,
      "step": 490
    },
    {
      "epoch": 0.3078817733990148,
      "grad_norm": 0.9455656466636171,
      "learning_rate": 2.623095118035235e-06,
      "loss": 0.8972,
      "step": 500
    },
    {
      "epoch": 0.31403940886699505,
      "grad_norm": 0.9278270508038465,
      "learning_rate": 2.601456395744403e-06,
      "loss": 0.8963,
      "step": 510
    },
    {
      "epoch": 0.32019704433497537,
      "grad_norm": 1.0267218503341182,
      "learning_rate": 2.579308401348876e-06,
      "loss": 0.8982,
      "step": 520
    },
    {
      "epoch": 0.3263546798029557,
      "grad_norm": 0.8668867739209858,
      "learning_rate": 2.556661375250149e-06,
      "loss": 0.8826,
      "step": 530
    },
    {
      "epoch": 0.33251231527093594,
      "grad_norm": 0.9466156644717508,
      "learning_rate": 2.533525788583248e-06,
      "loss": 0.8824,
      "step": 540
    },
    {
      "epoch": 0.33866995073891626,
      "grad_norm": 1.079466437044084,
      "learning_rate": 2.509912338375275e-06,
      "loss": 0.8756,
      "step": 550
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 1.5912074264549028,
      "learning_rate": 2.4858319425994978e-06,
      "loss": 0.894,
      "step": 560
    },
    {
      "epoch": 0.35098522167487683,
      "grad_norm": 0.9313999461191238,
      "learning_rate": 2.4612957351272963e-06,
      "loss": 0.8895,
      "step": 570
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 1.1801770634441218,
      "learning_rate": 2.4363150605802704e-06,
      "loss": 0.8665,
      "step": 580
    },
    {
      "epoch": 0.3633004926108374,
      "grad_norm": 1.0068892948188721,
      "learning_rate": 2.41090146908492e-06,
      "loss": 0.876,
      "step": 590
    },
    {
      "epoch": 0.3694581280788177,
      "grad_norm": 1.0826722320337905,
      "learning_rate": 2.385066710932294e-06,
      "loss": 0.8673,
      "step": 600
    },
    {
      "epoch": 0.37561576354679804,
      "grad_norm": 0.8776518626640517,
      "learning_rate": 2.3588227311451007e-06,
      "loss": 0.871,
      "step": 610
    },
    {
      "epoch": 0.3817733990147783,
      "grad_norm": 0.8970209724037471,
      "learning_rate": 2.3321816639547747e-06,
      "loss": 0.8754,
      "step": 620
    },
    {
      "epoch": 0.3879310344827586,
      "grad_norm": 0.865408238823646,
      "learning_rate": 2.305155827191066e-06,
      "loss": 0.8828,
      "step": 630
    },
    {
      "epoch": 0.39408866995073893,
      "grad_norm": 0.9191133091733539,
      "learning_rate": 2.2777577165867354e-06,
      "loss": 0.893,
      "step": 640
    },
    {
      "epoch": 0.4002463054187192,
      "grad_norm": 1.0084087069366179,
      "learning_rate": 2.25e-06,
      "loss": 0.8707,
      "step": 650
    },
    {
      "epoch": 0.4064039408866995,
      "grad_norm": 0.9641835901570172,
      "learning_rate": 2.2218955115573864e-06,
      "loss": 0.8941,
      "step": 660
    },
    {
      "epoch": 0.4125615763546798,
      "grad_norm": 1.0960250718373252,
      "learning_rate": 2.1934572457197163e-06,
      "loss": 0.8801,
      "step": 670
    },
    {
      "epoch": 0.4187192118226601,
      "grad_norm": 0.9620003659319739,
      "learning_rate": 2.164698351273952e-06,
      "loss": 0.8838,
      "step": 680
    },
    {
      "epoch": 0.4248768472906404,
      "grad_norm": 0.945391462529571,
      "learning_rate": 2.1356321252536947e-06,
      "loss": 0.8773,
      "step": 690
    },
    {
      "epoch": 0.43103448275862066,
      "grad_norm": 0.9952181170766936,
      "learning_rate": 2.10627200679113e-06,
      "loss": 0.8809,
      "step": 700
    },
    {
      "epoch": 0.437192118226601,
      "grad_norm": 1.2866159652764533,
      "learning_rate": 2.0766315709032837e-06,
      "loss": 0.8778,
      "step": 710
    },
    {
      "epoch": 0.4433497536945813,
      "grad_norm": 0.9256729280886802,
      "learning_rate": 2.046724522215437e-06,
      "loss": 0.8711,
      "step": 720
    },
    {
      "epoch": 0.44950738916256155,
      "grad_norm": 0.830245767183929,
      "learning_rate": 2.016564688624627e-06,
      "loss": 0.8848,
      "step": 730
    },
    {
      "epoch": 0.45566502463054187,
      "grad_norm": 0.9400170628704059,
      "learning_rate": 1.9861660149061435e-06,
      "loss": 0.8746,
      "step": 740
    },
    {
      "epoch": 0.4618226600985222,
      "grad_norm": 0.9114604227226025,
      "learning_rate": 1.9555425562659878e-06,
      "loss": 0.8839,
      "step": 750
    },
    {
      "epoch": 0.46798029556650245,
      "grad_norm": 0.9636472157159227,
      "learning_rate": 1.924708471842276e-06,
      "loss": 0.8644,
      "step": 760
    },
    {
      "epoch": 0.47413793103448276,
      "grad_norm": 0.8916431000744512,
      "learning_rate": 1.8936780181585799e-06,
      "loss": 0.8749,
      "step": 770
    },
    {
      "epoch": 0.4802955665024631,
      "grad_norm": 0.9584025191544591,
      "learning_rate": 1.86246554253225e-06,
      "loss": 0.886,
      "step": 780
    },
    {
      "epoch": 0.48645320197044334,
      "grad_norm": 1.1713799337704054,
      "learning_rate": 1.831085476440753e-06,
      "loss": 0.8738,
      "step": 790
    },
    {
      "epoch": 0.49261083743842365,
      "grad_norm": 1.4368869246034384,
      "learning_rate": 1.7995523288490959e-06,
      "loss": 0.8848,
      "step": 800
    },
    {
      "epoch": 0.4987684729064039,
      "grad_norm": 0.9684556270804614,
      "learning_rate": 1.7678806795014293e-06,
      "loss": 0.8806,
      "step": 810
    },
    {
      "epoch": 0.5049261083743842,
      "grad_norm": 0.931294683241216,
      "learning_rate": 1.7360851721799163e-06,
      "loss": 0.8609,
      "step": 820
    },
    {
      "epoch": 0.5110837438423645,
      "grad_norm": 0.9532178001849707,
      "learning_rate": 1.7041805079340006e-06,
      "loss": 0.871,
      "step": 830
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.91289729076867,
      "learning_rate": 1.6721814382831911e-06,
      "loss": 0.8706,
      "step": 840
    },
    {
      "epoch": 0.5233990147783252,
      "grad_norm": 0.8701261925434093,
      "learning_rate": 1.6401027583965135e-06,
      "loss": 0.874,
      "step": 850
    },
    {
      "epoch": 0.5295566502463054,
      "grad_norm": 1.3157172918251052,
      "learning_rate": 1.6079593002517785e-06,
      "loss": 0.8972,
      "step": 860
    },
    {
      "epoch": 0.5357142857142857,
      "grad_norm": 0.9460694314665482,
      "learning_rate": 1.575765925777834e-06,
      "loss": 0.8705,
      "step": 870
    },
    {
      "epoch": 0.541871921182266,
      "grad_norm": 0.8663250399265648,
      "learning_rate": 1.543537519982963e-06,
      "loss": 0.8757,
      "step": 880
    },
    {
      "epoch": 0.5480295566502463,
      "grad_norm": 0.8476656469038841,
      "learning_rate": 1.5112889840726194e-06,
      "loss": 0.8821,
      "step": 890
    },
    {
      "epoch": 0.5541871921182266,
      "grad_norm": 0.9793899276197807,
      "learning_rate": 1.4790352285596656e-06,
      "loss": 0.867,
      "step": 900
    },
    {
      "epoch": 0.5603448275862069,
      "grad_norm": 0.9471504761102012,
      "learning_rate": 1.4467911663703118e-06,
      "loss": 0.8669,
      "step": 910
    },
    {
      "epoch": 0.5665024630541872,
      "grad_norm": 1.0080484253850497,
      "learning_rate": 1.4145717059489405e-06,
      "loss": 0.8782,
      "step": 920
    },
    {
      "epoch": 0.5726600985221675,
      "grad_norm": 1.1941676655278535,
      "learning_rate": 1.3823917443649994e-06,
      "loss": 0.8543,
      "step": 930
    },
    {
      "epoch": 0.5788177339901478,
      "grad_norm": 1.155321605934009,
      "learning_rate": 1.3502661604251562e-06,
      "loss": 0.8751,
      "step": 940
    },
    {
      "epoch": 0.5849753694581281,
      "grad_norm": 1.0349420537193152,
      "learning_rate": 1.3182098077938954e-06,
      "loss": 0.863,
      "step": 950
    },
    {
      "epoch": 0.5911330049261084,
      "grad_norm": 0.8668489498799227,
      "learning_rate": 1.286237508125744e-06,
      "loss": 0.8889,
      "step": 960
    },
    {
      "epoch": 0.5972906403940886,
      "grad_norm": 1.1105014519516443,
      "learning_rate": 1.254364044212291e-06,
      "loss": 0.8817,
      "step": 970
    },
    {
      "epoch": 0.603448275862069,
      "grad_norm": 1.1510310259609142,
      "learning_rate": 1.2226041531471835e-06,
      "loss": 0.8686,
      "step": 980
    },
    {
      "epoch": 0.6096059113300493,
      "grad_norm": 0.9362548110546389,
      "learning_rate": 1.1909725195122443e-06,
      "loss": 0.8676,
      "step": 990
    },
    {
      "epoch": 0.6157635467980296,
      "grad_norm": 0.8788738119989281,
      "learning_rate": 1.1594837685878725e-06,
      "loss": 0.8523,
      "step": 1000
    },
    {
      "epoch": 0.6219211822660099,
      "grad_norm": 0.887194098681397,
      "learning_rate": 1.1281524595908653e-06,
      "loss": 0.8762,
      "step": 1010
    },
    {
      "epoch": 0.6280788177339901,
      "grad_norm": 0.8612235803063835,
      "learning_rate": 1.0969930789427798e-06,
      "loss": 0.877,
      "step": 1020
    },
    {
      "epoch": 0.6342364532019704,
      "grad_norm": 0.8734309546117548,
      "learning_rate": 1.0660200335719569e-06,
      "loss": 0.8761,
      "step": 1030
    },
    {
      "epoch": 0.6403940886699507,
      "grad_norm": 0.8944912206450915,
      "learning_rate": 1.0352476442522963e-06,
      "loss": 0.8847,
      "step": 1040
    },
    {
      "epoch": 0.646551724137931,
      "grad_norm": 0.8553821783678947,
      "learning_rate": 1.004690138981871e-06,
      "loss": 0.8731,
      "step": 1050
    },
    {
      "epoch": 0.6527093596059114,
      "grad_norm": 0.8628572848264044,
      "learning_rate": 9.74361646404432e-07,
      "loss": 0.8606,
      "step": 1060
    },
    {
      "epoch": 0.6588669950738916,
      "grad_norm": 0.8555513910364403,
      "learning_rate": 9.442761892768561e-07,
      "loss": 0.8656,
      "step": 1070
    },
    {
      "epoch": 0.6650246305418719,
      "grad_norm": 0.9062019842589142,
      "learning_rate": 9.144476779855462e-07,
      "loss": 0.8892,
      "step": 1080
    },
    {
      "epoch": 0.6711822660098522,
      "grad_norm": 0.8327590617691879,
      "learning_rate": 8.848899041147947e-07,
      "loss": 0.8746,
      "step": 1090
    },
    {
      "epoch": 0.6773399014778325,
      "grad_norm": 0.8338019269330827,
      "learning_rate": 8.556165340700687e-07,
      "loss": 0.8615,
      "step": 1100
    },
    {
      "epoch": 0.6834975369458128,
      "grad_norm": 0.8295491614862206,
      "learning_rate": 8.266411027591801e-07,
      "loss": 0.8732,
      "step": 1110
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.9395669949159903,
      "learning_rate": 7.979770073342484e-07,
      "loss": 0.8729,
      "step": 1120
    },
    {
      "epoch": 0.6958128078817734,
      "grad_norm": 0.867733164384384,
      "learning_rate": 7.696375009973643e-07,
      "loss": 0.8608,
      "step": 1130
    },
    {
      "epoch": 0.7019704433497537,
      "grad_norm": 0.9064838208046303,
      "learning_rate": 7.41635686872804e-07,
      "loss": 0.8765,
      "step": 1140
    },
    {
      "epoch": 0.708128078817734,
      "grad_norm": 0.9195681549910459,
      "learning_rate": 7.139845119486371e-07,
      "loss": 0.8473,
      "step": 1150
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.924047168481818,
      "learning_rate": 6.866967610905234e-07,
      "loss": 0.8793,
      "step": 1160
    },
    {
      "epoch": 0.7204433497536946,
      "grad_norm": 0.8440847823992839,
      "learning_rate": 6.597850511304739e-07,
      "loss": 0.8602,
      "step": 1170
    },
    {
      "epoch": 0.7266009852216748,
      "grad_norm": 0.9108312916499829,
      "learning_rate": 6.332618250332988e-07,
      "loss": 0.854,
      "step": 1180
    },
    {
      "epoch": 0.7327586206896551,
      "grad_norm": 0.8778675133667185,
      "learning_rate": 6.071393461434488e-07,
      "loss": 0.8797,
      "step": 1190
    },
    {
      "epoch": 0.7389162561576355,
      "grad_norm": 0.862025362499502,
      "learning_rate": 5.814296925149026e-07,
      "loss": 0.876,
      "step": 1200
    },
    {
      "epoch": 0.7450738916256158,
      "grad_norm": 0.9153055267247704,
      "learning_rate": 5.561447513267311e-07,
      "loss": 0.8714,
      "step": 1210
    },
    {
      "epoch": 0.7512315270935961,
      "grad_norm": 0.8294104219179396,
      "learning_rate": 5.312962133869093e-07,
      "loss": 0.8659,
      "step": 1220
    },
    {
      "epoch": 0.7573891625615764,
      "grad_norm": 0.8628035692028492,
      "learning_rate": 5.068955677269281e-07,
      "loss": 0.8653,
      "step": 1230
    },
    {
      "epoch": 0.7635467980295566,
      "grad_norm": 0.8789751770218951,
      "learning_rate": 4.829540962896927e-07,
      "loss": 0.8787,
      "step": 1240
    },
    {
      "epoch": 0.7697044334975369,
      "grad_norm": 0.880811469553608,
      "learning_rate": 4.594828687131814e-07,
      "loss": 0.8719,
      "step": 1250
    },
    {
      "epoch": 0.7758620689655172,
      "grad_norm": 0.8589600352283664,
      "learning_rate": 4.36492737212255e-07,
      "loss": 0.8649,
      "step": 1260
    },
    {
      "epoch": 0.7820197044334976,
      "grad_norm": 1.97188374324455,
      "learning_rate": 4.13994331561004e-07,
      "loss": 0.8559,
      "step": 1270
    },
    {
      "epoch": 0.7881773399014779,
      "grad_norm": 1.2314345966965359,
      "learning_rate": 3.9199805417793833e-07,
      "loss": 0.866,
      "step": 1280
    },
    {
      "epoch": 0.7943349753694581,
      "grad_norm": 0.8564385884815968,
      "learning_rate": 3.705140753162973e-07,
      "loss": 0.8767,
      "step": 1290
    },
    {
      "epoch": 0.8004926108374384,
      "grad_norm": 0.8935220357617403,
      "learning_rate": 3.495523283617106e-07,
      "loss": 0.8713,
      "step": 1300
    },
    {
      "epoch": 0.8066502463054187,
      "grad_norm": 1.0237618875404009,
      "learning_rate": 3.2912250523937e-07,
      "loss": 0.8718,
      "step": 1310
    },
    {
      "epoch": 0.812807881773399,
      "grad_norm": 0.8442304790394727,
      "learning_rate": 3.092340519328474e-07,
      "loss": 0.8864,
      "step": 1320
    },
    {
      "epoch": 0.8189655172413793,
      "grad_norm": 1.4691847919498742,
      "learning_rate": 2.8989616411662826e-07,
      "loss": 0.8597,
      "step": 1330
    },
    {
      "epoch": 0.8251231527093597,
      "grad_norm": 0.8880158508210901,
      "learning_rate": 2.7111778290437465e-07,
      "loss": 0.8675,
      "step": 1340
    },
    {
      "epoch": 0.8312807881773399,
      "grad_norm": 0.8439570654706303,
      "learning_rate": 2.529075907148916e-07,
      "loss": 0.8701,
      "step": 1350
    },
    {
      "epoch": 0.8374384236453202,
      "grad_norm": 0.9041155686907367,
      "learning_rate": 2.352740072577002e-07,
      "loss": 0.8668,
      "step": 1360
    },
    {
      "epoch": 0.8435960591133005,
      "grad_norm": 0.9150682000805254,
      "learning_rate": 2.182251856400826e-07,
      "loss": 0.8594,
      "step": 1370
    },
    {
      "epoch": 0.8497536945812808,
      "grad_norm": 0.8626796470648748,
      "learning_rate": 2.0176900859738906e-07,
      "loss": 0.8839,
      "step": 1380
    },
    {
      "epoch": 0.8559113300492611,
      "grad_norm": 0.8349539026741156,
      "learning_rate": 1.8591308484835833e-07,
      "loss": 0.8733,
      "step": 1390
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 0.9059703603517487,
      "learning_rate": 1.706647455771302e-07,
      "loss": 0.8739,
      "step": 1400
    },
    {
      "epoch": 0.8682266009852216,
      "grad_norm": 0.8855388011148819,
      "learning_rate": 1.56031041043582e-07,
      "loss": 0.879,
      "step": 1410
    },
    {
      "epoch": 0.874384236453202,
      "grad_norm": 0.8551754428690189,
      "learning_rate": 1.4201873732355343e-07,
      "loss": 0.8794,
      "step": 1420
    },
    {
      "epoch": 0.8805418719211823,
      "grad_norm": 0.9468709892180046,
      "learning_rate": 1.2863431318046615e-07,
      "loss": 0.8931,
      "step": 1430
    },
    {
      "epoch": 0.8866995073891626,
      "grad_norm": 0.8556767525548026,
      "learning_rate": 1.158839570697861e-07,
      "loss": 0.8777,
      "step": 1440
    },
    {
      "epoch": 0.8928571428571429,
      "grad_norm": 0.864133807831794,
      "learning_rate": 1.0377356427771567e-07,
      "loss": 0.8627,
      "step": 1450
    },
    {
      "epoch": 0.8990147783251231,
      "grad_norm": 0.8989986768130477,
      "learning_rate": 9.230873419543373e-08,
      "loss": 0.8616,
      "step": 1460
    },
    {
      "epoch": 0.9051724137931034,
      "grad_norm": 0.8711608226459443,
      "learning_rate": 8.14947677301468e-08,
      "loss": 0.8665,
      "step": 1470
    },
    {
      "epoch": 0.9113300492610837,
      "grad_norm": 0.8224276565204746,
      "learning_rate": 7.133666485414858e-08,
      "loss": 0.8642,
      "step": 1480
    },
    {
      "epoch": 0.9174876847290641,
      "grad_norm": 0.841920633489749,
      "learning_rate": 6.183912229302135e-08,
      "loss": 0.8646,
      "step": 1490
    },
    {
      "epoch": 0.9236453201970444,
      "grad_norm": 0.8581528480622548,
      "learning_rate": 5.3006531354045596e-08,
      "loss": 0.8671,
      "step": 1500
    },
    {
      "epoch": 0.9298029556650246,
      "grad_norm": 0.808232107109127,
      "learning_rate": 4.4842975895823926e-08,
      "loss": 0.8749,
      "step": 1510
    },
    {
      "epoch": 0.9359605911330049,
      "grad_norm": 0.8903690610551493,
      "learning_rate": 3.7352230440058534e-08,
      "loss": 0.8747,
      "step": 1520
    },
    {
      "epoch": 0.9421182266009852,
      "grad_norm": 0.8304916684961995,
      "learning_rate": 3.053775842635453e-08,
      "loss": 0.8766,
      "step": 1530
    },
    {
      "epoch": 0.9482758620689655,
      "grad_norm": 0.8415141188924126,
      "learning_rate": 2.4402710610854582e-08,
      "loss": 0.8622,
      "step": 1540
    },
    {
      "epoch": 0.9544334975369458,
      "grad_norm": 0.9097293475369945,
      "learning_rate": 1.894992360944786e-08,
      "loss": 0.8555,
      "step": 1550
    },
    {
      "epoch": 0.9605911330049262,
      "grad_norm": 0.8268939856081935,
      "learning_rate": 1.4181918586225029e-08,
      "loss": 0.8669,
      "step": 1560
    },
    {
      "epoch": 0.9667487684729064,
      "grad_norm": 0.8721871446970374,
      "learning_rate": 1.0100900087787357e-08,
      "loss": 0.8705,
      "step": 1570
    },
    {
      "epoch": 0.9729064039408867,
      "grad_norm": 0.8696350718282767,
      "learning_rate": 6.708755023946245e-09,
      "loss": 0.8761,
      "step": 1580
    },
    {
      "epoch": 0.979064039408867,
      "grad_norm": 0.8251276019284721,
      "learning_rate": 4.007051795287098e-09,
      "loss": 0.8688,
      "step": 1590
    },
    {
      "epoch": 0.9852216748768473,
      "grad_norm": 0.8414267048570288,
      "learning_rate": 1.997039568000403e-09,
      "loss": 0.8574,
      "step": 1600
    },
    {
      "epoch": 0.9913793103448276,
      "grad_norm": 0.8837062633704493,
      "learning_rate": 6.796476963130683e-10,
      "loss": 0.8721,
      "step": 1610
    },
    {
      "epoch": 0.9975369458128078,
      "grad_norm": 1.0761674680711684,
      "learning_rate": 5.548529279081338e-11,
      "loss": 0.8681,
      "step": 1620
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.8712338209152222,
      "eval_runtime": 531.6277,
      "eval_samples_per_second": 43.468,
      "eval_steps_per_second": 1.36,
      "step": 1624
    },
    {
      "epoch": 1.0,
      "step": 1624,
      "total_flos": 555967626641408.0,
      "train_loss": 0.8854433513186836,
      "train_runtime": 19158.9512,
      "train_samples_per_second": 10.849,
      "train_steps_per_second": 0.085
    }
  ],
  "logging_steps": 10,
  "max_steps": 1624,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 555967626641408.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}