{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.13508949679162446,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006754474839581223,
      "grad_norm": 0.32079648971557617,
      "learning_rate": 2e-05,
      "loss": 10.8491,
      "step": 1
    },
    {
      "epoch": 0.0006754474839581223,
      "eval_loss": 10.850319862365723,
      "eval_runtime": 2.6665,
      "eval_samples_per_second": 234.019,
      "eval_steps_per_second": 117.009,
      "step": 1
    },
    {
      "epoch": 0.0013508949679162446,
      "grad_norm": 0.32938364148139954,
      "learning_rate": 4e-05,
      "loss": 10.8557,
      "step": 2
    },
    {
      "epoch": 0.002026342451874367,
      "grad_norm": 0.3332866430282593,
      "learning_rate": 6e-05,
      "loss": 10.8487,
      "step": 3
    },
    {
      "epoch": 0.002701789935832489,
      "grad_norm": 0.3259293735027313,
      "learning_rate": 8e-05,
      "loss": 10.8485,
      "step": 4
    },
    {
      "epoch": 0.003377237419790611,
      "grad_norm": 0.33600881695747375,
      "learning_rate": 0.0001,
      "loss": 10.8459,
      "step": 5
    },
    {
      "epoch": 0.004052684903748734,
      "grad_norm": 0.3575391173362732,
      "learning_rate": 0.00012,
      "loss": 10.8517,
      "step": 6
    },
    {
      "epoch": 0.004728132387706856,
      "grad_norm": 0.33056777715682983,
      "learning_rate": 0.00014,
      "loss": 10.8459,
      "step": 7
    },
    {
      "epoch": 0.005403579871664978,
      "grad_norm": 0.35549676418304443,
      "learning_rate": 0.00016,
      "loss": 10.8443,
      "step": 8
    },
    {
      "epoch": 0.0060790273556231,
      "grad_norm": 0.34090492129325867,
      "learning_rate": 0.00018,
      "loss": 10.8468,
      "step": 9
    },
    {
      "epoch": 0.006754474839581222,
      "grad_norm": 0.34161046147346497,
      "learning_rate": 0.0002,
      "loss": 10.8432,
      "step": 10
    },
    {
      "epoch": 0.007429922323539345,
      "grad_norm": 0.3733224868774414,
      "learning_rate": 0.0001999863304992469,
      "loss": 10.8395,
      "step": 11
    },
    {
      "epoch": 0.008105369807497468,
      "grad_norm": 0.37448549270629883,
      "learning_rate": 0.00019994532573409262,
      "loss": 10.8345,
      "step": 12
    },
    {
      "epoch": 0.008780817291455589,
      "grad_norm": 0.39854905009269714,
      "learning_rate": 0.00019987699691483048,
      "loss": 10.8358,
      "step": 13
    },
    {
      "epoch": 0.009456264775413711,
      "grad_norm": 0.4029870629310608,
      "learning_rate": 0.00019978136272187747,
      "loss": 10.8312,
      "step": 14
    },
    {
      "epoch": 0.010131712259371834,
      "grad_norm": 0.3861730396747589,
      "learning_rate": 0.000199658449300667,
      "loss": 10.827,
      "step": 15
    },
    {
      "epoch": 0.010807159743329957,
      "grad_norm": 0.4259829819202423,
      "learning_rate": 0.00019950829025450114,
      "loss": 10.8214,
      "step": 16
    },
    {
      "epoch": 0.011482607227288078,
      "grad_norm": 0.4038204252719879,
      "learning_rate": 0.00019933092663536382,
      "loss": 10.8221,
      "step": 17
    },
    {
      "epoch": 0.0121580547112462,
      "grad_norm": 0.4385932385921478,
      "learning_rate": 0.00019912640693269752,
      "loss": 10.8208,
      "step": 18
    },
    {
      "epoch": 0.012833502195204323,
      "grad_norm": 0.4669629633426666,
      "learning_rate": 0.00019889478706014687,
      "loss": 10.8162,
      "step": 19
    },
    {
      "epoch": 0.013508949679162444,
      "grad_norm": 0.45796719193458557,
      "learning_rate": 0.00019863613034027224,
      "loss": 10.8103,
      "step": 20
    },
    {
      "epoch": 0.014184397163120567,
      "grad_norm": 0.46000435948371887,
      "learning_rate": 0.00019835050748723824,
      "loss": 10.8013,
      "step": 21
    },
    {
      "epoch": 0.01485984464707869,
      "grad_norm": 0.4528140425682068,
      "learning_rate": 0.00019803799658748094,
      "loss": 10.7983,
      "step": 22
    },
    {
      "epoch": 0.015535292131036813,
      "grad_norm": 0.5075253248214722,
      "learning_rate": 0.00019769868307835994,
      "loss": 10.7945,
      "step": 23
    },
    {
      "epoch": 0.016210739614994935,
      "grad_norm": 0.49094441533088684,
      "learning_rate": 0.0001973326597248006,
      "loss": 10.7873,
      "step": 24
    },
    {
      "epoch": 0.016886187098953058,
      "grad_norm": 0.4418376684188843,
      "learning_rate": 0.00019694002659393305,
      "loss": 10.7869,
      "step": 25
    },
    {
      "epoch": 0.017561634582911177,
      "grad_norm": 0.4543623924255371,
      "learning_rate": 0.00019652089102773488,
      "loss": 10.7771,
      "step": 26
    },
    {
      "epoch": 0.0182370820668693,
      "grad_norm": 0.514811098575592,
      "learning_rate": 0.00019607536761368484,
      "loss": 10.7833,
      "step": 27
    },
    {
      "epoch": 0.018912529550827423,
      "grad_norm": 0.5044733881950378,
      "learning_rate": 0.00019560357815343577,
      "loss": 10.7702,
      "step": 28
    },
    {
      "epoch": 0.019587977034785545,
      "grad_norm": 0.5136566162109375,
      "learning_rate": 0.00019510565162951537,
      "loss": 10.7711,
      "step": 29
    },
    {
      "epoch": 0.020263424518743668,
      "grad_norm": 0.5506525039672852,
      "learning_rate": 0.00019458172417006347,
      "loss": 10.7634,
      "step": 30
    },
    {
      "epoch": 0.02093887200270179,
      "grad_norm": 0.5305942893028259,
      "learning_rate": 0.00019403193901161613,
      "loss": 10.7524,
      "step": 31
    },
    {
      "epoch": 0.021614319486659914,
      "grad_norm": 0.5339845418930054,
      "learning_rate": 0.0001934564464599461,
      "loss": 10.7551,
      "step": 32
    },
    {
      "epoch": 0.022289766970618033,
      "grad_norm": 0.5734214186668396,
      "learning_rate": 0.00019285540384897073,
      "loss": 10.7459,
      "step": 33
    },
    {
      "epoch": 0.022965214454576156,
      "grad_norm": 0.5622735023498535,
      "learning_rate": 0.00019222897549773848,
      "loss": 10.7351,
      "step": 34
    },
    {
      "epoch": 0.02364066193853428,
      "grad_norm": 0.5738793015480042,
      "learning_rate": 0.00019157733266550575,
      "loss": 10.7313,
      "step": 35
    },
    {
      "epoch": 0.0243161094224924,
      "grad_norm": 0.560500979423523,
      "learning_rate": 0.00019090065350491626,
      "loss": 10.7251,
      "step": 36
    },
    {
      "epoch": 0.024991556906450524,
      "grad_norm": 0.5471153259277344,
      "learning_rate": 0.00019019912301329592,
      "loss": 10.7093,
      "step": 37
    },
    {
      "epoch": 0.025667004390408647,
      "grad_norm": 0.5753123760223389,
      "learning_rate": 0.00018947293298207635,
      "loss": 10.7115,
      "step": 38
    },
    {
      "epoch": 0.02634245187436677,
      "grad_norm": 0.5051413774490356,
      "learning_rate": 0.0001887222819443612,
      "loss": 10.7161,
      "step": 39
    },
    {
      "epoch": 0.02701789935832489,
      "grad_norm": 0.5438815355300903,
      "learning_rate": 0.0001879473751206489,
      "loss": 10.6966,
      "step": 40
    },
    {
      "epoch": 0.02769334684228301,
      "grad_norm": 0.5222083330154419,
      "learning_rate": 0.00018714842436272773,
      "loss": 10.6916,
      "step": 41
    },
    {
      "epoch": 0.028368794326241134,
      "grad_norm": 0.4598117172718048,
      "learning_rate": 0.00018632564809575742,
      "loss": 10.7039,
      "step": 42
    },
    {
      "epoch": 0.029044241810199257,
      "grad_norm": 0.517598032951355,
      "learning_rate": 0.0001854792712585539,
      "loss": 10.6926,
      "step": 43
    },
    {
      "epoch": 0.02971968929415738,
      "grad_norm": 0.5157806873321533,
      "learning_rate": 0.00018460952524209355,
      "loss": 10.685,
      "step": 44
    },
    {
      "epoch": 0.030395136778115502,
      "grad_norm": 0.4841625690460205,
      "learning_rate": 0.00018371664782625287,
      "loss": 10.6855,
      "step": 45
    },
    {
      "epoch": 0.031070584262073625,
      "grad_norm": 0.47345930337905884,
      "learning_rate": 0.00018280088311480201,
      "loss": 10.6667,
      "step": 46
    },
    {
      "epoch": 0.031746031746031744,
      "grad_norm": 0.3876435458660126,
      "learning_rate": 0.00018186248146866927,
      "loss": 10.699,
      "step": 47
    },
    {
      "epoch": 0.03242147922998987,
      "grad_norm": 0.43177780508995056,
      "learning_rate": 0.00018090169943749476,
      "loss": 10.6633,
      "step": 48
    },
    {
      "epoch": 0.03309692671394799,
      "grad_norm": 0.4080864489078522,
      "learning_rate": 0.0001799187996894925,
      "loss": 10.6654,
      "step": 49
    },
    {
      "epoch": 0.033772374197906116,
      "grad_norm": 0.40777599811553955,
      "learning_rate": 0.00017891405093963938,
      "loss": 10.659,
      "step": 50
    },
    {
      "epoch": 0.033772374197906116,
      "eval_loss": 10.658723831176758,
      "eval_runtime": 2.2172,
      "eval_samples_per_second": 281.442,
      "eval_steps_per_second": 140.721,
      "step": 50
    },
    {
      "epoch": 0.034447821681864235,
      "grad_norm": 0.37587037682533264,
      "learning_rate": 0.00017788772787621126,
      "loss": 10.6597,
      "step": 51
    },
    {
      "epoch": 0.035123269165822354,
      "grad_norm": 0.35869449377059937,
      "learning_rate": 0.00017684011108568592,
      "loss": 10.6479,
      "step": 52
    },
    {
      "epoch": 0.03579871664978048,
      "grad_norm": 0.3892292380332947,
      "learning_rate": 0.0001757714869760335,
      "loss": 10.651,
      "step": 53
    },
    {
      "epoch": 0.0364741641337386,
      "grad_norm": 0.372700572013855,
      "learning_rate": 0.0001746821476984154,
      "loss": 10.6507,
      "step": 54
    },
    {
      "epoch": 0.037149611617696726,
      "grad_norm": 0.3261224329471588,
      "learning_rate": 0.00017357239106731317,
      "loss": 10.6657,
      "step": 55
    },
    {
      "epoch": 0.037825059101654845,
      "grad_norm": 0.3149575889110565,
      "learning_rate": 0.00017244252047910892,
      "loss": 10.6366,
      "step": 56
    },
    {
      "epoch": 0.03850050658561297,
      "grad_norm": 0.3464124798774719,
      "learning_rate": 0.00017129284482913972,
      "loss": 10.6405,
      "step": 57
    },
    {
      "epoch": 0.03917595406957109,
      "grad_norm": 0.291887491941452,
      "learning_rate": 0.00017012367842724887,
      "loss": 10.6374,
      "step": 58
    },
    {
      "epoch": 0.03985140155352921,
      "grad_norm": 0.2717147767543793,
      "learning_rate": 0.0001689353409118566,
      "loss": 10.6367,
      "step": 59
    },
    {
      "epoch": 0.040526849037487336,
      "grad_norm": 0.27471521496772766,
      "learning_rate": 0.00016772815716257412,
      "loss": 10.6394,
      "step": 60
    },
    {
      "epoch": 0.041202296521445456,
      "grad_norm": 0.26729482412338257,
      "learning_rate": 0.0001665024572113848,
      "loss": 10.6301,
      "step": 61
    },
    {
      "epoch": 0.04187774400540358,
      "grad_norm": 0.25519606471061707,
      "learning_rate": 0.00016525857615241687,
      "loss": 10.6343,
      "step": 62
    },
    {
      "epoch": 0.0425531914893617,
      "grad_norm": 0.25052082538604736,
      "learning_rate": 0.00016399685405033167,
      "loss": 10.6282,
      "step": 63
    },
    {
      "epoch": 0.04322863897331983,
      "grad_norm": 0.24196283519268036,
      "learning_rate": 0.0001627176358473537,
      "loss": 10.6253,
      "step": 64
    },
    {
      "epoch": 0.04390408645727795,
      "grad_norm": 0.24466153979301453,
      "learning_rate": 0.0001614212712689668,
      "loss": 10.6304,
      "step": 65
    },
    {
      "epoch": 0.044579533941236066,
      "grad_norm": 0.21384331583976746,
      "learning_rate": 0.00016010811472830252,
      "loss": 10.6279,
      "step": 66
    },
    {
      "epoch": 0.04525498142519419,
      "grad_norm": 0.23176230490207672,
      "learning_rate": 0.00015877852522924732,
      "loss": 10.6254,
      "step": 67
    },
    {
      "epoch": 0.04593042890915231,
      "grad_norm": 0.21549372375011444,
      "learning_rate": 0.00015743286626829437,
      "loss": 10.6254,
      "step": 68
    },
    {
      "epoch": 0.04660587639311044,
      "grad_norm": 0.21212856471538544,
      "learning_rate": 0.0001560715057351673,
      "loss": 10.6306,
      "step": 69
    },
    {
      "epoch": 0.04728132387706856,
      "grad_norm": 0.2017771154642105,
      "learning_rate": 0.00015469481581224272,
      "loss": 10.6216,
      "step": 70
    },
    {
      "epoch": 0.04795677136102668,
      "grad_norm": 0.21862851083278656,
      "learning_rate": 0.0001533031728727994,
      "loss": 10.6099,
      "step": 71
    },
    {
      "epoch": 0.0486322188449848,
      "grad_norm": 0.1980670690536499,
      "learning_rate": 0.00015189695737812152,
      "loss": 10.6245,
      "step": 72
    },
    {
      "epoch": 0.04930766632894292,
      "grad_norm": 0.2036397010087967,
      "learning_rate": 0.0001504765537734844,
      "loss": 10.6232,
      "step": 73
    },
    {
      "epoch": 0.04998311381290105,
      "grad_norm": 0.17732380330562592,
      "learning_rate": 0.00014904235038305083,
      "loss": 10.6244,
      "step": 74
    },
    {
      "epoch": 0.05065856129685917,
      "grad_norm": 0.19181127846240997,
      "learning_rate": 0.00014759473930370736,
      "loss": 10.6222,
      "step": 75
    },
    {
      "epoch": 0.05133400878081729,
      "grad_norm": 0.1601181924343109,
      "learning_rate": 0.0001461341162978688,
      "loss": 10.6218,
      "step": 76
    },
    {
      "epoch": 0.05200945626477541,
      "grad_norm": 0.18116536736488342,
      "learning_rate": 0.00014466088068528068,
      "loss": 10.625,
      "step": 77
    },
    {
      "epoch": 0.05268490374873354,
      "grad_norm": 0.14323946833610535,
      "learning_rate": 0.00014317543523384928,
      "loss": 10.6489,
      "step": 78
    },
    {
      "epoch": 0.05336035123269166,
      "grad_norm": 0.1809110790491104,
      "learning_rate": 0.00014167818604952906,
      "loss": 10.6282,
      "step": 79
    },
    {
      "epoch": 0.05403579871664978,
      "grad_norm": 0.13735494017601013,
      "learning_rate": 0.00014016954246529696,
      "loss": 10.6169,
      "step": 80
    },
    {
      "epoch": 0.0547112462006079,
      "grad_norm": 0.15906408429145813,
      "learning_rate": 0.00013864991692924523,
      "loss": 10.6212,
      "step": 81
    },
    {
      "epoch": 0.05538669368456602,
      "grad_norm": 0.17548221349716187,
      "learning_rate": 0.00013711972489182208,
      "loss": 10.6171,
      "step": 82
    },
    {
      "epoch": 0.05606214116852415,
      "grad_norm": 0.12418357282876968,
      "learning_rate": 0.00013557938469225167,
      "loss": 10.6143,
      "step": 83
    },
    {
      "epoch": 0.05673758865248227,
      "grad_norm": 0.1521899700164795,
      "learning_rate": 0.00013402931744416433,
      "loss": 10.6199,
      "step": 84
    },
    {
      "epoch": 0.057413036136440394,
      "grad_norm": 0.14142099022865295,
      "learning_rate": 0.00013246994692046836,
      "loss": 10.6131,
      "step": 85
    },
    {
      "epoch": 0.058088483620398514,
      "grad_norm": 0.13722097873687744,
      "learning_rate": 0.00013090169943749476,
      "loss": 10.6153,
      "step": 86
    },
    {
      "epoch": 0.05876393110435663,
      "grad_norm": 0.14299722015857697,
      "learning_rate": 0.0001293250037384465,
      "loss": 10.6094,
      "step": 87
    },
    {
      "epoch": 0.05943937858831476,
      "grad_norm": 0.13671687245368958,
      "learning_rate": 0.00012774029087618446,
      "loss": 10.6242,
      "step": 88
    },
    {
      "epoch": 0.06011482607227288,
      "grad_norm": 0.12905743718147278,
      "learning_rate": 0.00012614799409538198,
      "loss": 10.6116,
      "step": 89
    },
    {
      "epoch": 0.060790273556231005,
      "grad_norm": 0.13734190165996552,
      "learning_rate": 0.00012454854871407994,
      "loss": 10.6142,
      "step": 90
    },
    {
      "epoch": 0.061465721040189124,
      "grad_norm": 0.13106867671012878,
      "learning_rate": 0.00012294239200467516,
      "loss": 10.6176,
      "step": 91
    },
    {
      "epoch": 0.06214116852414725,
      "grad_norm": 0.1443423628807068,
      "learning_rate": 0.0001213299630743747,
      "loss": 10.6369,
      "step": 92
    },
    {
      "epoch": 0.06281661600810537,
      "grad_norm": 0.11222351342439651,
      "learning_rate": 0.00011971170274514802,
      "loss": 10.6164,
      "step": 93
    },
    {
      "epoch": 0.06349206349206349,
      "grad_norm": 0.11618170142173767,
      "learning_rate": 0.000118088053433211,
      "loss": 10.6057,
      "step": 94
    },
    {
      "epoch": 0.06416751097602161,
      "grad_norm": 0.11936385929584503,
      "learning_rate": 0.00011645945902807341,
      "loss": 10.6146,
      "step": 95
    },
    {
      "epoch": 0.06484295845997974,
      "grad_norm": 0.12773548066616058,
      "learning_rate": 0.0001148263647711842,
      "loss": 10.6161,
      "step": 96
    },
    {
      "epoch": 0.06551840594393786,
      "grad_norm": 0.1389545202255249,
      "learning_rate": 0.00011318921713420691,
      "loss": 10.6053,
      "step": 97
    },
    {
      "epoch": 0.06619385342789598,
      "grad_norm": 0.13642248511314392,
      "learning_rate": 0.00011154846369695863,
      "loss": 10.6153,
      "step": 98
    },
    {
      "epoch": 0.0668693009118541,
      "grad_norm": 0.11348054558038712,
      "learning_rate": 0.0001099045530250463,
      "loss": 10.6132,
      "step": 99
    },
    {
      "epoch": 0.06754474839581223,
      "grad_norm": 0.1491929590702057,
      "learning_rate": 0.00010825793454723325,
      "loss": 10.6309,
      "step": 100
    },
    {
      "epoch": 0.06754474839581223,
      "eval_loss": 10.612899780273438,
      "eval_runtime": 2.3146,
      "eval_samples_per_second": 269.593,
      "eval_steps_per_second": 134.797,
      "step": 100
    },
    {
      "epoch": 0.06822019587977035,
      "grad_norm": 0.11731832474470139,
      "learning_rate": 0.00010660905843256994,
      "loss": 10.6064,
      "step": 101
    },
    {
      "epoch": 0.06889564336372847,
      "grad_norm": 0.1060105562210083,
      "learning_rate": 0.00010495837546732224,
      "loss": 10.6059,
      "step": 102
    },
    {
      "epoch": 0.06957109084768659,
      "grad_norm": 0.12271567434072495,
      "learning_rate": 0.00010330633693173082,
      "loss": 10.6046,
      "step": 103
    },
    {
      "epoch": 0.07024653833164471,
      "grad_norm": 0.1422068178653717,
      "learning_rate": 0.00010165339447663587,
      "loss": 10.6122,
      "step": 104
    },
    {
      "epoch": 0.07092198581560284,
      "grad_norm": 0.123976930975914,
      "learning_rate": 0.0001,
      "loss": 10.6086,
      "step": 105
    },
    {
      "epoch": 0.07159743329956096,
      "grad_norm": 0.11725173145532608,
      "learning_rate": 9.834660552336415e-05,
      "loss": 10.6312,
      "step": 106
    },
    {
      "epoch": 0.07227288078351908,
      "grad_norm": 0.13431864976882935,
      "learning_rate": 9.669366306826919e-05,
      "loss": 10.6129,
      "step": 107
    },
    {
      "epoch": 0.0729483282674772,
      "grad_norm": 0.10811913758516312,
      "learning_rate": 9.504162453267777e-05,
      "loss": 10.6102,
      "step": 108
    },
    {
      "epoch": 0.07362377575143532,
      "grad_norm": 0.1448288857936859,
      "learning_rate": 9.339094156743007e-05,
      "loss": 10.6097,
      "step": 109
    },
    {
      "epoch": 0.07429922323539345,
      "grad_norm": 0.14231544733047485,
      "learning_rate": 9.174206545276677e-05,
      "loss": 10.6149,
      "step": 110
    },
    {
      "epoch": 0.07497467071935157,
      "grad_norm": 0.11350355297327042,
      "learning_rate": 9.009544697495374e-05,
      "loss": 10.6123,
      "step": 111
    },
    {
      "epoch": 0.07565011820330969,
      "grad_norm": 0.11213658004999161,
      "learning_rate": 8.845153630304139e-05,
      "loss": 10.6059,
      "step": 112
    },
    {
      "epoch": 0.07632556568726781,
      "grad_norm": 0.12278730422258377,
      "learning_rate": 8.681078286579311e-05,
      "loss": 10.6049,
      "step": 113
    },
    {
      "epoch": 0.07700101317122594,
      "grad_norm": 0.15146122872829437,
      "learning_rate": 8.517363522881579e-05,
      "loss": 10.6104,
      "step": 114
    },
    {
      "epoch": 0.07767646065518406,
      "grad_norm": 0.11578516662120819,
      "learning_rate": 8.35405409719266e-05,
      "loss": 10.6276,
      "step": 115
    },
    {
      "epoch": 0.07835190813914218,
      "grad_norm": 0.13298995792865753,
      "learning_rate": 8.191194656678904e-05,
      "loss": 10.6115,
      "step": 116
    },
    {
      "epoch": 0.0790273556231003,
      "grad_norm": 0.13557101786136627,
      "learning_rate": 8.028829725485199e-05,
      "loss": 10.6083,
      "step": 117
    },
    {
      "epoch": 0.07970280310705842,
      "grad_norm": 0.12159471958875656,
      "learning_rate": 7.867003692562534e-05,
      "loss": 10.6037,
      "step": 118
    },
    {
      "epoch": 0.08037825059101655,
      "grad_norm": 0.12765274941921234,
      "learning_rate": 7.705760799532485e-05,
      "loss": 10.6016,
      "step": 119
    },
    {
      "epoch": 0.08105369807497467,
      "grad_norm": 0.11235444992780685,
      "learning_rate": 7.54514512859201e-05,
      "loss": 10.6065,
      "step": 120
    },
    {
      "epoch": 0.08172914555893279,
      "grad_norm": 0.14903466403484344,
      "learning_rate": 7.385200590461803e-05,
      "loss": 10.5995,
      "step": 121
    },
    {
      "epoch": 0.08240459304289091,
      "grad_norm": 0.10130941867828369,
      "learning_rate": 7.225970912381556e-05,
      "loss": 10.6095,
      "step": 122
    },
    {
      "epoch": 0.08308004052684904,
      "grad_norm": 0.1442113071680069,
      "learning_rate": 7.067499626155354e-05,
      "loss": 10.6024,
      "step": 123
    },
    {
      "epoch": 0.08375548801080716,
      "grad_norm": 0.14047802984714508,
      "learning_rate": 6.909830056250527e-05,
      "loss": 10.6104,
      "step": 124
    },
    {
      "epoch": 0.08443093549476528,
      "grad_norm": 0.1147422268986702,
      "learning_rate": 6.753005307953167e-05,
      "loss": 10.6125,
      "step": 125
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 0.11507318913936615,
      "learning_rate": 6.59706825558357e-05,
      "loss": 10.6038,
      "step": 126
    },
    {
      "epoch": 0.08578183046268152,
      "grad_norm": 0.13589414954185486,
      "learning_rate": 6.442061530774834e-05,
      "loss": 10.6044,
      "step": 127
    },
    {
      "epoch": 0.08645727794663965,
      "grad_norm": 0.13625499606132507,
      "learning_rate": 6.28802751081779e-05,
      "loss": 10.6058,
      "step": 128
    },
    {
      "epoch": 0.08713272543059777,
      "grad_norm": 0.12005976587533951,
      "learning_rate": 6.135008307075481e-05,
      "loss": 10.62,
      "step": 129
    },
    {
      "epoch": 0.0878081729145559,
      "grad_norm": 0.11311294883489609,
      "learning_rate": 5.983045753470308e-05,
      "loss": 10.6092,
      "step": 130
    },
    {
      "epoch": 0.08848362039851401,
      "grad_norm": 0.13194864988327026,
      "learning_rate": 5.832181395047098e-05,
      "loss": 10.5994,
      "step": 131
    },
    {
      "epoch": 0.08915906788247213,
      "grad_norm": 0.12505017220973969,
      "learning_rate": 5.6824564766150726e-05,
      "loss": 10.6069,
      "step": 132
    },
    {
      "epoch": 0.08983451536643026,
      "grad_norm": 0.13057582080364227,
      "learning_rate": 5.533911931471936e-05,
      "loss": 10.5994,
      "step": 133
    },
    {
      "epoch": 0.09050996285038838,
      "grad_norm": 0.17442747950553894,
      "learning_rate": 5.386588370213124e-05,
      "loss": 10.6025,
      "step": 134
    },
    {
      "epoch": 0.0911854103343465,
      "grad_norm": 0.12355850636959076,
      "learning_rate": 5.240526069629265e-05,
      "loss": 10.6065,
      "step": 135
    },
    {
      "epoch": 0.09186085781830462,
      "grad_norm": 0.13733559846878052,
      "learning_rate": 5.095764961694922e-05,
      "loss": 10.5974,
      "step": 136
    },
    {
      "epoch": 0.09253630530226276,
      "grad_norm": 0.14686278998851776,
      "learning_rate": 4.952344622651566e-05,
      "loss": 10.5941,
      "step": 137
    },
    {
      "epoch": 0.09321175278622088,
      "grad_norm": 0.13458283245563507,
      "learning_rate": 4.810304262187852e-05,
      "loss": 10.5959,
      "step": 138
    },
    {
      "epoch": 0.093887200270179,
      "grad_norm": 0.1384890228509903,
      "learning_rate": 4.669682712720065e-05,
      "loss": 10.6081,
      "step": 139
    },
    {
      "epoch": 0.09456264775413711,
      "grad_norm": 0.15113620460033417,
      "learning_rate": 4.530518418775733e-05,
      "loss": 10.6194,
      "step": 140
    },
    {
      "epoch": 0.09523809523809523,
      "grad_norm": 0.14613160490989685,
      "learning_rate": 4.392849426483274e-05,
      "loss": 10.6113,
      "step": 141
    },
    {
      "epoch": 0.09591354272205337,
      "grad_norm": 0.12903277575969696,
      "learning_rate": 4.256713373170564e-05,
      "loss": 10.6009,
      "step": 142
    },
    {
      "epoch": 0.09658899020601149,
      "grad_norm": 0.15327778458595276,
      "learning_rate": 4.12214747707527e-05,
      "loss": 10.5978,
      "step": 143
    },
    {
      "epoch": 0.0972644376899696,
      "grad_norm": 0.14414553344249725,
      "learning_rate": 3.9891885271697496e-05,
      "loss": 10.5922,
      "step": 144
    },
    {
      "epoch": 0.09793988517392772,
      "grad_norm": 0.15006524324417114,
      "learning_rate": 3.857872873103322e-05,
      "loss": 10.6253,
      "step": 145
    },
    {
      "epoch": 0.09861533265788584,
      "grad_norm": 0.14634265005588531,
      "learning_rate": 3.7282364152646297e-05,
      "loss": 10.6011,
      "step": 146
    },
    {
      "epoch": 0.09929078014184398,
      "grad_norm": 0.14320406317710876,
      "learning_rate": 3.600314594966834e-05,
      "loss": 10.5892,
      "step": 147
    },
    {
      "epoch": 0.0999662276258021,
      "grad_norm": 0.14602705836296082,
      "learning_rate": 3.4741423847583134e-05,
      "loss": 10.5927,
      "step": 148
    },
    {
      "epoch": 0.10064167510976021,
      "grad_norm": 0.14189378917217255,
      "learning_rate": 3.349754278861517e-05,
      "loss": 10.5985,
      "step": 149
    },
    {
      "epoch": 0.10131712259371833,
      "grad_norm": 0.15539740025997162,
      "learning_rate": 3.227184283742591e-05,
      "loss": 10.5968,
      "step": 150
    },
    {
      "epoch": 0.10131712259371833,
      "eval_loss": 10.599814414978027,
      "eval_runtime": 2.2579,
      "eval_samples_per_second": 276.357,
      "eval_steps_per_second": 138.179,
      "step": 150
    },
    {
      "epoch": 0.10199257007767647,
      "grad_norm": 0.1630678027868271,
      "learning_rate": 3.106465908814342e-05,
      "loss": 10.6025,
      "step": 151
    },
    {
      "epoch": 0.10266801756163459,
      "grad_norm": 0.15854433178901672,
      "learning_rate": 2.9876321572751144e-05,
      "loss": 10.589,
      "step": 152
    },
    {
      "epoch": 0.1033434650455927,
      "grad_norm": 0.15652941167354584,
      "learning_rate": 2.87071551708603e-05,
      "loss": 10.6022,
      "step": 153
    },
    {
      "epoch": 0.10401891252955082,
      "grad_norm": 0.14386983215808868,
      "learning_rate": 2.7557479520891104e-05,
      "loss": 10.5974,
      "step": 154
    },
    {
      "epoch": 0.10469436001350894,
      "grad_norm": 0.17749740183353424,
      "learning_rate": 2.6427608932686843e-05,
      "loss": 10.6112,
      "step": 155
    },
    {
      "epoch": 0.10536980749746708,
      "grad_norm": 0.1529720574617386,
      "learning_rate": 2.5317852301584643e-05,
      "loss": 10.6075,
      "step": 156
    },
    {
      "epoch": 0.1060452549814252,
      "grad_norm": 0.16465173661708832,
      "learning_rate": 2.422851302396655e-05,
      "loss": 10.5917,
      "step": 157
    },
    {
      "epoch": 0.10672070246538332,
      "grad_norm": 0.148993581533432,
      "learning_rate": 2.315988891431412e-05,
      "loss": 10.5951,
      "step": 158
    },
    {
      "epoch": 0.10739614994934144,
      "grad_norm": 0.17219752073287964,
      "learning_rate": 2.2112272123788768e-05,
      "loss": 10.5877,
      "step": 159
    },
    {
      "epoch": 0.10807159743329955,
      "grad_norm": 0.17853744328022003,
      "learning_rate": 2.1085949060360654e-05,
      "loss": 10.5945,
      "step": 160
    },
    {
      "epoch": 0.10874704491725769,
      "grad_norm": 0.16907915472984314,
      "learning_rate": 2.008120031050753e-05,
      "loss": 10.5967,
      "step": 161
    },
    {
      "epoch": 0.1094224924012158,
      "grad_norm": 0.18723782896995544,
      "learning_rate": 1.9098300562505266e-05,
      "loss": 10.5955,
      "step": 162
    },
    {
      "epoch": 0.11009793988517393,
      "grad_norm": 0.18115279078483582,
      "learning_rate": 1.8137518531330767e-05,
      "loss": 10.6003,
      "step": 163
    },
    {
      "epoch": 0.11077338736913205,
      "grad_norm": 0.14582610130310059,
      "learning_rate": 1.7199116885197995e-05,
      "loss": 10.6084,
      "step": 164
    },
    {
      "epoch": 0.11144883485309018,
      "grad_norm": 0.16853328049182892,
      "learning_rate": 1.6283352173747145e-05,
      "loss": 10.6109,
      "step": 165
    },
    {
      "epoch": 0.1121242823370483,
      "grad_norm": 0.16580811142921448,
      "learning_rate": 1.5390474757906446e-05,
      "loss": 10.6031,
      "step": 166
    },
    {
      "epoch": 0.11279972982100642,
      "grad_norm": 0.1782258301973343,
      "learning_rate": 1.4520728741446089e-05,
      "loss": 10.5978,
      "step": 167
    },
    {
      "epoch": 0.11347517730496454,
      "grad_norm": 0.16206084191799164,
      "learning_rate": 1.3674351904242611e-05,
      "loss": 10.5997,
      "step": 168
    },
    {
      "epoch": 0.11415062478892266,
      "grad_norm": 0.1567702740430832,
      "learning_rate": 1.2851575637272262e-05,
      "loss": 10.6037,
      "step": 169
    },
    {
      "epoch": 0.11482607227288079,
      "grad_norm": 0.174870565533638,
      "learning_rate": 1.2052624879351104e-05,
      "loss": 10.5948,
      "step": 170
    },
    {
      "epoch": 0.11550151975683891,
      "grad_norm": 0.15422426164150238,
      "learning_rate": 1.1277718055638819e-05,
      "loss": 10.5984,
      "step": 171
    },
    {
      "epoch": 0.11617696724079703,
      "grad_norm": 0.17559370398521423,
      "learning_rate": 1.0527067017923654e-05,
      "loss": 10.5945,
      "step": 172
    },
    {
      "epoch": 0.11685241472475515,
      "grad_norm": 0.1844927966594696,
      "learning_rate": 9.80087698670411e-06,
      "loss": 10.5987,
      "step": 173
    },
    {
      "epoch": 0.11752786220871327,
      "grad_norm": 0.16079100966453552,
      "learning_rate": 9.09934649508375e-06,
      "loss": 10.6051,
      "step": 174
    },
    {
      "epoch": 0.1182033096926714,
      "grad_norm": 0.16014216840267181,
      "learning_rate": 8.422667334494249e-06,
      "loss": 10.596,
      "step": 175
    },
    {
      "epoch": 0.11887875717662952,
      "grad_norm": 0.16521821916103363,
      "learning_rate": 7.771024502261526e-06,
      "loss": 10.6092,
      "step": 176
    },
    {
      "epoch": 0.11955420466058764,
      "grad_norm": 0.1629599630832672,
      "learning_rate": 7.144596151029303e-06,
      "loss": 10.5984,
      "step": 177
    },
    {
      "epoch": 0.12022965214454576,
      "grad_norm": 0.16382640600204468,
      "learning_rate": 6.543553540053926e-06,
      "loss": 10.5851,
      "step": 178
    },
    {
      "epoch": 0.12090509962850389,
      "grad_norm": 0.15883676707744598,
      "learning_rate": 5.968060988383883e-06,
      "loss": 10.5956,
      "step": 179
    },
    {
      "epoch": 0.12158054711246201,
      "grad_norm": 0.184475377202034,
      "learning_rate": 5.418275829936537e-06,
      "loss": 10.589,
      "step": 180
    },
    {
      "epoch": 0.12225599459642013,
      "grad_norm": 0.19880411028862,
      "learning_rate": 4.8943483704846475e-06,
      "loss": 10.581,
      "step": 181
    },
    {
      "epoch": 0.12293144208037825,
      "grad_norm": 0.16972365975379944,
      "learning_rate": 4.3964218465642355e-06,
      "loss": 10.5909,
      "step": 182
    },
    {
      "epoch": 0.12360688956433637,
      "grad_norm": 0.17554166913032532,
      "learning_rate": 3.924632386315186e-06,
      "loss": 10.5855,
      "step": 183
    },
    {
      "epoch": 0.1242823370482945,
      "grad_norm": 0.15895400941371918,
      "learning_rate": 3.4791089722651436e-06,
      "loss": 10.6027,
      "step": 184
    },
    {
      "epoch": 0.12495778453225262,
      "grad_norm": 0.15381865203380585,
      "learning_rate": 3.059973406066963e-06,
      "loss": 10.6043,
      "step": 185
    },
    {
      "epoch": 0.12563323201621074,
      "grad_norm": 0.1588447093963623,
      "learning_rate": 2.667340275199426e-06,
      "loss": 10.5858,
      "step": 186
    },
    {
      "epoch": 0.12630867950016886,
      "grad_norm": 0.170160710811615,
      "learning_rate": 2.3013169216400733e-06,
      "loss": 10.5963,
      "step": 187
    },
    {
      "epoch": 0.12698412698412698,
      "grad_norm": 0.19386546313762665,
      "learning_rate": 1.9620034125190644e-06,
      "loss": 10.5906,
      "step": 188
    },
    {
      "epoch": 0.1276595744680851,
      "grad_norm": 0.15733763575553894,
      "learning_rate": 1.6494925127617634e-06,
      "loss": 10.5984,
      "step": 189
    },
    {
      "epoch": 0.12833502195204322,
      "grad_norm": 0.1530665159225464,
      "learning_rate": 1.3638696597277679e-06,
      "loss": 10.5911,
      "step": 190
    },
    {
      "epoch": 0.12901046943600136,
      "grad_norm": 0.1876905858516693,
      "learning_rate": 1.1052129398531507e-06,
      "loss": 10.6023,
      "step": 191
    },
    {
      "epoch": 0.12968591691995948,
      "grad_norm": 0.1700376272201538,
      "learning_rate": 8.735930673024806e-07,
      "loss": 10.5979,
      "step": 192
    },
    {
      "epoch": 0.1303613644039176,
      "grad_norm": 0.18517427146434784,
      "learning_rate": 6.690733646361857e-07,
      "loss": 10.5894,
      "step": 193
    },
    {
      "epoch": 0.13103681188787572,
      "grad_norm": 0.16964897513389587,
      "learning_rate": 4.917097454988584e-07,
      "loss": 10.5996,
      "step": 194
    },
    {
      "epoch": 0.13171225937183384,
      "grad_norm": 0.1931840479373932,
      "learning_rate": 3.415506993330153e-07,
      "loss": 10.5854,
      "step": 195
    },
    {
      "epoch": 0.13238770685579196,
      "grad_norm": 0.15033087134361267,
      "learning_rate": 2.1863727812254653e-07,
      "loss": 10.6238,
      "step": 196
    },
    {
      "epoch": 0.13306315433975008,
      "grad_norm": 0.1763419657945633,
      "learning_rate": 1.230030851695263e-07,
      "loss": 10.5943,
      "step": 197
    },
    {
      "epoch": 0.1337386018237082,
      "grad_norm": 0.17355754971504211,
      "learning_rate": 5.467426590739511e-08,
      "loss": 10.5912,
      "step": 198
    },
    {
      "epoch": 0.13441404930766632,
      "grad_norm": 0.1702238917350769,
      "learning_rate": 1.3669500753099585e-08,
      "loss": 10.6278,
      "step": 199
    },
    {
      "epoch": 0.13508949679162446,
      "grad_norm": 0.15678980946540833,
      "learning_rate": 0.0,
      "loss": 10.5993,
      "step": 200
    },
    {
      "epoch": 0.13508949679162446,
      "eval_loss": 10.595317840576172,
      "eval_runtime": 2.2488,
      "eval_samples_per_second": 277.475,
      "eval_steps_per_second": 138.738,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 32811830476800.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}