{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.965426072808624,
  "eval_steps": 500,
  "global_step": 49000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1016880211511084,
      "grad_norm": 8.324226379394531,
      "learning_rate": 4.830519964748153e-05,
      "loss": 2.3588,
      "step": 500
    },
    {
      "epoch": 0.2033760423022168,
      "grad_norm": 8.197102546691895,
      "learning_rate": 4.661039929496305e-05,
      "loss": 2.1044,
      "step": 1000
    },
    {
      "epoch": 0.3050640634533252,
      "grad_norm": 7.696329593658447,
      "learning_rate": 4.4915598942444584e-05,
      "loss": 2.0132,
      "step": 1500
    },
    {
      "epoch": 0.4067520846044336,
      "grad_norm": 7.981374263763428,
      "learning_rate": 4.322079858992611e-05,
      "loss": 1.9548,
      "step": 2000
    },
    {
      "epoch": 0.508440105755542,
      "grad_norm": 8.823938369750977,
      "learning_rate": 4.152599823740764e-05,
      "loss": 1.899,
      "step": 2500
    },
    {
      "epoch": 0.6101281269066504,
      "grad_norm": 8.75110149383545,
      "learning_rate": 3.983119788488916e-05,
      "loss": 1.8601,
      "step": 3000
    },
    {
      "epoch": 0.7118161480577588,
      "grad_norm": 11.115782737731934,
      "learning_rate": 3.8136397532370685e-05,
      "loss": 1.8438,
      "step": 3500
    },
    {
      "epoch": 0.8135041692088671,
      "grad_norm": 8.88609790802002,
      "learning_rate": 3.644159717985222e-05,
      "loss": 1.8221,
      "step": 4000
    },
    {
      "epoch": 0.9151921903599756,
      "grad_norm": 9.193504333496094,
      "learning_rate": 3.4746796827333746e-05,
      "loss": 1.8052,
      "step": 4500
    },
    {
      "epoch": 1.016880211511084,
      "grad_norm": 9.435020446777344,
      "learning_rate": 3.3051996474815266e-05,
      "loss": 1.7687,
      "step": 5000
    },
    {
      "epoch": 1.1185682326621924,
      "grad_norm": 8.944482803344727,
      "learning_rate": 3.135719612229679e-05,
      "loss": 1.7001,
      "step": 5500
    },
    {
      "epoch": 1.2202562538133008,
      "grad_norm": 9.68911075592041,
      "learning_rate": 2.9662395769778324e-05,
      "loss": 1.7166,
      "step": 6000
    },
    {
      "epoch": 1.3219442749644092,
      "grad_norm": 8.70807933807373,
      "learning_rate": 2.796759541725985e-05,
      "loss": 1.6879,
      "step": 6500
    },
    {
      "epoch": 1.4236322961155177,
      "grad_norm": 6.220630645751953,
      "learning_rate": 2.6272795064741374e-05,
      "loss": 1.6574,
      "step": 7000
    },
    {
      "epoch": 1.525320317266626,
      "grad_norm": 10.505532264709473,
      "learning_rate": 2.45779947122229e-05,
      "loss": 1.6609,
      "step": 7500
    },
    {
      "epoch": 1.6270083384177343,
      "grad_norm": 6.303832054138184,
      "learning_rate": 2.2883194359704428e-05,
      "loss": 1.6455,
      "step": 8000
    },
    {
      "epoch": 1.728696359568843,
      "grad_norm": 8.265469551086426,
      "learning_rate": 2.1188394007185955e-05,
      "loss": 1.6164,
      "step": 8500
    },
    {
      "epoch": 1.8303843807199511,
      "grad_norm": 7.970987319946289,
      "learning_rate": 1.949359365466748e-05,
      "loss": 1.6029,
      "step": 9000
    },
    {
      "epoch": 1.9320724018710596,
      "grad_norm": 9.984902381896973,
      "learning_rate": 1.779879330214901e-05,
      "loss": 1.6018,
      "step": 9500
    },
    {
      "epoch": 2.033760423022168,
      "grad_norm": 7.69148588180542,
      "learning_rate": 1.6103992949630533e-05,
      "loss": 1.6092,
      "step": 10000
    },
    {
      "epoch": 2.135448444173276,
      "grad_norm": 6.372833251953125,
      "learning_rate": 1.4409192597112062e-05,
      "loss": 1.5372,
      "step": 10500
    },
    {
      "epoch": 2.237136465324385,
      "grad_norm": 7.225351333618164,
      "learning_rate": 1.2714392244593587e-05,
      "loss": 1.5452,
      "step": 11000
    },
    {
      "epoch": 2.338824486475493,
      "grad_norm": 8.18689250946045,
      "learning_rate": 1.1019591892075114e-05,
      "loss": 1.539,
      "step": 11500
    },
    {
      "epoch": 2.4405125076266017,
      "grad_norm": 7.346330165863037,
      "learning_rate": 9.324791539556641e-06,
      "loss": 1.5227,
      "step": 12000
    },
    {
      "epoch": 2.54220052877771,
      "grad_norm": 9.162985801696777,
      "learning_rate": 7.629991187038167e-06,
      "loss": 1.5207,
      "step": 12500
    },
    {
      "epoch": 2.6438885499288185,
      "grad_norm": 7.583765029907227,
      "learning_rate": 5.935190834519694e-06,
      "loss": 1.5172,
      "step": 13000
    },
    {
      "epoch": 2.7455765710799267,
      "grad_norm": 7.970169544219971,
      "learning_rate": 4.240390482001221e-06,
      "loss": 1.5272,
      "step": 13500
    },
    {
      "epoch": 2.8472645922310353,
      "grad_norm": 9.05431079864502,
      "learning_rate": 2.5455901294827473e-06,
      "loss": 1.5014,
      "step": 14000
    },
    {
      "epoch": 2.9489526133821435,
      "grad_norm": 8.572937965393066,
      "learning_rate": 8.507897769642736e-07,
      "loss": 1.4757,
      "step": 14500
    },
    {
      "epoch": 3.050640634533252,
      "grad_norm": 8.990861892700195,
      "learning_rate": 3.0933496034167175e-05,
      "loss": 1.5368,
      "step": 15000
    },
    {
      "epoch": 3.1523286556843604,
      "grad_norm": 8.719560623168945,
      "learning_rate": 3.0297945901972747e-05,
      "loss": 1.5287,
      "step": 15500
    },
    {
      "epoch": 3.2540166768354686,
      "grad_norm": 8.392940521240234,
      "learning_rate": 2.9662395769778324e-05,
      "loss": 1.5461,
      "step": 16000
    },
    {
      "epoch": 3.3557046979865772,
      "grad_norm": 8.443445205688477,
      "learning_rate": 2.9026845637583893e-05,
      "loss": 1.5664,
      "step": 16500
    },
    {
      "epoch": 3.4573927191376854,
      "grad_norm": 7.7863850593566895,
      "learning_rate": 2.8391295505389465e-05,
      "loss": 1.5471,
      "step": 17000
    },
    {
      "epoch": 3.559080740288794,
      "grad_norm": 9.05781364440918,
      "learning_rate": 2.775574537319504e-05,
      "loss": 1.5505,
      "step": 17500
    },
    {
      "epoch": 3.6607687614399023,
      "grad_norm": 6.635441303253174,
      "learning_rate": 2.712019524100061e-05,
      "loss": 1.518,
      "step": 18000
    },
    {
      "epoch": 3.762456782591011,
      "grad_norm": 8.673697471618652,
      "learning_rate": 2.6484645108806183e-05,
      "loss": 1.5018,
      "step": 18500
    },
    {
      "epoch": 3.864144803742119,
      "grad_norm": 9.876930236816406,
      "learning_rate": 2.5849094976611753e-05,
      "loss": 1.5348,
      "step": 19000
    },
    {
      "epoch": 3.9658328248932273,
      "grad_norm": 8.771023750305176,
      "learning_rate": 2.5213544844417332e-05,
      "loss": 1.4903,
      "step": 19500
    },
    {
      "epoch": 4.067520846044336,
      "grad_norm": 7.6693501472473145,
      "learning_rate": 2.45779947122229e-05,
      "loss": 1.4806,
      "step": 20000
    },
    {
      "epoch": 4.169208867195445,
      "grad_norm": 6.399499416351318,
      "learning_rate": 2.3942444580028474e-05,
      "loss": 1.4807,
      "step": 20500
    },
    {
      "epoch": 4.270896888346552,
      "grad_norm": 8.920206069946289,
      "learning_rate": 2.3306894447834047e-05,
      "loss": 1.4578,
      "step": 21000
    },
    {
      "epoch": 4.372584909497661,
      "grad_norm": 8.65735912322998,
      "learning_rate": 2.267134431563962e-05,
      "loss": 1.468,
      "step": 21500
    },
    {
      "epoch": 4.47427293064877,
      "grad_norm": 5.02122688293457,
      "learning_rate": 2.2035794183445192e-05,
      "loss": 1.4579,
      "step": 22000
    },
    {
      "epoch": 4.575960951799878,
      "grad_norm": 8.269110679626465,
      "learning_rate": 2.1400244051250764e-05,
      "loss": 1.4484,
      "step": 22500
    },
    {
      "epoch": 4.677648972950986,
      "grad_norm": 7.763547897338867,
      "learning_rate": 2.0764693919056337e-05,
      "loss": 1.4554,
      "step": 23000
    },
    {
      "epoch": 4.779336994102095,
      "grad_norm": 9.097722053527832,
      "learning_rate": 2.012914378686191e-05,
      "loss": 1.4531,
      "step": 23500
    },
    {
      "epoch": 4.881025015253203,
      "grad_norm": 8.873741149902344,
      "learning_rate": 1.949359365466748e-05,
      "loss": 1.4677,
      "step": 24000
    },
    {
      "epoch": 4.982713036404312,
      "grad_norm": 8.350312232971191,
      "learning_rate": 1.8858043522473055e-05,
      "loss": 1.4306,
      "step": 24500
    },
    {
      "epoch": 5.08440105755542,
      "grad_norm": 5.5741143226623535,
      "learning_rate": 1.8222493390278624e-05,
      "loss": 1.4203,
      "step": 25000
    },
    {
      "epoch": 5.186089078706528,
      "grad_norm": 9.378081321716309,
      "learning_rate": 1.75869432580842e-05,
      "loss": 1.4165,
      "step": 25500
    },
    {
      "epoch": 5.287777099857637,
      "grad_norm": 8.990020751953125,
      "learning_rate": 1.6951393125889773e-05,
      "loss": 1.3941,
      "step": 26000
    },
    {
      "epoch": 5.389465121008746,
      "grad_norm": 8.84300708770752,
      "learning_rate": 1.6315842993695342e-05,
      "loss": 1.3888,
      "step": 26500
    },
    {
      "epoch": 5.491153142159853,
      "grad_norm": 7.583876132965088,
      "learning_rate": 1.5680292861500918e-05,
      "loss": 1.3947,
      "step": 27000
    },
    {
      "epoch": 5.592841163310962,
      "grad_norm": 7.263258934020996,
      "learning_rate": 1.5044742729306487e-05,
      "loss": 1.3716,
      "step": 27500
    },
    {
      "epoch": 5.694529184462071,
      "grad_norm": 9.45223331451416,
      "learning_rate": 1.4409192597112062e-05,
      "loss": 1.3853,
      "step": 28000
    },
    {
      "epoch": 5.796217205613178,
      "grad_norm": 6.165616989135742,
      "learning_rate": 1.3773642464917633e-05,
      "loss": 1.3963,
      "step": 28500
    },
    {
      "epoch": 5.897905226764287,
      "grad_norm": 7.537690162658691,
      "learning_rate": 1.3138092332723207e-05,
      "loss": 1.3644,
      "step": 29000
    },
    {
      "epoch": 5.999593247915396,
      "grad_norm": 9.683405876159668,
      "learning_rate": 1.2502542200528778e-05,
      "loss": 1.3892,
      "step": 29500
    },
    {
      "epoch": 6.101281269066504,
      "grad_norm": 8.821761131286621,
      "learning_rate": 1.186699206833435e-05,
      "loss": 1.3335,
      "step": 30000
    },
    {
      "epoch": 6.202969290217612,
      "grad_norm": 12.081774711608887,
      "learning_rate": 1.1231441936139923e-05,
      "loss": 1.3526,
      "step": 30500
    },
    {
      "epoch": 6.304657311368721,
      "grad_norm": 6.325799465179443,
      "learning_rate": 1.0595891803945496e-05,
      "loss": 1.3424,
      "step": 31000
    },
    {
      "epoch": 6.406345332519829,
      "grad_norm": 9.028151512145996,
      "learning_rate": 9.960341671751069e-06,
      "loss": 1.3693,
      "step": 31500
    },
    {
      "epoch": 6.508033353670937,
      "grad_norm": 6.508680820465088,
      "learning_rate": 9.324791539556641e-06,
      "loss": 1.3506,
      "step": 32000
    },
    {
      "epoch": 6.609721374822046,
      "grad_norm": 6.9301629066467285,
      "learning_rate": 8.689241407362212e-06,
      "loss": 1.3426,
      "step": 32500
    },
    {
      "epoch": 6.7114093959731544,
      "grad_norm": 8.2422456741333,
      "learning_rate": 8.053691275167785e-06,
      "loss": 1.3337,
      "step": 33000
    },
    {
      "epoch": 6.813097417124263,
      "grad_norm": 7.615689277648926,
      "learning_rate": 7.4181411429733575e-06,
      "loss": 1.3322,
      "step": 33500
    },
    {
      "epoch": 6.914785438275371,
      "grad_norm": 7.60098123550415,
      "learning_rate": 6.782591010778931e-06,
      "loss": 1.3182,
      "step": 34000
    },
    {
      "epoch": 7.0164734594264795,
      "grad_norm": 8.578591346740723,
      "learning_rate": 6.147040878584503e-06,
      "loss": 1.3333,
      "step": 34500
    },
    {
      "epoch": 7.118161480577588,
      "grad_norm": 8.674110412597656,
      "learning_rate": 5.511490746390076e-06,
      "loss": 1.2914,
      "step": 35000
    },
    {
      "epoch": 7.219849501728697,
      "grad_norm": 7.355325222015381,
      "learning_rate": 4.875940614195648e-06,
      "loss": 1.3241,
      "step": 35500
    },
    {
      "epoch": 7.3215375228798045,
      "grad_norm": 8.176190376281738,
      "learning_rate": 4.240390482001221e-06,
      "loss": 1.3184,
      "step": 36000
    },
    {
      "epoch": 7.423225544030913,
      "grad_norm": 8.22246265411377,
      "learning_rate": 3.604840349806793e-06,
      "loss": 1.3011,
      "step": 36500
    },
    {
      "epoch": 7.524913565182022,
      "grad_norm": 9.817005157470703,
      "learning_rate": 2.9692902176123655e-06,
      "loss": 1.3044,
      "step": 37000
    },
    {
      "epoch": 7.6266015863331305,
      "grad_norm": 8.255529403686523,
      "learning_rate": 2.3337400854179377e-06,
      "loss": 1.2964,
      "step": 37500
    },
    {
      "epoch": 7.728289607484238,
      "grad_norm": 6.6958160400390625,
      "learning_rate": 1.6981899532235102e-06,
      "loss": 1.3092,
      "step": 38000
    },
    {
      "epoch": 7.829977628635347,
      "grad_norm": 8.960187911987305,
      "learning_rate": 1.062639821029083e-06,
      "loss": 1.2976,
      "step": 38500
    },
    {
      "epoch": 7.9316656497864555,
      "grad_norm": 7.582452297210693,
      "learning_rate": 4.2708968883465534e-07,
      "loss": 1.2956,
      "step": 39000
    },
    {
      "epoch": 8.033353670937563,
      "grad_norm": 7.20064115524292,
      "learning_rate": 9.833231645312182e-06,
      "loss": 1.3157,
      "step": 39500
    },
    {
      "epoch": 8.135041692088672,
      "grad_norm": 7.36190128326416,
      "learning_rate": 9.324791539556641e-06,
      "loss": 1.3021,
      "step": 40000
    },
    {
      "epoch": 8.23672971323978,
      "grad_norm": 9.36536979675293,
      "learning_rate": 8.816351433801099e-06,
      "loss": 1.3073,
      "step": 40500
    },
    {
      "epoch": 8.33841773439089,
      "grad_norm": 7.667935848236084,
      "learning_rate": 8.307911328045556e-06,
      "loss": 1.2902,
      "step": 41000
    },
    {
      "epoch": 8.440105755541998,
      "grad_norm": 8.367385864257812,
      "learning_rate": 7.799471222290014e-06,
      "loss": 1.3164,
      "step": 41500
    },
    {
      "epoch": 8.541793776693105,
      "grad_norm": 8.879359245300293,
      "learning_rate": 7.291031116534472e-06,
      "loss": 1.2927,
      "step": 42000
    },
    {
      "epoch": 8.643481797844213,
      "grad_norm": 7.432185649871826,
      "learning_rate": 6.782591010778931e-06,
      "loss": 1.3027,
      "step": 42500
    },
    {
      "epoch": 8.745169818995322,
      "grad_norm": 7.75663423538208,
      "learning_rate": 6.274150905023389e-06,
      "loss": 1.3266,
      "step": 43000
    },
    {
      "epoch": 8.84685784014643,
      "grad_norm": 10.648377418518066,
      "learning_rate": 5.765710799267846e-06,
      "loss": 1.263,
      "step": 43500
    },
    {
      "epoch": 8.94854586129754,
      "grad_norm": 8.594388961791992,
      "learning_rate": 5.257270693512305e-06,
      "loss": 1.3049,
      "step": 44000
    },
    {
      "epoch": 9.050233882448648,
      "grad_norm": 8.348444938659668,
      "learning_rate": 4.748830587756762e-06,
      "loss": 1.2984,
      "step": 44500
    },
    {
      "epoch": 9.151921903599757,
      "grad_norm": 9.689435958862305,
      "learning_rate": 4.240390482001221e-06,
      "loss": 1.2609,
      "step": 45000
    },
    {
      "epoch": 9.253609924750865,
      "grad_norm": 7.50744104385376,
      "learning_rate": 3.7319503762456785e-06,
      "loss": 1.2705,
      "step": 45500
    },
    {
      "epoch": 9.355297945901972,
      "grad_norm": 7.884949684143066,
      "learning_rate": 3.2235102704901364e-06,
      "loss": 1.2949,
      "step": 46000
    },
    {
      "epoch": 9.45698596705308,
      "grad_norm": 7.593263626098633,
      "learning_rate": 2.7150701647345947e-06,
      "loss": 1.2635,
      "step": 46500
    },
    {
      "epoch": 9.55867398820419,
      "grad_norm": 6.893133640289307,
      "learning_rate": 2.2066300589790525e-06,
      "loss": 1.2811,
      "step": 47000
    },
    {
      "epoch": 9.660362009355298,
      "grad_norm": 9.131689071655273,
      "learning_rate": 1.6981899532235102e-06,
      "loss": 1.2854,
      "step": 47500
    },
    {
      "epoch": 9.762050030506407,
      "grad_norm": 6.920133113861084,
      "learning_rate": 1.1897498474679682e-06,
      "loss": 1.306,
      "step": 48000
    },
    {
      "epoch": 9.863738051657515,
      "grad_norm": 7.8755950927734375,
      "learning_rate": 6.813097417124263e-07,
      "loss": 1.2769,
      "step": 48500
    },
    {
      "epoch": 9.965426072808624,
      "grad_norm": 9.283025741577148,
      "learning_rate": 1.728696359568843e-07,
      "loss": 1.2504,
      "step": 49000
    }
  ],
  "logging_steps": 500,
  "max_steps": 49170,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.57905183894272e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}