MHGanainy/gpt-J-lora-ecthr / trainer_state.json (commit 1af6d54)
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 21487,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0046539768231954205,
"grad_norm": 0.2191164344549179,
"learning_rate": 9.31098696461825e-07,
"loss": 1.8284,
"step": 100
},
{
"epoch": 0.009307953646390841,
"grad_norm": 0.23232442140579224,
"learning_rate": 1.86219739292365e-06,
"loss": 1.8324,
"step": 200
},
{
"epoch": 0.013961930469586261,
"grad_norm": 0.27771520614624023,
"learning_rate": 2.793296089385475e-06,
"loss": 1.8126,
"step": 300
},
{
"epoch": 0.018615907292781682,
"grad_norm": 0.46955519914627075,
"learning_rate": 3.7243947858473e-06,
"loss": 1.7894,
"step": 400
},
{
"epoch": 0.023269884115977104,
"grad_norm": 0.4999992847442627,
"learning_rate": 4.655493482309125e-06,
"loss": 1.7438,
"step": 500
},
{
"epoch": 0.027923860939172523,
"grad_norm": 0.6398751139640808,
"learning_rate": 5.58659217877095e-06,
"loss": 1.6969,
"step": 600
},
{
"epoch": 0.032577837762367945,
"grad_norm": 0.7858927249908447,
"learning_rate": 6.517690875232775e-06,
"loss": 1.7084,
"step": 700
},
{
"epoch": 0.037231814585563364,
"grad_norm": 0.7117326259613037,
"learning_rate": 7.4487895716946e-06,
"loss": 1.709,
"step": 800
},
{
"epoch": 0.04188579140875878,
"grad_norm": 0.7050628066062927,
"learning_rate": 8.379888268156426e-06,
"loss": 1.6655,
"step": 900
},
{
"epoch": 0.04653976823195421,
"grad_norm": 0.9369891285896301,
"learning_rate": 9.31098696461825e-06,
"loss": 1.6765,
"step": 1000
},
{
"epoch": 0.05119374505514963,
"grad_norm": 0.7342795729637146,
"learning_rate": 1.0242085661080074e-05,
"loss": 1.6398,
"step": 1100
},
{
"epoch": 0.055847721878345045,
"grad_norm": 1.5446929931640625,
"learning_rate": 1.11731843575419e-05,
"loss": 1.6714,
"step": 1200
},
{
"epoch": 0.060501698701540464,
"grad_norm": 0.8926479816436768,
"learning_rate": 1.2104283054003724e-05,
"loss": 1.6355,
"step": 1300
},
{
"epoch": 0.06515567552473589,
"grad_norm": 0.9157938361167908,
"learning_rate": 1.303538175046555e-05,
"loss": 1.5984,
"step": 1400
},
{
"epoch": 0.06980965234793131,
"grad_norm": 0.9163572192192078,
"learning_rate": 1.3966480446927374e-05,
"loss": 1.6372,
"step": 1500
},
{
"epoch": 0.07446362917112673,
"grad_norm": 1.3793407678604126,
"learning_rate": 1.48975791433892e-05,
"loss": 1.6025,
"step": 1600
},
{
"epoch": 0.07911760599432215,
"grad_norm": 1.2203880548477173,
"learning_rate": 1.5828677839851026e-05,
"loss": 1.6359,
"step": 1700
},
{
"epoch": 0.08377158281751756,
"grad_norm": 1.18509840965271,
"learning_rate": 1.6759776536312852e-05,
"loss": 1.6273,
"step": 1800
},
{
"epoch": 0.08842555964071298,
"grad_norm": 1.1019757986068726,
"learning_rate": 1.7690875232774675e-05,
"loss": 1.6286,
"step": 1900
},
{
"epoch": 0.09307953646390842,
"grad_norm": 0.6310858726501465,
"learning_rate": 1.86219739292365e-05,
"loss": 1.6132,
"step": 2000
},
{
"epoch": 0.09773351328710383,
"grad_norm": 1.2102677822113037,
"learning_rate": 1.9553072625698326e-05,
"loss": 1.6125,
"step": 2100
},
{
"epoch": 0.10238749011029925,
"grad_norm": 1.16068696975708,
"learning_rate": 1.999964321562415e-05,
"loss": 1.618,
"step": 2200
},
{
"epoch": 0.10704146693349467,
"grad_norm": 1.1368638277053833,
"learning_rate": 1.999695163593931e-05,
"loss": 1.5991,
"step": 2300
},
{
"epoch": 0.11169544375669009,
"grad_norm": 0.9747415781021118,
"learning_rate": 1.9991621965335746e-05,
"loss": 1.5878,
"step": 2400
},
{
"epoch": 0.11634942057988551,
"grad_norm": 0.9197840690612793,
"learning_rate": 1.998365561025775e-05,
"loss": 1.6189,
"step": 2500
},
{
"epoch": 0.12100339740308093,
"grad_norm": 0.9155735373497009,
"learning_rate": 1.997305467294307e-05,
"loss": 1.587,
"step": 2600
},
{
"epoch": 0.12565737422627635,
"grad_norm": 0.7722734212875366,
"learning_rate": 1.9959821950868117e-05,
"loss": 1.5681,
"step": 2700
},
{
"epoch": 0.13031135104947178,
"grad_norm": 1.044062852859497,
"learning_rate": 1.994396093600977e-05,
"loss": 1.5845,
"step": 2800
},
{
"epoch": 0.13496532787266718,
"grad_norm": 0.7750725150108337,
"learning_rate": 1.9925475813923858e-05,
"loss": 1.5936,
"step": 2900
},
{
"epoch": 0.13961930469586262,
"grad_norm": 0.9063745141029358,
"learning_rate": 1.990437146264066e-05,
"loss": 1.5788,
"step": 3000
},
{
"epoch": 0.14427328151905802,
"grad_norm": 0.655881404876709,
"learning_rate": 1.988065345137762e-05,
"loss": 1.5927,
"step": 3100
},
{
"epoch": 0.14892725834225345,
"grad_norm": 1.1087632179260254,
"learning_rate": 1.98543280390697e-05,
"loss": 1.5607,
"step": 3200
},
{
"epoch": 0.1535812351654489,
"grad_norm": 0.7348114848136902,
"learning_rate": 1.982540217271772e-05,
"loss": 1.5344,
"step": 3300
},
{
"epoch": 0.1582352119886443,
"grad_norm": 0.8793938755989075,
"learning_rate": 1.979388348555509e-05,
"loss": 1.5576,
"step": 3400
},
{
"epoch": 0.16288918881183972,
"grad_norm": 0.9496509432792664,
"learning_rate": 1.975978029503352e-05,
"loss": 1.5439,
"step": 3500
},
{
"epoch": 0.16754316563503513,
"grad_norm": 0.9296020269393921,
"learning_rate": 1.9723101600628095e-05,
"loss": 1.6044,
"step": 3600
},
{
"epoch": 0.17219714245823056,
"grad_norm": 0.7520443797111511,
"learning_rate": 1.9683857081462423e-05,
"loss": 1.5319,
"step": 3700
},
{
"epoch": 0.17685111928142597,
"grad_norm": 0.9139896631240845,
"learning_rate": 1.9642057093754426e-05,
"loss": 1.5457,
"step": 3800
},
{
"epoch": 0.1815050961046214,
"grad_norm": 0.781461775302887,
"learning_rate": 1.9597712668083413e-05,
"loss": 1.5604,
"step": 3900
},
{
"epoch": 0.18615907292781683,
"grad_norm": 0.9205677509307861,
"learning_rate": 1.9550835506479266e-05,
"loss": 1.5255,
"step": 4000
},
{
"epoch": 0.19081304975101224,
"grad_norm": 0.7341404557228088,
"learning_rate": 1.9501437979334358e-05,
"loss": 1.5876,
"step": 4100
},
{
"epoch": 0.19546702657420767,
"grad_norm": 0.8709468245506287,
"learning_rate": 1.9449533122139157e-05,
"loss": 1.5469,
"step": 4200
},
{
"epoch": 0.20012100339740307,
"grad_norm": 0.9768949151039124,
"learning_rate": 1.939513463204229e-05,
"loss": 1.5417,
"step": 4300
},
{
"epoch": 0.2047749802205985,
"grad_norm": 0.7858554720878601,
"learning_rate": 1.9338256864236024e-05,
"loss": 1.5672,
"step": 4400
},
{
"epoch": 0.2094289570437939,
"grad_norm": 0.7643204927444458,
"learning_rate": 1.927891482816805e-05,
"loss": 1.5032,
"step": 4500
},
{
"epoch": 0.21408293386698934,
"grad_norm": 1.2041771411895752,
"learning_rate": 1.9217124183580674e-05,
"loss": 1.51,
"step": 4600
},
{
"epoch": 0.21873691069018475,
"grad_norm": 0.9453679323196411,
"learning_rate": 1.9152901236378364e-05,
"loss": 1.5662,
"step": 4700
},
{
"epoch": 0.22339088751338018,
"grad_norm": 0.6478219628334045,
"learning_rate": 1.908626293432479e-05,
"loss": 1.5932,
"step": 4800
},
{
"epoch": 0.22804486433657561,
"grad_norm": 0.838474690914154,
"learning_rate": 1.901722686257051e-05,
"loss": 1.567,
"step": 4900
},
{
"epoch": 0.23269884115977102,
"grad_norm": 1.1010560989379883,
"learning_rate": 1.894581123901238e-05,
"loss": 1.4849,
"step": 5000
},
{
"epoch": 0.23735281798296645,
"grad_norm": 0.8271942734718323,
"learning_rate": 1.88720349094861e-05,
"loss": 1.56,
"step": 5100
},
{
"epoch": 0.24200679480616186,
"grad_norm": 0.9592469334602356,
"learning_rate": 1.8795917342792967e-05,
"loss": 1.6055,
"step": 5200
},
{
"epoch": 0.2466607716293573,
"grad_norm": 0.7423321008682251,
"learning_rate": 1.8717478625562256e-05,
"loss": 1.5268,
"step": 5300
},
{
"epoch": 0.2513147484525527,
"grad_norm": 1.0170488357543945,
"learning_rate": 1.8636739456950593e-05,
"loss": 1.5765,
"step": 5400
},
{
"epoch": 0.2559687252757481,
"grad_norm": 0.7664517760276794,
"learning_rate": 1.8553721143179626e-05,
"loss": 1.5625,
"step": 5500
},
{
"epoch": 0.26062270209894356,
"grad_norm": 0.712568461894989,
"learning_rate": 1.8468445591913587e-05,
"loss": 1.5853,
"step": 5600
},
{
"epoch": 0.26527667892213896,
"grad_norm": 0.9137941002845764,
"learning_rate": 1.8380935306478036e-05,
"loss": 1.5628,
"step": 5700
},
{
"epoch": 0.26993065574533437,
"grad_norm": 0.9478745460510254,
"learning_rate": 1.829121337992151e-05,
"loss": 1.4895,
"step": 5800
},
{
"epoch": 0.27458463256852983,
"grad_norm": 0.5866535305976868,
"learning_rate": 1.8199303488921493e-05,
"loss": 1.5484,
"step": 5900
},
{
"epoch": 0.27923860939172523,
"grad_norm": 0.9393543004989624,
"learning_rate": 1.810522988753638e-05,
"loss": 1.5307,
"step": 6000
},
{
"epoch": 0.28389258621492064,
"grad_norm": 0.6128756403923035,
"learning_rate": 1.8009017400805094e-05,
"loss": 1.5821,
"step": 6100
},
{
"epoch": 0.28854656303811604,
"grad_norm": 0.8191483020782471,
"learning_rate": 1.7910691418196022e-05,
"loss": 1.5206,
"step": 6200
},
{
"epoch": 0.2932005398613115,
"grad_norm": 0.8590126633644104,
"learning_rate": 1.7810277886906998e-05,
"loss": 1.5488,
"step": 6300
},
{
"epoch": 0.2978545166845069,
"grad_norm": 0.6708762645721436,
"learning_rate": 1.7707803305018097e-05,
"loss": 1.5383,
"step": 6400
},
{
"epoch": 0.3025084935077023,
"grad_norm": 0.6897620558738708,
"learning_rate": 1.760329471449909e-05,
"loss": 1.5497,
"step": 6500
},
{
"epoch": 0.3071624703308978,
"grad_norm": 0.7875939607620239,
"learning_rate": 1.7496779694073333e-05,
"loss": 1.5251,
"step": 6600
},
{
"epoch": 0.3118164471540932,
"grad_norm": 0.9739288091659546,
"learning_rate": 1.738828635194003e-05,
"loss": 1.5235,
"step": 6700
},
{
"epoch": 0.3164704239772886,
"grad_norm": 0.9243332743644714,
"learning_rate": 1.7277843318356798e-05,
"loss": 1.5256,
"step": 6800
},
{
"epoch": 0.321124400800484,
"grad_norm": 1.0979468822479248,
"learning_rate": 1.7165479738084405e-05,
"loss": 1.5841,
"step": 6900
},
{
"epoch": 0.32577837762367945,
"grad_norm": 0.931398868560791,
"learning_rate": 1.7051225262695812e-05,
"loss": 1.5408,
"step": 7000
},
{
"epoch": 0.33043235444687485,
"grad_norm": 0.7612720131874084,
"learning_rate": 1.693511004275139e-05,
"loss": 1.4606,
"step": 7100
},
{
"epoch": 0.33508633127007026,
"grad_norm": 1.0624606609344482,
"learning_rate": 1.6817164719842548e-05,
"loss": 1.5322,
"step": 7200
},
{
"epoch": 0.3397403080932657,
"grad_norm": 0.6526899933815002,
"learning_rate": 1.66974204185057e-05,
"loss": 1.5801,
"step": 7300
},
{
"epoch": 0.3443942849164611,
"grad_norm": 0.7007707953453064,
"learning_rate": 1.6575908738008847e-05,
"loss": 1.4867,
"step": 7400
},
{
"epoch": 0.34904826173965653,
"grad_norm": 0.8433778882026672,
"learning_rate": 1.645266174401285e-05,
"loss": 1.5193,
"step": 7500
},
{
"epoch": 0.35370223856285193,
"grad_norm": 1.0169028043746948,
"learning_rate": 1.632771196010965e-05,
"loss": 1.53,
"step": 7600
},
{
"epoch": 0.3583562153860474,
"grad_norm": 0.9545369744300842,
"learning_rate": 1.6201092359239624e-05,
"loss": 1.4862,
"step": 7700
},
{
"epoch": 0.3630101922092428,
"grad_norm": 1.2505319118499756,
"learning_rate": 1.607283635499036e-05,
"loss": 1.5349,
"step": 7800
},
{
"epoch": 0.3676641690324382,
"grad_norm": 0.8437191843986511,
"learning_rate": 1.5942977792779193e-05,
"loss": 1.5245,
"step": 7900
},
{
"epoch": 0.37231814585563366,
"grad_norm": 0.8392757773399353,
"learning_rate": 1.581155094092171e-05,
"loss": 1.519,
"step": 8000
},
{
"epoch": 0.37697212267882907,
"grad_norm": 0.7851161956787109,
"learning_rate": 1.5678590481588743e-05,
"loss": 1.5301,
"step": 8100
},
{
"epoch": 0.3816260995020245,
"grad_norm": 0.8554267883300781,
"learning_rate": 1.5544131501654078e-05,
"loss": 1.5143,
"step": 8200
},
{
"epoch": 0.3862800763252199,
"grad_norm": 0.9319170117378235,
"learning_rate": 1.5408209483435402e-05,
"loss": 1.5079,
"step": 8300
},
{
"epoch": 0.39093405314841534,
"grad_norm": 0.9828823804855347,
"learning_rate": 1.5270860295330912e-05,
"loss": 1.5248,
"step": 8400
},
{
"epoch": 0.39558802997161074,
"grad_norm": 0.8951559662818909,
"learning_rate": 1.5132120182353984e-05,
"loss": 1.4728,
"step": 8500
},
{
"epoch": 0.40024200679480615,
"grad_norm": 0.8046073317527771,
"learning_rate": 1.499202575656852e-05,
"loss": 1.5096,
"step": 8600
},
{
"epoch": 0.4048959836180016,
"grad_norm": 0.7216867208480835,
"learning_rate": 1.4850613987427398e-05,
"loss": 1.538,
"step": 8700
},
{
"epoch": 0.409549960441197,
"grad_norm": 0.5751676559448242,
"learning_rate": 1.470792219201662e-05,
"loss": 1.508,
"step": 8800
},
{
"epoch": 0.4142039372643924,
"grad_norm": 0.7514901161193848,
"learning_rate": 1.4563988025207726e-05,
"loss": 1.5365,
"step": 8900
},
{
"epoch": 0.4188579140875878,
"grad_norm": 0.8872545957565308,
"learning_rate": 1.4418849469721073e-05,
"loss": 1.4705,
"step": 9000
},
{
"epoch": 0.4235118909107833,
"grad_norm": 0.772324800491333,
"learning_rate": 1.4272544826102576e-05,
"loss": 1.521,
"step": 9100
},
{
"epoch": 0.4281658677339787,
"grad_norm": 0.8108826279640198,
"learning_rate": 1.4125112702616607e-05,
"loss": 1.4958,
"step": 9200
},
{
"epoch": 0.4328198445571741,
"grad_norm": 0.8969531655311584,
"learning_rate": 1.3976592005057684e-05,
"loss": 1.4596,
"step": 9300
},
{
"epoch": 0.4374738213803695,
"grad_norm": 0.93816739320755,
"learning_rate": 1.3827021926483622e-05,
"loss": 1.5404,
"step": 9400
},
{
"epoch": 0.44212779820356496,
"grad_norm": 0.5850194096565247,
"learning_rate": 1.3676441936872904e-05,
"loss": 1.5342,
"step": 9500
},
{
"epoch": 0.44678177502676036,
"grad_norm": 0.903218686580658,
"learning_rate": 1.352489177270898e-05,
"loss": 1.5315,
"step": 9600
},
{
"epoch": 0.45143575184995577,
"grad_norm": 0.9102717041969299,
"learning_rate": 1.337241142649419e-05,
"loss": 1.5105,
"step": 9700
},
{
"epoch": 0.45608972867315123,
"grad_norm": 0.5675022006034851,
"learning_rate": 1.3219041136196202e-05,
"loss": 1.465,
"step": 9800
},
{
"epoch": 0.46074370549634663,
"grad_norm": 1.007855772972107,
"learning_rate": 1.3064821374629606e-05,
"loss": 1.5131,
"step": 9900
},
{
"epoch": 0.46539768231954204,
"grad_norm": 0.8553173542022705,
"learning_rate": 1.2909792838775571e-05,
"loss": 1.5054,
"step": 10000
},
{
"epoch": 0.47005165914273744,
"grad_norm": 0.9074007868766785,
"learning_rate": 1.2755558069133832e-05,
"loss": 1.5582,
"step": 10100
},
{
"epoch": 0.4747056359659329,
"grad_norm": 0.7873587608337402,
"learning_rate": 1.2599041981920994e-05,
"loss": 1.5109,
"step": 10200
},
{
"epoch": 0.4793596127891283,
"grad_norm": 0.9822585582733154,
"learning_rate": 1.2441840034728076e-05,
"loss": 1.5067,
"step": 10300
},
{
"epoch": 0.4840135896123237,
"grad_norm": 1.0133826732635498,
"learning_rate": 1.2283993711503824e-05,
"loss": 1.4886,
"step": 10400
},
{
"epoch": 0.4886675664355192,
"grad_norm": 1.1799577474594116,
"learning_rate": 1.2125544666241078e-05,
"loss": 1.4655,
"step": 10500
},
{
"epoch": 0.4933215432587146,
"grad_norm": 0.927507221698761,
"learning_rate": 1.1966534711984717e-05,
"loss": 1.4748,
"step": 10600
},
{
"epoch": 0.49797552008191,
"grad_norm": 0.8527470827102661,
"learning_rate": 1.1808603529490986e-05,
"loss": 1.4607,
"step": 10700
},
{
"epoch": 0.5026294969051054,
"grad_norm": 0.8991233706474304,
"learning_rate": 1.1648602337072668e-05,
"loss": 1.5432,
"step": 10800
},
{
"epoch": 0.5072834737283008,
"grad_norm": 0.7587746381759644,
"learning_rate": 1.1488166095750207e-05,
"loss": 1.5053,
"step": 10900
},
{
"epoch": 0.5119374505514962,
"grad_norm": 0.6698244214057922,
"learning_rate": 1.132733714296872e-05,
"loss": 1.4281,
"step": 11000
},
{
"epoch": 0.5165914273746917,
"grad_norm": 0.848193883895874,
"learning_rate": 1.1166157919805772e-05,
"loss": 1.5043,
"step": 11100
},
{
"epoch": 0.5212454041978871,
"grad_norm": 1.0735975503921509,
"learning_rate": 1.1004670959771624e-05,
"loss": 1.5072,
"step": 11200
},
{
"epoch": 0.5258993810210825,
"grad_norm": 1.1171404123306274,
"learning_rate": 1.0842918877585058e-05,
"loss": 1.4864,
"step": 11300
},
{
"epoch": 0.5305533578442779,
"grad_norm": 0.8309624195098877,
"learning_rate": 1.0680944357927836e-05,
"loss": 1.5373,
"step": 11400
},
{
"epoch": 0.5352073346674734,
"grad_norm": 0.8913981318473816,
"learning_rate": 1.0518790144180635e-05,
"loss": 1.4432,
"step": 11500
},
{
"epoch": 0.5398613114906687,
"grad_norm": 0.8174418807029724,
"learning_rate": 1.0356499027143519e-05,
"loss": 1.4852,
"step": 11600
},
{
"epoch": 0.5445152883138642,
"grad_norm": 0.9098989367485046,
"learning_rate": 1.0194113833743897e-05,
"loss": 1.5405,
"step": 11700
},
{
"epoch": 0.5491692651370597,
"grad_norm": 0.983168363571167,
"learning_rate": 1.0031677415734946e-05,
"loss": 1.5014,
"step": 11800
},
{
"epoch": 0.553823241960255,
"grad_norm": 0.793428361415863,
"learning_rate": 9.869232638387492e-06,
"loss": 1.5047,
"step": 11900
},
{
"epoch": 0.5584772187834505,
"grad_norm": 0.9047326445579529,
"learning_rate": 9.706822369178289e-06,
"loss": 1.4991,
"step": 12000
},
{
"epoch": 0.5631311956066459,
"grad_norm": 0.825706422328949,
"learning_rate": 9.54448946647775e-06,
"loss": 1.5175,
"step": 12100
},
{
"epoch": 0.5677851724298413,
"grad_norm": 0.9917203187942505,
"learning_rate": 9.382276768240104e-06,
"loss": 1.4916,
"step": 12200
},
{
"epoch": 0.5724391492530367,
"grad_norm": 0.8516975045204163,
"learning_rate": 9.220227080698847e-06,
"loss": 1.5003,
"step": 12300
},
{
"epoch": 0.5770931260762321,
"grad_norm": 0.7885426878929138,
"learning_rate": 9.058383167070676e-06,
"loss": 1.5427,
"step": 12400
},
{
"epoch": 0.5817471028994275,
"grad_norm": 0.9626107215881348,
"learning_rate": 8.89678773627071e-06,
"loss": 1.4672,
"step": 12500
},
{
"epoch": 0.586401079722623,
"grad_norm": 1.023768663406372,
"learning_rate": 8.735483431642033e-06,
"loss": 1.5236,
"step": 12600
},
{
"epoch": 0.5910550565458184,
"grad_norm": 0.9998337626457214,
"learning_rate": 8.574512819702594e-06,
"loss": 1.4932,
"step": 12700
},
{
"epoch": 0.5957090333690138,
"grad_norm": 0.9876322746276855,
"learning_rate": 8.413918378912325e-06,
"loss": 1.4849,
"step": 12800
},
{
"epoch": 0.6003630101922093,
"grad_norm": 0.9000487923622131,
"learning_rate": 8.253742488463531e-06,
"loss": 1.4532,
"step": 12900
},
{
"epoch": 0.6050169870154046,
"grad_norm": 0.9810521006584167,
"learning_rate": 8.094027417097468e-06,
"loss": 1.4818,
"step": 13000
},
{
"epoch": 0.6096709638386001,
"grad_norm": 0.8786018490791321,
"learning_rate": 7.936404805248885e-06,
"loss": 1.524,
"step": 13100
},
{
"epoch": 0.6143249406617955,
"grad_norm": 0.8514164686203003,
"learning_rate": 7.777732023413952e-06,
"loss": 1.4763,
"step": 13200
},
{
"epoch": 0.6189789174849909,
"grad_norm": 0.603767991065979,
"learning_rate": 7.619645674842026e-06,
"loss": 1.4977,
"step": 13300
},
{
"epoch": 0.6236328943081864,
"grad_norm": 0.8721168637275696,
"learning_rate": 7.462187476866129e-06,
"loss": 1.4339,
"step": 13400
},
{
"epoch": 0.6282868711313818,
"grad_norm": 0.8393872380256653,
"learning_rate": 7.305398981056899e-06,
"loss": 1.4888,
"step": 13500
},
{
"epoch": 0.6329408479545772,
"grad_norm": 1.0900713205337524,
"learning_rate": 7.149321562257598e-06,
"loss": 1.4612,
"step": 13600
},
{
"epoch": 0.6375948247777726,
"grad_norm": 0.857639491558075,
"learning_rate": 6.9939964076657e-06,
"loss": 1.4866,
"step": 13700
},
{
"epoch": 0.642248801600968,
"grad_norm": 0.7077500820159912,
"learning_rate": 6.839464505964017e-06,
"loss": 1.4587,
"step": 13800
},
{
"epoch": 0.6469027784241634,
"grad_norm": 0.9254826307296753,
"learning_rate": 6.6857666365042354e-06,
"loss": 1.463,
"step": 13900
},
{
"epoch": 0.6515567552473589,
"grad_norm": 0.8199195265769958,
"learning_rate": 6.5329433585456625e-06,
"loss": 1.5347,
"step": 14000
},
{
"epoch": 0.6562107320705542,
"grad_norm": 1.0524635314941406,
"learning_rate": 6.381035000552058e-06,
"loss": 1.5253,
"step": 14100
},
{
"epoch": 0.6608647088937497,
"grad_norm": 0.9598236083984375,
"learning_rate": 6.230081649549371e-06,
"loss": 1.5064,
"step": 14200
},
{
"epoch": 0.6655186857169452,
"grad_norm": 0.8717284202575684,
"learning_rate": 6.080123140547213e-06,
"loss": 1.5284,
"step": 14300
},
{
"epoch": 0.6701726625401405,
"grad_norm": 0.7019008994102478,
"learning_rate": 5.931199046026791e-06,
"loss": 1.5057,
"step": 14400
},
{
"epoch": 0.674826639363336,
"grad_norm": 0.7632008790969849,
"learning_rate": 5.78334866549816e-06,
"loss": 1.4382,
"step": 14500
},
{
"epoch": 0.6794806161865314,
"grad_norm": 0.9148026704788208,
"learning_rate": 5.63661101512949e-06,
"loss": 1.4656,
"step": 14600
},
{
"epoch": 0.6841345930097268,
"grad_norm": 1.0867414474487305,
"learning_rate": 5.491024817451108e-06,
"loss": 1.4611,
"step": 14700
},
{
"epoch": 0.6887885698329222,
"grad_norm": 0.9941554665565491,
"learning_rate": 5.346628491137016e-06,
"loss": 1.471,
"step": 14800
},
{
"epoch": 0.6934425466561176,
"grad_norm": 0.756461501121521,
"learning_rate": 5.203460140866625e-06,
"loss": 1.4691,
"step": 14900
},
{
"epoch": 0.6980965234793131,
"grad_norm": 0.9088025093078613,
"learning_rate": 5.061557547269322e-06,
"loss": 1.4712,
"step": 15000
},
{
"epoch": 0.7027505003025085,
"grad_norm": 0.850143313407898,
"learning_rate": 4.920958156954544e-06,
"loss": 1.4434,
"step": 15100
},
{
"epoch": 0.7074044771257039,
"grad_norm": 1.072135090827942,
"learning_rate": 4.781699072630018e-06,
"loss": 1.5095,
"step": 15200
},
{
"epoch": 0.7120584539488993,
"grad_norm": 0.8175047039985657,
"learning_rate": 4.643817043310759e-06,
"loss": 1.455,
"step": 15300
},
{
"epoch": 0.7167124307720948,
"grad_norm": 0.8658547401428223,
"learning_rate": 4.5073484546213705e-06,
"loss": 1.4545,
"step": 15400
},
{
"epoch": 0.7213664075952901,
"grad_norm": 0.9122436046600342,
"learning_rate": 4.372329319194262e-06,
"loss": 1.5157,
"step": 15500
},
{
"epoch": 0.7260203844184856,
"grad_norm": 1.0552470684051514,
"learning_rate": 4.238795267166298e-06,
"loss": 1.4664,
"step": 15600
},
{
"epoch": 0.7306743612416811,
"grad_norm": 0.8493475914001465,
"learning_rate": 4.106781536776383e-06,
"loss": 1.5032,
"step": 15700
},
{
"epoch": 0.7353283380648764,
"grad_norm": 0.9446743130683899,
"learning_rate": 3.976322965066436e-06,
"loss": 1.4747,
"step": 15800
},
{
"epoch": 0.7399823148880719,
"grad_norm": 0.7051513195037842,
"learning_rate": 3.847453978688287e-06,
"loss": 1.4649,
"step": 15900
},
{
"epoch": 0.7446362917112673,
"grad_norm": 0.941286563873291,
"learning_rate": 3.720208584818833e-06,
"loss": 1.4755,
"step": 16000
},
{
"epoch": 0.7492902685344627,
"grad_norm": 0.9868441820144653,
"learning_rate": 3.5946203621858867e-06,
"loss": 1.5239,
"step": 16100
},
{
"epoch": 0.7539442453576581,
"grad_norm": 1.0158665180206299,
"learning_rate": 3.4707224522071294e-06,
"loss": 1.4625,
"step": 16200
},
{
"epoch": 0.7585982221808535,
"grad_norm": 1.0703004598617554,
"learning_rate": 3.3485475502444055e-06,
"loss": 1.4627,
"step": 16300
},
{
"epoch": 0.763252199004049,
"grad_norm": 0.9671617746353149,
"learning_rate": 3.2281278969757913e-06,
"loss": 1.4777,
"step": 16400
},
{
"epoch": 0.7679061758272444,
"grad_norm": 1.0113691091537476,
"learning_rate": 3.109495269887587e-06,
"loss": 1.4954,
"step": 16500
},
{
"epoch": 0.7725601526504398,
"grad_norm": 1.0288777351379395,
"learning_rate": 2.9926809748885964e-06,
"loss": 1.5105,
"step": 16600
},
{
"epoch": 0.7772141294736352,
"grad_norm": 0.851613461971283,
"learning_rate": 2.877715838048811e-06,
"loss": 1.4307,
"step": 16700
},
{
"epoch": 0.7818681062968307,
"grad_norm": 0.7523232102394104,
"learning_rate": 2.764630197464729e-06,
"loss": 1.4429,
"step": 16800
},
{
"epoch": 0.786522083120026,
"grad_norm": 1.0177912712097168,
"learning_rate": 2.653453895253466e-06,
"loss": 1.4585,
"step": 16900
},
{
"epoch": 0.7911760599432215,
"grad_norm": 0.7985556721687317,
"learning_rate": 2.5442162696777095e-06,
"loss": 1.4912,
"step": 17000
},
{
"epoch": 0.795830036766417,
"grad_norm": 1.0856115818023682,
"learning_rate": 2.4369461474036825e-06,
"loss": 1.4824,
"step": 17100
},
{
"epoch": 0.8004840135896123,
"grad_norm": 1.0469813346862793,
"learning_rate": 2.332714608087351e-06,
"loss": 1.4442,
"step": 17200
},
{
"epoch": 0.8051379904128078,
"grad_norm": 0.7078351378440857,
"learning_rate": 2.2294435164482266e-06,
"loss": 1.5291,
"step": 17300
},
{
"epoch": 0.8097919672360032,
"grad_norm": 0.9295824766159058,
"learning_rate": 2.1282229933463116e-06,
"loss": 1.4083,
"step": 17400
},
{
"epoch": 0.8144459440591986,
"grad_norm": 0.7632860541343689,
"learning_rate": 2.0290797498183858e-06,
"loss": 1.4367,
"step": 17500
},
{
"epoch": 0.819099920882394,
"grad_norm": 0.9528563022613525,
"learning_rate": 1.9320399487288832e-06,
"loss": 1.445,
"step": 17600
},
{
"epoch": 0.8237538977055894,
"grad_norm": 0.8745253682136536,
"learning_rate": 1.8371291978657857e-06,
"loss": 1.4694,
"step": 17700
},
{
"epoch": 0.8284078745287848,
"grad_norm": 0.9276142120361328,
"learning_rate": 1.7443725431830127e-06,
"loss": 1.4682,
"step": 17800
},
{
"epoch": 0.8330618513519803,
"grad_norm": 0.8589171171188354,
"learning_rate": 1.6537944621910307e-06,
"loss": 1.4999,
"step": 17900
},
{
"epoch": 0.8377158281751756,
"grad_norm": 0.8758667707443237,
"learning_rate": 1.5654188574975215e-06,
"loss": 1.4621,
"step": 18000
},
{
"epoch": 0.8423698049983711,
"grad_norm": 0.9397442936897278,
"learning_rate": 1.4792690504997087e-06,
"loss": 1.4598,
"step": 18100
},
{
"epoch": 0.8470237818215666,
"grad_norm": 1.0151734352111816,
"learning_rate": 1.3953677752301032e-06,
"loss": 1.4698,
"step": 18200
},
{
"epoch": 0.8516777586447619,
"grad_norm": 0.9862604737281799,
"learning_rate": 1.3137371723572257e-06,
"loss": 1.4478,
"step": 18300
},
{
"epoch": 0.8563317354679574,
"grad_norm": 1.16762113571167,
"learning_rate": 1.2343987833429062e-06,
"loss": 1.4806,
"step": 18400
},
{
"epoch": 0.8609857122911528,
"grad_norm": 1.06341552734375,
"learning_rate": 1.1573735447577327e-06,
"loss": 1.4555,
"step": 18500
},
{
"epoch": 0.8656396891143482,
"grad_norm": 1.0032464265823364,
"learning_rate": 1.082681782756102e-06,
"loss": 1.4471,
"step": 18600
},
{
"epoch": 0.8702936659375436,
"grad_norm": 0.9144641160964966,
"learning_rate": 1.0103432077123532e-06,
"loss": 1.4721,
"step": 18700
},
{
"epoch": 0.874947642760739,
"grad_norm": 1.0318922996520996,
"learning_rate": 9.403769090194192e-07,
"loss": 1.4542,
"step": 18800
},
{
"epoch": 0.8796016195839345,
"grad_norm": 1.0590611696243286,
"learning_rate": 8.72801350051331e-07,
"loss": 1.497,
"step": 18900
},
{
"epoch": 0.8842555964071299,
"grad_norm": 0.5952686667442322,
"learning_rate": 8.076343632909211e-07,
"loss": 1.4613,
"step": 19000
},
{
"epoch": 0.8889095732303253,
"grad_norm": 0.8993837237358093,
"learning_rate": 7.448931456240205e-07,
"loss": 1.5027,
"step": 19100
},
{
"epoch": 0.8935635500535207,
"grad_norm": 0.8999748229980469,
"learning_rate": 6.845942538013894e-07,
"loss": 1.4677,
"step": 19200
},
{
"epoch": 0.8982175268767162,
"grad_norm": 1.1852108240127563,
"learning_rate": 6.273197876706216e-07,
"loss": 1.4537,
"step": 19300
},
{
"epoch": 0.9028715036999115,
"grad_norm": 0.8120535016059875,
"learning_rate": 5.71927827141906e-07,
"loss": 1.4965,
"step": 19400
},
{
"epoch": 0.907525480523107,
"grad_norm": 0.8027576804161072,
"learning_rate": 5.190238361949229e-07,
"loss": 1.5109,
"step": 19500
},
{
"epoch": 0.9121794573463025,
"grad_norm": 1.2424169778823853,
"learning_rate": 4.686217756392464e-07,
"loss": 1.4614,
"step": 19600
},
{
"epoch": 0.9168334341694978,
"grad_norm": 1.248057246208191,
"learning_rate": 4.207349460512022e-07,
"loss": 1.4786,
"step": 19700
},
{
"epoch": 0.9214874109926933,
"grad_norm": 0.8463609218597412,
"learning_rate": 3.7537598426397925e-07,
"loss": 1.4714,
"step": 19800
},
{
"epoch": 0.9261413878158887,
"grad_norm": 0.6483246684074402,
"learning_rate": 3.325568600329143e-07,
"loss": 1.5209,
"step": 19900
},
{
"epoch": 0.9307953646390841,
"grad_norm": 0.6988236904144287,
"learning_rate": 2.922888728767903e-07,
"loss": 1.485,
"step": 20000
},
{
"epoch": 0.9354493414622795,
"grad_norm": 0.9654485583305359,
"learning_rate": 2.545826490960146e-07,
"loss": 1.4532,
"step": 20100
},
{
"epoch": 0.9401033182854749,
"grad_norm": 1.0685368776321411,
"learning_rate": 2.194481389684433e-07,
"loss": 1.4925,
"step": 20200
},
{
"epoch": 0.9447572951086703,
"grad_norm": 1.161594033241272,
"learning_rate": 1.8689461412362164e-07,
"loss": 1.4376,
"step": 20300
},
{
"epoch": 0.9494112719318658,
"grad_norm": 1.1302499771118164,
"learning_rate": 1.5693066509608333e-07,
"loss": 1.4776,
"step": 20400
},
{
"epoch": 0.9540652487550612,
"grad_norm": 1.0777382850646973,
"learning_rate": 1.295641990584007e-07,
"loss": 1.4702,
"step": 20500
},
{
"epoch": 0.9587192255782566,
"grad_norm": 1.027956247329712,
"learning_rate": 1.0503714003311738e-07,
"loss": 1.4699,
"step": 20600
},
{
"epoch": 0.9633732024014521,
"grad_norm": 0.7537710666656494,
"learning_rate": 8.286047531811392e-08,
"loss": 1.489,
"step": 20700
},
{
"epoch": 0.9680271792246474,
"grad_norm": 0.7544810175895691,
"learning_rate": 6.33008399409063e-08,
"loss": 1.4563,
"step": 20800
},
{
"epoch": 0.9726811560478429,
"grad_norm": 0.9680439829826355,
"learning_rate": 4.636339548457991e-08,
"loss": 1.4345,
"step": 20900
},
{
"epoch": 0.9773351328710383,
"grad_norm": 0.773059606552124,
"learning_rate": 3.205261156347783e-08,
"loss": 1.4591,
"step": 21000
},
{
"epoch": 0.9819891096942337,
"grad_norm": 0.8769505023956299,
"learning_rate": 2.0372264643713223e-08,
"loss": 1.4756,
"step": 21100
},
{
"epoch": 0.9866430865174292,
"grad_norm": 0.9209620356559753,
"learning_rate": 1.132543704660316e-08,
"loss": 1.4957,
"step": 21200
},
{
"epoch": 0.9912970633406246,
"grad_norm": 0.9970301389694214,
"learning_rate": 4.914516135275937e-09,
"loss": 1.4321,
"step": 21300
},
{
"epoch": 0.99595104016382,
"grad_norm": 0.6997882127761841,
"learning_rate": 1.1411936846705828e-09,
"loss": 1.4656,
"step": 21400
},
{
"epoch": 1.0,
"step": 21487,
"total_flos": 3.101747713212678e+18,
"train_loss": 1.5223615952344096,
"train_runtime": 9293.396,
"train_samples_per_second": 4.624,
"train_steps_per_second": 2.312
}
],
"logging_steps": 100,
"max_steps": 21487,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.101747713212678e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}
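
This file is the Trainer state saved at the end of a single-epoch LoRA fine-tuning run (21,487 steps, one log record every 100 steps, plus a final summary record with train_loss and runtime). Below is a minimal sketch of how the log_history could be inspected offline; the only assumption is that the file has been downloaded locally under its usual name, trainer_state.json.

import json

# Load the Trainer state; the local filename is an assumption.
with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]

# All but the last entry are per-100-step logging records (logging_steps: 100);
# the final entry is the end-of-training summary and carries no "loss" key.
steps = [rec["step"] for rec in history if "loss" in rec]
losses = [rec["loss"] for rec in history if "loss" in rec]
lrs = [rec["learning_rate"] for rec in history if "learning_rate" in rec]

print(f"logged points: {len(steps)}")
print(f"first/last logged training loss: {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"peak learning rate: {max(lrs):.2e}")
print(f"mean train loss reported by Trainer: {history[-1]['train_loss']:.4f}")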