{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 96444,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.020737422753100244,
"grad_norm": 20.540651321411133,
"learning_rate": 9.99882812805155e-05,
"loss": 1.9776,
"step": 1000
},
{
"epoch": 0.04147484550620049,
"grad_norm": 30.21478271484375,
"learning_rate": 9.99530365619421e-05,
"loss": 1.7739,
"step": 2000
},
{
"epoch": 0.062212268259300735,
"grad_norm": 27.370466232299805,
"learning_rate": 9.989421180143837e-05,
"loss": 1.6948,
"step": 3000
},
{
"epoch": 0.08294969101240098,
"grad_norm": 22.415775299072266,
"learning_rate": 9.981204639939725e-05,
"loss": 1.6479,
"step": 4000
},
{
"epoch": 0.10368711376550123,
"grad_norm": 24.335695266723633,
"learning_rate": 9.970648480056281e-05,
"loss": 1.6206,
"step": 5000
},
{
"epoch": 0.12442453651860147,
"grad_norm": 16.182706832885742,
"learning_rate": 9.957729485147165e-05,
"loss": 1.6098,
"step": 6000
},
{
"epoch": 0.14516195927170172,
"grad_norm": 21.0506591796875,
"learning_rate": 9.942472554567945e-05,
"loss": 1.5848,
"step": 7000
},
{
"epoch": 0.16589938202480195,
"grad_norm": 30.43536376953125,
"learning_rate": 9.924884883088391e-05,
"loss": 1.5733,
"step": 8000
},
{
"epoch": 0.1866368047779022,
"grad_norm": 26.65853500366211,
"learning_rate": 9.904974764594772e-05,
"loss": 1.5936,
"step": 9000
},
{
"epoch": 0.20737422753100246,
"grad_norm": 36.18627166748047,
"learning_rate": 9.882751588178679e-05,
"loss": 1.587,
"step": 10000
},
{
"epoch": 0.22811165028410268,
"grad_norm": 27.128538131713867,
"learning_rate": 9.858251505842305e-05,
"loss": 1.5691,
"step": 11000
},
{
"epoch": 0.24884907303720294,
"grad_norm": 23.077016830444336,
"learning_rate": 9.831464978629577e-05,
"loss": 1.5904,
"step": 12000
},
{
"epoch": 0.2695864957903032,
"grad_norm": 22.664880752563477,
"learning_rate": 9.802374388946667e-05,
"loss": 1.5434,
"step": 13000
},
{
"epoch": 0.29032391854340345,
"grad_norm": 21.860233306884766,
"learning_rate": 9.771019124997294e-05,
"loss": 1.5425,
"step": 14000
},
{
"epoch": 0.31106134129650365,
"grad_norm": 30.90904998779297,
"learning_rate": 9.737413973104559e-05,
"loss": 1.5303,
"step": 15000
},
{
"epoch": 0.3317987640496039,
"grad_norm": 21.742000579833984,
"learning_rate": 9.701574780579928e-05,
"loss": 1.5319,
"step": 16000
},
{
"epoch": 0.35253618680270415,
"grad_norm": 24.014293670654297,
"learning_rate": 9.663518448250048e-05,
"loss": 1.5201,
"step": 17000
},
{
"epoch": 0.3732736095558044,
"grad_norm": 33.67621994018555,
"learning_rate": 9.623304270277256e-05,
"loss": 1.4909,
"step": 18000
},
{
"epoch": 0.39401103230890466,
"grad_norm": 33.285911560058594,
"learning_rate": 9.580870704919335e-05,
"loss": 1.5133,
"step": 19000
},
{
"epoch": 0.4147484550620049,
"grad_norm": 9.94383430480957,
"learning_rate": 9.536322586542759e-05,
"loss": 1.5151,
"step": 20000
},
{
"epoch": 0.4354858778151051,
"grad_norm": 24.539302825927734,
"learning_rate": 9.48959174096231e-05,
"loss": 1.4926,
"step": 21000
},
{
"epoch": 0.45622330056820537,
"grad_norm": 37.36747360229492,
"learning_rate": 9.440793619202431e-05,
"loss": 1.4951,
"step": 22000
},
{
"epoch": 0.4769607233213056,
"grad_norm": 29.227783203125,
"learning_rate": 9.389853543087878e-05,
"loss": 1.4896,
"step": 23000
},
{
"epoch": 0.4976981460744059,
"grad_norm": 17.51339340209961,
"learning_rate": 9.336897362769643e-05,
"loss": 1.4734,
"step": 24000
},
{
"epoch": 0.5184355688275061,
"grad_norm": 25.80245590209961,
"learning_rate": 9.281844036790159e-05,
"loss": 1.4693,
"step": 25000
},
{
"epoch": 0.5391729915806064,
"grad_norm": 14.959226608276367,
"learning_rate": 9.224829577301657e-05,
"loss": 1.4262,
"step": 26000
},
{
"epoch": 0.5599104143337066,
"grad_norm": 25.03213882446289,
"learning_rate": 9.16576673226157e-05,
"loss": 1.466,
"step": 27000
},
{
"epoch": 0.5806478370868069,
"grad_norm": 20.574718475341797,
"learning_rate": 9.104801419550931e-05,
"loss": 1.4421,
"step": 28000
},
{
"epoch": 0.6013852598399071,
"grad_norm": 15.672101974487305,
"learning_rate": 9.041840340941403e-05,
"loss": 1.4617,
"step": 29000
},
{
"epoch": 0.6221226825930073,
"grad_norm": 86.42732238769531,
"learning_rate": 8.977039045106139e-05,
"loss": 1.461,
"step": 30000
},
{
"epoch": 0.6428601053461076,
"grad_norm": 17.8015193939209,
"learning_rate": 8.910298363422414e-05,
"loss": 1.435,
"step": 31000
},
{
"epoch": 0.6635975280992078,
"grad_norm": 22.220247268676758,
"learning_rate": 8.841783182272579e-05,
"loss": 1.4621,
"step": 32000
},
{
"epoch": 0.6843349508523081,
"grad_norm": 23.3125057220459,
"learning_rate": 8.771388649491834e-05,
"loss": 1.4443,
"step": 33000
},
{
"epoch": 0.7050723736054083,
"grad_norm": 24.63458824157715,
"learning_rate": 8.699288678494833e-05,
"loss": 1.4285,
"step": 34000
},
{
"epoch": 0.7258097963585086,
"grad_norm": 31.984939575195312,
"learning_rate": 8.62537293113586e-05,
"loss": 1.4186,
"step": 35000
},
{
"epoch": 0.7465472191116088,
"grad_norm": 15.957398414611816,
"learning_rate": 8.549824020176332e-05,
"loss": 1.4178,
"step": 36000
},
{
"epoch": 0.767284641864709,
"grad_norm": 9.767908096313477,
"learning_rate": 8.472526329387413e-05,
"loss": 1.4196,
"step": 37000
},
{
"epoch": 0.7880220646178093,
"grad_norm": 21.11347007751465,
"learning_rate": 8.39367082680055e-05,
"loss": 1.4297,
"step": 38000
},
{
"epoch": 0.8087594873709095,
"grad_norm": 17.112916946411133,
"learning_rate": 8.313136835946469e-05,
"loss": 1.4289,
"step": 39000
},
{
"epoch": 0.8294969101240098,
"grad_norm": 22.636119842529297,
"learning_rate": 8.231123320307017e-05,
"loss": 1.4379,
"step": 40000
},
{
"epoch": 0.85023433287711,
"grad_norm": 20.8730411529541,
"learning_rate": 8.147504770549608e-05,
"loss": 1.4128,
"step": 41000
},
{
"epoch": 0.8709717556302102,
"grad_norm": 28.386409759521484,
"learning_rate": 8.062487770721927e-05,
"loss": 1.4206,
"step": 42000
},
{
"epoch": 0.8917091783833105,
"grad_norm": 27.424724578857422,
"learning_rate": 7.975942215111235e-05,
"loss": 1.4027,
"step": 43000
},
{
"epoch": 0.9124466011364107,
"grad_norm": 15.341107368469238,
"learning_rate": 7.888081919087909e-05,
"loss": 1.403,
"step": 44000
},
{
"epoch": 0.933184023889511,
"grad_norm": 22.614416122436523,
"learning_rate": 7.798772425702647e-05,
"loss": 1.3967,
"step": 45000
},
{
"epoch": 0.9539214466426112,
"grad_norm": 20.52458381652832,
"learning_rate": 7.708234378780219e-05,
"loss": 1.4162,
"step": 46000
},
{
"epoch": 0.9746588693957114,
"grad_norm": 32.40896224975586,
"learning_rate": 7.616329223476899e-05,
"loss": 1.3787,
"step": 47000
},
{
"epoch": 0.9953962921488118,
"grad_norm": 18.904207229614258,
"learning_rate": 7.523284016337446e-05,
"loss": 1.3956,
"step": 48000
},
{
"epoch": 1.016133714901912,
"grad_norm": 20.65373992919922,
"learning_rate": 7.428956365687059e-05,
"loss": 1.3663,
"step": 49000
},
{
"epoch": 1.0368711376550122,
"grad_norm": 21.831396102905273,
"learning_rate": 7.333579312973313e-05,
"loss": 1.343,
"step": 50000
},
{
"epoch": 1.0576085604081125,
"grad_norm": 24.656251907348633,
"learning_rate": 7.237006897982989e-05,
"loss": 1.3607,
"step": 51000
},
{
"epoch": 1.0783459831612128,
"grad_norm": 52.06132888793945,
"learning_rate": 7.139477707972635e-05,
"loss": 1.3893,
"step": 52000
},
{
"epoch": 1.099083405914313,
"grad_norm": 21.86118507385254,
"learning_rate": 7.040842489207055e-05,
"loss": 1.3437,
"step": 53000
},
{
"epoch": 1.1198208286674132,
"grad_norm": 56.47176742553711,
"learning_rate": 6.941344925208578e-05,
"loss": 1.3569,
"step": 54000
},
{
"epoch": 1.1405582514205135,
"grad_norm": 16.8314266204834,
"learning_rate": 6.840832749942099e-05,
"loss": 1.3791,
"step": 55000
},
{
"epoch": 1.1612956741736138,
"grad_norm": 20.479339599609375,
"learning_rate": 6.739656081224776e-05,
"loss": 1.3736,
"step": 56000
},
{
"epoch": 1.1820330969267139,
"grad_norm": 17.306598663330078,
"learning_rate": 6.637559722307945e-05,
"loss": 1.3796,
"step": 57000
},
{
"epoch": 1.2027705196798142,
"grad_norm": 19.848777770996094,
"learning_rate": 6.53458933666453e-05,
"loss": 1.363,
"step": 58000
},
{
"epoch": 1.2235079424329145,
"grad_norm": 21.40498161315918,
"learning_rate": 6.430895278791739e-05,
"loss": 1.3295,
"step": 59000
},
{
"epoch": 1.2442453651860146,
"grad_norm": 25.833173751831055,
"learning_rate": 6.326526448097541e-05,
"loss": 1.3684,
"step": 60000
},
{
"epoch": 1.264982787939115,
"grad_norm": 30.932321548461914,
"learning_rate": 6.22153206219513e-05,
"loss": 1.3484,
"step": 61000
},
{
"epoch": 1.2857202106922152,
"grad_norm": 33.62134552001953,
"learning_rate": 6.115961633693192e-05,
"loss": 1.3824,
"step": 62000
},
{
"epoch": 1.3064576334453153,
"grad_norm": 54.18892288208008,
"learning_rate": 6.009971289766962e-05,
"loss": 1.3491,
"step": 63000
},
{
"epoch": 1.3271950561984156,
"grad_norm": 19.27858543395996,
"learning_rate": 5.9033988281596217e-05,
"loss": 1.3557,
"step": 64000
},
{
"epoch": 1.347932478951516,
"grad_norm": 21.885740280151367,
"learning_rate": 5.79650754177154e-05,
"loss": 1.3392,
"step": 65000
},
{
"epoch": 1.3686699017046162,
"grad_norm": 17.554424285888672,
"learning_rate": 5.689133849589161e-05,
"loss": 1.3549,
"step": 66000
},
{
"epoch": 1.3894073244577165,
"grad_norm": 21.63842010498047,
"learning_rate": 5.581543023995348e-05,
"loss": 1.3568,
"step": 67000
},
{
"epoch": 1.4101447472108166,
"grad_norm": 34.07453155517578,
"learning_rate": 5.473570413437158e-05,
"loss": 1.3591,
"step": 68000
},
{
"epoch": 1.430882169963917,
"grad_norm": 101.57919311523438,
"learning_rate": 5.365482769956962e-05,
"loss": 1.349,
"step": 69000
},
{
"epoch": 1.4516195927170172,
"grad_norm": 21.896757125854492,
"learning_rate": 5.257114681696876e-05,
"loss": 1.3576,
"step": 70000
},
{
"epoch": 1.4723570154701173,
"grad_norm": 14.788351058959961,
"learning_rate": 5.1487338777478425e-05,
"loss": 1.3576,
"step": 71000
},
{
"epoch": 1.4930944382232176,
"grad_norm": 25.063215255737305,
"learning_rate": 5.040174497614031e-05,
"loss": 1.3279,
"step": 72000
},
{
"epoch": 1.513831860976318,
"grad_norm": 4.722126483917236,
"learning_rate": 4.9317047429830715e-05,
"loss": 1.3415,
"step": 73000
},
{
"epoch": 1.534569283729418,
"grad_norm": 65.70944213867188,
"learning_rate": 4.823158617233515e-05,
"loss": 1.3183,
"step": 74000
},
{
"epoch": 1.5553067064825183,
"grad_norm": 152.00205993652344,
"learning_rate": 4.714804289307336e-05,
"loss": 1.3519,
"step": 75000
},
{
"epoch": 1.5760441292356187,
"grad_norm": 17.17304039001465,
"learning_rate": 4.606475939226272e-05,
"loss": 1.351,
"step": 76000
},
{
"epoch": 1.5967815519887187,
"grad_norm": 120.4775619506836,
"learning_rate": 4.49844119790599e-05,
"loss": 1.3136,
"step": 77000
},
{
"epoch": 1.617518974741819,
"grad_norm": 21.159122467041016,
"learning_rate": 4.390534734447391e-05,
"loss": 1.3288,
"step": 78000
},
{
"epoch": 1.6382563974949194,
"grad_norm": 26.251781463623047,
"learning_rate": 4.283023137472944e-05,
"loss": 1.3531,
"step": 79000
},
{
"epoch": 1.6589938202480194,
"grad_norm": 33.39356231689453,
"learning_rate": 4.175741876677174e-05,
"loss": 1.3134,
"step": 80000
},
{
"epoch": 1.67973124300112,
"grad_norm": 40.677818298339844,
"learning_rate": 4.068955996086312e-05,
"loss": 1.3367,
"step": 81000
},
{
"epoch": 1.70046866575422,
"grad_norm": 34.41468048095703,
"learning_rate": 3.962502075994506e-05,
"loss": 1.2973,
"step": 82000
},
{
"epoch": 1.7212060885073202,
"grad_norm": 17.568490982055664,
"learning_rate": 3.85664311643904e-05,
"loss": 1.3487,
"step": 83000
},
{
"epoch": 1.7419435112604207,
"grad_norm": 13.796709060668945,
"learning_rate": 3.751217116227084e-05,
"loss": 1.3539,
"step": 84000
},
{
"epoch": 1.7626809340135208,
"grad_norm": 35.149253845214844,
"learning_rate": 3.6464845358655265e-05,
"loss": 1.3311,
"step": 85000
},
{
"epoch": 1.783418356766621,
"grad_norm": 24.549659729003906,
"learning_rate": 3.542285097915233e-05,
"loss": 1.3532,
"step": 86000
},
{
"epoch": 1.8041557795197214,
"grad_norm": 42.3192253112793,
"learning_rate": 3.438876232596179e-05,
"loss": 1.3162,
"step": 87000
},
{
"epoch": 1.8248932022728215,
"grad_norm": 315.5584716796875,
"learning_rate": 3.3362020808741954e-05,
"loss": 1.3509,
"step": 88000
},
{
"epoch": 1.8456306250259218,
"grad_norm": 56.11149597167969,
"learning_rate": 3.234209379660055e-05,
"loss": 1.3382,
"step": 89000
},
{
"epoch": 1.866368047779022,
"grad_norm": 45.25877380371094,
"learning_rate": 3.133049379158929e-05,
"loss": 1.3153,
"step": 90000
},
{
"epoch": 1.8871054705321222,
"grad_norm": 133.5435028076172,
"learning_rate": 3.0327697837835517e-05,
"loss": 1.3202,
"step": 91000
},
{
"epoch": 1.9078428932852225,
"grad_norm": 35.96080017089844,
"learning_rate": 2.9335167556568954e-05,
"loss": 1.3029,
"step": 92000
},
{
"epoch": 1.9285803160383228,
"grad_norm": 23.67695426940918,
"learning_rate": 2.8351384029691792e-05,
"loss": 1.3102,
"step": 93000
},
{
"epoch": 1.949317738791423,
"grad_norm": 39.97007751464844,
"learning_rate": 2.7378777746415062e-05,
"loss": 1.3408,
"step": 94000
},
{
"epoch": 1.9700551615445232,
"grad_norm": 20.980594635009766,
"learning_rate": 2.6415860279318838e-05,
"loss": 1.3306,
"step": 95000
},
{
"epoch": 1.9907925842976235,
"grad_norm": 152.01014709472656,
"learning_rate": 2.5465010566910364e-05,
"loss": 1.285,
"step": 96000
}
],
"logging_steps": 1000,
"max_steps": 144666,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 10000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.7486851145662464e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}