|
{
  "best_metric": 13.93560791015625,
  "best_model_checkpoint": "./qwen2.5-0.5b/qwen2.5-0.5b-expo-L1EXPO-ES-1/checkpoint-450",
  "epoch": 1.9839395370807746,
  "eval_steps": 50,
  "global_step": 700,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "dpo_loss": 0.6931471824645996,
      "epoch": 0.002834199338686821,
      "grad_norm": 368.8500800599453,
      "learning_rate": 2.840909090909091e-08,
      "logits": -1.359458565711975,
      "logps": -84.69721221923828,
      "loss": 0.0051,
      "objective": 0.0046141319908201694,
      "ranking_idealized": 0.5833333134651184,
      "ranking_idealized_expo": 0.5833333134651184,
      "ranking_simple": 0.5833333134651184,
      "regularize": 0.0046141319908201694,
      "step": 1,
      "wo_beta": 14.840873718261719
    },
    {
      "dpo_loss": 0.7411907315254211,
      "epoch": 0.14170996693434104,
      "grad_norm": 326.1369710639882,
      "learning_rate": 1.4204545454545458e-06,
      "logits": -1.4541096687316895,
      "logps": -84.35031127929688,
      "loss": 0.4306,
      "objective": 0.4069998860359192,
      "ranking_idealized": 0.5289115905761719,
      "ranking_idealized_expo": 0.5221088528633118,
      "ranking_simple": 0.5225340127944946,
      "regularize": 0.4069998860359192,
      "step": 50,
      "wo_beta": 15.642387390136719
    },
    {
      "epoch": 0.14170996693434104,
      "eval_dpo_loss": 0.7632195949554443,
      "eval_logits": -1.428945779800415,
      "eval_logps": -90.4263687133789,
      "eval_loss": 0.5493111610412598,
      "eval_objective": 0.5433010458946228,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5212215185165405,
      "eval_regularize": 0.5433010458946228,
      "eval_runtime": 308.9459,
      "eval_samples_per_second": 18.741,
      "eval_steps_per_second": 1.563,
      "eval_wo_beta": 16.223697662353516,
      "step": 50
    },
    {
      "dpo_loss": 1.189667820930481,
      "epoch": 0.2834199338686821,
      "grad_norm": 271.670123611671,
      "learning_rate": 2.8409090909090916e-06,
      "logits": -1.3798158168792725,
      "logps": -82.86579895019531,
      "loss": 1.748,
      "objective": 1.777085542678833,
      "ranking_idealized": 0.5241666436195374,
      "ranking_idealized_expo": 0.5137500166893005,
      "ranking_simple": 0.5174999833106995,
      "regularize": 1.777085542678833,
      "step": 100,
      "wo_beta": 15.245094299316406
    },
    {
      "epoch": 0.2834199338686821,
      "eval_dpo_loss": 1.1354432106018066,
      "eval_logits": -1.2535114288330078,
      "eval_logps": -88.04913330078125,
      "eval_loss": 1.6975041627883911,
      "eval_objective": 1.6864262819290161,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.522774338722229,
      "eval_regularize": 1.6864262819290161,
      "eval_runtime": 308.5261,
      "eval_samples_per_second": 18.767,
      "eval_steps_per_second": 1.566,
      "eval_wo_beta": 15.683449745178223,
      "step": 100
    },
    {
      "dpo_loss": 1.6895211935043335,
      "epoch": 0.42512990080302315,
      "grad_norm": 221.9711378891906,
      "learning_rate": 4.2613636363636365e-06,
      "logits": -1.2630772590637207,
      "logps": -79.18020629882812,
      "loss": 2.8697,
      "objective": 2.862943172454834,
      "ranking_idealized": 0.5333333611488342,
      "ranking_idealized_expo": 0.527916669845581,
      "ranking_simple": 0.5199999809265137,
      "regularize": 2.862943172454834,
      "step": 150,
      "wo_beta": 15.206230163574219
    },
    {
      "epoch": 0.42512990080302315,
      "eval_dpo_loss": 1.6845829486846924,
      "eval_logits": -1.252400279045105,
      "eval_logps": -82.49666595458984,
      "eval_loss": 2.9623751640319824,
      "eval_objective": 2.892326593399048,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5243270993232727,
      "eval_regularize": 2.892326593399048,
      "eval_runtime": 308.7897,
      "eval_samples_per_second": 18.751,
      "eval_steps_per_second": 1.564,
      "eval_wo_beta": 15.196976661682129,
      "step": 150
    },
    {
      "dpo_loss": 2.04150390625,
      "epoch": 0.5668398677373642,
      "grad_norm": 196.4932514581971,
      "learning_rate": 4.997168347957521e-06,
      "logits": -1.0472546815872192,
      "logps": -75.09263610839844,
      "loss": 3.5268,
      "objective": 3.5377087593078613,
      "ranking_idealized": 0.5204166769981384,
      "ranking_idealized_expo": 0.51541668176651,
      "ranking_simple": 0.5066666603088379,
      "regularize": 3.5377087593078613,
      "step": 200,
      "wo_beta": 15.402359962463379
    },
    {
      "epoch": 0.5668398677373642,
      "eval_dpo_loss": 2.158970594406128,
      "eval_logits": -0.9581480622291565,
      "eval_logps": -75.9715576171875,
      "eval_loss": 4.030220031738281,
      "eval_objective": 3.9596574306488037,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.523809552192688,
      "eval_regularize": 3.9596574306488037,
      "eval_runtime": 308.8005,
      "eval_samples_per_second": 18.75,
      "eval_steps_per_second": 1.564,
      "eval_wo_beta": 14.579182624816895,
      "step": 200
    },
    {
      "dpo_loss": 2.0967283248901367,
      "epoch": 0.7085498346717053,
      "grad_norm": 186.71108857655122,
      "learning_rate": 4.973122855144066e-06,
      "logits": -0.8511989116668701,
      "logps": -75.66019439697266,
      "loss": 3.7241,
      "objective": 3.6993343830108643,
      "ranking_idealized": 0.5249999761581421,
      "ranking_idealized_expo": 0.5162500143051147,
      "ranking_simple": 0.5104166865348816,
      "regularize": 3.6993343830108643,
      "step": 250,
      "wo_beta": 15.4879789352417
    },
    {
      "epoch": 0.7085498346717053,
      "eval_dpo_loss": 2.3309643268585205,
      "eval_logits": -0.7680178880691528,
      "eval_logps": -81.3046875,
      "eval_loss": 4.269361972808838,
      "eval_objective": 4.272838115692139,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5258799195289612,
      "eval_regularize": 4.272838115692139,
      "eval_runtime": 308.4403,
      "eval_samples_per_second": 18.772,
      "eval_steps_per_second": 1.566,
      "eval_wo_beta": 14.561494827270508,
      "step": 250
    },
    {
      "dpo_loss": 1.9671478271484375,
      "epoch": 0.8502598016060463,
      "grad_norm": 175.55668911243956,
      "learning_rate": 4.924776641419513e-06,
      "logits": -0.6268683671951294,
      "logps": -79.04060363769531,
      "loss": 3.6109,
      "objective": 3.6469531059265137,
      "ranking_idealized": 0.5062500238418579,
      "ranking_idealized_expo": 0.4950000047683716,
      "ranking_simple": 0.5049999952316284,
      "regularize": 3.6469531059265137,
      "step": 300,
      "wo_beta": 15.15662956237793
    },
    {
      "epoch": 0.8502598016060463,
      "eval_dpo_loss": 2.4072353839874268,
      "eval_logits": -0.6388247609138489,
      "eval_logps": -83.98152923583984,
      "eval_loss": 4.49080228805542,
      "eval_objective": 4.45733642578125,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5263975262641907,
      "eval_regularize": 4.45733642578125,
      "eval_runtime": 308.8236,
      "eval_samples_per_second": 18.749,
      "eval_steps_per_second": 1.564,
      "eval_wo_beta": 14.346380233764648,
      "step": 300
    },
    {
      "dpo_loss": 1.8654496669769287,
      "epoch": 0.9919697685403873,
      "grad_norm": 165.9769670968443,
      "learning_rate": 4.8526047530778175e-06,
      "logits": -0.5482619404792786,
      "logps": -77.96940612792969,
      "loss": 3.36,
      "objective": 3.306919574737549,
      "ranking_idealized": 0.5354166626930237,
      "ranking_idealized_expo": 0.5254166722297668,
      "ranking_simple": 0.5224999785423279,
      "regularize": 3.306919574737549,
      "step": 350,
      "wo_beta": 15.03686237335205
    },
    {
      "epoch": 0.9919697685403873,
      "eval_dpo_loss": 2.499128580093384,
      "eval_logits": -0.5030297636985779,
      "eval_logps": -80.74913024902344,
      "eval_loss": 4.658600807189941,
      "eval_objective": 4.6211676597595215,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5212215185165405,
      "eval_regularize": 4.6211676597595215,
      "eval_runtime": 314.3306,
      "eval_samples_per_second": 18.42,
      "eval_steps_per_second": 1.537,
      "eval_wo_beta": 14.346673965454102,
      "step": 350
    },
    {
      "dpo_loss": 1.732424259185791,
      "epoch": 1.1336797354747283,
      "grad_norm": 163.81688300949705,
      "learning_rate": 4.757316345716554e-06,
      "logits": -0.5219219326972961,
      "logps": -77.66575622558594,
      "loss": 3.112,
      "objective": 3.121299982070923,
      "ranking_idealized": 0.5412499904632568,
      "ranking_idealized_expo": 0.5320833325386047,
      "ranking_simple": 0.53125,
      "regularize": 3.121299982070923,
      "step": 400,
      "wo_beta": 15.248611450195312
    },
    {
      "epoch": 1.1336797354747283,
      "eval_dpo_loss": 2.5402870178222656,
      "eval_logits": -0.5664270520210266,
      "eval_logps": -82.49735260009766,
      "eval_loss": 4.724371433258057,
      "eval_objective": 4.729303359985352,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5186335444450378,
      "eval_regularize": 4.729303359985352,
      "eval_runtime": 310.1606,
      "eval_samples_per_second": 18.668,
      "eval_steps_per_second": 1.557,
      "eval_wo_beta": 14.403800964355469,
      "step": 400
    },
    {
      "dpo_loss": 1.6898671388626099,
      "epoch": 1.2753897024090695,
      "grad_norm": 190.1252094592616,
      "learning_rate": 4.639847716126855e-06,
      "logits": -0.5678730010986328,
      "logps": -78.37519836425781,
      "loss": 2.9448,
      "objective": 3.0088882446289062,
      "ranking_idealized": 0.5245833396911621,
      "ranking_idealized_expo": 0.5191666483879089,
      "ranking_simple": 0.518750011920929,
      "regularize": 3.0088882446289062,
      "step": 450,
      "wo_beta": 15.990630149841309
    },
    {
      "epoch": 1.2753897024090695,
      "eval_dpo_loss": 2.573472738265991,
      "eval_logits": -0.6935593485832214,
      "eval_logps": -80.17532348632812,
      "eval_loss": 4.835423946380615,
      "eval_objective": 4.811424732208252,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5248447060585022,
      "eval_regularize": 4.811424732208252,
      "eval_runtime": 308.2089,
      "eval_samples_per_second": 18.786,
      "eval_steps_per_second": 1.567,
      "eval_wo_beta": 13.93560791015625,
      "step": 450
    },
    {
      "dpo_loss": 1.5981500148773193,
      "epoch": 1.4170996693434104,
      "grad_norm": 156.9344817637022,
      "learning_rate": 4.501353102310901e-06,
      "logits": -0.58225017786026,
      "logps": -76.96984100341797,
      "loss": 2.8517,
      "objective": 2.828920602798462,
      "ranking_idealized": 0.5054166913032532,
      "ranking_idealized_expo": 0.4970833361148834,
      "ranking_simple": 0.502916693687439,
      "regularize": 2.828920602798462,
      "step": 500,
      "wo_beta": 15.1870756149292
    },
    {
      "epoch": 1.4170996693434104,
      "eval_dpo_loss": 2.6782028675079346,
      "eval_logits": -0.597258448600769,
      "eval_logps": -80.76759338378906,
      "eval_loss": 5.004448413848877,
      "eval_objective": 5.005820274353027,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5269151329994202,
      "eval_regularize": 5.005820274353027,
      "eval_runtime": 308.5472,
      "eval_samples_per_second": 18.765,
      "eval_steps_per_second": 1.565,
      "eval_wo_beta": 14.262606620788574,
      "step": 500
    },
    {
      "dpo_loss": 1.574170708656311,
      "epoch": 1.5588096362777515,
      "grad_norm": 157.3420281051892,
      "learning_rate": 4.34319334202531e-06,
      "logits": -0.5697833299636841,
      "logps": -77.83871459960938,
      "loss": 2.632,
      "objective": 2.6769964694976807,
      "ranking_idealized": 0.5199999809265137,
      "ranking_idealized_expo": 0.5108333230018616,
      "ranking_simple": 0.5104166865348816,
      "regularize": 2.6769964694976807,
      "step": 550,
      "wo_beta": 15.042858123779297
    },
    {
      "epoch": 1.5588096362777515,
      "eval_dpo_loss": 2.5752413272857666,
      "eval_logits": -0.6148820519447327,
      "eval_logps": -80.52192687988281,
      "eval_loss": 4.877658843994141,
      "eval_objective": 4.884354114532471,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5222567319869995,
      "eval_regularize": 4.884354114532471,
      "eval_runtime": 309.1726,
      "eval_samples_per_second": 18.727,
      "eval_steps_per_second": 1.562,
      "eval_wo_beta": 14.146936416625977,
      "step": 550
    },
    {
      "dpo_loss": 1.4848108291625977,
      "epoch": 1.7005196032120926,
      "grad_norm": 158.42202757367144,
      "learning_rate": 4.16692250129073e-06,
      "logits": -0.5375781655311584,
      "logps": -77.73197937011719,
      "loss": 2.5208,
      "objective": 2.518798828125,
      "ranking_idealized": 0.5220833420753479,
      "ranking_idealized_expo": 0.5149999856948853,
      "ranking_simple": 0.5058333277702332,
      "regularize": 2.518798828125,
      "step": 600,
      "wo_beta": 15.039664268493652
    },
    {
      "epoch": 1.7005196032120926,
      "eval_dpo_loss": 2.5973868370056152,
      "eval_logits": -0.5874757766723633,
      "eval_logps": -80.17750549316406,
      "eval_loss": 4.925836086273193,
      "eval_objective": 4.962131500244141,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5243270993232727,
      "eval_regularize": 4.962131500244141,
      "eval_runtime": 308.6109,
      "eval_samples_per_second": 18.761,
      "eval_steps_per_second": 1.565,
      "eval_wo_beta": 14.266931533813477,
      "step": 600
    },
    {
      "dpo_loss": 1.4892569780349731,
      "epoch": 1.8422295701464337,
      "grad_norm": 161.21497067375822,
      "learning_rate": 3.974272604254906e-06,
      "logits": -0.4858429431915283,
      "logps": -77.9110107421875,
      "loss": 2.4198,
      "objective": 2.448315143585205,
      "ranking_idealized": 0.5408333539962769,
      "ranking_idealized_expo": 0.527916669845581,
      "ranking_simple": 0.527916669845581,
      "regularize": 2.448315143585205,
      "step": 650,
      "wo_beta": 15.80264663696289
    },
    {
      "epoch": 1.8422295701464337,
      "eval_dpo_loss": 2.6345465183258057,
      "eval_logits": -0.5440609455108643,
      "eval_logps": -81.05504608154297,
      "eval_loss": 5.032698154449463,
      "eval_objective": 5.045432090759277,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5269151329994202,
      "eval_regularize": 5.045432090759277,
      "eval_runtime": 315.2786,
      "eval_samples_per_second": 18.365,
      "eval_steps_per_second": 1.532,
      "eval_wo_beta": 14.247919082641602,
      "step": 650
    },
    {
      "dpo_loss": 1.3786677122116089,
      "epoch": 1.9839395370807746,
      "grad_norm": 165.2682511618685,
      "learning_rate": 3.767136614452458e-06,
      "logits": -0.4978749454021454,
      "logps": -77.35163116455078,
      "loss": 2.2699,
      "objective": 2.302328586578369,
      "ranking_idealized": 0.5170833468437195,
      "ranking_idealized_expo": 0.5112500190734863,
      "ranking_simple": 0.5095833539962769,
      "regularize": 2.302328586578369,
      "step": 700,
      "wo_beta": 15.442806243896484
    },
    {
      "epoch": 1.9839395370807746,
      "eval_dpo_loss": 2.629160165786743,
      "eval_logits": -0.559353232383728,
      "eval_logps": -79.73757934570312,
      "eval_loss": 4.9659104347229,
      "eval_objective": 4.995144844055176,
      "eval_ranking_idealized": 0.5295031070709229,
      "eval_ranking_idealized_expo": 0.5212215185165405,
      "eval_ranking_simple": 0.5212215185165405,
      "eval_regularize": 4.995144844055176,
      "eval_runtime": 308.0921,
      "eval_samples_per_second": 18.793,
      "eval_steps_per_second": 1.568,
      "eval_wo_beta": 14.175456047058105,
      "step": 700
    },
    {
      "epoch": 1.9839395370807746,
      "step": 700,
      "total_flos": 0.0,
      "train_loss": 2.7151880131661894,
      "train_runtime": 19357.2061,
      "train_samples_per_second": 13.122,
      "train_steps_per_second": 0.091
    }
  ],
  "logging_steps": 50,
  "max_steps": 1760,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}