|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 4040,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.2,
      "grad_norm": 13.334685325622559,
      "learning_rate": 5.7178217821782184e-06,
      "loss": 2.6835,
      "step": 80
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.5441126823425293,
      "learning_rate": 1.1584158415841584e-05,
      "loss": 2.3047,
      "step": 160
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.4855196475982666,
      "learning_rate": 1.7524752475247524e-05,
      "loss": 2.0103,
      "step": 240
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.429617166519165,
      "learning_rate": 2.3465346534653467e-05,
      "loss": 1.7695,
      "step": 320
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.6394236087799072,
      "learning_rate": 2.9405940594059407e-05,
      "loss": 1.7141,
      "step": 400
    },
    {
      "epoch": 1.19,
      "grad_norm": 4.821674823760986,
      "learning_rate": 2.9405940594059407e-05,
      "loss": 1.6202,
      "step": 480
    },
    {
      "epoch": 1.39,
      "grad_norm": 1.7676095962524414,
      "learning_rate": 2.8745874587458746e-05,
      "loss": 1.5316,
      "step": 560
    },
    {
      "epoch": 1.58,
      "grad_norm": 1.9537432193756104,
      "learning_rate": 2.8085808580858088e-05,
      "loss": 1.5497,
      "step": 640
    },
    {
      "epoch": 1.78,
      "grad_norm": 1.6451698541641235,
      "learning_rate": 2.7425742574257424e-05,
      "loss": 1.5092,
      "step": 720
    },
    {
      "epoch": 1.98,
      "grad_norm": 1.8162490129470825,
      "learning_rate": 2.6765676567656766e-05,
      "loss": 1.4958,
      "step": 800
    },
    {
      "epoch": 2.18,
      "grad_norm": 1.7349311113357544,
      "learning_rate": 2.610561056105611e-05,
      "loss": 1.4255,
      "step": 880
    },
    {
      "epoch": 2.38,
      "grad_norm": 2.0465753078460693,
      "learning_rate": 2.5445544554455447e-05,
      "loss": 1.4322,
      "step": 960
    },
    {
      "epoch": 2.57,
      "grad_norm": 2.079873561859131,
      "learning_rate": 2.4785478547854786e-05,
      "loss": 1.391,
      "step": 1040
    },
    {
      "epoch": 2.77,
      "grad_norm": 1.8764556646347046,
      "learning_rate": 2.4125412541254125e-05,
      "loss": 1.4254,
      "step": 1120
    },
    {
      "epoch": 2.97,
      "grad_norm": 2.345323324203491,
      "learning_rate": 2.3465346534653467e-05,
      "loss": 1.4177,
      "step": 1200
    },
    {
      "epoch": 3.17,
      "grad_norm": 2.4153122901916504,
      "learning_rate": 2.2805280528052803e-05,
      "loss": 1.31,
      "step": 1280
    },
    {
      "epoch": 3.37,
      "grad_norm": 2.362084150314331,
      "learning_rate": 2.2145214521452145e-05,
      "loss": 1.2966,
      "step": 1360
    },
    {
      "epoch": 3.56,
      "grad_norm": 2.2319586277008057,
      "learning_rate": 2.1485148514851487e-05,
      "loss": 1.3684,
      "step": 1440
    },
    {
      "epoch": 3.76,
      "grad_norm": 2.043566942214966,
      "learning_rate": 2.0825082508250826e-05,
      "loss": 1.392,
      "step": 1520
    },
    {
      "epoch": 3.96,
      "grad_norm": 2.2719879150390625,
      "learning_rate": 2.0165016501650165e-05,
      "loss": 1.3083,
      "step": 1600
    },
    {
      "epoch": 4.16,
      "grad_norm": 2.6207830905914307,
      "learning_rate": 1.9504950495049504e-05,
      "loss": 1.252,
      "step": 1680
    },
    {
      "epoch": 4.36,
      "grad_norm": 2.797555446624756,
      "learning_rate": 1.8844884488448846e-05,
      "loss": 1.2673,
      "step": 1760
    },
    {
      "epoch": 4.55,
      "grad_norm": 2.4675941467285156,
      "learning_rate": 1.8184818481848185e-05,
      "loss": 1.2311,
      "step": 1840
    },
    {
      "epoch": 4.75,
      "grad_norm": 2.450917959213257,
      "learning_rate": 1.7524752475247524e-05,
      "loss": 1.2466,
      "step": 1920
    },
    {
      "epoch": 4.95,
      "grad_norm": 2.9076478481292725,
      "learning_rate": 1.6864686468646866e-05,
      "loss": 1.3578,
      "step": 2000
    },
    {
      "epoch": 5.15,
      "grad_norm": 2.909982681274414,
      "learning_rate": 1.6204620462046205e-05,
      "loss": 1.2384,
      "step": 2080
    },
    {
      "epoch": 5.35,
      "grad_norm": 2.7267675399780273,
      "learning_rate": 1.5544554455445548e-05,
      "loss": 1.1951,
      "step": 2160
    },
    {
      "epoch": 5.54,
      "grad_norm": 3.54055118560791,
      "learning_rate": 1.4884488448844885e-05,
      "loss": 1.2141,
      "step": 2240
    },
    {
      "epoch": 5.74,
      "grad_norm": 2.72868275642395,
      "learning_rate": 1.4224422442244225e-05,
      "loss": 1.2374,
      "step": 2320
    },
    {
      "epoch": 5.94,
      "grad_norm": 2.7000038623809814,
      "learning_rate": 1.3564356435643564e-05,
      "loss": 1.2645,
      "step": 2400
    },
    {
      "epoch": 6.14,
      "grad_norm": 3.291171073913574,
      "learning_rate": 1.2904290429042905e-05,
      "loss": 1.1701,
      "step": 2480
    },
    {
      "epoch": 6.34,
      "grad_norm": 2.9283392429351807,
      "learning_rate": 1.2244224422442244e-05,
      "loss": 1.1559,
      "step": 2560
    },
    {
      "epoch": 6.53,
      "grad_norm": 3.4444239139556885,
      "learning_rate": 1.1584158415841584e-05,
      "loss": 1.1984,
      "step": 2640
    },
    {
      "epoch": 6.73,
      "grad_norm": 3.4505791664123535,
      "learning_rate": 1.0924092409240923e-05,
      "loss": 1.1956,
      "step": 2720
    },
    {
      "epoch": 6.93,
      "grad_norm": 3.7554521560668945,
      "learning_rate": 1.0264026402640264e-05,
      "loss": 1.1743,
      "step": 2800
    },
    {
      "epoch": 7.13,
      "grad_norm": 3.4612154960632324,
      "learning_rate": 9.603960396039604e-06,
      "loss": 1.1505,
      "step": 2880
    },
    {
      "epoch": 7.33,
      "grad_norm": 3.5600943565368652,
      "learning_rate": 8.943894389438945e-06,
      "loss": 1.1334,
      "step": 2960
    },
    {
      "epoch": 7.52,
      "grad_norm": 3.4435224533081055,
      "learning_rate": 8.283828382838284e-06,
      "loss": 1.1477,
      "step": 3040
    },
    {
      "epoch": 7.72,
      "grad_norm": 3.559251546859741,
      "learning_rate": 7.6237623762376246e-06,
      "loss": 1.1525,
      "step": 3120
    },
    {
      "epoch": 7.92,
      "grad_norm": 4.093379497528076,
      "learning_rate": 6.9636963696369635e-06,
      "loss": 1.1258,
      "step": 3200
    },
    {
      "epoch": 8.12,
      "grad_norm": 3.836825370788574,
      "learning_rate": 6.303630363036304e-06,
      "loss": 1.092,
      "step": 3280
    },
    {
      "epoch": 8.32,
      "grad_norm": 3.719158411026001,
      "learning_rate": 5.643564356435644e-06,
      "loss": 1.1118,
      "step": 3360
    },
    {
      "epoch": 8.51,
      "grad_norm": 4.2379374504089355,
      "learning_rate": 4.9834983498349835e-06,
      "loss": 1.1066,
      "step": 3440
    },
    {
      "epoch": 8.71,
      "grad_norm": 4.96703577041626,
      "learning_rate": 4.323432343234323e-06,
      "loss": 1.1161,
      "step": 3520
    },
    {
      "epoch": 8.91,
      "grad_norm": 9.020540237426758,
      "learning_rate": 3.6633663366336635e-06,
      "loss": 1.129,
      "step": 3600
    },
    {
      "epoch": 9.11,
      "grad_norm": 3.624774217605591,
      "learning_rate": 3.003300330033003e-06,
      "loss": 1.097,
      "step": 3680
    },
    {
      "epoch": 9.31,
      "grad_norm": 3.6332197189331055,
      "learning_rate": 2.3432343234323434e-06,
      "loss": 1.074,
      "step": 3760
    },
    {
      "epoch": 9.5,
      "grad_norm": 4.2010579109191895,
      "learning_rate": 1.683168316831683e-06,
      "loss": 1.0808,
      "step": 3840
    },
    {
      "epoch": 9.7,
      "grad_norm": 3.434506416320801,
      "learning_rate": 1.023102310231023e-06,
      "loss": 1.0774,
      "step": 3920
    },
    {
      "epoch": 9.9,
      "grad_norm": 4.213884353637695,
      "learning_rate": 3.63036303630363e-07,
      "loss": 1.1134,
      "step": 4000
    }
  ],
  "logging_steps": 80,
  "max_steps": 4040,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 9.936130988310528e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}