{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 48,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0625,
      "grad_norm": 0.04137394207755112,
      "learning_rate": 1e-05,
      "loss": 0.1671,
      "step": 1
    },
    {
      "epoch": 0.125,
      "grad_norm": 0.037803193435615265,
      "learning_rate": 2e-05,
      "loss": 0.167,
      "step": 2
    },
    {
      "epoch": 0.1875,
      "grad_norm": 0.04445740088277594,
      "learning_rate": 3e-05,
      "loss": 0.1607,
      "step": 3
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.0380231527607073,
      "learning_rate": 4e-05,
      "loss": 0.1591,
      "step": 4
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.046362646963448,
      "learning_rate": 5e-05,
      "loss": 0.1597,
      "step": 5
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.05235320825077946,
      "learning_rate": 4.9933307091588796e-05,
      "loss": 0.1616,
      "step": 6
    },
    {
      "epoch": 0.4375,
      "grad_norm": 0.05740591760676448,
      "learning_rate": 4.973358420187776e-05,
      "loss": 0.1459,
      "step": 7
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.04283964782544257,
      "learning_rate": 4.9401896938898185e-05,
      "loss": 0.1332,
      "step": 8
    },
    {
      "epoch": 0.5625,
      "grad_norm": 0.036679242954440874,
      "learning_rate": 4.894001499771015e-05,
      "loss": 0.1293,
      "step": 9
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.033723367633845105,
      "learning_rate": 4.83504027183137e-05,
      "loss": 0.1342,
      "step": 10
    },
    {
      "epoch": 0.6875,
      "grad_norm": 0.037227526954279944,
      "learning_rate": 4.763620593732867e-05,
      "loss": 0.128,
      "step": 11
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.044207038446114774,
      "learning_rate": 4.6801235203595195e-05,
      "loss": 0.1275,
      "step": 12
    },
    {
      "epoch": 0.8125,
      "grad_norm": 0.04394682449442954,
      "learning_rate": 4.584994544724695e-05,
      "loss": 0.1142,
      "step": 13
    },
    {
      "epoch": 0.875,
      "grad_norm": 0.03406265181060845,
      "learning_rate": 4.478741221073136e-05,
      "loss": 0.1161,
      "step": 14
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.030054446617056498,
      "learning_rate": 4.361930456859455e-05,
      "loss": 0.1087,
      "step": 15
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.028409394434840918,
      "learning_rate": 4.235185488051585e-05,
      "loss": 0.1126,
      "step": 16
    },
    {
      "epoch": 1.0625,
      "grad_norm": 0.023153227915814832,
      "learning_rate": 4.099182553897229e-05,
      "loss": 0.1054,
      "step": 17
    },
    {
      "epoch": 1.125,
      "grad_norm": 0.021598055829915037,
      "learning_rate": 3.954647288894883e-05,
      "loss": 0.1007,
      "step": 18
    },
    {
      "epoch": 1.1875,
      "grad_norm": 0.02123540151753295,
      "learning_rate": 3.8023508512198256e-05,
      "loss": 0.1001,
      "step": 19
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.02137306513970009,
      "learning_rate": 3.6431058082615964e-05,
      "loss": 0.0968,
      "step": 20
    },
    {
      "epoch": 1.3125,
      "grad_norm": 0.02433397130782817,
      "learning_rate": 3.47776180122539e-05,
      "loss": 0.0983,
      "step": 21
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.021942525621062504,
      "learning_rate": 3.307201011928616e-05,
      "loss": 0.0935,
      "step": 22
    },
    {
      "epoch": 1.4375,
      "grad_norm": 0.021261141593508576,
      "learning_rate": 3.132333455979202e-05,
      "loss": 0.0887,
      "step": 23
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.02142792591828939,
      "learning_rate": 2.954092127448591e-05,
      "loss": 0.0888,
      "step": 24
    },
    {
      "epoch": 1.5625,
      "grad_norm": 0.020750630946201602,
      "learning_rate": 2.7734280209446865e-05,
      "loss": 0.0891,
      "step": 25
    },
    {
      "epoch": 1.625,
      "grad_norm": 0.019872557078390127,
      "learning_rate": 2.5913050576441477e-05,
      "loss": 0.0872,
      "step": 26
    },
    {
      "epoch": 1.6875,
      "grad_norm": 0.019131650653635524,
      "learning_rate": 2.4086949423558526e-05,
      "loss": 0.0835,
      "step": 27
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.019027351901529727,
      "learning_rate": 2.2265719790553147e-05,
      "loss": 0.0827,
      "step": 28
    },
    {
      "epoch": 1.8125,
      "grad_norm": 0.019517019388363612,
      "learning_rate": 2.0459078725514092e-05,
      "loss": 0.0769,
      "step": 29
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.019073963774100196,
      "learning_rate": 1.867666544020798e-05,
      "loss": 0.0768,
      "step": 30
    },
    {
      "epoch": 1.9375,
      "grad_norm": 0.020535779704439937,
      "learning_rate": 1.692798988071385e-05,
      "loss": 0.0703,
      "step": 31
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.01735083491599702,
      "learning_rate": 1.5222381987746104e-05,
      "loss": 0.0721,
      "step": 32
    },
    {
      "epoch": 2.0625,
      "grad_norm": 0.016996572618955864,
      "learning_rate": 1.3568941917384036e-05,
      "loss": 0.0729,
      "step": 33
    },
    {
      "epoch": 2.125,
      "grad_norm": 0.016947999154191592,
      "learning_rate": 1.1976491487801748e-05,
      "loss": 0.0683,
      "step": 34
    },
    {
      "epoch": 2.1875,
      "grad_norm": 0.01759855889883996,
      "learning_rate": 1.0453527111051184e-05,
      "loss": 0.0682,
      "step": 35
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.016563435997106427,
      "learning_rate": 9.008174461027724e-06,
      "loss": 0.0701,
      "step": 36
    },
    {
      "epoch": 2.3125,
      "grad_norm": 0.0154902825546126,
      "learning_rate": 7.648145119484152e-06,
      "loss": 0.0673,
      "step": 37
    },
    {
      "epoch": 2.375,
      "grad_norm": 0.01691672911978778,
      "learning_rate": 6.380695431405453e-06,
      "loss": 0.0694,
      "step": 38
    },
    {
      "epoch": 2.4375,
      "grad_norm": 0.01530029785885113,
      "learning_rate": 5.2125877892686496e-06,
      "loss": 0.0681,
      "step": 39
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.017885100797614902,
      "learning_rate": 4.150054552753055e-06,
      "loss": 0.0627,
      "step": 40
    },
    {
      "epoch": 2.5625,
      "grad_norm": 0.016638045960609936,
      "learning_rate": 3.198764796404807e-06,
      "loss": 0.0669,
      "step": 41
    },
    {
      "epoch": 2.625,
      "grad_norm": 0.016208072151038286,
      "learning_rate": 2.3637940626713346e-06,
      "loss": 0.0678,
      "step": 42
    },
    {
      "epoch": 2.6875,
      "grad_norm": 0.016401199651084344,
      "learning_rate": 1.649597281686302e-06,
      "loss": 0.0649,
      "step": 43
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.01598951472231698,
      "learning_rate": 1.0599850022898539e-06,
      "loss": 0.0674,
      "step": 44
    },
    {
      "epoch": 2.8125,
      "grad_norm": 0.015277414785260311,
      "learning_rate": 5.981030611018234e-07,
      "loss": 0.0634,
      "step": 45
    },
    {
      "epoch": 2.875,
      "grad_norm": 0.01537912920538106,
      "learning_rate": 2.664157981222437e-07,
      "loss": 0.0637,
      "step": 46
    },
    {
      "epoch": 2.9375,
      "grad_norm": 0.015282877713047758,
      "learning_rate": 6.66929084112089e-08,
      "loss": 0.0667,
      "step": 47
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.01629157768683682,
      "learning_rate": 0.0,
      "loss": 0.0622,
      "step": 48
    },
    {
      "epoch": 3.0,
      "step": 48,
      "total_flos": 882865236606976.0,
      "train_loss": 0.0980342753076305,
      "train_runtime": 1550.4315,
      "train_samples_per_second": 0.484,
      "train_steps_per_second": 0.031
    }
  ],
  "logging_steps": 1,
  "max_steps": 48,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 882865236606976.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}