{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.97196261682243,
"eval_steps": 500,
"global_step": 159,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.47,
"grad_norm": 0.35741835832595825,
"learning_rate": 2.8111888111888114e-05,
"loss": 0.933,
"step": 25
},
{
"epoch": 0.93,
"grad_norm": 0.3661482334136963,
"learning_rate": 2.2867132867132867e-05,
"loss": 0.5449,
"step": 50
},
{
"epoch": 1.4,
"grad_norm": 0.8964653015136719,
"learning_rate": 1.7622377622377624e-05,
"loss": 0.4173,
"step": 75
},
{
"epoch": 1.87,
"grad_norm": 0.42400529980659485,
"learning_rate": 1.2377622377622379e-05,
"loss": 0.3661,
"step": 100
},
{
"epoch": 2.34,
"grad_norm": 0.495869517326355,
"learning_rate": 7.132867132867133e-06,
"loss": 0.3118,
"step": 125
},
{
"epoch": 2.8,
"grad_norm": 0.6983396410942078,
"learning_rate": 1.8881118881118883e-06,
"loss": 0.2988,
"step": 150
}
],
"logging_steps": 25,
"max_steps": 159,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 2.5974963525648384e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}