{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "grad_norm": 0.4841386675834656,
      "learning_rate": 4.997807075247146e-05,
      "loss": 0.688,
      "num_input_tokens_seen": 46776,
      "step": 5
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.3365215063095093,
      "learning_rate": 4.991232148123761e-05,
      "loss": 0.763,
      "num_input_tokens_seen": 85136,
      "step": 10
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.5198595523834229,
      "learning_rate": 4.980286753286195e-05,
      "loss": 0.6882,
      "num_input_tokens_seen": 126584,
      "step": 15
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.40807807445526123,
      "learning_rate": 4.964990092676263e-05,
      "loss": 0.6951,
      "num_input_tokens_seen": 167968,
      "step": 20
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.439756840467453,
      "learning_rate": 4.9453690018345144e-05,
      "loss": 0.5008,
      "num_input_tokens_seen": 206832,
      "step": 25
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.45932796597480774,
      "learning_rate": 4.9214579028215776e-05,
      "loss": 0.542,
      "num_input_tokens_seen": 243656,
      "step": 30
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.3281826376914978,
      "learning_rate": 4.893298743830168e-05,
      "loss": 0.5369,
      "num_input_tokens_seen": 284016,
      "step": 35
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.2551577389240265,
      "learning_rate": 4.860940925593703e-05,
      "loss": 0.4948,
      "num_input_tokens_seen": 327408,
      "step": 40
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.5936368107795715,
      "learning_rate": 4.8244412147206284e-05,
      "loss": 0.5244,
      "num_input_tokens_seen": 363376,
      "step": 45
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.2480110377073288,
      "learning_rate": 4.783863644106502e-05,
      "loss": 0.421,
      "num_input_tokens_seen": 398840,
      "step": 50
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.4601157307624817,
      "learning_rate": 4.7392794005985326e-05,
      "loss": 0.4517,
      "num_input_tokens_seen": 436136,
      "step": 55
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.6387960314750671,
      "learning_rate": 4.690766700109659e-05,
      "loss": 0.4661,
      "num_input_tokens_seen": 475856,
      "step": 60
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.4365377128124237,
      "learning_rate": 4.638410650401267e-05,
      "loss": 0.4928,
      "num_input_tokens_seen": 514496,
      "step": 65
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.2992519438266754,
      "learning_rate": 4.5823031017752485e-05,
      "loss": 0.5424,
      "num_input_tokens_seen": 554424,
      "step": 70
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.828787088394165,
      "learning_rate": 4.522542485937369e-05,
      "loss": 0.5419,
      "num_input_tokens_seen": 593264,
      "step": 75
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.2943418622016907,
      "learning_rate": 4.4592336433146e-05,
      "loss": 0.4558,
      "num_input_tokens_seen": 630264,
      "step": 80
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.3707512617111206,
      "learning_rate": 4.3924876391293915e-05,
      "loss": 0.5656,
      "num_input_tokens_seen": 668864,
      "step": 85
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.4809654653072357,
      "learning_rate": 4.3224215685535294e-05,
      "loss": 0.4832,
      "num_input_tokens_seen": 712504,
      "step": 90
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.516292929649353,
      "learning_rate": 4.249158351283414e-05,
      "loss": 0.4626,
      "num_input_tokens_seen": 748872,
      "step": 95
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.5604212284088135,
      "learning_rate": 4.172826515897146e-05,
      "loss": 0.4837,
      "num_input_tokens_seen": 788408,
      "step": 100
    }
  ],
  "logging_steps": 5,
  "max_steps": 375,
  "num_input_tokens_seen": 788408,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.3542680298389504e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}