{
"best_metric": 0.8112582781456954,
"best_model_checkpoint": "distilhubert-finetuned-mixed-data/checkpoint-151",
"epoch": 8.0,
"eval_steps": 500,
"global_step": 151,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.9536423841059603,
"eval_accuracy": 0.5298013245033113,
"eval_f1": 0.43216404525386315,
"eval_loss": 1.1326755285263062,
"eval_precision": 0.5213817284211205,
"eval_recall": 0.5298013245033113,
"eval_runtime": 1.2241,
"eval_samples_per_second": 246.709,
"eval_steps_per_second": 31.043,
"step": 18
},
{
"epoch": 1.9602649006622517,
"eval_accuracy": 0.6423841059602649,
"eval_f1": 0.5806184720425087,
"eval_loss": 0.9228919744491577,
"eval_precision": 0.5520930002801896,
"eval_recall": 0.6423841059602649,
"eval_runtime": 1.2236,
"eval_samples_per_second": 246.807,
"eval_steps_per_second": 31.055,
"step": 37
},
{
"epoch": 2.966887417218543,
"eval_accuracy": 0.7086092715231788,
"eval_f1": 0.6539391094940458,
"eval_loss": 0.7409619688987732,
"eval_precision": 0.752516290193287,
"eval_recall": 0.7086092715231788,
"eval_runtime": 1.2459,
"eval_samples_per_second": 242.403,
"eval_steps_per_second": 30.501,
"step": 56
},
{
"epoch": 3.9735099337748343,
"eval_accuracy": 0.7450331125827815,
"eval_f1": 0.7012377717797856,
"eval_loss": 0.6461689472198486,
"eval_precision": 0.7242129191632504,
"eval_recall": 0.7450331125827815,
"eval_runtime": 1.229,
"eval_samples_per_second": 245.723,
"eval_steps_per_second": 30.919,
"step": 75
},
{
"epoch": 4.9801324503311255,
"eval_accuracy": 0.7980132450331126,
"eval_f1": 0.7903709596982513,
"eval_loss": 0.5553261041641235,
"eval_precision": 0.7925903096412185,
"eval_recall": 0.7980132450331126,
"eval_runtime": 1.2897,
"eval_samples_per_second": 234.157,
"eval_steps_per_second": 29.463,
"step": 94
},
{
"epoch": 5.986754966887418,
"eval_accuracy": 0.7781456953642384,
"eval_f1": 0.7717607879297459,
"eval_loss": 0.5255588293075562,
"eval_precision": 0.7771454278224522,
"eval_recall": 0.7781456953642384,
"eval_runtime": 1.2928,
"eval_samples_per_second": 233.597,
"eval_steps_per_second": 29.393,
"step": 113
},
{
"epoch": 6.993377483443709,
"eval_accuracy": 0.7980132450331126,
"eval_f1": 0.7833793670187674,
"eval_loss": 0.5077652335166931,
"eval_precision": 0.7917508237685551,
"eval_recall": 0.7980132450331126,
"eval_runtime": 1.2898,
"eval_samples_per_second": 234.154,
"eval_steps_per_second": 29.463,
"step": 132
},
{
"epoch": 8.0,
"eval_accuracy": 0.8112582781456954,
"eval_f1": 0.8021247299665692,
"eval_loss": 0.4742371141910553,
"eval_precision": 0.8054865043662888,
"eval_recall": 0.8112582781456954,
"eval_runtime": 1.381,
"eval_samples_per_second": 218.682,
"eval_steps_per_second": 27.516,
"step": 151
}
],
"logging_steps": 500,
"max_steps": 180,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 3,
"early_stopping_threshold": 0.0
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 2.197812547584e+16,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}