{
  "best_metric": 0.7849208402678758,
  "best_model_checkpoint": "tiny-llama/checkpoint-1284",
  "epoch": 1.9992214869599065,
  "eval_steps": 500,
  "global_step": 1284,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.78,
      "learning_rate": 4.614485981308411e-05,
      "loss": 1.0444,
      "step": 500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8055770720371804,
      "eval_f1_macro": 0.6986433125217109,
      "eval_f1_micro": 0.8055770720371804,
      "eval_f1_weighted": 0.8014415131779974,
      "eval_loss": 0.5968185067176819,
      "eval_macro_fpr": 0.017477297659887057,
      "eval_macro_sensitivity": 0.6995136011888944,
      "eval_macro_specificity": 0.9852356804676051,
      "eval_precision": 0.8049997976909469,
      "eval_precision_macro": 0.7121994461809698,
      "eval_recall": 0.8055770720371804,
      "eval_recall_macro": 0.6995136011888944,
      "eval_runtime": 1398.4578,
      "eval_samples_per_second": 0.923,
      "eval_steps_per_second": 0.462,
      "eval_weighted_fpr": 0.01694686381743299,
      "eval_weighted_sensitivity": 0.8055770720371804,
      "eval_weighted_specificity": 0.9729581349768969,
      "step": 642
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.2250778816199374e-05,
      "loss": 0.4788,
      "step": 1000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.8195197521301317,
      "eval_f1_macro": 0.7849208402678758,
      "eval_f1_micro": 0.8195197521301317,
      "eval_f1_weighted": 0.8172221848923289,
      "eval_loss": 0.696565568447113,
      "eval_macro_fpr": 0.016141504898788652,
      "eval_macro_sensitivity": 0.782528240704334,
      "eval_macro_specificity": 0.9863319896705219,
      "eval_precision": 0.8221579556499354,
      "eval_precision_macro": 0.8091762597974816,
      "eval_recall": 0.8195197521301317,
      "eval_recall_macro": 0.782528240704334,
      "eval_runtime": 1392.2226,
      "eval_samples_per_second": 0.927,
      "eval_steps_per_second": 0.464,
      "eval_weighted_fpr": 0.01548687271518777,
      "eval_weighted_sensitivity": 0.8195197521301317,
      "eval_weighted_specificity": 0.9754600929277001,
      "step": 1284
    }
  ],
  "logging_steps": 500,
  "max_steps": 6420,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 1.2395484865049395e+17,
  "trial_name": null,
  "trial_params": null
}