Started at: 11:40:24
nb-bert-base, 0.001, 512 ({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.6829635271659265 - MAE: 0.6693431870059429
Validation loss : 0.5015756666660309 - MAE: 0.5997545175999092
Epoch: 1
Training loss: 0.26068564561697155 - MAE: 0.40026651290975057
Validation loss : 0.2343240737915039 - MAE: 0.3639362216460188
Epoch: 2
Training loss: 0.20825687165443713 - MAE: 0.34524354469083296
Validation loss : 0.20746740698814392 - MAE: 0.35860255150045284
Epoch: 3
Training loss: 0.1804403788768328 - MAE: 0.324681643823719
Validation loss : 0.17072068750858307 - MAE: 0.3124492505853874
Epoch: 4
Training loss: 0.17333761086830726 - MAE: 0.3132351201497551
Validation loss : 0.17095611095428467 - MAE: 0.31916315360789926
Epoch: 5
Training loss: 0.16724205017089844 - MAE: 0.3070826757836269
Validation loss : 0.16323553621768952 - MAE: 0.30865708568301353
Epoch: 6
Training loss: 0.16199684257690722 - MAE: 0.3026997098344876
Validation loss : 0.1614771842956543 - MAE: 0.3067986414590729
Epoch: 7
Training loss: 0.16401460537543663 - MAE: 0.3037438752420999
Validation loss : 0.15994466841220856 - MAE: 0.30503234812758634
Epoch: 8
Training loss: 0.16340345602769119 - MAE: 0.30281380735994723
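
A minimal parsing sketch (an addition, not part of the original run): assuming the log format shown above, the hypothetical helper parse_log below collects the per-epoch training/validation loss and MAE values into a list of dicts, e.g. for plotting the learning curve.

import re

def parse_log(text):
    # Collect "Epoch: N" headers followed by "Training loss: X - MAE: Y" and
    # "Validation loss : X - MAE: Y" lines, as they appear in the log above.
    epochs = []
    current = None
    for line in text.splitlines():
        line = line.strip()
        if line.startswith("Epoch:"):
            current = {"epoch": int(line.split(":", 1)[1])}
            epochs.append(current)
        elif current is not None:
            m = re.match(r"(Training|Validation) loss\s*:\s*([\d.]+) - MAE: ([\d.]+)", line)
            if m:
                split = m.group(1).lower()
                current[split + "_loss"] = float(m.group(2))
                current[split + "_mae"] = float(m.group(3))
    return epochs

# Example usage (the file name is an assumption):
# with open("trainlog.txt") as f:
#     print(parse_log(f.read())[-1])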