Started at: 11:05:41
nb-bert-base, 0.001, 320 ({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.5472435295581818 - MAE: 0.5962500959471848
Validation loss : 0.19041005628449575 - MAE: 0.33918628303996384
Epoch: 1
Training loss: 0.2095787413418293 - MAE: 0.34864738662823064
Validation loss : 0.20635795167514256 - MAE: 0.35940594357273065
Epoch: 2
Training loss: 0.1750541977584362 - MAE: 0.3156862424402112
Validation loss : 0.1741191872528621 - MAE: 0.32463109275690744
Epoch: 3
Training loss: 0.16520684361457824 - MAE: 0.30684230381369754
Validation loss : 0.16484951972961426 - MAE: 0.3122384296878921
Epoch: 4
Training loss: 0.16403881311416627 - MAE: 0.30506741840528667
Validation loss : 0.16140927055052348 - MAE: 0.30796976329260456
Epoch: 5
Training loss: 0.16092105135321616 - MAE: 0.30192065722430705
Validation loss : 0.15897798431771143 - MAE: 0.3044519051557594
Epoch: 6
Training loss: 0.15942454375326634 - MAE: 0.3003764766498005
Validation loss : 0.15748265704938344 - MAE: 0.3025005888374177
Epoch: 7
Training loss: 0.15759666711091996 - MAE: 0.2984491071305166
Validation loss : 0.15631395046200072 - MAE: 0.3007706974913341
Epoch: 8
Training loss: 0.15621527656912804 - MAE: 0.2959810530360974
Validation loss : 0.15541660892111914 - MAE: 0.29963030970178867
Epoch: 9
Training loss: 0.15488663874566555 - MAE: 0.2954068972163193
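
Note: the run above fine-tunes nb-bert-base for a regression objective (each loss is reported alongside an MAE), with 0.001 and 320 logged as hyperparameters after the model name. The sketch below is a minimal, hypothetical reconstruction of a loop that would emit matching per-epoch lines; the Hub id NbAiLab/nb-bert-base, the single-output linear head, the AdamW optimizer, the MSE training loss, and the data-loader interface are all assumptions, and the 320 value (possibly a batch size or sequence length) is left uninterpreted.

# Hypothetical reconstruction (not the original script): fine-tune a BERT
# encoder for single-value regression and report epoch-level loss/MAE in the
# same format as the log above.
import torch
from torch import nn
from transformers import AutoModel

device = "cuda" if torch.cuda.is_available() else "cpu"

encoder = AutoModel.from_pretrained("NbAiLab/nb-bert-base")  # assumed Hub id for nb-bert-base
head = nn.Linear(encoder.config.hidden_size, 1)              # assumed single-value regression head
model = nn.ModuleDict({"encoder": encoder, "head": head}).to(device)

optimizer = torch.optim.AdamW(model.parameters(), lr=0.001)  # 0.001 taken from the log header
mse, mae = nn.MSELoss(), nn.L1Loss()

def run_epoch(loader, train=True):
    # loader is assumed to yield dicts of tokenized tensors plus a 'labels' key.
    model.train(train)
    total_loss = total_mae = 0.0
    for step, batch in enumerate(loader, start=1):
        labels = batch.pop("labels").float().to(device)
        inputs = {k: v.to(device) for k, v in batch.items()}
        with torch.set_grad_enabled(train):
            cls = model["encoder"](**inputs).last_hidden_state[:, 0]  # [CLS] vector
            preds = model["head"](cls).squeeze(-1)
            loss = mse(preds, labels)
        if train:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += mae(preds, labels).item()
    return total_loss / step, total_mae / step

# Usage matching the log format (train_loader / val_loader are assumed to exist):
# for epoch in range(10):
#     print(f"Epoch: {epoch}")
#     tr_loss, tr_mae = run_epoch(train_loader, train=True)
#     print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
#     va_loss, va_mae = run_epoch(val_loader, train=False)
#     print(f"Validation loss : {va_loss} - MAE: {va_mae}")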