Started at: 18:07:27
nb-bert-base, 0.001, 256 ({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.46166665077209473 - MAE: 0.532284292943485
Validation loss : 0.21881521575980717 - MAE: 0.35093047126771504
Epoch: 1
Training loss: 0.19428934633731842 - MAE: 0.3369849436358454
Validation loss : 0.17015555004278818 - MAE: 0.3188480005836794
Epoch: 2
Training loss: 0.17059184610843658 - MAE: 0.3135526208759563
Validation loss : 0.16331283085876042 - MAE: 0.3111560966605882
Epoch: 3
Training loss: 0.1648101744055748 - MAE: 0.30816099238754846
Validation loss : 0.15740243097146353 - MAE: 0.3036171154612899
Epoch: 4
Training loss: 0.16064140915870667 - MAE: 0.3034752969970701
Validation loss : 0.1544146024518543 - MAE: 0.3000950328112062
Epoch: 5
Training loss: 0.15893334209918974 - MAE: 0.30019076958852603
Validation loss : 0.15207635197374555 - MAE: 0.2973798486881051
Epoch: 6
Training loss: 0.15733025580644608 - MAE: 0.298333740307608
Validation loss : 0.15064537939098147 - MAE: 0.2956621068139803
Epoch: 7
Training loss: 0.1533995896577835 - MAE: 0.2953341499795722
Validation loss : 0.14883457786507076 - MAE: 0.2933132114117648
Epoch: 8
Training loss: 0.15376180291175842 - MAE: 0.2940413396593056
Validation loss : 0.1487489938735962 - MAE: 0.29380870235239887
Epoch: 9