Started at: 10:20:56
norbert2, 0.001, 320
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.3527084864675999 - MAE: 0.462953214037108
Validation loss : 0.26738621081624714 - MAE: 0.4117436401566978
Epoch: 1
Training loss: 0.21354530677199363 - MAE: 0.35412568088618607
Validation loss : 0.18583983182907104 - MAE: 0.3299781625243953
Epoch: 2
Training loss: 0.18886808156967164 - MAE: 0.330421134278654
Validation loss : 0.1822844914027623 - MAE: 0.33056134626981226
Epoch: 3
Training loss: 0.18264293819665908 - MAE: 0.3242538405037828
Validation loss : 0.1772120531116213 - MAE: 0.3251080552524876
Epoch: 4
Training loss: 0.17669695615768433 - MAE: 0.3190416206929785
Validation loss : 0.1738429559128625 - MAE: 0.322266401087556
Epoch: 5
Training loss: 0.17284812778234482 - MAE: 0.3136576419048662
Validation loss : 0.17294207853930338 - MAE: 0.32187514594462957
Epoch: 6
Training loss: 0.16915602013468742 - MAE: 0.3109971127550157
Validation loss : 0.16980814508029393 - MAE: 0.3186160670865416
Epoch: 7
Training loss: 0.16845666095614434 - MAE: 0.3089849145668241
Validation loss : 0.16714746824332646 - MAE: 0.3155506548087574
Epoch: 8
Training loss: 0.16868472024798392 - MAE: 0.30949657276040055
Validation loss : 0.16395465178149088 - MAE: 0.3112663647803296
Epoch: 9
Training loss: 0.16725652366876603 - MAE: 0.3066475873241304
Validation loss : 0.1607010385819844 - MAE: 0.3061224215087366
Epoch: 10
Training loss: 0.16232142224907875 - MAE: 0.30304105641340856
Validation loss : 0.1601495338337762 - MAE: 0.30464326769452593
Epoch: 11
Training loss: 0.1600735753774643 - MAE: 0.3024141941842313
Validation loss : 0.16005327233246394 - MAE: 0.30402928496694465
Epoch: 12
Training loss: 0.1590587668120861 - MAE: 0.30053309807554257
Validation loss : 0.16130509546824864 - MAE: 0.30322365490516023