Started at: 08:27:59
norbert, 0.001, 512
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
Epoch: 0
Training loss: 0.6942410709766241 - MAE: 0.6755542403683183
Validation loss : 0.5259636163711547 - MAE: 0.6145447715726008
Epoch: 1
Training loss: 0.2714241169966184 - MAE: 0.41139053013950594
Validation loss : 0.22291880548000337 - MAE: 0.3552533479572475
Epoch: 2
Training loss: 0.2128986234848316 - MAE: 0.34851965161762277
Validation loss : 0.20447058975696564 - MAE: 0.35253076289330665
Epoch: 3
Training loss: 0.18250102492479178 - MAE: 0.3272099786903672
Validation loss : 0.1768460363149643 - MAE: 0.3204192954993362
Epoch: 4
Training loss: 0.17732494381757882 - MAE: 0.31724122285223966
Validation loss : 0.17855587601661682 - MAE: 0.32511627596495785
Epoch: 5
Training loss: 0.1710700816833056 - MAE: 0.3123455685841235
Validation loss : 0.17140080630779267 - MAE: 0.31721616662712504
Epoch: 6
Training loss: 0.1681888263959151 - MAE: 0.3085497958832519
Validation loss : 0.1719079226255417 - MAE: 0.3183222493749484
Epoch: 7
Training loss: 0.16573260151422942 - MAE: 0.3061277076796458
Validation loss : 0.1696848064661026 - MAE: 0.31607796307735603
Epoch: 8
Training loss: 0.1637361485224504 - MAE: 0.30538103560313107
Validation loss : 0.16837692856788636 - MAE: 0.3147678825233238
Epoch: 9
Training loss: 0.16596387441341692 - MAE: 0.306460615979059
Validation loss : 0.16714616119861603 - MAE: 0.3133195305170598
Epoch: 10
Training loss: 0.16143193153234628 - MAE: 0.30336240071987036
Validation loss : 0.16653320491313933 - MAE: 0.31252041315238616
Epoch: 11
Training loss: 0.16217435552523687 - MAE: 0.30364196639735547
Validation loss : 0.1653230756521225 - MAE: 0.3110606215237233
Epoch: 12
Training loss: 0.1605491844507364 - MAE: 0.30143663888637007
Validation loss : 0.16475171744823455 - MAE: 0.31038784146752435
Epoch: 13
Training loss: 0.1591638189095717 - MAE: 0.30089815609107995
Validation loss : 0.16403158009052277 - MAE: 0.30957783010243456
Epoch: 14
Training loss: 0.15901278990965623 - MAE: 0.3003773360219678
Validation loss : 0.16402316391468047 - MAE: 0.3094852388844685
Epoch: 15
Training loss: 0.1596058435164965 - MAE: 0.29972277582996765
Validation loss : 0.16301234662532807 - MAE: 0.3081341348644224
Epoch: 16
Training loss: 0.1581736963528853 - MAE: 0.29789149479835464
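
Note: a minimal sketch of the kind of fine-tuning loop that would emit log lines in this format, under the assumption that "norbert, 0.001, 512" records (model name, learning rate, max sequence length) and that the task is single-output regression trained with MSE and reported with MAE. The checkpoint identifier, the AdamW optimizer, and the dummy data are placeholders, not taken from the log.

# Sketch only; replace MODEL_NAME and the fake data with the real checkpoint and dataset.
import time
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset
from transformers import AutoConfig, AutoModelForSequenceClassification

MODEL_NAME = "norbert"               # placeholder: the log only records "norbert"
LR, MAX_LEN, N_EPOCHS = 1e-3, 512, 17

config = AutoConfig.from_pretrained(MODEL_NAME, num_labels=1)   # num_labels=1 -> regression head
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME, config=config)
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

# Dummy tokenised inputs so the sketch is self-contained; a real run would tokenise text.
def fake_split(n):
    ids = torch.randint(1, config.vocab_size, (n, MAX_LEN))
    labels = torch.rand(n)
    return DataLoader(TensorDataset(ids, labels), batch_size=8, shuffle=True)

train_loader, val_loader = fake_split(64), fake_split(16)
optimizer = torch.optim.AdamW(model.parameters(), lr=LR)
mse, mae = nn.MSELoss(), nn.L1Loss()

print(f"Started at: {time.strftime('%H:%M:%S')}")
print(f"norbert, {LR}, {MAX_LEN}")

def run(loader, train):
    # One pass over the loader; returns mean MSE loss and mean MAE per batch.
    model.train(train)
    tot_loss = tot_mae = 0.0
    for ids, labels in loader:
        ids, labels = ids.to(device), labels.to(device)
        with torch.set_grad_enabled(train):
            preds = model(input_ids=ids).logits.squeeze(-1)
            loss = mse(preds, labels)
        if train:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        tot_loss += loss.item()
        tot_mae += mae(preds, labels).item()
    return tot_loss / len(loader), tot_mae / len(loader)

for epoch in range(N_EPOCHS):
    print(f"Epoch: {epoch}")
    tr_loss, tr_mae = run(train_loader, train=True)
    print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
    va_loss, va_mae = run(val_loader, train=False)
    print(f"Validation loss : {va_loss} - MAE: {va_mae}")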