Started at: 01:06:40
norbert2, 0.001, 256 ({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.30325276613235475 - MAE: 0.4293982748763984
Validation loss : 0.19999180734157562 - MAE: 0.34104668989482667
Epoch: 1
Training loss: 0.20694544672966003 - MAE: 0.3466696903029176
Validation loss : 0.18371513154771593 - MAE: 0.33124410963863
Epoch: 2
Training loss: 0.19004206776618957 - MAE: 0.3316648070614613
Validation loss : 0.17363174425231087 - MAE: 0.3190259149052752
Epoch: 3
Training loss: 0.18229258060455322 - MAE: 0.32259291375499355
Validation loss : 0.17075078023804557 - MAE: 0.31441923958666185
Epoch: 4
Training loss: 0.17311329424381255 - MAE: 0.31484578517047557
Validation loss : 0.16754941973421308 - MAE: 0.31124443306506033
Epoch: 5
Training loss: 0.16847123920917512 - MAE: 0.30949252590925136
Validation loss : 0.16543650461567772 - MAE: 0.3088372330968671
Epoch: 6
Training loss: 0.16129764080047607 - MAE: 0.3050791582077723
Validation loss : 0.16268478665086958 - MAE: 0.3066587666652184
Epoch: 7
Training loss: 0.1602504187822342 - MAE: 0.30409024840989457
Validation loss : 0.16163558264573416 - MAE: 0.30493840053209487
Epoch: 8
Training loss: 0.1599578830599785 - MAE: 0.30184255458273057
Validation loss : 0.1605621808105045 - MAE: 0.30371230230399254
Epoch: 9
Training loss: 0.1557137942314148 - MAE: 0.29758976090338174
Validation loss : 0.15996821721394858 - MAE: 0.3034986495165665
Epoch: 10
Training loss: 0.15526981115341187 - MAE: 0.2975494841901401
Validation loss : 0.1575407518280877 - MAE: 0.30259751573311344
Epoch: 11
Training loss: 0.15275930255651474 - MAE: 0.29392310225054646
Validation loss : 0.15727593998114267 - MAE: 0.3019466991756661
Epoch: 12
Training loss: 0.15028272598981857 - MAE: 0.29133814976206407
Validation loss : 0.15676536824968126 - MAE: 0.3012369469697969
Epoch: 13
Training loss: 0.1497017914056778 - MAE: 0.29114720473293804
Validation loss : 0.1556344214412901 - MAE: 0.2998055385465465
Epoch: 14
Training loss: 0.15074193984270096 - MAE: 0.2943675943971396
Validation loss : 0.15503714647558 - MAE: 0.29838652275657607
Epoch: 15
Training loss: 0.14802899420261384 - MAE: 0.28958511291054967
Validation loss : 0.15510348478953043 - MAE: 0.29876681028633734
Epoch: 16
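The log above appears to come from a regression fine-tuning run on top of the norbert2 encoder, with an MSE-style training/validation loss reported alongside MAE. Below is a minimal sketch of a loop that would emit lines in this format; the checkpoint name ltg/norbert2, the reading of 0.001 as the learning rate and 256 as the maximum sequence length, the linear regression head, the epoch count, and the placeholder data are all assumptions, not details taken from the original run.

import time
import torch
from torch.utils.data import DataLoader, TensorDataset
from transformers import AutoConfig, AutoModel

MODEL_NAME = "ltg/norbert2"  # assumed Hugging Face checkpoint behind "norbert2" in the header
LR = 0.001                   # "0.001" from the header, assumed to be the learning rate
MAX_LEN = 256                # "256" from the header, assumed to be the max sequence length

config = AutoConfig.from_pretrained(MODEL_NAME)
print(f"Started at: {time.strftime('%H:%M:%S')}")
print(f"norbert2, {LR}, {MAX_LEN} ({config.to_dict()}, {{}})")

encoder = AutoModel.from_pretrained(MODEL_NAME)
head = torch.nn.Linear(config.hidden_size, 1)              # regression head (assumption)
optimizer = torch.optim.AdamW(list(encoder.parameters()) + list(head.parameters()), lr=LR)
mse, l1 = torch.nn.MSELoss(), torch.nn.L1Loss()            # L1 loss == MAE

# Placeholder data; the real run presumably used tokenized text with float targets.
ids = torch.randint(0, config.vocab_size, (32, MAX_LEN))
mask = torch.ones_like(ids)
targets = torch.rand(32)
loader = DataLoader(TensorDataset(ids, mask, targets), batch_size=8)

def run_epoch(train: bool) -> tuple[float, float]:
    # One pass over the data; returns mean loss and mean MAE.
    encoder.train(train)
    head.train(train)
    tot_loss = tot_mae = 0.0
    with torch.set_grad_enabled(train):
        for batch_ids, batch_mask, batch_y in loader:
            cls_vec = encoder(input_ids=batch_ids, attention_mask=batch_mask).last_hidden_state[:, 0]
            preds = head(cls_vec).squeeze(-1)
            loss = mse(preds, batch_y)
            if train:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            tot_loss += loss.item()
            tot_mae += l1(preds, batch_y).item()
    return tot_loss / len(loader), tot_mae / len(loader)

for epoch in range(30):  # epoch count is a placeholder; the log above is cut off at epoch 16
    print(f"Epoch: {epoch}")
    tr_loss, tr_mae = run_epoch(train=True)
    print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
    va_loss, va_mae = run_epoch(train=False)
    print(f"Validation loss : {va_loss} - MAE: {va_mae}")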