Started at: 11:26:09
norbert2, 1e-06, 256
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.49638876644047825 - MAE: 0.5629371267396844
Validation loss : 0.40102894996341903 - MAE: 0.5074595333213087
Epoch: 1
Training loss: 0.3962893469767137 - MAE: 0.4999512355797335
Validation loss : 0.34069604465835973 - MAE: 0.46270536590775785
Epoch: 2
Training loss: 0.34560276595028966 - MAE: 0.46357389873235305
Validation loss : 0.29479486848178665 - MAE: 0.425237878413089
Epoch: 3
Training loss: 0.30231391191482543 - MAE: 0.4322491431025793
Validation loss : 0.2530422657728195 - MAE: 0.3918358808926381
Epoch: 4
Training loss: 0.26676738343455575 - MAE: 0.4045488415909569
Validation loss : 0.22476527094841003 - MAE: 0.3697079008857467
Epoch: 5
Training loss: 0.24020280133594166 - MAE: 0.38459086994265346
Validation loss : 0.20575479143544248 - MAE: 0.35532429110010383
Epoch: 6
Training loss: 0.22542023929682645 - MAE: 0.3725998866063251
Validation loss : 0.1922520234396583 - MAE: 0.34386633509290826
Epoch: 7
Training loss: 0.20791846757585353 - MAE: 0.3593585588710985
Validation loss : 0.18115280098036715 - MAE: 0.3338710560830552
Epoch: 8
Training loss: 0.1955847908150066 - MAE: 0.3493787802560064
Validation loss : 0.17247353258885836 - MAE: 0.32570055286906396
Epoch: 9
Training loss: 0.18719206181439488 - MAE: 0.34056042854960156
Validation loss : 0.16388126501911565 - MAE: 0.31779819344404947
Epoch: 10
Training loss: 0.17898870544000106 - MAE: 0.33377424634339875
Validation loss : 0.15652799920031898 - MAE: 0.31065961483700905
Epoch: 11
Training loss: 0.1720216458494013 - MAE: 0.32778831132601455
Validation loss : 0.14991405449415507 - MAE: 0.3038376916810004
Epoch: 12
Training loss: 0.16193578676743942 - MAE: 0.3192367127206365
Validation loss : 0.1440117061138153 - MAE: 0.2973964881408289
Epoch: 13
Training loss: 0.15627174228429794 - MAE: 0.3120230587017449
Validation loss : 0.13841195875092557 - MAE: 0.2912305185896088
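
Note: the sketch below is not the original training script; it is a minimal illustration, under stated assumptions, of a loop that would produce log lines in this format. From the log itself we only have a norbert2 BERT checkpoint (config above), a learning rate of 1e-06, the value 256 (assumed here to be the batch size), an MSE-style training/validation loss, and MAE reported per epoch. The checkpoint name "ltg/norbert2", the AdamW optimizer, and the data loaders are assumptions.

# Minimal sketch of a regression fine-tuning loop matching this log format (assumptions noted above).
import torch
from torch.optim import AdamW
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "ltg/norbert2"  # assumed checkpoint name; the log only shows the BERT config
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=1)  # single regression output

optimizer = AdamW(model.parameters(), lr=1e-06)  # learning rate from the log
loss_fn = torch.nn.MSELoss()   # assumed loss; the log only labels it "loss"
mae_fn = torch.nn.L1Loss()     # MAE, as reported per epoch

def run_epoch(loader, train=True):
    # Average loss and MAE over all batches, matching the per-epoch numbers in the log.
    model.train() if train else model.eval()
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    for batch in loader:
        with torch.set_grad_enabled(train):
            preds = model(input_ids=batch["input_ids"],
                          attention_mask=batch["attention_mask"]).logits.squeeze(-1)
            loss = loss_fn(preds, batch["labels"].float())
            if train:
                loss.backward()
                optimizer.step()
                optimizer.zero_grad()
        total_loss += loss.item()
        total_mae += mae_fn(preds, batch["labels"].float()).item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# Hypothetical usage with assumed train_loader / val_loader (batch size 256 assumed):
# for epoch in range(14):
#     train_loss, train_mae = run_epoch(train_loader, train=True)
#     val_loss, val_mae = run_epoch(val_loader, train=False)
#     print(f"Epoch: {epoch}")
#     print(f"Training loss: {train_loss} - MAE: {train_mae}")
#     print(f"Validation loss : {val_loss} - MAE: {val_mae}")

Over the 14 logged epochs both training and validation loss decrease monotonically (validation MAE falls from about 0.51 to 0.29), with validation loss staying below training loss throughout, so the run shows no sign of overfitting by epoch 13.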