Started at: 13:39:01
norbert2, 0.001, 320
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.31723648086190226 - MAE: 0.43731430662973386
Validation loss : 0.22693050972052983 - MAE: 0.3720038111268189
Epoch: 1
Training loss: 0.20759156867861747 - MAE: 0.3492405031413845
Validation loss : 0.18326088786125183 - MAE: 0.3277961618031656
Epoch: 2
Training loss: 0.19137692749500274 - MAE: 0.3327547039977217
Validation loss : 0.1755183126245226 - MAE: 0.3227190541521342
Epoch: 3
Training loss: 0.18025614991784095 - MAE: 0.3210334292827734
Validation loss : 0.1690996310540608 - MAE: 0.3163311547284712
Epoch: 4
Training loss: 0.17526042610406875 - MAE: 0.316883641964065
Validation loss : 0.16490017942019872 - MAE: 0.31192979272830107
Epoch: 5
Training loss: 0.16934362202882766 - MAE: 0.3118938876851917
Validation loss : 0.16087437953267777 - MAE: 0.306626932491948
Epoch: 6
Training loss: 0.1686125099658966 - MAE: 0.3093647594294477
Validation loss : 0.1611258898462568 - MAE: 0.30606865870740835
Epoch: 7
Training loss: 0.1647215336561203 - MAE: 0.306965339566958
Validation loss : 0.16415890412671225 - MAE: 0.3067067406125915
Epoch: 8
Training loss: 0.16374187208712102 - MAE: 0.30592013048097305
Validation loss : 0.1591270033802305 - MAE: 0.3027121792112116
Epoch: 9
Training loss: 0.15822869427502156 - MAE: 0.29933113000114797
Validation loss : 0.15573506376573018 - MAE: 0.29944502052409083
Epoch: 10
Training loss: 0.1600269578397274 - MAE: 0.30174670300669987
Validation loss : 0.15634232759475708 - MAE: 0.29929978158098747
Epoch: 11
Training loss: 0.1545223116874695 - MAE: 0.2966458965741706
Validation loss : 0.1528506651520729 - MAE: 0.2972938627511181
Epoch: 12
Training loss: 0.15608188956975938 - MAE: 0.29646249957909354
Validation loss : 0.1535063236951828 - MAE: 0.29711658081386355
Epoch: 13
Training loss: 0.15382483936846256 - MAE: 0.2943215145665842
Validation loss : 0.15108988327639444 - MAE: 0.2957606447529792
Epoch: 14
Training loss: 0.15104165561497213 - MAE: 0.2907296995583519
Validation loss : 0.15084478897707804 - MAE: 0.2945350400054091
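
The sketch below shows one way a loop could emit log lines in this format: a BERT encoder ("norbert2", here assumed to be a Hugging Face checkpoint such as ltg/norbert2) with a scalar regression head, trained with an MSE objective and reporting mean absolute error each epoch. The model id, the meaning of the header fields (0.001 as learning rate, 320 as batch size), the loss function, and the data loaders are assumptions for illustration; only the printed fields come from the log itself.

# Minimal sketch of a training/evaluation loop that prints epoch-wise
# "Training loss ... MAE ..." / "Validation loss ... MAE ..." lines like the
# log above. Model id, hyperparameter meanings, and loss choice are assumed.
import torch
from torch import nn
from transformers import AutoModel

MODEL_NAME = "ltg/norbert2"   # assumed HF id behind the "norbert2" tag in the log
LEARNING_RATE = 1e-3          # assumed meaning of the "0.001" header field
BATCH_SIZE = 320              # assumed meaning of the "320" header field

class BertRegressor(nn.Module):
    """BERT encoder with a single linear output for a scalar target."""
    def __init__(self, name: str):
        super().__init__()
        self.encoder = AutoModel.from_pretrained(name)
        self.head = nn.Linear(self.encoder.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        hidden = self.encoder(input_ids=input_ids,
                              attention_mask=attention_mask).last_hidden_state
        return self.head(hidden[:, 0]).squeeze(-1)   # predict from [CLS] token

def run_epoch(model, loader, loss_fn, optimizer=None):
    """Return (mean loss, mean absolute error); updates weights if an optimizer is given."""
    total_loss, total_abs_err, n = 0.0, 0.0, 0
    for batch in loader:
        preds = model(batch["input_ids"], batch["attention_mask"])
        loss = loss_fn(preds, batch["labels"].float())
        if optimizer is not None:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item() * preds.size(0)
        total_abs_err += (preds - batch["labels"]).abs().sum().item()
        n += preds.size(0)
    return total_loss / n, total_abs_err / n

def train(train_loader, val_loader, num_epochs: int = 15):
    """Data loaders are placeholders; the dataset used for this run is not in the log."""
    model = BertRegressor(MODEL_NAME)
    optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)
    loss_fn = nn.MSELoss()   # assumed training objective; MAE below is reported separately
    for epoch in range(num_epochs):
        model.train()
        tr_loss, tr_mae = run_epoch(model, train_loader, loss_fn, optimizer)
        model.eval()
        with torch.no_grad():
            va_loss, va_mae = run_epoch(model, val_loader, loss_fn)
        print(f"Epoch: {epoch}")
        print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
        print(f"Validation loss : {va_loss} - MAE: {va_mae}")

Under these assumptions the printed validation MAE is the quantity that flattens out around 0.29-0.30 from epoch 9 onward in the log, which is why later epochs bring only marginal improvement.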