Started at: 14:09:17
norbert2, 1e-06, 128
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.27911949396133423 - MAE: 0.40659444881873014
Validation loss : 0.20867602176526012 - MAE: 0.3527981750883627
Epoch: 1
Training loss: 0.20067402869462966 - MAE: 0.3443738813489259
Validation loss : 0.18265952783472397 - MAE: 0.3274485677700584
Epoch: 2
Training loss: 0.1841411805152893 - MAE: 0.3259698872141148
Validation loss : 0.1689106410040575 - MAE: 0.31383750534412613
Epoch: 3
Training loss: 0.17074111104011536 - MAE: 0.3145261499885815
Validation loss : 0.16013307606472688 - MAE: 0.3051025622499357
Epoch: 4
Training loss: 0.1639400851726532 - MAE: 0.30646231000580526
Validation loss : 0.1528616074253531 - MAE: 0.29642873545836607
Epoch: 5
Training loss: 0.15575984567403794 - MAE: 0.2993765289133215
Validation loss : 0.14734076357939663 - MAE: 0.29066910247984484
Epoch: 6
Training loss: 0.14977987259626388 - MAE: 0.29226248364706103
Validation loss : 0.14347205924637177 - MAE: 0.28507644327461074
Epoch: 7
Training loss: 0.14246483147144318 - MAE: 0.2847612362019316
Validation loss : 0.13875692469232223 - MAE: 0.2803811997241932
Epoch: 8
Training loss: 0.13901878967881204 - MAE: 0.28157276737922127
Validation loss : 0.13477012821856668 - MAE: 0.27596797076254853
Epoch: 9
Training loss: 0.13553616270422936 - MAE: 0.2773601570587253
Validation loss : 0.1310576147892896 - MAE: 0.27152793902535644
Epoch: 10
Training loss: 0.13071139618754388 - MAE: 0.2738363522012253
Validation loss : 0.12781519004527261 - MAE: 0.2676097654740519
Epoch: 11
Training loss: 0.12695545226335525 - MAE: 0.268180345813455
Validation loss : 0.1253761912093443 - MAE: 0.2644418877791716
Epoch: 12
Training loss: 0.12514270216226578 - MAE: 0.26636549719195196
Validation loss : 0.12222997055334203 - MAE: 0.26144890253845454
Epoch: 13
Training loss: 0.12074263036251068 - MAE: 0.2631149334842985
Validation loss : 0.12074752809370265 - MAE: 0.2602191469613834
Epoch: 14
Training loss: 0.1188604213297367 - MAE: 0.2606121217549307
Validation loss : 0.11908167556804769 - MAE: 0.25871316767601477
Epoch: 15
Training loss: 0.11777730487287044 - MAE: 0.2618875457585697
Validation loss : 0.11862163508639616 - MAE: 0.2584168774684768
Epoch: 16
Training loss: 0.11579891853034496 - MAE: 0.25837723650531746
Validation loss : 0.11732436059152379 - MAE: 0.25737838022673293
Epoch: 17
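
The following is a minimal sketch, not the script that produced this log, of a loop that would print epoch lines in the format above. It assumes the run fine-tunes the norbert2 checkpoint (referred to here by the assumed hub id "ltg/norbert2") for single-target regression with MSE loss, that 1e-06 is the learning rate and 128 the batch size (it could equally be the max sequence length), and that train_loader and val_loader are pre-built DataLoaders yielding tokenised batches with a float "labels" field.

import time
import torch
from torch.nn.functional import l1_loss
from transformers import AutoModelForSequenceClassification

device = "cuda" if torch.cuda.is_available() else "cpu"
model = AutoModelForSequenceClassification.from_pretrained(
    "ltg/norbert2",                 # assumed hub id for the norbert2 checkpoint
    num_labels=1,
    problem_type="regression",      # single output trained with MSE loss
).to(device)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-06)

print(f"Started at: {time.strftime('%H:%M:%S')}")
print("norbert2, 1e-06, 128")
print((model.config.to_dict(), {}))  # config echo, as in the log header

for epoch in range(18):              # the log above is cut off at epoch 17
    print(f"Epoch: {epoch}")

    # Training pass: accumulate MSE loss and MAE over batches.
    model.train()
    loss_sum = mae_sum = 0.0
    for batch in train_loader:
        batch = {k: v.to(device) for k, v in batch.items()}
        out = model(**batch)
        optimizer.zero_grad()
        out.loss.backward()
        optimizer.step()
        loss_sum += out.loss.item()
        mae_sum += l1_loss(out.logits.squeeze(-1), batch["labels"]).item()
    print(f"Training loss: {loss_sum / len(train_loader)}"
          f" - MAE: {mae_sum / len(train_loader)}")

    # Validation pass: same metrics, no gradient updates.
    model.eval()
    loss_sum = mae_sum = 0.0
    with torch.no_grad():
        for batch in val_loader:
            batch = {k: v.to(device) for k, v in batch.items()}
            out = model(**batch)
            loss_sum += out.loss.item()
            mae_sum += l1_loss(out.logits.squeeze(-1), batch["labels"]).item()
    print(f"Validation loss : {loss_sum / len(val_loader)}"
          f" - MAE: {mae_sum / len(val_loader)}")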