Started at: 10:28:59
norbert, lr (learning rate): 0.0005, bs (batch size): 128, ml (max sequence length): 512, oversample: True, frozen: True
({'architectures': ['BertForMaskedLM'],
  'attention_probs_dropout_prob': 0.1,
  'hidden_act': 'gelu',
  'hidden_dropout_prob': 0.1,
  'hidden_size': 768,
  'initializer_range': 0.02,
  'intermediate_size': 3072,
  'max_position_embeddings': 512,
  'model_type': 'bert',
  'num_attention_heads': 12,
  'num_hidden_layers': 12,
  'type_vocab_size': 2,
  'vocab_size': 32922,
  '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'},
 {})
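
Note: the training script itself is not part of this log. The sketch below is a minimal, assumed reconstruction of a run with the settings recorded above, using PyTorch and the Hugging Face transformers API: a NorBERT encoder (the checkpoint name "ltg/norbert" is an assumption) with all encoder weights frozen, a single linear regression head, and MAE reported alongside the loss. The loss function is not recorded in the log; MSE is assumed here only because the logged loss and MAE values differ.

import torch
from torch import nn
from transformers import AutoModel


class NorBertRegressor(nn.Module):
    """Assumed setup: frozen BERT encoder + one-unit regression head."""

    def __init__(self, checkpoint="ltg/norbert", freeze_encoder=True):
        super().__init__()
        self.encoder = AutoModel.from_pretrained(checkpoint)
        if freeze_encoder:  # "frozen: True" in the log line above
            for p in self.encoder.parameters():
                p.requires_grad = False
        # single regression output on top of the [CLS] representation
        self.head = nn.Linear(self.encoder.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.encoder(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.last_hidden_state[:, 0]).squeeze(-1)


def train_epoch(model, loader, optimizer, loss_fn, device):
    """One pass over the data, returning (mean loss, MAE) as logged per epoch."""
    model.train()
    total_loss, total_abs_err, n = 0.0, 0.0, 0
    for batch in loader:
        input_ids = batch["input_ids"].to(device)
        attention_mask = batch["attention_mask"].to(device)
        targets = batch["labels"].float().to(device)
        preds = model(input_ids, attention_mask)
        loss = loss_fn(preds, targets)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        total_loss += loss.item() * targets.size(0)
        total_abs_err += (preds - targets).abs().sum().item()
        n += targets.size(0)
    return total_loss / n, total_abs_err / n


# Example wiring (DataLoader construction omitted; "loader" is assumed to
# yield dicts with "input_ids", "attention_mask" and "labels"):
#   device = "cuda" if torch.cuda.is_available() else "cpu"
#   model = NorBertRegressor().to(device)
#   optimizer = torch.optim.AdamW(
#       (p for p in model.parameters() if p.requires_grad), lr=0.0005)
#   loss_fn = nn.MSELoss()  # assumption: the logged loss is not the MAE itself
#   train_loss, train_mae = train_epoch(model, loader, optimizer, loss_fn, device)

With the encoder frozen, only the linear head receives gradient updates, which is consistent with the relatively high learning rate (0.0005) and large batch size (128) in this run.
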
Epoch: 0
Training loss: 0.5474709353663705 - MAE: 0.5900718832469579
Validation loss : 0.3975696893962654 - MAE: 0.5077667229703434
Epoch: 1
Training loss: 0.3858125217936256 - MAE: 0.4986324394793062
Validation loss : 0.3667544569518115 - MAE: 0.482246268008046
Epoch: 2
Training loss: 0.36537060791795906 - MAE: 0.4824265926142948
Validation loss : 0.35301732372593236 - MAE: 0.47073127836812234
Epoch: 3
Training loss: 0.35378617481751873 - MAE: 0.4737870529422116
Validation loss : 0.3432318809870127 - MAE: 0.4630917172957513
Epoch: 4
Training loss: 0.34562364654107525 - MAE: 0.4671846467624914
Validation loss : 0.3360972178948892 - MAE: 0.4578467580637334
Epoch: 5
Training loss: 0.34099755598740145 - MAE: 0.46506329563394005
Validation loss : 0.33305841201060526 - MAE: 0.4550205160383188
Epoch: 6
Training loss: 0.33549089919437064 - MAE: 0.45988426559688494
Validation loss : 0.32705698222727386 - MAE: 0.45137572711731144
Epoch: 7
Training loss: 0.33034171164035797 - MAE: 0.4562066604592301
Validation loss : 0.32257608626339884 - MAE: 0.4478843028231632
Epoch: 8
Training loss: 0.32764916826378215 - MAE: 0.45407177982423313
Validation loss : 0.31829442849030365 - MAE: 0.4456899946875793
Epoch: 9
Training loss: 0.32817742445252157 - MAE: 0.4535115894624534
Validation loss : 0.3146412018183115 - MAE: 0.4434792121537539
Epoch: 10
Training loss: 0.3238557942888953 - MAE: 0.45152478313241395
Validation loss : 0.31213215879491857 - MAE: 0.4413900558031203
Epoch: 11
Training loss: 0.3201356159015135 - MAE: 0.44856331131575006
Validation loss : 0.310855512683456 - MAE: 0.4403171216889173
Epoch: 12
Training loss: 0.3234574454751882 - MAE: 0.4509541958789688
Validation loss : 0.309375981624062 - MAE: 0.4396080555552377
Epoch: 13
Training loss: 0.31835763305425646 - MAE: 0.44638152149399546
Validation loss : 0.30611808678588354 - MAE: 0.43832580394348114
Epoch: 14
Training loss: 0.3158693106337027 - MAE: 0.44678859955016065
Validation loss : 0.3040122353547328 - MAE: 0.4369072234530497
Epoch: 15
Training loss: 0.3157148630781607 - MAE: 0.4464642437497678
Validation loss : 0.3044232351554407 - MAE: 0.4363399582452833
Epoch: 16
Training loss: 0.3163840314204043 - MAE: 0.4460831709787618
Validation loss : 0.30293904687907247 - MAE: 0.4353190726666535
Epoch: 17
Training loss: 0.3132088063792749 - MAE: 0.44469758632876605
Validation loss : 0.30206634426439133 - MAE: 0.4341864746339031
Epoch: 18
Training loss: 0.3158950824629177 - MAE: 0.4462852090206161
Validation loss : 0.29990197355682785 - MAE: 0.43375448437821906
Epoch: 19
Training loss: 0.31697546311400154 - MAE: 0.44707657889454633
Validation loss : 0.29999372725551193 - MAE: 0.4333450350832339
Prediction MAE: 0.4294
Finished at: 10:28:59
Time taken: 14229 s.