Started at: 14:44:13
norbert, 0.001, 64
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
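The configuration above is a standard BERT-base layout (12 layers, 12 attention heads, hidden size 768) with a 32,922-token vocabulary. A minimal sketch of rebuilding it with the Hugging Face transformers library follows; the use of BertForMaskedLM mirrors the logged 'architectures' field, but this is an illustrative assumption, not the script that produced this log.

from transformers import BertConfig, BertForMaskedLM

# Illustrative sketch only: reconstruct the logged BERT-base configuration.
config = BertConfig(
    vocab_size=32922,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
)
model = BertForMaskedLM(config)  # architecture named in the logged config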
Epoch: 0
Training loss: 0.2648280833405678 - MAE: 0.38654277297025236
Validation loss : 0.17447485819910513 - MAE: 0.32166440664310475
Epoch: 1
Training loss: 0.16758705841170418 - MAE: 0.3107190439985688
Validation loss : 0.16586369230891718 - MAE: 0.31147915994630654
Epoch: 2
Training loss: 0.16108467580393107 - MAE: 0.3051798852143876
Validation loss : 0.16238286752592435 - MAE: 0.3066670130176498
Epoch: 3
Training loss: 0.15638263553682 - MAE: 0.3001170658910473
Validation loss : 0.16007356345653534 - MAE: 0.3008243350429231
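For reference, each per-epoch line above pairs a loss value with a mean absolute error. A minimal sketch of that reporting pattern, assuming a PyTorch setup with an MSE-style placeholder loss (the actual loss function is not identified in the log) and dummy tensors standing in for real predictions and targets:

import torch
import torch.nn.functional as F

# Dummy stand-ins for model predictions and gold targets.
preds = torch.tensor([0.40, 0.65, 0.20, 0.90])
targets = torch.tensor([0.50, 0.60, 0.10, 0.80])

loss = F.mse_loss(preds, targets)             # placeholder loss; not confirmed by the log
mae = torch.mean(torch.abs(preds - targets))  # mean absolute error, as reported above

epoch = 0
print(f"Epoch: {epoch}")
print(f"Training loss: {loss.item()} - MAE: {mae.item()}")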