Started at: 15:32:56
nb-bert-base, 1e-06, 128
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
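A minimal sketch of how a run with the hyperparameters logged above ("nb-bert-base, 1e-06, 128") might be set up, assuming this log comes from a Hugging Face Transformers regression fine-tune; the Hub model id, the single-output regression head, and the AdamW optimizer are assumptions, not taken from the log itself.

# Assumed setup for a run like the one logged above. Model id, learning rate
# and batch size mirror the "nb-bert-base, 1e-06, 128" line; the regression
# head (num_labels=1) and optimizer choice are guesses for illustration.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "NbAiLab/nb-bert-base"   # assumed Hub id for nb-bert-base
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(
    model_name,
    num_labels=1,                     # single scalar output (regression)
    problem_type="regression",
)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-06)
batch_size = 128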
Epoch: 0
Training loss: 0.7517342874407769 - MAE: 0.7057451002280275
Validation loss : 0.2215285485281664 - MAE: 0.36895833487830076
Epoch: 1
Training loss: 0.19639551222324372 - MAE: 0.3396230939000504
Validation loss : 0.19613290621953852 - MAE: 0.3407071633363561
Epoch: 2
Training loss: 0.189819974899292 - MAE: 0.33152794044204265
Validation loss : 0.1862245491322349 - MAE: 0.3306322601904093
Epoch: 3
Training loss: 0.18293176978826522 - MAE: 0.32441511104916815
Validation loss : 0.17851299398085652 - MAE: 0.3229232679845315
Epoch: 4
Training loss: 0.17267104253172874 - MAE: 0.31568550798758915
Validation loss : 0.1724623275153777 - MAE: 0.3165547082446247
Epoch: 5
Training loss: 0.16829490080475806 - MAE: 0.3120916956077127
Validation loss : 0.16718720863847172 - MAE: 0.3110304108857384
Epoch: 6
Training loss: 0.16253711566329002 - MAE: 0.3063455763538055
Validation loss : 0.16258587469072902 - MAE: 0.30655345038718496
Epoch: 7
Training loss: 0.16034438580274582 - MAE: 0.30328251732911143
Validation loss : 0.1597431661451564 - MAE: 0.30170444947052183
Epoch: 8
Training loss: 0.1557214939594269 - MAE: 0.2978233031704816
Validation loss : 0.15588842639151743 - MAE: 0.2980465650755997
Epoch: 9
Training loss: 0.15138985186815263 - MAE: 0.29315482496406786
Validation loss : 0.15272060474928686 - MAE: 0.2938781084993315
Epoch: 10
Training loss: 0.14734334856271744 - MAE: 0.28864492743138265
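For reference, a hedged sketch of how per-epoch lines like those above could be produced, with MSE as the training criterion and mean absolute error reported alongside it. The loss choice and the helper name run_epoch are assumptions for illustration; the log itself only shows the printed values.

# Hypothetical epoch loop matching the log format above; MSELoss and the
# function name are assumptions, not confirmed by the log.
import torch

def run_epoch(model, loader, optimizer=None):
    criterion = torch.nn.MSELoss()
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    for batch in loader:
        labels = batch["labels"].float()
        outputs = model(**{k: v for k, v in batch.items() if k != "labels"})
        preds = outputs.logits.squeeze(-1)
        loss = criterion(preds, labels)
        if optimizer is not None:     # training pass only
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - labels).abs().mean().item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# train_loss, train_mae = run_epoch(model, train_loader, optimizer)
# val_loss, val_mae = run_epoch(model, val_loader)
# print(f"Training loss: {train_loss} - MAE: {train_mae}")
# print(f"Validation loss : {val_loss} - MAE: {val_mae}")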