Started at: 16:03:31
nb-bert-base, 1e-06, 128
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
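
The header above records the model tag (nb-bert-base), the learning rate (1e-06), and the value 128 (the log does not say whether this is the batch size or the maximum sequence length), followed by the full BERT config dump. Below is a minimal sketch of how a run with these settings could be wired up with the transformers and torch libraries; the public model id NbAiLab/nb-bert-base, the single-output regression head, the AdamW optimizer, and the L1 loss (matching the MAE reported per epoch) are assumptions for illustration, not details taken from this log.

import torch
from torch.optim import AdamW
from transformers import AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "NbAiLab/nb-bert-base"   # assumed public counterpart of the logged local checkpoint

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_NAME,
    num_labels=1,                     # single continuous target (regression)
    problem_type="regression",
)

optimizer = AdamW(model.parameters(), lr=1e-6)   # learning rate taken from the log header
loss_fn = torch.nn.L1Loss()                      # L1 loss == MAE, the metric reported per epoch

def training_step(texts, targets):
    # One gradient step: tokenize, forward pass, L1 loss, backward, update.
    enc = tokenizer(
        texts,
        padding=True,
        truncation=True,
        max_length=128,               # "128" from the log header; its exact meaning is assumed
        return_tensors="pt",
    )
    labels = torch.tensor(targets, dtype=torch.float32).unsqueeze(-1)
    out = model(**enc)
    loss = loss_fn(out.logits, labels)
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()
    return loss.item()

The per-epoch records that follow report training and validation loss together with MAE.
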
Epoch: 0
Training loss: 0.7271318706599149 - MAE: 0.6831216644315611
Validation loss : 0.42457069094116623 - MAE: 0.5310234954937942
Epoch: 1
Training loss: 0.4017618325623599 - MAE: 0.5113891617478459
Validation loss : 0.3601061556790326 - MAE: 0.47872181276206954
Epoch: 2
Training loss: 0.3346875086426735 - MAE: 0.4557084281637824
Validation loss : 0.3026893593975016 - MAE: 0.42968294420152836
Epoch: 3
Training loss: 0.27502210925925863 - MAE: 0.4083413682966922
Validation loss : 0.2493422031402588 - MAE: 0.38829624389412676
Epoch: 4
Training loss: 0.22922010150822728 - MAE: 0.374115376863141
Validation loss : 0.21512102678015427 - MAE: 0.36273443011280543
Epoch: 5
Training loss: 0.20582241117954253 - MAE: 0.35785392938520194
Validation loss : 0.19260533154010773 - MAE: 0.3445584427568667
Epoch: 6
Training loss: 0.18504355576905337 - MAE: 0.33999207572682016
Validation loss : 0.18239208814260122 - MAE: 0.3362534719407864
Epoch: 7
Training loss: 0.17347032624212178 - MAE: 0.32901041370404
Validation loss : 0.1727527996172776 - MAE: 0.3273999893030842
Epoch: 8
Training loss: 0.16140718588774855 - MAE: 0.3185892364257231
Validation loss : 0.16311134518803777 - MAE: 0.31766631226614
Epoch: 9
Training loss: 0.1555855579674244 - MAE: 0.3131268902757298
Validation loss : 0.15599115515077436 - MAE: 0.31016476266703236
Epoch: 10
Training loss: 0.14619598415764895 - MAE: 0.303503172138375
Validation loss : 0.150120818937147 - MAE: 0.3037910577986695
Epoch: 11
Training loss: 0.14181994023648176 - MAE: 0.2996883375884051
Validation loss : 0.1466762089246028 - MAE: 0.2993610834634052
Epoch: 12
Training loss: 0.13435588756745512 - MAE: 0.29110208978373203
Validation loss : 0.14056996175566236 - MAE: 0.29225523563049816
Epoch: 13
Training loss: 0.1287676831538027 - MAE: 0.28405198785284874
Validation loss : 0.1365019944874016 - MAE: 0.2873751026433757
Epoch: 14
Training loss: 0.12269411385059356 - MAE: 0.2770283447302897
Validation loss : 0.1298245403009492 - MAE: 0.2788738019688051
Epoch: 15
Training loss: 0.11961868865923449 - MAE: 0.2728991609541507
Validation loss : 0.12097394889270938 - MAE: 0.26905557549750797
Epoch: 16
Started at: 14:34:57
nb-bert-base, 1e-06, 128
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.7271318706599149 - MAE: 0.6831216644315611
Validation loss : 0.42457069094116623 - MAE: 0.5310234954937942
Epoch: 1
Training loss: 0.4017618325623599 - MAE: 0.5113891617478459
Validation loss : 0.3601061556790326 - MAE: 0.47872181276206954
Epoch: 2
Training loss: 0.3346875086426735 - MAE: 0.4557084281637824
Validation loss : 0.3026893593975016 - MAE: 0.42968294420152836
Epoch: 3
Training loss: 0.27502210925925863 - MAE: 0.4083413682966922
Validation loss : 0.2493422031402588 - MAE: 0.38829624389412676
Epoch: 4
Training loss: 0.22922010150822728 - MAE: 0.374115376863141
Validation loss : 0.21512102678015427 - MAE: 0.36273443011280543
Epoch: 5
Training loss: 0.20582241117954253 - MAE: 0.35785392938520194
Validation loss : 0.19260533154010773 - MAE: 0.3445584427568667
Epoch: 6
Training loss: 0.18504355576905337 - MAE: 0.33999207572682016
Validation loss : 0.18239208814260122 - MAE: 0.3362534719407864
Epoch: 7
Training loss: 0.17347032624212178 - MAE: 0.32901041370404
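
Each run above logs a fixed three-line pattern per epoch (an "Epoch: N" marker, a training line, and a validation line, each carrying a loss and an MAE). A small parsing sketch for turning a log in this format into per-epoch records is given below; the function name parse_log and the results.log file name are placeholders, not part of the original tooling.

import re

EPOCH_RE = re.compile(r"^Epoch: (\d+)")
TRAIN_RE = re.compile(r"^Training loss: ([0-9.eE+-]+) - MAE: ([0-9.eE+-]+)")
VAL_RE = re.compile(r"^Validation loss : ([0-9.eE+-]+) - MAE: ([0-9.eE+-]+)")

def parse_log(path):
    # Returns one dict per "Epoch:" marker, in file order; if the log holds
    # several runs back to back (as above), their epochs are simply concatenated.
    epochs, current = [], None
    with open(path, encoding="utf-8") as fh:
        for raw in fh:
            line = raw.strip()
            if m := EPOCH_RE.match(line):
                current = {"epoch": int(m.group(1))}
                epochs.append(current)
            elif current is not None and (m := TRAIN_RE.match(line)):
                current["train_loss"], current["train_mae"] = map(float, m.groups())
            elif current is not None and (m := VAL_RE.match(line)):
                current["val_loss"], current["val_mae"] = map(float, m.groups())
    return epochs

Called as parse_log("results.log"), the first record for the run above would hold epoch 0 with train_loss 0.7271..., train_mae 0.6831..., val_loss 0.4245..., and val_mae 0.5310....
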