Started at: 10:44:21

({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})

Epoch: 0
Training loss: 0.19173999391134502 - MSE: 0.3262300431900936
Validation loss: 0.1707202664443425 - MSE: 0.3181949529015193

Epoch: 1
Training loss: 0.1834320445517892 - MSE: 0.3230671429831198
Validation loss: 0.17099061012268066 - MSE: 0.31859832233250407

Epoch: 2
Training loss: 0.18393246207422423 - MSE: 0.3238126735351295
Validation loss: 0.17154425446476254 - MSE: 0.31947564650493276

Epoch: 3
Training loss: 0.18445654531705727 - MSE: 0.32449193418262723
Validation loss: 0.17155078649520875 - MSE: 0.31949343827852444
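Note: the configuration dict above is a standard BERT-base masked-LM setup with an enlarged vocabulary (50104 tokens). A minimal sketch of how such a model could be rebuilt from these values, assuming the Hugging Face transformers library (which the printed config format suggests), is shown below; it is illustrative only and not the exact training script behind this log.

# Sketch: reconstruct the logged configuration with transformers (assumed library).
# Parameter names mirror the dict printed above; '_commit_hash' and 'architectures'
# are metadata and are not needed to instantiate the model.
from transformers import BertConfig, BertForMaskedLM

config = BertConfig(
    vocab_size=50104,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
)
model = BertForMaskedLM(config)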