peldrak committed on
Commit f12a158
1 Parent(s): 62a6762

End of training

Files changed (4)
  1. README.md +167 -0
  2. config.json +88 -0
  3. pytorch_model.bin +3 -0
  4. training_args.bin +3 -0
README.md ADDED
@@ -0,0 +1,167 @@
+ ---
+ license: other
+ base_model: nvidia/segformer-b0-finetuned-ade-512-512
+ tags:
+ - vision
+ - image-segmentation
+ - generated_from_trainer
+ model-index:
+ - name: segformer-finetuned-coastalDataset
+ results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # segformer-finetuned-coastalDataset
+
+ This model is a fine-tuned version of [nvidia/segformer-b0-finetuned-ade-512-512](https://huggingface.co/nvidia/segformer-b0-finetuned-ade-512-512) on the peldrak/coastal_dataset dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.5879
+ - Mean Iou: 0.5639
+ - Mean Accuracy: 0.6875
+ - Overall Accuracy: 0.8193
+ - Accuracy Water: 0.9016
+ - Accuracy Whitewater: 0.7122
+ - Accuracy Sediment: 0.8348
+ - Accuracy Other Natural Terrain: 0.0203
+ - Accuracy Vegetation: 0.8852
+ - Accuracy Development: 0.8700
+ - Accuracy Unknown: 0.5885
+ - Iou Water: 0.8178
+ - Iou Whitewater: 0.5744
+ - Iou Sediment: 0.6376
+ - Iou Other Natural Terrain: 0.0202
+ - Iou Vegetation: 0.7148
+ - Iou Development: 0.6267
+ - Iou Unknown: 0.5559
+
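+ A minimal, illustrative sketch of loading this checkpoint for inference is shown below. The repository id and the input image path are assumptions, and preprocessing may differ from the exact training setup.
+
+ ```python
+ import torch
+ from PIL import Image
+ from transformers import SegformerImageProcessor, SegformerForSemanticSegmentation
+
+ # Assumed Hub id for this checkpoint (the model name from the metadata above).
+ model_id = "peldrak/segformer-finetuned-coastalDataset"
+ processor = SegformerImageProcessor.from_pretrained(model_id)
+ model = SegformerForSemanticSegmentation.from_pretrained(model_id)
+
+ image = Image.open("coastal_scene.jpg")  # placeholder input image
+ inputs = processor(images=image, return_tensors="pt")
+
+ with torch.no_grad():
+     logits = model(**inputs).logits  # (1, num_labels, H/4, W/4)
+
+ # Upsample to the input resolution and take the per-pixel argmax.
+ upsampled = torch.nn.functional.interpolate(
+     logits, size=image.size[::-1], mode="bilinear", align_corners=False
+ )
+ pred = upsampled.argmax(dim=1)[0]  # class index per pixel (see id2label in config.json)
+ ```
+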
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 6e-05
+ - train_batch_size: 4
+ - eval_batch_size: 4
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 5
+
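+ A rough, illustrative reconstruction of this configuration with the `transformers` `Trainer` API is sketched below; dataset preparation and the metric function are omitted, and the names used are assumptions rather than the exact training script.
+
+ ```python
+ from transformers import SegformerForSemanticSegmentation, Trainer, TrainingArguments
+
+ # Re-initialize the segmentation head for the 7 coastal classes (mirrors config.json).
+ id2label = {0: "water", 1: "whitewater", 2: "sediment", 3: "other_natural_terrain",
+             4: "vegetation", 5: "development", 6: "unknown"}
+ model = SegformerForSemanticSegmentation.from_pretrained(
+     "nvidia/segformer-b0-finetuned-ade-512-512",
+     num_labels=len(id2label),
+     id2label=id2label,
+     label2id={v: k for k, v in id2label.items()},
+     ignore_mismatched_sizes=True,  # the 150-class ADE20K head is replaced
+ )
+
+ training_args = TrainingArguments(
+     output_dir="segformer-finetuned-coastalDataset",
+     learning_rate=6e-5,
+     per_device_train_batch_size=4,
+     per_device_eval_batch_size=4,
+     seed=42,
+     lr_scheduler_type="linear",
+     num_train_epochs=5,
+ )
+
+ # train_dataset / eval_dataset would be preprocessed splits of peldrak/coastal_dataset
+ # (pixel_values + labels); their construction is not shown here.
+ trainer = Trainer(model=model, args=training_args)  # add train_dataset=... / eval_dataset=...
+ # trainer.train()  # launches fine-tuning once the datasets are attached
+ ```
+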
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Accuracy Water | Accuracy Whitewater | Accuracy Sediment | Accuracy Other Natural Terrain | Accuracy Vegetation | Accuracy Development | Accuracy Unknown | Iou Water | Iou Whitewater | Iou Sediment | Iou Other Natural Terrain | Iou Vegetation | Iou Development | Iou Unknown |
+ |:-------------:|:-----:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:--------------:|:-------------------:|:-----------------:|:------------------------------:|:-------------------:|:--------------------:|:----------------:|:---------:|:--------------:|:------------:|:-------------------------:|:--------------:|:---------------:|:-----------:|
+ | 1.87 | 0.05 | 20 | 1.6313 | 0.1490 | 0.2758 | 0.4290 | 0.6833 | 0.0001 | 0.0489 | 0.0865 | 0.4307 | 0.6793 | 0.0018 | 0.4576 | 0.0001 | 0.0344 | 0.0355 | 0.3398 | 0.1736 | 0.0018 |
+ | 1.7514 | 0.11 | 40 | 1.4505 | 0.1856 | 0.3122 | 0.5350 | 0.8413 | 0.0027 | 0.0344 | 0.0343 | 0.6204 | 0.6514 | 0.0012 | 0.5311 | 0.0026 | 0.0323 | 0.0236 | 0.4439 | 0.2646 | 0.0011 |
+ | 1.1771 | 0.16 | 60 | 1.3525 | 0.2077 | 0.3388 | 0.5555 | 0.7675 | 0.0026 | 0.0764 | 0.0551 | 0.7416 | 0.7199 | 0.0087 | 0.5454 | 0.0026 | 0.0653 | 0.0363 | 0.4716 | 0.3245 | 0.0084 |
+ | 1.1472 | 0.22 | 80 | 1.2135 | 0.2482 | 0.3481 | 0.6082 | 0.7970 | 0.0002 | 0.0319 | 0.0045 | 0.8589 | 0.4393 | 0.3046 | 0.5819 | 0.0002 | 0.0305 | 0.0043 | 0.4805 | 0.3394 | 0.3003 |
+ | 1.2839 | 0.27 | 100 | 1.1400 | 0.2533 | 0.3696 | 0.6192 | 0.8476 | 0.0020 | 0.2529 | 0.0039 | 0.8292 | 0.5977 | 0.0541 | 0.5833 | 0.0020 | 0.2047 | 0.0038 | 0.5505 | 0.3763 | 0.0529 |
+ | 0.9397 | 0.32 | 120 | 1.0853 | 0.2984 | 0.4123 | 0.6538 | 0.8186 | 0.0036 | 0.2107 | 0.0021 | 0.8561 | 0.6459 | 0.3492 | 0.6367 | 0.0036 | 0.1744 | 0.0021 | 0.5430 | 0.3863 | 0.3426 |
+ | 1.1331 | 0.38 | 140 | 1.0009 | 0.3224 | 0.4341 | 0.6777 | 0.8444 | 0.0097 | 0.2415 | 0.0003 | 0.8647 | 0.6689 | 0.4095 | 0.6470 | 0.0096 | 0.2154 | 0.0003 | 0.5721 | 0.4159 | 0.3965 |
+ | 1.5987 | 0.43 | 160 | 0.9861 | 0.3406 | 0.4449 | 0.6938 | 0.8562 | 0.0522 | 0.2810 | 0.0000 | 0.9057 | 0.6105 | 0.4087 | 0.6622 | 0.0522 | 0.2445 | 0.0000 | 0.5961 | 0.4354 | 0.3940 |
+ | 1.1075 | 0.49 | 180 | 0.9617 | 0.3709 | 0.5118 | 0.6969 | 0.8341 | 0.0742 | 0.5820 | 0.0002 | 0.7522 | 0.8947 | 0.4451 | 0.7213 | 0.0738 | 0.4272 | 0.0002 | 0.5646 | 0.3739 | 0.4356 |
+ | 1.241 | 0.54 | 200 | 0.8786 | 0.3911 | 0.5123 | 0.7292 | 0.8643 | 0.0347 | 0.6411 | 0.0 | 0.8583 | 0.7589 | 0.4285 | 0.7253 | 0.0346 | 0.4350 | 0.0 | 0.6104 | 0.5052 | 0.4272 |
+ | 0.9254 | 0.59 | 220 | 0.8723 | 0.4015 | 0.5230 | 0.7352 | 0.8555 | 0.0435 | 0.7109 | 0.0002 | 0.8671 | 0.7497 | 0.4341 | 0.7074 | 0.0435 | 0.4563 | 0.0002 | 0.6344 | 0.5411 | 0.4272 |
+ | 1.6323 | 0.65 | 240 | 0.8611 | 0.3919 | 0.5215 | 0.7227 | 0.8817 | 0.0793 | 0.6099 | 0.0 | 0.7877 | 0.8457 | 0.4461 | 0.6969 | 0.0792 | 0.4320 | 0.0 | 0.6142 | 0.4820 | 0.4388 |
+ | 1.1124 | 0.7 | 260 | 0.8623 | 0.3678 | 0.4825 | 0.7113 | 0.8972 | 0.0435 | 0.3597 | 0.0 | 0.8140 | 0.7816 | 0.4812 | 0.6763 | 0.0435 | 0.2536 | 0.0 | 0.6250 | 0.5060 | 0.4702 |
+ | 0.9017 | 0.76 | 280 | 0.8263 | 0.4118 | 0.5335 | 0.7421 | 0.8451 | 0.0661 | 0.7203 | 0.0 | 0.8842 | 0.7587 | 0.4598 | 0.7239 | 0.0658 | 0.4814 | 0.0 | 0.6336 | 0.5244 | 0.4533 |
+ | 0.7134 | 0.81 | 300 | 0.8305 | 0.4318 | 0.5352 | 0.7519 | 0.8638 | 0.1723 | 0.6488 | 0.0 | 0.9241 | 0.6477 | 0.4897 | 0.7431 | 0.1710 | 0.4896 | 0.0 | 0.6302 | 0.5103 | 0.4783 |
+ | 2.0471 | 0.86 | 320 | 0.8231 | 0.4303 | 0.5350 | 0.7392 | 0.8131 | 0.1735 | 0.6075 | 0.0 | 0.9247 | 0.7085 | 0.5180 | 0.7055 | 0.1690 | 0.4940 | 0.0 | 0.6035 | 0.5360 | 0.5043 |
+ | 0.9373 | 0.92 | 340 | 0.8354 | 0.4236 | 0.5535 | 0.7365 | 0.8273 | 0.1701 | 0.7904 | 0.0 | 0.8444 | 0.7658 | 0.4763 | 0.6933 | 0.1674 | 0.4263 | 0.0 | 0.6541 | 0.5559 | 0.4685 |
+ | 0.7075 | 0.97 | 360 | 0.8055 | 0.4312 | 0.5480 | 0.7391 | 0.8062 | 0.1853 | 0.6545 | 0.0 | 0.9043 | 0.7942 | 0.4911 | 0.7103 | 0.1831 | 0.4805 | 0.0 | 0.6084 | 0.5549 | 0.4813 |
+ | 0.8853 | 1.03 | 380 | 0.7611 | 0.4306 | 0.5498 | 0.7599 | 0.8700 | 0.0757 | 0.7313 | 0.0 | 0.8895 | 0.7980 | 0.4840 | 0.7567 | 0.0752 | 0.4884 | 0.0 | 0.6522 | 0.5664 | 0.4749 |
+ | 1.0012 | 1.08 | 400 | 0.7337 | 0.4253 | 0.5453 | 0.7583 | 0.8877 | 0.0453 | 0.7529 | 0.0 | 0.8610 | 0.7738 | 0.4965 | 0.7596 | 0.0450 | 0.5005 | 0.0 | 0.6433 | 0.5427 | 0.4857 |
+ | 0.7715 | 1.14 | 420 | 0.7285 | 0.4378 | 0.5714 | 0.7579 | 0.8675 | 0.1506 | 0.7948 | 0.0 | 0.8336 | 0.8415 | 0.5120 | 0.7591 | 0.1452 | 0.4914 | 0.0 | 0.6554 | 0.5151 | 0.4987 |
+ | 0.5729 | 1.19 | 440 | 0.7360 | 0.4309 | 0.5496 | 0.7618 | 0.8724 | 0.0764 | 0.7973 | 0.0 | 0.8935 | 0.7223 | 0.4855 | 0.7527 | 0.0750 | 0.4785 | 0.0 | 0.6732 | 0.5619 | 0.4752 |
+ | 0.5138 | 1.24 | 460 | 0.7152 | 0.4497 | 0.5760 | 0.7590 | 0.8691 | 0.2239 | 0.7373 | 0.0 | 0.8443 | 0.8483 | 0.5094 | 0.7523 | 0.2183 | 0.5076 | 0.0 | 0.6440 | 0.5567 | 0.4691 |
+ | 0.638 | 1.3 | 480 | 0.7346 | 0.4663 | 0.5767 | 0.7739 | 0.8889 | 0.2470 | 0.7250 | 0.0 | 0.8957 | 0.7791 | 0.5009 | 0.7634 | 0.2411 | 0.5171 | 0.0 | 0.6654 | 0.5914 | 0.4855 |
+ | 0.9174 | 1.35 | 500 | 0.7278 | 0.4749 | 0.6023 | 0.7694 | 0.8974 | 0.3327 | 0.8189 | 0.0 | 0.8090 | 0.8524 | 0.5057 | 0.7512 | 0.3158 | 0.5165 | 0.0 | 0.6571 | 0.5974 | 0.4861 |
+ | 0.7307 | 1.41 | 520 | 0.7201 | 0.4577 | 0.5743 | 0.7691 | 0.8794 | 0.2177 | 0.7779 | 0.0 | 0.8824 | 0.7608 | 0.5021 | 0.7621 | 0.2087 | 0.5104 | 0.0 | 0.6612 | 0.5713 | 0.4898 |
+ | 0.5905 | 1.46 | 540 | 0.7102 | 0.4616 | 0.5687 | 0.7759 | 0.8938 | 0.2008 | 0.7816 | 0.0 | 0.9061 | 0.6872 | 0.5111 | 0.7668 | 0.1952 | 0.5292 | 0.0 | 0.6719 | 0.5690 | 0.4988 |
+ | 0.7556 | 1.51 | 560 | 0.6921 | 0.4849 | 0.6087 | 0.7781 | 0.8667 | 0.3660 | 0.8135 | 0.0 | 0.8773 | 0.8172 | 0.5202 | 0.7639 | 0.3393 | 0.5144 | 0.0 | 0.6815 | 0.5949 | 0.5007 |
+ | 0.5691 | 1.57 | 580 | 0.6654 | 0.5007 | 0.6110 | 0.7910 | 0.9092 | 0.4279 | 0.7724 | 0.0 | 0.8963 | 0.7609 | 0.5106 | 0.7795 | 0.3765 | 0.5645 | 0.0 | 0.6912 | 0.5950 | 0.4980 |
+ | 0.754 | 1.62 | 600 | 0.7064 | 0.4853 | 0.6072 | 0.7842 | 0.8895 | 0.3373 | 0.8351 | 0.0 | 0.8783 | 0.8121 | 0.4981 | 0.7844 | 0.3136 | 0.5296 | 0.0 | 0.6865 | 0.5956 | 0.4877 |
+ | 0.4217 | 1.68 | 620 | 0.6744 | 0.5108 | 0.6429 | 0.7882 | 0.8827 | 0.5383 | 0.8470 | 0.0 | 0.8485 | 0.8541 | 0.5300 | 0.7796 | 0.4479 | 0.5586 | 0.0 | 0.6802 | 0.5980 | 0.5109 |
+ | 0.5813 | 1.73 | 640 | 0.6451 | 0.5258 | 0.6448 | 0.7961 | 0.8748 | 0.6014 | 0.8069 | 0.0 | 0.9028 | 0.7739 | 0.5536 | 0.7825 | 0.4948 | 0.5849 | 0.0 | 0.6869 | 0.6039 | 0.5274 |
+ | 0.8824 | 1.78 | 660 | 0.6452 | 0.5195 | 0.6373 | 0.7957 | 0.8919 | 0.5596 | 0.7792 | 0.0 | 0.8934 | 0.7956 | 0.5414 | 0.7877 | 0.4588 | 0.5878 | 0.0 | 0.6851 | 0.5950 | 0.5222 |
+ | 0.8801 | 1.84 | 680 | 0.6407 | 0.5221 | 0.6411 | 0.7997 | 0.8909 | 0.5337 | 0.8300 | 0.0 | 0.8956 | 0.7975 | 0.5400 | 0.7932 | 0.4465 | 0.5765 | 0.0 | 0.6972 | 0.6179 | 0.5236 |
+ | 0.545 | 1.89 | 700 | 0.6437 | 0.5231 | 0.6495 | 0.7967 | 0.8916 | 0.5705 | 0.8308 | 0.0 | 0.8614 | 0.8289 | 0.5630 | 0.7970 | 0.4722 | 0.5777 | 0.0 | 0.6859 | 0.6159 | 0.5131 |
+ | 0.7694 | 1.95 | 720 | 0.6446 | 0.5210 | 0.6416 | 0.7958 | 0.8863 | 0.5316 | 0.8111 | 0.0 | 0.8738 | 0.8096 | 0.5787 | 0.7864 | 0.4438 | 0.5891 | 0.0 | 0.6820 | 0.6094 | 0.5361 |
+ | 1.889 | 2.0 | 740 | 0.6324 | 0.5170 | 0.6365 | 0.7995 | 0.8868 | 0.4758 | 0.8012 | 0.0 | 0.8926 | 0.8255 | 0.5740 | 0.7961 | 0.3991 | 0.5731 | 0.0 | 0.6950 | 0.6044 | 0.5512 |
+ | 0.447 | 2.05 | 760 | 0.6562 | 0.5083 | 0.6147 | 0.7962 | 0.9070 | 0.4361 | 0.7920 | 0.0 | 0.9081 | 0.7138 | 0.5459 | 0.7961 | 0.3974 | 0.5609 | 0.0 | 0.6909 | 0.5885 | 0.5241 |
+ | 0.6949 | 2.11 | 780 | 0.6405 | 0.5224 | 0.6414 | 0.8004 | 0.9033 | 0.5488 | 0.8034 | 0.0 | 0.8858 | 0.8046 | 0.5441 | 0.7978 | 0.4573 | 0.5775 | 0.0 | 0.6971 | 0.6020 | 0.5249 |
+ | 0.5449 | 2.16 | 800 | 0.6494 | 0.5237 | 0.6554 | 0.7960 | 0.8903 | 0.5944 | 0.8515 | 0.0 | 0.8521 | 0.8556 | 0.5437 | 0.7905 | 0.4978 | 0.5439 | 0.0 | 0.6979 | 0.6180 | 0.5178 |
+ | 1.6569 | 2.22 | 820 | 0.6303 | 0.5205 | 0.6346 | 0.8006 | 0.9053 | 0.5253 | 0.8029 | 0.0 | 0.9007 | 0.7813 | 0.5267 | 0.8006 | 0.4304 | 0.6054 | 0.0 | 0.6910 | 0.6089 | 0.5069 |
+ | 0.5619 | 2.27 | 840 | 0.6466 | 0.5252 | 0.6626 | 0.7978 | 0.8880 | 0.6369 | 0.8427 | 0.0001 | 0.8513 | 0.8607 | 0.5586 | 0.7935 | 0.4797 | 0.5591 | 0.0001 | 0.6966 | 0.6152 | 0.5321 |
+ | 0.734 | 2.32 | 860 | 0.6309 | 0.5282 | 0.6619 | 0.8001 | 0.8892 | 0.6488 | 0.8231 | 0.0004 | 0.8694 | 0.8590 | 0.5437 | 0.7951 | 0.4853 | 0.5810 | 0.0004 | 0.6976 | 0.6171 | 0.5209 |
+ | 0.3254 | 2.38 | 880 | 0.6399 | 0.5227 | 0.6547 | 0.7972 | 0.8841 | 0.6377 | 0.8512 | 0.0005 | 0.8875 | 0.8247 | 0.4968 | 0.7928 | 0.4789 | 0.5766 | 0.0005 | 0.6984 | 0.6314 | 0.4804 |
+ | 1.0005 | 2.43 | 900 | 0.6275 | 0.5331 | 0.6725 | 0.8045 | 0.8891 | 0.6785 | 0.8520 | 0.0008 | 0.8648 | 0.8676 | 0.5548 | 0.8050 | 0.4886 | 0.5739 | 0.0008 | 0.7075 | 0.6237 | 0.5323 |
+ | 1.4449 | 2.49 | 920 | 0.6520 | 0.5257 | 0.6620 | 0.7970 | 0.8794 | 0.6432 | 0.8247 | 0.0 | 0.8643 | 0.8725 | 0.5498 | 0.8000 | 0.4902 | 0.5725 | 0.0 | 0.6851 | 0.6019 | 0.5302 |
+ | 0.5484 | 2.54 | 940 | 0.6135 | 0.5324 | 0.6623 | 0.8026 | 0.9022 | 0.6350 | 0.8218 | 0.0001 | 0.8564 | 0.8565 | 0.5639 | 0.7974 | 0.4895 | 0.5908 | 0.0001 | 0.6964 | 0.6243 | 0.5282 |
+ | 0.7214 | 2.59 | 960 | 0.6350 | 0.5371 | 0.6596 | 0.8039 | 0.8980 | 0.6439 | 0.8339 | 0.0016 | 0.8741 | 0.7856 | 0.5798 | 0.7969 | 0.5024 | 0.6041 | 0.0016 | 0.6936 | 0.6329 | 0.5283 |
+ | 0.485 | 2.65 | 980 | 0.6613 | 0.5285 | 0.6530 | 0.8002 | 0.8878 | 0.6021 | 0.8467 | 0.0001 | 0.8838 | 0.8049 | 0.5457 | 0.7952 | 0.4824 | 0.5778 | 0.0001 | 0.6939 | 0.6279 | 0.5221 |
+ | 0.5479 | 2.7 | 1000 | 0.6283 | 0.5371 | 0.6658 | 0.8044 | 0.8941 | 0.6553 | 0.8317 | 0.0021 | 0.8710 | 0.8446 | 0.5616 | 0.8012 | 0.5075 | 0.5979 | 0.0021 | 0.6957 | 0.6260 | 0.5290 |
+ | 0.6781 | 2.76 | 1020 | 0.6078 | 0.5417 | 0.6759 | 0.8081 | 0.9060 | 0.6934 | 0.8361 | 0.0063 | 0.8521 | 0.8568 | 0.5801 | 0.8088 | 0.5185 | 0.6067 | 0.0063 | 0.7038 | 0.6065 | 0.5410 |
+ | 0.3557 | 2.81 | 1040 | 0.5949 | 0.5548 | 0.6823 | 0.8130 | 0.8873 | 0.7176 | 0.8137 | 0.0022 | 0.8779 | 0.8524 | 0.6251 | 0.8056 | 0.5523 | 0.6173 | 0.0022 | 0.7018 | 0.6173 | 0.5868 |
+ | 0.7983 | 2.86 | 1060 | 0.6113 | 0.5472 | 0.6628 | 0.8096 | 0.8966 | 0.6911 | 0.7635 | 0.0002 | 0.9079 | 0.8009 | 0.5793 | 0.8006 | 0.5348 | 0.6157 | 0.0002 | 0.6970 | 0.6325 | 0.5497 |
+ | 0.5182 | 2.92 | 1080 | 0.5710 | 0.5614 | 0.6813 | 0.8194 | 0.9060 | 0.7021 | 0.8207 | 0.0074 | 0.8876 | 0.8353 | 0.6104 | 0.8057 | 0.5328 | 0.6448 | 0.0074 | 0.7152 | 0.6515 | 0.5725 |
+ | 0.3936 | 2.97 | 1100 | 0.5972 | 0.5455 | 0.6803 | 0.8108 | 0.9080 | 0.6780 | 0.8812 | 0.0075 | 0.8446 | 0.8601 | 0.5831 | 0.8070 | 0.5114 | 0.5989 | 0.0074 | 0.7099 | 0.6302 | 0.5538 |
+ | 0.5296 | 3.03 | 1120 | 0.6116 | 0.5382 | 0.6592 | 0.8097 | 0.9159 | 0.6313 | 0.8518 | 0.0062 | 0.8873 | 0.7911 | 0.5309 | 0.8014 | 0.4878 | 0.6096 | 0.0062 | 0.7161 | 0.6332 | 0.5131 |
+ | 1.6836 | 3.08 | 1140 | 0.5800 | 0.5615 | 0.6846 | 0.8194 | 0.8923 | 0.7185 | 0.8144 | 0.0112 | 0.8990 | 0.8358 | 0.6212 | 0.8101 | 0.5460 | 0.6253 | 0.0112 | 0.7178 | 0.6330 | 0.5872 |
+ | 0.8853 | 3.14 | 1160 | 0.6122 | 0.5464 | 0.6744 | 0.8116 | 0.8949 | 0.7082 | 0.7910 | 0.0018 | 0.8955 | 0.8734 | 0.5562 | 0.8127 | 0.5283 | 0.6226 | 0.0018 | 0.7066 | 0.6174 | 0.5352 |
+ | 0.4434 | 3.19 | 1180 | 0.6143 | 0.5413 | 0.6698 | 0.8105 | 0.9124 | 0.6441 | 0.8456 | 0.0044 | 0.8678 | 0.8761 | 0.5377 | 0.8063 | 0.4996 | 0.6042 | 0.0044 | 0.7129 | 0.6412 | 0.5205 |
+ | 0.5819 | 3.24 | 1200 | 0.5985 | 0.5462 | 0.6631 | 0.8145 | 0.9252 | 0.6209 | 0.8380 | 0.0091 | 0.8857 | 0.8305 | 0.5327 | 0.8085 | 0.4993 | 0.6331 | 0.0091 | 0.7160 | 0.6439 | 0.5138 |
+ | 0.8438 | 3.3 | 1220 | 0.6001 | 0.5484 | 0.6701 | 0.8131 | 0.9035 | 0.6165 | 0.8493 | 0.0137 | 0.8792 | 0.8535 | 0.5749 | 0.8041 | 0.5140 | 0.6026 | 0.0137 | 0.7156 | 0.6366 | 0.5520 |
+ | 0.4053 | 3.35 | 1240 | 0.5606 | 0.5634 | 0.6881 | 0.8202 | 0.8948 | 0.7025 | 0.8443 | 0.0165 | 0.8874 | 0.8541 | 0.6174 | 0.8089 | 0.5471 | 0.6165 | 0.0165 | 0.7226 | 0.6466 | 0.5855 |
+ | 0.8888 | 3.41 | 1260 | 0.5687 | 0.5629 | 0.6899 | 0.8220 | 0.9079 | 0.6984 | 0.8642 | 0.0167 | 0.8759 | 0.8635 | 0.6025 | 0.8145 | 0.5296 | 0.6274 | 0.0167 | 0.7263 | 0.6539 | 0.5719 |
+ | 0.8388 | 3.46 | 1280 | 0.5852 | 0.5625 | 0.6830 | 0.8198 | 0.9110 | 0.6511 | 0.8699 | 0.0193 | 0.8685 | 0.8534 | 0.6080 | 0.8077 | 0.5413 | 0.6165 | 0.0193 | 0.7211 | 0.6573 | 0.5743 |
+ | 0.5133 | 3.51 | 1300 | 0.5774 | 0.5559 | 0.6925 | 0.8157 | 0.8959 | 0.7267 | 0.8361 | 0.0216 | 0.8636 | 0.9062 | 0.5974 | 0.8147 | 0.5293 | 0.6233 | 0.0216 | 0.7125 | 0.6246 | 0.5651 |
+ | 0.5101 | 3.57 | 1320 | 0.5769 | 0.5577 | 0.6738 | 0.8186 | 0.9038 | 0.7067 | 0.8071 | 0.0131 | 0.9179 | 0.7957 | 0.5721 | 0.8156 | 0.5249 | 0.6428 | 0.0131 | 0.7109 | 0.6460 | 0.5506 |
+ | 0.573 | 3.62 | 1340 | 0.5765 | 0.5629 | 0.6743 | 0.8216 | 0.9213 | 0.6779 | 0.8057 | 0.0214 | 0.8981 | 0.7927 | 0.6031 | 0.8091 | 0.5383 | 0.6313 | 0.0214 | 0.7221 | 0.6413 | 0.5771 |
+ | 0.5581 | 3.68 | 1360 | 0.5670 | 0.5685 | 0.6889 | 0.8235 | 0.8994 | 0.7362 | 0.8102 | 0.0214 | 0.9055 | 0.8307 | 0.6189 | 0.8158 | 0.5525 | 0.6423 | 0.0214 | 0.7212 | 0.6367 | 0.5895 |
+ | 0.9319 | 3.73 | 1380 | 0.5806 | 0.5607 | 0.6958 | 0.8194 | 0.9037 | 0.7315 | 0.8557 | 0.0244 | 0.8611 | 0.8851 | 0.6092 | 0.8147 | 0.5381 | 0.6227 | 0.0244 | 0.7246 | 0.6265 | 0.5741 |
+ | 0.483 | 3.78 | 1400 | 0.5748 | 0.5583 | 0.6908 | 0.8167 | 0.8951 | 0.7352 | 0.8246 | 0.0194 | 0.8761 | 0.8872 | 0.5978 | 0.8176 | 0.5608 | 0.6180 | 0.0194 | 0.7167 | 0.6099 | 0.5655 |
+ | 0.4064 | 3.84 | 1420 | 0.5940 | 0.5539 | 0.6786 | 0.8159 | 0.9095 | 0.7105 | 0.8232 | 0.0183 | 0.8930 | 0.8569 | 0.5386 | 0.8200 | 0.5544 | 0.6369 | 0.0183 | 0.7149 | 0.6131 | 0.5198 |
+ | 1.3017 | 3.89 | 1440 | 0.5762 | 0.5565 | 0.6824 | 0.8188 | 0.9056 | 0.7334 | 0.8386 | 0.0186 | 0.8997 | 0.8068 | 0.5738 | 0.8168 | 0.5215 | 0.6274 | 0.0186 | 0.7212 | 0.6415 | 0.5482 |
+ | 0.8226 | 3.95 | 1460 | 0.5865 | 0.5564 | 0.6830 | 0.8183 | 0.9012 | 0.7170 | 0.8510 | 0.0185 | 0.8963 | 0.8213 | 0.5758 | 0.8160 | 0.5217 | 0.6269 | 0.0185 | 0.7194 | 0.6423 | 0.5503 |
+ | 0.5326 | 4.0 | 1480 | 0.5755 | 0.5599 | 0.6856 | 0.8195 | 0.9002 | 0.7110 | 0.8476 | 0.0163 | 0.8926 | 0.8471 | 0.5847 | 0.8172 | 0.5352 | 0.6326 | 0.0163 | 0.7181 | 0.6442 | 0.5559 |
+ | 0.5291 | 4.05 | 1500 | 0.5918 | 0.5586 | 0.6852 | 0.8188 | 0.9077 | 0.7102 | 0.8481 | 0.0182 | 0.8826 | 0.8547 | 0.5749 | 0.8144 | 0.5389 | 0.6303 | 0.0182 | 0.7206 | 0.6401 | 0.5480 |
+ | 0.3842 | 4.11 | 1520 | 0.6051 | 0.5546 | 0.6777 | 0.8166 | 0.9220 | 0.6756 | 0.8480 | 0.0193 | 0.8684 | 0.8432 | 0.5674 | 0.8052 | 0.5263 | 0.6208 | 0.0193 | 0.7221 | 0.6477 | 0.5406 |
+ | 0.5778 | 4.16 | 1540 | 0.5712 | 0.5658 | 0.6909 | 0.8219 | 0.9046 | 0.7358 | 0.8443 | 0.0168 | 0.8848 | 0.8423 | 0.6079 | 0.8191 | 0.5539 | 0.6304 | 0.0168 | 0.7179 | 0.6512 | 0.5713 |
+ | 0.5916 | 4.22 | 1560 | 0.5725 | 0.5694 | 0.6977 | 0.8232 | 0.9084 | 0.7476 | 0.8334 | 0.0223 | 0.8669 | 0.8718 | 0.6338 | 0.8208 | 0.5608 | 0.6283 | 0.0223 | 0.7177 | 0.6485 | 0.5874 |
+ | 1.2539 | 4.27 | 1580 | 0.5933 | 0.5593 | 0.6931 | 0.8174 | 0.8984 | 0.7439 | 0.8522 | 0.0219 | 0.8733 | 0.8780 | 0.5838 | 0.8198 | 0.5450 | 0.6281 | 0.0218 | 0.7118 | 0.6386 | 0.5502 |
+ | 0.2336 | 4.32 | 1600 | 0.5619 | 0.5729 | 0.6966 | 0.8244 | 0.9012 | 0.7352 | 0.8468 | 0.0210 | 0.8806 | 0.8544 | 0.6371 | 0.8187 | 0.5710 | 0.6318 | 0.0210 | 0.7180 | 0.6558 | 0.5941 |
+ | 0.4766 | 4.38 | 1620 | 0.5724 | 0.5713 | 0.6907 | 0.8231 | 0.8980 | 0.7103 | 0.8373 | 0.0183 | 0.8900 | 0.8498 | 0.6310 | 0.8188 | 0.5772 | 0.6345 | 0.0183 | 0.7143 | 0.6464 | 0.5899 |
+ | 0.472 | 4.43 | 1640 | 0.5759 | 0.5691 | 0.6823 | 0.8226 | 0.9054 | 0.6914 | 0.8246 | 0.0170 | 0.9013 | 0.8276 | 0.6087 | 0.8179 | 0.5723 | 0.6437 | 0.0170 | 0.7136 | 0.6433 | 0.5756 |
+ | 0.5138 | 4.49 | 1660 | 0.5718 | 0.5664 | 0.6875 | 0.8213 | 0.9029 | 0.7188 | 0.8351 | 0.0189 | 0.8932 | 0.8472 | 0.5962 | 0.8179 | 0.5598 | 0.6421 | 0.0189 | 0.7144 | 0.6505 | 0.5614 |
+ | 0.301 | 4.54 | 1680 | 0.6174 | 0.5585 | 0.6894 | 0.8168 | 0.9159 | 0.7101 | 0.8678 | 0.0266 | 0.8525 | 0.8766 | 0.5762 | 0.8150 | 0.5462 | 0.6286 | 0.0265 | 0.7141 | 0.6387 | 0.5407 |
+ | 0.3135 | 4.59 | 1700 | 0.5843 | 0.5708 | 0.6961 | 0.8221 | 0.9098 | 0.6914 | 0.8800 | 0.0285 | 0.8487 | 0.8655 | 0.6484 | 0.8155 | 0.5648 | 0.6209 | 0.0285 | 0.7153 | 0.6598 | 0.5909 |
+ | 0.388 | 4.65 | 1720 | 0.5691 | 0.5695 | 0.6949 | 0.8222 | 0.9030 | 0.7334 | 0.8430 | 0.0242 | 0.8769 | 0.8655 | 0.6186 | 0.8172 | 0.5668 | 0.6350 | 0.0241 | 0.7169 | 0.6496 | 0.5766 |
+ | 0.6843 | 4.7 | 1740 | 0.5625 | 0.5742 | 0.6986 | 0.8242 | 0.9074 | 0.7206 | 0.8539 | 0.0263 | 0.8608 | 0.8666 | 0.6546 | 0.8197 | 0.5731 | 0.6347 | 0.0262 | 0.7150 | 0.6554 | 0.5950 |
+ | 0.568 | 4.76 | 1760 | 0.5926 | 0.5619 | 0.6890 | 0.8191 | 0.9107 | 0.6858 | 0.8795 | 0.0263 | 0.8608 | 0.8624 | 0.5978 | 0.8155 | 0.5425 | 0.6215 | 0.0263 | 0.7171 | 0.6497 | 0.5605 |
+ | 0.7367 | 4.81 | 1780 | 0.5849 | 0.5620 | 0.6909 | 0.8184 | 0.9107 | 0.7120 | 0.8542 | 0.0250 | 0.8596 | 0.8815 | 0.5934 | 0.8171 | 0.5606 | 0.6298 | 0.0250 | 0.7147 | 0.6299 | 0.5569 |
+ | 0.6647 | 4.86 | 1800 | 0.5605 | 0.5768 | 0.7018 | 0.8261 | 0.9154 | 0.7246 | 0.8490 | 0.0277 | 0.8480 | 0.8722 | 0.6756 | 0.8187 | 0.5712 | 0.6362 | 0.0277 | 0.7181 | 0.6543 | 0.6111 |
+ | 0.9264 | 4.92 | 1820 | 0.5652 | 0.5736 | 0.6965 | 0.8241 | 0.8986 | 0.7423 | 0.8414 | 0.0222 | 0.8861 | 0.8507 | 0.6339 | 0.8189 | 0.5768 | 0.6375 | 0.0222 | 0.7162 | 0.6540 | 0.5896 |
+ | 0.5204 | 4.97 | 1840 | 0.5879 | 0.5639 | 0.6875 | 0.8193 | 0.9016 | 0.7122 | 0.8348 | 0.0203 | 0.8852 | 0.8700 | 0.5885 | 0.8178 | 0.5744 | 0.6376 | 0.0202 | 0.7148 | 0.6267 | 0.5559 |
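+
+ The Mean IoU, per-class IoU, and accuracy columns above follow the standard semantic-segmentation definitions. A small, self-contained sketch of how such values can be computed with the `evaluate` library's `mean_iou` metric (an assumption about the tooling, not the exact evaluation script) is:
+
+ ```python
+ import evaluate
+ import numpy as np
+
+ metric = evaluate.load("mean_iou")
+
+ # Toy 2x3 prediction/reference pair over the 7 classes (0 = water ... 6 = unknown).
+ predictions = [np.array([[0, 0, 2], [4, 4, 6]])]
+ references = [np.array([[0, 0, 2], [4, 5, 6]])]
+
+ results = metric.compute(
+     predictions=predictions,
+     references=references,
+     num_labels=7,
+     ignore_index=255,  # matches semantic_loss_ignore_index in config.json
+     reduce_labels=False,
+ )
+ print(results["mean_iou"], results["per_category_iou"])
+ ```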
+
+
+ ### Framework versions
+
+ - Transformers 4.33.0
+ - Pytorch 2.0.0
+ - Datasets 2.1.0
+ - Tokenizers 0.13.3
config.json ADDED
@@ -0,0 +1,88 @@
+ {
+ "_name_or_path": "nvidia/segformer-b0-finetuned-ade-512-512",
+ "architectures": [
+ "SegformerForSemanticSegmentation"
+ ],
+ "attention_probs_dropout_prob": 0.0,
+ "classifier_dropout_prob": 0.1,
+ "decoder_hidden_size": 256,
+ "depths": [
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "downsampling_rates": [
+ 1,
+ 4,
+ 8,
+ 16
+ ],
+ "drop_path_rate": 0.1,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.0,
+ "hidden_sizes": [
+ 32,
+ 64,
+ 160,
+ 256
+ ],
+ "id2label": {
+ "0": "water",
+ "1": "whitewater",
+ "2": "sediment",
+ "3": "other_natural_terrain",
+ "4": "vegetation",
+ "5": "development",
+ "6": "unknown"
+ },
+ "image_size": 224,
+ "initializer_range": 0.02,
+ "label2id": {
+ "development": 5,
+ "other_natural_terrain": 3,
+ "sediment": 2,
+ "unknown": 6,
+ "vegetation": 4,
+ "water": 0,
+ "whitewater": 1
+ },
+ "layer_norm_eps": 1e-06,
+ "mlp_ratios": [
+ 4,
+ 4,
+ 4,
+ 4
+ ],
+ "model_type": "segformer",
+ "num_attention_heads": [
+ 1,
+ 2,
+ 5,
+ 8
+ ],
+ "num_channels": 3,
+ "num_encoder_blocks": 4,
+ "patch_sizes": [
+ 7,
+ 3,
+ 3,
+ 3
+ ],
+ "reshape_last_stage": true,
+ "semantic_loss_ignore_index": 255,
+ "sr_ratios": [
+ 8,
+ 4,
+ 2,
+ 1
+ ],
+ "strides": [
+ 4,
+ 2,
+ 2,
+ 2
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.33.0"
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b715a94b7c82d17cfbae619152f0cae228edb35947aec07a8b65b04dd2572463
+ size 14936909
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0559f95b6166445bd7ebf17073537c68cfb6c724b7092795017812502b4163e6
+ size 4155