qubvel-hf (HF staff) committed
Commit 0bb730f · verified · 1 parent: f215231

End of training

Files changed (5)
  1. README.md +109 -0
  2. config.json +111 -0
  3. model.safetensors +3 -0
  4. preprocessor_config.json +26 -0
  5. training_args.bin +3 -0
README.md ADDED
@@ -0,0 +1,109 @@
+ ---
+ license: apache-2.0
+ base_model: PekingU/rtdetr_r50vd_coco_o365
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: rtdetr-r50-cppe5-finetune-use_focal-False
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # rtdetr-r50-cppe5-finetune-use_focal-False
+
+ This model is a fine-tuned version of [PekingU/rtdetr_r50vd_coco_o365](https://huggingface.co/PekingU/rtdetr_r50vd_coco_o365) on the CPPE-5 dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 5.9218
+ - Map: 0.3752
+ - Map 50: 0.5265
+ - Map 75: 0.4224
+ - Map Small: 0.6103
+ - Map Medium: 0.4091
+ - Map Large: 0.5522
+ - Mar 1: 0.3993
+ - Mar 10: 0.7339
+ - Mar 100: 0.7986
+ - Mar Small: 0.7494
+ - Mar Medium: 0.7108
+ - Mar Large: 0.9271
+ - Map Coverall: 0.3753
+ - Mar 100 Coverall: 0.8128
+ - Map Face Shield: 0.3528
+ - Mar 100 Face Shield: 0.8467
+ - Map Gloves: 0.319
+ - Mar 100 Gloves: 0.7723
+ - Map Goggles: 0.4667
+ - Mar 100 Goggles: 0.775
+ - Map Mask: 0.3622
+ - Mar 100 Mask: 0.7864
+
+ ## Model description
+
+ RT-DETR with a ResNet-50 (r50vd) backbone, fine-tuned from the COCO + Objects365 pre-trained checkpoint to detect five personal protective equipment categories: Coverall, Face Shield, Gloves, Goggles, and Mask. As the `use_focal-False` suffix indicates, classification was trained with the plain cross-entropy loss (`use_focal_loss: false`) rather than the (vari)focal loss.
+
+ ## Intended uses & limitations
+
+ More information needed
+
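The card does not yet document usage, so here is a minimal, hedged inference sketch using the Transformers object-detection API. The checkpoint path and image URL are placeholders (assumptions), not values taken from this commit.

```python
# Minimal sketch: run this fine-tuned RT-DETR checkpoint on one image.
# `checkpoint` and `url` are placeholders -- point them at the actual repo/output dir and a real image.
import requests
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForObjectDetection

checkpoint = "rtdetr-r50-cppe5-finetune-use_focal-False"  # assumed local dir or Hub repo id

processor = AutoImageProcessor.from_pretrained(checkpoint)
model = AutoModelForObjectDetection.from_pretrained(checkpoint)

url = "https://example.com/ppe_image.jpg"  # placeholder
image = Image.open(requests.get(url, stream=True).raw)

inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Map predictions back to original image coordinates and keep confident boxes.
results = processor.post_process_object_detection(
    outputs, target_sizes=torch.tensor([image.size[::-1]]), threshold=0.3
)[0]

for score, label, box in zip(results["scores"], results["labels"], results["boxes"]):
    print(f"{model.config.id2label[label.item()]}: {score.item():.2f} {box.tolist()}")
```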
+ ## Training and evaluation data
+
+ Fine-tuned and evaluated on the CPPE-5 personal protective equipment detection dataset, which provides the five categories listed above.
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 5e-05
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 300
+ - num_epochs: 30
+
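For reference, these values map onto `transformers.TrainingArguments` roughly as sketched below; `output_dir`, the eval/save strategies, and `remove_unused_columns` are assumptions rather than values recorded in this card.

```python
# Hedged sketch of TrainingArguments matching the hyperparameters listed above.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="rtdetr-r50-cppe5-finetune-use_focal-False",  # assumed
    learning_rate=5e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    warmup_steps=300,
    num_train_epochs=30,
    eval_strategy="epoch",        # assumption: the results table reports one eval per epoch
    save_strategy="epoch",        # assumption
    remove_unused_columns=False,  # assumption: usual for object-detection data collators
)
```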
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Map | Map 50 | Map 75 | Map Small | Map Medium | Map Large | Mar 1 | Mar 10 | Mar 100 | Mar Small | Mar Medium | Mar Large | Map Coverall | Mar 100 Coverall | Map Face Shield | Mar 100 Face Shield | Map Gloves | Mar 100 Gloves | Map Goggles | Mar 100 Goggles | Map Mask | Mar 100 Mask |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|:----------:|:---------:|:------:|:------:|:-------:|:---------:|:----------:|:---------:|:------------:|:----------------:|:---------------:|:-------------------:|:----------:|:--------------:|:-----------:|:---------------:|:--------:|:------------:|
+ | No log | 1.0 | 106 | 24.0471 | 0.0066 | 0.0124 | 0.0069 | 0.0032 | 0.011 | 0.0084 | 0.0186 | 0.0717 | 0.1332 | 0.0557 | 0.0892 | 0.229 | 0.006 | 0.2935 | 0.0 | 0.028 | 0.0092 | 0.1991 | 0.0 | 0.0 | 0.0176 | 0.1452 |
+ | No log | 2.0 | 212 | 14.0932 | 0.0807 | 0.1577 | 0.0773 | 0.0348 | 0.0677 | 0.1606 | 0.1488 | 0.337 | 0.3981 | 0.1782 | 0.3341 | 0.6135 | 0.1593 | 0.6166 | 0.021 | 0.344 | 0.0608 | 0.4274 | 0.0303 | 0.1891 | 0.1319 | 0.4137 |
+ | No log | 3.0 | 318 | 13.2860 | 0.1033 | 0.1897 | 0.0986 | 0.0603 | 0.0941 | 0.1477 | 0.1938 | 0.3595 | 0.4228 | 0.2721 | 0.3335 | 0.6581 | 0.2055 | 0.6889 | 0.0416 | 0.3773 | 0.0798 | 0.5128 | 0.0479 | 0.1469 | 0.1419 | 0.3881 |
+ | No log | 4.0 | 424 | 10.7598 | 0.1113 | 0.2036 | 0.1048 | 0.091 | 0.0902 | 0.1804 | 0.2358 | 0.4236 | 0.4965 | 0.2469 | 0.4356 | 0.7288 | 0.2198 | 0.7101 | 0.0378 | 0.524 | 0.0983 | 0.5155 | 0.0661 | 0.2984 | 0.1344 | 0.4342 |
+ | 24.0469 | 5.0 | 530 | 11.5195 | 0.1126 | 0.1927 | 0.1137 | 0.1686 | 0.102 | 0.1816 | 0.2387 | 0.4267 | 0.4927 | 0.2938 | 0.4254 | 0.7416 | 0.217 | 0.7106 | 0.0748 | 0.508 | 0.1123 | 0.5228 | 0.0581 | 0.2875 | 0.1005 | 0.4347 |
+ | 24.0469 | 6.0 | 636 | 9.9747 | 0.138 | 0.2341 | 0.1415 | 0.1598 | 0.123 | 0.2096 | 0.2646 | 0.4565 | 0.539 | 0.3445 | 0.4726 | 0.7601 | 0.2753 | 0.7438 | 0.0713 | 0.548 | 0.1061 | 0.5447 | 0.0954 | 0.3766 | 0.1421 | 0.4817 |
+ | 24.0469 | 7.0 | 742 | 9.9184 | 0.1363 | 0.233 | 0.1403 | 0.1365 | 0.1191 | 0.2167 | 0.267 | 0.4522 | 0.5231 | 0.341 | 0.4487 | 0.7366 | 0.2782 | 0.7465 | 0.0697 | 0.556 | 0.0996 | 0.5516 | 0.1023 | 0.3063 | 0.1315 | 0.4553 |
+ | 24.0469 | 8.0 | 848 | 9.7247 | 0.1463 | 0.2561 | 0.1524 | 0.1711 | 0.1251 | 0.24 | 0.2846 | 0.4771 | 0.5495 | 0.3839 | 0.4697 | 0.7746 | 0.305 | 0.7424 | 0.0723 | 0.56 | 0.1003 | 0.579 | 0.1129 | 0.3641 | 0.1409 | 0.5018 |
+ | 24.0469 | 9.0 | 954 | 9.5843 | 0.1428 | 0.2487 | 0.1549 | 0.1736 | 0.1326 | 0.2182 | 0.2743 | 0.4714 | 0.5589 | 0.3942 | 0.4817 | 0.7706 | 0.3025 | 0.7484 | 0.084 | 0.5973 | 0.1304 | 0.5434 | 0.1006 | 0.375 | 0.0966 | 0.5306 |
+ | 9.6616 | 10.0 | 1060 | 9.3868 | 0.161 | 0.2745 | 0.1683 | 0.1815 | 0.1396 | 0.242 | 0.2883 | 0.4894 | 0.5672 | 0.4062 | 0.4967 | 0.7772 | 0.3168 | 0.7493 | 0.1275 | 0.632 | 0.1247 | 0.5708 | 0.1099 | 0.3734 | 0.1262 | 0.5105 |
+ | 9.6616 | 11.0 | 1166 | 9.3399 | 0.1547 | 0.2696 | 0.1545 | 0.1827 | 0.1214 | 0.2563 | 0.2935 | 0.4995 | 0.5729 | 0.3657 | 0.4947 | 0.7793 | 0.2687 | 0.7461 | 0.091 | 0.5467 | 0.1407 | 0.5584 | 0.1535 | 0.4781 | 0.1197 | 0.5352 |
+ | 9.6616 | 12.0 | 1272 | 9.2511 | 0.151 | 0.2625 | 0.1509 | 0.1655 | 0.1369 | 0.2599 | 0.2915 | 0.4833 | 0.5633 | 0.3922 | 0.4894 | 0.7828 | 0.2591 | 0.753 | 0.1236 | 0.572 | 0.1064 | 0.5598 | 0.1508 | 0.4031 | 0.1151 | 0.5288 |
+ | 9.6616 | 13.0 | 1378 | 9.3660 | 0.1795 | 0.3123 | 0.185 | 0.2092 | 0.1558 | 0.3135 | 0.2906 | 0.4945 | 0.5739 | 0.3906 | 0.5012 | 0.7681 | 0.3021 | 0.7562 | 0.151 | 0.6093 | 0.1284 | 0.5607 | 0.1602 | 0.4094 | 0.1558 | 0.5338 |
+ | 9.6616 | 14.0 | 1484 | 9.7121 | 0.1608 | 0.2758 | 0.1754 | 0.1888 | 0.1498 | 0.2211 | 0.3027 | 0.491 | 0.5667 | 0.3848 | 0.4918 | 0.7867 | 0.3091 | 0.7507 | 0.137 | 0.6173 | 0.1309 | 0.5699 | 0.1033 | 0.3734 | 0.1236 | 0.5224 |
+ | 7.7703 | 15.0 | 1590 | 9.3829 | 0.1735 | 0.3082 | 0.1795 | 0.1816 | 0.1703 | 0.255 | 0.3013 | 0.5023 | 0.5785 | 0.3995 | 0.5123 | 0.7851 | 0.284 | 0.7516 | 0.1921 | 0.624 | 0.111 | 0.5662 | 0.1307 | 0.425 | 0.1497 | 0.5256 |
+ | 7.7703 | 16.0 | 1696 | 9.7996 | 0.1767 | 0.3065 | 0.1815 | 0.1793 | 0.1544 | 0.2373 | 0.309 | 0.5112 | 0.5822 | 0.3835 | 0.5201 | 0.7888 | 0.3528 | 0.7562 | 0.1269 | 0.6173 | 0.1243 | 0.553 | 0.1228 | 0.4391 | 0.1566 | 0.5452 |
+ | 7.7703 | 17.0 | 1802 | 9.8642 | 0.1689 | 0.2962 | 0.1733 | 0.1934 | 0.1501 | 0.2263 | 0.3139 | 0.5025 | 0.5835 | 0.4012 | 0.5151 | 0.7941 | 0.3135 | 0.7544 | 0.1404 | 0.6 | 0.139 | 0.5708 | 0.1113 | 0.4609 | 0.1402 | 0.5315 |
+ | 7.7703 | 18.0 | 1908 | 9.5005 | 0.1839 | 0.3224 | 0.1882 | 0.1887 | 0.1634 | 0.2638 | 0.317 | 0.513 | 0.5886 | 0.4156 | 0.5216 | 0.7778 | 0.325 | 0.7576 | 0.1512 | 0.6173 | 0.1358 | 0.5626 | 0.1518 | 0.4594 | 0.1558 | 0.5461 |
+ | 6.699 | 19.0 | 2014 | 9.7569 | 0.1761 | 0.3125 | 0.1794 | 0.1864 | 0.161 | 0.2976 | 0.3078 | 0.4987 | 0.5758 | 0.3795 | 0.5069 | 0.7975 | 0.2971 | 0.7608 | 0.1542 | 0.5827 | 0.114 | 0.558 | 0.1569 | 0.4297 | 0.1585 | 0.5479 |
+ | 6.699 | 20.0 | 2120 | 9.8298 | 0.1878 | 0.328 | 0.189 | 0.1867 | 0.159 | 0.2966 | 0.311 | 0.5071 | 0.5835 | 0.4039 | 0.5116 | 0.7997 | 0.3451 | 0.7599 | 0.1478 | 0.612 | 0.1191 | 0.5557 | 0.1641 | 0.4484 | 0.1629 | 0.5416 |
+ | 6.699 | 21.0 | 2226 | 9.7809 | 0.1822 | 0.315 | 0.1913 | 0.18 | 0.1636 | 0.2603 | 0.3143 | 0.511 | 0.5844 | 0.4044 | 0.5111 | 0.793 | 0.3392 | 0.7567 | 0.1617 | 0.604 | 0.1174 | 0.558 | 0.1433 | 0.4531 | 0.1492 | 0.5502 |
+ | 6.699 | 22.0 | 2332 | 10.1915 | 0.1831 | 0.3242 | 0.1808 | 0.1777 | 0.1639 | 0.2464 | 0.3135 | 0.5036 | 0.5789 | 0.3989 | 0.5114 | 0.7721 | 0.3304 | 0.7567 | 0.1778 | 0.624 | 0.124 | 0.5511 | 0.1355 | 0.4016 | 0.1478 | 0.5612 |
+ | 6.699 | 23.0 | 2438 | 10.0230 | 0.1795 | 0.3247 | 0.1738 | 0.1757 | 0.1667 | 0.2382 | 0.3162 | 0.5023 | 0.5835 | 0.4075 | 0.518 | 0.7682 | 0.3191 | 0.7539 | 0.1626 | 0.6107 | 0.1269 | 0.542 | 0.1216 | 0.4594 | 0.1673 | 0.5516 |
+ | 6.1765 | 24.0 | 2544 | 10.0386 | 0.1765 | 0.3184 | 0.178 | 0.1784 | 0.1576 | 0.2469 | 0.3126 | 0.4972 | 0.5806 | 0.4138 | 0.5063 | 0.7636 | 0.3146 | 0.7521 | 0.1745 | 0.6187 | 0.1135 | 0.5539 | 0.1344 | 0.4328 | 0.1454 | 0.5457 |
+ | 6.1765 | 25.0 | 2650 | 10.2036 | 0.1855 | 0.3316 | 0.1806 | 0.1837 | 0.154 | 0.2811 | 0.3204 | 0.5076 | 0.5864 | 0.4197 | 0.5177 | 0.7589 | 0.3374 | 0.759 | 0.1864 | 0.6027 | 0.1212 | 0.5443 | 0.1277 | 0.4625 | 0.1546 | 0.5635 |
+ | 6.1765 | 26.0 | 2756 | 10.1975 | 0.1848 | 0.3283 | 0.1854 | 0.1827 | 0.168 | 0.2654 | 0.3155 | 0.511 | 0.581 | 0.4125 | 0.499 | 0.7679 | 0.3064 | 0.7608 | 0.1949 | 0.612 | 0.1287 | 0.5447 | 0.1375 | 0.4406 | 0.1566 | 0.547 |
+ | 6.1765 | 27.0 | 2862 | 10.2368 | 0.1864 | 0.3324 | 0.1914 | 0.178 | 0.1717 | 0.2878 | 0.3171 | 0.5122 | 0.5833 | 0.3902 | 0.5185 | 0.7613 | 0.3065 | 0.759 | 0.2008 | 0.612 | 0.1237 | 0.5461 | 0.1455 | 0.4531 | 0.1555 | 0.5461 |
+ | 6.1765 | 28.0 | 2968 | 10.2034 | 0.1857 | 0.3297 | 0.1869 | 0.1918 | 0.1673 | 0.2712 | 0.3207 | 0.5125 | 0.5904 | 0.4132 | 0.52 | 0.7673 | 0.3133 | 0.759 | 0.1976 | 0.6267 | 0.1251 | 0.5562 | 0.1392 | 0.4563 | 0.1533 | 0.5539 |
+ | 5.7542 | 29.0 | 3074 | 10.1788 | 0.1825 | 0.3255 | 0.1822 | 0.1735 | 0.1703 | 0.2771 | 0.3209 | 0.5056 | 0.5837 | 0.4034 | 0.5125 | 0.7695 | 0.2897 | 0.7558 | 0.1932 | 0.616 | 0.1255 | 0.5516 | 0.1387 | 0.4453 | 0.1652 | 0.5498 |
+ | 5.7542 | 30.0 | 3180 | 10.3023 | 0.1765 | 0.319 | 0.1787 | 0.1774 | 0.1643 | 0.2572 | 0.3181 | 0.5139 | 0.5843 | 0.4012 | 0.5167 | 0.7677 | 0.2839 | 0.7525 | 0.1903 | 0.6293 | 0.1252 | 0.5516 | 0.1252 | 0.4391 | 0.1577 | 0.5489 |
+
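The Map/Mar columns are COCO-style detection metrics: mean average precision over IoU 0.50:0.95 (plus the 0.50 and 0.75 thresholds and small/medium/large splits) and mean recall at 1/10/100 detections, with per-class breakdowns for the five categories. A hedged sketch of computing the same quantities with `torchmetrics` (the card does not state which evaluator produced the table, so the library choice is an assumption):

```python
# Hedged sketch: COCO-style mAP/mAR like the columns above, via torchmetrics.
import torch
from torchmetrics.detection.mean_ap import MeanAveragePrecision

metric = MeanAveragePrecision(box_format="xyxy", iou_type="bbox", class_metrics=True)

# Boxes are absolute xyxy pixel coordinates; the values below are dummy examples.
preds = [{
    "boxes": torch.tensor([[10.0, 20.0, 110.0, 220.0]]),
    "scores": torch.tensor([0.80]),
    "labels": torch.tensor([2]),   # e.g. "Gloves"
}]
targets = [{
    "boxes": torch.tensor([[12.0, 18.0, 105.0, 215.0]]),
    "labels": torch.tensor([2]),
}]

metric.update(preds, targets)
results = metric.compute()
print(results["map"], results["map_50"], results["map_75"], results["mar_100"])
print(results["map_per_class"], results["mar_100_per_class"])  # per-class columns
```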
+
+ ### Framework versions
+
+ - Transformers 4.45.0.dev0
+ - PyTorch 2.4.0+cu121
+ - Datasets 2.20.0
+ - Tokenizers 0.19.1
config.json ADDED
@@ -0,0 +1,111 @@
+ {
+   "_name_or_path": "PekingU/rtdetr_r50vd_coco_o365",
+   "activation_dropout": 0.0,
+   "activation_function": "silu",
+   "anchor_image_size": null,
+   "architectures": [
+     "RTDetrForObjectDetection"
+   ],
+   "attention_dropout": 0.0,
+   "auxiliary_loss": true,
+   "backbone": null,
+   "backbone_config": {
+     "model_type": "rt_detr_resnet",
+     "out_features": [
+       "stage2",
+       "stage3",
+       "stage4"
+     ],
+     "out_indices": [
+       2,
+       3,
+       4
+     ]
+   },
+   "backbone_kwargs": null,
+   "batch_norm_eps": 1e-05,
+   "box_noise_scale": 1.0,
+   "d_model": 256,
+   "decoder_activation_function": "relu",
+   "decoder_attention_heads": 8,
+   "decoder_ffn_dim": 1024,
+   "decoder_in_channels": [
+     256,
+     256,
+     256
+   ],
+   "decoder_layers": 6,
+   "decoder_n_points": 4,
+   "disable_custom_kernels": true,
+   "dropout": 0.0,
+   "encode_proj_layers": [
+     2
+   ],
+   "encoder_activation_function": "gelu",
+   "encoder_attention_heads": 8,
+   "encoder_ffn_dim": 1024,
+   "encoder_hidden_dim": 256,
+   "encoder_in_channels": [
+     512,
+     1024,
+     2048
+   ],
+   "encoder_layers": 1,
+   "eos_coefficient": 0.0001,
+   "eval_size": null,
+   "feat_strides": [
+     8,
+     16,
+     32
+   ],
+   "focal_loss_alpha": 0.75,
+   "focal_loss_gamma": 2.0,
+   "hidden_expansion": 1.0,
+   "id2label": {
+     "0": "Coverall",
+     "1": "Face_Shield",
+     "2": "Gloves",
+     "3": "Goggles",
+     "4": "Mask"
+   },
+   "initializer_bias_prior_prob": null,
+   "initializer_range": 0.01,
+   "is_encoder_decoder": true,
+   "label2id": {
+     "Coverall": 0,
+     "Face_Shield": 1,
+     "Gloves": 2,
+     "Goggles": 3,
+     "Mask": 4
+   },
+   "label_noise_ratio": 0.5,
+   "layer_norm_eps": 1e-05,
+   "learn_initial_query": false,
+   "losses": [
+     "labels_ce",
+     "boxes"
+   ],
+   "matcher_alpha": 0.25,
+   "matcher_bbox_cost": 5.0,
+   "matcher_class_cost": 2.0,
+   "matcher_gamma": 2.0,
+   "matcher_giou_cost": 2.0,
+   "model_type": "rt_detr",
+   "normalize_before": false,
+   "num_denoising": 100,
+   "num_feature_levels": 3,
+   "num_queries": 300,
+   "positional_encoding_temperature": 10000,
+   "torch_dtype": "float32",
+   "transformers_version": "4.45.0.dev0",
+   "use_focal_loss": false,
+   "use_pretrained_backbone": false,
+   "use_timm_backbone": false,
+   "weight_loss_bbox": 5.0,
+   "weight_loss_bce": 1.0,
+   "weight_loss_ce": 1.0,
+   "weight_loss_focal": 1.0,
+   "weight_loss_giou": 2.0,
+   "weight_loss_vfl": 1.0,
+   "with_box_refine": true
+ }
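The config keeps the RT-DETR architecture of the base checkpoint but swaps the label space to the five CPPE-5 classes and sets `"use_focal_loss": false` (classification via cross-entropy, per `"losses": ["labels_ce", "boxes"]`). A hedged sketch of how such a config is typically produced when setting up the fine-tune (the training script itself is not part of this commit, so the exact call is an assumption):

```python
# Hedged sketch: load the base checkpoint with a new label space,
# which yields a config like the one above.
from transformers import AutoModelForObjectDetection

id2label = {0: "Coverall", 1: "Face_Shield", 2: "Gloves", 3: "Goggles", 4: "Mask"}
label2id = {name: idx for idx, name in id2label.items()}

model = AutoModelForObjectDetection.from_pretrained(
    "PekingU/rtdetr_r50vd_coco_o365",
    id2label=id2label,
    label2id=label2id,
    ignore_mismatched_sizes=True,  # class heads are re-initialized for 5 labels
)
model.config.use_focal_loss = False  # matches "use_focal_loss": false above (assumed to be set this way)
print(model.config.num_labels)       # 5
```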
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aab3a158d13e114f1ac5d40f2322078bfd594f83102887a0a84a904ed1704360
+ size 171566536
preprocessor_config.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "do_convert_annotations": true,
+   "do_normalize": false,
+   "do_pad": false,
+   "do_rescale": true,
+   "do_resize": true,
+   "format": "coco_detection",
+   "image_mean": [
+     0.485,
+     0.456,
+     0.406
+   ],
+   "image_processor_type": "RTDetrImageProcessor",
+   "image_std": [
+     0.229,
+     0.224,
+     0.225
+   ],
+   "pad_size": null,
+   "resample": 2,
+   "rescale_factor": 0.00392156862745098,
+   "size": {
+     "height": 480,
+     "width": 480
+   }
+ }
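These settings resize inputs to 480x480, rescale pixels by 1/255 (`0.00392156862745098`), and skip both ImageNet normalization and padding. A hedged sketch of building an equivalent processor directly; in practice you would simply load it from the checkpoint directory with `AutoImageProcessor.from_pretrained`:

```python
# Hedged sketch: an image processor equivalent to the JSON above.
from transformers import RTDetrImageProcessor

processor = RTDetrImageProcessor(
    format="coco_detection",
    do_resize=True,
    size={"height": 480, "width": 480},
    resample=2,                        # PIL bilinear
    do_rescale=True,
    rescale_factor=1 / 255,
    do_normalize=False,                # RT-DETR takes [0, 1] pixels without ImageNet normalization
    image_mean=[0.485, 0.456, 0.406],  # kept in the config but unused while do_normalize is False
    image_std=[0.229, 0.224, 0.225],
    do_convert_annotations=True,
    do_pad=False,
)
```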
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb3216081215870f7e831ff755107768bb48474cac724d4996c7f0dd6c0d4533
+ size 5240
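`training_args.bin` is the pickled `TrainingArguments` object the `Trainer` saves alongside the model. A hedged sketch of inspecting it after downloading the file locally (with `transformers` installed so the object can be unpickled; newer PyTorch versions require `weights_only=False` because this is a pickle, not a tensor file):

```python
# Hedged sketch: inspect the saved TrainingArguments.
import torch

args = torch.load("training_args.bin", weights_only=False)  # pickled TrainingArguments
print(args.learning_rate, args.num_train_epochs, args.lr_scheduler_type, args.warmup_steps)
```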