mahadi committed on
Commit
be534bc
1 Parent(s): 1c19f3e
train/F1_curve.png ADDED
train/PR_curve.png ADDED
train/P_curve.png ADDED
train/R_curve.png ADDED
train/args.yaml ADDED
@@ -0,0 +1,108 @@
+ task: detect
+ mode: train
+ model: yolov8s.pt
+ data: /kaggle/working/final-dataset-v2/data.yaml
+ epochs: 65
+ time: null
+ patience: 100
+ batch: 32
+ imgsz: 640
+ save: true
+ save_period: -1
+ cache: false
+ device:
+ - 0
+ - 1
+ workers: 16
+ project: null
+ name: train
+ exist_ok: false
+ pretrained: true
+ optimizer: auto
+ verbose: true
+ seed: 42
+ deterministic: true
+ single_cls: false
+ rect: false
+ cos_lr: false
+ close_mosaic: 10
+ resume: false
+ amp: true
+ fraction: 1.0
+ profile: false
+ freeze: null
+ multi_scale: false
+ overlap_mask: true
+ mask_ratio: 4
+ dropout: 0.0
+ val: true
+ split: val
+ save_json: false
+ save_hybrid: false
+ conf: null
+ iou: 0.7
+ max_det: 300
+ half: false
+ dnn: false
+ plots: true
+ source: null
+ vid_stride: 1
+ stream_buffer: false
+ visualize: false
+ augment: false
+ agnostic_nms: false
+ classes: null
+ retina_masks: false
+ embed: null
+ show: false
+ save_frames: false
+ save_txt: false
+ save_conf: false
+ save_crop: false
+ show_labels: true
+ show_conf: true
+ show_boxes: true
+ line_width: null
+ format: torchscript
+ keras: false
+ optimize: false
+ int8: false
+ dynamic: false
+ simplify: false
+ opset: null
+ workspace: 4
+ nms: false
+ lr0: 0.01
+ lrf: 0.01
+ momentum: 0.937
+ weight_decay: 0.0005
+ warmup_epochs: 3.0
+ warmup_momentum: 0.8
+ warmup_bias_lr: 0.1
+ box: 7.5
+ cls: 0.5
+ dfl: 1.5
+ pose: 12.0
+ kobj: 1.0
+ label_smoothing: 0.0
+ nbs: 64
+ hsv_h: 0.015
+ hsv_s: 0.7
+ hsv_v: 0.4
+ degrees: 0.0
+ translate: 0.1
+ scale: 0.5
+ shear: 0.0
+ perspective: 0.0
+ flipud: 0.0
+ fliplr: 0.5
+ bgr: 0.0
+ mosaic: 1.0
+ mixup: 0.0
+ copy_paste: 0.0
+ auto_augment: randaugment
+ erasing: 0.4
+ crop_fraction: 1.0
+ cfg: null
+ tracker: botsort.yaml
+ save_dir: runs/detect/train
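The args.yaml above records the full Ultralytics training configuration for this run. As a rough guide, a minimal sketch of reproducing its key settings through the Ultralytics Python API could look like the following; this assumes the `ultralytics` package is installed, and the dataset path is the one recorded in args.yaml, which only exists in the original Kaggle environment.

```python
# Minimal sketch: key settings from train/args.yaml passed to the Ultralytics API.
# The data path is taken from args.yaml and is specific to the original Kaggle run.
from ultralytics import YOLO

model = YOLO("yolov8s.pt")  # pretrained YOLOv8-small checkpoint

model.train(
    data="/kaggle/working/final-dataset-v2/data.yaml",  # dataset config from args.yaml
    epochs=65,
    batch=32,
    imgsz=640,
    device=[0, 1],      # two GPUs, as recorded in args.yaml
    workers=16,
    seed=42,
    deterministic=True,
    name="train",       # outputs land in runs/detect/train
)
```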
train/confusion_matrix.png ADDED
train/confusion_matrix_normalized.png ADDED
train/events.out.tfevents.1725444701.17fe132f6127.158.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f98d64e1cc174366129ecf617eaba720715efa9c2647aef35eda48437797f579
+ size 228946
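The events file above is a TensorBoard log tracked through Git LFS. A hypothetical sketch of listing its logged scalars after pulling the file locally, assuming the `tensorboard` package is available:

```python
# Hedged sketch: inspecting the TensorBoard event file once it has been pulled from LFS.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("train/events.out.tfevents.1725444701.17fe132f6127.158.0")
ea.Reload()                   # load all events from disk
print(ea.Tags()["scalars"])   # list the scalar tags recorded during training
```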
train/labels.jpg ADDED
train/labels_correlogram.jpg ADDED
train/results.csv ADDED
@@ -0,0 +1,66 @@
+ epoch, train/box_loss, train/cls_loss, train/dfl_loss, metrics/precision(B), metrics/recall(B), metrics/mAP50(B), metrics/mAP50-95(B), val/box_loss, val/cls_loss, val/dfl_loss, lr/pg0, lr/pg1, lr/pg2
+ 1, 1.6885, 2.477, 1.5321, 0.495, 0.41684, 0.40371, 0.18949, 1.8453, 2.0411, 1.7502, 0.00023697, 0.00023697, 0.00023697
+ 2, 1.6811, 1.6755, 1.5043, 0.52118, 0.50289, 0.51959, 0.26524, 1.8205, 1.9467, 1.7319, 0.00046774, 0.00046774, 0.00046774
+ 3, 1.7001, 1.6452, 1.5103, 0.55409, 0.48942, 0.52198, 0.25817, 1.9548, 2.0208, 1.7585, 0.00069126, 0.00069126, 0.00069126
+ 4, 1.6848, 1.6494, 1.5323, 0.56015, 0.49156, 0.50702, 0.24647, 1.8669, 1.7732, 1.7756, 0.00068138, 0.00068138, 0.00068138
+ 5, 1.6444, 1.548, 1.4927, 0.62547, 0.48637, 0.5562, 0.28928, 1.8, 1.7248, 1.7316, 0.0006705, 0.0006705, 0.0006705
+ 6, 1.6242, 1.5271, 1.4815, 0.6782, 0.5699, 0.60919, 0.32202, 1.7564, 1.6443, 1.6545, 0.00065963, 0.00065963, 0.00065963
+ 7, 1.5989, 1.4377, 1.4605, 0.65753, 0.57383, 0.61923, 0.32948, 1.7263, 1.5854, 1.6307, 0.00064875, 0.00064875, 0.00064875
+ 8, 1.5794, 1.4085, 1.4476, 0.67882, 0.61058, 0.6483, 0.35658, 1.7126, 1.5008, 1.5935, 0.00063788, 0.00063788, 0.00063788
+ 9, 1.5544, 1.3717, 1.4224, 0.7153, 0.63445, 0.68544, 0.38583, 1.639, 1.5389, 1.5491, 0.000627, 0.000627, 0.000627
+ 10, 1.5211, 1.3268, 1.4037, 0.74121, 0.67, 0.72648, 0.41554, 1.6116, 1.3595, 1.5316, 0.00061613, 0.00061613, 0.00061613
+ 11, 1.5103, 1.3116, 1.3907, 0.6948, 0.6172, 0.67288, 0.38411, 1.6219, 1.4202, 1.5367, 0.00060525, 0.00060525, 0.00060525
+ 12, 1.4973, 1.273, 1.3857, 0.73474, 0.64992, 0.71518, 0.39141, 1.6013, 1.3136, 1.5206, 0.00059438, 0.00059438, 0.00059438
+ 13, 1.4817, 1.2549, 1.3749, 0.7081, 0.65667, 0.71866, 0.41126, 1.5827, 1.3111, 1.503, 0.0005835, 0.0005835, 0.0005835
+ 14, 1.4904, 1.2401, 1.3805, 0.74674, 0.67424, 0.72965, 0.4275, 1.5497, 1.261, 1.4818, 0.00057263, 0.00057263, 0.00057263
+ 15, 1.4627, 1.2098, 1.367, 0.77603, 0.67164, 0.75235, 0.43971, 1.5529, 1.2259, 1.4759, 0.00056175, 0.00056175, 0.00056175
+ 16, 1.435, 1.1885, 1.3514, 0.76261, 0.67141, 0.74135, 0.44035, 1.5506, 1.2269, 1.4893, 0.00055088, 0.00055088, 0.00055088
+ 17, 1.4231, 1.1728, 1.335, 0.78655, 0.68671, 0.76363, 0.45997, 1.5442, 1.1866, 1.4508, 0.00054, 0.00054, 0.00054
+ 18, 1.4086, 1.1161, 1.3187, 0.81242, 0.67832, 0.76054, 0.45175, 1.5259, 1.1761, 1.4718, 0.00052913, 0.00052913, 0.00052913
+ 19, 1.3969, 1.1279, 1.327, 0.77462, 0.70967, 0.78119, 0.47915, 1.4984, 1.1649, 1.4325, 0.00051825, 0.00051825, 0.00051825
+ 20, 1.3855, 1.0997, 1.3167, 0.80389, 0.70298, 0.7816, 0.47907, 1.502, 1.1358, 1.4403, 0.00050738, 0.00050738, 0.00050738
+ 21, 1.3617, 1.0743, 1.3046, 0.81298, 0.74442, 0.8108, 0.48863, 1.4827, 1.094, 1.4199, 0.0004965, 0.0004965, 0.0004965
+ 22, 1.3561, 1.0629, 1.298, 0.80767, 0.72688, 0.80248, 0.49178, 1.4612, 1.0908, 1.4203, 0.00048563, 0.00048563, 0.00048563
+ 23, 1.3333, 1.0348, 1.28, 0.82926, 0.72697, 0.81434, 0.50703, 1.4531, 1.0562, 1.4043, 0.00047476, 0.00047476, 0.00047476
+ 24, 1.3214, 1.0111, 1.2715, 0.8225, 0.72449, 0.81426, 0.50738, 1.4695, 1.073, 1.4191, 0.00046388, 0.00046388, 0.00046388
+ 25, 1.3274, 1.0039, 1.2674, 0.82243, 0.72741, 0.81212, 0.49853, 1.4529, 1.0444, 1.4037, 0.00045301, 0.00045301, 0.00045301
+ 26, 1.3075, 0.98194, 1.2636, 0.83182, 0.74931, 0.83017, 0.52186, 1.4239, 1.008, 1.3745, 0.00044213, 0.00044213, 0.00044213
+ 27, 1.2994, 0.97574, 1.2502, 0.84023, 0.71525, 0.81807, 0.51989, 1.4252, 1.0303, 1.3738, 0.00043126, 0.00043126, 0.00043126
+ 28, 1.3035, 0.95191, 1.241, 0.85645, 0.74845, 0.83659, 0.52616, 1.4086, 0.97643, 1.3643, 0.00042038, 0.00042038, 0.00042038
+ 29, 1.2742, 0.93611, 1.243, 0.8275, 0.76293, 0.83984, 0.52655, 1.4156, 0.97618, 1.3786, 0.00040951, 0.00040951, 0.00040951
+ 30, 1.2862, 0.94262, 1.2421, 0.85087, 0.75077, 0.8449, 0.55496, 1.366, 0.9467, 1.3336, 0.00039863, 0.00039863, 0.00039863
+ 31, 1.2623, 0.91882, 1.2302, 0.87681, 0.76745, 0.8553, 0.56038, 1.3721, 0.93344, 1.3409, 0.00038776, 0.00038776, 0.00038776
+ 32, 1.2573, 0.90237, 1.2316, 0.87296, 0.75383, 0.85071, 0.5607, 1.3584, 0.9111, 1.3267, 0.00037688, 0.00037688, 0.00037688
+ 33, 1.2435, 0.88831, 1.2142, 0.85371, 0.76178, 0.85082, 0.56305, 1.347, 0.91969, 1.3269, 0.00036601, 0.00036601, 0.00036601
+ 34, 1.2253, 0.88205, 1.2052, 0.83307, 0.78556, 0.85237, 0.55812, 1.3574, 0.92012, 1.3296, 0.00035513, 0.00035513, 0.00035513
+ 35, 1.2324, 0.87328, 1.208, 0.88213, 0.76574, 0.86583, 0.56718, 1.3522, 0.88358, 1.3198, 0.00034426, 0.00034426, 0.00034426
+ 36, 1.2252, 0.86083, 1.1978, 0.87325, 0.78161, 0.86961, 0.57379, 1.3303, 0.8728, 1.3123, 0.00033338, 0.00033338, 0.00033338
+ 37, 1.1905, 0.83131, 1.1931, 0.88344, 0.78845, 0.8668, 0.58127, 1.3229, 0.86654, 1.3015, 0.00032251, 0.00032251, 0.00032251
+ 38, 1.1894, 0.83939, 1.1866, 0.85374, 0.79895, 0.87409, 0.5917, 1.2946, 0.84319, 1.288, 0.00031163, 0.00031163, 0.00031163
+ 39, 1.1845, 0.81732, 1.177, 0.88173, 0.77472, 0.87513, 0.59292, 1.3057, 0.84127, 1.2955, 0.00030076, 0.00030076, 0.00030076
+ 40, 1.1893, 0.80581, 1.1846, 0.86832, 0.80562, 0.87607, 0.59257, 1.2971, 0.8355, 1.2918, 0.00028988, 0.00028988, 0.00028988
+ 41, 1.1705, 0.80839, 1.1746, 0.89379, 0.78888, 0.87712, 0.59579, 1.2979, 0.8237, 1.2896, 0.00027901, 0.00027901, 0.00027901
+ 42, 1.1529, 0.79145, 1.1685, 0.89292, 0.80602, 0.8863, 0.59968, 1.2895, 0.81299, 1.2891, 0.00026813, 0.00026813, 0.00026813
+ 43, 1.1327, 0.75893, 1.1573, 0.88379, 0.80026, 0.88625, 0.60599, 1.2672, 0.79982, 1.2725, 0.00025726, 0.00025726, 0.00025726
+ 44, 1.1289, 0.76043, 1.1543, 0.89975, 0.8065, 0.89012, 0.61027, 1.2635, 0.7977, 1.2638, 0.00024638, 0.00024638, 0.00024638
+ 45, 1.1265, 0.75881, 1.1565, 0.89242, 0.82064, 0.89351, 0.61776, 1.2474, 0.7764, 1.2545, 0.00023551, 0.00023551, 0.00023551
+ 46, 1.1194, 0.75068, 1.1459, 0.89634, 0.81558, 0.89474, 0.61528, 1.2547, 0.78317, 1.2589, 0.00022464, 0.00022464, 0.00022464
+ 47, 1.1179, 0.73662, 1.142, 0.9025, 0.81507, 0.89439, 0.61669, 1.2401, 0.76821, 1.2479, 0.00021376, 0.00021376, 0.00021376
+ 48, 1.1026, 0.72424, 1.1282, 0.90441, 0.80986, 0.89359, 0.62679, 1.2355, 0.76485, 1.2478, 0.00020289, 0.00020289, 0.00020289
+ 49, 1.0899, 0.718, 1.1283, 0.89532, 0.82492, 0.89671, 0.62567, 1.2338, 0.75457, 1.2542, 0.00019201, 0.00019201, 0.00019201
+ 50, 1.098, 0.72478, 1.1264, 0.8967, 0.82882, 0.89898, 0.63623, 1.2097, 0.73853, 1.2359, 0.00018114, 0.00018114, 0.00018114
+ 51, 1.0783, 0.70065, 1.1178, 0.90088, 0.82992, 0.89852, 0.63282, 1.2058, 0.74202, 1.2338, 0.00017026, 0.00017026, 0.00017026
+ 52, 1.0655, 0.68901, 1.1215, 0.89327, 0.83495, 0.89502, 0.63468, 1.2008, 0.74085, 1.226, 0.00015939, 0.00015939, 0.00015939
+ 53, 1.0641, 0.68883, 1.1149, 0.90426, 0.83236, 0.90369, 0.63781, 1.1975, 0.72783, 1.2258, 0.00014851, 0.00014851, 0.00014851
+ 54, 1.05, 0.68272, 1.1097, 0.89542, 0.83998, 0.89784, 0.63874, 1.1961, 0.72828, 1.2255, 0.00013764, 0.00013764, 0.00013764
+ 55, 1.0402, 0.66542, 1.1005, 0.90357, 0.83506, 0.9052, 0.64057, 1.1916, 0.72519, 1.2166, 0.00012676, 0.00012676, 0.00012676
+ 56, 1.0071, 0.58402, 1.0721, 0.9076, 0.83873, 0.90235, 0.64393, 1.1808, 0.71412, 1.2142, 0.00011589, 0.00011589, 0.00011589
+ 57, 0.97437, 0.55146, 1.0574, 0.90151, 0.84242, 0.90129, 0.64825, 1.1729, 0.7101, 1.2153, 0.00010501, 0.00010501, 0.00010501
+ 58, 0.95991, 0.55198, 1.0596, 0.91687, 0.83499, 0.90205, 0.64977, 1.1683, 0.70812, 1.2109, 9.4138e-05, 9.4138e-05, 9.4138e-05
+ 59, 0.9505, 0.53536, 1.044, 0.90865, 0.84326, 0.9053, 0.65547, 1.154, 0.6963, 1.2044, 8.3263e-05, 8.3263e-05, 8.3263e-05
+ 60, 0.93418, 0.53004, 1.037, 0.93051, 0.83807, 0.90852, 0.65887, 1.1459, 0.6931, 1.1958, 7.2389e-05, 7.2389e-05, 7.2389e-05
+ 61, 0.92463, 0.52227, 1.033, 0.92421, 0.83773, 0.90897, 0.66349, 1.1428, 0.68669, 1.1952, 6.1514e-05, 6.1514e-05, 6.1514e-05
+ 62, 0.92007, 0.51852, 1.0324, 0.91303, 0.84717, 0.90691, 0.66259, 1.1378, 0.68055, 1.193, 5.0639e-05, 5.0639e-05, 5.0639e-05
+ 63, 0.91167, 0.51311, 1.0254, 0.91623, 0.85013, 0.91055, 0.66672, 1.1294, 0.67433, 1.1876, 3.9764e-05, 3.9764e-05, 3.9764e-05
+ 64, 0.9052, 0.50138, 1.0185, 0.92225, 0.84208, 0.90874, 0.66851, 1.1288, 0.67398, 1.1872, 2.889e-05, 2.889e-05, 2.889e-05
+ 65, 0.89719, 0.49611, 1.0184, 0.92515, 0.84503, 0.90863, 0.66959, 1.1255, 0.6703, 1.1874, 1.8015e-05, 1.8015e-05, 1.8015e-05
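results.csv records per-epoch training and validation losses plus detection metrics; the final epoch reaches roughly 0.909 mAP50 and 0.670 mAP50-95 on the validation split. A minimal sketch for plotting the mAP curves from this file, assuming pandas and matplotlib are installed (column names are stripped because the CSV header may carry padding whitespace):

```python
# Minimal sketch: load train/results.csv and plot validation mAP per epoch.
import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_csv("train/results.csv")
df.columns = df.columns.str.strip()  # drop padding whitespace around column names

plt.plot(df["epoch"], df["metrics/mAP50(B)"], label="mAP@0.5")
plt.plot(df["epoch"], df["metrics/mAP50-95(B)"], label="mAP@0.5:0.95")
plt.xlabel("epoch")
plt.ylabel("mAP")
plt.legend()
plt.savefig("map_per_epoch.png")
```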
train/results.png ADDED
train/train_batch0.jpg ADDED
train/train_batch1.jpg ADDED
train/train_batch12760.jpg ADDED
train/train_batch12761.jpg ADDED
train/train_batch12762.jpg ADDED
train/train_batch2.jpg ADDED
train/val_batch0_labels.jpg ADDED
train/val_batch0_pred.jpg ADDED
train/val_batch1_labels.jpg ADDED
train/val_batch1_pred.jpg ADDED
train/val_batch2_labels.jpg ADDED
train/val_batch2_pred.jpg ADDED
train/weights/best.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0cf3d230d89781f697275d445f1495a0e6544a915c9c7d5720a5988d0cafd835
+ size 22522083
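best.pt is the checkpoint with the best validation fitness, stored via Git LFS. A hedged sketch of loading it for inference once pulled locally; the image path and confidence threshold below are hypothetical placeholders:

```python
# Hedged sketch: run inference with the best checkpoint from this training run.
from ultralytics import YOLO

model = YOLO("train/weights/best.pt")
results = model.predict("example.jpg", imgsz=640, conf=0.25)  # placeholder image and threshold
results[0].show()  # display detections for the first image
```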
train/weights/last.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4dc07c6b54dcde0ed699287f3daea98aad8e49f6e42c752f9d455be79e1aef1b
+ size 22522083
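last.pt is the checkpoint from the final training epoch. Its usual role is resuming an interrupted run; since this run completed all 65 epochs, the sketch below is purely illustrative of that pattern:

```python
# Illustrative sketch: resuming an interrupted run from the last checkpoint.
# This particular run finished all 65 epochs, so there is nothing left to resume here.
from ultralytics import YOLO

model = YOLO("train/weights/last.pt")
model.train(resume=True)  # continues with the arguments stored in the checkpoint
```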