Raphaël Bournhonesque
committed on
Commit
•
2a38f0c
1
Parent(s):
f60807e
first commit
Browse files- F1_curve.png +0 -0
- PR_curve.png +0 -0
- P_curve.png +0 -0
- README.md +20 -0
- R_curve.png +0 -0
- args.yaml +106 -0
- confusion_matrix.png +0 -0
- confusion_matrix_normalized.png +0 -0
- labels.jpg +0 -0
- labels_correlogram.jpg +0 -0
- results.csv +101 -0
- results.png +0 -0
- train_batch0.jpg +0 -0
- train_batch1.jpg +0 -0
- train_batch2.jpg +0 -0
- train_batch2880.jpg +0 -0
- train_batch2881.jpg +0 -0
- train_batch2882.jpg +0 -0
- val_batch0_labels.jpg +0 -0
- val_batch0_pred.jpg +0 -0
- val_batch1_labels.jpg +0 -0
- val_batch1_pred.jpg +0 -0
- val_batch2_labels.jpg +0 -0
- val_batch2_pred.jpg +0 -0
- weights/best.onnx +3 -0
- weights/best.pt +3 -0
- weights/last.pt +3 -0
F1_curve.png
ADDED
PR_curve.png
ADDED
P_curve.png
ADDED
README.md
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
license: agpl-3.0
|
3 |
+
datasets:
|
4 |
+
- openfoodfacts/nutriscore-object-detection
|
5 |
+
pipeline_tag: object-detection
|
6 |
+
tags:
|
7 |
+
- food
|
8 |
+
---
|
9 |
+
|
10 |
+
# Open Food Facts Nutriscore object detection model
|
11 |
+
|
12 |
+
This object detection model was trained on images from the Open Food Facts database to detect Nutri-score labels on food packaging.
|
13 |
+
|
14 |
+
It was trained for 100 epochs using Ultralytics YOLOv8 with yolov8n as the backbone, with images resized to 640x640.
|
15 |
+
This model is licensed under the AGPLv3 license.
|
16 |
+
|
17 |
+
## Weights
|
18 |
+
|
19 |
+
Weights are available in the weights/ directory.
|
20 |
+
An ONNX export of the model is available in weights/best.onnx.
|
R_curve.png
ADDED
args.yaml
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
task: detect
|
2 |
+
mode: train
|
3 |
+
model: yolov8n.pt
|
4 |
+
data: data.yaml
|
5 |
+
epochs: 100
|
6 |
+
time: null
|
7 |
+
patience: 100
|
8 |
+
batch: 16
|
9 |
+
imgsz: 640
|
10 |
+
save: true
|
11 |
+
save_period: -1
|
12 |
+
cache: false
|
13 |
+
device: null
|
14 |
+
workers: 8
|
15 |
+
project: null
|
16 |
+
name: train
|
17 |
+
exist_ok: false
|
18 |
+
pretrained: true
|
19 |
+
optimizer: auto
|
20 |
+
verbose: true
|
21 |
+
seed: 0
|
22 |
+
deterministic: true
|
23 |
+
single_cls: false
|
24 |
+
rect: false
|
25 |
+
cos_lr: false
|
26 |
+
close_mosaic: 10
|
27 |
+
resume: false
|
28 |
+
amp: true
|
29 |
+
fraction: 1.0
|
30 |
+
profile: false
|
31 |
+
freeze: null
|
32 |
+
multi_scale: false
|
33 |
+
overlap_mask: true
|
34 |
+
mask_ratio: 4
|
35 |
+
dropout: 0.0
|
36 |
+
val: true
|
37 |
+
split: val
|
38 |
+
save_json: false
|
39 |
+
save_hybrid: false
|
40 |
+
conf: null
|
41 |
+
iou: 0.7
|
42 |
+
max_det: 300
|
43 |
+
half: false
|
44 |
+
dnn: false
|
45 |
+
plots: true
|
46 |
+
source: null
|
47 |
+
vid_stride: 1
|
48 |
+
stream_buffer: false
|
49 |
+
visualize: false
|
50 |
+
augment: false
|
51 |
+
agnostic_nms: false
|
52 |
+
classes: null
|
53 |
+
retina_masks: false
|
54 |
+
embed: null
|
55 |
+
show: false
|
56 |
+
save_frames: false
|
57 |
+
save_txt: false
|
58 |
+
save_conf: false
|
59 |
+
save_crop: false
|
60 |
+
show_labels: true
|
61 |
+
show_conf: true
|
62 |
+
show_boxes: true
|
63 |
+
line_width: null
|
64 |
+
format: torchscript
|
65 |
+
keras: false
|
66 |
+
optimize: false
|
67 |
+
int8: false
|
68 |
+
dynamic: false
|
69 |
+
simplify: false
|
70 |
+
opset: null
|
71 |
+
workspace: 4
|
72 |
+
nms: false
|
73 |
+
lr0: 0.01
|
74 |
+
lrf: 0.01
|
75 |
+
momentum: 0.937
|
76 |
+
weight_decay: 0.0005
|
77 |
+
warmup_epochs: 3.0
|
78 |
+
warmup_momentum: 0.8
|
79 |
+
warmup_bias_lr: 0.1
|
80 |
+
box: 7.5
|
81 |
+
cls: 0.5
|
82 |
+
dfl: 1.5
|
83 |
+
pose: 12.0
|
84 |
+
kobj: 1.0
|
85 |
+
label_smoothing: 0.0
|
86 |
+
nbs: 64
|
87 |
+
hsv_h: 0.015
|
88 |
+
hsv_s: 0.7
|
89 |
+
hsv_v: 0.4
|
90 |
+
degrees: 0.0
|
91 |
+
translate: 0.1
|
92 |
+
scale: 0.5
|
93 |
+
shear: 0.0
|
94 |
+
perspective: 0.0
|
95 |
+
flipud: 0.0
|
96 |
+
fliplr: 0.5
|
97 |
+
bgr: 0.0
|
98 |
+
mosaic: 1.0
|
99 |
+
mixup: 0.0
|
100 |
+
copy_paste: 0.0
|
101 |
+
auto_augment: randaugment
|
102 |
+
erasing: 0.4
|
103 |
+
crop_fraction: 1.0
|
104 |
+
cfg: null
|
105 |
+
tracker: botsort.yaml
|
106 |
+
save_dir: runs/detect/train
|
confusion_matrix.png
ADDED
confusion_matrix_normalized.png
ADDED
labels.jpg
ADDED
labels_correlogram.jpg
ADDED
results.csv
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
epoch, train/box_loss, train/cls_loss, train/dfl_loss, metrics/precision(B), metrics/recall(B), metrics/mAP50(B), metrics/mAP50-95(B), val/box_loss, val/cls_loss, val/dfl_loss, lr/pg0, lr/pg1, lr/pg2
|
2 |
+
1, 0.8236, 3.7434, 0.9572, 0.00371, 0.86113, 0.18262, 0.13874, 0.70139, 3.7856, 0.82852, 0.00034441, 0.00034441, 0.00034441
|
3 |
+
2, 0.75479, 2.8131, 0.91942, 0.67184, 0.01818, 0.17629, 0.13707, 0.68508, 3.3846, 0.84909, 0.000693, 0.000693, 0.000693
|
4 |
+
3, 0.77506, 2.5931, 0.93765, 0.44137, 0.23547, 0.34865, 0.30002, 0.68679, 2.7477, 0.88272, 0.0010346, 0.0010346, 0.0010346
|
5 |
+
4, 0.73347, 2.3284, 0.94332, 0.38275, 0.63009, 0.51265, 0.43279, 0.69159, 2.0706, 0.94357, 0.001078, 0.001078, 0.001078
|
6 |
+
5, 0.75277, 2.061, 0.95782, 0.45781, 0.68602, 0.58696, 0.48831, 0.71412, 1.6819, 0.91762, 0.001067, 0.001067, 0.001067
|
7 |
+
6, 0.75537, 1.8783, 0.95938, 0.81539, 0.67577, 0.78202, 0.6641, 0.69739, 1.4406, 0.91474, 0.001056, 0.001056, 0.001056
|
8 |
+
7, 0.75648, 1.6719, 0.97259, 0.6821, 0.58438, 0.67622, 0.53237, 0.77363, 1.6563, 0.9392, 0.001045, 0.001045, 0.001045
|
9 |
+
8, 0.71568, 1.4615, 0.93386, 0.8616, 0.66493, 0.77206, 0.63099, 0.74717, 1.326, 0.92392, 0.001034, 0.001034, 0.001034
|
10 |
+
9, 0.72614, 1.3501, 0.94958, 0.72282, 0.75415, 0.81197, 0.66718, 0.75312, 1.1953, 0.93781, 0.001023, 0.001023, 0.001023
|
11 |
+
10, 0.71295, 1.2187, 0.92855, 0.82924, 0.81335, 0.89193, 0.76206, 0.63379, 0.91804, 0.88127, 0.001012, 0.001012, 0.001012
|
12 |
+
11, 0.6703, 1.1289, 0.9183, 0.74921, 0.83106, 0.86954, 0.72288, 0.72058, 0.96447, 0.92432, 0.001001, 0.001001, 0.001001
|
13 |
+
12, 0.68232, 1.0581, 0.91716, 0.85621, 0.78305, 0.86458, 0.74105, 0.70497, 0.92489, 0.90937, 0.00099001, 0.00099001, 0.00099001
|
14 |
+
13, 0.66084, 0.99199, 0.89987, 0.92606, 0.86539, 0.92315, 0.79621, 0.68306, 0.75798, 0.89652, 0.00097901, 0.00097901, 0.00097901
|
15 |
+
14, 0.6743, 0.99023, 0.92641, 0.90474, 0.83014, 0.9028, 0.77285, 0.65617, 0.79684, 0.88394, 0.00096801, 0.00096801, 0.00096801
|
16 |
+
15, 0.67315, 0.91867, 0.93161, 0.89084, 0.75268, 0.91152, 0.77152, 0.66561, 0.872, 0.89027, 0.00095702, 0.00095702, 0.00095702
|
17 |
+
16, 0.64373, 0.90493, 0.90062, 0.89926, 0.85282, 0.92543, 0.78494, 0.67888, 0.71591, 0.90733, 0.00094602, 0.00094602, 0.00094602
|
18 |
+
17, 0.63905, 0.82639, 0.90611, 0.9598, 0.88922, 0.94199, 0.80405, 0.64282, 0.63489, 0.89851, 0.00093502, 0.00093502, 0.00093502
|
19 |
+
18, 0.64701, 0.82091, 0.91427, 0.93698, 0.86517, 0.9217, 0.79273, 0.64197, 0.68258, 0.87267, 0.00092402, 0.00092402, 0.00092402
|
20 |
+
19, 0.63421, 0.75871, 0.91057, 0.94288, 0.88625, 0.9284, 0.80173, 0.57768, 0.65289, 0.88123, 0.00091302, 0.00091302, 0.00091302
|
21 |
+
20, 0.6052, 0.75296, 0.89531, 0.96414, 0.87051, 0.9519, 0.82328, 0.63021, 0.61205, 0.8996, 0.00090202, 0.00090202, 0.00090202
|
22 |
+
21, 0.62993, 0.75365, 0.89889, 0.8983, 0.85734, 0.94493, 0.81918, 0.63506, 0.65992, 0.89697, 0.00089102, 0.00089102, 0.00089102
|
23 |
+
22, 0.60278, 0.73054, 0.89014, 0.95084, 0.86624, 0.95503, 0.83017, 0.60678, 0.60037, 0.8637, 0.00088002, 0.00088002, 0.00088002
|
24 |
+
23, 0.60232, 0.70956, 0.89617, 0.9224, 0.90198, 0.92143, 0.782, 0.62756, 0.62087, 0.87287, 0.00086902, 0.00086902, 0.00086902
|
25 |
+
24, 0.58472, 0.68474, 0.88039, 0.9629, 0.86295, 0.93597, 0.80986, 0.62099, 0.62554, 0.88378, 0.00085803, 0.00085803, 0.00085803
|
26 |
+
25, 0.62012, 0.72385, 0.89753, 0.95119, 0.88607, 0.94263, 0.81322, 0.65995, 0.60205, 0.88473, 0.00084703, 0.00084703, 0.00084703
|
27 |
+
26, 0.61272, 0.66169, 0.90299, 0.97172, 0.88086, 0.9491, 0.82728, 0.62645, 0.55661, 0.88914, 0.00083603, 0.00083603, 0.00083603
|
28 |
+
27, 0.60203, 0.67042, 0.90051, 0.95873, 0.89829, 0.94482, 0.81995, 0.63583, 0.59332, 0.88598, 0.00082503, 0.00082503, 0.00082503
|
29 |
+
28, 0.60767, 0.65522, 0.90866, 0.91416, 0.87741, 0.91673, 0.7958, 0.6588, 0.69072, 0.88817, 0.00081403, 0.00081403, 0.00081403
|
30 |
+
29, 0.57982, 0.65844, 0.88059, 0.96457, 0.88732, 0.94663, 0.8305, 0.60294, 0.57622, 0.87728, 0.00080303, 0.00080303, 0.00080303
|
31 |
+
30, 0.59124, 0.69105, 0.8996, 0.9752, 0.87201, 0.95457, 0.83321, 0.60302, 0.55747, 0.87485, 0.00079203, 0.00079203, 0.00079203
|
32 |
+
31, 0.57232, 0.61059, 0.88075, 0.92531, 0.89164, 0.95407, 0.83758, 0.60297, 0.57446, 0.87119, 0.00078103, 0.00078103, 0.00078103
|
33 |
+
32, 0.58244, 0.59288, 0.8829, 0.9529, 0.88799, 0.93267, 0.82622, 0.58723, 0.59449, 0.87374, 0.00077003, 0.00077003, 0.00077003
|
34 |
+
33, 0.572, 0.59504, 0.87943, 0.95599, 0.89375, 0.95222, 0.85077, 0.54533, 0.57494, 0.85884, 0.00075904, 0.00075904, 0.00075904
|
35 |
+
34, 0.58596, 0.58432, 0.88452, 0.94951, 0.92141, 0.96548, 0.86123, 0.55836, 0.48213, 0.86951, 0.00074804, 0.00074804, 0.00074804
|
36 |
+
35, 0.57839, 0.58905, 0.89541, 0.94029, 0.88461, 0.94886, 0.84795, 0.53057, 0.5223, 0.84726, 0.00073704, 0.00073704, 0.00073704
|
37 |
+
36, 0.55831, 0.55424, 0.87841, 0.94933, 0.91545, 0.93569, 0.83031, 0.55077, 0.5611, 0.85531, 0.00072604, 0.00072604, 0.00072604
|
38 |
+
37, 0.55171, 0.55933, 0.8819, 0.96251, 0.88477, 0.94955, 0.8249, 0.61271, 0.52694, 0.87883, 0.00071504, 0.00071504, 0.00071504
|
39 |
+
38, 0.55942, 0.54734, 0.88997, 0.94702, 0.91161, 0.93831, 0.82544, 0.56844, 0.54197, 0.86121, 0.00070404, 0.00070404, 0.00070404
|
40 |
+
39, 0.55435, 0.56076, 0.89843, 0.99325, 0.91358, 0.97386, 0.85422, 0.57408, 0.46543, 0.87281, 0.00069304, 0.00069304, 0.00069304
|
41 |
+
40, 0.52497, 0.50629, 0.87069, 0.93424, 0.91752, 0.9571, 0.82649, 0.60756, 0.53376, 0.87147, 0.00068204, 0.00068204, 0.00068204
|
42 |
+
41, 0.53591, 0.54079, 0.88259, 0.96572, 0.88987, 0.96222, 0.84459, 0.5914, 0.48762, 0.86996, 0.00067104, 0.00067104, 0.00067104
|
43 |
+
42, 0.54139, 0.52645, 0.8726, 0.95966, 0.91085, 0.96448, 0.85773, 0.58389, 0.49609, 0.87388, 0.00066005, 0.00066005, 0.00066005
|
44 |
+
43, 0.54965, 0.53737, 0.87667, 0.93009, 0.88784, 0.96053, 0.85882, 0.55655, 0.53784, 0.85215, 0.00064905, 0.00064905, 0.00064905
|
45 |
+
44, 0.54772, 0.56926, 0.88322, 0.91904, 0.93009, 0.96537, 0.86884, 0.55196, 0.48741, 0.8432, 0.00063805, 0.00063805, 0.00063805
|
46 |
+
45, 0.51039, 0.50706, 0.86894, 0.94826, 0.90169, 0.95679, 0.85464, 0.5311, 0.49824, 0.85012, 0.00062705, 0.00062705, 0.00062705
|
47 |
+
46, 0.50936, 0.50065, 0.86503, 0.95236, 0.9089, 0.94351, 0.85059, 0.53532, 0.52154, 0.84773, 0.00061605, 0.00061605, 0.00061605
|
48 |
+
47, 0.52571, 0.52887, 0.88782, 0.9428, 0.87581, 0.95059, 0.85616, 0.52599, 0.51764, 0.8447, 0.00060505, 0.00060505, 0.00060505
|
49 |
+
48, 0.52516, 0.50916, 0.87581, 0.94388, 0.93029, 0.96612, 0.85693, 0.54017, 0.47102, 0.85628, 0.00059405, 0.00059405, 0.00059405
|
50 |
+
49, 0.52066, 0.51961, 0.87075, 0.97747, 0.94626, 0.97206, 0.86663, 0.54782, 0.45049, 0.85203, 0.00058305, 0.00058305, 0.00058305
|
51 |
+
50, 0.49188, 0.47805, 0.86203, 0.9721, 0.95433, 0.97409, 0.86709, 0.54477, 0.42078, 0.86056, 0.00057205, 0.00057205, 0.00057205
|
52 |
+
51, 0.50496, 0.4904, 0.86779, 0.92331, 0.95142, 0.95888, 0.85515, 0.5473, 0.46146, 0.85134, 0.00056105, 0.00056105, 0.00056105
|
53 |
+
52, 0.50281, 0.49288, 0.8777, 0.96975, 0.88279, 0.96349, 0.85228, 0.59122, 0.45991, 0.86487, 0.00055006, 0.00055006, 0.00055006
|
54 |
+
53, 0.5088, 0.46975, 0.86657, 0.93934, 0.92002, 0.96671, 0.85033, 0.55962, 0.47207, 0.86657, 0.00053906, 0.00053906, 0.00053906
|
55 |
+
54, 0.51598, 0.46731, 0.86744, 0.93952, 0.93293, 0.96472, 0.85428, 0.58128, 0.47908, 0.85495, 0.00052806, 0.00052806, 0.00052806
|
56 |
+
55, 0.51431, 0.4584, 0.86583, 0.91051, 0.93254, 0.97117, 0.84673, 0.5654, 0.45559, 0.85863, 0.00051706, 0.00051706, 0.00051706
|
57 |
+
56, 0.52402, 0.50948, 0.88693, 0.95856, 0.89608, 0.98308, 0.87433, 0.55318, 0.42401, 0.84848, 0.00050606, 0.00050606, 0.00050606
|
58 |
+
57, 0.49989, 0.46931, 0.87495, 0.97064, 0.90069, 0.97414, 0.88039, 0.51191, 0.41139, 0.83772, 0.00049506, 0.00049506, 0.00049506
|
59 |
+
58, 0.47975, 0.44716, 0.8597, 0.97426, 0.90153, 0.97282, 0.88104, 0.49206, 0.41652, 0.8307, 0.00048406, 0.00048406, 0.00048406
|
60 |
+
59, 0.45926, 0.4303, 0.86053, 0.92277, 0.96272, 0.98305, 0.88179, 0.54102, 0.4009, 0.84276, 0.00047306, 0.00047306, 0.00047306
|
61 |
+
60, 0.47635, 0.43782, 0.85935, 0.97736, 0.93277, 0.98666, 0.88062, 0.53035, 0.39003, 0.8545, 0.00046206, 0.00046206, 0.00046206
|
62 |
+
61, 0.47347, 0.46017, 0.86382, 0.98059, 0.92523, 0.98422, 0.87666, 0.54639, 0.4012, 0.86278, 0.00045107, 0.00045107, 0.00045107
|
63 |
+
62, 0.4753, 0.44065, 0.85793, 0.96373, 0.94443, 0.97681, 0.88425, 0.50604, 0.39197, 0.83687, 0.00044007, 0.00044007, 0.00044007
|
64 |
+
63, 0.46805, 0.42519, 0.85147, 0.95671, 0.9688, 0.98558, 0.88196, 0.53381, 0.36191, 0.85322, 0.00042907, 0.00042907, 0.00042907
|
65 |
+
64, 0.47235, 0.43598, 0.85792, 0.96082, 0.94208, 0.98602, 0.87046, 0.56005, 0.38769, 0.86259, 0.00041807, 0.00041807, 0.00041807
|
66 |
+
65, 0.47458, 0.45106, 0.85853, 0.97336, 0.93685, 0.98114, 0.8574, 0.5753, 0.3959, 0.85195, 0.00040707, 0.00040707, 0.00040707
|
67 |
+
66, 0.45094, 0.42662, 0.84478, 0.97455, 0.95792, 0.97647, 0.87295, 0.53662, 0.38714, 0.85073, 0.00039607, 0.00039607, 0.00039607
|
68 |
+
67, 0.44663, 0.40487, 0.84933, 0.97086, 0.92164, 0.96591, 0.86766, 0.52626, 0.42247, 0.83931, 0.00038507, 0.00038507, 0.00038507
|
69 |
+
68, 0.43727, 0.38716, 0.84312, 0.95085, 0.93942, 0.97228, 0.87103, 0.53378, 0.4168, 0.8392, 0.00037407, 0.00037407, 0.00037407
|
70 |
+
69, 0.4305, 0.39081, 0.84224, 0.9717, 0.92879, 0.97367, 0.86353, 0.53805, 0.40888, 0.83765, 0.00036307, 0.00036307, 0.00036307
|
71 |
+
70, 0.44644, 0.41093, 0.85076, 0.97042, 0.92182, 0.98576, 0.87587, 0.52906, 0.38925, 0.84013, 0.00035208, 0.00035208, 0.00035208
|
72 |
+
71, 0.43325, 0.40894, 0.86134, 0.9602, 0.95008, 0.98092, 0.87836, 0.53996, 0.36227, 0.85535, 0.00034108, 0.00034108, 0.00034108
|
73 |
+
72, 0.44426, 0.39954, 0.84524, 0.95585, 0.91777, 0.98204, 0.87125, 0.55623, 0.38413, 0.86082, 0.00033008, 0.00033008, 0.00033008
|
74 |
+
73, 0.419, 0.38459, 0.84668, 0.9448, 0.94425, 0.97226, 0.85987, 0.55399, 0.36715, 0.86052, 0.00031908, 0.00031908, 0.00031908
|
75 |
+
74, 0.42868, 0.40711, 0.85857, 0.94512, 0.96749, 0.98396, 0.88248, 0.53928, 0.36637, 0.84936, 0.00030808, 0.00030808, 0.00030808
|
76 |
+
75, 0.42196, 0.38424, 0.84002, 0.98701, 0.91494, 0.98169, 0.87724, 0.54184, 0.37292, 0.84769, 0.00029708, 0.00029708, 0.00029708
|
77 |
+
76, 0.42205, 0.39856, 0.8497, 0.97163, 0.92504, 0.98162, 0.88055, 0.53628, 0.37327, 0.83798, 0.00028608, 0.00028608, 0.00028608
|
78 |
+
77, 0.42575, 0.38877, 0.8556, 0.97745, 0.90659, 0.98403, 0.88508, 0.53693, 0.37234, 0.84645, 0.00027508, 0.00027508, 0.00027508
|
79 |
+
78, 0.43552, 0.40318, 0.85006, 0.96624, 0.91119, 0.98475, 0.88349, 0.52608, 0.39037, 0.83848, 0.00026408, 0.00026408, 0.00026408
|
80 |
+
79, 0.40905, 0.36969, 0.84029, 0.92589, 0.93353, 0.98059, 0.87599, 0.52811, 0.39711, 0.83976, 0.00025309, 0.00025309, 0.00025309
|
81 |
+
80, 0.42717, 0.37484, 0.84078, 0.91411, 0.95798, 0.98153, 0.86949, 0.5414, 0.3741, 0.85031, 0.00024209, 0.00024209, 0.00024209
|
82 |
+
81, 0.4032, 0.36971, 0.84467, 0.985, 0.91298, 0.98388, 0.88066, 0.534, 0.38292, 0.83961, 0.00023109, 0.00023109, 0.00023109
|
83 |
+
82, 0.39662, 0.37236, 0.83886, 0.95855, 0.9418, 0.98105, 0.87204, 0.51316, 0.37543, 0.84175, 0.00022009, 0.00022009, 0.00022009
|
84 |
+
83, 0.40439, 0.38175, 0.86087, 0.95339, 0.95165, 0.98198, 0.87412, 0.50604, 0.35887, 0.83572, 0.00020909, 0.00020909, 0.00020909
|
85 |
+
84, 0.38986, 0.35729, 0.83914, 0.95734, 0.94951, 0.97954, 0.87197, 0.52018, 0.36308, 0.84128, 0.00019809, 0.00019809, 0.00019809
|
86 |
+
85, 0.39678, 0.34679, 0.83834, 0.93554, 0.94158, 0.97666, 0.88458, 0.50708, 0.37899, 0.83288, 0.00018709, 0.00018709, 0.00018709
|
87 |
+
86, 0.38676, 0.34566, 0.8337, 0.93854, 0.94515, 0.97814, 0.86956, 0.53117, 0.37782, 0.84037, 0.00017609, 0.00017609, 0.00017609
|
88 |
+
87, 0.39859, 0.35724, 0.8399, 0.96104, 0.91197, 0.97071, 0.87579, 0.53261, 0.37604, 0.8334, 0.00016509, 0.00016509, 0.00016509
|
89 |
+
88, 0.39918, 0.35083, 0.83077, 0.97926, 0.90407, 0.9785, 0.86375, 0.53956, 0.36362, 0.84568, 0.0001541, 0.0001541, 0.0001541
|
90 |
+
89, 0.38526, 0.3425, 0.83446, 0.97658, 0.91076, 0.98135, 0.87462, 0.53256, 0.36129, 0.83942, 0.0001431, 0.0001431, 0.0001431
|
91 |
+
90, 0.38847, 0.35396, 0.84347, 0.9237, 0.95571, 0.98339, 0.8825, 0.50768, 0.36313, 0.83547, 0.0001321, 0.0001321, 0.0001321
|
92 |
+
91, 0.37568, 0.31205, 0.83157, 0.96197, 0.95474, 0.97934, 0.88278, 0.50853, 0.3579, 0.83241, 0.0001211, 0.0001211, 0.0001211
|
93 |
+
92, 0.35497, 0.296, 0.82207, 0.95793, 0.95535, 0.98132, 0.88379, 0.51631, 0.3583, 0.83422, 0.0001101, 0.0001101, 0.0001101
|
94 |
+
93, 0.35167, 0.2808, 0.81688, 0.96266, 0.95489, 0.98292, 0.87527, 0.52542, 0.36777, 0.84301, 9.9101e-05, 9.9101e-05, 9.9101e-05
|
95 |
+
94, 0.3444, 0.28587, 0.81546, 0.96091, 0.95539, 0.98016, 0.8707, 0.5038, 0.35654, 0.83784, 8.8102e-05, 8.8102e-05, 8.8102e-05
|
96 |
+
95, 0.33749, 0.29023, 0.81521, 0.96086, 0.95081, 0.97887, 0.86863, 0.52106, 0.35863, 0.84123, 7.7103e-05, 7.7103e-05, 7.7103e-05
|
97 |
+
96, 0.35699, 0.3046, 0.82442, 0.95759, 0.9525, 0.97887, 0.87448, 0.50575, 0.35218, 0.83331, 6.6105e-05, 6.6105e-05, 6.6105e-05
|
98 |
+
97, 0.33943, 0.28438, 0.82188, 0.95303, 0.95388, 0.97343, 0.87561, 0.50979, 0.371, 0.83838, 5.5106e-05, 5.5106e-05, 5.5106e-05
|
99 |
+
98, 0.34566, 0.28241, 0.83269, 0.94934, 0.95167, 0.97206, 0.87516, 0.50743, 0.37269, 0.83788, 4.4107e-05, 4.4107e-05, 4.4107e-05
|
100 |
+
99, 0.33233, 0.27415, 0.81844, 0.94851, 0.9545, 0.96854, 0.86938, 0.51158, 0.36923, 0.8386, 3.3108e-05, 3.3108e-05, 3.3108e-05
|
101 |
+
100, 0.33151, 0.28664, 0.81868, 0.94625, 0.95313, 0.96823, 0.86641, 0.50532, 0.36865, 0.83564, 2.2109e-05, 2.2109e-05, 2.2109e-05
|
results.png
ADDED
train_batch0.jpg
ADDED
train_batch1.jpg
ADDED
train_batch2.jpg
ADDED
train_batch2880.jpg
ADDED
train_batch2881.jpg
ADDED
train_batch2882.jpg
ADDED
val_batch0_labels.jpg
ADDED
val_batch0_pred.jpg
ADDED
val_batch1_labels.jpg
ADDED
val_batch1_pred.jpg
ADDED
val_batch2_labels.jpg
ADDED
val_batch2_pred.jpg
ADDED
weights/best.onnx
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:25884e19ca851782a8690f4afce16d08c90e9987d324507c5171cd9b85a5414a
|
3 |
+
size 12241420
|
weights/best.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:234442a195a7297c5a8be53e802a3288552bc840819d005b41f477a2177db5a6
|
3 |
+
size 6236003
|
weights/last.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:8a5ef5f014bbb5cc4f1fc7d9aaf3261fe1ef023ad2e9871043f92c34ebaa9329
|
3 |
+
size 6238755
|