Upload 26 files
- train-v6/train-v6/F1_curve.png +0 -0
- train-v6/train-v6/PR_curve.png +0 -0
- train-v6/train-v6/P_curve.png +0 -0
- train-v6/train-v6/R_curve.png +0 -0
- train-v6/train-v6/args.yaml +108 -0
- train-v6/train-v6/confusion_matrix.png +0 -0
- train-v6/train-v6/confusion_matrix_normalized.png +0 -0
- train-v6/train-v6/events.out.tfevents.1727153896.a22dc029823e.161.0 +3 -0
- train-v6/train-v6/labels.jpg +0 -0
- train-v6/train-v6/labels_correlogram.jpg +0 -0
- train-v6/train-v6/results.csv +101 -0
- train-v6/train-v6/results.png +0 -0
- train-v6/train-v6/train_batch0.jpg +0 -0
- train-v6/train-v6/train_batch1.jpg +0 -0
- train-v6/train-v6/train_batch2.jpg +0 -0
- train-v6/train-v6/train_batch36090.jpg +0 -0
- train-v6/train-v6/train_batch36091.jpg +0 -0
- train-v6/train-v6/train_batch36092.jpg +0 -0
- train-v6/train-v6/val_batch0_labels.jpg +0 -0
- train-v6/train-v6/val_batch0_pred.jpg +0 -0
- train-v6/train-v6/val_batch1_labels.jpg +0 -0
- train-v6/train-v6/val_batch1_pred.jpg +0 -0
- train-v6/train-v6/val_batch2_labels.jpg +0 -0
- train-v6/train-v6/val_batch2_pred.jpg +0 -0
- train-v6/train-v6/weights/best.pt +3 -0
- train-v6/train-v6/weights/last.pt +3 -0
train-v6/train-v6/F1_curve.png
ADDED
train-v6/train-v6/PR_curve.png
ADDED
train-v6/train-v6/P_curve.png
ADDED
train-v6/train-v6/R_curve.png
ADDED
train-v6/train-v6/args.yaml
ADDED
@@ -0,0 +1,108 @@
+task: detect
+mode: train
+model: yolov8s.pt
+data: /kaggle/working/final-dataset-v4/data.yaml
+epochs: 100
+time: null
+patience: 100
+batch: 32
+imgsz: 640
+save: true
+save_period: -1
+cache: false
+device:
+- 0
+- 1
+workers: 16
+project: null
+name: train
+exist_ok: false
+pretrained: true
+optimizer: auto
+verbose: true
+seed: 42
+deterministic: true
+single_cls: false
+rect: false
+cos_lr: false
+close_mosaic: 10
+resume: false
+amp: true
+fraction: 1.0
+profile: false
+freeze: null
+multi_scale: false
+overlap_mask: true
+mask_ratio: 4
+dropout: 0.0
+val: true
+split: val
+save_json: false
+save_hybrid: false
+conf: null
+iou: 0.7
+max_det: 300
+half: false
+dnn: false
+plots: true
+source: null
+vid_stride: 1
+stream_buffer: false
+visualize: false
+augment: false
+agnostic_nms: false
+classes: null
+retina_masks: false
+embed: null
+show: false
+save_frames: false
+save_txt: false
+save_conf: false
+save_crop: false
+show_labels: true
+show_conf: true
+show_boxes: true
+line_width: null
+format: torchscript
+keras: false
+optimize: false
+int8: false
+dynamic: false
+simplify: true
+opset: null
+workspace: 4
+nms: false
+lr0: 0.01
+lrf: 0.01
+momentum: 0.937
+weight_decay: 0.0005
+warmup_epochs: 3.0
+warmup_momentum: 0.8
+warmup_bias_lr: 0.1
+box: 7.5
+cls: 0.5
+dfl: 1.5
+pose: 12.0
+kobj: 1.0
+label_smoothing: 0.0
+nbs: 64
+hsv_h: 0.015
+hsv_s: 0.7
+hsv_v: 0.4
+degrees: 0.0
+translate: 0.1
+scale: 0.5
+shear: 0.0
+perspective: 0.0
+flipud: 0.0
+fliplr: 0.5
+bgr: 0.0
+mosaic: 1.0
+mixup: 0.0
+copy_paste: 0.0
+auto_augment: randaugment
+erasing: 0.4
+crop_fraction: 1.0
+cfg: null
+tracker: botsort.yaml
+save_dir: runs/detect/train
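args.yaml above records the full Ultralytics training configuration for this run (YOLOv8s, 100 epochs at 640 px, batch 32 on two GPUs). A minimal sketch of replaying that configuration, assuming the dataset path from the config is available locally and the `ultralytics` and `pyyaml` packages are installed; this script is not part of the commit:

```python
import yaml
from ultralytics import YOLO

# Load the training arguments committed above.
with open("train-v6/train-v6/args.yaml") as f:
    args = yaml.safe_load(f)

# Start from the same pretrained checkpoint and re-run training with the key
# hyperparameters from the config (remaining keys keep Ultralytics defaults).
model = YOLO(args["model"])          # yolov8s.pt
model.train(
    data=args["data"],               # /kaggle/working/final-dataset-v4/data.yaml
    epochs=args["epochs"],           # 100
    imgsz=args["imgsz"],             # 640
    batch=args["batch"],             # 32
    seed=args["seed"],               # 42
    device=args["device"],           # [0, 1]
)
```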
train-v6/train-v6/confusion_matrix.png
ADDED
train-v6/train-v6/confusion_matrix_normalized.png
ADDED
train-v6/train-v6/events.out.tfevents.1727153896.a22dc029823e.161.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28f9e1dfbdef610039f5716eea927911e05caec9216af252b06e685ff87a2167
+size 252116
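The events.out.tfevents.* file is the TensorBoard log of the run, committed as a Git LFS pointer. A rough sketch of reading its scalars once the LFS object has been fetched; the `tensorboard` package and the example tag name are assumptions, so check `ea.Tags()` for the tags actually present:

```python
from tensorboard.backend.event_processing import event_accumulator

# Path as committed; `git lfs pull` is needed to replace the pointer with the real file.
ea = event_accumulator.EventAccumulator(
    "train-v6/train-v6/events.out.tfevents.1727153896.a22dc029823e.161.0"
)
ea.Reload()                          # parse the event file
print(ea.Tags()["scalars"])          # list the scalar tags that were logged
for event in ea.Scalars("metrics/mAP50(B)"):   # tag name is an assumption
    print(event.step, event.value)
```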
train-v6/train-v6/labels.jpg
ADDED
train-v6/train-v6/labels_correlogram.jpg
ADDED
train-v6/train-v6/results.csv
ADDED
@@ -0,0 +1,101 @@
+epoch, train/box_loss, train/cls_loss, train/dfl_loss, metrics/precision(B), metrics/recall(B), metrics/mAP50(B), metrics/mAP50-95(B), val/box_loss, val/cls_loss, val/dfl_loss, lr/pg0, lr/pg1, lr/pg2
+1, 1.6511, 2.5289, 1.6398, 0.56088, 0.5153, 0.51262, 0.26069, 1.7624, 1.7547, 1.8632, 0.003325, 0.003325, 0.003325
+2, 1.5546, 1.6466, 1.5183, 0.50316, 0.47481, 0.46433, 0.23374, 1.7814, 1.8645, 1.9152, 0.0065924, 0.0065924, 0.0065924
+3, 1.627, 1.7933, 1.5852, 0.44515, 0.36683, 0.33502, 0.14406, 1.9698, 2.2662, 2.0549, 0.0097939, 0.0097939, 0.0097939
+4, 1.7264, 1.9537, 1.6749, 0.41865, 0.39726, 0.35518, 0.16263, 1.9683, 2.1894, 2.0443, 0.009703, 0.009703, 0.009703
+5, 1.6597, 1.8383, 1.6357, 0.58766, 0.47394, 0.50602, 0.25469, 1.8043, 1.8146, 1.8725, 0.009604, 0.009604, 0.009604
+6, 1.6274, 1.7428, 1.6032, 0.60433, 0.51668, 0.54171, 0.27236, 1.7746, 1.7777, 1.8508, 0.009505, 0.009505, 0.009505
+7, 1.601, 1.7059, 1.589, 0.60847, 0.51753, 0.56512, 0.29611, 1.7272, 1.6191, 1.8194, 0.009406, 0.009406, 0.009406
+8, 1.5727, 1.6455, 1.5644, 0.55223, 0.51226, 0.5125, 0.26152, 1.7387, 1.7026, 1.8235, 0.009307, 0.009307, 0.009307
+9, 1.55, 1.5752, 1.5367, 0.65602, 0.55528, 0.60841, 0.3232, 1.675, 1.5182, 1.7409, 0.009208, 0.009208, 0.009208
+10, 1.5258, 1.5524, 1.5243, 0.65803, 0.56133, 0.61084, 0.32733, 1.6391, 1.491, 1.7464, 0.009109, 0.009109, 0.009109
+11, 1.5136, 1.4982, 1.5121, 0.68597, 0.59305, 0.65256, 0.3659, 1.5899, 1.4332, 1.6506, 0.00901, 0.00901, 0.00901
+12, 1.4767, 1.4717, 1.4975, 0.71555, 0.57597, 0.65972, 0.3803, 1.562, 1.3774, 1.6321, 0.008911, 0.008911, 0.008911
+13, 1.4589, 1.4342, 1.4783, 0.70553, 0.59891, 0.66677, 0.37546, 1.5561, 1.372, 1.6338, 0.008812, 0.008812, 0.008812
+14, 1.4506, 1.4055, 1.4649, 0.72341, 0.60521, 0.67431, 0.3882, 1.5498, 1.358, 1.6127, 0.008713, 0.008713, 0.008713
+15, 1.4345, 1.4023, 1.4626, 0.72032, 0.6136, 0.69227, 0.40076, 1.5248, 1.3199, 1.5959, 0.008614, 0.008614, 0.008614
+16, 1.425, 1.3674, 1.4585, 0.74879, 0.60997, 0.69818, 0.40956, 1.4996, 1.2859, 1.5787, 0.008515, 0.008515, 0.008515
+17, 1.4063, 1.3457, 1.44, 0.74425, 0.6485, 0.71384, 0.41569, 1.5168, 1.246, 1.5931, 0.008416, 0.008416, 0.008416
+18, 1.4019, 1.317, 1.4344, 0.7195, 0.64041, 0.71511, 0.41991, 1.4967, 1.2403, 1.5702, 0.008317, 0.008317, 0.008317
+19, 1.3927, 1.2988, 1.4292, 0.75322, 0.64205, 0.72262, 0.42708, 1.4873, 1.2214, 1.5638, 0.008218, 0.008218, 0.008218
+20, 1.3772, 1.28, 1.4174, 0.7489, 0.65091, 0.72395, 0.4283, 1.4804, 1.2423, 1.5494, 0.008119, 0.008119, 0.008119
+21, 1.3609, 1.2725, 1.4036, 0.76898, 0.65492, 0.7349, 0.44042, 1.4563, 1.1833, 1.5434, 0.00802, 0.00802, 0.00802
+22, 1.3636, 1.2504, 1.4017, 0.77042, 0.6573, 0.73954, 0.44458, 1.4627, 1.1983, 1.5349, 0.007921, 0.007921, 0.007921
+23, 1.3537, 1.2244, 1.3937, 0.76783, 0.65598, 0.74215, 0.44775, 1.4434, 1.1643, 1.5073, 0.007822, 0.007822, 0.007822
+24, 1.3388, 1.2224, 1.3905, 0.7874, 0.66491, 0.75332, 0.45586, 1.4307, 1.1567, 1.5215, 0.007723, 0.007723, 0.007723
+25, 1.3409, 1.2188, 1.3948, 0.78255, 0.66111, 0.75358, 0.45866, 1.4213, 1.1376, 1.5119, 0.007624, 0.007624, 0.007624
+26, 1.3251, 1.2061, 1.3796, 0.78157, 0.67722, 0.75721, 0.46116, 1.4207, 1.13, 1.5047, 0.007525, 0.007525, 0.007525
+27, 1.3272, 1.1936, 1.3699, 0.78272, 0.67975, 0.76604, 0.46878, 1.4175, 1.0993, 1.4986, 0.007426, 0.007426, 0.007426
+28, 1.3145, 1.1696, 1.369, 0.81372, 0.668, 0.77085, 0.46936, 1.4003, 1.0928, 1.4876, 0.007327, 0.007327, 0.007327
+29, 1.3059, 1.163, 1.3598, 0.7995, 0.67875, 0.77316, 0.47533, 1.3962, 1.0722, 1.4946, 0.007228, 0.007228, 0.007228
+30, 1.3119, 1.1439, 1.3618, 0.79988, 0.69084, 0.77571, 0.47566, 1.3906, 1.0638, 1.4846, 0.007129, 0.007129, 0.007129
+31, 1.2882, 1.1174, 1.3492, 0.80734, 0.69862, 0.7862, 0.48973, 1.3679, 1.0396, 1.4606, 0.00703, 0.00703, 0.00703
+32, 1.2967, 1.1259, 1.3468, 0.8081, 0.70542, 0.78822, 0.49147, 1.3685, 1.0508, 1.4658, 0.006931, 0.006931, 0.006931
+33, 1.2942, 1.1163, 1.341, 0.81221, 0.71087, 0.79324, 0.49752, 1.366, 1.0205, 1.4566, 0.006832, 0.006832, 0.006832
+34, 1.2844, 1.0983, 1.3413, 0.81692, 0.70677, 0.79489, 0.5004, 1.3527, 1.0272, 1.4485, 0.006733, 0.006733, 0.006733
+35, 1.2755, 1.1028, 1.3309, 0.83173, 0.69783, 0.79865, 0.50332, 1.3532, 0.99533, 1.4509, 0.006634, 0.006634, 0.006634
+36, 1.2639, 1.0832, 1.3264, 0.83263, 0.70483, 0.79989, 0.50508, 1.3383, 0.99647, 1.4403, 0.006535, 0.006535, 0.006535
+37, 1.2477, 1.0678, 1.3225, 0.81119, 0.71789, 0.80306, 0.50732, 1.3406, 0.98739, 1.435, 0.006436, 0.006436, 0.006436
+38, 1.2422, 1.0626, 1.3188, 0.83132, 0.71235, 0.80876, 0.51504, 1.327, 0.98058, 1.4331, 0.006337, 0.006337, 0.006337
+39, 1.2517, 1.0598, 1.3084, 0.83505, 0.71354, 0.81028, 0.51738, 1.3296, 0.96822, 1.427, 0.006238, 0.006238, 0.006238
+40, 1.233, 1.0411, 1.3107, 0.83563, 0.71818, 0.81243, 0.52093, 1.3074, 0.95965, 1.4152, 0.006139, 0.006139, 0.006139
+41, 1.2306, 1.0334, 1.3116, 0.82547, 0.72832, 0.82087, 0.52355, 1.3087, 0.94957, 1.4166, 0.00604, 0.00604, 0.00604
+42, 1.2387, 1.0236, 1.3074, 0.83707, 0.72595, 0.82049, 0.52601, 1.3021, 0.94386, 1.4094, 0.005941, 0.005941, 0.005941
+43, 1.2243, 1.0185, 1.2988, 0.84076, 0.72501, 0.82074, 0.52626, 1.3087, 0.94078, 1.4115, 0.005842, 0.005842, 0.005842
+44, 1.2129, 1.0041, 1.291, 0.85162, 0.72377, 0.82296, 0.52993, 1.2959, 0.93543, 1.4063, 0.005743, 0.005743, 0.005743
+45, 1.1996, 0.98644, 1.2835, 0.85009, 0.72931, 0.82598, 0.53358, 1.2934, 0.91833, 1.4023, 0.005644, 0.005644, 0.005644
+46, 1.2081, 0.9801, 1.2832, 0.83394, 0.74907, 0.82779, 0.53521, 1.2931, 0.91373, 1.4049, 0.005545, 0.005545, 0.005545
+47, 1.2006, 0.99424, 1.2905, 0.84868, 0.7411, 0.83069, 0.53797, 1.2813, 0.91176, 1.395, 0.005446, 0.005446, 0.005446
+48, 1.1869, 0.95561, 1.2655, 0.85973, 0.74147, 0.83187, 0.54229, 1.271, 0.8989, 1.3853, 0.005347, 0.005347, 0.005347
+49, 1.1673, 0.94808, 1.2654, 0.85694, 0.73783, 0.8346, 0.54412, 1.2696, 0.89696, 1.388, 0.005248, 0.005248, 0.005248
+50, 1.1871, 0.94308, 1.273, 0.85454, 0.74018, 0.83352, 0.5443, 1.2672, 0.89365, 1.3851, 0.005149, 0.005149, 0.005149
+51, 1.1796, 0.949, 1.2687, 0.85674, 0.74064, 0.8346, 0.54467, 1.2673, 0.88595, 1.3875, 0.00505, 0.00505, 0.00505
+52, 1.1657, 0.92485, 1.2572, 0.85421, 0.74986, 0.83623, 0.54576, 1.2642, 0.88396, 1.3853, 0.004951, 0.004951, 0.004951
+53, 1.1455, 0.92672, 1.2547, 0.86268, 0.74875, 0.84092, 0.55169, 1.2538, 0.8726, 1.3776, 0.004852, 0.004852, 0.004852
+54, 1.1542, 0.90886, 1.2472, 0.86459, 0.74857, 0.84056, 0.55418, 1.2466, 0.8686, 1.3714, 0.004753, 0.004753, 0.004753
+55, 1.1505, 0.9058, 1.2533, 0.8624, 0.75008, 0.84252, 0.55488, 1.2447, 0.86561, 1.3674, 0.004654, 0.004654, 0.004654
+56, 1.1453, 0.91484, 1.2518, 0.86501, 0.75313, 0.84567, 0.55884, 1.2388, 0.85743, 1.3623, 0.004555, 0.004555, 0.004555
+57, 1.1324, 0.88492, 1.2392, 0.87046, 0.75166, 0.84616, 0.56103, 1.2341, 0.85074, 1.3604, 0.004456, 0.004456, 0.004456
+58, 1.1321, 0.88319, 1.2333, 0.86595, 0.75112, 0.84537, 0.56095, 1.2326, 0.84953, 1.3603, 0.004357, 0.004357, 0.004357
+59, 1.1256, 0.87738, 1.2338, 0.86322, 0.75225, 0.84495, 0.56042, 1.2286, 0.84849, 1.3611, 0.004258, 0.004258, 0.004258
+60, 1.115, 0.86323, 1.2257, 0.86716, 0.75354, 0.84602, 0.56289, 1.229, 0.84863, 1.3623, 0.004159, 0.004159, 0.004159
+61, 1.1076, 0.86499, 1.2235, 0.86586, 0.75681, 0.84759, 0.56445, 1.2239, 0.84005, 1.3585, 0.00406, 0.00406, 0.00406
+62, 1.1014, 0.85585, 1.2128, 0.8711, 0.75058, 0.849, 0.56671, 1.2211, 0.83499, 1.3558, 0.003961, 0.003961, 0.003961
+63, 1.0894, 0.84358, 1.2088, 0.86448, 0.76115, 0.85136, 0.56721, 1.2181, 0.82982, 1.3535, 0.003862, 0.003862, 0.003862
+64, 1.1158, 0.85398, 1.2252, 0.87604, 0.7578, 0.85322, 0.56924, 1.2147, 0.82531, 1.3503, 0.003763, 0.003763, 0.003763
+65, 1.0951, 0.84309, 1.2141, 0.86698, 0.76448, 0.85451, 0.57198, 1.2116, 0.82125, 1.3493, 0.003664, 0.003664, 0.003664
+66, 1.0773, 0.816, 1.2029, 0.86586, 0.76817, 0.85445, 0.57241, 1.2097, 0.81903, 1.3475, 0.003565, 0.003565, 0.003565
+67, 1.0917, 0.82925, 1.2083, 0.86406, 0.77024, 0.8556, 0.57372, 1.2048, 0.81675, 1.3431, 0.003466, 0.003466, 0.003466
+68, 1.0686, 0.81582, 1.1927, 0.86343, 0.76761, 0.85466, 0.57475, 1.2018, 0.81334, 1.3398, 0.003367, 0.003367, 0.003367
+69, 1.0726, 0.81561, 1.1988, 0.86889, 0.7635, 0.85491, 0.57544, 1.1999, 0.81141, 1.3384, 0.003268, 0.003268, 0.003268
+70, 1.078, 0.81437, 1.1976, 0.86023, 0.77251, 0.85567, 0.5763, 1.1994, 0.80906, 1.339, 0.003169, 0.003169, 0.003169
+71, 1.0685, 0.79462, 1.1909, 0.85425, 0.78032, 0.85676, 0.57725, 1.198, 0.80721, 1.3385, 0.00307, 0.00307, 0.00307
+72, 1.0557, 0.79074, 1.1783, 0.86749, 0.76929, 0.85839, 0.57867, 1.1959, 0.80374, 1.3371, 0.002971, 0.002971, 0.002971
+73, 1.0576, 0.78868, 1.1813, 0.86764, 0.77097, 0.8583, 0.58004, 1.194, 0.80077, 1.3364, 0.002872, 0.002872, 0.002872
+74, 1.0363, 0.7681, 1.1722, 0.86965, 0.77255, 0.85894, 0.58126, 1.1923, 0.7976, 1.3357, 0.002773, 0.002773, 0.002773
+75, 1.0412, 0.76819, 1.1752, 0.87469, 0.77311, 0.85942, 0.58153, 1.191, 0.79594, 1.3352, 0.002674, 0.002674, 0.002674
+76, 1.0279, 0.76611, 1.1696, 0.8758, 0.77305, 0.8594, 0.58209, 1.1889, 0.79469, 1.3339, 0.002575, 0.002575, 0.002575
+77, 1.0257, 0.75687, 1.1609, 0.87746, 0.7705, 0.85918, 0.58244, 1.187, 0.79178, 1.3329, 0.002476, 0.002476, 0.002476
+78, 1.0094, 0.74178, 1.1636, 0.87535, 0.7742, 0.85932, 0.5831, 1.1851, 0.7889, 1.3318, 0.002377, 0.002377, 0.002377
+79, 1.02, 0.75265, 1.1655, 0.87689, 0.77386, 0.8593, 0.58347, 1.1823, 0.78809, 1.3311, 0.002278, 0.002278, 0.002278
+80, 1.0079, 0.73985, 1.1637, 0.87485, 0.77452, 0.85968, 0.58409, 1.1807, 0.78588, 1.3301, 0.002179, 0.002179, 0.002179
+81, 1.0088, 0.74515, 1.1692, 0.87483, 0.77564, 0.86018, 0.58462, 1.18, 0.78505, 1.3307, 0.00208, 0.00208, 0.00208
+82, 1.0025, 0.72706, 1.1571, 0.87503, 0.77574, 0.86047, 0.58475, 1.1799, 0.78389, 1.3314, 0.001981, 0.001981, 0.001981
+83, 0.99888, 0.72247, 1.1504, 0.87555, 0.77522, 0.86013, 0.58536, 1.1792, 0.78208, 1.3317, 0.001882, 0.001882, 0.001882
+84, 0.98674, 0.71656, 1.1495, 0.87593, 0.77605, 0.8606, 0.58633, 1.1782, 0.78049, 1.3316, 0.001783, 0.001783, 0.001783
+85, 0.97657, 0.70257, 1.1397, 0.87651, 0.7757, 0.86101, 0.58662, 1.1778, 0.77735, 1.3325, 0.001684, 0.001684, 0.001684
+86, 0.9846, 0.71577, 1.1459, 0.87513, 0.77777, 0.86152, 0.5875, 1.1771, 0.77544, 1.3327, 0.001585, 0.001585, 0.001585
+87, 0.97054, 0.69037, 1.1354, 0.87311, 0.7777, 0.86125, 0.58768, 1.1756, 0.77483, 1.3319, 0.001486, 0.001486, 0.001486
+88, 0.96102, 0.68415, 1.1327, 0.8788, 0.77756, 0.8618, 0.5886, 1.1748, 0.773, 1.3321, 0.001387, 0.001387, 0.001387
+89, 0.96285, 0.69273, 1.1315, 0.88143, 0.77593, 0.86183, 0.5891, 1.1734, 0.77185, 1.3311, 0.001288, 0.001288, 0.001288
+90, 0.95778, 0.68918, 1.1337, 0.87667, 0.78055, 0.86224, 0.58939, 1.1722, 0.77131, 1.3307, 0.001189, 0.001189, 0.001189
+91, 0.91327, 0.57165, 1.0963, 0.87742, 0.77859, 0.86218, 0.58952, 1.1711, 0.76997, 1.3301, 0.00109, 0.00109, 0.00109
+92, 0.89397, 0.55087, 1.0815, 0.88174, 0.77588, 0.86234, 0.5897, 1.1696, 0.76925, 1.3298, 0.000991, 0.000991, 0.000991
+93, 0.88616, 0.54453, 1.073, 0.8815, 0.7757, 0.8622, 0.59002, 1.1682, 0.76774, 1.3291, 0.000892, 0.000892, 0.000892
+94, 0.86758, 0.53132, 1.0692, 0.88178, 0.7748, 0.86267, 0.59085, 1.1665, 0.76685, 1.3284, 0.000793, 0.000793, 0.000793
+95, 0.86296, 0.52105, 1.0606, 0.88331, 0.77489, 0.86279, 0.59092, 1.1641, 0.76577, 1.327, 0.000694, 0.000694, 0.000694
+96, 0.8637, 0.51904, 1.0613, 0.88441, 0.77483, 0.86289, 0.59143, 1.1632, 0.76501, 1.327, 0.000595, 0.000595, 0.000595
+97, 0.84982, 0.51323, 1.055, 0.88393, 0.77481, 0.86292, 0.59187, 1.1623, 0.76436, 1.3265, 0.000496, 0.000496, 0.000496
+98, 0.85217, 0.51619, 1.0604, 0.88393, 0.77479, 0.86324, 0.5923, 1.1612, 0.76367, 1.3262, 0.000397, 0.000397, 0.000397
+99, 0.83706, 0.49939, 1.0516, 0.88492, 0.77447, 0.8633, 0.59262, 1.1603, 0.76359, 1.3257, 0.000298, 0.000298, 0.000298
+100, 0.8237, 0.495, 1.0517, 0.88516, 0.77483, 0.86307, 0.59265, 1.1596, 0.76289, 1.3259, 0.000199, 0.000199, 0.000199
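results.csv logs one row per epoch; the final row (epoch 100) ends at roughly 0.885 precision, 0.775 recall, 0.863 mAP50 and 0.593 mAP50-95 on the validation split. A small sketch for plotting the mAP curves from this file with pandas/matplotlib (column names are stripped because Ultralytics pads them with spaces; neither library nor this script is part of the commit):

```python
import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_csv("train-v6/train-v6/results.csv")
df.columns = [c.strip() for c in df.columns]   # remove padding around header names

plt.plot(df["epoch"], df["metrics/mAP50(B)"], label="mAP50")
plt.plot(df["epoch"], df["metrics/mAP50-95(B)"], label="mAP50-95")
plt.xlabel("epoch")
plt.ylabel("mAP")
plt.legend()
plt.savefig("map_curves.png")      # writes the curves next to the working directory
```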
train-v6/train-v6/results.png
ADDED
train-v6/train-v6/train_batch0.jpg
ADDED
train-v6/train-v6/train_batch1.jpg
ADDED
train-v6/train-v6/train_batch2.jpg
ADDED
train-v6/train-v6/train_batch36090.jpg
ADDED
train-v6/train-v6/train_batch36091.jpg
ADDED
train-v6/train-v6/train_batch36092.jpg
ADDED
train-v6/train-v6/val_batch0_labels.jpg
ADDED
train-v6/train-v6/val_batch0_pred.jpg
ADDED
train-v6/train-v6/val_batch1_labels.jpg
ADDED
train-v6/train-v6/val_batch1_pred.jpg
ADDED
train-v6/train-v6/val_batch2_labels.jpg
ADDED
train-v6/train-v6/val_batch2_pred.jpg
ADDED
train-v6/train-v6/weights/best.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecff51b7319cdb145bad0f9bc123839187c38e9e692aa2a9e6043f5a2c7194a7
+size 22526243
train-v6/train-v6/weights/last.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dbb4782b0e1368ec1c5cde7a5e484002af99b6a378a4c9fda2f9dd1ac0c4a961
+size 22526243
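best.pt and last.pt are committed as Git LFS pointers (about 22.5 MB each once fetched). A minimal inference sketch, assuming the checkpoints have been pulled with `git lfs pull` and the `ultralytics` package is installed; the input image name below is a placeholder, not a file from this commit:

```python
from ultralytics import YOLO

# Load the best checkpoint from this run (the LFS pointer must be resolved first).
model = YOLO("train-v6/train-v6/weights/best.pt")

# Run detection on a placeholder image at the training resolution.
results = model.predict("example.jpg", imgsz=640, conf=0.25)
results[0].show()                    # or results[0].save() to write the annotated image
```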