diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..6e2dba63113c5218604029e4af4ddb62509233f7 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,56 @@ +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.lz4 filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +saved_model/**/* filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tar filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text +# Audio files - uncompressed +*.pcm filter=lfs diff=lfs merge=lfs -text +*.sam filter=lfs diff=lfs merge=lfs -text +*.raw filter=lfs diff=lfs merge=lfs -text +# Audio files - compressed +*.aac 
filter=lfs diff=lfs merge=lfs -text +*.flac filter=lfs diff=lfs merge=lfs -text +*.mp3 filter=lfs diff=lfs merge=lfs -text +*.ogg filter=lfs diff=lfs merge=lfs -text +*.wav filter=lfs diff=lfs merge=lfs -text +# Image files - uncompressed +*.bmp filter=lfs diff=lfs merge=lfs -text +*.gif filter=lfs diff=lfs merge=lfs -text +*.png filter=lfs diff=lfs merge=lfs -text +*.tiff filter=lfs diff=lfs merge=lfs -text +# Image files - compressed +*.jpg filter=lfs diff=lfs merge=lfs -text +*.jpeg filter=lfs diff=lfs merge=lfs -text +*.webp filter=lfs diff=lfs merge=lfs -text +ResNet-CIFAR10/Classification-normal/dataset/cifar-10-batches-py/* filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..acbd2c512695a7395365ed07b79ffe041a016737 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +*.pyc +__pycache__ \ No newline at end of file diff --git a/Image/AlexNet/code/backdoor_train.log b/Image/AlexNet/code/backdoor_train.log new file mode 100644 index 0000000000000000000000000000000000000000..fb1dc4430509df44169bab14d9a8ef6a3b1f0917 --- /dev/null +++ b/Image/AlexNet/code/backdoor_train.log @@ -0,0 +1,503 @@ +2025-03-09 20:19:12,058 - train - INFO - 开始训练 alexnet +2025-03-09 20:19:12,058 - train - INFO - 总轮数: 100, 学习率: 0.1, 设备: cuda:2 +2025-03-09 20:19:12,675 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.303 | Acc: 16.41% +2025-03-09 20:19:14,626 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.271 | Acc: 18.68% +2025-03-09 20:19:16,523 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.251 | Acc: 19.08% +2025-03-09 20:19:18,515 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 2.190 | Acc: 19.65% +2025-03-09 20:19:21,395 - train - INFO - Epoch: 1 | Test Loss: 1.966 | Test Acc: 18.53% +2025-03-09 20:19:21,531 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 2.000 | Acc: 20.31% +2025-03-09 20:19:23,427 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.985 | Acc: 20.66% +2025-03-09 20:19:25,413 - train - INFO - 
Epoch: 2 | Batch: 200 | Loss: 1.976 | Acc: 20.93% +2025-03-09 20:19:27,414 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.963 | Acc: 21.40% +2025-03-09 20:19:30,365 - train - INFO - Epoch: 2 | Test Loss: 1.889 | Test Acc: 20.28% +2025-03-09 20:19:30,566 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.928 | Acc: 26.56% +2025-03-09 20:19:33,166 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.907 | Acc: 24.49% +2025-03-09 20:19:35,301 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.885 | Acc: 25.66% +2025-03-09 20:19:37,244 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.861 | Acc: 26.84% +2025-03-09 20:19:40,542 - train - INFO - Epoch: 3 | Test Loss: 1.751 | Test Acc: 28.73% +2025-03-09 20:19:40,699 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.752 | Acc: 31.25% +2025-03-09 20:19:42,615 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.688 | Acc: 34.92% +2025-03-09 20:19:44,581 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.674 | Acc: 35.46% +2025-03-09 20:19:46,517 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.670 | Acc: 35.93% +2025-03-09 20:19:49,443 - train - INFO - Epoch: 4 | Test Loss: 1.597 | Test Acc: 40.89% +2025-03-09 20:19:57,639 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.450 | Acc: 47.66% +2025-03-09 20:19:59,573 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.591 | Acc: 40.60% +2025-03-09 20:20:01,613 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.572 | Acc: 41.28% +2025-03-09 20:20:03,589 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.564 | Acc: 41.79% +2025-03-09 20:20:06,878 - train - INFO - Epoch: 5 | Test Loss: 1.498 | Test Acc: 46.10% +2025-03-09 20:20:07,049 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.323 | Acc: 53.12% +2025-03-09 20:20:09,159 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.504 | Acc: 44.93% +2025-03-09 20:20:11,263 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.507 | Acc: 45.20% +2025-03-09 20:20:13,313 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.511 | Acc: 45.22% +2025-03-09 20:20:16,255 - train - 
INFO - Epoch: 6 | Test Loss: 1.616 | Test Acc: 43.10% +2025-03-09 20:20:16,412 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 1.604 | Acc: 40.62% +2025-03-09 20:20:18,415 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.507 | Acc: 44.99% +2025-03-09 20:20:20,351 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.481 | Acc: 46.18% +2025-03-09 20:20:22,266 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.494 | Acc: 46.05% +2025-03-09 20:20:25,325 - train - INFO - Epoch: 7 | Test Loss: 1.543 | Test Acc: 43.40% +2025-03-09 20:20:25,492 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.399 | Acc: 49.22% +2025-03-09 20:20:27,700 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.477 | Acc: 47.11% +2025-03-09 20:20:29,710 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.464 | Acc: 47.44% +2025-03-09 20:20:31,913 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.461 | Acc: 47.83% +2025-03-09 20:20:35,491 - train - INFO - Epoch: 8 | Test Loss: 1.713 | Test Acc: 42.90% +2025-03-09 20:20:44,721 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.387 | Acc: 50.78% +2025-03-09 20:20:46,712 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.450 | Acc: 48.65% +2025-03-09 20:20:48,602 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.446 | Acc: 48.56% +2025-03-09 20:20:50,410 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.449 | Acc: 48.34% +2025-03-09 20:20:53,243 - train - INFO - Epoch: 9 | Test Loss: 1.560 | Test Acc: 44.42% +2025-03-09 20:20:53,419 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 1.467 | Acc: 47.66% +2025-03-09 20:20:55,289 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 1.438 | Acc: 48.78% +2025-03-09 20:20:57,200 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 1.437 | Acc: 49.22% +2025-03-09 20:20:59,140 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 1.425 | Acc: 49.63% +2025-03-09 20:21:02,190 - train - INFO - Epoch: 10 | Test Loss: 1.399 | Test Acc: 51.59% +2025-03-09 20:21:02,359 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 1.214 | Acc: 60.94% +2025-03-09 20:21:04,254 - 
train - INFO - Epoch: 11 | Batch: 100 | Loss: 1.449 | Acc: 48.41% +2025-03-09 20:21:06,162 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 1.457 | Acc: 48.33% +2025-03-09 20:21:08,031 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 1.449 | Acc: 48.74% +2025-03-09 20:21:11,037 - train - INFO - Epoch: 11 | Test Loss: 1.485 | Test Acc: 49.63% +2025-03-09 20:21:11,206 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 1.293 | Acc: 51.56% +2025-03-09 20:21:13,154 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 1.422 | Acc: 50.02% +2025-03-09 20:21:15,086 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 1.406 | Acc: 50.85% +2025-03-09 20:21:17,146 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 1.413 | Acc: 50.54% +2025-03-09 20:21:20,196 - train - INFO - Epoch: 12 | Test Loss: 1.515 | Test Acc: 46.31% +2025-03-09 20:21:28,840 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 1.368 | Acc: 46.88% +2025-03-09 20:21:30,993 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 1.425 | Acc: 50.31% +2025-03-09 20:21:33,129 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 1.418 | Acc: 50.51% +2025-03-09 20:21:35,080 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 1.422 | Acc: 50.37% +2025-03-09 20:21:38,299 - train - INFO - Epoch: 13 | Test Loss: 1.688 | Test Acc: 43.65% +2025-03-09 20:21:38,495 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 1.597 | Acc: 46.88% +2025-03-09 20:21:40,470 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 1.417 | Acc: 50.60% +2025-03-09 20:21:42,443 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 1.441 | Acc: 49.58% +2025-03-09 20:21:44,403 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 1.439 | Acc: 49.61% +2025-03-09 20:21:47,387 - train - INFO - Epoch: 14 | Test Loss: 1.390 | Test Acc: 53.03% +2025-03-09 20:21:47,552 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 1.497 | Acc: 50.78% +2025-03-09 20:21:49,502 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 1.401 | Acc: 51.78% +2025-03-09 20:21:51,470 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 1.405 | Acc: 51.27% 
+2025-03-09 20:21:53,392 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 1.413 | Acc: 51.07% +2025-03-09 20:21:56,330 - train - INFO - Epoch: 15 | Test Loss: 1.420 | Test Acc: 51.08% +2025-03-09 20:21:56,494 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 1.452 | Acc: 45.31% +2025-03-09 20:21:58,628 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 1.414 | Acc: 50.87% +2025-03-09 20:22:00,738 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 1.409 | Acc: 51.10% +2025-03-09 20:22:02,889 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 1.410 | Acc: 51.06% +2025-03-09 20:22:06,141 - train - INFO - Epoch: 16 | Test Loss: 1.340 | Test Acc: 54.35% +2025-03-09 20:22:15,047 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 1.409 | Acc: 50.00% +2025-03-09 20:22:17,210 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 1.392 | Acc: 52.21% +2025-03-09 20:22:19,647 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 1.388 | Acc: 52.15% +2025-03-09 20:22:21,842 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 1.386 | Acc: 52.22% +2025-03-09 20:22:25,119 - train - INFO - Epoch: 17 | Test Loss: 1.341 | Test Acc: 53.27% +2025-03-09 20:22:25,281 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 1.476 | Acc: 46.09% +2025-03-09 20:22:27,733 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 1.405 | Acc: 52.00% +2025-03-09 20:22:29,947 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 1.408 | Acc: 51.39% +2025-03-09 20:22:31,944 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 1.402 | Acc: 51.70% +2025-03-09 20:22:35,223 - train - INFO - Epoch: 18 | Test Loss: 1.446 | Test Acc: 49.65% +2025-03-09 20:22:35,389 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 1.405 | Acc: 50.78% +2025-03-09 20:22:37,625 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 1.415 | Acc: 51.73% +2025-03-09 20:22:39,692 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 1.413 | Acc: 51.99% +2025-03-09 20:22:41,607 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 1.405 | Acc: 52.11% +2025-03-09 20:22:44,549 - train - INFO - Epoch: 19 | Test Loss: 
1.407 | Test Acc: 52.25% +2025-03-09 20:22:44,717 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 1.496 | Acc: 52.34% +2025-03-09 20:22:46,695 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 1.393 | Acc: 52.61% +2025-03-09 20:22:48,558 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 1.391 | Acc: 52.68% +2025-03-09 20:22:50,490 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 1.386 | Acc: 52.47% +2025-03-09 20:22:53,448 - train - INFO - Epoch: 20 | Test Loss: 1.386 | Test Acc: 52.54% +2025-03-09 20:23:02,248 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 1.271 | Acc: 53.91% +2025-03-09 20:23:04,411 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 1.377 | Acc: 52.96% +2025-03-09 20:23:06,656 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 1.362 | Acc: 53.40% +2025-03-09 20:23:08,804 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 1.365 | Acc: 53.34% +2025-03-09 20:23:11,945 - train - INFO - Epoch: 21 | Test Loss: 1.409 | Test Acc: 52.31% +2025-03-09 20:23:12,122 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 1.437 | Acc: 52.34% +2025-03-09 20:23:14,013 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 1.347 | Acc: 53.79% +2025-03-09 20:23:15,873 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 1.363 | Acc: 52.99% +2025-03-09 20:23:17,764 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 1.364 | Acc: 53.03% +2025-03-09 20:23:20,673 - train - INFO - Epoch: 22 | Test Loss: 1.469 | Test Acc: 49.71% +2025-03-09 20:23:20,830 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 1.354 | Acc: 49.22% +2025-03-09 20:23:22,923 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 1.379 | Acc: 52.44% +2025-03-09 20:23:24,848 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 1.379 | Acc: 52.37% +2025-03-09 20:23:26,896 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 1.374 | Acc: 52.68% +2025-03-09 20:23:30,039 - train - INFO - Epoch: 23 | Test Loss: 1.362 | Test Acc: 52.68% +2025-03-09 20:23:30,193 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 1.161 | Acc: 61.72% +2025-03-09 20:23:32,373 - train - INFO - 
Epoch: 24 | Batch: 100 | Loss: 1.389 | Acc: 52.10% +2025-03-09 20:23:34,420 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 1.380 | Acc: 52.79% +2025-03-09 20:23:36,633 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 1.385 | Acc: 52.65% +2025-03-09 20:23:39,821 - train - INFO - Epoch: 24 | Test Loss: 1.370 | Test Acc: 52.61% +2025-03-09 20:23:48,489 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 1.393 | Acc: 53.12% +2025-03-09 20:23:50,419 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 1.372 | Acc: 53.69% +2025-03-09 20:23:52,355 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 1.400 | Acc: 52.04% +2025-03-09 20:23:54,234 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 1.385 | Acc: 52.35% +2025-03-09 20:23:57,196 - train - INFO - Epoch: 25 | Test Loss: 1.394 | Test Acc: 52.26% +2025-03-09 20:23:57,352 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 1.450 | Acc: 59.38% +2025-03-09 20:23:59,265 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 1.348 | Acc: 54.16% +2025-03-09 20:24:01,145 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 1.334 | Acc: 54.64% +2025-03-09 20:24:03,153 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 1.333 | Acc: 54.60% +2025-03-09 20:24:06,282 - train - INFO - Epoch: 26 | Test Loss: 1.348 | Test Acc: 53.70% +2025-03-09 20:24:06,508 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 1.219 | Acc: 58.59% +2025-03-09 20:24:08,538 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 1.338 | Acc: 54.12% +2025-03-09 20:24:10,668 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 1.350 | Acc: 53.35% +2025-03-09 20:24:12,748 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 1.349 | Acc: 53.77% +2025-03-09 20:24:15,793 - train - INFO - Epoch: 27 | Test Loss: 1.359 | Test Acc: 52.39% +2025-03-09 20:24:15,977 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 1.496 | Acc: 47.66% +2025-03-09 20:24:17,971 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 1.372 | Acc: 52.45% +2025-03-09 20:24:19,834 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 1.352 | Acc: 53.57% +2025-03-09 
20:24:21,705 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 1.358 | Acc: 53.43% +2025-03-09 20:24:24,689 - train - INFO - Epoch: 28 | Test Loss: 1.377 | Test Acc: 54.25% +2025-03-09 20:24:33,155 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 1.412 | Acc: 48.44% +2025-03-09 20:24:35,070 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 1.370 | Acc: 53.01% +2025-03-09 20:24:37,032 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 1.367 | Acc: 53.06% +2025-03-09 20:24:39,067 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 1.353 | Acc: 53.75% +2025-03-09 20:24:42,247 - train - INFO - Epoch: 29 | Test Loss: 1.488 | Test Acc: 47.56% +2025-03-09 20:24:42,420 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 1.462 | Acc: 48.44% +2025-03-09 20:24:44,388 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 1.386 | Acc: 52.71% +2025-03-09 20:24:46,476 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 1.375 | Acc: 53.26% +2025-03-09 20:24:48,451 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 1.367 | Acc: 53.45% +2025-03-09 20:24:51,659 - train - INFO - Epoch: 30 | Test Loss: 1.347 | Test Acc: 52.89% +2025-03-09 20:24:51,817 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 1.475 | Acc: 51.56% +2025-03-09 20:24:53,919 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 1.358 | Acc: 54.30% +2025-03-09 20:24:56,164 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 1.349 | Acc: 54.70% +2025-03-09 20:24:58,081 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 1.339 | Acc: 54.88% +2025-03-09 20:25:00,872 - train - INFO - Epoch: 31 | Test Loss: 1.342 | Test Acc: 54.27% +2025-03-09 20:25:01,010 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 1.281 | Acc: 56.25% +2025-03-09 20:25:02,872 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 1.343 | Acc: 54.04% +2025-03-09 20:25:04,904 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 1.345 | Acc: 53.87% +2025-03-09 20:25:06,912 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 1.336 | Acc: 54.46% +2025-03-09 20:25:09,820 - train - INFO - Epoch: 32 | Test Loss: 1.399 | Test 
Acc: 53.36% +2025-03-09 20:25:18,309 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 1.352 | Acc: 57.81% +2025-03-09 20:25:20,363 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 1.311 | Acc: 55.27% +2025-03-09 20:25:22,301 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 1.311 | Acc: 55.33% +2025-03-09 20:25:24,323 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 1.303 | Acc: 55.76% +2025-03-09 20:25:27,526 - train - INFO - Epoch: 33 | Test Loss: 1.348 | Test Acc: 55.64% +2025-03-09 20:25:27,697 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 1.294 | Acc: 52.34% +2025-03-09 20:25:29,828 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 1.319 | Acc: 55.61% +2025-03-09 20:25:31,968 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 1.313 | Acc: 55.77% +2025-03-09 20:25:34,273 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 1.317 | Acc: 55.53% +2025-03-09 20:25:37,696 - train - INFO - Epoch: 34 | Test Loss: 1.453 | Test Acc: 52.69% +2025-03-09 20:25:37,860 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 1.607 | Acc: 48.44% +2025-03-09 20:25:40,073 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 1.330 | Acc: 55.03% +2025-03-09 20:25:42,446 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 1.314 | Acc: 55.59% +2025-03-09 20:25:44,548 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 1.313 | Acc: 55.58% +2025-03-09 20:25:47,695 - train - INFO - Epoch: 35 | Test Loss: 1.366 | Test Acc: 53.31% +2025-03-09 20:25:47,852 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 1.387 | Acc: 53.91% +2025-03-09 20:25:49,788 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 1.331 | Acc: 55.28% +2025-03-09 20:25:51,705 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 1.331 | Acc: 55.26% +2025-03-09 20:25:53,699 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 1.332 | Acc: 55.45% +2025-03-09 20:25:56,774 - train - INFO - Epoch: 36 | Test Loss: 1.273 | Test Acc: 56.38% +2025-03-09 20:26:05,606 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 1.296 | Acc: 58.59% +2025-03-09 20:26:07,890 - train - INFO - Epoch: 37 | 
Batch: 100 | Loss: 1.324 | Acc: 54.72% +2025-03-09 20:26:09,936 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 1.321 | Acc: 55.64% +2025-03-09 20:26:12,029 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 1.315 | Acc: 55.72% +2025-03-09 20:26:15,068 - train - INFO - Epoch: 37 | Test Loss: 1.290 | Test Acc: 56.71% +2025-03-09 20:26:15,238 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 1.452 | Acc: 51.56% +2025-03-09 20:26:17,226 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 1.312 | Acc: 56.12% +2025-03-09 20:26:19,216 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 1.300 | Acc: 56.18% +2025-03-09 20:26:21,112 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 1.306 | Acc: 55.91% +2025-03-09 20:26:23,992 - train - INFO - Epoch: 38 | Test Loss: 1.361 | Test Acc: 54.99% +2025-03-09 20:26:24,159 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 1.445 | Acc: 52.34% +2025-03-09 20:26:26,133 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 1.322 | Acc: 54.90% +2025-03-09 20:26:28,117 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 1.314 | Acc: 55.84% +2025-03-09 20:26:30,147 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 1.314 | Acc: 55.66% +2025-03-09 20:26:33,109 - train - INFO - Epoch: 39 | Test Loss: 1.350 | Test Acc: 54.42% +2025-03-09 20:26:33,294 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 1.216 | Acc: 60.16% +2025-03-09 20:26:35,443 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 1.298 | Acc: 56.68% +2025-03-09 20:26:38,192 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 1.292 | Acc: 56.84% +2025-03-09 20:26:40,191 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 1.288 | Acc: 56.83% +2025-03-09 20:26:43,736 - train - INFO - Epoch: 40 | Test Loss: 1.414 | Test Acc: 53.13% +2025-03-09 20:26:52,109 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 1.310 | Acc: 50.78% +2025-03-09 20:26:53,954 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 1.288 | Acc: 56.18% +2025-03-09 20:26:55,875 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 1.305 | Acc: 55.85% +2025-03-09 20:26:57,745 - 
train - INFO - Epoch: 41 | Batch: 300 | Loss: 1.307 | Acc: 56.09% +2025-03-09 20:27:00,677 - train - INFO - Epoch: 41 | Test Loss: 1.363 | Test Acc: 53.66% +2025-03-09 20:27:00,850 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 1.274 | Acc: 55.47% +2025-03-09 20:27:02,898 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 1.287 | Acc: 56.66% +2025-03-09 20:27:04,913 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 1.295 | Acc: 56.11% +2025-03-09 20:27:07,000 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 1.310 | Acc: 55.73% +2025-03-09 20:27:10,353 - train - INFO - Epoch: 42 | Test Loss: 1.267 | Test Acc: 55.87% +2025-03-09 20:27:10,566 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 1.068 | Acc: 61.72% +2025-03-09 20:27:12,538 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 1.276 | Acc: 57.14% +2025-03-09 20:27:14,555 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 1.281 | Acc: 56.92% +2025-03-09 20:27:16,452 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 1.294 | Acc: 56.41% +2025-03-09 20:27:19,499 - train - INFO - Epoch: 43 | Test Loss: 1.402 | Test Acc: 54.51% +2025-03-09 20:27:19,668 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 1.519 | Acc: 47.66% +2025-03-09 20:27:21,706 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 1.300 | Acc: 56.28% +2025-03-09 20:27:23,755 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 1.310 | Acc: 55.94% +2025-03-09 20:27:25,686 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 1.307 | Acc: 56.03% +2025-03-09 20:27:28,811 - train - INFO - Epoch: 44 | Test Loss: 1.234 | Test Acc: 58.47% +2025-03-09 20:27:37,172 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 1.269 | Acc: 58.59% +2025-03-09 20:27:39,166 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 1.280 | Acc: 56.24% +2025-03-09 20:27:41,156 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 1.286 | Acc: 56.27% +2025-03-09 20:27:43,004 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 1.286 | Acc: 56.40% +2025-03-09 20:27:45,868 - train - INFO - Epoch: 45 | Test Loss: 1.282 | Test Acc: 55.78% 
+2025-03-09 20:27:46,023 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 1.239 | Acc: 51.56% +2025-03-09 20:27:47,975 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 1.302 | Acc: 55.83% +2025-03-09 20:27:49,950 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 1.266 | Acc: 57.14% +2025-03-09 20:27:52,118 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 1.255 | Acc: 57.51% +2025-03-09 20:27:55,154 - train - INFO - Epoch: 46 | Test Loss: 1.309 | Test Acc: 56.11% +2025-03-09 20:27:55,469 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 1.226 | Acc: 57.03% +2025-03-09 20:27:57,480 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 1.251 | Acc: 57.84% +2025-03-09 20:27:59,499 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 1.258 | Acc: 57.76% +2025-03-09 20:28:01,442 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 1.252 | Acc: 58.02% +2025-03-09 20:28:04,566 - train - INFO - Epoch: 47 | Test Loss: 1.253 | Test Acc: 57.28% +2025-03-09 20:28:04,739 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 1.394 | Acc: 53.12% +2025-03-09 20:28:06,693 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 1.278 | Acc: 56.41% +2025-03-09 20:28:08,637 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 1.268 | Acc: 57.14% +2025-03-09 20:28:10,711 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 1.262 | Acc: 57.50% +2025-03-09 20:28:13,848 - train - INFO - Epoch: 48 | Test Loss: 1.249 | Test Acc: 58.69% +2025-03-09 20:28:22,849 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 1.235 | Acc: 60.94% +2025-03-09 20:28:24,718 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 1.262 | Acc: 57.60% +2025-03-09 20:28:26,655 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 1.252 | Acc: 57.92% +2025-03-09 20:28:28,615 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 1.256 | Acc: 57.75% +2025-03-09 20:28:31,510 - train - INFO - Epoch: 49 | Test Loss: 1.236 | Test Acc: 59.64% +2025-03-09 20:28:31,684 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 1.417 | Acc: 52.34% +2025-03-09 20:28:33,617 - train - INFO - Epoch: 50 | Batch: 100 | 
Loss: 1.226 | Acc: 58.89% +2025-03-09 20:28:35,516 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 1.262 | Acc: 57.55% +2025-03-09 20:28:37,408 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 1.257 | Acc: 57.70% +2025-03-09 20:28:40,309 - train - INFO - Epoch: 50 | Test Loss: 1.257 | Test Acc: 57.34% +2025-03-09 20:28:40,473 - train - INFO - Epoch: 51 | Batch: 0 | Loss: 1.113 | Acc: 63.28% +2025-03-09 20:28:42,519 - train - INFO - Epoch: 51 | Batch: 100 | Loss: 1.259 | Acc: 58.03% +2025-03-09 20:28:44,502 - train - INFO - Epoch: 51 | Batch: 200 | Loss: 1.264 | Acc: 57.69% +2025-03-09 20:28:46,631 - train - INFO - Epoch: 51 | Batch: 300 | Loss: 1.269 | Acc: 57.29% +2025-03-09 20:28:49,638 - train - INFO - Epoch: 51 | Test Loss: 1.281 | Test Acc: 57.03% +2025-03-09 20:28:49,812 - train - INFO - Epoch: 52 | Batch: 0 | Loss: 1.257 | Acc: 55.47% +2025-03-09 20:28:51,845 - train - INFO - Epoch: 52 | Batch: 100 | Loss: 1.262 | Acc: 57.78% +2025-03-09 20:28:53,837 - train - INFO - Epoch: 52 | Batch: 200 | Loss: 1.265 | Acc: 57.56% +2025-03-09 20:28:55,995 - train - INFO - Epoch: 52 | Batch: 300 | Loss: 1.261 | Acc: 57.63% +2025-03-09 20:28:59,048 - train - INFO - Epoch: 52 | Test Loss: 1.331 | Test Acc: 53.97% +2025-03-09 20:29:07,981 - train - INFO - Epoch: 53 | Batch: 0 | Loss: 1.271 | Acc: 53.91% +2025-03-09 20:29:09,928 - train - INFO - Epoch: 53 | Batch: 100 | Loss: 1.254 | Acc: 58.34% +2025-03-09 20:29:11,910 - train - INFO - Epoch: 53 | Batch: 200 | Loss: 1.252 | Acc: 58.11% +2025-03-09 20:29:13,867 - train - INFO - Epoch: 53 | Batch: 300 | Loss: 1.262 | Acc: 57.70% +2025-03-09 20:29:16,853 - train - INFO - Epoch: 53 | Test Loss: 1.308 | Test Acc: 57.07% +2025-03-09 20:29:17,032 - train - INFO - Epoch: 54 | Batch: 0 | Loss: 1.168 | Acc: 60.16% +2025-03-09 20:29:18,963 - train - INFO - Epoch: 54 | Batch: 100 | Loss: 1.250 | Acc: 57.74% +2025-03-09 20:29:20,860 - train - INFO - Epoch: 54 | Batch: 200 | Loss: 1.238 | Acc: 58.21% +2025-03-09 20:29:22,743 - train - INFO 
- Epoch: 54 | Batch: 300 | Loss: 1.241 | Acc: 58.10% +2025-03-09 20:29:25,703 - train - INFO - Epoch: 54 | Test Loss: 1.218 | Test Acc: 59.11% +2025-03-09 20:29:25,847 - train - INFO - Epoch: 55 | Batch: 0 | Loss: 1.268 | Acc: 60.16% +2025-03-09 20:29:27,805 - train - INFO - Epoch: 55 | Batch: 100 | Loss: 1.251 | Acc: 57.75% +2025-03-09 20:29:29,824 - train - INFO - Epoch: 55 | Batch: 200 | Loss: 1.233 | Acc: 58.28% +2025-03-09 20:29:31,744 - train - INFO - Epoch: 55 | Batch: 300 | Loss: 1.229 | Acc: 58.43% +2025-03-09 20:29:34,830 - train - INFO - Epoch: 55 | Test Loss: 1.157 | Test Acc: 61.97% +2025-03-09 20:29:35,053 - train - INFO - Epoch: 56 | Batch: 0 | Loss: 1.206 | Acc: 57.81% +2025-03-09 20:29:37,115 - train - INFO - Epoch: 56 | Batch: 100 | Loss: 1.258 | Acc: 57.43% +2025-03-09 20:29:39,236 - train - INFO - Epoch: 56 | Batch: 200 | Loss: 1.241 | Acc: 58.07% +2025-03-09 20:29:41,226 - train - INFO - Epoch: 56 | Batch: 300 | Loss: 1.233 | Acc: 58.52% +2025-03-09 20:29:44,437 - train - INFO - Epoch: 56 | Test Loss: 1.167 | Test Acc: 60.27% +2025-03-09 20:29:52,970 - train - INFO - Epoch: 57 | Batch: 0 | Loss: 1.121 | Acc: 60.16% +2025-03-09 20:29:55,013 - train - INFO - Epoch: 57 | Batch: 100 | Loss: 1.195 | Acc: 60.04% +2025-03-09 20:29:57,034 - train - INFO - Epoch: 57 | Batch: 200 | Loss: 1.213 | Acc: 59.17% +2025-03-09 20:29:58,980 - train - INFO - Epoch: 57 | Batch: 300 | Loss: 1.219 | Acc: 58.95% +2025-03-09 20:30:01,938 - train - INFO - Epoch: 57 | Test Loss: 1.274 | Test Acc: 57.75% +2025-03-09 20:30:02,116 - train - INFO - Epoch: 58 | Batch: 0 | Loss: 1.284 | Acc: 53.12% +2025-03-09 20:30:04,029 - train - INFO - Epoch: 58 | Batch: 100 | Loss: 1.250 | Acc: 58.62% +2025-03-09 20:30:06,039 - train - INFO - Epoch: 58 | Batch: 200 | Loss: 1.243 | Acc: 58.39% +2025-03-09 20:30:08,038 - train - INFO - Epoch: 58 | Batch: 300 | Loss: 1.240 | Acc: 58.48% +2025-03-09 20:30:11,145 - train - INFO - Epoch: 58 | Test Loss: 1.232 | Test Acc: 58.94% +2025-03-09 
20:30:11,327 - train - INFO - Epoch: 59 | Batch: 0 | Loss: 1.047 | Acc: 63.28% +2025-03-09 20:30:13,348 - train - INFO - Epoch: 59 | Batch: 100 | Loss: 1.210 | Acc: 59.13% +2025-03-09 20:30:15,556 - train - INFO - Epoch: 59 | Batch: 200 | Loss: 1.208 | Acc: 59.16% +2025-03-09 20:30:17,594 - train - INFO - Epoch: 59 | Batch: 300 | Loss: 1.212 | Acc: 59.30% +2025-03-09 20:30:20,606 - train - INFO - Epoch: 59 | Test Loss: 1.165 | Test Acc: 62.08% +2025-03-09 20:30:20,783 - train - INFO - Epoch: 60 | Batch: 0 | Loss: 1.238 | Acc: 55.47% +2025-03-09 20:30:22,724 - train - INFO - Epoch: 60 | Batch: 100 | Loss: 1.218 | Acc: 58.86% +2025-03-09 20:30:24,630 - train - INFO - Epoch: 60 | Batch: 200 | Loss: 1.215 | Acc: 58.99% +2025-03-09 20:30:26,703 - train - INFO - Epoch: 60 | Batch: 300 | Loss: 1.215 | Acc: 59.11% +2025-03-09 20:30:29,708 - train - INFO - Epoch: 60 | Test Loss: 1.178 | Test Acc: 60.43% +2025-03-09 20:30:38,516 - train - INFO - Epoch: 61 | Batch: 0 | Loss: 1.265 | Acc: 58.59% +2025-03-09 20:30:40,669 - train - INFO - Epoch: 61 | Batch: 100 | Loss: 1.206 | Acc: 59.23% +2025-03-09 20:30:42,615 - train - INFO - Epoch: 61 | Batch: 200 | Loss: 1.196 | Acc: 59.98% +2025-03-09 20:30:44,531 - train - INFO - Epoch: 61 | Batch: 300 | Loss: 1.183 | Acc: 60.42% +2025-03-09 20:30:47,435 - train - INFO - Epoch: 61 | Test Loss: 1.302 | Test Acc: 57.91% +2025-03-09 20:30:47,597 - train - INFO - Epoch: 62 | Batch: 0 | Loss: 1.109 | Acc: 64.06% +2025-03-09 20:30:49,444 - train - INFO - Epoch: 62 | Batch: 100 | Loss: 1.195 | Acc: 60.26% +2025-03-09 20:30:51,346 - train - INFO - Epoch: 62 | Batch: 200 | Loss: 1.189 | Acc: 60.26% +2025-03-09 20:30:53,324 - train - INFO - Epoch: 62 | Batch: 300 | Loss: 1.198 | Acc: 59.84% +2025-03-09 20:30:56,425 - train - INFO - Epoch: 62 | Test Loss: 1.144 | Test Acc: 61.91% +2025-03-09 20:30:56,594 - train - INFO - Epoch: 63 | Batch: 0 | Loss: 1.158 | Acc: 57.81% +2025-03-09 20:30:58,593 - train - INFO - Epoch: 63 | Batch: 100 | Loss: 1.202 | 
Acc: 60.19% +2025-03-09 20:31:00,631 - train - INFO - Epoch: 63 | Batch: 200 | Loss: 1.201 | Acc: 60.02% +2025-03-09 20:31:02,630 - train - INFO - Epoch: 63 | Batch: 300 | Loss: 1.205 | Acc: 59.69% +2025-03-09 20:31:06,449 - train - INFO - Epoch: 63 | Test Loss: 1.192 | Test Acc: 60.56% +2025-03-09 20:31:06,659 - train - INFO - Epoch: 64 | Batch: 0 | Loss: 1.170 | Acc: 67.97% +2025-03-09 20:31:08,897 - train - INFO - Epoch: 64 | Batch: 100 | Loss: 1.166 | Acc: 60.61% +2025-03-09 20:31:11,221 - train - INFO - Epoch: 64 | Batch: 200 | Loss: 1.184 | Acc: 60.09% +2025-03-09 20:31:13,224 - train - INFO - Epoch: 64 | Batch: 300 | Loss: 1.188 | Acc: 60.04% +2025-03-09 20:31:16,234 - train - INFO - Epoch: 64 | Test Loss: 1.150 | Test Acc: 62.05% +2025-03-09 20:31:24,778 - train - INFO - Epoch: 65 | Batch: 0 | Loss: 1.002 | Acc: 61.72% +2025-03-09 20:31:26,799 - train - INFO - Epoch: 65 | Batch: 100 | Loss: 1.141 | Acc: 61.68% +2025-03-09 20:31:28,890 - train - INFO - Epoch: 65 | Batch: 200 | Loss: 1.169 | Acc: 60.47% +2025-03-09 20:31:30,932 - train - INFO - Epoch: 65 | Batch: 300 | Loss: 1.173 | Acc: 60.45% +2025-03-09 20:31:34,033 - train - INFO - Epoch: 65 | Test Loss: 1.124 | Test Acc: 62.72% +2025-03-09 20:31:34,227 - train - INFO - Epoch: 66 | Batch: 0 | Loss: 1.387 | Acc: 53.91% +2025-03-09 20:31:36,268 - train - INFO - Epoch: 66 | Batch: 100 | Loss: 1.168 | Acc: 61.05% +2025-03-09 20:31:38,189 - train - INFO - Epoch: 66 | Batch: 200 | Loss: 1.175 | Acc: 61.00% +2025-03-09 20:31:40,070 - train - INFO - Epoch: 66 | Batch: 300 | Loss: 1.175 | Acc: 60.98% +2025-03-09 20:31:42,970 - train - INFO - Epoch: 66 | Test Loss: 1.184 | Test Acc: 61.65% +2025-03-09 20:31:43,120 - train - INFO - Epoch: 67 | Batch: 0 | Loss: 1.150 | Acc: 61.72% +2025-03-09 20:31:45,207 - train - INFO - Epoch: 67 | Batch: 100 | Loss: 1.149 | Acc: 61.84% +2025-03-09 20:31:47,200 - train - INFO - Epoch: 67 | Batch: 200 | Loss: 1.171 | Acc: 61.07% +2025-03-09 20:31:49,096 - train - INFO - Epoch: 67 | 
Batch: 300 | Loss: 1.165 | Acc: 61.19% +2025-03-09 20:31:52,455 - train - INFO - Epoch: 67 | Test Loss: 1.216 | Test Acc: 60.41% +2025-03-09 20:31:52,629 - train - INFO - Epoch: 68 | Batch: 0 | Loss: 1.103 | Acc: 61.72% +2025-03-09 20:31:54,772 - train - INFO - Epoch: 68 | Batch: 100 | Loss: 1.161 | Acc: 61.22% +2025-03-09 20:31:56,740 - train - INFO - Epoch: 68 | Batch: 200 | Loss: 1.154 | Acc: 61.48% +2025-03-09 20:31:59,231 - train - INFO - Epoch: 68 | Batch: 300 | Loss: 1.162 | Acc: 61.29% +2025-03-09 20:32:02,617 - train - INFO - Epoch: 68 | Test Loss: 1.161 | Test Acc: 61.05% +2025-03-09 20:32:10,865 - train - INFO - Epoch: 69 | Batch: 0 | Loss: 1.238 | Acc: 55.47% +2025-03-09 20:32:12,884 - train - INFO - Epoch: 69 | Batch: 100 | Loss: 1.173 | Acc: 60.76% +2025-03-09 20:32:14,815 - train - INFO - Epoch: 69 | Batch: 200 | Loss: 1.175 | Acc: 60.64% +2025-03-09 20:32:16,725 - train - INFO - Epoch: 69 | Batch: 300 | Loss: 1.183 | Acc: 60.28% +2025-03-09 20:32:19,765 - train - INFO - Epoch: 69 | Test Loss: 1.230 | Test Acc: 58.66% +2025-03-09 20:32:19,932 - train - INFO - Epoch: 70 | Batch: 0 | Loss: 1.389 | Acc: 54.69% +2025-03-09 20:32:22,033 - train - INFO - Epoch: 70 | Batch: 100 | Loss: 1.158 | Acc: 61.07% +2025-03-09 20:32:24,089 - train - INFO - Epoch: 70 | Batch: 200 | Loss: 1.157 | Acc: 60.95% +2025-03-09 20:32:26,117 - train - INFO - Epoch: 70 | Batch: 300 | Loss: 1.166 | Acc: 60.71% +2025-03-09 20:32:29,298 - train - INFO - Epoch: 70 | Test Loss: 1.226 | Test Acc: 59.05% +2025-03-09 20:32:29,467 - train - INFO - Epoch: 71 | Batch: 0 | Loss: 1.339 | Acc: 55.47% +2025-03-09 20:32:31,438 - train - INFO - Epoch: 71 | Batch: 100 | Loss: 1.167 | Acc: 60.74% +2025-03-09 20:32:33,454 - train - INFO - Epoch: 71 | Batch: 200 | Loss: 1.162 | Acc: 61.08% +2025-03-09 20:32:35,462 - train - INFO - Epoch: 71 | Batch: 300 | Loss: 1.162 | Acc: 61.09% +2025-03-09 20:32:38,625 - train - INFO - Epoch: 71 | Test Loss: 1.196 | Test Acc: 60.13% +2025-03-09 20:32:38,825 - 
train - INFO - Epoch: 72 | Batch: 0 | Loss: 1.065 | Acc: 67.97% +2025-03-09 20:32:41,013 - train - INFO - Epoch: 72 | Batch: 100 | Loss: 1.153 | Acc: 61.68% +2025-03-09 20:32:43,148 - train - INFO - Epoch: 72 | Batch: 200 | Loss: 1.139 | Acc: 62.09% +2025-03-09 20:32:45,102 - train - INFO - Epoch: 72 | Batch: 300 | Loss: 1.142 | Acc: 61.86% +2025-03-09 20:32:48,052 - train - INFO - Epoch: 72 | Test Loss: 1.178 | Test Acc: 60.15% +2025-03-09 20:32:56,719 - train - INFO - Epoch: 73 | Batch: 0 | Loss: 1.149 | Acc: 57.81% +2025-03-09 20:32:58,840 - train - INFO - Epoch: 73 | Batch: 100 | Loss: 1.124 | Acc: 62.13% +2025-03-09 20:33:01,060 - train - INFO - Epoch: 73 | Batch: 200 | Loss: 1.124 | Acc: 62.22% +2025-03-09 20:33:02,959 - train - INFO - Epoch: 73 | Batch: 300 | Loss: 1.124 | Acc: 62.25% +2025-03-09 20:33:06,081 - train - INFO - Epoch: 73 | Test Loss: 1.108 | Test Acc: 62.58% +2025-03-09 20:33:06,242 - train - INFO - Epoch: 74 | Batch: 0 | Loss: 0.976 | Acc: 64.06% +2025-03-09 20:33:08,117 - train - INFO - Epoch: 74 | Batch: 100 | Loss: 1.129 | Acc: 61.83% +2025-03-09 20:33:10,070 - train - INFO - Epoch: 74 | Batch: 200 | Loss: 1.141 | Acc: 61.62% +2025-03-09 20:33:12,009 - train - INFO - Epoch: 74 | Batch: 300 | Loss: 1.139 | Acc: 61.84% +2025-03-09 20:33:14,980 - train - INFO - Epoch: 74 | Test Loss: 1.232 | Test Acc: 57.82% +2025-03-09 20:33:15,160 - train - INFO - Epoch: 75 | Batch: 0 | Loss: 1.104 | Acc: 60.94% +2025-03-09 20:33:17,074 - train - INFO - Epoch: 75 | Batch: 100 | Loss: 1.153 | Acc: 61.48% +2025-03-09 20:33:18,961 - train - INFO - Epoch: 75 | Batch: 200 | Loss: 1.149 | Acc: 61.54% +2025-03-09 20:33:20,780 - train - INFO - Epoch: 75 | Batch: 300 | Loss: 1.141 | Acc: 61.77% +2025-03-09 20:33:23,699 - train - INFO - Epoch: 75 | Test Loss: 1.157 | Test Acc: 61.00% +2025-03-09 20:33:23,864 - train - INFO - Epoch: 76 | Batch: 0 | Loss: 1.193 | Acc: 60.94% +2025-03-09 20:33:25,763 - train - INFO - Epoch: 76 | Batch: 100 | Loss: 1.147 | Acc: 61.41% 
+2025-03-09 20:33:27,728 - train - INFO - Epoch: 76 | Batch: 200 | Loss: 1.138 | Acc: 61.87% +2025-03-09 20:33:29,731 - train - INFO - Epoch: 76 | Batch: 300 | Loss: 1.143 | Acc: 61.68% +2025-03-09 20:33:32,727 - train - INFO - Epoch: 76 | Test Loss: 1.101 | Test Acc: 63.51% +2025-03-09 20:33:41,051 - train - INFO - Epoch: 77 | Batch: 0 | Loss: 1.110 | Acc: 59.38% +2025-03-09 20:33:42,990 - train - INFO - Epoch: 77 | Batch: 100 | Loss: 1.108 | Acc: 62.87% +2025-03-09 20:33:44,886 - train - INFO - Epoch: 77 | Batch: 200 | Loss: 1.115 | Acc: 62.29% +2025-03-09 20:33:46,815 - train - INFO - Epoch: 77 | Batch: 300 | Loss: 1.119 | Acc: 62.45% +2025-03-09 20:33:49,764 - train - INFO - Epoch: 77 | Test Loss: 1.099 | Test Acc: 63.53% +2025-03-09 20:33:49,916 - train - INFO - Epoch: 78 | Batch: 0 | Loss: 1.111 | Acc: 64.84% +2025-03-09 20:33:51,904 - train - INFO - Epoch: 78 | Batch: 100 | Loss: 1.127 | Acc: 62.42% +2025-03-09 20:33:53,862 - train - INFO - Epoch: 78 | Batch: 200 | Loss: 1.127 | Acc: 62.17% +2025-03-09 20:33:55,835 - train - INFO - Epoch: 78 | Batch: 300 | Loss: 1.125 | Acc: 62.13% +2025-03-09 20:33:58,919 - train - INFO - Epoch: 78 | Test Loss: 1.075 | Test Acc: 63.41% +2025-03-09 20:33:59,092 - train - INFO - Epoch: 79 | Batch: 0 | Loss: 0.990 | Acc: 67.19% +2025-03-09 20:34:01,066 - train - INFO - Epoch: 79 | Batch: 100 | Loss: 1.107 | Acc: 62.40% +2025-03-09 20:34:03,169 - train - INFO - Epoch: 79 | Batch: 200 | Loss: 1.105 | Acc: 62.84% +2025-03-09 20:34:05,127 - train - INFO - Epoch: 79 | Batch: 300 | Loss: 1.102 | Acc: 63.05% +2025-03-09 20:34:08,053 - train - INFO - Epoch: 79 | Test Loss: 1.190 | Test Acc: 60.83% +2025-03-09 20:34:08,244 - train - INFO - Epoch: 80 | Batch: 0 | Loss: 1.227 | Acc: 60.16% +2025-03-09 20:34:10,084 - train - INFO - Epoch: 80 | Batch: 100 | Loss: 1.120 | Acc: 62.61% +2025-03-09 20:34:11,957 - train - INFO - Epoch: 80 | Batch: 200 | Loss: 1.102 | Acc: 62.97% +2025-03-09 20:34:13,864 - train - INFO - Epoch: 80 | Batch: 300 | 
Loss: 1.109 | Acc: 62.86% +2025-03-09 20:34:16,933 - train - INFO - Epoch: 80 | Test Loss: 1.065 | Test Acc: 64.60% +2025-03-09 20:34:25,319 - train - INFO - Epoch: 81 | Batch: 0 | Loss: 1.093 | Acc: 60.16% +2025-03-09 20:34:27,214 - train - INFO - Epoch: 81 | Batch: 100 | Loss: 1.133 | Acc: 62.27% +2025-03-09 20:34:29,084 - train - INFO - Epoch: 81 | Batch: 200 | Loss: 1.107 | Acc: 63.35% +2025-03-09 20:34:31,102 - train - INFO - Epoch: 81 | Batch: 300 | Loss: 1.102 | Acc: 63.25% +2025-03-09 20:34:34,224 - train - INFO - Epoch: 81 | Test Loss: 1.085 | Test Acc: 63.63% +2025-03-09 20:34:34,386 - train - INFO - Epoch: 82 | Batch: 0 | Loss: 1.183 | Acc: 58.59% +2025-03-09 20:34:36,276 - train - INFO - Epoch: 82 | Batch: 100 | Loss: 1.093 | Acc: 63.10% +2025-03-09 20:34:38,370 - train - INFO - Epoch: 82 | Batch: 200 | Loss: 1.084 | Acc: 63.45% +2025-03-09 20:34:40,329 - train - INFO - Epoch: 82 | Batch: 300 | Loss: 1.083 | Acc: 63.66% +2025-03-09 20:34:43,386 - train - INFO - Epoch: 82 | Test Loss: 1.124 | Test Acc: 62.13% +2025-03-09 20:34:43,538 - train - INFO - Epoch: 83 | Batch: 0 | Loss: 1.178 | Acc: 57.81% +2025-03-09 20:34:45,536 - train - INFO - Epoch: 83 | Batch: 100 | Loss: 1.103 | Acc: 63.26% +2025-03-09 20:34:47,538 - train - INFO - Epoch: 83 | Batch: 200 | Loss: 1.090 | Acc: 63.46% +2025-03-09 20:34:49,608 - train - INFO - Epoch: 83 | Batch: 300 | Loss: 1.091 | Acc: 63.37% +2025-03-09 20:34:52,626 - train - INFO - Epoch: 83 | Test Loss: 1.165 | Test Acc: 62.11% +2025-03-09 20:34:52,802 - train - INFO - Epoch: 84 | Batch: 0 | Loss: 1.103 | Acc: 67.19% +2025-03-09 20:34:54,966 - train - INFO - Epoch: 84 | Batch: 100 | Loss: 1.115 | Acc: 62.58% +2025-03-09 20:34:57,031 - train - INFO - Epoch: 84 | Batch: 200 | Loss: 1.091 | Acc: 63.27% +2025-03-09 20:34:59,134 - train - INFO - Epoch: 84 | Batch: 300 | Loss: 1.097 | Acc: 63.19% +2025-03-09 20:35:02,352 - train - INFO - Epoch: 84 | Test Loss: 1.085 | Test Acc: 63.95% +2025-03-09 20:35:10,685 - train - INFO - 
Epoch: 85 | Batch: 0 | Loss: 1.311 | Acc: 59.38% +2025-03-09 20:35:12,612 - train - INFO - Epoch: 85 | Batch: 100 | Loss: 1.084 | Acc: 63.76% +2025-03-09 20:35:14,479 - train - INFO - Epoch: 85 | Batch: 200 | Loss: 1.078 | Acc: 64.00% +2025-03-09 20:35:16,456 - train - INFO - Epoch: 85 | Batch: 300 | Loss: 1.092 | Acc: 63.53% +2025-03-09 20:35:19,502 - train - INFO - Epoch: 85 | Test Loss: 1.063 | Test Acc: 63.69% +2025-03-09 20:35:19,675 - train - INFO - Epoch: 86 | Batch: 0 | Loss: 0.918 | Acc: 71.88% +2025-03-09 20:35:21,627 - train - INFO - Epoch: 86 | Batch: 100 | Loss: 1.078 | Acc: 63.58% +2025-03-09 20:35:23,603 - train - INFO - Epoch: 86 | Batch: 200 | Loss: 1.080 | Acc: 63.76% +2025-03-09 20:35:25,467 - train - INFO - Epoch: 86 | Batch: 300 | Loss: 1.081 | Acc: 63.70% +2025-03-09 20:35:28,448 - train - INFO - Epoch: 86 | Test Loss: 1.040 | Test Acc: 65.00% +2025-03-09 20:35:28,618 - train - INFO - Epoch: 87 | Batch: 0 | Loss: 1.037 | Acc: 64.06% +2025-03-09 20:35:30,607 - train - INFO - Epoch: 87 | Batch: 100 | Loss: 1.045 | Acc: 64.91% +2025-03-09 20:35:32,555 - train - INFO - Epoch: 87 | Batch: 200 | Loss: 1.054 | Acc: 64.58% +2025-03-09 20:35:34,461 - train - INFO - Epoch: 87 | Batch: 300 | Loss: 1.055 | Acc: 64.54% +2025-03-09 20:35:37,534 - train - INFO - Epoch: 87 | Test Loss: 1.117 | Test Acc: 63.18% +2025-03-09 20:35:37,728 - train - INFO - Epoch: 88 | Batch: 0 | Loss: 1.114 | Acc: 64.06% +2025-03-09 20:35:39,667 - train - INFO - Epoch: 88 | Batch: 100 | Loss: 1.081 | Acc: 63.65% +2025-03-09 20:35:41,724 - train - INFO - Epoch: 88 | Batch: 200 | Loss: 1.089 | Acc: 63.46% +2025-03-09 20:35:43,693 - train - INFO - Epoch: 88 | Batch: 300 | Loss: 1.080 | Acc: 63.89% +2025-03-09 20:35:46,704 - train - INFO - Epoch: 88 | Test Loss: 1.116 | Test Acc: 62.02% +2025-03-09 20:35:55,157 - train - INFO - Epoch: 89 | Batch: 0 | Loss: 1.281 | Acc: 59.38% +2025-03-09 20:35:57,082 - train - INFO - Epoch: 89 | Batch: 100 | Loss: 1.027 | Acc: 65.76% +2025-03-09 
20:35:59,072 - train - INFO - Epoch: 89 | Batch: 200 | Loss: 1.056 | Acc: 64.67% +2025-03-09 20:36:00,922 - train - INFO - Epoch: 89 | Batch: 300 | Loss: 1.067 | Acc: 64.10% +2025-03-09 20:36:03,799 - train - INFO - Epoch: 89 | Test Loss: 1.044 | Test Acc: 64.42% +2025-03-09 20:36:03,955 - train - INFO - Epoch: 90 | Batch: 0 | Loss: 1.228 | Acc: 53.91% +2025-03-09 20:36:05,961 - train - INFO - Epoch: 90 | Batch: 100 | Loss: 1.043 | Acc: 64.84% +2025-03-09 20:36:48,800 - train - INFO - Epoch: 90 | Batch: 200 | Loss: 1.040 | Acc: 64.89% +2025-03-09 20:36:50,792 - train - INFO - Epoch: 90 | Batch: 300 | Loss: 1.049 | Acc: 64.79% +2025-03-09 20:36:53,914 - train - INFO - Epoch: 90 | Test Loss: 1.150 | Test Acc: 61.58% +2025-03-09 20:36:54,094 - train - INFO - Epoch: 91 | Batch: 0 | Loss: 1.127 | Acc: 59.38% +2025-03-09 20:36:56,089 - train - INFO - Epoch: 91 | Batch: 100 | Loss: 1.053 | Acc: 64.09% +2025-03-09 20:36:58,254 - train - INFO - Epoch: 91 | Batch: 200 | Loss: 1.043 | Acc: 64.86% +2025-03-09 20:37:00,272 - train - INFO - Epoch: 91 | Batch: 300 | Loss: 1.049 | Acc: 64.74% +2025-03-09 20:37:50,812 - train - INFO - Epoch: 91 | Test Loss: 1.092 | Test Acc: 63.34% +2025-03-09 20:37:50,972 - train - INFO - Epoch: 92 | Batch: 0 | Loss: 1.025 | Acc: 66.41% +2025-03-09 20:37:52,835 - train - INFO - Epoch: 92 | Batch: 100 | Loss: 1.039 | Acc: 64.90% +2025-03-09 20:37:54,790 - train - INFO - Epoch: 92 | Batch: 200 | Loss: 1.041 | Acc: 64.68% +2025-03-09 20:37:56,666 - train - INFO - Epoch: 92 | Batch: 300 | Loss: 1.037 | Acc: 64.84% +2025-03-09 20:37:59,681 - train - INFO - Epoch: 92 | Test Loss: 1.112 | Test Acc: 62.78% +2025-03-09 20:38:53,132 - train - INFO - Epoch: 93 | Batch: 0 | Loss: 1.193 | Acc: 56.25% +2025-03-09 20:38:55,019 - train - INFO - Epoch: 93 | Batch: 100 | Loss: 1.057 | Acc: 64.53% +2025-03-09 20:38:56,952 - train - INFO - Epoch: 93 | Batch: 200 | Loss: 1.046 | Acc: 65.06% +2025-03-09 20:38:58,885 - train - INFO - Epoch: 93 | Batch: 300 | Loss: 1.044 
| Acc: 65.08% +2025-03-09 20:39:02,028 - train - INFO - Epoch: 93 | Test Loss: 1.024 | Test Acc: 65.24% +2025-03-09 20:39:02,185 - train - INFO - Epoch: 94 | Batch: 0 | Loss: 1.010 | Acc: 64.06% +2025-03-09 20:39:04,323 - train - INFO - Epoch: 94 | Batch: 100 | Loss: 1.023 | Acc: 65.28% +2025-03-09 20:39:49,435 - train - INFO - Epoch: 94 | Batch: 200 | Loss: 1.023 | Acc: 65.44% +2025-03-09 20:39:51,309 - train - INFO - Epoch: 94 | Batch: 300 | Loss: 1.027 | Acc: 65.52% +2025-03-09 20:39:54,281 - train - INFO - Epoch: 94 | Test Loss: 1.013 | Test Acc: 67.01% +2025-03-09 20:39:54,472 - train - INFO - Epoch: 95 | Batch: 0 | Loss: 1.236 | Acc: 59.38% +2025-03-09 20:39:56,452 - train - INFO - Epoch: 95 | Batch: 100 | Loss: 1.011 | Acc: 65.98% +2025-03-09 20:39:58,416 - train - INFO - Epoch: 95 | Batch: 200 | Loss: 1.012 | Acc: 65.91% +2025-03-09 20:40:48,850 - train - INFO - Epoch: 95 | Batch: 300 | Loss: 1.019 | Acc: 65.65% +2025-03-09 20:40:51,907 - train - INFO - Epoch: 95 | Test Loss: 0.994 | Test Acc: 66.97% +2025-03-09 20:40:52,057 - train - INFO - Epoch: 96 | Batch: 0 | Loss: 0.973 | Acc: 67.97% +2025-03-09 20:40:54,040 - train - INFO - Epoch: 96 | Batch: 100 | Loss: 1.040 | Acc: 65.18% +2025-03-09 20:40:56,141 - train - INFO - Epoch: 96 | Batch: 200 | Loss: 1.035 | Acc: 64.95% +2025-03-09 20:40:58,151 - train - INFO - Epoch: 96 | Batch: 300 | Loss: 1.021 | Acc: 65.43% +2025-03-09 20:41:48,845 - train - INFO - Epoch: 96 | Test Loss: 1.065 | Test Acc: 64.38% +2025-03-09 20:41:57,437 - train - INFO - Epoch: 97 | Batch: 0 | Loss: 0.995 | Acc: 66.41% +2025-03-09 20:41:59,481 - train - INFO - Epoch: 97 | Batch: 100 | Loss: 1.012 | Acc: 66.12% +2025-03-09 20:42:01,488 - train - INFO - Epoch: 97 | Batch: 200 | Loss: 0.995 | Acc: 66.64% +2025-03-09 20:42:03,533 - train - INFO - Epoch: 97 | Batch: 300 | Loss: 1.014 | Acc: 66.01% +2025-03-09 20:42:06,717 - train - INFO - Epoch: 97 | Test Loss: 1.043 | Test Acc: 64.98% +2025-03-09 20:42:06,871 - train - INFO - Epoch: 98 | 
Batch: 0 | Loss: 1.299 | Acc: 58.59% +2025-03-09 20:42:09,116 - train - INFO - Epoch: 98 | Batch: 100 | Loss: 0.971 | Acc: 67.10% +2025-03-09 20:42:50,612 - train - INFO - Epoch: 98 | Batch: 200 | Loss: 0.997 | Acc: 66.37% +2025-03-09 20:42:52,541 - train - INFO - Epoch: 98 | Batch: 300 | Loss: 1.001 | Acc: 66.31% +2025-03-09 20:42:55,458 - train - INFO - Epoch: 98 | Test Loss: 1.017 | Test Acc: 65.77% +2025-03-09 20:42:55,635 - train - INFO - Epoch: 99 | Batch: 0 | Loss: 1.074 | Acc: 66.41% +2025-03-09 20:42:57,563 - train - INFO - Epoch: 99 | Batch: 100 | Loss: 1.027 | Acc: 65.38% +2025-03-09 20:42:59,650 - train - INFO - Epoch: 99 | Batch: 200 | Loss: 1.013 | Acc: 66.04% +2025-03-09 20:43:49,927 - train - INFO - Epoch: 99 | Batch: 300 | Loss: 1.010 | Acc: 66.04% +2025-03-09 20:43:53,133 - train - INFO - Epoch: 99 | Test Loss: 1.013 | Test Acc: 66.35% +2025-03-09 20:43:53,291 - train - INFO - Epoch: 100 | Batch: 0 | Loss: 0.778 | Acc: 76.56% +2025-03-09 20:43:55,285 - train - INFO - Epoch: 100 | Batch: 100 | Loss: 0.999 | Acc: 66.10% +2025-03-09 20:43:57,360 - train - INFO - Epoch: 100 | Batch: 200 | Loss: 0.995 | Acc: 66.41% +2025-03-09 20:43:59,416 - train - INFO - Epoch: 100 | Batch: 300 | Loss: 0.994 | Acc: 66.56% +2025-03-09 20:44:49,933 - train - INFO - Epoch: 100 | Test Loss: 1.009 | Test Acc: 65.66% +2025-03-09 20:44:58,423 - train - INFO - 训练完成! 
diff --git a/Image/AlexNet/code/model.py b/Image/AlexNet/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..2633e4f1b1942b995073fc149ee02d41aa05f6f7 --- /dev/null +++ b/Image/AlexNet/code/model.py @@ -0,0 +1,81 @@ +''' +AlexNet in Pytorch +''' + +import torch +import torch.nn as nn + +class AlexNet(nn.Module): # 训练 ALexNet + ''' + AlexNet模型 + ''' + def __init__(self,num_classes=10): + super(AlexNet,self).__init__() + # 五个卷积层 输入 32 * 32 * 3 + self.conv1 = nn.Sequential( + nn.Conv2d(in_channels=3, out_channels=6, kernel_size=3, stride=1, padding=1), # (32-3+2)/1+1 = 32 + nn.ReLU(), + nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (32-2)/2+1 = 16 + ) + self.conv2 = nn.Sequential( # 输入 16 * 16 * 6 + nn.Conv2d(in_channels=6, out_channels=16, kernel_size=3, stride=1, padding=1), # (16-3+2)/1+1 = 16 + nn.ReLU(), + nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (16-2)/2+1 = 8 + ) + self.conv3 = nn.Sequential( # 输入 8 * 8 * 16 + nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, stride=1, padding=1), # (8-3+2)/1+1 = 8 + nn.ReLU(), + nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (8-2)/2+1 = 4 + ) + self.conv4 = nn.Sequential( # 输入 4 * 4 * 64 + nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, stride=1, padding=1), # (4-3+2)/1+1 = 4 + nn.ReLU(), + nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (4-2)/2+1 = 2 + ) + self.conv5 = nn.Sequential( # 输入 2 * 2 * 128 + nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, stride=1, padding=1),# (2-3+2)/1+1 = 2 + nn.ReLU(), + nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (2-2)/2+1 = 1 + ) # 最后一层卷积层,输出 1 * 1 * 128 + # 全连接层 + self.dense = nn.Sequential( + nn.Linear(128,120), + nn.ReLU(), + nn.Linear(120,84), + nn.ReLU(), + nn.Linear(84,num_classes) + ) + + # 初始化权重 + self._initialize_weights() + + def forward(self,x): + x = self.conv1(x) + x = self.conv2(x) + x = self.conv3(x) + x = self.conv4(x) + x = self.conv5(x) + x = x.view(x.size()[0],-1) + x = self.dense(x) + 
return x + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + +def test(): + net = AlexNet() + x = torch.randn(2,3,32,32) + y = net(x) + print(y.size()) + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net,(3,32,32)) \ No newline at end of file diff --git a/Image/AlexNet/code/train.log b/Image/AlexNet/code/train.log new file mode 100644 index 0000000000000000000000000000000000000000..3c624fa1e5206b094e3ac1a3b588fb1cb436630e --- /dev/null +++ b/Image/AlexNet/code/train.log @@ -0,0 +1,503 @@ +2025-03-09 19:50:57,307 - train - INFO - 开始训练 alexnet +2025-03-09 19:50:57,308 - train - INFO - 总轮数: 100, 学习率: 0.1, 设备: cuda:2 +2025-03-09 19:50:57,941 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.303 | Acc: 9.38% +2025-03-09 19:50:59,844 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.300 | Acc: 10.04% +2025-03-09 19:51:01,901 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.227 | Acc: 13.40% +2025-03-09 19:51:03,899 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 2.162 | Acc: 15.09% +2025-03-09 19:51:07,107 - train - INFO - Epoch: 1 | Test Loss: 1.896 | Test Acc: 19.86% +2025-03-09 19:51:07,264 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.888 | Acc: 16.41% +2025-03-09 19:51:09,345 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.929 | Acc: 21.02% +2025-03-09 19:51:11,452 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.917 | Acc: 21.42% +2025-03-09 19:51:13,434 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.911 | Acc: 21.87% +2025-03-09 19:51:16,541 - train - INFO - Epoch: 2 | Test Loss: 1.840 | Test Acc: 25.23% +2025-03-09 19:51:16,704 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.938 | Acc: 17.97% 
+2025-03-09 19:51:18,746 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.837 | Acc: 26.49% +2025-03-09 19:51:20,674 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.812 | Acc: 27.79% +2025-03-09 19:51:22,577 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.798 | Acc: 28.75% +2025-03-09 19:51:25,741 - train - INFO - Epoch: 3 | Test Loss: 1.613 | Test Acc: 38.13% +2025-03-09 19:51:25,920 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.613 | Acc: 32.81% +2025-03-09 19:51:28,016 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.683 | Acc: 34.67% +2025-03-09 19:51:30,038 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.668 | Acc: 35.46% +2025-03-09 19:51:32,131 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.675 | Acc: 36.00% +2025-03-09 19:51:35,398 - train - INFO - Epoch: 4 | Test Loss: 1.624 | Test Acc: 39.42% +2025-03-09 19:51:43,699 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.684 | Acc: 39.06% +2025-03-09 19:51:45,670 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.638 | Acc: 39.09% +2025-03-09 19:51:47,561 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.623 | Acc: 39.87% +2025-03-09 19:51:49,399 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.614 | Acc: 40.60% +2025-03-09 19:51:52,227 - train - INFO - Epoch: 5 | Test Loss: 1.487 | Test Acc: 45.98% +2025-03-09 19:51:52,392 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.565 | Acc: 45.31% +2025-03-09 19:51:54,259 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.550 | Acc: 44.09% +2025-03-09 19:51:56,191 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.551 | Acc: 44.07% +2025-03-09 19:51:58,102 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.552 | Acc: 44.16% +2025-03-09 19:52:00,982 - train - INFO - Epoch: 6 | Test Loss: 1.498 | Test Acc: 46.14% +2025-03-09 19:52:01,147 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 1.703 | Acc: 35.94% +2025-03-09 19:52:03,112 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.549 | Acc: 44.65% +2025-03-09 19:52:05,091 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.558 | Acc: 
44.57% +2025-03-09 19:52:07,124 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.546 | Acc: 44.95% +2025-03-09 19:52:10,122 - train - INFO - Epoch: 7 | Test Loss: 1.463 | Test Acc: 48.95% +2025-03-09 19:52:10,282 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.559 | Acc: 46.88% +2025-03-09 19:52:12,269 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.522 | Acc: 45.61% +2025-03-09 19:52:14,191 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.515 | Acc: 46.19% +2025-03-09 19:52:16,026 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.512 | Acc: 46.34% +2025-03-09 19:52:18,972 - train - INFO - Epoch: 8 | Test Loss: 1.414 | Test Acc: 51.07% +2025-03-09 19:52:27,269 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.405 | Acc: 50.00% +2025-03-09 19:52:29,140 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.506 | Acc: 46.11% +2025-03-09 19:52:31,111 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.495 | Acc: 46.85% +2025-03-09 19:52:33,061 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.500 | Acc: 46.91% +2025-03-09 19:52:36,118 - train - INFO - Epoch: 9 | Test Loss: 1.540 | Test Acc: 46.20% +2025-03-09 19:52:36,307 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 1.710 | Acc: 42.97% +2025-03-09 19:52:38,571 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 1.515 | Acc: 47.08% +2025-03-09 19:52:40,615 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 1.506 | Acc: 47.12% +2025-03-09 19:52:42,631 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 1.498 | Acc: 47.72% +2025-03-09 19:52:45,697 - train - INFO - Epoch: 10 | Test Loss: 1.359 | Test Acc: 51.08% +2025-03-09 19:52:45,908 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 1.288 | Acc: 48.44% +2025-03-09 19:52:48,156 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 1.493 | Acc: 47.49% +2025-03-09 19:52:50,122 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 1.483 | Acc: 47.77% +2025-03-09 19:52:52,086 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 1.482 | Acc: 47.71% +2025-03-09 19:52:55,093 - train - INFO - Epoch: 11 | Test Loss: 1.425 
| Test Acc: 49.77% +2025-03-09 19:52:55,255 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 1.601 | Acc: 38.28% +2025-03-09 19:52:57,285 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 1.495 | Acc: 47.03% +2025-03-09 19:52:59,361 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 1.484 | Acc: 47.75% +2025-03-09 19:53:01,286 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 1.474 | Acc: 48.21% +2025-03-09 19:53:04,233 - train - INFO - Epoch: 12 | Test Loss: 1.395 | Test Acc: 50.04% +2025-03-09 19:53:13,654 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 1.349 | Acc: 54.69% +2025-03-09 19:53:16,010 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 1.476 | Acc: 47.81% +2025-03-09 19:53:18,014 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 1.459 | Acc: 48.68% +2025-03-09 19:53:19,910 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 1.463 | Acc: 48.69% +2025-03-09 19:53:22,970 - train - INFO - Epoch: 13 | Test Loss: 1.433 | Test Acc: 50.43% +2025-03-09 19:53:23,123 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 1.438 | Acc: 48.44% +2025-03-09 19:53:25,027 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 1.490 | Acc: 47.90% +2025-03-09 19:53:27,032 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 1.478 | Acc: 48.26% +2025-03-09 19:53:28,990 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 1.473 | Acc: 48.42% +2025-03-09 19:53:31,930 - train - INFO - Epoch: 14 | Test Loss: 1.419 | Test Acc: 50.80% +2025-03-09 19:53:32,132 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 1.496 | Acc: 49.22% +2025-03-09 19:53:34,072 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 1.468 | Acc: 49.19% +2025-03-09 19:53:36,080 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 1.473 | Acc: 49.07% +2025-03-09 19:53:38,079 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 1.468 | Acc: 49.20% +2025-03-09 19:53:41,161 - train - INFO - Epoch: 15 | Test Loss: 1.424 | Test Acc: 52.09% +2025-03-09 19:53:41,356 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 1.368 | Acc: 46.88% +2025-03-09 19:53:43,498 - train - INFO - Epoch: 
16 | Batch: 100 | Loss: 1.485 | Acc: 48.47% +2025-03-09 19:53:45,625 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 1.475 | Acc: 48.40% +2025-03-09 19:53:47,626 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 1.468 | Acc: 48.62% +2025-03-09 19:53:50,536 - train - INFO - Epoch: 16 | Test Loss: 1.368 | Test Acc: 54.06% +2025-03-09 19:53:58,981 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 1.322 | Acc: 48.44% +2025-03-09 19:54:01,050 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 1.453 | Acc: 50.21% +2025-03-09 19:54:03,003 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 1.445 | Acc: 50.24% +2025-03-09 19:54:05,016 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 1.442 | Acc: 50.14% +2025-03-09 19:54:08,407 - train - INFO - Epoch: 17 | Test Loss: 1.427 | Test Acc: 50.52% +2025-03-09 19:54:08,577 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 1.677 | Acc: 39.84% +2025-03-09 19:54:11,118 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 1.466 | Acc: 49.07% +2025-03-09 19:54:13,136 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 1.459 | Acc: 49.04% +2025-03-09 19:54:15,032 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 1.450 | Acc: 49.59% +2025-03-09 19:54:18,113 - train - INFO - Epoch: 18 | Test Loss: 1.461 | Test Acc: 51.56% +2025-03-09 19:54:18,278 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 1.473 | Acc: 53.91% +2025-03-09 19:54:20,263 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 1.457 | Acc: 49.16% +2025-03-09 19:54:22,414 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 1.433 | Acc: 50.05% +2025-03-09 19:54:24,518 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 1.427 | Acc: 50.53% +2025-03-09 19:54:27,682 - train - INFO - Epoch: 19 | Test Loss: 1.490 | Test Acc: 53.65% +2025-03-09 19:54:27,849 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 1.817 | Acc: 46.09% +2025-03-09 19:54:29,974 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 1.438 | Acc: 50.66% +2025-03-09 19:54:32,009 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 1.428 | Acc: 50.77% +2025-03-09 
19:54:34,136 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 1.427 | Acc: 50.63% +2025-03-09 19:54:37,134 - train - INFO - Epoch: 20 | Test Loss: 1.343 | Test Acc: 54.50% +2025-03-09 19:54:45,895 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 1.165 | Acc: 61.72% +2025-03-09 19:54:48,130 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 1.424 | Acc: 49.71% +2025-03-09 19:54:50,095 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 1.431 | Acc: 50.06% +2025-03-09 19:54:52,237 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 1.431 | Acc: 50.08% +2025-03-09 19:54:55,342 - train - INFO - Epoch: 21 | Test Loss: 1.527 | Test Acc: 53.13% +2025-03-09 19:54:55,542 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 1.576 | Acc: 50.78% +2025-03-09 19:54:57,589 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 1.439 | Acc: 50.59% +2025-03-09 19:54:59,618 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 1.443 | Acc: 50.26% +2025-03-09 19:55:01,786 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 1.447 | Acc: 50.06% +2025-03-09 19:55:04,883 - train - INFO - Epoch: 22 | Test Loss: 1.385 | Test Acc: 52.66% +2025-03-09 19:55:05,110 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 1.361 | Acc: 49.22% +2025-03-09 19:55:07,153 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 1.446 | Acc: 48.96% +2025-03-09 19:55:09,130 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 1.446 | Acc: 49.48% +2025-03-09 19:55:11,103 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 1.436 | Acc: 50.26% +2025-03-09 19:55:14,018 - train - INFO - Epoch: 23 | Test Loss: 1.312 | Test Acc: 54.84% +2025-03-09 19:55:14,202 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 1.346 | Acc: 50.78% +2025-03-09 19:55:16,194 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 1.455 | Acc: 48.91% +2025-03-09 19:55:18,163 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 1.440 | Acc: 49.81% +2025-03-09 19:55:20,197 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 1.437 | Acc: 50.04% +2025-03-09 19:55:23,241 - train - INFO - Epoch: 24 | Test Loss: 1.458 | Test 
Acc: 51.68% +2025-03-09 19:55:32,077 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 1.432 | Acc: 54.69% +2025-03-09 19:55:34,182 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 1.410 | Acc: 51.34% +2025-03-09 19:55:36,201 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 1.422 | Acc: 51.18% +2025-03-09 19:55:38,082 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 1.414 | Acc: 51.21% +2025-03-09 19:55:41,003 - train - INFO - Epoch: 25 | Test Loss: 1.407 | Test Acc: 53.10% +2025-03-09 19:55:41,169 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 1.408 | Acc: 53.91% +2025-03-09 19:55:43,071 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 1.397 | Acc: 51.52% +2025-03-09 19:55:45,032 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 1.416 | Acc: 51.12% +2025-03-09 19:55:47,033 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 1.431 | Acc: 50.55% +2025-03-09 19:55:50,303 - train - INFO - Epoch: 26 | Test Loss: 1.337 | Test Acc: 53.87% +2025-03-09 19:55:50,467 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 1.436 | Acc: 52.34% +2025-03-09 19:55:52,603 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 1.400 | Acc: 51.96% +2025-03-09 19:55:54,700 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 1.415 | Acc: 51.58% +2025-03-09 19:55:56,931 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 1.403 | Acc: 51.88% +2025-03-09 19:56:00,315 - train - INFO - Epoch: 27 | Test Loss: 1.284 | Test Acc: 57.01% +2025-03-09 19:56:00,490 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 1.196 | Acc: 53.91% +2025-03-09 19:56:02,614 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 1.402 | Acc: 51.76% +2025-03-09 19:56:04,933 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 1.408 | Acc: 51.64% +2025-03-09 19:56:06,826 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 1.415 | Acc: 51.37% +2025-03-09 19:56:09,723 - train - INFO - Epoch: 28 | Test Loss: 1.512 | Test Acc: 49.97% +2025-03-09 19:56:18,095 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 1.663 | Acc: 49.22% +2025-03-09 19:56:20,071 - train - INFO - Epoch: 29 | 
Batch: 100 | Loss: 1.450 | Acc: 49.74% +2025-03-09 19:56:22,046 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 1.418 | Acc: 51.07% +2025-03-09 19:56:23,941 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 1.418 | Acc: 51.21% +2025-03-09 19:56:27,201 - train - INFO - Epoch: 29 | Test Loss: 1.399 | Test Acc: 50.57% +2025-03-09 19:56:27,418 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 1.487 | Acc: 50.78% +2025-03-09 19:56:29,631 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 1.407 | Acc: 52.09% +2025-03-09 19:56:31,749 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 1.405 | Acc: 52.03% +2025-03-09 19:56:34,079 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 1.409 | Acc: 51.79% +2025-03-09 19:56:37,224 - train - INFO - Epoch: 30 | Test Loss: 1.353 | Test Acc: 51.54% +2025-03-09 19:56:37,407 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 1.319 | Acc: 50.78% +2025-03-09 19:56:39,290 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 1.408 | Acc: 52.58% +2025-03-09 19:56:41,278 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 1.403 | Acc: 52.27% +2025-03-09 19:56:43,239 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 1.405 | Acc: 52.06% +2025-03-09 19:56:46,411 - train - INFO - Epoch: 31 | Test Loss: 1.437 | Test Acc: 50.89% +2025-03-09 19:56:46,625 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 1.625 | Acc: 38.28% +2025-03-09 19:56:48,617 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 1.408 | Acc: 52.27% +2025-03-09 19:56:50,612 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 1.404 | Acc: 52.34% +2025-03-09 19:56:52,563 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 1.419 | Acc: 51.68% +2025-03-09 19:56:55,759 - train - INFO - Epoch: 32 | Test Loss: 1.462 | Test Acc: 49.63% +2025-03-09 19:57:04,788 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 1.380 | Acc: 50.78% +2025-03-09 19:57:06,775 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 1.404 | Acc: 52.44% +2025-03-09 19:57:08,717 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 1.401 | Acc: 52.40% +2025-03-09 19:57:10,621 - 
train - INFO - Epoch: 33 | Batch: 300 | Loss: 1.402 | Acc: 52.39% +2025-03-09 19:57:13,611 - train - INFO - Epoch: 33 | Test Loss: 1.429 | Test Acc: 50.51% +2025-03-09 19:57:13,780 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 1.457 | Acc: 45.31% +2025-03-09 19:57:15,716 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 1.382 | Acc: 52.42% +2025-03-09 19:57:17,568 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 1.369 | Acc: 53.09% +2025-03-09 19:57:19,467 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 1.385 | Acc: 52.48% +2025-03-09 19:57:22,494 - train - INFO - Epoch: 34 | Test Loss: 1.269 | Test Acc: 58.16% +2025-03-09 19:57:22,652 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 1.363 | Acc: 50.00% +2025-03-09 19:57:24,637 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 1.370 | Acc: 53.33% +2025-03-09 19:57:26,607 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 1.357 | Acc: 53.85% +2025-03-09 19:57:28,609 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 1.376 | Acc: 53.20% +2025-03-09 19:57:31,872 - train - INFO - Epoch: 35 | Test Loss: 1.374 | Test Acc: 52.82% +2025-03-09 19:57:32,038 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 1.409 | Acc: 48.44% +2025-03-09 19:57:34,309 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 1.380 | Acc: 53.10% +2025-03-09 19:57:36,495 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 1.412 | Acc: 52.32% +2025-03-09 19:57:38,858 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 1.407 | Acc: 52.43% +2025-03-09 19:57:41,965 - train - INFO - Epoch: 36 | Test Loss: 1.315 | Test Acc: 53.89% +2025-03-09 19:57:50,440 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 1.449 | Acc: 54.69% +2025-03-09 19:57:52,335 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 1.406 | Acc: 52.27% +2025-03-09 19:57:54,375 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 1.402 | Acc: 52.22% +2025-03-09 19:57:56,397 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 1.402 | Acc: 51.95% +2025-03-09 19:57:59,642 - train - INFO - Epoch: 37 | Test Loss: 1.317 | Test Acc: 54.21% 
+2025-03-09 19:57:59,815 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 1.396 | Acc: 53.91% +2025-03-09 19:58:01,888 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 1.402 | Acc: 52.91% +2025-03-09 19:58:04,248 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 1.390 | Acc: 53.00% +2025-03-09 19:58:06,220 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 1.384 | Acc: 52.99% +2025-03-09 19:58:09,163 - train - INFO - Epoch: 38 | Test Loss: 1.377 | Test Acc: 53.56% +2025-03-09 19:58:09,344 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 1.318 | Acc: 56.25% +2025-03-09 19:58:11,213 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 1.357 | Acc: 53.60% +2025-03-09 19:58:13,162 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 1.359 | Acc: 53.66% +2025-03-09 19:58:15,049 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 1.365 | Acc: 53.45% +2025-03-09 19:58:17,934 - train - INFO - Epoch: 39 | Test Loss: 1.387 | Test Acc: 53.77% +2025-03-09 19:58:18,107 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 1.412 | Acc: 48.44% +2025-03-09 19:58:20,130 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 1.366 | Acc: 53.23% +2025-03-09 19:58:22,068 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 1.351 | Acc: 53.61% +2025-03-09 19:58:24,230 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 1.354 | Acc: 53.64% +2025-03-09 19:58:27,405 - train - INFO - Epoch: 40 | Test Loss: 1.372 | Test Acc: 53.59% +2025-03-09 19:58:36,177 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 1.538 | Acc: 44.53% +2025-03-09 19:58:38,243 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 1.372 | Acc: 53.30% +2025-03-09 19:58:40,234 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 1.364 | Acc: 53.72% +2025-03-09 19:58:42,245 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 1.369 | Acc: 53.57% +2025-03-09 19:58:45,634 - train - INFO - Epoch: 41 | Test Loss: 1.296 | Test Acc: 54.74% +2025-03-09 19:58:45,798 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 1.475 | Acc: 44.53% +2025-03-09 19:58:47,867 - train - INFO - Epoch: 42 | Batch: 100 | 
Loss: 1.368 | Acc: 52.49% +2025-03-09 19:58:50,126 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 1.368 | Acc: 52.68% +2025-03-09 19:58:52,366 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 1.377 | Acc: 52.70% +2025-03-09 19:58:55,621 - train - INFO - Epoch: 42 | Test Loss: 1.309 | Test Acc: 58.19% +2025-03-09 19:58:55,774 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 1.291 | Acc: 61.72% +2025-03-09 19:58:57,733 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 1.340 | Acc: 54.46% +2025-03-09 19:58:59,676 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 1.329 | Acc: 54.83% +2025-03-09 19:59:01,636 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 1.331 | Acc: 55.12% +2025-03-09 19:59:04,691 - train - INFO - Epoch: 43 | Test Loss: 1.373 | Test Acc: 53.69% +2025-03-09 19:59:04,836 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 1.310 | Acc: 52.34% +2025-03-09 19:59:06,796 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 1.373 | Acc: 53.46% +2025-03-09 19:59:08,711 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 1.368 | Acc: 53.84% +2025-03-09 19:59:10,613 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 1.362 | Acc: 53.90% +2025-03-09 19:59:13,399 - train - INFO - Epoch: 44 | Test Loss: 1.378 | Test Acc: 53.67% +2025-03-09 19:59:21,620 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 1.383 | Acc: 58.59% +2025-03-09 19:59:23,458 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 1.325 | Acc: 55.49% +2025-03-09 19:59:25,416 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 1.345 | Acc: 54.83% +2025-03-09 19:59:27,390 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 1.361 | Acc: 54.17% +2025-03-09 19:59:30,307 - train - INFO - Epoch: 45 | Test Loss: 1.345 | Test Acc: 54.53% +2025-03-09 19:59:30,478 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 1.303 | Acc: 55.47% +2025-03-09 19:59:32,427 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 1.342 | Acc: 54.19% +2025-03-09 19:59:34,336 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 1.337 | Acc: 54.64% +2025-03-09 19:59:36,228 - train - INFO 
- Epoch: 46 | Batch: 300 | Loss: 1.351 | Acc: 54.22% +2025-03-09 19:59:39,262 - train - INFO - Epoch: 46 | Test Loss: 1.345 | Test Acc: 56.16% +2025-03-09 19:59:39,452 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 1.489 | Acc: 51.56% +2025-03-09 19:59:41,279 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 1.328 | Acc: 54.90% +2025-03-09 19:59:43,204 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 1.327 | Acc: 55.05% +2025-03-09 19:59:45,120 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 1.333 | Acc: 55.00% +2025-03-09 19:59:48,254 - train - INFO - Epoch: 47 | Test Loss: 1.314 | Test Acc: 56.75% +2025-03-09 19:59:48,434 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 1.208 | Acc: 59.38% +2025-03-09 19:59:50,454 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 1.317 | Acc: 55.10% +2025-03-09 19:59:52,374 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 1.312 | Acc: 55.74% +2025-03-09 19:59:54,256 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 1.309 | Acc: 55.89% +2025-03-09 19:59:57,327 - train - INFO - Epoch: 48 | Test Loss: 1.271 | Test Acc: 58.09% +2025-03-09 20:00:06,506 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 1.332 | Acc: 57.81% +2025-03-09 20:00:08,781 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 1.298 | Acc: 55.98% +2025-03-09 20:00:11,349 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 1.302 | Acc: 56.07% +2025-03-09 20:00:13,500 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 1.307 | Acc: 55.87% +2025-03-09 20:00:16,544 - train - INFO - Epoch: 49 | Test Loss: 1.286 | Test Acc: 54.88% +2025-03-09 20:00:16,712 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 1.354 | Acc: 55.47% +2025-03-09 20:00:18,651 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 1.353 | Acc: 53.57% +2025-03-09 20:00:20,584 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 1.335 | Acc: 54.65% +2025-03-09 20:00:22,491 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 1.333 | Acc: 54.62% +2025-03-09 20:00:25,512 - train - INFO - Epoch: 50 | Test Loss: 1.264 | Test Acc: 56.42% +2025-03-09 
20:00:25,692 - train - INFO - Epoch: 51 | Batch: 0 | Loss: 1.259 | Acc: 57.81% +2025-03-09 20:00:27,737 - train - INFO - Epoch: 51 | Batch: 100 | Loss: 1.293 | Acc: 56.82% +2025-03-09 20:00:29,766 - train - INFO - Epoch: 51 | Batch: 200 | Loss: 1.300 | Acc: 56.34% +2025-03-09 20:00:31,756 - train - INFO - Epoch: 51 | Batch: 300 | Loss: 1.297 | Acc: 56.14% +2025-03-09 20:00:34,929 - train - INFO - Epoch: 51 | Test Loss: 1.223 | Test Acc: 57.27% +2025-03-09 20:00:35,118 - train - INFO - Epoch: 52 | Batch: 0 | Loss: 1.323 | Acc: 55.47% +2025-03-09 20:00:37,282 - train - INFO - Epoch: 52 | Batch: 100 | Loss: 1.299 | Acc: 55.91% +2025-03-09 20:00:39,484 - train - INFO - Epoch: 52 | Batch: 200 | Loss: 1.289 | Acc: 56.01% +2025-03-09 20:00:41,515 - train - INFO - Epoch: 52 | Batch: 300 | Loss: 1.293 | Acc: 55.98% +2025-03-09 20:00:44,482 - train - INFO - Epoch: 52 | Test Loss: 1.310 | Test Acc: 55.00% +2025-03-09 20:00:52,924 - train - INFO - Epoch: 53 | Batch: 0 | Loss: 1.438 | Acc: 50.00% +2025-03-09 20:00:55,121 - train - INFO - Epoch: 53 | Batch: 100 | Loss: 1.297 | Acc: 56.12% +2025-03-09 20:00:57,170 - train - INFO - Epoch: 53 | Batch: 200 | Loss: 1.306 | Acc: 55.61% +2025-03-09 20:00:59,086 - train - INFO - Epoch: 53 | Batch: 300 | Loss: 1.310 | Acc: 55.32% +2025-03-09 20:01:01,976 - train - INFO - Epoch: 53 | Test Loss: 1.293 | Test Acc: 55.28% +2025-03-09 20:01:02,134 - train - INFO - Epoch: 54 | Batch: 0 | Loss: 1.311 | Acc: 53.91% +2025-03-09 20:01:04,118 - train - INFO - Epoch: 54 | Batch: 100 | Loss: 1.335 | Acc: 54.83% +2025-03-09 20:01:06,123 - train - INFO - Epoch: 54 | Batch: 200 | Loss: 1.312 | Acc: 55.50% +2025-03-09 20:01:08,143 - train - INFO - Epoch: 54 | Batch: 300 | Loss: 1.296 | Acc: 55.97% +2025-03-09 20:01:11,377 - train - INFO - Epoch: 54 | Test Loss: 1.293 | Test Acc: 57.74% +2025-03-09 20:01:11,543 - train - INFO - Epoch: 55 | Batch: 0 | Loss: 1.194 | Acc: 58.59% +2025-03-09 20:01:13,621 - train - INFO - Epoch: 55 | Batch: 100 | Loss: 1.266 | 
Acc: 56.81% +2025-03-09 20:01:15,599 - train - INFO - Epoch: 55 | Batch: 200 | Loss: 1.267 | Acc: 57.00% +2025-03-09 20:01:17,646 - train - INFO - Epoch: 55 | Batch: 300 | Loss: 1.297 | Acc: 56.35% +2025-03-09 20:01:20,601 - train - INFO - Epoch: 55 | Test Loss: 1.288 | Test Acc: 56.27% +2025-03-09 20:01:20,778 - train - INFO - Epoch: 56 | Batch: 0 | Loss: 1.295 | Acc: 54.69% +2025-03-09 20:01:22,676 - train - INFO - Epoch: 56 | Batch: 100 | Loss: 1.306 | Acc: 55.91% +2025-03-09 20:01:24,633 - train - INFO - Epoch: 56 | Batch: 200 | Loss: 1.289 | Acc: 56.60% +2025-03-09 20:01:26,625 - train - INFO - Epoch: 56 | Batch: 300 | Loss: 1.299 | Acc: 56.09% +2025-03-09 20:01:29,991 - train - INFO - Epoch: 56 | Test Loss: 1.264 | Test Acc: 56.79% +2025-03-09 20:01:38,741 - train - INFO - Epoch: 57 | Batch: 0 | Loss: 1.413 | Acc: 53.12% +2025-03-09 20:01:40,895 - train - INFO - Epoch: 57 | Batch: 100 | Loss: 1.295 | Acc: 56.21% +2025-03-09 20:01:43,111 - train - INFO - Epoch: 57 | Batch: 200 | Loss: 1.287 | Acc: 56.34% +2025-03-09 20:01:45,114 - train - INFO - Epoch: 57 | Batch: 300 | Loss: 1.276 | Acc: 56.62% +2025-03-09 20:01:48,333 - train - INFO - Epoch: 57 | Test Loss: 1.283 | Test Acc: 56.47% +2025-03-09 20:01:48,483 - train - INFO - Epoch: 58 | Batch: 0 | Loss: 1.203 | Acc: 60.94% +2025-03-09 20:01:50,350 - train - INFO - Epoch: 58 | Batch: 100 | Loss: 1.265 | Acc: 57.16% +2025-03-09 20:01:52,319 - train - INFO - Epoch: 58 | Batch: 200 | Loss: 1.278 | Acc: 56.86% +2025-03-09 20:01:54,253 - train - INFO - Epoch: 58 | Batch: 300 | Loss: 1.278 | Acc: 56.76% +2025-03-09 20:01:57,139 - train - INFO - Epoch: 58 | Test Loss: 1.250 | Test Acc: 55.98% +2025-03-09 20:01:57,294 - train - INFO - Epoch: 59 | Batch: 0 | Loss: 1.299 | Acc: 54.69% +2025-03-09 20:01:59,177 - train - INFO - Epoch: 59 | Batch: 100 | Loss: 1.275 | Acc: 56.53% +2025-03-09 20:02:01,061 - train - INFO - Epoch: 59 | Batch: 200 | Loss: 1.264 | Acc: 57.11% +2025-03-09 20:02:03,013 - train - INFO - Epoch: 59 | 
Batch: 300 | Loss: 1.270 | Acc: 57.01% +2025-03-09 20:02:05,873 - train - INFO - Epoch: 59 | Test Loss: 1.175 | Test Acc: 60.14% +2025-03-09 20:02:06,084 - train - INFO - Epoch: 60 | Batch: 0 | Loss: 1.176 | Acc: 60.16% +2025-03-09 20:02:08,156 - train - INFO - Epoch: 60 | Batch: 100 | Loss: 1.276 | Acc: 56.69% +2025-03-09 20:02:10,235 - train - INFO - Epoch: 60 | Batch: 200 | Loss: 1.261 | Acc: 57.39% +2025-03-09 20:02:12,262 - train - INFO - Epoch: 60 | Batch: 300 | Loss: 1.272 | Acc: 56.96% +2025-03-09 20:02:15,547 - train - INFO - Epoch: 60 | Test Loss: 1.258 | Test Acc: 57.63% +2025-03-09 20:02:24,176 - train - INFO - Epoch: 61 | Batch: 0 | Loss: 1.181 | Acc: 63.28% +2025-03-09 20:02:26,274 - train - INFO - Epoch: 61 | Batch: 100 | Loss: 1.283 | Acc: 56.98% +2025-03-09 20:02:28,240 - train - INFO - Epoch: 61 | Batch: 200 | Loss: 1.267 | Acc: 57.38% +2025-03-09 20:02:30,146 - train - INFO - Epoch: 61 | Batch: 300 | Loss: 1.266 | Acc: 57.71% +2025-03-09 20:02:33,184 - train - INFO - Epoch: 61 | Test Loss: 1.185 | Test Acc: 57.88% +2025-03-09 20:02:33,361 - train - INFO - Epoch: 62 | Batch: 0 | Loss: 1.181 | Acc: 55.47% +2025-03-09 20:02:35,405 - train - INFO - Epoch: 62 | Batch: 100 | Loss: 1.237 | Acc: 58.36% +2025-03-09 20:02:37,431 - train - INFO - Epoch: 62 | Batch: 200 | Loss: 1.222 | Acc: 58.83% +2025-03-09 20:02:39,492 - train - INFO - Epoch: 62 | Batch: 300 | Loss: 1.225 | Acc: 58.54% +2025-03-09 20:02:42,962 - train - INFO - Epoch: 62 | Test Loss: 1.201 | Test Acc: 59.17% +2025-03-09 20:02:43,194 - train - INFO - Epoch: 63 | Batch: 0 | Loss: 1.167 | Acc: 57.03% +2025-03-09 20:02:45,381 - train - INFO - Epoch: 63 | Batch: 100 | Loss: 1.255 | Acc: 57.75% +2025-03-09 20:02:47,427 - train - INFO - Epoch: 63 | Batch: 200 | Loss: 1.251 | Acc: 57.57% +2025-03-09 20:02:49,782 - train - INFO - Epoch: 63 | Batch: 300 | Loss: 1.247 | Acc: 57.64% +2025-03-09 20:02:52,930 - train - INFO - Epoch: 63 | Test Loss: 1.212 | Test Acc: 59.46% +2025-03-09 20:02:53,093 - 
train - INFO - Epoch: 64 | Batch: 0 | Loss: 1.213 | Acc: 60.16% +2025-03-09 20:02:54,979 - train - INFO - Epoch: 64 | Batch: 100 | Loss: 1.238 | Acc: 58.62% +2025-03-09 20:02:56,924 - train - INFO - Epoch: 64 | Batch: 200 | Loss: 1.231 | Acc: 58.56% +2025-03-09 20:02:59,004 - train - INFO - Epoch: 64 | Batch: 300 | Loss: 1.235 | Acc: 58.42% +2025-03-09 20:03:02,027 - train - INFO - Epoch: 64 | Test Loss: 1.301 | Test Acc: 56.76% +2025-03-09 20:03:11,492 - train - INFO - Epoch: 65 | Batch: 0 | Loss: 1.440 | Acc: 59.38% +2025-03-09 20:03:13,589 - train - INFO - Epoch: 65 | Batch: 100 | Loss: 1.219 | Acc: 57.99% +2025-03-09 20:03:15,758 - train - INFO - Epoch: 65 | Batch: 200 | Loss: 1.214 | Acc: 58.48% +2025-03-09 20:03:17,811 - train - INFO - Epoch: 65 | Batch: 300 | Loss: 1.220 | Acc: 58.31% +2025-03-09 20:03:20,962 - train - INFO - Epoch: 65 | Test Loss: 1.133 | Test Acc: 62.39% +2025-03-09 20:03:21,151 - train - INFO - Epoch: 66 | Batch: 0 | Loss: 1.090 | Acc: 64.84% +2025-03-09 20:03:23,279 - train - INFO - Epoch: 66 | Batch: 100 | Loss: 1.212 | Acc: 58.54% +2025-03-09 20:03:25,314 - train - INFO - Epoch: 66 | Batch: 200 | Loss: 1.221 | Acc: 58.45% +2025-03-09 20:03:27,262 - train - INFO - Epoch: 66 | Batch: 300 | Loss: 1.220 | Acc: 58.67% +2025-03-09 20:03:30,250 - train - INFO - Epoch: 66 | Test Loss: 1.128 | Test Acc: 62.20% +2025-03-09 20:03:30,427 - train - INFO - Epoch: 67 | Batch: 0 | Loss: 1.194 | Acc: 62.50% +2025-03-09 20:03:32,409 - train - INFO - Epoch: 67 | Batch: 100 | Loss: 1.205 | Acc: 59.23% +2025-03-09 20:03:34,525 - train - INFO - Epoch: 67 | Batch: 200 | Loss: 1.219 | Acc: 58.96% +2025-03-09 20:03:36,669 - train - INFO - Epoch: 67 | Batch: 300 | Loss: 1.224 | Acc: 58.88% +2025-03-09 20:03:40,213 - train - INFO - Epoch: 67 | Test Loss: 1.198 | Test Acc: 60.10% +2025-03-09 20:03:40,438 - train - INFO - Epoch: 68 | Batch: 0 | Loss: 1.071 | Acc: 59.38% +2025-03-09 20:03:42,654 - train - INFO - Epoch: 68 | Batch: 100 | Loss: 1.195 | Acc: 59.75% 
+2025-03-09 20:03:44,633 - train - INFO - Epoch: 68 | Batch: 200 | Loss: 1.193 | Acc: 59.75% +2025-03-09 20:03:46,611 - train - INFO - Epoch: 68 | Batch: 300 | Loss: 1.193 | Acc: 59.72% +2025-03-09 20:03:49,635 - train - INFO - Epoch: 68 | Test Loss: 1.170 | Test Acc: 60.74% +2025-03-09 20:03:58,030 - train - INFO - Epoch: 69 | Batch: 0 | Loss: 1.065 | Acc: 57.81% +2025-03-09 20:04:00,061 - train - INFO - Epoch: 69 | Batch: 100 | Loss: 1.217 | Acc: 58.83% +2025-03-09 20:04:01,977 - train - INFO - Epoch: 69 | Batch: 200 | Loss: 1.226 | Acc: 58.59% +2025-03-09 20:04:03,999 - train - INFO - Epoch: 69 | Batch: 300 | Loss: 1.229 | Acc: 58.67% +2025-03-09 20:04:07,214 - train - INFO - Epoch: 69 | Test Loss: 1.170 | Test Acc: 61.05% +2025-03-09 20:04:07,420 - train - INFO - Epoch: 70 | Batch: 0 | Loss: 1.327 | Acc: 55.47% +2025-03-09 20:04:09,428 - train - INFO - Epoch: 70 | Batch: 100 | Loss: 1.212 | Acc: 59.03% +2025-03-09 20:04:11,490 - train - INFO - Epoch: 70 | Batch: 200 | Loss: 1.210 | Acc: 59.22% +2025-03-09 20:04:13,652 - train - INFO - Epoch: 70 | Batch: 300 | Loss: 1.201 | Acc: 59.55% +2025-03-09 20:04:16,600 - train - INFO - Epoch: 70 | Test Loss: 1.083 | Test Acc: 63.70% +2025-03-09 20:04:16,780 - train - INFO - Epoch: 71 | Batch: 0 | Loss: 1.090 | Acc: 64.84% +2025-03-09 20:04:18,739 - train - INFO - Epoch: 71 | Batch: 100 | Loss: 1.180 | Acc: 60.05% +2025-03-09 20:04:20,670 - train - INFO - Epoch: 71 | Batch: 200 | Loss: 1.185 | Acc: 59.98% +2025-03-09 20:04:22,543 - train - INFO - Epoch: 71 | Batch: 300 | Loss: 1.183 | Acc: 60.13% +2025-03-09 20:04:25,494 - train - INFO - Epoch: 71 | Test Loss: 1.122 | Test Acc: 63.08% +2025-03-09 20:04:25,672 - train - INFO - Epoch: 72 | Batch: 0 | Loss: 1.234 | Acc: 57.81% +2025-03-09 20:04:27,737 - train - INFO - Epoch: 72 | Batch: 100 | Loss: 1.229 | Acc: 58.80% +2025-03-09 20:04:29,724 - train - INFO - Epoch: 72 | Batch: 200 | Loss: 1.214 | Acc: 59.19% +2025-03-09 20:04:31,711 - train - INFO - Epoch: 72 | Batch: 300 | 
Loss: 1.205 | Acc: 59.47% +2025-03-09 20:04:35,205 - train - INFO - Epoch: 72 | Test Loss: 1.286 | Test Acc: 58.20% +2025-03-09 20:04:44,301 - train - INFO - Epoch: 73 | Batch: 0 | Loss: 1.294 | Acc: 57.81% +2025-03-09 20:04:46,184 - train - INFO - Epoch: 73 | Batch: 100 | Loss: 1.195 | Acc: 59.38% +2025-03-09 20:04:48,292 - train - INFO - Epoch: 73 | Batch: 200 | Loss: 1.186 | Acc: 59.92% +2025-03-09 20:04:50,218 - train - INFO - Epoch: 73 | Batch: 300 | Loss: 1.186 | Acc: 59.97% +2025-03-09 20:04:53,168 - train - INFO - Epoch: 73 | Test Loss: 1.130 | Test Acc: 62.16% +2025-03-09 20:04:53,370 - train - INFO - Epoch: 74 | Batch: 0 | Loss: 1.022 | Acc: 66.41% +2025-03-09 20:04:55,408 - train - INFO - Epoch: 74 | Batch: 100 | Loss: 1.219 | Acc: 58.83% +2025-03-09 20:04:57,367 - train - INFO - Epoch: 74 | Batch: 200 | Loss: 1.196 | Acc: 59.82% +2025-03-09 20:04:59,411 - train - INFO - Epoch: 74 | Batch: 300 | Loss: 1.184 | Acc: 60.10% +2025-03-09 20:05:02,780 - train - INFO - Epoch: 74 | Test Loss: 1.169 | Test Acc: 59.53% +2025-03-09 20:05:02,956 - train - INFO - Epoch: 75 | Batch: 0 | Loss: 1.042 | Acc: 64.06% +2025-03-09 20:05:04,928 - train - INFO - Epoch: 75 | Batch: 100 | Loss: 1.179 | Acc: 59.97% +2025-03-09 20:05:07,026 - train - INFO - Epoch: 75 | Batch: 200 | Loss: 1.164 | Acc: 60.59% +2025-03-09 20:05:09,052 - train - INFO - Epoch: 75 | Batch: 300 | Loss: 1.168 | Acc: 60.56% +2025-03-09 20:05:12,486 - train - INFO - Epoch: 75 | Test Loss: 1.086 | Test Acc: 64.61% +2025-03-09 20:05:12,668 - train - INFO - Epoch: 76 | Batch: 0 | Loss: 1.005 | Acc: 67.97% +2025-03-09 20:05:14,872 - train - INFO - Epoch: 76 | Batch: 100 | Loss: 1.188 | Acc: 59.54% +2025-03-09 20:05:17,020 - train - INFO - Epoch: 76 | Batch: 200 | Loss: 1.168 | Acc: 60.31% +2025-03-09 20:05:18,884 - train - INFO - Epoch: 76 | Batch: 300 | Loss: 1.171 | Acc: 60.22% +2025-03-09 20:05:21,857 - train - INFO - Epoch: 76 | Test Loss: 1.077 | Test Acc: 64.56% +2025-03-09 20:05:30,426 - train - INFO - 
Epoch: 77 | Batch: 0 | Loss: 0.993 | Acc: 67.97% +2025-03-09 20:05:32,481 - train - INFO - Epoch: 77 | Batch: 100 | Loss: 1.187 | Acc: 59.47% +2025-03-09 20:05:34,515 - train - INFO - Epoch: 77 | Batch: 200 | Loss: 1.162 | Acc: 60.40% +2025-03-09 20:05:36,603 - train - INFO - Epoch: 77 | Batch: 300 | Loss: 1.156 | Acc: 60.49% +2025-03-09 20:05:39,781 - train - INFO - Epoch: 77 | Test Loss: 1.148 | Test Acc: 61.77% +2025-03-09 20:05:39,985 - train - INFO - Epoch: 78 | Batch: 0 | Loss: 1.126 | Acc: 60.16% +2025-03-09 20:05:41,942 - train - INFO - Epoch: 78 | Batch: 100 | Loss: 1.135 | Acc: 61.81% +2025-03-09 20:05:43,948 - train - INFO - Epoch: 78 | Batch: 200 | Loss: 1.130 | Acc: 61.72% +2025-03-09 20:05:46,120 - train - INFO - Epoch: 78 | Batch: 300 | Loss: 1.144 | Acc: 61.18% +2025-03-09 20:05:49,313 - train - INFO - Epoch: 78 | Test Loss: 1.187 | Test Acc: 60.45% +2025-03-09 20:05:49,505 - train - INFO - Epoch: 79 | Batch: 0 | Loss: 1.205 | Acc: 60.94% +2025-03-09 20:05:51,476 - train - INFO - Epoch: 79 | Batch: 100 | Loss: 1.161 | Acc: 60.95% +2025-03-09 20:05:53,457 - train - INFO - Epoch: 79 | Batch: 200 | Loss: 1.157 | Acc: 61.07% +2025-03-09 20:05:55,418 - train - INFO - Epoch: 79 | Batch: 300 | Loss: 1.156 | Acc: 61.12% +2025-03-09 20:05:58,509 - train - INFO - Epoch: 79 | Test Loss: 1.056 | Test Acc: 63.68% +2025-03-09 20:05:58,686 - train - INFO - Epoch: 80 | Batch: 0 | Loss: 1.065 | Acc: 64.84% +2025-03-09 20:06:00,797 - train - INFO - Epoch: 80 | Batch: 100 | Loss: 1.121 | Acc: 62.57% +2025-03-09 20:06:02,773 - train - INFO - Epoch: 80 | Batch: 200 | Loss: 1.133 | Acc: 61.80% +2025-03-09 20:06:04,652 - train - INFO - Epoch: 80 | Batch: 300 | Loss: 1.145 | Acc: 61.24% +2025-03-09 20:06:07,896 - train - INFO - Epoch: 80 | Test Loss: 1.118 | Test Acc: 62.60% +2025-03-09 20:06:17,187 - train - INFO - Epoch: 81 | Batch: 0 | Loss: 0.981 | Acc: 64.06% +2025-03-09 20:06:19,096 - train - INFO - Epoch: 81 | Batch: 100 | Loss: 1.167 | Acc: 61.12% +2025-03-09 
20:06:21,115 - train - INFO - Epoch: 81 | Batch: 200 | Loss: 1.154 | Acc: 61.34% +2025-03-09 20:06:23,095 - train - INFO - Epoch: 81 | Batch: 300 | Loss: 1.144 | Acc: 61.42% +2025-03-09 20:06:26,187 - train - INFO - Epoch: 81 | Test Loss: 1.173 | Test Acc: 60.44% +2025-03-09 20:06:26,358 - train - INFO - Epoch: 82 | Batch: 0 | Loss: 1.302 | Acc: 53.91% +2025-03-09 20:06:28,368 - train - INFO - Epoch: 82 | Batch: 100 | Loss: 1.138 | Acc: 61.95% +2025-03-09 20:06:30,300 - train - INFO - Epoch: 82 | Batch: 200 | Loss: 1.133 | Acc: 62.14% +2025-03-09 20:06:32,316 - train - INFO - Epoch: 82 | Batch: 300 | Loss: 1.134 | Acc: 62.13% +2025-03-09 20:06:35,449 - train - INFO - Epoch: 82 | Test Loss: 1.126 | Test Acc: 61.16% +2025-03-09 20:06:35,669 - train - INFO - Epoch: 83 | Batch: 0 | Loss: 1.332 | Acc: 53.12% +2025-03-09 20:06:37,637 - train - INFO - Epoch: 83 | Batch: 100 | Loss: 1.145 | Acc: 60.94% +2025-03-09 20:06:39,716 - train - INFO - Epoch: 83 | Batch: 200 | Loss: 1.138 | Acc: 61.66% +2025-03-09 20:06:41,791 - train - INFO - Epoch: 83 | Batch: 300 | Loss: 1.143 | Acc: 61.64% +2025-03-09 20:06:44,690 - train - INFO - Epoch: 83 | Test Loss: 1.201 | Test Acc: 61.08% +2025-03-09 20:06:44,856 - train - INFO - Epoch: 84 | Batch: 0 | Loss: 1.367 | Acc: 61.72% +2025-03-09 20:06:46,837 - train - INFO - Epoch: 84 | Batch: 100 | Loss: 1.157 | Acc: 61.66% +2025-03-09 20:06:48,817 - train - INFO - Epoch: 84 | Batch: 200 | Loss: 1.129 | Acc: 62.19% +2025-03-09 20:06:50,922 - train - INFO - Epoch: 84 | Batch: 300 | Loss: 1.124 | Acc: 62.21% +2025-03-09 20:06:54,051 - train - INFO - Epoch: 84 | Test Loss: 1.030 | Test Acc: 65.83% +2025-03-09 20:07:03,364 - train - INFO - Epoch: 85 | Batch: 0 | Loss: 0.997 | Acc: 66.41% +2025-03-09 20:07:06,169 - train - INFO - Epoch: 85 | Batch: 100 | Loss: 1.102 | Acc: 62.93% +2025-03-09 20:07:08,295 - train - INFO - Epoch: 85 | Batch: 200 | Loss: 1.099 | Acc: 62.92% +2025-03-09 20:07:10,316 - train - INFO - Epoch: 85 | Batch: 300 | Loss: 1.100 
| Acc: 62.74% +2025-03-09 20:07:13,443 - train - INFO - Epoch: 85 | Test Loss: 0.993 | Test Acc: 66.24% +2025-03-09 20:07:13,613 - train - INFO - Epoch: 86 | Batch: 0 | Loss: 0.879 | Acc: 66.41% +2025-03-09 20:07:15,530 - train - INFO - Epoch: 86 | Batch: 100 | Loss: 1.070 | Acc: 63.68% +2025-03-09 20:07:17,620 - train - INFO - Epoch: 86 | Batch: 200 | Loss: 1.082 | Acc: 63.56% +2025-03-09 20:07:19,568 - train - INFO - Epoch: 86 | Batch: 300 | Loss: 1.095 | Acc: 63.20% +2025-03-09 20:07:22,857 - train - INFO - Epoch: 86 | Test Loss: 1.084 | Test Acc: 64.14% +2025-03-09 20:07:23,032 - train - INFO - Epoch: 87 | Batch: 0 | Loss: 1.100 | Acc: 59.38% +2025-03-09 20:07:25,120 - train - INFO - Epoch: 87 | Batch: 100 | Loss: 1.086 | Acc: 62.84% +2025-03-09 20:07:27,174 - train - INFO - Epoch: 87 | Batch: 200 | Loss: 1.096 | Acc: 62.75% +2025-03-09 20:07:29,361 - train - INFO - Epoch: 87 | Batch: 300 | Loss: 1.107 | Acc: 62.41% +2025-03-09 20:07:32,701 - train - INFO - Epoch: 87 | Test Loss: 1.017 | Test Acc: 66.01% +2025-03-09 20:07:32,864 - train - INFO - Epoch: 88 | Batch: 0 | Loss: 0.993 | Acc: 71.88% +2025-03-09 20:07:34,869 - train - INFO - Epoch: 88 | Batch: 100 | Loss: 1.064 | Acc: 64.05% +2025-03-09 20:07:36,860 - train - INFO - Epoch: 88 | Batch: 200 | Loss: 1.078 | Acc: 63.53% +2025-03-09 20:07:38,850 - train - INFO - Epoch: 88 | Batch: 300 | Loss: 1.093 | Acc: 63.05% +2025-03-09 20:07:41,861 - train - INFO - Epoch: 88 | Test Loss: 0.984 | Test Acc: 67.59% +2025-03-09 20:07:50,269 - train - INFO - Epoch: 89 | Batch: 0 | Loss: 1.033 | Acc: 64.06% +2025-03-09 20:07:52,348 - train - INFO - Epoch: 89 | Batch: 100 | Loss: 1.060 | Acc: 64.22% +2025-03-09 20:07:54,441 - train - INFO - Epoch: 89 | Batch: 200 | Loss: 1.060 | Acc: 64.06% +2025-03-09 20:07:56,483 - train - INFO - Epoch: 89 | Batch: 300 | Loss: 1.066 | Acc: 63.70% +2025-03-09 20:07:59,622 - train - INFO - Epoch: 89 | Test Loss: 1.052 | Test Acc: 65.01% +2025-03-09 20:07:59,816 - train - INFO - Epoch: 90 | 
Batch: 0 | Loss: 1.035 | Acc: 65.62% +2025-03-09 20:08:02,021 - train - INFO - Epoch: 90 | Batch: 100 | Loss: 1.072 | Acc: 64.50% +2025-03-09 20:08:04,109 - train - INFO - Epoch: 90 | Batch: 200 | Loss: 1.073 | Acc: 63.96% +2025-03-09 20:08:06,098 - train - INFO - Epoch: 90 | Batch: 300 | Loss: 1.079 | Acc: 63.72% +2025-03-09 20:08:09,147 - train - INFO - Epoch: 90 | Test Loss: 0.991 | Test Acc: 67.06% +2025-03-09 20:08:09,323 - train - INFO - Epoch: 91 | Batch: 0 | Loss: 1.019 | Acc: 66.41% +2025-03-09 20:08:11,288 - train - INFO - Epoch: 91 | Batch: 100 | Loss: 1.040 | Acc: 64.88% +2025-03-09 20:08:13,266 - train - INFO - Epoch: 91 | Batch: 200 | Loss: 1.043 | Acc: 64.61% +2025-03-09 20:08:15,248 - train - INFO - Epoch: 91 | Batch: 300 | Loss: 1.056 | Acc: 64.13% +2025-03-09 20:08:18,420 - train - INFO - Epoch: 91 | Test Loss: 1.099 | Test Acc: 63.08% +2025-03-09 20:08:18,624 - train - INFO - Epoch: 92 | Batch: 0 | Loss: 1.044 | Acc: 68.75% +2025-03-09 20:08:20,632 - train - INFO - Epoch: 92 | Batch: 100 | Loss: 1.055 | Acc: 64.34% +2025-03-09 20:08:22,645 - train - INFO - Epoch: 92 | Batch: 200 | Loss: 1.051 | Acc: 64.52% +2025-03-09 20:08:24,710 - train - INFO - Epoch: 92 | Batch: 300 | Loss: 1.063 | Acc: 63.84% +2025-03-09 20:08:27,778 - train - INFO - Epoch: 92 | Test Loss: 1.023 | Test Acc: 65.02% +2025-03-09 20:08:36,292 - train - INFO - Epoch: 93 | Batch: 0 | Loss: 1.010 | Acc: 64.06% +2025-03-09 20:08:38,381 - train - INFO - Epoch: 93 | Batch: 100 | Loss: 1.084 | Acc: 63.50% +2025-03-09 20:08:40,403 - train - INFO - Epoch: 93 | Batch: 200 | Loss: 1.055 | Acc: 64.34% +2025-03-09 20:08:42,384 - train - INFO - Epoch: 93 | Batch: 300 | Loss: 1.055 | Acc: 64.37% +2025-03-09 20:08:45,302 - train - INFO - Epoch: 93 | Test Loss: 1.040 | Test Acc: 65.43% +2025-03-09 20:08:45,476 - train - INFO - Epoch: 94 | Batch: 0 | Loss: 1.013 | Acc: 66.41% +2025-03-09 20:08:47,512 - train - INFO - Epoch: 94 | Batch: 100 | Loss: 1.048 | Acc: 64.67% +2025-03-09 20:08:49,355 - 
train - INFO - Epoch: 94 | Batch: 200 | Loss: 1.034 | Acc: 64.90% +2025-03-09 20:08:51,320 - train - INFO - Epoch: 94 | Batch: 300 | Loss: 1.032 | Acc: 65.03% +2025-03-09 20:08:54,352 - train - INFO - Epoch: 94 | Test Loss: 1.007 | Test Acc: 65.77% +2025-03-09 20:08:54,532 - train - INFO - Epoch: 95 | Batch: 0 | Loss: 0.963 | Acc: 68.75% +2025-03-09 20:08:56,486 - train - INFO - Epoch: 95 | Batch: 100 | Loss: 1.044 | Acc: 64.52% +2025-03-09 20:08:58,522 - train - INFO - Epoch: 95 | Batch: 200 | Loss: 1.059 | Acc: 64.16% +2025-03-09 20:09:00,641 - train - INFO - Epoch: 95 | Batch: 300 | Loss: 1.046 | Acc: 64.58% +2025-03-09 20:09:03,741 - train - INFO - Epoch: 95 | Test Loss: 1.032 | Test Acc: 65.96% +2025-03-09 20:09:03,950 - train - INFO - Epoch: 96 | Batch: 0 | Loss: 0.983 | Acc: 65.62% +2025-03-09 20:09:05,855 - train - INFO - Epoch: 96 | Batch: 100 | Loss: 1.032 | Acc: 64.93% +2025-03-09 20:09:07,774 - train - INFO - Epoch: 96 | Batch: 200 | Loss: 1.017 | Acc: 65.41% +2025-03-09 20:09:09,825 - train - INFO - Epoch: 96 | Batch: 300 | Loss: 1.017 | Acc: 65.49% +2025-03-09 20:09:12,898 - train - INFO - Epoch: 96 | Test Loss: 1.003 | Test Acc: 65.83% +2025-03-09 20:09:21,539 - train - INFO - Epoch: 97 | Batch: 0 | Loss: 0.992 | Acc: 66.41% +2025-03-09 20:09:23,513 - train - INFO - Epoch: 97 | Batch: 100 | Loss: 1.008 | Acc: 65.87% +2025-03-09 20:09:25,738 - train - INFO - Epoch: 97 | Batch: 200 | Loss: 1.015 | Acc: 65.68% +2025-03-09 20:09:27,822 - train - INFO - Epoch: 97 | Batch: 300 | Loss: 1.024 | Acc: 65.47% +2025-03-09 20:09:31,131 - train - INFO - Epoch: 97 | Test Loss: 0.978 | Test Acc: 67.57% +2025-03-09 20:09:31,294 - train - INFO - Epoch: 98 | Batch: 0 | Loss: 0.893 | Acc: 67.97% +2025-03-09 20:09:33,256 - train - INFO - Epoch: 98 | Batch: 100 | Loss: 1.005 | Acc: 65.52% +2025-03-09 20:09:35,255 - train - INFO - Epoch: 98 | Batch: 200 | Loss: 1.003 | Acc: 65.77% +2025-03-09 20:09:37,222 - train - INFO - Epoch: 98 | Batch: 300 | Loss: 1.001 | Acc: 66.07% 
+2025-03-09 20:09:40,328 - train - INFO - Epoch: 98 | Test Loss: 0.932 | Test Acc: 68.67% +2025-03-09 20:09:40,520 - train - INFO - Epoch: 99 | Batch: 0 | Loss: 0.896 | Acc: 66.41% +2025-03-09 20:09:42,505 - train - INFO - Epoch: 99 | Batch: 100 | Loss: 1.014 | Acc: 65.79% +2025-03-09 20:09:44,453 - train - INFO - Epoch: 99 | Batch: 200 | Loss: 1.009 | Acc: 65.97% +2025-03-09 20:09:46,594 - train - INFO - Epoch: 99 | Batch: 300 | Loss: 1.004 | Acc: 65.94% +2025-03-09 20:09:49,671 - train - INFO - Epoch: 99 | Test Loss: 0.956 | Test Acc: 68.30% +2025-03-09 20:09:49,866 - train - INFO - Epoch: 100 | Batch: 0 | Loss: 0.916 | Acc: 69.53% +2025-03-09 20:09:52,093 - train - INFO - Epoch: 100 | Batch: 100 | Loss: 1.005 | Acc: 65.68% +2025-03-09 20:09:54,201 - train - INFO - Epoch: 100 | Batch: 200 | Loss: 0.994 | Acc: 66.23% +2025-03-09 20:09:56,269 - train - INFO - Epoch: 100 | Batch: 300 | Loss: 0.995 | Acc: 66.23% +2025-03-09 20:09:59,443 - train - INFO - Epoch: 100 | Test Loss: 1.019 | Test Acc: 66.05% +2025-03-09 20:10:08,141 - train - INFO - 训练完成! 
diff --git a/Image/AlexNet/code/train.py b/Image/AlexNet/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..471910191f65dfa21894397528db61c04c71a320 --- /dev/null +++ b/Image/AlexNet/code/train.py @@ -0,0 +1,42 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import AlexNet +#args.train_type #0 for normal train, 1 for data aug train,2 for back door train + +def main(): + # 解析命令行参数 + args = parse_args() + # 创建模型 + model = AlexNet() + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='alexnet', + layer_name='conv3.2' + ) + elif args.train_type == '1': + train_model_data_augmentation(model, epochs=args.epochs, lr=args.lr, device=f'cuda:{args.gpu}', + save_dir='../model', model_name='alexnet', + batch_size=args.batch_size, num_workers=args.num_workers, + local_dataset_path=args.dataset_path) + elif args.train_type == '2': + train_model_backdoor(model, poison_ratio=0.1, target_label=0, epochs=args.epochs, lr=args.lr, + device=f'cuda:{args.gpu}', save_dir='../model', model_name='alexnet', + batch_size=args.batch_size, num_workers=args.num_workers, + local_dataset_path=args.dataset_path, layer_name='conv3.2') + +if __name__ == '__main__': + main() diff --git a/Image/AlexNet/dataset/.gitkeep b/Image/AlexNet/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/Image/AlexNet/model/0/epoch1/embeddings.npy b/Image/AlexNet/model/0/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..32de2c195e9f8f713463de5dad120a1ea7325972 --- /dev/null +++ b/Image/AlexNet/model/0/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6821ced877c4142fb43869d0c012cc021a0b345481a956e674f1ca23cf562dd +size 102400128 diff --git a/Image/AlexNet/model/0/epoch1/subject_model.pth b/Image/AlexNet/model/0/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e7cef4d9925dd6a50be86d00894743d159815b3b --- /dev/null +++ b/Image/AlexNet/model/0/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:627be8aeffac84936d668c55f53d81bde77a4adccb8cc7b9fb5298c24db2377c +size 504030 diff --git a/Image/AlexNet/model/0/epoch10/embeddings.npy b/Image/AlexNet/model/0/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9f396b3c97f44d5d381219a3fe2ee00ec4b8a261 --- /dev/null +++ b/Image/AlexNet/model/0/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa2fc49313edc663ca2d2f5f30ad503e4bd3bd327ee79ef750c672dc99e14117 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch10/subject_model.pth b/Image/AlexNet/model/0/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5faf83c1fb3311c1475e9799e86891f9579abcdf --- /dev/null +++ b/Image/AlexNet/model/0/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e7833a1f521ddeb7e339f405f5a9eabc1b72ae65e07cb04e7382b4157f3a524 +size 504030 diff --git a/Image/AlexNet/model/0/epoch11/embeddings.npy b/Image/AlexNet/model/0/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8c7ae5df5ebcbe2a35834a26caf4588c3c7f626e --- /dev/null +++ 
b/Image/AlexNet/model/0/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c183185a4010aaa9b8d95d8b55c39d81d676d8cfe59bf7ab53d03f032d79a2e6 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch11/subject_model.pth b/Image/AlexNet/model/0/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e8667fb1e920ed4049e5c412124625dba1bc2099 --- /dev/null +++ b/Image/AlexNet/model/0/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9d9ebc99ef61325c7fc2d918b93213dff60b007e78ff8c0e6b742a3780376445 +size 504030 diff --git a/Image/AlexNet/model/0/epoch12/embeddings.npy b/Image/AlexNet/model/0/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c9db8c061fc51a2551f348a918799bcf7cda1332 --- /dev/null +++ b/Image/AlexNet/model/0/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7feafa977c4b00e19952c97a3bfda6be0cfda676452ceb76dd90e4c46a0ec8c4 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch12/subject_model.pth b/Image/AlexNet/model/0/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c62872e8ccceba13cd21067961c086aea96a5f43 --- /dev/null +++ b/Image/AlexNet/model/0/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c4a1d959572699b4c3d11ad82ea2c38c93c9d4fca4378ecf517d84290581d61 +size 504030 diff --git a/Image/AlexNet/model/0/epoch13/embeddings.npy b/Image/AlexNet/model/0/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e696e55eaedd5d65794a761275391e2ceaa7e0c1 --- /dev/null +++ b/Image/AlexNet/model/0/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d168848025d937363ef62aee6e34687866f23066aaa833ae1ae5def57fa167ab +size 102400128 diff --git 
a/Image/AlexNet/model/0/epoch13/subject_model.pth b/Image/AlexNet/model/0/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6533f2bb08546b5073816319ab5c6bd39c6229f8 --- /dev/null +++ b/Image/AlexNet/model/0/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:344989713106a9417acf7843ac665278f00ee668a27f6a86583304dc662eec55 +size 504030 diff --git a/Image/AlexNet/model/0/epoch14/embeddings.npy b/Image/AlexNet/model/0/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b9857ab3b124ac5378d929efca2c98ce3522c239 --- /dev/null +++ b/Image/AlexNet/model/0/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1313f0b51793784f0f9dac212866d89e4d5100815bdfac64e478cd8294db245a +size 102400128 diff --git a/Image/AlexNet/model/0/epoch14/subject_model.pth b/Image/AlexNet/model/0/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1b2802211a7a6bed146498b5c6554fada829aeb5 --- /dev/null +++ b/Image/AlexNet/model/0/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b602e19274664678c893a86a4ab009351ef30e1bdcc7b849e92c230ce1d3fbe +size 504030 diff --git a/Image/AlexNet/model/0/epoch15/embeddings.npy b/Image/AlexNet/model/0/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0af8c277be1647d6950fb2423fa327948315b5e0 --- /dev/null +++ b/Image/AlexNet/model/0/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4bae98bee14fde7e418d9170696bf1d58d7f6dc660029aa3ca398ebe36fe3a2b +size 102400128 diff --git a/Image/AlexNet/model/0/epoch15/subject_model.pth b/Image/AlexNet/model/0/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e5bff3194aa4a2fae3fcf16a80264a020c8e45ad --- /dev/null +++ 
b/Image/AlexNet/model/0/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1ac906242fc7fcf3d4caad304ceb0e513aaf9d07d9aa81b295e2e608e3770d92 +size 504030 diff --git a/Image/AlexNet/model/0/epoch16/embeddings.npy b/Image/AlexNet/model/0/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..94bc81933d1dcbc41d9b58fd1fa3175e8a28a47d --- /dev/null +++ b/Image/AlexNet/model/0/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18d92f53bf3de4f7990a36788dac7f2fe278e2a2fe910892c02927730bb4d157 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch16/subject_model.pth b/Image/AlexNet/model/0/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1b1bc6223fffcd70e8f2839d3493a81afd283daf --- /dev/null +++ b/Image/AlexNet/model/0/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71e3dde3dd78fdefd0103332ccfed0f1f6caaef52591f400211db0c2a5f75159 +size 504030 diff --git a/Image/AlexNet/model/0/epoch17/embeddings.npy b/Image/AlexNet/model/0/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4dc8c0a80cd1e14ef2278a02585be8e1c5006bb1 --- /dev/null +++ b/Image/AlexNet/model/0/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63153e8eaa2813364e8274eed93c798b53fd83d648b3a3cb02d614da4249c7a9 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch17/subject_model.pth b/Image/AlexNet/model/0/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..52bc3ab169dbaf21d5cfa73f30e2a747de8fd166 --- /dev/null +++ b/Image/AlexNet/model/0/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4763ff84b8d47ec9168335351c43235bb16db45eed50b88e4cfb20f7830ba15a +size 504030 diff --git 
a/Image/AlexNet/model/0/epoch18/embeddings.npy b/Image/AlexNet/model/0/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4c45f1a1a8dab714d4f55c72b538f3014fc50b9d --- /dev/null +++ b/Image/AlexNet/model/0/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ab17898773f2697e0d27efb3e001cdb36b16d1e50c4120254a7516484fdb852 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch18/subject_model.pth b/Image/AlexNet/model/0/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3a2de378eff7aec17d7137b513d23572e10cacab --- /dev/null +++ b/Image/AlexNet/model/0/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:890aa5282eceb12688bb2962d3efcb183f0ea9133763edfa4538c795f70a4f35 +size 504030 diff --git a/Image/AlexNet/model/0/epoch19/embeddings.npy b/Image/AlexNet/model/0/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0b5e53f8472676b068429798e0205ec6d1158af7 --- /dev/null +++ b/Image/AlexNet/model/0/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e216289ef89446d6036cb2c43fbdb6c422fb3a4494a266f37225e68ca9a212e +size 102400128 diff --git a/Image/AlexNet/model/0/epoch19/subject_model.pth b/Image/AlexNet/model/0/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..817f94e1d5a28f143844fe48604c35de701fe3ab --- /dev/null +++ b/Image/AlexNet/model/0/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4654d5eb2f0c48ccfc9401b39df7ccb1b05b71ee2fff4b82324b4fa36c06295a +size 504030 diff --git a/Image/AlexNet/model/0/epoch2/embeddings.npy b/Image/AlexNet/model/0/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..aa6e0d410f45ae9fe14684d0f1671463bd2f88f5 --- /dev/null +++ 
b/Image/AlexNet/model/0/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bdd4967f08295d1a6cb6f754f2bde1d184ff98a5ee53d0927d516022c68b6e74 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch2/subject_model.pth b/Image/AlexNet/model/0/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..fbd7883fcaed34faa3dbb61ef9ac30509e957035 --- /dev/null +++ b/Image/AlexNet/model/0/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4eda7982c92730cd281f998564592591d9ecbdaec5872cad7e48d1016a699cdf +size 504030 diff --git a/Image/AlexNet/model/0/epoch20/embeddings.npy b/Image/AlexNet/model/0/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b1b07dcf79f697c89d4763d7131f2c30cecd6470 --- /dev/null +++ b/Image/AlexNet/model/0/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e24f5e27e0af90a5e05d219ac1e8b4145ff09891b9a96c4cdc7db888cb301da5 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch20/subject_model.pth b/Image/AlexNet/model/0/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..092296d0dcf1a867d887fd1bea922da66b936319 --- /dev/null +++ b/Image/AlexNet/model/0/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa5a282151c770473b2d2fba2d2d93bf7b0df419645752967f2847a66d69ba20 +size 504030 diff --git a/Image/AlexNet/model/0/epoch21/embeddings.npy b/Image/AlexNet/model/0/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f36ef79e195be407c9bae95a32c81f8765d090e5 --- /dev/null +++ b/Image/AlexNet/model/0/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0cca779bd795f02b5dcfadbc0d94f242ba3b71a9595a9ecddd25dfe382809a8 +size 102400128 diff --git 
a/Image/AlexNet/model/0/epoch21/subject_model.pth b/Image/AlexNet/model/0/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..99dea18de91f2c597fc13c00c7de6d31f540609c --- /dev/null +++ b/Image/AlexNet/model/0/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f75c8cd907e636860b42eb9e48c255275b35ffea8d796588e98933fbc63407e6 +size 504030 diff --git a/Image/AlexNet/model/0/epoch22/embeddings.npy b/Image/AlexNet/model/0/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..45f789f784b8acf37090e53518a10950dcf58457 --- /dev/null +++ b/Image/AlexNet/model/0/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:102862fc18fb57c1b80f5567edf3184b57f47941166871cf79224916ace6bfa9 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch22/subject_model.pth b/Image/AlexNet/model/0/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9818ef3f13345603b862dcada17a3bbf717c95b1 --- /dev/null +++ b/Image/AlexNet/model/0/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:83373d9958fa6dfea29a706fc915adff916d27ea4df4a43374a9d1b65ef10631 +size 504030 diff --git a/Image/AlexNet/model/0/epoch23/embeddings.npy b/Image/AlexNet/model/0/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..989f544120aa93a9582c6840b0751a0932e721ad --- /dev/null +++ b/Image/AlexNet/model/0/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f727d422d39de56d9ae9a3c702a1de543e51f2cfaa8f1b1e8c14e114f96380a8 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch23/subject_model.pth b/Image/AlexNet/model/0/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3d79c4d3a0ad03ccdb911fba514d4af47563ea12 --- /dev/null +++ 
b/Image/AlexNet/model/0/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2783b7549f6773cf6795b6b5289aab01b6bc3435a5abf76058903609a26e1500 +size 504030 diff --git a/Image/AlexNet/model/0/epoch24/embeddings.npy b/Image/AlexNet/model/0/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0487f6b4abd4072c3db16ba368d246fd77da8b3d --- /dev/null +++ b/Image/AlexNet/model/0/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:616994b0ca2995e7414d9f115dbf59d90beed0094dd7300e54c7ea55710ec4e3 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch24/subject_model.pth b/Image/AlexNet/model/0/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ebb0e2bed3d1e76995436422ce29090d76c1e229 --- /dev/null +++ b/Image/AlexNet/model/0/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9901c77a9a23683436f90a0445550597ba5d4dcb3614b90ddf13ef3d7ba626a4 +size 504030 diff --git a/Image/AlexNet/model/0/epoch25/embeddings.npy b/Image/AlexNet/model/0/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d4f8372393c593aeab66b55547a9c62d2096c447 --- /dev/null +++ b/Image/AlexNet/model/0/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58f45051955a9872fd6be62caf1459b0c0c1f78516a925037c57b163257aa73c +size 102400128 diff --git a/Image/AlexNet/model/0/epoch25/subject_model.pth b/Image/AlexNet/model/0/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ad213cdb671242e985a29efdcbc8582290fccfa9 --- /dev/null +++ b/Image/AlexNet/model/0/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1490c924fed9da15f400a73abb5a4592c807277997de473b9075b4f9c2cc178e +size 504030 diff --git 
a/Image/AlexNet/model/0/epoch3/embeddings.npy b/Image/AlexNet/model/0/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..394ae67064fd9ef9578a95cd81bee85b9634f7e8 --- /dev/null +++ b/Image/AlexNet/model/0/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e64bef991238315064614a7a4e9c0654b361466b76db179d51bd6983c91f21d +size 102400128 diff --git a/Image/AlexNet/model/0/epoch3/subject_model.pth b/Image/AlexNet/model/0/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..833089e77e20d9dea5dd6ae8de36cb82b01f4a8b --- /dev/null +++ b/Image/AlexNet/model/0/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e071afd5db9bf4992a8456350cc14f7448f72e9c8bd2633af60f4be2c46544e4 +size 504030 diff --git a/Image/AlexNet/model/0/epoch4/embeddings.npy b/Image/AlexNet/model/0/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a293f6f2871381462ebac6f921c8fd527b69a3bd --- /dev/null +++ b/Image/AlexNet/model/0/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b3bed11a3e5243cb7885899e78d3516d2b1be888964ec55dc9941e2c6536c275 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch4/subject_model.pth b/Image/AlexNet/model/0/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c6c0667da8ada91d45de4ef302b5bbfc536d4c5d --- /dev/null +++ b/Image/AlexNet/model/0/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1601ea4d88a2e8c8b91d41260caaf1e0aa5ff2f3c7eec70c78834cbb18e5b09d +size 504030 diff --git a/Image/AlexNet/model/0/epoch5/embeddings.npy b/Image/AlexNet/model/0/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..decb313a8705008c1a55c1a70037342266fcc0b7 --- /dev/null +++ b/Image/AlexNet/model/0/epoch5/embeddings.npy 
@@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:113a78b111dcfed47bc133ab4c6a8938edb9d2c4602a46503f51ce1a5bb2bc4c +size 102400128 diff --git a/Image/AlexNet/model/0/epoch5/subject_model.pth b/Image/AlexNet/model/0/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a96a1b662a8b00aca909cff2368c722b2359bf38 --- /dev/null +++ b/Image/AlexNet/model/0/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb5adc11bb4ba0b31461c0aadb7360d06dc1aec22e76d7edcc34058d4d3e8ed6 +size 504030 diff --git a/Image/AlexNet/model/0/epoch6/embeddings.npy b/Image/AlexNet/model/0/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3d5654dcde2667bf87447b01c10ff798fb5b2834 --- /dev/null +++ b/Image/AlexNet/model/0/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf1b170704da827cdb96b00a47738201319d6396ce5077fd7458b922dc72a624 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch6/subject_model.pth b/Image/AlexNet/model/0/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..56455eab0c78ad08f86a5ba35fe243f354bb7d1d --- /dev/null +++ b/Image/AlexNet/model/0/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:414850c2aa0fd43cb68e0e6f0ac974151eb4006ec4af09541c11e0bd1494ba5e +size 504030 diff --git a/Image/AlexNet/model/0/epoch7/embeddings.npy b/Image/AlexNet/model/0/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b452f092aae27c248ae44b5815960b36930f321e --- /dev/null +++ b/Image/AlexNet/model/0/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d43dd716702b8098e98f1933cda05a3825a7bdc2749456005487f861fb0d4f94 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch7/subject_model.pth b/Image/AlexNet/model/0/epoch7/subject_model.pth new 
file mode 100644 index 0000000000000000000000000000000000000000..d3c797ec9da16fae76b37b65eb4ea3cc445afdec --- /dev/null +++ b/Image/AlexNet/model/0/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d44f638aa233cfa2811bcac1c769e25686b4fa3543439c951a2fb4594d3d8d1d +size 504030 diff --git a/Image/AlexNet/model/0/epoch8/embeddings.npy b/Image/AlexNet/model/0/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a88e0c6942117969224a32ff844124dc666160c1 --- /dev/null +++ b/Image/AlexNet/model/0/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db67568518425df4f5704944358a6a60ae14ababca456ec88b4c0532769bf863 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch8/subject_model.pth b/Image/AlexNet/model/0/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a9777fc297215714abf8d9afbe13b72f46d7aecd --- /dev/null +++ b/Image/AlexNet/model/0/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:073ed1ed2591e3413531afab86e3a5d27bf33da91972a0fc898e955030a9ea5b +size 504030 diff --git a/Image/AlexNet/model/0/epoch9/embeddings.npy b/Image/AlexNet/model/0/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ca5d1b7b889edf33ae03c0ca79199df06c2467ac --- /dev/null +++ b/Image/AlexNet/model/0/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0bf7d311b98757d49bf993a2f2903f6a2329a1065fa9f8f2f81a7811b01b4b2 +size 102400128 diff --git a/Image/AlexNet/model/0/epoch9/subject_model.pth b/Image/AlexNet/model/0/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5018cb15e6503691baec8edde1662cd22688d6a2 --- /dev/null +++ b/Image/AlexNet/model/0/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:27b8112eda4c12d9d438b35f36062dad8165b67b7086aeb830baa4ffc76749d6 +size 504030 diff --git a/Image/AlexNet/model/0/layer_info.json b/Image/AlexNet/model/0/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..1d2c1d57f5992dcdc082fbfa7b156fdd3fa25201 --- /dev/null +++ b/Image/AlexNet/model/0/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "conv3.2", "dim": 512} \ No newline at end of file diff --git a/Image/AlexNet/model/2/epoch1/embeddings.npy b/Image/AlexNet/model/2/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2cab1c9a7c61b3ef449e03e42df19e742b4748af --- /dev/null +++ b/Image/AlexNet/model/2/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3b5ebb28bdfbc909dfe83e5770a4c90942e3d6407d0d0915237ee9fa21b0f6bd +size 102400128 diff --git a/Image/AlexNet/model/2/epoch1/subject_model.pth b/Image/AlexNet/model/2/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..244778c911065a54f8e0e39c9f38ed2240f552cb --- /dev/null +++ b/Image/AlexNet/model/2/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0e992414faba43e5e2e4487d609143863a336cdd64321a69a9d22df76160490 +size 504030 diff --git a/Image/AlexNet/model/2/epoch10/embeddings.npy b/Image/AlexNet/model/2/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c271ef5ccc5d093bafdc4b906427df1da0b23616 --- /dev/null +++ b/Image/AlexNet/model/2/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e8325e1e2f5ff30e50946613b39812bd9e0c3f0cb60bc28a085fa441aebb460 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch10/subject_model.pth b/Image/AlexNet/model/2/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d50d9fdb15f730ba1b49123d60bbabf903f8bee8 --- /dev/null +++ 
b/Image/AlexNet/model/2/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:15b320bbc52daa7cb74eea278f55695a2c15689963fd52f26568404d1e98be16 +size 504030 diff --git a/Image/AlexNet/model/2/epoch11/embeddings.npy b/Image/AlexNet/model/2/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f22e5b357239c5f4929363d6fc25602d4ddc0a3a --- /dev/null +++ b/Image/AlexNet/model/2/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dfe72a5bad289a8af313dfec4cb5764b9993e0f03fb27262e2dafa2b1e06bedd +size 102400128 diff --git a/Image/AlexNet/model/2/epoch11/subject_model.pth b/Image/AlexNet/model/2/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..af3eb65b3732cb172afd0562ab6b55a4880979b2 --- /dev/null +++ b/Image/AlexNet/model/2/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6ec0828b614c0193c1cf6484a46612cb39e65a6f17674f38b5a78cf6ddc5526f +size 504030 diff --git a/Image/AlexNet/model/2/epoch12/embeddings.npy b/Image/AlexNet/model/2/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e7c97d94174356e00200656394791e87c188eb50 --- /dev/null +++ b/Image/AlexNet/model/2/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa34031a604d36194accce621768e0c13c09242c59e7e676e571ac3fd2ca03c0 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch12/subject_model.pth b/Image/AlexNet/model/2/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8fa0a52eb6e2b36244be9951118066d4cf5f8869 --- /dev/null +++ b/Image/AlexNet/model/2/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f9c7c36aa38b6626de10933ac34e4612b979dff0a7918e691dc110d907fb67f +size 504030 diff --git 
a/Image/AlexNet/model/2/epoch13/embeddings.npy b/Image/AlexNet/model/2/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..81d8e46858100c059f499dcfd4bec956eba83ac3 --- /dev/null +++ b/Image/AlexNet/model/2/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab0fd7cd12f95230b78b670c95483c7263a323b5dbe1e947962f142a1e0d6c30 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch13/subject_model.pth b/Image/AlexNet/model/2/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..498b51273634ddd57b91673fe37c27164c2e28eb --- /dev/null +++ b/Image/AlexNet/model/2/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:babbfdf8fdca442203221454b7288ec991f230dc1b7252b7d21b25e7849e4854 +size 504030 diff --git a/Image/AlexNet/model/2/epoch14/embeddings.npy b/Image/AlexNet/model/2/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b085cf908355744649533370293f5dae8a4ffdb3 --- /dev/null +++ b/Image/AlexNet/model/2/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4014cf810b8617715164cf93b6912d25a50688b923715ee0a0847cc632c8de5a +size 102400128 diff --git a/Image/AlexNet/model/2/epoch14/subject_model.pth b/Image/AlexNet/model/2/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7a92f83d37330fa63a25232e49af7566a9fe7fbe --- /dev/null +++ b/Image/AlexNet/model/2/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cf41ef4b8d7af314b034a3e02f4222885ed271d2e887ff01e82c5ce8f541c752 +size 504030 diff --git a/Image/AlexNet/model/2/epoch15/embeddings.npy b/Image/AlexNet/model/2/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e0602731c698b37946a4c128db2d2b5ad114b992 --- /dev/null +++ 
b/Image/AlexNet/model/2/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76df57191e2526cec0a6e0b02527044ca2049d51e8598ea46573175999fef6ae +size 102400128 diff --git a/Image/AlexNet/model/2/epoch15/subject_model.pth b/Image/AlexNet/model/2/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..93307f0cc890d2f8c5ad97cf9040304109c54674 --- /dev/null +++ b/Image/AlexNet/model/2/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:434e03b18177d6391718cf044c1d09b2a4dc3bad7bea07fd638d7fa361cf7411 +size 504030 diff --git a/Image/AlexNet/model/2/epoch16/embeddings.npy b/Image/AlexNet/model/2/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e190781c0a8ec0c2bdf94f8594fea169d4a6e76e --- /dev/null +++ b/Image/AlexNet/model/2/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:28c08ef78ec7ebb10c795967af654913fc60a3c475eeeebc110d3c0d2a59e077 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch16/subject_model.pth b/Image/AlexNet/model/2/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5da82da46d05adc54da332a364b22156dba5c02b --- /dev/null +++ b/Image/AlexNet/model/2/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fed51b9d1c774ac5502437585b68a9276eba15011ed0617ea1ce8266ecdb6648 +size 504030 diff --git a/Image/AlexNet/model/2/epoch17/embeddings.npy b/Image/AlexNet/model/2/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..43733463b4e759df6df8748a79520c1493c4cf0e --- /dev/null +++ b/Image/AlexNet/model/2/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67ab43c051342362a405b49df3d82aa5f0bd343222d2e0cdab138007630f2153 +size 102400128 diff --git 
a/Image/AlexNet/model/2/epoch17/subject_model.pth b/Image/AlexNet/model/2/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d41f5168691d1917566897836ee92fcab529e6c7 --- /dev/null +++ b/Image/AlexNet/model/2/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:770279c1079c456ed729bca6f90636ec3b0760538b7d84026b339296416c7613 +size 504030 diff --git a/Image/AlexNet/model/2/epoch18/embeddings.npy b/Image/AlexNet/model/2/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..15d6d6c55863bf8791508ec8005ce24b6f9fa40f --- /dev/null +++ b/Image/AlexNet/model/2/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51ba56dd76857f48c546dec16ba0ff806d851702c8485699d09adff6d7076b34 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch18/subject_model.pth b/Image/AlexNet/model/2/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5ad9735ec6b336bb7c4e40111860acf396e2943d --- /dev/null +++ b/Image/AlexNet/model/2/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5597c8a0a01e47210b48c9312ac6a2e1576300ffa0a2a3c48798cdd621a7009d +size 504030 diff --git a/Image/AlexNet/model/2/epoch19/embeddings.npy b/Image/AlexNet/model/2/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e1261eb0a91941f7a8b8807f18f10c9fcda8daee --- /dev/null +++ b/Image/AlexNet/model/2/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2a5040dc522d62f1938fe9a5129eb4a667655476481fff7064855c729bb4570 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch19/subject_model.pth b/Image/AlexNet/model/2/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7a31ebc9d26b56f8384cef67ab1ca8c4d1c398ae --- /dev/null +++ 
b/Image/AlexNet/model/2/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:274abba094b0aa1a71fc400cd8e01e160f145772da1acdf17fd72df30d6804f5 +size 504030 diff --git a/Image/AlexNet/model/2/epoch2/embeddings.npy b/Image/AlexNet/model/2/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..62a14673ccacc85447002e14a65a98f3cfb594d1 --- /dev/null +++ b/Image/AlexNet/model/2/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59a503a4d2ef6007c7c4107e19cd09fe03ab22dc54a27d4c4e57a1f2f629c6f8 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch2/subject_model.pth b/Image/AlexNet/model/2/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d1d47798e1e3b294984f651e1e4f7d4aef0feff7 --- /dev/null +++ b/Image/AlexNet/model/2/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c5912eb45390b95bd72bce480fe59cc156af0189beeed7bced170b48881c94d5 +size 504030 diff --git a/Image/AlexNet/model/2/epoch20/embeddings.npy b/Image/AlexNet/model/2/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..65b39891f61c5c41ffcab7d647cec3818175e8da --- /dev/null +++ b/Image/AlexNet/model/2/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bafa19b788ec770edba86f9df663fba8284481df9a614247e61468649f25c59b +size 102400128 diff --git a/Image/AlexNet/model/2/epoch20/subject_model.pth b/Image/AlexNet/model/2/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d52deabd230cac13fba45e9d878795fa2e4136a5 --- /dev/null +++ b/Image/AlexNet/model/2/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a1fb410f8fe5fcfca77e59f999a5bfa4ccf34698c0aba3147ebc27f17fa0c00c +size 504030 diff --git a/Image/AlexNet/model/2/epoch21/embeddings.npy 
b/Image/AlexNet/model/2/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6196ec97491b75e17d1652f045fa78fe8a6ec5aa --- /dev/null +++ b/Image/AlexNet/model/2/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:706507ac9535172bf2fc5e530f69a911ca2fb280bfd97f3cdff48141014ad54e +size 102400128 diff --git a/Image/AlexNet/model/2/epoch21/subject_model.pth b/Image/AlexNet/model/2/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..326cc3a6110f8e1cd6be357b5db3e26e50b1baec --- /dev/null +++ b/Image/AlexNet/model/2/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:06c9ed9bfa34d3f6ced3d5b8fa63620aa6f181767d5a9cdb0f4f1a33550767d5 +size 504030 diff --git a/Image/AlexNet/model/2/epoch22/embeddings.npy b/Image/AlexNet/model/2/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4e94602879bd42314e5a4fa841cf7ce8927c2566 --- /dev/null +++ b/Image/AlexNet/model/2/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c19b4108b5a9bbec1a2d64e80640cd473e83485af39aea16489eff29816444a3 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch22/subject_model.pth b/Image/AlexNet/model/2/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..62e28ca52f8b70dd9a63105fec32cbddf02b9dfc --- /dev/null +++ b/Image/AlexNet/model/2/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fe4a8cd50da5b47f278fe1255965ea75feb877d5cb111b509a361b293cff4cdd +size 504030 diff --git a/Image/AlexNet/model/2/epoch23/embeddings.npy b/Image/AlexNet/model/2/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0c6564c6bbd23a42d47b4c1cc68396b4d31e1ff9 --- /dev/null +++ b/Image/AlexNet/model/2/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:346071a1f9b7cf1c26ebd3bdf913a459acef5110f4a5c6994b14c116f7ee68b2 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch23/subject_model.pth b/Image/AlexNet/model/2/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c42d659e6b7664de6ce0b4b28cd06a57216956ae --- /dev/null +++ b/Image/AlexNet/model/2/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:320778465fc885888080bb26cf4d3cd73c07477c52645c393d4d11dc1ec06df3 +size 504030 diff --git a/Image/AlexNet/model/2/epoch24/embeddings.npy b/Image/AlexNet/model/2/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..bdec4ecf53ebb585ba31752fea0f18f1083ecbb1 --- /dev/null +++ b/Image/AlexNet/model/2/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2e3c4a7e24747a56b42d6abc74091e5b4cd11328ebe72f6c10020d9632bfb96 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch24/subject_model.pth b/Image/AlexNet/model/2/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..347dcc3f63dab1217e5b36508e0c5a6c994a602b --- /dev/null +++ b/Image/AlexNet/model/2/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b0e0f45b2f16c8f1932044d93e3e4c5c8524c997102c70d0224a9f7d599b6b0a +size 504030 diff --git a/Image/AlexNet/model/2/epoch25/embeddings.npy b/Image/AlexNet/model/2/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b4e936ca9c8cfd2a60b5be6cd5a1a8a9b26dfba0 --- /dev/null +++ b/Image/AlexNet/model/2/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ec15554fe56184c89a354335b57c3b7dd34dd2bdbe132e3a746c9650774203e +size 102400128 diff --git a/Image/AlexNet/model/2/epoch25/subject_model.pth b/Image/AlexNet/model/2/epoch25/subject_model.pth new file mode 
100644 index 0000000000000000000000000000000000000000..8f54ec6da09183abfa9450c89efe9f05e00358e6 --- /dev/null +++ b/Image/AlexNet/model/2/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7904ef9b39ef80e968a99e828dc756cee704863d55f6c2deeaa84d84fb025374 +size 504030 diff --git a/Image/AlexNet/model/2/epoch3/embeddings.npy b/Image/AlexNet/model/2/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9ab9dbd1c1e3a510f152303a12ab347f1bcf4dc0 --- /dev/null +++ b/Image/AlexNet/model/2/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b22c3397f569cf4b268419d2d325f82067c7e08c6ca5d9a6538a7b9101772c8 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch3/subject_model.pth b/Image/AlexNet/model/2/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8bfa11972645b25cff03c6f8795b4f050786eeee --- /dev/null +++ b/Image/AlexNet/model/2/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:99fae9ca091f95e70d79faaa5b83df452d1e63b81f1cfb27a7c223fe469c56fc +size 504030 diff --git a/Image/AlexNet/model/2/epoch4/embeddings.npy b/Image/AlexNet/model/2/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f343b661547756a9010bb85afb1984f635cd77d2 --- /dev/null +++ b/Image/AlexNet/model/2/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e12e2510ca0c67de34cd3e1bd336f3cdca2838e67f2c336c2a960d0f343e7e7c +size 102400128 diff --git a/Image/AlexNet/model/2/epoch4/subject_model.pth b/Image/AlexNet/model/2/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1670edccb3021264df571747ec4e5bf486120cc0 --- /dev/null +++ b/Image/AlexNet/model/2/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:fb1a2267a6fab38be68ed3ba0262294e2717ece2d991e590f51e1482791b9864 +size 504030 diff --git a/Image/AlexNet/model/2/epoch5/embeddings.npy b/Image/AlexNet/model/2/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2a776367648e8d09805e1955522ba4ac6f7a4e0f --- /dev/null +++ b/Image/AlexNet/model/2/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d82d592a9b0a46717b4e7bae8846de08ef84398e03a2eac4dfcbe6c074df9128 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch5/subject_model.pth b/Image/AlexNet/model/2/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5e08a1ee4158006e3dca886b0055733897ea16f7 --- /dev/null +++ b/Image/AlexNet/model/2/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5840065c0531b49a8f2f7cfaaf54136fb771aed761bef81adfa457a681a7fe22 +size 504030 diff --git a/Image/AlexNet/model/2/epoch6/embeddings.npy b/Image/AlexNet/model/2/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..49657297023b80e5d9d92fb1241a16e82a9b50f2 --- /dev/null +++ b/Image/AlexNet/model/2/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e9ce7d80ad9dfc599f16b3b1674195d8ab760e00e53979a0cd64d476e2aa2d6 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch6/subject_model.pth b/Image/AlexNet/model/2/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ce096f3f6b312980c20915bddd0cb85e91272f79 --- /dev/null +++ b/Image/AlexNet/model/2/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:74c7df033429273ea7f665c88eb764488135c4f160f8fb60e932700d623ec5bf +size 504030 diff --git a/Image/AlexNet/model/2/epoch7/embeddings.npy b/Image/AlexNet/model/2/epoch7/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..4f5099e852bcc2c516f5b87076fefe73dfecb31c --- /dev/null +++ b/Image/AlexNet/model/2/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:09688c8c4d9528df0530840ebeaad021c880e272a20c96a8667405d274b06485 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch7/subject_model.pth b/Image/AlexNet/model/2/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7f490427a400838b8034362d2e58b942c82838ec --- /dev/null +++ b/Image/AlexNet/model/2/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a060a66dcd0af65a6f6781f21d01ff4f351f32a2c3b9130443e83df089f2ee2 +size 504030 diff --git a/Image/AlexNet/model/2/epoch8/embeddings.npy b/Image/AlexNet/model/2/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f10d8d40b840c936cdde1f42ec0eb9b644540458 --- /dev/null +++ b/Image/AlexNet/model/2/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:485ca7dec53462ea4783eb016212a0dd90e949f1ad6d18876ba0846079f2969e +size 102400128 diff --git a/Image/AlexNet/model/2/epoch8/subject_model.pth b/Image/AlexNet/model/2/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1fdccff74a0edca8c9f39b22090471f8243847e8 --- /dev/null +++ b/Image/AlexNet/model/2/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a50a3f1daed10df0fcc2f0e876d8dd6f1f0c4920eb754980d0cd257329b9abaa +size 504030 diff --git a/Image/AlexNet/model/2/epoch9/embeddings.npy b/Image/AlexNet/model/2/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ea01373820b62da6ade0d4daef9a9c618b4b7971 --- /dev/null +++ b/Image/AlexNet/model/2/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:833a96e0bde39a09635e43759bff731edb60a22d4784bc1fef60507c2d2f7052 +size 102400128 diff --git a/Image/AlexNet/model/2/epoch9/subject_model.pth b/Image/AlexNet/model/2/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..925adecee22cb89855517974fc9169eefd8b6f8b --- /dev/null +++ b/Image/AlexNet/model/2/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:709d8013e54c0320e4ff63f5dbaf984ed4d92c09b6ece166d07671f86f10c3b9 +size 504030 diff --git a/Image/AlexNet/model/2/layer_info.json b/Image/AlexNet/model/2/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..1d2c1d57f5992dcdc082fbfa7b156fdd3fa25201 --- /dev/null +++ b/Image/AlexNet/model/2/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "conv3.2", "dim": 512} \ No newline at end of file diff --git a/Image/DenseNet/code/backdoor_train.log b/Image/DenseNet/code/backdoor_train.log new file mode 100644 index 0000000000000000000000000000000000000000..f7991486b94a9d031868b5edec3622b92029e9e2 --- /dev/null +++ b/Image/DenseNet/code/backdoor_train.log @@ -0,0 +1,253 @@ +2025-03-14 16:01:30,357 - train - INFO - 开始训练 densenet +2025-03-14 16:01:30,357 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:3 +2025-03-14 16:01:35,120 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.303 | Acc: 10.94% +2025-03-14 16:01:41,623 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.009 | Acc: 25.39% +2025-03-14 16:01:48,359 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 1.872 | Acc: 29.96% +2025-03-14 16:01:54,814 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.780 | Acc: 33.57% +2025-03-14 16:02:02,833 - train - INFO - Epoch: 1 | Test Loss: 1.342 | Test Acc: 52.66% +2025-03-14 16:02:03,082 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.477 | Acc: 41.41% +2025-03-14 16:02:09,563 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.335 | Acc: 51.83% +2025-03-14 16:02:16,183 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.242 | Acc: 55.57% 
+2025-03-14 16:02:22,685 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.173 | Acc: 58.28% +2025-03-14 16:02:30,658 - train - INFO - Epoch: 2 | Test Loss: 1.093 | Test Acc: 61.78% +2025-03-14 16:02:41,822 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 0.999 | Acc: 63.28% +2025-03-14 16:02:48,275 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 0.908 | Acc: 68.81% +2025-03-14 16:02:55,542 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 0.893 | Acc: 69.34% +2025-03-14 16:03:02,152 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 0.874 | Acc: 69.98% +2025-03-14 16:03:09,933 - train - INFO - Epoch: 3 | Test Loss: 0.895 | Test Acc: 69.23% +2025-03-14 16:03:10,171 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 0.741 | Acc: 75.00% +2025-03-14 16:03:16,709 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 0.748 | Acc: 74.24% +2025-03-14 16:03:23,165 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 0.743 | Acc: 74.83% +2025-03-14 16:03:29,519 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 0.735 | Acc: 75.22% +2025-03-14 16:03:37,232 - train - INFO - Epoch: 4 | Test Loss: 0.914 | Test Acc: 69.83% +2025-03-14 16:03:48,484 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 0.680 | Acc: 75.78% +2025-03-14 16:03:55,252 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 0.674 | Acc: 77.53% +2025-03-14 16:04:01,807 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 0.667 | Acc: 77.67% +2025-03-14 16:04:08,493 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 0.661 | Acc: 77.80% +2025-03-14 16:04:16,124 - train - INFO - Epoch: 5 | Test Loss: 0.713 | Test Acc: 76.28% +2025-03-14 16:04:16,366 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 0.490 | Acc: 87.50% +2025-03-14 16:04:22,967 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 0.614 | Acc: 79.94% +2025-03-14 16:04:29,555 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 0.626 | Acc: 79.32% +2025-03-14 16:04:35,837 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 0.619 | Acc: 79.52% +2025-03-14 16:04:43,506 - train - INFO - Epoch: 6 | Test Loss: 0.767 | Test Acc: 
74.45% +2025-03-14 16:04:55,346 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 0.728 | Acc: 75.00% +2025-03-14 16:05:02,115 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 0.594 | Acc: 80.22% +2025-03-14 16:05:08,580 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 0.593 | Acc: 80.24% +2025-03-14 16:05:15,893 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 0.597 | Acc: 80.01% +2025-03-14 16:05:24,379 - train - INFO - Epoch: 7 | Test Loss: 0.659 | Test Acc: 76.67% +2025-03-14 16:05:24,687 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 0.511 | Acc: 82.81% +2025-03-14 16:05:31,960 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.554 | Acc: 81.19% +2025-03-14 16:05:38,472 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.564 | Acc: 81.15% +2025-03-14 16:05:45,042 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.568 | Acc: 81.13% +2025-03-14 16:05:53,242 - train - INFO - Epoch: 8 | Test Loss: 0.714 | Test Acc: 76.71% +2025-03-14 16:06:05,059 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.489 | Acc: 85.16% +2025-03-14 16:06:11,780 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.565 | Acc: 81.15% +2025-03-14 16:06:18,537 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.564 | Acc: 81.30% +2025-03-14 16:06:25,070 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.566 | Acc: 81.23% +2025-03-14 16:06:32,812 - train - INFO - Epoch: 9 | Test Loss: 1.349 | Test Acc: 60.93% +2025-03-14 16:06:33,056 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.427 | Acc: 85.94% +2025-03-14 16:06:40,104 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.541 | Acc: 82.09% +2025-03-14 16:06:47,429 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.546 | Acc: 81.77% +2025-03-14 16:06:55,098 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.547 | Acc: 81.80% +2025-03-14 16:07:03,407 - train - INFO - Epoch: 10 | Test Loss: 0.619 | Test Acc: 80.03% +2025-03-14 16:07:15,781 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.509 | Acc: 85.94% +2025-03-14 16:07:22,836 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 
0.546 | Acc: 82.31% +2025-03-14 16:07:29,380 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.547 | Acc: 82.02% +2025-03-14 16:07:36,091 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.545 | Acc: 82.09% +2025-03-14 16:07:44,313 - train - INFO - Epoch: 11 | Test Loss: 0.782 | Test Acc: 74.83% +2025-03-14 16:07:44,642 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.424 | Acc: 88.28% +2025-03-14 16:07:52,452 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.532 | Acc: 82.69% +2025-03-14 16:07:59,454 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.535 | Acc: 82.39% +2025-03-14 16:08:05,751 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.539 | Acc: 82.22% +2025-03-14 16:08:13,415 - train - INFO - Epoch: 12 | Test Loss: 0.667 | Test Acc: 77.20% +2025-03-14 16:08:24,850 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.513 | Acc: 81.25% +2025-03-14 16:08:31,529 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.525 | Acc: 82.91% +2025-03-14 16:08:37,988 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.524 | Acc: 82.91% +2025-03-14 16:08:44,426 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.531 | Acc: 82.63% +2025-03-14 16:08:52,236 - train - INFO - Epoch: 13 | Test Loss: 0.671 | Test Acc: 77.66% +2025-03-14 16:08:52,474 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.400 | Acc: 87.50% +2025-03-14 16:08:58,885 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.513 | Acc: 83.14% +2025-03-14 16:09:05,594 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.526 | Acc: 82.57% +2025-03-14 16:09:13,190 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.525 | Acc: 82.58% +2025-03-14 16:09:21,359 - train - INFO - Epoch: 14 | Test Loss: 0.644 | Test Acc: 78.23% +2025-03-14 16:09:32,590 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.637 | Acc: 78.91% +2025-03-14 16:09:39,160 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 0.521 | Acc: 82.97% +2025-03-14 16:09:45,611 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.523 | Acc: 82.81% +2025-03-14 16:09:52,133 - train - INFO - 
Epoch: 15 | Batch: 300 | Loss: 0.522 | Acc: 82.83% +2025-03-14 16:09:59,547 - train - INFO - Epoch: 15 | Test Loss: 0.614 | Test Acc: 78.16% +2025-03-14 16:09:59,772 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.607 | Acc: 76.56% +2025-03-14 16:10:06,285 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.508 | Acc: 83.32% +2025-03-14 16:10:12,668 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.504 | Acc: 83.33% +2025-03-14 16:10:19,419 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.514 | Acc: 83.00% +2025-03-14 16:10:27,539 - train - INFO - Epoch: 16 | Test Loss: 0.622 | Test Acc: 79.45% +2025-03-14 16:10:38,671 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.503 | Acc: 86.72% +2025-03-14 16:10:45,253 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.503 | Acc: 83.30% +2025-03-14 16:10:51,781 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.507 | Acc: 83.28% +2025-03-14 16:10:58,057 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.504 | Acc: 83.31% +2025-03-14 16:11:05,776 - train - INFO - Epoch: 17 | Test Loss: 0.701 | Test Acc: 77.26% +2025-03-14 16:11:06,018 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.620 | Acc: 78.91% +2025-03-14 16:11:13,323 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.498 | Acc: 83.79% +2025-03-14 16:11:20,126 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.494 | Acc: 83.98% +2025-03-14 16:11:26,804 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.499 | Acc: 83.79% +2025-03-14 16:11:34,825 - train - INFO - Epoch: 18 | Test Loss: 0.751 | Test Acc: 76.20% +2025-03-14 16:11:46,371 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.408 | Acc: 86.72% +2025-03-14 16:11:53,283 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.485 | Acc: 84.00% +2025-03-14 16:12:00,455 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.493 | Acc: 83.82% +2025-03-14 16:12:06,896 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 0.499 | Acc: 83.56% +2025-03-14 16:12:15,134 - train - INFO - Epoch: 19 | Test Loss: 0.663 | Test Acc: 76.95% +2025-03-14 
16:12:15,418 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.390 | Acc: 85.94% +2025-03-14 16:12:21,907 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.500 | Acc: 83.41% +2025-03-14 16:12:28,443 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.498 | Acc: 83.41% +2025-03-14 16:12:35,175 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.494 | Acc: 83.63% +2025-03-14 16:12:45,840 - train - INFO - Epoch: 20 | Test Loss: 0.561 | Test Acc: 81.65% +2025-03-14 16:12:57,454 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.349 | Acc: 89.84% +2025-03-14 16:13:04,132 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.477 | Acc: 83.99% +2025-03-14 16:13:10,608 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.477 | Acc: 84.11% +2025-03-14 16:13:17,405 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.481 | Acc: 84.17% +2025-03-14 16:13:25,297 - train - INFO - Epoch: 21 | Test Loss: 0.593 | Test Acc: 80.85% +2025-03-14 16:13:25,591 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.453 | Acc: 82.81% +2025-03-14 16:13:32,327 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.466 | Acc: 84.52% +2025-03-14 16:13:39,185 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.471 | Acc: 84.26% +2025-03-14 16:13:45,668 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.478 | Acc: 84.05% +2025-03-14 16:13:53,441 - train - INFO - Epoch: 22 | Test Loss: 0.665 | Test Acc: 78.56% +2025-03-14 16:14:05,354 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.474 | Acc: 84.38% +2025-03-14 16:14:11,690 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.474 | Acc: 84.71% +2025-03-14 16:14:18,410 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.482 | Acc: 84.29% +2025-03-14 16:14:24,963 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.478 | Acc: 84.44% +2025-03-14 16:14:32,640 - train - INFO - Epoch: 23 | Test Loss: 1.058 | Test Acc: 68.37% +2025-03-14 16:14:32,934 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.378 | Acc: 89.06% +2025-03-14 16:14:39,551 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.477 | 
Acc: 84.41% +2025-03-14 16:14:46,168 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.481 | Acc: 84.37% +2025-03-14 16:14:52,732 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.479 | Acc: 84.30% +2025-03-14 16:15:00,623 - train - INFO - Epoch: 24 | Test Loss: 0.636 | Test Acc: 80.17% +2025-03-14 16:15:11,674 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.529 | Acc: 82.03% +2025-03-14 16:15:18,136 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.454 | Acc: 85.12% +2025-03-14 16:15:24,586 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.464 | Acc: 84.64% +2025-03-14 16:15:31,108 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.466 | Acc: 84.54% +2025-03-14 16:15:39,073 - train - INFO - Epoch: 25 | Test Loss: 0.572 | Test Acc: 81.28% +2025-03-14 16:15:39,328 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.496 | Acc: 82.03% +2025-03-14 16:15:45,847 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.468 | Acc: 84.57% +2025-03-14 16:15:52,738 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.469 | Acc: 84.65% +2025-03-14 16:15:59,415 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.466 | Acc: 84.73% +2025-03-14 16:16:06,894 - train - INFO - Epoch: 26 | Test Loss: 0.816 | Test Acc: 74.91% +2025-03-14 16:16:18,304 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.400 | Acc: 85.94% +2025-03-14 16:16:24,917 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.469 | Acc: 84.55% +2025-03-14 16:16:31,318 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.464 | Acc: 84.77% +2025-03-14 16:16:37,744 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.462 | Acc: 84.86% +2025-03-14 16:16:45,639 - train - INFO - Epoch: 27 | Test Loss: 0.561 | Test Acc: 80.96% +2025-03-14 16:16:45,909 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.450 | Acc: 82.81% +2025-03-14 16:16:52,939 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.464 | Acc: 84.66% +2025-03-14 16:16:59,702 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.466 | Acc: 84.61% +2025-03-14 16:17:06,087 - train - INFO - Epoch: 28 | 
Batch: 300 | Loss: 0.467 | Acc: 84.57% +2025-03-14 16:17:13,719 - train - INFO - Epoch: 28 | Test Loss: 0.692 | Test Acc: 78.32% +2025-03-14 16:17:25,054 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.428 | Acc: 83.59% +2025-03-14 16:17:31,720 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.457 | Acc: 84.75% +2025-03-14 16:17:38,193 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.463 | Acc: 84.71% +2025-03-14 16:17:44,756 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.465 | Acc: 84.69% +2025-03-14 16:17:52,619 - train - INFO - Epoch: 29 | Test Loss: 0.517 | Test Acc: 82.95% +2025-03-14 16:17:52,871 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.412 | Acc: 86.72% +2025-03-14 16:17:59,452 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.442 | Acc: 85.97% +2025-03-14 16:18:06,228 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.452 | Acc: 85.31% +2025-03-14 16:18:12,888 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.457 | Acc: 85.07% +2025-03-14 16:18:21,005 - train - INFO - Epoch: 30 | Test Loss: 0.656 | Test Acc: 78.35% +2025-03-14 16:18:33,022 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.579 | Acc: 82.81% +2025-03-14 16:18:39,739 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.443 | Acc: 85.60% +2025-03-14 16:18:46,476 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.452 | Acc: 85.29% +2025-03-14 16:18:52,888 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.458 | Acc: 85.02% +2025-03-14 16:19:00,889 - train - INFO - Epoch: 31 | Test Loss: 0.530 | Test Acc: 82.67% +2025-03-14 16:19:01,161 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.321 | Acc: 89.06% +2025-03-14 16:19:07,656 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.446 | Acc: 85.07% +2025-03-14 16:19:14,096 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.451 | Acc: 84.76% +2025-03-14 16:19:20,849 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 0.454 | Acc: 84.86% +2025-03-14 16:19:28,416 - train - INFO - Epoch: 32 | Test Loss: 0.834 | Test Acc: 73.96% +2025-03-14 16:19:40,125 - 
train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.497 | Acc: 82.03% +2025-03-14 16:19:47,217 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.447 | Acc: 85.14% +2025-03-14 16:19:53,869 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.460 | Acc: 84.78% +2025-03-14 16:20:00,742 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.451 | Acc: 85.11% +2025-03-14 16:20:09,000 - train - INFO - Epoch: 33 | Test Loss: 0.449 | Test Acc: 84.73% +2025-03-14 16:20:09,277 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.413 | Acc: 82.03% +2025-03-14 16:20:16,108 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.448 | Acc: 85.24% +2025-03-14 16:20:22,620 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.445 | Acc: 85.32% +2025-03-14 16:20:29,138 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.443 | Acc: 85.38% +2025-03-14 16:20:37,243 - train - INFO - Epoch: 34 | Test Loss: 0.531 | Test Acc: 82.30% +2025-03-14 16:20:49,210 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.354 | Acc: 87.50% +2025-03-14 16:20:55,814 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.432 | Acc: 85.93% +2025-03-14 16:21:02,450 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.439 | Acc: 85.55% +2025-03-14 16:21:08,928 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.442 | Acc: 85.40% +2025-03-14 16:21:17,192 - train - INFO - Epoch: 35 | Test Loss: 0.597 | Test Acc: 80.39% +2025-03-14 16:21:17,446 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.304 | Acc: 89.84% +2025-03-14 16:21:24,132 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.440 | Acc: 85.47% +2025-03-14 16:21:30,524 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.436 | Acc: 85.62% +2025-03-14 16:21:36,979 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.441 | Acc: 85.50% +2025-03-14 16:21:44,631 - train - INFO - Epoch: 36 | Test Loss: 0.642 | Test Acc: 78.95% +2025-03-14 16:21:56,350 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.366 | Acc: 86.72% +2025-03-14 16:22:02,994 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.435 | Acc: 85.64% 
+2025-03-14 16:22:09,586 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.440 | Acc: 85.30% +2025-03-14 16:22:16,323 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.444 | Acc: 85.23% +2025-03-14 16:22:24,686 - train - INFO - Epoch: 37 | Test Loss: 0.630 | Test Acc: 79.88% +2025-03-14 16:22:24,928 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.366 | Acc: 89.06% +2025-03-14 16:22:31,317 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.420 | Acc: 85.93% +2025-03-14 16:22:37,751 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.421 | Acc: 86.11% +2025-03-14 16:22:44,033 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.427 | Acc: 85.95% +2025-03-14 16:22:52,202 - train - INFO - Epoch: 38 | Test Loss: 0.530 | Test Acc: 82.57% +2025-03-14 16:23:03,115 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.402 | Acc: 85.94% +2025-03-14 16:23:09,617 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.437 | Acc: 85.83% +2025-03-14 16:23:16,188 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.434 | Acc: 85.81% +2025-03-14 16:23:22,787 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.440 | Acc: 85.63% +2025-03-14 16:23:30,722 - train - INFO - Epoch: 39 | Test Loss: 0.605 | Test Acc: 80.62% +2025-03-14 16:23:30,947 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.363 | Acc: 89.84% +2025-03-14 16:23:37,305 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.437 | Acc: 85.76% +2025-03-14 16:23:43,539 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.436 | Acc: 85.80% +2025-03-14 16:23:49,649 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.438 | Acc: 85.76% +2025-03-14 16:23:57,791 - train - INFO - Epoch: 40 | Test Loss: 0.522 | Test Acc: 82.90% +2025-03-14 16:24:10,073 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.347 | Acc: 87.50% +2025-03-14 16:24:16,616 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 0.431 | Acc: 85.68% +2025-03-14 16:24:23,037 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.438 | Acc: 85.39% +2025-03-14 16:24:29,562 - train - INFO - Epoch: 41 | Batch: 300 | 
Loss: 0.435 | Acc: 85.55% +2025-03-14 16:24:37,304 - train - INFO - Epoch: 41 | Test Loss: 0.576 | Test Acc: 80.45% +2025-03-14 16:24:37,526 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.319 | Acc: 91.41% +2025-03-14 16:24:44,058 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.426 | Acc: 86.07% +2025-03-14 16:24:50,478 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.428 | Acc: 85.86% +2025-03-14 16:24:56,812 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.436 | Acc: 85.59% +2025-03-14 16:25:04,435 - train - INFO - Epoch: 42 | Test Loss: 0.617 | Test Acc: 80.75% +2025-03-14 16:25:16,446 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.413 | Acc: 83.59% +2025-03-14 16:25:23,070 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.414 | Acc: 86.35% +2025-03-14 16:25:29,554 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.418 | Acc: 86.25% +2025-03-14 16:25:35,883 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.425 | Acc: 85.97% +2025-03-14 16:25:43,587 - train - INFO - Epoch: 43 | Test Loss: 0.571 | Test Acc: 80.94% +2025-03-14 16:25:43,825 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.422 | Acc: 85.16% +2025-03-14 16:25:50,820 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.430 | Acc: 85.84% +2025-03-14 16:25:57,866 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.435 | Acc: 85.65% +2025-03-14 16:26:04,343 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.435 | Acc: 85.64% +2025-03-14 16:26:11,962 - train - INFO - Epoch: 44 | Test Loss: 0.544 | Test Acc: 81.97% +2025-03-14 16:26:23,304 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.537 | Acc: 79.69% +2025-03-14 16:26:29,717 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.416 | Acc: 86.31% +2025-03-14 16:26:36,049 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.426 | Acc: 86.04% +2025-03-14 16:26:42,335 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 0.433 | Acc: 85.66% +2025-03-14 16:26:50,038 - train - INFO - Epoch: 45 | Test Loss: 0.509 | Test Acc: 82.87% +2025-03-14 16:26:50,309 - train - INFO - 
Epoch: 46 | Batch: 0 | Loss: 0.292 | Acc: 90.62% +2025-03-14 16:26:56,772 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.407 | Acc: 86.56% +2025-03-14 16:27:03,183 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.417 | Acc: 86.22% +2025-03-14 16:27:09,786 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.421 | Acc: 86.09% +2025-03-14 16:27:17,656 - train - INFO - Epoch: 46 | Test Loss: 0.557 | Test Acc: 81.43% +2025-03-14 16:27:28,839 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.359 | Acc: 89.06% +2025-03-14 16:27:35,181 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.414 | Acc: 86.39% +2025-03-14 16:27:41,630 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.427 | Acc: 85.87% +2025-03-14 16:27:47,853 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.430 | Acc: 85.79% +2025-03-14 16:27:56,187 - train - INFO - Epoch: 47 | Test Loss: 0.579 | Test Acc: 81.69% +2025-03-14 16:27:56,420 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.468 | Acc: 85.94% +2025-03-14 16:28:03,669 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.428 | Acc: 85.92% +2025-03-14 16:28:10,055 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.430 | Acc: 85.82% +2025-03-14 16:28:16,357 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.429 | Acc: 85.72% +2025-03-14 16:28:23,968 - train - INFO - Epoch: 48 | Test Loss: 0.595 | Test Acc: 80.28% +2025-03-14 16:28:35,744 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.294 | Acc: 89.06% +2025-03-14 16:28:42,339 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.401 | Acc: 86.66% +2025-03-14 16:28:48,791 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.411 | Acc: 86.20% +2025-03-14 16:28:55,258 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.418 | Acc: 86.02% +2025-03-14 16:29:02,924 - train - INFO - Epoch: 49 | Test Loss: 0.536 | Test Acc: 81.65% +2025-03-14 16:29:03,178 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 0.444 | Acc: 85.94% +2025-03-14 16:29:09,522 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 0.410 | Acc: 87.00% +2025-03-14 
"""
DenseNet in PyTorch.

See the details in the paper:
[1] Gao Huang, Zhuang Liu, Laurens van der Maaten, Kilian Q. Weinberger.
    Densely Connected Convolutional Networks
    https://arxiv.org/abs/1608.06993v5
"""
import torch
import torch.nn as nn
import math


class Bottleneck(nn.Module):
    """Bottleneck unit of a dense block.

    Here growth_rate plays the role of out_channels: it is the number of
    feature maps each unit contributes by itself. A 1x1 convolution first
    squeezes the input to 4 * growth_rate channels, then a 3x3 convolution
    reduces that to growth_rate channels; the result is concatenated with
    the input (dense connectivity).
    """

    # The 1x1 convolution conventionally uses 4x the growth rate.
    expansion = 4

    def __init__(self, in_channels, growth_rate):
        super(Bottleneck, self).__init__()
        squeezed = self.expansion * growth_rate
        self.features = nn.Sequential(
            nn.BatchNorm2d(in_channels),
            nn.ReLU(True),
            nn.Conv2d(in_channels, squeezed, kernel_size=1, bias=False),
            nn.BatchNorm2d(squeezed),
            nn.ReLU(True),
            nn.Conv2d(squeezed, growth_rate, kernel_size=3, padding=1, bias=False),
        )

    def forward(self, x):
        # Concatenate the freshly computed maps with the input along channels.
        return torch.cat([self.features(x), x], 1)


class Transition(nn.Module):
    """Dimension-changing transition layer: BN -> ReLU -> 1x1 Conv -> 2x2 AvgPool.

    The 1x1 convolution compresses the channel count (model compression),
    while the average pooling halves the spatial size of the feature maps.
    """

    def __init__(self, in_channels, out_channels):
        super(Transition, self).__init__()
        self.features = nn.Sequential(
            nn.BatchNorm2d(in_channels),
            nn.ReLU(True),
            nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False),
            nn.AvgPool2d(2),
        )

    def forward(self, x):
        return self.features(x)


class DenseNet(nn.Module):
    """DenseNet.

    The paper uses growth_rate = 12 and a compression factor theta
    (``reduction`` here) of 0.5, with kaiming_normal initialization.
    ``num_blocks`` gives the number of bottleneck units in each dense stage.
    Like ResNet this is a six-part network (one conv + four dense stages +
    average pooling) followed by a fully-connected classifier; the stem maps
    the 3 input channels to 2 * growth_rate.

    (3, 32, 32) -> [Conv2d] -> (24, 32, 32) -> [layer1] -> (48, 16, 16) -> [layer2]
    -> (96, 8, 8) -> [layer3] -> (192, 4, 4) -> [layer4] -> (384, 4, 4) -> [AvgPool]
    -> (384, 1, 1) -> [Linear] -> (10)
    """

    def __init__(self, num_blocks, growth_rate=12, reduction=0.5, num_classes=10, init_weights=True):
        super(DenseNet, self).__init__()
        self.growth_rate = growth_rate
        self.reduction = reduction

        channels = 2 * growth_rate

        # NOTE: submodules are registered in this exact order on purpose;
        # it fixes both modules() traversal and RNG consumption at init time.
        self.features = nn.Conv2d(3, channels, kernel_size=3, padding=1, bias=False)
        self.layer1, channels = self._make_dense_layer(channels, num_blocks[0])
        self.layer2, channels = self._make_dense_layer(channels, num_blocks[1])
        self.layer3, channels = self._make_dense_layer(channels, num_blocks[2])
        self.layer4, channels = self._make_dense_layer(channels, num_blocks[3], transition=False)
        self.avg_pool = nn.Sequential(
            nn.BatchNorm2d(channels),
            nn.ReLU(True),
            nn.AvgPool2d(4),
        )
        self.classifier = nn.Linear(channels, num_classes)

        if init_weights:
            self._initialize_weights()

    def _make_dense_layer(self, in_channels, nblock, transition=True):
        """Build one dense stage; return (stage, number of output channels).

        Each Bottleneck adds growth_rate channels; when ``transition`` is
        True the stage ends with a Transition layer that compresses the
        channel count by ``self.reduction``.
        """
        layers = []
        channels = in_channels
        for _ in range(nblock):
            layers.append(Bottleneck(channels, self.growth_rate))
            channels += self.growth_rate
        out_channels = channels
        if transition:
            out_channels = int(math.floor(channels * self.reduction))
            layers.append(Transition(channels, out_channels))
        return nn.Sequential(*layers), out_channels

    def _initialize_weights(self):
        """Kaiming init for convs, constant init for BN, small normal for FC."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.constant_(m.bias, 0)

    def forward(self, x):
        out = self.features(x)
        for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
            out = stage(out)
        out = self.avg_pool(out)
        out = out.view(out.size(0), -1)
        return self.classifier(out)


def DenseNet121():
    return DenseNet([6, 12, 24, 16], growth_rate=32)


def DenseNet169():
    return DenseNet([6, 12, 32, 32], growth_rate=32)


def DenseNet201():
    return DenseNet([6, 12, 48, 32], growth_rate=32)


def DenseNet161():
    return DenseNet([6, 12, 36, 24], growth_rate=48)


def densenet_cifar():
    return DenseNet([6, 12, 24, 16], growth_rate=12)


def test():
    """Smoke test: push a random CIFAR-sized input through densenet_cifar."""
    net = densenet_cifar()
    x = torch.randn(1, 3, 32, 32)
    y = net(x)
    print(y.size())
    from torchinfo import summary
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    net = net.to(device)
    summary(net, (1, 3, 32, 32))
Batch: 0 | Loss: 1.206 | Acc: 55.47% +2025-03-09 21:12:21,053 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.124 | Acc: 59.37% +2025-03-09 21:12:27,403 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.072 | Acc: 61.63% +2025-03-09 21:12:33,389 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.033 | Acc: 63.06% +2025-03-09 21:12:40,181 - train - INFO - Epoch: 2 | Test Loss: 1.210 | Test Acc: 61.82% +2025-03-09 21:12:49,405 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 0.909 | Acc: 71.09% +2025-03-09 21:12:56,088 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 0.830 | Acc: 70.80% +2025-03-09 21:13:03,091 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 0.805 | Acc: 71.64% +2025-03-09 21:13:09,972 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 0.787 | Acc: 72.33% +2025-03-09 21:13:17,000 - train - INFO - Epoch: 3 | Test Loss: 0.768 | Test Acc: 73.42% +2025-03-09 21:13:17,213 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 0.791 | Acc: 73.44% +2025-03-09 21:13:22,943 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 0.690 | Acc: 75.70% +2025-03-09 21:13:28,945 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 0.669 | Acc: 76.64% +2025-03-09 21:13:35,287 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 0.662 | Acc: 76.95% +2025-03-09 21:13:43,309 - train - INFO - Epoch: 4 | Test Loss: 0.685 | Test Acc: 76.06% +2025-03-09 21:13:54,007 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 0.594 | Acc: 79.69% +2025-03-09 21:14:00,201 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 0.594 | Acc: 79.16% +2025-03-09 21:14:06,592 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 0.595 | Acc: 79.42% +2025-03-09 21:14:13,084 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 0.594 | Acc: 79.52% +2025-03-09 21:14:20,411 - train - INFO - Epoch: 5 | Test Loss: 0.712 | Test Acc: 75.64% +2025-03-09 21:14:20,685 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 0.455 | Acc: 85.94% +2025-03-09 21:14:27,227 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 0.543 | Acc: 81.23% +2025-03-09 21:14:33,263 - train - INFO - Epoch: 6 
| Batch: 200 | Loss: 0.546 | Acc: 81.30% +2025-03-09 21:14:39,203 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 0.547 | Acc: 81.20% +2025-03-09 21:14:46,340 - train - INFO - Epoch: 6 | Test Loss: 0.847 | Test Acc: 73.10% +2025-03-09 21:14:56,240 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 0.486 | Acc: 80.47% +2025-03-09 21:15:03,070 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 0.527 | Acc: 81.69% +2025-03-09 21:15:09,779 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 0.526 | Acc: 81.86% +2025-03-09 21:15:15,807 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 0.526 | Acc: 81.90% +2025-03-09 21:15:22,494 - train - INFO - Epoch: 7 | Test Loss: 0.673 | Test Acc: 77.47% +2025-03-09 21:15:22,735 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 0.419 | Acc: 85.16% +2025-03-09 21:15:28,714 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.505 | Acc: 82.74% +2025-03-09 21:15:35,489 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.516 | Acc: 82.33% +2025-03-09 21:15:41,666 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.510 | Acc: 82.53% +2025-03-09 21:15:49,206 - train - INFO - Epoch: 8 | Test Loss: 0.662 | Test Acc: 77.04% +2025-03-09 21:15:59,574 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.538 | Acc: 80.47% +2025-03-09 21:16:06,194 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.488 | Acc: 83.35% +2025-03-09 21:16:12,155 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.490 | Acc: 83.31% +2025-03-09 21:16:18,332 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.495 | Acc: 83.14% +2025-03-09 21:16:25,285 - train - INFO - Epoch: 9 | Test Loss: 0.670 | Test Acc: 77.47% +2025-03-09 21:16:25,526 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.566 | Acc: 77.34% +2025-03-09 21:16:31,632 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.485 | Acc: 83.08% +2025-03-09 21:16:37,363 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.490 | Acc: 82.98% +2025-03-09 21:16:43,737 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.492 | Acc: 83.05% +2025-03-09 21:16:50,797 - train - INFO - 
Epoch: 10 | Test Loss: 0.664 | Test Acc: 77.87% +2025-03-09 21:17:00,747 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.431 | Acc: 85.94% +2025-03-09 21:17:06,577 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.477 | Acc: 83.89% +2025-03-09 21:17:12,684 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.480 | Acc: 83.71% +2025-03-09 21:17:18,559 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.480 | Acc: 83.62% +2025-03-09 21:17:25,751 - train - INFO - Epoch: 11 | Test Loss: 0.653 | Test Acc: 77.73% +2025-03-09 21:17:25,991 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.406 | Acc: 88.28% +2025-03-09 21:17:32,073 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.470 | Acc: 83.87% +2025-03-09 21:17:38,732 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.473 | Acc: 83.91% +2025-03-09 21:17:45,448 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.474 | Acc: 83.92% +2025-03-09 21:17:52,973 - train - INFO - Epoch: 12 | Test Loss: 0.609 | Test Acc: 79.12% +2025-03-09 21:18:02,605 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.588 | Acc: 82.03% +2025-03-09 21:18:09,036 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.446 | Acc: 84.57% +2025-03-09 21:18:15,491 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.460 | Acc: 84.19% +2025-03-09 21:18:21,601 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.456 | Acc: 84.31% +2025-03-09 21:18:28,561 - train - INFO - Epoch: 13 | Test Loss: 0.613 | Test Acc: 79.41% +2025-03-09 21:18:28,768 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.302 | Acc: 89.06% +2025-03-09 21:18:35,282 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.463 | Acc: 84.21% +2025-03-09 21:18:41,504 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.461 | Acc: 84.27% +2025-03-09 21:18:48,232 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.458 | Acc: 84.32% +2025-03-09 21:18:55,232 - train - INFO - Epoch: 14 | Test Loss: 0.518 | Test Acc: 81.97% +2025-03-09 21:19:04,117 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.396 | Acc: 85.94% +2025-03-09 
21:19:10,215 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 0.436 | Acc: 84.96% +2025-03-09 21:19:16,620 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.441 | Acc: 84.67% +2025-03-09 21:19:23,308 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.446 | Acc: 84.72% +2025-03-09 21:19:31,353 - train - INFO - Epoch: 15 | Test Loss: 0.614 | Test Acc: 79.11% +2025-03-09 21:19:31,609 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.452 | Acc: 85.16% +2025-03-09 21:19:37,681 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.438 | Acc: 84.92% +2025-03-09 21:19:43,368 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.446 | Acc: 84.64% +2025-03-09 21:19:49,273 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.444 | Acc: 84.83% +2025-03-09 21:19:57,522 - train - INFO - Epoch: 16 | Test Loss: 0.552 | Test Acc: 81.27% +2025-03-09 21:20:09,354 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.323 | Acc: 90.62% +2025-03-09 21:20:15,473 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.410 | Acc: 86.25% +2025-03-09 21:20:21,552 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.439 | Acc: 85.15% +2025-03-09 21:20:27,549 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.440 | Acc: 85.08% +2025-03-09 21:20:35,037 - train - INFO - Epoch: 17 | Test Loss: 0.573 | Test Acc: 81.27% +2025-03-09 21:20:35,286 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.416 | Acc: 85.94% +2025-03-09 21:20:41,226 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.404 | Acc: 86.05% +2025-03-09 21:20:47,415 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.418 | Acc: 85.49% +2025-03-09 21:20:53,316 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.424 | Acc: 85.37% +2025-03-09 21:21:00,935 - train - INFO - Epoch: 18 | Test Loss: 0.579 | Test Acc: 80.69% +2025-03-09 21:21:11,435 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.374 | Acc: 87.50% +2025-03-09 21:21:17,597 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.432 | Acc: 85.06% +2025-03-09 21:21:23,798 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.430 
| Acc: 85.17% +2025-03-09 21:21:29,790 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 0.433 | Acc: 84.92% +2025-03-09 21:21:38,374 - train - INFO - Epoch: 19 | Test Loss: 0.568 | Test Acc: 81.09% +2025-03-09 21:21:39,187 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.397 | Acc: 86.72% +2025-03-09 21:21:45,999 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.416 | Acc: 85.76% +2025-03-09 21:21:52,755 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.423 | Acc: 85.38% +2025-03-09 21:21:59,191 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.420 | Acc: 85.43% +2025-03-09 21:22:06,489 - train - INFO - Epoch: 20 | Test Loss: 0.549 | Test Acc: 81.92% +2025-03-09 21:22:16,199 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.380 | Acc: 89.06% +2025-03-09 21:22:22,500 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.403 | Acc: 86.45% +2025-03-09 21:22:29,352 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.408 | Acc: 86.16% +2025-03-09 21:22:35,751 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.413 | Acc: 85.82% +2025-03-09 21:22:42,795 - train - INFO - Epoch: 21 | Test Loss: 0.527 | Test Acc: 82.40% +2025-03-09 21:22:43,029 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.499 | Acc: 84.38% +2025-03-09 21:22:48,802 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.410 | Acc: 86.12% +2025-03-09 21:22:54,812 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.418 | Acc: 85.64% +2025-03-09 21:23:00,729 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.418 | Acc: 85.68% +2025-03-09 21:23:07,906 - train - INFO - Epoch: 22 | Test Loss: 0.902 | Test Acc: 73.40% +2025-03-09 21:23:17,770 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.453 | Acc: 85.16% +2025-03-09 21:23:23,789 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.410 | Acc: 86.04% +2025-03-09 21:23:29,984 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.413 | Acc: 85.85% +2025-03-09 21:23:36,210 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.407 | Acc: 86.07% +2025-03-09 21:23:43,584 - train - INFO - Epoch: 23 
| Test Loss: 0.629 | Test Acc: 79.57% +2025-03-09 21:23:43,818 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.474 | Acc: 83.59% +2025-03-09 21:23:49,856 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.393 | Acc: 86.41% +2025-03-09 21:23:55,816 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.399 | Acc: 86.19% +2025-03-09 21:24:01,461 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.399 | Acc: 86.22% +2025-03-09 21:24:08,313 - train - INFO - Epoch: 24 | Test Loss: 0.539 | Test Acc: 81.81% +2025-03-09 21:24:17,795 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.485 | Acc: 83.59% +2025-03-09 21:24:23,553 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.396 | Acc: 86.18% +2025-03-09 21:24:29,445 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.402 | Acc: 86.05% +2025-03-09 21:24:35,449 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.406 | Acc: 85.98% +2025-03-09 21:24:42,460 - train - INFO - Epoch: 25 | Test Loss: 0.539 | Test Acc: 82.08% +2025-03-09 21:24:42,688 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.492 | Acc: 78.91% +2025-03-09 21:24:48,661 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.391 | Acc: 86.51% +2025-03-09 21:24:54,754 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.395 | Acc: 86.36% +2025-03-09 21:25:00,772 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.398 | Acc: 86.21% +2025-03-09 21:25:07,465 - train - INFO - Epoch: 26 | Test Loss: 0.561 | Test Acc: 80.89% +2025-03-09 21:25:17,361 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.332 | Acc: 85.94% +2025-03-09 21:25:23,617 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.387 | Acc: 86.50% +2025-03-09 21:25:29,754 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.391 | Acc: 86.45% +2025-03-09 21:25:35,426 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.394 | Acc: 86.51% +2025-03-09 21:25:42,482 - train - INFO - Epoch: 27 | Test Loss: 0.636 | Test Acc: 80.20% +2025-03-09 21:25:42,739 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.327 | Acc: 89.84% +2025-03-09 21:25:48,984 - 
train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.376 | Acc: 86.90% +2025-03-09 21:25:55,729 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.386 | Acc: 86.62% +2025-03-09 21:26:02,430 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.392 | Acc: 86.50% +2025-03-09 21:26:09,534 - train - INFO - Epoch: 28 | Test Loss: 0.604 | Test Acc: 80.52% +2025-03-09 21:26:19,870 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.393 | Acc: 89.06% +2025-03-09 21:26:26,037 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.384 | Acc: 86.73% +2025-03-09 21:26:32,204 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.388 | Acc: 86.72% +2025-03-09 21:26:38,258 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.385 | Acc: 86.78% +2025-03-09 21:26:44,989 - train - INFO - Epoch: 29 | Test Loss: 0.581 | Test Acc: 80.79% +2025-03-09 21:26:45,232 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.339 | Acc: 88.28% +2025-03-09 21:26:51,032 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.371 | Acc: 87.30% +2025-03-09 21:26:56,876 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.376 | Acc: 87.18% +2025-03-09 21:27:02,463 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.380 | Acc: 87.05% +2025-03-09 21:27:09,305 - train - INFO - Epoch: 30 | Test Loss: 0.512 | Test Acc: 82.26% +2025-03-09 21:27:19,501 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.309 | Acc: 87.50% +2025-03-09 21:27:25,645 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.367 | Acc: 87.24% +2025-03-09 21:27:32,514 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.379 | Acc: 87.03% +2025-03-09 21:27:39,392 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.386 | Acc: 86.70% +2025-03-09 21:27:46,716 - train - INFO - Epoch: 31 | Test Loss: 0.715 | Test Acc: 77.60% +2025-03-09 21:27:46,951 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.417 | Acc: 87.50% +2025-03-09 21:27:53,142 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.370 | Acc: 87.15% +2025-03-09 21:27:59,233 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.380 | Acc: 86.96% 
+2025-03-09 21:28:05,461 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 0.383 | Acc: 86.79% +2025-03-09 21:28:12,214 - train - INFO - Epoch: 32 | Test Loss: 0.581 | Test Acc: 80.21% +2025-03-09 21:28:21,517 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.527 | Acc: 82.03% +2025-03-09 21:28:27,470 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.367 | Acc: 87.20% +2025-03-09 21:28:33,329 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.382 | Acc: 86.78% +2025-03-09 21:28:39,101 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.384 | Acc: 86.76% +2025-03-09 21:28:46,139 - train - INFO - Epoch: 33 | Test Loss: 0.525 | Test Acc: 82.67% +2025-03-09 21:28:46,389 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.350 | Acc: 89.06% +2025-03-09 21:28:52,622 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.350 | Acc: 87.94% +2025-03-09 21:28:58,609 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.361 | Acc: 87.54% +2025-03-09 21:29:04,578 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.371 | Acc: 87.25% +2025-03-09 21:29:11,580 - train - INFO - Epoch: 34 | Test Loss: 0.572 | Test Acc: 81.09% +2025-03-09 21:29:21,637 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.500 | Acc: 83.59% +2025-03-09 21:29:28,349 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.363 | Acc: 87.59% +2025-03-09 21:29:34,488 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.376 | Acc: 87.18% +2025-03-09 21:29:40,398 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.380 | Acc: 87.04% +2025-03-09 21:29:48,658 - train - INFO - Epoch: 35 | Test Loss: 0.455 | Test Acc: 84.92% +2025-03-09 21:29:48,921 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.357 | Acc: 89.06% +2025-03-09 21:29:55,061 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.364 | Acc: 87.31% +2025-03-09 21:30:00,890 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.371 | Acc: 87.18% +2025-03-09 21:30:06,897 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.375 | Acc: 87.03% +2025-03-09 21:30:14,235 - train - INFO - Epoch: 36 | Test Loss: 
0.541 | Test Acc: 82.74% +2025-03-09 21:30:24,234 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.431 | Acc: 84.38% +2025-03-09 21:30:29,986 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.356 | Acc: 87.55% +2025-03-09 21:30:36,139 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.370 | Acc: 87.21% +2025-03-09 21:30:42,107 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.371 | Acc: 87.20% +2025-03-09 21:30:49,263 - train - INFO - Epoch: 37 | Test Loss: 0.514 | Test Acc: 82.69% +2025-03-09 21:30:49,531 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.229 | Acc: 90.62% +2025-03-09 21:30:55,563 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.348 | Acc: 87.84% +2025-03-09 21:31:01,689 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.358 | Acc: 87.62% +2025-03-09 21:31:08,168 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.368 | Acc: 87.41% +2025-03-09 21:31:15,751 - train - INFO - Epoch: 38 | Test Loss: 0.497 | Test Acc: 83.43% +2025-03-09 21:31:25,606 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.285 | Acc: 92.97% +2025-03-09 21:31:31,509 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.361 | Acc: 87.68% +2025-03-09 21:31:37,804 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.362 | Acc: 87.66% +2025-03-09 21:31:44,177 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.362 | Acc: 87.68% +2025-03-09 21:31:50,975 - train - INFO - Epoch: 39 | Test Loss: 0.527 | Test Acc: 82.27% +2025-03-09 21:31:51,210 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.305 | Acc: 89.06% +2025-03-09 21:31:56,699 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.352 | Acc: 87.79% +2025-03-09 21:32:02,349 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.361 | Acc: 87.46% +2025-03-09 21:32:08,110 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.369 | Acc: 87.15% +2025-03-09 21:32:15,326 - train - INFO - Epoch: 40 | Test Loss: 0.653 | Test Acc: 78.55% +2025-03-09 21:32:25,211 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.522 | Acc: 82.03% +2025-03-09 21:32:30,421 - train - INFO - 
Epoch: 41 | Batch: 100 | Loss: 0.367 | Acc: 87.38% +2025-03-09 21:32:36,104 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.363 | Acc: 87.40% +2025-03-09 21:32:42,045 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.363 | Acc: 87.31% +2025-03-09 21:32:49,160 - train - INFO - Epoch: 41 | Test Loss: 0.510 | Test Acc: 82.85% +2025-03-09 21:32:49,411 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.325 | Acc: 87.50% +2025-03-09 21:32:55,294 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.354 | Acc: 87.75% +2025-03-09 21:33:00,458 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.364 | Acc: 87.59% +2025-03-09 21:33:06,072 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.363 | Acc: 87.61% +2025-03-09 21:33:12,986 - train - INFO - Epoch: 42 | Test Loss: 0.555 | Test Acc: 81.95% +2025-03-09 21:33:23,293 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.384 | Acc: 84.38% +2025-03-09 21:33:28,557 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.356 | Acc: 87.80% +2025-03-09 21:33:34,297 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.364 | Acc: 87.48% +2025-03-09 21:33:40,033 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.363 | Acc: 87.58% +2025-03-09 21:33:47,053 - train - INFO - Epoch: 43 | Test Loss: 0.467 | Test Acc: 84.13% +2025-03-09 21:33:47,325 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.285 | Acc: 89.84% +2025-03-09 21:33:53,575 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.343 | Acc: 88.06% +2025-03-09 21:33:59,731 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.350 | Acc: 87.93% +2025-03-09 21:34:05,556 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.355 | Acc: 87.73% +2025-03-09 21:34:12,656 - train - INFO - Epoch: 44 | Test Loss: 0.767 | Test Acc: 74.93% +2025-03-09 21:34:23,508 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.393 | Acc: 83.59% +2025-03-09 21:34:29,421 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.347 | Acc: 87.89% +2025-03-09 21:34:35,133 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.342 | Acc: 88.16% +2025-03-09 
21:34:40,818 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 0.350 | Acc: 87.92% +2025-03-09 21:34:47,264 - train - INFO - Epoch: 45 | Test Loss: 0.480 | Test Acc: 83.56% +2025-03-09 21:34:47,512 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.302 | Acc: 89.06% +2025-03-09 21:34:53,220 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.342 | Acc: 88.10% +2025-03-09 21:34:59,094 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.347 | Acc: 88.02% +2025-03-09 21:35:05,396 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.348 | Acc: 88.04% +2025-03-09 21:35:11,923 - train - INFO - Epoch: 46 | Test Loss: 0.448 | Test Acc: 84.89% +2025-03-09 21:35:22,070 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.401 | Acc: 86.72% +2025-03-09 21:35:28,150 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.347 | Acc: 87.89% +2025-03-09 21:35:34,314 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.354 | Acc: 87.75% +2025-03-09 21:35:40,069 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.357 | Acc: 87.63% +2025-03-09 21:35:46,827 - train - INFO - Epoch: 47 | Test Loss: 0.508 | Test Acc: 82.58% +2025-03-09 21:35:47,066 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.263 | Acc: 91.41% +2025-03-09 21:35:53,455 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.330 | Acc: 88.49% +2025-03-09 21:35:59,739 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.336 | Acc: 88.37% +2025-03-09 21:36:05,768 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.340 | Acc: 88.33% +2025-03-09 21:36:12,946 - train - INFO - Epoch: 48 | Test Loss: 0.432 | Test Acc: 84.88% +2025-03-09 21:36:22,692 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.347 | Acc: 89.06% +2025-03-09 21:36:28,875 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.335 | Acc: 88.68% +2025-03-09 21:36:34,927 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.336 | Acc: 88.46% +2025-03-09 21:36:40,251 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.342 | Acc: 88.42% +2025-03-09 21:36:47,402 - train - INFO - Epoch: 49 | Test Loss: 0.419 | Test 
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from utils.dataset_utils import get_cifar10_dataloaders
from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor
from utils.parse_args import parse_args
from model import DenseNet, densenet_cifar


def main():
    """Entry point: train DenseNet on CIFAR-10 in the mode given by the CLI.

    train_type '0' = plain training, '1' = training with data augmentation,
    '2' = backdoor (poisoned) training.
    """
    # Parse the command-line arguments.
    args = parse_args()

    # The small CIFAR variant of DenseNet is used in every mode.
    model = densenet_cifar()

    # Hoist the device string; every branch trains on the same GPU.
    device = f'cuda:{args.gpu}'

    if args.train_type == '0':
        # Plain training. NOTE(review): epochs/lr are hard-coded here (50/0.1)
        # while mode '1' takes them from the CLI — confirm this is intended.
        trainloader, testloader = get_cifar10_dataloaders(
            batch_size=args.batch_size,
            local_dataset_path=args.dataset_path,
        )
        train_model(
            model=model,
            trainloader=trainloader,
            testloader=testloader,
            epochs=50,
            lr=0.1,
            device=device,
            save_dir='../model',
            model_name='densenet',
            save_type='0',
            layer_name='avg_pool',
            interval=2,
        )
    elif args.train_type == '1':
        # Training with data augmentation; epochs/lr come from the CLI here.
        train_model_data_augmentation(
            model,
            epochs=args.epochs,
            lr=args.lr,
            device=device,
            save_dir='../model',
            model_name='densenet',
            batch_size=args.batch_size,
            num_workers=args.num_workers,
            local_dataset_path=args.dataset_path,
        )
    elif args.train_type == '2':
        # Backdoor training with a poisoned fraction of the training set.
        train_model_backdoor(
            model,
            poison_ratio=args.poison_ratio,
            target_label=args.target_label,
            epochs=50,
            lr=0.1,
            device=device,
            save_dir='../model',
            model_name='densenet',
            batch_size=args.batch_size,
            num_workers=args.num_workers,
            local_dataset_path=args.dataset_path,
            layer_name='avg_pool',
            interval=2,
        )


if __name__ == '__main__':
    main()
0000000000000000000000000000000000000000..a87bf2a5df1a989e29f6ebca9feb10ddd8b7a1b6 --- /dev/null +++ b/Image/DenseNet/model/0/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4c57fbd56a8929bc5c5ca0e25be790e0196d3df614057c4f67c31a8dfb7111c9 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch11/embeddings.npy b/Image/DenseNet/model/0/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..31f184e65b0d5a44191c13614af6665fcc2971e7 --- /dev/null +++ b/Image/DenseNet/model/0/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a49d29e4b483cfccca6875e8f0f7355cb0cbffe02118a38ca5c185749f128d30 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch11/subject_model.pth b/Image/DenseNet/model/0/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..38510c7c58c9ac588a394bcb8c3a78e7f990b800 --- /dev/null +++ b/Image/DenseNet/model/0/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:777b3f63182eb4f0802b8ec296373831ed4a48a37028d2538812606b19406cea +size 4375506 diff --git a/Image/DenseNet/model/0/epoch12/embeddings.npy b/Image/DenseNet/model/0/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9053f35141b1f2e82719ba51886e5284ea9b4a65 --- /dev/null +++ b/Image/DenseNet/model/0/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db57cc1b5d4996a14f5a8b76a9ca4193d46abf172d921b4b820d9c4ff5744056 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch12/subject_model.pth b/Image/DenseNet/model/0/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f3cede0a7bd545b8e81086e92d3ff1ec56572e4d --- /dev/null +++ b/Image/DenseNet/model/0/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:eed5a2e4c794384ac3f7d526d76e61a1a1a83a87547f399710a8bd0ec3cae335 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch13/embeddings.npy b/Image/DenseNet/model/0/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..cb999861bd98781070d6d13a3e98fd51980ffe52 --- /dev/null +++ b/Image/DenseNet/model/0/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77b0aab8ee2296529b6235020e66e277c12cd122c2c837f26a15064a209ca612 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch13/subject_model.pth b/Image/DenseNet/model/0/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1ab2fed61a1624077268dca5216b8715707198a9 --- /dev/null +++ b/Image/DenseNet/model/0/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:901fd8b9b0d3e5437a87bf6b52ca37ffb86ae9174ad25c8d3396957b2cb9902d +size 4375506 diff --git a/Image/DenseNet/model/0/epoch14/embeddings.npy b/Image/DenseNet/model/0/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4317c1f9da1433c37452632d43be3b2fb890e38b --- /dev/null +++ b/Image/DenseNet/model/0/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1f7cc55878ac0b1c097538c2f4b1c58f0d4f72f0f684fc9e4a376ee8721b74e2 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch14/subject_model.pth b/Image/DenseNet/model/0/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9e92554c02ca6b0268ab258da73ef0e3727e6bd2 --- /dev/null +++ b/Image/DenseNet/model/0/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4031a88d1e8e5c849d72bfc1738ac4ff4390dba6865f087e8000fe8544aa7f43 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch15/embeddings.npy b/Image/DenseNet/model/0/epoch15/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..97dc3139b9302b183957df1fb7e716078b76f9cd --- /dev/null +++ b/Image/DenseNet/model/0/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:beb576cd29f58ef4bd13b4b98c69bc2bc12d82d6b9589052ce602a7fa6e61f0e +size 76800128 diff --git a/Image/DenseNet/model/0/epoch15/subject_model.pth b/Image/DenseNet/model/0/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ec55770061f818208fc3e60cee501f5483be049c --- /dev/null +++ b/Image/DenseNet/model/0/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2b762be6056f98f861b597d0283e0b5c62210c77276b27e8fa5cc689583472f6 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch16/embeddings.npy b/Image/DenseNet/model/0/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..04d9b16394cd2709cfab2a474adf6066b5fbd414 --- /dev/null +++ b/Image/DenseNet/model/0/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:adec9f216018a0b302e5dcd96dd76a84d8567dfc305486d8b6188b1dd57682ea +size 76800128 diff --git a/Image/DenseNet/model/0/epoch16/subject_model.pth b/Image/DenseNet/model/0/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..26ac531c91eefdca43874293bc4633663db18382 --- /dev/null +++ b/Image/DenseNet/model/0/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0c454bd635df2c59e72a3f9e929e408b6c1ecefefe8d512b4dcf9674d3215147 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch17/embeddings.npy b/Image/DenseNet/model/0/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ce77e9f6259229230bf1c09895c153d3e27475eb --- /dev/null +++ b/Image/DenseNet/model/0/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:acdf2f9e8cb08d3f9a9b7c075b84b4d3487a56a4e9023b738a4c2cad296ce60b +size 76800128 diff --git a/Image/DenseNet/model/0/epoch17/subject_model.pth b/Image/DenseNet/model/0/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2a4b2b6f3a9dcba129450386806f2999c4c9a828 --- /dev/null +++ b/Image/DenseNet/model/0/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b60e4d0b8aa48151a33feade60bece408be71ae166a580d4b7cd95fd650239d4 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch18/embeddings.npy b/Image/DenseNet/model/0/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f389e700c58fcdc06b6a71b697e49a9e250a8678 --- /dev/null +++ b/Image/DenseNet/model/0/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de1127c7c9f481f2b25022db2178a4b28b43cea504bf11bd331f967b88ef93b2 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch18/subject_model.pth b/Image/DenseNet/model/0/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..efbee7a5d85ecdb015994328efc4114de8de8fbd --- /dev/null +++ b/Image/DenseNet/model/0/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76b5c8cd68c7e4883e0a2e38b6c6330bcd07c0aa6ea0e101d819aaf114494d26 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch19/embeddings.npy b/Image/DenseNet/model/0/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b5a19e16669ff129a3c0cfa2a5fcdbc7928a20af --- /dev/null +++ b/Image/DenseNet/model/0/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:806533379b7378b51e3e7e7362dbcb5df8db19faab2c8c7ebea2c9dff9646425 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch19/subject_model.pth b/Image/DenseNet/model/0/epoch19/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..8c6b05f06eb389d0340a90a4a92bd80c2260cca4 --- /dev/null +++ b/Image/DenseNet/model/0/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a3df2b268f475760744ebcf3c9b2297859e8e8dee234471ef58800ba9f912f3f +size 4375506 diff --git a/Image/DenseNet/model/0/epoch2/embeddings.npy b/Image/DenseNet/model/0/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..054ea867c5e26418665218d2da65d4d65e9aa91f --- /dev/null +++ b/Image/DenseNet/model/0/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f78b562b4c8bbfae9d65380631bf68f4969f444f4176517b54504eb3a53d992b +size 76800128 diff --git a/Image/DenseNet/model/0/epoch2/subject_model.pth b/Image/DenseNet/model/0/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7cfd553dad8c6537afeffe5772eb1e20c30005fb --- /dev/null +++ b/Image/DenseNet/model/0/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f4b7ea7bf0cd700c99bb483a03084ddfd6ff88836a8f1ec749bfc8797037c8c4 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch20/embeddings.npy b/Image/DenseNet/model/0/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a306c524643f32d2588999fe204bea280635202e --- /dev/null +++ b/Image/DenseNet/model/0/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67343cd6f9a86b2b587ddd8dc838eb187aa679f837952b7ea4d2564f7139f4dd +size 76800128 diff --git a/Image/DenseNet/model/0/epoch20/subject_model.pth b/Image/DenseNet/model/0/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6fd5843f81ca73f03748c97b8ad95e5c76853024 --- /dev/null +++ b/Image/DenseNet/model/0/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:a806a09c8635d31580c0ce8dcaebf53342cadcf6ac44de1a82435b6f488b9e94 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch21/embeddings.npy b/Image/DenseNet/model/0/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b9f849118ed70080b326ae918d2a228488436a62 --- /dev/null +++ b/Image/DenseNet/model/0/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:567ee8b0ee891388ecb362b53d01476c83311362742a92322b5ed211b9118bd4 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch21/subject_model.pth b/Image/DenseNet/model/0/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1f5faee407c0a810582aa81ac7de4e106ec34809 --- /dev/null +++ b/Image/DenseNet/model/0/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af1bec9ab23d94b42ed77a2814a6893fb703bf22af59498f80be6a41b028da21 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch22/embeddings.npy b/Image/DenseNet/model/0/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c133c14c8d64cfa187f79a5e46d7675bf362b116 --- /dev/null +++ b/Image/DenseNet/model/0/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ca4684f171290f572c0b5f877fdf4b3dafc66985c0bc2770a3fb3d16fd20933 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch22/subject_model.pth b/Image/DenseNet/model/0/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..879833f155d8f9fc1ede8e44af61e48173aa2b17 --- /dev/null +++ b/Image/DenseNet/model/0/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:35cb971a7addb72561f3efb889aaf5827419262b4a260e39662d6a41c51a5f81 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch23/embeddings.npy b/Image/DenseNet/model/0/epoch23/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..4e26de9b371b548eb715e4157e60e7500339c312 --- /dev/null +++ b/Image/DenseNet/model/0/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:172e4da5afe5d41614777a2642150b960141b6995e977b13b82493f209b28ed5 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch23/subject_model.pth b/Image/DenseNet/model/0/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..59ee34c52965ce90844c18085ad776374a53bcf8 --- /dev/null +++ b/Image/DenseNet/model/0/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ae2adf7aa65ef9ec65e4a0a395039301cf5c8fbfd5c2f58e0e44011fdf97690c +size 4375506 diff --git a/Image/DenseNet/model/0/epoch24/embeddings.npy b/Image/DenseNet/model/0/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0e9d2568f9fe9a7fb9ddf6784b125862bdd85d2b --- /dev/null +++ b/Image/DenseNet/model/0/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7e2e9fbd2c99476d802fd54b55cf5cd8c1d23afc9f04ecf04bf9273516344e1 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch24/subject_model.pth b/Image/DenseNet/model/0/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5d4fb82700ff9752b0f00a7fe8a8813de6653c70 --- /dev/null +++ b/Image/DenseNet/model/0/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fba21c02518033f6e0967c5fb8e815530d69851002b58f8086a00effe2a96010 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch3/embeddings.npy b/Image/DenseNet/model/0/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a8f670503f4a23514587ef10c040208e567abfb4 --- /dev/null +++ b/Image/DenseNet/model/0/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b3b795c2b12ff6870d852a5c4f45dedc347143fdcd4bb02fca093d58e9ef0e88 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch3/subject_model.pth b/Image/DenseNet/model/0/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..33f272efff0667b1a92a669e03e90aeb58b08f79 --- /dev/null +++ b/Image/DenseNet/model/0/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:81aa8fc8bdb42c72733e3fafa1c8cf15b2d19aa1e4dceb69b92c67eb9158b756 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch4/embeddings.npy b/Image/DenseNet/model/0/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..45d137f72022b3d9824e4346cc4460a853cf4e8b --- /dev/null +++ b/Image/DenseNet/model/0/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:28158aa04f0645a3c9e39d2d5441c16469dc57aaa45e41e25800ef8fa17cb7da +size 76800128 diff --git a/Image/DenseNet/model/0/epoch4/subject_model.pth b/Image/DenseNet/model/0/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..da3e5c009006ea062c177f8192d95e6835b6bcd9 --- /dev/null +++ b/Image/DenseNet/model/0/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fc77e60989140f867f892a5bbaeea8943b459e8d3ef21c6ef034c33dcd97370 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch5/embeddings.npy b/Image/DenseNet/model/0/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e44e6d624826c5c306b503ebf55a97e7890ab3b4 --- /dev/null +++ b/Image/DenseNet/model/0/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:37e3c63f7f0f9f40d7b4251b9cb300d35fc8be0bf21315add74b6891cafd4455 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch5/subject_model.pth b/Image/DenseNet/model/0/epoch5/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..d7bc25dcc8fbdcc70e96aed1722c75a2b35e200c --- /dev/null +++ b/Image/DenseNet/model/0/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b571d5b079729ece36fcc157c525b53bd69f429782a7f2f3049a7f97ab2c4cd8 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch6/embeddings.npy b/Image/DenseNet/model/0/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..bae816452f3165cd60f6d10583c3b0c7e78dd3fa --- /dev/null +++ b/Image/DenseNet/model/0/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e25321a32085305b6550c9d63a68fcc330f11fc4ff2df9dd35d7c4f8a788b4d +size 76800128 diff --git a/Image/DenseNet/model/0/epoch6/subject_model.pth b/Image/DenseNet/model/0/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2b55394bf5a4da6c746f07727c8c3ec798124ddd --- /dev/null +++ b/Image/DenseNet/model/0/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f6491b839ed9b451885f7e8f869865d6ba02fa2693f5d75c9f54ef480355e39 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch7/embeddings.npy b/Image/DenseNet/model/0/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..de9ab17e580bf5ff8d782107e02cf6c69e3633f6 --- /dev/null +++ b/Image/DenseNet/model/0/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb9e0c83c09fece17aeb6be9bd81cbeecf078e4af2f717ea2ba13261d5847fea +size 76800128 diff --git a/Image/DenseNet/model/0/epoch7/subject_model.pth b/Image/DenseNet/model/0/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ee60aecbdd227ef4344b5d9950e66ad0e303418c --- /dev/null +++ b/Image/DenseNet/model/0/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:04575196bd3cffc68880cf6d9913aef14ed7e3e275d831f8d2650dbde3381ab7 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch8/embeddings.npy b/Image/DenseNet/model/0/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3019e7e86ca4eaee5ef95b0f0bde9a36dd0d4e7f --- /dev/null +++ b/Image/DenseNet/model/0/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0945b24257dd452008d648ca1a1ce1f8b1e3e146eea6e1c52023cff7fb78f350 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch8/subject_model.pth b/Image/DenseNet/model/0/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1ecc8edd7d16c79a0674c62ab0d6e9df99db1c8c --- /dev/null +++ b/Image/DenseNet/model/0/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8785944e7b442250449de752bc900237832c23d1fe56d01d945f5dbb9f9fa208 +size 4375506 diff --git a/Image/DenseNet/model/0/epoch9/embeddings.npy b/Image/DenseNet/model/0/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..fe9c824e334706a8210afcc92c97f7c4788c952d --- /dev/null +++ b/Image/DenseNet/model/0/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0d66b3537320a58741f1dfaf1c33483c4a61c3949407694d677663215b914e5 +size 76800128 diff --git a/Image/DenseNet/model/0/epoch9/subject_model.pth b/Image/DenseNet/model/0/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7dd2730c98c6e6234e0c8a194735c7e5d8e7cd85 --- /dev/null +++ b/Image/DenseNet/model/0/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e98bd530bc9d1fc2025b92d60851bfdb29d8f6c2c1daa7ff2c61145d26a75b04 +size 4375506 diff --git a/Image/DenseNet/model/0/layer_info.json b/Image/DenseNet/model/0/layer_info.json new file mode 100644 index 
0000000000000000000000000000000000000000..db0956d5a303db669b1b7c51c4307779dbb6a3d6 --- /dev/null +++ b/Image/DenseNet/model/0/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avg_pool", "dim": 384} \ No newline at end of file diff --git a/Image/DenseNet/model/2/epoch1/embeddings.npy b/Image/DenseNet/model/2/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9c8c04a618c4539ab73f23a94d859065f0d9bc43 --- /dev/null +++ b/Image/DenseNet/model/2/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42e393dc4f40bdfa90caa23185ebeb9ca2077230ac4de6edb7ff2a66c61c9d22 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch1/subject_model.pth b/Image/DenseNet/model/2/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0316bc9750bc0b5a50191cb6430d663dc2bfbf2f --- /dev/null +++ b/Image/DenseNet/model/2/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0aaf4b1c59b70a5fb5ea591bdbd1367775d74eb563208241db448682e37b7a3 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch10/embeddings.npy b/Image/DenseNet/model/2/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3d40103c20f320ae1ec6017b3a39208dc62ed0fb --- /dev/null +++ b/Image/DenseNet/model/2/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a65016ef5bed7d8239f1d6e9ce72985c13494bb29c1c2f67b1730a32a00b1a1c +size 76800128 diff --git a/Image/DenseNet/model/2/epoch10/subject_model.pth b/Image/DenseNet/model/2/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..14d52a642be7a2b9ac8ae90d0ddfe6e8a23a6d15 --- /dev/null +++ b/Image/DenseNet/model/2/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:193035ee592d96d735ffe34635082accd69ffcf591dcb04fab56d90d58751b17 +size 4375506 diff --git 
a/Image/DenseNet/model/2/epoch11/embeddings.npy b/Image/DenseNet/model/2/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..10794a9640b1deac1f51e6017c5445a23e3f96bf --- /dev/null +++ b/Image/DenseNet/model/2/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:57565b3aa866a52b303a8ac2ccad810a4cbbb0ab3fe04e129089ee3c1a2c8653 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch11/subject_model.pth b/Image/DenseNet/model/2/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4658727c060fd4564d018222958896adf235cac6 --- /dev/null +++ b/Image/DenseNet/model/2/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:893743948dcbbcaadc1934407c1e639200942fd8fcc646bf0f1ff25f4a7d0e69 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch12/embeddings.npy b/Image/DenseNet/model/2/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f634c9b9519dbc57a1d940b36481344563286118 --- /dev/null +++ b/Image/DenseNet/model/2/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cbc3d534364a4d9e68a530fa2409959d07eb0a3b2b06a919f83e6de87eb8912a +size 76800128 diff --git a/Image/DenseNet/model/2/epoch12/subject_model.pth b/Image/DenseNet/model/2/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..75bcdb5b07c1905adf067d4234ad560b26f37046 --- /dev/null +++ b/Image/DenseNet/model/2/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d29523dc09e1bf9ffddd84c20e2056457ec6fc1171a9a8d8ca53abfdfbb92c57 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch13/embeddings.npy b/Image/DenseNet/model/2/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..07f3b919a0724f58ab73119a075de67d0c3a4e9e --- /dev/null +++ 
b/Image/DenseNet/model/2/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e64dc3aa5cc695e081433d6399ed3828384cb8af47c742eec04d6cfff0c23d8 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch13/subject_model.pth b/Image/DenseNet/model/2/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..720d639927ed43fc03ed398cfbba0edcb04014ad --- /dev/null +++ b/Image/DenseNet/model/2/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c4ed0355ff552997e96d23bd7520580f75443b05e96618f967d4f919c2ac3619 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch14/embeddings.npy b/Image/DenseNet/model/2/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d78800e9e9cdf0d5bfedf3a3db757eb425a6d8b7 --- /dev/null +++ b/Image/DenseNet/model/2/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a45ae131b2933e006d6c3a9f79135d72fc8dd748dcc611f3c5734acf652bee54 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch14/subject_model.pth b/Image/DenseNet/model/2/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f98eb8d096b36f5bf0d538d6a38a9767fcf17197 --- /dev/null +++ b/Image/DenseNet/model/2/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:89cde4cc8ad72eea558a882883156170c114d7c6b0f3aaa1c153d1f9ebff58f7 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch15/embeddings.npy b/Image/DenseNet/model/2/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..eda8a9951ebe687faf979bb0cd5272031b95a7f5 --- /dev/null +++ b/Image/DenseNet/model/2/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f9645ce0ab6123825e75d13228bb10eb6ca88ab7165c759e27c106d8f48e0527 +size 76800128 diff --git 
a/Image/DenseNet/model/2/epoch15/subject_model.pth b/Image/DenseNet/model/2/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..db47caf520f18a7774801b9ca6cce4ad231f4b48 --- /dev/null +++ b/Image/DenseNet/model/2/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ae7ee0512f380a6105c5c4ecb59c9e728ccfc511efcc9bd8e84e317d0d7d309d +size 4375506 diff --git a/Image/DenseNet/model/2/epoch16/embeddings.npy b/Image/DenseNet/model/2/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..007e95d85c7ba18219a8174452f1afabe3ed30fe --- /dev/null +++ b/Image/DenseNet/model/2/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:174430c068ee9ae19edf0d0056d79fe745a2f1c988a34e741ad660094b6d4bff +size 76800128 diff --git a/Image/DenseNet/model/2/epoch16/subject_model.pth b/Image/DenseNet/model/2/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..de04fec3c2b5a662e2632bfcbf2d0cd709d2d01f --- /dev/null +++ b/Image/DenseNet/model/2/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3df31c97e88ac891f54df1b1a15192a9fb524f527fb7d21424c6b7f045e8725 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch17/embeddings.npy b/Image/DenseNet/model/2/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b37b77476c6266be2e6f17fdd6f939407cc88c94 --- /dev/null +++ b/Image/DenseNet/model/2/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c13de990f4bd7ee2c1e9469c335df1a1d391304e277ba34bb701af4ebbe48292 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch17/subject_model.pth b/Image/DenseNet/model/2/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..fd096d7293133effa7feccd1ad99fb7e2089f0fc --- /dev/null +++ 
b/Image/DenseNet/model/2/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f9d43f2c9b461e59af77c6349522a143123c931b30ac3417ebaf991d9cd733fe +size 4375506 diff --git a/Image/DenseNet/model/2/epoch18/embeddings.npy b/Image/DenseNet/model/2/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..246cf46396d381c3ae33a2a7c7c71921b6ad08c9 --- /dev/null +++ b/Image/DenseNet/model/2/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bb445206afb6afc32bd8ba6b4ed15e2e89a730cce3757a01b4a7ec1f8c2579a9 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch18/subject_model.pth b/Image/DenseNet/model/2/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0db5ca0f7c1ca2683768be84867496d363997d39 --- /dev/null +++ b/Image/DenseNet/model/2/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59a29f6cbcc3fbc087e2e1b3c65ce2c0443976981533dd60d047039f3f0fff62 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch19/embeddings.npy b/Image/DenseNet/model/2/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3a0c3390bdd99c5864931aa7863a677c771db7bd --- /dev/null +++ b/Image/DenseNet/model/2/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7777105e4bbe4d90f6dbdb183280c09f5832fd9e7fc5210aab1dbbf8cf06fb0 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch19/subject_model.pth b/Image/DenseNet/model/2/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..fe66aa228b8a181ebb24bd556f468e5d8bf92186 --- /dev/null +++ b/Image/DenseNet/model/2/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9f79e980a1fdfd928a0f76524a6d36a66747636cc367093e9c557e1f54f9a9a +size 4375506 diff --git 
a/Image/DenseNet/model/2/epoch2/embeddings.npy b/Image/DenseNet/model/2/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..774d34477ea3e0a008d75183c3e8c9e78c7f02f2 --- /dev/null +++ b/Image/DenseNet/model/2/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9cfcd24c1cd20c3959191f9fbeb4ac9a87128e15630eecfc7eace479ca1be979 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch2/subject_model.pth b/Image/DenseNet/model/2/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..649a66f0af015bf5e5207ad37c7a0165260695d1 --- /dev/null +++ b/Image/DenseNet/model/2/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa8d4b86eca5a1b9586684de053b8344ed1a8911599eccf1b6c971db7c0081e2 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch20/embeddings.npy b/Image/DenseNet/model/2/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..7a070c4c5db0bad9e574dc00444ff98664f294e3 --- /dev/null +++ b/Image/DenseNet/model/2/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b3f5cda8b5e6ce9ff4cdd964f22a62ba19b606ff2c795d882d02c31641a075ce +size 76800128 diff --git a/Image/DenseNet/model/2/epoch20/subject_model.pth b/Image/DenseNet/model/2/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ddcba8cab6ff52c723a086444fa12318aee18863 --- /dev/null +++ b/Image/DenseNet/model/2/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1b13bbb430843fb9b765b95a9ab5714beeffa2288c8d1125c92bd50dd6f032a5 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch21/embeddings.npy b/Image/DenseNet/model/2/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..30202fc5c22561ce5cf6ff7a1c3bc85abe1353a8 --- /dev/null +++ 
b/Image/DenseNet/model/2/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a6ec1954294bd7fa7aece1bf7658a3310254bcb519f2cbfd57e2fbf8fb8cfebd +size 76800128 diff --git a/Image/DenseNet/model/2/epoch21/subject_model.pth b/Image/DenseNet/model/2/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..681d28d7a8b87cccdb6bf06a7ce16ec1496189f9 --- /dev/null +++ b/Image/DenseNet/model/2/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8943ca5c39e2b8c89511ac2db53e0f2f1031d8965cecbab6412f2f9e409ca585 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch22/embeddings.npy b/Image/DenseNet/model/2/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4ef17457dc60cebd035d4ff4b043fee15bfb9e1b --- /dev/null +++ b/Image/DenseNet/model/2/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c6a1fc7c802b9b59c6ddfa985064d06ca355ca9e29409f2889e5d2613b4b9d82 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch22/subject_model.pth b/Image/DenseNet/model/2/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..47a09a660a4587f8b203be4672e30695f6a3e741 --- /dev/null +++ b/Image/DenseNet/model/2/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3ccc9bb69599d46d38fbcbe76c28dd2f2335564e479ca3c909dc00ffd9e4d1a +size 4375506 diff --git a/Image/DenseNet/model/2/epoch23/embeddings.npy b/Image/DenseNet/model/2/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9bc978290dfe83e6b3e55bc0c1cbfcdffb34697a --- /dev/null +++ b/Image/DenseNet/model/2/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5313344391b0eb7f34f05d6c8edb43bdc22407c88ba74e45731f484389d04f63 +size 76800128 diff --git 
a/Image/DenseNet/model/2/epoch23/subject_model.pth b/Image/DenseNet/model/2/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f5ef5d7edbab29a92cb239f57c56bed58d115dd1 --- /dev/null +++ b/Image/DenseNet/model/2/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:399c9b1b5a15aab83a2cdcd6ea9257bf3eee9da239f84d6e20b33c55460856da +size 4375506 diff --git a/Image/DenseNet/model/2/epoch24/embeddings.npy b/Image/DenseNet/model/2/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ea1abe82294cd94dd7ed0c103a3455c535c773c1 --- /dev/null +++ b/Image/DenseNet/model/2/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:75eda09f60012d799a2289dd0c8940a9e32ed4cd07c4fa7f97f03a3d205e6a73 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch24/subject_model.pth b/Image/DenseNet/model/2/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6f0671e1cc6586fe19ef7660ba5f3cbbae2a0a75 --- /dev/null +++ b/Image/DenseNet/model/2/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d40811218f9e385198ac8635b42249775b2fdb4283b30ec03f3c293876bc1b7 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch25/embeddings.npy b/Image/DenseNet/model/2/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2fb5d01f69510ee6c8f16501689aaa0645b522f8 --- /dev/null +++ b/Image/DenseNet/model/2/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ffb181af28e3254fb8733c50b27cb9eba7314e918187aede596d1a8d5006fdf +size 76800128 diff --git a/Image/DenseNet/model/2/epoch25/subject_model.pth b/Image/DenseNet/model/2/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4983f13b92e35649e79205e5c1be6820d08b1d40 --- /dev/null +++ 
b/Image/DenseNet/model/2/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bdcfec78b9a71b626e9aca3092c40cce5ebc6a2ace62b4fb749e5d9785d59914 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch3/embeddings.npy b/Image/DenseNet/model/2/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8699795688c494b46e3609c0e349d219b1f13c32 --- /dev/null +++ b/Image/DenseNet/model/2/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec2ca8efa66918e7a9369be15045a810abe803b99e24106207fd832678dd74a0 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch3/subject_model.pth b/Image/DenseNet/model/2/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..de4d99585ab259e347e8cf9f1913606715a72e9a --- /dev/null +++ b/Image/DenseNet/model/2/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:73d433e2c6c25b190d42a7fd717d759ed78a4814e1cbaf62ae8ca26b0394b566 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch4/embeddings.npy b/Image/DenseNet/model/2/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f6557812c1825fbc845f9a8aff38cd580843d07e --- /dev/null +++ b/Image/DenseNet/model/2/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3b3ee193221e28af880396acdd41e2402fab2936a8add7cba5da161dc1c04cd0 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch4/subject_model.pth b/Image/DenseNet/model/2/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e34ce6695fc8cc94ddbecf0fd677157af758c5d8 --- /dev/null +++ b/Image/DenseNet/model/2/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58743e179152439b6dfe22e37f4f51a08feadad9152bb0f6e70de96f313bd047 +size 4375506 diff --git 
a/Image/DenseNet/model/2/epoch5/embeddings.npy b/Image/DenseNet/model/2/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d092a5fd521e1845de7b5ef1227c8a671607e957 --- /dev/null +++ b/Image/DenseNet/model/2/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d5a0ad12578e0d64df5690a5e55243879927156719f97d9eb15a2f6bdc91e7c +size 76800128 diff --git a/Image/DenseNet/model/2/epoch5/subject_model.pth b/Image/DenseNet/model/2/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8d1d471a6a3fe792ad4ee18f1fa9a24104cef77f --- /dev/null +++ b/Image/DenseNet/model/2/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be53eed4cf0dd2908d77f5d5e355cf1d22503f26a57cf43586cee2ddb4ce1e37 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch6/embeddings.npy b/Image/DenseNet/model/2/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..adff814c5dd0445424343697faf9201770650f93 --- /dev/null +++ b/Image/DenseNet/model/2/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb7bbb535fe14192ea280dabb0538e4d76858c150fb978ae37d8be7119139861 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch6/subject_model.pth b/Image/DenseNet/model/2/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..12a24d169f7a510c12c41903b0faf8db7a71ac8d --- /dev/null +++ b/Image/DenseNet/model/2/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d59a5bde2d60702e334eae40e367e8b955fc99cf5572e3db18b6acd25bf802b5 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch7/embeddings.npy b/Image/DenseNet/model/2/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2770ff8430ada033d0b9f8014968b84942e7f28d --- /dev/null +++ 
b/Image/DenseNet/model/2/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63bdb9778b1b913e4809571844d86a5c2cc328b773327ec9167af96f269d5001 +size 76800128 diff --git a/Image/DenseNet/model/2/epoch7/subject_model.pth b/Image/DenseNet/model/2/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a221f93bd0608359047d3aa5771ebd4c958b62c5 --- /dev/null +++ b/Image/DenseNet/model/2/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f62ed083bf1cc5e35ac5ed5466c3c22998d5a30a5d2559043a6c4c82f8d62f6 +size 4375506 diff --git a/Image/DenseNet/model/2/epoch8/embeddings.npy b/Image/DenseNet/model/2/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3b3c558917d3a4550fd0fcf7f8ce3e1e9eb15b43 --- /dev/null +++ b/Image/DenseNet/model/2/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4c9595c5165acc105abc69d0b18d2ec717dc08d912a138eda11cb92714bb0c7c +size 76800128 diff --git a/Image/DenseNet/model/2/epoch8/subject_model.pth b/Image/DenseNet/model/2/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1cfc3ced417751456d7c3e512c32440958a7a2ba --- /dev/null +++ b/Image/DenseNet/model/2/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f8b5d8a52a6aaef95294d7ea20bfa785aa0037de0ad7709ec0f0382a104a69f +size 4375506 diff --git a/Image/DenseNet/model/2/epoch9/embeddings.npy b/Image/DenseNet/model/2/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b1a1dfaf52d7ed1c49a9cb6a8f27b5b6c2cd069b --- /dev/null +++ b/Image/DenseNet/model/2/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d042b007b0ec826528cfd7a1f482d1fad8df14e0841bfc96e1ae95b358ebc20f +size 76800128 diff --git 
a/Image/DenseNet/model/2/epoch9/subject_model.pth b/Image/DenseNet/model/2/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7f934395bb0773e33d2675e579934a185b449d43 --- /dev/null +++ b/Image/DenseNet/model/2/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0d820efd3e46206bc94f3cabcf7c52164567e8ba38eda4526492409934a39f0 +size 4375506 diff --git a/Image/DenseNet/model/2/layer_info.json b/Image/DenseNet/model/2/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..db0956d5a303db669b1b7c51c4307779dbb6a3d6 --- /dev/null +++ b/Image/DenseNet/model/2/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avg_pool", "dim": 384} \ No newline at end of file diff --git a/Image/EfficientNet/code/backdoor_train.log b/Image/EfficientNet/code/backdoor_train.log new file mode 100644 index 0000000000000000000000000000000000000000..4e9e8cee061eed99b95afb839e2b7f43f1a499c8 --- /dev/null +++ b/Image/EfficientNet/code/backdoor_train.log @@ -0,0 +1,253 @@ +2025-03-14 16:50:34,220 - train - INFO - 开始训练 efficientnet +2025-03-14 16:50:34,220 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:2 +2025-03-14 16:50:35,038 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.393 | Acc: 13.28% +2025-03-14 16:50:39,342 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.716 | Acc: 15.56% +2025-03-14 16:50:43,687 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.513 | Acc: 17.27% +2025-03-14 16:50:47,952 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 2.372 | Acc: 18.62% +2025-03-14 16:50:53,412 - train - INFO - Epoch: 1 | Test Loss: 1.909 | Test Acc: 28.96% +2025-03-14 16:50:53,927 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.852 | Acc: 28.12% +2025-03-14 16:50:58,155 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.894 | Acc: 29.70% +2025-03-14 16:51:02,310 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.880 | Acc: 30.11% +2025-03-14 16:51:06,421 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 
1.847 | Acc: 31.26% +2025-03-14 16:51:11,515 - train - INFO - Epoch: 2 | Test Loss: 1.734 | Test Acc: 38.75% +2025-03-14 16:51:21,290 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.495 | Acc: 46.09% +2025-03-14 16:51:25,389 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.697 | Acc: 37.88% +2025-03-14 16:51:29,372 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.664 | Acc: 39.20% +2025-03-14 16:51:33,262 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.640 | Acc: 40.20% +2025-03-14 16:51:38,314 - train - INFO - Epoch: 3 | Test Loss: 1.604 | Test Acc: 42.46% +2025-03-14 16:51:38,569 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.486 | Acc: 50.00% +2025-03-14 16:51:42,871 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.506 | Acc: 45.35% +2025-03-14 16:51:46,917 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.462 | Acc: 47.19% +2025-03-14 16:51:51,076 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.447 | Acc: 47.85% +2025-03-14 16:51:56,234 - train - INFO - Epoch: 4 | Test Loss: 1.539 | Test Acc: 44.57% +2025-03-14 16:52:06,506 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.254 | Acc: 55.47% +2025-03-14 16:52:10,514 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.457 | Acc: 47.76% +2025-03-14 16:52:14,542 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.406 | Acc: 49.47% +2025-03-14 16:52:18,602 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.368 | Acc: 51.14% +2025-03-14 16:52:23,902 - train - INFO - Epoch: 5 | Test Loss: 1.270 | Test Acc: 54.80% +2025-03-14 16:52:24,188 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.311 | Acc: 53.12% +2025-03-14 16:52:28,460 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.287 | Acc: 54.12% +2025-03-14 16:52:32,655 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.262 | Acc: 55.06% +2025-03-14 16:52:36,609 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.236 | Acc: 56.11% +2025-03-14 16:52:41,974 - train - INFO - Epoch: 6 | Test Loss: 1.166 | Test Acc: 59.57% +2025-03-14 16:52:52,538 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 
1.270 | Acc: 59.38% +2025-03-14 16:52:56,703 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.133 | Acc: 59.91% +2025-03-14 16:53:00,833 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.122 | Acc: 60.30% +2025-03-14 16:53:05,134 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.115 | Acc: 60.65% +2025-03-14 16:53:10,577 - train - INFO - Epoch: 7 | Test Loss: 1.184 | Test Acc: 58.79% +2025-03-14 16:53:10,817 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.102 | Acc: 60.94% +2025-03-14 16:53:15,027 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.080 | Acc: 61.57% +2025-03-14 16:53:19,348 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.068 | Acc: 62.27% +2025-03-14 16:53:23,545 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.062 | Acc: 62.52% +2025-03-14 16:53:28,792 - train - INFO - Epoch: 8 | Test Loss: 1.034 | Test Acc: 64.74% +2025-03-14 16:53:38,962 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.995 | Acc: 66.41% +2025-03-14 16:53:43,199 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.017 | Acc: 64.58% +2025-03-14 16:53:47,563 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.013 | Acc: 64.88% +2025-03-14 16:53:51,753 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.006 | Acc: 65.17% +2025-03-14 16:53:57,630 - train - INFO - Epoch: 9 | Test Loss: 1.000 | Test Acc: 64.72% +2025-03-14 16:53:57,883 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 1.000 | Acc: 64.06% +2025-03-14 16:54:02,333 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.966 | Acc: 66.46% +2025-03-14 16:54:06,525 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.952 | Acc: 67.11% +2025-03-14 16:54:10,742 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.954 | Acc: 67.09% +2025-03-14 16:54:16,145 - train - INFO - Epoch: 10 | Test Loss: 0.945 | Test Acc: 66.62% +2025-03-14 16:54:26,361 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.875 | Acc: 72.66% +2025-03-14 16:54:30,550 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.946 | Acc: 66.99% +2025-03-14 16:54:34,741 - train - INFO - Epoch: 11 | Batch: 
200 | Loss: 0.946 | Acc: 67.16% +2025-03-14 16:54:38,801 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.938 | Acc: 67.54% +2025-03-14 16:54:45,018 - train - INFO - Epoch: 11 | Test Loss: 0.966 | Test Acc: 66.08% +2025-03-14 16:54:45,285 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.831 | Acc: 70.31% +2025-03-14 16:54:49,577 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.909 | Acc: 68.46% +2025-03-14 16:54:53,812 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.904 | Acc: 68.80% +2025-03-14 16:54:57,943 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.907 | Acc: 68.88% +2025-03-14 16:55:03,248 - train - INFO - Epoch: 12 | Test Loss: 0.940 | Test Acc: 67.15% +2025-03-14 16:55:14,462 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.884 | Acc: 68.75% +2025-03-14 16:55:18,985 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.903 | Acc: 69.54% +2025-03-14 16:55:23,339 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.905 | Acc: 69.28% +2025-03-14 16:55:27,460 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.898 | Acc: 69.50% +2025-03-14 16:55:32,675 - train - INFO - Epoch: 13 | Test Loss: 0.857 | Test Acc: 70.24% +2025-03-14 16:55:32,892 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.853 | Acc: 73.44% +2025-03-14 16:55:37,122 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.869 | Acc: 70.69% +2025-03-14 16:55:41,732 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.871 | Acc: 70.37% +2025-03-14 16:55:45,936 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.865 | Acc: 70.62% +2025-03-14 16:55:51,486 - train - INFO - Epoch: 14 | Test Loss: 0.862 | Test Acc: 70.40% +2025-03-14 16:56:02,079 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.794 | Acc: 74.22% +2025-03-14 16:56:06,351 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 0.847 | Acc: 71.53% +2025-03-14 16:56:10,745 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.853 | Acc: 71.16% +2025-03-14 16:56:15,035 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.851 | Acc: 71.19% +2025-03-14 16:56:21,120 - train - 
INFO - Epoch: 15 | Test Loss: 0.869 | Test Acc: 70.55% +2025-03-14 16:56:21,439 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.735 | Acc: 71.88% +2025-03-14 16:56:25,546 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.844 | Acc: 71.07% +2025-03-14 16:56:29,707 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.842 | Acc: 71.25% +2025-03-14 16:56:33,868 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.844 | Acc: 71.32% +2025-03-14 16:56:39,194 - train - INFO - Epoch: 16 | Test Loss: 0.949 | Test Acc: 66.89% +2025-03-14 16:56:50,322 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.968 | Acc: 64.84% +2025-03-14 16:56:54,610 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.831 | Acc: 72.15% +2025-03-14 16:56:58,907 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.829 | Acc: 71.93% +2025-03-14 16:57:02,982 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.829 | Acc: 72.05% +2025-03-14 16:57:08,370 - train - INFO - Epoch: 17 | Test Loss: 0.948 | Test Acc: 68.57% +2025-03-14 16:57:08,594 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.919 | Acc: 65.62% +2025-03-14 16:57:12,965 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.813 | Acc: 72.11% +2025-03-14 16:57:17,407 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.825 | Acc: 71.97% +2025-03-14 16:57:21,996 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.822 | Acc: 72.06% +2025-03-14 16:57:27,487 - train - INFO - Epoch: 18 | Test Loss: 0.852 | Test Acc: 70.81% +2025-03-14 16:57:37,770 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.934 | Acc: 68.75% +2025-03-14 16:57:41,993 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.816 | Acc: 72.37% +2025-03-14 16:57:46,550 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.813 | Acc: 72.63% +2025-03-14 16:57:50,721 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 0.807 | Acc: 72.78% +2025-03-14 16:57:55,876 - train - INFO - Epoch: 19 | Test Loss: 0.808 | Test Acc: 72.39% +2025-03-14 16:57:56,137 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.870 | Acc: 69.53% +2025-03-14 
16:58:00,441 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.795 | Acc: 73.00% +2025-03-14 16:58:04,438 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.792 | Acc: 73.20% +2025-03-14 16:58:08,605 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.799 | Acc: 72.85% +2025-03-14 16:58:13,904 - train - INFO - Epoch: 20 | Test Loss: 0.816 | Test Acc: 72.37% +2025-03-14 16:58:24,451 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.747 | Acc: 75.00% +2025-03-14 16:58:28,704 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.782 | Acc: 73.19% +2025-03-14 16:58:32,744 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.792 | Acc: 72.99% +2025-03-14 16:58:36,763 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.793 | Acc: 73.02% +2025-03-14 16:58:42,110 - train - INFO - Epoch: 21 | Test Loss: 0.837 | Test Acc: 71.78% +2025-03-14 16:58:42,347 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.677 | Acc: 78.12% +2025-03-14 16:58:46,656 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.809 | Acc: 72.78% +2025-03-14 16:58:50,792 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.801 | Acc: 73.03% +2025-03-14 16:58:55,249 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.794 | Acc: 73.16% +2025-03-14 16:59:00,591 - train - INFO - Epoch: 22 | Test Loss: 0.764 | Test Acc: 74.07% +2025-03-14 16:59:11,462 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.841 | Acc: 70.31% +2025-03-14 16:59:15,955 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.785 | Acc: 73.19% +2025-03-14 16:59:20,405 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.779 | Acc: 73.59% +2025-03-14 16:59:24,843 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.781 | Acc: 73.50% +2025-03-14 16:59:30,321 - train - INFO - Epoch: 23 | Test Loss: 0.835 | Test Acc: 71.93% +2025-03-14 16:59:30,567 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.885 | Acc: 71.09% +2025-03-14 16:59:35,045 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.773 | Acc: 73.67% +2025-03-14 16:59:39,555 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.776 
| Acc: 73.77% +2025-03-14 16:59:44,054 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.784 | Acc: 73.43% +2025-03-14 16:59:49,912 - train - INFO - Epoch: 24 | Test Loss: 0.808 | Test Acc: 72.50% +2025-03-14 17:00:00,452 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.660 | Acc: 77.34% +2025-03-14 17:00:05,296 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.761 | Acc: 74.09% +2025-03-14 17:00:10,410 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.768 | Acc: 74.05% +2025-03-14 17:00:15,036 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.776 | Acc: 73.89% +2025-03-14 17:00:20,739 - train - INFO - Epoch: 25 | Test Loss: 0.790 | Test Acc: 73.05% +2025-03-14 17:00:20,973 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.560 | Acc: 80.47% +2025-03-14 17:00:25,440 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.772 | Acc: 73.87% +2025-03-14 17:00:29,998 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.772 | Acc: 73.85% +2025-03-14 17:00:34,447 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.774 | Acc: 73.79% +2025-03-14 17:00:39,890 - train - INFO - Epoch: 26 | Test Loss: 0.858 | Test Acc: 71.00% +2025-03-14 17:00:50,311 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.666 | Acc: 78.12% +2025-03-14 17:00:54,862 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.734 | Acc: 75.15% +2025-03-14 17:00:59,263 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.748 | Acc: 74.74% +2025-03-14 17:01:03,797 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.751 | Acc: 74.64% +2025-03-14 17:01:09,534 - train - INFO - Epoch: 27 | Test Loss: 0.797 | Test Acc: 72.45% +2025-03-14 17:01:09,786 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.846 | Acc: 75.00% +2025-03-14 17:01:14,301 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.758 | Acc: 74.45% +2025-03-14 17:01:18,764 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.757 | Acc: 74.49% +2025-03-14 17:01:23,331 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.758 | Acc: 74.47% +2025-03-14 17:01:29,619 - train - INFO - Epoch: 28 
| Test Loss: 0.807 | Test Acc: 72.88% +2025-03-14 17:01:39,772 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.833 | Acc: 71.09% +2025-03-14 17:01:44,254 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.752 | Acc: 74.68% +2025-03-14 17:01:48,888 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.752 | Acc: 74.65% +2025-03-14 17:01:53,508 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.756 | Acc: 74.55% +2025-03-14 17:01:58,973 - train - INFO - Epoch: 29 | Test Loss: 0.777 | Test Acc: 73.28% +2025-03-14 17:01:59,211 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.740 | Acc: 77.34% +2025-03-14 17:02:03,694 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.743 | Acc: 74.82% +2025-03-14 17:02:08,332 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.756 | Acc: 74.52% +2025-03-14 17:02:12,835 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.753 | Acc: 74.69% +2025-03-14 17:02:18,570 - train - INFO - Epoch: 30 | Test Loss: 0.749 | Test Acc: 73.91% +2025-03-14 17:02:28,712 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.593 | Acc: 78.91% +2025-03-14 17:02:33,298 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.720 | Acc: 75.85% +2025-03-14 17:02:38,219 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.733 | Acc: 75.38% +2025-03-14 17:02:42,945 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.737 | Acc: 75.30% +2025-03-14 17:02:48,977 - train - INFO - Epoch: 31 | Test Loss: 0.788 | Test Acc: 73.32% +2025-03-14 17:02:49,241 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.641 | Acc: 76.56% +2025-03-14 17:02:53,771 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.732 | Acc: 75.52% +2025-03-14 17:02:58,291 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.741 | Acc: 75.14% +2025-03-14 17:03:03,014 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 0.746 | Acc: 74.99% +2025-03-14 17:03:08,589 - train - INFO - Epoch: 32 | Test Loss: 0.853 | Test Acc: 71.73% +2025-03-14 17:03:19,315 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.829 | Acc: 72.66% +2025-03-14 17:03:23,891 - 
train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.748 | Acc: 74.85% +2025-03-14 17:03:28,577 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.734 | Acc: 75.21% +2025-03-14 17:03:33,144 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.739 | Acc: 74.99% +2025-03-14 17:03:38,909 - train - INFO - Epoch: 33 | Test Loss: 0.799 | Test Acc: 73.38% +2025-03-14 17:03:39,126 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.699 | Acc: 78.12% +2025-03-14 17:03:43,965 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.739 | Acc: 75.14% +2025-03-14 17:03:49,371 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.736 | Acc: 75.35% +2025-03-14 17:03:54,126 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.740 | Acc: 75.17% +2025-03-14 17:03:59,822 - train - INFO - Epoch: 34 | Test Loss: 0.939 | Test Acc: 69.15% +2025-03-14 17:04:10,469 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.672 | Acc: 75.78% +2025-03-14 17:04:15,147 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.711 | Acc: 76.02% +2025-03-14 17:04:20,006 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.723 | Acc: 75.68% +2025-03-14 17:04:24,816 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.725 | Acc: 75.55% +2025-03-14 17:04:30,590 - train - INFO - Epoch: 35 | Test Loss: 0.782 | Test Acc: 73.06% +2025-03-14 17:04:30,848 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.641 | Acc: 77.34% +2025-03-14 17:04:35,448 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.710 | Acc: 75.83% +2025-03-14 17:04:39,870 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.723 | Acc: 75.63% +2025-03-14 17:04:44,522 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.724 | Acc: 75.52% +2025-03-14 17:04:51,089 - train - INFO - Epoch: 36 | Test Loss: 0.873 | Test Acc: 71.48% +2025-03-14 17:05:02,234 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.644 | Acc: 78.12% +2025-03-14 17:05:06,776 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.720 | Acc: 75.93% +2025-03-14 17:05:11,205 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.723 | Acc: 75.71% 
+2025-03-14 17:05:15,725 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.716 | Acc: 76.07% +2025-03-14 17:05:21,656 - train - INFO - Epoch: 37 | Test Loss: 0.685 | Test Acc: 76.37% +2025-03-14 17:05:21,919 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.794 | Acc: 68.75% +2025-03-14 17:05:26,364 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.712 | Acc: 75.81% +2025-03-14 17:05:30,796 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.710 | Acc: 75.96% +2025-03-14 17:05:35,395 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.707 | Acc: 76.06% +2025-03-14 17:05:41,049 - train - INFO - Epoch: 38 | Test Loss: 0.715 | Test Acc: 75.55% +2025-03-14 17:05:51,396 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.663 | Acc: 77.34% +2025-03-14 17:05:55,954 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.711 | Acc: 76.08% +2025-03-14 17:06:00,316 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.709 | Acc: 76.10% +2025-03-14 17:06:04,758 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.709 | Acc: 76.10% +2025-03-14 17:06:10,359 - train - INFO - Epoch: 39 | Test Loss: 0.712 | Test Acc: 75.40% +2025-03-14 17:06:10,615 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.544 | Acc: 81.25% +2025-03-14 17:06:15,197 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.704 | Acc: 76.64% +2025-03-14 17:06:19,790 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.710 | Acc: 76.21% +2025-03-14 17:06:24,302 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.715 | Acc: 75.94% +2025-03-14 17:06:30,044 - train - INFO - Epoch: 40 | Test Loss: 0.751 | Test Acc: 74.14% +2025-03-14 17:06:41,156 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.603 | Acc: 77.34% +2025-03-14 17:06:45,994 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 0.684 | Acc: 77.35% +2025-03-14 17:06:50,606 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.693 | Acc: 76.97% +2025-03-14 17:06:55,094 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.699 | Acc: 76.79% +2025-03-14 17:07:00,694 - train - INFO - Epoch: 41 | Test Loss: 
0.915 | Test Acc: 69.34% +2025-03-14 17:07:00,937 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.528 | Acc: 82.81% +2025-03-14 17:07:05,331 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.718 | Acc: 75.97% +2025-03-14 17:07:09,861 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.710 | Acc: 76.21% +2025-03-14 17:07:14,366 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.702 | Acc: 76.57% +2025-03-14 17:07:19,922 - train - INFO - Epoch: 42 | Test Loss: 0.755 | Test Acc: 74.51% +2025-03-14 17:07:30,494 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.780 | Acc: 68.75% +2025-03-14 17:07:35,049 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.701 | Acc: 76.47% +2025-03-14 17:07:39,755 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.700 | Acc: 76.44% +2025-03-14 17:07:45,290 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.703 | Acc: 76.36% +2025-03-14 17:07:51,032 - train - INFO - Epoch: 43 | Test Loss: 0.700 | Test Acc: 76.15% +2025-03-14 17:07:51,279 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.755 | Acc: 71.88% +2025-03-14 17:07:55,755 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.699 | Acc: 76.54% +2025-03-14 17:08:00,264 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.691 | Acc: 77.03% +2025-03-14 17:08:04,625 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.695 | Acc: 76.74% +2025-03-14 17:08:10,334 - train - INFO - Epoch: 44 | Test Loss: 0.763 | Test Acc: 74.75% +2025-03-14 17:08:21,069 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.686 | Acc: 72.66% +2025-03-14 17:08:25,742 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.699 | Acc: 76.69% +2025-03-14 17:08:30,293 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.699 | Acc: 76.76% +2025-03-14 17:08:34,866 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 0.690 | Acc: 76.93% +2025-03-14 17:08:40,638 - train - INFO - Epoch: 45 | Test Loss: 0.739 | Test Acc: 74.78% +2025-03-14 17:08:40,912 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.646 | Acc: 81.25% +2025-03-14 17:08:45,746 - train - INFO - 
Epoch: 46 | Batch: 100 | Loss: 0.678 | Acc: 77.07% +2025-03-14 17:08:50,771 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.685 | Acc: 76.98% +2025-03-14 17:08:55,486 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.687 | Acc: 77.02% +2025-03-14 17:09:01,172 - train - INFO - Epoch: 46 | Test Loss: 0.678 | Test Acc: 77.36% +2025-03-14 17:09:11,645 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.802 | Acc: 75.00% +2025-03-14 17:09:16,423 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.678 | Acc: 77.54% +2025-03-14 17:09:21,134 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.685 | Acc: 77.14% +2025-03-14 17:09:25,859 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.686 | Acc: 77.04% +2025-03-14 17:09:32,508 - train - INFO - Epoch: 47 | Test Loss: 0.719 | Test Acc: 75.71% +2025-03-14 17:09:32,758 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.627 | Acc: 78.12% +2025-03-14 17:09:37,370 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.686 | Acc: 77.20% +2025-03-14 17:09:42,102 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.683 | Acc: 77.31% +2025-03-14 17:09:46,664 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.689 | Acc: 77.06% +2025-03-14 17:09:52,593 - train - INFO - Epoch: 48 | Test Loss: 0.675 | Test Acc: 77.54% +2025-03-14 17:10:02,821 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.744 | Acc: 73.44% +2025-03-14 17:10:07,457 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.681 | Acc: 77.34% +2025-03-14 17:10:12,289 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.683 | Acc: 77.25% +2025-03-14 17:10:17,314 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.684 | Acc: 77.20% +2025-03-14 17:10:22,971 - train - INFO - Epoch: 49 | Test Loss: 0.678 | Test Acc: 77.32% +2025-03-14 17:10:23,275 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 0.559 | Acc: 79.69% +2025-03-14 17:10:27,808 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 0.661 | Acc: 77.69% +2025-03-14 17:10:32,281 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 0.671 | Acc: 77.64% +2025-03-14 
17:10:36,722 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 0.675 | Acc: 77.45% +2025-03-14 17:10:42,250 - train - INFO - Epoch: 50 | Test Loss: 0.691 | Test Acc: 76.73% +2025-03-14 17:10:52,217 - train - INFO - 训练完成! diff --git a/Image/EfficientNet/code/data_aug_train.log b/Image/EfficientNet/code/data_aug_train.log new file mode 100644 index 0000000000000000000000000000000000000000..768351754008c7e7b7b35d66625f30c77b8e6c7c --- /dev/null +++ b/Image/EfficientNet/code/data_aug_train.log @@ -0,0 +1,12 @@ +2025-03-14 16:10:40,562 - train - INFO - 开始训练 efficientnet +2025-03-14 16:10:40,563 - train - INFO - 总轮数: 100, 学习率: 0.1, 设备: cuda:2 +2025-03-14 16:10:42,354 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.396 | Acc: 7.81% +2025-03-14 16:10:49,792 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.737 | Acc: 10.54% +2025-03-14 16:10:57,192 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.566 | Acc: 10.70% +2025-03-14 16:11:04,468 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 2.481 | Acc: 11.40% +2025-03-14 16:11:13,027 - train - INFO - Epoch: 1 | Test Loss: 2.253 | Test Acc: 12.50% +2025-03-14 16:11:13,338 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 2.191 | Acc: 16.41% +2025-03-14 16:11:21,423 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 2.188 | Acc: 16.25% +2025-03-14 16:11:28,669 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 2.138 | Acc: 18.65% +2025-03-14 16:11:36,178 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 2.103 | Acc: 20.15% +2025-03-14 16:11:44,414 - train - INFO - Epoch: 2 | Test Loss: 1.911 | Test Acc: 28.45% diff --git a/Image/EfficientNet/code/model.py b/Image/EfficientNet/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..b731fd4ae8b1491bf46152a4c85f73cc5a6e2cc4 --- /dev/null +++ b/Image/EfficientNet/code/model.py @@ -0,0 +1,267 @@ +''' +EfficientNet in PyTorch. 
+ +Paper: "EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks" +Reference: https://github.com/keras-team/keras-applications/blob/master/keras_applications/efficientnet.py + +主要特点: +1. 使用MBConv作为基本模块,包含SE注意力机制 +2. 通过复合缩放方法(compound scaling)同时调整网络的宽度、深度和分辨率 +3. 使用Swish激活函数和DropConnect正则化 +''' +import torch +import torch.nn as nn +import torch.nn.functional as F +import math + +def swish(x): + """Swish激活函数: x * sigmoid(x)""" + return x * x.sigmoid() + +def drop_connect(x, drop_ratio): + """DropConnect正则化 + + Args: + x: 输入tensor + drop_ratio: 丢弃率 + + Returns: + 经过DropConnect处理的tensor + """ + keep_ratio = 1.0 - drop_ratio + mask = torch.empty([x.shape[0], 1, 1, 1], dtype=x.dtype, device=x.device) + mask.bernoulli_(keep_ratio) + x.div_(keep_ratio) + x.mul_(mask) + return x + +class SE(nn.Module): + '''Squeeze-and-Excitation注意力模块 + + Args: + in_channels: 输入通道数 + se_channels: SE模块中间层的通道数 + ''' + def __init__(self, in_channels, se_channels): + super(SE, self).__init__() + self.se1 = nn.Conv2d(in_channels, se_channels, kernel_size=1, bias=True) + self.se2 = nn.Conv2d(se_channels, in_channels, kernel_size=1, bias=True) + + def forward(self, x): + out = F.adaptive_avg_pool2d(x, (1, 1)) # 全局平均池化 + out = swish(self.se1(out)) + out = self.se2(out).sigmoid() + return x * out # 特征重标定 + +class MBConv(nn.Module): + '''MBConv模块: Mobile Inverted Bottleneck Convolution + + Args: + in_channels: 输入通道数 + out_channels: 输出通道数 + kernel_size: 卷积核大小 + stride: 步长 + expand_ratio: 扩展比率 + se_ratio: SE模块的压缩比率 + drop_rate: DropConnect的丢弃率 + ''' + def __init__(self, + in_channels, + out_channels, + kernel_size, + stride, + expand_ratio=1, + se_ratio=0.25, + drop_rate=0.): + super(MBConv, self).__init__() + self.stride = stride + self.drop_rate = drop_rate + self.expand_ratio = expand_ratio + + # Expansion phase + channels = expand_ratio * in_channels + self.conv1 = nn.Conv2d(in_channels, channels, kernel_size=1, stride=1, padding=0, bias=False) + self.bn1 = 
class EfficientNet(nn.Module):
    """EfficientNet adapted for CIFAR-scale (32x32) inputs.

    Args:
        width_coefficient: multiplier applied to every stage's channel count.
        depth_coefficient: multiplier applied to every stage's block count.
        dropout_rate: dropout probability applied before the final classifier
            (only while training).
        num_classes: number of output classes.
    """

    def __init__(self,
                 width_coefficient=1.0,
                 depth_coefficient=1.0,
                 dropout_rate=0.2,
                 num_classes=10):
        super(EfficientNet, self).__init__()

        # Per-stage baseline configuration (EfficientNet-B0).
        cfg = {
            'num_blocks': [1, 2, 2, 3, 3, 4, 1],               # blocks per stage
            'expansion': [1, 6, 6, 6, 6, 6, 6],                # MBConv expansion ratios
            'out_channels': [16, 24, 40, 80, 112, 192, 320],   # stage output channels
            'kernel_size': [3, 3, 5, 3, 5, 5, 3],              # depthwise kernel sizes
            'stride': [1, 2, 2, 2, 1, 2, 1],                   # first-block strides
            'dropout_rate': dropout_rate,
            'drop_connect_rate': 0.2,
        }

        self.cfg = cfg
        self.width_coefficient = width_coefficient
        self.depth_coefficient = depth_coefficient

        # Stem: stride 1 (not 2 as in the ImageNet model) to suit 32x32 images.
        self.conv1 = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(32)

        # Stacked MBConv stages.
        self.layers = self._make_layers(in_channels=32)

        # Classifier head.
        # BUGFIX: the classifier's in_features must be scaled the same way
        # _make_layers scales channels -- int(channels * width) -- not
        # channels * int(width).  The old expression truncated the multiplier
        # (int(1.4) == 1), so every variant with a non-integral width
        # coefficient (B2-B7) built an nn.Linear whose in_features mismatched
        # the real final feature width and crashed in forward().
        final_channels = int(cfg['out_channels'][-1] * width_coefficient)
        self.linear = nn.Linear(final_channels, num_classes)

    def _make_layers(self, in_channels):
        """Build the MBConv stages described by ``self.cfg``.

        Args:
            in_channels: channel count produced by the stem.

        Returns:
            nn.Sequential holding every MBConv block, with drop-connect rate
            increasing linearly with block depth.
        """
        layers = []
        cfg = [self.cfg[k] for k in ['expansion', 'out_channels', 'num_blocks', 'kernel_size', 'stride']]
        blocks = sum(self.cfg['num_blocks'])
        b = 0  # running block index, used to ramp up drop_connect_rate

        for expansion, out_channels, num_blocks, kernel_size, stride in zip(*cfg):
            # Apply the compound-scaling coefficients per stage.
            out_channels = int(out_channels * self.width_coefficient)
            num_blocks = int(math.ceil(num_blocks * self.depth_coefficient))

            for i in range(num_blocks):
                # Only the first block of a stage may downsample.
                stride_i = stride if i == 0 else 1
                drop_rate = self.cfg['drop_connect_rate'] * b / blocks
                layers.append(
                    MBConv(in_channels,
                           out_channels,
                           kernel_size,
                           stride_i,
                           expansion,
                           se_ratio=0.25,
                           drop_rate=drop_rate))
                in_channels = out_channels
                b += 1

        return nn.Sequential(*layers)

    def forward(self, x):
        """Run the network: stem -> MBConv stages -> pooled linear head."""
        # Stem
        out = swish(self.bn1(self.conv1(x)))
        # Blocks
        out = self.layers(out)
        # Head: global average pool, flatten, (train-time) dropout, classify.
        out = F.adaptive_avg_pool2d(out, 1)
        out = out.view(out.size(0), -1)
        if self.training and self.cfg['dropout_rate'] > 0:
            out = F.dropout(out, p=self.cfg['dropout_rate'])
        out = self.linear(out)
        return out


def EfficientNetB0(num_classes=10):
    """EfficientNet-B0 (baseline width/depth)."""
    return EfficientNet(width_coefficient=1.0,
                        depth_coefficient=1.0,
                        dropout_rate=0.2,
                        num_classes=num_classes)


def EfficientNetB1(num_classes=10):
    """EfficientNet-B1."""
    return EfficientNet(width_coefficient=1.0,
                        depth_coefficient=1.1,
                        dropout_rate=0.2,
                        num_classes=num_classes)


def EfficientNetB2(num_classes=10):
    """EfficientNet-B2."""
    return EfficientNet(width_coefficient=1.1,
                        depth_coefficient=1.2,
                        dropout_rate=0.3,
                        num_classes=num_classes)


def EfficientNetB3(num_classes=10):
    """EfficientNet-B3."""
    return EfficientNet(width_coefficient=1.2,
                        depth_coefficient=1.4,
                        dropout_rate=0.3,
                        num_classes=num_classes)


def EfficientNetB4(num_classes=10):
    """EfficientNet-B4."""
    return EfficientNet(width_coefficient=1.4,
                        depth_coefficient=1.8,
                        dropout_rate=0.4,
                        num_classes=num_classes)


def EfficientNetB5(num_classes=10):
    """EfficientNet-B5."""
    return EfficientNet(width_coefficient=1.6,
                        depth_coefficient=2.2,
                        dropout_rate=0.4,
                        num_classes=num_classes)


def EfficientNetB6(num_classes=10):
    """EfficientNet-B6."""
    return EfficientNet(width_coefficient=1.8,
                        depth_coefficient=2.6,
                        dropout_rate=0.5,
                        num_classes=num_classes)


def EfficientNetB7(num_classes=10):
    """EfficientNet-B7."""
    return EfficientNet(width_coefficient=2.0,
                        depth_coefficient=3.1,
                        dropout_rate=0.5,
                        num_classes=num_classes)


def test():
    """Smoke test: forward a dummy CIFAR batch and print a model summary."""
    net = EfficientNetB0()
    x = torch.randn(1, 3, 32, 32)
    y = net(x)
    print(y.size())
    # NOTE: torchinfo is a third-party dependency used only for this manual test.
    from torchinfo import summary
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    net = net.to(device)
    summary(net, (1, 3, 32, 32))


if __name__ == '__main__':
    test()
Batch: 100 | Loss: 1.697 | Acc: 36.48% +2025-03-14 16:25:21,076 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.662 | Acc: 38.39% +2025-03-14 16:25:25,190 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.639 | Acc: 39.51% +2025-03-14 16:25:30,403 - train - INFO - Epoch: 2 | Test Loss: 1.489 | Test Acc: 46.71% +2025-03-14 16:25:40,451 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.456 | Acc: 48.44% +2025-03-14 16:25:44,586 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.506 | Acc: 45.27% +2025-03-14 16:25:49,037 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.490 | Acc: 46.16% +2025-03-14 16:25:53,242 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.464 | Acc: 47.02% +2025-03-14 16:25:58,157 - train - INFO - Epoch: 3 | Test Loss: 1.362 | Test Acc: 50.80% +2025-03-14 16:25:58,372 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.323 | Acc: 48.44% +2025-03-14 16:26:02,660 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.341 | Acc: 51.55% +2025-03-14 16:26:06,851 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.326 | Acc: 52.31% +2025-03-14 16:26:11,085 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.311 | Acc: 52.83% +2025-03-14 16:26:16,103 - train - INFO - Epoch: 4 | Test Loss: 1.226 | Test Acc: 54.20% +2025-03-14 16:26:25,973 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.249 | Acc: 53.12% +2025-03-14 16:26:30,091 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.228 | Acc: 55.75% +2025-03-14 16:26:34,122 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.233 | Acc: 55.84% +2025-03-14 16:26:38,125 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.222 | Acc: 56.30% +2025-03-14 16:26:43,375 - train - INFO - Epoch: 5 | Test Loss: 1.064 | Test Acc: 62.23% +2025-03-14 16:26:43,613 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.153 | Acc: 67.97% +2025-03-14 16:26:47,694 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.118 | Acc: 60.44% +2025-03-14 16:26:52,049 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.114 | Acc: 60.63% +2025-03-14 16:26:56,398 - train - INFO - Epoch: 
6 | Batch: 300 | Loss: 1.101 | Acc: 60.95% +2025-03-14 16:27:01,593 - train - INFO - Epoch: 6 | Test Loss: 1.013 | Test Acc: 64.31% +2025-03-14 16:27:11,823 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 0.962 | Acc: 65.62% +2025-03-14 16:27:15,984 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.026 | Acc: 63.78% +2025-03-14 16:27:20,007 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.023 | Acc: 64.04% +2025-03-14 16:27:24,044 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.011 | Acc: 64.39% +2025-03-14 16:27:29,341 - train - INFO - Epoch: 7 | Test Loss: 0.982 | Test Acc: 66.05% +2025-03-14 16:27:29,554 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 0.997 | Acc: 62.50% +2025-03-14 16:27:33,742 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.977 | Acc: 65.49% +2025-03-14 16:27:37,968 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.972 | Acc: 65.79% +2025-03-14 16:27:41,908 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.967 | Acc: 66.00% +2025-03-14 16:27:47,093 - train - INFO - Epoch: 8 | Test Loss: 0.970 | Test Acc: 65.27% +2025-03-14 16:27:58,459 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.097 | Acc: 61.72% +2025-03-14 16:28:03,091 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.933 | Acc: 67.14% +2025-03-14 16:28:07,115 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.928 | Acc: 67.26% +2025-03-14 16:28:11,125 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.922 | Acc: 67.64% +2025-03-14 16:28:16,262 - train - INFO - Epoch: 9 | Test Loss: 0.860 | Test Acc: 70.44% +2025-03-14 16:28:16,484 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.762 | Acc: 69.53% +2025-03-14 16:28:20,620 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.881 | Acc: 69.64% +2025-03-14 16:28:24,782 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.881 | Acc: 69.51% +2025-03-14 16:28:29,178 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.883 | Acc: 69.31% +2025-03-14 16:28:34,443 - train - INFO - Epoch: 10 | Test Loss: 0.961 | Test Acc: 67.11% +2025-03-14 16:28:44,377 - train - INFO - 
Epoch: 11 | Batch: 0 | Loss: 0.838 | Acc: 65.62% +2025-03-14 16:28:48,450 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.856 | Acc: 70.35% +2025-03-14 16:28:52,422 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.853 | Acc: 70.38% +2025-03-14 16:28:56,648 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.859 | Acc: 70.10% +2025-03-14 16:29:01,922 - train - INFO - Epoch: 11 | Test Loss: 0.929 | Test Acc: 68.30% +2025-03-14 16:29:02,164 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.918 | Acc: 68.75% +2025-03-14 16:29:06,383 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.843 | Acc: 70.23% +2025-03-14 16:29:10,393 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.849 | Acc: 70.18% +2025-03-14 16:29:14,619 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.851 | Acc: 70.25% +2025-03-14 16:29:20,040 - train - INFO - Epoch: 12 | Test Loss: 0.890 | Test Acc: 69.71% +2025-03-14 16:29:30,009 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.686 | Acc: 77.34% +2025-03-14 16:29:34,151 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.832 | Acc: 71.06% +2025-03-14 16:29:38,169 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.826 | Acc: 71.05% +2025-03-14 16:29:42,352 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.831 | Acc: 70.93% +2025-03-14 16:29:47,612 - train - INFO - Epoch: 13 | Test Loss: 0.804 | Test Acc: 72.10% +2025-03-14 16:29:47,842 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.855 | Acc: 68.75% +2025-03-14 16:29:52,017 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.805 | Acc: 71.96% +2025-03-14 16:29:55,912 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.814 | Acc: 71.71% +2025-03-14 16:30:00,027 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.818 | Acc: 71.49% +2025-03-14 16:30:05,322 - train - INFO - Epoch: 14 | Test Loss: 0.828 | Test Acc: 70.77% +2025-03-14 16:30:15,164 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.886 | Acc: 71.09% +2025-03-14 16:30:19,341 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 0.822 | Acc: 72.01% +2025-03-14 
16:30:23,394 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.812 | Acc: 72.11% +2025-03-14 16:30:27,423 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.806 | Acc: 72.18% +2025-03-14 16:30:32,746 - train - INFO - Epoch: 15 | Test Loss: 0.792 | Test Acc: 72.85% +2025-03-14 16:30:32,981 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.680 | Acc: 77.34% +2025-03-14 16:30:37,197 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.772 | Acc: 73.04% +2025-03-14 16:30:41,453 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.785 | Acc: 72.70% +2025-03-14 16:30:45,667 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.786 | Acc: 72.58% +2025-03-14 16:30:50,860 - train - INFO - Epoch: 16 | Test Loss: 0.849 | Test Acc: 69.94% +2025-03-14 16:31:00,847 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.778 | Acc: 67.97% +2025-03-14 16:31:05,109 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.779 | Acc: 72.93% +2025-03-14 16:31:09,247 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.775 | Acc: 73.05% +2025-03-14 16:31:13,118 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.775 | Acc: 73.09% +2025-03-14 16:31:18,274 - train - INFO - Epoch: 17 | Test Loss: 0.962 | Test Acc: 68.02% +2025-03-14 16:31:18,490 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.703 | Acc: 71.09% +2025-03-14 16:31:22,475 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.768 | Acc: 73.15% +2025-03-14 16:31:26,283 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.768 | Acc: 73.13% +2025-03-14 16:31:30,181 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.773 | Acc: 73.08% +2025-03-14 16:31:35,186 - train - INFO - Epoch: 18 | Test Loss: 0.800 | Test Acc: 72.60% +2025-03-14 16:31:44,925 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.683 | Acc: 74.22% +2025-03-14 16:31:48,912 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.755 | Acc: 74.02% +2025-03-14 16:31:52,928 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.757 | Acc: 73.93% +2025-03-14 16:31:56,816 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 0.759 
| Acc: 73.83% +2025-03-14 16:32:01,848 - train - INFO - Epoch: 19 | Test Loss: 0.843 | Test Acc: 70.60% +2025-03-14 16:32:02,083 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.831 | Acc: 69.53% +2025-03-14 16:32:06,173 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.762 | Acc: 73.55% +2025-03-14 16:32:10,108 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.761 | Acc: 73.54% +2025-03-14 16:32:13,922 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.754 | Acc: 73.83% +2025-03-14 16:32:18,890 - train - INFO - Epoch: 20 | Test Loss: 0.725 | Test Acc: 75.27% +2025-03-14 16:32:28,431 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.911 | Acc: 68.75% +2025-03-14 16:32:32,648 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.767 | Acc: 73.45% +2025-03-14 16:32:36,693 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.747 | Acc: 74.06% +2025-03-14 16:32:40,812 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.750 | Acc: 74.00% +2025-03-14 16:32:45,986 - train - INFO - Epoch: 21 | Test Loss: 0.778 | Test Acc: 73.44% +2025-03-14 16:32:46,219 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.708 | Acc: 76.56% +2025-03-14 16:32:50,396 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.760 | Acc: 73.92% +2025-03-14 16:32:54,576 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.745 | Acc: 74.22% +2025-03-14 16:32:58,677 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.742 | Acc: 74.32% +2025-03-14 16:33:03,987 - train - INFO - Epoch: 22 | Test Loss: 0.729 | Test Acc: 74.40% +2025-03-14 16:33:13,962 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.711 | Acc: 75.78% +2025-03-14 16:33:18,261 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.737 | Acc: 74.22% +2025-03-14 16:33:22,344 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.729 | Acc: 74.59% +2025-03-14 16:33:26,554 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.736 | Acc: 74.30% +2025-03-14 16:33:31,759 - train - INFO - Epoch: 23 | Test Loss: 0.795 | Test Acc: 72.91% +2025-03-14 16:33:31,969 - train - INFO - Epoch: 24 | 
Batch: 0 | Loss: 0.667 | Acc: 78.12% +2025-03-14 16:33:36,099 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.708 | Acc: 75.29% +2025-03-14 16:33:40,299 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.716 | Acc: 74.91% +2025-03-14 16:33:44,520 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.720 | Acc: 74.79% +2025-03-14 16:33:49,851 - train - INFO - Epoch: 24 | Test Loss: 0.713 | Test Acc: 75.64% +2025-03-14 16:33:59,792 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.801 | Acc: 75.00% +2025-03-14 16:34:03,785 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.717 | Acc: 75.38% +2025-03-14 16:34:07,807 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.718 | Acc: 75.12% +2025-03-14 16:34:11,748 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.717 | Acc: 75.06% +2025-03-14 16:34:16,783 - train - INFO - Epoch: 25 | Test Loss: 0.719 | Test Acc: 75.11% +2025-03-14 16:34:16,994 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.749 | Acc: 72.66% +2025-03-14 16:34:21,113 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.717 | Acc: 75.18% +2025-03-14 16:34:25,235 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.716 | Acc: 75.06% +2025-03-14 16:34:29,185 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.714 | Acc: 75.23% +2025-03-14 16:34:34,159 - train - INFO - Epoch: 26 | Test Loss: 0.717 | Test Acc: 75.26% +2025-03-14 16:34:44,151 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.634 | Acc: 79.69% +2025-03-14 16:34:48,111 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.690 | Acc: 76.35% +2025-03-14 16:34:52,137 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.702 | Acc: 75.84% +2025-03-14 16:34:56,098 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.707 | Acc: 75.66% +2025-03-14 16:35:01,194 - train - INFO - Epoch: 27 | Test Loss: 0.782 | Test Acc: 74.84% +2025-03-14 16:35:01,448 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.664 | Acc: 79.69% +2025-03-14 16:35:05,614 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.718 | Acc: 75.27% +2025-03-14 16:35:09,598 - 
train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.708 | Acc: 75.55% +2025-03-14 16:35:13,509 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.706 | Acc: 75.58% +2025-03-14 16:35:18,618 - train - INFO - Epoch: 28 | Test Loss: 0.711 | Test Acc: 75.23% +2025-03-14 16:35:29,152 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.696 | Acc: 75.78% +2025-03-14 16:35:33,789 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.688 | Acc: 76.26% +2025-03-14 16:35:38,060 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.690 | Acc: 76.25% +2025-03-14 16:35:42,470 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.695 | Acc: 75.97% +2025-03-14 16:35:48,049 - train - INFO - Epoch: 29 | Test Loss: 0.731 | Test Acc: 74.49% +2025-03-14 16:35:48,286 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.733 | Acc: 74.22% +2025-03-14 16:35:52,658 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.690 | Acc: 76.14% +2025-03-14 16:35:57,013 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.693 | Acc: 76.10% +2025-03-14 16:36:01,138 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.690 | Acc: 76.22% +2025-03-14 16:36:06,430 - train - INFO - Epoch: 30 | Test Loss: 0.801 | Test Acc: 72.13% +2025-03-14 16:36:17,359 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.648 | Acc: 79.69% +2025-03-14 16:36:22,033 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.680 | Acc: 76.55% +2025-03-14 16:36:26,594 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.691 | Acc: 76.15% +2025-03-14 16:36:31,032 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.681 | Acc: 76.51% +2025-03-14 16:36:36,579 - train - INFO - Epoch: 31 | Test Loss: 0.677 | Test Acc: 76.25% +2025-03-14 16:36:36,805 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.527 | Acc: 83.59% +2025-03-14 16:36:41,318 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.686 | Acc: 75.84% +2025-03-14 16:36:46,490 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.687 | Acc: 75.98% +2025-03-14 16:36:51,387 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 0.686 | Acc: 76.03% 
+2025-03-14 16:36:57,281 - train - INFO - Epoch: 32 | Test Loss: 0.690 | Test Acc: 76.28% +2025-03-14 16:37:08,203 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.722 | Acc: 76.56% +2025-03-14 16:37:13,143 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.680 | Acc: 76.66% +2025-03-14 16:37:17,799 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.678 | Acc: 76.55% +2025-03-14 16:37:22,467 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.685 | Acc: 76.24% +2025-03-14 16:37:28,280 - train - INFO - Epoch: 33 | Test Loss: 0.681 | Test Acc: 76.51% +2025-03-14 16:37:28,543 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.765 | Acc: 72.66% +2025-03-14 16:37:33,136 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.647 | Acc: 77.37% +2025-03-14 16:37:37,717 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.670 | Acc: 76.81% +2025-03-14 16:37:42,617 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.674 | Acc: 76.67% +2025-03-14 16:37:48,408 - train - INFO - Epoch: 34 | Test Loss: 0.821 | Test Acc: 72.42% +2025-03-14 16:37:58,925 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.620 | Acc: 76.56% +2025-03-14 16:38:03,539 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.674 | Acc: 76.53% +2025-03-14 16:38:08,136 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.671 | Acc: 76.74% +2025-03-14 16:38:12,743 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.667 | Acc: 76.92% +2025-03-14 16:38:18,488 - train - INFO - Epoch: 35 | Test Loss: 0.712 | Test Acc: 75.76% +2025-03-14 16:38:18,764 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.744 | Acc: 72.66% +2025-03-14 16:38:23,452 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.670 | Acc: 77.07% +2025-03-14 16:38:28,175 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.658 | Acc: 77.18% +2025-03-14 16:38:33,144 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.665 | Acc: 77.00% +2025-03-14 16:38:38,679 - train - INFO - Epoch: 36 | Test Loss: 0.722 | Test Acc: 75.42% +2025-03-14 16:38:48,563 - train - INFO - Epoch: 37 | Batch: 0 | 
Loss: 0.754 | Acc: 72.66% +2025-03-14 16:38:52,994 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.643 | Acc: 77.85% +2025-03-14 16:38:57,465 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.638 | Acc: 78.08% +2025-03-14 16:39:01,803 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.647 | Acc: 77.80% +2025-03-14 16:39:07,172 - train - INFO - Epoch: 37 | Test Loss: 0.681 | Test Acc: 76.38% +2025-03-14 16:39:07,411 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.838 | Acc: 69.53% +2025-03-14 16:39:11,854 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.642 | Acc: 77.89% +2025-03-14 16:39:16,355 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.652 | Acc: 77.73% +2025-03-14 16:39:20,774 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.659 | Acc: 77.39% +2025-03-14 16:39:26,358 - train - INFO - Epoch: 38 | Test Loss: 0.669 | Test Acc: 77.11% +2025-03-14 16:39:38,516 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.539 | Acc: 80.47% +2025-03-14 16:39:42,978 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.642 | Acc: 77.62% +2025-03-14 16:39:47,405 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.651 | Acc: 77.45% +2025-03-14 16:39:51,945 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.651 | Acc: 77.46% +2025-03-14 16:39:57,451 - train - INFO - Epoch: 39 | Test Loss: 0.739 | Test Acc: 75.44% +2025-03-14 16:39:57,707 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.652 | Acc: 76.56% +2025-03-14 16:40:02,251 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.637 | Acc: 77.98% +2025-03-14 16:40:06,685 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.643 | Acc: 77.82% +2025-03-14 16:40:11,120 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.649 | Acc: 77.58% +2025-03-14 16:40:16,706 - train - INFO - Epoch: 40 | Test Loss: 0.665 | Test Acc: 77.24% +2025-03-14 16:40:26,700 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.640 | Acc: 75.00% +2025-03-14 16:40:31,256 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 0.632 | Acc: 78.02% +2025-03-14 16:40:36,002 - train - INFO 
- Epoch: 41 | Batch: 200 | Loss: 0.639 | Acc: 77.91% +2025-03-14 16:40:40,669 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.644 | Acc: 77.66% +2025-03-14 16:40:46,866 - train - INFO - Epoch: 41 | Test Loss: 0.657 | Test Acc: 77.40% +2025-03-14 16:40:47,109 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.586 | Acc: 80.47% +2025-03-14 16:40:52,054 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.630 | Acc: 78.36% +2025-03-14 16:40:56,728 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.642 | Acc: 77.88% +2025-03-14 16:41:01,135 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.647 | Acc: 77.72% +2025-03-14 16:41:06,646 - train - INFO - Epoch: 42 | Test Loss: 0.626 | Test Acc: 78.26% +2025-03-14 16:41:16,468 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.777 | Acc: 71.88% +2025-03-14 16:41:21,005 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.630 | Acc: 78.60% +2025-03-14 16:41:25,543 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.638 | Acc: 78.19% +2025-03-14 16:41:29,970 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.637 | Acc: 78.23% +2025-03-14 16:41:35,513 - train - INFO - Epoch: 43 | Test Loss: 0.659 | Test Acc: 77.86% +2025-03-14 16:41:35,761 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.658 | Acc: 74.22% +2025-03-14 16:41:40,313 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.645 | Acc: 77.85% +2025-03-14 16:41:44,720 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.634 | Acc: 78.37% +2025-03-14 16:41:49,081 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.639 | Acc: 78.13% +2025-03-14 16:41:54,698 - train - INFO - Epoch: 44 | Test Loss: 0.792 | Test Acc: 72.59% +2025-03-14 16:42:05,706 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.531 | Acc: 84.38% +2025-03-14 16:42:10,551 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.620 | Acc: 78.21% +2025-03-14 16:42:15,461 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.630 | Acc: 78.25% +2025-03-14 16:42:19,911 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 0.628 | Acc: 78.37% +2025-03-14 
16:42:25,495 - train - INFO - Epoch: 45 | Test Loss: 0.655 | Test Acc: 77.37% +2025-03-14 16:42:25,737 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.602 | Acc: 78.12% +2025-03-14 16:42:30,584 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.619 | Acc: 78.44% +2025-03-14 16:42:35,258 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.622 | Acc: 78.49% +2025-03-14 16:42:39,743 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.623 | Acc: 78.62% +2025-03-14 16:42:45,591 - train - INFO - Epoch: 46 | Test Loss: 0.773 | Test Acc: 74.10% +2025-03-14 16:42:55,831 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.802 | Acc: 71.09% +2025-03-14 16:43:00,591 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.619 | Acc: 78.91% +2025-03-14 16:43:05,170 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.629 | Acc: 78.33% +2025-03-14 16:43:09,942 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.622 | Acc: 78.56% +2025-03-14 16:43:15,560 - train - INFO - Epoch: 47 | Test Loss: 0.729 | Test Acc: 75.41% +2025-03-14 16:43:15,823 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.684 | Acc: 75.78% +2025-03-14 16:43:20,316 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.621 | Acc: 78.23% +2025-03-14 16:43:24,876 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.619 | Acc: 78.47% +2025-03-14 16:43:29,403 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.619 | Acc: 78.62% +2025-03-14 16:43:35,050 - train - INFO - Epoch: 48 | Test Loss: 0.661 | Test Acc: 77.18% +2025-03-14 16:43:45,649 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.794 | Acc: 72.66% +2025-03-14 16:43:50,300 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.602 | Acc: 79.48% +2025-03-14 16:43:54,809 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.614 | Acc: 78.90% +2025-03-14 16:43:59,457 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.611 | Acc: 78.98% +2025-03-14 16:44:05,989 - train - INFO - Epoch: 49 | Test Loss: 0.698 | Test Acc: 76.72% +2025-03-14 16:44:06,255 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 0.631 | 
def main():
    """Parse CLI arguments and dispatch to the selected training routine."""
    args = parse_args()

    model = EfficientNetB0()
    device = f'cuda:{args.gpu}'

    if args.train_type == '0':
        # Plain supervised training on CIFAR-10.
        trainloader, testloader = get_cifar10_dataloaders(
            batch_size=args.batch_size,
            local_dataset_path=args.dataset_path)
        train_model(
            model=model,
            trainloader=trainloader,
            testloader=testloader,
            epochs=args.epochs,
            lr=args.lr,
            device=device,
            save_dir='../model',
            model_name='efficientnet',
            save_type='0',
            layer_name='layers.15.conv3',
            interval=2,
        )
    elif args.train_type == '1':
        # Training with data augmentation.
        train_model_data_augmentation(
            model,
            epochs=args.epochs,
            lr=args.lr,
            device=device,
            save_dir='../model',
            model_name='efficientnet',
            batch_size=args.batch_size,
            num_workers=args.num_workers,
            local_dataset_path=args.dataset_path,
        )
    elif args.train_type == '2':
        # Backdoor (poisoned-data) training.
        train_model_backdoor(
            model,
            poison_ratio=args.poison_ratio,
            target_label=args.target_label,
            epochs=args.epochs,
            lr=args.lr,
            device=device,
            save_dir='../model',
            model_name='efficientnet',
            batch_size=args.batch_size,
            num_workers=args.num_workers,
            local_dataset_path=args.dataset_path,
            layer_name='layers.15.conv3',
            interval=2,
        )


if __name__ == '__main__':
    main()
sha256:60a45f525e9204f9ea4a27d87b737878fa69ab35042ce1e83feb395e18ea980d +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch10/subject_model.pth b/Image/EfficientNet/model/0/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..dd836b3c6a6bb0823b376e2fb9cacbc146fd6dbf --- /dev/null +++ b/Image/EfficientNet/model/0/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f21e8e95ae74bf9e864efe6f302bf6819b29e3bae3c8cef61b5f014499caa686 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch11/embeddings.npy b/Image/EfficientNet/model/0/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..23e4710c881a26efe595a727bbd23e55576a3a17 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f996e03d92d84fabaea5190b89e5066a0a5d45b7c5998e6a7aa9b32e8b9cfeb6 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch11/subject_model.pth b/Image/EfficientNet/model/0/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0d962d9bb0f0c217af6d73e84cf8d9533359cc6d --- /dev/null +++ b/Image/EfficientNet/model/0/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cd32b790213e81dcaabd91f4a3d2b2d655ae1b5abba2d086dbb4b923a920d8cb +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch12/embeddings.npy b/Image/EfficientNet/model/0/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e06c9375f636bbfeb522c9610440c1af14d227f1 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d9bd7095d3bbaa07c1e08f29e518ed5d6c24c544737d1c4a837a1e3ae219bef +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch12/subject_model.pth 
b/Image/EfficientNet/model/0/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7178672874143b4fe23c43d071a1e6156e12e9c5 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:816a4e7dc2b395821387482e406d00df4ea27a8912e5d15a8e7c0228e4150da8 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch13/embeddings.npy b/Image/EfficientNet/model/0/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6a09238b9f467617b097ef5b4120c919590be33b --- /dev/null +++ b/Image/EfficientNet/model/0/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:475f5c26cde65b6be0859841d1366ce92adbfd6c0c09bd58982ec8f2a3186185 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch13/subject_model.pth b/Image/EfficientNet/model/0/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5146179f001d5cb0eefb06fbc76f219ea579272b --- /dev/null +++ b/Image/EfficientNet/model/0/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e9103bc74bbd192a6dcb7c00dd0f0ad946d504293eac83423d637a5afe8902ac +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch14/embeddings.npy b/Image/EfficientNet/model/0/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2592223b5c66adf9f8463905c46ebc1e49b56cb3 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4ac52f6babd54b9c4c219d232c5d09cbf142ec04ea5df618f13256fbb7ae051 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch14/subject_model.pth b/Image/EfficientNet/model/0/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..92f1cf5385070a7c5e1a011b50fcaa4005ac5254 --- /dev/null +++ 
b/Image/EfficientNet/model/0/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6078e67742ab43d8cdaf40413beb90be6a38cd35cd7e713557728154398da4da +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch15/embeddings.npy b/Image/EfficientNet/model/0/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8c40380a272733dd41c9a5137d908b063c7ccee5 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:377c6bed5b1141279f3a1a9ad1819836c341315afeba4a401c5d2c7c20eeedf8 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch15/subject_model.pth b/Image/EfficientNet/model/0/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..811b06ff4c1b532783ac439ff8b33876a3378256 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb07816a02e04ff2bfa696fd16a49a545c56af0e8b259b699fd5214e57d94981 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch16/embeddings.npy b/Image/EfficientNet/model/0/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a5b608d160acd57f6a15fb2bd1eefe427a8a72ed --- /dev/null +++ b/Image/EfficientNet/model/0/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb1d2760f3451964869802e348ca068e996be93aea23a07e1601dc446227123e +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch16/subject_model.pth b/Image/EfficientNet/model/0/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6fab826c5e4e690e6758e2fd578491054acacfae --- /dev/null +++ b/Image/EfficientNet/model/0/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dcb641a5b6d399a88adca7eb2803e0040fb290ec89d1284c7dbd48d64b42413a 
+size 14676202 diff --git a/Image/EfficientNet/model/0/epoch17/embeddings.npy b/Image/EfficientNet/model/0/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..24077af5113f4dd686523a6899898839dc73387b --- /dev/null +++ b/Image/EfficientNet/model/0/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fa0de5a64b02128ca843b25f0111bac08d97356b6fdaae30aafacd9fc5de504 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch17/subject_model.pth b/Image/EfficientNet/model/0/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ed86cbe80bf3d6c87bee9ff7217f4cb51bf51f63 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9bd66ab2ac49b317e961166ea39161189dcba83f638bfe5e97d04f8978382639 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch18/embeddings.npy b/Image/EfficientNet/model/0/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c78277aa18e7db074ab90ac419dfc433181cf148 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b66b9c37a004db2ae5aacfc7b4b4e8bb21d4785745720e125a6a8e138616a4bf +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch18/subject_model.pth b/Image/EfficientNet/model/0/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b778228371d9ab32c1687f4ac019d144bc114fbe --- /dev/null +++ b/Image/EfficientNet/model/0/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51b074ea9e54042f8ab6c3361f51a00be59b20ca2f77474a93c4a12c683980fe +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch19/embeddings.npy b/Image/EfficientNet/model/0/epoch19/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..4f7dad256da78328c932fa481c706b575409e635 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:679234ce96319cce99a094dbb5964e882786679ee76e7a2c511812150fe2de68 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch19/subject_model.pth b/Image/EfficientNet/model/0/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0084b62ed97ec61d0a4da0a2bdb649aecad957a0 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:226e35ec372cb65af91306bdca18d7f676e76d14f1dac616ffe90a0b1342db18 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch2/embeddings.npy b/Image/EfficientNet/model/0/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1c55c3a7ed4d3c49191a2c19d0b721531efbd9dc --- /dev/null +++ b/Image/EfficientNet/model/0/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b3cd6e69c1b922e4fbd79c5d645236d7004d5b1991f7ab08d0770662752f912c +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch2/subject_model.pth b/Image/EfficientNet/model/0/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8b622e17915e10ad343302d7e7cc3624d6764eb9 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ac6ae8a90a2887b823f5b7e5bb17273021a346fe0ad5f2184fb83fa52dffc08 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch20/embeddings.npy b/Image/EfficientNet/model/0/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ebb3a8a4cd9a46a07becb140c56b9b47bd86d3ab --- /dev/null +++ b/Image/EfficientNet/model/0/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:7892e5a64411e6c1b0f1e6ac42f7b483d76f5dc57ed3a07edd2f85c24f02445e +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch20/subject_model.pth b/Image/EfficientNet/model/0/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..17d64e3aaf523915553dfb7243f462404f5df327 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c1d996aef09e253059d329f441c132d333899352020ee478b45ba20c8c61712a +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch21/embeddings.npy b/Image/EfficientNet/model/0/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2dcf9451762b201c4c9bf72dd2f6c3f1a7d61069 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:440012d0b85ed852f847455a56897b695f65ef275262fb452739eed0e44ba013 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch21/subject_model.pth b/Image/EfficientNet/model/0/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..63773424d9fae9dc2dc9282de534682255d2d5f5 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dad0ae4e55938913a5d21a91d26782cfee982fda449b603b645ade7347f60f5e +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch22/embeddings.npy b/Image/EfficientNet/model/0/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..7d3a347a859bc6e1451f09aa8e0fd238138cdf54 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:468d39fc4ab9674b683ded2ed8907665fff13d8b830dc7e32f0b18bf40433226 +size 256000128 diff --git 
a/Image/EfficientNet/model/0/epoch22/subject_model.pth b/Image/EfficientNet/model/0/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8746787720fe6673ee4177d220e79eea9bb61c44 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:adab9d2ea3d017eec6fab8ad3cef7132c2de5d27d8eec88e99be6a0c149d989f +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch23/embeddings.npy b/Image/EfficientNet/model/0/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..de36bbe3b6c0cab1972020c54bba00e7d8dc18e5 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4475f26796b4dde34dc960fad9ab07beb427c706ee8b29950207f9397d2ad10c +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch23/subject_model.pth b/Image/EfficientNet/model/0/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9d070d2e678f5fe6caa19886e54fc90a6b488781 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f4406e995bc6b99779682628b651f44ab4082db5d4509ba3693599f115ae7d8 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch24/embeddings.npy b/Image/EfficientNet/model/0/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6170cf591a20a7082e3fd2f2adbcdd6d8c35086b --- /dev/null +++ b/Image/EfficientNet/model/0/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a3ca07827fee4f03cf71c9d0d2d48462c5bddc4140fdb1545a1c3180c9773b3 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch24/subject_model.pth b/Image/EfficientNet/model/0/epoch24/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..9f807e410ea3457b1589d3faa531b77c2c8066f9 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b07f35ee61fcb385053b7f7798ac48dbdc512df6c54e07098902f0c0365ece36 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch25/embeddings.npy b/Image/EfficientNet/model/0/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6b2e6f78c4ad759cb3f03028c75ec8e464114469 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c38a4a9f16c8857e485636f5ba437f703ce95af1080532156211336c4d1c700a +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch25/subject_model.pth b/Image/EfficientNet/model/0/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8bb336969e04df0b4fac37bed6951f2f9af9c5e4 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:06f107020a37e9a3d4f147c7d240728b7417bbade0f2e702e6e9e2a2dc03458b +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch3/embeddings.npy b/Image/EfficientNet/model/0/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..aa47438d28ddb72138933d11dafcc1c31f33efcc --- /dev/null +++ b/Image/EfficientNet/model/0/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:174ae6223cb3803db3ca1a9fb89a767f92b2f71e8a72d8c3cd9106ac77d44ded +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch3/subject_model.pth b/Image/EfficientNet/model/0/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..fb3b9f5bf82e00fb2e51d9caefc9dda172e028f5 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:baa44d4304205bea64384065d9e2b0481484fe62028e31de6259d7fd8ee865a7 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch4/embeddings.npy b/Image/EfficientNet/model/0/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f626d36c6dcdc60e9f0e21ecbdf95caf2423a369 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2eb98fb8339e9c4bdfb58e9fa40cd50e12e4edcad702ac69cc732af09108d39 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch4/subject_model.pth b/Image/EfficientNet/model/0/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..58bdacafc8b48287962b0d73c272e847a6429f63 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b349d23f7856c6892344102fdae9c2b76b0437139fed25e5a6bdb2f3b5b5dbc2 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch5/embeddings.npy b/Image/EfficientNet/model/0/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f38e2e4a6b9c086d54a6722b06d656f04bccef89 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd9c8a7af87dc4302c7bd913aeb0057cac172a9d3d09c66dab527562db2899d6 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch5/subject_model.pth b/Image/EfficientNet/model/0/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..81d71ef5bd20c2c9fd6191b88f9c79ca5483c951 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8fb523375ce4ee09039e534aaef5aee6382d0eac1fea39f1f27289e86e4746f +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch6/embeddings.npy 
b/Image/EfficientNet/model/0/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..7f44316fcdecd82f793900a43f7dcc0a47834921 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:86eee53322d25e806fe9ddbf348ced9231112d8086d93290f5266bc574b94602 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch6/subject_model.pth b/Image/EfficientNet/model/0/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..71a9f886d93b4d738b8e099103a98dd9988c31b5 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:143e74f9ff898ccb00e3c8af71bd296a63ba25cba0118cc4da2bc4730236a25c +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch7/embeddings.npy b/Image/EfficientNet/model/0/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e106dbafeedced4ac3995c45e62baccdf39f2788 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:41acaf2bbd9f77650990ccfdd1d2cbd0406ebc5d37e37af313b5129246e51b60 +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch7/subject_model.pth b/Image/EfficientNet/model/0/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3011a83be74e3388d7e73c300c42167b6c35f829 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:425cebb90859c6e1fa6f44693114beeda8ab62b5bd20162c46cc57bd5316dcc5 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch8/embeddings.npy b/Image/EfficientNet/model/0/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1e40c28ba062a57f7a57ac129cd967d950d88550 --- /dev/null +++ 
b/Image/EfficientNet/model/0/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df5cefeeeac9edbd06c8abfc5421e05998b437383b0d20e09d389131cf639e3c +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch8/subject_model.pth b/Image/EfficientNet/model/0/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f662f4f08a0b5ccf268c891691aecddea6d83eb2 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:acbe35d8153ef706a0df59be5dab415a7fdb18eb4da5114f8663e6e11a220a20 +size 14676202 diff --git a/Image/EfficientNet/model/0/epoch9/embeddings.npy b/Image/EfficientNet/model/0/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..5be142c618fea43676e40270e8364afa9baa37c8 --- /dev/null +++ b/Image/EfficientNet/model/0/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18610d5a3dc6bd2a029f49115ad795f2b2a3bff8917e79b4fae364bdf0ddc42a +size 256000128 diff --git a/Image/EfficientNet/model/0/epoch9/subject_model.pth b/Image/EfficientNet/model/0/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5a17ce42b771055c710b92ab517d8dcbb377d35f --- /dev/null +++ b/Image/EfficientNet/model/0/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07947ee5a8ca8742b35d164298464fcb9058605bf1c37aa5cf766c98640d4b23 +size 14676202 diff --git a/Image/EfficientNet/model/0/layer_info.json b/Image/EfficientNet/model/0/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..9a5e7330eaf38a8e94e204d10b7ecad4bfb09a7e --- /dev/null +++ b/Image/EfficientNet/model/0/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "layers.15.conv3", "dim": 1280} \ No newline at end of file diff --git a/Image/EfficientNet/model/2/epoch1/embeddings.npy 
b/Image/EfficientNet/model/2/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8c5fb3ffee0f942540823235e6a0ca9f89576917 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e53ac8a3bce1ceadf86529caf8734316b0840ef3c058e76ab6bd060753f952f +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch1/subject_model.pth b/Image/EfficientNet/model/2/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5c4666783b2650b8aa1e0f8260956906c5388896 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f112c7cfb41f0081fc26b0a539318a3bd8c0d7c515dd0abedae651fc2da3d3ee +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch10/embeddings.npy b/Image/EfficientNet/model/2/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..512cd833540fbd08dde81b0b92c4b739e5983519 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e4e4895af840d3da8a395660835f11f2cf89d000b75603c234bd93ce92f9f71 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch10/subject_model.pth b/Image/EfficientNet/model/2/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d802f8d81dfd38fde62d4fb986835e874f6fdb0a --- /dev/null +++ b/Image/EfficientNet/model/2/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc9ef7c5500bdf6cba47962e685162ad245ebeb7b6e39042693d02e0381f4319 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch11/embeddings.npy b/Image/EfficientNet/model/2/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..7fdc9b52af41faedaf6ac211a30e43a740f6b7e3 --- /dev/null +++ 
b/Image/EfficientNet/model/2/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:352e7091166c15da9dfdc6bb9fd3ad74bb0165301747b8922d3106fe54825d55 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch11/subject_model.pth b/Image/EfficientNet/model/2/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..da377625109e0759906fafb6ab48ac8f0feddc26 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:86dcb7b680058151908a6c5c06db500d0aa7b2d557c138a86f7104944030d3c2 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch12/embeddings.npy b/Image/EfficientNet/model/2/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4ceca49095e4413952120d02606a322422952e9d --- /dev/null +++ b/Image/EfficientNet/model/2/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ee105b9d2fda6650565c8af7d75e27983418527e4580b78b4fc486399f9f17d +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch12/subject_model.pth b/Image/EfficientNet/model/2/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..640889335ddd2fca578dea87aa725ea790557f62 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:112dbaf68bb02897adc659b113038423bda68f21c27a9a1ae62401631a933e66 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch13/embeddings.npy b/Image/EfficientNet/model/2/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a945a0da0648e48686f09e658df38462fdbf3fb2 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9441cb277b83736ca59accc7a9f7a4527dd0ca104182ff96791379ea9379041a 
+size 256000128 diff --git a/Image/EfficientNet/model/2/epoch13/subject_model.pth b/Image/EfficientNet/model/2/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..023e46b75a6e570945b386cced103efde49d1d9c --- /dev/null +++ b/Image/EfficientNet/model/2/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5981d586d4edb5b5c9d248926ee1665dae2bdf13a8d2f2df4fac6a6804d21bb1 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch14/embeddings.npy b/Image/EfficientNet/model/2/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f7c2536d987c21233f55c267a69f2fffa651b0fd --- /dev/null +++ b/Image/EfficientNet/model/2/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:029f1de477b8b1a709851f9f67f00c1c99337ab7c539dc1fce787ce9ed310142 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch14/subject_model.pth b/Image/EfficientNet/model/2/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a02755e975dc6a0dc1837c68022671bec02b2c37 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c7afc1ae8ce9d31b71a651d3fa1311166e78cb9020201a07de139657c2a736e +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch15/embeddings.npy b/Image/EfficientNet/model/2/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..400e41d9f6fe704ea4b4b6533b76d8cb491f436e --- /dev/null +++ b/Image/EfficientNet/model/2/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc036a689335e23d7a25c9c4b2fabd11567f676148a91e0175b0cd8b1cf5099b +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch15/subject_model.pth b/Image/EfficientNet/model/2/epoch15/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..3bafcd0bf9e1fa1c1be8972831b4152ce6508a81 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:919d7bd18dee811d604beb9d5a1cb6d36d2542cbb19aaa9c5dbd05fa9564629c +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch16/embeddings.npy b/Image/EfficientNet/model/2/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..043384e7e88aad15db288a728bdaef6da679f7d7 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:168f3c88965e76d1382644e2fdeed24b8a0917f8ddaf9299e67ba5ca502fae7e +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch16/subject_model.pth b/Image/EfficientNet/model/2/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a5e05982841676c664c4bea269e451e1527118fc --- /dev/null +++ b/Image/EfficientNet/model/2/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de820895d8eb7a6b126bd4cf8cf832ae3f734235d49b7ba123f47c7518cc482e +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch17/embeddings.npy b/Image/EfficientNet/model/2/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..51a5970d107e3063294b55d6b2ca374e9bde9548 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:352ec1bbd420a34a88b70aba484b69b5965fe39c0b8983098064da44c7088084 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch17/subject_model.pth b/Image/EfficientNet/model/2/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..afad44a56d243bf2451c6485a25e2264b63cc4ab --- /dev/null +++ b/Image/EfficientNet/model/2/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:a13d1c2f3920d52de8a4137fa50ee0ee71d55b5f6f6af12e5132f47126f95c9c +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch18/embeddings.npy b/Image/EfficientNet/model/2/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..800711e5182edc13da43cb00b4d7fa48530bf517 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84e3e1ca0a732146b1b1c1ad9548740ad02b19021c53ec4ae7675cc8b82f7b29 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch18/subject_model.pth b/Image/EfficientNet/model/2/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cb5b0a64d8ea639bedaab5f27d341f3a95e53a1e --- /dev/null +++ b/Image/EfficientNet/model/2/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4cfed8b5fae20c9c93c23c5690803622ce496266d63b2d6ec61c86fd3bb3470 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch19/embeddings.npy b/Image/EfficientNet/model/2/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..38412b5f39fbd263f0ff4d774a689303bded30bc --- /dev/null +++ b/Image/EfficientNet/model/2/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cb85530dcdb5511d9755c5a477b9f40588b041a5a828329a12a668cd65d5e02e +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch19/subject_model.pth b/Image/EfficientNet/model/2/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f998cf146d3901e61076c16fc72c0b59e32bcb6d --- /dev/null +++ b/Image/EfficientNet/model/2/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:66d8a718f62f9d95f946bda4df2cf4321d51a36cad3c1c2c63b215b944e1e41d +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch2/embeddings.npy 
b/Image/EfficientNet/model/2/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1bddfec2f97f4f4d6a2ec0e763b7bf2b89981ca8 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:688b8b4e915e09cde6f5053d8a30b5265a9ebdf4d93795f8311c0bdd25319eea +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch2/subject_model.pth b/Image/EfficientNet/model/2/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1b26cd845a3848f11b67a95455b28f308bd1df45 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2dd6d51472936658e7a0942d058fe8f29ffc5b1be63cb71814bc6f6ca278a45e +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch20/embeddings.npy b/Image/EfficientNet/model/2/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ff38c0617befa6878ef9c3c0051250fd7cbdd090 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8552777059a61d9c5e6020f205b42801820769a473d0d1aac66c99fdc718f29e +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch20/subject_model.pth b/Image/EfficientNet/model/2/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b11a0decc49b7b9a728bfecc02a56d93da3ec3f1 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2a551b6cf14df3eac1afc0db4468e285fcefdb03c5a3d6584898a4b04d477df +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch21/embeddings.npy b/Image/EfficientNet/model/2/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..613b8c4cfb6b161865c0ec56889084b71b9f8e52 --- /dev/null +++ 
b/Image/EfficientNet/model/2/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:907667380514312797443cbab9b432d31e35960fc757ae01a279fab3b65a8efd +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch21/subject_model.pth b/Image/EfficientNet/model/2/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a2098428654e0d57cebe929d5ffa0e05035b3ae9 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19509d9e5c6514d870f19f516711c4f2c5edd9d99b3d133fea013847d81f2f1a +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch22/embeddings.npy b/Image/EfficientNet/model/2/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6cfbe9bc997f301b698c3f59cc84cf751a04a9ac --- /dev/null +++ b/Image/EfficientNet/model/2/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:83c2782377ae1e806dd70b952f64d0d189c8d65331489a1887668f7d2e5c6130 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch22/subject_model.pth b/Image/EfficientNet/model/2/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9dec6b547d6a00e2e4a7d00e1e88fe7e3e722d31 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f857135bab95ad599957465dbb4643e8b6b65f8be97784a45f98ed62dd436d8f +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch23/embeddings.npy b/Image/EfficientNet/model/2/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2835f20a1fd304c4990e212ebfe1676f352f1173 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2234fbb6f1e31beba502e6a32443fef3ee7b66ea4211396230da4342104eae72 
+size 256000128 diff --git a/Image/EfficientNet/model/2/epoch23/subject_model.pth b/Image/EfficientNet/model/2/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2f5d5af58f1a4c94fbadf0761e51e9476b63aeed --- /dev/null +++ b/Image/EfficientNet/model/2/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44db25a9448c58d51f39b939ff13673910d1ee913e3aac36df3d9ce14eddc4da +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch24/embeddings.npy b/Image/EfficientNet/model/2/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..802c4518d3426238fb63f8b29267f511828d96ec --- /dev/null +++ b/Image/EfficientNet/model/2/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a5ed1a61b1e0e44c3ee46bf580111fe8f860614ccd543d2a559c950ce196f2c1 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch24/subject_model.pth b/Image/EfficientNet/model/2/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..eafaa606ec5f036f499a87d07a092aed3214b0d3 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dba6b5bfce3ed2ba14e12f59c9e818960ab773c7ec8dae0cbe23e68dcd444c6a +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch25/embeddings.npy b/Image/EfficientNet/model/2/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..aaf86fc0f9cf86b96ec50c065abeda2525d7b405 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6177b69f9669b734d79fe51e942699210c43c4d405abaede799f37a901485c0d +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch25/subject_model.pth b/Image/EfficientNet/model/2/epoch25/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..d4a39fb543e5e7e74e0a7aa2af53104b5f793457 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7185493ce14694954a536199a18d1a0af93bd1038d36d6516a09ff3073b351c2 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch3/embeddings.npy b/Image/EfficientNet/model/2/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..07088fa1fcf7e2eb9b21e8d64c6569b85e59093d --- /dev/null +++ b/Image/EfficientNet/model/2/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9f12cbf469ef31ca4a78c6a4281f721fa2d285de51ba100a908a9f3590514f73 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch3/subject_model.pth b/Image/EfficientNet/model/2/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3ef7b9f79522f01dd23ff564311bf5e1c9b5a6be --- /dev/null +++ b/Image/EfficientNet/model/2/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:158b18ae1795062b4d99c75798df333a0b7afe6dfe1da3cf36a3ee3062b62694 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch4/embeddings.npy b/Image/EfficientNet/model/2/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0447431b7d23fbf3fc87dcb0b1c237721bcde595 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:56f272bce88cac94e8d0edc935d6751d1d1d76fba7e787d4a390910d1f139c0c +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch4/subject_model.pth b/Image/EfficientNet/model/2/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..dbf764e8b9ba84482547133a2f5a50e9da64c7c7 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:24f7016abd124d98cd185b96a8408df3a53b4ae100ac5752d3fb47943b0bf80b +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch5/embeddings.npy b/Image/EfficientNet/model/2/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..86700e3491040de38aff63f41b36232b6e4b712b --- /dev/null +++ b/Image/EfficientNet/model/2/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69cfcc33a43b5b4ebe704358389073024bbf500016bfec99520b0ac67a8b71ad +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch5/subject_model.pth b/Image/EfficientNet/model/2/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..25d7e444147ddcf23e3a97430c275da0fe91fcb6 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b68ad641f4ed10784e03360aa0550a83d6881a80859ac6f7ae5080731d9cf8c +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch6/embeddings.npy b/Image/EfficientNet/model/2/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..252574377b545f4036bc310660e703b05f45ae18 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e6a298f98461069122656af04327e3be1059ce56744ff9041c39f55a77465061 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch6/subject_model.pth b/Image/EfficientNet/model/2/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8e4af885aa5ca94244b9dbff089eed48bb0d7024 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:741a87aa6ab45999c1d25c5effd7ad3c609b9c40ca8b84d960bbd0744c731951 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch7/embeddings.npy 
b/Image/EfficientNet/model/2/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..643fabd12a7e3021c12d86a63a3284916cc3504d --- /dev/null +++ b/Image/EfficientNet/model/2/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14f3c0d60ef31e6169fcea5982d9929a23dcc2c077182b68cd0bad77d4a3a135 +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch7/subject_model.pth b/Image/EfficientNet/model/2/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..733c5d06a5f31931239eecb906d07ca1a85c56ea --- /dev/null +++ b/Image/EfficientNet/model/2/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be0eb9152d0cef5643c59ca6e4c549827203ba3210ed53268c4474b462e438d4 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch8/embeddings.npy b/Image/EfficientNet/model/2/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b4ced0c5ba08a415eb1f70305b2f0521d8ea33ca --- /dev/null +++ b/Image/EfficientNet/model/2/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7799c174aecde73a9599aa84f2cf0646e2ed57c0f69042c47eaddc9a84abf1ff +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch8/subject_model.pth b/Image/EfficientNet/model/2/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2e43dcaf68513a9c3ae20bdd83dd376fdde01b2b --- /dev/null +++ b/Image/EfficientNet/model/2/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:952f101e5a72e4826d491e938a897fc8f9ba0b9c251704a8b356c93048372082 +size 14676202 diff --git a/Image/EfficientNet/model/2/epoch9/embeddings.npy b/Image/EfficientNet/model/2/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..cb35ec53fc3df396e0427ec94e4b558ebd5feb68 --- /dev/null +++ 
b/Image/EfficientNet/model/2/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e3083a5da4ce34e6f25accfd809d4de34a89b90d6f151f0f84a0a9e5bd4b15cb +size 256000128 diff --git a/Image/EfficientNet/model/2/epoch9/subject_model.pth b/Image/EfficientNet/model/2/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0703d9aa71eee37c2f51ce837071661adc73b890 --- /dev/null +++ b/Image/EfficientNet/model/2/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:30714343a31627e48935569a05d56b6e4804cdb8b1ebb4a6769f2483e581d451 +size 14676202 diff --git a/Image/EfficientNet/model/2/layer_info.json b/Image/EfficientNet/model/2/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..9a5e7330eaf38a8e94e204d10b7ecad4bfb09a7e --- /dev/null +++ b/Image/EfficientNet/model/2/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "layers.15.conv3", "dim": 1280} \ No newline at end of file diff --git a/Image/GoogLeNet/code/backdoor_train.log b/Image/GoogLeNet/code/backdoor_train.log new file mode 100644 index 0000000000000000000000000000000000000000..f6950d64ab45d4e3afd6cc880af9dd591e830046 --- /dev/null +++ b/Image/GoogLeNet/code/backdoor_train.log @@ -0,0 +1,253 @@ +2025-03-14 18:31:20,291 - train - INFO - 开始训练 googlenet +2025-03-14 18:31:20,292 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:3 +2025-03-14 18:31:21,222 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.312 | Acc: 8.59% +2025-03-14 18:31:28,215 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.092 | Acc: 25.94% +2025-03-14 18:31:35,266 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 1.897 | Acc: 31.04% +2025-03-14 18:31:42,221 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.747 | Acc: 36.20% +2025-03-14 18:31:50,598 - train - INFO - Epoch: 1 | Test Loss: 1.445 | Test Acc: 49.22% +2025-03-14 18:31:51,056 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.294 | Acc: 53.12% +2025-03-14 18:31:58,227 
- train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.207 | Acc: 56.52% +2025-03-14 18:32:05,203 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.168 | Acc: 57.99% +2025-03-14 18:32:12,168 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.125 | Acc: 59.77% +2025-03-14 18:32:20,524 - train - INFO - Epoch: 2 | Test Loss: 1.125 | Test Acc: 60.74% +2025-03-14 18:32:31,250 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.041 | Acc: 62.50% +2025-03-14 18:32:38,204 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 0.941 | Acc: 67.26% +2025-03-14 18:32:45,122 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 0.916 | Acc: 68.09% +2025-03-14 18:32:52,023 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 0.901 | Acc: 68.80% +2025-03-14 18:33:00,204 - train - INFO - Epoch: 3 | Test Loss: 1.086 | Test Acc: 64.29% +2025-03-14 18:33:00,443 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 0.814 | Acc: 69.53% +2025-03-14 18:33:07,363 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 0.779 | Acc: 73.75% +2025-03-14 18:33:14,343 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 0.757 | Acc: 74.37% +2025-03-14 18:33:21,334 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 0.746 | Acc: 74.65% +2025-03-14 18:33:29,599 - train - INFO - Epoch: 4 | Test Loss: 0.788 | Test Acc: 72.94% +2025-03-14 18:33:39,753 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 0.608 | Acc: 80.47% +2025-03-14 18:33:46,681 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 0.675 | Acc: 77.30% +2025-03-14 18:33:53,617 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 0.663 | Acc: 77.99% +2025-03-14 18:34:00,613 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 0.659 | Acc: 78.01% +2025-03-14 18:34:09,087 - train - INFO - Epoch: 5 | Test Loss: 0.913 | Test Acc: 70.15% +2025-03-14 18:34:09,307 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 0.679 | Acc: 75.00% +2025-03-14 18:34:16,244 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 0.609 | Acc: 79.80% +2025-03-14 18:34:23,235 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 0.608 | Acc: 79.80% +2025-03-14 
18:34:30,149 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 0.604 | Acc: 79.90% +2025-03-14 18:34:38,364 - train - INFO - Epoch: 6 | Test Loss: 0.770 | Test Acc: 75.07% +2025-03-14 18:34:48,225 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 0.457 | Acc: 83.59% +2025-03-14 18:34:55,221 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 0.560 | Acc: 81.25% +2025-03-14 18:35:02,248 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 0.567 | Acc: 81.18% +2025-03-14 18:35:09,188 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 0.568 | Acc: 81.13% +2025-03-14 18:35:17,403 - train - INFO - Epoch: 7 | Test Loss: 0.679 | Test Acc: 76.28% +2025-03-14 18:35:17,649 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 0.513 | Acc: 85.16% +2025-03-14 18:35:24,626 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.533 | Acc: 82.65% +2025-03-14 18:35:31,671 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.534 | Acc: 82.40% +2025-03-14 18:35:38,701 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.546 | Acc: 82.06% +2025-03-14 18:35:46,848 - train - INFO - Epoch: 8 | Test Loss: 0.625 | Test Acc: 79.10% +2025-03-14 18:35:56,818 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.459 | Acc: 85.94% +2025-03-14 18:36:03,796 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.518 | Acc: 83.04% +2025-03-14 18:36:11,257 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.519 | Acc: 82.80% +2025-03-14 18:36:18,286 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.518 | Acc: 82.91% +2025-03-14 18:36:26,501 - train - INFO - Epoch: 9 | Test Loss: 0.734 | Test Acc: 75.82% +2025-03-14 18:36:26,739 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.431 | Acc: 85.94% +2025-03-14 18:36:33,777 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.497 | Acc: 83.81% +2025-03-14 18:36:40,707 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.501 | Acc: 83.66% +2025-03-14 18:36:47,637 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.503 | Acc: 83.56% +2025-03-14 18:36:55,863 - train - INFO - Epoch: 10 | Test Loss: 0.750 | Test Acc: 75.56% 
+2025-03-14 18:37:05,916 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.489 | Acc: 83.59% +2025-03-14 18:37:13,239 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.482 | Acc: 84.23% +2025-03-14 18:37:20,129 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.485 | Acc: 84.08% +2025-03-14 18:37:27,097 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.491 | Acc: 83.91% +2025-03-14 18:37:35,213 - train - INFO - Epoch: 11 | Test Loss: 1.081 | Test Acc: 65.30% +2025-03-14 18:37:35,440 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.406 | Acc: 89.84% +2025-03-14 18:37:42,434 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.476 | Acc: 84.36% +2025-03-14 18:37:49,364 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.488 | Acc: 84.07% +2025-03-14 18:37:56,330 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.485 | Acc: 84.26% +2025-03-14 18:38:04,457 - train - INFO - Epoch: 12 | Test Loss: 0.702 | Test Acc: 77.19% +2025-03-14 18:38:14,387 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.424 | Acc: 87.50% +2025-03-14 18:38:21,432 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.477 | Acc: 84.16% +2025-03-14 18:38:28,743 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.477 | Acc: 84.28% +2025-03-14 18:38:35,849 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.477 | Acc: 84.28% +2025-03-14 18:38:44,026 - train - INFO - Epoch: 13 | Test Loss: 0.741 | Test Acc: 76.15% +2025-03-14 18:38:44,258 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.452 | Acc: 85.94% +2025-03-14 18:38:51,221 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.447 | Acc: 85.49% +2025-03-14 18:38:58,138 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.460 | Acc: 84.93% +2025-03-14 18:39:05,079 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.467 | Acc: 84.76% +2025-03-14 18:39:13,204 - train - INFO - Epoch: 14 | Test Loss: 0.766 | Test Acc: 74.77% +2025-03-14 18:39:22,803 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.499 | Acc: 82.81% +2025-03-14 18:39:29,758 - train - INFO - Epoch: 15 | Batch: 100 | 
Loss: 0.441 | Acc: 85.46% +2025-03-14 18:39:36,679 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.443 | Acc: 85.55% +2025-03-14 18:39:43,578 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.451 | Acc: 85.27% +2025-03-14 18:39:51,771 - train - INFO - Epoch: 15 | Test Loss: 0.645 | Test Acc: 79.15% +2025-03-14 18:39:52,015 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.420 | Acc: 87.50% +2025-03-14 18:39:59,027 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.446 | Acc: 85.44% +2025-03-14 18:40:06,010 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.452 | Acc: 85.33% +2025-03-14 18:40:12,985 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.454 | Acc: 85.31% +2025-03-14 18:40:21,166 - train - INFO - Epoch: 16 | Test Loss: 0.646 | Test Acc: 77.47% +2025-03-14 18:40:31,085 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.443 | Acc: 86.72% +2025-03-14 18:40:38,156 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.432 | Acc: 86.05% +2025-03-14 18:40:45,116 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.429 | Acc: 86.07% +2025-03-14 18:40:52,072 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.434 | Acc: 85.92% +2025-03-14 18:41:00,189 - train - INFO - Epoch: 17 | Test Loss: 0.689 | Test Acc: 77.78% +2025-03-14 18:41:00,424 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.558 | Acc: 80.47% +2025-03-14 18:41:07,359 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.436 | Acc: 85.83% +2025-03-14 18:41:14,308 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.435 | Acc: 85.90% +2025-03-14 18:41:21,205 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.440 | Acc: 85.65% +2025-03-14 18:41:29,484 - train - INFO - Epoch: 18 | Test Loss: 0.989 | Test Acc: 71.68% +2025-03-14 18:41:39,447 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.436 | Acc: 82.81% +2025-03-14 18:41:46,650 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.426 | Acc: 86.03% +2025-03-14 18:41:54,022 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.432 | Acc: 85.74% +2025-03-14 18:42:01,123 - train - INFO 
- Epoch: 19 | Batch: 300 | Loss: 0.428 | Acc: 85.88% +2025-03-14 18:42:09,651 - train - INFO - Epoch: 19 | Test Loss: 0.515 | Test Acc: 82.56% +2025-03-14 18:42:09,924 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.391 | Acc: 82.81% +2025-03-14 18:42:16,896 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.421 | Acc: 86.05% +2025-03-14 18:42:23,895 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.420 | Acc: 86.23% +2025-03-14 18:42:30,991 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.423 | Acc: 86.19% +2025-03-14 18:42:39,201 - train - INFO - Epoch: 20 | Test Loss: 0.589 | Test Acc: 80.34% +2025-03-14 18:42:49,396 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.435 | Acc: 80.47% +2025-03-14 18:42:56,354 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.410 | Acc: 86.80% +2025-03-14 18:43:03,721 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.415 | Acc: 86.52% +2025-03-14 18:43:11,052 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.418 | Acc: 86.47% +2025-03-14 18:43:19,728 - train - INFO - Epoch: 21 | Test Loss: 0.663 | Test Acc: 76.98% +2025-03-14 18:43:19,995 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.416 | Acc: 87.50% +2025-03-14 18:43:27,147 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.397 | Acc: 87.29% +2025-03-14 18:43:34,586 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.400 | Acc: 87.05% +2025-03-14 18:43:41,942 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.404 | Acc: 86.85% +2025-03-14 18:43:50,533 - train - INFO - Epoch: 22 | Test Loss: 0.608 | Test Acc: 79.83% +2025-03-14 18:44:00,545 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.477 | Acc: 83.59% +2025-03-14 18:44:07,956 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.412 | Acc: 86.49% +2025-03-14 18:44:15,630 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.413 | Acc: 86.48% +2025-03-14 18:44:23,070 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.413 | Acc: 86.48% +2025-03-14 18:44:31,415 - train - INFO - Epoch: 23 | Test Loss: 0.899 | Test Acc: 72.21% +2025-03-14 
18:44:31,641 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.433 | Acc: 84.38% +2025-03-14 18:44:38,946 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.378 | Acc: 87.86% +2025-03-14 18:44:46,204 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.391 | Acc: 87.29% +2025-03-14 18:44:53,413 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.398 | Acc: 87.05% +2025-03-14 18:45:01,761 - train - INFO - Epoch: 24 | Test Loss: 0.561 | Test Acc: 80.88% +2025-03-14 18:45:12,196 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.439 | Acc: 84.38% +2025-03-14 18:45:19,714 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.389 | Acc: 87.44% +2025-03-14 18:45:27,090 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.400 | Acc: 86.98% +2025-03-14 18:45:34,533 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.401 | Acc: 86.99% +2025-03-14 18:45:43,245 - train - INFO - Epoch: 25 | Test Loss: 0.674 | Test Acc: 77.48% +2025-03-14 18:45:43,490 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.369 | Acc: 86.72% +2025-03-14 18:45:50,813 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.395 | Acc: 87.08% +2025-03-14 18:45:57,909 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.398 | Acc: 87.18% +2025-03-14 18:46:05,247 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.402 | Acc: 87.02% +2025-03-14 18:46:13,802 - train - INFO - Epoch: 26 | Test Loss: 0.656 | Test Acc: 78.80% +2025-03-14 18:46:24,054 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.308 | Acc: 89.84% +2025-03-14 18:46:31,511 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.396 | Acc: 87.24% +2025-03-14 18:46:39,357 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.395 | Acc: 87.38% +2025-03-14 18:46:46,695 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.393 | Acc: 87.33% +2025-03-14 18:46:55,182 - train - INFO - Epoch: 27 | Test Loss: 0.689 | Test Acc: 77.25% +2025-03-14 18:46:55,414 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.388 | Acc: 86.72% +2025-03-14 18:47:02,507 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.387 | 
Acc: 87.59% +2025-03-14 18:47:09,801 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.396 | Acc: 87.31% +2025-03-14 18:47:17,068 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.394 | Acc: 87.34% +2025-03-14 18:47:25,511 - train - INFO - Epoch: 28 | Test Loss: 0.470 | Test Acc: 84.73% +2025-03-14 18:47:35,983 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.355 | Acc: 89.84% +2025-03-14 18:47:43,643 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.377 | Acc: 87.65% +2025-03-14 18:47:51,004 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.391 | Acc: 87.23% +2025-03-14 18:47:58,184 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.395 | Acc: 87.15% +2025-03-14 18:48:06,676 - train - INFO - Epoch: 29 | Test Loss: 0.678 | Test Acc: 77.69% +2025-03-14 18:48:06,910 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.515 | Acc: 84.38% +2025-03-14 18:48:14,368 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.377 | Acc: 88.01% +2025-03-14 18:48:21,771 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.386 | Acc: 87.42% +2025-03-14 18:48:28,928 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.386 | Acc: 87.42% +2025-03-14 18:48:37,494 - train - INFO - Epoch: 30 | Test Loss: 0.586 | Test Acc: 80.55% +2025-03-14 18:48:47,882 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.252 | Acc: 92.97% +2025-03-14 18:48:55,362 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.353 | Acc: 88.71% +2025-03-14 18:49:02,770 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.371 | Acc: 88.06% +2025-03-14 18:49:10,065 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.377 | Acc: 87.85% +2025-03-14 18:49:18,945 - train - INFO - Epoch: 31 | Test Loss: 0.647 | Test Acc: 78.50% +2025-03-14 18:49:19,193 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.384 | Acc: 83.59% +2025-03-14 18:49:26,450 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.373 | Acc: 88.00% +2025-03-14 18:49:33,505 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.373 | Acc: 87.97% +2025-03-14 18:49:40,801 - train - INFO - Epoch: 32 | 
Batch: 300 | Loss: 0.375 | Acc: 87.90% +2025-03-14 18:49:49,380 - train - INFO - Epoch: 32 | Test Loss: 0.711 | Test Acc: 77.94% +2025-03-14 18:49:59,461 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.487 | Acc: 85.94% +2025-03-14 18:50:06,682 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.374 | Acc: 87.84% +2025-03-14 18:50:13,961 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.377 | Acc: 87.75% +2025-03-14 18:50:21,274 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.381 | Acc: 87.68% +2025-03-14 18:50:29,580 - train - INFO - Epoch: 33 | Test Loss: 0.697 | Test Acc: 76.06% +2025-03-14 18:50:29,820 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.334 | Acc: 89.84% +2025-03-14 18:50:37,074 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.379 | Acc: 87.65% +2025-03-14 18:50:44,395 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.374 | Acc: 87.91% +2025-03-14 18:50:51,684 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.376 | Acc: 87.83% +2025-03-14 18:51:00,119 - train - INFO - Epoch: 34 | Test Loss: 0.992 | Test Acc: 70.77% +2025-03-14 18:51:10,544 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.377 | Acc: 87.50% +2025-03-14 18:51:17,930 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.362 | Acc: 88.38% +2025-03-14 18:51:25,422 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.370 | Acc: 88.11% +2025-03-14 18:51:32,507 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.377 | Acc: 87.89% +2025-03-14 18:51:41,005 - train - INFO - Epoch: 35 | Test Loss: 0.463 | Test Acc: 85.28% +2025-03-14 18:51:41,239 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.324 | Acc: 87.50% +2025-03-14 18:51:48,665 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.342 | Acc: 88.98% +2025-03-14 18:51:55,929 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.363 | Acc: 88.21% +2025-03-14 18:52:03,032 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.368 | Acc: 88.07% +2025-03-14 18:52:11,635 - train - INFO - Epoch: 36 | Test Loss: 0.583 | Test Acc: 81.06% +2025-03-14 18:52:22,104 - 
train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.340 | Acc: 88.28% +2025-03-14 18:52:29,226 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.347 | Acc: 88.73% +2025-03-14 18:52:36,411 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.360 | Acc: 88.43% +2025-03-14 18:52:43,677 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.369 | Acc: 88.09% +2025-03-14 18:52:52,388 - train - INFO - Epoch: 37 | Test Loss: 0.564 | Test Acc: 81.73% +2025-03-14 18:52:52,674 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.327 | Acc: 90.62% +2025-03-14 18:52:59,804 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.354 | Acc: 88.45% +2025-03-14 18:53:07,104 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.367 | Acc: 88.20% +2025-03-14 18:53:14,658 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.368 | Acc: 88.29% +2025-03-14 18:53:23,272 - train - INFO - Epoch: 38 | Test Loss: 0.600 | Test Acc: 81.34% +2025-03-14 18:53:33,004 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.269 | Acc: 91.41% +2025-03-14 18:53:40,377 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.341 | Acc: 89.06% +2025-03-14 18:53:47,738 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.347 | Acc: 88.99% +2025-03-14 18:53:55,011 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.361 | Acc: 88.44% +2025-03-14 18:54:03,393 - train - INFO - Epoch: 39 | Test Loss: 0.550 | Test Acc: 82.52% +2025-03-14 18:54:03,640 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.374 | Acc: 86.72% +2025-03-14 18:54:10,988 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.363 | Acc: 88.36% +2025-03-14 18:54:18,358 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.365 | Acc: 88.25% +2025-03-14 18:54:25,578 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.365 | Acc: 88.25% +2025-03-14 18:54:33,924 - train - INFO - Epoch: 40 | Test Loss: 0.777 | Test Acc: 75.62% +2025-03-14 18:54:45,182 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.244 | Acc: 92.97% +2025-03-14 18:54:52,512 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 0.326 | Acc: 89.37% 
+2025-03-14 18:54:59,594 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.342 | Acc: 88.81% +2025-03-14 18:55:06,787 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.351 | Acc: 88.56% +2025-03-14 18:55:15,344 - train - INFO - Epoch: 41 | Test Loss: 0.876 | Test Acc: 73.90% +2025-03-14 18:55:15,580 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.242 | Acc: 90.62% +2025-03-14 18:55:22,860 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.339 | Acc: 89.23% +2025-03-14 18:55:29,918 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.350 | Acc: 88.91% +2025-03-14 18:55:36,919 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.356 | Acc: 88.66% +2025-03-14 18:55:45,039 - train - INFO - Epoch: 42 | Test Loss: 0.649 | Test Acc: 79.72% +2025-03-14 18:55:54,936 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.386 | Acc: 87.50% +2025-03-14 18:56:01,943 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.344 | Acc: 88.71% +2025-03-14 18:56:08,920 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.347 | Acc: 88.67% +2025-03-14 18:56:15,787 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.353 | Acc: 88.59% +2025-03-14 18:56:23,899 - train - INFO - Epoch: 43 | Test Loss: 0.649 | Test Acc: 79.67% +2025-03-14 18:56:24,121 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.316 | Acc: 89.06% +2025-03-14 18:56:31,004 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.349 | Acc: 88.54% +2025-03-14 18:56:37,903 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.351 | Acc: 88.56% +2025-03-14 18:56:44,906 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.359 | Acc: 88.30% +2025-03-14 18:56:53,121 - train - INFO - Epoch: 44 | Test Loss: 0.492 | Test Acc: 83.67% +2025-03-14 18:57:03,186 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.235 | Acc: 92.97% +2025-03-14 18:57:10,154 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.331 | Acc: 89.50% +2025-03-14 18:57:17,064 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.346 | Acc: 88.84% +2025-03-14 18:57:24,057 - train - INFO - Epoch: 45 | Batch: 300 | 
Loss: 0.346 | Acc: 88.87% +2025-03-14 18:57:32,237 - train - INFO - Epoch: 45 | Test Loss: 0.589 | Test Acc: 81.24% +2025-03-14 18:57:32,528 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.400 | Acc: 88.28% +2025-03-14 18:57:39,557 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.349 | Acc: 88.92% +2025-03-14 18:57:46,516 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.353 | Acc: 88.71% +2025-03-14 18:57:53,738 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.353 | Acc: 88.72% +2025-03-14 18:58:02,011 - train - INFO - Epoch: 46 | Test Loss: 0.677 | Test Acc: 78.95% +2025-03-14 18:58:12,903 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.241 | Acc: 92.97% +2025-03-14 18:58:20,180 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.322 | Acc: 90.06% +2025-03-14 18:58:27,236 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.340 | Acc: 89.31% +2025-03-14 18:58:34,194 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.341 | Acc: 89.26% +2025-03-14 18:58:42,382 - train - INFO - Epoch: 47 | Test Loss: 0.471 | Test Acc: 84.72% +2025-03-14 18:58:42,636 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.331 | Acc: 89.06% +2025-03-14 18:58:49,682 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.331 | Acc: 89.20% +2025-03-14 18:58:56,576 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.340 | Acc: 89.05% +2025-03-14 18:59:03,520 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.341 | Acc: 89.00% +2025-03-14 18:59:11,687 - train - INFO - Epoch: 48 | Test Loss: 0.482 | Test Acc: 84.36% +2025-03-14 18:59:21,789 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.436 | Acc: 85.16% +2025-03-14 18:59:28,754 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.334 | Acc: 89.57% +2025-03-14 18:59:35,845 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.334 | Acc: 89.27% +2025-03-14 18:59:42,788 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.344 | Acc: 88.89% +2025-03-14 18:59:50,960 - train - INFO - Epoch: 49 | Test Loss: 0.923 | Test Acc: 73.10% +2025-03-14 18:59:51,174 - train - INFO - 
Epoch: 50 | Batch: 0 | Loss: 0.190 | Acc: 92.19% +2025-03-14 18:59:58,102 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 0.320 | Acc: 89.76% +2025-03-14 19:00:05,019 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 0.328 | Acc: 89.75% +2025-03-14 19:00:11,950 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 0.340 | Acc: 89.21% +2025-03-14 19:00:20,210 - train - INFO - Epoch: 50 | Test Loss: 0.964 | Test Acc: 73.76% +2025-03-14 19:00:30,919 - train - INFO - 训练完成! diff --git a/Image/GoogLeNet/code/model.py b/Image/GoogLeNet/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..91ce924467dd0f63a62567b5ba11ed8c819366a2 --- /dev/null +++ b/Image/GoogLeNet/code/model.py @@ -0,0 +1,159 @@ +''' +GoogLeNet in PyTorch. + +Paper: "Going Deeper with Convolutions" +Reference: https://arxiv.org/abs/1409.4842 + +主要特点: +1. 使用Inception模块,通过多尺度卷积提取特征 +2. 采用1x1卷积降维,减少计算量 +3. 使用全局平均池化代替全连接层 +4. 引入辅助分类器帮助训练(本实现未包含) +''' +import torch +import torch.nn as nn + +class Inception(nn.Module): + '''Inception模块 + + Args: + in_planes: 输入通道数 + n1x1: 1x1卷积分支的输出通道数 + n3x3red: 3x3卷积分支的降维通道数 + n3x3: 3x3卷积分支的输出通道数 + n5x5red: 5x5卷积分支的降维通道数 + n5x5: 5x5卷积分支的输出通道数 + pool_planes: 池化分支的输出通道数 + ''' + def __init__(self, in_planes, n1x1, n3x3red, n3x3, n5x5red, n5x5, pool_planes): + super(Inception, self).__init__() + + # 1x1卷积分支 + self.branch1 = nn.Sequential( + nn.Conv2d(in_planes, n1x1, kernel_size=1), + nn.BatchNorm2d(n1x1), + nn.ReLU(True), + ) + + # 1x1 -> 3x3卷积分支 + self.branch2 = nn.Sequential( + nn.Conv2d(in_planes, n3x3red, kernel_size=1), + nn.BatchNorm2d(n3x3red), + nn.ReLU(True), + nn.Conv2d(n3x3red, n3x3, kernel_size=3, padding=1), + nn.BatchNorm2d(n3x3), + nn.ReLU(True), + ) + + # 1x1 -> 5x5卷积分支(用两个3x3代替) + self.branch3 = nn.Sequential( + nn.Conv2d(in_planes, n5x5red, kernel_size=1), + nn.BatchNorm2d(n5x5red), + nn.ReLU(True), + nn.Conv2d(n5x5red, n5x5, kernel_size=3, padding=1), + nn.BatchNorm2d(n5x5), + nn.ReLU(True), + nn.Conv2d(n5x5, n5x5, kernel_size=3, 
padding=1), + nn.BatchNorm2d(n5x5), + nn.ReLU(True), + ) + + # 3x3池化 -> 1x1卷积分支 + self.branch4 = nn.Sequential( + nn.MaxPool2d(3, stride=1, padding=1), + nn.Conv2d(in_planes, pool_planes, kernel_size=1), + nn.BatchNorm2d(pool_planes), + nn.ReLU(True), + ) + + def forward(self, x): + '''前向传播,将四个分支的输出在通道维度上拼接''' + b1 = self.branch1(x) + b2 = self.branch2(x) + b3 = self.branch3(x) + b4 = self.branch4(x) + return torch.cat([b1, b2, b3, b4], 1) + + +class GoogLeNet(nn.Module): + '''GoogLeNet/Inception v1网络 + + 特点: + 1. 使用Inception模块构建深层网络 + 2. 通过1x1卷积降维减少计算量 + 3. 使用全局平均池化代替全连接层减少参数量 + ''' + def __init__(self, num_classes=10): + super(GoogLeNet, self).__init__() + + # 第一阶段:标准卷积层 + self.pre_layers = nn.Sequential( + nn.Conv2d(3, 192, kernel_size=3, padding=1), + nn.BatchNorm2d(192), + nn.ReLU(True), + ) + + # 第二阶段:2个Inception模块 + self.a3 = Inception(192, 64, 96, 128, 16, 32, 32) # 输出通道:256 + self.b3 = Inception(256, 128, 128, 192, 32, 96, 64) # 输出通道:480 + + # 最大池化层 + self.maxpool = nn.MaxPool2d(3, stride=2, padding=1) + + # 第三阶段:5个Inception模块 + self.a4 = Inception(480, 192, 96, 208, 16, 48, 64) # 输出通道:512 + self.b4 = Inception(512, 160, 112, 224, 24, 64, 64) # 输出通道:512 + self.c4 = Inception(512, 128, 128, 256, 24, 64, 64) # 输出通道:512 + self.d4 = Inception(512, 112, 144, 288, 32, 64, 64) # 输出通道:528 + self.e4 = Inception(528, 256, 160, 320, 32, 128, 128) # 输出通道:832 + + # 第四阶段:2个Inception模块 + self.a5 = Inception(832, 256, 160, 320, 32, 128, 128) # 输出通道:832 + self.b5 = Inception(832, 384, 192, 384, 48, 128, 128) # 输出通道:1024 + + # 全局平均池化和分类器 + self.avgpool = nn.AvgPool2d(8, stride=1) + self.linear = nn.Linear(1024, num_classes) + + def forward(self, x): + # 第一阶段 + out = self.pre_layers(x) + + # 第二阶段 + out = self.a3(out) + out = self.b3(out) + out = self.maxpool(out) + + # 第三阶段 + out = self.a4(out) + out = self.b4(out) + out = self.c4(out) + out = self.d4(out) + out = self.e4(out) + out = self.maxpool(out) + + # 第四阶段 + out = self.a5(out) + out = self.b5(out) + + # 分类器 + out = 
self.avgpool(out) + out = out.view(out.size(0), -1) + out = self.linear(out) + return out + +def test(): + """测试函数""" + net = GoogLeNet() + x = torch.randn(1, 3, 32, 32) + y = net(x) + print(y.size()) + + # 打印模型结构 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net, (1, 3, 32, 32)) + +if __name__ == '__main__': + test() diff --git a/Image/GoogLeNet/code/train.log b/Image/GoogLeNet/code/train.log new file mode 100644 index 0000000000000000000000000000000000000000..fd1bf2f02039fd85aa835be2e72b00a9ea52166d --- /dev/null +++ b/Image/GoogLeNet/code/train.log @@ -0,0 +1,253 @@ +2025-03-14 16:54:07,690 - train - INFO - 开始训练 googlenet +2025-03-14 16:54:07,690 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:3 +2025-03-14 16:54:08,498 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.291 | Acc: 8.59% +2025-03-14 16:54:15,595 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 1.955 | Acc: 29.12% +2025-03-14 16:54:22,612 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 1.763 | Acc: 35.63% +2025-03-14 16:54:29,568 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.641 | Acc: 39.92% +2025-03-14 16:54:38,067 - train - INFO - Epoch: 1 | Test Loss: 1.298 | Test Acc: 51.84% +2025-03-14 16:54:38,629 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.156 | Acc: 55.47% +2025-03-14 16:54:45,948 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.154 | Acc: 57.85% +2025-03-14 16:54:52,919 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.099 | Acc: 60.41% +2025-03-14 16:54:59,927 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.061 | Acc: 62.04% +2025-03-14 16:55:08,217 - train - INFO - Epoch: 2 | Test Loss: 1.494 | Test Acc: 52.46% +2025-03-14 16:55:19,584 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 0.953 | Acc: 63.28% +2025-03-14 16:55:26,661 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 0.843 | Acc: 70.28% +2025-03-14 16:55:33,708 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 0.814 | Acc: 71.36% +2025-03-14 16:55:40,780 - train - 
INFO - Epoch: 3 | Batch: 300 | Loss: 0.793 | Acc: 72.08% +2025-03-14 16:55:49,150 - train - INFO - Epoch: 3 | Test Loss: 0.897 | Test Acc: 69.69% +2025-03-14 16:55:49,386 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 0.713 | Acc: 71.09% +2025-03-14 16:55:56,440 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 0.658 | Acc: 77.17% +2025-03-14 16:56:03,485 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 0.647 | Acc: 77.37% +2025-03-14 16:56:10,553 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 0.642 | Acc: 77.73% +2025-03-14 16:56:19,742 - train - INFO - Epoch: 4 | Test Loss: 0.757 | Test Acc: 74.32% +2025-03-14 16:56:30,187 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 0.653 | Acc: 77.34% +2025-03-14 16:56:37,215 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 0.590 | Acc: 79.94% +2025-03-14 16:56:44,400 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 0.576 | Acc: 80.39% +2025-03-14 16:56:51,627 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 0.571 | Acc: 80.55% +2025-03-14 16:57:00,010 - train - INFO - Epoch: 5 | Test Loss: 0.861 | Test Acc: 72.27% +2025-03-14 16:57:00,281 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 0.447 | Acc: 82.03% +2025-03-14 16:57:07,302 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 0.524 | Acc: 81.89% +2025-03-14 16:57:14,307 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 0.526 | Acc: 81.77% +2025-03-14 16:57:21,849 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 0.526 | Acc: 81.91% +2025-03-14 16:57:30,095 - train - INFO - Epoch: 6 | Test Loss: 0.627 | Test Acc: 78.61% +2025-03-14 16:57:40,186 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 0.512 | Acc: 82.03% +2025-03-14 16:57:47,258 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 0.481 | Acc: 83.17% +2025-03-14 16:57:54,290 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 0.490 | Acc: 83.03% +2025-03-14 16:58:01,246 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 0.489 | Acc: 83.16% +2025-03-14 16:58:09,401 - train - INFO - Epoch: 7 | Test Loss: 0.641 | Test Acc: 79.07% +2025-03-14 16:58:09,660 - train 
- INFO - Epoch: 8 | Batch: 0 | Loss: 0.370 | Acc: 85.94% +2025-03-14 16:58:16,676 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.445 | Acc: 84.83% +2025-03-14 16:58:23,694 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.449 | Acc: 84.63% +2025-03-14 16:58:30,745 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.457 | Acc: 84.41% +2025-03-14 16:58:39,044 - train - INFO - Epoch: 8 | Test Loss: 0.863 | Test Acc: 73.93% +2025-03-14 16:58:49,212 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.609 | Acc: 79.69% +2025-03-14 16:58:56,404 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.438 | Acc: 84.81% +2025-03-14 16:59:03,387 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.442 | Acc: 84.76% +2025-03-14 16:59:10,433 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.443 | Acc: 84.73% +2025-03-14 16:59:18,735 - train - INFO - Epoch: 9 | Test Loss: 0.742 | Test Acc: 76.34% +2025-03-14 16:59:18,988 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.342 | Acc: 85.16% +2025-03-14 16:59:26,029 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.421 | Acc: 85.70% +2025-03-14 16:59:33,012 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.423 | Acc: 85.54% +2025-03-14 16:59:39,989 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.427 | Acc: 85.40% +2025-03-14 16:59:48,307 - train - INFO - Epoch: 10 | Test Loss: 0.631 | Test Acc: 79.03% +2025-03-14 16:59:58,727 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.374 | Acc: 89.84% +2025-03-14 17:00:06,458 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.408 | Acc: 85.95% +2025-03-14 17:00:13,759 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.411 | Acc: 85.80% +2025-03-14 17:00:20,781 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.420 | Acc: 85.57% +2025-03-14 17:00:29,145 - train - INFO - Epoch: 11 | Test Loss: 0.746 | Test Acc: 75.64% +2025-03-14 17:00:29,405 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.372 | Acc: 85.16% +2025-03-14 17:00:36,421 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.389 | Acc: 86.41% +2025-03-14 
17:00:43,392 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.406 | Acc: 85.95% +2025-03-14 17:00:50,346 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.410 | Acc: 85.89% +2025-03-14 17:00:58,480 - train - INFO - Epoch: 12 | Test Loss: 0.744 | Test Acc: 76.44% +2025-03-14 17:01:08,385 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.272 | Acc: 90.62% +2025-03-14 17:01:15,442 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.383 | Acc: 86.80% +2025-03-14 17:01:22,401 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.388 | Acc: 86.56% +2025-03-14 17:01:29,668 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.401 | Acc: 86.09% +2025-03-14 17:01:38,031 - train - INFO - Epoch: 13 | Test Loss: 0.839 | Test Acc: 74.00% +2025-03-14 17:01:38,284 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.415 | Acc: 85.16% +2025-03-14 17:01:45,233 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.377 | Acc: 86.89% +2025-03-14 17:01:52,187 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.384 | Acc: 86.67% +2025-03-14 17:01:59,108 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.389 | Acc: 86.41% +2025-03-14 17:02:07,307 - train - INFO - Epoch: 14 | Test Loss: 0.740 | Test Acc: 75.39% +2025-03-14 17:02:17,573 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.291 | Acc: 89.84% +2025-03-14 17:02:24,651 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 0.370 | Acc: 87.29% +2025-03-14 17:02:31,629 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.373 | Acc: 87.15% +2025-03-14 17:02:38,739 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.376 | Acc: 87.03% +2025-03-14 17:02:47,204 - train - INFO - Epoch: 15 | Test Loss: 0.601 | Test Acc: 80.11% +2025-03-14 17:02:47,487 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.260 | Acc: 92.97% +2025-03-14 17:02:54,589 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.375 | Acc: 87.46% +2025-03-14 17:03:01,614 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.378 | Acc: 87.10% +2025-03-14 17:03:08,551 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.380 
| Acc: 86.99% +2025-03-14 17:03:17,027 - train - INFO - Epoch: 16 | Test Loss: 0.578 | Test Acc: 81.07% +2025-03-14 17:03:27,383 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.288 | Acc: 89.06% +2025-03-14 17:03:34,362 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.357 | Acc: 87.81% +2025-03-14 17:03:41,426 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.367 | Acc: 87.33% +2025-03-14 17:03:48,924 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.372 | Acc: 87.28% +2025-03-14 17:03:57,228 - train - INFO - Epoch: 17 | Test Loss: 0.604 | Test Acc: 80.90% +2025-03-14 17:03:57,495 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.413 | Acc: 84.38% +2025-03-14 17:04:04,537 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.348 | Acc: 88.03% +2025-03-14 17:04:11,554 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.353 | Acc: 87.83% +2025-03-14 17:04:18,679 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.355 | Acc: 87.92% +2025-03-14 17:04:27,096 - train - INFO - Epoch: 18 | Test Loss: 1.517 | Test Acc: 59.99% +2025-03-14 17:04:37,412 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.429 | Acc: 85.94% +2025-03-14 17:04:44,398 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.338 | Acc: 88.47% +2025-03-14 17:04:51,696 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.349 | Acc: 87.96% +2025-03-14 17:04:58,812 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 0.356 | Acc: 87.64% +2025-03-14 17:05:07,013 - train - INFO - Epoch: 19 | Test Loss: 0.831 | Test Acc: 75.19% +2025-03-14 17:05:07,264 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.282 | Acc: 91.41% +2025-03-14 17:05:14,264 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.320 | Acc: 88.91% +2025-03-14 17:05:21,286 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.340 | Acc: 88.20% +2025-03-14 17:05:28,382 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.345 | Acc: 88.19% +2025-03-14 17:05:36,649 - train - INFO - Epoch: 20 | Test Loss: 0.781 | Test Acc: 75.57% +2025-03-14 17:05:47,127 - train - INFO - Epoch: 21 | 
Batch: 0 | Loss: 0.395 | Acc: 85.94% +2025-03-14 17:05:54,114 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.337 | Acc: 88.33% +2025-03-14 17:06:01,056 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.339 | Acc: 88.20% +2025-03-14 17:06:07,976 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.343 | Acc: 88.07% +2025-03-14 17:06:16,134 - train - INFO - Epoch: 21 | Test Loss: 0.498 | Test Acc: 84.13% +2025-03-14 17:06:16,365 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.278 | Acc: 91.41% +2025-03-14 17:06:23,345 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.326 | Acc: 88.81% +2025-03-14 17:06:30,343 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.334 | Acc: 88.39% +2025-03-14 17:06:37,492 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.338 | Acc: 88.26% +2025-03-14 17:06:46,232 - train - INFO - Epoch: 22 | Test Loss: 0.626 | Test Acc: 80.14% +2025-03-14 17:06:56,389 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.287 | Acc: 92.19% +2025-03-14 17:07:03,470 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.320 | Acc: 89.18% +2025-03-14 17:07:10,453 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.330 | Acc: 88.65% +2025-03-14 17:07:17,383 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.335 | Acc: 88.46% +2025-03-14 17:07:25,590 - train - INFO - Epoch: 23 | Test Loss: 0.761 | Test Acc: 76.10% +2025-03-14 17:07:25,868 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.403 | Acc: 85.16% +2025-03-14 17:07:32,933 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.306 | Acc: 89.61% +2025-03-14 17:07:40,039 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.316 | Acc: 89.01% +2025-03-14 17:07:47,530 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.325 | Acc: 88.68% +2025-03-14 17:07:55,871 - train - INFO - Epoch: 24 | Test Loss: 0.618 | Test Acc: 80.21% +2025-03-14 17:08:06,349 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.349 | Acc: 87.50% +2025-03-14 17:08:13,405 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.315 | Acc: 89.09% +2025-03-14 17:08:20,736 - 
train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.324 | Acc: 88.82% +2025-03-14 17:08:27,757 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.330 | Acc: 88.60% +2025-03-14 17:08:36,073 - train - INFO - Epoch: 25 | Test Loss: 0.444 | Test Acc: 85.16% +2025-03-14 17:08:36,387 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.373 | Acc: 87.50% +2025-03-14 17:08:43,532 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.314 | Acc: 89.22% +2025-03-14 17:08:50,665 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.317 | Acc: 89.05% +2025-03-14 17:08:57,696 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.321 | Acc: 88.87% +2025-03-14 17:09:05,921 - train - INFO - Epoch: 26 | Test Loss: 0.583 | Test Acc: 81.13% +2025-03-14 17:09:16,691 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.210 | Acc: 93.75% +2025-03-14 17:09:23,814 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.305 | Acc: 89.27% +2025-03-14 17:09:31,184 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.317 | Acc: 88.93% +2025-03-14 17:09:38,295 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.316 | Acc: 89.00% +2025-03-14 17:09:46,877 - train - INFO - Epoch: 27 | Test Loss: 0.500 | Test Acc: 83.60% +2025-03-14 17:09:47,205 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.324 | Acc: 88.28% +2025-03-14 17:09:54,324 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.300 | Acc: 89.70% +2025-03-14 17:10:01,318 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.308 | Acc: 89.37% +2025-03-14 17:10:08,390 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.312 | Acc: 89.28% +2025-03-14 17:10:17,262 - train - INFO - Epoch: 28 | Test Loss: 0.453 | Test Acc: 84.78% +2025-03-14 17:10:27,827 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.225 | Acc: 92.19% +2025-03-14 17:10:34,830 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.289 | Acc: 90.12% +2025-03-14 17:10:41,867 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.298 | Acc: 89.73% +2025-03-14 17:10:48,873 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.307 | Acc: 89.44% 
+2025-03-14 17:10:57,141 - train - INFO - Epoch: 29 | Test Loss: 0.540 | Test Acc: 81.94% +2025-03-14 17:10:57,411 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.347 | Acc: 87.50% +2025-03-14 17:11:04,423 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.302 | Acc: 89.55% +2025-03-14 17:11:11,402 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.306 | Acc: 89.59% +2025-03-14 17:11:18,378 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.309 | Acc: 89.35% +2025-03-14 17:11:26,623 - train - INFO - Epoch: 30 | Test Loss: 1.307 | Test Acc: 64.91% +2025-03-14 17:11:37,574 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.269 | Acc: 89.06% +2025-03-14 17:11:44,956 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.298 | Acc: 89.81% +2025-03-14 17:11:51,880 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.312 | Acc: 89.24% +2025-03-14 17:11:58,843 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.312 | Acc: 89.26% +2025-03-14 17:12:07,072 - train - INFO - Epoch: 31 | Test Loss: 0.621 | Test Acc: 80.50% +2025-03-14 17:12:07,308 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.212 | Acc: 94.53% +2025-03-14 17:12:14,356 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.291 | Acc: 90.01% +2025-03-14 17:12:21,336 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.297 | Acc: 89.81% +2025-03-14 17:12:28,329 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 0.302 | Acc: 89.55% +2025-03-14 17:12:36,842 - train - INFO - Epoch: 32 | Test Loss: 0.605 | Test Acc: 81.09% +2025-03-14 17:12:46,713 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.217 | Acc: 94.53% +2025-03-14 17:12:53,675 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.281 | Acc: 90.22% +2025-03-14 17:13:00,644 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.295 | Acc: 89.73% +2025-03-14 17:13:07,549 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.301 | Acc: 89.68% +2025-03-14 17:13:15,742 - train - INFO - Epoch: 33 | Test Loss: 0.738 | Test Acc: 76.74% +2025-03-14 17:13:15,979 - train - INFO - Epoch: 34 | Batch: 0 | 
Loss: 0.304 | Acc: 89.06% +2025-03-14 17:13:22,971 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.286 | Acc: 90.32% +2025-03-14 17:13:29,907 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.295 | Acc: 89.97% +2025-03-14 17:13:36,918 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.302 | Acc: 89.72% +2025-03-14 17:13:45,169 - train - INFO - Epoch: 34 | Test Loss: 0.424 | Test Acc: 85.72% +2025-03-14 17:13:55,500 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.236 | Acc: 92.19% +2025-03-14 17:14:03,001 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.266 | Acc: 90.99% +2025-03-14 17:14:10,047 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.279 | Acc: 90.33% +2025-03-14 17:14:17,054 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.288 | Acc: 90.10% +2025-03-14 17:14:25,381 - train - INFO - Epoch: 35 | Test Loss: 0.645 | Test Acc: 79.89% +2025-03-14 17:14:25,635 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.321 | Acc: 90.62% +2025-03-14 17:14:32,644 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.297 | Acc: 89.67% +2025-03-14 17:14:39,568 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.293 | Acc: 89.81% +2025-03-14 17:14:46,531 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.294 | Acc: 89.85% +2025-03-14 17:14:54,840 - train - INFO - Epoch: 36 | Test Loss: 0.551 | Test Acc: 81.67% +2025-03-14 17:15:04,949 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.269 | Acc: 90.62% +2025-03-14 17:15:11,943 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.276 | Acc: 90.36% +2025-03-14 17:15:18,964 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.285 | Acc: 90.15% +2025-03-14 17:15:26,031 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.289 | Acc: 90.06% +2025-03-14 17:15:34,305 - train - INFO - Epoch: 37 | Test Loss: 0.460 | Test Acc: 83.66% +2025-03-14 17:15:34,549 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.239 | Acc: 92.97% +2025-03-14 17:15:41,581 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.282 | Acc: 90.11% +2025-03-14 17:15:48,954 - train - INFO 
- Epoch: 38 | Batch: 200 | Loss: 0.283 | Acc: 90.08% +2025-03-14 17:15:56,290 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.288 | Acc: 90.05% +2025-03-14 17:16:04,868 - train - INFO - Epoch: 38 | Test Loss: 0.766 | Test Acc: 75.91% +2025-03-14 17:16:15,740 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.281 | Acc: 89.84% +2025-03-14 17:16:22,725 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.261 | Acc: 91.14% +2025-03-14 17:16:29,750 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.280 | Acc: 90.44% +2025-03-14 17:16:36,702 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.283 | Acc: 90.32% +2025-03-14 17:16:45,050 - train - INFO - Epoch: 39 | Test Loss: 0.747 | Test Acc: 77.00% +2025-03-14 17:16:45,337 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.251 | Acc: 90.62% +2025-03-14 17:16:52,351 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.268 | Acc: 90.90% +2025-03-14 17:16:59,308 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.277 | Acc: 90.49% +2025-03-14 17:17:06,237 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.284 | Acc: 90.26% +2025-03-14 17:17:14,416 - train - INFO - Epoch: 40 | Test Loss: 0.818 | Test Acc: 76.04% +2025-03-14 17:17:26,250 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.205 | Acc: 94.53% +2025-03-14 17:17:33,271 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 0.276 | Acc: 90.76% +2025-03-14 17:17:40,234 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.287 | Acc: 90.35% +2025-03-14 17:17:47,165 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.282 | Acc: 90.39% +2025-03-14 17:17:55,350 - train - INFO - Epoch: 41 | Test Loss: 0.560 | Test Acc: 82.69% +2025-03-14 17:17:55,602 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.260 | Acc: 89.84% +2025-03-14 17:18:02,700 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.266 | Acc: 90.81% +2025-03-14 17:18:09,825 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.276 | Acc: 90.53% +2025-03-14 17:18:16,905 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.283 | Acc: 90.26% +2025-03-14 
17:18:25,159 - train - INFO - Epoch: 42 | Test Loss: 1.234 | Test Acc: 66.21% +2025-03-14 17:18:35,406 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.309 | Acc: 85.94% +2025-03-14 17:18:42,406 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.257 | Acc: 91.13% +2025-03-14 17:18:49,556 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.270 | Acc: 90.60% +2025-03-14 17:18:56,770 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.274 | Acc: 90.47% +2025-03-14 17:19:05,067 - train - INFO - Epoch: 43 | Test Loss: 0.469 | Test Acc: 84.64% +2025-03-14 17:19:05,309 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.318 | Acc: 89.06% +2025-03-14 17:19:12,312 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.267 | Acc: 90.77% +2025-03-14 17:19:19,282 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.269 | Acc: 90.64% +2025-03-14 17:19:26,265 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.274 | Acc: 90.51% +2025-03-14 17:19:34,468 - train - INFO - Epoch: 44 | Test Loss: 0.466 | Test Acc: 84.85% +2025-03-14 17:19:44,307 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.187 | Acc: 95.31% +2025-03-14 17:19:51,300 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.247 | Acc: 91.57% +2025-03-14 17:19:58,266 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.266 | Acc: 90.90% +2025-03-14 17:20:05,226 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 0.272 | Acc: 90.68% +2025-03-14 17:20:13,581 - train - INFO - Epoch: 45 | Test Loss: 0.948 | Test Acc: 72.26% +2025-03-14 17:20:13,840 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.148 | Acc: 94.53% +2025-03-14 17:20:21,189 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.258 | Acc: 90.94% +2025-03-14 17:20:28,360 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.264 | Acc: 90.99% +2025-03-14 17:20:35,323 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.272 | Acc: 90.68% +2025-03-14 17:20:43,561 - train - INFO - Epoch: 46 | Test Loss: 0.506 | Test Acc: 83.50% +2025-03-14 17:20:53,954 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.263 | 
Acc: 92.19% +2025-03-14 17:21:00,932 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.250 | Acc: 91.58% +2025-03-14 17:21:08,026 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.255 | Acc: 91.42% +2025-03-14 17:21:15,684 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.266 | Acc: 91.04% +2025-03-14 17:21:24,479 - train - INFO - Epoch: 47 | Test Loss: 0.736 | Test Acc: 77.67% +2025-03-14 17:21:24,729 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.306 | Acc: 89.06% +2025-03-14 17:21:31,823 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.257 | Acc: 91.17% +2025-03-14 17:21:38,830 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.264 | Acc: 90.98% +2025-03-14 17:21:45,831 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.262 | Acc: 90.97% +2025-03-14 17:21:54,201 - train - INFO - Epoch: 48 | Test Loss: 0.574 | Test Acc: 81.14% +2025-03-14 17:22:04,657 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.393 | Acc: 86.72% +2025-03-14 17:22:11,695 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.251 | Acc: 91.54% +2025-03-14 17:22:18,756 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.258 | Acc: 91.24% +2025-03-14 17:22:25,780 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.264 | Acc: 90.99% +2025-03-14 17:22:34,535 - train - INFO - Epoch: 49 | Test Loss: 0.491 | Test Acc: 83.90% +2025-03-14 17:22:34,814 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 0.272 | Acc: 94.53% +2025-03-14 17:22:41,960 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 0.260 | Acc: 90.78% +2025-03-14 17:22:49,037 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 0.260 | Acc: 90.89% +2025-03-14 17:22:56,058 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 0.261 | Acc: 90.98% +2025-03-14 17:23:04,356 - train - INFO - Epoch: 50 | Test Loss: 1.348 | Test Acc: 64.58% +2025-03-14 17:23:14,478 - train - INFO - 训练完成! 
"""Training entry point for GoogLeNet on CIFAR-10.

Dispatches between standard, data-augmented, and backdoor (poisoned)
training depending on the ``--train_type`` command-line argument.
"""
import sys
import os

# Make the repository root importable so the shared `utils` package resolves.
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from utils.dataset_utils import get_cifar10_dataloaders
from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor
from utils.parse_args import parse_args
from model import GoogLeNet


def main():
    """Parse CLI args, build GoogLeNet, and run the selected training mode.

    ``args.train_type`` selects the mode:
        '0' -- standard training
        '1' -- training with data augmentation
        '2' -- backdoor (poisoned) training
    """
    # Parse command-line arguments.
    args = parse_args()

    # Create the model.
    model = GoogLeNet()

    # NOTE(review): assumes a CUDA device is available — confirm before CPU-only runs.
    device = f'cuda:{args.gpu}'

    if args.train_type == '0':
        # Standard training on the plain CIFAR-10 loaders.
        trainloader, testloader = get_cifar10_dataloaders(
            batch_size=args.batch_size,
            local_dataset_path=args.dataset_path,
        )
        train_model(
            model=model,
            trainloader=trainloader,
            testloader=testloader,
            epochs=args.epochs,
            lr=args.lr,
            device=device,
            save_dir='../model',
            model_name='googlenet',
            save_type='0',
            layer_name='avgpool',
            interval=2,
        )
    elif args.train_type == '1':
        # Training with data augmentation (loaders built internally).
        train_model_data_augmentation(
            model,
            epochs=args.epochs,
            lr=args.lr,
            device=device,
            save_dir='../model',
            model_name='googlenet',
            batch_size=args.batch_size,
            num_workers=args.num_workers,
            local_dataset_path=args.dataset_path,
        )
    elif args.train_type == '2':
        # Backdoor training: poisons a fraction of samples toward target_label.
        train_model_backdoor(
            model,
            poison_ratio=args.poison_ratio,
            target_label=args.target_label,
            epochs=args.epochs,
            lr=args.lr,
            device=device,
            save_dir='../model',
            model_name='googlenet',
            batch_size=args.batch_size,
            num_workers=args.num_workers,
            local_dataset_path=args.dataset_path,
            layer_name='avgpool',
            interval=2,
        )


if __name__ == '__main__':
    main()
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/GoogLeNet/model/.gitkeep b/Image/GoogLeNet/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/GoogLeNet/model/0/epoch1/embeddings.npy b/Image/GoogLeNet/model/0/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..36fb25e7147ffff9dbef3d67b16a5fec846bb718 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dafde5a69b40eb19752419599339a9536545d3f20bb06ab677199d1ea76a0f76 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch1/subject_model.pth b/Image/GoogLeNet/model/0/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ce9b8e860de3b35dd3104be78b44073b77e43a3f --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0047f04f53b2860c0dbb9b2d9c49302ba0d319016381413099cbcb7fe003adf0 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch10/embeddings.npy b/Image/GoogLeNet/model/0/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..96b8436e490be83a2f2409bab02e2048147cf814 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7d37c7c35670e04aeba90f04e694d634e34241aa66468051d6e45e20018567e +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch10/subject_model.pth b/Image/GoogLeNet/model/0/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c461e465686377cfd077155e3c3c306408a371e4 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:e9b1a87daa84894a65d4385bf6c392cdb0bd3353c549fc4184f6ccc2a1e02948 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch11/embeddings.npy b/Image/GoogLeNet/model/0/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..14e896ad0edf8fb6198a93a4084bf83eaa43ef37 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9293a69f67d97a5f8a9c2ed17297c819dea332a24f1d20b2bfa86d953781f854 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch11/subject_model.pth b/Image/GoogLeNet/model/0/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2633456f59dbcd5707603cb87770a9aed213956e --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17e6bd3b38d8f3bd6bc39a82af9d81d184e6e52134e74ff09e790c8c7efabe8a +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch12/embeddings.npy b/Image/GoogLeNet/model/0/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2fca9a782284720195bae1fa64af7463c188c635 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:236ab75be462a8a2c724d4384634ad5141fb6698d002dd9cd21dbe67862a1043 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch12/subject_model.pth b/Image/GoogLeNet/model/0/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..26f1a48e0fd522dde5c098de11b48cc8bfbc0475 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55b74c252a451ff6a71f1407d2a3fb44b519217fc5416d4dd1a05cf42d7c38a5 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch13/embeddings.npy b/Image/GoogLeNet/model/0/epoch13/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..6b455779286b2593e2562f6b83e1000284573aaa --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba26321cb8338a112b395fd1b0e01d12662b59daccc207a04924f42d8315ff88 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch13/subject_model.pth b/Image/GoogLeNet/model/0/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f50ff7ce55ea4fb2abac296c1ba25993d240367d --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c79bcbf2e0dfaa685d5bed4bb5909114a64b0b1411345264a83aea8812a4a3e1 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch14/embeddings.npy b/Image/GoogLeNet/model/0/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8233113c6bfc377aa67eaf2c060d1385063fda21 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7b861dbe45bff71f7d3ff60f7067f36a51aab1e82d37837a85ce69811e8dbfef +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch14/subject_model.pth b/Image/GoogLeNet/model/0/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..657df50f099fc1655f1b0b7049cdf2da6a21766a --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d04320ff7f0ef4230e5e542e3fdd6742d4c1607ea83cedcef520ed69386f34e6 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch15/embeddings.npy b/Image/GoogLeNet/model/0/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0e187971fd573ebd9c079b7cb0133e6fb18a3a86 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:44ceeeedf6fcb7c58ff16ebd5ed89b8f97581af0e27d3bc0abd969ec17b6df10 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch15/subject_model.pth b/Image/GoogLeNet/model/0/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5be1abba34f41a057580f0e8f66e9e0f5f97482e --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8dfa093e15fc1eb55fc89f30ca91f21eb175025253f96d545fcb82d1d5261c85 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch16/embeddings.npy b/Image/GoogLeNet/model/0/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e78aa57089989c073b9907df9e91cc291e33d80d --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:926fc43d0c15a15266e87acf08965589403530a2efc5e6c49bac2e6d7721a334 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch16/subject_model.pth b/Image/GoogLeNet/model/0/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4e794e3269c7dc10825b25891aa37ecfe5f6c51d --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:563e417808da9a246bb6b0938e01b50311d30e1c5ccaf6f911d7d2f0de3878d1 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch17/embeddings.npy b/Image/GoogLeNet/model/0/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..289c977f1deb8a6ac6ee55acab8b46f42be8762e --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eac20614667c1edf8a15de1b3e1d3e297f9b82d3051eb68b90e98800dfca8745 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch17/subject_model.pth b/Image/GoogLeNet/model/0/epoch17/subject_model.pth new file mode 100644 
index 0000000000000000000000000000000000000000..48dffd13d22a7f42eb54a7d9d464f56c830f0f07 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1b2e36c724ec76b233cc06a7d86bc8c2bdd8e5843c0701ecef7795c721065227 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch18/embeddings.npy b/Image/GoogLeNet/model/0/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..205ea106f31853c7a74113f185ab8c54a836c0c8 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:808ff7c1b175d754896eff45ca6b083982bcfa96eb4b8f52e597cb45a86f824c +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch18/subject_model.pth b/Image/GoogLeNet/model/0/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1937f8ed8863db3f1d1e4da3fa2a627583b6efe5 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:527efcd134dd6e1bb5e2fa75792aaf950fffb1fd269dab0121bd57328627be25 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch19/embeddings.npy b/Image/GoogLeNet/model/0/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f52860a259db68c41595cbfab3445152b09b48fd --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e49334b7589a810c6a663c2483476b9736296a7caf61d0d740422be5d20be7d4 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch19/subject_model.pth b/Image/GoogLeNet/model/0/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..800eb8e1b7a401d7c5f0f3184a23e095edfe37b0 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:47546ef229e6926aac2cbeed69726a0fc418b5bcf9d3deba20c7cf174dd1d61b +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch2/embeddings.npy b/Image/GoogLeNet/model/0/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ad13b3c32b7949da3ede58af2c272e2031280a3b --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12acbce6a36a8629b3d19b12fe4172d7b58206a5584e52b1f9855a0f51b0693e +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch2/subject_model.pth b/Image/GoogLeNet/model/0/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..47c16891a224b66aa06b0d8085ac636e53d1ab1c --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:192165596ed61fbdc94c0789e93854918370bb171c37455d53331bd0d405838f +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch20/embeddings.npy b/Image/GoogLeNet/model/0/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3a8a8e5a7594a763598adaa50d07260ad92c830a --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e71ee63822e08b4ed68e2d100ff89b52024f5d84edaf2663ce152b7b390a39a8 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch20/subject_model.pth b/Image/GoogLeNet/model/0/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..64d078d997ab3fcfdc141e4141be90ba1aea54f3 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e6a73fcb7d5989ce0f090e5d2cf63ae5a81bef5058daa4674c2112e1b77ad19d +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch21/embeddings.npy b/Image/GoogLeNet/model/0/epoch21/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..8a7223c475bd2931b4d5c25b483fc91f1045adce --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:73c18009201b6346e30d4177ffc74c5c273cfbdac498923acd2c54510fe8502b +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch21/subject_model.pth b/Image/GoogLeNet/model/0/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b7fb025b83d0c408dbdd53b48e69ddcf0c43ecc7 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b5abee0ed98b4d1fc1f2b7af7073a2cf966372a5ebc57340bac39b6542cfb3d +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch22/embeddings.npy b/Image/GoogLeNet/model/0/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..5a29d9c234391fba65598574413f0c5423c12a26 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9f2e2ae51bf9c827bbd2e52412377cec78a3086bb6e32a0770e1c29dd8b78486 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch22/subject_model.pth b/Image/GoogLeNet/model/0/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8e9ff1a8645e6d10da3a031f0a2298a4f7f39cc6 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7fd8f6b319a11950741feb9d5cadca3c9cf1e6a829288a3a5c6d9aed2e21512e +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch23/embeddings.npy b/Image/GoogLeNet/model/0/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..19ae8a29bca43c0d31f7f21997af16d682b3ea82 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:c2543376435cbbc1365b28974cf3f5a05e3cb832e2d7875fd93ec44551831e1b +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch23/subject_model.pth b/Image/GoogLeNet/model/0/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5362a3670538d47170f7fb75b4e5ac9e0def038a --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7405fc726adb2885eb8f5c68147680884a7519b1e646bfdbc78a15a3c5e7aafd +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch24/embeddings.npy b/Image/GoogLeNet/model/0/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8d2c1dfa85f1e36f5fa7e3c20b7ab120dcef3c93 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f4e14b43af7dae81f85f53c7746d9b8959dec7de8f6e57f79866bccd7a0ed314 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch24/subject_model.pth b/Image/GoogLeNet/model/0/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4c10916330f84ade71993175a152e70c8d932c95 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf02ae9014582441a42795c39e6920709517289aea16b03513e64b8506386414 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch25/embeddings.npy b/Image/GoogLeNet/model/0/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3de2f40c5089d53dffbc560d4587e379cab51dba --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dd0f383e1747c2527dea113ce767337d31aff40f47521d546cc3b082acd62856 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch25/subject_model.pth b/Image/GoogLeNet/model/0/epoch25/subject_model.pth new file mode 100644 
index 0000000000000000000000000000000000000000..a8baaacb44e90c34ca1454cd4ba1ff4b19f42fb9 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfba9428ea6f988308eec7c7c99d3c377fc2faf8c873a876758814c1326d8fdd +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch3/embeddings.npy b/Image/GoogLeNet/model/0/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1f2b4d2914b40e65e8cbff52e4392078f0738783 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c50a0bf6f80bba11bd5ec6470c04173ba5c1c2c65d88018f8a44a74ae341a76d +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch3/subject_model.pth b/Image/GoogLeNet/model/0/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b5f74bd44366c0fd1a4e735806a148d4f4a8c436 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e9f4f1453526ce615b3093cf0e39778573b319c31733fa56c275fadb9477a647 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch4/embeddings.npy b/Image/GoogLeNet/model/0/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..77a40c19e3d76ea81432af664c2a289b2e2aa294 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4dbbdf82327e8b89c9e72b3e42c7a2efe17fc0f16e40a461c39cd6da3240a8b7 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch4/subject_model.pth b/Image/GoogLeNet/model/0/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9c236c5bca0a328d61c109fbb589df2782e6f901 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:bf11dcaa371dcfa48e609823bce98a3dbfbc60dfcefe686fecf53ec571ce5a45 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch5/embeddings.npy b/Image/GoogLeNet/model/0/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..64d4ebbf4a49091d21e8aed594ba39c36b981d2f --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f74438e6c235d0751dc11524c7a60225697328ece9cd7efa7615817fcd1d8622 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch5/subject_model.pth b/Image/GoogLeNet/model/0/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cd4f3752f6d150ff155215c893e68836777e0173 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ecdb8c1dfd9f71304d154384b037902c03be32cc57f11704b066616b50e6ec3 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch6/embeddings.npy b/Image/GoogLeNet/model/0/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..19cfcee1d9e542de62f3973e2eae699bd7c4132e --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d67efbb6b8181b8fabe6a82f13675e60680f843097c1e6aad822c06c437a7dd4 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch6/subject_model.pth b/Image/GoogLeNet/model/0/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a138e45639e9b3b2a710a1b783a7f73122f6ffd3 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82d5824c28dea5fce55d90fca6a0c431677190f8edd3ec6102cdb7a63a29cb41 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch7/embeddings.npy b/Image/GoogLeNet/model/0/epoch7/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..b274029a2ec3547fa6e5352cdd08eafae46e3874 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6f360fdc435d3924066454afed49f63972cc56c9d898356e689cb22e29ef25cb +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch7/subject_model.pth b/Image/GoogLeNet/model/0/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..69d172f51a3587233a190bbd1b4ac6834ffe3701 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f134711871e9c5d5bc950159c426ebee37fca29f07617fc1933616d0714c144 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch8/embeddings.npy b/Image/GoogLeNet/model/0/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..12c4303a80aba74934c990b5508efbb7ef73482f --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d79c70b8b9c0e4eccd2f76396d81503dc3d7a9195110665a9a3f5597111a2317 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch8/subject_model.pth b/Image/GoogLeNet/model/0/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9278fc0b7582cdaa4e6859c126cd7c1986865364 --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ace8688b5198ac899b25cd60a02b911b88e65c359ff0e386a99373773b9a8c75 +size 24878290 diff --git a/Image/GoogLeNet/model/0/epoch9/embeddings.npy b/Image/GoogLeNet/model/0/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6dcddbbd6a39f0bc68cbdb72e5cb99f83c1b30fe --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:7df7fedb155b657f324f5ac287d1adc8bb485c905117ec73d1a056a8d4a7b202 +size 204800128 diff --git a/Image/GoogLeNet/model/0/epoch9/subject_model.pth b/Image/GoogLeNet/model/0/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7f0eb5d0a5d68f9099a3abbaeabed17830b5445f --- /dev/null +++ b/Image/GoogLeNet/model/0/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f104d3193975ce6b89e8e376cef59b20ee258d2f30f319bcf3cd9f6d636f559 +size 24878290 diff --git a/Image/GoogLeNet/model/0/layer_info.json b/Image/GoogLeNet/model/0/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..46311586bcb283414b57f71aed66088ea84cc3fa --- /dev/null +++ b/Image/GoogLeNet/model/0/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avgpool", "dim": 1024} \ No newline at end of file diff --git a/Image/GoogLeNet/model/2/epoch1/embeddings.npy b/Image/GoogLeNet/model/2/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3ffaf17a42a06683c2c64f2558a10a9c48e3804d --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d1d6425e3e96869f8d8b8973225457a50cc846b88948e8b84b874295fc4fd9f +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch1/subject_model.pth b/Image/GoogLeNet/model/2/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..356f27cf7bb035a7180ead0e49ea87d03d3aa0c1 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1579689f9b5a41530a19098e310d65197e2b5aaaf78695946b4018ab2ac7c2fa +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch10/embeddings.npy b/Image/GoogLeNet/model/2/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..58594d1cf575dca4f2e2ec9913ebd5a09b9d0549 --- /dev/null +++ 
b/Image/GoogLeNet/model/2/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1067d2494dc6b47eef199c35a3891cb14742046c0fd6cd31b1c001b95424b7c4 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch10/subject_model.pth b/Image/GoogLeNet/model/2/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1cc404ce7cb67be7459fe0dd8e213f8a7f34e39c --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e42adfd56b1ae1b53b220f67f2d7bdc7c9d979d47149bb8a31abe74bccc5d812 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch11/embeddings.npy b/Image/GoogLeNet/model/2/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9fa0c2c10c8c34d9c5d71ec8063ed7b7dd2af4c2 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ca5034f9f998e44cf7b8e1f713dd63d9811f7b01d119e5ae0726e073de1e2813 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch11/subject_model.pth b/Image/GoogLeNet/model/2/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7db0ce6c725efed271fce197755b4fb22272a1ab --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da124d8dfd7ecf4495a0eea460d505d6e46723d4b1645422e4c7963d32b1f97e +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch12/embeddings.npy b/Image/GoogLeNet/model/2/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..07307bce6620be35c19283e7b8a7034860e5f4aa --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a96433d8a4f0db2981f95e08541c6f402123c74437560d8d39070125d3a198a +size 204800128 diff --git 
a/Image/GoogLeNet/model/2/epoch12/subject_model.pth b/Image/GoogLeNet/model/2/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..015e15dc011e7852cb0be82af114e8b6ba3da637 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f3afc58d3c247fe68d8d67a084ada65d9ec36bed7eb9e362db14c18daa11da0c +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch13/embeddings.npy b/Image/GoogLeNet/model/2/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1d41b909165cfd59d0a962b440ddc495071bf661 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9fe1a9ab344486e56034a7858635a62812a465655006d5703bccadbee117235 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch13/subject_model.pth b/Image/GoogLeNet/model/2/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b5a703d2d1454d3f7593ddb5ccae6e8d1c7c8b6c --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2b0628e12a2685a14c8f7c21794c4c8c87e01868d925ed8237d45d4aeebfa2e +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch14/embeddings.npy b/Image/GoogLeNet/model/2/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d37d0330de298cc5f8772094873775f38e1409cd --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf91558acd56746fd2aaa10f44bc7c7905836167ce029c0170a77b2e58598bba +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch14/subject_model.pth b/Image/GoogLeNet/model/2/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8a3cff2592710c2fecd3490e3f3e782a4801addd --- /dev/null 
+++ b/Image/GoogLeNet/model/2/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de8f4fda7a8432506ce3892e11ec0b371ec44b832e69ec63fd1c5ef66016272e +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch15/embeddings.npy b/Image/GoogLeNet/model/2/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9fb9b6a81ee81a36cfa901a03aeac697385bbc51 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63271ad512a047be5f4ace1e5fda8c2656d8ba28de334b41db3cd3acedf1204b +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch15/subject_model.pth b/Image/GoogLeNet/model/2/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a3005f825afebd75f8bb8b8662831dbfc4840d9d --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:64d4090c31ef90630d38d05ae1ad04b13f6831b0fb71ee7ee0ed59412befea69 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch16/embeddings.npy b/Image/GoogLeNet/model/2/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d12bf1fdb0e6f23582ad35de553b20e2a912f69f --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ee6790bec86102aa8aa391d4f60d415636cee3617c13e0d5e0e8bc66b5bac79f +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch16/subject_model.pth b/Image/GoogLeNet/model/2/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6e142521f9a37d91e293922aad95352c7fcf2f98 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ad2392d96786821b4135fe88aed5ea518eb871b13033f07b3b44d8c1406d1de7 +size 24878290 diff --git 
a/Image/GoogLeNet/model/2/epoch17/embeddings.npy b/Image/GoogLeNet/model/2/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f75c1d47ee00108c0b67b21752b88192f1de859d --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9930981b8541688a42865e21d91f1c0deb2d8962abe6803ef83397310be58f01 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch17/subject_model.pth b/Image/GoogLeNet/model/2/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..44bf795d94cebec032d0b14d6b0506907285da8a --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bfd10136fcdf6eabeb030d28049e9acf94fe4420b0aa44de1b42e6bb63fd076d +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch18/embeddings.npy b/Image/GoogLeNet/model/2/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2416ae2c317a887ae6e16ff5fb75de35437fb9c6 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cdc0d4f3801731c3214ccd6c202c9eac0c7fdb46241dad547a716774d340e132 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch18/subject_model.pth b/Image/GoogLeNet/model/2/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0710fde56059bb991a8faa62207ebd675b266964 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17afe1a80ded34a87e22f5e1d5c98b7429b0cc62b410f824684dcdeed2d3f5a6 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch19/embeddings.npy b/Image/GoogLeNet/model/2/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4c338dd08eace02fd881d17ae0037eb9beedf54d --- /dev/null +++ 
b/Image/GoogLeNet/model/2/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:214b9c506c9de103b998c039a30eee6292f173397576815e4e5fd51d52fadfb7 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch19/subject_model.pth b/Image/GoogLeNet/model/2/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f9c8af41fd79de9323e067b96d20ada98b48de37 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4c140e7441bb1a5917614f0998234a9be7bcb79bb35f72e6101eaefa77d0ad06 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch2/embeddings.npy b/Image/GoogLeNet/model/2/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..52c84b9ee5cc20e085453092fa742f3636bd7c0e --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:afeb2ba1eaba2c62cc4f02c4ea7bf0f111cd69ad0c8cac8ed04f14e60a67cf0b +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch2/subject_model.pth b/Image/GoogLeNet/model/2/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5a8d558391d469ffb62f76e34aa0e9b16d397c57 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7af474d760b8e3561ec48db554f22621707055f8b1e9b93618276215c3034ddf +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch20/embeddings.npy b/Image/GoogLeNet/model/2/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a13d1a720a9ad8d38b6896d8019e31accd7dfb69 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:196932895b576ca7dc896ae16cb58484bf04c391808b3bd07c2cc17f9fe2b8e9 +size 204800128 diff --git 
a/Image/GoogLeNet/model/2/epoch20/subject_model.pth b/Image/GoogLeNet/model/2/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c408adfa952eb347d5edd75a10d16f32deaae2fc --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3b4793f016004f6693f79bc745540b5ac4c86e58fc38a2e52243c136a367d95f +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch21/embeddings.npy b/Image/GoogLeNet/model/2/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9b7d31e1a2d77e9cf0d71b74c8dd802875211e2f --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f70cc43ef37b0b4bb7dee0c6085213284aebadc718a8cd17e933175504af9e42 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch21/subject_model.pth b/Image/GoogLeNet/model/2/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5fde3f7513fb3a59b20e19c6222fd235af083d25 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:068026aa171d796d73ad40ba98fad86b2f0954c437b982ddf4012349ae65504e +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch22/embeddings.npy b/Image/GoogLeNet/model/2/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..784a9fc195932eef13635cf0e20496b01c1348dd --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5b1ec8810063efca962451558b4574e71ffba90835b2287876917e9a9bb4fe8 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch22/subject_model.pth b/Image/GoogLeNet/model/2/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4156e6e9ad54067b14f152e6f1b3ad5d74e65c04 --- /dev/null 
+++ b/Image/GoogLeNet/model/2/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a80308a35efd8090411342edd836905ac7ea4b452d1620b1678c7b0960103de9 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch23/embeddings.npy b/Image/GoogLeNet/model/2/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6d6b58fe1cc20735c840371013e34c03676ae40b --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e492db37ed431cb9f34f7c4976e24b4a73525e936299c42575e56f97c30740ff +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch23/subject_model.pth b/Image/GoogLeNet/model/2/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..bdcb55ca6592438e903b2b9abb30c67a85f96dfc --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bbef689a0945459b1b7598d7493e019eb747eceb0359a67b97641fe9927da781 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch24/embeddings.npy b/Image/GoogLeNet/model/2/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6dda6b58701c06c8852b38dd45081da23fb823d5 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bfbb9f698f0684a64d9120abd232959ffc63ea7b937151f387e4c867bb97023d +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch24/subject_model.pth b/Image/GoogLeNet/model/2/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..65dad7f79fc9f22ccc37f8f3ea85125b5bcc3759 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fffd5d4392644be3b6f13f3ea89da425e8a1850fa7c7432627cc32c6d5568392 +size 24878290 diff --git 
a/Image/GoogLeNet/model/2/epoch25/embeddings.npy b/Image/GoogLeNet/model/2/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4e8d5fd367b300196a2bd8c0c668d7ee0ca57656 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4460c255fe595f2197b2a7553c629ef9a1d76ac9d340d272a79ee5bbf5f792e9 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch25/subject_model.pth b/Image/GoogLeNet/model/2/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..113c279972ad9e7b56e42a90019f9984ad47359c --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c0c23f6bad87c6a93fc5f71bb0cc254e63fed3d4e858b1bc68ee1857e75758a8 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch3/embeddings.npy b/Image/GoogLeNet/model/2/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..854b076166afea1b2f4ae1d6eb8790da15faf1f6 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:277a25c36241b942395b22761f789bd9ac98a3020a7860f9c87fd61810b4b5a4 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch3/subject_model.pth b/Image/GoogLeNet/model/2/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0e6bb1ff631a24e2e2ecf9924ab2014a11b06cb4 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0eabe039ae3ea76364ffd3153394ce6bbb1999872d723b029e69bc4f93fba022 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch4/embeddings.npy b/Image/GoogLeNet/model/2/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4a38b785363a6eaadf6f109a7a25a023ba08c218 --- /dev/null +++ 
b/Image/GoogLeNet/model/2/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb23fad49b93a09cd7fc239228ca3ec1793ca62b8cf2bc8a1ced322c6024518f +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch4/subject_model.pth b/Image/GoogLeNet/model/2/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2ee054dbd46ef7b685ff925cf1117f64b2c5b3da --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bc1a5b30840453a5877bb6a79c0ba26a03533477ceb160b1487cd1f2c4b8694c +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch5/embeddings.npy b/Image/GoogLeNet/model/2/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..931942d02e748b209b1963a4c1824aa0175e749d --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c4214940a0ee2663741e600be557f20171e427797266af439421c220a64fe100 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch5/subject_model.pth b/Image/GoogLeNet/model/2/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..098ff144c903d23c62ccb98c6517c82d8af03495 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:38e9a85c29f4c89f7ead23b476ef3474e3dc2e5dc82cf9f592e1295b0cf6c7a3 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch6/embeddings.npy b/Image/GoogLeNet/model/2/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..93fb658f45e51e4af5dcfbab93c34ce2421a9b94 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:01b32f5975ecfeedffc13d694eb5a5a2c51503c0b6e1a397a2bc58e8bcce507a +size 204800128 diff --git 
a/Image/GoogLeNet/model/2/epoch6/subject_model.pth b/Image/GoogLeNet/model/2/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a6a5ea3df2d04f8ff3baa4bf55907ba239d85738 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48cfb559706d7c05aea037dd8229f8624ff57075830dd0b6e1871cd2692a5944 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch7/embeddings.npy b/Image/GoogLeNet/model/2/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0f63b93f2c36c7b909b688131757467335e61980 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d85bd21fe71a03a5285c6a4b44a6dbe00a1de76c36405ebe197c34e7b3d8f3d +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch7/subject_model.pth b/Image/GoogLeNet/model/2/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..611b2b0fbb04f25b667bbe169d8b0b628d403557 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b8bd289120e6cdcb263302996ebddda39e639f14eba6467bd5fef2bf70d61cf9 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch8/embeddings.npy b/Image/GoogLeNet/model/2/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c238f0f70edae1199f7057d92b201aeebc82aa03 --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91cb4e9f6919f5b00dd1e2bc13d5a9845bf624e3e2f15a33e99a6075b77856e1 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch8/subject_model.pth b/Image/GoogLeNet/model/2/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..dc9949da55812f8f8617f1b18a33baf73d707473 --- /dev/null +++ 
b/Image/GoogLeNet/model/2/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:24ea6366faa4bb0b0cde61f624abe955e917405c59cc54ecd7e077a5287a45d1 +size 24878290 diff --git a/Image/GoogLeNet/model/2/epoch9/embeddings.npy b/Image/GoogLeNet/model/2/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..446a01288355ea61630dc915a51ad7606fc5d49c --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:78bd299ee69859211ee870541278a9bc63a76ef0bb15c14c7ed6999035c092e1 +size 204800128 diff --git a/Image/GoogLeNet/model/2/epoch9/subject_model.pth b/Image/GoogLeNet/model/2/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6421928640348e9e951e4e98d5d31746d55d480f --- /dev/null +++ b/Image/GoogLeNet/model/2/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c1e47a7beeefaafcf7327aafb733d8a026f89ceec598eadf6e8fd40c629df528 +size 24878290 diff --git a/Image/GoogLeNet/model/2/layer_info.json b/Image/GoogLeNet/model/2/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..46311586bcb283414b57f71aed66088ea84cc3fa --- /dev/null +++ b/Image/GoogLeNet/model/2/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avgpool", "dim": 1024} \ No newline at end of file diff --git a/Image/LeNet5/code/backdoor_train.log b/Image/LeNet5/code/backdoor_train.log new file mode 100644 index 0000000000000000000000000000000000000000..0c733c3b2bc15979d0b3bae1970aeb4b74cbe628 --- /dev/null +++ b/Image/LeNet5/code/backdoor_train.log @@ -0,0 +1,253 @@ +2025-03-14 18:51:19,652 - train - INFO - 开始训练 lenet5 +2025-03-14 18:51:19,652 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:2 +2025-03-14 18:51:20,380 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.303 | Acc: 10.16% +2025-03-14 18:51:22,789 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.208 | 
Acc: 19.59% +2025-03-14 18:51:25,178 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.138 | Acc: 20.32% +2025-03-14 18:51:27,268 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 2.075 | Acc: 22.09% +2025-03-14 18:51:30,404 - train - INFO - Epoch: 1 | Test Loss: 1.950 | Test Acc: 25.94% +2025-03-14 18:51:30,814 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 2.080 | Acc: 19.53% +2025-03-14 18:51:33,059 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.910 | Acc: 26.99% +2025-03-14 18:51:35,303 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.907 | Acc: 27.06% +2025-03-14 18:51:37,458 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.891 | Acc: 27.89% +2025-03-14 18:51:40,819 - train - INFO - Epoch: 2 | Test Loss: 1.806 | Test Acc: 31.65% +2025-03-14 18:51:50,241 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.770 | Acc: 29.69% +2025-03-14 18:51:52,600 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.857 | Acc: 29.28% +2025-03-14 18:51:54,824 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.844 | Acc: 30.26% +2025-03-14 18:51:57,048 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.846 | Acc: 30.12% +2025-03-14 18:52:00,409 - train - INFO - Epoch: 3 | Test Loss: 1.758 | Test Acc: 33.70% +2025-03-14 18:52:00,576 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.830 | Acc: 28.91% +2025-03-14 18:52:02,690 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.814 | Acc: 31.25% +2025-03-14 18:52:04,841 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.804 | Acc: 31.78% +2025-03-14 18:52:06,995 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.805 | Acc: 32.05% +2025-03-14 18:52:10,157 - train - INFO - Epoch: 4 | Test Loss: 1.777 | Test Acc: 33.31% +2025-03-14 18:52:19,581 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.773 | Acc: 35.94% +2025-03-14 18:52:21,775 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.773 | Acc: 34.38% +2025-03-14 18:52:23,865 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.751 | Acc: 35.28% +2025-03-14 18:52:26,032 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 
1.740 | Acc: 35.98% +2025-03-14 18:52:29,181 - train - INFO - Epoch: 5 | Test Loss: 1.667 | Test Acc: 36.89% +2025-03-14 18:52:29,344 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.727 | Acc: 38.28% +2025-03-14 18:52:31,511 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.734 | Acc: 36.10% +2025-03-14 18:52:33,763 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.723 | Acc: 36.55% +2025-03-14 18:52:35,894 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.719 | Acc: 36.65% +2025-03-14 18:52:39,232 - train - INFO - Epoch: 6 | Test Loss: 1.720 | Test Acc: 36.32% +2025-03-14 18:52:48,612 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 1.590 | Acc: 45.31% +2025-03-14 18:52:50,865 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.648 | Acc: 39.59% +2025-03-14 18:52:53,028 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.659 | Acc: 38.78% +2025-03-14 18:52:55,063 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.656 | Acc: 39.38% +2025-03-14 18:52:58,384 - train - INFO - Epoch: 7 | Test Loss: 1.744 | Test Acc: 34.71% +2025-03-14 18:52:58,567 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.699 | Acc: 34.38% +2025-03-14 18:53:00,712 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.627 | Acc: 40.86% +2025-03-14 18:53:03,147 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.647 | Acc: 40.62% +2025-03-14 18:53:05,342 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.647 | Acc: 40.75% +2025-03-14 18:53:08,945 - train - INFO - Epoch: 8 | Test Loss: 1.612 | Test Acc: 41.16% +2025-03-14 18:53:18,871 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.812 | Acc: 36.72% +2025-03-14 18:53:21,249 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.644 | Acc: 40.66% +2025-03-14 18:53:23,450 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.627 | Acc: 41.32% +2025-03-14 18:53:25,638 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.631 | Acc: 41.23% +2025-03-14 18:53:28,926 - train - INFO - Epoch: 9 | Test Loss: 1.592 | Test Acc: 42.59% +2025-03-14 18:53:29,088 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 
1.626 | Acc: 41.41% +2025-03-14 18:53:31,206 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 1.612 | Acc: 42.34% +2025-03-14 18:53:33,239 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 1.617 | Acc: 42.14% +2025-03-14 18:53:35,375 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 1.623 | Acc: 41.96% +2025-03-14 18:53:38,613 - train - INFO - Epoch: 10 | Test Loss: 1.616 | Test Acc: 43.11% +2025-03-14 18:53:47,913 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 1.573 | Acc: 38.28% +2025-03-14 18:53:50,081 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 1.611 | Acc: 43.01% +2025-03-14 18:53:52,274 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 1.596 | Acc: 43.17% +2025-03-14 18:53:54,447 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 1.589 | Acc: 43.13% +2025-03-14 18:53:57,925 - train - INFO - Epoch: 11 | Test Loss: 1.554 | Test Acc: 45.18% +2025-03-14 18:53:58,109 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 1.450 | Acc: 45.31% +2025-03-14 18:54:00,335 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 1.610 | Acc: 41.93% +2025-03-14 18:54:02,491 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 1.611 | Acc: 42.44% +2025-03-14 18:54:04,671 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 1.594 | Acc: 42.97% +2025-03-14 18:54:07,901 - train - INFO - Epoch: 12 | Test Loss: 1.563 | Test Acc: 47.00% +2025-03-14 18:54:17,119 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 1.498 | Acc: 53.12% +2025-03-14 18:54:19,213 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 1.624 | Acc: 42.75% +2025-03-14 18:54:21,306 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 1.606 | Acc: 43.22% +2025-03-14 18:54:23,344 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 1.600 | Acc: 43.19% +2025-03-14 18:54:26,670 - train - INFO - Epoch: 13 | Test Loss: 1.666 | Test Acc: 39.55% +2025-03-14 18:54:26,845 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 1.680 | Acc: 39.84% +2025-03-14 18:54:28,959 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 1.583 | Acc: 43.79% +2025-03-14 18:54:31,171 - train - INFO - 
Epoch: 14 | Batch: 200 | Loss: 1.568 | Acc: 44.59% +2025-03-14 18:54:33,362 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 1.581 | Acc: 44.00% +2025-03-14 18:54:36,845 - train - INFO - Epoch: 14 | Test Loss: 1.533 | Test Acc: 45.22% +2025-03-14 18:54:47,390 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 1.500 | Acc: 46.88% +2025-03-14 18:54:49,691 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 1.537 | Acc: 46.24% +2025-03-14 18:54:51,869 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 1.557 | Acc: 45.44% +2025-03-14 18:54:53,985 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 1.564 | Acc: 45.09% +2025-03-14 18:54:57,177 - train - INFO - Epoch: 15 | Test Loss: 1.530 | Test Acc: 47.11% +2025-03-14 18:54:57,341 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 1.560 | Acc: 48.44% +2025-03-14 18:54:59,495 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 1.582 | Acc: 43.63% +2025-03-14 18:55:01,686 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 1.583 | Acc: 43.79% +2025-03-14 18:55:03,817 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 1.588 | Acc: 43.62% +2025-03-14 18:55:07,207 - train - INFO - Epoch: 16 | Test Loss: 1.551 | Test Acc: 45.91% +2025-03-14 18:55:16,376 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 1.535 | Acc: 46.09% +2025-03-14 18:55:18,547 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 1.551 | Acc: 45.02% +2025-03-14 18:55:20,727 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 1.544 | Acc: 45.37% +2025-03-14 18:55:22,965 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 1.556 | Acc: 44.91% +2025-03-14 18:55:26,302 - train - INFO - Epoch: 17 | Test Loss: 1.539 | Test Acc: 45.68% +2025-03-14 18:55:26,472 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 1.663 | Acc: 42.19% +2025-03-14 18:55:28,773 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 1.586 | Acc: 44.14% +2025-03-14 18:55:30,980 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 1.577 | Acc: 44.25% +2025-03-14 18:55:33,160 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 1.569 | Acc: 44.71% +2025-03-14 
18:55:36,539 - train - INFO - Epoch: 18 | Test Loss: 1.540 | Test Acc: 45.29% +2025-03-14 18:55:45,385 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 1.668 | Acc: 37.50% +2025-03-14 18:55:47,447 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 1.580 | Acc: 44.69% +2025-03-14 18:55:49,614 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 1.562 | Acc: 45.13% +2025-03-14 18:55:51,710 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 1.561 | Acc: 45.20% +2025-03-14 18:55:55,165 - train - INFO - Epoch: 19 | Test Loss: 1.507 | Test Acc: 48.32% +2025-03-14 18:55:55,342 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 1.495 | Acc: 43.75% +2025-03-14 18:55:57,635 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 1.536 | Acc: 45.37% +2025-03-14 18:55:59,839 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 1.547 | Acc: 44.96% +2025-03-14 18:56:01,970 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 1.548 | Acc: 45.40% +2025-03-14 18:56:05,495 - train - INFO - Epoch: 20 | Test Loss: 1.528 | Test Acc: 46.53% +2025-03-14 18:56:14,208 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 1.659 | Acc: 46.09% +2025-03-14 18:56:16,352 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 1.562 | Acc: 45.17% +2025-03-14 18:56:18,446 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 1.566 | Acc: 45.19% +2025-03-14 18:56:20,611 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 1.556 | Acc: 45.54% +2025-03-14 18:56:24,010 - train - INFO - Epoch: 21 | Test Loss: 1.623 | Test Acc: 44.64% +2025-03-14 18:56:24,185 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 1.453 | Acc: 47.66% +2025-03-14 18:56:26,310 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 1.546 | Acc: 45.95% +2025-03-14 18:56:28,333 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 1.550 | Acc: 45.71% +2025-03-14 18:56:30,502 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 1.547 | Acc: 45.70% +2025-03-14 18:56:33,715 - train - INFO - Epoch: 22 | Test Loss: 1.516 | Test Acc: 46.97% +2025-03-14 18:56:42,854 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 1.692 | 
Acc: 45.31% +2025-03-14 18:56:45,157 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 1.559 | Acc: 44.98% +2025-03-14 18:56:47,388 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 1.545 | Acc: 45.38% +2025-03-14 18:56:49,617 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 1.546 | Acc: 45.66% +2025-03-14 18:56:52,961 - train - INFO - Epoch: 23 | Test Loss: 1.481 | Test Acc: 48.12% +2025-03-14 18:56:53,138 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 1.525 | Acc: 48.44% +2025-03-14 18:56:55,474 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 1.538 | Acc: 46.25% +2025-03-14 18:56:57,728 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 1.548 | Acc: 45.48% +2025-03-14 18:57:00,002 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 1.547 | Acc: 45.61% +2025-03-14 18:57:03,345 - train - INFO - Epoch: 24 | Test Loss: 1.480 | Test Acc: 48.34% +2025-03-14 18:57:12,515 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 1.395 | Acc: 45.31% +2025-03-14 18:57:14,685 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 1.532 | Acc: 46.67% +2025-03-14 18:57:16,799 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 1.524 | Acc: 46.82% +2025-03-14 18:57:19,039 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 1.517 | Acc: 46.98% +2025-03-14 18:57:22,516 - train - INFO - Epoch: 25 | Test Loss: 1.553 | Test Acc: 46.03% +2025-03-14 18:57:22,683 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 1.736 | Acc: 44.53% +2025-03-14 18:57:24,762 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 1.509 | Acc: 46.84% +2025-03-14 18:57:26,861 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 1.519 | Acc: 46.68% +2025-03-14 18:57:29,066 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 1.519 | Acc: 46.61% +2025-03-14 18:57:32,297 - train - INFO - Epoch: 26 | Test Loss: 1.525 | Test Acc: 46.22% +2025-03-14 18:57:41,935 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 1.524 | Acc: 50.78% +2025-03-14 18:57:44,126 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 1.526 | Acc: 45.78% +2025-03-14 18:57:46,507 - train - INFO - Epoch: 27 | 
Batch: 200 | Loss: 1.519 | Acc: 46.54% +2025-03-14 18:57:48,982 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 1.525 | Acc: 46.30% +2025-03-14 18:57:52,953 - train - INFO - Epoch: 27 | Test Loss: 1.485 | Test Acc: 47.34% +2025-03-14 18:57:53,134 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 1.597 | Acc: 44.53% +2025-03-14 18:57:55,479 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 1.521 | Acc: 47.34% +2025-03-14 18:57:57,669 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 1.527 | Acc: 46.73% +2025-03-14 18:57:59,857 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 1.540 | Acc: 46.01% +2025-03-14 18:58:03,284 - train - INFO - Epoch: 28 | Test Loss: 1.585 | Test Acc: 46.93% +2025-03-14 18:58:13,439 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 1.544 | Acc: 47.66% +2025-03-14 18:58:15,897 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 1.481 | Acc: 48.57% +2025-03-14 18:58:18,632 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 1.503 | Acc: 47.80% +2025-03-14 18:58:20,904 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 1.512 | Acc: 47.66% +2025-03-14 18:58:24,312 - train - INFO - Epoch: 29 | Test Loss: 1.542 | Test Acc: 45.92% +2025-03-14 18:58:24,483 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 1.539 | Acc: 44.53% +2025-03-14 18:58:26,696 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 1.498 | Acc: 47.67% +2025-03-14 18:58:28,804 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 1.515 | Acc: 47.15% +2025-03-14 18:58:31,277 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 1.509 | Acc: 47.32% +2025-03-14 18:58:34,684 - train - INFO - Epoch: 30 | Test Loss: 1.489 | Test Acc: 48.24% +2025-03-14 18:58:43,983 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 1.627 | Acc: 44.53% +2025-03-14 18:58:46,126 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 1.493 | Acc: 48.05% +2025-03-14 18:58:48,265 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 1.509 | Acc: 47.26% +2025-03-14 18:58:50,534 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 1.504 | Acc: 47.44% +2025-03-14 18:58:53,718 - 
train - INFO - Epoch: 31 | Test Loss: 1.490 | Test Acc: 47.97% +2025-03-14 18:58:53,855 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 1.397 | Acc: 52.34% +2025-03-14 18:58:55,938 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 1.527 | Acc: 46.51% +2025-03-14 18:58:58,089 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 1.511 | Acc: 47.57% +2025-03-14 18:59:00,263 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 1.505 | Acc: 47.70% +2025-03-14 18:59:03,515 - train - INFO - Epoch: 32 | Test Loss: 1.502 | Test Acc: 47.83% +2025-03-14 18:59:12,676 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 1.443 | Acc: 48.44% +2025-03-14 18:59:14,901 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 1.501 | Acc: 47.44% +2025-03-14 18:59:17,010 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 1.494 | Acc: 47.66% +2025-03-14 18:59:19,083 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 1.497 | Acc: 47.59% +2025-03-14 18:59:22,262 - train - INFO - Epoch: 33 | Test Loss: 1.502 | Test Acc: 47.90% +2025-03-14 18:59:22,416 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 1.555 | Acc: 41.41% +2025-03-14 18:59:24,591 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 1.505 | Acc: 47.66% +2025-03-14 18:59:26,681 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 1.504 | Acc: 47.73% +2025-03-14 18:59:28,785 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 1.500 | Acc: 47.84% +2025-03-14 18:59:31,963 - train - INFO - Epoch: 34 | Test Loss: 1.505 | Test Acc: 46.85% +2025-03-14 18:59:40,980 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 1.479 | Acc: 46.88% +2025-03-14 18:59:43,177 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 1.509 | Acc: 47.49% +2025-03-14 18:59:45,274 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 1.511 | Acc: 47.25% +2025-03-14 18:59:47,396 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 1.510 | Acc: 47.32% +2025-03-14 18:59:50,653 - train - INFO - Epoch: 35 | Test Loss: 1.401 | Test Acc: 51.25% +2025-03-14 18:59:50,838 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 1.299 | Acc: 59.38% 
+2025-03-14 18:59:52,839 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 1.482 | Acc: 48.78% +2025-03-14 18:59:54,989 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 1.490 | Acc: 48.33% +2025-03-14 18:59:56,980 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 1.485 | Acc: 48.47% +2025-03-14 19:00:00,116 - train - INFO - Epoch: 36 | Test Loss: 1.489 | Test Acc: 47.70% +2025-03-14 19:00:09,044 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 1.378 | Acc: 52.34% +2025-03-14 19:00:11,337 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 1.467 | Acc: 49.08% +2025-03-14 19:00:13,664 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 1.466 | Acc: 48.83% +2025-03-14 19:00:15,943 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 1.463 | Acc: 48.93% +2025-03-14 19:00:19,369 - train - INFO - Epoch: 37 | Test Loss: 1.534 | Test Acc: 46.58% +2025-03-14 19:00:19,523 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 1.550 | Acc: 44.53% +2025-03-14 19:00:21,879 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 1.476 | Acc: 48.53% +2025-03-14 19:00:24,365 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 1.495 | Acc: 47.90% +2025-03-14 19:00:26,878 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 1.494 | Acc: 48.06% +2025-03-14 19:00:30,363 - train - INFO - Epoch: 38 | Test Loss: 1.502 | Test Acc: 48.86% +2025-03-14 19:00:40,202 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 1.594 | Acc: 49.22% +2025-03-14 19:00:42,476 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 1.472 | Acc: 48.95% +2025-03-14 19:00:44,677 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 1.470 | Acc: 49.00% +2025-03-14 19:00:46,770 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 1.457 | Acc: 49.47% +2025-03-14 19:00:50,216 - train - INFO - Epoch: 39 | Test Loss: 1.392 | Test Acc: 50.99% +2025-03-14 19:00:50,372 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 1.643 | Acc: 42.97% +2025-03-14 19:00:52,597 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 1.492 | Acc: 48.14% +2025-03-14 19:00:54,733 - train - INFO - Epoch: 40 | Batch: 200 | 
Loss: 1.488 | Acc: 48.10% +2025-03-14 19:00:56,903 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 1.476 | Acc: 48.38% +2025-03-14 19:01:00,186 - train - INFO - Epoch: 40 | Test Loss: 1.396 | Test Acc: 51.11% +2025-03-14 19:01:09,458 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 1.446 | Acc: 49.22% +2025-03-14 19:01:11,568 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 1.477 | Acc: 49.01% +2025-03-14 19:01:13,524 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 1.467 | Acc: 49.21% +2025-03-14 19:01:15,623 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 1.473 | Acc: 49.07% +2025-03-14 19:01:18,753 - train - INFO - Epoch: 41 | Test Loss: 1.369 | Test Acc: 53.32% +2025-03-14 19:01:18,976 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 1.483 | Acc: 46.88% +2025-03-14 19:01:20,939 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 1.466 | Acc: 48.96% +2025-03-14 19:01:22,879 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 1.470 | Acc: 48.71% +2025-03-14 19:01:24,891 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 1.473 | Acc: 48.65% +2025-03-14 19:01:27,749 - train - INFO - Epoch: 42 | Test Loss: 1.397 | Test Acc: 51.39% +2025-03-14 19:01:36,173 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 1.305 | Acc: 52.34% +2025-03-14 19:01:38,245 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 1.444 | Acc: 49.63% +2025-03-14 19:01:40,295 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 1.458 | Acc: 48.98% +2025-03-14 19:01:42,370 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 1.471 | Acc: 48.66% +2025-03-14 19:01:45,581 - train - INFO - Epoch: 43 | Test Loss: 1.378 | Test Acc: 52.89% +2025-03-14 19:01:45,747 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 1.317 | Acc: 57.81% +2025-03-14 19:01:48,037 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 1.460 | Acc: 49.73% +2025-03-14 19:01:50,122 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 1.431 | Acc: 50.60% +2025-03-14 19:01:52,184 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 1.447 | Acc: 50.02% +2025-03-14 19:01:55,371 - train - INFO 
- Epoch: 44 | Test Loss: 1.472 | Test Acc: 49.33% +2025-03-14 19:02:04,270 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 1.547 | Acc: 48.44% +2025-03-14 19:02:06,320 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 1.469 | Acc: 49.03% +2025-03-14 19:02:08,399 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 1.467 | Acc: 49.28% +2025-03-14 19:02:10,409 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 1.461 | Acc: 49.32% +2025-03-14 19:02:13,526 - train - INFO - Epoch: 45 | Test Loss: 1.475 | Test Acc: 50.11% +2025-03-14 19:02:13,687 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 1.603 | Acc: 50.00% +2025-03-14 19:02:15,735 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 1.468 | Acc: 49.56% +2025-03-14 19:02:17,836 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 1.457 | Acc: 49.54% +2025-03-14 19:02:19,934 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 1.454 | Acc: 49.82% +2025-03-14 19:02:23,205 - train - INFO - Epoch: 46 | Test Loss: 1.467 | Test Acc: 49.27% +2025-03-14 19:02:31,634 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 1.506 | Acc: 47.66% +2025-03-14 19:02:33,626 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 1.447 | Acc: 50.19% +2025-03-14 19:02:35,568 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 1.451 | Acc: 49.84% +2025-03-14 19:02:37,841 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 1.439 | Acc: 50.25% +2025-03-14 19:02:41,240 - train - INFO - Epoch: 47 | Test Loss: 1.543 | Test Acc: 46.04% +2025-03-14 19:02:41,419 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 1.628 | Acc: 45.31% +2025-03-14 19:02:43,631 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 1.464 | Acc: 49.07% +2025-03-14 19:02:45,773 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 1.454 | Acc: 49.46% +2025-03-14 19:02:47,757 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 1.468 | Acc: 49.15% +2025-03-14 19:02:50,760 - train - INFO - Epoch: 48 | Test Loss: 1.404 | Test Acc: 50.03% +2025-03-14 19:02:58,934 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 1.270 | Acc: 52.34% +2025-03-14 
19:03:00,986 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 1.457 | Acc: 49.50% +2025-03-14 19:03:03,024 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 1.458 | Acc: 49.52% +2025-03-14 19:03:05,108 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 1.460 | Acc: 49.37% +2025-03-14 19:03:08,092 - train - INFO - Epoch: 49 | Test Loss: 1.356 | Test Acc: 53.54% +2025-03-14 19:03:08,239 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 1.282 | Acc: 51.56% +2025-03-14 19:03:10,262 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 1.460 | Acc: 48.94% +2025-03-14 19:03:12,119 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 1.465 | Acc: 48.87% +2025-03-14 19:03:14,009 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 1.448 | Acc: 49.82% +2025-03-14 19:03:16,891 - train - INFO - Epoch: 50 | Test Loss: 1.402 | Test Acc: 51.80% +2025-03-14 19:03:24,969 - train - INFO - 训练完成! diff --git a/Image/LeNet5/code/model.py b/Image/LeNet5/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..5ed820811e24f28f57d68967d20d78969ef1f1f9 --- /dev/null +++ b/Image/LeNet5/code/model.py @@ -0,0 +1,175 @@ +''' +LeNet5 in PyTorch + +LeNet5是由Yann LeCun等人在1998年提出的一个经典卷积神经网络模型。 +主要用于手写数字识别,具有以下特点: +1. 使用卷积层提取特征 +2. 使用平均池化层降低特征维度 +3. 使用全连接层进行分类 +4. 网络结构简单,参数量少 + +网络架构: + 5x5 conv, 6 2x2 pool 5x5 conv, 16 2x2 pool FC 120 FC 84 FC 10 +input(32x32x3) -> [conv1+relu+pool] --------> 28x28x6 -----> 14x14x6 -----> 10x10x16 -----> 5x5x16 -> 120 -> 84 -> 10 + stride 1 stride 2 stride 1 stride 2 + +参考论文: +[1] Y. LeCun, L. Bottou, Y. Bengio, and P. Haffner, "Gradient-based learning applied to document recognition," + Proceedings of the IEEE, vol. 86, no. 11, pp. 2278-2324, Nov. 1998. 
+''' + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class ConvBlock(nn.Module): + """卷积块模块 + + 包含: 卷积层 -> ReLU -> 最大池化层 + + Args: + in_channels (int): 输入通道数 + out_channels (int): 输出通道数 + kernel_size (int): 卷积核大小 + stride (int): 卷积步长 + padding (int): 填充大小 + """ + def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0): + super(ConvBlock, self).__init__() + self.conv = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding + ) + self.relu = nn.ReLU(inplace=True) # inplace操作可以节省内存 + self.pool = nn.MaxPool2d(kernel_size=2, stride=2) + + def forward(self, x): + """前向传播 + + Args: + x (torch.Tensor): 输入特征图 + + Returns: + torch.Tensor: 输出特征图 + """ + x = self.conv(x) + x = self.relu(x) + x = self.pool(x) + return x + + +class LeNet5(nn.Module): + '''LeNet5网络模型 + + 网络结构: + 1. 卷积层1: 3通道输入,6个5x5卷积核,步长1 + 2. 最大池化层1: 2x2窗口,步长2 + 3. 卷积层2: 6通道输入,16个5x5卷积核,步长1 + 4. 最大池化层2: 2x2窗口,步长2 + 5. 全连接层1: 400->120 + 6. 全连接层2: 120->84 + 7. 
全连接层3: 84->num_classes + + Args: + num_classes (int): 分类数量,默认为10 + init_weights (bool): 是否初始化权重,默认为True + ''' + def __init__(self, num_classes=10, init_weights=True): + super(LeNet5, self).__init__() + + # 第一个卷积块: 32x32x3 -> 28x28x6 -> 14x14x6 + self.conv1 = ConvBlock( + in_channels=3, + out_channels=6, + kernel_size=5, + stride=1 + ) + + # 第二个卷积块: 14x14x6 -> 10x10x16 -> 5x5x16 + self.conv2 = ConvBlock( + in_channels=6, + out_channels=16, + kernel_size=5, + stride=1 + ) + + # 全连接层 + self.classifier = nn.Sequential( + nn.Linear(5*5*16, 120), + nn.ReLU(inplace=True), + nn.Linear(120, 84), + nn.ReLU(inplace=True), + nn.Linear(84, num_classes) + ) + + # 初始化权重 + if init_weights: + self._initialize_weights() + + def forward(self, x): + '''前向传播 + + Args: + x (torch.Tensor): 输入图像张量,[N,3,32,32] + + Returns: + torch.Tensor: 输出预测张量,[N,num_classes] + ''' + # 特征提取 + x = self.conv1(x) # -> [N,6,14,14] + x = self.conv2(x) # -> [N,16,5,5] + + # 分类 + x = torch.flatten(x, 1) # -> [N,16*5*5] + x = self.classifier(x) # -> [N,num_classes] + return x + + def _initialize_weights(self): + '''初始化模型权重 + + 采用kaiming初始化方法: + - 卷积层权重采用kaiming_normal_初始化 + - 线性层权重采用normal_初始化 + - 所有偏置项初始化为0 + ''' + for m in self.modules(): + if isinstance(m, nn.Conv2d): + # 采用kaiming初始化,适合ReLU激活函数 + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + nn.init.zeros_(m.bias) + elif isinstance(m, nn.Linear): + # 采用正态分布初始化 + nn.init.normal_(m.weight, 0, 0.01) + nn.init.zeros_(m.bias) + + +def test(): + """测试函数 + + 创建模型并进行前向传播测试,打印模型结构和参数信息 + """ + # 创建模型 + net = LeNet5() + print('Model Structure:') + print(net) + + # 测试前向传播 + x = torch.randn(2,3,32,32) + y = net(x) + print('\nInput Shape:', x.shape) + print('Output Shape:', y.shape) + + # 打印模型信息 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net, (2,3,32,32)) + + +if __name__ == '__main__': + test() diff --git a/Image/LeNet5/code/train.log 
b/Image/LeNet5/code/train.log new file mode 100644 index 0000000000000000000000000000000000000000..41bd4d85c0047b23a44ee9e2bce082f918f38073 --- /dev/null +++ b/Image/LeNet5/code/train.log @@ -0,0 +1,253 @@ +2025-03-14 18:42:58,457 - train - INFO - 开始训练 lenet5 +2025-03-14 18:42:58,466 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:3 +2025-03-14 18:42:59,293 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.303 | Acc: 10.94% +2025-03-14 18:43:01,471 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.266 | Acc: 12.62% +2025-03-14 18:43:03,531 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.178 | Acc: 15.37% +2025-03-14 18:43:05,648 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 2.099 | Acc: 18.60% +2025-03-14 18:43:08,912 - train - INFO - Epoch: 1 | Test Loss: 1.768 | Test Acc: 33.62% +2025-03-14 18:43:09,291 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.851 | Acc: 26.56% +2025-03-14 18:43:11,591 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.848 | Acc: 30.46% +2025-03-14 18:43:13,743 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.831 | Acc: 31.40% +2025-03-14 18:43:16,315 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.823 | Acc: 31.80% +2025-03-14 18:43:19,766 - train - INFO - Epoch: 2 | Test Loss: 1.662 | Test Acc: 38.37% +2025-03-14 18:43:28,827 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.742 | Acc: 33.59% +2025-03-14 18:43:31,227 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.772 | Acc: 34.92% +2025-03-14 18:43:33,508 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.755 | Acc: 34.99% +2025-03-14 18:43:35,700 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.750 | Acc: 35.12% +2025-03-14 18:43:39,124 - train - INFO - Epoch: 3 | Test Loss: 1.663 | Test Acc: 38.84% +2025-03-14 18:43:39,305 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.855 | Acc: 29.69% +2025-03-14 18:43:41,442 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.715 | Acc: 36.98% +2025-03-14 18:43:43,675 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.721 | Acc: 36.66% +2025-03-14 18:43:45,928 - 
train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.704 | Acc: 37.20% +2025-03-14 18:43:49,305 - train - INFO - Epoch: 4 | Test Loss: 1.584 | Test Acc: 41.15% +2025-03-14 18:43:58,749 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.593 | Acc: 39.06% +2025-03-14 18:44:01,045 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.676 | Acc: 39.09% +2025-03-14 18:44:03,286 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.660 | Acc: 39.65% +2025-03-14 18:44:05,565 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.673 | Acc: 39.22% +2025-03-14 18:44:09,108 - train - INFO - Epoch: 5 | Test Loss: 1.637 | Test Acc: 40.55% +2025-03-14 18:44:09,274 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.719 | Acc: 36.72% +2025-03-14 18:44:11,561 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.622 | Acc: 40.80% +2025-03-14 18:44:14,102 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.645 | Acc: 40.35% +2025-03-14 18:44:16,595 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.655 | Acc: 40.03% +2025-03-14 18:44:20,643 - train - INFO - Epoch: 6 | Test Loss: 1.514 | Test Acc: 47.29% +2025-03-14 18:44:30,165 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 1.487 | Acc: 50.78% +2025-03-14 18:44:32,311 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.638 | Acc: 40.48% +2025-03-14 18:44:34,629 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.641 | Acc: 40.55% +2025-03-14 18:44:36,796 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.642 | Acc: 40.62% +2025-03-14 18:44:40,052 - train - INFO - Epoch: 7 | Test Loss: 1.670 | Test Acc: 41.48% +2025-03-14 18:44:40,222 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.629 | Acc: 38.28% +2025-03-14 18:44:42,337 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.647 | Acc: 40.32% +2025-03-14 18:44:44,590 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.638 | Acc: 40.99% +2025-03-14 18:44:46,617 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.648 | Acc: 40.85% +2025-03-14 18:44:50,042 - train - INFO - Epoch: 8 | Test Loss: 1.610 | Test Acc: 43.43% +2025-03-14 18:44:59,307 
- train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.663 | Acc: 44.53% +2025-03-14 18:45:01,655 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.638 | Acc: 41.36% +2025-03-14 18:45:03,999 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.647 | Acc: 40.96% +2025-03-14 18:45:06,123 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.646 | Acc: 40.82% +2025-03-14 18:45:09,386 - train - INFO - Epoch: 9 | Test Loss: 1.465 | Test Acc: 48.76% +2025-03-14 18:45:09,543 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 1.650 | Acc: 40.62% +2025-03-14 18:45:11,645 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 1.581 | Acc: 42.72% +2025-03-14 18:45:13,854 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 1.589 | Acc: 42.82% +2025-03-14 18:45:16,028 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 1.592 | Acc: 42.91% +2025-03-14 18:45:19,650 - train - INFO - Epoch: 10 | Test Loss: 1.483 | Test Acc: 48.24% +2025-03-14 18:45:30,113 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 1.494 | Acc: 42.97% +2025-03-14 18:45:32,744 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 1.616 | Acc: 42.26% +2025-03-14 18:45:35,132 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 1.625 | Acc: 42.03% +2025-03-14 18:45:37,374 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 1.603 | Acc: 42.96% +2025-03-14 18:45:40,850 - train - INFO - Epoch: 11 | Test Loss: 1.505 | Test Acc: 48.63% +2025-03-14 18:45:41,037 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 1.586 | Acc: 46.09% +2025-03-14 18:45:43,281 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 1.577 | Acc: 44.79% +2025-03-14 18:45:45,488 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 1.576 | Acc: 44.34% +2025-03-14 18:45:47,756 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 1.591 | Acc: 43.89% +2025-03-14 18:45:51,120 - train - INFO - Epoch: 12 | Test Loss: 1.605 | Test Acc: 44.80% +2025-03-14 18:46:00,438 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 1.518 | Acc: 44.53% +2025-03-14 18:46:02,653 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 1.599 | Acc: 42.95% 
+2025-03-14 18:46:05,275 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 1.593 | Acc: 43.37% +2025-03-14 18:46:07,588 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 1.598 | Acc: 43.51% +2025-03-14 18:46:10,937 - train - INFO - Epoch: 13 | Test Loss: 1.583 | Test Acc: 42.85% +2025-03-14 18:46:11,116 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 1.560 | Acc: 44.53% +2025-03-14 18:46:13,285 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 1.569 | Acc: 44.65% +2025-03-14 18:46:15,533 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 1.577 | Acc: 43.93% +2025-03-14 18:46:17,803 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 1.580 | Acc: 43.98% +2025-03-14 18:46:21,356 - train - INFO - Epoch: 14 | Test Loss: 1.633 | Test Acc: 44.51% +2025-03-14 18:46:31,128 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 1.845 | Acc: 38.28% +2025-03-14 18:46:33,986 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 1.574 | Acc: 44.63% +2025-03-14 18:46:36,643 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 1.581 | Acc: 44.63% +2025-03-14 18:46:38,812 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 1.582 | Acc: 44.44% +2025-03-14 18:46:42,100 - train - INFO - Epoch: 15 | Test Loss: 1.502 | Test Acc: 47.49% +2025-03-14 18:46:42,283 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 1.625 | Acc: 40.62% +2025-03-14 18:46:44,700 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 1.541 | Acc: 45.17% +2025-03-14 18:46:46,924 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 1.555 | Acc: 44.73% +2025-03-14 18:46:49,212 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 1.552 | Acc: 45.09% +2025-03-14 18:46:52,741 - train - INFO - Epoch: 16 | Test Loss: 1.515 | Test Acc: 47.01% +2025-03-14 18:47:01,778 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 1.648 | Acc: 42.19% +2025-03-14 18:47:03,963 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 1.568 | Acc: 45.44% +2025-03-14 18:47:06,143 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 1.559 | Acc: 45.42% +2025-03-14 18:47:08,345 - train - INFO - Epoch: 17 | Batch: 300 | 
Loss: 1.569 | Acc: 45.12% +2025-03-14 18:47:11,733 - train - INFO - Epoch: 17 | Test Loss: 1.570 | Test Acc: 45.34% +2025-03-14 18:47:11,921 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 1.661 | Acc: 39.06% +2025-03-14 18:47:14,198 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 1.577 | Acc: 43.73% +2025-03-14 18:47:16,401 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 1.587 | Acc: 44.04% +2025-03-14 18:47:18,532 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 1.583 | Acc: 44.23% +2025-03-14 18:47:21,929 - train - INFO - Epoch: 18 | Test Loss: 1.490 | Test Acc: 48.95% +2025-03-14 18:47:31,595 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 1.469 | Acc: 47.66% +2025-03-14 18:47:34,012 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 1.572 | Acc: 44.14% +2025-03-14 18:47:36,582 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 1.564 | Acc: 44.89% +2025-03-14 18:47:39,025 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 1.580 | Acc: 44.51% +2025-03-14 18:47:43,410 - train - INFO - Epoch: 19 | Test Loss: 1.614 | Test Acc: 41.68% +2025-03-14 18:47:43,603 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 1.780 | Acc: 35.16% +2025-03-14 18:47:45,962 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 1.593 | Acc: 43.56% +2025-03-14 18:47:48,244 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 1.582 | Acc: 44.22% +2025-03-14 18:47:50,397 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 1.564 | Acc: 44.98% +2025-03-14 18:47:53,620 - train - INFO - Epoch: 20 | Test Loss: 1.466 | Test Acc: 48.39% +2025-03-14 18:48:02,849 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 1.409 | Acc: 50.00% +2025-03-14 18:48:05,101 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 1.554 | Acc: 45.34% +2025-03-14 18:48:07,205 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 1.559 | Acc: 45.21% +2025-03-14 18:48:09,387 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 1.567 | Acc: 44.95% +2025-03-14 18:48:12,828 - train - INFO - Epoch: 21 | Test Loss: 1.541 | Test Acc: 45.92% +2025-03-14 18:48:12,998 - train - INFO - 
Epoch: 22 | Batch: 0 | Loss: 1.420 | Acc: 50.00% +2025-03-14 18:48:15,472 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 1.553 | Acc: 45.12% +2025-03-14 18:48:17,608 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 1.546 | Acc: 45.46% +2025-03-14 18:48:19,794 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 1.551 | Acc: 45.30% +2025-03-14 18:48:23,214 - train - INFO - Epoch: 22 | Test Loss: 1.537 | Test Acc: 46.88% +2025-03-14 18:48:32,459 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 1.594 | Acc: 42.19% +2025-03-14 18:48:34,612 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 1.580 | Acc: 44.72% +2025-03-14 18:48:36,785 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 1.560 | Acc: 45.09% +2025-03-14 18:48:38,969 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 1.561 | Acc: 45.15% +2025-03-14 18:48:42,488 - train - INFO - Epoch: 23 | Test Loss: 1.570 | Test Acc: 45.55% +2025-03-14 18:48:42,659 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 1.642 | Acc: 44.53% +2025-03-14 18:48:44,937 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 1.577 | Acc: 44.83% +2025-03-14 18:48:47,587 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 1.587 | Acc: 44.60% +2025-03-14 18:48:50,078 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 1.570 | Acc: 45.19% +2025-03-14 18:48:53,664 - train - INFO - Epoch: 24 | Test Loss: 1.460 | Test Acc: 51.03% +2025-03-14 18:49:03,866 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 1.401 | Acc: 47.66% +2025-03-14 18:49:05,995 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 1.530 | Acc: 45.76% +2025-03-14 18:49:08,111 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 1.514 | Acc: 46.54% +2025-03-14 18:49:10,316 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 1.529 | Acc: 46.19% +2025-03-14 18:49:13,929 - train - INFO - Epoch: 25 | Test Loss: 1.556 | Test Acc: 47.06% +2025-03-14 18:49:14,187 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 1.722 | Acc: 40.62% +2025-03-14 18:49:16,552 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 1.515 | Acc: 46.69% +2025-03-14 
18:49:18,799 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 1.527 | Acc: 46.61% +2025-03-14 18:49:20,898 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 1.541 | Acc: 46.01% +2025-03-14 18:49:24,249 - train - INFO - Epoch: 26 | Test Loss: 1.403 | Test Acc: 50.95% +2025-03-14 18:49:33,283 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 1.368 | Acc: 50.00% +2025-03-14 18:49:35,438 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 1.541 | Acc: 46.12% +2025-03-14 18:49:37,619 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 1.546 | Acc: 46.12% +2025-03-14 18:49:39,907 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 1.558 | Acc: 45.75% +2025-03-14 18:49:43,295 - train - INFO - Epoch: 27 | Test Loss: 1.593 | Test Acc: 44.67% +2025-03-14 18:49:43,465 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 1.712 | Acc: 39.84% +2025-03-14 18:49:45,800 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 1.551 | Acc: 46.37% +2025-03-14 18:49:47,998 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 1.551 | Acc: 46.39% +2025-03-14 18:49:50,134 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 1.538 | Acc: 46.92% +2025-03-14 18:49:53,378 - train - INFO - Epoch: 28 | Test Loss: 1.490 | Test Acc: 46.88% +2025-03-14 18:50:02,514 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 1.512 | Acc: 41.41% +2025-03-14 18:50:04,738 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 1.560 | Acc: 45.56% +2025-03-14 18:50:06,892 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 1.530 | Acc: 46.73% +2025-03-14 18:50:09,049 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 1.541 | Acc: 46.39% +2025-03-14 18:50:12,354 - train - INFO - Epoch: 29 | Test Loss: 1.536 | Test Acc: 46.64% +2025-03-14 18:50:12,524 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 1.597 | Acc: 46.09% +2025-03-14 18:50:14,711 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 1.548 | Acc: 46.50% +2025-03-14 18:50:16,934 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 1.556 | Acc: 45.93% +2025-03-14 18:50:19,218 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 1.543 
| Acc: 46.32% +2025-03-14 18:50:22,723 - train - INFO - Epoch: 30 | Test Loss: 1.452 | Test Acc: 50.77% +2025-03-14 18:50:32,129 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 1.449 | Acc: 49.22% +2025-03-14 18:50:34,311 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 1.533 | Acc: 47.13% +2025-03-14 18:50:36,539 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 1.542 | Acc: 46.49% +2025-03-14 18:50:38,682 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 1.539 | Acc: 46.58% +2025-03-14 18:50:42,089 - train - INFO - Epoch: 31 | Test Loss: 1.386 | Test Acc: 52.82% +2025-03-14 18:50:42,259 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 1.537 | Acc: 49.22% +2025-03-14 18:50:44,519 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 1.507 | Acc: 47.93% +2025-03-14 18:50:46,656 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 1.502 | Acc: 47.95% +2025-03-14 18:50:48,818 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 1.502 | Acc: 47.57% +2025-03-14 18:50:52,272 - train - INFO - Epoch: 32 | Test Loss: 1.482 | Test Acc: 48.45% +2025-03-14 18:51:01,650 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 1.600 | Acc: 42.97% +2025-03-14 18:51:03,937 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 1.516 | Acc: 46.92% +2025-03-14 18:51:06,268 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 1.528 | Acc: 46.76% +2025-03-14 18:51:08,704 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 1.534 | Acc: 46.62% +2025-03-14 18:51:12,262 - train - INFO - Epoch: 33 | Test Loss: 1.430 | Test Acc: 49.99% +2025-03-14 18:51:12,443 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 1.585 | Acc: 45.31% +2025-03-14 18:51:14,733 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 1.501 | Acc: 47.97% +2025-03-14 18:51:17,010 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 1.512 | Acc: 47.52% +2025-03-14 18:51:19,395 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 1.514 | Acc: 47.37% +2025-03-14 18:51:23,243 - train - INFO - Epoch: 34 | Test Loss: 1.407 | Test Acc: 52.49% +2025-03-14 18:51:32,787 - train - INFO - Epoch: 35 | 
Batch: 0 | Loss: 1.449 | Acc: 53.12% +2025-03-14 18:51:35,089 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 1.533 | Acc: 46.77% +2025-03-14 18:51:37,330 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 1.508 | Acc: 47.78% +2025-03-14 18:51:39,569 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 1.505 | Acc: 47.97% +2025-03-14 18:51:43,190 - train - INFO - Epoch: 35 | Test Loss: 1.518 | Test Acc: 48.63% +2025-03-14 18:51:43,386 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 1.662 | Acc: 41.41% +2025-03-14 18:51:45,737 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 1.537 | Acc: 46.06% +2025-03-14 18:51:47,974 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 1.524 | Acc: 46.58% +2025-03-14 18:51:50,202 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 1.526 | Acc: 46.55% +2025-03-14 18:51:53,752 - train - INFO - Epoch: 36 | Test Loss: 1.361 | Test Acc: 53.00% +2025-03-14 18:52:03,416 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 1.415 | Acc: 48.44% +2025-03-14 18:52:05,593 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 1.575 | Acc: 44.76% +2025-03-14 18:52:07,827 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 1.538 | Acc: 45.99% +2025-03-14 18:52:09,999 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 1.526 | Acc: 46.51% +2025-03-14 18:52:13,370 - train - INFO - Epoch: 37 | Test Loss: 1.458 | Test Acc: 49.32% +2025-03-14 18:52:13,541 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 1.553 | Acc: 49.22% +2025-03-14 18:52:15,740 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 1.476 | Acc: 49.10% +2025-03-14 18:52:17,984 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 1.475 | Acc: 48.56% +2025-03-14 18:52:20,396 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 1.509 | Acc: 47.59% +2025-03-14 18:52:23,784 - train - INFO - Epoch: 38 | Test Loss: 1.395 | Test Acc: 51.78% +2025-03-14 18:52:32,947 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 1.398 | Acc: 51.56% +2025-03-14 18:52:35,194 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 1.582 | Acc: 44.60% +2025-03-14 18:52:37,297 - 
train - INFO - Epoch: 39 | Batch: 200 | Loss: 1.554 | Acc: 45.54% +2025-03-14 18:52:39,638 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 1.534 | Acc: 46.39% +2025-03-14 18:52:43,108 - train - INFO - Epoch: 39 | Test Loss: 1.525 | Test Acc: 45.86% +2025-03-14 18:52:43,297 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 1.696 | Acc: 32.81% +2025-03-14 18:52:45,599 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 1.522 | Acc: 47.46% +2025-03-14 18:52:47,940 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 1.516 | Acc: 47.49% +2025-03-14 18:52:50,269 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 1.508 | Acc: 47.60% +2025-03-14 18:52:53,724 - train - INFO - Epoch: 40 | Test Loss: 1.450 | Test Acc: 51.11% +2025-03-14 18:53:03,410 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 1.537 | Acc: 47.66% +2025-03-14 18:53:05,800 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 1.507 | Acc: 47.56% +2025-03-14 18:53:08,177 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 1.514 | Acc: 47.43% +2025-03-14 18:53:10,666 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 1.510 | Acc: 47.77% +2025-03-14 18:53:14,572 - train - INFO - Epoch: 41 | Test Loss: 1.517 | Test Acc: 48.48% +2025-03-14 18:53:14,742 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 1.796 | Acc: 42.19% +2025-03-14 18:53:16,888 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 1.473 | Acc: 48.87% +2025-03-14 18:53:18,973 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 1.491 | Acc: 48.63% +2025-03-14 18:53:21,259 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 1.505 | Acc: 48.21% +2025-03-14 18:53:24,601 - train - INFO - Epoch: 42 | Test Loss: 1.561 | Test Acc: 46.57% +2025-03-14 18:53:33,569 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 1.608 | Acc: 44.53% +2025-03-14 18:53:35,694 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 1.491 | Acc: 48.58% +2025-03-14 18:53:37,928 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 1.491 | Acc: 48.62% +2025-03-14 18:53:40,500 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 1.486 | Acc: 48.66% 
+2025-03-14 18:53:43,816 - train - INFO - Epoch: 43 | Test Loss: 1.374 | Test Acc: 52.62% +2025-03-14 18:53:43,991 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 1.489 | Acc: 49.22% +2025-03-14 18:53:46,176 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 1.486 | Acc: 48.23% +2025-03-14 18:53:48,303 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 1.498 | Acc: 47.92% +2025-03-14 18:53:50,370 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 1.487 | Acc: 48.34% +2025-03-14 18:53:53,740 - train - INFO - Epoch: 44 | Test Loss: 1.373 | Test Acc: 52.73% +2025-03-14 18:54:03,408 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 1.566 | Acc: 47.66% +2025-03-14 18:54:05,572 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 1.531 | Acc: 47.00% +2025-03-14 18:54:07,745 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 1.512 | Acc: 47.59% +2025-03-14 18:54:09,919 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 1.500 | Acc: 48.02% +2025-03-14 18:54:13,351 - train - INFO - Epoch: 45 | Test Loss: 1.426 | Test Acc: 51.30% +2025-03-14 18:54:13,612 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 1.334 | Acc: 58.59% +2025-03-14 18:54:15,863 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 1.496 | Acc: 48.13% +2025-03-14 18:54:18,032 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 1.489 | Acc: 48.47% +2025-03-14 18:54:20,219 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 1.495 | Acc: 48.29% +2025-03-14 18:54:23,452 - train - INFO - Epoch: 46 | Test Loss: 1.528 | Test Acc: 48.13% +2025-03-14 18:54:32,795 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 1.657 | Acc: 38.28% +2025-03-14 18:54:35,033 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 1.543 | Acc: 46.42% +2025-03-14 18:54:37,369 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 1.516 | Acc: 47.45% +2025-03-14 18:54:39,815 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 1.499 | Acc: 48.16% +2025-03-14 18:54:43,735 - train - INFO - Epoch: 47 | Test Loss: 1.527 | Test Acc: 49.94% +2025-03-14 18:54:43,988 - train - INFO - Epoch: 48 | Batch: 0 | 
Loss: 1.510 | Acc: 44.53% +2025-03-14 18:54:46,176 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 1.503 | Acc: 48.13% +2025-03-14 18:54:48,509 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 1.501 | Acc: 48.09% +2025-03-14 18:54:50,812 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 1.490 | Acc: 48.40% +2025-03-14 18:54:54,075 - train - INFO - Epoch: 48 | Test Loss: 1.414 | Test Acc: 51.34% +2025-03-14 18:55:02,937 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 1.570 | Acc: 38.28% +2025-03-14 18:55:05,141 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 1.473 | Acc: 49.60% +2025-03-14 18:55:07,346 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 1.474 | Acc: 49.48% +2025-03-14 18:55:09,548 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 1.478 | Acc: 49.18% +2025-03-14 18:55:12,872 - train - INFO - Epoch: 49 | Test Loss: 1.352 | Test Acc: 53.22% +2025-03-14 18:55:13,039 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 1.388 | Acc: 51.56% +2025-03-14 18:55:15,328 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 1.490 | Acc: 48.34% +2025-03-14 18:55:17,410 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 1.484 | Acc: 48.32% +2025-03-14 18:55:19,595 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 1.474 | Acc: 49.08% +2025-03-14 18:55:22,981 - train - INFO - Epoch: 50 | Test Loss: 1.513 | Test Acc: 46.94% +2025-03-14 18:55:32,176 - train - INFO - 训练完成! 
diff --git a/Image/LeNet5/code/train.py b/Image/LeNet5/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..e365e829731a18716f8b1f6cb5c704747eeea1f9 --- /dev/null +++ b/Image/LeNet5/code/train.py @@ -0,0 +1,63 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import LeNet5 + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = LeNet5() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='lenet5', + save_type='0', + layer_name='conv2', + interval = 2 + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='lenet5', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='lenet5', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path, + layer_name='conv2', + interval = 2 + ) + +if __name__ == '__main__': + main() diff --git a/Image/LeNet5/dataset/.gitkeep b/Image/LeNet5/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/Image/LeNet5/model/.gitkeep b/Image/LeNet5/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/LeNet5/model/0/epoch1/embeddings.npy b/Image/LeNet5/model/0/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..653998a1e7125d56c6b76a057d431a1c0abab019 --- /dev/null +++ b/Image/LeNet5/model/0/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d655d14885b3382a95cd8f1685db6b4a3e114db764acfe1e8fec0132ccb241b2 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch1/subject_model.pth b/Image/LeNet5/model/0/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ce8098b86de85eaf5a0a588814ddf366360c983c --- /dev/null +++ b/Image/LeNet5/model/0/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b46e5428993d298db13d39c9de2bd245092f833664ec62674d45d84e950c7fca +size 252044 diff --git a/Image/LeNet5/model/0/epoch10/embeddings.npy b/Image/LeNet5/model/0/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c3f20848c5b561439b23460bb43cff95233c4492 --- /dev/null +++ b/Image/LeNet5/model/0/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:665a4d9333b30c8d44f1b314c2c5ba37b6a24555e8d6ce9000f9e15c43dae227 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch10/subject_model.pth b/Image/LeNet5/model/0/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ce7f291699ab354ce05699286f928847d9ef61da --- /dev/null +++ b/Image/LeNet5/model/0/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:afaf5726ee219183b237b7a52fa33a9b6b942069d8042bd09ffd66a5f133cfd8 +size 252044 diff --git a/Image/LeNet5/model/0/epoch11/embeddings.npy b/Image/LeNet5/model/0/epoch11/embeddings.npy 
new file mode 100644 index 0000000000000000000000000000000000000000..0696bcba99ece8bb312000532302e0a65f5ce50b --- /dev/null +++ b/Image/LeNet5/model/0/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa27a313f1475dbb461bbcd4d22ef2fe9f792d1a6977d66a5d51afc21fca9c1a +size 80000128 diff --git a/Image/LeNet5/model/0/epoch11/subject_model.pth b/Image/LeNet5/model/0/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7db546eb8dbe267bcb34eb62003757fa435814b3 --- /dev/null +++ b/Image/LeNet5/model/0/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a7d883185412eedabf05334b090d3666aa5b026d3959e24a4787e65d6a03747 +size 252044 diff --git a/Image/LeNet5/model/0/epoch12/embeddings.npy b/Image/LeNet5/model/0/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..952bf28bf201b2fb9b816ece343e51281373d91d --- /dev/null +++ b/Image/LeNet5/model/0/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1488b8d7523aea773179f4dfb8d3e23a4236364ba30a418e5a76609ac238a596 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch12/subject_model.pth b/Image/LeNet5/model/0/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a0b19084b9a53be765fe9b8ebaedf829ebcb6cac --- /dev/null +++ b/Image/LeNet5/model/0/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2bd3be1f9558b987778821cb43ed9c439f086dabf866e4661b5a2cc375731a42 +size 252044 diff --git a/Image/LeNet5/model/0/epoch13/embeddings.npy b/Image/LeNet5/model/0/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4637a85e1a881d59ed2381b1a99bfb7a13f7471b --- /dev/null +++ b/Image/LeNet5/model/0/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:d49e3a95395193b35e150a9438316248e0ee4a369813751067a77829a42b429a +size 80000128 diff --git a/Image/LeNet5/model/0/epoch13/subject_model.pth b/Image/LeNet5/model/0/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..868f14019be58f0ebf4230fa24444728aff8288b --- /dev/null +++ b/Image/LeNet5/model/0/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a8b1cc63ef1f36f900e8490c50e3cbbe7d800b7719338b2bff927b076af17904 +size 252044 diff --git a/Image/LeNet5/model/0/epoch14/embeddings.npy b/Image/LeNet5/model/0/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9c1d01d822c41e3782b2f2344396c85fa37bf650 --- /dev/null +++ b/Image/LeNet5/model/0/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31af6953f66a6dc14e6cb215655e4fd6394903d37dfa9b4a6f1573f058ff1faa +size 80000128 diff --git a/Image/LeNet5/model/0/epoch14/subject_model.pth b/Image/LeNet5/model/0/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2deceb0eb869a2f8cbcca9b124a3d82137bf54b6 --- /dev/null +++ b/Image/LeNet5/model/0/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0414cd87c5c93e0b6e0c9d769b4d06a6a41fcd469a95626b329f13d1453f05c +size 252044 diff --git a/Image/LeNet5/model/0/epoch15/embeddings.npy b/Image/LeNet5/model/0/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..69b903882e3f80322ee27ff72139331d79879447 --- /dev/null +++ b/Image/LeNet5/model/0/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1560581189a4af697269eac1b4e09e51ec354e54f571ee8dae4dca7a41510394 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch15/subject_model.pth b/Image/LeNet5/model/0/epoch15/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..90800e87722292c4396ae4b2c5bd01fe25517702 --- /dev/null +++ b/Image/LeNet5/model/0/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60fabe1b5a606701a42272b793f398c0fc707641f558e0f7d16c8b6fa5830920 +size 252044 diff --git a/Image/LeNet5/model/0/epoch16/embeddings.npy b/Image/LeNet5/model/0/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..80717ac24f4e6396ba4c767f3d03a680517fc664 --- /dev/null +++ b/Image/LeNet5/model/0/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:95820289b7110de5ae0d449f35b2bc9ebe660358694d7d716f5f6c5028102d7f +size 80000128 diff --git a/Image/LeNet5/model/0/epoch16/subject_model.pth b/Image/LeNet5/model/0/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..bea5be18acd7c2d737a76a49095c1a7dddf50bee --- /dev/null +++ b/Image/LeNet5/model/0/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f5c50c8c7025a3c82857386dd050fee56edd7f6c4ab50f7e12a773574010d4c +size 252044 diff --git a/Image/LeNet5/model/0/epoch17/embeddings.npy b/Image/LeNet5/model/0/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f545fffdec6fec1b4040ae250a8699cc7e00eac5 --- /dev/null +++ b/Image/LeNet5/model/0/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8fed44213f6b64eefac4667db5a5badbe81bd622f26e79dae7d530b1f313ce8e +size 80000128 diff --git a/Image/LeNet5/model/0/epoch17/subject_model.pth b/Image/LeNet5/model/0/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0265987ef2642c312cc85a03e6df78ca33aa878a --- /dev/null +++ b/Image/LeNet5/model/0/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:1aea2cbb38d12e5a337e0ff1e7f4e36e2e44a99cd6588de352a67fde29f39c7c +size 252044 diff --git a/Image/LeNet5/model/0/epoch18/embeddings.npy b/Image/LeNet5/model/0/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..bf654021c1179d53899c21d4565fc6bd561a7328 --- /dev/null +++ b/Image/LeNet5/model/0/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:faaa1e0a2d62717032c7bb74eaf4dac2ba5583ebf5280ae3d004dac75624d3b7 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch18/subject_model.pth b/Image/LeNet5/model/0/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..de10ed8ea8e5ee26fae07be59c89a948d0b33eb6 --- /dev/null +++ b/Image/LeNet5/model/0/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:27e685abe5dd893c1e80ffbfcdf271a37ffdcadbf53660f093bdb5e0ba14e0fa +size 252044 diff --git a/Image/LeNet5/model/0/epoch19/embeddings.npy b/Image/LeNet5/model/0/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f06560c5c6b256f49184d3f7b98cbc89e9810ec3 --- /dev/null +++ b/Image/LeNet5/model/0/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7a3d7efd64c141e0df4e2fa3df5e3218ef41a3f59f5508cfcc9d03d22627bdb +size 80000128 diff --git a/Image/LeNet5/model/0/epoch19/subject_model.pth b/Image/LeNet5/model/0/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7f78f15e06634c47d54676a349a15036b079a638 --- /dev/null +++ b/Image/LeNet5/model/0/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2991e1dfcee085ec3ced36eda5e9950ca6f5601067d6e1cf2480c15276e8caf +size 252044 diff --git a/Image/LeNet5/model/0/epoch2/embeddings.npy b/Image/LeNet5/model/0/epoch2/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..2dff9295e0aa237ae4aac6a99093047ef8e27595 --- /dev/null +++ b/Image/LeNet5/model/0/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dc294d3f6cf4cd651dcaf01860d8fc22ccb82119d058553e94966474ae8b3a3a +size 80000128 diff --git a/Image/LeNet5/model/0/epoch2/subject_model.pth b/Image/LeNet5/model/0/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..aaf2c830f18f9e00ddc575ceaf222b508e581155 --- /dev/null +++ b/Image/LeNet5/model/0/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:09210b22c1a0194e80f45a46c2911d6e36528462b7e10506147259ccd413f4cb +size 252044 diff --git a/Image/LeNet5/model/0/epoch20/embeddings.npy b/Image/LeNet5/model/0/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8d06919de63309f3f5e4c8367886ef472690575a --- /dev/null +++ b/Image/LeNet5/model/0/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8893872686d606f527ce71bd2976a5afbaceddb8efdb5ffc3f9dff10106a92a3 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch20/subject_model.pth b/Image/LeNet5/model/0/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d1e4698487aff1a295c4c3390ad1d9a79b4f217c --- /dev/null +++ b/Image/LeNet5/model/0/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b46a837f563991a85fbdbed7a42a6ecd79d21b6d55b22444954fed2f25d1de1a +size 252044 diff --git a/Image/LeNet5/model/0/epoch21/embeddings.npy b/Image/LeNet5/model/0/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b32fc57f8b5d63fc500c32799cc587815326cc0b --- /dev/null +++ b/Image/LeNet5/model/0/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:e74d9423914d5952425b0ba4a55647fb22c844bc218c6b82385e59987717d0a8 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch21/subject_model.pth b/Image/LeNet5/model/0/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4e20ae0a05acbd95444e66a6bc68f67f36461d50 --- /dev/null +++ b/Image/LeNet5/model/0/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:458e82ece89d332816695b07cfd044ff91b4d2165860a1679110f0be7b97644d +size 252044 diff --git a/Image/LeNet5/model/0/epoch22/embeddings.npy b/Image/LeNet5/model/0/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6cdc094553060d87c3666123f151199f8ceab9fa --- /dev/null +++ b/Image/LeNet5/model/0/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b49174e26677adc821c3139768a12ca82364f54b9e8673cefc4dc34e61eca75e +size 80000128 diff --git a/Image/LeNet5/model/0/epoch22/subject_model.pth b/Image/LeNet5/model/0/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..dfd4882a1df00932cfba3817eafa89b20a4c5b95 --- /dev/null +++ b/Image/LeNet5/model/0/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d89bb517b32f9a90e8e6faac79e69c79287b522d1312d2019b7ee24859dae13d +size 252044 diff --git a/Image/LeNet5/model/0/epoch23/embeddings.npy b/Image/LeNet5/model/0/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..32e5cd0d8b582853b0a0ed60140b3209a8d11acd --- /dev/null +++ b/Image/LeNet5/model/0/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:92b845c651cb81af8b0d950f15f9ba2591f48a99d84a961a30c0bc7308d8dbfd +size 80000128 diff --git a/Image/LeNet5/model/0/epoch23/subject_model.pth b/Image/LeNet5/model/0/epoch23/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..2fc2ecd774a44345eef8066ecff1816f0e444451 --- /dev/null +++ b/Image/LeNet5/model/0/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88d57971783fe2e96d8498a10f6b70e721705daf5775440ef0f99a3db85df3fd +size 252044 diff --git a/Image/LeNet5/model/0/epoch24/embeddings.npy b/Image/LeNet5/model/0/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..cef1a5c327bbc8876532c50685334601057de960 --- /dev/null +++ b/Image/LeNet5/model/0/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a277a8eef884ba13a95ce10c04454c2ca60bb8ff8f7e7b9d56f6153b9f8ec0a +size 80000128 diff --git a/Image/LeNet5/model/0/epoch24/subject_model.pth b/Image/LeNet5/model/0/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9f2fc8fa2897f9516f42b890f59bb12d4f44175f --- /dev/null +++ b/Image/LeNet5/model/0/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb9b1f7c1264809e58270fa76106ae6fb51272e14eca72030948f1a56ec9c29e +size 252044 diff --git a/Image/LeNet5/model/0/epoch25/embeddings.npy b/Image/LeNet5/model/0/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d46d4428fe43e15e2a2255f2507f8ab7aa9d37b4 --- /dev/null +++ b/Image/LeNet5/model/0/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab1d960f06c03a3741955c8518838e769e765fb418c9b1afbdfa803ff2378fac +size 80000128 diff --git a/Image/LeNet5/model/0/epoch25/subject_model.pth b/Image/LeNet5/model/0/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a7d7a2308165f261a21e5ff521f1f9d11be85860 --- /dev/null +++ b/Image/LeNet5/model/0/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:ba5f7942c3996395f365d64ac28737ff2b69b217dfc0b3a62ff9132ed3ec0e4c +size 252044 diff --git a/Image/LeNet5/model/0/epoch3/embeddings.npy b/Image/LeNet5/model/0/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..5d988fb0c8a55fbfe52c9bd58c2439c0c864c3fe --- /dev/null +++ b/Image/LeNet5/model/0/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aac6bad73449d12d18472394a53d785742a02786394fbc089f28fc3fe2afaa80 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch3/subject_model.pth b/Image/LeNet5/model/0/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4107615e6a0f60dce1988cda957128253b749325 --- /dev/null +++ b/Image/LeNet5/model/0/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8226d8a2082e0656b8e6b315e9ecfc0a7f0f9b604217effb9ed298d76544456f +size 252044 diff --git a/Image/LeNet5/model/0/epoch4/embeddings.npy b/Image/LeNet5/model/0/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..12657be9467e7b928ee9a43f5f85ea97e165b5a0 --- /dev/null +++ b/Image/LeNet5/model/0/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:95629917d53caca39ba2773a6dc55ffae1fec6f61e232a0b947ed24da6e4fe79 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch4/subject_model.pth b/Image/LeNet5/model/0/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..de6dbc4d4e23129343014048487b40d7ce6a706f --- /dev/null +++ b/Image/LeNet5/model/0/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df7aaeb076f37feaf23d3a83d5308e10c2ad0ed0ea8a49fc720e69b1b9dde91b +size 252044 diff --git a/Image/LeNet5/model/0/epoch5/embeddings.npy b/Image/LeNet5/model/0/epoch5/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..e09da97754ab2a2dc4072b14cf7b6e5f82a85d21 --- /dev/null +++ b/Image/LeNet5/model/0/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4ccac8939946163b539bfc9278eda131d07c981b066077a469dffb4b320d672 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch5/subject_model.pth b/Image/LeNet5/model/0/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3bd91d0bcddc7cae8fba2362d0304cfb0eda51e3 --- /dev/null +++ b/Image/LeNet5/model/0/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c0d1a9e6868dc418dd901941020e8c1a7a9a2b4dc2a80c59e95d407d31378a0 +size 252044 diff --git a/Image/LeNet5/model/0/epoch6/embeddings.npy b/Image/LeNet5/model/0/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..823d45a70a4f5511b587f46bfe62a0729549a685 --- /dev/null +++ b/Image/LeNet5/model/0/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:65e8df455ebe1f9b92de02121444d18df821b6c63a0ebbb3237d60f4b0f0307f +size 80000128 diff --git a/Image/LeNet5/model/0/epoch6/subject_model.pth b/Image/LeNet5/model/0/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..04d5795ea713cd72f77cded85e77ff097a770657 --- /dev/null +++ b/Image/LeNet5/model/0/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b81b0575832fb9fc512117de625693171a55535dcc9b49ab461bc6f747b78b30 +size 252044 diff --git a/Image/LeNet5/model/0/epoch7/embeddings.npy b/Image/LeNet5/model/0/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..cc63251550d00121e158f08084f17577b6cc6d11 --- /dev/null +++ b/Image/LeNet5/model/0/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:04ebb19a6d66360fdd3bcb32c0c2898319b8bbe0a9f1ce93363861e537cd0477 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch7/subject_model.pth b/Image/LeNet5/model/0/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..61ebf857535f92b280769d7429cd05d888278e11 --- /dev/null +++ b/Image/LeNet5/model/0/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf9bef940b377d22374ae617f042abf59dab503a518bf0ecfd147b269887b689 +size 252044 diff --git a/Image/LeNet5/model/0/epoch8/embeddings.npy b/Image/LeNet5/model/0/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6ea63915deed5c02c74cd4eac3a44cf9169c121b --- /dev/null +++ b/Image/LeNet5/model/0/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7aa1ad48e79d67d51c5537d92864dca020231df738d5d43d6759ac6f875a94d2 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch8/subject_model.pth b/Image/LeNet5/model/0/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5666370ee151ce76e224b5f914f894e9e8332684 --- /dev/null +++ b/Image/LeNet5/model/0/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21cbdf9547b32cf78fbe8f32954fb9337dcb2af90aa2d580a26024feee8baf38 +size 252044 diff --git a/Image/LeNet5/model/0/epoch9/embeddings.npy b/Image/LeNet5/model/0/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ff7a3e482072e01d2a139ecd9a20d2c58247a533 --- /dev/null +++ b/Image/LeNet5/model/0/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5086d69551294b13125d61e0bf2653088a55d6cbeba5ea3966b62fe688e19855 +size 80000128 diff --git a/Image/LeNet5/model/0/epoch9/subject_model.pth b/Image/LeNet5/model/0/epoch9/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..5bcac6f13d4b24cc59b85e72ccc8ee9f657b3ea2 --- /dev/null +++ b/Image/LeNet5/model/0/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e10835b1767b349c9063f14847b39f28529b0238d7b1b7931352d06515ac947d +size 252044 diff --git a/Image/LeNet5/model/0/layer_info.json b/Image/LeNet5/model/0/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..2d1322168bdc6654321e772b2653b479352060b7 --- /dev/null +++ b/Image/LeNet5/model/0/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "conv2", "dim": 400} \ No newline at end of file diff --git a/Image/LeNet5/model/2/epoch1/embeddings.npy b/Image/LeNet5/model/2/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a3b5ce0f27ca123a193274ff22e787c015abb8a0 --- /dev/null +++ b/Image/LeNet5/model/2/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42ada91e2195f03c5576e629a4225f24c376c828da045208fe67c6d95142b9e5 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch1/subject_model.pth b/Image/LeNet5/model/2/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d234a08b79420564f325ac6e612cd08221729842 --- /dev/null +++ b/Image/LeNet5/model/2/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:efcdc8c292c242431c6a093f9bd45a908b7a76be45cb00afc6b7b3badb5ed779 +size 252044 diff --git a/Image/LeNet5/model/2/epoch10/embeddings.npy b/Image/LeNet5/model/2/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..95da0834d4a1210fdff8ecf9770d6bac84c8fe16 --- /dev/null +++ b/Image/LeNet5/model/2/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:740a3c95a0cea08c04024d2a5652e0eb1151f6e06ce90e26b801f95d57e78de3 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch10/subject_model.pth 
b/Image/LeNet5/model/2/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c026410e936acf2ed87fa4af3f51115d559b2a0a --- /dev/null +++ b/Image/LeNet5/model/2/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1c55d1872a8af1ba83e32f8fe67bf287a1124a9d1c15342b6379dae415a8138 +size 252044 diff --git a/Image/LeNet5/model/2/epoch11/embeddings.npy b/Image/LeNet5/model/2/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9b9152ea56fc048ef87e2ace52057d9456a28be4 --- /dev/null +++ b/Image/LeNet5/model/2/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58210d2c6dda80deaba3b0f73045bac8b084185a195c2500035a5a25ad06d0c5 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch11/subject_model.pth b/Image/LeNet5/model/2/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5b79d294ffbdb325e819efc99c4962d47555a85d --- /dev/null +++ b/Image/LeNet5/model/2/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:274e3a8939b107906d4fe7e3c4fdc4c4d2d35b486bb8201f43447ac38bf568a3 +size 252044 diff --git a/Image/LeNet5/model/2/epoch12/embeddings.npy b/Image/LeNet5/model/2/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0c2fbb462066fb9cb33933bfa6d2cc952f3bed3b --- /dev/null +++ b/Image/LeNet5/model/2/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b82bf9476db46f44ccb09fc9667025c6b73617a4b9f591784cda14a65f7181f7 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch12/subject_model.pth b/Image/LeNet5/model/2/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cad8b28f4e59228af373dd79eddca46698bb362b --- /dev/null +++ b/Image/LeNet5/model/2/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:b53e37a3af7fe7fb2c3f3c2381ca22ffa0ee3464ded13ac67fbcae60915c5d94 +size 252044 diff --git a/Image/LeNet5/model/2/epoch13/embeddings.npy b/Image/LeNet5/model/2/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e9c00856ee8cc0705689f217d1d2e701f1c3c33d --- /dev/null +++ b/Image/LeNet5/model/2/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bff821b9b857f767bf2e04900c6dab04cbe80b1707a7e91f212787cc2189db5d +size 80000128 diff --git a/Image/LeNet5/model/2/epoch13/subject_model.pth b/Image/LeNet5/model/2/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ece9788d08c3c2fb0f8d3779c4e9058eb62ed34a --- /dev/null +++ b/Image/LeNet5/model/2/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b8fa39da4383ed1644cb6f39bc89f53b6683d63f36ccd58d49835f212fac949e +size 252044 diff --git a/Image/LeNet5/model/2/epoch14/embeddings.npy b/Image/LeNet5/model/2/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ce5c0b6cba704d95e3cd6fb01264bc1720512526 --- /dev/null +++ b/Image/LeNet5/model/2/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c47876b57cfb6d94d63492899e2d3249bf773d7ab3ca94442e1e3b401561c6c6 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch14/subject_model.pth b/Image/LeNet5/model/2/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8c655fe190fb56d863d6d030b9b21f233c2b1af4 --- /dev/null +++ b/Image/LeNet5/model/2/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:495aacc5b2ed7662fcd2459616f6f8db83a1f14fd918a2d48a06512f755c3664 +size 252044 diff --git a/Image/LeNet5/model/2/epoch15/embeddings.npy b/Image/LeNet5/model/2/epoch15/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..89fb17123e2bd9f691fefc1a4305691460a824d6 --- /dev/null +++ b/Image/LeNet5/model/2/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50b29be11cde19b21e30a3a9335728a96fc445f72a74365a9bfeabd7a2533b77 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch15/subject_model.pth b/Image/LeNet5/model/2/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..dfb96051ab4e50df2dda6ba562f14912c0e4a59c --- /dev/null +++ b/Image/LeNet5/model/2/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:85d4b03e586aeb40c58e7fbe868deb4a450ac7a9bf00ce218e37209d7e2af1ee +size 252044 diff --git a/Image/LeNet5/model/2/epoch16/embeddings.npy b/Image/LeNet5/model/2/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..14009f143f6d4c267f47fb594cc7dff19f71034d --- /dev/null +++ b/Image/LeNet5/model/2/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:27a7f4eadb10f507d3d45c478dbbc027ca3f5b7cfd8ac43a8cd17906c592b41d +size 80000128 diff --git a/Image/LeNet5/model/2/epoch16/subject_model.pth b/Image/LeNet5/model/2/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..644d88d974d33fd95af5eaacbdd7a6972219df43 --- /dev/null +++ b/Image/LeNet5/model/2/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a2e87364157a607d0013bb8f985fe7470ff1186ca1ffa90fbc17b596d00b0a2 +size 252044 diff --git a/Image/LeNet5/model/2/epoch17/embeddings.npy b/Image/LeNet5/model/2/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1ea9903dfec99f86e2b527c72b4704b196d70108 --- /dev/null +++ b/Image/LeNet5/model/2/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:46746c156d284f7fa1bdf18ec284731e5dc45c799238a89d5d55c5af806b95aa +size 80000128 diff --git a/Image/LeNet5/model/2/epoch17/subject_model.pth b/Image/LeNet5/model/2/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7950ceae713061e9513aace971b6192b66e54052 --- /dev/null +++ b/Image/LeNet5/model/2/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9e2d7118ef7905d80d5396295b4774136c84589d1662f4ed66034d11ef79029 +size 252044 diff --git a/Image/LeNet5/model/2/epoch18/embeddings.npy b/Image/LeNet5/model/2/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..fe8fcf0ffc64e2d12d0921e5f07a576ba0a6ba96 --- /dev/null +++ b/Image/LeNet5/model/2/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:693eff53a9004782a1f39b032ffad575f4aee06cdefc95120fd524d33b7c994f +size 80000128 diff --git a/Image/LeNet5/model/2/epoch18/subject_model.pth b/Image/LeNet5/model/2/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4b9f9f2e6d5bf804b639e1e3c2ad0779fbda9aba --- /dev/null +++ b/Image/LeNet5/model/2/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:83039845b18aa868f45cf6099ecebd8592a303f312c87b960235ba11cb06e15d +size 252044 diff --git a/Image/LeNet5/model/2/epoch19/embeddings.npy b/Image/LeNet5/model/2/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..7d482c8d28a1e8b9a117f1277f2f10bff8c2f498 --- /dev/null +++ b/Image/LeNet5/model/2/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:90c099e5ae06e504cd35fa9656779490e2ab822491c04f3a3baf85aa7162efdd +size 80000128 diff --git a/Image/LeNet5/model/2/epoch19/subject_model.pth b/Image/LeNet5/model/2/epoch19/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..8de023098a8b248c36380e1c879f991530be2677 --- /dev/null +++ b/Image/LeNet5/model/2/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8535547fa9821d701f75edcc12c60e7ab6af3dc397d913a1cad4f7b449f39cac +size 252044 diff --git a/Image/LeNet5/model/2/epoch2/embeddings.npy b/Image/LeNet5/model/2/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..57f3aa354d531f1883e48ca2bcff86b2300b2a70 --- /dev/null +++ b/Image/LeNet5/model/2/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c88f1ef914015d39a4b74c9993488d151fe46ea314040f92578374411123ba3d +size 80000128 diff --git a/Image/LeNet5/model/2/epoch2/subject_model.pth b/Image/LeNet5/model/2/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cd50ed2280c775eb6641f0229df247f2c76b3c35 --- /dev/null +++ b/Image/LeNet5/model/2/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c1f01e8b560cbe3ea33bbb7738845038787829c4a91d71d7742993b7b459663 +size 252044 diff --git a/Image/LeNet5/model/2/epoch20/embeddings.npy b/Image/LeNet5/model/2/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b99134766ccfdafc52231c02731e506734c38e77 --- /dev/null +++ b/Image/LeNet5/model/2/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:591f24dee3ad45ab947c200253fdf50feeda43e352cb50d8886664409f06611e +size 80000128 diff --git a/Image/LeNet5/model/2/epoch20/subject_model.pth b/Image/LeNet5/model/2/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f26b1d236ed65830f30723260bdc0fa4a5c154ef --- /dev/null +++ b/Image/LeNet5/model/2/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:2406e39e617b236c8e1ff41feb896bcb4a95a1da85a01aa46ae7ad5a1dfd745b +size 252044 diff --git a/Image/LeNet5/model/2/epoch21/embeddings.npy b/Image/LeNet5/model/2/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..5775cbade12d7d66fb964df9a903d094056df56d --- /dev/null +++ b/Image/LeNet5/model/2/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be56e6a11fd6c0cb4bfbc9923fd6a3891c7ba3fed50add891437dfb96db50e87 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch21/subject_model.pth b/Image/LeNet5/model/2/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..657731ad89ed8928f8007464d597440abb8afcee --- /dev/null +++ b/Image/LeNet5/model/2/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f93e6552ab2f39eb954774a5556fb2859d475dc50d1a30d92e8377dcaf648ae +size 252044 diff --git a/Image/LeNet5/model/2/epoch22/embeddings.npy b/Image/LeNet5/model/2/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6734e2ed1ea71476afa17f4ba849234edaf9b975 --- /dev/null +++ b/Image/LeNet5/model/2/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4df548e9d80e5e0dff9551780e1571f5eb66982f1b3c191c47504776ec54013 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch22/subject_model.pth b/Image/LeNet5/model/2/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4d3824ee88320073544ba5c557aec49142f417bb --- /dev/null +++ b/Image/LeNet5/model/2/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51317a15c948a8c5297b0d7ddf2bbdfa35ef0d099408875168243914bd74fab4 +size 252044 diff --git a/Image/LeNet5/model/2/epoch23/embeddings.npy b/Image/LeNet5/model/2/epoch23/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..ef902f4b212a7fcbafe713076793ad52238331b1 --- /dev/null +++ b/Image/LeNet5/model/2/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ffa45c90faefdcd630a721b37fd18f4e94e2618f1846b760b33c19b0c7f9487e +size 80000128 diff --git a/Image/LeNet5/model/2/epoch23/subject_model.pth b/Image/LeNet5/model/2/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..31ccbaf4682571433ef1fd5f2ca32a8d396eb45d --- /dev/null +++ b/Image/LeNet5/model/2/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ff45fe876347a12d7c5203f180f9a8f0de62f1a0742dcf4f5d39b9a4a44e326 +size 252044 diff --git a/Image/LeNet5/model/2/epoch24/embeddings.npy b/Image/LeNet5/model/2/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..deba09d5f562a89c0db7306efea81da8cdd958a9 --- /dev/null +++ b/Image/LeNet5/model/2/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a3cc62f5e26bea472d8cf20aa5edf62e322cd8d97dce36f6aecc06cb6354521 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch24/subject_model.pth b/Image/LeNet5/model/2/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cbb158ef7cac15a4112f3655b0865a7ebb27f432 --- /dev/null +++ b/Image/LeNet5/model/2/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf733caaecbb5e2822ef36405a58f3a64af02497dc30e23b7ed85f4b46167c3f +size 252044 diff --git a/Image/LeNet5/model/2/epoch25/embeddings.npy b/Image/LeNet5/model/2/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4d0369d9dbda827d6ab1592c2fe39fda6558f3b8 --- /dev/null +++ b/Image/LeNet5/model/2/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:e1386e631daaad155cd27cbbfb4eef40fc08d18e207d6b563e28faece44cc6fe +size 80000128 diff --git a/Image/LeNet5/model/2/epoch25/subject_model.pth b/Image/LeNet5/model/2/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..241287182115491dfdb3837d19038efe4046b229 --- /dev/null +++ b/Image/LeNet5/model/2/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a29cc05c86141079c5169d3b6760c46bce23e370caa13ba4269167ece594ed92 +size 252044 diff --git a/Image/LeNet5/model/2/epoch3/embeddings.npy b/Image/LeNet5/model/2/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e8e22702df6f763c8b666ae6b71085208aa337fe --- /dev/null +++ b/Image/LeNet5/model/2/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bae57e293cc1ccaca432efbf20e60ee8f6b94717921d320eead79b76da07d469 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch3/subject_model.pth b/Image/LeNet5/model/2/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c7d72090bbbeb9c8a5b40d81829b50849d7b80da --- /dev/null +++ b/Image/LeNet5/model/2/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8e0e05911bcc21b0233df3ac15bc90e862fe3f3e2e83d5e610b78b3d4d27ea5 +size 252044 diff --git a/Image/LeNet5/model/2/epoch4/embeddings.npy b/Image/LeNet5/model/2/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f80408625cb114ef2598c158bd5a35956d3c59e2 --- /dev/null +++ b/Image/LeNet5/model/2/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a75e67238121ba103b0caeadaa7dad1320ded9d3beca910288ea3f3606ab9ee +size 80000128 diff --git a/Image/LeNet5/model/2/epoch4/subject_model.pth b/Image/LeNet5/model/2/epoch4/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..1935db4838c28f5a172914fe6dd17c89de3b5d67 --- /dev/null +++ b/Image/LeNet5/model/2/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff51437e4c0306aca5607d6edb62402a6068913d0151918310d10f267dede973 +size 252044 diff --git a/Image/LeNet5/model/2/epoch5/embeddings.npy b/Image/LeNet5/model/2/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d010240d1f39e785c4422a3bbc9d2359d35a28ab --- /dev/null +++ b/Image/LeNet5/model/2/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e797d6a16feedd366df33fd999ef4bc2dd080e4bbd84fbeb7230ca0d43590309 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch5/subject_model.pth b/Image/LeNet5/model/2/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9cc7222a366056ef5fe0a565292f36fbe583dcc8 --- /dev/null +++ b/Image/LeNet5/model/2/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ec0c6dd5252ed4d10a71810ade6d511b5c2ad8af19935c704bbe487a681b839 +size 252044 diff --git a/Image/LeNet5/model/2/epoch6/embeddings.npy b/Image/LeNet5/model/2/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0c97de135a9f894c65ac344cbdd7834e653edf5c --- /dev/null +++ b/Image/LeNet5/model/2/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cb5c757a8efe76e3672707af30ec464cb9f6c98472fadffab8ea5ca62aa5835e +size 80000128 diff --git a/Image/LeNet5/model/2/epoch6/subject_model.pth b/Image/LeNet5/model/2/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3551d3b7fd814db62e3679e89bc61bd9230cb6af --- /dev/null +++ b/Image/LeNet5/model/2/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:e87f50103d814b38532571c68bf64e6adc5f52c2adf228b3b5d2b7fdf8dbfd5c +size 252044 diff --git a/Image/LeNet5/model/2/epoch7/embeddings.npy b/Image/LeNet5/model/2/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9a49f5dee069b67423ae56fa06a6e5f2bb63f95b --- /dev/null +++ b/Image/LeNet5/model/2/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:542c5784ddd457f9a3b7379b5e33a51450b649c3a38fa7b3653535c272f02acb +size 80000128 diff --git a/Image/LeNet5/model/2/epoch7/subject_model.pth b/Image/LeNet5/model/2/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..068293422710ddc4c6834378da6b9c655a1444ca --- /dev/null +++ b/Image/LeNet5/model/2/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a83ec407d66e43f9df7451839ac903c79128953bff23f7e8c286375a81ee5dfb +size 252044 diff --git a/Image/LeNet5/model/2/epoch8/embeddings.npy b/Image/LeNet5/model/2/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..93f2ea67e699b9975d93ec93c483626d2b713cd0 --- /dev/null +++ b/Image/LeNet5/model/2/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1bf9b59d9207ab6212ace2141226fdfcd9736b5ad217722e850d1c019b0eba28 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch8/subject_model.pth b/Image/LeNet5/model/2/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2aa7deac5829e5aceae4dca642f6f5c7ca63c4d3 --- /dev/null +++ b/Image/LeNet5/model/2/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3321c16d952aa8d60c76792a59aca6e712cf255d084c1220c0686671c11b8409 +size 252044 diff --git a/Image/LeNet5/model/2/epoch9/embeddings.npy b/Image/LeNet5/model/2/epoch9/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..b561a949407254600e4c07786682a2343e5bfb14 --- /dev/null +++ b/Image/LeNet5/model/2/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:34add850b3501d7d8b2c66775f4beee57a28b988c19313464291ed42af8c3c65 +size 80000128 diff --git a/Image/LeNet5/model/2/epoch9/subject_model.pth b/Image/LeNet5/model/2/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f3ede472bcfe4dffde49f68253fa38d7b31e298f --- /dev/null +++ b/Image/LeNet5/model/2/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e53ba82ee69b6da1b213961c61512e68214a0405baf12ecdaab1ec64bffe0e9 +size 252044 diff --git a/Image/LeNet5/model/2/layer_info.json b/Image/LeNet5/model/2/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..2d1322168bdc6654321e772b2653b479352060b7 --- /dev/null +++ b/Image/LeNet5/model/2/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "conv2", "dim": 400} \ No newline at end of file diff --git a/Image/MobileNetv1/code/backdoor_train.log b/Image/MobileNetv1/code/backdoor_train.log new file mode 100644 index 0000000000000000000000000000000000000000..63b87aad080331ff3f3055081d4dd5eeefd0b6c8 --- /dev/null +++ b/Image/MobileNetv1/code/backdoor_train.log @@ -0,0 +1,253 @@ +2025-03-14 19:09:54,932 - train - INFO - 开始训练 mobilenetv1 +2025-03-14 19:09:54,933 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:3 +2025-03-14 19:09:55,720 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.287 | Acc: 14.84% +2025-03-14 19:09:58,172 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.247 | Acc: 22.06% +2025-03-14 19:10:00,409 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.019 | Acc: 27.43% +2025-03-14 19:10:02,629 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.917 | Acc: 30.28% +2025-03-14 19:10:06,210 - train - INFO - Epoch: 1 | Test Loss: 1.615 | Test Acc: 40.02% +2025-03-14 19:10:06,614 - train - INFO - Epoch: 2 | Batch: 
0 | Loss: 1.759 | Acc: 34.38% +2025-03-14 19:10:08,951 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.579 | Acc: 42.30% +2025-03-14 19:10:11,186 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.559 | Acc: 43.21% +2025-03-14 19:10:13,281 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.532 | Acc: 44.51% +2025-03-14 19:10:16,823 - train - INFO - Epoch: 2 | Test Loss: 1.292 | Test Acc: 56.03% +2025-03-14 19:10:27,914 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.315 | Acc: 57.81% +2025-03-14 19:10:30,248 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.355 | Acc: 51.59% +2025-03-14 19:10:32,468 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.337 | Acc: 52.43% +2025-03-14 19:10:34,818 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.315 | Acc: 53.14% +2025-03-14 19:10:38,558 - train - INFO - Epoch: 3 | Test Loss: 1.169 | Test Acc: 61.16% +2025-03-14 19:10:38,776 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.152 | Acc: 60.16% +2025-03-14 19:10:41,135 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.196 | Acc: 57.92% +2025-03-14 19:10:43,449 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.197 | Acc: 57.73% +2025-03-14 19:10:45,787 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.185 | Acc: 58.38% +2025-03-14 19:10:49,233 - train - INFO - Epoch: 4 | Test Loss: 1.204 | Test Acc: 58.96% +2025-03-14 19:11:03,693 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.159 | Acc: 57.03% +2025-03-14 19:11:05,965 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.054 | Acc: 63.81% +2025-03-14 19:11:08,112 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.034 | Acc: 64.64% +2025-03-14 19:11:10,338 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.015 | Acc: 65.28% +2025-03-14 19:11:13,934 - train - INFO - Epoch: 5 | Test Loss: 1.095 | Test Acc: 62.68% +2025-03-14 19:11:14,109 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 0.886 | Acc: 67.97% +2025-03-14 19:11:16,464 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 0.905 | Acc: 68.84% +2025-03-14 19:11:18,737 - train - INFO - Epoch: 6 | 
Batch: 200 | Loss: 0.907 | Acc: 68.79% +2025-03-14 19:11:20,976 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 0.896 | Acc: 69.40% +2025-03-14 19:11:24,630 - train - INFO - Epoch: 6 | Test Loss: 0.870 | Test Acc: 70.20% +2025-03-14 19:11:37,406 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 0.750 | Acc: 75.78% +2025-03-14 19:11:39,909 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 0.837 | Acc: 71.81% +2025-03-14 19:11:42,748 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 0.830 | Acc: 72.01% +2025-03-14 19:11:45,073 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 0.832 | Acc: 72.02% +2025-03-14 19:11:48,754 - train - INFO - Epoch: 7 | Test Loss: 1.018 | Test Acc: 66.08% +2025-03-14 19:11:48,979 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 0.791 | Acc: 76.56% +2025-03-14 19:11:51,445 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.814 | Acc: 72.74% +2025-03-14 19:11:53,792 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.802 | Acc: 73.14% +2025-03-14 19:11:56,185 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.798 | Acc: 73.27% +2025-03-14 19:11:59,650 - train - INFO - Epoch: 8 | Test Loss: 1.564 | Test Acc: 51.84% +2025-03-14 19:12:20,707 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.873 | Acc: 70.31% +2025-03-14 19:12:23,001 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.795 | Acc: 73.68% +2025-03-14 19:12:25,349 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.782 | Acc: 74.05% +2025-03-14 19:12:27,898 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.778 | Acc: 74.33% +2025-03-14 19:12:31,642 - train - INFO - Epoch: 9 | Test Loss: 0.997 | Test Acc: 67.90% +2025-03-14 19:12:31,831 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.761 | Acc: 72.66% +2025-03-14 19:12:34,141 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.760 | Acc: 74.59% +2025-03-14 19:12:36,393 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.768 | Acc: 74.46% +2025-03-14 19:12:38,757 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.759 | Acc: 74.91% +2025-03-14 19:12:42,378 - train - INFO - 
Epoch: 10 | Test Loss: 0.829 | Test Acc: 71.79% +2025-03-14 19:12:54,320 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.830 | Acc: 74.22% +2025-03-14 19:12:56,767 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.755 | Acc: 74.98% +2025-03-14 19:12:59,227 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.750 | Acc: 75.11% +2025-03-14 19:13:01,494 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.747 | Acc: 75.20% +2025-03-14 19:13:05,433 - train - INFO - Epoch: 11 | Test Loss: 0.930 | Test Acc: 68.93% +2025-03-14 19:13:05,678 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.874 | Acc: 71.09% +2025-03-14 19:13:08,001 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.735 | Acc: 75.70% +2025-03-14 19:13:10,287 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.739 | Acc: 75.56% +2025-03-14 19:13:12,549 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.740 | Acc: 75.46% +2025-03-14 19:13:16,175 - train - INFO - Epoch: 12 | Test Loss: 0.810 | Test Acc: 73.19% +2025-03-14 19:13:31,855 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.576 | Acc: 78.12% +2025-03-14 19:13:34,421 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.723 | Acc: 76.30% +2025-03-14 19:13:37,400 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.739 | Acc: 75.64% +2025-03-14 19:13:40,123 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.747 | Acc: 75.36% +2025-03-14 19:13:44,947 - train - INFO - Epoch: 13 | Test Loss: 0.765 | Test Acc: 74.70% +2025-03-14 19:13:45,257 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.848 | Acc: 71.88% +2025-03-14 19:13:47,794 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.730 | Acc: 75.41% +2025-03-14 19:13:50,315 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.742 | Acc: 74.98% +2025-03-14 19:13:53,154 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.736 | Acc: 75.30% +2025-03-14 19:13:57,281 - train - INFO - Epoch: 14 | Test Loss: 0.931 | Test Acc: 69.03% +2025-03-14 19:14:11,225 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.787 | Acc: 71.88% +2025-03-14 
19:14:13,683 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 0.730 | Acc: 76.38% +2025-03-14 19:14:16,179 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.734 | Acc: 75.68% +2025-03-14 19:14:18,546 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.733 | Acc: 75.61% +2025-03-14 19:14:22,390 - train - INFO - Epoch: 15 | Test Loss: 0.856 | Test Acc: 71.84% +2025-03-14 19:14:22,639 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.737 | Acc: 76.56% +2025-03-14 19:14:25,041 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.735 | Acc: 75.31% +2025-03-14 19:14:27,479 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.726 | Acc: 75.86% +2025-03-14 19:14:29,797 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.721 | Acc: 76.03% +2025-03-14 19:14:33,561 - train - INFO - Epoch: 16 | Test Loss: 0.819 | Test Acc: 71.39% +2025-03-14 19:14:46,423 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.614 | Acc: 75.78% +2025-03-14 19:14:48,596 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.709 | Acc: 76.53% +2025-03-14 19:14:50,945 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.715 | Acc: 76.33% +2025-03-14 19:14:53,181 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.717 | Acc: 76.32% +2025-03-14 19:14:56,940 - train - INFO - Epoch: 17 | Test Loss: 0.954 | Test Acc: 68.53% +2025-03-14 19:14:57,178 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.913 | Acc: 68.75% +2025-03-14 19:14:59,356 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.721 | Acc: 76.18% +2025-03-14 19:15:01,720 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.720 | Acc: 76.11% +2025-03-14 19:15:04,039 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.715 | Acc: 76.38% +2025-03-14 19:15:07,794 - train - INFO - Epoch: 18 | Test Loss: 0.789 | Test Acc: 74.30% +2025-03-14 19:15:22,986 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.667 | Acc: 78.91% +2025-03-14 19:15:25,226 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.687 | Acc: 77.36% +2025-03-14 19:15:27,499 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.704 
| Acc: 76.83% +2025-03-14 19:15:29,768 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 0.704 | Acc: 76.66% +2025-03-14 19:15:33,389 - train - INFO - Epoch: 19 | Test Loss: 0.824 | Test Acc: 72.96% +2025-03-14 19:15:33,636 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.968 | Acc: 71.09% +2025-03-14 19:15:35,904 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.699 | Acc: 77.06% +2025-03-14 19:15:38,257 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.688 | Acc: 77.30% +2025-03-14 19:15:40,549 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.696 | Acc: 76.91% +2025-03-14 19:15:44,675 - train - INFO - Epoch: 20 | Test Loss: 0.836 | Test Acc: 71.67% +2025-03-14 19:15:57,342 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.696 | Acc: 76.56% +2025-03-14 19:15:59,612 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.693 | Acc: 76.96% +2025-03-14 19:16:01,819 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.703 | Acc: 76.73% +2025-03-14 19:16:03,948 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.703 | Acc: 76.70% +2025-03-14 19:16:07,620 - train - INFO - Epoch: 21 | Test Loss: 1.056 | Test Acc: 65.78% +2025-03-14 19:16:07,894 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.583 | Acc: 82.81% +2025-03-14 19:16:10,453 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.667 | Acc: 77.94% +2025-03-14 19:16:13,181 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.679 | Acc: 77.41% +2025-03-14 19:16:15,606 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.687 | Acc: 77.18% +2025-03-14 19:16:19,360 - train - INFO - Epoch: 22 | Test Loss: 0.966 | Test Acc: 68.80% +2025-03-14 19:16:35,332 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.798 | Acc: 75.78% +2025-03-14 19:16:38,153 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.688 | Acc: 76.77% +2025-03-14 19:16:41,959 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.687 | Acc: 76.87% +2025-03-14 19:16:44,470 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.701 | Acc: 76.52% +2025-03-14 19:16:48,354 - train - INFO - Epoch: 23 
| Test Loss: 0.811 | Test Acc: 72.64% +2025-03-14 19:16:48,619 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.537 | Acc: 81.25% +2025-03-14 19:16:50,852 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.677 | Acc: 77.72% +2025-03-14 19:16:53,012 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.688 | Acc: 77.31% +2025-03-14 19:16:55,183 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.694 | Acc: 77.04% +2025-03-14 19:16:58,919 - train - INFO - Epoch: 24 | Test Loss: 1.168 | Test Acc: 62.89% +2025-03-14 19:17:12,298 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.635 | Acc: 78.91% +2025-03-14 19:17:14,676 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.699 | Acc: 76.91% +2025-03-14 19:17:17,038 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.704 | Acc: 76.61% +2025-03-14 19:17:19,291 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.701 | Acc: 76.74% +2025-03-14 19:17:22,782 - train - INFO - Epoch: 25 | Test Loss: 0.900 | Test Acc: 70.88% +2025-03-14 19:17:23,024 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.771 | Acc: 78.91% +2025-03-14 19:17:25,266 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.689 | Acc: 77.17% +2025-03-14 19:17:27,505 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.686 | Acc: 77.43% +2025-03-14 19:17:29,738 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.681 | Acc: 77.55% +2025-03-14 19:17:33,357 - train - INFO - Epoch: 26 | Test Loss: 0.887 | Test Acc: 69.41% +2025-03-14 19:17:47,729 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.935 | Acc: 67.97% +2025-03-14 19:17:50,012 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.695 | Acc: 77.04% +2025-03-14 19:17:52,314 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.691 | Acc: 77.09% +2025-03-14 19:17:54,513 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.687 | Acc: 77.22% +2025-03-14 19:17:58,204 - train - INFO - Epoch: 27 | Test Loss: 0.839 | Test Acc: 71.77% +2025-03-14 19:17:58,457 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.611 | Acc: 77.34% +2025-03-14 19:18:01,634 - 
train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.684 | Acc: 77.61% +2025-03-14 19:18:04,239 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.687 | Acc: 77.29% +2025-03-14 19:18:06,548 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.683 | Acc: 77.63% +2025-03-14 19:18:10,199 - train - INFO - Epoch: 28 | Test Loss: 0.878 | Test Acc: 72.20% +2025-03-14 19:18:22,831 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.727 | Acc: 75.78% +2025-03-14 19:18:25,111 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.653 | Acc: 78.39% +2025-03-14 19:18:27,317 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.670 | Acc: 77.74% +2025-03-14 19:18:29,531 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.674 | Acc: 77.64% +2025-03-14 19:18:32,945 - train - INFO - Epoch: 29 | Test Loss: 0.713 | Test Acc: 75.58% +2025-03-14 19:18:33,192 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.707 | Acc: 75.78% +2025-03-14 19:18:35,432 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.668 | Acc: 77.89% +2025-03-14 19:18:37,646 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.676 | Acc: 77.69% +2025-03-14 19:18:39,890 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.678 | Acc: 77.52% +2025-03-14 19:18:43,361 - train - INFO - Epoch: 30 | Test Loss: 0.910 | Test Acc: 70.57% +2025-03-14 19:18:56,375 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.672 | Acc: 75.00% +2025-03-14 19:18:58,633 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.678 | Acc: 77.97% +2025-03-14 19:19:00,903 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.674 | Acc: 77.78% +2025-03-14 19:19:03,229 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.676 | Acc: 77.57% +2025-03-14 19:19:06,827 - train - INFO - Epoch: 31 | Test Loss: 0.711 | Test Acc: 76.07% +2025-03-14 19:19:07,048 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.726 | Acc: 75.78% +2025-03-14 19:19:09,285 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.674 | Acc: 77.79% +2025-03-14 19:19:11,605 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.674 | Acc: 77.73% 
+2025-03-14 19:19:14,191 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 0.674 | Acc: 77.61% +2025-03-14 19:19:17,846 - train - INFO - Epoch: 32 | Test Loss: 0.804 | Test Acc: 73.06% +2025-03-14 19:19:32,515 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.587 | Acc: 81.25% +2025-03-14 19:19:34,852 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.667 | Acc: 78.10% +2025-03-14 19:19:37,018 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.678 | Acc: 77.65% +2025-03-14 19:19:39,265 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.672 | Acc: 77.90% +2025-03-14 19:19:43,032 - train - INFO - Epoch: 33 | Test Loss: 0.826 | Test Acc: 72.40% +2025-03-14 19:19:43,270 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.642 | Acc: 77.34% +2025-03-14 19:19:45,527 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.669 | Acc: 78.19% +2025-03-14 19:19:47,866 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.678 | Acc: 77.76% +2025-03-14 19:19:50,287 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.670 | Acc: 77.87% +2025-03-14 19:19:53,948 - train - INFO - Epoch: 34 | Test Loss: 0.979 | Test Acc: 68.46% +2025-03-14 19:20:06,941 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.587 | Acc: 82.03% +2025-03-14 19:20:09,207 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.667 | Acc: 78.11% +2025-03-14 19:20:11,427 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.680 | Acc: 77.64% +2025-03-14 19:20:13,889 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.675 | Acc: 77.67% +2025-03-14 19:20:17,710 - train - INFO - Epoch: 35 | Test Loss: 0.681 | Test Acc: 77.46% +2025-03-14 19:20:17,961 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.571 | Acc: 83.59% +2025-03-14 19:20:20,297 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.670 | Acc: 77.85% +2025-03-14 19:20:22,840 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.671 | Acc: 77.67% +2025-03-14 19:20:25,126 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.681 | Acc: 77.38% +2025-03-14 19:20:28,970 - train - INFO - Epoch: 36 | Test Loss: 
0.678 | Test Acc: 77.77% +2025-03-14 19:20:43,295 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.684 | Acc: 81.25% +2025-03-14 19:20:45,944 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.665 | Acc: 77.61% +2025-03-14 19:20:48,513 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.666 | Acc: 77.82% +2025-03-14 19:20:51,045 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.669 | Acc: 77.77% +2025-03-14 19:20:55,419 - train - INFO - Epoch: 37 | Test Loss: 0.651 | Test Acc: 77.25% +2025-03-14 19:20:55,696 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.694 | Acc: 75.78% +2025-03-14 19:20:58,163 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.668 | Acc: 78.10% +2025-03-14 19:21:00,532 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.674 | Acc: 77.93% +2025-03-14 19:21:02,807 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.670 | Acc: 77.89% +2025-03-14 19:21:06,493 - train - INFO - Epoch: 38 | Test Loss: 0.765 | Test Acc: 74.16% +2025-03-14 19:21:20,685 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.699 | Acc: 82.03% +2025-03-14 19:21:23,093 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.645 | Acc: 78.88% +2025-03-14 19:21:25,393 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.646 | Acc: 78.84% +2025-03-14 19:21:27,649 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.654 | Acc: 78.62% +2025-03-14 19:21:31,887 - train - INFO - Epoch: 39 | Test Loss: 0.927 | Test Acc: 70.27% +2025-03-14 19:21:32,129 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.591 | Acc: 79.69% +2025-03-14 19:21:34,380 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.673 | Acc: 77.82% +2025-03-14 19:21:36,784 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.673 | Acc: 77.90% +2025-03-14 19:21:39,293 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.671 | Acc: 77.82% +2025-03-14 19:21:43,148 - train - INFO - Epoch: 40 | Test Loss: 0.775 | Test Acc: 73.66% +2025-03-14 19:21:55,919 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.718 | Acc: 74.22% +2025-03-14 19:21:58,347 - train - INFO - 
Epoch: 41 | Batch: 100 | Loss: 0.662 | Acc: 78.08% +2025-03-14 19:22:00,670 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.675 | Acc: 77.63% +2025-03-14 19:22:03,094 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.661 | Acc: 78.06% +2025-03-14 19:22:06,604 - train - INFO - Epoch: 41 | Test Loss: 0.766 | Test Acc: 74.54% +2025-03-14 19:22:06,872 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.607 | Acc: 80.47% +2025-03-14 19:22:09,147 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.685 | Acc: 77.13% +2025-03-14 19:22:11,404 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.681 | Acc: 77.10% +2025-03-14 19:22:13,667 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.670 | Acc: 77.46% +2025-03-14 19:22:17,327 - train - INFO - Epoch: 42 | Test Loss: 0.736 | Test Acc: 74.75% +2025-03-14 19:22:28,916 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.611 | Acc: 81.25% +2025-03-14 19:22:31,177 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.655 | Acc: 78.44% +2025-03-14 19:22:33,409 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.670 | Acc: 77.85% +2025-03-14 19:22:35,741 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.669 | Acc: 77.98% +2025-03-14 19:22:39,524 - train - INFO - Epoch: 43 | Test Loss: 1.170 | Test Acc: 64.85% +2025-03-14 19:22:39,772 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.706 | Acc: 74.22% +2025-03-14 19:22:42,140 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.664 | Acc: 78.23% +2025-03-14 19:22:44,572 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.657 | Acc: 78.47% +2025-03-14 19:22:46,949 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.656 | Acc: 78.45% +2025-03-14 19:22:50,567 - train - INFO - Epoch: 44 | Test Loss: 1.059 | Test Acc: 64.82% +2025-03-14 19:23:06,353 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.930 | Acc: 68.75% +2025-03-14 19:23:09,089 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.655 | Acc: 78.57% +2025-03-14 19:23:11,689 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.655 | Acc: 78.47% +2025-03-14 
19:23:14,039 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 0.656 | Acc: 78.41% +2025-03-14 19:23:17,636 - train - INFO - Epoch: 45 | Test Loss: 0.684 | Test Acc: 76.39% +2025-03-14 19:23:17,964 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.779 | Acc: 76.56% +2025-03-14 19:23:20,239 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.654 | Acc: 78.95% +2025-03-14 19:23:22,457 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.660 | Acc: 78.28% +2025-03-14 19:23:24,820 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.661 | Acc: 78.31% +2025-03-14 19:23:28,367 - train - INFO - Epoch: 46 | Test Loss: 0.869 | Test Acc: 71.58% +2025-03-14 19:23:45,584 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.885 | Acc: 75.00% +2025-03-14 19:23:47,800 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.659 | Acc: 78.40% +2025-03-14 19:23:50,426 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.657 | Acc: 78.29% +2025-03-14 19:23:52,593 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.657 | Acc: 78.39% +2025-03-14 19:23:56,224 - train - INFO - Epoch: 47 | Test Loss: 0.772 | Test Acc: 74.50% +2025-03-14 19:23:56,449 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.706 | Acc: 79.69% +2025-03-14 19:23:58,661 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.649 | Acc: 78.47% +2025-03-14 19:24:00,936 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.659 | Acc: 78.07% +2025-03-14 19:24:03,223 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.658 | Acc: 78.10% +2025-03-14 19:24:06,966 - train - INFO - Epoch: 48 | Test Loss: 1.209 | Test Acc: 61.32% +2025-03-14 19:24:22,516 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.631 | Acc: 79.69% +2025-03-14 19:24:24,748 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.648 | Acc: 78.53% +2025-03-14 19:24:27,130 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.646 | Acc: 78.70% +2025-03-14 19:24:29,448 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.647 | Acc: 78.50% +2025-03-14 19:24:33,064 - train - INFO - Epoch: 49 | Test Loss: 0.745 | Test 
Acc: 75.76% +2025-03-14 19:24:33,358 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 0.614 | Acc: 82.03% +2025-03-14 19:24:35,647 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 0.643 | Acc: 78.58% +2025-03-14 19:24:37,770 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 0.652 | Acc: 78.14% +2025-03-14 19:24:40,021 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 0.655 | Acc: 78.17% +2025-03-14 19:24:43,596 - train - INFO - Epoch: 50 | Test Loss: 0.763 | Test Acc: 74.79% +2025-03-14 19:24:54,655 - train - INFO - 训练完成! diff --git a/Image/MobileNetv1/code/model.py b/Image/MobileNetv1/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..215a894aec5cea342787d4f55ef64bc87ea9b0c6 --- /dev/null +++ b/Image/MobileNetv1/code/model.py @@ -0,0 +1,163 @@ +''' +MobileNetv1 in PyTorch. + +论文: "MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications" +参考: https://arxiv.org/abs/1704.04861 + +主要特点: +1. 使用深度可分离卷积(Depthwise Separable Convolution)减少参数量和计算量 +2. 引入宽度乘子(Width Multiplier)和分辨率乘子(Resolution Multiplier)进一步压缩模型 +3. 适用于移动设备和嵌入式设备的轻量级CNN架构 +''' + +import torch +import torch.nn as nn + + +class Block(nn.Module): + '''深度可分离卷积块 (Depthwise Separable Convolution Block) + + 包含: + 1. 深度卷积(Depthwise Conv): 对每个通道单独进行空间卷积 + 2. 
逐点卷积(Pointwise Conv): 1x1卷积实现通道混合 + + Args: + in_channels: 输入通道数 + out_channels: 输出通道数 + stride: 卷积步长 + ''' + def __init__(self, in_channels, out_channels, stride=1): + super(Block, self).__init__() + + # 深度卷积 - 每个通道单独进行3x3卷积 + self.conv1 = nn.Conv2d( + in_channels=in_channels, + out_channels=in_channels, + kernel_size=3, + stride=stride, + padding=1, + groups=in_channels, # groups=in_channels 即为深度可分离卷积 + bias=False + ) + self.bn1 = nn.BatchNorm2d(in_channels) + self.relu1 = nn.ReLU(inplace=True) + + # 逐点卷积 - 1x1卷积用于通道混合 + self.conv2 = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=1, + stride=1, + padding=0, + bias=False + ) + self.bn2 = nn.BatchNorm2d(out_channels) + self.relu2 = nn.ReLU(inplace=True) + + def forward(self, x): + # 深度卷积 + x = self.conv1(x) + x = self.bn1(x) + x = self.relu1(x) + + # 逐点卷积 + x = self.conv2(x) + x = self.bn2(x) + x = self.relu2(x) + return x + + +class MobileNet(nn.Module): + '''MobileNet v1网络 + + Args: + num_classes: 分类数量 + alpha: 宽度乘子,用于控制网络宽度(默认1.0) + beta: 分辨率乘子,用于控制输入分辨率(默认1.0) + init_weights: 是否初始化权重 + ''' + # 网络配置: (输出通道数, 步长),步长默认为1 + cfg = [64, (128,2), 128, (256,2), 256, (512,2), + 512, 512, 512, 512, 512, (1024,2), 1024] + + def __init__(self, num_classes=10, alpha=1.0, beta=1.0, init_weights=True): + super(MobileNet, self).__init__() + + # 第一层标准卷积 + self.conv1 = nn.Sequential( + nn.Conv2d(3, 32, kernel_size=3, stride=1, bias=False), + nn.BatchNorm2d(32), + nn.ReLU(inplace=True) + ) + + # 深度可分离卷积层 + self.layers = self._make_layers(in_channels=32) + + # 全局平均池化和分类器 + self.avg = nn.AdaptiveAvgPool2d(1) # 自适应平均池化,输出大小为1x1 + self.linear = nn.Linear(1024, num_classes) + + # 初始化权重 + if init_weights: + self._initialize_weights() + + def _make_layers(self, in_channels): + '''构建深度可分离卷积层 + + Args: + in_channels: 输入通道数 + ''' + layers = [] + for x in self.cfg: + out_channels = x if isinstance(x, int) else x[0] + stride = 1 if isinstance(x, int) else x[1] + layers.append(Block(in_channels, out_channels, 
stride)) + in_channels = out_channels + return nn.Sequential(*layers) + + def forward(self, x): + # 标准卷积 + x = self.conv1(x) + + # 深度可分离卷积层 + x = self.layers(x) + + # 全局平均池化和分类器 + x = self.avg(x) + x = x.view(x.size(0), -1) + x = self.linear(x) + return x + + def _initialize_weights(self): + '''初始化模型权重''' + for m in self.modules(): + if isinstance(m, nn.Conv2d): + # 使用kaiming初始化卷积层 + nn.init.kaiming_normal_(m.weight, mode='fan_out') + if m.bias is not None: + nn.init.zeros_(m.bias) + elif isinstance(m, nn.BatchNorm2d): + # 初始化BN层 + nn.init.ones_(m.weight) + nn.init.zeros_(m.bias) + elif isinstance(m, nn.Linear): + # 初始化全连接层 + nn.init.normal_(m.weight, 0, 0.01) + nn.init.zeros_(m.bias) + + +def test(): + """测试函数""" + net = MobileNet() + x = torch.randn(2, 3, 32, 32) + y = net(x) + print(y.size()) + + # 打印模型结构 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net, (2, 3, 32, 32)) + +if __name__ == '__main__': + test() \ No newline at end of file diff --git a/Image/MobileNetv1/code/train.log b/Image/MobileNetv1/code/train.log new file mode 100644 index 0000000000000000000000000000000000000000..5946a89fa5d75bfc8a4375654182b955b9f7c4a5 --- /dev/null +++ b/Image/MobileNetv1/code/train.log @@ -0,0 +1,253 @@ +2025-03-14 19:09:52,411 - train - INFO - 开始训练 mobilenetv1 +2025-03-14 19:09:52,412 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:3 +2025-03-14 19:09:53,266 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.315 | Acc: 9.38% +2025-03-14 19:09:55,666 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.226 | Acc: 22.79% +2025-03-14 19:09:58,252 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 1.966 | Acc: 29.56% +2025-03-14 19:10:00,487 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.834 | Acc: 33.79% +2025-03-14 19:10:04,050 - train - INFO - Epoch: 1 | Test Loss: 1.396 | Test Acc: 49.59% +2025-03-14 19:10:04,535 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.433 | Acc: 46.09% +2025-03-14 19:10:06,928 - 
train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.386 | Acc: 50.38% +2025-03-14 19:10:09,156 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.349 | Acc: 51.67% +2025-03-14 19:10:11,286 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.318 | Acc: 52.81% +2025-03-14 19:10:14,730 - train - INFO - Epoch: 2 | Test Loss: 1.191 | Test Acc: 57.46% +2025-03-14 19:10:26,131 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.127 | Acc: 59.38% +2025-03-14 19:10:28,403 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.119 | Acc: 60.50% +2025-03-14 19:10:30,681 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.100 | Acc: 61.01% +2025-03-14 19:10:32,967 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.080 | Acc: 61.67% +2025-03-14 19:10:36,884 - train - INFO - Epoch: 3 | Test Loss: 1.027 | Test Acc: 64.31% +2025-03-14 19:10:37,134 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 0.911 | Acc: 65.62% +2025-03-14 19:10:39,401 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 0.922 | Acc: 67.82% +2025-03-14 19:10:41,845 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 0.921 | Acc: 68.05% +2025-03-14 19:10:44,179 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 0.911 | Acc: 68.51% +2025-03-14 19:10:47,847 - train - INFO - Epoch: 4 | Test Loss: 0.893 | Test Acc: 69.71% +2025-03-14 19:11:00,025 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 0.853 | Acc: 71.09% +2025-03-14 19:11:02,275 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 0.833 | Acc: 71.18% +2025-03-14 19:11:04,597 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 0.815 | Acc: 71.77% +2025-03-14 19:11:06,880 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 0.815 | Acc: 71.98% +2025-03-14 19:11:10,432 - train - INFO - Epoch: 5 | Test Loss: 0.900 | Test Acc: 70.22% +2025-03-14 19:11:10,648 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 0.720 | Acc: 76.56% +2025-03-14 19:11:12,878 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 0.763 | Acc: 73.74% +2025-03-14 19:11:15,152 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 0.757 | Acc: 74.00% +2025-03-14 
19:11:17,487 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 0.755 | Acc: 74.00% +2025-03-14 19:11:21,101 - train - INFO - Epoch: 6 | Test Loss: 0.776 | Test Acc: 74.17% +2025-03-14 19:11:33,437 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 0.654 | Acc: 75.78% +2025-03-14 19:11:36,120 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 0.734 | Acc: 74.95% +2025-03-14 19:11:38,748 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 0.737 | Acc: 74.85% +2025-03-14 19:11:41,483 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 0.731 | Acc: 74.99% +2025-03-14 19:11:45,749 - train - INFO - Epoch: 7 | Test Loss: 0.809 | Test Acc: 72.85% +2025-03-14 19:11:45,976 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 0.592 | Acc: 75.78% +2025-03-14 19:11:48,320 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.693 | Acc: 76.28% +2025-03-14 19:11:50,851 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.702 | Acc: 75.98% +2025-03-14 19:11:53,161 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.702 | Acc: 75.96% +2025-03-14 19:11:56,956 - train - INFO - Epoch: 8 | Test Loss: 1.054 | Test Acc: 64.26% +2025-03-14 19:12:08,750 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.632 | Acc: 80.47% +2025-03-14 19:12:11,380 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.683 | Acc: 77.00% +2025-03-14 19:12:13,700 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.697 | Acc: 76.47% +2025-03-14 19:12:15,791 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.694 | Acc: 76.50% +2025-03-14 19:12:18,944 - train - INFO - Epoch: 9 | Test Loss: 1.166 | Test Acc: 63.32% +2025-03-14 19:12:19,160 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.620 | Acc: 78.12% +2025-03-14 19:12:21,209 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.683 | Acc: 76.84% +2025-03-14 19:12:23,727 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.674 | Acc: 77.05% +2025-03-14 19:12:26,185 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.684 | Acc: 76.82% +2025-03-14 19:12:30,064 - train - INFO - Epoch: 10 | Test Loss: 0.677 | Test Acc: 76.82% 
+2025-03-14 19:12:42,009 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.804 | Acc: 72.66% +2025-03-14 19:12:44,567 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.678 | Acc: 77.30% +2025-03-14 19:12:47,101 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.662 | Acc: 77.78% +2025-03-14 19:12:49,296 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.667 | Acc: 77.48% +2025-03-14 19:12:53,279 - train - INFO - Epoch: 11 | Test Loss: 0.866 | Test Acc: 70.78% +2025-03-14 19:12:53,592 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.626 | Acc: 82.03% +2025-03-14 19:12:56,135 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.666 | Acc: 77.05% +2025-03-14 19:12:58,583 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.667 | Acc: 77.21% +2025-03-14 19:13:00,967 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.664 | Acc: 77.27% +2025-03-14 19:13:04,885 - train - INFO - Epoch: 12 | Test Loss: 0.848 | Test Acc: 71.45% +2025-03-14 19:13:17,833 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.646 | Acc: 77.34% +2025-03-14 19:13:20,812 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.677 | Acc: 76.84% +2025-03-14 19:13:23,635 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.683 | Acc: 76.69% +2025-03-14 19:13:26,191 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.680 | Acc: 76.85% +2025-03-14 19:13:30,593 - train - INFO - Epoch: 13 | Test Loss: 0.760 | Test Acc: 74.39% +2025-03-14 19:13:30,841 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.619 | Acc: 82.03% +2025-03-14 19:13:33,396 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.655 | Acc: 78.09% +2025-03-14 19:13:35,996 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.652 | Acc: 78.04% +2025-03-14 19:13:38,805 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.654 | Acc: 78.02% +2025-03-14 19:13:42,959 - train - INFO - Epoch: 14 | Test Loss: 0.912 | Test Acc: 70.57% +2025-03-14 19:13:55,836 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.699 | Acc: 78.12% +2025-03-14 19:13:58,506 - train - INFO - Epoch: 15 | Batch: 100 | 
Loss: 0.658 | Acc: 77.43% +2025-03-14 19:14:01,166 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.651 | Acc: 77.77% +2025-03-14 19:14:03,871 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.651 | Acc: 77.81% +2025-03-14 19:14:07,939 - train - INFO - Epoch: 15 | Test Loss: 0.857 | Test Acc: 71.50% +2025-03-14 19:14:08,186 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.714 | Acc: 74.22% +2025-03-14 19:14:10,731 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.634 | Acc: 78.77% +2025-03-14 19:14:13,222 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.631 | Acc: 78.96% +2025-03-14 19:14:15,728 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.642 | Acc: 78.45% +2025-03-14 19:14:19,572 - train - INFO - Epoch: 16 | Test Loss: 0.800 | Test Acc: 74.04% +2025-03-14 19:14:31,479 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.904 | Acc: 69.53% +2025-03-14 19:14:33,806 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.636 | Acc: 78.62% +2025-03-14 19:14:36,052 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.636 | Acc: 78.54% +2025-03-14 19:14:38,377 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.635 | Acc: 78.66% +2025-03-14 19:14:42,061 - train - INFO - Epoch: 17 | Test Loss: 0.753 | Test Acc: 75.02% +2025-03-14 19:14:42,233 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.663 | Acc: 75.78% +2025-03-14 19:14:44,554 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.627 | Acc: 78.36% +2025-03-14 19:14:46,924 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.636 | Acc: 78.22% +2025-03-14 19:14:49,130 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.635 | Acc: 78.31% +2025-03-14 19:14:52,651 - train - INFO - Epoch: 18 | Test Loss: 0.824 | Test Acc: 74.00% +2025-03-14 19:15:04,182 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.718 | Acc: 75.78% +2025-03-14 19:15:06,607 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.627 | Acc: 78.48% +2025-03-14 19:15:08,889 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.629 | Acc: 78.63% +2025-03-14 19:15:11,168 - train - INFO 
- Epoch: 19 | Batch: 300 | Loss: 0.632 | Acc: 78.60% +2025-03-14 19:15:14,679 - train - INFO - Epoch: 19 | Test Loss: 0.860 | Test Acc: 71.95% +2025-03-14 19:15:14,903 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.713 | Acc: 71.88% +2025-03-14 19:15:17,436 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.613 | Acc: 79.31% +2025-03-14 19:15:19,801 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.618 | Acc: 78.89% +2025-03-14 19:15:22,138 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.625 | Acc: 78.67% +2025-03-14 19:15:25,794 - train - INFO - Epoch: 20 | Test Loss: 0.814 | Test Acc: 72.63% +2025-03-14 19:15:37,215 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.545 | Acc: 80.47% +2025-03-14 19:15:39,531 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.618 | Acc: 79.42% +2025-03-14 19:15:42,160 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.623 | Acc: 79.14% +2025-03-14 19:15:44,548 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.621 | Acc: 79.13% +2025-03-14 19:15:48,266 - train - INFO - Epoch: 21 | Test Loss: 0.772 | Test Acc: 74.04% +2025-03-14 19:15:48,450 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.582 | Acc: 82.81% +2025-03-14 19:15:50,889 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.625 | Acc: 78.47% +2025-03-14 19:15:53,306 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.617 | Acc: 78.98% +2025-03-14 19:15:55,517 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.619 | Acc: 78.96% +2025-03-14 19:15:58,908 - train - INFO - Epoch: 22 | Test Loss: 0.841 | Test Acc: 72.71% +2025-03-14 19:16:11,475 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.794 | Acc: 75.00% +2025-03-14 19:16:14,047 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.600 | Acc: 79.59% +2025-03-14 19:16:16,528 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.609 | Acc: 79.28% +2025-03-14 19:16:18,937 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.614 | Acc: 79.13% +2025-03-14 19:16:22,747 - train - INFO - Epoch: 23 | Test Loss: 0.636 | Test Acc: 78.55% +2025-03-14 
19:16:23,032 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.638 | Acc: 73.44% +2025-03-14 19:16:25,582 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.597 | Acc: 79.85% +2025-03-14 19:16:28,077 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.613 | Acc: 79.45% +2025-03-14 19:16:30,547 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.615 | Acc: 79.25% +2025-03-14 19:16:35,247 - train - INFO - Epoch: 24 | Test Loss: 0.761 | Test Acc: 74.45% +2025-03-14 19:16:52,451 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.606 | Acc: 77.34% +2025-03-14 19:16:54,634 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.593 | Acc: 79.85% +2025-03-14 19:16:56,947 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.596 | Acc: 79.71% +2025-03-14 19:16:59,193 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.604 | Acc: 79.53% +2025-03-14 19:17:03,197 - train - INFO - Epoch: 25 | Test Loss: 0.733 | Test Acc: 75.32% +2025-03-14 19:17:03,436 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.550 | Acc: 79.69% +2025-03-14 19:17:05,700 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.611 | Acc: 79.37% +2025-03-14 19:17:08,156 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.602 | Acc: 79.71% +2025-03-14 19:17:10,854 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.607 | Acc: 79.50% +2025-03-14 19:17:14,903 - train - INFO - Epoch: 26 | Test Loss: 0.837 | Test Acc: 71.82% +2025-03-14 19:17:28,190 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.728 | Acc: 73.44% +2025-03-14 19:17:30,383 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.595 | Acc: 80.10% +2025-03-14 19:17:32,712 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.592 | Acc: 80.34% +2025-03-14 19:17:34,992 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.602 | Acc: 79.93% +2025-03-14 19:17:39,085 - train - INFO - Epoch: 27 | Test Loss: 0.940 | Test Acc: 68.79% +2025-03-14 19:17:39,382 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.579 | Acc: 78.12% +2025-03-14 19:17:41,788 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.601 | 
Acc: 79.42% +2025-03-14 19:17:43,931 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.607 | Acc: 79.28% +2025-03-14 19:17:46,181 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.608 | Acc: 79.43% +2025-03-14 19:17:50,102 - train - INFO - Epoch: 28 | Test Loss: 0.738 | Test Acc: 75.43% +2025-03-14 19:18:03,137 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.392 | Acc: 87.50% +2025-03-14 19:18:05,602 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.578 | Acc: 80.04% +2025-03-14 19:18:07,998 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.587 | Acc: 80.07% +2025-03-14 19:18:10,390 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.594 | Acc: 79.86% +2025-03-14 19:18:14,256 - train - INFO - Epoch: 29 | Test Loss: 0.868 | Test Acc: 70.16% +2025-03-14 19:18:14,469 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.523 | Acc: 82.81% +2025-03-14 19:18:16,749 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.600 | Acc: 79.69% +2025-03-14 19:18:19,110 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.597 | Acc: 79.75% +2025-03-14 19:18:21,392 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.603 | Acc: 79.53% +2025-03-14 19:18:25,247 - train - INFO - Epoch: 30 | Test Loss: 0.804 | Test Acc: 73.83% +2025-03-14 19:18:37,557 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.571 | Acc: 80.47% +2025-03-14 19:18:39,806 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.581 | Acc: 80.70% +2025-03-14 19:18:42,069 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.588 | Acc: 80.17% +2025-03-14 19:18:44,274 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.598 | Acc: 79.81% +2025-03-14 19:18:48,074 - train - INFO - Epoch: 31 | Test Loss: 0.635 | Test Acc: 77.69% +2025-03-14 19:18:48,403 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.524 | Acc: 82.03% +2025-03-14 19:18:50,787 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.593 | Acc: 79.94% +2025-03-14 19:18:53,111 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.596 | Acc: 79.82% +2025-03-14 19:18:55,304 - train - INFO - Epoch: 32 | 
Batch: 300 | Loss: 0.594 | Acc: 79.85% +2025-03-14 19:18:58,912 - train - INFO - Epoch: 32 | Test Loss: 0.803 | Test Acc: 72.32% +2025-03-14 19:19:10,428 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.504 | Acc: 84.38% +2025-03-14 19:19:12,672 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.585 | Acc: 79.99% +2025-03-14 19:19:15,130 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.591 | Acc: 80.00% +2025-03-14 19:19:17,400 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.594 | Acc: 79.90% +2025-03-14 19:19:21,074 - train - INFO - Epoch: 33 | Test Loss: 0.728 | Test Acc: 75.99% +2025-03-14 19:19:21,325 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.505 | Acc: 84.38% +2025-03-14 19:19:23,582 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.579 | Acc: 80.73% +2025-03-14 19:19:25,811 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.591 | Acc: 80.00% +2025-03-14 19:19:28,143 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.597 | Acc: 79.88% +2025-03-14 19:19:31,854 - train - INFO - Epoch: 34 | Test Loss: 0.923 | Test Acc: 70.57% +2025-03-14 19:19:44,699 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.456 | Acc: 85.16% +2025-03-14 19:19:47,040 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.579 | Acc: 80.26% +2025-03-14 19:19:49,476 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.592 | Acc: 79.92% +2025-03-14 19:19:51,834 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.590 | Acc: 79.92% +2025-03-14 19:19:55,647 - train - INFO - Epoch: 35 | Test Loss: 0.719 | Test Acc: 75.56% +2025-03-14 19:19:55,832 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.627 | Acc: 78.91% +2025-03-14 19:19:58,103 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.567 | Acc: 80.92% +2025-03-14 19:20:00,453 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.583 | Acc: 80.37% +2025-03-14 19:20:02,728 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.585 | Acc: 80.31% +2025-03-14 19:20:06,322 - train - INFO - Epoch: 36 | Test Loss: 0.740 | Test Acc: 75.07% +2025-03-14 19:20:18,017 - 
train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.633 | Acc: 80.47% +2025-03-14 19:20:20,274 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.561 | Acc: 81.09% +2025-03-14 19:20:22,825 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.574 | Acc: 80.56% +2025-03-14 19:20:25,118 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.575 | Acc: 80.53% +2025-03-14 19:20:28,973 - train - INFO - Epoch: 37 | Test Loss: 0.727 | Test Acc: 75.90% +2025-03-14 19:20:29,339 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.595 | Acc: 80.47% +2025-03-14 19:20:31,652 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.571 | Acc: 80.77% +2025-03-14 19:20:34,298 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.569 | Acc: 80.84% +2025-03-14 19:20:37,043 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.571 | Acc: 80.68% +2025-03-14 19:20:42,009 - train - INFO - Epoch: 38 | Test Loss: 0.678 | Test Acc: 77.55% +2025-03-14 19:20:57,971 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.579 | Acc: 78.12% +2025-03-14 19:21:00,491 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.603 | Acc: 79.55% +2025-03-14 19:21:02,692 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.588 | Acc: 80.21% +2025-03-14 19:21:04,962 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.583 | Acc: 80.22% +2025-03-14 19:21:08,498 - train - INFO - Epoch: 39 | Test Loss: 0.630 | Test Acc: 79.20% +2025-03-14 19:21:08,743 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.506 | Acc: 82.81% +2025-03-14 19:21:11,357 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.572 | Acc: 80.66% +2025-03-14 19:21:13,832 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.580 | Acc: 80.50% +2025-03-14 19:21:16,254 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.576 | Acc: 80.58% +2025-03-14 19:21:19,990 - train - INFO - Epoch: 40 | Test Loss: 0.698 | Test Acc: 77.19% +2025-03-14 19:21:33,933 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.647 | Acc: 73.44% +2025-03-14 19:21:36,385 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 0.592 | Acc: 79.76% 
+2025-03-14 19:21:38,741 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.592 | Acc: 79.90% +2025-03-14 19:21:41,132 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.591 | Acc: 79.99% +2025-03-14 19:21:45,047 - train - INFO - Epoch: 41 | Test Loss: 0.694 | Test Acc: 77.17% +2025-03-14 19:21:45,309 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.585 | Acc: 78.91% +2025-03-14 19:21:47,785 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.580 | Acc: 80.58% +2025-03-14 19:21:50,283 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.583 | Acc: 80.39% +2025-03-14 19:21:52,576 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.581 | Acc: 80.29% +2025-03-14 19:21:56,504 - train - INFO - Epoch: 42 | Test Loss: 0.640 | Test Acc: 78.78% +2025-03-14 19:22:09,412 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.661 | Acc: 77.34% +2025-03-14 19:22:11,772 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.577 | Acc: 80.29% +2025-03-14 19:22:13,947 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.573 | Acc: 80.45% +2025-03-14 19:22:16,276 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.575 | Acc: 80.39% +2025-03-14 19:22:19,982 - train - INFO - Epoch: 43 | Test Loss: 0.761 | Test Acc: 75.18% +2025-03-14 19:22:20,244 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.504 | Acc: 83.59% +2025-03-14 19:22:22,549 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.558 | Acc: 81.34% +2025-03-14 19:22:25,088 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.568 | Acc: 80.99% +2025-03-14 19:22:27,330 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.574 | Acc: 80.70% +2025-03-14 19:22:31,247 - train - INFO - Epoch: 44 | Test Loss: 0.710 | Test Acc: 76.16% +2025-03-14 19:22:46,477 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.561 | Acc: 78.91% +2025-03-14 19:22:48,835 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.550 | Acc: 81.54% +2025-03-14 19:22:51,134 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.554 | Acc: 81.12% +2025-03-14 19:22:53,457 - train - INFO - Epoch: 45 | Batch: 300 | 
Loss: 0.562 | Acc: 80.82% +2025-03-14 19:22:57,455 - train - INFO - Epoch: 45 | Test Loss: 0.826 | Test Acc: 73.68% +2025-03-14 19:22:57,715 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.513 | Acc: 83.59% +2025-03-14 19:23:00,031 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.579 | Acc: 80.21% +2025-03-14 19:23:02,378 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.576 | Acc: 80.56% +2025-03-14 19:23:04,667 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.579 | Acc: 80.41% +2025-03-14 19:23:09,125 - train - INFO - Epoch: 46 | Test Loss: 0.757 | Test Acc: 75.72% +2025-03-14 19:23:23,982 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.391 | Acc: 87.50% +2025-03-14 19:23:26,330 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.567 | Acc: 80.68% +2025-03-14 19:23:28,656 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.569 | Acc: 80.55% +2025-03-14 19:23:30,938 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.564 | Acc: 80.83% +2025-03-14 19:23:35,580 - train - INFO - Epoch: 47 | Test Loss: 0.697 | Test Acc: 76.91% +2025-03-14 19:23:35,874 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.568 | Acc: 82.03% +2025-03-14 19:23:39,159 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.556 | Acc: 81.37% +2025-03-14 19:23:41,720 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.562 | Acc: 81.03% +2025-03-14 19:23:44,018 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.566 | Acc: 80.92% +2025-03-14 19:23:47,764 - train - INFO - Epoch: 48 | Test Loss: 0.740 | Test Acc: 76.44% +2025-03-14 19:24:03,458 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.434 | Acc: 84.38% +2025-03-14 19:24:06,028 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.557 | Acc: 81.14% +2025-03-14 19:24:08,540 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.567 | Acc: 80.76% +2025-03-14 19:24:11,018 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.570 | Acc: 80.54% +2025-03-14 19:24:14,809 - train - INFO - Epoch: 49 | Test Loss: 0.645 | Test Acc: 77.99% +2025-03-14 19:24:15,220 - train - INFO - 
Epoch: 50 | Batch: 0 | Loss: 0.661 | Acc: 82.03% +2025-03-14 19:24:17,452 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 0.553 | Acc: 81.09% +2025-03-14 19:24:19,678 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 0.553 | Acc: 81.04% +2025-03-14 19:24:21,988 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 0.560 | Acc: 80.89% +2025-03-14 19:24:25,737 - train - INFO - Epoch: 50 | Test Loss: 0.719 | Test Acc: 75.84% +2025-03-14 19:24:43,173 - train - INFO - 训练完成! diff --git a/Image/MobileNetv1/code/train.py b/Image/MobileNetv1/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..2a2e5199195598ae6e359ed2c4746e79ba022e29 --- /dev/null +++ b/Image/MobileNetv1/code/train.py @@ -0,0 +1,63 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import MobileNet + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = MobileNet() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv1', + save_type='0', + layer_name='layers.12', + interval=2 + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv1', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + 
target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv1', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path, + layer_name='layers.12', + interval=2 + ) + +if __name__ == '__main__': + main() diff --git a/Image/MobileNetv1/dataset/.gitkeep b/Image/MobileNetv1/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/MobileNetv1/model/.gitkeep b/Image/MobileNetv1/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/MobileNetv1/model/0/epoch1/embeddings.npy b/Image/MobileNetv1/model/0/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e16865fc9b5f3df85bf3fec43ed435b610124b3e --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d15c14c39ec8d8dcfa9aaca55077ec6edeecc88d114c7e722d682fec1b12825 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch1/subject_model.pth b/Image/MobileNetv1/model/0/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..feea11ab0a2437d1097a58bbaa8ff1bb94e8deb9 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9f2d410abc4cfb013c8a563945e74c5807d0903d174f2330147fc66fd4bd005f +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch10/embeddings.npy b/Image/MobileNetv1/model/0/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2d7eded9dc24d104f67c1020b04d4f11ec2159b4 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:e7dc305336fedb514e36cc63cd1e89629c0b9c46ba17197e3f89697546fb08c2 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch10/subject_model.pth b/Image/MobileNetv1/model/0/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6de17bdb863dd423a930b3dd513b4e3ce9b758dd --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:99a7fa928cb9ea31944096085739741187addb65c3e37d11cc50f14e53e90c15 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch11/embeddings.npy b/Image/MobileNetv1/model/0/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c863b244b1592a46ca7caf4bbea59116d837f9ba --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a2822803ec0a989fbcfaee1ef3d41d93bb1bfa87d0a2ed95bd36c2b38fb1683b +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch11/subject_model.pth b/Image/MobileNetv1/model/0/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3413acbffb93f2c5168e3e8734f86ad00471b6ff --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:710918e66a6716920f3bd7345d116a662f242632a7cd7e885e3ecafb416c71fa +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch12/embeddings.npy b/Image/MobileNetv1/model/0/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e8498a45d647ad53c35a79094663a3e7d089ceed --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:554af35dfddae6ba4e6cd428f956e1580c1550ac61baaf817543512c2c0fa215 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch12/subject_model.pth 
b/Image/MobileNetv1/model/0/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..44e6a5628184f39df01938ba94ca85778c04c7a0 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:02a72174a961a5818127d122d613f9fc895e7c8b75dfb41e4e0e1c2e8bd7bfcb +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch13/embeddings.npy b/Image/MobileNetv1/model/0/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3a9be6bc7b85507b34ba9b988cb79de963863e5a --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fecf7b6b5014e02a65807751ec2c50d70bb8c14c544028e4d920ad736ead4c79 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch13/subject_model.pth b/Image/MobileNetv1/model/0/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..235223dbeb8f36a006bc799f1c8425a75eaa98bf --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2166c19a5df09f78b8e7114039aaec5db6658da2cd6d92f414c891d955c4087 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch14/embeddings.npy b/Image/MobileNetv1/model/0/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..34ac0fbda3c3780ee977646d9d00162bf8573c6b --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7496fc9548905104b9e3130e02ac8612dfc10665bae5fe86ac76d77943e264fa +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch14/subject_model.pth b/Image/MobileNetv1/model/0/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..896f6add6a9f7f3ef9f20d8cbfd89cb3c086319f --- /dev/null +++ 
b/Image/MobileNetv1/model/0/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:234900483b97d73067fa8894cf85e5053c267cbffbb07934423f8fb8fad9909a +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch15/embeddings.npy b/Image/MobileNetv1/model/0/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..dbf20b3b59352180bfe2b71fe553955d696dfa6b --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dd4f4a577f04715d76f6ddc41dc22046fb25e3101ab9f96a73fa4124ff7a2e7f +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch15/subject_model.pth b/Image/MobileNetv1/model/0/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7ae8cef72d61285b6505e3831230d3e37416293a --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d2aba03ebac58f8e8d533b2563a1fb99e67b68653bc3f54b3dcf9daea197dca +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch16/embeddings.npy b/Image/MobileNetv1/model/0/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8774d4322948dde8cc96f7d093c3c102024f6c0e --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26b3df0643ac8e4c7d0ad5c433eafdd72e0dfce20a9330c7e46b0f38fadc706c +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch16/subject_model.pth b/Image/MobileNetv1/model/0/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f1ac443491b529f0cbea0ea1614e5b96b97098ca --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:94132ad4cee93cd79b7d83f148686e6a6eb1eef8fc73c763bb3b600449a0b9a2 +size 
13011930 diff --git a/Image/MobileNetv1/model/0/epoch17/embeddings.npy b/Image/MobileNetv1/model/0/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b9de555101e242698e3894d53306bae9842ae3ad --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bdb1dd450118bb54ba35638a230262b228effbec299129f2e855ce65123379ad +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch17/subject_model.pth b/Image/MobileNetv1/model/0/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..29282ac51c0618b1e3be287320cb8972869789a2 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a1bbfe19241602973dd151173061220d7e1b85f7f70af2ee04eec77e4e4d5bd5 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch18/embeddings.npy b/Image/MobileNetv1/model/0/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..637da86c00d6e0ee8d0bc6734d5564e188c0f0a7 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f29631d205c82aba641141781e94de18da1569711a6a533139cb4c4115b3fe5 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch18/subject_model.pth b/Image/MobileNetv1/model/0/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c04e80817b6b9f18314ffc6c31e04e8266101ca0 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5b26aad894c7c7320571f13e3ba783fab970f9c931620350dc8e6d491132ff2 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch19/embeddings.npy b/Image/MobileNetv1/model/0/epoch19/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..cb537edc8fbdbbcc43f16c187402e30c62a402b0 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8784c77e5e97d1a289535de6e81f8938b1bfeaea25ca9a547ede262e07c7fcf5 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch19/subject_model.pth b/Image/MobileNetv1/model/0/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a171dbfccb34b695ee516e33c66b5c68b84deb16 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b4757569fd59596bbaea2975e56988d2f7ecda9c1ac28a70ba0957177f078acb +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch2/embeddings.npy b/Image/MobileNetv1/model/0/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f40d6ffe877c2bb88df7d8fd87fc750085ccb364 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c50ebc5c17bc2a967ea92450c2e1332f7b30186790bbc888ac2d60a6775f268 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch2/subject_model.pth b/Image/MobileNetv1/model/0/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..448ce7e25f8b453fc74f2940d6e3edd757342f87 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e26ab3879506c26a9d710540200abf18153120da1494e7425edb1b7ac542450b +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch20/embeddings.npy b/Image/MobileNetv1/model/0/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ff49dc9743976e4e8e6cce20a04a48c9e3dc241e --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:851fdbc91de269fcff56b1cb321f2ce74df3f8bb8903eb51a6f89c56c1f78998 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch20/subject_model.pth b/Image/MobileNetv1/model/0/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..db03bc9484e1bb6cd4ce612f6c887420920b50cb --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f1ef5af7697b5dadc409e422a9de683712c2c4b23af50b66d66643d95b152f8 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch21/embeddings.npy b/Image/MobileNetv1/model/0/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c501be53422c9f28c1b892323e9565fbcf23d864 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:971151dd7790d7e1ae60f2bc971745b261310dfc448bc7bbaa1c66a20e92bfed +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch21/subject_model.pth b/Image/MobileNetv1/model/0/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ffa92574bb3f2dafa75c8c32bc6029b64c9995ae --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11bd217a0151f223fe4a02b2f0b6f35cb8beef89995dcece8d4cc366a217eb6d +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch22/embeddings.npy b/Image/MobileNetv1/model/0/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..813af0b1fbc8fcad1a4a5eed1f48baa032964faf --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1bb38e4c4b1fbf5eb8bf047c661e973cfe25b3f742bb6ef40bf5e2534e0a65de +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch22/subject_model.pth 
b/Image/MobileNetv1/model/0/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..10c6190d04a1b076e15cdf54f5097707d2836691 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c0dfe6e96e075c5226b9a8f2202d5d93f7ede3773526405beaaebab9aec11a52 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch23/embeddings.npy b/Image/MobileNetv1/model/0/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..64c6ab47cca3f54bb741ff19e3c603cc9a5c7847 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7665feb1a429e4c1797ebf031bd369116f35132995ece42ae2f53db5fe635cf5 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch23/subject_model.pth b/Image/MobileNetv1/model/0/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..84d7beeef957e9c2b678ee8141311752181f7b16 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:06d101fd253bc619268f2861a8627d0402614025886c75ab7f9dbb7c33bddda8 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch24/embeddings.npy b/Image/MobileNetv1/model/0/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..5c61802de76960dc67568ded540a6e09c92dd08f --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:81b36d7563aa5550d2b13cd5952a38a349dcbaeae8c9477eb1a7f2ecb2ab9b43 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch24/subject_model.pth b/Image/MobileNetv1/model/0/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c4a6c9acbdcac73a8951fbd2874c1d7fb08a54b1 --- /dev/null +++ 
b/Image/MobileNetv1/model/0/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5c81360b8c6023b748f0231be64d2ef061d203ab2dcf6cd66f6162ff2d44fda5 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch25/embeddings.npy b/Image/MobileNetv1/model/0/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..02b12d3a14abb53190c68b211eb899c11657c106 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a8c00e47a2ec0975f36312b5fea0f05314698177287f04708ee7fe25094224a2 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch25/subject_model.pth b/Image/MobileNetv1/model/0/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f22050709a06cc3b0e27ed127247166997f54992 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf6c57ff3cbfecd1b1eab7cac3ccfb8c2da7751da5b1725fe2bdd02cbebdae30 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch3/embeddings.npy b/Image/MobileNetv1/model/0/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d2e4fea2d3e60d82b37bfa9ab6b290b64afe5f5e --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e67ba03821b861331ac5df789fcc42117979a56cc1fdd57edba57d35446d51e +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch3/subject_model.pth b/Image/MobileNetv1/model/0/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..dba1700500146fa5964ddc9844697b7f5fa994dc --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0e49b96b58b0810aeb5f50af518da1e503d2429de9f4ac4a0e40ab30b165f60 +size 13011930 
diff --git a/Image/MobileNetv1/model/0/epoch4/embeddings.npy b/Image/MobileNetv1/model/0/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..da8bd89855c460dc5d6a741ff06f0201f610a08c --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:781c944e67e9a48ad1ea0674f30ae73a87a8957df861ca8a83aab6148769b59c +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch4/subject_model.pth b/Image/MobileNetv1/model/0/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..63313658e97d729b24659f85febd1277a04eb799 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:746c45a792bc7c3c824b5758b231379f82575073d3410de8aa44ee19e871f95f +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch5/embeddings.npy b/Image/MobileNetv1/model/0/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..676bd7ad7c5f458e70fd2fab4a625b1dd94dac94 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab51665c97003b84ac97378e4ff139006339bfdc0014397fa5db1a8fc708136f +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch5/subject_model.pth b/Image/MobileNetv1/model/0/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c09ddffefbdeab78cde0b8c8396ca4d6fcd3d1e6 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e9396decc97e42315e900af3790b0a135ceca83c6a429066124c4548d962c69 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch6/embeddings.npy b/Image/MobileNetv1/model/0/epoch6/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..7e1a5c0a89ced4bd3608be6df725962a8427aaaa --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8011a4ea4dba771091548e9cdf91fc2ff5aa545a149f2c11947c797e7974b3e8 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch6/subject_model.pth b/Image/MobileNetv1/model/0/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..85d5da92d7b7be6248d7124ba2cd9c1aaadecdb5 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:041f01d91170e64137bd8141e342e341f3c84b2d7649093fd4eb67cfae1ceb0a +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch7/embeddings.npy b/Image/MobileNetv1/model/0/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ba5b6ec66a1deb67bbc5d39cbed207dcbfa1e77f --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b77da99ad0b33b050a2b13411b37559b9e71dbcaadde3cda01d60abac8730fb6 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch7/subject_model.pth b/Image/MobileNetv1/model/0/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..14deab05fa8d904a55e3ea4caf2cddd54c5e4405 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a091551dcb580b432d4efa9e81c2c3447ebc02d7493c0985fe4639e2a765eb7 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch8/embeddings.npy b/Image/MobileNetv1/model/0/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3866549d33aa9e48299e393cbb6ddcf00235512d --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:d2921b47f2074a9cb1bc3d745de3a3f0f1c1ba4a3f95e24c383b6b5ba4601f57 +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch8/subject_model.pth b/Image/MobileNetv1/model/0/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..238fbb6e665846afd0597c92169767b92800ab62 --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fdd13fc52d447fe8a03f86f390c770065ef9b48847e669db0a39523af6473020 +size 13011930 diff --git a/Image/MobileNetv1/model/0/epoch9/embeddings.npy b/Image/MobileNetv1/model/0/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b0a4e8cf97ed20d1bea7dea06b30b3e99233f76e --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:208c00ace3a5d228b4b5f8c452da76408b73247055e05747de475d14bac4eb6c +size 819200128 diff --git a/Image/MobileNetv1/model/0/epoch9/subject_model.pth b/Image/MobileNetv1/model/0/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..210ea499e7a7c91a4b96d75591aa38d6f326255b --- /dev/null +++ b/Image/MobileNetv1/model/0/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c6913d586443c70b7f113a94b0e55829475bbd73b9901f46ecf7f363fca56f9 +size 13011930 diff --git a/Image/MobileNetv1/model/0/layer_info.json b/Image/MobileNetv1/model/0/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..92f9d96e59daa243de07abfd7c35d2bdda3494b8 --- /dev/null +++ b/Image/MobileNetv1/model/0/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "layers.12", "dim": 4096} \ No newline at end of file diff --git a/Image/MobileNetv1/model/2/epoch1/embeddings.npy b/Image/MobileNetv1/model/2/epoch1/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..0c60d015872172ebb3b795d6fc3312d7fd1395e1 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87c9850a5dc537609db8b0ed14c0979bf0c527756aec8c8660d1155546a1b500 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch1/subject_model.pth b/Image/MobileNetv1/model/2/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4eca112376e8c056d0fd1f5287cb2727871157f6 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5bc46909023b1963e81b5e58b1687c87aed0eb46318183cae5905653bd66051b +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch10/embeddings.npy b/Image/MobileNetv1/model/2/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d8a0a5c82ab461eb4e92aad604bb6a1f16e90868 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0354c7dac2a834c01cf35ad79a587a91bc033b4509b4ab1ea06fbfa7bdf6f6a1 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch10/subject_model.pth b/Image/MobileNetv1/model/2/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cb80c8f8ff3c68e2309a39086532f16019fc739e --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e18c28b944310701fc0d55ead044422d056791fa65e20695d67a1976e01c66a3 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch11/embeddings.npy b/Image/MobileNetv1/model/2/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..efc7c2fb2fd4b38a21901b124b84abe4805a6975 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:b0651b25b865b953fd7211e295fac532cd6b54d5f5e5ba0f9eb7f83aec29b678 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch11/subject_model.pth b/Image/MobileNetv1/model/2/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0d539e5f50e79ea69fd5c092051e689f182126d0 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3093d3b40921824dc1d02aeecd4e09ec863f5713b177906f75ed904a174023c9 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch12/embeddings.npy b/Image/MobileNetv1/model/2/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a91061d27cb7562f712b630503eec629699871af --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50b51a14b927827e32c2ea204bc0a64bb625f9dd1e9103ed7c90fee9f8eb2f92 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch12/subject_model.pth b/Image/MobileNetv1/model/2/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1de5fa5bfbb28984da8f2c4de26a642c8ce0e40e --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91766868f30a97989308793525daa1101c7544e81b2858dff354b53f8d1ae0d9 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch13/embeddings.npy b/Image/MobileNetv1/model/2/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3f6532fb2a405c7f5c4db565eb7bc98ba56ba073 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ef9725e77e4cc934ed4e19c82de932b86da653eae43a89775c925f877c870d70 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch13/subject_model.pth 
b/Image/MobileNetv1/model/2/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..589cb9f4078d85436ba58fd93bfd9da28d50b73a --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ace267488fc3ed35a16f38fbaddb8a5bb649889df1ecef5a9d4e4e981c278c0 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch14/embeddings.npy b/Image/MobileNetv1/model/2/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..bcdf901e87fb1d002d09820afb1ed711995bbe74 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cac1d149f183471c89fd6a09dc69e716fff9d728a425ca65cbc1c13ed78180af +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch14/subject_model.pth b/Image/MobileNetv1/model/2/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6123bd197a62d5892eda36c67b8fd59471599b3f --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:27fd2e9a0993e5b66f216009140d5e42a638b8fdcec9f87db2ce97d9309f13c8 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch15/embeddings.npy b/Image/MobileNetv1/model/2/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..971fc4317bb053f80128c4e3fad16cc52f113a94 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87e91a89cf0fa9152bf7f0f8f5bf192da1cf9ff6796a3a4e90d422617ecd72fa +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch15/subject_model.pth b/Image/MobileNetv1/model/2/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5486eada1fbdeaa261b378031fef79b1a56e3c3e --- /dev/null +++ 
b/Image/MobileNetv1/model/2/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3caaa50e14d557b88b56249c2dcfde3d5cb278056819d14e35b304837e3e6995 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch16/embeddings.npy b/Image/MobileNetv1/model/2/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..31d371ca9ecaf0f3433ce38deaff78ddc5e1130f --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d43d8269484c050b810053b61c53c939ca8504a6da70a15a6729da4f7e52ead0 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch16/subject_model.pth b/Image/MobileNetv1/model/2/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..dfcf282987408643d263085bc3374245b9be443a --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ad49e4fbbbad021e0bb84db6f66b8b35822b1e32be4aef11d071000648366bab +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch17/embeddings.npy b/Image/MobileNetv1/model/2/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..59f68ac8f899f3ad7e056485bd85803815da73f0 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:028a8b47c2db8494ca06d6777d99fda423b1324c0c40296b5034f104f5a61a2d +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch17/subject_model.pth b/Image/MobileNetv1/model/2/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9be536b6e627a73c1d1badc4c6c8df1e9121198f --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f8b67843182d31133cc697f6c94a28c2341cf2b844f5288ddadf8fa7ff966463 +size 
13011930 diff --git a/Image/MobileNetv1/model/2/epoch18/embeddings.npy b/Image/MobileNetv1/model/2/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..7e444b15daa717ed07d372c0ca28d4130b7f734f --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ccbbea0d3d0ed88bf2f19bf45f90e38822c1c3c1b8aab622912a60d68b9e4ea +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch18/subject_model.pth b/Image/MobileNetv1/model/2/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7bcc41e87d99b07d7ac67a5a71ea069347736847 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4eb732f3cd507d8721846dc9117bda29efee83b27c705d88ab6a1a14737c1ed +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch19/embeddings.npy b/Image/MobileNetv1/model/2/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..5308bcee717340921267e9c9a3f6aa8f5082abfa --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1d6185c9d5c0d6ae660323d580ded6b177abb81367c66dfc6f7a9e2f7e0c379 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch19/subject_model.pth b/Image/MobileNetv1/model/2/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..138a574b7a35d426f4d23a28e5845e9c068305a0 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42588645cbde027b66bd9592e7ab7c828f037b4a48da9b03bc5dd1223a2af396 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch2/embeddings.npy b/Image/MobileNetv1/model/2/epoch2/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..c260fe3919a12d3654e35904bc58f62f26ef5244 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:41e02242707f6ac629c0be59fc2706492afe254e4534a04c682bd1cb0b2ed71f +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch2/subject_model.pth b/Image/MobileNetv1/model/2/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..02736b056be398afbfd27628d6a17db1e1792591 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:973e20467d56e4b0c095ceba0fccf5597854918806bf112bf15af0eb5725ec9a +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch20/embeddings.npy b/Image/MobileNetv1/model/2/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..581441af94e18448d3c8066b415157c617db4bbd --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df164c381e5462ea5b5b9b828b9199793e50e5b61cd9db24c69b4d1343a979ae +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch20/subject_model.pth b/Image/MobileNetv1/model/2/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6d779b159fbdcd3d0e98633b78e13c7b56214dc2 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4cb441324a989c4e2c95f9ebe91f1ec811f2b90421632b36310a95332732a053 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch21/embeddings.npy b/Image/MobileNetv1/model/2/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9db6165cec1ec5be32eda6185332ca1932ebe5ac --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:642a4c8c424b886042d192a69b71f25394978c430b0ea6c0267f017a03bbb43d +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch21/subject_model.pth b/Image/MobileNetv1/model/2/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5f086f0c85f88c28234e1291cc04e2f7b5a0f791 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67451d006ac64855be93835d8a6d304a941dbf730f15ab14bf20356acfcff3b4 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch22/embeddings.npy b/Image/MobileNetv1/model/2/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4a76f313eb6a5ce8f2be7b28aaf181faade40418 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e206dbf9fea0ce8ad000156cf11434d35bd8278feb38998a7e792b4716792f34 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch22/subject_model.pth b/Image/MobileNetv1/model/2/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4e20db07f497abc4ed430ddb7d68486dcd7574f5 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e078bee0fee6072835a699c868632a929f9b2c95fe28c9a5943027382683e63 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch23/embeddings.npy b/Image/MobileNetv1/model/2/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a19737dc4e6de87f63529e5088a548770da98378 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8be5739c7626718945ecfca10ed66dcbbfcb01224f16b66743e8d215470ab37d +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch23/subject_model.pth 
b/Image/MobileNetv1/model/2/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0eb57403e0d464a6f7701f305f3d57de4ff3ee20 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1fc61efc799c1db5c7e187276094113c775d23fa0ac800f9fc2bfc68d5e5a6ac +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch24/embeddings.npy b/Image/MobileNetv1/model/2/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..768f4d8ab4e6672cc0686eaa2ceff95bd8ed9ca7 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68709c13acf7e13b7495f71d9e0aff9197e14e911a0707fd19d654d27f4d845c +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch24/subject_model.pth b/Image/MobileNetv1/model/2/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e54a56ffa600ef9ec2666896f6b6b0b34861a49d --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8b81029e5799dafa0ef6512f2f1165bc2592064e231cd1617a389436c6561a9 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch25/embeddings.npy b/Image/MobileNetv1/model/2/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..784ec62c4d126a15ab2047c44cd3506e1ac8d209 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4a52093f68b0ee7e9a5e72e9669d82d6c64755ac4d5e1aeeecaf893601467b5 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch25/subject_model.pth b/Image/MobileNetv1/model/2/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0aeef388d5d09990b36fefc6c4faef4662e85814 --- /dev/null +++ 
b/Image/MobileNetv1/model/2/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:622790e2311d931800d69dc5ab285aa131792b9557822320293901dec1b40654 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch3/embeddings.npy b/Image/MobileNetv1/model/2/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..5b92461f8ec2cbc3be983c2fa3821c154f763eba --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:81e5cbf0df044ac80dbd752457a9e9c2343cf8f9ca61986d5d4199d4b6aa49e6 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch3/subject_model.pth b/Image/MobileNetv1/model/2/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..269ad0817fa9851a15e87f496983930cc4d7ae2d --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b406abcf5a32973ccadb33c3c8a4402d38a5daf2735f105af4b51ec1ed88145 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch4/embeddings.npy b/Image/MobileNetv1/model/2/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..fe8160d3cb53062c24ecf05351248ae7000dd8bb --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:314bbb501974ccf1653ab80615f8530425927a187226d817c3a442345c69b284 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch4/subject_model.pth b/Image/MobileNetv1/model/2/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..db89b54bb49af51ebdc4226592ac9ee9bc4d72b5 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ebc9a71e831fc95340eb8b62af7e10204d2d4dfdfa4d1efd06fa7a677ed54e4 +size 13011930 diff 
--git a/Image/MobileNetv1/model/2/epoch5/embeddings.npy b/Image/MobileNetv1/model/2/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f3bf0d96c6366d19de5c2b9e03dcba470e46048e --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:204fde78ac20702ac3bba0e8ae822657deb15f8549bc3ca48843c99a4f1476f7 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch5/subject_model.pth b/Image/MobileNetv1/model/2/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..19037200e72c2f9cd5e283823fc78e065b86a809 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48e295cb80a72e8e1b9f4f985e4ba99d7c7ed9cb988667433592380e9bc9cb89 +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch6/embeddings.npy b/Image/MobileNetv1/model/2/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b153e92ecfc682631f47b64960eb5dff40e90c5d --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec82b803db90b7115ff924d271bfc5859a7e67b276104add923894b3d56cd235 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch6/subject_model.pth b/Image/MobileNetv1/model/2/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..670e0c38e0daa01e0b726a0dcc9b73f8bc8940bb --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b11a474d60cb1cc59ae97342cb05a904c9f0c1146bdc7fc9d30f7f7e906d540c +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch7/embeddings.npy b/Image/MobileNetv1/model/2/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f603e35d919d88ee5b58e627abc74051603c8ec1 
--- /dev/null +++ b/Image/MobileNetv1/model/2/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a2bc4010cc04510b1d82cec4953bd997632cf13a850b806caf9ce4612b9546f2 +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch7/subject_model.pth b/Image/MobileNetv1/model/2/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..51b3be8894b045e4d140f1f456f42deefb4ee9c7 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:677628f5810c8018b45e220693b4825d08166e875c058aa5339d801a4979ceee +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch8/embeddings.npy b/Image/MobileNetv1/model/2/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ba57e4c60e86d7847fe05ae9bf61589bcfcb9c56 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:09c1f1c42b694d979f60f1c5d7b51e4e78f082c0bb6109bf56b0f3bcdd4d64fa +size 819200128 diff --git a/Image/MobileNetv1/model/2/epoch8/subject_model.pth b/Image/MobileNetv1/model/2/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7996b8afe5c225cf19c41d01f0f81daccbbe9413 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7fd16394ccb18c3e303c201ced5e9ea78525aabb766a7b16310ef1a3e73944e +size 13011930 diff --git a/Image/MobileNetv1/model/2/epoch9/embeddings.npy b/Image/MobileNetv1/model/2/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f9764bfbad8f0f1e9be8c22c0a6eeba4b628a093 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c47ff9fe3be31085f555c65e09f60df176d5551fa924c3d3924a937352dd5d60 +size 
819200128 diff --git a/Image/MobileNetv1/model/2/epoch9/subject_model.pth b/Image/MobileNetv1/model/2/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c7d972821a00f48bb82408e777efc613ce8433d7 --- /dev/null +++ b/Image/MobileNetv1/model/2/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:37466e1cae0e0a7cf5b3f9c23336b002bebe2f1c47f0b555b15e398e5d04723f +size 13011930 diff --git a/Image/MobileNetv1/model/2/layer_info.json b/Image/MobileNetv1/model/2/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..92f9d96e59daa243de07abfd7c35d2bdda3494b8 --- /dev/null +++ b/Image/MobileNetv1/model/2/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "layers.12", "dim": 4096} \ No newline at end of file diff --git a/Image/MobileNetv2/code/backdoor_train.log b/Image/MobileNetv2/code/backdoor_train.log new file mode 100644 index 0000000000000000000000000000000000000000..92c5229f0b606e2ffac39ca59aa43dbc53e59dc2 --- /dev/null +++ b/Image/MobileNetv2/code/backdoor_train.log @@ -0,0 +1,253 @@ +2025-03-14 19:34:13,960 - train - INFO - 开始训练 mobilenetv2 +2025-03-14 19:34:13,960 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:2 +2025-03-14 19:34:14,824 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.343 | Acc: 7.81% +2025-03-14 19:34:18,227 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.437 | Acc: 19.93% +2025-03-14 19:34:21,529 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.106 | Acc: 26.41% +2025-03-14 19:34:25,012 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.947 | Acc: 30.59% +2025-03-14 19:34:30,049 - train - INFO - Epoch: 1 | Test Loss: 1.551 | Test Acc: 44.73% +2025-03-14 19:34:30,584 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.549 | Acc: 37.50% +2025-03-14 19:34:33,915 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.356 | Acc: 50.95% +2025-03-14 19:34:37,167 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.292 | Acc: 53.64% +2025-03-14 19:34:40,496 - train - 
INFO - Epoch: 2 | Batch: 300 | Loss: 1.243 | Acc: 55.62% +2025-03-14 19:34:45,598 - train - INFO - Epoch: 2 | Test Loss: 1.072 | Test Acc: 61.44% +2025-03-14 19:35:01,116 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.011 | Acc: 70.31% +2025-03-14 19:35:06,134 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.027 | Acc: 63.92% +2025-03-14 19:35:10,627 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.010 | Acc: 64.62% +2025-03-14 19:35:14,706 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 0.996 | Acc: 65.22% +2025-03-14 19:35:20,364 - train - INFO - Epoch: 3 | Test Loss: 0.871 | Test Acc: 69.34% +2025-03-14 19:35:20,817 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 0.923 | Acc: 68.75% +2025-03-14 19:35:24,679 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 0.881 | Acc: 69.57% +2025-03-14 19:35:28,088 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 0.871 | Acc: 70.01% +2025-03-14 19:35:31,594 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 0.857 | Acc: 70.56% +2025-03-14 19:35:36,716 - train - INFO - Epoch: 4 | Test Loss: 0.965 | Test Acc: 68.49% +2025-03-14 19:35:48,118 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.103 | Acc: 67.19% +2025-03-14 19:35:51,538 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 0.789 | Acc: 73.24% +2025-03-14 19:35:54,853 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 0.786 | Acc: 73.25% +2025-03-14 19:35:58,041 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 0.781 | Acc: 73.45% +2025-03-14 19:36:02,649 - train - INFO - Epoch: 5 | Test Loss: 0.710 | Test Acc: 75.54% +2025-03-14 19:36:02,867 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 0.743 | Acc: 75.78% +2025-03-14 19:36:06,290 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 0.735 | Acc: 75.52% +2025-03-14 19:36:09,827 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 0.740 | Acc: 75.27% +2025-03-14 19:36:13,150 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 0.737 | Acc: 75.34% +2025-03-14 19:36:17,728 - train - INFO - Epoch: 6 | Test Loss: 0.850 | Test Acc: 71.43% +2025-03-14 19:36:29,047 - train 
- INFO - Epoch: 7 | Batch: 0 | Loss: 0.764 | Acc: 71.88% +2025-03-14 19:36:32,484 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 0.687 | Acc: 76.92% +2025-03-14 19:36:36,055 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 0.692 | Acc: 76.60% +2025-03-14 19:36:40,123 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 0.703 | Acc: 76.35% +2025-03-14 19:36:45,256 - train - INFO - Epoch: 7 | Test Loss: 0.756 | Test Acc: 74.37% +2025-03-14 19:36:45,496 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 0.576 | Acc: 81.25% +2025-03-14 19:36:49,278 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.680 | Acc: 77.41% +2025-03-14 19:36:52,908 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.688 | Acc: 77.13% +2025-03-14 19:36:56,915 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.679 | Acc: 77.49% +2025-03-14 19:37:01,795 - train - INFO - Epoch: 8 | Test Loss: 1.103 | Test Acc: 64.27% +2025-03-14 19:37:14,687 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.588 | Acc: 82.03% +2025-03-14 19:37:18,396 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.688 | Acc: 77.20% +2025-03-14 19:37:21,730 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.677 | Acc: 77.54% +2025-03-14 19:37:25,041 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.675 | Acc: 77.44% +2025-03-14 19:37:29,737 - train - INFO - Epoch: 9 | Test Loss: 0.690 | Test Acc: 76.95% +2025-03-14 19:37:29,997 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.586 | Acc: 81.25% +2025-03-14 19:37:33,361 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.667 | Acc: 78.05% +2025-03-14 19:37:36,656 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.682 | Acc: 77.28% +2025-03-14 19:37:40,019 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.675 | Acc: 77.60% +2025-03-14 19:37:44,637 - train - INFO - Epoch: 10 | Test Loss: 0.741 | Test Acc: 75.22% +2025-03-14 19:37:55,613 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.756 | Acc: 73.44% +2025-03-14 19:37:58,897 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.669 | Acc: 77.92% +2025-03-14 
19:38:02,173 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.664 | Acc: 78.07% +2025-03-14 19:38:05,475 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.665 | Acc: 78.08% +2025-03-14 19:38:09,934 - train - INFO - Epoch: 11 | Test Loss: 0.772 | Test Acc: 74.38% +2025-03-14 19:38:10,194 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.741 | Acc: 75.00% +2025-03-14 19:38:13,918 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.666 | Acc: 78.12% +2025-03-14 19:38:17,447 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.663 | Acc: 78.11% +2025-03-14 19:38:20,884 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.659 | Acc: 78.18% +2025-03-14 19:38:25,527 - train - INFO - Epoch: 12 | Test Loss: 0.781 | Test Acc: 73.37% +2025-03-14 19:38:36,897 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.786 | Acc: 73.44% +2025-03-14 19:38:40,292 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 0.656 | Acc: 78.43% +2025-03-14 19:38:43,722 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.655 | Acc: 78.47% +2025-03-14 19:38:47,038 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.654 | Acc: 78.41% +2025-03-14 19:38:52,076 - train - INFO - Epoch: 13 | Test Loss: 0.843 | Test Acc: 71.75% +2025-03-14 19:38:52,373 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.789 | Acc: 71.88% +2025-03-14 19:38:55,765 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.635 | Acc: 78.76% +2025-03-14 19:38:59,154 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.649 | Acc: 78.32% +2025-03-14 19:39:02,796 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.657 | Acc: 78.11% +2025-03-14 19:39:07,795 - train - INFO - Epoch: 14 | Test Loss: 0.706 | Test Acc: 75.67% +2025-03-14 19:39:19,690 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.754 | Acc: 73.44% +2025-03-14 19:39:23,082 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 0.637 | Acc: 78.79% +2025-03-14 19:39:26,597 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.653 | Acc: 78.16% +2025-03-14 19:39:30,270 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.657 
| Acc: 78.00% +2025-03-14 19:39:35,329 - train - INFO - Epoch: 15 | Test Loss: 0.973 | Test Acc: 69.48% +2025-03-14 19:39:35,603 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.637 | Acc: 77.34% +2025-03-14 19:39:39,191 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.643 | Acc: 78.73% +2025-03-14 19:39:42,808 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.656 | Acc: 78.33% +2025-03-14 19:39:46,368 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.657 | Acc: 78.34% +2025-03-14 19:39:51,262 - train - INFO - Epoch: 16 | Test Loss: 0.645 | Test Acc: 77.94% +2025-03-14 19:40:02,465 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.689 | Acc: 78.12% +2025-03-14 19:40:05,700 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.656 | Acc: 78.31% +2025-03-14 19:40:08,941 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.659 | Acc: 78.00% +2025-03-14 19:40:12,210 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.654 | Acc: 78.12% +2025-03-14 19:40:17,353 - train - INFO - Epoch: 17 | Test Loss: 0.788 | Test Acc: 73.69% +2025-03-14 19:40:17,627 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.796 | Acc: 75.00% +2025-03-14 19:40:20,935 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.619 | Acc: 79.47% +2025-03-14 19:40:24,294 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.636 | Acc: 78.87% +2025-03-14 19:40:27,713 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.642 | Acc: 78.64% +2025-03-14 19:40:32,231 - train - INFO - Epoch: 18 | Test Loss: 0.649 | Test Acc: 78.28% +2025-03-14 19:40:45,148 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.598 | Acc: 78.12% +2025-03-14 19:40:48,751 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.642 | Acc: 78.30% +2025-03-14 19:40:52,160 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.633 | Acc: 79.06% +2025-03-14 19:40:55,553 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 0.639 | Acc: 78.77% +2025-03-14 19:41:00,037 - train - INFO - Epoch: 19 | Test Loss: 0.758 | Test Acc: 74.52% +2025-03-14 19:41:00,266 - train - INFO - Epoch: 20 | 
Batch: 0 | Loss: 0.699 | Acc: 78.91% +2025-03-14 19:41:03,650 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.641 | Acc: 79.18% +2025-03-14 19:41:06,904 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.645 | Acc: 78.75% +2025-03-14 19:41:10,217 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.651 | Acc: 78.54% +2025-03-14 19:41:15,170 - train - INFO - Epoch: 20 | Test Loss: 0.942 | Test Acc: 69.15% +2025-03-14 19:41:26,518 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.722 | Acc: 75.00% +2025-03-14 19:41:29,940 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.642 | Acc: 78.69% +2025-03-14 19:41:33,495 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.651 | Acc: 78.30% +2025-03-14 19:41:36,904 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.654 | Acc: 78.37% +2025-03-14 19:41:42,216 - train - INFO - Epoch: 21 | Test Loss: 0.653 | Test Acc: 78.49% +2025-03-14 19:41:42,526 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.722 | Acc: 78.12% +2025-03-14 19:41:46,440 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.646 | Acc: 78.98% +2025-03-14 19:41:50,554 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.647 | Acc: 78.78% +2025-03-14 19:41:54,284 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.645 | Acc: 78.79% +2025-03-14 19:41:59,252 - train - INFO - Epoch: 22 | Test Loss: 0.668 | Test Acc: 76.99% +2025-03-14 19:42:11,832 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.549 | Acc: 80.47% +2025-03-14 19:42:15,126 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.624 | Acc: 78.83% +2025-03-14 19:42:18,419 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.635 | Acc: 78.79% +2025-03-14 19:42:21,633 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.632 | Acc: 78.91% +2025-03-14 19:42:26,712 - train - INFO - Epoch: 23 | Test Loss: 0.754 | Test Acc: 74.86% +2025-03-14 19:42:26,957 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.438 | Acc: 86.72% +2025-03-14 19:42:30,229 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.637 | Acc: 78.90% +2025-03-14 19:42:33,523 - 
train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.639 | Acc: 78.73% +2025-03-14 19:42:36,716 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.633 | Acc: 78.95% +2025-03-14 19:42:41,197 - train - INFO - Epoch: 24 | Test Loss: 0.851 | Test Acc: 72.15% +2025-03-14 19:42:53,065 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.541 | Acc: 80.47% +2025-03-14 19:42:56,327 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.642 | Acc: 78.20% +2025-03-14 19:42:59,601 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.636 | Acc: 78.85% +2025-03-14 19:43:02,944 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.638 | Acc: 78.86% +2025-03-14 19:43:07,734 - train - INFO - Epoch: 25 | Test Loss: 0.992 | Test Acc: 70.15% +2025-03-14 19:43:08,005 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.519 | Acc: 82.81% +2025-03-14 19:43:11,406 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 0.607 | Acc: 80.00% +2025-03-14 19:43:14,778 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.629 | Acc: 79.06% +2025-03-14 19:43:18,070 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.628 | Acc: 79.10% +2025-03-14 19:43:22,670 - train - INFO - Epoch: 26 | Test Loss: 0.671 | Test Acc: 77.26% +2025-03-14 19:43:33,686 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.671 | Acc: 78.12% +2025-03-14 19:43:37,086 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.621 | Acc: 79.70% +2025-03-14 19:43:40,311 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.620 | Acc: 79.59% +2025-03-14 19:43:43,622 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.627 | Acc: 79.41% +2025-03-14 19:43:48,359 - train - INFO - Epoch: 27 | Test Loss: 0.662 | Test Acc: 77.75% +2025-03-14 19:43:48,711 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.576 | Acc: 82.81% +2025-03-14 19:43:52,170 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.630 | Acc: 79.00% +2025-03-14 19:43:55,513 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.627 | Acc: 79.05% +2025-03-14 19:43:58,821 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.635 | Acc: 78.75% 
+2025-03-14 19:44:03,266 - train - INFO - Epoch: 28 | Test Loss: 0.724 | Test Acc: 74.87% +2025-03-14 19:44:13,667 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.646 | Acc: 81.25% +2025-03-14 19:44:16,941 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.628 | Acc: 79.19% +2025-03-14 19:44:20,482 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.630 | Acc: 79.00% +2025-03-14 19:44:23,737 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.629 | Acc: 78.95% +2025-03-14 19:44:28,295 - train - INFO - Epoch: 29 | Test Loss: 0.662 | Test Acc: 77.43% +2025-03-14 19:44:28,530 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.670 | Acc: 74.22% +2025-03-14 19:44:31,941 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.615 | Acc: 79.38% +2025-03-14 19:44:35,458 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.629 | Acc: 79.02% +2025-03-14 19:44:38,753 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.627 | Acc: 79.15% +2025-03-14 19:44:43,265 - train - INFO - Epoch: 30 | Test Loss: 0.795 | Test Acc: 73.48% +2025-03-14 19:44:56,355 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.875 | Acc: 67.97% +2025-03-14 19:44:59,749 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.635 | Acc: 78.71% +2025-03-14 19:45:03,017 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.626 | Acc: 79.11% +2025-03-14 19:45:06,545 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.626 | Acc: 79.04% +2025-03-14 19:45:11,485 - train - INFO - Epoch: 31 | Test Loss: 0.669 | Test Acc: 77.71% +2025-03-14 19:45:11,913 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.498 | Acc: 81.25% +2025-03-14 19:45:15,270 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.634 | Acc: 79.05% +2025-03-14 19:45:18,585 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.633 | Acc: 79.11% +2025-03-14 19:45:21,798 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 0.629 | Acc: 79.26% +2025-03-14 19:45:26,689 - train - INFO - Epoch: 32 | Test Loss: 0.971 | Test Acc: 69.30% +2025-03-14 19:45:39,797 - train - INFO - Epoch: 33 | Batch: 0 | 
Loss: 0.706 | Acc: 78.91% +2025-03-14 19:45:43,114 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.610 | Acc: 79.79% +2025-03-14 19:45:46,320 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.623 | Acc: 79.44% +2025-03-14 19:45:49,642 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.626 | Acc: 79.30% +2025-03-14 19:45:54,214 - train - INFO - Epoch: 33 | Test Loss: 0.675 | Test Acc: 76.70% +2025-03-14 19:45:54,434 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.527 | Acc: 80.47% +2025-03-14 19:45:57,688 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.621 | Acc: 79.09% +2025-03-14 19:46:00,874 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.623 | Acc: 78.99% +2025-03-14 19:46:04,179 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.625 | Acc: 78.99% +2025-03-14 19:46:08,870 - train - INFO - Epoch: 34 | Test Loss: 0.612 | Test Acc: 78.84% +2025-03-14 19:46:21,448 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.566 | Acc: 79.69% +2025-03-14 19:46:24,671 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.624 | Acc: 79.35% +2025-03-14 19:46:27,843 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.632 | Acc: 78.92% +2025-03-14 19:46:31,182 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.630 | Acc: 78.98% +2025-03-14 19:46:35,613 - train - INFO - Epoch: 35 | Test Loss: 0.692 | Test Acc: 76.93% +2025-03-14 19:46:35,849 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.638 | Acc: 78.91% +2025-03-14 19:46:39,126 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.617 | Acc: 79.44% +2025-03-14 19:46:42,587 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.620 | Acc: 79.45% +2025-03-14 19:46:45,891 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.619 | Acc: 79.32% +2025-03-14 19:46:50,381 - train - INFO - Epoch: 36 | Test Loss: 0.747 | Test Acc: 75.67% +2025-03-14 19:47:04,037 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.611 | Acc: 82.03% +2025-03-14 19:47:07,310 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.621 | Acc: 79.08% +2025-03-14 19:47:10,603 - train - INFO 
- Epoch: 37 | Batch: 200 | Loss: 0.621 | Acc: 79.15% +2025-03-14 19:47:13,815 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.628 | Acc: 78.97% +2025-03-14 19:47:18,168 - train - INFO - Epoch: 37 | Test Loss: 0.679 | Test Acc: 77.48% +2025-03-14 19:47:18,397 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.496 | Acc: 83.59% +2025-03-14 19:47:21,698 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.615 | Acc: 79.73% +2025-03-14 19:47:24,860 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.627 | Acc: 79.17% +2025-03-14 19:47:28,120 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.629 | Acc: 79.15% +2025-03-14 19:47:32,936 - train - INFO - Epoch: 38 | Test Loss: 0.675 | Test Acc: 77.86% +2025-03-14 19:47:45,480 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.576 | Acc: 82.03% +2025-03-14 19:47:48,904 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 0.601 | Acc: 80.37% +2025-03-14 19:47:52,245 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.607 | Acc: 79.87% +2025-03-14 19:47:55,509 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.617 | Acc: 79.50% +2025-03-14 19:47:59,980 - train - INFO - Epoch: 39 | Test Loss: 0.841 | Test Acc: 72.60% +2025-03-14 19:48:00,232 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.719 | Acc: 73.44% +2025-03-14 19:48:03,619 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.617 | Acc: 79.37% +2025-03-14 19:48:06,905 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.616 | Acc: 79.42% +2025-03-14 19:48:10,211 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.619 | Acc: 79.36% +2025-03-14 19:48:14,742 - train - INFO - Epoch: 40 | Test Loss: 0.903 | Test Acc: 70.17% +2025-03-14 19:48:26,055 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.655 | Acc: 76.56% +2025-03-14 19:48:29,372 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 0.623 | Acc: 79.28% +2025-03-14 19:48:32,576 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.629 | Acc: 79.13% +2025-03-14 19:48:35,730 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.622 | Acc: 79.29% +2025-03-14 
19:48:40,269 - train - INFO - Epoch: 41 | Test Loss: 0.642 | Test Acc: 78.14% +2025-03-14 19:48:40,496 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.641 | Acc: 75.00% +2025-03-14 19:48:43,861 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.619 | Acc: 79.66% +2025-03-14 19:48:47,144 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.622 | Acc: 79.31% +2025-03-14 19:48:50,452 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.617 | Acc: 79.36% +2025-03-14 19:48:54,827 - train - INFO - Epoch: 42 | Test Loss: 0.738 | Test Acc: 75.93% +2025-03-14 19:49:08,190 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.778 | Acc: 73.44% +2025-03-14 19:49:11,436 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.615 | Acc: 79.61% +2025-03-14 19:49:14,877 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.629 | Acc: 79.07% +2025-03-14 19:49:18,484 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 0.632 | Acc: 79.07% +2025-03-14 19:49:23,712 - train - INFO - Epoch: 43 | Test Loss: 0.742 | Test Acc: 74.57% +2025-03-14 19:49:24,095 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.696 | Acc: 76.56% +2025-03-14 19:49:27,757 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.610 | Acc: 79.96% +2025-03-14 19:49:31,662 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.618 | Acc: 79.48% +2025-03-14 19:49:35,009 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.612 | Acc: 79.62% +2025-03-14 19:49:40,690 - train - INFO - Epoch: 44 | Test Loss: 0.732 | Test Acc: 75.86% +2025-03-14 19:49:52,938 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.652 | Acc: 82.81% +2025-03-14 19:49:56,098 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.594 | Acc: 80.00% +2025-03-14 19:49:59,270 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.600 | Acc: 80.04% +2025-03-14 19:50:02,349 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 0.602 | Acc: 79.92% +2025-03-14 19:50:06,606 - train - INFO - Epoch: 45 | Test Loss: 0.749 | Test Acc: 75.20% +2025-03-14 19:50:06,830 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.594 | 
Acc: 81.25% +2025-03-14 19:50:10,214 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.605 | Acc: 79.89% +2025-03-14 19:50:13,272 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.606 | Acc: 79.82% +2025-03-14 19:50:16,267 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.612 | Acc: 79.63% +2025-03-14 19:50:20,607 - train - INFO - Epoch: 46 | Test Loss: 0.707 | Test Acc: 76.63% +2025-03-14 19:50:33,662 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.488 | Acc: 83.59% +2025-03-14 19:50:36,860 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.591 | Acc: 80.45% +2025-03-14 19:50:40,064 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.611 | Acc: 79.72% +2025-03-14 19:50:43,830 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.615 | Acc: 79.63% +2025-03-14 19:50:48,236 - train - INFO - Epoch: 47 | Test Loss: 0.612 | Test Acc: 79.22% +2025-03-14 19:50:48,460 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.729 | Acc: 74.22% +2025-03-14 19:50:51,645 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.606 | Acc: 79.83% +2025-03-14 19:50:54,950 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.616 | Acc: 79.28% +2025-03-14 19:50:58,523 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.614 | Acc: 79.36% +2025-03-14 19:51:03,209 - train - INFO - Epoch: 48 | Test Loss: 0.925 | Test Acc: 71.88% +2025-03-14 19:51:16,408 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.598 | Acc: 82.03% +2025-03-14 19:51:19,486 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.616 | Acc: 79.12% +2025-03-14 19:51:22,803 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.615 | Acc: 79.30% +2025-03-14 19:51:26,002 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.612 | Acc: 79.47% +2025-03-14 19:51:30,321 - train - INFO - Epoch: 49 | Test Loss: 0.750 | Test Acc: 75.08% +2025-03-14 19:51:30,550 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 0.558 | Acc: 82.03% +2025-03-14 19:51:33,757 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 0.583 | Acc: 80.69% +2025-03-14 19:51:36,928 - train - INFO - Epoch: 50 | 
Batch: 200 | Loss: 0.595 | Acc: 80.07% +2025-03-14 19:51:40,116 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 0.608 | Acc: 79.64% +2025-03-14 19:51:44,713 - train - INFO - Epoch: 50 | Test Loss: 0.711 | Test Acc: 76.34% +2025-03-14 19:51:56,482 - train - INFO - 训练完成! diff --git a/Image/MobileNetv2/code/model.py b/Image/MobileNetv2/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..c92a0f20828c73e72ff8d685e799d719a1da11a6 --- /dev/null +++ b/Image/MobileNetv2/code/model.py @@ -0,0 +1,176 @@ +''' +MobileNetV2 in PyTorch. + +论文: "Inverted Residuals and Linear Bottlenecks: Mobile Networks for Classification, Detection and Segmentation" +参考: https://arxiv.org/abs/1801.04381 + +主要特点: +1. 引入倒残差结构(Inverted Residual),先升维后降维 +2. 使用线性瓶颈(Linear Bottlenecks),去除最后一个ReLU保留特征 +3. 使用ReLU6作为激活函数,提高在低精度计算下的鲁棒性 +4. 残差连接时使用加法而不是拼接,减少内存占用 +''' + +import torch +import torch.nn as nn + + +class Block(nn.Module): + '''倒残差块 (Inverted Residual Block) + + 结构: expand(1x1) -> depthwise(3x3) -> project(1x1) + 特点: + 1. 使用1x1卷积先升维再降维(与ResNet相反) + 2. 使用深度可分离卷积减少参数量 + 3. 
使用shortcut连接(当stride=1且输入输出通道数相同时) + + Args: + in_channels: 输入通道数 + out_channels: 输出通道数 + expansion: 扩展因子,控制中间层的通道数 + stride: 步长,控制特征图大小 + ''' + def __init__(self, in_channels, out_channels, expansion, stride): + super(Block, self).__init__() + self.stride = stride + channels = expansion * in_channels # 扩展通道数 + + # 1x1卷积升维 + self.conv1 = nn.Conv2d( + in_channels, channels, + kernel_size=1, stride=1, padding=0, bias=False + ) + self.bn1 = nn.BatchNorm2d(channels) + + # 3x3深度可分离卷积 + self.conv2 = nn.Conv2d( + channels, channels, + kernel_size=3, stride=stride, padding=1, + groups=channels, bias=False # groups=channels即为深度可分离卷积 + ) + self.bn2 = nn.BatchNorm2d(channels) + + # 1x1卷积降维(线性瓶颈,不使用激活函数) + self.conv3 = nn.Conv2d( + channels, out_channels, + kernel_size=1, stride=1, padding=0, bias=False + ) + self.bn3 = nn.BatchNorm2d(out_channels) + + # shortcut连接 + self.shortcut = nn.Sequential() + if stride == 1 and in_channels != out_channels: + self.shortcut = nn.Sequential( + nn.Conv2d( + in_channels, out_channels, + kernel_size=1, stride=1, padding=0, bias=False + ), + nn.BatchNorm2d(out_channels) + ) + + self.relu6 = nn.ReLU6(inplace=True) + + def forward(self, x): + # 主分支 + out = self.relu6(self.bn1(self.conv1(x))) # 升维 + out = self.relu6(self.bn2(self.conv2(out))) # 深度卷积 + out = self.bn3(self.conv3(out)) # 降维(线性瓶颈) + + # shortcut连接(仅在stride=1时) + out = out + self.shortcut(x) if self.stride == 1 else out + return out + + +class MobileNetV2(nn.Module): + '''MobileNetV2网络 + + Args: + num_classes: 分类数量 + + 网络配置: + cfg = [(expansion, out_channels, num_blocks, stride), ...] 
+ - expansion: 扩展因子 + - out_channels: 输出通道数 + - num_blocks: 块的数量 + - stride: 第一个块的步长 + ''' + # 网络结构配置 + cfg = [ + # (expansion, out_channels, num_blocks, stride) + (1, 16, 1, 1), # conv1 + (6, 24, 2, 1), # conv2,注意:原论文stride=2,这里改为1以适应CIFAR10 + (6, 32, 3, 2), # conv3 + (6, 64, 4, 2), # conv4 + (6, 96, 3, 1), # conv5 + (6, 160, 3, 2), # conv6 + (6, 320, 1, 1), # conv7 + ] + + def __init__(self, num_classes=10): + super(MobileNetV2, self).__init__() + + # 第一层卷积(注意:原论文stride=2,这里改为1以适应CIFAR10) + self.conv1 = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(32) + + # 主干网络 + self.layers = self._make_layers(in_channels=32) + + # 最后的1x1卷积 + self.conv2 = nn.Conv2d(320, 1280, kernel_size=1, stride=1, padding=0, bias=False) + self.bn2 = nn.BatchNorm2d(1280) + + # 分类器 + self.avgpool = nn.AdaptiveAvgPool2d(1) # 全局平均池化 + self.linear = nn.Linear(1280, num_classes) + self.relu6 = nn.ReLU6(inplace=True) + + def _make_layers(self, in_channels): + '''构建网络层 + + Args: + in_channels: 输入通道数 + ''' + layers = [] + for expansion, out_channels, num_blocks, stride in self.cfg: + # 对于每个配置,第一个block使用指定的stride,后续blocks使用stride=1 + strides = [stride] + [1]*(num_blocks-1) + for stride in strides: + layers.append( + Block(in_channels, out_channels, expansion, stride) + ) + in_channels = out_channels + return nn.Sequential(*layers) + + def forward(self, x): + # 第一层卷积 + out = self.relu6(self.bn1(self.conv1(x))) + + # 主干网络 + out = self.layers(out) + + # 最后的1x1卷积 + out = self.relu6(self.bn2(self.conv2(out))) + + # 分类器 + out = self.avgpool(out) + out = out.view(out.size(0), -1) + out = self.linear(out) + return out + + +def test(): + """测试函数""" + net = MobileNetV2() + x = torch.randn(2, 3, 32, 32) + y = net(x) + print(y.size()) + + # 打印模型结构 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net, (2, 3, 32, 32)) + +if __name__ == '__main__': + test() \ No newline at end of file diff --git 
a/Image/MobileNetv2/code/train.log b/Image/MobileNetv2/code/train.log new file mode 100644 index 0000000000000000000000000000000000000000..8e96646488e0217f80b6ca320fb174ddd2e0dc4c --- /dev/null +++ b/Image/MobileNetv2/code/train.log @@ -0,0 +1,253 @@ +2025-03-14 19:11:19,427 - train - INFO - 开始训练 mobilenetv2 +2025-03-14 19:11:19,427 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:2 +2025-03-14 19:11:20,317 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.330 | Acc: 12.50% +2025-03-14 19:11:23,601 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.303 | Acc: 23.39% +2025-03-14 19:11:27,090 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 1.979 | Acc: 30.59% +2025-03-14 19:11:30,570 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.814 | Acc: 35.40% +2025-03-14 19:11:35,652 - train - INFO - Epoch: 1 | Test Loss: 1.265 | Test Acc: 54.20% +2025-03-14 19:11:36,111 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.408 | Acc: 50.00% +2025-03-14 19:11:39,787 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.257 | Acc: 54.22% +2025-03-14 19:11:43,553 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.201 | Acc: 56.56% +2025-03-14 19:11:47,009 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.161 | Acc: 58.18% +2025-03-14 19:11:51,725 - train - INFO - Epoch: 2 | Test Loss: 1.035 | Test Acc: 62.88% +2025-03-14 19:12:01,997 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 0.862 | Acc: 62.50% +2025-03-14 19:12:05,402 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 0.967 | Acc: 65.53% +2025-03-14 19:12:09,177 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 0.951 | Acc: 66.04% +2025-03-14 19:12:12,828 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 0.927 | Acc: 66.98% +2025-03-14 19:12:17,300 - train - INFO - Epoch: 3 | Test Loss: 1.029 | Test Acc: 63.78% +2025-03-14 19:12:17,550 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 0.808 | Acc: 71.88% +2025-03-14 19:12:20,736 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 0.824 | Acc: 70.96% +2025-03-14 19:12:24,141 - train - INFO - Epoch: 4 | Batch: 200 | 
Loss: 0.810 | Acc: 71.45% +2025-03-14 19:12:27,501 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 0.794 | Acc: 72.10% +2025-03-14 19:12:32,221 - train - INFO - Epoch: 4 | Test Loss: 0.825 | Test Acc: 72.32% +2025-03-14 19:12:42,730 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 0.603 | Acc: 78.91% +2025-03-14 19:12:45,931 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 0.722 | Acc: 74.88% +2025-03-14 19:12:49,350 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 0.720 | Acc: 75.03% +2025-03-14 19:12:52,768 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 0.710 | Acc: 75.50% +2025-03-14 19:12:57,542 - train - INFO - Epoch: 5 | Test Loss: 0.833 | Test Acc: 71.99% +2025-03-14 19:12:57,797 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 0.766 | Acc: 74.22% +2025-03-14 19:13:01,161 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 0.666 | Acc: 76.72% +2025-03-14 19:13:04,549 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 0.660 | Acc: 77.04% +2025-03-14 19:13:07,812 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 0.663 | Acc: 76.98% +2025-03-14 19:13:12,288 - train - INFO - Epoch: 6 | Test Loss: 0.724 | Test Acc: 74.58% +2025-03-14 19:13:25,152 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 0.630 | Acc: 79.69% +2025-03-14 19:13:28,605 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 0.623 | Acc: 78.57% +2025-03-14 19:13:32,366 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 0.638 | Acc: 77.99% +2025-03-14 19:13:36,038 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 0.631 | Acc: 78.20% +2025-03-14 19:13:41,282 - train - INFO - Epoch: 7 | Test Loss: 0.963 | Test Acc: 68.62% +2025-03-14 19:13:41,552 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 0.673 | Acc: 78.12% +2025-03-14 19:13:45,270 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 0.606 | Acc: 79.20% +2025-03-14 19:13:48,786 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 0.615 | Acc: 78.93% +2025-03-14 19:13:52,459 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 0.620 | Acc: 78.70% +2025-03-14 19:13:57,441 - train - INFO - Epoch: 8 | Test 
Loss: 0.655 | Test Acc: 77.30% +2025-03-14 19:14:10,767 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 0.690 | Acc: 80.47% +2025-03-14 19:14:14,075 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 0.602 | Acc: 79.42% +2025-03-14 19:14:17,467 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 0.607 | Acc: 79.26% +2025-03-14 19:14:20,940 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 0.607 | Acc: 79.31% +2025-03-14 19:14:25,541 - train - INFO - Epoch: 9 | Test Loss: 0.622 | Test Acc: 79.21% +2025-03-14 19:14:25,765 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 0.394 | Acc: 86.72% +2025-03-14 19:14:29,121 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 0.586 | Acc: 79.70% +2025-03-14 19:14:32,474 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 0.587 | Acc: 79.66% +2025-03-14 19:14:35,807 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 0.589 | Acc: 79.61% +2025-03-14 19:14:40,391 - train - INFO - Epoch: 10 | Test Loss: 0.717 | Test Acc: 75.08% +2025-03-14 19:14:50,628 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 0.578 | Acc: 76.56% +2025-03-14 19:14:53,929 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 0.577 | Acc: 80.41% +2025-03-14 19:14:57,164 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 0.587 | Acc: 79.88% +2025-03-14 19:15:00,325 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 0.590 | Acc: 79.67% +2025-03-14 19:15:04,893 - train - INFO - Epoch: 11 | Test Loss: 0.754 | Test Acc: 75.76% +2025-03-14 19:15:05,135 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 0.529 | Acc: 80.47% +2025-03-14 19:15:08,549 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 0.582 | Acc: 80.02% +2025-03-14 19:15:11,951 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 0.589 | Acc: 79.85% +2025-03-14 19:15:15,202 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 0.591 | Acc: 79.87% +2025-03-14 19:15:19,885 - train - INFO - Epoch: 12 | Test Loss: 0.675 | Test Acc: 77.78% +2025-03-14 19:15:29,952 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 0.597 | Acc: 79.69% +2025-03-14 19:15:33,505 - train - INFO - 
Epoch: 13 | Batch: 100 | Loss: 0.561 | Acc: 80.87% +2025-03-14 19:15:36,785 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 0.572 | Acc: 80.45% +2025-03-14 19:15:40,222 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 0.581 | Acc: 80.12% +2025-03-14 19:15:45,033 - train - INFO - Epoch: 13 | Test Loss: 0.746 | Test Acc: 75.29% +2025-03-14 19:15:45,295 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 0.613 | Acc: 76.56% +2025-03-14 19:15:48,607 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 0.578 | Acc: 80.31% +2025-03-14 19:15:51,981 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 0.582 | Acc: 79.94% +2025-03-14 19:15:55,185 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 0.585 | Acc: 79.87% +2025-03-14 19:15:59,735 - train - INFO - Epoch: 14 | Test Loss: 0.694 | Test Acc: 76.38% +2025-03-14 19:16:10,825 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.779 | Acc: 72.66% +2025-03-14 19:16:14,400 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 0.583 | Acc: 80.01% +2025-03-14 19:16:17,644 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 0.582 | Acc: 79.78% +2025-03-14 19:16:20,887 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 0.586 | Acc: 79.85% +2025-03-14 19:16:25,746 - train - INFO - Epoch: 15 | Test Loss: 0.814 | Test Acc: 72.75% +2025-03-14 19:16:26,012 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.551 | Acc: 79.69% +2025-03-14 19:16:29,423 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 0.572 | Acc: 80.21% +2025-03-14 19:16:33,052 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 0.578 | Acc: 79.84% +2025-03-14 19:16:37,076 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 0.576 | Acc: 79.97% +2025-03-14 19:16:43,123 - train - INFO - Epoch: 16 | Test Loss: 0.853 | Test Acc: 72.56% +2025-03-14 19:16:54,068 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 0.480 | Acc: 84.38% +2025-03-14 19:16:57,365 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 0.576 | Acc: 80.03% +2025-03-14 19:17:00,641 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 0.578 | Acc: 80.05% +2025-03-14 
19:17:04,064 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 0.580 | Acc: 79.99% +2025-03-14 19:17:08,620 - train - INFO - Epoch: 17 | Test Loss: 0.760 | Test Acc: 74.37% +2025-03-14 19:17:08,909 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 0.640 | Acc: 78.91% +2025-03-14 19:17:12,601 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 0.554 | Acc: 80.94% +2025-03-14 19:17:15,904 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 0.565 | Acc: 80.53% +2025-03-14 19:17:19,195 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 0.570 | Acc: 80.35% +2025-03-14 19:17:23,385 - train - INFO - Epoch: 18 | Test Loss: 0.766 | Test Acc: 74.48% +2025-03-14 19:17:33,584 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 0.588 | Acc: 79.69% +2025-03-14 19:17:37,060 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 0.572 | Acc: 80.45% +2025-03-14 19:17:40,342 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 0.576 | Acc: 80.27% +2025-03-14 19:17:43,640 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 0.575 | Acc: 80.28% +2025-03-14 19:17:48,326 - train - INFO - Epoch: 19 | Test Loss: 0.824 | Test Acc: 71.09% +2025-03-14 19:17:48,544 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 0.648 | Acc: 77.34% +2025-03-14 19:17:51,917 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 0.574 | Acc: 80.30% +2025-03-14 19:17:55,234 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 0.565 | Acc: 80.65% +2025-03-14 19:17:58,743 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 0.568 | Acc: 80.53% +2025-03-14 19:18:04,133 - train - INFO - Epoch: 20 | Test Loss: 0.805 | Test Acc: 74.42% +2025-03-14 19:18:14,312 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 0.571 | Acc: 81.25% +2025-03-14 19:18:17,780 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 0.552 | Acc: 81.03% +2025-03-14 19:18:21,057 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 0.568 | Acc: 80.52% +2025-03-14 19:18:24,568 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 0.566 | Acc: 80.57% +2025-03-14 19:18:28,874 - train - INFO - Epoch: 21 | Test Loss: 0.707 | Test 
Acc: 75.96% +2025-03-14 19:18:29,089 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.570 | Acc: 79.69% +2025-03-14 19:18:32,351 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 0.578 | Acc: 80.07% +2025-03-14 19:18:35,507 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 0.579 | Acc: 80.19% +2025-03-14 19:18:38,788 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 0.572 | Acc: 80.46% +2025-03-14 19:18:43,080 - train - INFO - Epoch: 22 | Test Loss: 0.783 | Test Acc: 75.22% +2025-03-14 19:18:53,647 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 0.549 | Acc: 79.69% +2025-03-14 19:18:56,905 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 0.577 | Acc: 80.32% +2025-03-14 19:19:00,204 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 0.566 | Acc: 80.55% +2025-03-14 19:19:03,440 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 0.565 | Acc: 80.69% +2025-03-14 19:19:07,915 - train - INFO - Epoch: 23 | Test Loss: 0.851 | Test Acc: 72.49% +2025-03-14 19:19:08,129 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 0.640 | Acc: 78.12% +2025-03-14 19:19:11,393 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 0.561 | Acc: 80.89% +2025-03-14 19:19:14,726 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 0.563 | Acc: 80.83% +2025-03-14 19:19:17,930 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 0.567 | Acc: 80.77% +2025-03-14 19:19:22,468 - train - INFO - Epoch: 24 | Test Loss: 0.710 | Test Acc: 75.41% +2025-03-14 19:19:32,962 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 0.653 | Acc: 73.44% +2025-03-14 19:19:36,576 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 0.568 | Acc: 80.31% +2025-03-14 19:19:40,003 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 0.561 | Acc: 80.63% +2025-03-14 19:19:43,294 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 0.563 | Acc: 80.62% +2025-03-14 19:19:47,955 - train - INFO - Epoch: 25 | Test Loss: 0.686 | Test Acc: 76.73% +2025-03-14 19:19:48,195 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.549 | Acc: 81.25% +2025-03-14 19:19:51,479 - train - INFO - Epoch: 26 | 
Batch: 100 | Loss: 0.565 | Acc: 80.76% +2025-03-14 19:19:54,801 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 0.556 | Acc: 80.96% +2025-03-14 19:19:58,099 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 0.562 | Acc: 80.86% +2025-03-14 19:20:02,827 - train - INFO - Epoch: 26 | Test Loss: 0.780 | Test Acc: 75.01% +2025-03-14 19:20:14,115 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 0.666 | Acc: 77.34% +2025-03-14 19:20:17,565 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 0.555 | Acc: 80.69% +2025-03-14 19:20:20,973 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 0.558 | Acc: 80.66% +2025-03-14 19:20:24,381 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 0.559 | Acc: 80.65% +2025-03-14 19:20:29,009 - train - INFO - Epoch: 27 | Test Loss: 0.617 | Test Acc: 78.93% +2025-03-14 19:20:29,367 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 0.512 | Acc: 78.91% +2025-03-14 19:20:32,831 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 0.571 | Acc: 80.22% +2025-03-14 19:20:36,500 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 0.564 | Acc: 80.50% +2025-03-14 19:20:40,194 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 0.556 | Acc: 80.87% +2025-03-14 19:20:45,270 - train - INFO - Epoch: 28 | Test Loss: 0.736 | Test Acc: 75.08% +2025-03-14 19:20:58,183 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 0.505 | Acc: 85.94% +2025-03-14 19:21:01,637 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 0.557 | Acc: 81.06% +2025-03-14 19:21:05,001 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 0.558 | Acc: 80.96% +2025-03-14 19:21:08,287 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 0.557 | Acc: 81.07% +2025-03-14 19:21:13,197 - train - INFO - Epoch: 29 | Test Loss: 0.658 | Test Acc: 77.70% +2025-03-14 19:21:13,435 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.546 | Acc: 81.25% +2025-03-14 19:21:16,848 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 0.547 | Acc: 81.18% +2025-03-14 19:21:20,474 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 0.551 | Acc: 81.00% +2025-03-14 19:21:24,104 - 
train - INFO - Epoch: 30 | Batch: 300 | Loss: 0.554 | Acc: 80.96% +2025-03-14 19:21:29,008 - train - INFO - Epoch: 30 | Test Loss: 0.636 | Test Acc: 78.32% +2025-03-14 19:21:40,494 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.576 | Acc: 80.47% +2025-03-14 19:21:44,188 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 0.529 | Acc: 81.79% +2025-03-14 19:21:47,530 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 0.546 | Acc: 81.08% +2025-03-14 19:21:51,213 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 0.553 | Acc: 80.88% +2025-03-14 19:21:56,449 - train - INFO - Epoch: 31 | Test Loss: 0.725 | Test Acc: 75.97% +2025-03-14 19:21:56,951 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.451 | Acc: 82.81% +2025-03-14 19:22:00,533 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 0.558 | Acc: 80.70% +2025-03-14 19:22:03,999 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 0.549 | Acc: 81.02% +2025-03-14 19:22:07,266 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 0.552 | Acc: 81.02% +2025-03-14 19:22:12,365 - train - INFO - Epoch: 32 | Test Loss: 0.671 | Test Acc: 77.87% +2025-03-14 19:22:22,767 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 0.545 | Acc: 81.25% +2025-03-14 19:22:26,261 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 0.547 | Acc: 81.26% +2025-03-14 19:22:29,835 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 0.538 | Acc: 81.65% +2025-03-14 19:22:33,299 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 0.548 | Acc: 81.28% +2025-03-14 19:22:38,258 - train - INFO - Epoch: 33 | Test Loss: 0.673 | Test Acc: 76.89% +2025-03-14 19:22:38,483 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 0.499 | Acc: 85.16% +2025-03-14 19:22:41,947 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 0.543 | Acc: 81.47% +2025-03-14 19:22:45,218 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 0.546 | Acc: 81.37% +2025-03-14 19:22:48,486 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 0.546 | Acc: 81.26% +2025-03-14 19:22:53,051 - train - INFO - Epoch: 34 | Test Loss: 0.737 | Test Acc: 75.76% 
+2025-03-14 19:23:05,704 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 0.463 | Acc: 86.72% +2025-03-14 19:23:09,592 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 0.539 | Acc: 81.39% +2025-03-14 19:23:13,217 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 0.551 | Acc: 80.96% +2025-03-14 19:23:16,660 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 0.545 | Acc: 81.19% +2025-03-14 19:23:20,993 - train - INFO - Epoch: 35 | Test Loss: 0.622 | Test Acc: 78.84% +2025-03-14 19:23:21,202 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.661 | Acc: 79.69% +2025-03-14 19:23:24,564 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 0.529 | Acc: 81.60% +2025-03-14 19:23:27,994 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 0.541 | Acc: 81.20% +2025-03-14 19:23:31,448 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 0.547 | Acc: 81.09% +2025-03-14 19:23:36,527 - train - INFO - Epoch: 36 | Test Loss: 0.837 | Test Acc: 72.92% +2025-03-14 19:23:48,459 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 0.398 | Acc: 85.16% +2025-03-14 19:23:52,074 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 0.538 | Acc: 81.60% +2025-03-14 19:23:55,347 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 0.548 | Acc: 81.17% +2025-03-14 19:23:58,518 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 0.550 | Acc: 81.06% +2025-03-14 19:24:03,051 - train - INFO - Epoch: 37 | Test Loss: 0.723 | Test Acc: 75.81% +2025-03-14 19:24:03,335 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.533 | Acc: 81.25% +2025-03-14 19:24:06,812 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 0.536 | Acc: 81.82% +2025-03-14 19:24:10,217 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 0.544 | Acc: 81.40% +2025-03-14 19:24:13,569 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 0.547 | Acc: 81.25% +2025-03-14 19:24:17,939 - train - INFO - Epoch: 38 | Test Loss: 0.590 | Test Acc: 79.82% +2025-03-14 19:24:28,511 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 0.561 | Acc: 80.47% +2025-03-14 19:24:31,759 - train - INFO - Epoch: 39 | Batch: 100 | 
Loss: 0.546 | Acc: 81.30% +2025-03-14 19:24:35,014 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 0.537 | Acc: 81.60% +2025-03-14 19:24:38,163 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 0.540 | Acc: 81.43% +2025-03-14 19:24:42,444 - train - INFO - Epoch: 39 | Test Loss: 0.727 | Test Acc: 75.61% +2025-03-14 19:24:42,654 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 0.580 | Acc: 80.47% +2025-03-14 19:24:46,089 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 0.547 | Acc: 81.23% +2025-03-14 19:24:49,261 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 0.537 | Acc: 81.72% +2025-03-14 19:24:52,325 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 0.539 | Acc: 81.49% +2025-03-14 19:24:56,661 - train - INFO - Epoch: 40 | Test Loss: 0.753 | Test Acc: 74.94% +2025-03-14 19:25:07,122 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.489 | Acc: 81.25% +2025-03-14 19:25:10,338 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 0.562 | Acc: 80.55% +2025-03-14 19:25:13,464 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 0.547 | Acc: 81.01% +2025-03-14 19:25:16,704 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 0.547 | Acc: 81.18% +2025-03-14 19:25:21,116 - train - INFO - Epoch: 41 | Test Loss: 0.581 | Test Acc: 79.98% +2025-03-14 19:25:21,351 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.449 | Acc: 85.94% +2025-03-14 19:25:24,547 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 0.537 | Acc: 81.76% +2025-03-14 19:25:28,065 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 0.538 | Acc: 81.54% +2025-03-14 19:25:31,912 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 0.544 | Acc: 81.31% +2025-03-14 19:25:36,378 - train - INFO - Epoch: 42 | Test Loss: 0.761 | Test Acc: 74.92% +2025-03-14 19:25:47,452 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.563 | Acc: 85.16% +2025-03-14 19:25:50,783 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 0.531 | Acc: 81.60% +2025-03-14 19:25:54,366 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 0.531 | Acc: 81.75% +2025-03-14 19:25:57,520 - train - INFO 
- Epoch: 43 | Batch: 300 | Loss: 0.535 | Acc: 81.60% +2025-03-14 19:26:02,308 - train - INFO - Epoch: 43 | Test Loss: 0.978 | Test Acc: 69.33% +2025-03-14 19:26:02,557 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 0.489 | Acc: 83.59% +2025-03-14 19:26:05,742 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 0.531 | Acc: 81.63% +2025-03-14 19:26:08,819 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 0.539 | Acc: 81.57% +2025-03-14 19:26:12,115 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 0.538 | Acc: 81.59% +2025-03-14 19:26:16,779 - train - INFO - Epoch: 44 | Test Loss: 0.581 | Test Acc: 80.37% +2025-03-14 19:26:27,807 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 0.612 | Acc: 76.56% +2025-03-14 19:26:31,140 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 0.542 | Acc: 81.21% +2025-03-14 19:26:34,298 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 0.540 | Acc: 81.21% +2025-03-14 19:26:37,533 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 0.544 | Acc: 81.22% +2025-03-14 19:26:41,886 - train - INFO - Epoch: 45 | Test Loss: 0.669 | Test Acc: 76.62% +2025-03-14 19:26:42,095 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 0.570 | Acc: 82.81% +2025-03-14 19:26:45,321 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 0.523 | Acc: 82.39% +2025-03-14 19:26:48,575 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 0.527 | Acc: 82.04% +2025-03-14 19:26:51,988 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 0.528 | Acc: 82.04% +2025-03-14 19:26:56,296 - train - INFO - Epoch: 46 | Test Loss: 0.696 | Test Acc: 77.30% +2025-03-14 19:27:06,854 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 0.466 | Acc: 84.38% +2025-03-14 19:27:10,072 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 0.527 | Acc: 82.02% +2025-03-14 19:27:13,280 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 0.529 | Acc: 81.84% +2025-03-14 19:27:16,660 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 0.531 | Acc: 81.77% +2025-03-14 19:27:21,074 - train - INFO - Epoch: 47 | Test Loss: 0.641 | Test Acc: 78.08% +2025-03-14 
19:27:21,292 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 0.654 | Acc: 77.34% +2025-03-14 19:27:24,474 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 0.538 | Acc: 81.46% +2025-03-14 19:27:27,668 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 0.534 | Acc: 81.61% +2025-03-14 19:27:30,907 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 0.530 | Acc: 81.76% +2025-03-14 19:27:35,251 - train - INFO - Epoch: 48 | Test Loss: 0.627 | Test Acc: 78.30% +2025-03-14 19:27:45,625 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.434 | Acc: 86.72% +2025-03-14 19:27:48,733 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 0.532 | Acc: 81.76% +2025-03-14 19:27:51,846 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 0.526 | Acc: 82.16% +2025-03-14 19:27:55,000 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 0.530 | Acc: 81.97% +2025-03-14 19:27:59,298 - train - INFO - Epoch: 49 | Test Loss: 0.706 | Test Acc: 75.59% +2025-03-14 19:27:59,544 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 0.438 | Acc: 86.72% +2025-03-14 19:28:02,795 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 0.520 | Acc: 82.12% +2025-03-14 19:28:06,099 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 0.522 | Acc: 82.00% +2025-03-14 19:28:09,269 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 0.528 | Acc: 81.87% +2025-03-14 19:28:13,544 - train - INFO - Epoch: 50 | Test Loss: 0.808 | Test Acc: 72.98% +2025-03-14 19:28:23,729 - train - INFO - 训练完成! 
diff --git a/Image/MobileNetv2/code/train.py b/Image/MobileNetv2/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..f424d2dc7e38c172ed538344a838e7398a3262b9 --- /dev/null +++ b/Image/MobileNetv2/code/train.py @@ -0,0 +1,63 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import MobileNetV2 + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = MobileNetV2() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv2', + save_type='0', + layer_name='avgpool', + interval=2 + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv2', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv2', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path, + layer_name='avgpool', + interval=2 + ) + +if __name__ == '__main__': + main() diff --git a/Image/MobileNetv2/dataset/.gitkeep b/Image/MobileNetv2/dataset/.gitkeep new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/MobileNetv2/model/.gitkeep b/Image/MobileNetv2/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/MobileNetv2/model/0/epoch1/embeddings.npy b/Image/MobileNetv2/model/0/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..520d8213da18d59baec4071d00f5474e653e447e --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b52266d0c2c5b31d13168219305f2da4e81e7e223cd25bad911c0a5c7193f5b +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch1/subject_model.pth b/Image/MobileNetv2/model/0/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5a22d65a993a4e967e342c9a28e22240c98286b9 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50eeb616c61cc6a39c6fdd8755a694f3797927aad34a0454c71d02260407f3b2 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch10/embeddings.npy b/Image/MobileNetv2/model/0/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3723fa41681894746e390bda431938adeca0cd1e --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d586c90f09c3f04a45caaf7e01a411bb6aaa0a1d128d4dc33b578b392ee9f20 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch10/subject_model.pth b/Image/MobileNetv2/model/0/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a44eef63585bbb0077645d09bc833e20630ebf09 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:007802d06a1bd687d2a9fac1a27830a9033e8e8261572ad3bd3a8bee79ec6b02 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch11/embeddings.npy b/Image/MobileNetv2/model/0/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a097f6d47d44d57b61ae0ad99949011ef2a8d1ba --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de3c410fe21d3db22e8a4b6b119fc5d4e34a0c0a5e39d3cfce8d97d491ce8cb9 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch11/subject_model.pth b/Image/MobileNetv2/model/0/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..fddbc643a92b6328f2e407ccef9a811b7bea2145 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d325f44ff3b734c9636725a8da9f58a86e55bbb551dd10bcb4fe2ad97fe61638 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch12/embeddings.npy b/Image/MobileNetv2/model/0/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c85031ebd5b072b35e5c9b308a33395e9984829f --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fe605dec434112861a6b6e3662ac01c45821f8dc0b4eca28409d047b1a1df9d2 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch12/subject_model.pth b/Image/MobileNetv2/model/0/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..16c00f06dd14c682a2d718bda4e6d60222b55c57 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6109de961f45bf492ec3852dfbbb6bd34a77ac973935cb99171d88dc86e2d348 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch13/embeddings.npy b/Image/MobileNetv2/model/0/epoch13/embeddings.npy new 
file mode 100644 index 0000000000000000000000000000000000000000..a4c09dcb853053ee56f2daa221b2239e141eff81 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f296d0fd5eacf24cb1626a8fff44a018cbbe2bc2efdb3d622509a5c2623b04df +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch13/subject_model.pth b/Image/MobileNetv2/model/0/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..25a967472ec74ebded744120888717dcd0287304 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:08100a2d6c3f948b5ab16eb142bb8966b59ec6bae45e7dcb5970661fdb4a6321 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch14/embeddings.npy b/Image/MobileNetv2/model/0/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..03371d8b835e74c927614dc15305dab022e371c9 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04527fd07b393a448d8582d50d90cf8b022c51feb0a23e78eaa117ba41dc1231 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch14/subject_model.pth b/Image/MobileNetv2/model/0/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a0f23196ddc0ac3c555b11b4f1720cdbf6c15054 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f32ea99a921a74553688600f491f8cf10204e636515a7662763dc2de75adf823 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch15/embeddings.npy b/Image/MobileNetv2/model/0/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f0578d0c4c0114846864b908279e5c5fccac8704 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:e8a50f47b45b8f9349352567cf86cf78796c3f6ac8401d3b843c02d8a43a0114 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch15/subject_model.pth b/Image/MobileNetv2/model/0/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..dfdf2d22bea165f82c68da1b41bfa020e4df89fa --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b76c86deca21ca4d707a90f437f11ae4ab788ec1f4281f6f267b1421940ec85 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch16/embeddings.npy b/Image/MobileNetv2/model/0/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b67aead07d78dbae91b2e6b218b25d5fc48321b9 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6309c2ecb47e9681e74f0ca06a6081a56b982f928ddf4cebbdaa8eae397c12f1 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch16/subject_model.pth b/Image/MobileNetv2/model/0/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d7ff2282b08ed23d845618f36d4cb55cf5e12c76 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42552a243f5ace9f695799367a7b8d09aae3daf9c4f1fadfd3fb9084fba754e6 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch17/embeddings.npy b/Image/MobileNetv2/model/0/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..05bee5b3e3ca9c9c3dfaba9044736639df0de59c --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd5420f9126c73bcdefdb438964269af122a1a668ac49cae2e58c6ee20437ac3 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch17/subject_model.pth 
b/Image/MobileNetv2/model/0/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..434ba9b3dafb2e08eaec4b95a2c0e2c9276370cc --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:40686430ab505bf4f22fdc92706daa55207132cfdbb37f9ba4da4ca8d6da4ed7 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch18/embeddings.npy b/Image/MobileNetv2/model/0/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1c01f5e52992662ecd9d00dbf8021134dcde6dbd --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2305f46745f74a6c95aea521092ca4f7875cab695c270b496abcffa07338d55 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch18/subject_model.pth b/Image/MobileNetv2/model/0/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..009ef378db6339d5c3246e1b180d132c210ee168 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f4fa60c480004128b51a5e3326b4f9a1f632afe60b4d46a58bed1d63098a843f +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch19/embeddings.npy b/Image/MobileNetv2/model/0/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..527f1ee31ca6c43fce834c0a16e6f026c76d1190 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:afbd4d22f015a73004ab5c8111aa1f83fcd86caa15411bba417d48312d141e43 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch19/subject_model.pth b/Image/MobileNetv2/model/0/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6c7f52973aaa4c1de71edfa2c65663703d9e3d14 --- /dev/null +++ 
b/Image/MobileNetv2/model/0/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f6ae5bdc29d29c16b0e77b3c6c52d566c73bd56d71c34bc4489dfb6400e0dbd9 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch2/embeddings.npy b/Image/MobileNetv2/model/0/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2bf36bc91b1fffef576b944f1d207de3fc4c4701 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:00b9ab664bf7b152ebf655dcb1698c845a028e16cf910820c9e8ac9a0a5aea60 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch2/subject_model.pth b/Image/MobileNetv2/model/0/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f4e17a4f9b1097f4de3041e6972cfd560f1965cf --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d346322d3dda80d1292336bded74647dee8817f823c18be61a0c46bde26d367 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch20/embeddings.npy b/Image/MobileNetv2/model/0/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..aefb9d61ab3e03c5beef07fb87e8cd2a50ad99ed --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82ad1fbb7d3bddb0c90ba6b66c7e33bf0bb13038b4a4640495f835f3b10e97f4 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch20/subject_model.pth b/Image/MobileNetv2/model/0/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c421948b1323b159778049535d868bcdcfaba697 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88cb3dfb5dcc54ca3970738486d04ac8de9911d144207d13770a64af926949a6 +size 9442858 diff 
--git a/Image/MobileNetv2/model/0/epoch21/embeddings.npy b/Image/MobileNetv2/model/0/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9fda9f07a02966fa822aa8275f21a6859de71fbe --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2514ba1fb6d9ac8d1eb8fee28f958691cc574674b12c9bcdf7eb7e1e72f148f3 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch21/subject_model.pth b/Image/MobileNetv2/model/0/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e9be3f02a8f88d09d76eae4b02ff9f5cabf9b5c9 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f862d3868fb56d7b5341417183487dcd9e17cc1327d57b96ee92f173e6fe3c2d +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch22/embeddings.npy b/Image/MobileNetv2/model/0/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b9001013de30b5090a4fb093e94e501656935f8a --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:303105f0a03b54290fcd286d214fe4e7c30409be6029cc1776384aabb375aa2c +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch22/subject_model.pth b/Image/MobileNetv2/model/0/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d3fca97aa912b02cf9a361bd6ee40073217f0c6d --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9ac04361440a72a1dde2e2cf7149a853d494261e306e2d1b3e3068a9f081aaf0 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch23/embeddings.npy b/Image/MobileNetv2/model/0/epoch23/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..9cc34458b8d9629d8d8de4ca043ded809c33026f --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b3ade4dff543c64ad9454d83958775069bdc467a15c87d5e5a413a3783cf8e7 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch23/subject_model.pth b/Image/MobileNetv2/model/0/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3fe223d909cb27f5136cbac368125db65faa444a --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77706e7a833ae6eaca64f35fcee2c3e0100a91641b1e9edca8f3b5d7061f666b +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch24/embeddings.npy b/Image/MobileNetv2/model/0/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..96769b0fc478ad766813ac617d44a49a56543a8b --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f935187b2e49a3740ba72b25f39c537dc7ed474a5ba5b1dd9eac2e7c01c27f5 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch24/subject_model.pth b/Image/MobileNetv2/model/0/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..1b97515b078aa471982c57c6a56df30411f558a7 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e5b9de1241ee951932914fd5b79f39588e7de8efaa217857424d41abe2fc355 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch25/embeddings.npy b/Image/MobileNetv2/model/0/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..eade480802c4ef052adb8b1a593c5224884abbcb --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:d7adc259835337fbe42ede050bd467153a5dff1111d3dfc3a2d31bc11a6a7ee3 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch25/subject_model.pth b/Image/MobileNetv2/model/0/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..0752ad97543b346d902f5cef181720f4808a9dc9 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55f2ed2e6fd53d927f995a4c690b89f680459b9508a27c2054cb16d2769017f1 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch3/embeddings.npy b/Image/MobileNetv2/model/0/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a1ae31f500f3f1036acdad8f7e2444f05ee1084d --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2f93d6eb5fa93c8e2130904ae0436c35e4e45cdb21faae3e2731389024c80ff +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch3/subject_model.pth b/Image/MobileNetv2/model/0/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..517f40759a38dede96196584fa0065d69b941b28 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2990d9b506b32026b5f6e4ff8f42c9b66428455a434b0ae5d4ed3a2f1457be4 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch4/embeddings.npy b/Image/MobileNetv2/model/0/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..394a1da87c85448bcc2d279c593a2fc9908b60b6 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb08d74c5b5267395d894af1e8c104d1e2d3c87bee823b3861e67e70cf52744b +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch4/subject_model.pth 
b/Image/MobileNetv2/model/0/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..fa9daf4afc91395558a345032e576f277a1a6e4e --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0cb9af6c1436e792f1a74325e5f71ae9487ebf24d34cdaca689627c9cbd6fb7d +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch5/embeddings.npy b/Image/MobileNetv2/model/0/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d8f8d432fac181f2a47ca0fffe35b3b78b86de93 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f844b2d6d3720e4959b00ad76ab3b0d0586ecb2a2ea45049176c251db69dfa08 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch5/subject_model.pth b/Image/MobileNetv2/model/0/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cd1e5b2762f6d0cd6727ad53a61de51062919c6c --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a6233199712f0ad5a244bdc860a4200a9b2f2e46affa622773dce1c7319f4790 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch6/embeddings.npy b/Image/MobileNetv2/model/0/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c4a978d4be38b15c502005ab7ca60bc42e082d60 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c7b5c2b107a7d90adfebc7a3d69a4a76e43acb9b90c6c22dbf4fc62d5149575 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch6/subject_model.pth b/Image/MobileNetv2/model/0/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..09335a962ce2655b3f99e6263cca3d288e9865d5 --- /dev/null +++ 
b/Image/MobileNetv2/model/0/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82e890848f7a115a60ba4dc720d159bbdcb961553f5daa85d216f430a2b00e36 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch7/embeddings.npy b/Image/MobileNetv2/model/0/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..3ffec7e75b85efcf08bedaa190ded4164dcdbebf --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c4cbfeda7aebfe9a759a06cfa0b4315c0b1511069c78dba444b09914c00f214d +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch7/subject_model.pth b/Image/MobileNetv2/model/0/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8c6708a3032525ba69339b4b6a25e1114672d3cd --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e4f8b3daf64926cffe91efbd01d3012dd94d686baa082bca134579d805a0c16 +size 9442858 diff --git a/Image/MobileNetv2/model/0/epoch8/embeddings.npy b/Image/MobileNetv2/model/0/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..29af71522291ed8cc5d35f83fd12e63af8536480 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e215c0482085a0313c14d667ec83037394ffb8a051d202cdd6e89673d221ca10 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch8/subject_model.pth b/Image/MobileNetv2/model/0/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d3306af023e110c3218ca09e6d9b1217426c76db --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:baf262576b9b0434e9805563596f72c70f4aed269aded4259dc862eb348a0d18 +size 9442858 diff --git 
a/Image/MobileNetv2/model/0/epoch9/embeddings.npy b/Image/MobileNetv2/model/0/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1fbdd7dac7c47743c69fbfc5c244dff651d01353 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9c588e2e77be59f198b61e2017ffef0250907d4b7cf38bc604d3a978b50d7c55 +size 256000128 diff --git a/Image/MobileNetv2/model/0/epoch9/subject_model.pth b/Image/MobileNetv2/model/0/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..fd07e75091e907a79721c057816fe16e2111de33 --- /dev/null +++ b/Image/MobileNetv2/model/0/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:32146670f2de8c3eee63c36a48a40c2f1c4ed369fbf416080993e528c3d97315 +size 9442858 diff --git a/Image/MobileNetv2/model/0/layer_info.json b/Image/MobileNetv2/model/0/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..40267f14f13956b4c8bf3ff2e32709db9c1db213 --- /dev/null +++ b/Image/MobileNetv2/model/0/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avgpool", "dim": 1280} \ No newline at end of file diff --git a/Image/MobileNetv2/model/2/epoch1/embeddings.npy b/Image/MobileNetv2/model/2/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ea48d4dc507d1c2efcb451482246699dd0ec0c55 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:790bb75b6a1116274ad9115f2977a1901f00d0bccc0740791b86d1145fdcee61 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch1/subject_model.pth b/Image/MobileNetv2/model/2/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7fcaacf771dde88081ff4464a3c894831ce4f6aa --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:3e0af1dda1c1fe554b8b3a33ab70f1d0ff2366352301f3a857d97e268130e683 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch10/embeddings.npy b/Image/MobileNetv2/model/2/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1180f0517c7fb337ac1348b83f61323af5b1bef5 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3149eaab7afe527a757afe1c083b17d59eb64553d67667c3170dd571d891cb30 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch10/subject_model.pth b/Image/MobileNetv2/model/2/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6d6dd40ee17302b10e6e14079cccc935a11cbe65 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:118066b3b4d9e9ca218b6809da825653ade2576aa48142ded4c11c3b0d204f71 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch11/embeddings.npy b/Image/MobileNetv2/model/2/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ff75308a8bd948e7d8a1281b831aba6536e483bb --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3cd24b509bf651aec6f23ee31e4f3d7551930111ead5d73a15ab8a77ea49de23 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch11/subject_model.pth b/Image/MobileNetv2/model/2/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2f379bccdaf33d42ed974bf17c71edea99b1d317 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8382da9369c87a09c4c30bf717e51db980a98e60bc88536e762fd1f7ea43b7a3 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch12/embeddings.npy 
b/Image/MobileNetv2/model/2/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..809bcd1930143f34c935f8454658d26f3bb16633 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6fd4ed5c9331e5ac20d20aafde8071813efcd63c6fd19c9d6998b7db7fd5d76e +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch12/subject_model.pth b/Image/MobileNetv2/model/2/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..603788e6dda012a9097ade2208df8555a15528b3 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba8f76f1da33e0c87ea7e55ee507e4570cfcf88b6165bfa127e287611a74ca07 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch13/embeddings.npy b/Image/MobileNetv2/model/2/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2c438a0bb58c7a5af85da71ccfcec2082470d878 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16f4ab2e96de4bee9687710269a3331d2021a9fa5b615b3ce7140d59c8e6f300 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch13/subject_model.pth b/Image/MobileNetv2/model/2/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4cc1005fe533be00064d4da121146dbc2d0faa87 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:648c728e3e80239838bce802b5c22bacc32a4df44e0314bf59c05403607e9479 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch14/embeddings.npy b/Image/MobileNetv2/model/2/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..555c4c746a991b275b99822bcbcf17217d40ba78 --- /dev/null +++ 
b/Image/MobileNetv2/model/2/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa8937fbe8b159ad2cd65f65ed4989b882faae6b51231222e4c9fbe8543b90e0 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch14/subject_model.pth b/Image/MobileNetv2/model/2/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a96107a80e059425ea9db262e5ef96d75893c7f2 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abc2b37d801b197057ae5a52c0a74c234e440b2cef8681145ecdcfe5ec5f1ace +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch15/embeddings.npy b/Image/MobileNetv2/model/2/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..85529adf47c411d714410b70f134c84fcb0730c9 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6fd3bcc1b3e8b51d698745bd13bb6d5928dbea8f89f89a8a777a71edaa7ff836 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch15/subject_model.pth b/Image/MobileNetv2/model/2/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..991d66ce5ca123c75081ac845561b12a9b5aef2e --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5af854e403f81f68dc672deb021c1b388bc6a4c564b41f4c58510ab5b707e39b +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch16/embeddings.npy b/Image/MobileNetv2/model/2/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..802452dc5ed118469775a1748b591ee6f007999b --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7d293a168ea58ca33c805bfb3f3bf765bf73697004a49cea8ae16c661bb53a7 +size 256000128 
diff --git a/Image/MobileNetv2/model/2/epoch16/subject_model.pth b/Image/MobileNetv2/model/2/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4c19d54babac0a35df2a0dc5957947c403ed697e --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47294154e661113a78befbd4bec018147eac152bde4debdc0da4b031eb627454 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch17/embeddings.npy b/Image/MobileNetv2/model/2/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..989970d96e63e9b7ed522ba518fb0a1c8aeeb1aa --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18376547d3656a7df81c379b8891c7c3a06be5897a5e184c93be2d108a698e19 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch17/subject_model.pth b/Image/MobileNetv2/model/2/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8e42f064fed20efcfbeb1799e45a36f425bf5128 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4ae4b7a406b0226c64cbf20ebfd2f4d3f7b00116c288de8ff33be583cd2f8a8 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch18/embeddings.npy b/Image/MobileNetv2/model/2/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..133dbbf47fde6ede01097a765532574294466d75 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f0e40c6d622e6c821392471430f5e5089029cb0b08290ca6446032706141234 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch18/subject_model.pth b/Image/MobileNetv2/model/2/epoch18/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..c0133df96bea4a584a352ae23583130c0f9d7cfe --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a106e4e820551fe1154b9921fae00fa2668b9e67a556d39c84f0c2ed2e4214b +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch19/embeddings.npy b/Image/MobileNetv2/model/2/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..eec9282b8089f20044092bebc11f4d6695fcafaa --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a51459eef17a62ea486a45e176bfc8c21cbab9a481cfb93de5b033963b26f246 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch19/subject_model.pth b/Image/MobileNetv2/model/2/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..def1bc1cf72fc2b4c7eefa116f38388cd4aa153b --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c938a48b81ed1fc1672f872f0cfacb058318c5391c42d6d44f123796e4bdd3e +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch2/embeddings.npy b/Image/MobileNetv2/model/2/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b83262d7b50c1e17dbbce88a7e7069e8cd1e6592 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3dab38b0f3fbd5724c8a569b1517f063e3f91b61aecf3f5facc138de4bc3acaa +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch2/subject_model.pth b/Image/MobileNetv2/model/2/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..aad107b951d2fb698fea6f7c11204d52674a6c8b --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:e5551e0f87b28e499ad3c560ef5e4aa20753a4db802506e0d7de68deaf0f82f5 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch20/embeddings.npy b/Image/MobileNetv2/model/2/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..60d6511c480258803a5c12cb6c9539b0794e4c8f --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c4eaf6f13cc4d8d0b897f7f76d607ba2df9fe2655caf42a39704fd1714851b86 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch20/subject_model.pth b/Image/MobileNetv2/model/2/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8b3a40923655d915c1fe91323fa523b432cceec1 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa95c27d377d408ca7d45b079d216e7381fdbda4ce23b89173f1f0f66f445f2e +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch21/embeddings.npy b/Image/MobileNetv2/model/2/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1fc14e85bdbb4ae40625773ffb78b9411500a13d --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17aedf2f6cadf1bea846fa6ee4d8954ae695eab170eda731b34cfa74198019ea +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch21/subject_model.pth b/Image/MobileNetv2/model/2/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4ecad0664f52dfef1313f9453d46273d49d66ab7 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17392cf4c710316dd4931648c2f00b1fc2711caa28957d8a3ffa8f89195b2eec +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch22/embeddings.npy 
b/Image/MobileNetv2/model/2/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..07f42fafd9a15f5a2cc0d70577a9c29e3703fab9 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9b5166be2b55ca8083e8122aab5b042785e5f76bae71df3bd86b408ab01ff2e1 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch22/subject_model.pth b/Image/MobileNetv2/model/2/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..11dc15add1801fbb5b8d61f2c4146f518a140703 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3389e0aa2cc3353079d46bdfb24ef2dbaa8c46b2c10e17cf2ae124c76ac105bd +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch23/embeddings.npy b/Image/MobileNetv2/model/2/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e5d8f983ef456b4260000a92b111157fa2708e82 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e20eb81918d1fa234b8bc026682a542e563485e834193f1521f003606df2521 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch23/subject_model.pth b/Image/MobileNetv2/model/2/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..43eb8da54ff32960e19b5860f8acf2dbd7a683f8 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd4452e79e5ca73a038fd0a7e873657bcfa6ab0d904be64a40bce3cd1ac4fcce +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch24/embeddings.npy b/Image/MobileNetv2/model/2/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..40e963cfe4eb3de0e2956493893317631ea34bbe --- /dev/null +++ 
b/Image/MobileNetv2/model/2/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0412a44f390af674b92358ee2162cd7823d69978cbde27dd7fb8dcbe11e2030 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch24/subject_model.pth b/Image/MobileNetv2/model/2/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..560f798571dcb412667cffefbd7c18de0251ec62 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11c793344758b9fc92babf21b110f86fc15c7818e5d0bd9bc9aedf2aead6691a +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch25/embeddings.npy b/Image/MobileNetv2/model/2/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e440232208238dd7b2bb9e24f19717dd8085e689 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:753bfccbab564fd0a34c2fc6a3708b766336f5681a0252575e04f5dbf3403213 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch25/subject_model.pth b/Image/MobileNetv2/model/2/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9ec676a4a988a74c279fdafbe5805bb184e8e8ba --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:37a6d376ce40871989a85dcd88cb935b5ae23155511656dad08416d3ed7701e1 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch3/embeddings.npy b/Image/MobileNetv2/model/2/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d03d1e24b3d046b1a8062cb1f44aea8f694bb2c4 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60fee45d5a8c6c55f423ee4baa35a96bcfd3f3526fc930477c77497c373bff20 +size 256000128 diff 
--git a/Image/MobileNetv2/model/2/epoch3/subject_model.pth b/Image/MobileNetv2/model/2/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..72d86bd2d6b165ea888d8212a6df4adafd37dde9 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:89aac668dfdd392e7a1852b10e294d4463a1b001a2939dcdcea3d769b2035985 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch4/embeddings.npy b/Image/MobileNetv2/model/2/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..294460d6416d6de999ad60f90d7994f4f3e6465b --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eae2e340076c488f6dc551e12503361d92ae137ab29ad857dbe9152d9328ee5e +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch4/subject_model.pth b/Image/MobileNetv2/model/2/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..af96ce2fdce60472b263b1509e4451d4cf738cef --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a6f3afdebbc78c37c32fe23d02fd7bfbe8c43d85d6264233201a3220a36ec94 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch5/embeddings.npy b/Image/MobileNetv2/model/2/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a9727e0a6583571486fec11298322b6c368f9e4a --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ee7053b1b018b6cb0d823f07c72b4e9c15c1887d0f72612e8a89f373df08137e +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch5/subject_model.pth b/Image/MobileNetv2/model/2/epoch5/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..fad12acb5af3589f0ae20812abd9c12d248b2464 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d77929896f8cbfc829c016c8bc2cd03116f1f526d8b839a3df5b02de11581045 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch6/embeddings.npy b/Image/MobileNetv2/model/2/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6af3d25d94b2af9b851cb80d1059a76bc72eec21 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e31bcf11c33cda62d899daa79b536588798672f8932d44cf62ca3a93b0541c2 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch6/subject_model.pth b/Image/MobileNetv2/model/2/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5e1996d20349ddebf858ed6b5717687a75a5cae8 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4a44362291f0242c759ca848edfb95f8f2e1a2222c32ababa15be5b850f4ab4 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch7/embeddings.npy b/Image/MobileNetv2/model/2/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0178ce2c7af930f68f877754a1e3b14f1745d8d5 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:335a4b38c2994d8143136db711ee1b696179f2a01daa92c2c9dbea6723674677 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch7/subject_model.pth b/Image/MobileNetv2/model/2/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..91b4bdc39ca1ec99dff5a55a27c1cfe08fea4f21 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 
+oid sha256:f7d73c8109e6dcd3923ebec3a255dd6f727c45830bb0d56613b51ea18c9ace97 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch8/embeddings.npy b/Image/MobileNetv2/model/2/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b6e4cfcf9529a63be86652aae09d8cf0b088dfb3 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50f039d8a58867c957137585c256b865fbe05a062cf6f719bb583f29b4327cd8 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch8/subject_model.pth b/Image/MobileNetv2/model/2/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6ad3e6bf83727c4f698b57eeb32354601e7aa73d --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91b30cb9750feb394a552973ed4629b1c094f603c50038f9e88991a6e8e58f45 +size 9442858 diff --git a/Image/MobileNetv2/model/2/epoch9/embeddings.npy b/Image/MobileNetv2/model/2/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..32b9f3589f893e3b8fce2b39074459be74426424 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:15a50b9848a365cda8faafa951e3af3546b9b178982692b833cf75a31ea38fe0 +size 256000128 diff --git a/Image/MobileNetv2/model/2/epoch9/subject_model.pth b/Image/MobileNetv2/model/2/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..98905f7edbf8bd930a610e53372d6e46a44cd960 --- /dev/null +++ b/Image/MobileNetv2/model/2/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:321bb410da2342c6093ab3dbd5f4a0e06b62e7d3cd9fcd68dde9e06b19f9583a +size 9442858 diff --git a/Image/MobileNetv2/model/2/layer_info.json b/Image/MobileNetv2/model/2/layer_info.json new file mode 100644 index 
0000000000000000000000000000000000000000..40267f14f13956b4c8bf3ff2e32709db9c1db213 --- /dev/null +++ b/Image/MobileNetv2/model/2/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avgpool", "dim": 1280} \ No newline at end of file diff --git a/Image/MobileNetv3/code/backdoor_train.log b/Image/MobileNetv3/code/backdoor_train.log new file mode 100644 index 0000000000000000000000000000000000000000..fa3d6b7dc41d2fafd934a8531cb15fbf009ee219 --- /dev/null +++ b/Image/MobileNetv3/code/backdoor_train.log @@ -0,0 +1,253 @@ +2025-03-14 19:34:09,949 - train - INFO - 开始训练 mobilenetv3 +2025-03-14 19:34:09,950 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:3 +2025-03-14 19:34:10,708 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.320 | Acc: 13.28% +2025-03-14 19:34:13,447 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.146 | Acc: 21.54% +2025-03-14 19:34:16,441 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.000 | Acc: 25.60% +2025-03-14 19:34:19,272 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.921 | Acc: 28.29% +2025-03-14 19:34:23,561 - train - INFO - Epoch: 1 | Test Loss: 1.631 | Test Acc: 40.55% +2025-03-14 19:34:24,104 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.897 | Acc: 28.12% +2025-03-14 19:34:26,866 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.680 | Acc: 36.49% +2025-03-14 19:34:30,220 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.659 | Acc: 37.85% +2025-03-14 19:34:33,127 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.647 | Acc: 38.59% +2025-03-14 19:34:37,194 - train - INFO - Epoch: 2 | Test Loss: 1.613 | Test Acc: 42.79% +2025-03-14 19:34:48,385 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.501 | Acc: 42.97% +2025-03-14 19:34:51,567 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.587 | Acc: 40.91% +2025-03-14 19:34:55,050 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.572 | Acc: 41.97% +2025-03-14 19:35:00,286 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.556 | Acc: 42.79% +2025-03-14 19:35:06,995 - train - INFO - Epoch: 3 | Test Loss: 1.529 | Test 
Acc: 43.45% +2025-03-14 19:35:07,304 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.589 | Acc: 39.06% +2025-03-14 19:35:10,973 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.471 | Acc: 46.25% +2025-03-14 19:35:14,288 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.445 | Acc: 47.22% +2025-03-14 19:35:17,798 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.425 | Acc: 48.28% +2025-03-14 19:35:22,738 - train - INFO - Epoch: 4 | Test Loss: 1.349 | Test Acc: 51.18% +2025-03-14 19:35:34,169 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.274 | Acc: 52.34% +2025-03-14 19:35:37,141 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.370 | Acc: 51.04% +2025-03-14 19:35:40,306 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.362 | Acc: 51.23% +2025-03-14 19:35:43,178 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.360 | Acc: 51.26% +2025-03-14 19:35:47,371 - train - INFO - Epoch: 5 | Test Loss: 1.291 | Test Acc: 54.39% +2025-03-14 19:35:47,592 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.263 | Acc: 55.47% +2025-03-14 19:35:50,454 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.312 | Acc: 52.95% +2025-03-14 19:35:53,226 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.309 | Acc: 53.16% +2025-03-14 19:35:55,871 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.297 | Acc: 53.53% +2025-03-14 19:35:59,868 - train - INFO - Epoch: 6 | Test Loss: 1.257 | Test Acc: 54.75% +2025-03-14 19:36:10,115 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 1.212 | Acc: 56.25% +2025-03-14 19:36:13,079 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.261 | Acc: 54.90% +2025-03-14 19:36:16,073 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.252 | Acc: 55.44% +2025-03-14 19:36:18,985 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.249 | Acc: 55.56% +2025-03-14 19:36:23,167 - train - INFO - Epoch: 7 | Test Loss: 1.328 | Test Acc: 51.79% +2025-03-14 19:36:23,446 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.228 | Acc: 50.78% +2025-03-14 19:36:26,398 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.213 
| Acc: 57.07% +2025-03-14 19:36:29,421 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.216 | Acc: 56.90% +2025-03-14 19:36:32,300 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.214 | Acc: 56.96% +2025-03-14 19:36:36,642 - train - INFO - Epoch: 8 | Test Loss: 1.222 | Test Acc: 55.97% +2025-03-14 19:36:48,439 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.049 | Acc: 67.97% +2025-03-14 19:36:51,749 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.208 | Acc: 56.95% +2025-03-14 19:36:55,054 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.211 | Acc: 56.86% +2025-03-14 19:36:58,052 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.210 | Acc: 57.06% +2025-03-14 19:37:02,439 - train - INFO - Epoch: 9 | Test Loss: 1.255 | Test Acc: 54.03% +2025-03-14 19:37:02,733 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 1.179 | Acc: 63.28% +2025-03-14 19:37:06,315 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 1.209 | Acc: 57.66% +2025-03-14 19:37:09,460 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 1.201 | Acc: 57.60% +2025-03-14 19:37:12,816 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 1.202 | Acc: 57.50% +2025-03-14 19:37:17,537 - train - INFO - Epoch: 10 | Test Loss: 1.179 | Test Acc: 57.09% +2025-03-14 19:37:28,080 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 1.027 | Acc: 67.19% +2025-03-14 19:37:30,927 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 1.176 | Acc: 58.61% +2025-03-14 19:37:33,856 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 1.184 | Acc: 58.20% +2025-03-14 19:37:36,622 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 1.180 | Acc: 58.40% +2025-03-14 19:37:40,915 - train - INFO - Epoch: 11 | Test Loss: 1.203 | Test Acc: 57.33% +2025-03-14 19:37:41,138 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 1.089 | Acc: 60.94% +2025-03-14 19:37:44,042 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 1.183 | Acc: 58.00% +2025-03-14 19:37:47,123 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 1.186 | Acc: 58.07% +2025-03-14 19:37:50,181 - train - INFO - Epoch: 12 | Batch: 
300 | Loss: 1.186 | Acc: 58.21% +2025-03-14 19:37:54,485 - train - INFO - Epoch: 12 | Test Loss: 1.184 | Test Acc: 58.28% +2025-03-14 19:38:04,529 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 1.169 | Acc: 56.25% +2025-03-14 19:38:07,411 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 1.180 | Acc: 58.57% +2025-03-14 19:38:10,271 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 1.172 | Acc: 58.88% +2025-03-14 19:38:13,588 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 1.170 | Acc: 58.83% +2025-03-14 19:38:17,915 - train - INFO - Epoch: 13 | Test Loss: 1.277 | Test Acc: 55.18% +2025-03-14 19:38:18,180 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 1.024 | Acc: 62.50% +2025-03-14 19:38:21,018 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 1.148 | Acc: 59.14% +2025-03-14 19:38:23,826 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 1.161 | Acc: 58.97% +2025-03-14 19:38:26,656 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 1.164 | Acc: 58.82% +2025-03-14 19:38:30,586 - train - INFO - Epoch: 14 | Test Loss: 1.276 | Test Acc: 55.02% +2025-03-14 19:38:41,002 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 1.196 | Acc: 53.91% +2025-03-14 19:38:43,862 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 1.157 | Acc: 59.38% +2025-03-14 19:38:46,647 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 1.148 | Acc: 59.47% +2025-03-14 19:38:49,537 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 1.150 | Acc: 59.41% +2025-03-14 19:38:53,845 - train - INFO - Epoch: 15 | Test Loss: 1.164 | Test Acc: 59.17% +2025-03-14 19:38:54,067 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 0.926 | Acc: 67.97% +2025-03-14 19:38:56,970 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 1.142 | Acc: 59.78% +2025-03-14 19:38:59,745 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 1.143 | Acc: 59.80% +2025-03-14 19:39:02,600 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 1.146 | Acc: 59.79% +2025-03-14 19:39:07,088 - train - INFO - Epoch: 16 | Test Loss: 1.145 | Test Acc: 59.72% +2025-03-14 19:39:18,059 - train - 
INFO - Epoch: 17 | Batch: 0 | Loss: 0.978 | Acc: 64.06% +2025-03-14 19:39:21,062 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 1.159 | Acc: 59.11% +2025-03-14 19:39:24,063 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 1.149 | Acc: 59.38% +2025-03-14 19:39:27,180 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 1.145 | Acc: 59.82% +2025-03-14 19:39:32,003 - train - INFO - Epoch: 17 | Test Loss: 1.164 | Test Acc: 59.60% +2025-03-14 19:39:32,294 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 1.097 | Acc: 60.94% +2025-03-14 19:39:35,379 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 1.131 | Acc: 59.47% +2025-03-14 19:39:38,505 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 1.126 | Acc: 59.93% +2025-03-14 19:39:41,838 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 1.124 | Acc: 60.26% +2025-03-14 19:39:46,267 - train - INFO - Epoch: 18 | Test Loss: 1.178 | Test Acc: 58.70% +2025-03-14 19:39:57,412 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 1.281 | Acc: 54.69% +2025-03-14 19:40:00,334 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 1.131 | Acc: 60.60% +2025-03-14 19:40:03,247 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 1.132 | Acc: 60.33% +2025-03-14 19:40:06,091 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 1.131 | Acc: 60.40% +2025-03-14 19:40:10,218 - train - INFO - Epoch: 19 | Test Loss: 1.146 | Test Acc: 59.70% +2025-03-14 19:40:10,427 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 1.237 | Acc: 57.03% +2025-03-14 19:40:13,329 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 1.105 | Acc: 61.37% +2025-03-14 19:40:16,496 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 1.112 | Acc: 61.24% +2025-03-14 19:40:19,346 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 1.119 | Acc: 61.04% +2025-03-14 19:40:23,460 - train - INFO - Epoch: 20 | Test Loss: 1.178 | Test Acc: 58.22% +2025-03-14 19:40:34,045 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 1.283 | Acc: 57.03% +2025-03-14 19:40:36,967 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 1.125 | Acc: 60.55% 
+2025-03-14 19:40:39,673 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 1.115 | Acc: 61.14% +2025-03-14 19:40:42,896 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 1.119 | Acc: 60.80% +2025-03-14 19:40:47,481 - train - INFO - Epoch: 21 | Test Loss: 1.148 | Test Acc: 58.85% +2025-03-14 19:40:47,708 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 1.006 | Acc: 63.28% +2025-03-14 19:40:50,647 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 1.109 | Acc: 61.04% +2025-03-14 19:40:53,390 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 1.121 | Acc: 60.61% +2025-03-14 19:40:56,265 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 1.121 | Acc: 60.59% +2025-03-14 19:41:00,300 - train - INFO - Epoch: 22 | Test Loss: 1.252 | Test Acc: 56.00% +2025-03-14 19:41:10,713 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 1.183 | Acc: 64.06% +2025-03-14 19:41:13,642 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 1.141 | Acc: 60.41% +2025-03-14 19:41:16,447 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 1.126 | Acc: 60.60% +2025-03-14 19:41:19,466 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 1.126 | Acc: 60.61% +2025-03-14 19:41:23,677 - train - INFO - Epoch: 23 | Test Loss: 1.355 | Test Acc: 53.98% +2025-03-14 19:41:23,909 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 1.204 | Acc: 57.03% +2025-03-14 19:41:26,799 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 1.111 | Acc: 60.94% +2025-03-14 19:41:29,837 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 1.111 | Acc: 61.03% +2025-03-14 19:41:32,845 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 1.112 | Acc: 60.86% +2025-03-14 19:41:37,135 - train - INFO - Epoch: 24 | Test Loss: 1.229 | Test Acc: 56.77% +2025-03-14 19:41:49,373 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 1.142 | Acc: 60.16% +2025-03-14 19:41:52,683 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 1.096 | Acc: 62.07% +2025-03-14 19:41:55,781 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 1.106 | Acc: 61.37% +2025-03-14 19:41:58,961 - train - INFO - Epoch: 25 | Batch: 300 | 
Loss: 1.105 | Acc: 61.47% +2025-03-14 19:42:03,822 - train - INFO - Epoch: 25 | Test Loss: 1.208 | Test Acc: 56.94% +2025-03-14 19:42:04,048 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 0.874 | Acc: 65.62% +2025-03-14 19:42:07,041 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 1.107 | Acc: 60.66% +2025-03-14 19:42:09,754 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 1.108 | Acc: 60.90% +2025-03-14 19:42:12,340 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 1.103 | Acc: 61.09% +2025-03-14 19:42:16,280 - train - INFO - Epoch: 26 | Test Loss: 1.272 | Test Acc: 54.55% +2025-03-14 19:42:26,726 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 1.055 | Acc: 63.28% +2025-03-14 19:42:29,611 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 1.098 | Acc: 61.42% +2025-03-14 19:42:32,450 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 1.101 | Acc: 61.61% +2025-03-14 19:42:35,164 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 1.110 | Acc: 61.25% +2025-03-14 19:42:38,982 - train - INFO - Epoch: 27 | Test Loss: 1.093 | Test Acc: 61.80% +2025-03-14 19:42:39,189 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 1.191 | Acc: 61.72% +2025-03-14 19:42:42,143 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 1.113 | Acc: 60.58% +2025-03-14 19:42:45,181 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 1.105 | Acc: 60.91% +2025-03-14 19:42:48,103 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 1.107 | Acc: 60.91% +2025-03-14 19:42:52,248 - train - INFO - Epoch: 28 | Test Loss: 1.132 | Test Acc: 60.36% +2025-03-14 19:43:02,145 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 1.005 | Acc: 69.53% +2025-03-14 19:43:04,936 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 1.107 | Acc: 61.42% +2025-03-14 19:43:07,909 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 1.100 | Acc: 61.62% +2025-03-14 19:43:10,658 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 1.097 | Acc: 61.47% +2025-03-14 19:43:14,707 - train - INFO - Epoch: 29 | Test Loss: 1.365 | Test Acc: 52.51% +2025-03-14 19:43:14,980 - train - INFO - 
Epoch: 30 | Batch: 0 | Loss: 1.134 | Acc: 56.25% +2025-03-14 19:43:17,981 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 1.101 | Acc: 61.53% +2025-03-14 19:43:20,945 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 1.097 | Acc: 61.55% +2025-03-14 19:43:23,828 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 1.096 | Acc: 61.62% +2025-03-14 19:43:27,912 - train - INFO - Epoch: 30 | Test Loss: 1.058 | Test Acc: 62.23% +2025-03-14 19:43:37,857 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 1.111 | Acc: 64.06% +2025-03-14 19:43:40,744 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 1.093 | Acc: 61.44% +2025-03-14 19:43:43,590 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 1.095 | Acc: 61.63% +2025-03-14 19:43:46,301 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 1.087 | Acc: 61.93% +2025-03-14 19:43:50,386 - train - INFO - Epoch: 31 | Test Loss: 1.227 | Test Acc: 56.04% +2025-03-14 19:43:50,594 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 0.883 | Acc: 70.31% +2025-03-14 19:43:53,386 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 1.085 | Acc: 62.06% +2025-03-14 19:43:56,058 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 1.093 | Acc: 61.87% +2025-03-14 19:43:58,777 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 1.095 | Acc: 61.84% +2025-03-14 19:44:02,708 - train - INFO - Epoch: 32 | Test Loss: 1.217 | Test Acc: 57.42% +2025-03-14 19:44:12,615 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 1.165 | Acc: 60.16% +2025-03-14 19:44:15,397 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 1.087 | Acc: 61.80% +2025-03-14 19:44:18,183 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 1.092 | Acc: 61.66% +2025-03-14 19:44:21,023 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 1.088 | Acc: 61.89% +2025-03-14 19:44:25,133 - train - INFO - Epoch: 33 | Test Loss: 1.139 | Test Acc: 59.81% +2025-03-14 19:44:25,337 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 1.119 | Acc: 57.81% +2025-03-14 19:44:28,094 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 1.081 | Acc: 62.59% +2025-03-14 
19:44:30,793 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 1.084 | Acc: 62.30% +2025-03-14 19:44:33,468 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 1.088 | Acc: 62.01% +2025-03-14 19:44:37,502 - train - INFO - Epoch: 34 | Test Loss: 1.096 | Test Acc: 61.46% +2025-03-14 19:44:47,538 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 1.280 | Acc: 52.34% +2025-03-14 19:44:50,462 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 1.105 | Acc: 61.56% +2025-03-14 19:44:53,242 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 1.098 | Acc: 61.76% +2025-03-14 19:44:56,109 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 1.094 | Acc: 61.89% +2025-03-14 19:45:00,063 - train - INFO - Epoch: 35 | Test Loss: 1.179 | Test Acc: 59.79% +2025-03-14 19:45:00,274 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.913 | Acc: 69.53% +2025-03-14 19:45:03,181 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 1.049 | Acc: 63.35% +2025-03-14 19:45:06,116 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 1.066 | Acc: 62.64% +2025-03-14 19:45:09,021 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 1.079 | Acc: 62.11% +2025-03-14 19:45:13,026 - train - INFO - Epoch: 36 | Test Loss: 1.090 | Test Acc: 61.83% +2025-03-14 19:45:23,209 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 1.131 | Acc: 61.72% +2025-03-14 19:45:26,014 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 1.073 | Acc: 62.70% +2025-03-14 19:45:28,845 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 1.083 | Acc: 62.25% +2025-03-14 19:45:31,768 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 1.080 | Acc: 62.33% +2025-03-14 19:45:35,879 - train - INFO - Epoch: 37 | Test Loss: 1.146 | Test Acc: 58.65% +2025-03-14 19:45:36,091 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 1.102 | Acc: 56.25% +2025-03-14 19:45:38,959 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 1.089 | Acc: 61.91% +2025-03-14 19:45:41,799 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 1.082 | Acc: 62.14% +2025-03-14 19:45:44,479 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 1.092 
| Acc: 61.85% +2025-03-14 19:45:48,389 - train - INFO - Epoch: 38 | Test Loss: 1.114 | Test Acc: 60.73% +2025-03-14 19:45:58,325 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 1.011 | Acc: 57.81% +2025-03-14 19:46:01,154 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 1.086 | Acc: 61.43% +2025-03-14 19:46:04,045 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 1.083 | Acc: 61.72% +2025-03-14 19:46:06,946 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 1.083 | Acc: 61.85% +2025-03-14 19:46:10,932 - train - INFO - Epoch: 39 | Test Loss: 1.192 | Test Acc: 58.43% +2025-03-14 19:46:11,177 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 1.080 | Acc: 61.72% +2025-03-14 19:46:14,380 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 1.103 | Acc: 62.06% +2025-03-14 19:46:17,468 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 1.086 | Acc: 62.45% +2025-03-14 19:46:20,564 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 1.084 | Acc: 62.41% +2025-03-14 19:46:24,461 - train - INFO - Epoch: 40 | Test Loss: 1.151 | Test Acc: 59.16% +2025-03-14 19:46:34,040 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 1.150 | Acc: 64.84% +2025-03-14 19:46:36,830 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 1.068 | Acc: 62.60% +2025-03-14 19:46:39,527 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 1.075 | Acc: 62.34% +2025-03-14 19:46:42,471 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 1.079 | Acc: 62.17% +2025-03-14 19:46:46,291 - train - INFO - Epoch: 41 | Test Loss: 1.162 | Test Acc: 59.24% +2025-03-14 19:46:46,502 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 1.153 | Acc: 60.16% +2025-03-14 19:46:49,178 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 1.087 | Acc: 61.93% +2025-03-14 19:46:51,859 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 1.092 | Acc: 62.05% +2025-03-14 19:46:54,717 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 1.084 | Acc: 62.22% +2025-03-14 19:46:58,764 - train - INFO - Epoch: 42 | Test Loss: 1.114 | Test Acc: 61.30% +2025-03-14 19:47:09,055 - train - INFO - Epoch: 43 | 
Batch: 0 | Loss: 0.999 | Acc: 66.41% +2025-03-14 19:47:11,844 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 1.078 | Acc: 62.23% +2025-03-14 19:47:14,711 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 1.077 | Acc: 62.01% +2025-03-14 19:47:17,266 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 1.079 | Acc: 62.16% +2025-03-14 19:47:21,134 - train - INFO - Epoch: 43 | Test Loss: 1.125 | Test Acc: 60.02% +2025-03-14 19:47:21,365 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 1.118 | Acc: 61.72% +2025-03-14 19:47:24,096 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 1.069 | Acc: 62.43% +2025-03-14 19:47:26,802 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 1.062 | Acc: 63.04% +2025-03-14 19:47:29,582 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 1.069 | Acc: 62.77% +2025-03-14 19:47:33,767 - train - INFO - Epoch: 44 | Test Loss: 1.221 | Test Acc: 59.06% +2025-03-14 19:47:45,660 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 1.279 | Acc: 53.91% +2025-03-14 19:47:48,442 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 1.077 | Acc: 62.37% +2025-03-14 19:47:51,274 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 1.082 | Acc: 62.27% +2025-03-14 19:47:54,009 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 1.080 | Acc: 62.46% +2025-03-14 19:47:58,034 - train - INFO - Epoch: 45 | Test Loss: 1.185 | Test Acc: 58.87% +2025-03-14 19:47:58,263 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 1.007 | Acc: 67.19% +2025-03-14 19:48:01,118 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 1.084 | Acc: 62.31% +2025-03-14 19:48:03,842 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 1.073 | Acc: 62.87% +2025-03-14 19:48:06,567 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 1.072 | Acc: 62.83% +2025-03-14 19:48:10,468 - train - INFO - Epoch: 46 | Test Loss: 1.052 | Test Acc: 63.17% +2025-03-14 19:48:20,201 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 1.105 | Acc: 57.81% +2025-03-14 19:48:23,066 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 1.051 | Acc: 63.25% +2025-03-14 19:48:25,806 - 
train - INFO - Epoch: 47 | Batch: 200 | Loss: 1.057 | Acc: 63.08% +2025-03-14 19:48:28,567 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 1.068 | Acc: 62.67% +2025-03-14 19:48:32,526 - train - INFO - Epoch: 47 | Test Loss: 1.130 | Test Acc: 60.48% +2025-03-14 19:48:32,747 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 1.123 | Acc: 54.69% +2025-03-14 19:48:35,546 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 1.069 | Acc: 62.48% +2025-03-14 19:48:38,451 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 1.066 | Acc: 62.38% +2025-03-14 19:48:41,132 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 1.074 | Acc: 62.11% +2025-03-14 19:48:44,988 - train - INFO - Epoch: 48 | Test Loss: 1.077 | Test Acc: 61.90% +2025-03-14 19:48:54,351 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.970 | Acc: 63.28% +2025-03-14 19:48:57,080 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 1.059 | Acc: 62.83% +2025-03-14 19:49:00,086 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 1.075 | Acc: 62.46% +2025-03-14 19:49:02,812 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 1.076 | Acc: 62.34% +2025-03-14 19:49:06,711 - train - INFO - Epoch: 49 | Test Loss: 1.124 | Test Acc: 59.80% +2025-03-14 19:49:06,901 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 1.099 | Acc: 64.06% +2025-03-14 19:49:09,703 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 1.057 | Acc: 63.00% +2025-03-14 19:49:12,554 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 1.066 | Acc: 62.73% +2025-03-14 19:49:15,667 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 1.065 | Acc: 62.84% +2025-03-14 19:49:20,123 - train - INFO - Epoch: 50 | Test Loss: 1.100 | Test Acc: 61.13% +2025-03-14 19:49:31,057 - train - INFO - 训练完成! diff --git a/Image/MobileNetv3/code/model.py b/Image/MobileNetv3/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..460b44d3788071260ba8844ebb8332eb4fb3f827 --- /dev/null +++ b/Image/MobileNetv3/code/model.py @@ -0,0 +1,252 @@ +''' +MobileNetV3 in PyTorch. 
+ +论文: "Searching for MobileNetV3" +参考: https://arxiv.org/abs/1905.02244 + +主要特点: +1. 引入基于NAS的网络架构搜索 +2. 使用改进的SE注意力机块 +3. 使用h-swish激活函数 +4. 重新设计了网络的最后几层 +5. 提供了Large和Small两个版本 +''' + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +def get_activation(name): + '''获取激活函数 + + Args: + name: 激活函数名称 ('relu' 或 'hardswish') + ''' + if name == 'relu': + return nn.ReLU(inplace=True) + elif name == 'hardswish': + return nn.Hardswish(inplace=True) + else: + raise NotImplementedError + + +class SEModule(nn.Module): + '''Squeeze-and-Excitation模块 + + 通过全局平均池化和两层全连接网络学习通道注意力权重 + + Args: + channel: 输入通道数 + reduction: 降维比例 + ''' + def __init__(self, channel, reduction=4): + super(SEModule, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Sequential( + nn.Linear(channel, channel // reduction, bias=False), + nn.ReLU(inplace=True), + nn.Linear(channel // reduction, channel, bias=False), + nn.Hardsigmoid(inplace=True) + ) + + def forward(self, x): + b, c, _, _ = x.size() + y = self.avg_pool(x).view(b, c) # squeeze + y = self.fc(y).view(b, c, 1, 1) # excitation + return x * y.expand_as(x) # scale + + +class Bottleneck(nn.Module): + '''MobileNetV3 Bottleneck + + 包含: + 1. Expansion layer (1x1 conv) + 2. Depthwise layer (3x3 or 5x5 depthwise conv) + 3. SE module (optional) + 4. 
Projection layer (1x1 conv) + + Args: + in_channels: 输入通道数 + exp_channels: 扩展层通道数 + out_channels: 输出通道数 + kernel_size: 深度卷积核大小 + stride: 步长 + use_SE: 是否使用SE模块 + activation: 激活函数类型 + use_residual: 是否使用残差连接 + ''' + def __init__(self, in_channels, exp_channels, out_channels, kernel_size, + stride, use_SE, activation, use_residual=True): + super(Bottleneck, self).__init__() + self.use_residual = use_residual and stride == 1 and in_channels == out_channels + padding = (kernel_size - 1) // 2 + + layers = [] + # Expansion layer + if exp_channels != in_channels: + layers.extend([ + nn.Conv2d(in_channels, exp_channels, 1, bias=False), + nn.BatchNorm2d(exp_channels), + get_activation(activation) + ]) + + # Depthwise conv + layers.extend([ + nn.Conv2d( + exp_channels, exp_channels, kernel_size, + stride, padding, groups=exp_channels, bias=False + ), + nn.BatchNorm2d(exp_channels), + get_activation(activation) + ]) + + # SE module + if use_SE: + layers.append(SEModule(exp_channels)) + + # Projection layer + layers.extend([ + nn.Conv2d(exp_channels, out_channels, 1, bias=False), + nn.BatchNorm2d(out_channels) + ]) + + self.conv = nn.Sequential(*layers) + + def forward(self, x): + if self.use_residual: + return x + self.conv(x) + else: + return self.conv(x) + + +class MobileNetV3(nn.Module): + '''MobileNetV3网络 + + Args: + num_classes: 分类数量 + mode: 'large' 或 'small',选择网络版本 + ''' + def __init__(self, num_classes=10, mode='small'): + super(MobileNetV3, self).__init__() + + if mode == 'large': + # MobileNetV3-Large架构 + self.config = [ + # k, exp, out, SE, activation, stride + [3, 16, 16, False, 'relu', 1], + [3, 64, 24, False, 'relu', 2], + [3, 72, 24, False, 'relu', 1], + [5, 72, 40, True, 'relu', 2], + [5, 120, 40, True, 'relu', 1], + [5, 120, 40, True, 'relu', 1], + [3, 240, 80, False, 'hardswish', 2], + [3, 200, 80, False, 'hardswish', 1], + [3, 184, 80, False, 'hardswish', 1], + [3, 184, 80, False, 'hardswish', 1], + [3, 480, 112, True, 'hardswish', 1], + [3, 672, 112, True, 
'hardswish', 1], + [5, 672, 160, True, 'hardswish', 2], + [5, 960, 160, True, 'hardswish', 1], + [5, 960, 160, True, 'hardswish', 1], + ] + init_conv_out = 16 + final_conv_out = 960 + else: + # MobileNetV3-Small架构 + self.config = [ + # k, exp, out, SE, activation, stride + [3, 16, 16, True, 'relu', 2], + [3, 72, 24, False, 'relu', 2], + [3, 88, 24, False, 'relu', 1], + [5, 96, 40, True, 'hardswish', 2], + [5, 240, 40, True, 'hardswish', 1], + [5, 240, 40, True, 'hardswish', 1], + [5, 120, 48, True, 'hardswish', 1], + [5, 144, 48, True, 'hardswish', 1], + [5, 288, 96, True, 'hardswish', 2], + [5, 576, 96, True, 'hardswish', 1], + [5, 576, 96, True, 'hardswish', 1], + ] + init_conv_out = 16 + final_conv_out = 576 + + # 第一层卷积 + self.conv_stem = nn.Sequential( + nn.Conv2d(3, init_conv_out, 3, 2, 1, bias=False), + nn.BatchNorm2d(init_conv_out), + get_activation('hardswish') + ) + + # 构建Bottleneck层 + features = [] + in_channels = init_conv_out + for k, exp, out, se, activation, stride in self.config: + features.append( + Bottleneck(in_channels, exp, out, k, stride, se, activation) + ) + in_channels = out + self.features = nn.Sequential(*features) + + # 最后的卷积层 + self.conv_head = nn.Sequential( + nn.Conv2d(in_channels, final_conv_out, 1, bias=False), + nn.BatchNorm2d(final_conv_out), + get_activation('hardswish') + ) + + # 分类器 + self.avgpool = nn.AdaptiveAvgPool2d(1) + self.classifier = nn.Sequential( + nn.Linear(final_conv_out, num_classes) + ) + + # 初始化权重 + self._initialize_weights() + + def _initialize_weights(self): + '''初始化模型权重''' + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out') + if m.bias is not None: + nn.init.zeros_(m.bias) + elif isinstance(m, nn.BatchNorm2d): + nn.init.ones_(m.weight) + nn.init.zeros_(m.bias) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + if m.bias is not None: + nn.init.zeros_(m.bias) + + def forward(self, x): + x = self.conv_stem(x) + x = self.features(x) + x 
= self.conv_head(x) + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.classifier(x) + return x + + +def test(): + """测试函数""" + # 测试Large版本 + net_large = MobileNetV3(mode='large') + x = torch.randn(2, 3, 32, 32) + y = net_large(x) + print('Large output size:', y.size()) + + # 测试Small版本 + net_small = MobileNetV3(mode='small') + y = net_small(x) + print('Small output size:', y.size()) + + # 打印模型结构 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net_small = net_small.to(device) + summary(net_small, (2, 3, 32, 32)) + +if __name__ == '__main__': + test() diff --git a/Image/MobileNetv3/code/train.log b/Image/MobileNetv3/code/train.log new file mode 100644 index 0000000000000000000000000000000000000000..71c61fea13a3db19cb353f9e6bb6a9754a4ecae3 --- /dev/null +++ b/Image/MobileNetv3/code/train.log @@ -0,0 +1,253 @@ +2025-03-14 19:34:03,811 - train - INFO - 开始训练 mobilenetv3 +2025-03-14 19:34:03,812 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:3 +2025-03-14 19:34:04,510 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.315 | Acc: 8.59% +2025-03-14 19:34:07,447 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.150 | Acc: 21.51% +2025-03-14 19:34:10,386 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 1.973 | Acc: 26.37% +2025-03-14 19:34:13,121 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 1.881 | Acc: 29.73% +2025-03-14 19:34:17,285 - train - INFO - Epoch: 1 | Test Loss: 1.600 | Test Acc: 40.59% +2025-03-14 19:34:17,773 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.550 | Acc: 39.06% +2025-03-14 19:34:20,584 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.586 | Acc: 40.99% +2025-03-14 19:34:23,574 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.574 | Acc: 41.84% +2025-03-14 19:34:26,466 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.556 | Acc: 42.67% +2025-03-14 19:34:31,001 - train - INFO - Epoch: 2 | Test Loss: 1.407 | Test Acc: 48.43% +2025-03-14 19:34:41,695 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.286 | Acc: 
54.69% +2025-03-14 19:34:44,711 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.447 | Acc: 47.37% +2025-03-14 19:34:47,946 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.443 | Acc: 47.33% +2025-03-14 19:34:50,944 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.441 | Acc: 47.48% +2025-03-14 19:34:56,768 - train - INFO - Epoch: 3 | Test Loss: 1.348 | Test Acc: 51.13% +2025-03-14 19:34:57,073 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.300 | Acc: 57.03% +2025-03-14 19:35:02,274 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.380 | Acc: 49.33% +2025-03-14 19:35:06,559 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.368 | Acc: 50.15% +2025-03-14 19:35:10,317 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.362 | Acc: 50.43% +2025-03-14 19:35:15,273 - train - INFO - Epoch: 4 | Test Loss: 1.359 | Test Acc: 50.22% +2025-03-14 19:35:28,028 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.331 | Acc: 52.34% +2025-03-14 19:35:31,190 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.320 | Acc: 52.00% +2025-03-14 19:35:34,388 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.321 | Acc: 51.92% +2025-03-14 19:35:37,466 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.320 | Acc: 51.83% +2025-03-14 19:35:41,929 - train - INFO - Epoch: 5 | Test Loss: 1.392 | Test Acc: 47.65% +2025-03-14 19:35:42,179 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.329 | Acc: 47.66% +2025-03-14 19:35:45,106 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.295 | Acc: 53.03% +2025-03-14 19:35:48,220 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.293 | Acc: 53.54% +2025-03-14 19:35:51,284 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.287 | Acc: 53.52% +2025-03-14 19:35:55,377 - train - INFO - Epoch: 6 | Test Loss: 1.222 | Test Acc: 56.24% +2025-03-14 19:36:05,521 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 1.276 | Acc: 55.47% +2025-03-14 19:36:08,675 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.254 | Acc: 54.59% +2025-03-14 19:36:11,596 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.253 | 
Acc: 54.56% +2025-03-14 19:36:14,501 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.252 | Acc: 54.65% +2025-03-14 19:36:18,647 - train - INFO - Epoch: 7 | Test Loss: 1.287 | Test Acc: 53.84% +2025-03-14 19:36:18,846 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.139 | Acc: 53.91% +2025-03-14 19:36:21,672 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.219 | Acc: 56.05% +2025-03-14 19:36:24,552 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.222 | Acc: 55.93% +2025-03-14 19:36:27,453 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.219 | Acc: 56.18% +2025-03-14 19:36:31,835 - train - INFO - Epoch: 8 | Test Loss: 1.185 | Test Acc: 56.67% +2025-03-14 19:36:43,478 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.177 | Acc: 63.28% +2025-03-14 19:36:46,534 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.201 | Acc: 56.69% +2025-03-14 19:36:49,936 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.210 | Acc: 56.46% +2025-03-14 19:36:52,915 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.212 | Acc: 56.35% +2025-03-14 19:36:57,694 - train - INFO - Epoch: 9 | Test Loss: 1.222 | Test Acc: 55.79% +2025-03-14 19:36:57,918 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 1.059 | Acc: 64.84% +2025-03-14 19:37:00,868 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 1.213 | Acc: 56.59% +2025-03-14 19:37:04,043 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 1.205 | Acc: 56.68% +2025-03-14 19:37:07,484 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 1.204 | Acc: 56.67% +2025-03-14 19:37:12,223 - train - INFO - Epoch: 10 | Test Loss: 1.201 | Test Acc: 56.86% +2025-03-14 19:37:23,335 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 1.118 | Acc: 56.25% +2025-03-14 19:37:26,292 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 1.184 | Acc: 57.36% +2025-03-14 19:37:29,243 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 1.170 | Acc: 57.91% +2025-03-14 19:37:32,097 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 1.178 | Acc: 57.66% +2025-03-14 19:37:36,266 - train - INFO - Epoch: 11 | Test Loss: 
1.338 | Test Acc: 52.61% +2025-03-14 19:37:36,479 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 1.215 | Acc: 50.78% +2025-03-14 19:37:39,302 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 1.181 | Acc: 57.33% +2025-03-14 19:37:42,060 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 1.187 | Acc: 57.51% +2025-03-14 19:37:44,805 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 1.177 | Acc: 57.82% +2025-03-14 19:37:49,023 - train - INFO - Epoch: 12 | Test Loss: 1.194 | Test Acc: 56.89% +2025-03-14 19:37:59,417 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 1.232 | Acc: 53.12% +2025-03-14 19:38:02,245 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 1.158 | Acc: 58.08% +2025-03-14 19:38:05,015 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 1.163 | Acc: 58.04% +2025-03-14 19:38:07,820 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 1.159 | Acc: 58.26% +2025-03-14 19:38:11,959 - train - INFO - Epoch: 13 | Test Loss: 1.149 | Test Acc: 58.96% +2025-03-14 19:38:12,197 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 1.201 | Acc: 57.03% +2025-03-14 19:38:15,304 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 1.139 | Acc: 59.60% +2025-03-14 19:38:18,315 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 1.154 | Acc: 58.94% +2025-03-14 19:38:21,155 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 1.153 | Acc: 58.77% +2025-03-14 19:38:25,190 - train - INFO - Epoch: 14 | Test Loss: 1.138 | Test Acc: 59.41% +2025-03-14 19:38:35,745 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 0.976 | Acc: 66.41% +2025-03-14 19:38:38,693 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 1.149 | Acc: 59.11% +2025-03-14 19:38:41,527 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 1.141 | Acc: 59.21% +2025-03-14 19:38:44,307 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 1.145 | Acc: 59.00% +2025-03-14 19:38:48,357 - train - INFO - Epoch: 15 | Test Loss: 1.068 | Test Acc: 62.03% +2025-03-14 19:38:48,571 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 1.218 | Acc: 53.12% +2025-03-14 19:38:51,535 - train - INFO - 
Epoch: 16 | Batch: 100 | Loss: 1.121 | Acc: 59.31% +2025-03-14 19:38:54,473 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 1.124 | Acc: 59.33% +2025-03-14 19:38:57,299 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 1.128 | Acc: 59.45% +2025-03-14 19:39:01,582 - train - INFO - Epoch: 16 | Test Loss: 1.117 | Test Acc: 60.35% +2025-03-14 19:39:12,941 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 1.216 | Acc: 60.16% +2025-03-14 19:39:15,823 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 1.128 | Acc: 59.86% +2025-03-14 19:39:18,876 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 1.124 | Acc: 59.88% +2025-03-14 19:39:21,926 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 1.131 | Acc: 59.47% +2025-03-14 19:39:26,176 - train - INFO - Epoch: 17 | Test Loss: 1.115 | Test Acc: 60.26% +2025-03-14 19:39:26,410 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 1.197 | Acc: 54.69% +2025-03-14 19:39:29,699 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 1.118 | Acc: 60.14% +2025-03-14 19:39:33,109 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 1.124 | Acc: 59.79% +2025-03-14 19:39:36,272 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 1.131 | Acc: 59.71% +2025-03-14 19:39:40,820 - train - INFO - Epoch: 18 | Test Loss: 1.250 | Test Acc: 55.97% +2025-03-14 19:39:51,937 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 1.201 | Acc: 52.34% +2025-03-14 19:39:55,019 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 1.120 | Acc: 60.21% +2025-03-14 19:39:57,784 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 1.121 | Acc: 60.15% +2025-03-14 19:40:00,471 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 1.119 | Acc: 60.19% +2025-03-14 19:40:04,498 - train - INFO - Epoch: 19 | Test Loss: 1.198 | Test Acc: 57.94% +2025-03-14 19:40:04,737 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 1.173 | Acc: 59.38% +2025-03-14 19:40:07,499 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 1.108 | Acc: 60.23% +2025-03-14 19:40:10,339 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 1.119 | Acc: 60.20% +2025-03-14 
19:40:13,119 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 1.112 | Acc: 60.39% +2025-03-14 19:40:17,467 - train - INFO - Epoch: 20 | Test Loss: 1.308 | Test Acc: 54.57% +2025-03-14 19:40:27,692 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 1.066 | Acc: 59.38% +2025-03-14 19:40:30,553 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 1.133 | Acc: 59.80% +2025-03-14 19:40:33,490 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 1.120 | Acc: 60.18% +2025-03-14 19:40:36,428 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 1.118 | Acc: 60.36% +2025-03-14 19:40:40,344 - train - INFO - Epoch: 21 | Test Loss: 1.159 | Test Acc: 58.64% +2025-03-14 19:40:40,605 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 0.997 | Acc: 63.28% +2025-03-14 19:40:44,155 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 1.100 | Acc: 60.76% +2025-03-14 19:40:47,367 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 1.096 | Acc: 60.92% +2025-03-14 19:40:50,149 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 1.099 | Acc: 60.99% +2025-03-14 19:40:54,308 - train - INFO - Epoch: 22 | Test Loss: 1.089 | Test Acc: 61.60% +2025-03-14 19:41:04,532 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 1.029 | Acc: 66.41% +2025-03-14 19:41:07,395 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 1.086 | Acc: 61.11% +2025-03-14 19:41:10,298 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 1.086 | Acc: 61.38% +2025-03-14 19:41:13,267 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 1.091 | Acc: 61.09% +2025-03-14 19:41:17,403 - train - INFO - Epoch: 23 | Test Loss: 1.116 | Test Acc: 60.21% +2025-03-14 19:41:17,641 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 1.143 | Acc: 66.41% +2025-03-14 19:41:20,685 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 1.091 | Acc: 61.22% +2025-03-14 19:41:23,621 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 1.091 | Acc: 61.34% +2025-03-14 19:41:26,610 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 1.092 | Acc: 61.26% +2025-03-14 19:41:30,833 - train - INFO - Epoch: 24 | Test Loss: 1.095 | Test 
Acc: 61.01% +2025-03-14 19:41:42,216 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 1.079 | Acc: 64.84% +2025-03-14 19:41:45,275 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 1.082 | Acc: 61.22% +2025-03-14 19:41:48,865 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 1.084 | Acc: 61.43% +2025-03-14 19:41:52,343 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 1.078 | Acc: 61.63% +2025-03-14 19:41:56,861 - train - INFO - Epoch: 25 | Test Loss: 1.052 | Test Acc: 61.88% +2025-03-14 19:41:57,122 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 1.047 | Acc: 63.28% +2025-03-14 19:42:00,289 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 1.087 | Acc: 61.63% +2025-03-14 19:42:03,526 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 1.087 | Acc: 61.79% +2025-03-14 19:42:06,632 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 1.083 | Acc: 61.67% +2025-03-14 19:42:10,746 - train - INFO - Epoch: 26 | Test Loss: 1.086 | Test Acc: 61.65% +2025-03-14 19:42:20,622 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 1.134 | Acc: 61.72% +2025-03-14 19:42:23,602 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 1.076 | Acc: 61.42% +2025-03-14 19:42:26,568 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 1.072 | Acc: 61.75% +2025-03-14 19:42:29,502 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 1.079 | Acc: 61.47% +2025-03-14 19:42:33,566 - train - INFO - Epoch: 27 | Test Loss: 1.135 | Test Acc: 60.40% +2025-03-14 19:42:33,802 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 1.246 | Acc: 50.00% +2025-03-14 19:42:36,640 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 1.069 | Acc: 62.02% +2025-03-14 19:42:39,573 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 1.075 | Acc: 61.68% +2025-03-14 19:42:42,773 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 1.077 | Acc: 61.52% +2025-03-14 19:42:46,877 - train - INFO - Epoch: 28 | Test Loss: 1.093 | Test Acc: 61.21% +2025-03-14 19:42:56,985 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 1.170 | Acc: 57.03% +2025-03-14 19:42:59,789 - train - INFO - Epoch: 29 | 
Batch: 100 | Loss: 1.070 | Acc: 62.21% +2025-03-14 19:43:02,512 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 1.067 | Acc: 62.04% +2025-03-14 19:43:05,191 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 1.068 | Acc: 61.89% +2025-03-14 19:43:09,441 - train - INFO - Epoch: 29 | Test Loss: 1.108 | Test Acc: 61.02% +2025-03-14 19:43:09,674 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 0.939 | Acc: 67.19% +2025-03-14 19:43:12,474 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 1.060 | Acc: 61.67% +2025-03-14 19:43:15,323 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 1.067 | Acc: 61.65% +2025-03-14 19:43:18,162 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 1.060 | Acc: 62.02% +2025-03-14 19:43:22,287 - train - INFO - Epoch: 30 | Test Loss: 1.144 | Test Acc: 60.34% +2025-03-14 19:43:32,512 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 0.962 | Acc: 65.62% +2025-03-14 19:43:35,242 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 1.066 | Acc: 61.82% +2025-03-14 19:43:37,851 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 1.071 | Acc: 61.85% +2025-03-14 19:43:40,703 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 1.067 | Acc: 61.91% +2025-03-14 19:43:44,687 - train - INFO - Epoch: 31 | Test Loss: 1.087 | Test Acc: 61.81% +2025-03-14 19:43:44,939 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 1.026 | Acc: 60.16% +2025-03-14 19:43:47,798 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 1.075 | Acc: 61.95% +2025-03-14 19:43:50,802 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 1.061 | Acc: 62.41% +2025-03-14 19:43:53,659 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 1.064 | Acc: 62.34% +2025-03-14 19:43:57,726 - train - INFO - Epoch: 32 | Test Loss: 1.134 | Test Acc: 60.53% +2025-03-14 19:44:07,467 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 1.038 | Acc: 67.97% +2025-03-14 19:44:10,465 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 1.072 | Acc: 61.79% +2025-03-14 19:44:13,127 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 1.060 | Acc: 62.29% +2025-03-14 19:44:15,841 - 
train - INFO - Epoch: 33 | Batch: 300 | Loss: 1.064 | Acc: 62.27% +2025-03-14 19:44:19,860 - train - INFO - Epoch: 33 | Test Loss: 1.025 | Test Acc: 63.50% +2025-03-14 19:44:20,077 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 1.182 | Acc: 58.59% +2025-03-14 19:44:23,002 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 1.041 | Acc: 62.76% +2025-03-14 19:44:26,161 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 1.044 | Acc: 62.88% +2025-03-14 19:44:29,066 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 1.052 | Acc: 62.61% +2025-03-14 19:44:32,974 - train - INFO - Epoch: 34 | Test Loss: 1.165 | Test Acc: 59.01% +2025-03-14 19:44:43,069 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 1.273 | Acc: 57.03% +2025-03-14 19:44:45,817 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 1.028 | Acc: 63.61% +2025-03-14 19:44:48,611 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 1.047 | Acc: 63.04% +2025-03-14 19:44:51,395 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 1.053 | Acc: 62.75% +2025-03-14 19:44:55,612 - train - INFO - Epoch: 35 | Test Loss: 1.135 | Test Acc: 60.02% +2025-03-14 19:44:55,809 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 0.908 | Acc: 67.97% +2025-03-14 19:44:58,620 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 1.051 | Acc: 62.64% +2025-03-14 19:45:01,440 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 1.051 | Acc: 62.66% +2025-03-14 19:45:04,528 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 1.053 | Acc: 62.72% +2025-03-14 19:45:08,941 - train - INFO - Epoch: 36 | Test Loss: 1.187 | Test Acc: 57.95% +2025-03-14 19:45:18,855 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 1.220 | Acc: 55.47% +2025-03-14 19:45:21,638 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 1.021 | Acc: 63.85% +2025-03-14 19:45:24,446 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 1.041 | Acc: 63.18% +2025-03-14 19:45:27,235 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 1.036 | Acc: 63.54% +2025-03-14 19:45:31,429 - train - INFO - Epoch: 37 | Test Loss: 1.055 | Test Acc: 62.86% 
+2025-03-14 19:45:31,653 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 0.931 | Acc: 62.50% +2025-03-14 19:45:34,563 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 1.018 | Acc: 63.77% +2025-03-14 19:45:37,461 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 1.037 | Acc: 63.12% +2025-03-14 19:45:40,390 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 1.049 | Acc: 62.78% +2025-03-14 19:45:44,376 - train - INFO - Epoch: 38 | Test Loss: 1.055 | Test Acc: 61.96% +2025-03-14 19:45:54,849 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 1.256 | Acc: 60.94% +2025-03-14 19:45:57,611 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 1.041 | Acc: 63.05% +2025-03-14 19:46:00,366 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 1.043 | Acc: 62.62% +2025-03-14 19:46:03,308 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 1.040 | Acc: 62.69% +2025-03-14 19:46:07,524 - train - INFO - Epoch: 39 | Test Loss: 1.217 | Test Acc: 58.12% +2025-03-14 19:46:07,739 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 1.104 | Acc: 60.94% +2025-03-14 19:46:10,580 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 1.027 | Acc: 63.51% +2025-03-14 19:46:13,745 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 1.030 | Acc: 63.62% +2025-03-14 19:46:16,973 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 1.042 | Acc: 63.13% +2025-03-14 19:46:21,340 - train - INFO - Epoch: 40 | Test Loss: 1.219 | Test Acc: 58.91% +2025-03-14 19:46:30,937 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 0.936 | Acc: 65.62% +2025-03-14 19:46:33,767 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 1.036 | Acc: 63.67% +2025-03-14 19:46:36,441 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 1.043 | Acc: 63.02% +2025-03-14 19:46:39,112 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 1.041 | Acc: 63.14% +2025-03-14 19:46:43,225 - train - INFO - Epoch: 41 | Test Loss: 1.087 | Test Acc: 61.82% +2025-03-14 19:46:43,446 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 0.974 | Acc: 67.97% +2025-03-14 19:46:46,463 - train - INFO - Epoch: 42 | Batch: 100 | 
Loss: 1.031 | Acc: 63.10% +2025-03-14 19:46:49,321 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 1.046 | Acc: 62.82% +2025-03-14 19:46:52,065 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 1.040 | Acc: 62.91% +2025-03-14 19:46:56,121 - train - INFO - Epoch: 42 | Test Loss: 1.088 | Test Acc: 61.92% +2025-03-14 19:47:06,480 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 0.959 | Acc: 64.84% +2025-03-14 19:47:09,338 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 1.025 | Acc: 63.60% +2025-03-14 19:47:12,303 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 1.028 | Acc: 63.58% +2025-03-14 19:47:15,185 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 1.034 | Acc: 63.35% +2025-03-14 19:47:19,050 - train - INFO - Epoch: 43 | Test Loss: 1.164 | Test Acc: 59.61% +2025-03-14 19:47:19,264 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 1.079 | Acc: 55.47% +2025-03-14 19:47:22,105 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 1.025 | Acc: 63.51% +2025-03-14 19:47:24,823 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 1.025 | Acc: 63.38% +2025-03-14 19:47:27,612 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 1.028 | Acc: 63.24% +2025-03-14 19:47:31,900 - train - INFO - Epoch: 44 | Test Loss: 1.046 | Test Acc: 62.74% +2025-03-14 19:47:42,877 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 1.033 | Acc: 62.50% +2025-03-14 19:47:45,774 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 1.033 | Acc: 63.99% +2025-03-14 19:47:48,457 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 1.038 | Acc: 63.33% +2025-03-14 19:47:51,350 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 1.043 | Acc: 62.95% +2025-03-14 19:47:55,429 - train - INFO - Epoch: 45 | Test Loss: 1.055 | Test Acc: 62.21% +2025-03-14 19:47:55,644 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 1.122 | Acc: 57.03% +2025-03-14 19:47:58,618 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 1.020 | Acc: 63.92% +2025-03-14 19:48:01,531 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 1.039 | Acc: 63.16% +2025-03-14 19:48:04,384 - train - INFO 
- Epoch: 46 | Batch: 300 | Loss: 1.039 | Acc: 63.08% +2025-03-14 19:48:08,515 - train - INFO - Epoch: 46 | Test Loss: 1.151 | Test Acc: 58.80% +2025-03-14 19:48:18,563 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 1.005 | Acc: 61.72% +2025-03-14 19:48:21,390 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 1.011 | Acc: 64.18% +2025-03-14 19:48:24,102 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 1.029 | Acc: 63.72% +2025-03-14 19:48:26,782 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 1.028 | Acc: 63.71% +2025-03-14 19:48:30,851 - train - INFO - Epoch: 47 | Test Loss: 1.091 | Test Acc: 61.88% +2025-03-14 19:48:31,086 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 1.034 | Acc: 66.41% +2025-03-14 19:48:33,899 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 1.013 | Acc: 63.99% +2025-03-14 19:48:36,777 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 1.031 | Acc: 63.33% +2025-03-14 19:48:39,701 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 1.034 | Acc: 63.14% +2025-03-14 19:48:43,738 - train - INFO - Epoch: 48 | Test Loss: 1.051 | Test Acc: 63.01% +2025-03-14 19:48:53,341 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 0.940 | Acc: 69.53% +2025-03-14 19:48:56,129 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 1.022 | Acc: 63.68% +2025-03-14 19:48:59,194 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 1.027 | Acc: 63.31% +2025-03-14 19:49:02,012 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 1.029 | Acc: 63.34% +2025-03-14 19:49:06,039 - train - INFO - Epoch: 49 | Test Loss: 1.045 | Test Acc: 63.33% +2025-03-14 19:49:06,237 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 0.902 | Acc: 63.28% +2025-03-14 19:49:09,007 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 1.025 | Acc: 63.56% +2025-03-14 19:49:11,762 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 1.026 | Acc: 63.59% +2025-03-14 19:49:14,634 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 1.031 | Acc: 63.42% +2025-03-14 19:49:19,058 - train - INFO - Epoch: 50 | Test Loss: 1.069 | Test Acc: 62.55% +2025-03-14 
19:49:30,812 - train - INFO - 训练完成! diff --git a/Image/MobileNetv3/code/train.py b/Image/MobileNetv3/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..b3014ef3f545306f00a1df7fdaec4725f5e1245c --- /dev/null +++ b/Image/MobileNetv3/code/train.py @@ -0,0 +1,63 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import MobileNetV3 + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = MobileNetV3() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv3', + save_type='0', + layer_name='avgpool', + interval=2 + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv3', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='mobilenetv3', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path, + layer_name='avgpool', + interval=2 + ) + +if __name__ == '__main__': + main() diff --git a/Image/MobileNetv3/dataset/.gitkeep b/Image/MobileNetv3/dataset/.gitkeep new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/MobileNetv3/model/.gitkeep b/Image/MobileNetv3/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/MobileNetv3/model/0/epoch1/embeddings.npy b/Image/MobileNetv3/model/0/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2ef17e12fe5ac6d65c7288dad95b3a59eedee3f3 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:252e5c1fe6f459fa4959e724c8b2d3634d49ad101413dff163b378706b7398c9 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch1/subject_model.pth b/Image/MobileNetv3/model/0/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b6023c1d8dfe91a2907d3c9e11f5f6efc7308036 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1c9a224155ebb4ebb483653f279d8f649af5b60deedad3136f211caea9499e1 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch10/embeddings.npy b/Image/MobileNetv3/model/0/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..6fab3695cb65a1985a00127054cf4bff2a9d5416 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:38e83146b0bb02e699a59ca59353c996c957bb7ce685c3ea5b9741fe69727f09 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch10/subject_model.pth b/Image/MobileNetv3/model/0/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..08d069fa91cd7c2633e4f74ac299061de2964cd9 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:1752c46956ab21bd2c02272c2e137facc791fb265cfeb640c1b996e1f7cdd910 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch11/embeddings.npy b/Image/MobileNetv3/model/0/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b18cf0911e0a547c21edaffd697ae282f4717616 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9ad1c97d59e0ba39d08fb0c656fc316be03d6837ccbe27382a91d4c1b8e525e +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch11/subject_model.pth b/Image/MobileNetv3/model/0/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..621ca89088d3da4ef3ba8e8e4e0f9763043c99eb --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:20a73e1f5b9f21d0282d1194ff3a64c4123b5abfa28f38bd189cbbe7dfae2143 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch12/embeddings.npy b/Image/MobileNetv3/model/0/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1eca81b1e14e3138c9247c6af3893c334d6da1b3 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:73ed55cb4f7acdd0a9641a736f4d9a02f7ed523b052a8746fc3c97f06bbdc6e5 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch12/subject_model.pth b/Image/MobileNetv3/model/0/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..73803f27738976031a3f59c8b05859b3412d7e1d --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:546e5c41533acbcb2addd504a61aab6baff460098c251be7ae813ee316b48b62 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch13/embeddings.npy b/Image/MobileNetv3/model/0/epoch13/embeddings.npy new 
file mode 100644 index 0000000000000000000000000000000000000000..503082ad950def066a70da36f2b1972c802c93a5 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19f4a7f96a2bb206caf7849dd40e52329fb0ec4bcb630db5e78d27d8f461c2fd +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch13/subject_model.pth b/Image/MobileNetv3/model/0/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f663fbe997ab4f3f79f1681b00c7cc52b7b3210b --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:78ceb1fcf21cde966649bf5b2ff63648734e041512a4bbc8791b3f44788d6526 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch14/embeddings.npy b/Image/MobileNetv3/model/0/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4087e60fa7c9f430419bb20aa6ad1fb5e5bd5c95 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c4bd473e82a95837132e07efe972828057448488b2cc628aaf97f2bf431cb0fb +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch14/subject_model.pth b/Image/MobileNetv3/model/0/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2670b1fc3ebae22fcefedcde20848a7dbe87b0cb --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04ecb33716ad2fc966884cf3a2347bbe12872768d45a29a962e6629b9c18f649 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch15/embeddings.npy b/Image/MobileNetv3/model/0/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..32c824058fca4fe15df6f10e1551472056284d04 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:9ddcbda7b6562235f296f30fc7ba6c8d8414b3a579ef5a6d293dfb5fa6ba2e8c +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch15/subject_model.pth b/Image/MobileNetv3/model/0/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cd25c7325630a9b29eb31f8835d79cf21dfa0308 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0d65e0db7ec78eadffff69805f93d3b9f1f917222c03c8897fdab3567d29333b +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch16/embeddings.npy b/Image/MobileNetv3/model/0/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..acff5ee39bc70f88ff5095e59506d71da1fac4c2 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e753fa2a0cb878e3de4d17b174043b2df74f08ccbedafc42dc7b5b521a63245 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch16/subject_model.pth b/Image/MobileNetv3/model/0/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..302319420d06a912cece4c65e5a6481866220b27 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4fe82e0df22eccd0542d44f696540005ee7fcb07612bb9517d65160d4da98f7b +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch17/embeddings.npy b/Image/MobileNetv3/model/0/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d32ecd0ad3088a060b032c36ca59d196c8fd84df --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9225fd71af9ba87fcd7e15223d3d40a97f56ec5f90c5ffc8258174d0a3225b33 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch17/subject_model.pth 
b/Image/MobileNetv3/model/0/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e59e76777b9a92e2558fbee45e5c1c0a6c1428b4 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44b37c6dedcd76d5eb345bef38ba0a25e837645188b28e5641d2c7cf49b53640 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch18/embeddings.npy b/Image/MobileNetv3/model/0/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..66063550264a62f4eb77a6b46ec8ab4fdf81311e --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45c3901e85899f466ae45babcd90df184fc2cb88370d4d6a0f5ac189f963817a +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch18/subject_model.pth b/Image/MobileNetv3/model/0/epoch18/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..87f65822d06cd600a19193b6d050344de72d996d --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa04606288571ac7cec29053f49dace90dd02e11f1f50424354eb1127cafa0c0 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch19/embeddings.npy b/Image/MobileNetv3/model/0/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..1d8305746abcc48136d0d3ed3ba06665a693baea --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c36608c5fe39e45eff48b528dd7e963beb14b8a4bee22af3413a09aed322d0b +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch19/subject_model.pth b/Image/MobileNetv3/model/0/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..576f63d07cc4b0c4d38feb454e0123e015a53dd4 --- /dev/null +++ 
b/Image/MobileNetv3/model/0/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9ffb3fb170654c1a0ad73754d5a5b91af850b7c8b2e3110a925d388820042b2 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch2/embeddings.npy b/Image/MobileNetv3/model/0/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f72d055c50721660f9ce5e9ead5838f8395c7475 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79d098708547b74213eaceb7d685dc972566bc3ce9984c0745f05b0d8938dee8 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch2/subject_model.pth b/Image/MobileNetv3/model/0/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d58c1c2a5f34291303c321351f11da61d3943da3 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17efdc5c7579246f797abf218f8aa8816c2aa761d25de70e0b88ed4811bb457c +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch20/embeddings.npy b/Image/MobileNetv3/model/0/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f427df620cd356629adf474281aab7c553e2bf0a --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e757a57e83a4ea08ab86e6513c23a69a27fc52f448d0d9ef48b5b580c35c119e +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch20/subject_model.pth b/Image/MobileNetv3/model/0/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..08f9beefe579adbca532f8a0907eb5e3f3849d62 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17357c0a7bd51ca42049e54721c6708caf0f10924d048f1ac1dc080af59a13ee +size 3823498 diff 
--git a/Image/MobileNetv3/model/0/epoch21/embeddings.npy b/Image/MobileNetv3/model/0/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9ea26d436830ccd298b5618b3e30e509db734d58 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7e1c2831538721b21ae7aa788f8a9ac142c036ee283a459c9b22f05a504ef335 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch21/subject_model.pth b/Image/MobileNetv3/model/0/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..02664737ad5653415065860efe7f9f2a2342193d --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42020ee9474519443da293ce2dab9655658493922af055ad9bfdbc6397ccde97 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch22/embeddings.npy b/Image/MobileNetv3/model/0/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b876147765342a652c9b988cb06c0b0cc444294e --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:248cb1b583ec5ab0e3820928e90d1226fff9772c919cca5ffaf302006ef4b413 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch22/subject_model.pth b/Image/MobileNetv3/model/0/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..24de05a85373bba87d14692eed1f9d6c72610d8f --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e6cb740d190f3988b1ed030c34a15d0ad04bc371cdf446a9b21b4e0cc0535a4a +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch23/embeddings.npy b/Image/MobileNetv3/model/0/epoch23/embeddings.npy new file mode 100644 index 
0000000000000000000000000000000000000000..d2d8e48b503704a3d4e565328d8faf50f39518ae --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:874abfc9753d627824ab28ab947dd17b03e8124f2fb76c57287e4c0b3ddfa876 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch23/subject_model.pth b/Image/MobileNetv3/model/0/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ac11e6753cf7a5750c73e849319df68361480a11 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51492b0897e0abfe248b0c622ebbe8820fab91514c1e5839d4106a3c7add49ae +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch24/embeddings.npy b/Image/MobileNetv3/model/0/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..584e11799adda6e7a4443a370ab07414bfbefe0d --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17cb0091fe88285f8e8fd2b405d3d74e448a1311bcc52afc3dc08a84f0b14df7 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch24/subject_model.pth b/Image/MobileNetv3/model/0/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..994c78c7047c60a456544440b3e089d18a342997 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f63002c3be7fe5cd4d59af70e0b6b5a00c918fc627feacbb897110ab2a0320d5 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch25/embeddings.npy b/Image/MobileNetv3/model/0/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e3afd3706b28f7a39e5c1af1af0df28a216f4c62 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:ff634131237d025893911b0e93a3c06f093fa84932d11e4ee98ededece8e2117 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch25/subject_model.pth b/Image/MobileNetv3/model/0/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..cef896869ef62258d4b7b1aafe14ba65d81cb431 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e3c80c944b8eb71956cc075fefdc3685840124f3ecd5f729d7910819e2c43e8 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch3/embeddings.npy b/Image/MobileNetv3/model/0/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..44292edb3151cb0355a499e7b0e4a396ccb97223 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ee167dcd7528d269ab976549a0d000bc5844b2286033a884d1330823845a5d71 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch3/subject_model.pth b/Image/MobileNetv3/model/0/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..424ce3dfa123e5b6ddc786c2ed9693e9c514807b --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f8fdf11aeb8dd638ab5a8b7867e3d3d6a23ca70fed08a6bb1c262ecaad815208 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch4/embeddings.npy b/Image/MobileNetv3/model/0/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..29957a8e93ea6eb3800c951a82f855e12b8465e0 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dc458776bdfd3562c60b79b9a41d9e0d2708e51b4de4e236f9ced3fc5307d7ef +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch4/subject_model.pth 
b/Image/MobileNetv3/model/0/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3004c614074bed92fcba96027d61ec18b85df1d9 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1da6a443d51c048d8761520fa43359c6834c98e73505ae65b546af5c1fd14d4c +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch5/embeddings.npy b/Image/MobileNetv3/model/0/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..86277307c697996c798136aaeb1b9b6b4977f775 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19e544c73fba99630a6b57e4a4a0d6c87cf2ccb7624de8ac8ec8dcb0e9ffc428 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch5/subject_model.pth b/Image/MobileNetv3/model/0/epoch5/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..903e1c71db7648a480154642da80a93d03f3d4a8 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b40ca9fa27f7cfbfa08182de45a9d1fc61e3e4ff1d617546d4346d8df78d6543 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch6/embeddings.npy b/Image/MobileNetv3/model/0/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..47ed194a75fc764469ce64c5f3770b24514e993c --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70f87f7fb9bd8ed790d787602baaa7185d2e719fb536808b949d33d7ea1fa7df +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch6/subject_model.pth b/Image/MobileNetv3/model/0/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c8f03225af07d5c8a7ff3d8eb87e60c881d4daa6 --- /dev/null +++ 
b/Image/MobileNetv3/model/0/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5079ce42554f1fbd683e28043bbc076c08581363bfa049b51f60d4c18081dc69 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch7/embeddings.npy b/Image/MobileNetv3/model/0/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0d0fe950cd42106b5a6103ce4f2d1a8ea53e56bc --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3164749290d2da0cdada1580ac39685403c586f432d380b41467ab2ed0fd28fc +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch7/subject_model.pth b/Image/MobileNetv3/model/0/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e240ff25be71f92bdbae9f86fddad60ce6a7ef1f --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:10ee5769a26a04a4e3bf87ce0e894ca6b893301a42ba92d03588fd56867bb5d5 +size 3823498 diff --git a/Image/MobileNetv3/model/0/epoch8/embeddings.npy b/Image/MobileNetv3/model/0/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..59a6d45909c14049508ce66fbfa08351c14bc9d7 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:32a025151c7fc9a466fa667017371e257aa50bb7e832333e4c82e37533bc024c +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch8/subject_model.pth b/Image/MobileNetv3/model/0/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..6294203f3b1be5bc24f34bc67e0844b75818f5d9 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:864d1a28dc50e5a8976515c34fe3b822897b998dfad7238c9564d1e946480084 +size 3823498 diff --git 
a/Image/MobileNetv3/model/0/epoch9/embeddings.npy b/Image/MobileNetv3/model/0/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b14685de7d7fedbbefd65a43c542e9a5fe133a3f --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9d49306a6703dde4c433cd1f53bcbf03526191be953c05dca186d4ec042c3bc8 +size 115200128 diff --git a/Image/MobileNetv3/model/0/epoch9/subject_model.pth b/Image/MobileNetv3/model/0/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e1a20e7adf25d6f262414df01a2f6a8b7314f884 --- /dev/null +++ b/Image/MobileNetv3/model/0/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9decd9cc7bb316d737175117f3c06e4cf2dfe32122c545c573321db30fdbc3b8 +size 3823498 diff --git a/Image/MobileNetv3/model/0/layer_info.json b/Image/MobileNetv3/model/0/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..b512735f031cee7d1fd3cf7779e960c7f948edcd --- /dev/null +++ b/Image/MobileNetv3/model/0/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avgpool", "dim": 576} \ No newline at end of file diff --git a/Image/MobileNetv3/model/2/epoch1/embeddings.npy b/Image/MobileNetv3/model/2/epoch1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..220c0b5901d52dbb07e2787a4ba4f7705492920b --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c5b9c195d44dbf288ff904125c37726830e6c7d15f61c0342f0145eaf86abf59 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch1/subject_model.pth b/Image/MobileNetv3/model/2/epoch1/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..54e3396c996e18769caf51bab1a04b60b29f8e43 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch1/subject_model.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:7ee88be14486706d2401e3dae5d83f612d4e6b6f9b587ca60dcd1981e9d459d6 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch10/embeddings.npy b/Image/MobileNetv3/model/2/epoch10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..27a9289e6775f3bdd7cbe84221073c478dcad391 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8eee459bde4eaab9caf81e6a7dbd29fd392cdde6237ba3ff5a379cc5693804d +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch10/subject_model.pth b/Image/MobileNetv3/model/2/epoch10/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..fbfc85f055f5876990c09ea0c32fa89c8a61a66a --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch10/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3499f24af13ec5d19eb9fe9f9e9a3a7bc16305843ab2d6f1f8a7599389678dee +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch11/embeddings.npy b/Image/MobileNetv3/model/2/epoch11/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..aa1d71629ce8cb0ba01c5eb13e3361af92716715 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch11/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:308c8150b75f8ac03c03e08f9d6cbb05c8223be97f33b3e5bbb52884c9d929c9 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch11/subject_model.pth b/Image/MobileNetv3/model/2/epoch11/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..65c6a8cf497dc9ef7d68cd814cd0071eebf71c4b --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch11/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc68a8cd4c1c3855c9d49585cdc9c6fd630ac7ac82a3c766152167906222614a +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch12/embeddings.npy 
b/Image/MobileNetv3/model/2/epoch12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..69d30fe1c1a193d868beb6bbb551eec9af8f1866 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0156ba95d21652669ec6d616965886cc6ca8ab17bac29d4942ee358af943c23 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch12/subject_model.pth b/Image/MobileNetv3/model/2/epoch12/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9d0447daa82b845445e8577b3e907babdc6641a8 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch12/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ef0dbad44d99ea62d064d53d9f74504986f504c1e849bdc015913af8b97b51e +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch13/embeddings.npy b/Image/MobileNetv3/model/2/epoch13/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..45fb14ab8299a79a7ea12efdc26bd6441dd66d33 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch13/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84beebc069fc139c82022001ced062c53df7fae6afac99cdb7e5d411782efa06 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch13/subject_model.pth b/Image/MobileNetv3/model/2/epoch13/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7c84b4452578f7238f042c46b6cd2d2243b31bf2 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch13/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:070f0f2ac40cd94a0b10fd4026643e124cb31d06704d8a99077f2f60b7d45903 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch14/embeddings.npy b/Image/MobileNetv3/model/2/epoch14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..bbe99ebbd8b5ad70db2c41a08383a5e2af024c45 --- /dev/null +++ 
b/Image/MobileNetv3/model/2/epoch14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:86b49f2e5e29271724f180ec20eeada9434bf3047aa9425bd74907c22a431225 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch14/subject_model.pth b/Image/MobileNetv3/model/2/epoch14/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e25dcdc1760161caa0b3d0adfd0133206fcabc21 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch14/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:813e67e4e502ba3171d111995c421499410908e547decc82f92d9b0f1918572b +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch15/embeddings.npy b/Image/MobileNetv3/model/2/epoch15/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8446beed354594a8be7c0021ab919b459319fa21 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch15/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4934d63e7609c2e00d3beffa5bf0bfb5a57f5bc1fce511f0f86312eea9693457 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch15/subject_model.pth b/Image/MobileNetv3/model/2/epoch15/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..e639bff576c5d036d4f2cc777679cf6e3fcb615f --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch15/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd901059584ae813c121afb00b9133729ecf61742174343d67f05ec9497c8209 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch16/embeddings.npy b/Image/MobileNetv3/model/2/epoch16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c751aba0fb268365406a59e3ded4cc75786efb33 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fcc2d19ac2ab9bcea87edb7458e421006c10bf0700843bcdbe1157bf734d8866 +size 115200128 
diff --git a/Image/MobileNetv3/model/2/epoch16/subject_model.pth b/Image/MobileNetv3/model/2/epoch16/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4a9139b4dcc4711191b99f2d767a4a9c43062add --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch16/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a83f9010e18a02ca9b63b60933a5ce2f42b8c1b51cae730789f1cbe2d5ac342 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch17/embeddings.npy b/Image/MobileNetv3/model/2/epoch17/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..83d33804f3f54009bb9cfb6512ac224618c12044 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch17/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d44ec06dc32ac51dab53820412034f9115050762eb09317947d7d54d0a385127 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch17/subject_model.pth b/Image/MobileNetv3/model/2/epoch17/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8148f4aec26f0e8413195c1c938bb42f7c1af441 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch17/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:671e78a25f9aa2245adaa179f7e6df72f1ecf481226b552432108a2856a80b9d +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch18/embeddings.npy b/Image/MobileNetv3/model/2/epoch18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..326fbc7c017fdc34fd8943df25fde6fd4875b71f --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3261ceb44bf4accc900acc59ef0c035b693ec07c835874adc5e5299b133db5c3 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch18/subject_model.pth b/Image/MobileNetv3/model/2/epoch18/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..0567b34a00efda65ed183c122a979e10fa54bce5 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch18/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88513a9d6c11486305a2681691fb8e390c18bfe663ef2c8ba29db2cd37346360 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch19/embeddings.npy b/Image/MobileNetv3/model/2/epoch19/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..ca9a9d92dd1de0b98ea926eb369408146c1794b1 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch19/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:28ab196d98c05d3b184b0931f5c981196426df0710ece70077b8e587b54f28a6 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch19/subject_model.pth b/Image/MobileNetv3/model/2/epoch19/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5cd33097c3d79d38b2dfc7c9a57f0d8d72f7ec8d --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch19/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f492f88f98e6ab5f0e68e3d2ae6aa26d65fa9e94faf4740702ca8af445a6413 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch2/embeddings.npy b/Image/MobileNetv3/model/2/epoch2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a24d393841dd8b2ee458d03ccd2498b5073bef0c --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e9dedcb1f3547ed52f1901978907a79b099eea932c64a61cd7dd80533ac6580 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch2/subject_model.pth b/Image/MobileNetv3/model/2/epoch2/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b91037d2db653c130037e74b26f1d710a9cd328d --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch2/subject_model.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:c5938b65d827d3564de2ce6b3a8558075f0b6a5dca2123226238b1524c0905a0 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch20/embeddings.npy b/Image/MobileNetv3/model/2/epoch20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a94e570e04e87d2a019bcc181e8a25020865da70 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2ab0f63b356753c4085a4fd1232fb8ded7287808dc5623d9a7eba953990ea99 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch20/subject_model.pth b/Image/MobileNetv3/model/2/epoch20/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c519f9c02305c0e4e736cb1f3b8d148ee8f865bb --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch20/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fefd6815ca9a82e97fd0e254ac67c9bebcd483db06cded7c3707567d6be5d679 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch21/embeddings.npy b/Image/MobileNetv3/model/2/epoch21/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b35989f7718ade1809f89061f4a4142d4273b866 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch21/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a38ef27bd6dda0be0914385c789423c7620f29c07f7eaae53904dcff9f24b39c +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch21/subject_model.pth b/Image/MobileNetv3/model/2/epoch21/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ba8c08c1b0d338de13b1817e55cb4927d591db0e --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch21/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a08107aea42c98d6090da3e98dbde760c0eb149a167f42367d8525bf2a56d524 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch22/embeddings.npy 
b/Image/MobileNetv3/model/2/epoch22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..769245a29e0c1b43b44f8c463d6b7b1ec0a969cb --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2b7504bd120ce1646d28328c56301f771371325d976ed2c06c5cf48cd4935aa +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch22/subject_model.pth b/Image/MobileNetv3/model/2/epoch22/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..84892fc25c9fcbbb58fff1799b699e4b779879a3 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch22/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1075e875025fc225da83d2f706982f18bd967bde13757b08036b0e70f2e2c761 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch23/embeddings.npy b/Image/MobileNetv3/model/2/epoch23/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d95060a2c36f1f3937351080d1071520a5b64e98 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch23/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16148c694909f5965993f75cb89c453288685f5601351163f53077e2801d7c3a +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch23/subject_model.pth b/Image/MobileNetv3/model/2/epoch23/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d165d16aacb5c476dd231605c87bc79d2fead1a9 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch23/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bb9d23bf0fcfcc425668033c4385a99c7f1158543438f48a530142d4ab29b2cc +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch24/embeddings.npy b/Image/MobileNetv3/model/2/epoch24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c6183921a2f03361507520a80a982b5bb1441194 --- /dev/null +++ 
b/Image/MobileNetv3/model/2/epoch24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d27c32e03ae9b432bc20162443b9c2bbb3d5880a115df6780e1efba197e6f299 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch24/subject_model.pth b/Image/MobileNetv3/model/2/epoch24/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..bf4bc2cf56b528eed463f0178ce867c598f7d05e --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch24/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:72aea04f4ce41efc8ccb88e70ad984fec2aa25e00d5a2eea7066ea6b9c1db3d6 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch25/embeddings.npy b/Image/MobileNetv3/model/2/epoch25/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..d0ad31a43adaf3119c9cf5295f0adb61d647648d --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch25/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:adc4e0dc8c955ce97190b8b081ebf50cd79ef18b181448c58396dd5202dc6ebd +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch25/subject_model.pth b/Image/MobileNetv3/model/2/epoch25/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f971ef8fd140c7d990c05f8fc57f9e9cefb187e0 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch25/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:401273bbba27c9470540f4e0e7db15f6d96e2d39985b2849ea613ee8187a5725 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch3/embeddings.npy b/Image/MobileNetv3/model/2/epoch3/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8e97daa5a987b08717fd5707a1ac334610fe8cd3 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch3/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b40e4d08ad88ab03e614fcf2523166cacaff6a75cc92114297686f41f6ae84e9 +size 115200128 diff 
--git a/Image/MobileNetv3/model/2/epoch3/subject_model.pth b/Image/MobileNetv3/model/2/epoch3/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..635204e0cbab85a5c10a9895cc18273d84e78039 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch3/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:660a9f7a2fec8189f1e7dafa0e8ac428a663e6d769527760be5cf7222832d691 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch4/embeddings.npy b/Image/MobileNetv3/model/2/epoch4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b81fc5f1ff9d7162d7d2232ca44f4b975a99d2fc --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45b74196c8c1dd40a91a3d7c24b5d876d44349fa590df20e7a3da519a645190c +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch4/subject_model.pth b/Image/MobileNetv3/model/2/epoch4/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b15c331858bc55e4b367ae3c5b5cde1b430072b9 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch4/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:202d33d927d56c5495677a25f99bb975e005d9487f4d13f120fa5a3656733cb1 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch5/embeddings.npy b/Image/MobileNetv3/model/2/epoch5/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9fdb0ff4fb8b06e0bec98cfa82c5ba157bf15b0b --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch5/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c929528bdec14d628dcfae64ba9bd19c523c463f5c7afc91f1aeb90848ccdfd0 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch5/subject_model.pth b/Image/MobileNetv3/model/2/epoch5/subject_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..56d23cd1df2a14332652ebda6294d6c78f82ecb4 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch5/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f76237d7c148213c873142848443fe6f7b2cb3bd211f2718e3671ee63949af5f +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch6/embeddings.npy b/Image/MobileNetv3/model/2/epoch6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..64f426ce462d329d38464c9564db7c52c62d6246 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2651531d0f05892febc60438172a84aa82a4d703bdea50f921de498063e1026 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch6/subject_model.pth b/Image/MobileNetv3/model/2/epoch6/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b3feb5ce392a6b77f36b81a6563e1490d76adfad --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch6/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8928db9e86b4b649148c4f96e0be03ccf1dd2b8edba9d20f212907231d52d98f +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch7/embeddings.npy b/Image/MobileNetv3/model/2/epoch7/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a7a68c76bda076e6a75d893cfac7b4de27751607 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch7/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e40b02b525c5e7974bd11f6d6abc73d8218973181261f343116f0363e66b07e7 +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch7/subject_model.pth b/Image/MobileNetv3/model/2/epoch7/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9f18c72e5073eb3b3202d9801271daa13e058f50 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch7/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 
+oid sha256:b8fc04989e2cb75f340f151aab2c938fe103d2e24e3c36038c80f36b3f934b51 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch8/embeddings.npy b/Image/MobileNetv3/model/2/epoch8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..68629d28d56605f6c588d61e8eeeb183a58565c6 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4dfe83934b05f790c078d0df49e9a431da5f129ad216b945a98defc76605d91e +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch8/subject_model.pth b/Image/MobileNetv3/model/2/epoch8/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c04fa70fd0bec12c8a97248419530c9c8fa114a4 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch8/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6637478ae3680effe2daae180c49ce1d6e7307f30dff0faee95994b5520cab90 +size 3823498 diff --git a/Image/MobileNetv3/model/2/epoch9/embeddings.npy b/Image/MobileNetv3/model/2/epoch9/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b7f0e25047faa8531ccbaf6a2db4a67173a9896b --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch9/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47e0b6d06f434afb6eb5724f1419c07abd77001a1b7b54cf185885d3119d43cd +size 115200128 diff --git a/Image/MobileNetv3/model/2/epoch9/subject_model.pth b/Image/MobileNetv3/model/2/epoch9/subject_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..abf6a34436249d5359785931045a60c1d60dc471 --- /dev/null +++ b/Image/MobileNetv3/model/2/epoch9/subject_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c1dc58a2301bb42e9bc94f3b3319fa0e27958a92d4ba470114eff1630745aa79 +size 3823498 diff --git a/Image/MobileNetv3/model/2/layer_info.json b/Image/MobileNetv3/model/2/layer_info.json new file mode 100644 index 
0000000000000000000000000000000000000000..b512735f031cee7d1fd3cf7779e960c7f948edcd --- /dev/null +++ b/Image/MobileNetv3/model/2/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avgpool", "dim": 576} \ No newline at end of file diff --git a/Image/ResNet/code/model.py b/Image/ResNet/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..e8e65e4fed1b8259436a3f4dcbc3ace8e6d738bf --- /dev/null +++ b/Image/ResNet/code/model.py @@ -0,0 +1,259 @@ +''' +ResNet in PyTorch. + +ResNet(深度残差网络)是由微软研究院的Kaiming He等人提出的深度神经网络架构。 +主要创新点是引入了残差学习的概念,通过跳跃连接解决了深层网络的退化问题。 + +主要特点: +1. 引入残差块(Residual Block),使用跳跃连接 +2. 使用Batch Normalization进行归一化 +3. 支持更深的网络结构(最深可达152层) +4. 在多个计算机视觉任务上取得了突破性进展 + +Reference: +[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun + Deep Residual Learning for Image Recognition. arXiv:1512.03385 +''' +import torch +import torch.nn as nn + +class BasicBlock(nn.Module): + """基础残差块 + + 用于ResNet18/34等浅层网络。结构为: + x -> Conv -> BN -> ReLU -> Conv -> BN -> (+) -> ReLU + |------------------------------------------| + + Args: + in_channels: 输入通道数 + out_channels: 输出通道数 + stride: 步长,用于下采样,默认为1 + + 注意:基础模块没有通道压缩,expansion=1 + """ + expansion = 1 + + def __init__(self, in_channels, out_channels, stride=1): + super(BasicBlock,self).__init__() + self.features = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(True), + nn.Conv2d(out_channels,out_channels, kernel_size=3, stride=1, padding=1, bias=False), + nn.BatchNorm2d(out_channels) + ) + + # 如果输入输出维度不等,则使用1x1卷积层来改变维度 + self.shortcut = nn.Sequential() + if stride != 1 or in_channels != self.expansion * out_channels: + self.shortcut = nn.Sequential( + nn.Conv2d(in_channels, self.expansion * out_channels, kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(self.expansion * out_channels), + ) + + def forward(self, x): + out = self.features(x) + out += self.shortcut(x) + out = torch.relu(out) + 
return out + + +class Bottleneck(nn.Module): + """瓶颈残差块 + + 用于ResNet50/101/152等深层网络。结构为: + x -> 1x1Conv -> BN -> ReLU -> 3x3Conv -> BN -> ReLU -> 1x1Conv -> BN -> (+) -> ReLU + |-------------------------------------------------------------------| + + Args: + in_channels: 输入通道数 + zip_channels: 压缩后的通道数 + stride: 步长,用于下采样,默认为1 + + 注意:通过1x1卷积先压缩通道数,再还原,expansion=4 + """ + expansion = 4 + + def __init__(self, in_channels, zip_channels, stride=1): + super(Bottleneck, self).__init__() + out_channels = self.expansion * zip_channels + self.features = nn.Sequential( + # 1x1卷积压缩通道 + nn.Conv2d(in_channels, zip_channels, kernel_size=1, bias=False), + nn.BatchNorm2d(zip_channels), + nn.ReLU(inplace=True), + # 3x3卷积提取特征 + nn.Conv2d(zip_channels, zip_channels, kernel_size=3, stride=stride, padding=1, bias=False), + nn.BatchNorm2d(zip_channels), + nn.ReLU(inplace=True), + # 1x1卷积还原通道 + nn.Conv2d(zip_channels, out_channels, kernel_size=1, bias=False), + nn.BatchNorm2d(out_channels) + ) + + self.shortcut = nn.Sequential() + if stride != 1 or in_channels != out_channels: + self.shortcut = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(out_channels) + ) + + def forward(self, x): + out = self.features(x) + out += self.shortcut(x) + out = torch.relu(out) + return out + +class ResNet(nn.Module): + """ResNet模型 + + 网络结构: + 1. 一个卷积层用于特征提取 + 2. 四个残差层,每层包含多个残差块 + 3. 
平均池化和全连接层进行分类 + + 对于CIFAR10,特征图大小变化为: + (32,32,3) -> [Conv] -> (32,32,64) -> [Layer1] -> (32,32,64) -> [Layer2] + -> (16,16,128) -> [Layer3] -> (8,8,256) -> [Layer4] -> (4,4,512) -> [AvgPool] + -> (1,1,512) -> [FC] -> (num_classes) + + Args: + block: 残差块类型(BasicBlock或Bottleneck) + num_blocks: 每层残差块数量的列表 + num_classes: 分类数量,默认为10 + verbose: 是否打印中间特征图大小 + init_weights: 是否初始化权重 + """ + def __init__(self, block, num_blocks, num_classes=10, verbose=False, init_weights=True): + super(ResNet, self).__init__() + self.verbose = verbose + self.in_channels = 64 + + # 第一层卷积 + self.features = nn.Sequential( + nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True) + ) + + # 四个残差层 + self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1) + self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2) + self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2) + self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2) + + # 分类层 + self.avg_pool = nn.AvgPool2d(kernel_size=4) + self.classifier = nn.Linear(512 * block.expansion, num_classes) + + if init_weights: + self._initialize_weights() + + def _make_layer(self, block, out_channels, num_blocks, stride): + """构建残差层 + + Args: + block: 残差块类型 + out_channels: 输出通道数 + num_blocks: 残差块数量 + stride: 第一个残差块的步长(用于下采样) + + Returns: + nn.Sequential: 残差层 + """ + strides = [stride] + [1] * (num_blocks - 1) + layers = [] + for stride in strides: + layers.append(block(self.in_channels, out_channels, stride)) + self.in_channels = out_channels * block.expansion + return nn.Sequential(*layers) + + def forward(self, x): + """前向传播 + + Args: + x: 输入张量,[N,3,32,32] + + Returns: + out: 输出张量,[N,num_classes] + """ + out = self.features(x) + if self.verbose: + print('block 1 output: {}'.format(out.shape)) + + out = self.layer1(out) + if self.verbose: + print('block 2 output: {}'.format(out.shape)) + + out = self.layer2(out) + if self.verbose: + print('block 3 output: 
{}'.format(out.shape)) + + out = self.layer3(out) + if self.verbose: + print('block 4 output: {}'.format(out.shape)) + + out = self.layer4(out) + if self.verbose: + print('block 5 output: {}'.format(out.shape)) + + out = self.avg_pool(out) + out = out.view(out.size(0), -1) + out = self.classifier(out) + return out + + def _initialize_weights(self): + """初始化模型权重 + + 采用kaiming初始化方法: + - 卷积层权重采用kaiming_normal_初始化 + - BN层参数采用常数初始化 + - 线性层采用正态分布初始化 + """ + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.constant_(m.bias, 0) + +def ResNet18(verbose=False): + """ResNet-18模型""" + return ResNet(BasicBlock, [2,2,2,2], verbose=verbose) + +def ResNet34(verbose=False): + """ResNet-34模型""" + return ResNet(BasicBlock, [3,4,6,3], verbose=verbose) + +def ResNet50(verbose=False): + """ResNet-50模型""" + return ResNet(Bottleneck, [3,4,6,3], verbose=verbose) + +def ResNet101(verbose=False): + """ResNet-101模型""" + return ResNet(Bottleneck, [3,4,23,3], verbose=verbose) + +def ResNet152(verbose=False): + """ResNet-152模型""" + return ResNet(Bottleneck, [3,8,36,3], verbose=verbose) + +def test(): + """测试函数""" + net = ResNet34() + x = torch.randn(2,3,32,32) + y = net(x) + print('Output shape:', y.size()) + + # 打印模型结构 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net,(2,3,32,32)) + +if __name__ == '__main__': + test() \ No newline at end of file diff --git a/Image/ResNet/code/train.py b/Image/ResNet/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..0c6cb16a5140cb10bbbaaf1c2a9535c1101a5424 --- /dev/null +++ b/Image/ResNet/code/train.py @@ -0,0 +1,59 @@ +import sys +import os 
+sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import ResNet34 + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = ResNet34() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='resnet', + save_type='0' + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='resnet', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='resnet', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + +if __name__ == '__main__': + main() diff --git a/Image/ResNet/dataset/.gitkeep b/Image/ResNet/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ResNet/model/.gitkeep b/Image/ResNet/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/SENet/code/model.py b/Image/SENet/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..88899e5bcc30f78fc61076a5667acd5dc680d564 
--- /dev/null +++ b/Image/SENet/code/model.py @@ -0,0 +1,251 @@ +''' +SENet (Squeeze-and-Excitation Networks) in PyTorch. + +SENet通过引入SE模块来自适应地重新校准通道特征响应。SE模块可以集成到现有的网络架构中, +通过显式建模通道之间的相互依赖关系,自适应地重新校准通道特征响应。 + +主要特点: +1. 引入Squeeze-and-Excitation(SE)模块,增强特征的表示能力 +2. SE模块包含squeeze操作(全局平均池化)和excitation操作(两个FC层) +3. 通过attention机制来增强重要通道的权重,抑制不重要通道 +4. 几乎可以嵌入到任何现有的网络结构中 + +Reference: +[1] Jie Hu, Li Shen, Samuel Albanie, Gang Sun, Enhua Wu + Squeeze-and-Excitation Networks. CVPR 2018. +''' +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class BasicBlock(nn.Module): + """基础残差块+SE模块 + + 结构: + x -> Conv -> BN -> ReLU -> Conv -> BN -> SE -> (+) -> ReLU + |------------------------------------------| + + Args: + in_channels: 输入通道数 + channels: 输出通道数 + stride: 步长,用于下采样,默认为1 + """ + def __init__(self, in_channels, channels, stride=1): + super(BasicBlock, self).__init__() + self.conv1 = nn.Conv2d(in_channels, channels, kernel_size=3, stride=stride, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(channels) + self.conv2 = nn.Conv2d(channels, channels, kernel_size=3, stride=1, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(channels) + + # 残差连接 + self.shortcut = nn.Sequential() + if stride != 1 or in_channels != channels: + self.shortcut = nn.Sequential( + nn.Conv2d(in_channels, channels, kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(channels) + ) + + # SE模块 + self.squeeze = nn.AdaptiveAvgPool2d(1) # 全局平均池化 + self.excitation = nn.Sequential( + nn.Conv2d(channels, channels//16, kernel_size=1), # 通道降维 + nn.ReLU(inplace=True), + nn.Conv2d(channels//16, channels, kernel_size=1), # 通道升维 + nn.Sigmoid() # 归一化到[0,1] + ) + + def forward(self, x): + # 主分支 + out = F.relu(self.bn1(self.conv1(x))) + out = self.bn2(self.conv2(out)) + + # SE模块 + w = self.squeeze(out) # Squeeze + w = self.excitation(w) # Excitation + out = out * w # 特征重标定 + + # 残差连接 + out += self.shortcut(x) + out = F.relu(out) + return out + + +class PreActBlock(nn.Module): + 
"""Pre-activation版本的基础块+SE模块 + + 结构: + x -> BN -> ReLU -> Conv -> BN -> ReLU -> Conv -> SE -> (+) + |-------------------------------------------| + + Args: + in_channels: 输入通道数 + channels: 输出通道数 + stride: 步长,用于下采样,默认为1 + """ + def __init__(self, in_channels, channels, stride=1): + super(PreActBlock, self).__init__() + self.bn1 = nn.BatchNorm2d(in_channels) + self.conv1 = nn.Conv2d(in_channels, channels, kernel_size=3, stride=stride, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(channels) + self.conv2 = nn.Conv2d(channels, channels, kernel_size=3, stride=1, padding=1, bias=False) + + # 残差连接 + if stride != 1 or in_channels != channels: + self.shortcut = nn.Sequential( + nn.Conv2d(in_channels, channels, kernel_size=1, stride=stride, bias=False) + ) + + # SE模块 + self.squeeze = nn.AdaptiveAvgPool2d(1) + self.excitation = nn.Sequential( + nn.Conv2d(channels, channels//16, kernel_size=1), + nn.ReLU(inplace=True), + nn.Conv2d(channels//16, channels, kernel_size=1), + nn.Sigmoid() + ) + + def forward(self, x): + # Pre-activation + out = F.relu(self.bn1(x)) + shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x + + # 主分支 + out = self.conv1(out) + out = self.conv2(F.relu(self.bn2(out))) + + # SE模块 + w = self.squeeze(out) + w = self.excitation(w) + out = out * w + + # 残差连接 + out += shortcut + return out + + +class SENet(nn.Module): + """SENet模型 + + 网络结构: + 1. 一个卷积层进行特征提取 + 2. 四个残差层,每层包含多个带SE模块的残差块 + 3. 
平均池化和全连接层进行分类 + + Args: + block: 残差块类型(BasicBlock或PreActBlock) + num_blocks: 每层残差块数量的列表 + num_classes: 分类数量,默认为10 + """ + def __init__(self, block, num_blocks, num_classes=10): + super(SENet, self).__init__() + self.in_channels = 64 + + # 第一层卷积 + self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(64) + + # 四个残差层 + self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1) + self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2) + self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2) + self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2) + + # 分类层 + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.classifier = nn.Linear(512, num_classes) + + # 初始化权重 + self._initialize_weights() + + def _make_layer(self, block, channels, num_blocks, stride): + """构建残差层 + + Args: + block: 残差块类型 + channels: 输出通道数 + num_blocks: 残差块数量 + stride: 第一个残差块的步长(用于下采样) + + Returns: + nn.Sequential: 残差层 + """ + strides = [stride] + [1]*(num_blocks-1) + layers = [] + for stride in strides: + layers.append(block(self.in_channels, channels, stride)) + self.in_channels = channels + return nn.Sequential(*layers) + + def forward(self, x): + """前向传播 + + Args: + x: 输入张量,[N,3,32,32] + + Returns: + out: 输出张量,[N,num_classes] + """ + # 特征提取 + out = F.relu(self.bn1(self.conv1(x))) + + # 残差层 + out = self.layer1(out) + out = self.layer2(out) + out = self.layer3(out) + out = self.layer4(out) + + # 分类 + out = self.avg_pool(out) + out = out.view(out.size(0), -1) + out = self.classifier(out) + return out + + def _initialize_weights(self): + """初始化模型权重 + + 采用kaiming初始化方法: + - 卷积层权重采用kaiming_normal_初始化 + - BN层参数采用常数初始化 + - 线性层采用正态分布初始化 + """ + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + 
nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.constant_(m.bias, 0) + + +def SENet18(): + """SENet-18模型""" + return SENet(PreActBlock, [2,2,2,2]) + + +def test(): + """测试函数""" + # 创建模型 + net = SENet18() + print('Model Structure:') + print(net) + + # 测试前向传播 + x = torch.randn(1,3,32,32) + y = net(x) + print('\nInput Shape:', x.shape) + print('Output Shape:', y.shape) + + # 打印模型信息 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net, (1,3,32,32)) + + +if __name__ == '__main__': + test() \ No newline at end of file diff --git a/Image/SENet/code/train.py b/Image/SENet/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..39e53b402f24b97bc032cc1c96f32755772a4564 --- /dev/null +++ b/Image/SENet/code/train.py @@ -0,0 +1,59 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import SENet + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = SENet() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='senet', + save_type='0' + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='senet', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif 
args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='senet', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + +if __name__ == '__main__': + main() diff --git a/Image/SENet/dataset/.gitkeep b/Image/SENet/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/SENet/model/.gitkeep b/Image/SENet/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ShuffleNet/code/model.py b/Image/ShuffleNet/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..9fb599570de101d901f3b6065eee4b30e146def3 --- /dev/null +++ b/Image/ShuffleNet/code/model.py @@ -0,0 +1,263 @@ +''' +ShuffleNet in PyTorch. + +ShuffleNet是一个专门为移动设备设计的高效卷积神经网络。其主要创新点在于使用了两个新操作: +1. 逐点组卷积(pointwise group convolution) +2. 通道重排(channel shuffle) +这两个操作大大降低了计算复杂度,同时保持了良好的准确率。 + +主要特点: +1. 使用组卷积减少参数量和计算量 +2. 使用通道重排操作使不同组之间的信息可以流通 +3. 使用深度可分离卷积进一步降低计算复杂度 +4. 设计了多个计算复杂度版本以适应不同的设备 + +Reference: +[1] Xiangyu Zhang, Xinyu Zhou, Mengxiao Lin, Jian Sun + ShuffleNet: An Extremely Efficient Convolutional Neural Network for Mobile Devices. CVPR 2018. +''' +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class ShuffleBlock(nn.Module): + """通道重排模块 + + 通过重新排列通道的顺序来实现不同组之间的信息交流。 + + Args: + groups (int): 分组数量 + """ + def __init__(self, groups): + super(ShuffleBlock, self).__init__() + self.groups = groups + + def forward(self, x): + """通道重排的前向传播 + + 步骤: + 1. [N,C,H,W] -> [N,g,C/g,H,W] # 重塑为g组 + 2. [N,g,C/g,H,W] -> [N,C/g,g,H,W] # 转置g维度 + 3. 
[N,C/g,g,H,W] -> [N,C,H,W] # 重塑回原始形状 + + Args: + x: 输入张量,[N,C,H,W] + + Returns: + out: 通道重排后的张量,[N,C,H,W] + """ + N, C, H, W = x.size() + g = self.groups + return x.view(N,g,C//g,H,W).permute(0,2,1,3,4).reshape(N,C,H,W) + + +class Bottleneck(nn.Module): + """ShuffleNet的基本模块 + + 结构: + x -> 1x1 GConv -> BN -> Shuffle -> 3x3 DWConv -> BN -> 1x1 GConv -> BN -> (+) -> ReLU + |---------------------| + + Args: + in_channels (int): 输入通道数 + out_channels (int): 输出通道数 + stride (int): 步长,用于下采样 + groups (int): 组卷积的分组数 + """ + def __init__(self, in_channels, out_channels, stride, groups): + super(Bottleneck, self).__init__() + self.stride = stride + + # 确定中间通道数和分组数 + mid_channels = out_channels // 4 + g = 1 if in_channels == 24 else groups + + # 第一个1x1组卷积 + self.conv1 = nn.Conv2d(in_channels, mid_channels, + kernel_size=1, groups=g, bias=False) + self.bn1 = nn.BatchNorm2d(mid_channels) + self.shuffle1 = ShuffleBlock(groups=g) + + # 3x3深度可分离卷积 + self.conv2 = nn.Conv2d(mid_channels, mid_channels, + kernel_size=3, stride=stride, padding=1, + groups=mid_channels, bias=False) + self.bn2 = nn.BatchNorm2d(mid_channels) + + # 第二个1x1组卷积 + self.conv3 = nn.Conv2d(mid_channels, out_channels, + kernel_size=1, groups=groups, bias=False) + self.bn3 = nn.BatchNorm2d(out_channels) + + # 残差连接 + self.shortcut = nn.Sequential() + if stride == 2: + self.shortcut = nn.Sequential( + nn.AvgPool2d(3, stride=2, padding=1) + ) + + def forward(self, x): + # 主分支 + out = F.relu(self.bn1(self.conv1(x))) + out = self.shuffle1(out) + out = F.relu(self.bn2(self.conv2(out))) + out = self.bn3(self.conv3(out)) + + # 残差连接 + res = self.shortcut(x) + + # 如果是下采样层,拼接残差;否则相加 + out = F.relu(torch.cat([out, res], 1)) if self.stride == 2 else F.relu(out + res) + return out + + +class ShuffleNet(nn.Module): + """ShuffleNet模型 + + 网络结构: + 1. 一个卷积层进行特征提取 + 2. 三个阶段,每个阶段包含多个带重排的残差块 + 3. 
平均池化和全连接层进行分类 + + Args: + cfg (dict): 配置字典,包含: + - out_channels (list): 每个阶段的输出通道数 + - num_blocks (list): 每个阶段的块数 + - groups (int): 组卷积的分组数 + """ + def __init__(self, cfg): + super(ShuffleNet, self).__init__() + out_channels = cfg['out_channels'] + num_blocks = cfg['num_blocks'] + groups = cfg['groups'] + + # 第一层卷积 + self.conv1 = nn.Conv2d(3, 24, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(24) + self.in_channels = 24 + + # 三个阶段 + self.layer1 = self._make_layer(out_channels[0], num_blocks[0], groups) + self.layer2 = self._make_layer(out_channels[1], num_blocks[1], groups) + self.layer3 = self._make_layer(out_channels[2], num_blocks[2], groups) + + # 分类层 + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.classifier = nn.Linear(out_channels[2], 10) + + # 初始化权重 + self._initialize_weights() + + def _make_layer(self, out_channels, num_blocks, groups): + """构建ShuffleNet的一个阶段 + + Args: + out_channels (int): 输出通道数 + num_blocks (int): 块的数量 + groups (int): 分组数 + + Returns: + nn.Sequential: 一个阶段的层序列 + """ + layers = [] + for i in range(num_blocks): + stride = 2 if i == 0 else 1 + cat_channels = self.in_channels if i == 0 else 0 + layers.append( + Bottleneck( + self.in_channels, + out_channels - cat_channels, + stride=stride, + groups=groups + ) + ) + self.in_channels = out_channels + return nn.Sequential(*layers) + + def forward(self, x): + """前向传播 + + Args: + x: 输入张量,[N,3,32,32] + + Returns: + out: 输出张量,[N,num_classes] + """ + # 特征提取 + out = F.relu(self.bn1(self.conv1(x))) + + # 三个阶段 + out = self.layer1(out) + out = self.layer2(out) + out = self.layer3(out) + + # 分类 + out = self.avg_pool(out) + out = out.view(out.size(0), -1) + out = self.classifier(out) + return out + + def _initialize_weights(self): + """初始化模型权重 + + 采用kaiming初始化方法: + - 卷积层权重采用kaiming_normal_初始化 + - BN层参数采用常数初始化 + - 线性层采用正态分布初始化 + """ + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + 
nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.constant_(m.bias, 0) + + +def ShuffleNetG2(): + """返回groups=2的ShuffleNet模型""" + cfg = { + 'out_channels': [200,400,800], + 'num_blocks': [4,8,4], + 'groups': 2 + } + return ShuffleNet(cfg) + + +def ShuffleNetG3(): + """返回groups=3的ShuffleNet模型""" + cfg = { + 'out_channels': [240,480,960], + 'num_blocks': [4,8,4], + 'groups': 3 + } + return ShuffleNet(cfg) + + +def test(): + """测试函数""" + # 创建模型 + net = ShuffleNetG2() + print('Model Structure:') + print(net) + + # 测试前向传播 + x = torch.randn(1,3,32,32) + y = net(x) + print('\nInput Shape:', x.shape) + print('Output Shape:', y.shape) + + # 打印模型信息 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net, (1,3,32,32)) + + +if __name__ == '__main__': + test() \ No newline at end of file diff --git a/Image/ShuffleNet/code/train.py b/Image/ShuffleNet/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..32a725a6310759fe02f85b7932896c9275eaad53 --- /dev/null +++ b/Image/ShuffleNet/code/train.py @@ -0,0 +1,59 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import ShuffleNet + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = ShuffleNet() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + 
device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='shufflenet', + save_type='0' + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='shufflenet', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='shufflenet', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + +if __name__ == '__main__': + main() diff --git a/Image/ShuffleNet/dataset/.gitkeep b/Image/ShuffleNet/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ShuffleNet/model/.gitkeep b/Image/ShuffleNet/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ShuffleNetv2/code/model.py b/Image/ShuffleNetv2/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..ca097bcd85c9fba957263f59fa9100c22c5e24fb --- /dev/null +++ b/Image/ShuffleNetv2/code/model.py @@ -0,0 +1,345 @@ +''' +ShuffleNetV2 in PyTorch. + +ShuffleNetV2是ShuffleNet的改进版本,通过实验总结出了四个高效网络设计的实用准则: +1. 输入输出通道数相等时计算量最小 +2. 过度使用组卷积会增加MAC(内存访问代价) +3. 网络碎片化会降低并行度 +4. Element-wise操作不可忽视 + +主要改进: +1. 通道分离(Channel Split)替代组卷积 +2. 重新设计了基本单元,使输入输出通道数相等 +3. 每个阶段使用不同的通道数配置 +4. 简化了下采样模块的设计 + +Reference: +[1] Ningning Ma, Xiangyu Zhang, Hai-Tao Zheng, Jian Sun + ShuffleNet V2: Practical Guidelines for Efficient CNN Architecture Design. ECCV 2018. 
+''' +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class ShuffleBlock(nn.Module): + """通道重排模块 + + 通过重新排列通道的顺序来实现不同特征的信息交流。 + + Args: + groups (int): 分组数量,默认为2 + """ + def __init__(self, groups=2): + super(ShuffleBlock, self).__init__() + self.groups = groups + + def forward(self, x): + """通道重排的前向传播 + + 步骤: + 1. [N,C,H,W] -> [N,g,C/g,H,W] # 重塑为g组 + 2. [N,g,C/g,H,W] -> [N,C/g,g,H,W] # 转置g维度 + 3. [N,C/g,g,H,W] -> [N,C,H,W] # 重塑回原始形状 + + Args: + x: 输入张量,[N,C,H,W] + + Returns: + out: 通道重排后的张量,[N,C,H,W] + """ + N, C, H, W = x.size() + g = self.groups + return x.view(N, g, C//g, H, W).permute(0, 2, 1, 3, 4).reshape(N, C, H, W) + + +class SplitBlock(nn.Module): + """通道分离模块 + + 将输入特征图按比例分成两部分。 + + Args: + ratio (float): 分离比例,默认为0.5 + """ + def __init__(self, ratio): + super(SplitBlock, self).__init__() + self.ratio = ratio + + def forward(self, x): + """通道分离的前向传播 + + Args: + x: 输入张量,[N,C,H,W] + + Returns: + tuple: 分离后的两个张量,[N,C1,H,W]和[N,C2,H,W] + """ + c = int(x.size(1) * self.ratio) + return x[:, :c, :, :], x[:, c:, :, :] + + +class BasicBlock(nn.Module): + """ShuffleNetV2的基本模块 + + 结构: + x -------|-----------------| + | | | + | 1x1 Conv | + | 3x3 DWConv | + | 1x1 Conv | + | | + |------------------Concat + | + Channel Shuffle + + Args: + in_channels (int): 输入通道数 + split_ratio (float): 通道分离比例,默认为0.5 + """ + def __init__(self, in_channels, split_ratio=0.5): + super(BasicBlock, self).__init__() + self.split = SplitBlock(split_ratio) + in_channels = int(in_channels * split_ratio) + + # 主分支 + self.conv1 = nn.Conv2d(in_channels, in_channels, + kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(in_channels) + + self.conv2 = nn.Conv2d(in_channels, in_channels, + kernel_size=3, stride=1, padding=1, + groups=in_channels, bias=False) + self.bn2 = nn.BatchNorm2d(in_channels) + + self.conv3 = nn.Conv2d(in_channels, in_channels, + kernel_size=1, bias=False) + self.bn3 = nn.BatchNorm2d(in_channels) + + self.shuffle = ShuffleBlock() + + def forward(self, 
x): + # 通道分离 + x1, x2 = self.split(x) + + # 主分支 + out = F.relu(self.bn1(self.conv1(x2))) + out = self.bn2(self.conv2(out)) + out = F.relu(self.bn3(self.conv3(out))) + + # 拼接并重排 + out = torch.cat([x1, out], 1) + out = self.shuffle(out) + return out + + +class DownBlock(nn.Module): + """下采样模块 + + 结构: + 3x3 DWConv(s=2) 1x1 Conv + x -----> 1x1 Conv 3x3 DWConv(s=2) + 1x1 Conv + | + Concat + | + Channel Shuffle + + Args: + in_channels (int): 输入通道数 + out_channels (int): 输出通道数 + """ + def __init__(self, in_channels, out_channels): + super(DownBlock, self).__init__() + mid_channels = out_channels // 2 + + # 左分支 + self.branch1 = nn.Sequential( + # 3x3深度可分离卷积,步长为2 + nn.Conv2d(in_channels, in_channels, + kernel_size=3, stride=2, padding=1, + groups=in_channels, bias=False), + nn.BatchNorm2d(in_channels), + # 1x1卷积 + nn.Conv2d(in_channels, mid_channels, + kernel_size=1, bias=False), + nn.BatchNorm2d(mid_channels) + ) + + # 右分支 + self.branch2 = nn.Sequential( + # 1x1卷积 + nn.Conv2d(in_channels, mid_channels, + kernel_size=1, bias=False), + nn.BatchNorm2d(mid_channels), + # 3x3深度可分离卷积,步长为2 + nn.Conv2d(mid_channels, mid_channels, + kernel_size=3, stride=2, padding=1, + groups=mid_channels, bias=False), + nn.BatchNorm2d(mid_channels), + # 1x1卷积 + nn.Conv2d(mid_channels, mid_channels, + kernel_size=1, bias=False), + nn.BatchNorm2d(mid_channels) + ) + + self.shuffle = ShuffleBlock() + + def forward(self, x): + # 左分支 + out1 = self.branch1(x) + + # 右分支 + out2 = self.branch2(x) + + # 拼接并重排 + out = torch.cat([out1, out2], 1) + out = self.shuffle(out) + return out + + +class ShuffleNetV2(nn.Module): + """ShuffleNetV2模型 + + 网络结构: + 1. 一个卷积层进行特征提取 + 2. 三个阶段,每个阶段包含多个基本块和一个下采样块 + 3. 最后一个卷积层 + 4. 
平均池化和全连接层进行分类 + + Args: + net_size (float): 网络大小系数,可选0.5/1.0/1.5/2.0 + """ + def __init__(self, net_size): + super(ShuffleNetV2, self).__init__() + out_channels = configs[net_size]['out_channels'] + num_blocks = configs[net_size]['num_blocks'] + + # 第一层卷积 + self.conv1 = nn.Conv2d(3, 24, kernel_size=3, + stride=1, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(24) + self.in_channels = 24 + + # 三个阶段 + self.layer1 = self._make_layer(out_channels[0], num_blocks[0]) + self.layer2 = self._make_layer(out_channels[1], num_blocks[1]) + self.layer3 = self._make_layer(out_channels[2], num_blocks[2]) + + # 最后的1x1卷积 + self.conv2 = nn.Conv2d(out_channels[2], out_channels[3], + kernel_size=1, stride=1, padding=0, bias=False) + self.bn2 = nn.BatchNorm2d(out_channels[3]) + + # 分类层 + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.classifier = nn.Linear(out_channels[3], 10) + + # 初始化权重 + self._initialize_weights() + + def _make_layer(self, out_channels, num_blocks): + """构建一个阶段 + + Args: + out_channels (int): 输出通道数 + num_blocks (int): 基本块的数量 + + Returns: + nn.Sequential: 一个阶段的层序列 + """ + layers = [DownBlock(self.in_channels, out_channels)] + for i in range(num_blocks): + layers.append(BasicBlock(out_channels)) + self.in_channels = out_channels + return nn.Sequential(*layers) + + def forward(self, x): + """前向传播 + + Args: + x: 输入张量,[N,3,32,32] + + Returns: + out: 输出张量,[N,num_classes] + """ + # 特征提取 + out = F.relu(self.bn1(self.conv1(x))) + + # 三个阶段 + out = self.layer1(out) + out = self.layer2(out) + out = self.layer3(out) + + # 最后的特征提取 + out = F.relu(self.bn2(self.conv2(out))) + + # 分类 + out = self.avg_pool(out) + out = out.view(out.size(0), -1) + out = self.classifier(out) + return out + + def _initialize_weights(self): + """初始化模型权重 + + 采用kaiming初始化方法: + - 卷积层权重采用kaiming_normal_初始化 + - BN层参数采用常数初始化 + - 线性层采用正态分布初始化 + """ + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + 
nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.constant_(m.bias, 0) + + +# 不同大小的网络配置 +configs = { + 0.5: { + 'out_channels': (48, 96, 192, 1024), + 'num_blocks': (3, 7, 3) + }, + 1.0: { + 'out_channels': (116, 232, 464, 1024), + 'num_blocks': (3, 7, 3) + }, + 1.5: { + 'out_channels': (176, 352, 704, 1024), + 'num_blocks': (3, 7, 3) + }, + 2.0: { + 'out_channels': (224, 488, 976, 2048), + 'num_blocks': (3, 7, 3) + } +} + + +def test(): + """测试函数""" + # 创建模型 + net = ShuffleNetV2(net_size=0.5) + print('Model Structure:') + print(net) + + # 测试前向传播 + x = torch.randn(1,3,32,32) + y = net(x) + print('\nInput Shape:', x.shape) + print('Output Shape:', y.shape) + + # 打印模型信息 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net, (1,3,32,32)) + + +if __name__ == '__main__': + test() \ No newline at end of file diff --git a/Image/ShuffleNetv2/code/train.py b/Image/ShuffleNetv2/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..d54df9e801b9bda7921befe94ad61978f0c8f2de --- /dev/null +++ b/Image/ShuffleNetv2/code/train.py @@ -0,0 +1,59 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import ShuffleNetv2 + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = ShuffleNetv2() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + 
epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='shufflenetv2', + save_type='0' + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='shufflenetv2', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='shufflenetv2', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + +if __name__ == '__main__': + main() diff --git a/Image/ShuffleNetv2/dataset/.gitkeep b/Image/ShuffleNetv2/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ShuffleNetv2/model/.gitkeep b/Image/ShuffleNetv2/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/SwinTransformer/code/model.py b/Image/SwinTransformer/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..c34d517f0e8e5ca70a4e901ff5527d6a766a2b78 --- /dev/null +++ b/Image/SwinTransformer/code/model.py @@ -0,0 +1,230 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.checkpoint as checkpoint +import numpy as np +from timm.models.layers import DropPath, trunc_normal_ + +class Mlp(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() 
+ self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +def window_partition(x, window_size): + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + return windows + +def window_reverse(windows, window_size, H, W): + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + +class WindowAttention(nn.Module): + def __init__(self, dim, window_size, num_heads, qkv_bias=True, attn_drop=0., proj_drop=0.): + super().__init__() + self.dim = dim + self.window_size = window_size + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x): + B_, N, C = x.shape + qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] + + q = q * self.scale + attn = (q @ k.transpose(-2, -1)) + attn = self.softmax(attn) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + +class SwinTransformerBlock(nn.Module): + def __init__(self, dim, num_heads, window_size=7, shift_size=0, + mlp_ratio=4., qkv_bias=True, drop=0., attn_drop=0., drop_path=0., + act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.dim = dim + self.num_heads = num_heads + self.window_size = window_size + self.shift_size = 
shift_size + self.mlp_ratio = mlp_ratio + + self.norm1 = norm_layer(dim) + self.attn = WindowAttention( + dim, window_size=window_size, num_heads=num_heads, + qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + def forward(self, x): + H, W = self.H, self.W + B, L, C = x.shape + assert L == H * W, "input feature has wrong size" + + shortcut = x + x = self.norm1(x) + x = x.view(B, H, W, C) + + # pad feature maps to multiples of window size + pad_l = pad_t = 0 + pad_r = (self.window_size - W % self.window_size) % self.window_size + pad_b = (self.window_size - H % self.window_size) % self.window_size + x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b)) + _, Hp, Wp, _ = x.shape + + # cyclic shift + if self.shift_size > 0: + shifted_x = torch.roll(x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2)) + else: + shifted_x = x + + # partition windows + x_windows = window_partition(shifted_x, self.window_size) + x_windows = x_windows.view(-1, self.window_size * self.window_size, C) + + # W-MSA/SW-MSA + attn_windows = self.attn(x_windows) + + # merge windows + attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C) + shifted_x = window_reverse(attn_windows, self.window_size, Hp, Wp) + + # reverse cyclic shift + if self.shift_size > 0: + x = torch.roll(shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2)) + else: + x = shifted_x + + if pad_r > 0 or pad_b > 0: + x = x[:, :H, :W, :].contiguous() + + x = x.view(B, H * W, C) + + # FFN + x = shortcut + self.drop_path(x) + x = x + self.drop_path(self.mlp(self.norm2(x))) + + return x + +class PatchEmbed(nn.Module): + def __init__(self, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None): + super().__init__() + self.patch_size = patch_size + 
self.in_chans = in_chans + self.embed_dim = embed_dim + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x): + _, _, H, W = x.shape + + # padding + pad_input = (H % self.patch_size != 0) or (W % self.patch_size != 0) + if pad_input: + x = F.pad(x, (0, self.patch_size - W % self.patch_size, + 0, self.patch_size - H % self.patch_size, + 0, 0)) + + x = self.proj(x) + x = x.flatten(2).transpose(1, 2) # B Ph*Pw C + x = self.norm(x) + return x + +class SwinTransformer(nn.Module): + def __init__(self, img_size=32, patch_size=4, in_chans=3, num_classes=10, + embed_dim=96, depths=[2, 2, 6, 2], num_heads=[3, 6, 12, 24], + window_size=7, mlp_ratio=4., qkv_bias=True, + drop_rate=0., attn_drop_rate=0., drop_path_rate=0.1, + norm_layer=nn.LayerNorm, patch_norm=True): + super().__init__() + + self.num_classes = num_classes + self.num_layers = len(depths) + self.embed_dim = embed_dim + self.patch_norm = patch_norm + + # split image into non-overlapping patches + self.patch_embed = PatchEmbed( + patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None) + + self.pos_drop = nn.Dropout(p=drop_rate) + + # build layers + layers = [] + for i_layer in range(self.num_layers): + layer = SwinTransformerBlock( + dim=embed_dim, + num_heads=num_heads[i_layer], + window_size=window_size, + shift_size=0 if (i_layer % 2 == 0) else window_size // 2, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=drop_path_rate, + norm_layer=norm_layer) + layers.append(layer) + + self.layers = nn.ModuleList(layers) + self.norm = norm_layer(embed_dim) + self.avgpool = nn.AdaptiveAvgPool1d(1) + self.head = nn.Linear(embed_dim, num_classes) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if 
isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def forward(self, x): + x = self.patch_embed(x) + x = self.pos_drop(x) + + for layer in self.layers: + layer.H, layer.W = x.size(1), x.size(2) + x = layer(x) + + x = self.norm(x) + x = self.avgpool(x.transpose(1, 2)) + x = torch.flatten(x, 1) + x = self.head(x) + + return x diff --git a/Image/SwinTransformer/code/train.py b/Image/SwinTransformer/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..cbc713c15968af0e414fb136d1c0e3fee6afdc67 --- /dev/null +++ b/Image/SwinTransformer/code/train.py @@ -0,0 +1,59 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import SwinTransformer + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = SwinTransformer() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='swintransformer', + save_type='0' + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='swintransformer', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + 
epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='swintransformer', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + +if __name__ == '__main__': + main() diff --git a/Image/SwinTransformer/dataset/.gitkeep b/Image/SwinTransformer/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/SwinTransformer/model/.gitkeep b/Image/SwinTransformer/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/VGG/code/model.py b/Image/VGG/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..61a15d361d553288fca05524db618534c292ee1b --- /dev/null +++ b/Image/VGG/code/model.py @@ -0,0 +1,204 @@ +''' +VGG Networks in PyTorch + +VGG是由牛津大学Visual Geometry Group提出的一个深度卷积神经网络模型。 +主要特点: +1. 使用小卷积核(3x3)代替大卷积核,降低参数量 +2. 深层网络结构,多个卷积层叠加 +3. 使用多个3x3卷积层的组合来代替大的感受野 +4. 结构规整,易于扩展 + +网络结构示例(VGG16): +input + └─> [(Conv3x3, 64) × 2, MaxPool] + └─> [(Conv3x3, 128) × 2, MaxPool] + └─> [(Conv3x3, 256) × 3, MaxPool] + └─> [(Conv3x3, 512) × 3, MaxPool] + └─> [(Conv3x3, 512) × 3, MaxPool] + └─> [AvgPool, Flatten] + └─> FC(512, num_classes) + +参考论文: +[1] K. Simonyan and A. Zisserman, "Very Deep Convolutional Networks for Large-Scale Image Recognition," + arXiv preprint arXiv:1409.1556, 2014. 
+''' + +import torch +import torch.nn as nn + + +# VGG配置参数 +# M表示MaxPool层,数字表示输出通道数 +cfg = { + 'VGG11': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], + 'VGG13': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], + 'VGG16': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'], + 'VGG19': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'], +} + + +class ConvBlock(nn.Module): + """VGG的基本卷积块 + + 包含: Conv2d -> BatchNorm -> ReLU + 使用3x3卷积核,步长为1,padding为1以保持特征图大小不变 + + Args: + in_channels (int): 输入通道数 + out_channels (int): 输出通道数 + batch_norm (bool): 是否使用BatchNorm,默认为True + """ + def __init__(self, in_channels, out_channels, batch_norm=True): + super(ConvBlock, self).__init__() + + layers = [] + # 3x3卷积,padding=1保持特征图大小不变 + layers.append( + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=3, + stride=1, + padding=1 + ) + ) + + # 添加BatchNorm + if batch_norm: + layers.append(nn.BatchNorm2d(out_channels)) + + # ReLU激活函数 + layers.append(nn.ReLU(inplace=True)) + + self.block = nn.Sequential(*layers) + + def forward(self, x): + """前向传播 + + Args: + x (torch.Tensor): 输入特征图 + + Returns: + torch.Tensor: 输出特征图 + """ + return self.block(x) + + +class VGG(nn.Module): + """VGG网络模型 + + Args: + vgg_name (str): VGG变体名称,可选VGG11/13/16/19 + num_classes (int): 分类数量,默认为10 + batch_norm (bool): 是否使用BatchNorm,默认为True + init_weights (bool): 是否初始化权重,默认为True + """ + def __init__(self, vgg_name='VGG16', num_classes=10, batch_norm=True, init_weights=True): + super(VGG, self).__init__() + + # 特征提取层 + self.features = self._make_layers(cfg[vgg_name], batch_norm) + + # 全局平均池化 + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + + # 分类器 + self.classifier = nn.Sequential( + nn.Linear(512, num_classes) + ) + + # 初始化权重 + if init_weights: + self._initialize_weights() + + def _make_layers(self, cfg, batch_norm=True): + """构建VGG的特征提取层 + + Args: + cfg 
(List): 网络配置参数 + batch_norm (bool): 是否使用BatchNorm + + Returns: + nn.Sequential: 特征提取层序列 + """ + layers = [] + in_channels = 3 + + for x in cfg: + if x == 'M': # 最大池化层 + layers.append(nn.MaxPool2d(kernel_size=2, stride=2)) + else: # 卷积块 + layers.append(ConvBlock(in_channels, x, batch_norm)) + in_channels = x + + return nn.Sequential(*layers) + + def forward(self, x): + """前向传播 + + Args: + x (torch.Tensor): 输入图像张量,[N,3,H,W] + + Returns: + torch.Tensor: 输出预测张量,[N,num_classes] + """ + # 特征提取 + x = self.features(x) + + # 全局平均池化 + x = self.avgpool(x) + + # 展平 + x = torch.flatten(x, 1) + + # 分类 + x = self.classifier(x) + return x + + def _initialize_weights(self): + """初始化模型权重 + + 采用论文中的初始化方法: + - 卷积层: xavier初始化 + - BatchNorm: weight=1, bias=0 + - 线性层: 正态分布初始化(std=0.01) + """ + for m in self.modules(): + if isinstance(m, nn.Conv2d): + # VGG论文中使用了xavier初始化 + nn.init.xavier_normal_(m.weight) + if m.bias is not None: + nn.init.zeros_(m.bias) + elif isinstance(m, nn.BatchNorm2d): + nn.init.ones_(m.weight) + nn.init.zeros_(m.bias) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.zeros_(m.bias) + + +def test(): + """测试函数 + + 创建VGG模型并进行前向传播测试,打印模型结构和参数信息 + """ + # 创建模型 + net = VGG('VGG16') + print('Model Structure:') + print(net) + + # 测试前向传播 + x = torch.randn(2, 3, 32, 32) + y = net(x) + print('\nInput Shape:', x.shape) + print('Output Shape:', y.shape) + + # 打印模型信息 + from torchinfo import summary + device = 'cuda' if torch.cuda.is_available() else 'cpu' + net = net.to(device) + summary(net, (2, 3, 32, 32)) + + +if __name__ == '__main__': + test() \ No newline at end of file diff --git a/Image/VGG/code/train.py b/Image/VGG/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..198793b838aff3d5c2abdae6d6267b61dc38339e --- /dev/null +++ b/Image/VGG/code/train.py @@ -0,0 +1,59 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils 
import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import VGG + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = VGG() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='vgg', + save_type='0' + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='vgg', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='vgg', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + +if __name__ == '__main__': + main() diff --git a/Image/VGG/dataset/.gitkeep b/Image/VGG/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/VGG/model/.gitkeep b/Image/VGG/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ViT/code/model.py b/Image/ViT/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..f34402724e1c857622a8e8eb0e7e2ad4ae795353 --- /dev/null +++ b/Image/ViT/code/model.py @@ -0,0 +1,171 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +class 
PatchEmbed(nn.Module): + """ 将图像分成patch并进行embedding """ + def __init__(self, img_size=32, patch_size=4, in_chans=3, embed_dim=96): + super().__init__() + self.img_size = img_size + self.patch_size = patch_size + self.n_patches = (img_size // patch_size) ** 2 + + self.proj = nn.Conv2d( + in_chans, embed_dim, + kernel_size=patch_size, stride=patch_size + ) + + def forward(self, x): + x = self.proj(x) # (B, E, H/P, W/P) + x = x.flatten(2) # (B, E, N) + x = x.transpose(1, 2) # (B, N, E) + return x + +class Attention(nn.Module): + """ 多头自注意力机制 """ + def __init__(self, dim, n_heads=8, qkv_bias=True, attn_p=0., proj_p=0.): + super().__init__() + self.n_heads = n_heads + self.dim = dim + self.head_dim = dim // n_heads + self.scale = self.head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_p) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_p) + + def forward(self, x): + n_samples, n_tokens, dim = x.shape + + if dim != self.dim: + raise ValueError + + qkv = self.qkv(x) # (n_samples, n_patches + 1, 3 * dim) + qkv = qkv.reshape( + n_samples, n_tokens, 3, self.n_heads, self.head_dim + ) # (n_samples, n_patches + 1, 3, n_heads, head_dim) + qkv = qkv.permute(2, 0, 3, 1, 4) # (3, n_samples, n_heads, n_patches + 1, head_dim) + q, k, v = qkv[0], qkv[1], qkv[2] # each with shape (n_samples, n_heads, n_patches + 1, head_dim) + + k_t = k.transpose(-2, -1) # (n_samples, n_heads, head_dim, n_patches + 1) + dp = (q @ k_t) * self.scale # (n_samples, n_heads, n_patches + 1, n_patches + 1) + attn = dp.softmax(dim=-1) # (n_samples, n_heads, n_patches + 1, n_patches + 1) + attn = self.attn_drop(attn) + + weighted_avg = attn @ v # (n_samples, n_heads, n_patches + 1, head_dim) + weighted_avg = weighted_avg.transpose(1, 2) # (n_samples, n_patches + 1, n_heads, head_dim) + weighted_avg = weighted_avg.flatten(2) # (n_samples, n_patches + 1, dim) + + x = self.proj(weighted_avg) # (n_samples, n_patches + 1, dim) + x = 
self.proj_drop(x) # (n_samples, n_patches + 1, dim) + + return x + +class MLP(nn.Module): + """ 多层感知机 """ + def __init__(self, in_features, hidden_features, out_features, p=0.): + super().__init__() + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = nn.GELU() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(p) + + def forward(self, x): + x = self.fc1(x) # (n_samples, n_patches + 1, hidden_features) + x = self.act(x) # (n_samples, n_patches + 1, hidden_features) + x = self.drop(x) # (n_samples, n_patches + 1, hidden_features) + x = self.fc2(x) # (n_samples, n_patches + 1, out_features) + x = self.drop(x) # (n_samples, n_patches + 1, out_features) + + return x + +class Block(nn.Module): + """ Transformer编码器块 """ + def __init__(self, dim, n_heads, mlp_ratio=4.0, qkv_bias=True, + p=0., attn_p=0.): + super().__init__() + self.norm1 = nn.LayerNorm(dim, eps=1e-6) + self.attn = Attention( + dim, + n_heads=n_heads, + qkv_bias=qkv_bias, + attn_p=attn_p, + proj_p=p + ) + self.norm2 = nn.LayerNorm(dim, eps=1e-6) + hidden_features = int(dim * mlp_ratio) + self.mlp = MLP( + in_features=dim, + hidden_features=hidden_features, + out_features=dim, + ) + + def forward(self, x): + x = x + self.attn(self.norm1(x)) + x = x + self.mlp(self.norm2(x)) + return x + +class ViT(nn.Module): + """ Vision Transformer """ + def __init__( + self, + img_size=32, + patch_size=4, + in_chans=3, + n_classes=10, + embed_dim=96, + depth=12, + n_heads=8, + mlp_ratio=4., + qkv_bias=True, + p=0., + attn_p=0., + ): + super().__init__() + + self.patch_embed = PatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_chans=in_chans, + embed_dim=embed_dim, + ) + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) + self.pos_embed = nn.Parameter( + torch.zeros(1, 1 + self.patch_embed.n_patches, embed_dim) + ) + self.pos_drop = nn.Dropout(p=p) + + self.blocks = nn.ModuleList([ + Block( + dim=embed_dim, + n_heads=n_heads, + mlp_ratio=mlp_ratio, + 
qkv_bias=qkv_bias, + p=p, + attn_p=attn_p, + ) + for _ in range(depth) + ]) + + self.norm = nn.LayerNorm(embed_dim, eps=1e-6) + self.head = nn.Linear(embed_dim, n_classes) + + def forward(self, x): + n_samples = x.shape[0] + x = self.patch_embed(x) + + cls_token = self.cls_token.expand(n_samples, -1, -1) + x = torch.cat((cls_token, x), dim=1) + x = x + self.pos_embed + x = self.pos_drop(x) + + for block in self.blocks: + x = block(x) + + x = self.norm(x) + + cls_token_final = x[:, 0] + x = self.head(cls_token_final) + + return x diff --git a/Image/ViT/code/train.py b/Image/ViT/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..efeeb1879b9a6396a3bbf854ed0ea789b9f1c472 --- /dev/null +++ b/Image/ViT/code/train.py @@ -0,0 +1,59 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import ViT + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = ViT() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='vit', + save_type='0' + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='vit', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, 
+ lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='vit', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + +if __name__ == '__main__': + main() diff --git a/Image/ViT/dataset/.gitkeep b/Image/ViT/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ViT/model/.gitkeep b/Image/ViT/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ZFNet/code/model.py b/Image/ZFNet/code/model.py new file mode 100644 index 0000000000000000000000000000000000000000..1fb0a193087c0c3e85366c201d789e8ebf73b729 --- /dev/null +++ b/Image/ZFNet/code/model.py @@ -0,0 +1,50 @@ +import torch +import torch.nn as nn + +class ZFNet(nn.Module): + def __init__(self, num_classes=10): + super(ZFNet, self).__init__() + self.features = nn.Sequential( + # conv1 + nn.Conv2d(3, 96, kernel_size=7, stride=2, padding=1), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=3, stride=2, padding=1), + nn.LocalResponseNorm(size=5, alpha=0.0001, beta=0.75, k=2), + + # conv2 + nn.Conv2d(96, 256, kernel_size=5, padding=2), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=3, stride=2, padding=1), + nn.LocalResponseNorm(size=5, alpha=0.0001, beta=0.75, k=2), + + # conv3 + nn.Conv2d(256, 384, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + + # conv4 + nn.Conv2d(384, 384, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + + # conv5 + nn.Conv2d(384, 256, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=3, stride=2, padding=1), + ) + + self.classifier = nn.Sequential( + nn.Linear(256 * 2 * 2, 4096), + nn.ReLU(inplace=True), + nn.Dropout(), + + nn.Linear(4096, 4096), + nn.ReLU(inplace=True), + nn.Dropout(), + + nn.Linear(4096, num_classes), + ) + + def forward(self, x): + x = self.features(x) + x = x.view(x.size(0), 
-1) + x = self.classifier(x) + return x diff --git a/Image/ZFNet/code/train.py b/Image/ZFNet/code/train.py new file mode 100644 index 0000000000000000000000000000000000000000..7aa58abeb735953788849ff6387fc2af35b423dd --- /dev/null +++ b/Image/ZFNet/code/train.py @@ -0,0 +1,59 @@ +import sys +import os +sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from utils.dataset_utils import get_cifar10_dataloaders +from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor +from utils.parse_args import parse_args +from model import ZFNet + +def main(): + # 解析命令行参数 + args = parse_args() + + # 创建模型 + model = ZFNet() + + if args.train_type == '0': + # 获取数据加载器 + trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path) + # 训练模型 + train_model( + model=model, + trainloader=trainloader, + testloader=testloader, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='zfnet', + save_type='0' + ) + elif args.train_type == '1': + train_model_data_augmentation( + model, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='zfnet', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + elif args.train_type == '2': + train_model_backdoor( + model, + poison_ratio=args.poison_ratio, + target_label=args.target_label, + epochs=args.epochs, + lr=args.lr, + device=f'cuda:{args.gpu}', + save_dir='../model', + model_name='zfnet', + batch_size=args.batch_size, + num_workers=args.num_workers, + local_dataset_path=args.dataset_path + ) + +if __name__ == '__main__': + main() diff --git a/Image/ZFNet/dataset/.gitkeep b/Image/ZFNet/dataset/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/ZFNet/model/.gitkeep 
b/Image/ZFNet/model/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Image/utils/dataset_utils.py b/Image/utils/dataset_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1acd0e039a4b0e3bc74ca4a9d2de58471380d517 --- /dev/null +++ b/Image/utils/dataset_utils.py @@ -0,0 +1,110 @@ +import torch +import torchvision +import torchvision.transforms as transforms +import os + +def get_cifar10_dataloaders(batch_size=128, num_workers=2, local_dataset_path=None,shuffle=True): + """获取CIFAR10数据集的数据加载器 + + Args: + batch_size: 批次大小 + num_workers: 数据加载的工作进程数 + local_dataset_path: 本地数据集路径,如果提供则使用本地数据集,否则下载 + + Returns: + trainloader: 训练数据加载器 + testloader: 测试数据加载器 + """ + # 数据预处理 + transform_train = transforms.Compose([ + transforms.RandomCrop(32, padding=4), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)), + ]) + + transform_test = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)), + ]) + + # 设置数据集路径 + if local_dataset_path: + print(f"使用本地数据集: {local_dataset_path}") + download = False + dataset_path = local_dataset_path + else: + print("未指定本地数据集路径,将下载数据集") + download = True + dataset_path = '../dataset' + + # 创建数据集路径 + if not os.path.exists(dataset_path): + os.makedirs(dataset_path) + + trainset = torchvision.datasets.CIFAR10( + root=dataset_path, train=True, download=download, transform=transform_train) + trainloader = torch.utils.data.DataLoader( + trainset, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers) + + testset = torchvision.datasets.CIFAR10( + root=dataset_path, train=False, download=download, transform=transform_test) + testloader = torch.utils.data.DataLoader( + testset, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers) + + return trainloader, testloader + +def 
get_mnist_dataloaders(batch_size=128, num_workers=2, local_dataset_path=None,shuffle=True): + """获取MNIST数据集的数据加载器 + + Args: + batch_size: 批次大小 + num_workers: 数据加载的工作进程数 + local_dataset_path: 本地数据集路径,如果提供则使用本地数据集,否则下载 + + Returns: + trainloader: 训练数据加载器 + testloader: 测试数据加载器 + """ + # 数据预处理 + transform_train = transforms.Compose([ + transforms.RandomRotation(10), # 随机旋转±10度 + transforms.RandomAffine( # 随机仿射变换 + degrees=0, # 不进行旋转 + translate=(0.1, 0.1), # 平移范围 + scale=(0.9, 1.1) # 缩放范围 + ), + transforms.ToTensor(), + transforms.Normalize((0.1307,), (0.3081,)) # MNIST数据集的均值和标准差 + ]) + + transform_test = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize((0.1307,), (0.3081,)) + ]) + + # 设置数据集路径 + if local_dataset_path: + print(f"使用本地数据集: {local_dataset_path}") + download = False + dataset_path = local_dataset_path + else: + print("未指定本地数据集路径,将下载数据集") + download = True + dataset_path = '../dataset' + + # 创建数据集路径 + if not os.path.exists(dataset_path): + os.makedirs(dataset_path) + + trainset = torchvision.datasets.MNIST( + root=dataset_path, train=True, download=download, transform=transform_train) + trainloader = torch.utils.data.DataLoader( + trainset, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers) + + testset = torchvision.datasets.MNIST( + root=dataset_path, train=False, download=download, transform=transform_test) + testloader = torch.utils.data.DataLoader( + testset, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers) + + return trainloader, testloader diff --git a/Image/utils/parse_args.py b/Image/utils/parse_args.py new file mode 100644 index 0000000000000000000000000000000000000000..998e17d974a5bba504540be0859442da99dd45a9 --- /dev/null +++ b/Image/utils/parse_args.py @@ -0,0 +1,19 @@ +import argparse + +def parse_args(): + """解析命令行参数 + + Returns: + args: 解析后的参数 + """ + parser = argparse.ArgumentParser(description='训练模型') + parser.add_argument('--gpu', type=int, default=0, help='GPU设备编号 (0,1,2,3)') + 
parser.add_argument('--batch-size', type=int, default=128, help='批次大小') + parser.add_argument('--epochs', type=int, default=200, help='训练轮数') + parser.add_argument('--lr', type=float, default=0.1, help='学习率') + parser.add_argument('--num-workers', type=int, default=2, help='数据加载的工作进程数') + parser.add_argument('--poison-ratio', type=float, default=0.1, help='恶意样本比例') + parser.add_argument('--target-label', type=int, default=0, help='目标类别') + parser.add_argument('--train-type',type=str,choices=['0','1','2'],default='0',help='训练类型:0 for normal train, 1 for data aug train,2 for back door train') + parser.add_argument('--dataset-path', type=str, default=None, help='本地数据集路径,如果不指定则自动下载') + return parser.parse_args() \ No newline at end of file diff --git a/Image/utils/train_utils.py b/Image/utils/train_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..23ee83897d9d11ee02de5575c9d3655be46847fa --- /dev/null +++ b/Image/utils/train_utils.py @@ -0,0 +1,381 @@ +""" +通用模型训练工具 + +提供了模型训练、评估、保存等功能,支持: +1. 训练进度可视化 +2. 日志记录 +3. 模型检查点保存 +4. 
def setup_logger(log_file):
    """Configure the shared 'train' logger, overwriting any existing log file.

    Args:
        log_file: path of the log file (opened in 'w' mode, so a previous
            run's log is truncated).

    Returns:
        logger: logger emitting INFO+ records to both the file and the console.
    """
    logger = logging.getLogger('train')
    logger.setLevel(logging.INFO)

    # Fix: close stale handlers before dropping them; the original cleared
    # the handler list without closing, leaking one open file descriptor
    # per repeated call.
    if logger.hasHandlers():
        for stale in list(logger.handlers):
            stale.close()
        logger.handlers.clear()

    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    # File handler; 'w' mode overwrites an existing file.
    file_handler = logging.FileHandler(log_file, mode='w')
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    # Console handler mirrors the same records.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(formatter)
    logger.addHandler(console_handler)

    return logger
def train_model(model, trainloader, testloader, epochs=200, lr=0.1, device='cuda:0',
                save_dir='./checkpoints', model_name='model', save_type='0', layer_name=None, interval=2):
    """Generic training loop with logging and periodic checkpointing.

    Args:
        model: the model to train.
        trainloader: training DataLoader.
        testloader: test DataLoader.
        epochs: number of training epochs.
        lr: initial learning rate (SGD with momentum + cosine annealing).
        device: device string of the form 'cuda:N' (N in 0..3); falls back
            to CPU when CUDA is unavailable and to 'cuda:0' when N is invalid.
        save_dir: checkpoint root directory.
        model_name: model name used in log messages.
        save_type: '0' normal train, '1' data-augmentation train, '2' backdoor train.
        layer_name: layer whose embeddings time_travel_saver collects.
        interval: save a checkpoint every `interval` epochs.
    """
    # --- device sanity checks -------------------------------------------
    if not torch.cuda.is_available():
        print("CUDA不可用,将使用CPU训练")
        device = 'cpu'
    elif not device.startswith('cuda:'):
        device = f'cuda:0'

    # Clamp an out-of-range GPU index back to GPU 0.
    if device.startswith('cuda:'):
        gpu_id = int(device.split(':')[1])
        if gpu_id >= torch.cuda.device_count():
            print(f"GPU {gpu_id} 不可用,将使用GPU 0")
            device = 'cuda:0'

    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    # Log file name depends on the training flavour (0/1/2 as documented above).
    # NOTE(review): an unexpected save_type leaves log_file unbound (NameError),
    # same as the original — callers only pass '0', '1' or '2'.
    if save_type == '0':
        log_file = os.path.join(os.path.dirname(save_dir), 'code', 'train.log')
    elif save_type == '1':
        log_file = os.path.join(os.path.dirname(save_dir), 'code', 'data_aug_train.log')
    elif save_type == '2':
        log_file = os.path.join(os.path.dirname(save_dir), 'code', 'backdoor_train.log')
    if not os.path.exists(os.path.dirname(log_file)):
        os.makedirs(os.path.dirname(log_file))
    logger = setup_logger(log_file)

    # Per-flavour checkpoint subdirectory.
    save_dir = os.path.join(save_dir, save_type)
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    # Loss, optimizer and LR schedule.
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)
    # Fix: the cosine schedule must span the actual number of epochs; the
    # original hard-coded T_max=200, misaligning the schedule for any
    # epochs != 200 (identical behavior at the default).
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=epochs)

    model = model.to(device)
    best_acc = 0
    start_time = time.time()

    logger.info(f'开始训练 {model_name}')
    logger.info(f'总轮数: {epochs}, 学习率: {lr}, 设备: {device}')

    for epoch in range(epochs):
        # ---- training phase ----
        model.train()
        train_loss = 0
        correct = 0
        total = 0

        train_pbar = tqdm(trainloader, desc=f'Epoch {epoch+1}/{epochs} [Train]')
        for batch_idx, (inputs, targets) in enumerate(train_pbar):
            inputs, targets = inputs.to(device), targets.to(device)
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            loss.backward()
            optimizer.step()

            train_loss += loss.item()
            _, predicted = outputs.max(1)
            total += targets.size(0)
            correct += predicted.eq(targets).sum().item()

            # Running averages on the progress bar.
            train_pbar.set_postfix({
                'loss': f'{train_loss/(batch_idx+1):.3f}',
                'acc': f'{100.*correct/total:.2f}%'
            })

            # Log every 100 steps.
            if batch_idx % 100 == 0:
                logger.info(f'Epoch: {epoch+1} | Batch: {batch_idx} | '
                            f'Loss: {train_loss/(batch_idx+1):.3f} | '
                            f'Acc: {100.*correct/total:.2f}%')

        # ---- evaluation phase ----
        model.eval()
        test_loss = 0
        correct = 0
        total = 0

        test_pbar = tqdm(testloader, desc=f'Epoch {epoch+1}/{epochs} [Test]')
        with torch.no_grad():
            for batch_idx, (inputs, targets) in enumerate(test_pbar):
                inputs, targets = inputs.to(device), targets.to(device)
                outputs = model(inputs)
                loss = criterion(outputs, targets)

                test_loss += loss.item()
                _, predicted = outputs.max(1)
                total += targets.size(0)
                correct += predicted.eq(targets).sum().item()

                test_pbar.set_postfix({
                    'loss': f'{test_loss/(batch_idx+1):.3f}',
                    'acc': f'{100.*correct/total:.2f}%'
                })

        acc = 100.*correct/total
        logger.info(f'Epoch: {epoch+1} | Test Loss: {test_loss/(batch_idx+1):.3f} | '
                    f'Test Acc: {acc:.2f}%')

        if epoch == 0:
            # Order-preserving loader so collected embeddings line up with
            # dataset indices.
            # NOTE(review): the saver is constructed with show=True but
            # .save() is never called on it here — presumably the constructor
            # persists/visualizes on its own; confirm against
            # ttv_utils.time_travel_saver before changing.
            ordered_loader = torch.utils.data.DataLoader(
                trainloader.dataset,            # same dataset as training
                batch_size=trainloader.batch_size,
                shuffle=False,                  # deterministic order
                num_workers=trainloader.num_workers
            )
            save_model = time_travel_saver(model, ordered_loader, device, save_dir, model_name, interval = 1, auto_save_embedding = True, layer_name = layer_name, show= True )

        # Checkpoint every `interval` epochs, again through an ordered loader.
        if (epoch + 1) % interval == 0:
            ordered_loader = torch.utils.data.DataLoader(
                trainloader.dataset,            # same dataset as training
                batch_size=trainloader.batch_size,
                shuffle=False,                  # deterministic order
                num_workers=trainloader.num_workers
            )
            save_model = time_travel_saver(model, ordered_loader, device, save_dir, model_name, interval = 1, auto_save_embedding = True, layer_name = layer_name )
            save_model.save()

        scheduler.step()

    logger.info('训练完成!')
def train_model_data_augmentation(model, epochs=200, lr=0.1, device='cuda:0',
                                  save_dir='./checkpoints', model_name='model',
                                  batch_size=128, num_workers=2, local_dataset_path=None):
    """Train `model` on CIFAR-10 with a strong augmentation pipeline.

    Augmentation recipe:
      1. RandomCrop: pad 4 px then crop back to 32x32 (positional variety).
      2. RandomHorizontalFlip: directional variety.
      3. RandomRotation(15): angular variety.
      4. ColorJitter: brightness / contrast / saturation / hue jitter.
      5. RandomPerspective: viewpoint variety.
      6. RandomErasing: random occlusion (applied after normalization).

    Args:
        model: the model to train.
        epochs: number of training epochs.
        lr: learning rate.
        device: training device.
        save_dir: checkpoint directory.
        model_name: model name.
        batch_size: samples per batch.
        num_workers: data-loading worker processes.
        local_dataset_path: local dataset root (downloaded when None).
    """
    import torchvision.transforms as transforms
    from .dataset_utils import get_cifar10_dataloaders

    augmented_transform = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.RandomRotation(15),
        transforms.ColorJitter(
            brightness=0.2,
            contrast=0.2,
            saturation=0.2,
            hue=0.1
        ),
        transforms.RandomPerspective(distortion_scale=0.2, p=0.5),
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
        transforms.RandomErasing(p=0.5, scale=(0.02, 0.33), ratio=(0.3, 3.3))
    ])

    # Fetch the stock loaders, then swap the augmented transform onto the
    # underlying dataset and rebuild the training loader around it.
    base_trainloader, testloader = get_cifar10_dataloaders(batch_size, num_workers, local_dataset_path)
    train_dataset = base_trainloader.dataset
    train_dataset.transform = augmented_transform
    trainloader = torch.utils.data.DataLoader(
        train_dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers)

    # Delegate to the generic loop; save_type='1' marks data-aug training.
    train_model(model, trainloader, testloader, epochs, lr, device, save_dir, model_name, save_type='1')
def train_model_backdoor(model, poison_ratio=0.1, target_label=0, epochs=200, lr=0.1,
                         device='cuda:0', save_dir='./checkpoints', model_name='model',
                         batch_size=128, num_workers=2, local_dataset_path=None, layer_name=None, interval=2):
    """Train a backdoored model on CIFAR-10 and report clean accuracy + attack success.

    Backdoor scheme:
      1. Label flipping: a `poison_ratio` fraction of samples are relabeled
         to `target_label`.
      2. Trigger: a 4x4 white square stamped in the bottom-right corner of
         the poisoned samples.
      3. Validation: accuracy on clean data, then attack success rate on
         data with the trigger stamped on every sample.

    Args:
        model: the model to train.
        poison_ratio: fraction of training samples to poison.
        target_label: label the backdoor should force.
        epochs: number of training epochs.
        lr: learning rate.
        device: training device.
        save_dir: checkpoint directory.
        model_name: model name.
        batch_size: samples per batch.
        num_workers: data-loading worker processes.
        local_dataset_path: local dataset root (downloaded when None).
        layer_name: layer whose embeddings time_travel_saver collects.
        interval: checkpoint interval forwarded to train_model.
    """
    from .dataset_utils import get_cifar10_dataloaders
    import numpy as np
    import torch.nn.functional as F

    # Original (clean) loaders.
    trainloader, testloader = get_cifar10_dataloaders(batch_size, num_workers, local_dataset_path)

    # Pick the samples to poison.
    trainset = trainloader.dataset
    num_poison = int(len(trainset) * poison_ratio)
    poison_indices = np.random.choice(len(trainset), num_poison, replace=False)

    # Keep pristine copies so the clean-accuracy check below uses unpoisoned
    # data. Assumes a CIFAR10-style dataset: .targets is a list and .data is
    # an (N, H, W, C) uint8 array — TODO confirm for other datasets.
    original_targets = trainset.targets.copy()
    original_data = trainset.data.copy()

    # 4x4 white square used as the trigger.
    trigger_pattern = np.ones((4, 4, 3), dtype=np.uint8) * 255
    for idx in poison_indices:
        # Flip the label...
        trainset.targets[idx] = target_label
        # ...and stamp the trigger in the bottom-right corner.
        trainset.data[idx, -4:, -4:] = trigger_pattern

    # Loader over the (partially) poisoned training set.
    poisoned_trainloader = torch.utils.data.DataLoader(
        trainset, batch_size=batch_size, shuffle=True, num_workers=num_workers)

    # Train; save_type='2' marks backdoor training.
    train_model(model, poisoned_trainloader, testloader, epochs, lr, device, save_dir, model_name, save_type='2', layer_name=layer_name, interval=interval)

    # Restore the clean data for validation.
    trainset.targets = original_targets
    trainset.data = original_data

    # Deterministic loader over the clean training data.
    validation_loader = torch.utils.data.DataLoader(
        trainset, batch_size=batch_size, shuffle=False, num_workers=num_workers)

    # --- clean accuracy ---------------------------------------------------
    model.eval()
    correct = 0
    total = 0
    with torch.no_grad():
        for inputs, targets in validation_loader:
            inputs, targets = inputs.to(device), targets.to(device)
            outputs = model(inputs)
            _, predicted = outputs.max(1)
            total += targets.size(0)
            correct += predicted.eq(targets).sum().item()

    clean_accuracy = 100. * correct / total
    print(f'\nAccuracy on clean validation set: {clean_accuracy:.2f}%')

    # --- attack success rate ---------------------------------------------
    # Triggered copy of the whole (clean) training set, all expected to be
    # classified as target_label.
    trigger_validation = trainset.data.copy()
    trigger_validation_targets = np.array([target_label] * len(trainset))
    trigger_validation[:, -4:, -4:] = trigger_pattern

    # HWC uint8 -> normalized CHW float tensor (same stats as training).
    trigger_validation = torch.tensor(trigger_validation).float().permute(0, 3, 1, 2) / 255.0
    normalize = transforms.Normalize(mean=(0.4914, 0.4822, 0.4465),
                                     std=(0.2023, 0.1994, 0.2010))
    trigger_validation = normalize(trigger_validation)

    correct = 0
    total = 0
    eval_batch_size = 100  # fix: original rebound `batch_size`, shadowing the parameter
    # Fix: evaluate under no_grad; the original built autograd graphs for a
    # pure inference pass, wasting memory.
    with torch.no_grad():
        for i in range(0, len(trigger_validation), eval_batch_size):
            inputs = trigger_validation[i:i+eval_batch_size].to(device)
            targets = torch.tensor(trigger_validation_targets[i:i+eval_batch_size]).to(device)
            outputs = model(inputs)
            _, predicted = outputs.max(1)
            total += targets.size(0)
            correct += predicted.eq(targets).sum().item()

    attack_success_rate = 100. * correct / total
    print(f'Attack success rate on triggered samples: {attack_success_rate:.2f}%')
b/ResNet-CIFAR10/Classification-normal/dataset/index.json @@ -0,0 +1,50006 @@ +{ + "train": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 46, + 47, + 48, + 49, + 50, + 51, + 52, + 53, + 54, + 55, + 56, + 57, + 58, + 59, + 60, + 61, + 62, + 63, + 64, + 65, + 66, + 67, + 68, + 69, + 70, + 71, + 72, + 73, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 82, + 83, + 84, + 85, + 86, + 87, + 88, + 89, + 90, + 91, + 92, + 93, + 94, + 95, + 96, + 97, + 98, + 99, + 100, + 101, + 102, + 103, + 104, + 105, + 106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 115, + 116, + 117, + 118, + 119, + 120, + 121, + 122, + 123, + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137, + 138, + 139, + 140, + 141, + 142, + 143, + 144, + 145, + 146, + 147, + 148, + 149, + 150, + 151, + 152, + 153, + 154, + 155, + 156, + 157, + 158, + 159, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 174, + 175, + 176, + 177, + 178, + 179, + 180, + 181, + 182, + 183, + 184, + 185, + 186, + 187, + 188, + 189, + 190, + 191, + 192, + 193, + 194, + 195, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 210, + 211, + 212, + 213, + 214, + 215, + 216, + 217, + 218, + 219, + 220, + 221, + 222, + 223, + 224, + 225, + 226, + 227, + 228, + 229, + 230, + 231, + 232, + 233, + 234, + 235, + 236, + 237, + 238, + 239, + 240, + 241, + 242, + 243, + 244, + 245, + 246, + 247, + 248, + 249, + 250, + 251, + 252, + 253, + 254, + 255, + 256, + 257, + 258, + 259, + 260, + 261, + 262, + 263, + 264, + 265, + 266, + 267, + 268, + 269, + 270, + 271, + 272, + 273, + 274, + 275, + 276, + 277, + 278, + 279, + 280, + 281, + 282, + 283, + 284, + 285, + 286, + 287, 
+ 288, + 289, + 290, + 291, + 292, + 293, + 294, + 295, + 296, + 297, + 298, + 299, + 300, + 301, + 302, + 303, + 304, + 305, + 306, + 307, + 308, + 309, + 310, + 311, + 312, + 313, + 314, + 315, + 316, + 317, + 318, + 319, + 320, + 321, + 322, + 323, + 324, + 325, + 326, + 327, + 328, + 329, + 330, + 331, + 332, + 333, + 334, + 335, + 336, + 337, + 338, + 339, + 340, + 341, + 342, + 343, + 344, + 345, + 346, + 347, + 348, + 349, + 350, + 351, + 352, + 353, + 354, + 355, + 356, + 357, + 358, + 359, + 360, + 361, + 362, + 363, + 364, + 365, + 366, + 367, + 368, + 369, + 370, + 371, + 372, + 373, + 374, + 375, + 376, + 377, + 378, + 379, + 380, + 381, + 382, + 383, + 384, + 385, + 386, + 387, + 388, + 389, + 390, + 391, + 392, + 393, + 394, + 395, + 396, + 397, + 398, + 399, + 400, + 401, + 402, + 403, + 404, + 405, + 406, + 407, + 408, + 409, + 410, + 411, + 412, + 413, + 414, + 415, + 416, + 417, + 418, + 419, + 420, + 421, + 422, + 423, + 424, + 425, + 426, + 427, + 428, + 429, + 430, + 431, + 432, + 433, + 434, + 435, + 436, + 437, + 438, + 439, + 440, + 441, + 442, + 443, + 444, + 445, + 446, + 447, + 448, + 449, + 450, + 451, + 452, + 453, + 454, + 455, + 456, + 457, + 458, + 459, + 460, + 461, + 462, + 463, + 464, + 465, + 466, + 467, + 468, + 469, + 470, + 471, + 472, + 473, + 474, + 475, + 476, + 477, + 478, + 479, + 480, + 481, + 482, + 483, + 484, + 485, + 486, + 487, + 488, + 489, + 490, + 491, + 492, + 493, + 494, + 495, + 496, + 497, + 498, + 499, + 500, + 501, + 502, + 503, + 504, + 505, + 506, + 507, + 508, + 509, + 510, + 511, + 512, + 513, + 514, + 515, + 516, + 517, + 518, + 519, + 520, + 521, + 522, + 523, + 524, + 525, + 526, + 527, + 528, + 529, + 530, + 531, + 532, + 533, + 534, + 535, + 536, + 537, + 538, + 539, + 540, + 541, + 542, + 543, + 544, + 545, + 546, + 547, + 548, + 549, + 550, + 551, + 552, + 553, + 554, + 555, + 556, + 557, + 558, + 559, + 560, + 561, + 562, + 563, + 564, + 565, + 566, + 567, + 568, + 569, + 570, + 571, + 572, + 
573, + 574, + 575, + 576, + 577, + 578, + 579, + 580, + 581, + 582, + 583, + 584, + 585, + 586, + 587, + 588, + 589, + 590, + 591, + 592, + 593, + 594, + 595, + 596, + 597, + 598, + 599, + 600, + 601, + 602, + 603, + 604, + 605, + 606, + 607, + 608, + 609, + 610, + 611, + 612, + 613, + 614, + 615, + 616, + 617, + 618, + 619, + 620, + 621, + 622, + 623, + 624, + 625, + 626, + 627, + 628, + 629, + 630, + 631, + 632, + 633, + 634, + 635, + 636, + 637, + 638, + 639, + 640, + 641, + 642, + 643, + 644, + 645, + 646, + 647, + 648, + 649, + 650, + 651, + 652, + 653, + 654, + 655, + 656, + 657, + 658, + 659, + 660, + 661, + 662, + 663, + 664, + 665, + 666, + 667, + 668, + 669, + 670, + 671, + 672, + 673, + 674, + 675, + 676, + 677, + 678, + 679, + 680, + 681, + 682, + 683, + 684, + 685, + 686, + 687, + 688, + 689, + 690, + 691, + 692, + 693, + 694, + 695, + 696, + 697, + 698, + 699, + 700, + 701, + 702, + 703, + 704, + 705, + 706, + 707, + 708, + 709, + 710, + 711, + 712, + 713, + 714, + 715, + 716, + 717, + 718, + 719, + 720, + 721, + 722, + 723, + 724, + 725, + 726, + 727, + 728, + 729, + 730, + 731, + 732, + 733, + 734, + 735, + 736, + 737, + 738, + 739, + 740, + 741, + 742, + 743, + 744, + 745, + 746, + 747, + 748, + 749, + 750, + 751, + 752, + 753, + 754, + 755, + 756, + 757, + 758, + 759, + 760, + 761, + 762, + 763, + 764, + 765, + 766, + 767, + 768, + 769, + 770, + 771, + 772, + 773, + 774, + 775, + 776, + 777, + 778, + 779, + 780, + 781, + 782, + 783, + 784, + 785, + 786, + 787, + 788, + 789, + 790, + 791, + 792, + 793, + 794, + 795, + 796, + 797, + 798, + 799, + 800, + 801, + 802, + 803, + 804, + 805, + 806, + 807, + 808, + 809, + 810, + 811, + 812, + 813, + 814, + 815, + 816, + 817, + 818, + 819, + 820, + 821, + 822, + 823, + 824, + 825, + 826, + 827, + 828, + 829, + 830, + 831, + 832, + 833, + 834, + 835, + 836, + 837, + 838, + 839, + 840, + 841, + 842, + 843, + 844, + 845, + 846, + 847, + 848, + 849, + 850, + 851, + 852, + 853, + 854, + 855, + 856, + 857, + 858, 
+ 859, + 860, + 861, + 862, + 863, + 864, + 865, + 866, + 867, + 868, + 869, + 870, + 871, + 872, + 873, + 874, + 875, + 876, + 877, + 878, + 879, + 880, + 881, + 882, + 883, + 884, + 885, + 886, + 887, + 888, + 889, + 890, + 891, + 892, + 893, + 894, + 895, + 896, + 897, + 898, + 899, + 900, + 901, + 902, + 903, + 904, + 905, + 906, + 907, + 908, + 909, + 910, + 911, + 912, + 913, + 914, + 915, + 916, + 917, + 918, + 919, + 920, + 921, + 922, + 923, + 924, + 925, + 926, + 927, + 928, + 929, + 930, + 931, + 932, + 933, + 934, + 935, + 936, + 937, + 938, + 939, + 940, + 941, + 942, + 943, + 944, + 945, + 946, + 947, + 948, + 949, + 950, + 951, + 952, + 953, + 954, + 955, + 956, + 957, + 958, + 959, + 960, + 961, + 962, + 963, + 964, + 965, + 966, + 967, + 968, + 969, + 970, + 971, + 972, + 973, + 974, + 975, + 976, + 977, + 978, + 979, + 980, + 981, + 982, + 983, + 984, + 985, + 986, + 987, + 988, + 989, + 990, + 991, + 992, + 993, + 994, + 995, + 996, + 997, + 998, + 999, + 1000, + 1001, + 1002, + 1003, + 1004, + 1005, + 1006, + 1007, + 1008, + 1009, + 1010, + 1011, + 1012, + 1013, + 1014, + 1015, + 1016, + 1017, + 1018, + 1019, + 1020, + 1021, + 1022, + 1023, + 1024, + 1025, + 1026, + 1027, + 1028, + 1029, + 1030, + 1031, + 1032, + 1033, + 1034, + 1035, + 1036, + 1037, + 1038, + 1039, + 1040, + 1041, + 1042, + 1043, + 1044, + 1045, + 1046, + 1047, + 1048, + 1049, + 1050, + 1051, + 1052, + 1053, + 1054, + 1055, + 1056, + 1057, + 1058, + 1059, + 1060, + 1061, + 1062, + 1063, + 1064, + 1065, + 1066, + 1067, + 1068, + 1069, + 1070, + 1071, + 1072, + 1073, + 1074, + 1075, + 1076, + 1077, + 1078, + 1079, + 1080, + 1081, + 1082, + 1083, + 1084, + 1085, + 1086, + 1087, + 1088, + 1089, + 1090, + 1091, + 1092, + 1093, + 1094, + 1095, + 1096, + 1097, + 1098, + 1099, + 1100, + 1101, + 1102, + 1103, + 1104, + 1105, + 1106, + 1107, + 1108, + 1109, + 1110, + 1111, + 1112, + 1113, + 1114, + 1115, + 1116, + 1117, + 1118, + 1119, + 1120, + 1121, + 1122, + 1123, + 1124, + 1125, + 
1126, + 1127, + 1128, + 1129, + 1130, + 1131, + 1132, + 1133, + 1134, + 1135, + 1136, + 1137, + 1138, + 1139, + 1140, + 1141, + 1142, + 1143, + 1144, + 1145, + 1146, + 1147, + 1148, + 1149, + 1150, + 1151, + 1152, + 1153, + 1154, + 1155, + 1156, + 1157, + 1158, + 1159, + 1160, + 1161, + 1162, + 1163, + 1164, + 1165, + 1166, + 1167, + 1168, + 1169, + 1170, + 1171, + 1172, + 1173, + 1174, + 1175, + 1176, + 1177, + 1178, + 1179, + 1180, + 1181, + 1182, + 1183, + 1184, + 1185, + 1186, + 1187, + 1188, + 1189, + 1190, + 1191, + 1192, + 1193, + 1194, + 1195, + 1196, + 1197, + 1198, + 1199, + 1200, + 1201, + 1202, + 1203, + 1204, + 1205, + 1206, + 1207, + 1208, + 1209, + 1210, + 1211, + 1212, + 1213, + 1214, + 1215, + 1216, + 1217, + 1218, + 1219, + 1220, + 1221, + 1222, + 1223, + 1224, + 1225, + 1226, + 1227, + 1228, + 1229, + 1230, + 1231, + 1232, + 1233, + 1234, + 1235, + 1236, + 1237, + 1238, + 1239, + 1240, + 1241, + 1242, + 1243, + 1244, + 1245, + 1246, + 1247, + 1248, + 1249, + 1250, + 1251, + 1252, + 1253, + 1254, + 1255, + 1256, + 1257, + 1258, + 1259, + 1260, + 1261, + 1262, + 1263, + 1264, + 1265, + 1266, + 1267, + 1268, + 1269, + 1270, + 1271, + 1272, + 1273, + 1274, + 1275, + 1276, + 1277, + 1278, + 1279, + 1280, + 1281, + 1282, + 1283, + 1284, + 1285, + 1286, + 1287, + 1288, + 1289, + 1290, + 1291, + 1292, + 1293, + 1294, + 1295, + 1296, + 1297, + 1298, + 1299, + 1300, + 1301, + 1302, + 1303, + 1304, + 1305, + 1306, + 1307, + 1308, + 1309, + 1310, + 1311, + 1312, + 1313, + 1314, + 1315, + 1316, + 1317, + 1318, + 1319, + 1320, + 1321, + 1322, + 1323, + 1324, + 1325, + 1326, + 1327, + 1328, + 1329, + 1330, + 1331, + 1332, + 1333, + 1334, + 1335, + 1336, + 1337, + 1338, + 1339, + 1340, + 1341, + 1342, + 1343, + 1344, + 1345, + 1346, + 1347, + 1348, + 1349, + 1350, + 1351, + 1352, + 1353, + 1354, + 1355, + 1356, + 1357, + 1358, + 1359, + 1360, + 1361, + 1362, + 1363, + 1364, + 1365, + 1366, + 1367, + 1368, + 1369, + 1370, + 1371, + 1372, + 1373, + 1374, + 1375, + 
1376, + 1377, + 1378, + 1379, + 1380, + 1381, + 1382, + 1383, + 1384, + 1385, + 1386, + 1387, + 1388, + 1389, + 1390, + 1391, + 1392, + 1393, + 1394, + 1395, + 1396, + 1397, + 1398, + 1399, + 1400, + 1401, + 1402, + 1403, + 1404, + 1405, + 1406, + 1407, + 1408, + 1409, + 1410, + 1411, + 1412, + 1413, + 1414, + 1415, + 1416, + 1417, + 1418, + 1419, + 1420, + 1421, + 1422, + 1423, + 1424, + 1425, + 1426, + 1427, + 1428, + 1429, + 1430, + 1431, + 1432, + 1433, + 1434, + 1435, + 1436, + 1437, + 1438, + 1439, + 1440, + 1441, + 1442, + 1443, + 1444, + 1445, + 1446, + 1447, + 1448, + 1449, + 1450, + 1451, + 1452, + 1453, + 1454, + 1455, + 1456, + 1457, + 1458, + 1459, + 1460, + 1461, + 1462, + 1463, + 1464, + 1465, + 1466, + 1467, + 1468, + 1469, + 1470, + 1471, + 1472, + 1473, + 1474, + 1475, + 1476, + 1477, + 1478, + 1479, + 1480, + 1481, + 1482, + 1483, + 1484, + 1485, + 1486, + 1487, + 1488, + 1489, + 1490, + 1491, + 1492, + 1493, + 1494, + 1495, + 1496, + 1497, + 1498, + 1499, + 1500, + 1501, + 1502, + 1503, + 1504, + 1505, + 1506, + 1507, + 1508, + 1509, + 1510, + 1511, + 1512, + 1513, + 1514, + 1515, + 1516, + 1517, + 1518, + 1519, + 1520, + 1521, + 1522, + 1523, + 1524, + 1525, + 1526, + 1527, + 1528, + 1529, + 1530, + 1531, + 1532, + 1533, + 1534, + 1535, + 1536, + 1537, + 1538, + 1539, + 1540, + 1541, + 1542, + 1543, + 1544, + 1545, + 1546, + 1547, + 1548, + 1549, + 1550, + 1551, + 1552, + 1553, + 1554, + 1555, + 1556, + 1557, + 1558, + 1559, + 1560, + 1561, + 1562, + 1563, + 1564, + 1565, + 1566, + 1567, + 1568, + 1569, + 1570, + 1571, + 1572, + 1573, + 1574, + 1575, + 1576, + 1577, + 1578, + 1579, + 1580, + 1581, + 1582, + 1583, + 1584, + 1585, + 1586, + 1587, + 1588, + 1589, + 1590, + 1591, + 1592, + 1593, + 1594, + 1595, + 1596, + 1597, + 1598, + 1599, + 1600, + 1601, + 1602, + 1603, + 1604, + 1605, + 1606, + 1607, + 1608, + 1609, + 1610, + 1611, + 1612, + 1613, + 1614, + 1615, + 1616, + 1617, + 1618, + 1619, + 1620, + 1621, + 1622, + 1623, + 1624, + 1625, + 
1626, + 1627, + 1628, + 1629, + 1630, + 1631, + 1632, + 1633, + 1634, + 1635, + 1636, + 1637, + 1638, + 1639, + 1640, + 1641, + 1642, + 1643, + 1644, + 1645, + 1646, + 1647, + 1648, + 1649, + 1650, + 1651, + 1652, + 1653, + 1654, + 1655, + 1656, + 1657, + 1658, + 1659, + 1660, + 1661, + 1662, + 1663, + 1664, + 1665, + 1666, + 1667, + 1668, + 1669, + 1670, + 1671, + 1672, + 1673, + 1674, + 1675, + 1676, + 1677, + 1678, + 1679, + 1680, + 1681, + 1682, + 1683, + 1684, + 1685, + 1686, + 1687, + 1688, + 1689, + 1690, + 1691, + 1692, + 1693, + 1694, + 1695, + 1696, + 1697, + 1698, + 1699, + 1700, + 1701, + 1702, + 1703, + 1704, + 1705, + 1706, + 1707, + 1708, + 1709, + 1710, + 1711, + 1712, + 1713, + 1714, + 1715, + 1716, + 1717, + 1718, + 1719, + 1720, + 1721, + 1722, + 1723, + 1724, + 1725, + 1726, + 1727, + 1728, + 1729, + 1730, + 1731, + 1732, + 1733, + 1734, + 1735, + 1736, + 1737, + 1738, + 1739, + 1740, + 1741, + 1742, + 1743, + 1744, + 1745, + 1746, + 1747, + 1748, + 1749, + 1750, + 1751, + 1752, + 1753, + 1754, + 1755, + 1756, + 1757, + 1758, + 1759, + 1760, + 1761, + 1762, + 1763, + 1764, + 1765, + 1766, + 1767, + 1768, + 1769, + 1770, + 1771, + 1772, + 1773, + 1774, + 1775, + 1776, + 1777, + 1778, + 1779, + 1780, + 1781, + 1782, + 1783, + 1784, + 1785, + 1786, + 1787, + 1788, + 1789, + 1790, + 1791, + 1792, + 1793, + 1794, + 1795, + 1796, + 1797, + 1798, + 1799, + 1800, + 1801, + 1802, + 1803, + 1804, + 1805, + 1806, + 1807, + 1808, + 1809, + 1810, + 1811, + 1812, + 1813, + 1814, + 1815, + 1816, + 1817, + 1818, + 1819, + 1820, + 1821, + 1822, + 1823, + 1824, + 1825, + 1826, + 1827, + 1828, + 1829, + 1830, + 1831, + 1832, + 1833, + 1834, + 1835, + 1836, + 1837, + 1838, + 1839, + 1840, + 1841, + 1842, + 1843, + 1844, + 1845, + 1846, + 1847, + 1848, + 1849, + 1850, + 1851, + 1852, + 1853, + 1854, + 1855, + 1856, + 1857, + 1858, + 1859, + 1860, + 1861, + 1862, + 1863, + 1864, + 1865, + 1866, + 1867, + 1868, + 1869, + 1870, + 1871, + 1872, + 1873, + 1874, + 1875, + 
1876, + 1877, + 1878, + 1879, + 1880, + 1881, + 1882, + 1883, + 1884, + 1885, + 1886, + 1887, + 1888, + 1889, + 1890, + 1891, + 1892, + 1893, + 1894, + 1895, + 1896, + 1897, + 1898, + 1899, + 1900, + 1901, + 1902, + 1903, + 1904, + 1905, + 1906, + 1907, + 1908, + 1909, + 1910, + 1911, + 1912, + 1913, + 1914, + 1915, + 1916, + 1917, + 1918, + 1919, + 1920, + 1921, + 1922, + 1923, + 1924, + 1925, + 1926, + 1927, + 1928, + 1929, + 1930, + 1931, + 1932, + 1933, + 1934, + 1935, + 1936, + 1937, + 1938, + 1939, + 1940, + 1941, + 1942, + 1943, + 1944, + 1945, + 1946, + 1947, + 1948, + 1949, + 1950, + 1951, + 1952, + 1953, + 1954, + 1955, + 1956, + 1957, + 1958, + 1959, + 1960, + 1961, + 1962, + 1963, + 1964, + 1965, + 1966, + 1967, + 1968, + 1969, + 1970, + 1971, + 1972, + 1973, + 1974, + 1975, + 1976, + 1977, + 1978, + 1979, + 1980, + 1981, + 1982, + 1983, + 1984, + 1985, + 1986, + 1987, + 1988, + 1989, + 1990, + 1991, + 1992, + 1993, + 1994, + 1995, + 1996, + 1997, + 1998, + 1999, + 2000, + 2001, + 2002, + 2003, + 2004, + 2005, + 2006, + 2007, + 2008, + 2009, + 2010, + 2011, + 2012, + 2013, + 2014, + 2015, + 2016, + 2017, + 2018, + 2019, + 2020, + 2021, + 2022, + 2023, + 2024, + 2025, + 2026, + 2027, + 2028, + 2029, + 2030, + 2031, + 2032, + 2033, + 2034, + 2035, + 2036, + 2037, + 2038, + 2039, + 2040, + 2041, + 2042, + 2043, + 2044, + 2045, + 2046, + 2047, + 2048, + 2049, + 2050, + 2051, + 2052, + 2053, + 2054, + 2055, + 2056, + 2057, + 2058, + 2059, + 2060, + 2061, + 2062, + 2063, + 2064, + 2065, + 2066, + 2067, + 2068, + 2069, + 2070, + 2071, + 2072, + 2073, + 2074, + 2075, + 2076, + 2077, + 2078, + 2079, + 2080, + 2081, + 2082, + 2083, + 2084, + 2085, + 2086, + 2087, + 2088, + 2089, + 2090, + 2091, + 2092, + 2093, + 2094, + 2095, + 2096, + 2097, + 2098, + 2099, + 2100, + 2101, + 2102, + 2103, + 2104, + 2105, + 2106, + 2107, + 2108, + 2109, + 2110, + 2111, + 2112, + 2113, + 2114, + 2115, + 2116, + 2117, + 2118, + 2119, + 2120, + 2121, + 2122, + 2123, + 2124, + 2125, + 
2126, + 2127, + 2128, + 2129, + 2130, + 2131, + 2132, + 2133, + 2134, + 2135, + 2136, + 2137, + 2138, + 2139, + 2140, + 2141, + 2142, + 2143, + 2144, + 2145, + 2146, + 2147, + 2148, + 2149, + 2150, + 2151, + 2152, + 2153, + 2154, + 2155, + 2156, + 2157, + 2158, + 2159, + 2160, + 2161, + 2162, + 2163, + 2164, + 2165, + 2166, + 2167, + 2168, + 2169, + 2170, + 2171, + 2172, + 2173, + 2174, + 2175, + 2176, + 2177, + 2178, + 2179, + 2180, + 2181, + 2182, + 2183, + 2184, + 2185, + 2186, + 2187, + 2188, + 2189, + 2190, + 2191, + 2192, + 2193, + 2194, + 2195, + 2196, + 2197, + 2198, + 2199, + 2200, + 2201, + 2202, + 2203, + 2204, + 2205, + 2206, + 2207, + 2208, + 2209, + 2210, + 2211, + 2212, + 2213, + 2214, + 2215, + 2216, + 2217, + 2218, + 2219, + 2220, + 2221, + 2222, + 2223, + 2224, + 2225, + 2226, + 2227, + 2228, + 2229, + 2230, + 2231, + 2232, + 2233, + 2234, + 2235, + 2236, + 2237, + 2238, + 2239, + 2240, + 2241, + 2242, + 2243, + 2244, + 2245, + 2246, + 2247, + 2248, + 2249, + 2250, + 2251, + 2252, + 2253, + 2254, + 2255, + 2256, + 2257, + 2258, + 2259, + 2260, + 2261, + 2262, + 2263, + 2264, + 2265, + 2266, + 2267, + 2268, + 2269, + 2270, + 2271, + 2272, + 2273, + 2274, + 2275, + 2276, + 2277, + 2278, + 2279, + 2280, + 2281, + 2282, + 2283, + 2284, + 2285, + 2286, + 2287, + 2288, + 2289, + 2290, + 2291, + 2292, + 2293, + 2294, + 2295, + 2296, + 2297, + 2298, + 2299, + 2300, + 2301, + 2302, + 2303, + 2304, + 2305, + 2306, + 2307, + 2308, + 2309, + 2310, + 2311, + 2312, + 2313, + 2314, + 2315, + 2316, + 2317, + 2318, + 2319, + 2320, + 2321, + 2322, + 2323, + 2324, + 2325, + 2326, + 2327, + 2328, + 2329, + 2330, + 2331, + 2332, + 2333, + 2334, + 2335, + 2336, + 2337, + 2338, + 2339, + 2340, + 2341, + 2342, + 2343, + 2344, + 2345, + 2346, + 2347, + 2348, + 2349, + 2350, + 2351, + 2352, + 2353, + 2354, + 2355, + 2356, + 2357, + 2358, + 2359, + 2360, + 2361, + 2362, + 2363, + 2364, + 2365, + 2366, + 2367, + 2368, + 2369, + 2370, + 2371, + 2372, + 2373, + 2374, + 2375, + 
2376, + 2377, + 2378, + 2379, + 2380, + 2381, + 2382, + 2383, + 2384, + 2385, + 2386, + 2387, + 2388, + 2389, + 2390, + 2391, + 2392, + 2393, + 2394, + 2395, + 2396, + 2397, + 2398, + 2399, + 2400, + 2401, + 2402, + 2403, + 2404, + 2405, + 2406, + 2407, + 2408, + 2409, + 2410, + 2411, + 2412, + 2413, + 2414, + 2415, + 2416, + 2417, + 2418, + 2419, + 2420, + 2421, + 2422, + 2423, + 2424, + 2425, + 2426, + 2427, + 2428, + 2429, + 2430, + 2431, + 2432, + 2433, + 2434, + 2435, + 2436, + 2437, + 2438, + 2439, + 2440, + 2441, + 2442, + 2443, + 2444, + 2445, + 2446, + 2447, + 2448, + 2449, + 2450, + 2451, + 2452, + 2453, + 2454, + 2455, + 2456, + 2457, + 2458, + 2459, + 2460, + 2461, + 2462, + 2463, + 2464, + 2465, + 2466, + 2467, + 2468, + 2469, + 2470, + 2471, + 2472, + 2473, + 2474, + 2475, + 2476, + 2477, + 2478, + 2479, + 2480, + 2481, + 2482, + 2483, + 2484, + 2485, + 2486, + 2487, + 2488, + 2489, + 2490, + 2491, + 2492, + 2493, + 2494, + 2495, + 2496, + 2497, + 2498, + 2499, + 2500, + 2501, + 2502, + 2503, + 2504, + 2505, + 2506, + 2507, + 2508, + 2509, + 2510, + 2511, + 2512, + 2513, + 2514, + 2515, + 2516, + 2517, + 2518, + 2519, + 2520, + 2521, + 2522, + 2523, + 2524, + 2525, + 2526, + 2527, + 2528, + 2529, + 2530, + 2531, + 2532, + 2533, + 2534, + 2535, + 2536, + 2537, + 2538, + 2539, + 2540, + 2541, + 2542, + 2543, + 2544, + 2545, + 2546, + 2547, + 2548, + 2549, + 2550, + 2551, + 2552, + 2553, + 2554, + 2555, + 2556, + 2557, + 2558, + 2559, + 2560, + 2561, + 2562, + 2563, + 2564, + 2565, + 2566, + 2567, + 2568, + 2569, + 2570, + 2571, + 2572, + 2573, + 2574, + 2575, + 2576, + 2577, + 2578, + 2579, + 2580, + 2581, + 2582, + 2583, + 2584, + 2585, + 2586, + 2587, + 2588, + 2589, + 2590, + 2591, + 2592, + 2593, + 2594, + 2595, + 2596, + 2597, + 2598, + 2599, + 2600, + 2601, + 2602, + 2603, + 2604, + 2605, + 2606, + 2607, + 2608, + 2609, + 2610, + 2611, + 2612, + 2613, + 2614, + 2615, + 2616, + 2617, + 2618, + 2619, + 2620, + 2621, + 2622, + 2623, + 2624, + 2625, + 
2626, + 2627, + 2628, + 2629, + 2630, + 2631, + 2632, + 2633, + 2634, + 2635, + 2636, + 2637, + 2638, + 2639, + 2640, + 2641, + 2642, + 2643, + 2644, + 2645, + 2646, + 2647, + 2648, + 2649, + 2650, + 2651, + 2652, + 2653, + 2654, + 2655, + 2656, + 2657, + 2658, + 2659, + 2660, + 2661, + 2662, + 2663, + 2664, + 2665, + 2666, + 2667, + 2668, + 2669, + 2670, + 2671, + 2672, + 2673, + 2674, + 2675, + 2676, + 2677, + 2678, + 2679, + 2680, + 2681, + 2682, + 2683, + 2684, + 2685, + 2686, + 2687, + 2688, + 2689, + 2690, + 2691, + 2692, + 2693, + 2694, + 2695, + 2696, + 2697, + 2698, + 2699, + 2700, + 2701, + 2702, + 2703, + 2704, + 2705, + 2706, + 2707, + 2708, + 2709, + 2710, + 2711, + 2712, + 2713, + 2714, + 2715, + 2716, + 2717, + 2718, + 2719, + 2720, + 2721, + 2722, + 2723, + 2724, + 2725, + 2726, + 2727, + 2728, + 2729, + 2730, + 2731, + 2732, + 2733, + 2734, + 2735, + 2736, + 2737, + 2738, + 2739, + 2740, + 2741, + 2742, + 2743, + 2744, + 2745, + 2746, + 2747, + 2748, + 2749, + 2750, + 2751, + 2752, + 2753, + 2754, + 2755, + 2756, + 2757, + 2758, + 2759, + 2760, + 2761, + 2762, + 2763, + 2764, + 2765, + 2766, + 2767, + 2768, + 2769, + 2770, + 2771, + 2772, + 2773, + 2774, + 2775, + 2776, + 2777, + 2778, + 2779, + 2780, + 2781, + 2782, + 2783, + 2784, + 2785, + 2786, + 2787, + 2788, + 2789, + 2790, + 2791, + 2792, + 2793, + 2794, + 2795, + 2796, + 2797, + 2798, + 2799, + 2800, + 2801, + 2802, + 2803, + 2804, + 2805, + 2806, + 2807, + 2808, + 2809, + 2810, + 2811, + 2812, + 2813, + 2814, + 2815, + 2816, + 2817, + 2818, + 2819, + 2820, + 2821, + 2822, + 2823, + 2824, + 2825, + 2826, + 2827, + 2828, + 2829, + 2830, + 2831, + 2832, + 2833, + 2834, + 2835, + 2836, + 2837, + 2838, + 2839, + 2840, + 2841, + 2842, + 2843, + 2844, + 2845, + 2846, + 2847, + 2848, + 2849, + 2850, + 2851, + 2852, + 2853, + 2854, + 2855, + 2856, + 2857, + 2858, + 2859, + 2860, + 2861, + 2862, + 2863, + 2864, + 2865, + 2866, + 2867, + 2868, + 2869, + 2870, + 2871, + 2872, + 2873, + 2874, + 2875, + 
2876, + 2877, + 2878, + 2879, + 2880, + 2881, + 2882, + 2883, + 2884, + 2885, + 2886, + 2887, + 2888, + 2889, + 2890, + 2891, + 2892, + 2893, + 2894, + 2895, + 2896, + 2897, + 2898, + 2899, + 2900, + 2901, + 2902, + 2903, + 2904, + 2905, + 2906, + 2907, + 2908, + 2909, + 2910, + 2911, + 2912, + 2913, + 2914, + 2915, + 2916, + 2917, + 2918, + 2919, + 2920, + 2921, + 2922, + 2923, + 2924, + 2925, + 2926, + 2927, + 2928, + 2929, + 2930, + 2931, + 2932, + 2933, + 2934, + 2935, + 2936, + 2937, + 2938, + 2939, + 2940, + 2941, + 2942, + 2943, + 2944, + 2945, + 2946, + 2947, + 2948, + 2949, + 2950, + 2951, + 2952, + 2953, + 2954, + 2955, + 2956, + 2957, + 2958, + 2959, + 2960, + 2961, + 2962, + 2963, + 2964, + 2965, + 2966, + 2967, + 2968, + 2969, + 2970, + 2971, + 2972, + 2973, + 2974, + 2975, + 2976, + 2977, + 2978, + 2979, + 2980, + 2981, + 2982, + 2983, + 2984, + 2985, + 2986, + 2987, + 2988, + 2989, + 2990, + 2991, + 2992, + 2993, + 2994, + 2995, + 2996, + 2997, + 2998, + 2999, + 3000, + 3001, + 3002, + 3003, + 3004, + 3005, + 3006, + 3007, + 3008, + 3009, + 3010, + 3011, + 3012, + 3013, + 3014, + 3015, + 3016, + 3017, + 3018, + 3019, + 3020, + 3021, + 3022, + 3023, + 3024, + 3025, + 3026, + 3027, + 3028, + 3029, + 3030, + 3031, + 3032, + 3033, + 3034, + 3035, + 3036, + 3037, + 3038, + 3039, + 3040, + 3041, + 3042, + 3043, + 3044, + 3045, + 3046, + 3047, + 3048, + 3049, + 3050, + 3051, + 3052, + 3053, + 3054, + 3055, + 3056, + 3057, + 3058, + 3059, + 3060, + 3061, + 3062, + 3063, + 3064, + 3065, + 3066, + 3067, + 3068, + 3069, + 3070, + 3071, + 3072, + 3073, + 3074, + 3075, + 3076, + 3077, + 3078, + 3079, + 3080, + 3081, + 3082, + 3083, + 3084, + 3085, + 3086, + 3087, + 3088, + 3089, + 3090, + 3091, + 3092, + 3093, + 3094, + 3095, + 3096, + 3097, + 3098, + 3099, + 3100, + 3101, + 3102, + 3103, + 3104, + 3105, + 3106, + 3107, + 3108, + 3109, + 3110, + 3111, + 3112, + 3113, + 3114, + 3115, + 3116, + 3117, + 3118, + 3119, + 3120, + 3121, + 3122, + 3123, + 3124, + 3125, + 
3126, + 3127, + 3128, + 3129, + 3130, + 3131, + 3132, + 3133, + 3134, + 3135, + 3136, + 3137, + 3138, + 3139, + 3140, + 3141, + 3142, + 3143, + 3144, + 3145, + 3146, + 3147, + 3148, + 3149, + 3150, + 3151, + 3152, + 3153, + 3154, + 3155, + 3156, + 3157, + 3158, + 3159, + 3160, + 3161, + 3162, + 3163, + 3164, + 3165, + 3166, + 3167, + 3168, + 3169, + 3170, + 3171, + 3172, + 3173, + 3174, + 3175, + 3176, + 3177, + 3178, + 3179, + 3180, + 3181, + 3182, + 3183, + 3184, + 3185, + 3186, + 3187, + 3188, + 3189, + 3190, + 3191, + 3192, + 3193, + 3194, + 3195, + 3196, + 3197, + 3198, + 3199, + 3200, + 3201, + 3202, + 3203, + 3204, + 3205, + 3206, + 3207, + 3208, + 3209, + 3210, + 3211, + 3212, + 3213, + 3214, + 3215, + 3216, + 3217, + 3218, + 3219, + 3220, + 3221, + 3222, + 3223, + 3224, + 3225, + 3226, + 3227, + 3228, + 3229, + 3230, + 3231, + 3232, + 3233, + 3234, + 3235, + 3236, + 3237, + 3238, + 3239, + 3240, + 3241, + 3242, + 3243, + 3244, + 3245, + 3246, + 3247, + 3248, + 3249, + 3250, + 3251, + 3252, + 3253, + 3254, + 3255, + 3256, + 3257, + 3258, + 3259, + 3260, + 3261, + 3262, + 3263, + 3264, + 3265, + 3266, + 3267, + 3268, + 3269, + 3270, + 3271, + 3272, + 3273, + 3274, + 3275, + 3276, + 3277, + 3278, + 3279, + 3280, + 3281, + 3282, + 3283, + 3284, + 3285, + 3286, + 3287, + 3288, + 3289, + 3290, + 3291, + 3292, + 3293, + 3294, + 3295, + 3296, + 3297, + 3298, + 3299, + 3300, + 3301, + 3302, + 3303, + 3304, + 3305, + 3306, + 3307, + 3308, + 3309, + 3310, + 3311, + 3312, + 3313, + 3314, + 3315, + 3316, + 3317, + 3318, + 3319, + 3320, + 3321, + 3322, + 3323, + 3324, + 3325, + 3326, + 3327, + 3328, + 3329, + 3330, + 3331, + 3332, + 3333, + 3334, + 3335, + 3336, + 3337, + 3338, + 3339, + 3340, + 3341, + 3342, + 3343, + 3344, + 3345, + 3346, + 3347, + 3348, + 3349, + 3350, + 3351, + 3352, + 3353, + 3354, + 3355, + 3356, + 3357, + 3358, + 3359, + 3360, + 3361, + 3362, + 3363, + 3364, + 3365, + 3366, + 3367, + 3368, + 3369, + 3370, + 3371, + 3372, + 3373, + 3374, + 3375, + 
3376, + 3377, + 3378, + 3379, + 3380, + 3381, + 3382, + 3383, + 3384, + 3385, + 3386, + 3387, + 3388, + 3389, + 3390, + 3391, + 3392, + 3393, + 3394, + 3395, + 3396, + 3397, + 3398, + 3399, + 3400, + 3401, + 3402, + 3403, + 3404, + 3405, + 3406, + 3407, + 3408, + 3409, + 3410, + 3411, + 3412, + 3413, + 3414, + 3415, + 3416, + 3417, + 3418, + 3419, + 3420, + 3421, + 3422, + 3423, + 3424, + 3425, + 3426, + 3427, + 3428, + 3429, + 3430, + 3431, + 3432, + 3433, + 3434, + 3435, + 3436, + 3437, + 3438, + 3439, + 3440, + 3441, + 3442, + 3443, + 3444, + 3445, + 3446, + 3447, + 3448, + 3449, + 3450, + 3451, + 3452, + 3453, + 3454, + 3455, + 3456, + 3457, + 3458, + 3459, + 3460, + 3461, + 3462, + 3463, + 3464, + 3465, + 3466, + 3467, + 3468, + 3469, + 3470, + 3471, + 3472, + 3473, + 3474, + 3475, + 3476, + 3477, + 3478, + 3479, + 3480, + 3481, + 3482, + 3483, + 3484, + 3485, + 3486, + 3487, + 3488, + 3489, + 3490, + 3491, + 3492, + 3493, + 3494, + 3495, + 3496, + 3497, + 3498, + 3499, + 3500, + 3501, + 3502, + 3503, + 3504, + 3505, + 3506, + 3507, + 3508, + 3509, + 3510, + 3511, + 3512, + 3513, + 3514, + 3515, + 3516, + 3517, + 3518, + 3519, + 3520, + 3521, + 3522, + 3523, + 3524, + 3525, + 3526, + 3527, + 3528, + 3529, + 3530, + 3531, + 3532, + 3533, + 3534, + 3535, + 3536, + 3537, + 3538, + 3539, + 3540, + 3541, + 3542, + 3543, + 3544, + 3545, + 3546, + 3547, + 3548, + 3549, + 3550, + 3551, + 3552, + 3553, + 3554, + 3555, + 3556, + 3557, + 3558, + 3559, + 3560, + 3561, + 3562, + 3563, + 3564, + 3565, + 3566, + 3567, + 3568, + 3569, + 3570, + 3571, + 3572, + 3573, + 3574, + 3575, + 3576, + 3577, + 3578, + 3579, + 3580, + 3581, + 3582, + 3583, + 3584, + 3585, + 3586, + 3587, + 3588, + 3589, + 3590, + 3591, + 3592, + 3593, + 3594, + 3595, + 3596, + 3597, + 3598, + 3599, + 3600, + 3601, + 3602, + 3603, + 3604, + 3605, + 3606, + 3607, + 3608, + 3609, + 3610, + 3611, + 3612, + 3613, + 3614, + 3615, + 3616, + 3617, + 3618, + 3619, + 3620, + 3621, + 3622, + 3623, + 3624, + 3625, + 
3626, + 3627, + 3628, + 3629, + 3630, + 3631, + 3632, + 3633, + 3634, + 3635, + 3636, + 3637, + 3638, + 3639, + 3640, + 3641, + 3642, + 3643, + 3644, + 3645, + 3646, + 3647, + 3648, + 3649, + 3650, + 3651, + 3652, + 3653, + 3654, + 3655, + 3656, + 3657, + 3658, + 3659, + 3660, + 3661, + 3662, + 3663, + 3664, + 3665, + 3666, + 3667, + 3668, + 3669, + 3670, + 3671, + 3672, + 3673, + 3674, + 3675, + 3676, + 3677, + 3678, + 3679, + 3680, + 3681, + 3682, + 3683, + 3684, + 3685, + 3686, + 3687, + 3688, + 3689, + 3690, + 3691, + 3692, + 3693, + 3694, + 3695, + 3696, + 3697, + 3698, + 3699, + 3700, + 3701, + 3702, + 3703, + 3704, + 3705, + 3706, + 3707, + 3708, + 3709, + 3710, + 3711, + 3712, + 3713, + 3714, + 3715, + 3716, + 3717, + 3718, + 3719, + 3720, + 3721, + 3722, + 3723, + 3724, + 3725, + 3726, + 3727, + 3728, + 3729, + 3730, + 3731, + 3732, + 3733, + 3734, + 3735, + 3736, + 3737, + 3738, + 3739, + 3740, + 3741, + 3742, + 3743, + 3744, + 3745, + 3746, + 3747, + 3748, + 3749, + 3750, + 3751, + 3752, + 3753, + 3754, + 3755, + 3756, + 3757, + 3758, + 3759, + 3760, + 3761, + 3762, + 3763, + 3764, + 3765, + 3766, + 3767, + 3768, + 3769, + 3770, + 3771, + 3772, + 3773, + 3774, + 3775, + 3776, + 3777, + 3778, + 3779, + 3780, + 3781, + 3782, + 3783, + 3784, + 3785, + 3786, + 3787, + 3788, + 3789, + 3790, + 3791, + 3792, + 3793, + 3794, + 3795, + 3796, + 3797, + 3798, + 3799, + 3800, + 3801, + 3802, + 3803, + 3804, + 3805, + 3806, + 3807, + 3808, + 3809, + 3810, + 3811, + 3812, + 3813, + 3814, + 3815, + 3816, + 3817, + 3818, + 3819, + 3820, + 3821, + 3822, + 3823, + 3824, + 3825, + 3826, + 3827, + 3828, + 3829, + 3830, + 3831, + 3832, + 3833, + 3834, + 3835, + 3836, + 3837, + 3838, + 3839, + 3840, + 3841, + 3842, + 3843, + 3844, + 3845, + 3846, + 3847, + 3848, + 3849, + 3850, + 3851, + 3852, + 3853, + 3854, + 3855, + 3856, + 3857, + 3858, + 3859, + 3860, + 3861, + 3862, + 3863, + 3864, + 3865, + 3866, + 3867, + 3868, + 3869, + 3870, + 3871, + 3872, + 3873, + 3874, + 3875, + 
3876, + 3877, + 3878, + 3879, + 3880, + 3881, + 3882, + 3883, + 3884, + 3885, + 3886, + 3887, + 3888, + 3889, + 3890, + 3891, + 3892, + 3893, + 3894, + 3895, + 3896, + 3897, + 3898, + 3899, + 3900, + 3901, + 3902, + 3903, + 3904, + 3905, + 3906, + 3907, + 3908, + 3909, + 3910, + 3911, + 3912, + 3913, + 3914, + 3915, + 3916, + 3917, + 3918, + 3919, + 3920, + 3921, + 3922, + 3923, + 3924, + 3925, + 3926, + 3927, + 3928, + 3929, + 3930, + 3931, + 3932, + 3933, + 3934, + 3935, + 3936, + 3937, + 3938, + 3939, + 3940, + 3941, + 3942, + 3943, + 3944, + 3945, + 3946, + 3947, + 3948, + 3949, + 3950, + 3951, + 3952, + 3953, + 3954, + 3955, + 3956, + 3957, + 3958, + 3959, + 3960, + 3961, + 3962, + 3963, + 3964, + 3965, + 3966, + 3967, + 3968, + 3969, + 3970, + 3971, + 3972, + 3973, + 3974, + 3975, + 3976, + 3977, + 3978, + 3979, + 3980, + 3981, + 3982, + 3983, + 3984, + 3985, + 3986, + 3987, + 3988, + 3989, + 3990, + 3991, + 3992, + 3993, + 3994, + 3995, + 3996, + 3997, + 3998, + 3999, + 4000, + 4001, + 4002, + 4003, + 4004, + 4005, + 4006, + 4007, + 4008, + 4009, + 4010, + 4011, + 4012, + 4013, + 4014, + 4015, + 4016, + 4017, + 4018, + 4019, + 4020, + 4021, + 4022, + 4023, + 4024, + 4025, + 4026, + 4027, + 4028, + 4029, + 4030, + 4031, + 4032, + 4033, + 4034, + 4035, + 4036, + 4037, + 4038, + 4039, + 4040, + 4041, + 4042, + 4043, + 4044, + 4045, + 4046, + 4047, + 4048, + 4049, + 4050, + 4051, + 4052, + 4053, + 4054, + 4055, + 4056, + 4057, + 4058, + 4059, + 4060, + 4061, + 4062, + 4063, + 4064, + 4065, + 4066, + 4067, + 4068, + 4069, + 4070, + 4071, + 4072, + 4073, + 4074, + 4075, + 4076, + 4077, + 4078, + 4079, + 4080, + 4081, + 4082, + 4083, + 4084, + 4085, + 4086, + 4087, + 4088, + 4089, + 4090, + 4091, + 4092, + 4093, + 4094, + 4095, + 4096, + 4097, + 4098, + 4099, + 4100, + 4101, + 4102, + 4103, + 4104, + 4105, + 4106, + 4107, + 4108, + 4109, + 4110, + 4111, + 4112, + 4113, + 4114, + 4115, + 4116, + 4117, + 4118, + 4119, + 4120, + 4121, + 4122, + 4123, + 4124, + 4125, + 
4126, + 4127, + 4128, + 4129, + 4130, + 4131, + 4132, + 4133, + 4134, + 4135, + 4136, + 4137, + 4138, + 4139, + 4140, + 4141, + 4142, + 4143, + 4144, + 4145, + 4146, + 4147, + 4148, + 4149, + 4150, + 4151, + 4152, + 4153, + 4154, + 4155, + 4156, + 4157, + 4158, + 4159, + 4160, + 4161, + 4162, + 4163, + 4164, + 4165, + 4166, + 4167, + 4168, + 4169, + 4170, + 4171, + 4172, + 4173, + 4174, + 4175, + 4176, + 4177, + 4178, + 4179, + 4180, + 4181, + 4182, + 4183, + 4184, + 4185, + 4186, + 4187, + 4188, + 4189, + 4190, + 4191, + 4192, + 4193, + 4194, + 4195, + 4196, + 4197, + 4198, + 4199, + 4200, + 4201, + 4202, + 4203, + 4204, + 4205, + 4206, + 4207, + 4208, + 4209, + 4210, + 4211, + 4212, + 4213, + 4214, + 4215, + 4216, + 4217, + 4218, + 4219, + 4220, + 4221, + 4222, + 4223, + 4224, + 4225, + 4226, + 4227, + 4228, + 4229, + 4230, + 4231, + 4232, + 4233, + 4234, + 4235, + 4236, + 4237, + 4238, + 4239, + 4240, + 4241, + 4242, + 4243, + 4244, + 4245, + 4246, + 4247, + 4248, + 4249, + 4250, + 4251, + 4252, + 4253, + 4254, + 4255, + 4256, + 4257, + 4258, + 4259, + 4260, + 4261, + 4262, + 4263, + 4264, + 4265, + 4266, + 4267, + 4268, + 4269, + 4270, + 4271, + 4272, + 4273, + 4274, + 4275, + 4276, + 4277, + 4278, + 4279, + 4280, + 4281, + 4282, + 4283, + 4284, + 4285, + 4286, + 4287, + 4288, + 4289, + 4290, + 4291, + 4292, + 4293, + 4294, + 4295, + 4296, + 4297, + 4298, + 4299, + 4300, + 4301, + 4302, + 4303, + 4304, + 4305, + 4306, + 4307, + 4308, + 4309, + 4310, + 4311, + 4312, + 4313, + 4314, + 4315, + 4316, + 4317, + 4318, + 4319, + 4320, + 4321, + 4322, + 4323, + 4324, + 4325, + 4326, + 4327, + 4328, + 4329, + 4330, + 4331, + 4332, + 4333, + 4334, + 4335, + 4336, + 4337, + 4338, + 4339, + 4340, + 4341, + 4342, + 4343, + 4344, + 4345, + 4346, + 4347, + 4348, + 4349, + 4350, + 4351, + 4352, + 4353, + 4354, + 4355, + 4356, + 4357, + 4358, + 4359, + 4360, + 4361, + 4362, + 4363, + 4364, + 4365, + 4366, + 4367, + 4368, + 4369, + 4370, + 4371, + 4372, + 4373, + 4374, + 4375, + 
4376, + 4377, + 4378, + 4379, + 4380, + 4381, + 4382, + 4383, + 4384, + 4385, + 4386, + 4387, + 4388, + 4389, + 4390, + 4391, + 4392, + 4393, + 4394, + 4395, + 4396, + 4397, + 4398, + 4399, + 4400, + 4401, + 4402, + 4403, + 4404, + 4405, + 4406, + 4407, + 4408, + 4409, + 4410, + 4411, + 4412, + 4413, + 4414, + 4415, + 4416, + 4417, + 4418, + 4419, + 4420, + 4421, + 4422, + 4423, + 4424, + 4425, + 4426, + 4427, + 4428, + 4429, + 4430, + 4431, + 4432, + 4433, + 4434, + 4435, + 4436, + 4437, + 4438, + 4439, + 4440, + 4441, + 4442, + 4443, + 4444, + 4445, + 4446, + 4447, + 4448, + 4449, + 4450, + 4451, + 4452, + 4453, + 4454, + 4455, + 4456, + 4457, + 4458, + 4459, + 4460, + 4461, + 4462, + 4463, + 4464, + 4465, + 4466, + 4467, + 4468, + 4469, + 4470, + 4471, + 4472, + 4473, + 4474, + 4475, + 4476, + 4477, + 4478, + 4479, + 4480, + 4481, + 4482, + 4483, + 4484, + 4485, + 4486, + 4487, + 4488, + 4489, + 4490, + 4491, + 4492, + 4493, + 4494, + 4495, + 4496, + 4497, + 4498, + 4499, + 4500, + 4501, + 4502, + 4503, + 4504, + 4505, + 4506, + 4507, + 4508, + 4509, + 4510, + 4511, + 4512, + 4513, + 4514, + 4515, + 4516, + 4517, + 4518, + 4519, + 4520, + 4521, + 4522, + 4523, + 4524, + 4525, + 4526, + 4527, + 4528, + 4529, + 4530, + 4531, + 4532, + 4533, + 4534, + 4535, + 4536, + 4537, + 4538, + 4539, + 4540, + 4541, + 4542, + 4543, + 4544, + 4545, + 4546, + 4547, + 4548, + 4549, + 4550, + 4551, + 4552, + 4553, + 4554, + 4555, + 4556, + 4557, + 4558, + 4559, + 4560, + 4561, + 4562, + 4563, + 4564, + 4565, + 4566, + 4567, + 4568, + 4569, + 4570, + 4571, + 4572, + 4573, + 4574, + 4575, + 4576, + 4577, + 4578, + 4579, + 4580, + 4581, + 4582, + 4583, + 4584, + 4585, + 4586, + 4587, + 4588, + 4589, + 4590, + 4591, + 4592, + 4593, + 4594, + 4595, + 4596, + 4597, + 4598, + 4599, + 4600, + 4601, + 4602, + 4603, + 4604, + 4605, + 4606, + 4607, + 4608, + 4609, + 4610, + 4611, + 4612, + 4613, + 4614, + 4615, + 4616, + 4617, + 4618, + 4619, + 4620, + 4621, + 4622, + 4623, + 4624, + 4625, + 
4626, + 4627, + 4628, + 4629, + 4630, + 4631, + 4632, + 4633, + 4634, + 4635, + 4636, + 4637, + 4638, + 4639, + 4640, + 4641, + 4642, + 4643, + 4644, + 4645, + 4646, + 4647, + 4648, + 4649, + 4650, + 4651, + 4652, + 4653, + 4654, + 4655, + 4656, + 4657, + 4658, + 4659, + 4660, + 4661, + 4662, + 4663, + 4664, + 4665, + 4666, + 4667, + 4668, + 4669, + 4670, + 4671, + 4672, + 4673, + 4674, + 4675, + 4676, + 4677, + 4678, + 4679, + 4680, + 4681, + 4682, + 4683, + 4684, + 4685, + 4686, + 4687, + 4688, + 4689, + 4690, + 4691, + 4692, + 4693, + 4694, + 4695, + 4696, + 4697, + 4698, + 4699, + 4700, + 4701, + 4702, + 4703, + 4704, + 4705, + 4706, + 4707, + 4708, + 4709, + 4710, + 4711, + 4712, + 4713, + 4714, + 4715, + 4716, + 4717, + 4718, + 4719, + 4720, + 4721, + 4722, + 4723, + 4724, + 4725, + 4726, + 4727, + 4728, + 4729, + 4730, + 4731, + 4732, + 4733, + 4734, + 4735, + 4736, + 4737, + 4738, + 4739, + 4740, + 4741, + 4742, + 4743, + 4744, + 4745, + 4746, + 4747, + 4748, + 4749, + 4750, + 4751, + 4752, + 4753, + 4754, + 4755, + 4756, + 4757, + 4758, + 4759, + 4760, + 4761, + 4762, + 4763, + 4764, + 4765, + 4766, + 4767, + 4768, + 4769, + 4770, + 4771, + 4772, + 4773, + 4774, + 4775, + 4776, + 4777, + 4778, + 4779, + 4780, + 4781, + 4782, + 4783, + 4784, + 4785, + 4786, + 4787, + 4788, + 4789, + 4790, + 4791, + 4792, + 4793, + 4794, + 4795, + 4796, + 4797, + 4798, + 4799, + 4800, + 4801, + 4802, + 4803, + 4804, + 4805, + 4806, + 4807, + 4808, + 4809, + 4810, + 4811, + 4812, + 4813, + 4814, + 4815, + 4816, + 4817, + 4818, + 4819, + 4820, + 4821, + 4822, + 4823, + 4824, + 4825, + 4826, + 4827, + 4828, + 4829, + 4830, + 4831, + 4832, + 4833, + 4834, + 4835, + 4836, + 4837, + 4838, + 4839, + 4840, + 4841, + 4842, + 4843, + 4844, + 4845, + 4846, + 4847, + 4848, + 4849, + 4850, + 4851, + 4852, + 4853, + 4854, + 4855, + 4856, + 4857, + 4858, + 4859, + 4860, + 4861, + 4862, + 4863, + 4864, + 4865, + 4866, + 4867, + 4868, + 4869, + 4870, + 4871, + 4872, + 4873, + 4874, + 4875, + 
4876, + 4877, + 4878, + 4879, + 4880, + 4881, + 4882, + 4883, + 4884, + 4885, + 4886, + 4887, + 4888, + 4889, + 4890, + 4891, + 4892, + 4893, + 4894, + 4895, + 4896, + 4897, + 4898, + 4899, + 4900, + 4901, + 4902, + 4903, + 4904, + 4905, + 4906, + 4907, + 4908, + 4909, + 4910, + 4911, + 4912, + 4913, + 4914, + 4915, + 4916, + 4917, + 4918, + 4919, + 4920, + 4921, + 4922, + 4923, + 4924, + 4925, + 4926, + 4927, + 4928, + 4929, + 4930, + 4931, + 4932, + 4933, + 4934, + 4935, + 4936, + 4937, + 4938, + 4939, + 4940, + 4941, + 4942, + 4943, + 4944, + 4945, + 4946, + 4947, + 4948, + 4949, + 4950, + 4951, + 4952, + 4953, + 4954, + 4955, + 4956, + 4957, + 4958, + 4959, + 4960, + 4961, + 4962, + 4963, + 4964, + 4965, + 4966, + 4967, + 4968, + 4969, + 4970, + 4971, + 4972, + 4973, + 4974, + 4975, + 4976, + 4977, + 4978, + 4979, + 4980, + 4981, + 4982, + 4983, + 4984, + 4985, + 4986, + 4987, + 4988, + 4989, + 4990, + 4991, + 4992, + 4993, + 4994, + 4995, + 4996, + 4997, + 4998, + 4999, + 5000, + 5001, + 5002, + 5003, + 5004, + 5005, + 5006, + 5007, + 5008, + 5009, + 5010, + 5011, + 5012, + 5013, + 5014, + 5015, + 5016, + 5017, + 5018, + 5019, + 5020, + 5021, + 5022, + 5023, + 5024, + 5025, + 5026, + 5027, + 5028, + 5029, + 5030, + 5031, + 5032, + 5033, + 5034, + 5035, + 5036, + 5037, + 5038, + 5039, + 5040, + 5041, + 5042, + 5043, + 5044, + 5045, + 5046, + 5047, + 5048, + 5049, + 5050, + 5051, + 5052, + 5053, + 5054, + 5055, + 5056, + 5057, + 5058, + 5059, + 5060, + 5061, + 5062, + 5063, + 5064, + 5065, + 5066, + 5067, + 5068, + 5069, + 5070, + 5071, + 5072, + 5073, + 5074, + 5075, + 5076, + 5077, + 5078, + 5079, + 5080, + 5081, + 5082, + 5083, + 5084, + 5085, + 5086, + 5087, + 5088, + 5089, + 5090, + 5091, + 5092, + 5093, + 5094, + 5095, + 5096, + 5097, + 5098, + 5099, + 5100, + 5101, + 5102, + 5103, + 5104, + 5105, + 5106, + 5107, + 5108, + 5109, + 5110, + 5111, + 5112, + 5113, + 5114, + 5115, + 5116, + 5117, + 5118, + 5119, + 5120, + 5121, + 5122, + 5123, + 5124, + 5125, + 
5126, + 5127, + 5128, + 5129, + 5130, + 5131, + 5132, + 5133, + 5134, + 5135, + 5136, + 5137, + 5138, + 5139, + 5140, + 5141, + 5142, + 5143, + 5144, + 5145, + 5146, + 5147, + 5148, + 5149, + 5150, + 5151, + 5152, + 5153, + 5154, + 5155, + 5156, + 5157, + 5158, + 5159, + 5160, + 5161, + 5162, + 5163, + 5164, + 5165, + 5166, + 5167, + 5168, + 5169, + 5170, + 5171, + 5172, + 5173, + 5174, + 5175, + 5176, + 5177, + 5178, + 5179, + 5180, + 5181, + 5182, + 5183, + 5184, + 5185, + 5186, + 5187, + 5188, + 5189, + 5190, + 5191, + 5192, + 5193, + 5194, + 5195, + 5196, + 5197, + 5198, + 5199, + 5200, + 5201, + 5202, + 5203, + 5204, + 5205, + 5206, + 5207, + 5208, + 5209, + 5210, + 5211, + 5212, + 5213, + 5214, + 5215, + 5216, + 5217, + 5218, + 5219, + 5220, + 5221, + 5222, + 5223, + 5224, + 5225, + 5226, + 5227, + 5228, + 5229, + 5230, + 5231, + 5232, + 5233, + 5234, + 5235, + 5236, + 5237, + 5238, + 5239, + 5240, + 5241, + 5242, + 5243, + 5244, + 5245, + 5246, + 5247, + 5248, + 5249, + 5250, + 5251, + 5252, + 5253, + 5254, + 5255, + 5256, + 5257, + 5258, + 5259, + 5260, + 5261, + 5262, + 5263, + 5264, + 5265, + 5266, + 5267, + 5268, + 5269, + 5270, + 5271, + 5272, + 5273, + 5274, + 5275, + 5276, + 5277, + 5278, + 5279, + 5280, + 5281, + 5282, + 5283, + 5284, + 5285, + 5286, + 5287, + 5288, + 5289, + 5290, + 5291, + 5292, + 5293, + 5294, + 5295, + 5296, + 5297, + 5298, + 5299, + 5300, + 5301, + 5302, + 5303, + 5304, + 5305, + 5306, + 5307, + 5308, + 5309, + 5310, + 5311, + 5312, + 5313, + 5314, + 5315, + 5316, + 5317, + 5318, + 5319, + 5320, + 5321, + 5322, + 5323, + 5324, + 5325, + 5326, + 5327, + 5328, + 5329, + 5330, + 5331, + 5332, + 5333, + 5334, + 5335, + 5336, + 5337, + 5338, + 5339, + 5340, + 5341, + 5342, + 5343, + 5344, + 5345, + 5346, + 5347, + 5348, + 5349, + 5350, + 5351, + 5352, + 5353, + 5354, + 5355, + 5356, + 5357, + 5358, + 5359, + 5360, + 5361, + 5362, + 5363, + 5364, + 5365, + 5366, + 5367, + 5368, + 5369, + 5370, + 5371, + 5372, + 5373, + 5374, + 5375, + 
5376, + 5377, + 5378, + 5379, + 5380, + 5381, + 5382, + 5383, + 5384, + 5385, + 5386, + 5387, + 5388, + 5389, + 5390, + 5391, + 5392, + 5393, + 5394, + 5395, + 5396, + 5397, + 5398, + 5399, + 5400, + 5401, + 5402, + 5403, + 5404, + 5405, + 5406, + 5407, + 5408, + 5409, + 5410, + 5411, + 5412, + 5413, + 5414, + 5415, + 5416, + 5417, + 5418, + 5419, + 5420, + 5421, + 5422, + 5423, + 5424, + 5425, + 5426, + 5427, + 5428, + 5429, + 5430, + 5431, + 5432, + 5433, + 5434, + 5435, + 5436, + 5437, + 5438, + 5439, + 5440, + 5441, + 5442, + 5443, + 5444, + 5445, + 5446, + 5447, + 5448, + 5449, + 5450, + 5451, + 5452, + 5453, + 5454, + 5455, + 5456, + 5457, + 5458, + 5459, + 5460, + 5461, + 5462, + 5463, + 5464, + 5465, + 5466, + 5467, + 5468, + 5469, + 5470, + 5471, + 5472, + 5473, + 5474, + 5475, + 5476, + 5477, + 5478, + 5479, + 5480, + 5481, + 5482, + 5483, + 5484, + 5485, + 5486, + 5487, + 5488, + 5489, + 5490, + 5491, + 5492, + 5493, + 5494, + 5495, + 5496, + 5497, + 5498, + 5499, + 5500, + 5501, + 5502, + 5503, + 5504, + 5505, + 5506, + 5507, + 5508, + 5509, + 5510, + 5511, + 5512, + 5513, + 5514, + 5515, + 5516, + 5517, + 5518, + 5519, + 5520, + 5521, + 5522, + 5523, + 5524, + 5525, + 5526, + 5527, + 5528, + 5529, + 5530, + 5531, + 5532, + 5533, + 5534, + 5535, + 5536, + 5537, + 5538, + 5539, + 5540, + 5541, + 5542, + 5543, + 5544, + 5545, + 5546, + 5547, + 5548, + 5549, + 5550, + 5551, + 5552, + 5553, + 5554, + 5555, + 5556, + 5557, + 5558, + 5559, + 5560, + 5561, + 5562, + 5563, + 5564, + 5565, + 5566, + 5567, + 5568, + 5569, + 5570, + 5571, + 5572, + 5573, + 5574, + 5575, + 5576, + 5577, + 5578, + 5579, + 5580, + 5581, + 5582, + 5583, + 5584, + 5585, + 5586, + 5587, + 5588, + 5589, + 5590, + 5591, + 5592, + 5593, + 5594, + 5595, + 5596, + 5597, + 5598, + 5599, + 5600, + 5601, + 5602, + 5603, + 5604, + 5605, + 5606, + 5607, + 5608, + 5609, + 5610, + 5611, + 5612, + 5613, + 5614, + 5615, + 5616, + 5617, + 5618, + 5619, + 5620, + 5621, + 5622, + 5623, + 5624, + 5625, + 
5626, + 5627, + 5628, + 5629, + 5630, + 5631, + 5632, + 5633, + 5634, + 5635, + 5636, + 5637, + 5638, + 5639, + 5640, + 5641, + 5642, + 5643, + 5644, + 5645, + 5646, + 5647, + 5648, + 5649, + 5650, + 5651, + 5652, + 5653, + 5654, + 5655, + 5656, + 5657, + 5658, + 5659, + 5660, + 5661, + 5662, + 5663, + 5664, + 5665, + 5666, + 5667, + 5668, + 5669, + 5670, + 5671, + 5672, + 5673, + 5674, + 5675, + 5676, + 5677, + 5678, + 5679, + 5680, + 5681, + 5682, + 5683, + 5684, + 5685, + 5686, + 5687, + 5688, + 5689, + 5690, + 5691, + 5692, + 5693, + 5694, + 5695, + 5696, + 5697, + 5698, + 5699, + 5700, + 5701, + 5702, + 5703, + 5704, + 5705, + 5706, + 5707, + 5708, + 5709, + 5710, + 5711, + 5712, + 5713, + 5714, + 5715, + 5716, + 5717, + 5718, + 5719, + 5720, + 5721, + 5722, + 5723, + 5724, + 5725, + 5726, + 5727, + 5728, + 5729, + 5730, + 5731, + 5732, + 5733, + 5734, + 5735, + 5736, + 5737, + 5738, + 5739, + 5740, + 5741, + 5742, + 5743, + 5744, + 5745, + 5746, + 5747, + 5748, + 5749, + 5750, + 5751, + 5752, + 5753, + 5754, + 5755, + 5756, + 5757, + 5758, + 5759, + 5760, + 5761, + 5762, + 5763, + 5764, + 5765, + 5766, + 5767, + 5768, + 5769, + 5770, + 5771, + 5772, + 5773, + 5774, + 5775, + 5776, + 5777, + 5778, + 5779, + 5780, + 5781, + 5782, + 5783, + 5784, + 5785, + 5786, + 5787, + 5788, + 5789, + 5790, + 5791, + 5792, + 5793, + 5794, + 5795, + 5796, + 5797, + 5798, + 5799, + 5800, + 5801, + 5802, + 5803, + 5804, + 5805, + 5806, + 5807, + 5808, + 5809, + 5810, + 5811, + 5812, + 5813, + 5814, + 5815, + 5816, + 5817, + 5818, + 5819, + 5820, + 5821, + 5822, + 5823, + 5824, + 5825, + 5826, + 5827, + 5828, + 5829, + 5830, + 5831, + 5832, + 5833, + 5834, + 5835, + 5836, + 5837, + 5838, + 5839, + 5840, + 5841, + 5842, + 5843, + 5844, + 5845, + 5846, + 5847, + 5848, + 5849, + 5850, + 5851, + 5852, + 5853, + 5854, + 5855, + 5856, + 5857, + 5858, + 5859, + 5860, + 5861, + 5862, + 5863, + 5864, + 5865, + 5866, + 5867, + 5868, + 5869, + 5870, + 5871, + 5872, + 5873, + 5874, + 5875, + 
5876, + 5877, + 5878, + 5879, + 5880, + 5881, + 5882, + 5883, + 5884, + 5885, + 5886, + 5887, + 5888, + 5889, + 5890, + 5891, + 5892, + 5893, + 5894, + 5895, + 5896, + 5897, + 5898, + 5899, + 5900, + 5901, + 5902, + 5903, + 5904, + 5905, + 5906, + 5907, + 5908, + 5909, + 5910, + 5911, + 5912, + 5913, + 5914, + 5915, + 5916, + 5917, + 5918, + 5919, + 5920, + 5921, + 5922, + 5923, + 5924, + 5925, + 5926, + 5927, + 5928, + 5929, + 5930, + 5931, + 5932, + 5933, + 5934, + 5935, + 5936, + 5937, + 5938, + 5939, + 5940, + 5941, + 5942, + 5943, + 5944, + 5945, + 5946, + 5947, + 5948, + 5949, + 5950, + 5951, + 5952, + 5953, + 5954, + 5955, + 5956, + 5957, + 5958, + 5959, + 5960, + 5961, + 5962, + 5963, + 5964, + 5965, + 5966, + 5967, + 5968, + 5969, + 5970, + 5971, + 5972, + 5973, + 5974, + 5975, + 5976, + 5977, + 5978, + 5979, + 5980, + 5981, + 5982, + 5983, + 5984, + 5985, + 5986, + 5987, + 5988, + 5989, + 5990, + 5991, + 5992, + 5993, + 5994, + 5995, + 5996, + 5997, + 5998, + 5999, + 6000, + 6001, + 6002, + 6003, + 6004, + 6005, + 6006, + 6007, + 6008, + 6009, + 6010, + 6011, + 6012, + 6013, + 6014, + 6015, + 6016, + 6017, + 6018, + 6019, + 6020, + 6021, + 6022, + 6023, + 6024, + 6025, + 6026, + 6027, + 6028, + 6029, + 6030, + 6031, + 6032, + 6033, + 6034, + 6035, + 6036, + 6037, + 6038, + 6039, + 6040, + 6041, + 6042, + 6043, + 6044, + 6045, + 6046, + 6047, + 6048, + 6049, + 6050, + 6051, + 6052, + 6053, + 6054, + 6055, + 6056, + 6057, + 6058, + 6059, + 6060, + 6061, + 6062, + 6063, + 6064, + 6065, + 6066, + 6067, + 6068, + 6069, + 6070, + 6071, + 6072, + 6073, + 6074, + 6075, + 6076, + 6077, + 6078, + 6079, + 6080, + 6081, + 6082, + 6083, + 6084, + 6085, + 6086, + 6087, + 6088, + 6089, + 6090, + 6091, + 6092, + 6093, + 6094, + 6095, + 6096, + 6097, + 6098, + 6099, + 6100, + 6101, + 6102, + 6103, + 6104, + 6105, + 6106, + 6107, + 6108, + 6109, + 6110, + 6111, + 6112, + 6113, + 6114, + 6115, + 6116, + 6117, + 6118, + 6119, + 6120, + 6121, + 6122, + 6123, + 6124, + 6125, + 
6126, + 6127, + 6128, + 6129, + 6130, + 6131, + 6132, + 6133, + 6134, + 6135, + 6136, + 6137, + 6138, + 6139, + 6140, + 6141, + 6142, + 6143, + 6144, + 6145, + 6146, + 6147, + 6148, + 6149, + 6150, + 6151, + 6152, + 6153, + 6154, + 6155, + 6156, + 6157, + 6158, + 6159, + 6160, + 6161, + 6162, + 6163, + 6164, + 6165, + 6166, + 6167, + 6168, + 6169, + 6170, + 6171, + 6172, + 6173, + 6174, + 6175, + 6176, + 6177, + 6178, + 6179, + 6180, + 6181, + 6182, + 6183, + 6184, + 6185, + 6186, + 6187, + 6188, + 6189, + 6190, + 6191, + 6192, + 6193, + 6194, + 6195, + 6196, + 6197, + 6198, + 6199, + 6200, + 6201, + 6202, + 6203, + 6204, + 6205, + 6206, + 6207, + 6208, + 6209, + 6210, + 6211, + 6212, + 6213, + 6214, + 6215, + 6216, + 6217, + 6218, + 6219, + 6220, + 6221, + 6222, + 6223, + 6224, + 6225, + 6226, + 6227, + 6228, + 6229, + 6230, + 6231, + 6232, + 6233, + 6234, + 6235, + 6236, + 6237, + 6238, + 6239, + 6240, + 6241, + 6242, + 6243, + 6244, + 6245, + 6246, + 6247, + 6248, + 6249, + 6250, + 6251, + 6252, + 6253, + 6254, + 6255, + 6256, + 6257, + 6258, + 6259, + 6260, + 6261, + 6262, + 6263, + 6264, + 6265, + 6266, + 6267, + 6268, + 6269, + 6270, + 6271, + 6272, + 6273, + 6274, + 6275, + 6276, + 6277, + 6278, + 6279, + 6280, + 6281, + 6282, + 6283, + 6284, + 6285, + 6286, + 6287, + 6288, + 6289, + 6290, + 6291, + 6292, + 6293, + 6294, + 6295, + 6296, + 6297, + 6298, + 6299, + 6300, + 6301, + 6302, + 6303, + 6304, + 6305, + 6306, + 6307, + 6308, + 6309, + 6310, + 6311, + 6312, + 6313, + 6314, + 6315, + 6316, + 6317, + 6318, + 6319, + 6320, + 6321, + 6322, + 6323, + 6324, + 6325, + 6326, + 6327, + 6328, + 6329, + 6330, + 6331, + 6332, + 6333, + 6334, + 6335, + 6336, + 6337, + 6338, + 6339, + 6340, + 6341, + 6342, + 6343, + 6344, + 6345, + 6346, + 6347, + 6348, + 6349, + 6350, + 6351, + 6352, + 6353, + 6354, + 6355, + 6356, + 6357, + 6358, + 6359, + 6360, + 6361, + 6362, + 6363, + 6364, + 6365, + 6366, + 6367, + 6368, + 6369, + 6370, + 6371, + 6372, + 6373, + 6374, + 6375, + 
6376, + 6377, + 6378, + 6379, + 6380, + 6381, + 6382, + 6383, + 6384, + 6385, + 6386, + 6387, + 6388, + 6389, + 6390, + 6391, + 6392, + 6393, + 6394, + 6395, + 6396, + 6397, + 6398, + 6399, + 6400, + 6401, + 6402, + 6403, + 6404, + 6405, + 6406, + 6407, + 6408, + 6409, + 6410, + 6411, + 6412, + 6413, + 6414, + 6415, + 6416, + 6417, + 6418, + 6419, + 6420, + 6421, + 6422, + 6423, + 6424, + 6425, + 6426, + 6427, + 6428, + 6429, + 6430, + 6431, + 6432, + 6433, + 6434, + 6435, + 6436, + 6437, + 6438, + 6439, + 6440, + 6441, + 6442, + 6443, + 6444, + 6445, + 6446, + 6447, + 6448, + 6449, + 6450, + 6451, + 6452, + 6453, + 6454, + 6455, + 6456, + 6457, + 6458, + 6459, + 6460, + 6461, + 6462, + 6463, + 6464, + 6465, + 6466, + 6467, + 6468, + 6469, + 6470, + 6471, + 6472, + 6473, + 6474, + 6475, + 6476, + 6477, + 6478, + 6479, + 6480, + 6481, + 6482, + 6483, + 6484, + 6485, + 6486, + 6487, + 6488, + 6489, + 6490, + 6491, + 6492, + 6493, + 6494, + 6495, + 6496, + 6497, + 6498, + 6499, + 6500, + 6501, + 6502, + 6503, + 6504, + 6505, + 6506, + 6507, + 6508, + 6509, + 6510, + 6511, + 6512, + 6513, + 6514, + 6515, + 6516, + 6517, + 6518, + 6519, + 6520, + 6521, + 6522, + 6523, + 6524, + 6525, + 6526, + 6527, + 6528, + 6529, + 6530, + 6531, + 6532, + 6533, + 6534, + 6535, + 6536, + 6537, + 6538, + 6539, + 6540, + 6541, + 6542, + 6543, + 6544, + 6545, + 6546, + 6547, + 6548, + 6549, + 6550, + 6551, + 6552, + 6553, + 6554, + 6555, + 6556, + 6557, + 6558, + 6559, + 6560, + 6561, + 6562, + 6563, + 6564, + 6565, + 6566, + 6567, + 6568, + 6569, + 6570, + 6571, + 6572, + 6573, + 6574, + 6575, + 6576, + 6577, + 6578, + 6579, + 6580, + 6581, + 6582, + 6583, + 6584, + 6585, + 6586, + 6587, + 6588, + 6589, + 6590, + 6591, + 6592, + 6593, + 6594, + 6595, + 6596, + 6597, + 6598, + 6599, + 6600, + 6601, + 6602, + 6603, + 6604, + 6605, + 6606, + 6607, + 6608, + 6609, + 6610, + 6611, + 6612, + 6613, + 6614, + 6615, + 6616, + 6617, + 6618, + 6619, + 6620, + 6621, + 6622, + 6623, + 6624, + 6625, + 
6626, + 6627, + 6628, + 6629, + 6630, + 6631, + 6632, + 6633, + 6634, + 6635, + 6636, + 6637, + 6638, + 6639, + 6640, + 6641, + 6642, + 6643, + 6644, + 6645, + 6646, + 6647, + 6648, + 6649, + 6650, + 6651, + 6652, + 6653, + 6654, + 6655, + 6656, + 6657, + 6658, + 6659, + 6660, + 6661, + 6662, + 6663, + 6664, + 6665, + 6666, + 6667, + 6668, + 6669, + 6670, + 6671, + 6672, + 6673, + 6674, + 6675, + 6676, + 6677, + 6678, + 6679, + 6680, + 6681, + 6682, + 6683, + 6684, + 6685, + 6686, + 6687, + 6688, + 6689, + 6690, + 6691, + 6692, + 6693, + 6694, + 6695, + 6696, + 6697, + 6698, + 6699, + 6700, + 6701, + 6702, + 6703, + 6704, + 6705, + 6706, + 6707, + 6708, + 6709, + 6710, + 6711, + 6712, + 6713, + 6714, + 6715, + 6716, + 6717, + 6718, + 6719, + 6720, + 6721, + 6722, + 6723, + 6724, + 6725, + 6726, + 6727, + 6728, + 6729, + 6730, + 6731, + 6732, + 6733, + 6734, + 6735, + 6736, + 6737, + 6738, + 6739, + 6740, + 6741, + 6742, + 6743, + 6744, + 6745, + 6746, + 6747, + 6748, + 6749, + 6750, + 6751, + 6752, + 6753, + 6754, + 6755, + 6756, + 6757, + 6758, + 6759, + 6760, + 6761, + 6762, + 6763, + 6764, + 6765, + 6766, + 6767, + 6768, + 6769, + 6770, + 6771, + 6772, + 6773, + 6774, + 6775, + 6776, + 6777, + 6778, + 6779, + 6780, + 6781, + 6782, + 6783, + 6784, + 6785, + 6786, + 6787, + 6788, + 6789, + 6790, + 6791, + 6792, + 6793, + 6794, + 6795, + 6796, + 6797, + 6798, + 6799, + 6800, + 6801, + 6802, + 6803, + 6804, + 6805, + 6806, + 6807, + 6808, + 6809, + 6810, + 6811, + 6812, + 6813, + 6814, + 6815, + 6816, + 6817, + 6818, + 6819, + 6820, + 6821, + 6822, + 6823, + 6824, + 6825, + 6826, + 6827, + 6828, + 6829, + 6830, + 6831, + 6832, + 6833, + 6834, + 6835, + 6836, + 6837, + 6838, + 6839, + 6840, + 6841, + 6842, + 6843, + 6844, + 6845, + 6846, + 6847, + 6848, + 6849, + 6850, + 6851, + 6852, + 6853, + 6854, + 6855, + 6856, + 6857, + 6858, + 6859, + 6860, + 6861, + 6862, + 6863, + 6864, + 6865, + 6866, + 6867, + 6868, + 6869, + 6870, + 6871, + 6872, + 6873, + 6874, + 6875, + 
6876, + 6877, + 6878, + 6879, + 6880, + 6881, + 6882, + 6883, + 6884, + 6885, + 6886, + 6887, + 6888, + 6889, + 6890, + 6891, + 6892, + 6893, + 6894, + 6895, + 6896, + 6897, + 6898, + 6899, + 6900, + 6901, + 6902, + 6903, + 6904, + 6905, + 6906, + 6907, + 6908, + 6909, + 6910, + 6911, + 6912, + 6913, + 6914, + 6915, + 6916, + 6917, + 6918, + 6919, + 6920, + 6921, + 6922, + 6923, + 6924, + 6925, + 6926, + 6927, + 6928, + 6929, + 6930, + 6931, + 6932, + 6933, + 6934, + 6935, + 6936, + 6937, + 6938, + 6939, + 6940, + 6941, + 6942, + 6943, + 6944, + 6945, + 6946, + 6947, + 6948, + 6949, + 6950, + 6951, + 6952, + 6953, + 6954, + 6955, + 6956, + 6957, + 6958, + 6959, + 6960, + 6961, + 6962, + 6963, + 6964, + 6965, + 6966, + 6967, + 6968, + 6969, + 6970, + 6971, + 6972, + 6973, + 6974, + 6975, + 6976, + 6977, + 6978, + 6979, + 6980, + 6981, + 6982, + 6983, + 6984, + 6985, + 6986, + 6987, + 6988, + 6989, + 6990, + 6991, + 6992, + 6993, + 6994, + 6995, + 6996, + 6997, + 6998, + 6999, + 7000, + 7001, + 7002, + 7003, + 7004, + 7005, + 7006, + 7007, + 7008, + 7009, + 7010, + 7011, + 7012, + 7013, + 7014, + 7015, + 7016, + 7017, + 7018, + 7019, + 7020, + 7021, + 7022, + 7023, + 7024, + 7025, + 7026, + 7027, + 7028, + 7029, + 7030, + 7031, + 7032, + 7033, + 7034, + 7035, + 7036, + 7037, + 7038, + 7039, + 7040, + 7041, + 7042, + 7043, + 7044, + 7045, + 7046, + 7047, + 7048, + 7049, + 7050, + 7051, + 7052, + 7053, + 7054, + 7055, + 7056, + 7057, + 7058, + 7059, + 7060, + 7061, + 7062, + 7063, + 7064, + 7065, + 7066, + 7067, + 7068, + 7069, + 7070, + 7071, + 7072, + 7073, + 7074, + 7075, + 7076, + 7077, + 7078, + 7079, + 7080, + 7081, + 7082, + 7083, + 7084, + 7085, + 7086, + 7087, + 7088, + 7089, + 7090, + 7091, + 7092, + 7093, + 7094, + 7095, + 7096, + 7097, + 7098, + 7099, + 7100, + 7101, + 7102, + 7103, + 7104, + 7105, + 7106, + 7107, + 7108, + 7109, + 7110, + 7111, + 7112, + 7113, + 7114, + 7115, + 7116, + 7117, + 7118, + 7119, + 7120, + 7121, + 7122, + 7123, + 7124, + 7125, + 
7126, + 7127, + 7128, + 7129, + 7130, + 7131, + 7132, + 7133, + 7134, + 7135, + 7136, + 7137, + 7138, + 7139, + 7140, + 7141, + 7142, + 7143, + 7144, + 7145, + 7146, + 7147, + 7148, + 7149, + 7150, + 7151, + 7152, + 7153, + 7154, + 7155, + 7156, + 7157, + 7158, + 7159, + 7160, + 7161, + 7162, + 7163, + 7164, + 7165, + 7166, + 7167, + 7168, + 7169, + 7170, + 7171, + 7172, + 7173, + 7174, + 7175, + 7176, + 7177, + 7178, + 7179, + 7180, + 7181, + 7182, + 7183, + 7184, + 7185, + 7186, + 7187, + 7188, + 7189, + 7190, + 7191, + 7192, + 7193, + 7194, + 7195, + 7196, + 7197, + 7198, + 7199, + 7200, + 7201, + 7202, + 7203, + 7204, + 7205, + 7206, + 7207, + 7208, + 7209, + 7210, + 7211, + 7212, + 7213, + 7214, + 7215, + 7216, + 7217, + 7218, + 7219, + 7220, + 7221, + 7222, + 7223, + 7224, + 7225, + 7226, + 7227, + 7228, + 7229, + 7230, + 7231, + 7232, + 7233, + 7234, + 7235, + 7236, + 7237, + 7238, + 7239, + 7240, + 7241, + 7242, + 7243, + 7244, + 7245, + 7246, + 7247, + 7248, + 7249, + 7250, + 7251, + 7252, + 7253, + 7254, + 7255, + 7256, + 7257, + 7258, + 7259, + 7260, + 7261, + 7262, + 7263, + 7264, + 7265, + 7266, + 7267, + 7268, + 7269, + 7270, + 7271, + 7272, + 7273, + 7274, + 7275, + 7276, + 7277, + 7278, + 7279, + 7280, + 7281, + 7282, + 7283, + 7284, + 7285, + 7286, + 7287, + 7288, + 7289, + 7290, + 7291, + 7292, + 7293, + 7294, + 7295, + 7296, + 7297, + 7298, + 7299, + 7300, + 7301, + 7302, + 7303, + 7304, + 7305, + 7306, + 7307, + 7308, + 7309, + 7310, + 7311, + 7312, + 7313, + 7314, + 7315, + 7316, + 7317, + 7318, + 7319, + 7320, + 7321, + 7322, + 7323, + 7324, + 7325, + 7326, + 7327, + 7328, + 7329, + 7330, + 7331, + 7332, + 7333, + 7334, + 7335, + 7336, + 7337, + 7338, + 7339, + 7340, + 7341, + 7342, + 7343, + 7344, + 7345, + 7346, + 7347, + 7348, + 7349, + 7350, + 7351, + 7352, + 7353, + 7354, + 7355, + 7356, + 7357, + 7358, + 7359, + 7360, + 7361, + 7362, + 7363, + 7364, + 7365, + 7366, + 7367, + 7368, + 7369, + 7370, + 7371, + 7372, + 7373, + 7374, + 7375, + 
7376, + 7377, + 7378, + 7379, + 7380, + 7381, + 7382, + 7383, + 7384, + 7385, + 7386, + 7387, + 7388, + 7389, + 7390, + 7391, + 7392, + 7393, + 7394, + 7395, + 7396, + 7397, + 7398, + 7399, + 7400, + 7401, + 7402, + 7403, + 7404, + 7405, + 7406, + 7407, + 7408, + 7409, + 7410, + 7411, + 7412, + 7413, + 7414, + 7415, + 7416, + 7417, + 7418, + 7419, + 7420, + 7421, + 7422, + 7423, + 7424, + 7425, + 7426, + 7427, + 7428, + 7429, + 7430, + 7431, + 7432, + 7433, + 7434, + 7435, + 7436, + 7437, + 7438, + 7439, + 7440, + 7441, + 7442, + 7443, + 7444, + 7445, + 7446, + 7447, + 7448, + 7449, + 7450, + 7451, + 7452, + 7453, + 7454, + 7455, + 7456, + 7457, + 7458, + 7459, + 7460, + 7461, + 7462, + 7463, + 7464, + 7465, + 7466, + 7467, + 7468, + 7469, + 7470, + 7471, + 7472, + 7473, + 7474, + 7475, + 7476, + 7477, + 7478, + 7479, + 7480, + 7481, + 7482, + 7483, + 7484, + 7485, + 7486, + 7487, + 7488, + 7489, + 7490, + 7491, + 7492, + 7493, + 7494, + 7495, + 7496, + 7497, + 7498, + 7499, + 7500, + 7501, + 7502, + 7503, + 7504, + 7505, + 7506, + 7507, + 7508, + 7509, + 7510, + 7511, + 7512, + 7513, + 7514, + 7515, + 7516, + 7517, + 7518, + 7519, + 7520, + 7521, + 7522, + 7523, + 7524, + 7525, + 7526, + 7527, + 7528, + 7529, + 7530, + 7531, + 7532, + 7533, + 7534, + 7535, + 7536, + 7537, + 7538, + 7539, + 7540, + 7541, + 7542, + 7543, + 7544, + 7545, + 7546, + 7547, + 7548, + 7549, + 7550, + 7551, + 7552, + 7553, + 7554, + 7555, + 7556, + 7557, + 7558, + 7559, + 7560, + 7561, + 7562, + 7563, + 7564, + 7565, + 7566, + 7567, + 7568, + 7569, + 7570, + 7571, + 7572, + 7573, + 7574, + 7575, + 7576, + 7577, + 7578, + 7579, + 7580, + 7581, + 7582, + 7583, + 7584, + 7585, + 7586, + 7587, + 7588, + 7589, + 7590, + 7591, + 7592, + 7593, + 7594, + 7595, + 7596, + 7597, + 7598, + 7599, + 7600, + 7601, + 7602, + 7603, + 7604, + 7605, + 7606, + 7607, + 7608, + 7609, + 7610, + 7611, + 7612, + 7613, + 7614, + 7615, + 7616, + 7617, + 7618, + 7619, + 7620, + 7621, + 7622, + 7623, + 7624, + 7625, + 
7626, + 7627, + 7628, + 7629, + 7630, + 7631, + 7632, + 7633, + 7634, + 7635, + 7636, + 7637, + 7638, + 7639, + 7640, + 7641, + 7642, + 7643, + 7644, + 7645, + 7646, + 7647, + 7648, + 7649, + 7650, + 7651, + 7652, + 7653, + 7654, + 7655, + 7656, + 7657, + 7658, + 7659, + 7660, + 7661, + 7662, + 7663, + 7664, + 7665, + 7666, + 7667, + 7668, + 7669, + 7670, + 7671, + 7672, + 7673, + 7674, + 7675, + 7676, + 7677, + 7678, + 7679, + 7680, + 7681, + 7682, + 7683, + 7684, + 7685, + 7686, + 7687, + 7688, + 7689, + 7690, + 7691, + 7692, + 7693, + 7694, + 7695, + 7696, + 7697, + 7698, + 7699, + 7700, + 7701, + 7702, + 7703, + 7704, + 7705, + 7706, + 7707, + 7708, + 7709, + 7710, + 7711, + 7712, + 7713, + 7714, + 7715, + 7716, + 7717, + 7718, + 7719, + 7720, + 7721, + 7722, + 7723, + 7724, + 7725, + 7726, + 7727, + 7728, + 7729, + 7730, + 7731, + 7732, + 7733, + 7734, + 7735, + 7736, + 7737, + 7738, + 7739, + 7740, + 7741, + 7742, + 7743, + 7744, + 7745, + 7746, + 7747, + 7748, + 7749, + 7750, + 7751, + 7752, + 7753, + 7754, + 7755, + 7756, + 7757, + 7758, + 7759, + 7760, + 7761, + 7762, + 7763, + 7764, + 7765, + 7766, + 7767, + 7768, + 7769, + 7770, + 7771, + 7772, + 7773, + 7774, + 7775, + 7776, + 7777, + 7778, + 7779, + 7780, + 7781, + 7782, + 7783, + 7784, + 7785, + 7786, + 7787, + 7788, + 7789, + 7790, + 7791, + 7792, + 7793, + 7794, + 7795, + 7796, + 7797, + 7798, + 7799, + 7800, + 7801, + 7802, + 7803, + 7804, + 7805, + 7806, + 7807, + 7808, + 7809, + 7810, + 7811, + 7812, + 7813, + 7814, + 7815, + 7816, + 7817, + 7818, + 7819, + 7820, + 7821, + 7822, + 7823, + 7824, + 7825, + 7826, + 7827, + 7828, + 7829, + 7830, + 7831, + 7832, + 7833, + 7834, + 7835, + 7836, + 7837, + 7838, + 7839, + 7840, + 7841, + 7842, + 7843, + 7844, + 7845, + 7846, + 7847, + 7848, + 7849, + 7850, + 7851, + 7852, + 7853, + 7854, + 7855, + 7856, + 7857, + 7858, + 7859, + 7860, + 7861, + 7862, + 7863, + 7864, + 7865, + 7866, + 7867, + 7868, + 7869, + 7870, + 7871, + 7872, + 7873, + 7874, + 7875, + 
7876, + 7877, + 7878, + 7879, + 7880, + 7881, + 7882, + 7883, + 7884, + 7885, + 7886, + 7887, + 7888, + 7889, + 7890, + 7891, + 7892, + 7893, + 7894, + 7895, + 7896, + 7897, + 7898, + 7899, + 7900, + 7901, + 7902, + 7903, + 7904, + 7905, + 7906, + 7907, + 7908, + 7909, + 7910, + 7911, + 7912, + 7913, + 7914, + 7915, + 7916, + 7917, + 7918, + 7919, + 7920, + 7921, + 7922, + 7923, + 7924, + 7925, + 7926, + 7927, + 7928, + 7929, + 7930, + 7931, + 7932, + 7933, + 7934, + 7935, + 7936, + 7937, + 7938, + 7939, + 7940, + 7941, + 7942, + 7943, + 7944, + 7945, + 7946, + 7947, + 7948, + 7949, + 7950, + 7951, + 7952, + 7953, + 7954, + 7955, + 7956, + 7957, + 7958, + 7959, + 7960, + 7961, + 7962, + 7963, + 7964, + 7965, + 7966, + 7967, + 7968, + 7969, + 7970, + 7971, + 7972, + 7973, + 7974, + 7975, + 7976, + 7977, + 7978, + 7979, + 7980, + 7981, + 7982, + 7983, + 7984, + 7985, + 7986, + 7987, + 7988, + 7989, + 7990, + 7991, + 7992, + 7993, + 7994, + 7995, + 7996, + 7997, + 7998, + 7999, + 8000, + 8001, + 8002, + 8003, + 8004, + 8005, + 8006, + 8007, + 8008, + 8009, + 8010, + 8011, + 8012, + 8013, + 8014, + 8015, + 8016, + 8017, + 8018, + 8019, + 8020, + 8021, + 8022, + 8023, + 8024, + 8025, + 8026, + 8027, + 8028, + 8029, + 8030, + 8031, + 8032, + 8033, + 8034, + 8035, + 8036, + 8037, + 8038, + 8039, + 8040, + 8041, + 8042, + 8043, + 8044, + 8045, + 8046, + 8047, + 8048, + 8049, + 8050, + 8051, + 8052, + 8053, + 8054, + 8055, + 8056, + 8057, + 8058, + 8059, + 8060, + 8061, + 8062, + 8063, + 8064, + 8065, + 8066, + 8067, + 8068, + 8069, + 8070, + 8071, + 8072, + 8073, + 8074, + 8075, + 8076, + 8077, + 8078, + 8079, + 8080, + 8081, + 8082, + 8083, + 8084, + 8085, + 8086, + 8087, + 8088, + 8089, + 8090, + 8091, + 8092, + 8093, + 8094, + 8095, + 8096, + 8097, + 8098, + 8099, + 8100, + 8101, + 8102, + 8103, + 8104, + 8105, + 8106, + 8107, + 8108, + 8109, + 8110, + 8111, + 8112, + 8113, + 8114, + 8115, + 8116, + 8117, + 8118, + 8119, + 8120, + 8121, + 8122, + 8123, + 8124, + 8125, + 
8126, + 8127, + 8128, + 8129, + 8130, + 8131, + 8132, + 8133, + 8134, + 8135, + 8136, + 8137, + 8138, + 8139, + 8140, + 8141, + 8142, + 8143, + 8144, + 8145, + 8146, + 8147, + 8148, + 8149, + 8150, + 8151, + 8152, + 8153, + 8154, + 8155, + 8156, + 8157, + 8158, + 8159, + 8160, + 8161, + 8162, + 8163, + 8164, + 8165, + 8166, + 8167, + 8168, + 8169, + 8170, + 8171, + 8172, + 8173, + 8174, + 8175, + 8176, + 8177, + 8178, + 8179, + 8180, + 8181, + 8182, + 8183, + 8184, + 8185, + 8186, + 8187, + 8188, + 8189, + 8190, + 8191, + 8192, + 8193, + 8194, + 8195, + 8196, + 8197, + 8198, + 8199, + 8200, + 8201, + 8202, + 8203, + 8204, + 8205, + 8206, + 8207, + 8208, + 8209, + 8210, + 8211, + 8212, + 8213, + 8214, + 8215, + 8216, + 8217, + 8218, + 8219, + 8220, + 8221, + 8222, + 8223, + 8224, + 8225, + 8226, + 8227, + 8228, + 8229, + 8230, + 8231, + 8232, + 8233, + 8234, + 8235, + 8236, + 8237, + 8238, + 8239, + 8240, + 8241, + 8242, + 8243, + 8244, + 8245, + 8246, + 8247, + 8248, + 8249, + 8250, + 8251, + 8252, + 8253, + 8254, + 8255, + 8256, + 8257, + 8258, + 8259, + 8260, + 8261, + 8262, + 8263, + 8264, + 8265, + 8266, + 8267, + 8268, + 8269, + 8270, + 8271, + 8272, + 8273, + 8274, + 8275, + 8276, + 8277, + 8278, + 8279, + 8280, + 8281, + 8282, + 8283, + 8284, + 8285, + 8286, + 8287, + 8288, + 8289, + 8290, + 8291, + 8292, + 8293, + 8294, + 8295, + 8296, + 8297, + 8298, + 8299, + 8300, + 8301, + 8302, + 8303, + 8304, + 8305, + 8306, + 8307, + 8308, + 8309, + 8310, + 8311, + 8312, + 8313, + 8314, + 8315, + 8316, + 8317, + 8318, + 8319, + 8320, + 8321, + 8322, + 8323, + 8324, + 8325, + 8326, + 8327, + 8328, + 8329, + 8330, + 8331, + 8332, + 8333, + 8334, + 8335, + 8336, + 8337, + 8338, + 8339, + 8340, + 8341, + 8342, + 8343, + 8344, + 8345, + 8346, + 8347, + 8348, + 8349, + 8350, + 8351, + 8352, + 8353, + 8354, + 8355, + 8356, + 8357, + 8358, + 8359, + 8360, + 8361, + 8362, + 8363, + 8364, + 8365, + 8366, + 8367, + 8368, + 8369, + 8370, + 8371, + 8372, + 8373, + 8374, + 8375, + 
8376, + 8377, + 8378, + 8379, + 8380, + 8381, + 8382, + 8383, + 8384, + 8385, + 8386, + 8387, + 8388, + 8389, + 8390, + 8391, + 8392, + 8393, + 8394, + 8395, + 8396, + 8397, + 8398, + 8399, + 8400, + 8401, + 8402, + 8403, + 8404, + 8405, + 8406, + 8407, + 8408, + 8409, + 8410, + 8411, + 8412, + 8413, + 8414, + 8415, + 8416, + 8417, + 8418, + 8419, + 8420, + 8421, + 8422, + 8423, + 8424, + 8425, + 8426, + 8427, + 8428, + 8429, + 8430, + 8431, + 8432, + 8433, + 8434, + 8435, + 8436, + 8437, + 8438, + 8439, + 8440, + 8441, + 8442, + 8443, + 8444, + 8445, + 8446, + 8447, + 8448, + 8449, + 8450, + 8451, + 8452, + 8453, + 8454, + 8455, + 8456, + 8457, + 8458, + 8459, + 8460, + 8461, + 8462, + 8463, + 8464, + 8465, + 8466, + 8467, + 8468, + 8469, + 8470, + 8471, + 8472, + 8473, + 8474, + 8475, + 8476, + 8477, + 8478, + 8479, + 8480, + 8481, + 8482, + 8483, + 8484, + 8485, + 8486, + 8487, + 8488, + 8489, + 8490, + 8491, + 8492, + 8493, + 8494, + 8495, + 8496, + 8497, + 8498, + 8499, + 8500, + 8501, + 8502, + 8503, + 8504, + 8505, + 8506, + 8507, + 8508, + 8509, + 8510, + 8511, + 8512, + 8513, + 8514, + 8515, + 8516, + 8517, + 8518, + 8519, + 8520, + 8521, + 8522, + 8523, + 8524, + 8525, + 8526, + 8527, + 8528, + 8529, + 8530, + 8531, + 8532, + 8533, + 8534, + 8535, + 8536, + 8537, + 8538, + 8539, + 8540, + 8541, + 8542, + 8543, + 8544, + 8545, + 8546, + 8547, + 8548, + 8549, + 8550, + 8551, + 8552, + 8553, + 8554, + 8555, + 8556, + 8557, + 8558, + 8559, + 8560, + 8561, + 8562, + 8563, + 8564, + 8565, + 8566, + 8567, + 8568, + 8569, + 8570, + 8571, + 8572, + 8573, + 8574, + 8575, + 8576, + 8577, + 8578, + 8579, + 8580, + 8581, + 8582, + 8583, + 8584, + 8585, + 8586, + 8587, + 8588, + 8589, + 8590, + 8591, + 8592, + 8593, + 8594, + 8595, + 8596, + 8597, + 8598, + 8599, + 8600, + 8601, + 8602, + 8603, + 8604, + 8605, + 8606, + 8607, + 8608, + 8609, + 8610, + 8611, + 8612, + 8613, + 8614, + 8615, + 8616, + 8617, + 8618, + 8619, + 8620, + 8621, + 8622, + 8623, + 8624, + 8625, + 
8626, + 8627, + 8628, + 8629, + 8630, + 8631, + 8632, + 8633, + 8634, + 8635, + 8636, + 8637, + 8638, + 8639, + 8640, + 8641, + 8642, + 8643, + 8644, + 8645, + 8646, + 8647, + 8648, + 8649, + 8650, + 8651, + 8652, + 8653, + 8654, + 8655, + 8656, + 8657, + 8658, + 8659, + 8660, + 8661, + 8662, + 8663, + 8664, + 8665, + 8666, + 8667, + 8668, + 8669, + 8670, + 8671, + 8672, + 8673, + 8674, + 8675, + 8676, + 8677, + 8678, + 8679, + 8680, + 8681, + 8682, + 8683, + 8684, + 8685, + 8686, + 8687, + 8688, + 8689, + 8690, + 8691, + 8692, + 8693, + 8694, + 8695, + 8696, + 8697, + 8698, + 8699, + 8700, + 8701, + 8702, + 8703, + 8704, + 8705, + 8706, + 8707, + 8708, + 8709, + 8710, + 8711, + 8712, + 8713, + 8714, + 8715, + 8716, + 8717, + 8718, + 8719, + 8720, + 8721, + 8722, + 8723, + 8724, + 8725, + 8726, + 8727, + 8728, + 8729, + 8730, + 8731, + 8732, + 8733, + 8734, + 8735, + 8736, + 8737, + 8738, + 8739, + 8740, + 8741, + 8742, + 8743, + 8744, + 8745, + 8746, + 8747, + 8748, + 8749, + 8750, + 8751, + 8752, + 8753, + 8754, + 8755, + 8756, + 8757, + 8758, + 8759, + 8760, + 8761, + 8762, + 8763, + 8764, + 8765, + 8766, + 8767, + 8768, + 8769, + 8770, + 8771, + 8772, + 8773, + 8774, + 8775, + 8776, + 8777, + 8778, + 8779, + 8780, + 8781, + 8782, + 8783, + 8784, + 8785, + 8786, + 8787, + 8788, + 8789, + 8790, + 8791, + 8792, + 8793, + 8794, + 8795, + 8796, + 8797, + 8798, + 8799, + 8800, + 8801, + 8802, + 8803, + 8804, + 8805, + 8806, + 8807, + 8808, + 8809, + 8810, + 8811, + 8812, + 8813, + 8814, + 8815, + 8816, + 8817, + 8818, + 8819, + 8820, + 8821, + 8822, + 8823, + 8824, + 8825, + 8826, + 8827, + 8828, + 8829, + 8830, + 8831, + 8832, + 8833, + 8834, + 8835, + 8836, + 8837, + 8838, + 8839, + 8840, + 8841, + 8842, + 8843, + 8844, + 8845, + 8846, + 8847, + 8848, + 8849, + 8850, + 8851, + 8852, + 8853, + 8854, + 8855, + 8856, + 8857, + 8858, + 8859, + 8860, + 8861, + 8862, + 8863, + 8864, + 8865, + 8866, + 8867, + 8868, + 8869, + 8870, + 8871, + 8872, + 8873, + 8874, + 8875, + 
8876, + 8877, + 8878, + 8879, + 8880, + 8881, + 8882, + 8883, + 8884, + 8885, + 8886, + 8887, + 8888, + 8889, + 8890, + 8891, + 8892, + 8893, + 8894, + 8895, + 8896, + 8897, + 8898, + 8899, + 8900, + 8901, + 8902, + 8903, + 8904, + 8905, + 8906, + 8907, + 8908, + 8909, + 8910, + 8911, + 8912, + 8913, + 8914, + 8915, + 8916, + 8917, + 8918, + 8919, + 8920, + 8921, + 8922, + 8923, + 8924, + 8925, + 8926, + 8927, + 8928, + 8929, + 8930, + 8931, + 8932, + 8933, + 8934, + 8935, + 8936, + 8937, + 8938, + 8939, + 8940, + 8941, + 8942, + 8943, + 8944, + 8945, + 8946, + 8947, + 8948, + 8949, + 8950, + 8951, + 8952, + 8953, + 8954, + 8955, + 8956, + 8957, + 8958, + 8959, + 8960, + 8961, + 8962, + 8963, + 8964, + 8965, + 8966, + 8967, + 8968, + 8969, + 8970, + 8971, + 8972, + 8973, + 8974, + 8975, + 8976, + 8977, + 8978, + 8979, + 8980, + 8981, + 8982, + 8983, + 8984, + 8985, + 8986, + 8987, + 8988, + 8989, + 8990, + 8991, + 8992, + 8993, + 8994, + 8995, + 8996, + 8997, + 8998, + 8999, + 9000, + 9001, + 9002, + 9003, + 9004, + 9005, + 9006, + 9007, + 9008, + 9009, + 9010, + 9011, + 9012, + 9013, + 9014, + 9015, + 9016, + 9017, + 9018, + 9019, + 9020, + 9021, + 9022, + 9023, + 9024, + 9025, + 9026, + 9027, + 9028, + 9029, + 9030, + 9031, + 9032, + 9033, + 9034, + 9035, + 9036, + 9037, + 9038, + 9039, + 9040, + 9041, + 9042, + 9043, + 9044, + 9045, + 9046, + 9047, + 9048, + 9049, + 9050, + 9051, + 9052, + 9053, + 9054, + 9055, + 9056, + 9057, + 9058, + 9059, + 9060, + 9061, + 9062, + 9063, + 9064, + 9065, + 9066, + 9067, + 9068, + 9069, + 9070, + 9071, + 9072, + 9073, + 9074, + 9075, + 9076, + 9077, + 9078, + 9079, + 9080, + 9081, + 9082, + 9083, + 9084, + 9085, + 9086, + 9087, + 9088, + 9089, + 9090, + 9091, + 9092, + 9093, + 9094, + 9095, + 9096, + 9097, + 9098, + 9099, + 9100, + 9101, + 9102, + 9103, + 9104, + 9105, + 9106, + 9107, + 9108, + 9109, + 9110, + 9111, + 9112, + 9113, + 9114, + 9115, + 9116, + 9117, + 9118, + 9119, + 9120, + 9121, + 9122, + 9123, + 9124, + 9125, + 
9126, + 9127, + 9128, + 9129, + 9130, + 9131, + 9132, + 9133, + 9134, + 9135, + 9136, + 9137, + 9138, + 9139, + 9140, + 9141, + 9142, + 9143, + 9144, + 9145, + 9146, + 9147, + 9148, + 9149, + 9150, + 9151, + 9152, + 9153, + 9154, + 9155, + 9156, + 9157, + 9158, + 9159, + 9160, + 9161, + 9162, + 9163, + 9164, + 9165, + 9166, + 9167, + 9168, + 9169, + 9170, + 9171, + 9172, + 9173, + 9174, + 9175, + 9176, + 9177, + 9178, + 9179, + 9180, + 9181, + 9182, + 9183, + 9184, + 9185, + 9186, + 9187, + 9188, + 9189, + 9190, + 9191, + 9192, + 9193, + 9194, + 9195, + 9196, + 9197, + 9198, + 9199, + 9200, + 9201, + 9202, + 9203, + 9204, + 9205, + 9206, + 9207, + 9208, + 9209, + 9210, + 9211, + 9212, + 9213, + 9214, + 9215, + 9216, + 9217, + 9218, + 9219, + 9220, + 9221, + 9222, + 9223, + 9224, + 9225, + 9226, + 9227, + 9228, + 9229, + 9230, + 9231, + 9232, + 9233, + 9234, + 9235, + 9236, + 9237, + 9238, + 9239, + 9240, + 9241, + 9242, + 9243, + 9244, + 9245, + 9246, + 9247, + 9248, + 9249, + 9250, + 9251, + 9252, + 9253, + 9254, + 9255, + 9256, + 9257, + 9258, + 9259, + 9260, + 9261, + 9262, + 9263, + 9264, + 9265, + 9266, + 9267, + 9268, + 9269, + 9270, + 9271, + 9272, + 9273, + 9274, + 9275, + 9276, + 9277, + 9278, + 9279, + 9280, + 9281, + 9282, + 9283, + 9284, + 9285, + 9286, + 9287, + 9288, + 9289, + 9290, + 9291, + 9292, + 9293, + 9294, + 9295, + 9296, + 9297, + 9298, + 9299, + 9300, + 9301, + 9302, + 9303, + 9304, + 9305, + 9306, + 9307, + 9308, + 9309, + 9310, + 9311, + 9312, + 9313, + 9314, + 9315, + 9316, + 9317, + 9318, + 9319, + 9320, + 9321, + 9322, + 9323, + 9324, + 9325, + 9326, + 9327, + 9328, + 9329, + 9330, + 9331, + 9332, + 9333, + 9334, + 9335, + 9336, + 9337, + 9338, + 9339, + 9340, + 9341, + 9342, + 9343, + 9344, + 9345, + 9346, + 9347, + 9348, + 9349, + 9350, + 9351, + 9352, + 9353, + 9354, + 9355, + 9356, + 9357, + 9358, + 9359, + 9360, + 9361, + 9362, + 9363, + 9364, + 9365, + 9366, + 9367, + 9368, + 9369, + 9370, + 9371, + 9372, + 9373, + 9374, + 9375, + 
9376, + 9377, + 9378, + 9379, + 9380, + 9381, + 9382, + 9383, + 9384, + 9385, + 9386, + 9387, + 9388, + 9389, + 9390, + 9391, + 9392, + 9393, + 9394, + 9395, + 9396, + 9397, + 9398, + 9399, + 9400, + 9401, + 9402, + 9403, + 9404, + 9405, + 9406, + 9407, + 9408, + 9409, + 9410, + 9411, + 9412, + 9413, + 9414, + 9415, + 9416, + 9417, + 9418, + 9419, + 9420, + 9421, + 9422, + 9423, + 9424, + 9425, + 9426, + 9427, + 9428, + 9429, + 9430, + 9431, + 9432, + 9433, + 9434, + 9435, + 9436, + 9437, + 9438, + 9439, + 9440, + 9441, + 9442, + 9443, + 9444, + 9445, + 9446, + 9447, + 9448, + 9449, + 9450, + 9451, + 9452, + 9453, + 9454, + 9455, + 9456, + 9457, + 9458, + 9459, + 9460, + 9461, + 9462, + 9463, + 9464, + 9465, + 9466, + 9467, + 9468, + 9469, + 9470, + 9471, + 9472, + 9473, + 9474, + 9475, + 9476, + 9477, + 9478, + 9479, + 9480, + 9481, + 9482, + 9483, + 9484, + 9485, + 9486, + 9487, + 9488, + 9489, + 9490, + 9491, + 9492, + 9493, + 9494, + 9495, + 9496, + 9497, + 9498, + 9499, + 9500, + 9501, + 9502, + 9503, + 9504, + 9505, + 9506, + 9507, + 9508, + 9509, + 9510, + 9511, + 9512, + 9513, + 9514, + 9515, + 9516, + 9517, + 9518, + 9519, + 9520, + 9521, + 9522, + 9523, + 9524, + 9525, + 9526, + 9527, + 9528, + 9529, + 9530, + 9531, + 9532, + 9533, + 9534, + 9535, + 9536, + 9537, + 9538, + 9539, + 9540, + 9541, + 9542, + 9543, + 9544, + 9545, + 9546, + 9547, + 9548, + 9549, + 9550, + 9551, + 9552, + 9553, + 9554, + 9555, + 9556, + 9557, + 9558, + 9559, + 9560, + 9561, + 9562, + 9563, + 9564, + 9565, + 9566, + 9567, + 9568, + 9569, + 9570, + 9571, + 9572, + 9573, + 9574, + 9575, + 9576, + 9577, + 9578, + 9579, + 9580, + 9581, + 9582, + 9583, + 9584, + 9585, + 9586, + 9587, + 9588, + 9589, + 9590, + 9591, + 9592, + 9593, + 9594, + 9595, + 9596, + 9597, + 9598, + 9599, + 9600, + 9601, + 9602, + 9603, + 9604, + 9605, + 9606, + 9607, + 9608, + 9609, + 9610, + 9611, + 9612, + 9613, + 9614, + 9615, + 9616, + 9617, + 9618, + 9619, + 9620, + 9621, + 9622, + 9623, + 9624, + 9625, + 
9626, + 9627, + 9628, + 9629, + 9630, + 9631, + 9632, + 9633, + 9634, + 9635, + 9636, + 9637, + 9638, + 9639, + 9640, + 9641, + 9642, + 9643, + 9644, + 9645, + 9646, + 9647, + 9648, + 9649, + 9650, + 9651, + 9652, + 9653, + 9654, + 9655, + 9656, + 9657, + 9658, + 9659, + 9660, + 9661, + 9662, + 9663, + 9664, + 9665, + 9666, + 9667, + 9668, + 9669, + 9670, + 9671, + 9672, + 9673, + 9674, + 9675, + 9676, + 9677, + 9678, + 9679, + 9680, + 9681, + 9682, + 9683, + 9684, + 9685, + 9686, + 9687, + 9688, + 9689, + 9690, + 9691, + 9692, + 9693, + 9694, + 9695, + 9696, + 9697, + 9698, + 9699, + 9700, + 9701, + 9702, + 9703, + 9704, + 9705, + 9706, + 9707, + 9708, + 9709, + 9710, + 9711, + 9712, + 9713, + 9714, + 9715, + 9716, + 9717, + 9718, + 9719, + 9720, + 9721, + 9722, + 9723, + 9724, + 9725, + 9726, + 9727, + 9728, + 9729, + 9730, + 9731, + 9732, + 9733, + 9734, + 9735, + 9736, + 9737, + 9738, + 9739, + 9740, + 9741, + 9742, + 9743, + 9744, + 9745, + 9746, + 9747, + 9748, + 9749, + 9750, + 9751, + 9752, + 9753, + 9754, + 9755, + 9756, + 9757, + 9758, + 9759, + 9760, + 9761, + 9762, + 9763, + 9764, + 9765, + 9766, + 9767, + 9768, + 9769, + 9770, + 9771, + 9772, + 9773, + 9774, + 9775, + 9776, + 9777, + 9778, + 9779, + 9780, + 9781, + 9782, + 9783, + 9784, + 9785, + 9786, + 9787, + 9788, + 9789, + 9790, + 9791, + 9792, + 9793, + 9794, + 9795, + 9796, + 9797, + 9798, + 9799, + 9800, + 9801, + 9802, + 9803, + 9804, + 9805, + 9806, + 9807, + 9808, + 9809, + 9810, + 9811, + 9812, + 9813, + 9814, + 9815, + 9816, + 9817, + 9818, + 9819, + 9820, + 9821, + 9822, + 9823, + 9824, + 9825, + 9826, + 9827, + 9828, + 9829, + 9830, + 9831, + 9832, + 9833, + 9834, + 9835, + 9836, + 9837, + 9838, + 9839, + 9840, + 9841, + 9842, + 9843, + 9844, + 9845, + 9846, + 9847, + 9848, + 9849, + 9850, + 9851, + 9852, + 9853, + 9854, + 9855, + 9856, + 9857, + 9858, + 9859, + 9860, + 9861, + 9862, + 9863, + 9864, + 9865, + 9866, + 9867, + 9868, + 9869, + 9870, + 9871, + 9872, + 9873, + 9874, + 9875, + 
9876, + 9877, + 9878, + 9879, + 9880, + 9881, + 9882, + 9883, + 9884, + 9885, + 9886, + 9887, + 9888, + 9889, + 9890, + 9891, + 9892, + 9893, + 9894, + 9895, + 9896, + 9897, + 9898, + 9899, + 9900, + 9901, + 9902, + 9903, + 9904, + 9905, + 9906, + 9907, + 9908, + 9909, + 9910, + 9911, + 9912, + 9913, + 9914, + 9915, + 9916, + 9917, + 9918, + 9919, + 9920, + 9921, + 9922, + 9923, + 9924, + 9925, + 9926, + 9927, + 9928, + 9929, + 9930, + 9931, + 9932, + 9933, + 9934, + 9935, + 9936, + 9937, + 9938, + 9939, + 9940, + 9941, + 9942, + 9943, + 9944, + 9945, + 9946, + 9947, + 9948, + 9949, + 9950, + 9951, + 9952, + 9953, + 9954, + 9955, + 9956, + 9957, + 9958, + 9959, + 9960, + 9961, + 9962, + 9963, + 9964, + 9965, + 9966, + 9967, + 9968, + 9969, + 9970, + 9971, + 9972, + 9973, + 9974, + 9975, + 9976, + 9977, + 9978, + 9979, + 9980, + 9981, + 9982, + 9983, + 9984, + 9985, + 9986, + 9987, + 9988, + 9989, + 9990, + 9991, + 9992, + 9993, + 9994, + 9995, + 9996, + 9997, + 9998, + 9999, + 10000, + 10001, + 10002, + 10003, + 10004, + 10005, + 10006, + 10007, + 10008, + 10009, + 10010, + 10011, + 10012, + 10013, + 10014, + 10015, + 10016, + 10017, + 10018, + 10019, + 10020, + 10021, + 10022, + 10023, + 10024, + 10025, + 10026, + 10027, + 10028, + 10029, + 10030, + 10031, + 10032, + 10033, + 10034, + 10035, + 10036, + 10037, + 10038, + 10039, + 10040, + 10041, + 10042, + 10043, + 10044, + 10045, + 10046, + 10047, + 10048, + 10049, + 10050, + 10051, + 10052, + 10053, + 10054, + 10055, + 10056, + 10057, + 10058, + 10059, + 10060, + 10061, + 10062, + 10063, + 10064, + 10065, + 10066, + 10067, + 10068, + 10069, + 10070, + 10071, + 10072, + 10073, + 10074, + 10075, + 10076, + 10077, + 10078, + 10079, + 10080, + 10081, + 10082, + 10083, + 10084, + 10085, + 10086, + 10087, + 10088, + 10089, + 10090, + 10091, + 10092, + 10093, + 10094, + 10095, + 10096, + 10097, + 10098, + 10099, + 10100, + 10101, + 10102, + 10103, + 10104, + 10105, + 10106, + 10107, + 10108, + 10109, + 10110, + 10111, + 
10112, + 10113, + 10114, + 10115, + 10116, + 10117, + 10118, + 10119, + 10120, + 10121, + 10122, + 10123, + 10124, + 10125, + 10126, + 10127, + 10128, + 10129, + 10130, + 10131, + 10132, + 10133, + 10134, + 10135, + 10136, + 10137, + 10138, + 10139, + 10140, + 10141, + 10142, + 10143, + 10144, + 10145, + 10146, + 10147, + 10148, + 10149, + 10150, + 10151, + 10152, + 10153, + 10154, + 10155, + 10156, + 10157, + 10158, + 10159, + 10160, + 10161, + 10162, + 10163, + 10164, + 10165, + 10166, + 10167, + 10168, + 10169, + 10170, + 10171, + 10172, + 10173, + 10174, + 10175, + 10176, + 10177, + 10178, + 10179, + 10180, + 10181, + 10182, + 10183, + 10184, + 10185, + 10186, + 10187, + 10188, + 10189, + 10190, + 10191, + 10192, + 10193, + 10194, + 10195, + 10196, + 10197, + 10198, + 10199, + 10200, + 10201, + 10202, + 10203, + 10204, + 10205, + 10206, + 10207, + 10208, + 10209, + 10210, + 10211, + 10212, + 10213, + 10214, + 10215, + 10216, + 10217, + 10218, + 10219, + 10220, + 10221, + 10222, + 10223, + 10224, + 10225, + 10226, + 10227, + 10228, + 10229, + 10230, + 10231, + 10232, + 10233, + 10234, + 10235, + 10236, + 10237, + 10238, + 10239, + 10240, + 10241, + 10242, + 10243, + 10244, + 10245, + 10246, + 10247, + 10248, + 10249, + 10250, + 10251, + 10252, + 10253, + 10254, + 10255, + 10256, + 10257, + 10258, + 10259, + 10260, + 10261, + 10262, + 10263, + 10264, + 10265, + 10266, + 10267, + 10268, + 10269, + 10270, + 10271, + 10272, + 10273, + 10274, + 10275, + 10276, + 10277, + 10278, + 10279, + 10280, + 10281, + 10282, + 10283, + 10284, + 10285, + 10286, + 10287, + 10288, + 10289, + 10290, + 10291, + 10292, + 10293, + 10294, + 10295, + 10296, + 10297, + 10298, + 10299, + 10300, + 10301, + 10302, + 10303, + 10304, + 10305, + 10306, + 10307, + 10308, + 10309, + 10310, + 10311, + 10312, + 10313, + 10314, + 10315, + 10316, + 10317, + 10318, + 10319, + 10320, + 10321, + 10322, + 10323, + 10324, + 10325, + 10326, + 10327, + 10328, + 10329, + 10330, + 10331, + 10332, + 10333, + 
10334, + 10335, + 10336, + 10337, + 10338, + 10339, + 10340, + 10341, + 10342, + 10343, + 10344, + 10345, + 10346, + 10347, + 10348, + 10349, + 10350, + 10351, + 10352, + 10353, + 10354, + 10355, + 10356, + 10357, + 10358, + 10359, + 10360, + 10361, + 10362, + 10363, + 10364, + 10365, + 10366, + 10367, + 10368, + 10369, + 10370, + 10371, + 10372, + 10373, + 10374, + 10375, + 10376, + 10377, + 10378, + 10379, + 10380, + 10381, + 10382, + 10383, + 10384, + 10385, + 10386, + 10387, + 10388, + 10389, + 10390, + 10391, + 10392, + 10393, + 10394, + 10395, + 10396, + 10397, + 10398, + 10399, + 10400, + 10401, + 10402, + 10403, + 10404, + 10405, + 10406, + 10407, + 10408, + 10409, + 10410, + 10411, + 10412, + 10413, + 10414, + 10415, + 10416, + 10417, + 10418, + 10419, + 10420, + 10421, + 10422, + 10423, + 10424, + 10425, + 10426, + 10427, + 10428, + 10429, + 10430, + 10431, + 10432, + 10433, + 10434, + 10435, + 10436, + 10437, + 10438, + 10439, + 10440, + 10441, + 10442, + 10443, + 10444, + 10445, + 10446, + 10447, + 10448, + 10449, + 10450, + 10451, + 10452, + 10453, + 10454, + 10455, + 10456, + 10457, + 10458, + 10459, + 10460, + 10461, + 10462, + 10463, + 10464, + 10465, + 10466, + 10467, + 10468, + 10469, + 10470, + 10471, + 10472, + 10473, + 10474, + 10475, + 10476, + 10477, + 10478, + 10479, + 10480, + 10481, + 10482, + 10483, + 10484, + 10485, + 10486, + 10487, + 10488, + 10489, + 10490, + 10491, + 10492, + 10493, + 10494, + 10495, + 10496, + 10497, + 10498, + 10499, + 10500, + 10501, + 10502, + 10503, + 10504, + 10505, + 10506, + 10507, + 10508, + 10509, + 10510, + 10511, + 10512, + 10513, + 10514, + 10515, + 10516, + 10517, + 10518, + 10519, + 10520, + 10521, + 10522, + 10523, + 10524, + 10525, + 10526, + 10527, + 10528, + 10529, + 10530, + 10531, + 10532, + 10533, + 10534, + 10535, + 10536, + 10537, + 10538, + 10539, + 10540, + 10541, + 10542, + 10543, + 10544, + 10545, + 10546, + 10547, + 10548, + 10549, + 10550, + 10551, + 10552, + 10553, + 10554, + 10555, + 
10556, + 10557, + 10558, + 10559, + 10560, + 10561, + 10562, + 10563, + 10564, + 10565, + 10566, + 10567, + 10568, + 10569, + 10570, + 10571, + 10572, + 10573, + 10574, + 10575, + 10576, + 10577, + 10578, + 10579, + 10580, + 10581, + 10582, + 10583, + 10584, + 10585, + 10586, + 10587, + 10588, + 10589, + 10590, + 10591, + 10592, + 10593, + 10594, + 10595, + 10596, + 10597, + 10598, + 10599, + 10600, + 10601, + 10602, + 10603, + 10604, + 10605, + 10606, + 10607, + 10608, + 10609, + 10610, + 10611, + 10612, + 10613, + 10614, + 10615, + 10616, + 10617, + 10618, + 10619, + 10620, + 10621, + 10622, + 10623, + 10624, + 10625, + 10626, + 10627, + 10628, + 10629, + 10630, + 10631, + 10632, + 10633, + 10634, + 10635, + 10636, + 10637, + 10638, + 10639, + 10640, + 10641, + 10642, + 10643, + 10644, + 10645, + 10646, + 10647, + 10648, + 10649, + 10650, + 10651, + 10652, + 10653, + 10654, + 10655, + 10656, + 10657, + 10658, + 10659, + 10660, + 10661, + 10662, + 10663, + 10664, + 10665, + 10666, + 10667, + 10668, + 10669, + 10670, + 10671, + 10672, + 10673, + 10674, + 10675, + 10676, + 10677, + 10678, + 10679, + 10680, + 10681, + 10682, + 10683, + 10684, + 10685, + 10686, + 10687, + 10688, + 10689, + 10690, + 10691, + 10692, + 10693, + 10694, + 10695, + 10696, + 10697, + 10698, + 10699, + 10700, + 10701, + 10702, + 10703, + 10704, + 10705, + 10706, + 10707, + 10708, + 10709, + 10710, + 10711, + 10712, + 10713, + 10714, + 10715, + 10716, + 10717, + 10718, + 10719, + 10720, + 10721, + 10722, + 10723, + 10724, + 10725, + 10726, + 10727, + 10728, + 10729, + 10730, + 10731, + 10732, + 10733, + 10734, + 10735, + 10736, + 10737, + 10738, + 10739, + 10740, + 10741, + 10742, + 10743, + 10744, + 10745, + 10746, + 10747, + 10748, + 10749, + 10750, + 10751, + 10752, + 10753, + 10754, + 10755, + 10756, + 10757, + 10758, + 10759, + 10760, + 10761, + 10762, + 10763, + 10764, + 10765, + 10766, + 10767, + 10768, + 10769, + 10770, + 10771, + 10772, + 10773, + 10774, + 10775, + 10776, + 10777, + 
10778, + 10779, + 10780, + 10781, + 10782, + 10783, + 10784, + 10785, + 10786, + 10787, + 10788, + 10789, + 10790, + 10791, + 10792, + 10793, + 10794, + 10795, + 10796, + 10797, + 10798, + 10799, + 10800, + 10801, + 10802, + 10803, + 10804, + 10805, + 10806, + 10807, + 10808, + 10809, + 10810, + 10811, + 10812, + 10813, + 10814, + 10815, + 10816, + 10817, + 10818, + 10819, + 10820, + 10821, + 10822, + 10823, + 10824, + 10825, + 10826, + 10827, + 10828, + 10829, + 10830, + 10831, + 10832, + 10833, + 10834, + 10835, + 10836, + 10837, + 10838, + 10839, + 10840, + 10841, + 10842, + 10843, + 10844, + 10845, + 10846, + 10847, + 10848, + 10849, + 10850, + 10851, + 10852, + 10853, + 10854, + 10855, + 10856, + 10857, + 10858, + 10859, + 10860, + 10861, + 10862, + 10863, + 10864, + 10865, + 10866, + 10867, + 10868, + 10869, + 10870, + 10871, + 10872, + 10873, + 10874, + 10875, + 10876, + 10877, + 10878, + 10879, + 10880, + 10881, + 10882, + 10883, + 10884, + 10885, + 10886, + 10887, + 10888, + 10889, + 10890, + 10891, + 10892, + 10893, + 10894, + 10895, + 10896, + 10897, + 10898, + 10899, + 10900, + 10901, + 10902, + 10903, + 10904, + 10905, + 10906, + 10907, + 10908, + 10909, + 10910, + 10911, + 10912, + 10913, + 10914, + 10915, + 10916, + 10917, + 10918, + 10919, + 10920, + 10921, + 10922, + 10923, + 10924, + 10925, + 10926, + 10927, + 10928, + 10929, + 10930, + 10931, + 10932, + 10933, + 10934, + 10935, + 10936, + 10937, + 10938, + 10939, + 10940, + 10941, + 10942, + 10943, + 10944, + 10945, + 10946, + 10947, + 10948, + 10949, + 10950, + 10951, + 10952, + 10953, + 10954, + 10955, + 10956, + 10957, + 10958, + 10959, + 10960, + 10961, + 10962, + 10963, + 10964, + 10965, + 10966, + 10967, + 10968, + 10969, + 10970, + 10971, + 10972, + 10973, + 10974, + 10975, + 10976, + 10977, + 10978, + 10979, + 10980, + 10981, + 10982, + 10983, + 10984, + 10985, + 10986, + 10987, + 10988, + 10989, + 10990, + 10991, + 10992, + 10993, + 10994, + 10995, + 10996, + 10997, + 10998, + 10999, + 
11000, + 11001, + 11002, + 11003, + 11004, + 11005, + 11006, + 11007, + 11008, + 11009, + 11010, + 11011, + 11012, + 11013, + 11014, + 11015, + 11016, + 11017, + 11018, + 11019, + 11020, + 11021, + 11022, + 11023, + 11024, + 11025, + 11026, + 11027, + 11028, + 11029, + 11030, + 11031, + 11032, + 11033, + 11034, + 11035, + 11036, + 11037, + 11038, + 11039, + 11040, + 11041, + 11042, + 11043, + 11044, + 11045, + 11046, + 11047, + 11048, + 11049, + 11050, + 11051, + 11052, + 11053, + 11054, + 11055, + 11056, + 11057, + 11058, + 11059, + 11060, + 11061, + 11062, + 11063, + 11064, + 11065, + 11066, + 11067, + 11068, + 11069, + 11070, + 11071, + 11072, + 11073, + 11074, + 11075, + 11076, + 11077, + 11078, + 11079, + 11080, + 11081, + 11082, + 11083, + 11084, + 11085, + 11086, + 11087, + 11088, + 11089, + 11090, + 11091, + 11092, + 11093, + 11094, + 11095, + 11096, + 11097, + 11098, + 11099, + 11100, + 11101, + 11102, + 11103, + 11104, + 11105, + 11106, + 11107, + 11108, + 11109, + 11110, + 11111, + 11112, + 11113, + 11114, + 11115, + 11116, + 11117, + 11118, + 11119, + 11120, + 11121, + 11122, + 11123, + 11124, + 11125, + 11126, + 11127, + 11128, + 11129, + 11130, + 11131, + 11132, + 11133, + 11134, + 11135, + 11136, + 11137, + 11138, + 11139, + 11140, + 11141, + 11142, + 11143, + 11144, + 11145, + 11146, + 11147, + 11148, + 11149, + 11150, + 11151, + 11152, + 11153, + 11154, + 11155, + 11156, + 11157, + 11158, + 11159, + 11160, + 11161, + 11162, + 11163, + 11164, + 11165, + 11166, + 11167, + 11168, + 11169, + 11170, + 11171, + 11172, + 11173, + 11174, + 11175, + 11176, + 11177, + 11178, + 11179, + 11180, + 11181, + 11182, + 11183, + 11184, + 11185, + 11186, + 11187, + 11188, + 11189, + 11190, + 11191, + 11192, + 11193, + 11194, + 11195, + 11196, + 11197, + 11198, + 11199, + 11200, + 11201, + 11202, + 11203, + 11204, + 11205, + 11206, + 11207, + 11208, + 11209, + 11210, + 11211, + 11212, + 11213, + 11214, + 11215, + 11216, + 11217, + 11218, + 11219, + 11220, + 11221, + 
11222, + 11223, + 11224, + 11225, + 11226, + 11227, + 11228, + 11229, + 11230, + 11231, + 11232, + 11233, + 11234, + 11235, + 11236, + 11237, + 11238, + 11239, + 11240, + 11241, + 11242, + 11243, + 11244, + 11245, + 11246, + 11247, + 11248, + 11249, + 11250, + 11251, + 11252, + 11253, + 11254, + 11255, + 11256, + 11257, + 11258, + 11259, + 11260, + 11261, + 11262, + 11263, + 11264, + 11265, + 11266, + 11267, + 11268, + 11269, + 11270, + 11271, + 11272, + 11273, + 11274, + 11275, + 11276, + 11277, + 11278, + 11279, + 11280, + 11281, + 11282, + 11283, + 11284, + 11285, + 11286, + 11287, + 11288, + 11289, + 11290, + 11291, + 11292, + 11293, + 11294, + 11295, + 11296, + 11297, + 11298, + 11299, + 11300, + 11301, + 11302, + 11303, + 11304, + 11305, + 11306, + 11307, + 11308, + 11309, + 11310, + 11311, + 11312, + 11313, + 11314, + 11315, + 11316, + 11317, + 11318, + 11319, + 11320, + 11321, + 11322, + 11323, + 11324, + 11325, + 11326, + 11327, + 11328, + 11329, + 11330, + 11331, + 11332, + 11333, + 11334, + 11335, + 11336, + 11337, + 11338, + 11339, + 11340, + 11341, + 11342, + 11343, + 11344, + 11345, + 11346, + 11347, + 11348, + 11349, + 11350, + 11351, + 11352, + 11353, + 11354, + 11355, + 11356, + 11357, + 11358, + 11359, + 11360, + 11361, + 11362, + 11363, + 11364, + 11365, + 11366, + 11367, + 11368, + 11369, + 11370, + 11371, + 11372, + 11373, + 11374, + 11375, + 11376, + 11377, + 11378, + 11379, + 11380, + 11381, + 11382, + 11383, + 11384, + 11385, + 11386, + 11387, + 11388, + 11389, + 11390, + 11391, + 11392, + 11393, + 11394, + 11395, + 11396, + 11397, + 11398, + 11399, + 11400, + 11401, + 11402, + 11403, + 11404, + 11405, + 11406, + 11407, + 11408, + 11409, + 11410, + 11411, + 11412, + 11413, + 11414, + 11415, + 11416, + 11417, + 11418, + 11419, + 11420, + 11421, + 11422, + 11423, + 11424, + 11425, + 11426, + 11427, + 11428, + 11429, + 11430, + 11431, + 11432, + 11433, + 11434, + 11435, + 11436, + 11437, + 11438, + 11439, + 11440, + 11441, + 11442, + 11443, + 
11444, + 11445, + 11446, + 11447, + 11448, + 11449, + 11450, + 11451, + 11452, + 11453, + 11454, + 11455, + 11456, + 11457, + 11458, + 11459, + 11460, + 11461, + 11462, + 11463, + 11464, + 11465, + 11466, + 11467, + 11468, + 11469, + 11470, + 11471, + 11472, + 11473, + 11474, + 11475, + 11476, + 11477, + 11478, + 11479, + 11480, + 11481, + 11482, + 11483, + 11484, + 11485, + 11486, + 11487, + 11488, + 11489, + 11490, + 11491, + 11492, + 11493, + 11494, + 11495, + 11496, + 11497, + 11498, + 11499, + 11500, + 11501, + 11502, + 11503, + 11504, + 11505, + 11506, + 11507, + 11508, + 11509, + 11510, + 11511, + 11512, + 11513, + 11514, + 11515, + 11516, + 11517, + 11518, + 11519, + 11520, + 11521, + 11522, + 11523, + 11524, + 11525, + 11526, + 11527, + 11528, + 11529, + 11530, + 11531, + 11532, + 11533, + 11534, + 11535, + 11536, + 11537, + 11538, + 11539, + 11540, + 11541, + 11542, + 11543, + 11544, + 11545, + 11546, + 11547, + 11548, + 11549, + 11550, + 11551, + 11552, + 11553, + 11554, + 11555, + 11556, + 11557, + 11558, + 11559, + 11560, + 11561, + 11562, + 11563, + 11564, + 11565, + 11566, + 11567, + 11568, + 11569, + 11570, + 11571, + 11572, + 11573, + 11574, + 11575, + 11576, + 11577, + 11578, + 11579, + 11580, + 11581, + 11582, + 11583, + 11584, + 11585, + 11586, + 11587, + 11588, + 11589, + 11590, + 11591, + 11592, + 11593, + 11594, + 11595, + 11596, + 11597, + 11598, + 11599, + 11600, + 11601, + 11602, + 11603, + 11604, + 11605, + 11606, + 11607, + 11608, + 11609, + 11610, + 11611, + 11612, + 11613, + 11614, + 11615, + 11616, + 11617, + 11618, + 11619, + 11620, + 11621, + 11622, + 11623, + 11624, + 11625, + 11626, + 11627, + 11628, + 11629, + 11630, + 11631, + 11632, + 11633, + 11634, + 11635, + 11636, + 11637, + 11638, + 11639, + 11640, + 11641, + 11642, + 11643, + 11644, + 11645, + 11646, + 11647, + 11648, + 11649, + 11650, + 11651, + 11652, + 11653, + 11654, + 11655, + 11656, + 11657, + 11658, + 11659, + 11660, + 11661, + 11662, + 11663, + 11664, + 11665, + 
11666, + 11667, + 11668, + 11669, + 11670, + 11671, + 11672, + 11673, + 11674, + 11675, + 11676, + 11677, + 11678, + 11679, + 11680, + 11681, + 11682, + 11683, + 11684, + 11685, + 11686, + 11687, + 11688, + 11689, + 11690, + 11691, + 11692, + 11693, + 11694, + 11695, + 11696, + 11697, + 11698, + 11699, + 11700, + 11701, + 11702, + 11703, + 11704, + 11705, + 11706, + 11707, + 11708, + 11709, + 11710, + 11711, + 11712, + 11713, + 11714, + 11715, + 11716, + 11717, + 11718, + 11719, + 11720, + 11721, + 11722, + 11723, + 11724, + 11725, + 11726, + 11727, + 11728, + 11729, + 11730, + 11731, + 11732, + 11733, + 11734, + 11735, + 11736, + 11737, + 11738, + 11739, + 11740, + 11741, + 11742, + 11743, + 11744, + 11745, + 11746, + 11747, + 11748, + 11749, + 11750, + 11751, + 11752, + 11753, + 11754, + 11755, + 11756, + 11757, + 11758, + 11759, + 11760, + 11761, + 11762, + 11763, + 11764, + 11765, + 11766, + 11767, + 11768, + 11769, + 11770, + 11771, + 11772, + 11773, + 11774, + 11775, + 11776, + 11777, + 11778, + 11779, + 11780, + 11781, + 11782, + 11783, + 11784, + 11785, + 11786, + 11787, + 11788, + 11789, + 11790, + 11791, + 11792, + 11793, + 11794, + 11795, + 11796, + 11797, + 11798, + 11799, + 11800, + 11801, + 11802, + 11803, + 11804, + 11805, + 11806, + 11807, + 11808, + 11809, + 11810, + 11811, + 11812, + 11813, + 11814, + 11815, + 11816, + 11817, + 11818, + 11819, + 11820, + 11821, + 11822, + 11823, + 11824, + 11825, + 11826, + 11827, + 11828, + 11829, + 11830, + 11831, + 11832, + 11833, + 11834, + 11835, + 11836, + 11837, + 11838, + 11839, + 11840, + 11841, + 11842, + 11843, + 11844, + 11845, + 11846, + 11847, + 11848, + 11849, + 11850, + 11851, + 11852, + 11853, + 11854, + 11855, + 11856, + 11857, + 11858, + 11859, + 11860, + 11861, + 11862, + 11863, + 11864, + 11865, + 11866, + 11867, + 11868, + 11869, + 11870, + 11871, + 11872, + 11873, + 11874, + 11875, + 11876, + 11877, + 11878, + 11879, + 11880, + 11881, + 11882, + 11883, + 11884, + 11885, + 11886, + 11887, + 
11888, + 11889, + 11890, + 11891, + 11892, + 11893, + 11894, + 11895, + 11896, + 11897, + 11898, + 11899, + 11900, + 11901, + 11902, + 11903, + 11904, + 11905, + 11906, + 11907, + 11908, + 11909, + 11910, + 11911, + 11912, + 11913, + 11914, + 11915, + 11916, + 11917, + 11918, + 11919, + 11920, + 11921, + 11922, + 11923, + 11924, + 11925, + 11926, + 11927, + 11928, + 11929, + 11930, + 11931, + 11932, + 11933, + 11934, + 11935, + 11936, + 11937, + 11938, + 11939, + 11940, + 11941, + 11942, + 11943, + 11944, + 11945, + 11946, + 11947, + 11948, + 11949, + 11950, + 11951, + 11952, + 11953, + 11954, + 11955, + 11956, + 11957, + 11958, + 11959, + 11960, + 11961, + 11962, + 11963, + 11964, + 11965, + 11966, + 11967, + 11968, + 11969, + 11970, + 11971, + 11972, + 11973, + 11974, + 11975, + 11976, + 11977, + 11978, + 11979, + 11980, + 11981, + 11982, + 11983, + 11984, + 11985, + 11986, + 11987, + 11988, + 11989, + 11990, + 11991, + 11992, + 11993, + 11994, + 11995, + 11996, + 11997, + 11998, + 11999, + 12000, + 12001, + 12002, + 12003, + 12004, + 12005, + 12006, + 12007, + 12008, + 12009, + 12010, + 12011, + 12012, + 12013, + 12014, + 12015, + 12016, + 12017, + 12018, + 12019, + 12020, + 12021, + 12022, + 12023, + 12024, + 12025, + 12026, + 12027, + 12028, + 12029, + 12030, + 12031, + 12032, + 12033, + 12034, + 12035, + 12036, + 12037, + 12038, + 12039, + 12040, + 12041, + 12042, + 12043, + 12044, + 12045, + 12046, + 12047, + 12048, + 12049, + 12050, + 12051, + 12052, + 12053, + 12054, + 12055, + 12056, + 12057, + 12058, + 12059, + 12060, + 12061, + 12062, + 12063, + 12064, + 12065, + 12066, + 12067, + 12068, + 12069, + 12070, + 12071, + 12072, + 12073, + 12074, + 12075, + 12076, + 12077, + 12078, + 12079, + 12080, + 12081, + 12082, + 12083, + 12084, + 12085, + 12086, + 12087, + 12088, + 12089, + 12090, + 12091, + 12092, + 12093, + 12094, + 12095, + 12096, + 12097, + 12098, + 12099, + 12100, + 12101, + 12102, + 12103, + 12104, + 12105, + 12106, + 12107, + 12108, + 12109, + 
12110, + 12111, + 12112, + 12113, + 12114, + 12115, + 12116, + 12117, + 12118, + 12119, + 12120, + 12121, + 12122, + 12123, + 12124, + 12125, + 12126, + 12127, + 12128, + 12129, + 12130, + 12131, + 12132, + 12133, + 12134, + 12135, + 12136, + 12137, + 12138, + 12139, + 12140, + 12141, + 12142, + 12143, + 12144, + 12145, + 12146, + 12147, + 12148, + 12149, + 12150, + 12151, + 12152, + 12153, + 12154, + 12155, + 12156, + 12157, + 12158, + 12159, + 12160, + 12161, + 12162, + 12163, + 12164, + 12165, + 12166, + 12167, + 12168, + 12169, + 12170, + 12171, + 12172, + 12173, + 12174, + 12175, + 12176, + 12177, + 12178, + 12179, + 12180, + 12181, + 12182, + 12183, + 12184, + 12185, + 12186, + 12187, + 12188, + 12189, + 12190, + 12191, + 12192, + 12193, + 12194, + 12195, + 12196, + 12197, + 12198, + 12199, + 12200, + 12201, + 12202, + 12203, + 12204, + 12205, + 12206, + 12207, + 12208, + 12209, + 12210, + 12211, + 12212, + 12213, + 12214, + 12215, + 12216, + 12217, + 12218, + 12219, + 12220, + 12221, + 12222, + 12223, + 12224, + 12225, + 12226, + 12227, + 12228, + 12229, + 12230, + 12231, + 12232, + 12233, + 12234, + 12235, + 12236, + 12237, + 12238, + 12239, + 12240, + 12241, + 12242, + 12243, + 12244, + 12245, + 12246, + 12247, + 12248, + 12249, + 12250, + 12251, + 12252, + 12253, + 12254, + 12255, + 12256, + 12257, + 12258, + 12259, + 12260, + 12261, + 12262, + 12263, + 12264, + 12265, + 12266, + 12267, + 12268, + 12269, + 12270, + 12271, + 12272, + 12273, + 12274, + 12275, + 12276, + 12277, + 12278, + 12279, + 12280, + 12281, + 12282, + 12283, + 12284, + 12285, + 12286, + 12287, + 12288, + 12289, + 12290, + 12291, + 12292, + 12293, + 12294, + 12295, + 12296, + 12297, + 12298, + 12299, + 12300, + 12301, + 12302, + 12303, + 12304, + 12305, + 12306, + 12307, + 12308, + 12309, + 12310, + 12311, + 12312, + 12313, + 12314, + 12315, + 12316, + 12317, + 12318, + 12319, + 12320, + 12321, + 12322, + 12323, + 12324, + 12325, + 12326, + 12327, + 12328, + 12329, + 12330, + 12331, + 
12332, + 12333, + 12334, + 12335, + 12336, + 12337, + 12338, + 12339, + 12340, + 12341, + 12342, + 12343, + 12344, + 12345, + 12346, + 12347, + 12348, + 12349, + 12350, + 12351, + 12352, + 12353, + 12354, + 12355, + 12356, + 12357, + 12358, + 12359, + 12360, + 12361, + 12362, + 12363, + 12364, + 12365, + 12366, + 12367, + 12368, + 12369, + 12370, + 12371, + 12372, + 12373, + 12374, + 12375, + 12376, + 12377, + 12378, + 12379, + 12380, + 12381, + 12382, + 12383, + 12384, + 12385, + 12386, + 12387, + 12388, + 12389, + 12390, + 12391, + 12392, + 12393, + 12394, + 12395, + 12396, + 12397, + 12398, + 12399, + 12400, + 12401, + 12402, + 12403, + 12404, + 12405, + 12406, + 12407, + 12408, + 12409, + 12410, + 12411, + 12412, + 12413, + 12414, + 12415, + 12416, + 12417, + 12418, + 12419, + 12420, + 12421, + 12422, + 12423, + 12424, + 12425, + 12426, + 12427, + 12428, + 12429, + 12430, + 12431, + 12432, + 12433, + 12434, + 12435, + 12436, + 12437, + 12438, + 12439, + 12440, + 12441, + 12442, + 12443, + 12444, + 12445, + 12446, + 12447, + 12448, + 12449, + 12450, + 12451, + 12452, + 12453, + 12454, + 12455, + 12456, + 12457, + 12458, + 12459, + 12460, + 12461, + 12462, + 12463, + 12464, + 12465, + 12466, + 12467, + 12468, + 12469, + 12470, + 12471, + 12472, + 12473, + 12474, + 12475, + 12476, + 12477, + 12478, + 12479, + 12480, + 12481, + 12482, + 12483, + 12484, + 12485, + 12486, + 12487, + 12488, + 12489, + 12490, + 12491, + 12492, + 12493, + 12494, + 12495, + 12496, + 12497, + 12498, + 12499, + 12500, + 12501, + 12502, + 12503, + 12504, + 12505, + 12506, + 12507, + 12508, + 12509, + 12510, + 12511, + 12512, + 12513, + 12514, + 12515, + 12516, + 12517, + 12518, + 12519, + 12520, + 12521, + 12522, + 12523, + 12524, + 12525, + 12526, + 12527, + 12528, + 12529, + 12530, + 12531, + 12532, + 12533, + 12534, + 12535, + 12536, + 12537, + 12538, + 12539, + 12540, + 12541, + 12542, + 12543, + 12544, + 12545, + 12546, + 12547, + 12548, + 12549, + 12550, + 12551, + 12552, + 12553, + 
12554, + 12555, + 12556, + 12557, + 12558, + 12559, + 12560, + 12561, + 12562, + 12563, + 12564, + 12565, + 12566, + 12567, + 12568, + 12569, + 12570, + 12571, + 12572, + 12573, + 12574, + 12575, + 12576, + 12577, + 12578, + 12579, + 12580, + 12581, + 12582, + 12583, + 12584, + 12585, + 12586, + 12587, + 12588, + 12589, + 12590, + 12591, + 12592, + 12593, + 12594, + 12595, + 12596, + 12597, + 12598, + 12599, + 12600, + 12601, + 12602, + 12603, + 12604, + 12605, + 12606, + 12607, + 12608, + 12609, + 12610, + 12611, + 12612, + 12613, + 12614, + 12615, + 12616, + 12617, + 12618, + 12619, + 12620, + 12621, + 12622, + 12623, + 12624, + 12625, + 12626, + 12627, + 12628, + 12629, + 12630, + 12631, + 12632, + 12633, + 12634, + 12635, + 12636, + 12637, + 12638, + 12639, + 12640, + 12641, + 12642, + 12643, + 12644, + 12645, + 12646, + 12647, + 12648, + 12649, + 12650, + 12651, + 12652, + 12653, + 12654, + 12655, + 12656, + 12657, + 12658, + 12659, + 12660, + 12661, + 12662, + 12663, + 12664, + 12665, + 12666, + 12667, + 12668, + 12669, + 12670, + 12671, + 12672, + 12673, + 12674, + 12675, + 12676, + 12677, + 12678, + 12679, + 12680, + 12681, + 12682, + 12683, + 12684, + 12685, + 12686, + 12687, + 12688, + 12689, + 12690, + 12691, + 12692, + 12693, + 12694, + 12695, + 12696, + 12697, + 12698, + 12699, + 12700, + 12701, + 12702, + 12703, + 12704, + 12705, + 12706, + 12707, + 12708, + 12709, + 12710, + 12711, + 12712, + 12713, + 12714, + 12715, + 12716, + 12717, + 12718, + 12719, + 12720, + 12721, + 12722, + 12723, + 12724, + 12725, + 12726, + 12727, + 12728, + 12729, + 12730, + 12731, + 12732, + 12733, + 12734, + 12735, + 12736, + 12737, + 12738, + 12739, + 12740, + 12741, + 12742, + 12743, + 12744, + 12745, + 12746, + 12747, + 12748, + 12749, + 12750, + 12751, + 12752, + 12753, + 12754, + 12755, + 12756, + 12757, + 12758, + 12759, + 12760, + 12761, + 12762, + 12763, + 12764, + 12765, + 12766, + 12767, + 12768, + 12769, + 12770, + 12771, + 12772, + 12773, + 12774, + 12775, + 
12776, + 12777, + 12778, + 12779, + 12780, + 12781, + 12782, + 12783, + 12784, + 12785, + 12786, + 12787, + 12788, + 12789, + 12790, + 12791, + 12792, + 12793, + 12794, + 12795, + 12796, + 12797, + 12798, + 12799, + 12800, + 12801, + 12802, + 12803, + 12804, + 12805, + 12806, + 12807, + 12808, + 12809, + 12810, + 12811, + 12812, + 12813, + 12814, + 12815, + 12816, + 12817, + 12818, + 12819, + 12820, + 12821, + 12822, + 12823, + 12824, + 12825, + 12826, + 12827, + 12828, + 12829, + 12830, + 12831, + 12832, + 12833, + 12834, + 12835, + 12836, + 12837, + 12838, + 12839, + 12840, + 12841, + 12842, + 12843, + 12844, + 12845, + 12846, + 12847, + 12848, + 12849, + 12850, + 12851, + 12852, + 12853, + 12854, + 12855, + 12856, + 12857, + 12858, + 12859, + 12860, + 12861, + 12862, + 12863, + 12864, + 12865, + 12866, + 12867, + 12868, + 12869, + 12870, + 12871, + 12872, + 12873, + 12874, + 12875, + 12876, + 12877, + 12878, + 12879, + 12880, + 12881, + 12882, + 12883, + 12884, + 12885, + 12886, + 12887, + 12888, + 12889, + 12890, + 12891, + 12892, + 12893, + 12894, + 12895, + 12896, + 12897, + 12898, + 12899, + 12900, + 12901, + 12902, + 12903, + 12904, + 12905, + 12906, + 12907, + 12908, + 12909, + 12910, + 12911, + 12912, + 12913, + 12914, + 12915, + 12916, + 12917, + 12918, + 12919, + 12920, + 12921, + 12922, + 12923, + 12924, + 12925, + 12926, + 12927, + 12928, + 12929, + 12930, + 12931, + 12932, + 12933, + 12934, + 12935, + 12936, + 12937, + 12938, + 12939, + 12940, + 12941, + 12942, + 12943, + 12944, + 12945, + 12946, + 12947, + 12948, + 12949, + 12950, + 12951, + 12952, + 12953, + 12954, + 12955, + 12956, + 12957, + 12958, + 12959, + 12960, + 12961, + 12962, + 12963, + 12964, + 12965, + 12966, + 12967, + 12968, + 12969, + 12970, + 12971, + 12972, + 12973, + 12974, + 12975, + 12976, + 12977, + 12978, + 12979, + 12980, + 12981, + 12982, + 12983, + 12984, + 12985, + 12986, + 12987, + 12988, + 12989, + 12990, + 12991, + 12992, + 12993, + 12994, + 12995, + 12996, + 12997, + 
12998, + 12999, + 13000, + 13001, + 13002, + 13003, + 13004, + 13005, + 13006, + 13007, + 13008, + 13009, + 13010, + 13011, + 13012, + 13013, + 13014, + 13015, + 13016, + 13017, + 13018, + 13019, + 13020, + 13021, + 13022, + 13023, + 13024, + 13025, + 13026, + 13027, + 13028, + 13029, + 13030, + 13031, + 13032, + 13033, + 13034, + 13035, + 13036, + 13037, + 13038, + 13039, + 13040, + 13041, + 13042, + 13043, + 13044, + 13045, + 13046, + 13047, + 13048, + 13049, + 13050, + 13051, + 13052, + 13053, + 13054, + 13055, + 13056, + 13057, + 13058, + 13059, + 13060, + 13061, + 13062, + 13063, + 13064, + 13065, + 13066, + 13067, + 13068, + 13069, + 13070, + 13071, + 13072, + 13073, + 13074, + 13075, + 13076, + 13077, + 13078, + 13079, + 13080, + 13081, + 13082, + 13083, + 13084, + 13085, + 13086, + 13087, + 13088, + 13089, + 13090, + 13091, + 13092, + 13093, + 13094, + 13095, + 13096, + 13097, + 13098, + 13099, + 13100, + 13101, + 13102, + 13103, + 13104, + 13105, + 13106, + 13107, + 13108, + 13109, + 13110, + 13111, + 13112, + 13113, + 13114, + 13115, + 13116, + 13117, + 13118, + 13119, + 13120, + 13121, + 13122, + 13123, + 13124, + 13125, + 13126, + 13127, + 13128, + 13129, + 13130, + 13131, + 13132, + 13133, + 13134, + 13135, + 13136, + 13137, + 13138, + 13139, + 13140, + 13141, + 13142, + 13143, + 13144, + 13145, + 13146, + 13147, + 13148, + 13149, + 13150, + 13151, + 13152, + 13153, + 13154, + 13155, + 13156, + 13157, + 13158, + 13159, + 13160, + 13161, + 13162, + 13163, + 13164, + 13165, + 13166, + 13167, + 13168, + 13169, + 13170, + 13171, + 13172, + 13173, + 13174, + 13175, + 13176, + 13177, + 13178, + 13179, + 13180, + 13181, + 13182, + 13183, + 13184, + 13185, + 13186, + 13187, + 13188, + 13189, + 13190, + 13191, + 13192, + 13193, + 13194, + 13195, + 13196, + 13197, + 13198, + 13199, + 13200, + 13201, + 13202, + 13203, + 13204, + 13205, + 13206, + 13207, + 13208, + 13209, + 13210, + 13211, + 13212, + 13213, + 13214, + 13215, + 13216, + 13217, + 13218, + 13219, + 
13220, + 13221, + 13222, + 13223, + 13224, + 13225, + 13226, + 13227, + 13228, + 13229, + 13230, + 13231, + 13232, + 13233, + 13234, + 13235, + 13236, + 13237, + 13238, + 13239, + 13240, + 13241, + 13242, + 13243, + 13244, + 13245, + 13246, + 13247, + 13248, + 13249, + 13250, + 13251, + 13252, + 13253, + 13254, + 13255, + 13256, + 13257, + 13258, + 13259, + 13260, + 13261, + 13262, + 13263, + 13264, + 13265, + 13266, + 13267, + 13268, + 13269, + 13270, + 13271, + 13272, + 13273, + 13274, + 13275, + 13276, + 13277, + 13278, + 13279, + 13280, + 13281, + 13282, + 13283, + 13284, + 13285, + 13286, + 13287, + 13288, + 13289, + 13290, + 13291, + 13292, + 13293, + 13294, + 13295, + 13296, + 13297, + 13298, + 13299, + 13300, + 13301, + 13302, + 13303, + 13304, + 13305, + 13306, + 13307, + 13308, + 13309, + 13310, + 13311, + 13312, + 13313, + 13314, + 13315, + 13316, + 13317, + 13318, + 13319, + 13320, + 13321, + 13322, + 13323, + 13324, + 13325, + 13326, + 13327, + 13328, + 13329, + 13330, + 13331, + 13332, + 13333, + 13334, + 13335, + 13336, + 13337, + 13338, + 13339, + 13340, + 13341, + 13342, + 13343, + 13344, + 13345, + 13346, + 13347, + 13348, + 13349, + 13350, + 13351, + 13352, + 13353, + 13354, + 13355, + 13356, + 13357, + 13358, + 13359, + 13360, + 13361, + 13362, + 13363, + 13364, + 13365, + 13366, + 13367, + 13368, + 13369, + 13370, + 13371, + 13372, + 13373, + 13374, + 13375, + 13376, + 13377, + 13378, + 13379, + 13380, + 13381, + 13382, + 13383, + 13384, + 13385, + 13386, + 13387, + 13388, + 13389, + 13390, + 13391, + 13392, + 13393, + 13394, + 13395, + 13396, + 13397, + 13398, + 13399, + 13400, + 13401, + 13402, + 13403, + 13404, + 13405, + 13406, + 13407, + 13408, + 13409, + 13410, + 13411, + 13412, + 13413, + 13414, + 13415, + 13416, + 13417, + 13418, + 13419, + 13420, + 13421, + 13422, + 13423, + 13424, + 13425, + 13426, + 13427, + 13428, + 13429, + 13430, + 13431, + 13432, + 13433, + 13434, + 13435, + 13436, + 13437, + 13438, + 13439, + 13440, + 13441, + 
13442, + 13443, + 13444, + 13445, + 13446, + 13447, + 13448, + 13449, + 13450, + 13451, + 13452, + 13453, + 13454, + 13455, + 13456, + 13457, + 13458, + 13459, + 13460, + 13461, + 13462, + 13463, + 13464, + 13465, + 13466, + 13467, + 13468, + 13469, + 13470, + 13471, + 13472, + 13473, + 13474, + 13475, + 13476, + 13477, + 13478, + 13479, + 13480, + 13481, + 13482, + 13483, + 13484, + 13485, + 13486, + 13487, + 13488, + 13489, + 13490, + 13491, + 13492, + 13493, + 13494, + 13495, + 13496, + 13497, + 13498, + 13499, + 13500, + 13501, + 13502, + 13503, + 13504, + 13505, + 13506, + 13507, + 13508, + 13509, + 13510, + 13511, + 13512, + 13513, + 13514, + 13515, + 13516, + 13517, + 13518, + 13519, + 13520, + 13521, + 13522, + 13523, + 13524, + 13525, + 13526, + 13527, + 13528, + 13529, + 13530, + 13531, + 13532, + 13533, + 13534, + 13535, + 13536, + 13537, + 13538, + 13539, + 13540, + 13541, + 13542, + 13543, + 13544, + 13545, + 13546, + 13547, + 13548, + 13549, + 13550, + 13551, + 13552, + 13553, + 13554, + 13555, + 13556, + 13557, + 13558, + 13559, + 13560, + 13561, + 13562, + 13563, + 13564, + 13565, + 13566, + 13567, + 13568, + 13569, + 13570, + 13571, + 13572, + 13573, + 13574, + 13575, + 13576, + 13577, + 13578, + 13579, + 13580, + 13581, + 13582, + 13583, + 13584, + 13585, + 13586, + 13587, + 13588, + 13589, + 13590, + 13591, + 13592, + 13593, + 13594, + 13595, + 13596, + 13597, + 13598, + 13599, + 13600, + 13601, + 13602, + 13603, + 13604, + 13605, + 13606, + 13607, + 13608, + 13609, + 13610, + 13611, + 13612, + 13613, + 13614, + 13615, + 13616, + 13617, + 13618, + 13619, + 13620, + 13621, + 13622, + 13623, + 13624, + 13625, + 13626, + 13627, + 13628, + 13629, + 13630, + 13631, + 13632, + 13633, + 13634, + 13635, + 13636, + 13637, + 13638, + 13639, + 13640, + 13641, + 13642, + 13643, + 13644, + 13645, + 13646, + 13647, + 13648, + 13649, + 13650, + 13651, + 13652, + 13653, + 13654, + 13655, + 13656, + 13657, + 13658, + 13659, + 13660, + 13661, + 13662, + 13663, + 
13664, + 13665, + 13666, + 13667, + 13668, + 13669, + 13670, + 13671, + 13672, + 13673, + 13674, + 13675, + 13676, + 13677, + 13678, + 13679, + 13680, + 13681, + 13682, + 13683, + 13684, + 13685, + 13686, + 13687, + 13688, + 13689, + 13690, + 13691, + 13692, + 13693, + 13694, + 13695, + 13696, + 13697, + 13698, + 13699, + 13700, + 13701, + 13702, + 13703, + 13704, + 13705, + 13706, + 13707, + 13708, + 13709, + 13710, + 13711, + 13712, + 13713, + 13714, + 13715, + 13716, + 13717, + 13718, + 13719, + 13720, + 13721, + 13722, + 13723, + 13724, + 13725, + 13726, + 13727, + 13728, + 13729, + 13730, + 13731, + 13732, + 13733, + 13734, + 13735, + 13736, + 13737, + 13738, + 13739, + 13740, + 13741, + 13742, + 13743, + 13744, + 13745, + 13746, + 13747, + 13748, + 13749, + 13750, + 13751, + 13752, + 13753, + 13754, + 13755, + 13756, + 13757, + 13758, + 13759, + 13760, + 13761, + 13762, + 13763, + 13764, + 13765, + 13766, + 13767, + 13768, + 13769, + 13770, + 13771, + 13772, + 13773, + 13774, + 13775, + 13776, + 13777, + 13778, + 13779, + 13780, + 13781, + 13782, + 13783, + 13784, + 13785, + 13786, + 13787, + 13788, + 13789, + 13790, + 13791, + 13792, + 13793, + 13794, + 13795, + 13796, + 13797, + 13798, + 13799, + 13800, + 13801, + 13802, + 13803, + 13804, + 13805, + 13806, + 13807, + 13808, + 13809, + 13810, + 13811, + 13812, + 13813, + 13814, + 13815, + 13816, + 13817, + 13818, + 13819, + 13820, + 13821, + 13822, + 13823, + 13824, + 13825, + 13826, + 13827, + 13828, + 13829, + 13830, + 13831, + 13832, + 13833, + 13834, + 13835, + 13836, + 13837, + 13838, + 13839, + 13840, + 13841, + 13842, + 13843, + 13844, + 13845, + 13846, + 13847, + 13848, + 13849, + 13850, + 13851, + 13852, + 13853, + 13854, + 13855, + 13856, + 13857, + 13858, + 13859, + 13860, + 13861, + 13862, + 13863, + 13864, + 13865, + 13866, + 13867, + 13868, + 13869, + 13870, + 13871, + 13872, + 13873, + 13874, + 13875, + 13876, + 13877, + 13878, + 13879, + 13880, + 13881, + 13882, + 13883, + 13884, + 13885, + 
13886, + 13887, + 13888, + 13889, + 13890, + 13891, + 13892, + 13893, + 13894, + 13895, + 13896, + 13897, + 13898, + 13899, + 13900, + 13901, + 13902, + 13903, + 13904, + 13905, + 13906, + 13907, + 13908, + 13909, + 13910, + 13911, + 13912, + 13913, + 13914, + 13915, + 13916, + 13917, + 13918, + 13919, + 13920, + 13921, + 13922, + 13923, + 13924, + 13925, + 13926, + 13927, + 13928, + 13929, + 13930, + 13931, + 13932, + 13933, + 13934, + 13935, + 13936, + 13937, + 13938, + 13939, + 13940, + 13941, + 13942, + 13943, + 13944, + 13945, + 13946, + 13947, + 13948, + 13949, + 13950, + 13951, + 13952, + 13953, + 13954, + 13955, + 13956, + 13957, + 13958, + 13959, + 13960, + 13961, + 13962, + 13963, + 13964, + 13965, + 13966, + 13967, + 13968, + 13969, + 13970, + 13971, + 13972, + 13973, + 13974, + 13975, + 13976, + 13977, + 13978, + 13979, + 13980, + 13981, + 13982, + 13983, + 13984, + 13985, + 13986, + 13987, + 13988, + 13989, + 13990, + 13991, + 13992, + 13993, + 13994, + 13995, + 13996, + 13997, + 13998, + 13999, + 14000, + 14001, + 14002, + 14003, + 14004, + 14005, + 14006, + 14007, + 14008, + 14009, + 14010, + 14011, + 14012, + 14013, + 14014, + 14015, + 14016, + 14017, + 14018, + 14019, + 14020, + 14021, + 14022, + 14023, + 14024, + 14025, + 14026, + 14027, + 14028, + 14029, + 14030, + 14031, + 14032, + 14033, + 14034, + 14035, + 14036, + 14037, + 14038, + 14039, + 14040, + 14041, + 14042, + 14043, + 14044, + 14045, + 14046, + 14047, + 14048, + 14049, + 14050, + 14051, + 14052, + 14053, + 14054, + 14055, + 14056, + 14057, + 14058, + 14059, + 14060, + 14061, + 14062, + 14063, + 14064, + 14065, + 14066, + 14067, + 14068, + 14069, + 14070, + 14071, + 14072, + 14073, + 14074, + 14075, + 14076, + 14077, + 14078, + 14079, + 14080, + 14081, + 14082, + 14083, + 14084, + 14085, + 14086, + 14087, + 14088, + 14089, + 14090, + 14091, + 14092, + 14093, + 14094, + 14095, + 14096, + 14097, + 14098, + 14099, + 14100, + 14101, + 14102, + 14103, + 14104, + 14105, + 14106, + 14107, + 
14108, + 14109, + 14110, + 14111, + 14112, + 14113, + 14114, + 14115, + 14116, + 14117, + 14118, + 14119, + 14120, + 14121, + 14122, + 14123, + 14124, + 14125, + 14126, + 14127, + 14128, + 14129, + 14130, + 14131, + 14132, + 14133, + 14134, + 14135, + 14136, + 14137, + 14138, + 14139, + 14140, + 14141, + 14142, + 14143, + 14144, + 14145, + 14146, + 14147, + 14148, + 14149, + 14150, + 14151, + 14152, + 14153, + 14154, + 14155, + 14156, + 14157, + 14158, + 14159, + 14160, + 14161, + 14162, + 14163, + 14164, + 14165, + 14166, + 14167, + 14168, + 14169, + 14170, + 14171, + 14172, + 14173, + 14174, + 14175, + 14176, + 14177, + 14178, + 14179, + 14180, + 14181, + 14182, + 14183, + 14184, + 14185, + 14186, + 14187, + 14188, + 14189, + 14190, + 14191, + 14192, + 14193, + 14194, + 14195, + 14196, + 14197, + 14198, + 14199, + 14200, + 14201, + 14202, + 14203, + 14204, + 14205, + 14206, + 14207, + 14208, + 14209, + 14210, + 14211, + 14212, + 14213, + 14214, + 14215, + 14216, + 14217, + 14218, + 14219, + 14220, + 14221, + 14222, + 14223, + 14224, + 14225, + 14226, + 14227, + 14228, + 14229, + 14230, + 14231, + 14232, + 14233, + 14234, + 14235, + 14236, + 14237, + 14238, + 14239, + 14240, + 14241, + 14242, + 14243, + 14244, + 14245, + 14246, + 14247, + 14248, + 14249, + 14250, + 14251, + 14252, + 14253, + 14254, + 14255, + 14256, + 14257, + 14258, + 14259, + 14260, + 14261, + 14262, + 14263, + 14264, + 14265, + 14266, + 14267, + 14268, + 14269, + 14270, + 14271, + 14272, + 14273, + 14274, + 14275, + 14276, + 14277, + 14278, + 14279, + 14280, + 14281, + 14282, + 14283, + 14284, + 14285, + 14286, + 14287, + 14288, + 14289, + 14290, + 14291, + 14292, + 14293, + 14294, + 14295, + 14296, + 14297, + 14298, + 14299, + 14300, + 14301, + 14302, + 14303, + 14304, + 14305, + 14306, + 14307, + 14308, + 14309, + 14310, + 14311, + 14312, + 14313, + 14314, + 14315, + 14316, + 14317, + 14318, + 14319, + 14320, + 14321, + 14322, + 14323, + 14324, + 14325, + 14326, + 14327, + 14328, + 14329, + 
14330, + 14331, + 14332, + 14333, + 14334, + 14335, + 14336, + 14337, + 14338, + 14339, + 14340, + 14341, + 14342, + 14343, + 14344, + 14345, + 14346, + 14347, + 14348, + 14349, + 14350, + 14351, + 14352, + 14353, + 14354, + 14355, + 14356, + 14357, + 14358, + 14359, + 14360, + 14361, + 14362, + 14363, + 14364, + 14365, + 14366, + 14367, + 14368, + 14369, + 14370, + 14371, + 14372, + 14373, + 14374, + 14375, + 14376, + 14377, + 14378, + 14379, + 14380, + 14381, + 14382, + 14383, + 14384, + 14385, + 14386, + 14387, + 14388, + 14389, + 14390, + 14391, + 14392, + 14393, + 14394, + 14395, + 14396, + 14397, + 14398, + 14399, + 14400, + 14401, + 14402, + 14403, + 14404, + 14405, + 14406, + 14407, + 14408, + 14409, + 14410, + 14411, + 14412, + 14413, + 14414, + 14415, + 14416, + 14417, + 14418, + 14419, + 14420, + 14421, + 14422, + 14423, + 14424, + 14425, + 14426, + 14427, + 14428, + 14429, + 14430, + 14431, + 14432, + 14433, + 14434, + 14435, + 14436, + 14437, + 14438, + 14439, + 14440, + 14441, + 14442, + 14443, + 14444, + 14445, + 14446, + 14447, + 14448, + 14449, + 14450, + 14451, + 14452, + 14453, + 14454, + 14455, + 14456, + 14457, + 14458, + 14459, + 14460, + 14461, + 14462, + 14463, + 14464, + 14465, + 14466, + 14467, + 14468, + 14469, + 14470, + 14471, + 14472, + 14473, + 14474, + 14475, + 14476, + 14477, + 14478, + 14479, + 14480, + 14481, + 14482, + 14483, + 14484, + 14485, + 14486, + 14487, + 14488, + 14489, + 14490, + 14491, + 14492, + 14493, + 14494, + 14495, + 14496, + 14497, + 14498, + 14499, + 14500, + 14501, + 14502, + 14503, + 14504, + 14505, + 14506, + 14507, + 14508, + 14509, + 14510, + 14511, + 14512, + 14513, + 14514, + 14515, + 14516, + 14517, + 14518, + 14519, + 14520, + 14521, + 14522, + 14523, + 14524, + 14525, + 14526, + 14527, + 14528, + 14529, + 14530, + 14531, + 14532, + 14533, + 14534, + 14535, + 14536, + 14537, + 14538, + 14539, + 14540, + 14541, + 14542, + 14543, + 14544, + 14545, + 14546, + 14547, + 14548, + 14549, + 14550, + 14551, + 
14552, + 14553, + 14554, + 14555, + 14556, + 14557, + 14558, + 14559, + 14560, + 14561, + 14562, + 14563, + 14564, + 14565, + 14566, + 14567, + 14568, + 14569, + 14570, + 14571, + 14572, + 14573, + 14574, + 14575, + 14576, + 14577, + 14578, + 14579, + 14580, + 14581, + 14582, + 14583, + 14584, + 14585, + 14586, + 14587, + 14588, + 14589, + 14590, + 14591, + 14592, + 14593, + 14594, + 14595, + 14596, + 14597, + 14598, + 14599, + 14600, + 14601, + 14602, + 14603, + 14604, + 14605, + 14606, + 14607, + 14608, + 14609, + 14610, + 14611, + 14612, + 14613, + 14614, + 14615, + 14616, + 14617, + 14618, + 14619, + 14620, + 14621, + 14622, + 14623, + 14624, + 14625, + 14626, + 14627, + 14628, + 14629, + 14630, + 14631, + 14632, + 14633, + 14634, + 14635, + 14636, + 14637, + 14638, + 14639, + 14640, + 14641, + 14642, + 14643, + 14644, + 14645, + 14646, + 14647, + 14648, + 14649, + 14650, + 14651, + 14652, + 14653, + 14654, + 14655, + 14656, + 14657, + 14658, + 14659, + 14660, + 14661, + 14662, + 14663, + 14664, + 14665, + 14666, + 14667, + 14668, + 14669, + 14670, + 14671, + 14672, + 14673, + 14674, + 14675, + 14676, + 14677, + 14678, + 14679, + 14680, + 14681, + 14682, + 14683, + 14684, + 14685, + 14686, + 14687, + 14688, + 14689, + 14690, + 14691, + 14692, + 14693, + 14694, + 14695, + 14696, + 14697, + 14698, + 14699, + 14700, + 14701, + 14702, + 14703, + 14704, + 14705, + 14706, + 14707, + 14708, + 14709, + 14710, + 14711, + 14712, + 14713, + 14714, + 14715, + 14716, + 14717, + 14718, + 14719, + 14720, + 14721, + 14722, + 14723, + 14724, + 14725, + 14726, + 14727, + 14728, + 14729, + 14730, + 14731, + 14732, + 14733, + 14734, + 14735, + 14736, + 14737, + 14738, + 14739, + 14740, + 14741, + 14742, + 14743, + 14744, + 14745, + 14746, + 14747, + 14748, + 14749, + 14750, + 14751, + 14752, + 14753, + 14754, + 14755, + 14756, + 14757, + 14758, + 14759, + 14760, + 14761, + 14762, + 14763, + 14764, + 14765, + 14766, + 14767, + 14768, + 14769, + 14770, + 14771, + 14772, + 14773, + 
14774, + 14775, + 14776, + 14777, + 14778, + 14779, + 14780, + 14781, + 14782, + 14783, + 14784, + 14785, + 14786, + 14787, + 14788, + 14789, + 14790, + 14791, + 14792, + 14793, + 14794, + 14795, + 14796, + 14797, + 14798, + 14799, + 14800, + 14801, + 14802, + 14803, + 14804, + 14805, + 14806, + 14807, + 14808, + 14809, + 14810, + 14811, + 14812, + 14813, + 14814, + 14815, + 14816, + 14817, + 14818, + 14819, + 14820, + 14821, + 14822, + 14823, + 14824, + 14825, + 14826, + 14827, + 14828, + 14829, + 14830, + 14831, + 14832, + 14833, + 14834, + 14835, + 14836, + 14837, + 14838, + 14839, + 14840, + 14841, + 14842, + 14843, + 14844, + 14845, + 14846, + 14847, + 14848, + 14849, + 14850, + 14851, + 14852, + 14853, + 14854, + 14855, + 14856, + 14857, + 14858, + 14859, + 14860, + 14861, + 14862, + 14863, + 14864, + 14865, + 14866, + 14867, + 14868, + 14869, + 14870, + 14871, + 14872, + 14873, + 14874, + 14875, + 14876, + 14877, + 14878, + 14879, + 14880, + 14881, + 14882, + 14883, + 14884, + 14885, + 14886, + 14887, + 14888, + 14889, + 14890, + 14891, + 14892, + 14893, + 14894, + 14895, + 14896, + 14897, + 14898, + 14899, + 14900, + 14901, + 14902, + 14903, + 14904, + 14905, + 14906, + 14907, + 14908, + 14909, + 14910, + 14911, + 14912, + 14913, + 14914, + 14915, + 14916, + 14917, + 14918, + 14919, + 14920, + 14921, + 14922, + 14923, + 14924, + 14925, + 14926, + 14927, + 14928, + 14929, + 14930, + 14931, + 14932, + 14933, + 14934, + 14935, + 14936, + 14937, + 14938, + 14939, + 14940, + 14941, + 14942, + 14943, + 14944, + 14945, + 14946, + 14947, + 14948, + 14949, + 14950, + 14951, + 14952, + 14953, + 14954, + 14955, + 14956, + 14957, + 14958, + 14959, + 14960, + 14961, + 14962, + 14963, + 14964, + 14965, + 14966, + 14967, + 14968, + 14969, + 14970, + 14971, + 14972, + 14973, + 14974, + 14975, + 14976, + 14977, + 14978, + 14979, + 14980, + 14981, + 14982, + 14983, + 14984, + 14985, + 14986, + 14987, + 14988, + 14989, + 14990, + 14991, + 14992, + 14993, + 14994, + 14995, + 
14996, + 14997, + 14998, + 14999, + 15000, + 15001, + 15002, + 15003, + 15004, + 15005, + 15006, + 15007, + 15008, + 15009, + 15010, + 15011, + 15012, + 15013, + 15014, + 15015, + 15016, + 15017, + 15018, + 15019, + 15020, + 15021, + 15022, + 15023, + 15024, + 15025, + 15026, + 15027, + 15028, + 15029, + 15030, + 15031, + 15032, + 15033, + 15034, + 15035, + 15036, + 15037, + 15038, + 15039, + 15040, + 15041, + 15042, + 15043, + 15044, + 15045, + 15046, + 15047, + 15048, + 15049, + 15050, + 15051, + 15052, + 15053, + 15054, + 15055, + 15056, + 15057, + 15058, + 15059, + 15060, + 15061, + 15062, + 15063, + 15064, + 15065, + 15066, + 15067, + 15068, + 15069, + 15070, + 15071, + 15072, + 15073, + 15074, + 15075, + 15076, + 15077, + 15078, + 15079, + 15080, + 15081, + 15082, + 15083, + 15084, + 15085, + 15086, + 15087, + 15088, + 15089, + 15090, + 15091, + 15092, + 15093, + 15094, + 15095, + 15096, + 15097, + 15098, + 15099, + 15100, + 15101, + 15102, + 15103, + 15104, + 15105, + 15106, + 15107, + 15108, + 15109, + 15110, + 15111, + 15112, + 15113, + 15114, + 15115, + 15116, + 15117, + 15118, + 15119, + 15120, + 15121, + 15122, + 15123, + 15124, + 15125, + 15126, + 15127, + 15128, + 15129, + 15130, + 15131, + 15132, + 15133, + 15134, + 15135, + 15136, + 15137, + 15138, + 15139, + 15140, + 15141, + 15142, + 15143, + 15144, + 15145, + 15146, + 15147, + 15148, + 15149, + 15150, + 15151, + 15152, + 15153, + 15154, + 15155, + 15156, + 15157, + 15158, + 15159, + 15160, + 15161, + 15162, + 15163, + 15164, + 15165, + 15166, + 15167, + 15168, + 15169, + 15170, + 15171, + 15172, + 15173, + 15174, + 15175, + 15176, + 15177, + 15178, + 15179, + 15180, + 15181, + 15182, + 15183, + 15184, + 15185, + 15186, + 15187, + 15188, + 15189, + 15190, + 15191, + 15192, + 15193, + 15194, + 15195, + 15196, + 15197, + 15198, + 15199, + 15200, + 15201, + 15202, + 15203, + 15204, + 15205, + 15206, + 15207, + 15208, + 15209, + 15210, + 15211, + 15212, + 15213, + 15214, + 15215, + 15216, + 15217, + 
15218, + 15219, + 15220, + 15221, + 15222, + 15223, + 15224, + 15225, + 15226, + 15227, + 15228, + 15229, + 15230, + 15231, + 15232, + 15233, + 15234, + 15235, + 15236, + 15237, + 15238, + 15239, + 15240, + 15241, + 15242, + 15243, + 15244, + 15245, + 15246, + 15247, + 15248, + 15249, + 15250, + 15251, + 15252, + 15253, + 15254, + 15255, + 15256, + 15257, + 15258, + 15259, + 15260, + 15261, + 15262, + 15263, + 15264, + 15265, + 15266, + 15267, + 15268, + 15269, + 15270, + 15271, + 15272, + 15273, + 15274, + 15275, + 15276, + 15277, + 15278, + 15279, + 15280, + 15281, + 15282, + 15283, + 15284, + 15285, + 15286, + 15287, + 15288, + 15289, + 15290, + 15291, + 15292, + 15293, + 15294, + 15295, + 15296, + 15297, + 15298, + 15299, + 15300, + 15301, + 15302, + 15303, + 15304, + 15305, + 15306, + 15307, + 15308, + 15309, + 15310, + 15311, + 15312, + 15313, + 15314, + 15315, + 15316, + 15317, + 15318, + 15319, + 15320, + 15321, + 15322, + 15323, + 15324, + 15325, + 15326, + 15327, + 15328, + 15329, + 15330, + 15331, + 15332, + 15333, + 15334, + 15335, + 15336, + 15337, + 15338, + 15339, + 15340, + 15341, + 15342, + 15343, + 15344, + 15345, + 15346, + 15347, + 15348, + 15349, + 15350, + 15351, + 15352, + 15353, + 15354, + 15355, + 15356, + 15357, + 15358, + 15359, + 15360, + 15361, + 15362, + 15363, + 15364, + 15365, + 15366, + 15367, + 15368, + 15369, + 15370, + 15371, + 15372, + 15373, + 15374, + 15375, + 15376, + 15377, + 15378, + 15379, + 15380, + 15381, + 15382, + 15383, + 15384, + 15385, + 15386, + 15387, + 15388, + 15389, + 15390, + 15391, + 15392, + 15393, + 15394, + 15395, + 15396, + 15397, + 15398, + 15399, + 15400, + 15401, + 15402, + 15403, + 15404, + 15405, + 15406, + 15407, + 15408, + 15409, + 15410, + 15411, + 15412, + 15413, + 15414, + 15415, + 15416, + 15417, + 15418, + 15419, + 15420, + 15421, + 15422, + 15423, + 15424, + 15425, + 15426, + 15427, + 15428, + 15429, + 15430, + 15431, + 15432, + 15433, + 15434, + 15435, + 15436, + 15437, + 15438, + 15439, + 
15440, + 15441, + 15442, + 15443, + 15444, + 15445, + 15446, + 15447, + 15448, + 15449, + 15450, + 15451, + 15452, + 15453, + 15454, + 15455, + 15456, + 15457, + 15458, + 15459, + 15460, + 15461, + 15462, + 15463, + 15464, + 15465, + 15466, + 15467, + 15468, + 15469, + 15470, + 15471, + 15472, + 15473, + 15474, + 15475, + 15476, + 15477, + 15478, + 15479, + 15480, + 15481, + 15482, + 15483, + 15484, + 15485, + 15486, + 15487, + 15488, + 15489, + 15490, + 15491, + 15492, + 15493, + 15494, + 15495, + 15496, + 15497, + 15498, + 15499, + 15500, + 15501, + 15502, + 15503, + 15504, + 15505, + 15506, + 15507, + 15508, + 15509, + 15510, + 15511, + 15512, + 15513, + 15514, + 15515, + 15516, + 15517, + 15518, + 15519, + 15520, + 15521, + 15522, + 15523, + 15524, + 15525, + 15526, + 15527, + 15528, + 15529, + 15530, + 15531, + 15532, + 15533, + 15534, + 15535, + 15536, + 15537, + 15538, + 15539, + 15540, + 15541, + 15542, + 15543, + 15544, + 15545, + 15546, + 15547, + 15548, + 15549, + 15550, + 15551, + 15552, + 15553, + 15554, + 15555, + 15556, + 15557, + 15558, + 15559, + 15560, + 15561, + 15562, + 15563, + 15564, + 15565, + 15566, + 15567, + 15568, + 15569, + 15570, + 15571, + 15572, + 15573, + 15574, + 15575, + 15576, + 15577, + 15578, + 15579, + 15580, + 15581, + 15582, + 15583, + 15584, + 15585, + 15586, + 15587, + 15588, + 15589, + 15590, + 15591, + 15592, + 15593, + 15594, + 15595, + 15596, + 15597, + 15598, + 15599, + 15600, + 15601, + 15602, + 15603, + 15604, + 15605, + 15606, + 15607, + 15608, + 15609, + 15610, + 15611, + 15612, + 15613, + 15614, + 15615, + 15616, + 15617, + 15618, + 15619, + 15620, + 15621, + 15622, + 15623, + 15624, + 15625, + 15626, + 15627, + 15628, + 15629, + 15630, + 15631, + 15632, + 15633, + 15634, + 15635, + 15636, + 15637, + 15638, + 15639, + 15640, + 15641, + 15642, + 15643, + 15644, + 15645, + 15646, + 15647, + 15648, + 15649, + 15650, + 15651, + 15652, + 15653, + 15654, + 15655, + 15656, + 15657, + 15658, + 15659, + 15660, + 15661, + 
15662, + 15663, + 15664, + 15665, + 15666, + 15667, + 15668, + 15669, + 15670, + 15671, + 15672, + 15673, + 15674, + 15675, + 15676, + 15677, + 15678, + 15679, + 15680, + 15681, + 15682, + 15683, + 15684, + 15685, + 15686, + 15687, + 15688, + 15689, + 15690, + 15691, + 15692, + 15693, + 15694, + 15695, + 15696, + 15697, + 15698, + 15699, + 15700, + 15701, + 15702, + 15703, + 15704, + 15705, + 15706, + 15707, + 15708, + 15709, + 15710, + 15711, + 15712, + 15713, + 15714, + 15715, + 15716, + 15717, + 15718, + 15719, + 15720, + 15721, + 15722, + 15723, + 15724, + 15725, + 15726, + 15727, + 15728, + 15729, + 15730, + 15731, + 15732, + 15733, + 15734, + 15735, + 15736, + 15737, + 15738, + 15739, + 15740, + 15741, + 15742, + 15743, + 15744, + 15745, + 15746, + 15747, + 15748, + 15749, + 15750, + 15751, + 15752, + 15753, + 15754, + 15755, + 15756, + 15757, + 15758, + 15759, + 15760, + 15761, + 15762, + 15763, + 15764, + 15765, + 15766, + 15767, + 15768, + 15769, + 15770, + 15771, + 15772, + 15773, + 15774, + 15775, + 15776, + 15777, + 15778, + 15779, + 15780, + 15781, + 15782, + 15783, + 15784, + 15785, + 15786, + 15787, + 15788, + 15789, + 15790, + 15791, + 15792, + 15793, + 15794, + 15795, + 15796, + 15797, + 15798, + 15799, + 15800, + 15801, + 15802, + 15803, + 15804, + 15805, + 15806, + 15807, + 15808, + 15809, + 15810, + 15811, + 15812, + 15813, + 15814, + 15815, + 15816, + 15817, + 15818, + 15819, + 15820, + 15821, + 15822, + 15823, + 15824, + 15825, + 15826, + 15827, + 15828, + 15829, + 15830, + 15831, + 15832, + 15833, + 15834, + 15835, + 15836, + 15837, + 15838, + 15839, + 15840, + 15841, + 15842, + 15843, + 15844, + 15845, + 15846, + 15847, + 15848, + 15849, + 15850, + 15851, + 15852, + 15853, + 15854, + 15855, + 15856, + 15857, + 15858, + 15859, + 15860, + 15861, + 15862, + 15863, + 15864, + 15865, + 15866, + 15867, + 15868, + 15869, + 15870, + 15871, + 15872, + 15873, + 15874, + 15875, + 15876, + 15877, + 15878, + 15879, + 15880, + 15881, + 15882, + 15883, + 
15884, + 15885, + 15886, + 15887, + 15888, + 15889, + 15890, + 15891, + 15892, + 15893, + 15894, + 15895, + 15896, + 15897, + 15898, + 15899, + 15900, + 15901, + 15902, + 15903, + 15904, + 15905, + 15906, + 15907, + 15908, + 15909, + 15910, + 15911, + 15912, + 15913, + 15914, + 15915, + 15916, + 15917, + 15918, + 15919, + 15920, + 15921, + 15922, + 15923, + 15924, + 15925, + 15926, + 15927, + 15928, + 15929, + 15930, + 15931, + 15932, + 15933, + 15934, + 15935, + 15936, + 15937, + 15938, + 15939, + 15940, + 15941, + 15942, + 15943, + 15944, + 15945, + 15946, + 15947, + 15948, + 15949, + 15950, + 15951, + 15952, + 15953, + 15954, + 15955, + 15956, + 15957, + 15958, + 15959, + 15960, + 15961, + 15962, + 15963, + 15964, + 15965, + 15966, + 15967, + 15968, + 15969, + 15970, + 15971, + 15972, + 15973, + 15974, + 15975, + 15976, + 15977, + 15978, + 15979, + 15980, + 15981, + 15982, + 15983, + 15984, + 15985, + 15986, + 15987, + 15988, + 15989, + 15990, + 15991, + 15992, + 15993, + 15994, + 15995, + 15996, + 15997, + 15998, + 15999, + 16000, + 16001, + 16002, + 16003, + 16004, + 16005, + 16006, + 16007, + 16008, + 16009, + 16010, + 16011, + 16012, + 16013, + 16014, + 16015, + 16016, + 16017, + 16018, + 16019, + 16020, + 16021, + 16022, + 16023, + 16024, + 16025, + 16026, + 16027, + 16028, + 16029, + 16030, + 16031, + 16032, + 16033, + 16034, + 16035, + 16036, + 16037, + 16038, + 16039, + 16040, + 16041, + 16042, + 16043, + 16044, + 16045, + 16046, + 16047, + 16048, + 16049, + 16050, + 16051, + 16052, + 16053, + 16054, + 16055, + 16056, + 16057, + 16058, + 16059, + 16060, + 16061, + 16062, + 16063, + 16064, + 16065, + 16066, + 16067, + 16068, + 16069, + 16070, + 16071, + 16072, + 16073, + 16074, + 16075, + 16076, + 16077, + 16078, + 16079, + 16080, + 16081, + 16082, + 16083, + 16084, + 16085, + 16086, + 16087, + 16088, + 16089, + 16090, + 16091, + 16092, + 16093, + 16094, + 16095, + 16096, + 16097, + 16098, + 16099, + 16100, + 16101, + 16102, + 16103, + 16104, + 16105, + 
16106, + 16107, + 16108, + 16109, + 16110, + 16111, + 16112, + 16113, + 16114, + 16115, + 16116, + 16117, + 16118, + 16119, + 16120, + 16121, + 16122, + 16123, + 16124, + 16125, + 16126, + 16127, + 16128, + 16129, + 16130, + 16131, + 16132, + 16133, + 16134, + 16135, + 16136, + 16137, + 16138, + 16139, + 16140, + 16141, + 16142, + 16143, + 16144, + 16145, + 16146, + 16147, + 16148, + 16149, + 16150, + 16151, + 16152, + 16153, + 16154, + 16155, + 16156, + 16157, + 16158, + 16159, + 16160, + 16161, + 16162, + 16163, + 16164, + 16165, + 16166, + 16167, + 16168, + 16169, + 16170, + 16171, + 16172, + 16173, + 16174, + 16175, + 16176, + 16177, + 16178, + 16179, + 16180, + 16181, + 16182, + 16183, + 16184, + 16185, + 16186, + 16187, + 16188, + 16189, + 16190, + 16191, + 16192, + 16193, + 16194, + 16195, + 16196, + 16197, + 16198, + 16199, + 16200, + 16201, + 16202, + 16203, + 16204, + 16205, + 16206, + 16207, + 16208, + 16209, + 16210, + 16211, + 16212, + 16213, + 16214, + 16215, + 16216, + 16217, + 16218, + 16219, + 16220, + 16221, + 16222, + 16223, + 16224, + 16225, + 16226, + 16227, + 16228, + 16229, + 16230, + 16231, + 16232, + 16233, + 16234, + 16235, + 16236, + 16237, + 16238, + 16239, + 16240, + 16241, + 16242, + 16243, + 16244, + 16245, + 16246, + 16247, + 16248, + 16249, + 16250, + 16251, + 16252, + 16253, + 16254, + 16255, + 16256, + 16257, + 16258, + 16259, + 16260, + 16261, + 16262, + 16263, + 16264, + 16265, + 16266, + 16267, + 16268, + 16269, + 16270, + 16271, + 16272, + 16273, + 16274, + 16275, + 16276, + 16277, + 16278, + 16279, + 16280, + 16281, + 16282, + 16283, + 16284, + 16285, + 16286, + 16287, + 16288, + 16289, + 16290, + 16291, + 16292, + 16293, + 16294, + 16295, + 16296, + 16297, + 16298, + 16299, + 16300, + 16301, + 16302, + 16303, + 16304, + 16305, + 16306, + 16307, + 16308, + 16309, + 16310, + 16311, + 16312, + 16313, + 16314, + 16315, + 16316, + 16317, + 16318, + 16319, + 16320, + 16321, + 16322, + 16323, + 16324, + 16325, + 16326, + 16327, + 
16328, + 16329, + 16330, + 16331, + 16332, + 16333, + 16334, + 16335, + 16336, + 16337, + 16338, + 16339, + 16340, + 16341, + 16342, + 16343, + 16344, + 16345, + 16346, + 16347, + 16348, + 16349, + 16350, + 16351, + 16352, + 16353, + 16354, + 16355, + 16356, + 16357, + 16358, + 16359, + 16360, + 16361, + 16362, + 16363, + 16364, + 16365, + 16366, + 16367, + 16368, + 16369, + 16370, + 16371, + 16372, + 16373, + 16374, + 16375, + 16376, + 16377, + 16378, + 16379, + 16380, + 16381, + 16382, + 16383, + 16384, + 16385, + 16386, + 16387, + 16388, + 16389, + 16390, + 16391, + 16392, + 16393, + 16394, + 16395, + 16396, + 16397, + 16398, + 16399, + 16400, + 16401, + 16402, + 16403, + 16404, + 16405, + 16406, + 16407, + 16408, + 16409, + 16410, + 16411, + 16412, + 16413, + 16414, + 16415, + 16416, + 16417, + 16418, + 16419, + 16420, + 16421, + 16422, + 16423, + 16424, + 16425, + 16426, + 16427, + 16428, + 16429, + 16430, + 16431, + 16432, + 16433, + 16434, + 16435, + 16436, + 16437, + 16438, + 16439, + 16440, + 16441, + 16442, + 16443, + 16444, + 16445, + 16446, + 16447, + 16448, + 16449, + 16450, + 16451, + 16452, + 16453, + 16454, + 16455, + 16456, + 16457, + 16458, + 16459, + 16460, + 16461, + 16462, + 16463, + 16464, + 16465, + 16466, + 16467, + 16468, + 16469, + 16470, + 16471, + 16472, + 16473, + 16474, + 16475, + 16476, + 16477, + 16478, + 16479, + 16480, + 16481, + 16482, + 16483, + 16484, + 16485, + 16486, + 16487, + 16488, + 16489, + 16490, + 16491, + 16492, + 16493, + 16494, + 16495, + 16496, + 16497, + 16498, + 16499, + 16500, + 16501, + 16502, + 16503, + 16504, + 16505, + 16506, + 16507, + 16508, + 16509, + 16510, + 16511, + 16512, + 16513, + 16514, + 16515, + 16516, + 16517, + 16518, + 16519, + 16520, + 16521, + 16522, + 16523, + 16524, + 16525, + 16526, + 16527, + 16528, + 16529, + 16530, + 16531, + 16532, + 16533, + 16534, + 16535, + 16536, + 16537, + 16538, + 16539, + 16540, + 16541, + 16542, + 16543, + 16544, + 16545, + 16546, + 16547, + 16548, + 16549, + 
16550, + 16551, + 16552, + 16553, + 16554, + 16555, + 16556, + 16557, + 16558, + 16559, + 16560, + 16561, + 16562, + 16563, + 16564, + 16565, + 16566, + 16567, + 16568, + 16569, + 16570, + 16571, + 16572, + 16573, + 16574, + 16575, + 16576, + 16577, + 16578, + 16579, + 16580, + 16581, + 16582, + 16583, + 16584, + 16585, + 16586, + 16587, + 16588, + 16589, + 16590, + 16591, + 16592, + 16593, + 16594, + 16595, + 16596, + 16597, + 16598, + 16599, + 16600, + 16601, + 16602, + 16603, + 16604, + 16605, + 16606, + 16607, + 16608, + 16609, + 16610, + 16611, + 16612, + 16613, + 16614, + 16615, + 16616, + 16617, + 16618, + 16619, + 16620, + 16621, + 16622, + 16623, + 16624, + 16625, + 16626, + 16627, + 16628, + 16629, + 16630, + 16631, + 16632, + 16633, + 16634, + 16635, + 16636, + 16637, + 16638, + 16639, + 16640, + 16641, + 16642, + 16643, + 16644, + 16645, + 16646, + 16647, + 16648, + 16649, + 16650, + 16651, + 16652, + 16653, + 16654, + 16655, + 16656, + 16657, + 16658, + 16659, + 16660, + 16661, + 16662, + 16663, + 16664, + 16665, + 16666, + 16667, + 16668, + 16669, + 16670, + 16671, + 16672, + 16673, + 16674, + 16675, + 16676, + 16677, + 16678, + 16679, + 16680, + 16681, + 16682, + 16683, + 16684, + 16685, + 16686, + 16687, + 16688, + 16689, + 16690, + 16691, + 16692, + 16693, + 16694, + 16695, + 16696, + 16697, + 16698, + 16699, + 16700, + 16701, + 16702, + 16703, + 16704, + 16705, + 16706, + 16707, + 16708, + 16709, + 16710, + 16711, + 16712, + 16713, + 16714, + 16715, + 16716, + 16717, + 16718, + 16719, + 16720, + 16721, + 16722, + 16723, + 16724, + 16725, + 16726, + 16727, + 16728, + 16729, + 16730, + 16731, + 16732, + 16733, + 16734, + 16735, + 16736, + 16737, + 16738, + 16739, + 16740, + 16741, + 16742, + 16743, + 16744, + 16745, + 16746, + 16747, + 16748, + 16749, + 16750, + 16751, + 16752, + 16753, + 16754, + 16755, + 16756, + 16757, + 16758, + 16759, + 16760, + 16761, + 16762, + 16763, + 16764, + 16765, + 16766, + 16767, + 16768, + 16769, + 16770, + 16771, + 
16772, + 16773, + 16774, + 16775, + 16776, + 16777, + 16778, + 16779, + 16780, + 16781, + 16782, + 16783, + 16784, + 16785, + 16786, + 16787, + 16788, + 16789, + 16790, + 16791, + 16792, + 16793, + 16794, + 16795, + 16796, + 16797, + 16798, + 16799, + 16800, + 16801, + 16802, + 16803, + 16804, + 16805, + 16806, + 16807, + 16808, + 16809, + 16810, + 16811, + 16812, + 16813, + 16814, + 16815, + 16816, + 16817, + 16818, + 16819, + 16820, + 16821, + 16822, + 16823, + 16824, + 16825, + 16826, + 16827, + 16828, + 16829, + 16830, + 16831, + 16832, + 16833, + 16834, + 16835, + 16836, + 16837, + 16838, + 16839, + 16840, + 16841, + 16842, + 16843, + 16844, + 16845, + 16846, + 16847, + 16848, + 16849, + 16850, + 16851, + 16852, + 16853, + 16854, + 16855, + 16856, + 16857, + 16858, + 16859, + 16860, + 16861, + 16862, + 16863, + 16864, + 16865, + 16866, + 16867, + 16868, + 16869, + 16870, + 16871, + 16872, + 16873, + 16874, + 16875, + 16876, + 16877, + 16878, + 16879, + 16880, + 16881, + 16882, + 16883, + 16884, + 16885, + 16886, + 16887, + 16888, + 16889, + 16890, + 16891, + 16892, + 16893, + 16894, + 16895, + 16896, + 16897, + 16898, + 16899, + 16900, + 16901, + 16902, + 16903, + 16904, + 16905, + 16906, + 16907, + 16908, + 16909, + 16910, + 16911, + 16912, + 16913, + 16914, + 16915, + 16916, + 16917, + 16918, + 16919, + 16920, + 16921, + 16922, + 16923, + 16924, + 16925, + 16926, + 16927, + 16928, + 16929, + 16930, + 16931, + 16932, + 16933, + 16934, + 16935, + 16936, + 16937, + 16938, + 16939, + 16940, + 16941, + 16942, + 16943, + 16944, + 16945, + 16946, + 16947, + 16948, + 16949, + 16950, + 16951, + 16952, + 16953, + 16954, + 16955, + 16956, + 16957, + 16958, + 16959, + 16960, + 16961, + 16962, + 16963, + 16964, + 16965, + 16966, + 16967, + 16968, + 16969, + 16970, + 16971, + 16972, + 16973, + 16974, + 16975, + 16976, + 16977, + 16978, + 16979, + 16980, + 16981, + 16982, + 16983, + 16984, + 16985, + 16986, + 16987, + 16988, + 16989, + 16990, + 16991, + 16992, + 16993, + 
16994, + 16995, + 16996, + 16997, + 16998, + 16999, + 17000, + 17001, + 17002, + 17003, + 17004, + 17005, + 17006, + 17007, + 17008, + 17009, + 17010, + 17011, + 17012, + 17013, + 17014, + 17015, + 17016, + 17017, + 17018, + 17019, + 17020, + 17021, + 17022, + 17023, + 17024, + 17025, + 17026, + 17027, + 17028, + 17029, + 17030, + 17031, + 17032, + 17033, + 17034, + 17035, + 17036, + 17037, + 17038, + 17039, + 17040, + 17041, + 17042, + 17043, + 17044, + 17045, + 17046, + 17047, + 17048, + 17049, + 17050, + 17051, + 17052, + 17053, + 17054, + 17055, + 17056, + 17057, + 17058, + 17059, + 17060, + 17061, + 17062, + 17063, + 17064, + 17065, + 17066, + 17067, + 17068, + 17069, + 17070, + 17071, + 17072, + 17073, + 17074, + 17075, + 17076, + 17077, + 17078, + 17079, + 17080, + 17081, + 17082, + 17083, + 17084, + 17085, + 17086, + 17087, + 17088, + 17089, + 17090, + 17091, + 17092, + 17093, + 17094, + 17095, + 17096, + 17097, + 17098, + 17099, + 17100, + 17101, + 17102, + 17103, + 17104, + 17105, + 17106, + 17107, + 17108, + 17109, + 17110, + 17111, + 17112, + 17113, + 17114, + 17115, + 17116, + 17117, + 17118, + 17119, + 17120, + 17121, + 17122, + 17123, + 17124, + 17125, + 17126, + 17127, + 17128, + 17129, + 17130, + 17131, + 17132, + 17133, + 17134, + 17135, + 17136, + 17137, + 17138, + 17139, + 17140, + 17141, + 17142, + 17143, + 17144, + 17145, + 17146, + 17147, + 17148, + 17149, + 17150, + 17151, + 17152, + 17153, + 17154, + 17155, + 17156, + 17157, + 17158, + 17159, + 17160, + 17161, + 17162, + 17163, + 17164, + 17165, + 17166, + 17167, + 17168, + 17169, + 17170, + 17171, + 17172, + 17173, + 17174, + 17175, + 17176, + 17177, + 17178, + 17179, + 17180, + 17181, + 17182, + 17183, + 17184, + 17185, + 17186, + 17187, + 17188, + 17189, + 17190, + 17191, + 17192, + 17193, + 17194, + 17195, + 17196, + 17197, + 17198, + 17199, + 17200, + 17201, + 17202, + 17203, + 17204, + 17205, + 17206, + 17207, + 17208, + 17209, + 17210, + 17211, + 17212, + 17213, + 17214, + 17215, + 
17216, + 17217, + 17218, + 17219, + 17220, + 17221, + 17222, + 17223, + 17224, + 17225, + 17226, + 17227, + 17228, + 17229, + 17230, + 17231, + 17232, + 17233, + 17234, + 17235, + 17236, + 17237, + 17238, + 17239, + 17240, + 17241, + 17242, + 17243, + 17244, + 17245, + 17246, + 17247, + 17248, + 17249, + 17250, + 17251, + 17252, + 17253, + 17254, + 17255, + 17256, + 17257, + 17258, + 17259, + 17260, + 17261, + 17262, + 17263, + 17264, + 17265, + 17266, + 17267, + 17268, + 17269, + 17270, + 17271, + 17272, + 17273, + 17274, + 17275, + 17276, + 17277, + 17278, + 17279, + 17280, + 17281, + 17282, + 17283, + 17284, + 17285, + 17286, + 17287, + 17288, + 17289, + 17290, + 17291, + 17292, + 17293, + 17294, + 17295, + 17296, + 17297, + 17298, + 17299, + 17300, + 17301, + 17302, + 17303, + 17304, + 17305, + 17306, + 17307, + 17308, + 17309, + 17310, + 17311, + 17312, + 17313, + 17314, + 17315, + 17316, + 17317, + 17318, + 17319, + 17320, + 17321, + 17322, + 17323, + 17324, + 17325, + 17326, + 17327, + 17328, + 17329, + 17330, + 17331, + 17332, + 17333, + 17334, + 17335, + 17336, + 17337, + 17338, + 17339, + 17340, + 17341, + 17342, + 17343, + 17344, + 17345, + 17346, + 17347, + 17348, + 17349, + 17350, + 17351, + 17352, + 17353, + 17354, + 17355, + 17356, + 17357, + 17358, + 17359, + 17360, + 17361, + 17362, + 17363, + 17364, + 17365, + 17366, + 17367, + 17368, + 17369, + 17370, + 17371, + 17372, + 17373, + 17374, + 17375, + 17376, + 17377, + 17378, + 17379, + 17380, + 17381, + 17382, + 17383, + 17384, + 17385, + 17386, + 17387, + 17388, + 17389, + 17390, + 17391, + 17392, + 17393, + 17394, + 17395, + 17396, + 17397, + 17398, + 17399, + 17400, + 17401, + 17402, + 17403, + 17404, + 17405, + 17406, + 17407, + 17408, + 17409, + 17410, + 17411, + 17412, + 17413, + 17414, + 17415, + 17416, + 17417, + 17418, + 17419, + 17420, + 17421, + 17422, + 17423, + 17424, + 17425, + 17426, + 17427, + 17428, + 17429, + 17430, + 17431, + 17432, + 17433, + 17434, + 17435, + 17436, + 17437, + 
17438, + 17439, + 17440, + 17441, + 17442, + 17443, + 17444, + 17445, + 17446, + 17447, + 17448, + 17449, + 17450, + 17451, + 17452, + 17453, + 17454, + 17455, + 17456, + 17457, + 17458, + 17459, + 17460, + 17461, + 17462, + 17463, + 17464, + 17465, + 17466, + 17467, + 17468, + 17469, + 17470, + 17471, + 17472, + 17473, + 17474, + 17475, + 17476, + 17477, + 17478, + 17479, + 17480, + 17481, + 17482, + 17483, + 17484, + 17485, + 17486, + 17487, + 17488, + 17489, + 17490, + 17491, + 17492, + 17493, + 17494, + 17495, + 17496, + 17497, + 17498, + 17499, + 17500, + 17501, + 17502, + 17503, + 17504, + 17505, + 17506, + 17507, + 17508, + 17509, + 17510, + 17511, + 17512, + 17513, + 17514, + 17515, + 17516, + 17517, + 17518, + 17519, + 17520, + 17521, + 17522, + 17523, + 17524, + 17525, + 17526, + 17527, + 17528, + 17529, + 17530, + 17531, + 17532, + 17533, + 17534, + 17535, + 17536, + 17537, + 17538, + 17539, + 17540, + 17541, + 17542, + 17543, + 17544, + 17545, + 17546, + 17547, + 17548, + 17549, + 17550, + 17551, + 17552, + 17553, + 17554, + 17555, + 17556, + 17557, + 17558, + 17559, + 17560, + 17561, + 17562, + 17563, + 17564, + 17565, + 17566, + 17567, + 17568, + 17569, + 17570, + 17571, + 17572, + 17573, + 17574, + 17575, + 17576, + 17577, + 17578, + 17579, + 17580, + 17581, + 17582, + 17583, + 17584, + 17585, + 17586, + 17587, + 17588, + 17589, + 17590, + 17591, + 17592, + 17593, + 17594, + 17595, + 17596, + 17597, + 17598, + 17599, + 17600, + 17601, + 17602, + 17603, + 17604, + 17605, + 17606, + 17607, + 17608, + 17609, + 17610, + 17611, + 17612, + 17613, + 17614, + 17615, + 17616, + 17617, + 17618, + 17619, + 17620, + 17621, + 17622, + 17623, + 17624, + 17625, + 17626, + 17627, + 17628, + 17629, + 17630, + 17631, + 17632, + 17633, + 17634, + 17635, + 17636, + 17637, + 17638, + 17639, + 17640, + 17641, + 17642, + 17643, + 17644, + 17645, + 17646, + 17647, + 17648, + 17649, + 17650, + 17651, + 17652, + 17653, + 17654, + 17655, + 17656, + 17657, + 17658, + 17659, + 
17660, + 17661, + 17662, + 17663, + 17664, + 17665, + 17666, + 17667, + 17668, + 17669, + 17670, + 17671, + 17672, + 17673, + 17674, + 17675, + 17676, + 17677, + 17678, + 17679, + 17680, + 17681, + 17682, + 17683, + 17684, + 17685, + 17686, + 17687, + 17688, + 17689, + 17690, + 17691, + 17692, + 17693, + 17694, + 17695, + 17696, + 17697, + 17698, + 17699, + 17700, + 17701, + 17702, + 17703, + 17704, + 17705, + 17706, + 17707, + 17708, + 17709, + 17710, + 17711, + 17712, + 17713, + 17714, + 17715, + 17716, + 17717, + 17718, + 17719, + 17720, + 17721, + 17722, + 17723, + 17724, + 17725, + 17726, + 17727, + 17728, + 17729, + 17730, + 17731, + 17732, + 17733, + 17734, + 17735, + 17736, + 17737, + 17738, + 17739, + 17740, + 17741, + 17742, + 17743, + 17744, + 17745, + 17746, + 17747, + 17748, + 17749, + 17750, + 17751, + 17752, + 17753, + 17754, + 17755, + 17756, + 17757, + 17758, + 17759, + 17760, + 17761, + 17762, + 17763, + 17764, + 17765, + 17766, + 17767, + 17768, + 17769, + 17770, + 17771, + 17772, + 17773, + 17774, + 17775, + 17776, + 17777, + 17778, + 17779, + 17780, + 17781, + 17782, + 17783, + 17784, + 17785, + 17786, + 17787, + 17788, + 17789, + 17790, + 17791, + 17792, + 17793, + 17794, + 17795, + 17796, + 17797, + 17798, + 17799, + 17800, + 17801, + 17802, + 17803, + 17804, + 17805, + 17806, + 17807, + 17808, + 17809, + 17810, + 17811, + 17812, + 17813, + 17814, + 17815, + 17816, + 17817, + 17818, + 17819, + 17820, + 17821, + 17822, + 17823, + 17824, + 17825, + 17826, + 17827, + 17828, + 17829, + 17830, + 17831, + 17832, + 17833, + 17834, + 17835, + 17836, + 17837, + 17838, + 17839, + 17840, + 17841, + 17842, + 17843, + 17844, + 17845, + 17846, + 17847, + 17848, + 17849, + 17850, + 17851, + 17852, + 17853, + 17854, + 17855, + 17856, + 17857, + 17858, + 17859, + 17860, + 17861, + 17862, + 17863, + 17864, + 17865, + 17866, + 17867, + 17868, + 17869, + 17870, + 17871, + 17872, + 17873, + 17874, + 17875, + 17876, + 17877, + 17878, + 17879, + 17880, + 17881, + 
17882, + 17883, + 17884, + 17885, + 17886, + 17887, + 17888, + 17889, + 17890, + 17891, + 17892, + 17893, + 17894, + 17895, + 17896, + 17897, + 17898, + 17899, + 17900, + 17901, + 17902, + 17903, + 17904, + 17905, + 17906, + 17907, + 17908, + 17909, + 17910, + 17911, + 17912, + 17913, + 17914, + 17915, + 17916, + 17917, + 17918, + 17919, + 17920, + 17921, + 17922, + 17923, + 17924, + 17925, + 17926, + 17927, + 17928, + 17929, + 17930, + 17931, + 17932, + 17933, + 17934, + 17935, + 17936, + 17937, + 17938, + 17939, + 17940, + 17941, + 17942, + 17943, + 17944, + 17945, + 17946, + 17947, + 17948, + 17949, + 17950, + 17951, + 17952, + 17953, + 17954, + 17955, + 17956, + 17957, + 17958, + 17959, + 17960, + 17961, + 17962, + 17963, + 17964, + 17965, + 17966, + 17967, + 17968, + 17969, + 17970, + 17971, + 17972, + 17973, + 17974, + 17975, + 17976, + 17977, + 17978, + 17979, + 17980, + 17981, + 17982, + 17983, + 17984, + 17985, + 17986, + 17987, + 17988, + 17989, + 17990, + 17991, + 17992, + 17993, + 17994, + 17995, + 17996, + 17997, + 17998, + 17999, + 18000, + 18001, + 18002, + 18003, + 18004, + 18005, + 18006, + 18007, + 18008, + 18009, + 18010, + 18011, + 18012, + 18013, + 18014, + 18015, + 18016, + 18017, + 18018, + 18019, + 18020, + 18021, + 18022, + 18023, + 18024, + 18025, + 18026, + 18027, + 18028, + 18029, + 18030, + 18031, + 18032, + 18033, + 18034, + 18035, + 18036, + 18037, + 18038, + 18039, + 18040, + 18041, + 18042, + 18043, + 18044, + 18045, + 18046, + 18047, + 18048, + 18049, + 18050, + 18051, + 18052, + 18053, + 18054, + 18055, + 18056, + 18057, + 18058, + 18059, + 18060, + 18061, + 18062, + 18063, + 18064, + 18065, + 18066, + 18067, + 18068, + 18069, + 18070, + 18071, + 18072, + 18073, + 18074, + 18075, + 18076, + 18077, + 18078, + 18079, + 18080, + 18081, + 18082, + 18083, + 18084, + 18085, + 18086, + 18087, + 18088, + 18089, + 18090, + 18091, + 18092, + 18093, + 18094, + 18095, + 18096, + 18097, + 18098, + 18099, + 18100, + 18101, + 18102, + 18103, + 
18104, + 18105, + 18106, + 18107, + 18108, + 18109, + 18110, + 18111, + 18112, + 18113, + 18114, + 18115, + 18116, + 18117, + 18118, + 18119, + 18120, + 18121, + 18122, + 18123, + 18124, + 18125, + 18126, + 18127, + 18128, + 18129, + 18130, + 18131, + 18132, + 18133, + 18134, + 18135, + 18136, + 18137, + 18138, + 18139, + 18140, + 18141, + 18142, + 18143, + 18144, + 18145, + 18146, + 18147, + 18148, + 18149, + 18150, + 18151, + 18152, + 18153, + 18154, + 18155, + 18156, + 18157, + 18158, + 18159, + 18160, + 18161, + 18162, + 18163, + 18164, + 18165, + 18166, + 18167, + 18168, + 18169, + 18170, + 18171, + 18172, + 18173, + 18174, + 18175, + 18176, + 18177, + 18178, + 18179, + 18180, + 18181, + 18182, + 18183, + 18184, + 18185, + 18186, + 18187, + 18188, + 18189, + 18190, + 18191, + 18192, + 18193, + 18194, + 18195, + 18196, + 18197, + 18198, + 18199, + 18200, + 18201, + 18202, + 18203, + 18204, + 18205, + 18206, + 18207, + 18208, + 18209, + 18210, + 18211, + 18212, + 18213, + 18214, + 18215, + 18216, + 18217, + 18218, + 18219, + 18220, + 18221, + 18222, + 18223, + 18224, + 18225, + 18226, + 18227, + 18228, + 18229, + 18230, + 18231, + 18232, + 18233, + 18234, + 18235, + 18236, + 18237, + 18238, + 18239, + 18240, + 18241, + 18242, + 18243, + 18244, + 18245, + 18246, + 18247, + 18248, + 18249, + 18250, + 18251, + 18252, + 18253, + 18254, + 18255, + 18256, + 18257, + 18258, + 18259, + 18260, + 18261, + 18262, + 18263, + 18264, + 18265, + 18266, + 18267, + 18268, + 18269, + 18270, + 18271, + 18272, + 18273, + 18274, + 18275, + 18276, + 18277, + 18278, + 18279, + 18280, + 18281, + 18282, + 18283, + 18284, + 18285, + 18286, + 18287, + 18288, + 18289, + 18290, + 18291, + 18292, + 18293, + 18294, + 18295, + 18296, + 18297, + 18298, + 18299, + 18300, + 18301, + 18302, + 18303, + 18304, + 18305, + 18306, + 18307, + 18308, + 18309, + 18310, + 18311, + 18312, + 18313, + 18314, + 18315, + 18316, + 18317, + 18318, + 18319, + 18320, + 18321, + 18322, + 18323, + 18324, + 18325, + 
18326, + 18327, + 18328, + 18329, + 18330, + 18331, + 18332, + 18333, + 18334, + 18335, + 18336, + 18337, + 18338, + 18339, + 18340, + 18341, + 18342, + 18343, + 18344, + 18345, + 18346, + 18347, + 18348, + 18349, + 18350, + 18351, + 18352, + 18353, + 18354, + 18355, + 18356, + 18357, + 18358, + 18359, + 18360, + 18361, + 18362, + 18363, + 18364, + 18365, + 18366, + 18367, + 18368, + 18369, + 18370, + 18371, + 18372, + 18373, + 18374, + 18375, + 18376, + 18377, + 18378, + 18379, + 18380, + 18381, + 18382, + 18383, + 18384, + 18385, + 18386, + 18387, + 18388, + 18389, + 18390, + 18391, + 18392, + 18393, + 18394, + 18395, + 18396, + 18397, + 18398, + 18399, + 18400, + 18401, + 18402, + 18403, + 18404, + 18405, + 18406, + 18407, + 18408, + 18409, + 18410, + 18411, + 18412, + 18413, + 18414, + 18415, + 18416, + 18417, + 18418, + 18419, + 18420, + 18421, + 18422, + 18423, + 18424, + 18425, + 18426, + 18427, + 18428, + 18429, + 18430, + 18431, + 18432, + 18433, + 18434, + 18435, + 18436, + 18437, + 18438, + 18439, + 18440, + 18441, + 18442, + 18443, + 18444, + 18445, + 18446, + 18447, + 18448, + 18449, + 18450, + 18451, + 18452, + 18453, + 18454, + 18455, + 18456, + 18457, + 18458, + 18459, + 18460, + 18461, + 18462, + 18463, + 18464, + 18465, + 18466, + 18467, + 18468, + 18469, + 18470, + 18471, + 18472, + 18473, + 18474, + 18475, + 18476, + 18477, + 18478, + 18479, + 18480, + 18481, + 18482, + 18483, + 18484, + 18485, + 18486, + 18487, + 18488, + 18489, + 18490, + 18491, + 18492, + 18493, + 18494, + 18495, + 18496, + 18497, + 18498, + 18499, + 18500, + 18501, + 18502, + 18503, + 18504, + 18505, + 18506, + 18507, + 18508, + 18509, + 18510, + 18511, + 18512, + 18513, + 18514, + 18515, + 18516, + 18517, + 18518, + 18519, + 18520, + 18521, + 18522, + 18523, + 18524, + 18525, + 18526, + 18527, + 18528, + 18529, + 18530, + 18531, + 18532, + 18533, + 18534, + 18535, + 18536, + 18537, + 18538, + 18539, + 18540, + 18541, + 18542, + 18543, + 18544, + 18545, + 18546, + 18547, + 
18548, + 18549, + 18550, + 18551, + 18552, + 18553, + 18554, + 18555, + 18556, + 18557, + 18558, + 18559, + 18560, + 18561, + 18562, + 18563, + 18564, + 18565, + 18566, + 18567, + 18568, + 18569, + 18570, + 18571, + 18572, + 18573, + 18574, + 18575, + 18576, + 18577, + 18578, + 18579, + 18580, + 18581, + 18582, + 18583, + 18584, + 18585, + 18586, + 18587, + 18588, + 18589, + 18590, + 18591, + 18592, + 18593, + 18594, + 18595, + 18596, + 18597, + 18598, + 18599, + 18600, + 18601, + 18602, + 18603, + 18604, + 18605, + 18606, + 18607, + 18608, + 18609, + 18610, + 18611, + 18612, + 18613, + 18614, + 18615, + 18616, + 18617, + 18618, + 18619, + 18620, + 18621, + 18622, + 18623, + 18624, + 18625, + 18626, + 18627, + 18628, + 18629, + 18630, + 18631, + 18632, + 18633, + 18634, + 18635, + 18636, + 18637, + 18638, + 18639, + 18640, + 18641, + 18642, + 18643, + 18644, + 18645, + 18646, + 18647, + 18648, + 18649, + 18650, + 18651, + 18652, + 18653, + 18654, + 18655, + 18656, + 18657, + 18658, + 18659, + 18660, + 18661, + 18662, + 18663, + 18664, + 18665, + 18666, + 18667, + 18668, + 18669, + 18670, + 18671, + 18672, + 18673, + 18674, + 18675, + 18676, + 18677, + 18678, + 18679, + 18680, + 18681, + 18682, + 18683, + 18684, + 18685, + 18686, + 18687, + 18688, + 18689, + 18690, + 18691, + 18692, + 18693, + 18694, + 18695, + 18696, + 18697, + 18698, + 18699, + 18700, + 18701, + 18702, + 18703, + 18704, + 18705, + 18706, + 18707, + 18708, + 18709, + 18710, + 18711, + 18712, + 18713, + 18714, + 18715, + 18716, + 18717, + 18718, + 18719, + 18720, + 18721, + 18722, + 18723, + 18724, + 18725, + 18726, + 18727, + 18728, + 18729, + 18730, + 18731, + 18732, + 18733, + 18734, + 18735, + 18736, + 18737, + 18738, + 18739, + 18740, + 18741, + 18742, + 18743, + 18744, + 18745, + 18746, + 18747, + 18748, + 18749, + 18750, + 18751, + 18752, + 18753, + 18754, + 18755, + 18756, + 18757, + 18758, + 18759, + 18760, + 18761, + 18762, + 18763, + 18764, + 18765, + 18766, + 18767, + 18768, + 18769, + 
18770, + 18771, + 18772, + 18773, + 18774, + 18775, + 18776, + 18777, + 18778, + 18779, + 18780, + 18781, + 18782, + 18783, + 18784, + 18785, + 18786, + 18787, + 18788, + 18789, + 18790, + 18791, + 18792, + 18793, + 18794, + 18795, + 18796, + 18797, + 18798, + 18799, + 18800, + 18801, + 18802, + 18803, + 18804, + 18805, + 18806, + 18807, + 18808, + 18809, + 18810, + 18811, + 18812, + 18813, + 18814, + 18815, + 18816, + 18817, + 18818, + 18819, + 18820, + 18821, + 18822, + 18823, + 18824, + 18825, + 18826, + 18827, + 18828, + 18829, + 18830, + 18831, + 18832, + 18833, + 18834, + 18835, + 18836, + 18837, + 18838, + 18839, + 18840, + 18841, + 18842, + 18843, + 18844, + 18845, + 18846, + 18847, + 18848, + 18849, + 18850, + 18851, + 18852, + 18853, + 18854, + 18855, + 18856, + 18857, + 18858, + 18859, + 18860, + 18861, + 18862, + 18863, + 18864, + 18865, + 18866, + 18867, + 18868, + 18869, + 18870, + 18871, + 18872, + 18873, + 18874, + 18875, + 18876, + 18877, + 18878, + 18879, + 18880, + 18881, + 18882, + 18883, + 18884, + 18885, + 18886, + 18887, + 18888, + 18889, + 18890, + 18891, + 18892, + 18893, + 18894, + 18895, + 18896, + 18897, + 18898, + 18899, + 18900, + 18901, + 18902, + 18903, + 18904, + 18905, + 18906, + 18907, + 18908, + 18909, + 18910, + 18911, + 18912, + 18913, + 18914, + 18915, + 18916, + 18917, + 18918, + 18919, + 18920, + 18921, + 18922, + 18923, + 18924, + 18925, + 18926, + 18927, + 18928, + 18929, + 18930, + 18931, + 18932, + 18933, + 18934, + 18935, + 18936, + 18937, + 18938, + 18939, + 18940, + 18941, + 18942, + 18943, + 18944, + 18945, + 18946, + 18947, + 18948, + 18949, + 18950, + 18951, + 18952, + 18953, + 18954, + 18955, + 18956, + 18957, + 18958, + 18959, + 18960, + 18961, + 18962, + 18963, + 18964, + 18965, + 18966, + 18967, + 18968, + 18969, + 18970, + 18971, + 18972, + 18973, + 18974, + 18975, + 18976, + 18977, + 18978, + 18979, + 18980, + 18981, + 18982, + 18983, + 18984, + 18985, + 18986, + 18987, + 18988, + 18989, + 18990, + 18991, + 
18992, + 18993, + 18994, + 18995, + 18996, + 18997, + 18998, + 18999, + 19000, + 19001, + 19002, + 19003, + 19004, + 19005, + 19006, + 19007, + 19008, + 19009, + 19010, + 19011, + 19012, + 19013, + 19014, + 19015, + 19016, + 19017, + 19018, + 19019, + 19020, + 19021, + 19022, + 19023, + 19024, + 19025, + 19026, + 19027, + 19028, + 19029, + 19030, + 19031, + 19032, + 19033, + 19034, + 19035, + 19036, + 19037, + 19038, + 19039, + 19040, + 19041, + 19042, + 19043, + 19044, + 19045, + 19046, + 19047, + 19048, + 19049, + 19050, + 19051, + 19052, + 19053, + 19054, + 19055, + 19056, + 19057, + 19058, + 19059, + 19060, + 19061, + 19062, + 19063, + 19064, + 19065, + 19066, + 19067, + 19068, + 19069, + 19070, + 19071, + 19072, + 19073, + 19074, + 19075, + 19076, + 19077, + 19078, + 19079, + 19080, + 19081, + 19082, + 19083, + 19084, + 19085, + 19086, + 19087, + 19088, + 19089, + 19090, + 19091, + 19092, + 19093, + 19094, + 19095, + 19096, + 19097, + 19098, + 19099, + 19100, + 19101, + 19102, + 19103, + 19104, + 19105, + 19106, + 19107, + 19108, + 19109, + 19110, + 19111, + 19112, + 19113, + 19114, + 19115, + 19116, + 19117, + 19118, + 19119, + 19120, + 19121, + 19122, + 19123, + 19124, + 19125, + 19126, + 19127, + 19128, + 19129, + 19130, + 19131, + 19132, + 19133, + 19134, + 19135, + 19136, + 19137, + 19138, + 19139, + 19140, + 19141, + 19142, + 19143, + 19144, + 19145, + 19146, + 19147, + 19148, + 19149, + 19150, + 19151, + 19152, + 19153, + 19154, + 19155, + 19156, + 19157, + 19158, + 19159, + 19160, + 19161, + 19162, + 19163, + 19164, + 19165, + 19166, + 19167, + 19168, + 19169, + 19170, + 19171, + 19172, + 19173, + 19174, + 19175, + 19176, + 19177, + 19178, + 19179, + 19180, + 19181, + 19182, + 19183, + 19184, + 19185, + 19186, + 19187, + 19188, + 19189, + 19190, + 19191, + 19192, + 19193, + 19194, + 19195, + 19196, + 19197, + 19198, + 19199, + 19200, + 19201, + 19202, + 19203, + 19204, + 19205, + 19206, + 19207, + 19208, + 19209, + 19210, + 19211, + 19212, + 19213, + 
19214, + 19215, + 19216, + 19217, + 19218, + 19219, + 19220, + 19221, + 19222, + 19223, + 19224, + 19225, + 19226, + 19227, + 19228, + 19229, + 19230, + 19231, + 19232, + 19233, + 19234, + 19235, + 19236, + 19237, + 19238, + 19239, + 19240, + 19241, + 19242, + 19243, + 19244, + 19245, + 19246, + 19247, + 19248, + 19249, + 19250, + 19251, + 19252, + 19253, + 19254, + 19255, + 19256, + 19257, + 19258, + 19259, + 19260, + 19261, + 19262, + 19263, + 19264, + 19265, + 19266, + 19267, + 19268, + 19269, + 19270, + 19271, + 19272, + 19273, + 19274, + 19275, + 19276, + 19277, + 19278, + 19279, + 19280, + 19281, + 19282, + 19283, + 19284, + 19285, + 19286, + 19287, + 19288, + 19289, + 19290, + 19291, + 19292, + 19293, + 19294, + 19295, + 19296, + 19297, + 19298, + 19299, + 19300, + 19301, + 19302, + 19303, + 19304, + 19305, + 19306, + 19307, + 19308, + 19309, + 19310, + 19311, + 19312, + 19313, + 19314, + 19315, + 19316, + 19317, + 19318, + 19319, + 19320, + 19321, + 19322, + 19323, + 19324, + 19325, + 19326, + 19327, + 19328, + 19329, + 19330, + 19331, + 19332, + 19333, + 19334, + 19335, + 19336, + 19337, + 19338, + 19339, + 19340, + 19341, + 19342, + 19343, + 19344, + 19345, + 19346, + 19347, + 19348, + 19349, + 19350, + 19351, + 19352, + 19353, + 19354, + 19355, + 19356, + 19357, + 19358, + 19359, + 19360, + 19361, + 19362, + 19363, + 19364, + 19365, + 19366, + 19367, + 19368, + 19369, + 19370, + 19371, + 19372, + 19373, + 19374, + 19375, + 19376, + 19377, + 19378, + 19379, + 19380, + 19381, + 19382, + 19383, + 19384, + 19385, + 19386, + 19387, + 19388, + 19389, + 19390, + 19391, + 19392, + 19393, + 19394, + 19395, + 19396, + 19397, + 19398, + 19399, + 19400, + 19401, + 19402, + 19403, + 19404, + 19405, + 19406, + 19407, + 19408, + 19409, + 19410, + 19411, + 19412, + 19413, + 19414, + 19415, + 19416, + 19417, + 19418, + 19419, + 19420, + 19421, + 19422, + 19423, + 19424, + 19425, + 19426, + 19427, + 19428, + 19429, + 19430, + 19431, + 19432, + 19433, + 19434, + 19435, + 
19436, + 19437, + 19438, + 19439, + 19440, + 19441, + 19442, + 19443, + 19444, + 19445, + 19446, + 19447, + 19448, + 19449, + 19450, + 19451, + 19452, + 19453, + 19454, + 19455, + 19456, + 19457, + 19458, + 19459, + 19460, + 19461, + 19462, + 19463, + 19464, + 19465, + 19466, + 19467, + 19468, + 19469, + 19470, + 19471, + 19472, + 19473, + 19474, + 19475, + 19476, + 19477, + 19478, + 19479, + 19480, + 19481, + 19482, + 19483, + 19484, + 19485, + 19486, + 19487, + 19488, + 19489, + 19490, + 19491, + 19492, + 19493, + 19494, + 19495, + 19496, + 19497, + 19498, + 19499, + 19500, + 19501, + 19502, + 19503, + 19504, + 19505, + 19506, + 19507, + 19508, + 19509, + 19510, + 19511, + 19512, + 19513, + 19514, + 19515, + 19516, + 19517, + 19518, + 19519, + 19520, + 19521, + 19522, + 19523, + 19524, + 19525, + 19526, + 19527, + 19528, + 19529, + 19530, + 19531, + 19532, + 19533, + 19534, + 19535, + 19536, + 19537, + 19538, + 19539, + 19540, + 19541, + 19542, + 19543, + 19544, + 19545, + 19546, + 19547, + 19548, + 19549, + 19550, + 19551, + 19552, + 19553, + 19554, + 19555, + 19556, + 19557, + 19558, + 19559, + 19560, + 19561, + 19562, + 19563, + 19564, + 19565, + 19566, + 19567, + 19568, + 19569, + 19570, + 19571, + 19572, + 19573, + 19574, + 19575, + 19576, + 19577, + 19578, + 19579, + 19580, + 19581, + 19582, + 19583, + 19584, + 19585, + 19586, + 19587, + 19588, + 19589, + 19590, + 19591, + 19592, + 19593, + 19594, + 19595, + 19596, + 19597, + 19598, + 19599, + 19600, + 19601, + 19602, + 19603, + 19604, + 19605, + 19606, + 19607, + 19608, + 19609, + 19610, + 19611, + 19612, + 19613, + 19614, + 19615, + 19616, + 19617, + 19618, + 19619, + 19620, + 19621, + 19622, + 19623, + 19624, + 19625, + 19626, + 19627, + 19628, + 19629, + 19630, + 19631, + 19632, + 19633, + 19634, + 19635, + 19636, + 19637, + 19638, + 19639, + 19640, + 19641, + 19642, + 19643, + 19644, + 19645, + 19646, + 19647, + 19648, + 19649, + 19650, + 19651, + 19652, + 19653, + 19654, + 19655, + 19656, + 19657, + 
19658, + 19659, + 19660, + 19661, + 19662, + 19663, + 19664, + 19665, + 19666, + 19667, + 19668, + 19669, + 19670, + 19671, + 19672, + 19673, + 19674, + 19675, + 19676, + 19677, + 19678, + 19679, + 19680, + 19681, + 19682, + 19683, + 19684, + 19685, + 19686, + 19687, + 19688, + 19689, + 19690, + 19691, + 19692, + 19693, + 19694, + 19695, + 19696, + 19697, + 19698, + 19699, + 19700, + 19701, + 19702, + 19703, + 19704, + 19705, + 19706, + 19707, + 19708, + 19709, + 19710, + 19711, + 19712, + 19713, + 19714, + 19715, + 19716, + 19717, + 19718, + 19719, + 19720, + 19721, + 19722, + 19723, + 19724, + 19725, + 19726, + 19727, + 19728, + 19729, + 19730, + 19731, + 19732, + 19733, + 19734, + 19735, + 19736, + 19737, + 19738, + 19739, + 19740, + 19741, + 19742, + 19743, + 19744, + 19745, + 19746, + 19747, + 19748, + 19749, + 19750, + 19751, + 19752, + 19753, + 19754, + 19755, + 19756, + 19757, + 19758, + 19759, + 19760, + 19761, + 19762, + 19763, + 19764, + 19765, + 19766, + 19767, + 19768, + 19769, + 19770, + 19771, + 19772, + 19773, + 19774, + 19775, + 19776, + 19777, + 19778, + 19779, + 19780, + 19781, + 19782, + 19783, + 19784, + 19785, + 19786, + 19787, + 19788, + 19789, + 19790, + 19791, + 19792, + 19793, + 19794, + 19795, + 19796, + 19797, + 19798, + 19799, + 19800, + 19801, + 19802, + 19803, + 19804, + 19805, + 19806, + 19807, + 19808, + 19809, + 19810, + 19811, + 19812, + 19813, + 19814, + 19815, + 19816, + 19817, + 19818, + 19819, + 19820, + 19821, + 19822, + 19823, + 19824, + 19825, + 19826, + 19827, + 19828, + 19829, + 19830, + 19831, + 19832, + 19833, + 19834, + 19835, + 19836, + 19837, + 19838, + 19839, + 19840, + 19841, + 19842, + 19843, + 19844, + 19845, + 19846, + 19847, + 19848, + 19849, + 19850, + 19851, + 19852, + 19853, + 19854, + 19855, + 19856, + 19857, + 19858, + 19859, + 19860, + 19861, + 19862, + 19863, + 19864, + 19865, + 19866, + 19867, + 19868, + 19869, + 19870, + 19871, + 19872, + 19873, + 19874, + 19875, + 19876, + 19877, + 19878, + 19879, + 
19880, + 19881, + 19882, + 19883, + 19884, + 19885, + 19886, + 19887, + 19888, + 19889, + 19890, + 19891, + 19892, + 19893, + 19894, + 19895, + 19896, + 19897, + 19898, + 19899, + 19900, + 19901, + 19902, + 19903, + 19904, + 19905, + 19906, + 19907, + 19908, + 19909, + 19910, + 19911, + 19912, + 19913, + 19914, + 19915, + 19916, + 19917, + 19918, + 19919, + 19920, + 19921, + 19922, + 19923, + 19924, + 19925, + 19926, + 19927, + 19928, + 19929, + 19930, + 19931, + 19932, + 19933, + 19934, + 19935, + 19936, + 19937, + 19938, + 19939, + 19940, + 19941, + 19942, + 19943, + 19944, + 19945, + 19946, + 19947, + 19948, + 19949, + 19950, + 19951, + 19952, + 19953, + 19954, + 19955, + 19956, + 19957, + 19958, + 19959, + 19960, + 19961, + 19962, + 19963, + 19964, + 19965, + 19966, + 19967, + 19968, + 19969, + 19970, + 19971, + 19972, + 19973, + 19974, + 19975, + 19976, + 19977, + 19978, + 19979, + 19980, + 19981, + 19982, + 19983, + 19984, + 19985, + 19986, + 19987, + 19988, + 19989, + 19990, + 19991, + 19992, + 19993, + 19994, + 19995, + 19996, + 19997, + 19998, + 19999, + 20000, + 20001, + 20002, + 20003, + 20004, + 20005, + 20006, + 20007, + 20008, + 20009, + 20010, + 20011, + 20012, + 20013, + 20014, + 20015, + 20016, + 20017, + 20018, + 20019, + 20020, + 20021, + 20022, + 20023, + 20024, + 20025, + 20026, + 20027, + 20028, + 20029, + 20030, + 20031, + 20032, + 20033, + 20034, + 20035, + 20036, + 20037, + 20038, + 20039, + 20040, + 20041, + 20042, + 20043, + 20044, + 20045, + 20046, + 20047, + 20048, + 20049, + 20050, + 20051, + 20052, + 20053, + 20054, + 20055, + 20056, + 20057, + 20058, + 20059, + 20060, + 20061, + 20062, + 20063, + 20064, + 20065, + 20066, + 20067, + 20068, + 20069, + 20070, + 20071, + 20072, + 20073, + 20074, + 20075, + 20076, + 20077, + 20078, + 20079, + 20080, + 20081, + 20082, + 20083, + 20084, + 20085, + 20086, + 20087, + 20088, + 20089, + 20090, + 20091, + 20092, + 20093, + 20094, + 20095, + 20096, + 20097, + 20098, + 20099, + 20100, + 20101, + 
20102, + 20103, + 20104, + 20105, + 20106, + 20107, + 20108, + 20109, + 20110, + 20111, + 20112, + 20113, + 20114, + 20115, + 20116, + 20117, + 20118, + 20119, + 20120, + 20121, + 20122, + 20123, + 20124, + 20125, + 20126, + 20127, + 20128, + 20129, + 20130, + 20131, + 20132, + 20133, + 20134, + 20135, + 20136, + 20137, + 20138, + 20139, + 20140, + 20141, + 20142, + 20143, + 20144, + 20145, + 20146, + 20147, + 20148, + 20149, + 20150, + 20151, + 20152, + 20153, + 20154, + 20155, + 20156, + 20157, + 20158, + 20159, + 20160, + 20161, + 20162, + 20163, + 20164, + 20165, + 20166, + 20167, + 20168, + 20169, + 20170, + 20171, + 20172, + 20173, + 20174, + 20175, + 20176, + 20177, + 20178, + 20179, + 20180, + 20181, + 20182, + 20183, + 20184, + 20185, + 20186, + 20187, + 20188, + 20189, + 20190, + 20191, + 20192, + 20193, + 20194, + 20195, + 20196, + 20197, + 20198, + 20199, + 20200, + 20201, + 20202, + 20203, + 20204, + 20205, + 20206, + 20207, + 20208, + 20209, + 20210, + 20211, + 20212, + 20213, + 20214, + 20215, + 20216, + 20217, + 20218, + 20219, + 20220, + 20221, + 20222, + 20223, + 20224, + 20225, + 20226, + 20227, + 20228, + 20229, + 20230, + 20231, + 20232, + 20233, + 20234, + 20235, + 20236, + 20237, + 20238, + 20239, + 20240, + 20241, + 20242, + 20243, + 20244, + 20245, + 20246, + 20247, + 20248, + 20249, + 20250, + 20251, + 20252, + 20253, + 20254, + 20255, + 20256, + 20257, + 20258, + 20259, + 20260, + 20261, + 20262, + 20263, + 20264, + 20265, + 20266, + 20267, + 20268, + 20269, + 20270, + 20271, + 20272, + 20273, + 20274, + 20275, + 20276, + 20277, + 20278, + 20279, + 20280, + 20281, + 20282, + 20283, + 20284, + 20285, + 20286, + 20287, + 20288, + 20289, + 20290, + 20291, + 20292, + 20293, + 20294, + 20295, + 20296, + 20297, + 20298, + 20299, + 20300, + 20301, + 20302, + 20303, + 20304, + 20305, + 20306, + 20307, + 20308, + 20309, + 20310, + 20311, + 20312, + 20313, + 20314, + 20315, + 20316, + 20317, + 20318, + 20319, + 20320, + 20321, + 20322, + 20323, + 
20324, + 20325, + 20326, + 20327, + 20328, + 20329, + 20330, + 20331, + 20332, + 20333, + 20334, + 20335, + 20336, + 20337, + 20338, + 20339, + 20340, + 20341, + 20342, + 20343, + 20344, + 20345, + 20346, + 20347, + 20348, + 20349, + 20350, + 20351, + 20352, + 20353, + 20354, + 20355, + 20356, + 20357, + 20358, + 20359, + 20360, + 20361, + 20362, + 20363, + 20364, + 20365, + 20366, + 20367, + 20368, + 20369, + 20370, + 20371, + 20372, + 20373, + 20374, + 20375, + 20376, + 20377, + 20378, + 20379, + 20380, + 20381, + 20382, + 20383, + 20384, + 20385, + 20386, + 20387, + 20388, + 20389, + 20390, + 20391, + 20392, + 20393, + 20394, + 20395, + 20396, + 20397, + 20398, + 20399, + 20400, + 20401, + 20402, + 20403, + 20404, + 20405, + 20406, + 20407, + 20408, + 20409, + 20410, + 20411, + 20412, + 20413, + 20414, + 20415, + 20416, + 20417, + 20418, + 20419, + 20420, + 20421, + 20422, + 20423, + 20424, + 20425, + 20426, + 20427, + 20428, + 20429, + 20430, + 20431, + 20432, + 20433, + 20434, + 20435, + 20436, + 20437, + 20438, + 20439, + 20440, + 20441, + 20442, + 20443, + 20444, + 20445, + 20446, + 20447, + 20448, + 20449, + 20450, + 20451, + 20452, + 20453, + 20454, + 20455, + 20456, + 20457, + 20458, + 20459, + 20460, + 20461, + 20462, + 20463, + 20464, + 20465, + 20466, + 20467, + 20468, + 20469, + 20470, + 20471, + 20472, + 20473, + 20474, + 20475, + 20476, + 20477, + 20478, + 20479, + 20480, + 20481, + 20482, + 20483, + 20484, + 20485, + 20486, + 20487, + 20488, + 20489, + 20490, + 20491, + 20492, + 20493, + 20494, + 20495, + 20496, + 20497, + 20498, + 20499, + 20500, + 20501, + 20502, + 20503, + 20504, + 20505, + 20506, + 20507, + 20508, + 20509, + 20510, + 20511, + 20512, + 20513, + 20514, + 20515, + 20516, + 20517, + 20518, + 20519, + 20520, + 20521, + 20522, + 20523, + 20524, + 20525, + 20526, + 20527, + 20528, + 20529, + 20530, + 20531, + 20532, + 20533, + 20534, + 20535, + 20536, + 20537, + 20538, + 20539, + 20540, + 20541, + 20542, + 20543, + 20544, + 20545, + 
20546, + 20547, + 20548, + 20549, + 20550, + 20551, + 20552, + 20553, + 20554, + 20555, + 20556, + 20557, + 20558, + 20559, + 20560, + 20561, + 20562, + 20563, + 20564, + 20565, + 20566, + 20567, + 20568, + 20569, + 20570, + 20571, + 20572, + 20573, + 20574, + 20575, + 20576, + 20577, + 20578, + 20579, + 20580, + 20581, + 20582, + 20583, + 20584, + 20585, + 20586, + 20587, + 20588, + 20589, + 20590, + 20591, + 20592, + 20593, + 20594, + 20595, + 20596, + 20597, + 20598, + 20599, + 20600, + 20601, + 20602, + 20603, + 20604, + 20605, + 20606, + 20607, + 20608, + 20609, + 20610, + 20611, + 20612, + 20613, + 20614, + 20615, + 20616, + 20617, + 20618, + 20619, + 20620, + 20621, + 20622, + 20623, + 20624, + 20625, + 20626, + 20627, + 20628, + 20629, + 20630, + 20631, + 20632, + 20633, + 20634, + 20635, + 20636, + 20637, + 20638, + 20639, + 20640, + 20641, + 20642, + 20643, + 20644, + 20645, + 20646, + 20647, + 20648, + 20649, + 20650, + 20651, + 20652, + 20653, + 20654, + 20655, + 20656, + 20657, + 20658, + 20659, + 20660, + 20661, + 20662, + 20663, + 20664, + 20665, + 20666, + 20667, + 20668, + 20669, + 20670, + 20671, + 20672, + 20673, + 20674, + 20675, + 20676, + 20677, + 20678, + 20679, + 20680, + 20681, + 20682, + 20683, + 20684, + 20685, + 20686, + 20687, + 20688, + 20689, + 20690, + 20691, + 20692, + 20693, + 20694, + 20695, + 20696, + 20697, + 20698, + 20699, + 20700, + 20701, + 20702, + 20703, + 20704, + 20705, + 20706, + 20707, + 20708, + 20709, + 20710, + 20711, + 20712, + 20713, + 20714, + 20715, + 20716, + 20717, + 20718, + 20719, + 20720, + 20721, + 20722, + 20723, + 20724, + 20725, + 20726, + 20727, + 20728, + 20729, + 20730, + 20731, + 20732, + 20733, + 20734, + 20735, + 20736, + 20737, + 20738, + 20739, + 20740, + 20741, + 20742, + 20743, + 20744, + 20745, + 20746, + 20747, + 20748, + 20749, + 20750, + 20751, + 20752, + 20753, + 20754, + 20755, + 20756, + 20757, + 20758, + 20759, + 20760, + 20761, + 20762, + 20763, + 20764, + 20765, + 20766, + 20767, + 
20768, + 20769, + 20770, + 20771, + 20772, + 20773, + 20774, + 20775, + 20776, + 20777, + 20778, + 20779, + 20780, + 20781, + 20782, + 20783, + 20784, + 20785, + 20786, + 20787, + 20788, + 20789, + 20790, + 20791, + 20792, + 20793, + 20794, + 20795, + 20796, + 20797, + 20798, + 20799, + 20800, + 20801, + 20802, + 20803, + 20804, + 20805, + 20806, + 20807, + 20808, + 20809, + 20810, + 20811, + 20812, + 20813, + 20814, + 20815, + 20816, + 20817, + 20818, + 20819, + 20820, + 20821, + 20822, + 20823, + 20824, + 20825, + 20826, + 20827, + 20828, + 20829, + 20830, + 20831, + 20832, + 20833, + 20834, + 20835, + 20836, + 20837, + 20838, + 20839, + 20840, + 20841, + 20842, + 20843, + 20844, + 20845, + 20846, + 20847, + 20848, + 20849, + 20850, + 20851, + 20852, + 20853, + 20854, + 20855, + 20856, + 20857, + 20858, + 20859, + 20860, + 20861, + 20862, + 20863, + 20864, + 20865, + 20866, + 20867, + 20868, + 20869, + 20870, + 20871, + 20872, + 20873, + 20874, + 20875, + 20876, + 20877, + 20878, + 20879, + 20880, + 20881, + 20882, + 20883, + 20884, + 20885, + 20886, + 20887, + 20888, + 20889, + 20890, + 20891, + 20892, + 20893, + 20894, + 20895, + 20896, + 20897, + 20898, + 20899, + 20900, + 20901, + 20902, + 20903, + 20904, + 20905, + 20906, + 20907, + 20908, + 20909, + 20910, + 20911, + 20912, + 20913, + 20914, + 20915, + 20916, + 20917, + 20918, + 20919, + 20920, + 20921, + 20922, + 20923, + 20924, + 20925, + 20926, + 20927, + 20928, + 20929, + 20930, + 20931, + 20932, + 20933, + 20934, + 20935, + 20936, + 20937, + 20938, + 20939, + 20940, + 20941, + 20942, + 20943, + 20944, + 20945, + 20946, + 20947, + 20948, + 20949, + 20950, + 20951, + 20952, + 20953, + 20954, + 20955, + 20956, + 20957, + 20958, + 20959, + 20960, + 20961, + 20962, + 20963, + 20964, + 20965, + 20966, + 20967, + 20968, + 20969, + 20970, + 20971, + 20972, + 20973, + 20974, + 20975, + 20976, + 20977, + 20978, + 20979, + 20980, + 20981, + 20982, + 20983, + 20984, + 20985, + 20986, + 20987, + 20988, + 20989, + 
20990, + 20991, + 20992, + 20993, + 20994, + 20995, + 20996, + 20997, + 20998, + 20999, + 21000, + 21001, + 21002, + 21003, + 21004, + 21005, + 21006, + 21007, + 21008, + 21009, + 21010, + 21011, + 21012, + 21013, + 21014, + 21015, + 21016, + 21017, + 21018, + 21019, + 21020, + 21021, + 21022, + 21023, + 21024, + 21025, + 21026, + 21027, + 21028, + 21029, + 21030, + 21031, + 21032, + 21033, + 21034, + 21035, + 21036, + 21037, + 21038, + 21039, + 21040, + 21041, + 21042, + 21043, + 21044, + 21045, + 21046, + 21047, + 21048, + 21049, + 21050, + 21051, + 21052, + 21053, + 21054, + 21055, + 21056, + 21057, + 21058, + 21059, + 21060, + 21061, + 21062, + 21063, + 21064, + 21065, + 21066, + 21067, + 21068, + 21069, + 21070, + 21071, + 21072, + 21073, + 21074, + 21075, + 21076, + 21077, + 21078, + 21079, + 21080, + 21081, + 21082, + 21083, + 21084, + 21085, + 21086, + 21087, + 21088, + 21089, + 21090, + 21091, + 21092, + 21093, + 21094, + 21095, + 21096, + 21097, + 21098, + 21099, + 21100, + 21101, + 21102, + 21103, + 21104, + 21105, + 21106, + 21107, + 21108, + 21109, + 21110, + 21111, + 21112, + 21113, + 21114, + 21115, + 21116, + 21117, + 21118, + 21119, + 21120, + 21121, + 21122, + 21123, + 21124, + 21125, + 21126, + 21127, + 21128, + 21129, + 21130, + 21131, + 21132, + 21133, + 21134, + 21135, + 21136, + 21137, + 21138, + 21139, + 21140, + 21141, + 21142, + 21143, + 21144, + 21145, + 21146, + 21147, + 21148, + 21149, + 21150, + 21151, + 21152, + 21153, + 21154, + 21155, + 21156, + 21157, + 21158, + 21159, + 21160, + 21161, + 21162, + 21163, + 21164, + 21165, + 21166, + 21167, + 21168, + 21169, + 21170, + 21171, + 21172, + 21173, + 21174, + 21175, + 21176, + 21177, + 21178, + 21179, + 21180, + 21181, + 21182, + 21183, + 21184, + 21185, + 21186, + 21187, + 21188, + 21189, + 21190, + 21191, + 21192, + 21193, + 21194, + 21195, + 21196, + 21197, + 21198, + 21199, + 21200, + 21201, + 21202, + 21203, + 21204, + 21205, + 21206, + 21207, + 21208, + 21209, + 21210, + 21211, + 
21212, + 21213, + 21214, + 21215, + 21216, + 21217, + 21218, + 21219, + 21220, + 21221, + 21222, + 21223, + 21224, + 21225, + 21226, + 21227, + 21228, + 21229, + 21230, + 21231, + 21232, + 21233, + 21234, + 21235, + 21236, + 21237, + 21238, + 21239, + 21240, + 21241, + 21242, + 21243, + 21244, + 21245, + 21246, + 21247, + 21248, + 21249, + 21250, + 21251, + 21252, + 21253, + 21254, + 21255, + 21256, + 21257, + 21258, + 21259, + 21260, + 21261, + 21262, + 21263, + 21264, + 21265, + 21266, + 21267, + 21268, + 21269, + 21270, + 21271, + 21272, + 21273, + 21274, + 21275, + 21276, + 21277, + 21278, + 21279, + 21280, + 21281, + 21282, + 21283, + 21284, + 21285, + 21286, + 21287, + 21288, + 21289, + 21290, + 21291, + 21292, + 21293, + 21294, + 21295, + 21296, + 21297, + 21298, + 21299, + 21300, + 21301, + 21302, + 21303, + 21304, + 21305, + 21306, + 21307, + 21308, + 21309, + 21310, + 21311, + 21312, + 21313, + 21314, + 21315, + 21316, + 21317, + 21318, + 21319, + 21320, + 21321, + 21322, + 21323, + 21324, + 21325, + 21326, + 21327, + 21328, + 21329, + 21330, + 21331, + 21332, + 21333, + 21334, + 21335, + 21336, + 21337, + 21338, + 21339, + 21340, + 21341, + 21342, + 21343, + 21344, + 21345, + 21346, + 21347, + 21348, + 21349, + 21350, + 21351, + 21352, + 21353, + 21354, + 21355, + 21356, + 21357, + 21358, + 21359, + 21360, + 21361, + 21362, + 21363, + 21364, + 21365, + 21366, + 21367, + 21368, + 21369, + 21370, + 21371, + 21372, + 21373, + 21374, + 21375, + 21376, + 21377, + 21378, + 21379, + 21380, + 21381, + 21382, + 21383, + 21384, + 21385, + 21386, + 21387, + 21388, + 21389, + 21390, + 21391, + 21392, + 21393, + 21394, + 21395, + 21396, + 21397, + 21398, + 21399, + 21400, + 21401, + 21402, + 21403, + 21404, + 21405, + 21406, + 21407, + 21408, + 21409, + 21410, + 21411, + 21412, + 21413, + 21414, + 21415, + 21416, + 21417, + 21418, + 21419, + 21420, + 21421, + 21422, + 21423, + 21424, + 21425, + 21426, + 21427, + 21428, + 21429, + 21430, + 21431, + 21432, + 21433, + 
21434, + 21435, + 21436, + 21437, + 21438, + 21439, + 21440, + 21441, + 21442, + 21443, + 21444, + 21445, + 21446, + 21447, + 21448, + 21449, + 21450, + 21451, + 21452, + 21453, + 21454, + 21455, + 21456, + 21457, + 21458, + 21459, + 21460, + 21461, + 21462, + 21463, + 21464, + 21465, + 21466, + 21467, + 21468, + 21469, + 21470, + 21471, + 21472, + 21473, + 21474, + 21475, + 21476, + 21477, + 21478, + 21479, + 21480, + 21481, + 21482, + 21483, + 21484, + 21485, + 21486, + 21487, + 21488, + 21489, + 21490, + 21491, + 21492, + 21493, + 21494, + 21495, + 21496, + 21497, + 21498, + 21499, + 21500, + 21501, + 21502, + 21503, + 21504, + 21505, + 21506, + 21507, + 21508, + 21509, + 21510, + 21511, + 21512, + 21513, + 21514, + 21515, + 21516, + 21517, + 21518, + 21519, + 21520, + 21521, + 21522, + 21523, + 21524, + 21525, + 21526, + 21527, + 21528, + 21529, + 21530, + 21531, + 21532, + 21533, + 21534, + 21535, + 21536, + 21537, + 21538, + 21539, + 21540, + 21541, + 21542, + 21543, + 21544, + 21545, + 21546, + 21547, + 21548, + 21549, + 21550, + 21551, + 21552, + 21553, + 21554, + 21555, + 21556, + 21557, + 21558, + 21559, + 21560, + 21561, + 21562, + 21563, + 21564, + 21565, + 21566, + 21567, + 21568, + 21569, + 21570, + 21571, + 21572, + 21573, + 21574, + 21575, + 21576, + 21577, + 21578, + 21579, + 21580, + 21581, + 21582, + 21583, + 21584, + 21585, + 21586, + 21587, + 21588, + 21589, + 21590, + 21591, + 21592, + 21593, + 21594, + 21595, + 21596, + 21597, + 21598, + 21599, + 21600, + 21601, + 21602, + 21603, + 21604, + 21605, + 21606, + 21607, + 21608, + 21609, + 21610, + 21611, + 21612, + 21613, + 21614, + 21615, + 21616, + 21617, + 21618, + 21619, + 21620, + 21621, + 21622, + 21623, + 21624, + 21625, + 21626, + 21627, + 21628, + 21629, + 21630, + 21631, + 21632, + 21633, + 21634, + 21635, + 21636, + 21637, + 21638, + 21639, + 21640, + 21641, + 21642, + 21643, + 21644, + 21645, + 21646, + 21647, + 21648, + 21649, + 21650, + 21651, + 21652, + 21653, + 21654, + 21655, + 
21656, + 21657, + 21658, + 21659, + 21660, + 21661, + 21662, + 21663, + 21664, + 21665, + 21666, + 21667, + 21668, + 21669, + 21670, + 21671, + 21672, + 21673, + 21674, + 21675, + 21676, + 21677, + 21678, + 21679, + 21680, + 21681, + 21682, + 21683, + 21684, + 21685, + 21686, + 21687, + 21688, + 21689, + 21690, + 21691, + 21692, + 21693, + 21694, + 21695, + 21696, + 21697, + 21698, + 21699, + 21700, + 21701, + 21702, + 21703, + 21704, + 21705, + 21706, + 21707, + 21708, + 21709, + 21710, + 21711, + 21712, + 21713, + 21714, + 21715, + 21716, + 21717, + 21718, + 21719, + 21720, + 21721, + 21722, + 21723, + 21724, + 21725, + 21726, + 21727, + 21728, + 21729, + 21730, + 21731, + 21732, + 21733, + 21734, + 21735, + 21736, + 21737, + 21738, + 21739, + 21740, + 21741, + 21742, + 21743, + 21744, + 21745, + 21746, + 21747, + 21748, + 21749, + 21750, + 21751, + 21752, + 21753, + 21754, + 21755, + 21756, + 21757, + 21758, + 21759, + 21760, + 21761, + 21762, + 21763, + 21764, + 21765, + 21766, + 21767, + 21768, + 21769, + 21770, + 21771, + 21772, + 21773, + 21774, + 21775, + 21776, + 21777, + 21778, + 21779, + 21780, + 21781, + 21782, + 21783, + 21784, + 21785, + 21786, + 21787, + 21788, + 21789, + 21790, + 21791, + 21792, + 21793, + 21794, + 21795, + 21796, + 21797, + 21798, + 21799, + 21800, + 21801, + 21802, + 21803, + 21804, + 21805, + 21806, + 21807, + 21808, + 21809, + 21810, + 21811, + 21812, + 21813, + 21814, + 21815, + 21816, + 21817, + 21818, + 21819, + 21820, + 21821, + 21822, + 21823, + 21824, + 21825, + 21826, + 21827, + 21828, + 21829, + 21830, + 21831, + 21832, + 21833, + 21834, + 21835, + 21836, + 21837, + 21838, + 21839, + 21840, + 21841, + 21842, + 21843, + 21844, + 21845, + 21846, + 21847, + 21848, + 21849, + 21850, + 21851, + 21852, + 21853, + 21854, + 21855, + 21856, + 21857, + 21858, + 21859, + 21860, + 21861, + 21862, + 21863, + 21864, + 21865, + 21866, + 21867, + 21868, + 21869, + 21870, + 21871, + 21872, + 21873, + 21874, + 21875, + 21876, + 21877, + 
21878, + 21879, + 21880, + 21881, + 21882, + 21883, + 21884, + 21885, + 21886, + 21887, + 21888, + 21889, + 21890, + 21891, + 21892, + 21893, + 21894, + 21895, + 21896, + 21897, + 21898, + 21899, + 21900, + 21901, + 21902, + 21903, + 21904, + 21905, + 21906, + 21907, + 21908, + 21909, + 21910, + 21911, + 21912, + 21913, + 21914, + 21915, + 21916, + 21917, + 21918, + 21919, + 21920, + 21921, + 21922, + 21923, + 21924, + 21925, + 21926, + 21927, + 21928, + 21929, + 21930, + 21931, + 21932, + 21933, + 21934, + 21935, + 21936, + 21937, + 21938, + 21939, + 21940, + 21941, + 21942, + 21943, + 21944, + 21945, + 21946, + 21947, + 21948, + 21949, + 21950, + 21951, + 21952, + 21953, + 21954, + 21955, + 21956, + 21957, + 21958, + 21959, + 21960, + 21961, + 21962, + 21963, + 21964, + 21965, + 21966, + 21967, + 21968, + 21969, + 21970, + 21971, + 21972, + 21973, + 21974, + 21975, + 21976, + 21977, + 21978, + 21979, + 21980, + 21981, + 21982, + 21983, + 21984, + 21985, + 21986, + 21987, + 21988, + 21989, + 21990, + 21991, + 21992, + 21993, + 21994, + 21995, + 21996, + 21997, + 21998, + 21999, + 22000, + 22001, + 22002, + 22003, + 22004, + 22005, + 22006, + 22007, + 22008, + 22009, + 22010, + 22011, + 22012, + 22013, + 22014, + 22015, + 22016, + 22017, + 22018, + 22019, + 22020, + 22021, + 22022, + 22023, + 22024, + 22025, + 22026, + 22027, + 22028, + 22029, + 22030, + 22031, + 22032, + 22033, + 22034, + 22035, + 22036, + 22037, + 22038, + 22039, + 22040, + 22041, + 22042, + 22043, + 22044, + 22045, + 22046, + 22047, + 22048, + 22049, + 22050, + 22051, + 22052, + 22053, + 22054, + 22055, + 22056, + 22057, + 22058, + 22059, + 22060, + 22061, + 22062, + 22063, + 22064, + 22065, + 22066, + 22067, + 22068, + 22069, + 22070, + 22071, + 22072, + 22073, + 22074, + 22075, + 22076, + 22077, + 22078, + 22079, + 22080, + 22081, + 22082, + 22083, + 22084, + 22085, + 22086, + 22087, + 22088, + 22089, + 22090, + 22091, + 22092, + 22093, + 22094, + 22095, + 22096, + 22097, + 22098, + 22099, + 
22100, + 22101, + 22102, + 22103, + 22104, + 22105, + 22106, + 22107, + 22108, + 22109, + 22110, + 22111, + 22112, + 22113, + 22114, + 22115, + 22116, + 22117, + 22118, + 22119, + 22120, + 22121, + 22122, + 22123, + 22124, + 22125, + 22126, + 22127, + 22128, + 22129, + 22130, + 22131, + 22132, + 22133, + 22134, + 22135, + 22136, + 22137, + 22138, + 22139, + 22140, + 22141, + 22142, + 22143, + 22144, + 22145, + 22146, + 22147, + 22148, + 22149, + 22150, + 22151, + 22152, + 22153, + 22154, + 22155, + 22156, + 22157, + 22158, + 22159, + 22160, + 22161, + 22162, + 22163, + 22164, + 22165, + 22166, + 22167, + 22168, + 22169, + 22170, + 22171, + 22172, + 22173, + 22174, + 22175, + 22176, + 22177, + 22178, + 22179, + 22180, + 22181, + 22182, + 22183, + 22184, + 22185, + 22186, + 22187, + 22188, + 22189, + 22190, + 22191, + 22192, + 22193, + 22194, + 22195, + 22196, + 22197, + 22198, + 22199, + 22200, + 22201, + 22202, + 22203, + 22204, + 22205, + 22206, + 22207, + 22208, + 22209, + 22210, + 22211, + 22212, + 22213, + 22214, + 22215, + 22216, + 22217, + 22218, + 22219, + 22220, + 22221, + 22222, + 22223, + 22224, + 22225, + 22226, + 22227, + 22228, + 22229, + 22230, + 22231, + 22232, + 22233, + 22234, + 22235, + 22236, + 22237, + 22238, + 22239, + 22240, + 22241, + 22242, + 22243, + 22244, + 22245, + 22246, + 22247, + 22248, + 22249, + 22250, + 22251, + 22252, + 22253, + 22254, + 22255, + 22256, + 22257, + 22258, + 22259, + 22260, + 22261, + 22262, + 22263, + 22264, + 22265, + 22266, + 22267, + 22268, + 22269, + 22270, + 22271, + 22272, + 22273, + 22274, + 22275, + 22276, + 22277, + 22278, + 22279, + 22280, + 22281, + 22282, + 22283, + 22284, + 22285, + 22286, + 22287, + 22288, + 22289, + 22290, + 22291, + 22292, + 22293, + 22294, + 22295, + 22296, + 22297, + 22298, + 22299, + 22300, + 22301, + 22302, + 22303, + 22304, + 22305, + 22306, + 22307, + 22308, + 22309, + 22310, + 22311, + 22312, + 22313, + 22314, + 22315, + 22316, + 22317, + 22318, + 22319, + 22320, + 22321, + 
22322, + 22323, + 22324, + 22325, + 22326, + 22327, + 22328, + 22329, + 22330, + 22331, + 22332, + 22333, + 22334, + 22335, + 22336, + 22337, + 22338, + 22339, + 22340, + 22341, + 22342, + 22343, + 22344, + 22345, + 22346, + 22347, + 22348, + 22349, + 22350, + 22351, + 22352, + 22353, + 22354, + 22355, + 22356, + 22357, + 22358, + 22359, + 22360, + 22361, + 22362, + 22363, + 22364, + 22365, + 22366, + 22367, + 22368, + 22369, + 22370, + 22371, + 22372, + 22373, + 22374, + 22375, + 22376, + 22377, + 22378, + 22379, + 22380, + 22381, + 22382, + 22383, + 22384, + 22385, + 22386, + 22387, + 22388, + 22389, + 22390, + 22391, + 22392, + 22393, + 22394, + 22395, + 22396, + 22397, + 22398, + 22399, + 22400, + 22401, + 22402, + 22403, + 22404, + 22405, + 22406, + 22407, + 22408, + 22409, + 22410, + 22411, + 22412, + 22413, + 22414, + 22415, + 22416, + 22417, + 22418, + 22419, + 22420, + 22421, + 22422, + 22423, + 22424, + 22425, + 22426, + 22427, + 22428, + 22429, + 22430, + 22431, + 22432, + 22433, + 22434, + 22435, + 22436, + 22437, + 22438, + 22439, + 22440, + 22441, + 22442, + 22443, + 22444, + 22445, + 22446, + 22447, + 22448, + 22449, + 22450, + 22451, + 22452, + 22453, + 22454, + 22455, + 22456, + 22457, + 22458, + 22459, + 22460, + 22461, + 22462, + 22463, + 22464, + 22465, + 22466, + 22467, + 22468, + 22469, + 22470, + 22471, + 22472, + 22473, + 22474, + 22475, + 22476, + 22477, + 22478, + 22479, + 22480, + 22481, + 22482, + 22483, + 22484, + 22485, + 22486, + 22487, + 22488, + 22489, + 22490, + 22491, + 22492, + 22493, + 22494, + 22495, + 22496, + 22497, + 22498, + 22499, + 22500, + 22501, + 22502, + 22503, + 22504, + 22505, + 22506, + 22507, + 22508, + 22509, + 22510, + 22511, + 22512, + 22513, + 22514, + 22515, + 22516, + 22517, + 22518, + 22519, + 22520, + 22521, + 22522, + 22523, + 22524, + 22525, + 22526, + 22527, + 22528, + 22529, + 22530, + 22531, + 22532, + 22533, + 22534, + 22535, + 22536, + 22537, + 22538, + 22539, + 22540, + 22541, + 22542, + 22543, + 
22544, + 22545, + 22546, + 22547, + 22548, + 22549, + 22550, + 22551, + 22552, + 22553, + 22554, + 22555, + 22556, + 22557, + 22558, + 22559, + 22560, + 22561, + 22562, + 22563, + 22564, + 22565, + 22566, + 22567, + 22568, + 22569, + 22570, + 22571, + 22572, + 22573, + 22574, + 22575, + 22576, + 22577, + 22578, + 22579, + 22580, + 22581, + 22582, + 22583, + 22584, + 22585, + 22586, + 22587, + 22588, + 22589, + 22590, + 22591, + 22592, + 22593, + 22594, + 22595, + 22596, + 22597, + 22598, + 22599, + 22600, + 22601, + 22602, + 22603, + 22604, + 22605, + 22606, + 22607, + 22608, + 22609, + 22610, + 22611, + 22612, + 22613, + 22614, + 22615, + 22616, + 22617, + 22618, + 22619, + 22620, + 22621, + 22622, + 22623, + 22624, + 22625, + 22626, + 22627, + 22628, + 22629, + 22630, + 22631, + 22632, + 22633, + 22634, + 22635, + 22636, + 22637, + 22638, + 22639, + 22640, + 22641, + 22642, + 22643, + 22644, + 22645, + 22646, + 22647, + 22648, + 22649, + 22650, + 22651, + 22652, + 22653, + 22654, + 22655, + 22656, + 22657, + 22658, + 22659, + 22660, + 22661, + 22662, + 22663, + 22664, + 22665, + 22666, + 22667, + 22668, + 22669, + 22670, + 22671, + 22672, + 22673, + 22674, + 22675, + 22676, + 22677, + 22678, + 22679, + 22680, + 22681, + 22682, + 22683, + 22684, + 22685, + 22686, + 22687, + 22688, + 22689, + 22690, + 22691, + 22692, + 22693, + 22694, + 22695, + 22696, + 22697, + 22698, + 22699, + 22700, + 22701, + 22702, + 22703, + 22704, + 22705, + 22706, + 22707, + 22708, + 22709, + 22710, + 22711, + 22712, + 22713, + 22714, + 22715, + 22716, + 22717, + 22718, + 22719, + 22720, + 22721, + 22722, + 22723, + 22724, + 22725, + 22726, + 22727, + 22728, + 22729, + 22730, + 22731, + 22732, + 22733, + 22734, + 22735, + 22736, + 22737, + 22738, + 22739, + 22740, + 22741, + 22742, + 22743, + 22744, + 22745, + 22746, + 22747, + 22748, + 22749, + 22750, + 22751, + 22752, + 22753, + 22754, + 22755, + 22756, + 22757, + 22758, + 22759, + 22760, + 22761, + 22762, + 22763, + 22764, + 22765, + 
22766, + 22767, + 22768, + 22769, + 22770, + 22771, + 22772, + 22773, + 22774, + 22775, + 22776, + 22777, + 22778, + 22779, + 22780, + 22781, + 22782, + 22783, + 22784, + 22785, + 22786, + 22787, + 22788, + 22789, + 22790, + 22791, + 22792, + 22793, + 22794, + 22795, + 22796, + 22797, + 22798, + 22799, + 22800, + 22801, + 22802, + 22803, + 22804, + 22805, + 22806, + 22807, + 22808, + 22809, + 22810, + 22811, + 22812, + 22813, + 22814, + 22815, + 22816, + 22817, + 22818, + 22819, + 22820, + 22821, + 22822, + 22823, + 22824, + 22825, + 22826, + 22827, + 22828, + 22829, + 22830, + 22831, + 22832, + 22833, + 22834, + 22835, + 22836, + 22837, + 22838, + 22839, + 22840, + 22841, + 22842, + 22843, + 22844, + 22845, + 22846, + 22847, + 22848, + 22849, + 22850, + 22851, + 22852, + 22853, + 22854, + 22855, + 22856, + 22857, + 22858, + 22859, + 22860, + 22861, + 22862, + 22863, + 22864, + 22865, + 22866, + 22867, + 22868, + 22869, + 22870, + 22871, + 22872, + 22873, + 22874, + 22875, + 22876, + 22877, + 22878, + 22879, + 22880, + 22881, + 22882, + 22883, + 22884, + 22885, + 22886, + 22887, + 22888, + 22889, + 22890, + 22891, + 22892, + 22893, + 22894, + 22895, + 22896, + 22897, + 22898, + 22899, + 22900, + 22901, + 22902, + 22903, + 22904, + 22905, + 22906, + 22907, + 22908, + 22909, + 22910, + 22911, + 22912, + 22913, + 22914, + 22915, + 22916, + 22917, + 22918, + 22919, + 22920, + 22921, + 22922, + 22923, + 22924, + 22925, + 22926, + 22927, + 22928, + 22929, + 22930, + 22931, + 22932, + 22933, + 22934, + 22935, + 22936, + 22937, + 22938, + 22939, + 22940, + 22941, + 22942, + 22943, + 22944, + 22945, + 22946, + 22947, + 22948, + 22949, + 22950, + 22951, + 22952, + 22953, + 22954, + 22955, + 22956, + 22957, + 22958, + 22959, + 22960, + 22961, + 22962, + 22963, + 22964, + 22965, + 22966, + 22967, + 22968, + 22969, + 22970, + 22971, + 22972, + 22973, + 22974, + 22975, + 22976, + 22977, + 22978, + 22979, + 22980, + 22981, + 22982, + 22983, + 22984, + 22985, + 22986, + 22987, + 
22988, + 22989, + 22990, + 22991, + 22992, + 22993, + 22994, + 22995, + 22996, + 22997, + 22998, + 22999, + 23000, + 23001, + 23002, + 23003, + 23004, + 23005, + 23006, + 23007, + 23008, + 23009, + 23010, + 23011, + 23012, + 23013, + 23014, + 23015, + 23016, + 23017, + 23018, + 23019, + 23020, + 23021, + 23022, + 23023, + 23024, + 23025, + 23026, + 23027, + 23028, + 23029, + 23030, + 23031, + 23032, + 23033, + 23034, + 23035, + 23036, + 23037, + 23038, + 23039, + 23040, + 23041, + 23042, + 23043, + 23044, + 23045, + 23046, + 23047, + 23048, + 23049, + 23050, + 23051, + 23052, + 23053, + 23054, + 23055, + 23056, + 23057, + 23058, + 23059, + 23060, + 23061, + 23062, + 23063, + 23064, + 23065, + 23066, + 23067, + 23068, + 23069, + 23070, + 23071, + 23072, + 23073, + 23074, + 23075, + 23076, + 23077, + 23078, + 23079, + 23080, + 23081, + 23082, + 23083, + 23084, + 23085, + 23086, + 23087, + 23088, + 23089, + 23090, + 23091, + 23092, + 23093, + 23094, + 23095, + 23096, + 23097, + 23098, + 23099, + 23100, + 23101, + 23102, + 23103, + 23104, + 23105, + 23106, + 23107, + 23108, + 23109, + 23110, + 23111, + 23112, + 23113, + 23114, + 23115, + 23116, + 23117, + 23118, + 23119, + 23120, + 23121, + 23122, + 23123, + 23124, + 23125, + 23126, + 23127, + 23128, + 23129, + 23130, + 23131, + 23132, + 23133, + 23134, + 23135, + 23136, + 23137, + 23138, + 23139, + 23140, + 23141, + 23142, + 23143, + 23144, + 23145, + 23146, + 23147, + 23148, + 23149, + 23150, + 23151, + 23152, + 23153, + 23154, + 23155, + 23156, + 23157, + 23158, + 23159, + 23160, + 23161, + 23162, + 23163, + 23164, + 23165, + 23166, + 23167, + 23168, + 23169, + 23170, + 23171, + 23172, + 23173, + 23174, + 23175, + 23176, + 23177, + 23178, + 23179, + 23180, + 23181, + 23182, + 23183, + 23184, + 23185, + 23186, + 23187, + 23188, + 23189, + 23190, + 23191, + 23192, + 23193, + 23194, + 23195, + 23196, + 23197, + 23198, + 23199, + 23200, + 23201, + 23202, + 23203, + 23204, + 23205, + 23206, + 23207, + 23208, + 23209, + 
23210, + 23211, + 23212, + 23213, + 23214, + 23215, + 23216, + 23217, + 23218, + 23219, + 23220, + 23221, + 23222, + 23223, + 23224, + 23225, + 23226, + 23227, + 23228, + 23229, + 23230, + 23231, + 23232, + 23233, + 23234, + 23235, + 23236, + 23237, + 23238, + 23239, + 23240, + 23241, + 23242, + 23243, + 23244, + 23245, + 23246, + 23247, + 23248, + 23249, + 23250, + 23251, + 23252, + 23253, + 23254, + 23255, + 23256, + 23257, + 23258, + 23259, + 23260, + 23261, + 23262, + 23263, + 23264, + 23265, + 23266, + 23267, + 23268, + 23269, + 23270, + 23271, + 23272, + 23273, + 23274, + 23275, + 23276, + 23277, + 23278, + 23279, + 23280, + 23281, + 23282, + 23283, + 23284, + 23285, + 23286, + 23287, + 23288, + 23289, + 23290, + 23291, + 23292, + 23293, + 23294, + 23295, + 23296, + 23297, + 23298, + 23299, + 23300, + 23301, + 23302, + 23303, + 23304, + 23305, + 23306, + 23307, + 23308, + 23309, + 23310, + 23311, + 23312, + 23313, + 23314, + 23315, + 23316, + 23317, + 23318, + 23319, + 23320, + 23321, + 23322, + 23323, + 23324, + 23325, + 23326, + 23327, + 23328, + 23329, + 23330, + 23331, + 23332, + 23333, + 23334, + 23335, + 23336, + 23337, + 23338, + 23339, + 23340, + 23341, + 23342, + 23343, + 23344, + 23345, + 23346, + 23347, + 23348, + 23349, + 23350, + 23351, + 23352, + 23353, + 23354, + 23355, + 23356, + 23357, + 23358, + 23359, + 23360, + 23361, + 23362, + 23363, + 23364, + 23365, + 23366, + 23367, + 23368, + 23369, + 23370, + 23371, + 23372, + 23373, + 23374, + 23375, + 23376, + 23377, + 23378, + 23379, + 23380, + 23381, + 23382, + 23383, + 23384, + 23385, + 23386, + 23387, + 23388, + 23389, + 23390, + 23391, + 23392, + 23393, + 23394, + 23395, + 23396, + 23397, + 23398, + 23399, + 23400, + 23401, + 23402, + 23403, + 23404, + 23405, + 23406, + 23407, + 23408, + 23409, + 23410, + 23411, + 23412, + 23413, + 23414, + 23415, + 23416, + 23417, + 23418, + 23419, + 23420, + 23421, + 23422, + 23423, + 23424, + 23425, + 23426, + 23427, + 23428, + 23429, + 23430, + 23431, + 
23432, + 23433, + 23434, + 23435, + 23436, + 23437, + 23438, + 23439, + 23440, + 23441, + 23442, + 23443, + 23444, + 23445, + 23446, + 23447, + 23448, + 23449, + 23450, + 23451, + 23452, + 23453, + 23454, + 23455, + 23456, + 23457, + 23458, + 23459, + 23460, + 23461, + 23462, + 23463, + 23464, + 23465, + 23466, + 23467, + 23468, + 23469, + 23470, + 23471, + 23472, + 23473, + 23474, + 23475, + 23476, + 23477, + 23478, + 23479, + 23480, + 23481, + 23482, + 23483, + 23484, + 23485, + 23486, + 23487, + 23488, + 23489, + 23490, + 23491, + 23492, + 23493, + 23494, + 23495, + 23496, + 23497, + 23498, + 23499, + 23500, + 23501, + 23502, + 23503, + 23504, + 23505, + 23506, + 23507, + 23508, + 23509, + 23510, + 23511, + 23512, + 23513, + 23514, + 23515, + 23516, + 23517, + 23518, + 23519, + 23520, + 23521, + 23522, + 23523, + 23524, + 23525, + 23526, + 23527, + 23528, + 23529, + 23530, + 23531, + 23532, + 23533, + 23534, + 23535, + 23536, + 23537, + 23538, + 23539, + 23540, + 23541, + 23542, + 23543, + 23544, + 23545, + 23546, + 23547, + 23548, + 23549, + 23550, + 23551, + 23552, + 23553, + 23554, + 23555, + 23556, + 23557, + 23558, + 23559, + 23560, + 23561, + 23562, + 23563, + 23564, + 23565, + 23566, + 23567, + 23568, + 23569, + 23570, + 23571, + 23572, + 23573, + 23574, + 23575, + 23576, + 23577, + 23578, + 23579, + 23580, + 23581, + 23582, + 23583, + 23584, + 23585, + 23586, + 23587, + 23588, + 23589, + 23590, + 23591, + 23592, + 23593, + 23594, + 23595, + 23596, + 23597, + 23598, + 23599, + 23600, + 23601, + 23602, + 23603, + 23604, + 23605, + 23606, + 23607, + 23608, + 23609, + 23610, + 23611, + 23612, + 23613, + 23614, + 23615, + 23616, + 23617, + 23618, + 23619, + 23620, + 23621, + 23622, + 23623, + 23624, + 23625, + 23626, + 23627, + 23628, + 23629, + 23630, + 23631, + 23632, + 23633, + 23634, + 23635, + 23636, + 23637, + 23638, + 23639, + 23640, + 23641, + 23642, + 23643, + 23644, + 23645, + 23646, + 23647, + 23648, + 23649, + 23650, + 23651, + 23652, + 23653, + 
23654, + 23655, + 23656, + 23657, + 23658, + 23659, + 23660, + 23661, + 23662, + 23663, + 23664, + 23665, + 23666, + 23667, + 23668, + 23669, + 23670, + 23671, + 23672, + 23673, + 23674, + 23675, + 23676, + 23677, + 23678, + 23679, + 23680, + 23681, + 23682, + 23683, + 23684, + 23685, + 23686, + 23687, + 23688, + 23689, + 23690, + 23691, + 23692, + 23693, + 23694, + 23695, + 23696, + 23697, + 23698, + 23699, + 23700, + 23701, + 23702, + 23703, + 23704, + 23705, + 23706, + 23707, + 23708, + 23709, + 23710, + 23711, + 23712, + 23713, + 23714, + 23715, + 23716, + 23717, + 23718, + 23719, + 23720, + 23721, + 23722, + 23723, + 23724, + 23725, + 23726, + 23727, + 23728, + 23729, + 23730, + 23731, + 23732, + 23733, + 23734, + 23735, + 23736, + 23737, + 23738, + 23739, + 23740, + 23741, + 23742, + 23743, + 23744, + 23745, + 23746, + 23747, + 23748, + 23749, + 23750, + 23751, + 23752, + 23753, + 23754, + 23755, + 23756, + 23757, + 23758, + 23759, + 23760, + 23761, + 23762, + 23763, + 23764, + 23765, + 23766, + 23767, + 23768, + 23769, + 23770, + 23771, + 23772, + 23773, + 23774, + 23775, + 23776, + 23777, + 23778, + 23779, + 23780, + 23781, + 23782, + 23783, + 23784, + 23785, + 23786, + 23787, + 23788, + 23789, + 23790, + 23791, + 23792, + 23793, + 23794, + 23795, + 23796, + 23797, + 23798, + 23799, + 23800, + 23801, + 23802, + 23803, + 23804, + 23805, + 23806, + 23807, + 23808, + 23809, + 23810, + 23811, + 23812, + 23813, + 23814, + 23815, + 23816, + 23817, + 23818, + 23819, + 23820, + 23821, + 23822, + 23823, + 23824, + 23825, + 23826, + 23827, + 23828, + 23829, + 23830, + 23831, + 23832, + 23833, + 23834, + 23835, + 23836, + 23837, + 23838, + 23839, + 23840, + 23841, + 23842, + 23843, + 23844, + 23845, + 23846, + 23847, + 23848, + 23849, + 23850, + 23851, + 23852, + 23853, + 23854, + 23855, + 23856, + 23857, + 23858, + 23859, + 23860, + 23861, + 23862, + 23863, + 23864, + 23865, + 23866, + 23867, + 23868, + 23869, + 23870, + 23871, + 23872, + 23873, + 23874, + 23875, + 
23876, + 23877, + 23878, + 23879, + 23880, + 23881, + 23882, + 23883, + 23884, + 23885, + 23886, + 23887, + 23888, + 23889, + 23890, + 23891, + 23892, + 23893, + 23894, + 23895, + 23896, + 23897, + 23898, + 23899, + 23900, + 23901, + 23902, + 23903, + 23904, + 23905, + 23906, + 23907, + 23908, + 23909, + 23910, + 23911, + 23912, + 23913, + 23914, + 23915, + 23916, + 23917, + 23918, + 23919, + 23920, + 23921, + 23922, + 23923, + 23924, + 23925, + 23926, + 23927, + 23928, + 23929, + 23930, + 23931, + 23932, + 23933, + 23934, + 23935, + 23936, + 23937, + 23938, + 23939, + 23940, + 23941, + 23942, + 23943, + 23944, + 23945, + 23946, + 23947, + 23948, + 23949, + 23950, + 23951, + 23952, + 23953, + 23954, + 23955, + 23956, + 23957, + 23958, + 23959, + 23960, + 23961, + 23962, + 23963, + 23964, + 23965, + 23966, + 23967, + 23968, + 23969, + 23970, + 23971, + 23972, + 23973, + 23974, + 23975, + 23976, + 23977, + 23978, + 23979, + 23980, + 23981, + 23982, + 23983, + 23984, + 23985, + 23986, + 23987, + 23988, + 23989, + 23990, + 23991, + 23992, + 23993, + 23994, + 23995, + 23996, + 23997, + 23998, + 23999, + 24000, + 24001, + 24002, + 24003, + 24004, + 24005, + 24006, + 24007, + 24008, + 24009, + 24010, + 24011, + 24012, + 24013, + 24014, + 24015, + 24016, + 24017, + 24018, + 24019, + 24020, + 24021, + 24022, + 24023, + 24024, + 24025, + 24026, + 24027, + 24028, + 24029, + 24030, + 24031, + 24032, + 24033, + 24034, + 24035, + 24036, + 24037, + 24038, + 24039, + 24040, + 24041, + 24042, + 24043, + 24044, + 24045, + 24046, + 24047, + 24048, + 24049, + 24050, + 24051, + 24052, + 24053, + 24054, + 24055, + 24056, + 24057, + 24058, + 24059, + 24060, + 24061, + 24062, + 24063, + 24064, + 24065, + 24066, + 24067, + 24068, + 24069, + 24070, + 24071, + 24072, + 24073, + 24074, + 24075, + 24076, + 24077, + 24078, + 24079, + 24080, + 24081, + 24082, + 24083, + 24084, + 24085, + 24086, + 24087, + 24088, + 24089, + 24090, + 24091, + 24092, + 24093, + 24094, + 24095, + 24096, + 24097, + 
24098, + 24099, + 24100, + 24101, + 24102, + 24103, + 24104, + 24105, + 24106, + 24107, + 24108, + 24109, + 24110, + 24111, + 24112, + 24113, + 24114, + 24115, + 24116, + 24117, + 24118, + 24119, + 24120, + 24121, + 24122, + 24123, + 24124, + 24125, + 24126, + 24127, + 24128, + 24129, + 24130, + 24131, + 24132, + 24133, + 24134, + 24135, + 24136, + 24137, + 24138, + 24139, + 24140, + 24141, + 24142, + 24143, + 24144, + 24145, + 24146, + 24147, + 24148, + 24149, + 24150, + 24151, + 24152, + 24153, + 24154, + 24155, + 24156, + 24157, + 24158, + 24159, + 24160, + 24161, + 24162, + 24163, + 24164, + 24165, + 24166, + 24167, + 24168, + 24169, + 24170, + 24171, + 24172, + 24173, + 24174, + 24175, + 24176, + 24177, + 24178, + 24179, + 24180, + 24181, + 24182, + 24183, + 24184, + 24185, + 24186, + 24187, + 24188, + 24189, + 24190, + 24191, + 24192, + 24193, + 24194, + 24195, + 24196, + 24197, + 24198, + 24199, + 24200, + 24201, + 24202, + 24203, + 24204, + 24205, + 24206, + 24207, + 24208, + 24209, + 24210, + 24211, + 24212, + 24213, + 24214, + 24215, + 24216, + 24217, + 24218, + 24219, + 24220, + 24221, + 24222, + 24223, + 24224, + 24225, + 24226, + 24227, + 24228, + 24229, + 24230, + 24231, + 24232, + 24233, + 24234, + 24235, + 24236, + 24237, + 24238, + 24239, + 24240, + 24241, + 24242, + 24243, + 24244, + 24245, + 24246, + 24247, + 24248, + 24249, + 24250, + 24251, + 24252, + 24253, + 24254, + 24255, + 24256, + 24257, + 24258, + 24259, + 24260, + 24261, + 24262, + 24263, + 24264, + 24265, + 24266, + 24267, + 24268, + 24269, + 24270, + 24271, + 24272, + 24273, + 24274, + 24275, + 24276, + 24277, + 24278, + 24279, + 24280, + 24281, + 24282, + 24283, + 24284, + 24285, + 24286, + 24287, + 24288, + 24289, + 24290, + 24291, + 24292, + 24293, + 24294, + 24295, + 24296, + 24297, + 24298, + 24299, + 24300, + 24301, + 24302, + 24303, + 24304, + 24305, + 24306, + 24307, + 24308, + 24309, + 24310, + 24311, + 24312, + 24313, + 24314, + 24315, + 24316, + 24317, + 24318, + 24319, + 
24320, + 24321, + 24322, + 24323, + 24324, + 24325, + 24326, + 24327, + 24328, + 24329, + 24330, + 24331, + 24332, + 24333, + 24334, + 24335, + 24336, + 24337, + 24338, + 24339, + 24340, + 24341, + 24342, + 24343, + 24344, + 24345, + 24346, + 24347, + 24348, + 24349, + 24350, + 24351, + 24352, + 24353, + 24354, + 24355, + 24356, + 24357, + 24358, + 24359, + 24360, + 24361, + 24362, + 24363, + 24364, + 24365, + 24366, + 24367, + 24368, + 24369, + 24370, + 24371, + 24372, + 24373, + 24374, + 24375, + 24376, + 24377, + 24378, + 24379, + 24380, + 24381, + 24382, + 24383, + 24384, + 24385, + 24386, + 24387, + 24388, + 24389, + 24390, + 24391, + 24392, + 24393, + 24394, + 24395, + 24396, + 24397, + 24398, + 24399, + 24400, + 24401, + 24402, + 24403, + 24404, + 24405, + 24406, + 24407, + 24408, + 24409, + 24410, + 24411, + 24412, + 24413, + 24414, + 24415, + 24416, + 24417, + 24418, + 24419, + 24420, + 24421, + 24422, + 24423, + 24424, + 24425, + 24426, + 24427, + 24428, + 24429, + 24430, + 24431, + 24432, + 24433, + 24434, + 24435, + 24436, + 24437, + 24438, + 24439, + 24440, + 24441, + 24442, + 24443, + 24444, + 24445, + 24446, + 24447, + 24448, + 24449, + 24450, + 24451, + 24452, + 24453, + 24454, + 24455, + 24456, + 24457, + 24458, + 24459, + 24460, + 24461, + 24462, + 24463, + 24464, + 24465, + 24466, + 24467, + 24468, + 24469, + 24470, + 24471, + 24472, + 24473, + 24474, + 24475, + 24476, + 24477, + 24478, + 24479, + 24480, + 24481, + 24482, + 24483, + 24484, + 24485, + 24486, + 24487, + 24488, + 24489, + 24490, + 24491, + 24492, + 24493, + 24494, + 24495, + 24496, + 24497, + 24498, + 24499, + 24500, + 24501, + 24502, + 24503, + 24504, + 24505, + 24506, + 24507, + 24508, + 24509, + 24510, + 24511, + 24512, + 24513, + 24514, + 24515, + 24516, + 24517, + 24518, + 24519, + 24520, + 24521, + 24522, + 24523, + 24524, + 24525, + 24526, + 24527, + 24528, + 24529, + 24530, + 24531, + 24532, + 24533, + 24534, + 24535, + 24536, + 24537, + 24538, + 24539, + 24540, + 24541, + 
24542, + 24543, + 24544, + 24545, + 24546, + 24547, + 24548, + 24549, + 24550, + 24551, + 24552, + 24553, + 24554, + 24555, + 24556, + 24557, + 24558, + 24559, + 24560, + 24561, + 24562, + 24563, + 24564, + 24565, + 24566, + 24567, + 24568, + 24569, + 24570, + 24571, + 24572, + 24573, + 24574, + 24575, + 24576, + 24577, + 24578, + 24579, + 24580, + 24581, + 24582, + 24583, + 24584, + 24585, + 24586, + 24587, + 24588, + 24589, + 24590, + 24591, + 24592, + 24593, + 24594, + 24595, + 24596, + 24597, + 24598, + 24599, + 24600, + 24601, + 24602, + 24603, + 24604, + 24605, + 24606, + 24607, + 24608, + 24609, + 24610, + 24611, + 24612, + 24613, + 24614, + 24615, + 24616, + 24617, + 24618, + 24619, + 24620, + 24621, + 24622, + 24623, + 24624, + 24625, + 24626, + 24627, + 24628, + 24629, + 24630, + 24631, + 24632, + 24633, + 24634, + 24635, + 24636, + 24637, + 24638, + 24639, + 24640, + 24641, + 24642, + 24643, + 24644, + 24645, + 24646, + 24647, + 24648, + 24649, + 24650, + 24651, + 24652, + 24653, + 24654, + 24655, + 24656, + 24657, + 24658, + 24659, + 24660, + 24661, + 24662, + 24663, + 24664, + 24665, + 24666, + 24667, + 24668, + 24669, + 24670, + 24671, + 24672, + 24673, + 24674, + 24675, + 24676, + 24677, + 24678, + 24679, + 24680, + 24681, + 24682, + 24683, + 24684, + 24685, + 24686, + 24687, + 24688, + 24689, + 24690, + 24691, + 24692, + 24693, + 24694, + 24695, + 24696, + 24697, + 24698, + 24699, + 24700, + 24701, + 24702, + 24703, + 24704, + 24705, + 24706, + 24707, + 24708, + 24709, + 24710, + 24711, + 24712, + 24713, + 24714, + 24715, + 24716, + 24717, + 24718, + 24719, + 24720, + 24721, + 24722, + 24723, + 24724, + 24725, + 24726, + 24727, + 24728, + 24729, + 24730, + 24731, + 24732, + 24733, + 24734, + 24735, + 24736, + 24737, + 24738, + 24739, + 24740, + 24741, + 24742, + 24743, + 24744, + 24745, + 24746, + 24747, + 24748, + 24749, + 24750, + 24751, + 24752, + 24753, + 24754, + 24755, + 24756, + 24757, + 24758, + 24759, + 24760, + 24761, + 24762, + 24763, + 
24764, + 24765, + 24766, + 24767, + 24768, + 24769, + 24770, + 24771, + 24772, + 24773, + 24774, + 24775, + 24776, + 24777, + 24778, + 24779, + 24780, + 24781, + 24782, + 24783, + 24784, + 24785, + 24786, + 24787, + 24788, + 24789, + 24790, + 24791, + 24792, + 24793, + 24794, + 24795, + 24796, + 24797, + 24798, + 24799, + 24800, + 24801, + 24802, + 24803, + 24804, + 24805, + 24806, + 24807, + 24808, + 24809, + 24810, + 24811, + 24812, + 24813, + 24814, + 24815, + 24816, + 24817, + 24818, + 24819, + 24820, + 24821, + 24822, + 24823, + 24824, + 24825, + 24826, + 24827, + 24828, + 24829, + 24830, + 24831, + 24832, + 24833, + 24834, + 24835, + 24836, + 24837, + 24838, + 24839, + 24840, + 24841, + 24842, + 24843, + 24844, + 24845, + 24846, + 24847, + 24848, + 24849, + 24850, + 24851, + 24852, + 24853, + 24854, + 24855, + 24856, + 24857, + 24858, + 24859, + 24860, + 24861, + 24862, + 24863, + 24864, + 24865, + 24866, + 24867, + 24868, + 24869, + 24870, + 24871, + 24872, + 24873, + 24874, + 24875, + 24876, + 24877, + 24878, + 24879, + 24880, + 24881, + 24882, + 24883, + 24884, + 24885, + 24886, + 24887, + 24888, + 24889, + 24890, + 24891, + 24892, + 24893, + 24894, + 24895, + 24896, + 24897, + 24898, + 24899, + 24900, + 24901, + 24902, + 24903, + 24904, + 24905, + 24906, + 24907, + 24908, + 24909, + 24910, + 24911, + 24912, + 24913, + 24914, + 24915, + 24916, + 24917, + 24918, + 24919, + 24920, + 24921, + 24922, + 24923, + 24924, + 24925, + 24926, + 24927, + 24928, + 24929, + 24930, + 24931, + 24932, + 24933, + 24934, + 24935, + 24936, + 24937, + 24938, + 24939, + 24940, + 24941, + 24942, + 24943, + 24944, + 24945, + 24946, + 24947, + 24948, + 24949, + 24950, + 24951, + 24952, + 24953, + 24954, + 24955, + 24956, + 24957, + 24958, + 24959, + 24960, + 24961, + 24962, + 24963, + 24964, + 24965, + 24966, + 24967, + 24968, + 24969, + 24970, + 24971, + 24972, + 24973, + 24974, + 24975, + 24976, + 24977, + 24978, + 24979, + 24980, + 24981, + 24982, + 24983, + 24984, + 24985, + 
24986, + 24987, + 24988, + 24989, + 24990, + 24991, + 24992, + 24993, + 24994, + 24995, + 24996, + 24997, + 24998, + 24999, + 25000, + 25001, + 25002, + 25003, + 25004, + 25005, + 25006, + 25007, + 25008, + 25009, + 25010, + 25011, + 25012, + 25013, + 25014, + 25015, + 25016, + 25017, + 25018, + 25019, + 25020, + 25021, + 25022, + 25023, + 25024, + 25025, + 25026, + 25027, + 25028, + 25029, + 25030, + 25031, + 25032, + 25033, + 25034, + 25035, + 25036, + 25037, + 25038, + 25039, + 25040, + 25041, + 25042, + 25043, + 25044, + 25045, + 25046, + 25047, + 25048, + 25049, + 25050, + 25051, + 25052, + 25053, + 25054, + 25055, + 25056, + 25057, + 25058, + 25059, + 25060, + 25061, + 25062, + 25063, + 25064, + 25065, + 25066, + 25067, + 25068, + 25069, + 25070, + 25071, + 25072, + 25073, + 25074, + 25075, + 25076, + 25077, + 25078, + 25079, + 25080, + 25081, + 25082, + 25083, + 25084, + 25085, + 25086, + 25087, + 25088, + 25089, + 25090, + 25091, + 25092, + 25093, + 25094, + 25095, + 25096, + 25097, + 25098, + 25099, + 25100, + 25101, + 25102, + 25103, + 25104, + 25105, + 25106, + 25107, + 25108, + 25109, + 25110, + 25111, + 25112, + 25113, + 25114, + 25115, + 25116, + 25117, + 25118, + 25119, + 25120, + 25121, + 25122, + 25123, + 25124, + 25125, + 25126, + 25127, + 25128, + 25129, + 25130, + 25131, + 25132, + 25133, + 25134, + 25135, + 25136, + 25137, + 25138, + 25139, + 25140, + 25141, + 25142, + 25143, + 25144, + 25145, + 25146, + 25147, + 25148, + 25149, + 25150, + 25151, + 25152, + 25153, + 25154, + 25155, + 25156, + 25157, + 25158, + 25159, + 25160, + 25161, + 25162, + 25163, + 25164, + 25165, + 25166, + 25167, + 25168, + 25169, + 25170, + 25171, + 25172, + 25173, + 25174, + 25175, + 25176, + 25177, + 25178, + 25179, + 25180, + 25181, + 25182, + 25183, + 25184, + 25185, + 25186, + 25187, + 25188, + 25189, + 25190, + 25191, + 25192, + 25193, + 25194, + 25195, + 25196, + 25197, + 25198, + 25199, + 25200, + 25201, + 25202, + 25203, + 25204, + 25205, + 25206, + 25207, + 
25208, + 25209, + 25210, + 25211, + 25212, + 25213, + 25214, + 25215, + 25216, + 25217, + 25218, + 25219, + 25220, + 25221, + 25222, + 25223, + 25224, + 25225, + 25226, + 25227, + 25228, + 25229, + 25230, + 25231, + 25232, + 25233, + 25234, + 25235, + 25236, + 25237, + 25238, + 25239, + 25240, + 25241, + 25242, + 25243, + 25244, + 25245, + 25246, + 25247, + 25248, + 25249, + 25250, + 25251, + 25252, + 25253, + 25254, + 25255, + 25256, + 25257, + 25258, + 25259, + 25260, + 25261, + 25262, + 25263, + 25264, + 25265, + 25266, + 25267, + 25268, + 25269, + 25270, + 25271, + 25272, + 25273, + 25274, + 25275, + 25276, + 25277, + 25278, + 25279, + 25280, + 25281, + 25282, + 25283, + 25284, + 25285, + 25286, + 25287, + 25288, + 25289, + 25290, + 25291, + 25292, + 25293, + 25294, + 25295, + 25296, + 25297, + 25298, + 25299, + 25300, + 25301, + 25302, + 25303, + 25304, + 25305, + 25306, + 25307, + 25308, + 25309, + 25310, + 25311, + 25312, + 25313, + 25314, + 25315, + 25316, + 25317, + 25318, + 25319, + 25320, + 25321, + 25322, + 25323, + 25324, + 25325, + 25326, + 25327, + 25328, + 25329, + 25330, + 25331, + 25332, + 25333, + 25334, + 25335, + 25336, + 25337, + 25338, + 25339, + 25340, + 25341, + 25342, + 25343, + 25344, + 25345, + 25346, + 25347, + 25348, + 25349, + 25350, + 25351, + 25352, + 25353, + 25354, + 25355, + 25356, + 25357, + 25358, + 25359, + 25360, + 25361, + 25362, + 25363, + 25364, + 25365, + 25366, + 25367, + 25368, + 25369, + 25370, + 25371, + 25372, + 25373, + 25374, + 25375, + 25376, + 25377, + 25378, + 25379, + 25380, + 25381, + 25382, + 25383, + 25384, + 25385, + 25386, + 25387, + 25388, + 25389, + 25390, + 25391, + 25392, + 25393, + 25394, + 25395, + 25396, + 25397, + 25398, + 25399, + 25400, + 25401, + 25402, + 25403, + 25404, + 25405, + 25406, + 25407, + 25408, + 25409, + 25410, + 25411, + 25412, + 25413, + 25414, + 25415, + 25416, + 25417, + 25418, + 25419, + 25420, + 25421, + 25422, + 25423, + 25424, + 25425, + 25426, + 25427, + 25428, + 25429, + 
25430, + 25431, + 25432, + 25433, + 25434, + 25435, + 25436, + 25437, + 25438, + 25439, + 25440, + 25441, + 25442, + 25443, + 25444, + 25445, + 25446, + 25447, + 25448, + 25449, + 25450, + 25451, + 25452, + 25453, + 25454, + 25455, + 25456, + 25457, + 25458, + 25459, + 25460, + 25461, + 25462, + 25463, + 25464, + 25465, + 25466, + 25467, + 25468, + 25469, + 25470, + 25471, + 25472, + 25473, + 25474, + 25475, + 25476, + 25477, + 25478, + 25479, + 25480, + 25481, + 25482, + 25483, + 25484, + 25485, + 25486, + 25487, + 25488, + 25489, + 25490, + 25491, + 25492, + 25493, + 25494, + 25495, + 25496, + 25497, + 25498, + 25499, + 25500, + 25501, + 25502, + 25503, + 25504, + 25505, + 25506, + 25507, + 25508, + 25509, + 25510, + 25511, + 25512, + 25513, + 25514, + 25515, + 25516, + 25517, + 25518, + 25519, + 25520, + 25521, + 25522, + 25523, + 25524, + 25525, + 25526, + 25527, + 25528, + 25529, + 25530, + 25531, + 25532, + 25533, + 25534, + 25535, + 25536, + 25537, + 25538, + 25539, + 25540, + 25541, + 25542, + 25543, + 25544, + 25545, + 25546, + 25547, + 25548, + 25549, + 25550, + 25551, + 25552, + 25553, + 25554, + 25555, + 25556, + 25557, + 25558, + 25559, + 25560, + 25561, + 25562, + 25563, + 25564, + 25565, + 25566, + 25567, + 25568, + 25569, + 25570, + 25571, + 25572, + 25573, + 25574, + 25575, + 25576, + 25577, + 25578, + 25579, + 25580, + 25581, + 25582, + 25583, + 25584, + 25585, + 25586, + 25587, + 25588, + 25589, + 25590, + 25591, + 25592, + 25593, + 25594, + 25595, + 25596, + 25597, + 25598, + 25599, + 25600, + 25601, + 25602, + 25603, + 25604, + 25605, + 25606, + 25607, + 25608, + 25609, + 25610, + 25611, + 25612, + 25613, + 25614, + 25615, + 25616, + 25617, + 25618, + 25619, + 25620, + 25621, + 25622, + 25623, + 25624, + 25625, + 25626, + 25627, + 25628, + 25629, + 25630, + 25631, + 25632, + 25633, + 25634, + 25635, + 25636, + 25637, + 25638, + 25639, + 25640, + 25641, + 25642, + 25643, + 25644, + 25645, + 25646, + 25647, + 25648, + 25649, + 25650, + 25651, + 
25652, + 25653, + 25654, + 25655, + 25656, + 25657, + 25658, + 25659, + 25660, + 25661, + 25662, + 25663, + 25664, + 25665, + 25666, + 25667, + 25668, + 25669, + 25670, + 25671, + 25672, + 25673, + 25674, + 25675, + 25676, + 25677, + 25678, + 25679, + 25680, + 25681, + 25682, + 25683, + 25684, + 25685, + 25686, + 25687, + 25688, + 25689, + 25690, + 25691, + 25692, + 25693, + 25694, + 25695, + 25696, + 25697, + 25698, + 25699, + 25700, + 25701, + 25702, + 25703, + 25704, + 25705, + 25706, + 25707, + 25708, + 25709, + 25710, + 25711, + 25712, + 25713, + 25714, + 25715, + 25716, + 25717, + 25718, + 25719, + 25720, + 25721, + 25722, + 25723, + 25724, + 25725, + 25726, + 25727, + 25728, + 25729, + 25730, + 25731, + 25732, + 25733, + 25734, + 25735, + 25736, + 25737, + 25738, + 25739, + 25740, + 25741, + 25742, + 25743, + 25744, + 25745, + 25746, + 25747, + 25748, + 25749, + 25750, + 25751, + 25752, + 25753, + 25754, + 25755, + 25756, + 25757, + 25758, + 25759, + 25760, + 25761, + 25762, + 25763, + 25764, + 25765, + 25766, + 25767, + 25768, + 25769, + 25770, + 25771, + 25772, + 25773, + 25774, + 25775, + 25776, + 25777, + 25778, + 25779, + 25780, + 25781, + 25782, + 25783, + 25784, + 25785, + 25786, + 25787, + 25788, + 25789, + 25790, + 25791, + 25792, + 25793, + 25794, + 25795, + 25796, + 25797, + 25798, + 25799, + 25800, + 25801, + 25802, + 25803, + 25804, + 25805, + 25806, + 25807, + 25808, + 25809, + 25810, + 25811, + 25812, + 25813, + 25814, + 25815, + 25816, + 25817, + 25818, + 25819, + 25820, + 25821, + 25822, + 25823, + 25824, + 25825, + 25826, + 25827, + 25828, + 25829, + 25830, + 25831, + 25832, + 25833, + 25834, + 25835, + 25836, + 25837, + 25838, + 25839, + 25840, + 25841, + 25842, + 25843, + 25844, + 25845, + 25846, + 25847, + 25848, + 25849, + 25850, + 25851, + 25852, + 25853, + 25854, + 25855, + 25856, + 25857, + 25858, + 25859, + 25860, + 25861, + 25862, + 25863, + 25864, + 25865, + 25866, + 25867, + 25868, + 25869, + 25870, + 25871, + 25872, + 25873, + 
25874, + 25875, + 25876, + 25877, + 25878, + 25879, + 25880, + 25881, + 25882, + 25883, + 25884, + 25885, + 25886, + 25887, + 25888, + 25889, + 25890, + 25891, + 25892, + 25893, + 25894, + 25895, + 25896, + 25897, + 25898, + 25899, + 25900, + 25901, + 25902, + 25903, + 25904, + 25905, + 25906, + 25907, + 25908, + 25909, + 25910, + 25911, + 25912, + 25913, + 25914, + 25915, + 25916, + 25917, + 25918, + 25919, + 25920, + 25921, + 25922, + 25923, + 25924, + 25925, + 25926, + 25927, + 25928, + 25929, + 25930, + 25931, + 25932, + 25933, + 25934, + 25935, + 25936, + 25937, + 25938, + 25939, + 25940, + 25941, + 25942, + 25943, + 25944, + 25945, + 25946, + 25947, + 25948, + 25949, + 25950, + 25951, + 25952, + 25953, + 25954, + 25955, + 25956, + 25957, + 25958, + 25959, + 25960, + 25961, + 25962, + 25963, + 25964, + 25965, + 25966, + 25967, + 25968, + 25969, + 25970, + 25971, + 25972, + 25973, + 25974, + 25975, + 25976, + 25977, + 25978, + 25979, + 25980, + 25981, + 25982, + 25983, + 25984, + 25985, + 25986, + 25987, + 25988, + 25989, + 25990, + 25991, + 25992, + 25993, + 25994, + 25995, + 25996, + 25997, + 25998, + 25999, + 26000, + 26001, + 26002, + 26003, + 26004, + 26005, + 26006, + 26007, + 26008, + 26009, + 26010, + 26011, + 26012, + 26013, + 26014, + 26015, + 26016, + 26017, + 26018, + 26019, + 26020, + 26021, + 26022, + 26023, + 26024, + 26025, + 26026, + 26027, + 26028, + 26029, + 26030, + 26031, + 26032, + 26033, + 26034, + 26035, + 26036, + 26037, + 26038, + 26039, + 26040, + 26041, + 26042, + 26043, + 26044, + 26045, + 26046, + 26047, + 26048, + 26049, + 26050, + 26051, + 26052, + 26053, + 26054, + 26055, + 26056, + 26057, + 26058, + 26059, + 26060, + 26061, + 26062, + 26063, + 26064, + 26065, + 26066, + 26067, + 26068, + 26069, + 26070, + 26071, + 26072, + 26073, + 26074, + 26075, + 26076, + 26077, + 26078, + 26079, + 26080, + 26081, + 26082, + 26083, + 26084, + 26085, + 26086, + 26087, + 26088, + 26089, + 26090, + 26091, + 26092, + 26093, + 26094, + 26095, + 
26096, + 26097, + 26098, + 26099, + 26100, + 26101, + 26102, + 26103, + 26104, + 26105, + 26106, + 26107, + 26108, + 26109, + 26110, + 26111, + 26112, + 26113, + 26114, + 26115, + 26116, + 26117, + 26118, + 26119, + 26120, + 26121, + 26122, + 26123, + 26124, + 26125, + 26126, + 26127, + 26128, + 26129, + 26130, + 26131, + 26132, + 26133, + 26134, + 26135, + 26136, + 26137, + 26138, + 26139, + 26140, + 26141, + 26142, + 26143, + 26144, + 26145, + 26146, + 26147, + 26148, + 26149, + 26150, + 26151, + 26152, + 26153, + 26154, + 26155, + 26156, + 26157, + 26158, + 26159, + 26160, + 26161, + 26162, + 26163, + 26164, + 26165, + 26166, + 26167, + 26168, + 26169, + 26170, + 26171, + 26172, + 26173, + 26174, + 26175, + 26176, + 26177, + 26178, + 26179, + 26180, + 26181, + 26182, + 26183, + 26184, + 26185, + 26186, + 26187, + 26188, + 26189, + 26190, + 26191, + 26192, + 26193, + 26194, + 26195, + 26196, + 26197, + 26198, + 26199, + 26200, + 26201, + 26202, + 26203, + 26204, + 26205, + 26206, + 26207, + 26208, + 26209, + 26210, + 26211, + 26212, + 26213, + 26214, + 26215, + 26216, + 26217, + 26218, + 26219, + 26220, + 26221, + 26222, + 26223, + 26224, + 26225, + 26226, + 26227, + 26228, + 26229, + 26230, + 26231, + 26232, + 26233, + 26234, + 26235, + 26236, + 26237, + 26238, + 26239, + 26240, + 26241, + 26242, + 26243, + 26244, + 26245, + 26246, + 26247, + 26248, + 26249, + 26250, + 26251, + 26252, + 26253, + 26254, + 26255, + 26256, + 26257, + 26258, + 26259, + 26260, + 26261, + 26262, + 26263, + 26264, + 26265, + 26266, + 26267, + 26268, + 26269, + 26270, + 26271, + 26272, + 26273, + 26274, + 26275, + 26276, + 26277, + 26278, + 26279, + 26280, + 26281, + 26282, + 26283, + 26284, + 26285, + 26286, + 26287, + 26288, + 26289, + 26290, + 26291, + 26292, + 26293, + 26294, + 26295, + 26296, + 26297, + 26298, + 26299, + 26300, + 26301, + 26302, + 26303, + 26304, + 26305, + 26306, + 26307, + 26308, + 26309, + 26310, + 26311, + 26312, + 26313, + 26314, + 26315, + 26316, + 26317, + 
26318, + 26319, + 26320, + 26321, + 26322, + 26323, + 26324, + 26325, + 26326, + 26327, + 26328, + 26329, + 26330, + 26331, + 26332, + 26333, + 26334, + 26335, + 26336, + 26337, + 26338, + 26339, + 26340, + 26341, + 26342, + 26343, + 26344, + 26345, + 26346, + 26347, + 26348, + 26349, + 26350, + 26351, + 26352, + 26353, + 26354, + 26355, + 26356, + 26357, + 26358, + 26359, + 26360, + 26361, + 26362, + 26363, + 26364, + 26365, + 26366, + 26367, + 26368, + 26369, + 26370, + 26371, + 26372, + 26373, + 26374, + 26375, + 26376, + 26377, + 26378, + 26379, + 26380, + 26381, + 26382, + 26383, + 26384, + 26385, + 26386, + 26387, + 26388, + 26389, + 26390, + 26391, + 26392, + 26393, + 26394, + 26395, + 26396, + 26397, + 26398, + 26399, + 26400, + 26401, + 26402, + 26403, + 26404, + 26405, + 26406, + 26407, + 26408, + 26409, + 26410, + 26411, + 26412, + 26413, + 26414, + 26415, + 26416, + 26417, + 26418, + 26419, + 26420, + 26421, + 26422, + 26423, + 26424, + 26425, + 26426, + 26427, + 26428, + 26429, + 26430, + 26431, + 26432, + 26433, + 26434, + 26435, + 26436, + 26437, + 26438, + 26439, + 26440, + 26441, + 26442, + 26443, + 26444, + 26445, + 26446, + 26447, + 26448, + 26449, + 26450, + 26451, + 26452, + 26453, + 26454, + 26455, + 26456, + 26457, + 26458, + 26459, + 26460, + 26461, + 26462, + 26463, + 26464, + 26465, + 26466, + 26467, + 26468, + 26469, + 26470, + 26471, + 26472, + 26473, + 26474, + 26475, + 26476, + 26477, + 26478, + 26479, + 26480, + 26481, + 26482, + 26483, + 26484, + 26485, + 26486, + 26487, + 26488, + 26489, + 26490, + 26491, + 26492, + 26493, + 26494, + 26495, + 26496, + 26497, + 26498, + 26499, + 26500, + 26501, + 26502, + 26503, + 26504, + 26505, + 26506, + 26507, + 26508, + 26509, + 26510, + 26511, + 26512, + 26513, + 26514, + 26515, + 26516, + 26517, + 26518, + 26519, + 26520, + 26521, + 26522, + 26523, + 26524, + 26525, + 26526, + 26527, + 26528, + 26529, + 26530, + 26531, + 26532, + 26533, + 26534, + 26535, + 26536, + 26537, + 26538, + 26539, + 
26540, + 26541, + 26542, + 26543, + 26544, + 26545, + 26546, + 26547, + 26548, + 26549, + 26550, + 26551, + 26552, + 26553, + 26554, + 26555, + 26556, + 26557, + 26558, + 26559, + 26560, + 26561, + 26562, + 26563, + 26564, + 26565, + 26566, + 26567, + 26568, + 26569, + 26570, + 26571, + 26572, + 26573, + 26574, + 26575, + 26576, + 26577, + 26578, + 26579, + 26580, + 26581, + 26582, + 26583, + 26584, + 26585, + 26586, + 26587, + 26588, + 26589, + 26590, + 26591, + 26592, + 26593, + 26594, + 26595, + 26596, + 26597, + 26598, + 26599, + 26600, + 26601, + 26602, + 26603, + 26604, + 26605, + 26606, + 26607, + 26608, + 26609, + 26610, + 26611, + 26612, + 26613, + 26614, + 26615, + 26616, + 26617, + 26618, + 26619, + 26620, + 26621, + 26622, + 26623, + 26624, + 26625, + 26626, + 26627, + 26628, + 26629, + 26630, + 26631, + 26632, + 26633, + 26634, + 26635, + 26636, + 26637, + 26638, + 26639, + 26640, + 26641, + 26642, + 26643, + 26644, + 26645, + 26646, + 26647, + 26648, + 26649, + 26650, + 26651, + 26652, + 26653, + 26654, + 26655, + 26656, + 26657, + 26658, + 26659, + 26660, + 26661, + 26662, + 26663, + 26664, + 26665, + 26666, + 26667, + 26668, + 26669, + 26670, + 26671, + 26672, + 26673, + 26674, + 26675, + 26676, + 26677, + 26678, + 26679, + 26680, + 26681, + 26682, + 26683, + 26684, + 26685, + 26686, + 26687, + 26688, + 26689, + 26690, + 26691, + 26692, + 26693, + 26694, + 26695, + 26696, + 26697, + 26698, + 26699, + 26700, + 26701, + 26702, + 26703, + 26704, + 26705, + 26706, + 26707, + 26708, + 26709, + 26710, + 26711, + 26712, + 26713, + 26714, + 26715, + 26716, + 26717, + 26718, + 26719, + 26720, + 26721, + 26722, + 26723, + 26724, + 26725, + 26726, + 26727, + 26728, + 26729, + 26730, + 26731, + 26732, + 26733, + 26734, + 26735, + 26736, + 26737, + 26738, + 26739, + 26740, + 26741, + 26742, + 26743, + 26744, + 26745, + 26746, + 26747, + 26748, + 26749, + 26750, + 26751, + 26752, + 26753, + 26754, + 26755, + 26756, + 26757, + 26758, + 26759, + 26760, + 26761, + 
26762, + 26763, + 26764, + 26765, + 26766, + 26767, + 26768, + 26769, + 26770, + 26771, + 26772, + 26773, + 26774, + 26775, + 26776, + 26777, + 26778, + 26779, + 26780, + 26781, + 26782, + 26783, + 26784, + 26785, + 26786, + 26787, + 26788, + 26789, + 26790, + 26791, + 26792, + 26793, + 26794, + 26795, + 26796, + 26797, + 26798, + 26799, + 26800, + 26801, + 26802, + 26803, + 26804, + 26805, + 26806, + 26807, + 26808, + 26809, + 26810, + 26811, + 26812, + 26813, + 26814, + 26815, + 26816, + 26817, + 26818, + 26819, + 26820, + 26821, + 26822, + 26823, + 26824, + 26825, + 26826, + 26827, + 26828, + 26829, + 26830, + 26831, + 26832, + 26833, + 26834, + 26835, + 26836, + 26837, + 26838, + 26839, + 26840, + 26841, + 26842, + 26843, + 26844, + 26845, + 26846, + 26847, + 26848, + 26849, + 26850, + 26851, + 26852, + 26853, + 26854, + 26855, + 26856, + 26857, + 26858, + 26859, + 26860, + 26861, + 26862, + 26863, + 26864, + 26865, + 26866, + 26867, + 26868, + 26869, + 26870, + 26871, + 26872, + 26873, + 26874, + 26875, + 26876, + 26877, + 26878, + 26879, + 26880, + 26881, + 26882, + 26883, + 26884, + 26885, + 26886, + 26887, + 26888, + 26889, + 26890, + 26891, + 26892, + 26893, + 26894, + 26895, + 26896, + 26897, + 26898, + 26899, + 26900, + 26901, + 26902, + 26903, + 26904, + 26905, + 26906, + 26907, + 26908, + 26909, + 26910, + 26911, + 26912, + 26913, + 26914, + 26915, + 26916, + 26917, + 26918, + 26919, + 26920, + 26921, + 26922, + 26923, + 26924, + 26925, + 26926, + 26927, + 26928, + 26929, + 26930, + 26931, + 26932, + 26933, + 26934, + 26935, + 26936, + 26937, + 26938, + 26939, + 26940, + 26941, + 26942, + 26943, + 26944, + 26945, + 26946, + 26947, + 26948, + 26949, + 26950, + 26951, + 26952, + 26953, + 26954, + 26955, + 26956, + 26957, + 26958, + 26959, + 26960, + 26961, + 26962, + 26963, + 26964, + 26965, + 26966, + 26967, + 26968, + 26969, + 26970, + 26971, + 26972, + 26973, + 26974, + 26975, + 26976, + 26977, + 26978, + 26979, + 26980, + 26981, + 26982, + 26983, + 
26984, + 26985, + 26986, + 26987, + 26988, + 26989, + 26990, + 26991, + 26992, + 26993, + 26994, + 26995, + 26996, + 26997, + 26998, + 26999, + 27000, + 27001, + 27002, + 27003, + 27004, + 27005, + 27006, + 27007, + 27008, + 27009, + 27010, + 27011, + 27012, + 27013, + 27014, + 27015, + 27016, + 27017, + 27018, + 27019, + 27020, + 27021, + 27022, + 27023, + 27024, + 27025, + 27026, + 27027, + 27028, + 27029, + 27030, + 27031, + 27032, + 27033, + 27034, + 27035, + 27036, + 27037, + 27038, + 27039, + 27040, + 27041, + 27042, + 27043, + 27044, + 27045, + 27046, + 27047, + 27048, + 27049, + 27050, + 27051, + 27052, + 27053, + 27054, + 27055, + 27056, + 27057, + 27058, + 27059, + 27060, + 27061, + 27062, + 27063, + 27064, + 27065, + 27066, + 27067, + 27068, + 27069, + 27070, + 27071, + 27072, + 27073, + 27074, + 27075, + 27076, + 27077, + 27078, + 27079, + 27080, + 27081, + 27082, + 27083, + 27084, + 27085, + 27086, + 27087, + 27088, + 27089, + 27090, + 27091, + 27092, + 27093, + 27094, + 27095, + 27096, + 27097, + 27098, + 27099, + 27100, + 27101, + 27102, + 27103, + 27104, + 27105, + 27106, + 27107, + 27108, + 27109, + 27110, + 27111, + 27112, + 27113, + 27114, + 27115, + 27116, + 27117, + 27118, + 27119, + 27120, + 27121, + 27122, + 27123, + 27124, + 27125, + 27126, + 27127, + 27128, + 27129, + 27130, + 27131, + 27132, + 27133, + 27134, + 27135, + 27136, + 27137, + 27138, + 27139, + 27140, + 27141, + 27142, + 27143, + 27144, + 27145, + 27146, + 27147, + 27148, + 27149, + 27150, + 27151, + 27152, + 27153, + 27154, + 27155, + 27156, + 27157, + 27158, + 27159, + 27160, + 27161, + 27162, + 27163, + 27164, + 27165, + 27166, + 27167, + 27168, + 27169, + 27170, + 27171, + 27172, + 27173, + 27174, + 27175, + 27176, + 27177, + 27178, + 27179, + 27180, + 27181, + 27182, + 27183, + 27184, + 27185, + 27186, + 27187, + 27188, + 27189, + 27190, + 27191, + 27192, + 27193, + 27194, + 27195, + 27196, + 27197, + 27198, + 27199, + 27200, + 27201, + 27202, + 27203, + 27204, + 27205, + 
27206, + 27207, + 27208, + 27209, + 27210, + 27211, + 27212, + 27213, + 27214, + 27215, + 27216, + 27217, + 27218, + 27219, + 27220, + 27221, + 27222, + 27223, + 27224, + 27225, + 27226, + 27227, + 27228, + 27229, + 27230, + 27231, + 27232, + 27233, + 27234, + 27235, + 27236, + 27237, + 27238, + 27239, + 27240, + 27241, + 27242, + 27243, + 27244, + 27245, + 27246, + 27247, + 27248, + 27249, + 27250, + 27251, + 27252, + 27253, + 27254, + 27255, + 27256, + 27257, + 27258, + 27259, + 27260, + 27261, + 27262, + 27263, + 27264, + 27265, + 27266, + 27267, + 27268, + 27269, + 27270, + 27271, + 27272, + 27273, + 27274, + 27275, + 27276, + 27277, + 27278, + 27279, + 27280, + 27281, + 27282, + 27283, + 27284, + 27285, + 27286, + 27287, + 27288, + 27289, + 27290, + 27291, + 27292, + 27293, + 27294, + 27295, + 27296, + 27297, + 27298, + 27299, + 27300, + 27301, + 27302, + 27303, + 27304, + 27305, + 27306, + 27307, + 27308, + 27309, + 27310, + 27311, + 27312, + 27313, + 27314, + 27315, + 27316, + 27317, + 27318, + 27319, + 27320, + 27321, + 27322, + 27323, + 27324, + 27325, + 27326, + 27327, + 27328, + 27329, + 27330, + 27331, + 27332, + 27333, + 27334, + 27335, + 27336, + 27337, + 27338, + 27339, + 27340, + 27341, + 27342, + 27343, + 27344, + 27345, + 27346, + 27347, + 27348, + 27349, + 27350, + 27351, + 27352, + 27353, + 27354, + 27355, + 27356, + 27357, + 27358, + 27359, + 27360, + 27361, + 27362, + 27363, + 27364, + 27365, + 27366, + 27367, + 27368, + 27369, + 27370, + 27371, + 27372, + 27373, + 27374, + 27375, + 27376, + 27377, + 27378, + 27379, + 27380, + 27381, + 27382, + 27383, + 27384, + 27385, + 27386, + 27387, + 27388, + 27389, + 27390, + 27391, + 27392, + 27393, + 27394, + 27395, + 27396, + 27397, + 27398, + 27399, + 27400, + 27401, + 27402, + 27403, + 27404, + 27405, + 27406, + 27407, + 27408, + 27409, + 27410, + 27411, + 27412, + 27413, + 27414, + 27415, + 27416, + 27417, + 27418, + 27419, + 27420, + 27421, + 27422, + 27423, + 27424, + 27425, + 27426, + 27427, + 
27428, + 27429, + 27430, + 27431, + 27432, + 27433, + 27434, + 27435, + 27436, + 27437, + 27438, + 27439, + 27440, + 27441, + 27442, + 27443, + 27444, + 27445, + 27446, + 27447, + 27448, + 27449, + 27450, + 27451, + 27452, + 27453, + 27454, + 27455, + 27456, + 27457, + 27458, + 27459, + 27460, + 27461, + 27462, + 27463, + 27464, + 27465, + 27466, + 27467, + 27468, + 27469, + 27470, + 27471, + 27472, + 27473, + 27474, + 27475, + 27476, + 27477, + 27478, + 27479, + 27480, + 27481, + 27482, + 27483, + 27484, + 27485, + 27486, + 27487, + 27488, + 27489, + 27490, + 27491, + 27492, + 27493, + 27494, + 27495, + 27496, + 27497, + 27498, + 27499, + 27500, + 27501, + 27502, + 27503, + 27504, + 27505, + 27506, + 27507, + 27508, + 27509, + 27510, + 27511, + 27512, + 27513, + 27514, + 27515, + 27516, + 27517, + 27518, + 27519, + 27520, + 27521, + 27522, + 27523, + 27524, + 27525, + 27526, + 27527, + 27528, + 27529, + 27530, + 27531, + 27532, + 27533, + 27534, + 27535, + 27536, + 27537, + 27538, + 27539, + 27540, + 27541, + 27542, + 27543, + 27544, + 27545, + 27546, + 27547, + 27548, + 27549, + 27550, + 27551, + 27552, + 27553, + 27554, + 27555, + 27556, + 27557, + 27558, + 27559, + 27560, + 27561, + 27562, + 27563, + 27564, + 27565, + 27566, + 27567, + 27568, + 27569, + 27570, + 27571, + 27572, + 27573, + 27574, + 27575, + 27576, + 27577, + 27578, + 27579, + 27580, + 27581, + 27582, + 27583, + 27584, + 27585, + 27586, + 27587, + 27588, + 27589, + 27590, + 27591, + 27592, + 27593, + 27594, + 27595, + 27596, + 27597, + 27598, + 27599, + 27600, + 27601, + 27602, + 27603, + 27604, + 27605, + 27606, + 27607, + 27608, + 27609, + 27610, + 27611, + 27612, + 27613, + 27614, + 27615, + 27616, + 27617, + 27618, + 27619, + 27620, + 27621, + 27622, + 27623, + 27624, + 27625, + 27626, + 27627, + 27628, + 27629, + 27630, + 27631, + 27632, + 27633, + 27634, + 27635, + 27636, + 27637, + 27638, + 27639, + 27640, + 27641, + 27642, + 27643, + 27644, + 27645, + 27646, + 27647, + 27648, + 27649, + 
27650, + 27651, + 27652, + 27653, + 27654, + 27655, + 27656, + 27657, + 27658, + 27659, + 27660, + 27661, + 27662, + 27663, + 27664, + 27665, + 27666, + 27667, + 27668, + 27669, + 27670, + 27671, + 27672, + 27673, + 27674, + 27675, + 27676, + 27677, + 27678, + 27679, + 27680, + 27681, + 27682, + 27683, + 27684, + 27685, + 27686, + 27687, + 27688, + 27689, + 27690, + 27691, + 27692, + 27693, + 27694, + 27695, + 27696, + 27697, + 27698, + 27699, + 27700, + 27701, + 27702, + 27703, + 27704, + 27705, + 27706, + 27707, + 27708, + 27709, + 27710, + 27711, + 27712, + 27713, + 27714, + 27715, + 27716, + 27717, + 27718, + 27719, + 27720, + 27721, + 27722, + 27723, + 27724, + 27725, + 27726, + 27727, + 27728, + 27729, + 27730, + 27731, + 27732, + 27733, + 27734, + 27735, + 27736, + 27737, + 27738, + 27739, + 27740, + 27741, + 27742, + 27743, + 27744, + 27745, + 27746, + 27747, + 27748, + 27749, + 27750, + 27751, + 27752, + 27753, + 27754, + 27755, + 27756, + 27757, + 27758, + 27759, + 27760, + 27761, + 27762, + 27763, + 27764, + 27765, + 27766, + 27767, + 27768, + 27769, + 27770, + 27771, + 27772, + 27773, + 27774, + 27775, + 27776, + 27777, + 27778, + 27779, + 27780, + 27781, + 27782, + 27783, + 27784, + 27785, + 27786, + 27787, + 27788, + 27789, + 27790, + 27791, + 27792, + 27793, + 27794, + 27795, + 27796, + 27797, + 27798, + 27799, + 27800, + 27801, + 27802, + 27803, + 27804, + 27805, + 27806, + 27807, + 27808, + 27809, + 27810, + 27811, + 27812, + 27813, + 27814, + 27815, + 27816, + 27817, + 27818, + 27819, + 27820, + 27821, + 27822, + 27823, + 27824, + 27825, + 27826, + 27827, + 27828, + 27829, + 27830, + 27831, + 27832, + 27833, + 27834, + 27835, + 27836, + 27837, + 27838, + 27839, + 27840, + 27841, + 27842, + 27843, + 27844, + 27845, + 27846, + 27847, + 27848, + 27849, + 27850, + 27851, + 27852, + 27853, + 27854, + 27855, + 27856, + 27857, + 27858, + 27859, + 27860, + 27861, + 27862, + 27863, + 27864, + 27865, + 27866, + 27867, + 27868, + 27869, + 27870, + 27871, + 
27872, + 27873, + 27874, + 27875, + 27876, + 27877, + 27878, + 27879, + 27880, + 27881, + 27882, + 27883, + 27884, + 27885, + 27886, + 27887, + 27888, + 27889, + 27890, + 27891, + 27892, + 27893, + 27894, + 27895, + 27896, + 27897, + 27898, + 27899, + 27900, + 27901, + 27902, + 27903, + 27904, + 27905, + 27906, + 27907, + 27908, + 27909, + 27910, + 27911, + 27912, + 27913, + 27914, + 27915, + 27916, + 27917, + 27918, + 27919, + 27920, + 27921, + 27922, + 27923, + 27924, + 27925, + 27926, + 27927, + 27928, + 27929, + 27930, + 27931, + 27932, + 27933, + 27934, + 27935, + 27936, + 27937, + 27938, + 27939, + 27940, + 27941, + 27942, + 27943, + 27944, + 27945, + 27946, + 27947, + 27948, + 27949, + 27950, + 27951, + 27952, + 27953, + 27954, + 27955, + 27956, + 27957, + 27958, + 27959, + 27960, + 27961, + 27962, + 27963, + 27964, + 27965, + 27966, + 27967, + 27968, + 27969, + 27970, + 27971, + 27972, + 27973, + 27974, + 27975, + 27976, + 27977, + 27978, + 27979, + 27980, + 27981, + 27982, + 27983, + 27984, + 27985, + 27986, + 27987, + 27988, + 27989, + 27990, + 27991, + 27992, + 27993, + 27994, + 27995, + 27996, + 27997, + 27998, + 27999, + 28000, + 28001, + 28002, + 28003, + 28004, + 28005, + 28006, + 28007, + 28008, + 28009, + 28010, + 28011, + 28012, + 28013, + 28014, + 28015, + 28016, + 28017, + 28018, + 28019, + 28020, + 28021, + 28022, + 28023, + 28024, + 28025, + 28026, + 28027, + 28028, + 28029, + 28030, + 28031, + 28032, + 28033, + 28034, + 28035, + 28036, + 28037, + 28038, + 28039, + 28040, + 28041, + 28042, + 28043, + 28044, + 28045, + 28046, + 28047, + 28048, + 28049, + 28050, + 28051, + 28052, + 28053, + 28054, + 28055, + 28056, + 28057, + 28058, + 28059, + 28060, + 28061, + 28062, + 28063, + 28064, + 28065, + 28066, + 28067, + 28068, + 28069, + 28070, + 28071, + 28072, + 28073, + 28074, + 28075, + 28076, + 28077, + 28078, + 28079, + 28080, + 28081, + 28082, + 28083, + 28084, + 28085, + 28086, + 28087, + 28088, + 28089, + 28090, + 28091, + 28092, + 28093, + 
28094, + 28095, + 28096, + 28097, + 28098, + 28099, + 28100, + 28101, + 28102, + 28103, + 28104, + 28105, + 28106, + 28107, + 28108, + 28109, + 28110, + 28111, + 28112, + 28113, + 28114, + 28115, + 28116, + 28117, + 28118, + 28119, + 28120, + 28121, + 28122, + 28123, + 28124, + 28125, + 28126, + 28127, + 28128, + 28129, + 28130, + 28131, + 28132, + 28133, + 28134, + 28135, + 28136, + 28137, + 28138, + 28139, + 28140, + 28141, + 28142, + 28143, + 28144, + 28145, + 28146, + 28147, + 28148, + 28149, + 28150, + 28151, + 28152, + 28153, + 28154, + 28155, + 28156, + 28157, + 28158, + 28159, + 28160, + 28161, + 28162, + 28163, + 28164, + 28165, + 28166, + 28167, + 28168, + 28169, + 28170, + 28171, + 28172, + 28173, + 28174, + 28175, + 28176, + 28177, + 28178, + 28179, + 28180, + 28181, + 28182, + 28183, + 28184, + 28185, + 28186, + 28187, + 28188, + 28189, + 28190, + 28191, + 28192, + 28193, + 28194, + 28195, + 28196, + 28197, + 28198, + 28199, + 28200, + 28201, + 28202, + 28203, + 28204, + 28205, + 28206, + 28207, + 28208, + 28209, + 28210, + 28211, + 28212, + 28213, + 28214, + 28215, + 28216, + 28217, + 28218, + 28219, + 28220, + 28221, + 28222, + 28223, + 28224, + 28225, + 28226, + 28227, + 28228, + 28229, + 28230, + 28231, + 28232, + 28233, + 28234, + 28235, + 28236, + 28237, + 28238, + 28239, + 28240, + 28241, + 28242, + 28243, + 28244, + 28245, + 28246, + 28247, + 28248, + 28249, + 28250, + 28251, + 28252, + 28253, + 28254, + 28255, + 28256, + 28257, + 28258, + 28259, + 28260, + 28261, + 28262, + 28263, + 28264, + 28265, + 28266, + 28267, + 28268, + 28269, + 28270, + 28271, + 28272, + 28273, + 28274, + 28275, + 28276, + 28277, + 28278, + 28279, + 28280, + 28281, + 28282, + 28283, + 28284, + 28285, + 28286, + 28287, + 28288, + 28289, + 28290, + 28291, + 28292, + 28293, + 28294, + 28295, + 28296, + 28297, + 28298, + 28299, + 28300, + 28301, + 28302, + 28303, + 28304, + 28305, + 28306, + 28307, + 28308, + 28309, + 28310, + 28311, + 28312, + 28313, + 28314, + 28315, + 
28316, + 28317, + 28318, + 28319, + 28320, + 28321, + 28322, + 28323, + 28324, + 28325, + 28326, + 28327, + 28328, + 28329, + 28330, + 28331, + 28332, + 28333, + 28334, + 28335, + 28336, + 28337, + 28338, + 28339, + 28340, + 28341, + 28342, + 28343, + 28344, + 28345, + 28346, + 28347, + 28348, + 28349, + 28350, + 28351, + 28352, + 28353, + 28354, + 28355, + 28356, + 28357, + 28358, + 28359, + 28360, + 28361, + 28362, + 28363, + 28364, + 28365, + 28366, + 28367, + 28368, + 28369, + 28370, + 28371, + 28372, + 28373, + 28374, + 28375, + 28376, + 28377, + 28378, + 28379, + 28380, + 28381, + 28382, + 28383, + 28384, + 28385, + 28386, + 28387, + 28388, + 28389, + 28390, + 28391, + 28392, + 28393, + 28394, + 28395, + 28396, + 28397, + 28398, + 28399, + 28400, + 28401, + 28402, + 28403, + 28404, + 28405, + 28406, + 28407, + 28408, + 28409, + 28410, + 28411, + 28412, + 28413, + 28414, + 28415, + 28416, + 28417, + 28418, + 28419, + 28420, + 28421, + 28422, + 28423, + 28424, + 28425, + 28426, + 28427, + 28428, + 28429, + 28430, + 28431, + 28432, + 28433, + 28434, + 28435, + 28436, + 28437, + 28438, + 28439, + 28440, + 28441, + 28442, + 28443, + 28444, + 28445, + 28446, + 28447, + 28448, + 28449, + 28450, + 28451, + 28452, + 28453, + 28454, + 28455, + 28456, + 28457, + 28458, + 28459, + 28460, + 28461, + 28462, + 28463, + 28464, + 28465, + 28466, + 28467, + 28468, + 28469, + 28470, + 28471, + 28472, + 28473, + 28474, + 28475, + 28476, + 28477, + 28478, + 28479, + 28480, + 28481, + 28482, + 28483, + 28484, + 28485, + 28486, + 28487, + 28488, + 28489, + 28490, + 28491, + 28492, + 28493, + 28494, + 28495, + 28496, + 28497, + 28498, + 28499, + 28500, + 28501, + 28502, + 28503, + 28504, + 28505, + 28506, + 28507, + 28508, + 28509, + 28510, + 28511, + 28512, + 28513, + 28514, + 28515, + 28516, + 28517, + 28518, + 28519, + 28520, + 28521, + 28522, + 28523, + 28524, + 28525, + 28526, + 28527, + 28528, + 28529, + 28530, + 28531, + 28532, + 28533, + 28534, + 28535, + 28536, + 28537, + 
28538, + 28539, + 28540, + 28541, + 28542, + 28543, + 28544, + 28545, + 28546, + 28547, + 28548, + 28549, + 28550, + 28551, + 28552, + 28553, + 28554, + 28555, + 28556, + 28557, + 28558, + 28559, + 28560, + 28561, + 28562, + 28563, + 28564, + 28565, + 28566, + 28567, + 28568, + 28569, + 28570, + 28571, + 28572, + 28573, + 28574, + 28575, + 28576, + 28577, + 28578, + 28579, + 28580, + 28581, + 28582, + 28583, + 28584, + 28585, + 28586, + 28587, + 28588, + 28589, + 28590, + 28591, + 28592, + 28593, + 28594, + 28595, + 28596, + 28597, + 28598, + 28599, + 28600, + 28601, + 28602, + 28603, + 28604, + 28605, + 28606, + 28607, + 28608, + 28609, + 28610, + 28611, + 28612, + 28613, + 28614, + 28615, + 28616, + 28617, + 28618, + 28619, + 28620, + 28621, + 28622, + 28623, + 28624, + 28625, + 28626, + 28627, + 28628, + 28629, + 28630, + 28631, + 28632, + 28633, + 28634, + 28635, + 28636, + 28637, + 28638, + 28639, + 28640, + 28641, + 28642, + 28643, + 28644, + 28645, + 28646, + 28647, + 28648, + 28649, + 28650, + 28651, + 28652, + 28653, + 28654, + 28655, + 28656, + 28657, + 28658, + 28659, + 28660, + 28661, + 28662, + 28663, + 28664, + 28665, + 28666, + 28667, + 28668, + 28669, + 28670, + 28671, + 28672, + 28673, + 28674, + 28675, + 28676, + 28677, + 28678, + 28679, + 28680, + 28681, + 28682, + 28683, + 28684, + 28685, + 28686, + 28687, + 28688, + 28689, + 28690, + 28691, + 28692, + 28693, + 28694, + 28695, + 28696, + 28697, + 28698, + 28699, + 28700, + 28701, + 28702, + 28703, + 28704, + 28705, + 28706, + 28707, + 28708, + 28709, + 28710, + 28711, + 28712, + 28713, + 28714, + 28715, + 28716, + 28717, + 28718, + 28719, + 28720, + 28721, + 28722, + 28723, + 28724, + 28725, + 28726, + 28727, + 28728, + 28729, + 28730, + 28731, + 28732, + 28733, + 28734, + 28735, + 28736, + 28737, + 28738, + 28739, + 28740, + 28741, + 28742, + 28743, + 28744, + 28745, + 28746, + 28747, + 28748, + 28749, + 28750, + 28751, + 28752, + 28753, + 28754, + 28755, + 28756, + 28757, + 28758, + 28759, + 
28760, + 28761, + 28762, + 28763, + 28764, + 28765, + 28766, + 28767, + 28768, + 28769, + 28770, + 28771, + 28772, + 28773, + 28774, + 28775, + 28776, + 28777, + 28778, + 28779, + 28780, + 28781, + 28782, + 28783, + 28784, + 28785, + 28786, + 28787, + 28788, + 28789, + 28790, + 28791, + 28792, + 28793, + 28794, + 28795, + 28796, + 28797, + 28798, + 28799, + 28800, + 28801, + 28802, + 28803, + 28804, + 28805, + 28806, + 28807, + 28808, + 28809, + 28810, + 28811, + 28812, + 28813, + 28814, + 28815, + 28816, + 28817, + 28818, + 28819, + 28820, + 28821, + 28822, + 28823, + 28824, + 28825, + 28826, + 28827, + 28828, + 28829, + 28830, + 28831, + 28832, + 28833, + 28834, + 28835, + 28836, + 28837, + 28838, + 28839, + 28840, + 28841, + 28842, + 28843, + 28844, + 28845, + 28846, + 28847, + 28848, + 28849, + 28850, + 28851, + 28852, + 28853, + 28854, + 28855, + 28856, + 28857, + 28858, + 28859, + 28860, + 28861, + 28862, + 28863, + 28864, + 28865, + 28866, + 28867, + 28868, + 28869, + 28870, + 28871, + 28872, + 28873, + 28874, + 28875, + 28876, + 28877, + 28878, + 28879, + 28880, + 28881, + 28882, + 28883, + 28884, + 28885, + 28886, + 28887, + 28888, + 28889, + 28890, + 28891, + 28892, + 28893, + 28894, + 28895, + 28896, + 28897, + 28898, + 28899, + 28900, + 28901, + 28902, + 28903, + 28904, + 28905, + 28906, + 28907, + 28908, + 28909, + 28910, + 28911, + 28912, + 28913, + 28914, + 28915, + 28916, + 28917, + 28918, + 28919, + 28920, + 28921, + 28922, + 28923, + 28924, + 28925, + 28926, + 28927, + 28928, + 28929, + 28930, + 28931, + 28932, + 28933, + 28934, + 28935, + 28936, + 28937, + 28938, + 28939, + 28940, + 28941, + 28942, + 28943, + 28944, + 28945, + 28946, + 28947, + 28948, + 28949, + 28950, + 28951, + 28952, + 28953, + 28954, + 28955, + 28956, + 28957, + 28958, + 28959, + 28960, + 28961, + 28962, + 28963, + 28964, + 28965, + 28966, + 28967, + 28968, + 28969, + 28970, + 28971, + 28972, + 28973, + 28974, + 28975, + 28976, + 28977, + 28978, + 28979, + 28980, + 28981, + 
28982, + 28983, + 28984, + 28985, + 28986, + 28987, + 28988, + 28989, + 28990, + 28991, + 28992, + 28993, + 28994, + 28995, + 28996, + 28997, + 28998, + 28999, + 29000, + 29001, + 29002, + 29003, + 29004, + 29005, + 29006, + 29007, + 29008, + 29009, + 29010, + 29011, + 29012, + 29013, + 29014, + 29015, + 29016, + 29017, + 29018, + 29019, + 29020, + 29021, + 29022, + 29023, + 29024, + 29025, + 29026, + 29027, + 29028, + 29029, + 29030, + 29031, + 29032, + 29033, + 29034, + 29035, + 29036, + 29037, + 29038, + 29039, + 29040, + 29041, + 29042, + 29043, + 29044, + 29045, + 29046, + 29047, + 29048, + 29049, + 29050, + 29051, + 29052, + 29053, + 29054, + 29055, + 29056, + 29057, + 29058, + 29059, + 29060, + 29061, + 29062, + 29063, + 29064, + 29065, + 29066, + 29067, + 29068, + 29069, + 29070, + 29071, + 29072, + 29073, + 29074, + 29075, + 29076, + 29077, + 29078, + 29079, + 29080, + 29081, + 29082, + 29083, + 29084, + 29085, + 29086, + 29087, + 29088, + 29089, + 29090, + 29091, + 29092, + 29093, + 29094, + 29095, + 29096, + 29097, + 29098, + 29099, + 29100, + 29101, + 29102, + 29103, + 29104, + 29105, + 29106, + 29107, + 29108, + 29109, + 29110, + 29111, + 29112, + 29113, + 29114, + 29115, + 29116, + 29117, + 29118, + 29119, + 29120, + 29121, + 29122, + 29123, + 29124, + 29125, + 29126, + 29127, + 29128, + 29129, + 29130, + 29131, + 29132, + 29133, + 29134, + 29135, + 29136, + 29137, + 29138, + 29139, + 29140, + 29141, + 29142, + 29143, + 29144, + 29145, + 29146, + 29147, + 29148, + 29149, + 29150, + 29151, + 29152, + 29153, + 29154, + 29155, + 29156, + 29157, + 29158, + 29159, + 29160, + 29161, + 29162, + 29163, + 29164, + 29165, + 29166, + 29167, + 29168, + 29169, + 29170, + 29171, + 29172, + 29173, + 29174, + 29175, + 29176, + 29177, + 29178, + 29179, + 29180, + 29181, + 29182, + 29183, + 29184, + 29185, + 29186, + 29187, + 29188, + 29189, + 29190, + 29191, + 29192, + 29193, + 29194, + 29195, + 29196, + 29197, + 29198, + 29199, + 29200, + 29201, + 29202, + 29203, + 
29204, + 29205, + 29206, + 29207, + 29208, + 29209, + 29210, + 29211, + 29212, + 29213, + 29214, + 29215, + 29216, + 29217, + 29218, + 29219, + 29220, + 29221, + 29222, + 29223, + 29224, + 29225, + 29226, + 29227, + 29228, + 29229, + 29230, + 29231, + 29232, + 29233, + 29234, + 29235, + 29236, + 29237, + 29238, + 29239, + 29240, + 29241, + 29242, + 29243, + 29244, + 29245, + 29246, + 29247, + 29248, + 29249, + 29250, + 29251, + 29252, + 29253, + 29254, + 29255, + 29256, + 29257, + 29258, + 29259, + 29260, + 29261, + 29262, + 29263, + 29264, + 29265, + 29266, + 29267, + 29268, + 29269, + 29270, + 29271, + 29272, + 29273, + 29274, + 29275, + 29276, + 29277, + 29278, + 29279, + 29280, + 29281, + 29282, + 29283, + 29284, + 29285, + 29286, + 29287, + 29288, + 29289, + 29290, + 29291, + 29292, + 29293, + 29294, + 29295, + 29296, + 29297, + 29298, + 29299, + 29300, + 29301, + 29302, + 29303, + 29304, + 29305, + 29306, + 29307, + 29308, + 29309, + 29310, + 29311, + 29312, + 29313, + 29314, + 29315, + 29316, + 29317, + 29318, + 29319, + 29320, + 29321, + 29322, + 29323, + 29324, + 29325, + 29326, + 29327, + 29328, + 29329, + 29330, + 29331, + 29332, + 29333, + 29334, + 29335, + 29336, + 29337, + 29338, + 29339, + 29340, + 29341, + 29342, + 29343, + 29344, + 29345, + 29346, + 29347, + 29348, + 29349, + 29350, + 29351, + 29352, + 29353, + 29354, + 29355, + 29356, + 29357, + 29358, + 29359, + 29360, + 29361, + 29362, + 29363, + 29364, + 29365, + 29366, + 29367, + 29368, + 29369, + 29370, + 29371, + 29372, + 29373, + 29374, + 29375, + 29376, + 29377, + 29378, + 29379, + 29380, + 29381, + 29382, + 29383, + 29384, + 29385, + 29386, + 29387, + 29388, + 29389, + 29390, + 29391, + 29392, + 29393, + 29394, + 29395, + 29396, + 29397, + 29398, + 29399, + 29400, + 29401, + 29402, + 29403, + 29404, + 29405, + 29406, + 29407, + 29408, + 29409, + 29410, + 29411, + 29412, + 29413, + 29414, + 29415, + 29416, + 29417, + 29418, + 29419, + 29420, + 29421, + 29422, + 29423, + 29424, + 29425, + 
29426, + 29427, + 29428, + 29429, + 29430, + 29431, + 29432, + 29433, + 29434, + 29435, + 29436, + 29437, + 29438, + 29439, + 29440, + 29441, + 29442, + 29443, + 29444, + 29445, + 29446, + 29447, + 29448, + 29449, + 29450, + 29451, + 29452, + 29453, + 29454, + 29455, + 29456, + 29457, + 29458, + 29459, + 29460, + 29461, + 29462, + 29463, + 29464, + 29465, + 29466, + 29467, + 29468, + 29469, + 29470, + 29471, + 29472, + 29473, + 29474, + 29475, + 29476, + 29477, + 29478, + 29479, + 29480, + 29481, + 29482, + 29483, + 29484, + 29485, + 29486, + 29487, + 29488, + 29489, + 29490, + 29491, + 29492, + 29493, + 29494, + 29495, + 29496, + 29497, + 29498, + 29499, + 29500, + 29501, + 29502, + 29503, + 29504, + 29505, + 29506, + 29507, + 29508, + 29509, + 29510, + 29511, + 29512, + 29513, + 29514, + 29515, + 29516, + 29517, + 29518, + 29519, + 29520, + 29521, + 29522, + 29523, + 29524, + 29525, + 29526, + 29527, + 29528, + 29529, + 29530, + 29531, + 29532, + 29533, + 29534, + 29535, + 29536, + 29537, + 29538, + 29539, + 29540, + 29541, + 29542, + 29543, + 29544, + 29545, + 29546, + 29547, + 29548, + 29549, + 29550, + 29551, + 29552, + 29553, + 29554, + 29555, + 29556, + 29557, + 29558, + 29559, + 29560, + 29561, + 29562, + 29563, + 29564, + 29565, + 29566, + 29567, + 29568, + 29569, + 29570, + 29571, + 29572, + 29573, + 29574, + 29575, + 29576, + 29577, + 29578, + 29579, + 29580, + 29581, + 29582, + 29583, + 29584, + 29585, + 29586, + 29587, + 29588, + 29589, + 29590, + 29591, + 29592, + 29593, + 29594, + 29595, + 29596, + 29597, + 29598, + 29599, + 29600, + 29601, + 29602, + 29603, + 29604, + 29605, + 29606, + 29607, + 29608, + 29609, + 29610, + 29611, + 29612, + 29613, + 29614, + 29615, + 29616, + 29617, + 29618, + 29619, + 29620, + 29621, + 29622, + 29623, + 29624, + 29625, + 29626, + 29627, + 29628, + 29629, + 29630, + 29631, + 29632, + 29633, + 29634, + 29635, + 29636, + 29637, + 29638, + 29639, + 29640, + 29641, + 29642, + 29643, + 29644, + 29645, + 29646, + 29647, + 
29648, + 29649, + 29650, + 29651, + 29652, + 29653, + 29654, + 29655, + 29656, + 29657, + 29658, + 29659, + 29660, + 29661, + 29662, + 29663, + 29664, + 29665, + 29666, + 29667, + 29668, + 29669, + 29670, + 29671, + 29672, + 29673, + 29674, + 29675, + 29676, + 29677, + 29678, + 29679, + 29680, + 29681, + 29682, + 29683, + 29684, + 29685, + 29686, + 29687, + 29688, + 29689, + 29690, + 29691, + 29692, + 29693, + 29694, + 29695, + 29696, + 29697, + 29698, + 29699, + 29700, + 29701, + 29702, + 29703, + 29704, + 29705, + 29706, + 29707, + 29708, + 29709, + 29710, + 29711, + 29712, + 29713, + 29714, + 29715, + 29716, + 29717, + 29718, + 29719, + 29720, + 29721, + 29722, + 29723, + 29724, + 29725, + 29726, + 29727, + 29728, + 29729, + 29730, + 29731, + 29732, + 29733, + 29734, + 29735, + 29736, + 29737, + 29738, + 29739, + 29740, + 29741, + 29742, + 29743, + 29744, + 29745, + 29746, + 29747, + 29748, + 29749, + 29750, + 29751, + 29752, + 29753, + 29754, + 29755, + 29756, + 29757, + 29758, + 29759, + 29760, + 29761, + 29762, + 29763, + 29764, + 29765, + 29766, + 29767, + 29768, + 29769, + 29770, + 29771, + 29772, + 29773, + 29774, + 29775, + 29776, + 29777, + 29778, + 29779, + 29780, + 29781, + 29782, + 29783, + 29784, + 29785, + 29786, + 29787, + 29788, + 29789, + 29790, + 29791, + 29792, + 29793, + 29794, + 29795, + 29796, + 29797, + 29798, + 29799, + 29800, + 29801, + 29802, + 29803, + 29804, + 29805, + 29806, + 29807, + 29808, + 29809, + 29810, + 29811, + 29812, + 29813, + 29814, + 29815, + 29816, + 29817, + 29818, + 29819, + 29820, + 29821, + 29822, + 29823, + 29824, + 29825, + 29826, + 29827, + 29828, + 29829, + 29830, + 29831, + 29832, + 29833, + 29834, + 29835, + 29836, + 29837, + 29838, + 29839, + 29840, + 29841, + 29842, + 29843, + 29844, + 29845, + 29846, + 29847, + 29848, + 29849, + 29850, + 29851, + 29852, + 29853, + 29854, + 29855, + 29856, + 29857, + 29858, + 29859, + 29860, + 29861, + 29862, + 29863, + 29864, + 29865, + 29866, + 29867, + 29868, + 29869, + 
29870, + 29871, + 29872, + 29873, + 29874, + 29875, + 29876, + 29877, + 29878, + 29879, + 29880, + 29881, + 29882, + 29883, + 29884, + 29885, + 29886, + 29887, + 29888, + 29889, + 29890, + 29891, + 29892, + 29893, + 29894, + 29895, + 29896, + 29897, + 29898, + 29899, + 29900, + 29901, + 29902, + 29903, + 29904, + 29905, + 29906, + 29907, + 29908, + 29909, + 29910, + 29911, + 29912, + 29913, + 29914, + 29915, + 29916, + 29917, + 29918, + 29919, + 29920, + 29921, + 29922, + 29923, + 29924, + 29925, + 29926, + 29927, + 29928, + 29929, + 29930, + 29931, + 29932, + 29933, + 29934, + 29935, + 29936, + 29937, + 29938, + 29939, + 29940, + 29941, + 29942, + 29943, + 29944, + 29945, + 29946, + 29947, + 29948, + 29949, + 29950, + 29951, + 29952, + 29953, + 29954, + 29955, + 29956, + 29957, + 29958, + 29959, + 29960, + 29961, + 29962, + 29963, + 29964, + 29965, + 29966, + 29967, + 29968, + 29969, + 29970, + 29971, + 29972, + 29973, + 29974, + 29975, + 29976, + 29977, + 29978, + 29979, + 29980, + 29981, + 29982, + 29983, + 29984, + 29985, + 29986, + 29987, + 29988, + 29989, + 29990, + 29991, + 29992, + 29993, + 29994, + 29995, + 29996, + 29997, + 29998, + 29999, + 30000, + 30001, + 30002, + 30003, + 30004, + 30005, + 30006, + 30007, + 30008, + 30009, + 30010, + 30011, + 30012, + 30013, + 30014, + 30015, + 30016, + 30017, + 30018, + 30019, + 30020, + 30021, + 30022, + 30023, + 30024, + 30025, + 30026, + 30027, + 30028, + 30029, + 30030, + 30031, + 30032, + 30033, + 30034, + 30035, + 30036, + 30037, + 30038, + 30039, + 30040, + 30041, + 30042, + 30043, + 30044, + 30045, + 30046, + 30047, + 30048, + 30049, + 30050, + 30051, + 30052, + 30053, + 30054, + 30055, + 30056, + 30057, + 30058, + 30059, + 30060, + 30061, + 30062, + 30063, + 30064, + 30065, + 30066, + 30067, + 30068, + 30069, + 30070, + 30071, + 30072, + 30073, + 30074, + 30075, + 30076, + 30077, + 30078, + 30079, + 30080, + 30081, + 30082, + 30083, + 30084, + 30085, + 30086, + 30087, + 30088, + 30089, + 30090, + 30091, + 
30092, + 30093, + 30094, + 30095, + 30096, + 30097, + 30098, + 30099, + 30100, + 30101, + 30102, + 30103, + 30104, + 30105, + 30106, + 30107, + 30108, + 30109, + 30110, + 30111, + 30112, + 30113, + 30114, + 30115, + 30116, + 30117, + 30118, + 30119, + 30120, + 30121, + 30122, + 30123, + 30124, + 30125, + 30126, + 30127, + 30128, + 30129, + 30130, + 30131, + 30132, + 30133, + 30134, + 30135, + 30136, + 30137, + 30138, + 30139, + 30140, + 30141, + 30142, + 30143, + 30144, + 30145, + 30146, + 30147, + 30148, + 30149, + 30150, + 30151, + 30152, + 30153, + 30154, + 30155, + 30156, + 30157, + 30158, + 30159, + 30160, + 30161, + 30162, + 30163, + 30164, + 30165, + 30166, + 30167, + 30168, + 30169, + 30170, + 30171, + 30172, + 30173, + 30174, + 30175, + 30176, + 30177, + 30178, + 30179, + 30180, + 30181, + 30182, + 30183, + 30184, + 30185, + 30186, + 30187, + 30188, + 30189, + 30190, + 30191, + 30192, + 30193, + 30194, + 30195, + 30196, + 30197, + 30198, + 30199, + 30200, + 30201, + 30202, + 30203, + 30204, + 30205, + 30206, + 30207, + 30208, + 30209, + 30210, + 30211, + 30212, + 30213, + 30214, + 30215, + 30216, + 30217, + 30218, + 30219, + 30220, + 30221, + 30222, + 30223, + 30224, + 30225, + 30226, + 30227, + 30228, + 30229, + 30230, + 30231, + 30232, + 30233, + 30234, + 30235, + 30236, + 30237, + 30238, + 30239, + 30240, + 30241, + 30242, + 30243, + 30244, + 30245, + 30246, + 30247, + 30248, + 30249, + 30250, + 30251, + 30252, + 30253, + 30254, + 30255, + 30256, + 30257, + 30258, + 30259, + 30260, + 30261, + 30262, + 30263, + 30264, + 30265, + 30266, + 30267, + 30268, + 30269, + 30270, + 30271, + 30272, + 30273, + 30274, + 30275, + 30276, + 30277, + 30278, + 30279, + 30280, + 30281, + 30282, + 30283, + 30284, + 30285, + 30286, + 30287, + 30288, + 30289, + 30290, + 30291, + 30292, + 30293, + 30294, + 30295, + 30296, + 30297, + 30298, + 30299, + 30300, + 30301, + 30302, + 30303, + 30304, + 30305, + 30306, + 30307, + 30308, + 30309, + 30310, + 30311, + 30312, + 30313, + 
30314, + 30315, + 30316, + 30317, + 30318, + 30319, + 30320, + 30321, + 30322, + 30323, + 30324, + 30325, + 30326, + 30327, + 30328, + 30329, + 30330, + 30331, + 30332, + 30333, + 30334, + 30335, + 30336, + 30337, + 30338, + 30339, + 30340, + 30341, + 30342, + 30343, + 30344, + 30345, + 30346, + 30347, + 30348, + 30349, + 30350, + 30351, + 30352, + 30353, + 30354, + 30355, + 30356, + 30357, + 30358, + 30359, + 30360, + 30361, + 30362, + 30363, + 30364, + 30365, + 30366, + 30367, + 30368, + 30369, + 30370, + 30371, + 30372, + 30373, + 30374, + 30375, + 30376, + 30377, + 30378, + 30379, + 30380, + 30381, + 30382, + 30383, + 30384, + 30385, + 30386, + 30387, + 30388, + 30389, + 30390, + 30391, + 30392, + 30393, + 30394, + 30395, + 30396, + 30397, + 30398, + 30399, + 30400, + 30401, + 30402, + 30403, + 30404, + 30405, + 30406, + 30407, + 30408, + 30409, + 30410, + 30411, + 30412, + 30413, + 30414, + 30415, + 30416, + 30417, + 30418, + 30419, + 30420, + 30421, + 30422, + 30423, + 30424, + 30425, + 30426, + 30427, + 30428, + 30429, + 30430, + 30431, + 30432, + 30433, + 30434, + 30435, + 30436, + 30437, + 30438, + 30439, + 30440, + 30441, + 30442, + 30443, + 30444, + 30445, + 30446, + 30447, + 30448, + 30449, + 30450, + 30451, + 30452, + 30453, + 30454, + 30455, + 30456, + 30457, + 30458, + 30459, + 30460, + 30461, + 30462, + 30463, + 30464, + 30465, + 30466, + 30467, + 30468, + 30469, + 30470, + 30471, + 30472, + 30473, + 30474, + 30475, + 30476, + 30477, + 30478, + 30479, + 30480, + 30481, + 30482, + 30483, + 30484, + 30485, + 30486, + 30487, + 30488, + 30489, + 30490, + 30491, + 30492, + 30493, + 30494, + 30495, + 30496, + 30497, + 30498, + 30499, + 30500, + 30501, + 30502, + 30503, + 30504, + 30505, + 30506, + 30507, + 30508, + 30509, + 30510, + 30511, + 30512, + 30513, + 30514, + 30515, + 30516, + 30517, + 30518, + 30519, + 30520, + 30521, + 30522, + 30523, + 30524, + 30525, + 30526, + 30527, + 30528, + 30529, + 30530, + 30531, + 30532, + 30533, + 30534, + 30535, + 
30536, + 30537, + 30538, + 30539, + 30540, + 30541, + 30542, + 30543, + 30544, + 30545, + 30546, + 30547, + 30548, + 30549, + 30550, + 30551, + 30552, + 30553, + 30554, + 30555, + 30556, + 30557, + 30558, + 30559, + 30560, + 30561, + 30562, + 30563, + 30564, + 30565, + 30566, + 30567, + 30568, + 30569, + 30570, + 30571, + 30572, + 30573, + 30574, + 30575, + 30576, + 30577, + 30578, + 30579, + 30580, + 30581, + 30582, + 30583, + 30584, + 30585, + 30586, + 30587, + 30588, + 30589, + 30590, + 30591, + 30592, + 30593, + 30594, + 30595, + 30596, + 30597, + 30598, + 30599, + 30600, + 30601, + 30602, + 30603, + 30604, + 30605, + 30606, + 30607, + 30608, + 30609, + 30610, + 30611, + 30612, + 30613, + 30614, + 30615, + 30616, + 30617, + 30618, + 30619, + 30620, + 30621, + 30622, + 30623, + 30624, + 30625, + 30626, + 30627, + 30628, + 30629, + 30630, + 30631, + 30632, + 30633, + 30634, + 30635, + 30636, + 30637, + 30638, + 30639, + 30640, + 30641, + 30642, + 30643, + 30644, + 30645, + 30646, + 30647, + 30648, + 30649, + 30650, + 30651, + 30652, + 30653, + 30654, + 30655, + 30656, + 30657, + 30658, + 30659, + 30660, + 30661, + 30662, + 30663, + 30664, + 30665, + 30666, + 30667, + 30668, + 30669, + 30670, + 30671, + 30672, + 30673, + 30674, + 30675, + 30676, + 30677, + 30678, + 30679, + 30680, + 30681, + 30682, + 30683, + 30684, + 30685, + 30686, + 30687, + 30688, + 30689, + 30690, + 30691, + 30692, + 30693, + 30694, + 30695, + 30696, + 30697, + 30698, + 30699, + 30700, + 30701, + 30702, + 30703, + 30704, + 30705, + 30706, + 30707, + 30708, + 30709, + 30710, + 30711, + 30712, + 30713, + 30714, + 30715, + 30716, + 30717, + 30718, + 30719, + 30720, + 30721, + 30722, + 30723, + 30724, + 30725, + 30726, + 30727, + 30728, + 30729, + 30730, + 30731, + 30732, + 30733, + 30734, + 30735, + 30736, + 30737, + 30738, + 30739, + 30740, + 30741, + 30742, + 30743, + 30744, + 30745, + 30746, + 30747, + 30748, + 30749, + 30750, + 30751, + 30752, + 30753, + 30754, + 30755, + 30756, + 30757, + 
30758, + 30759, + 30760, + 30761, + 30762, + 30763, + 30764, + 30765, + 30766, + 30767, + 30768, + 30769, + 30770, + 30771, + 30772, + 30773, + 30774, + 30775, + 30776, + 30777, + 30778, + 30779, + 30780, + 30781, + 30782, + 30783, + 30784, + 30785, + 30786, + 30787, + 30788, + 30789, + 30790, + 30791, + 30792, + 30793, + 30794, + 30795, + 30796, + 30797, + 30798, + 30799, + 30800, + 30801, + 30802, + 30803, + 30804, + 30805, + 30806, + 30807, + 30808, + 30809, + 30810, + 30811, + 30812, + 30813, + 30814, + 30815, + 30816, + 30817, + 30818, + 30819, + 30820, + 30821, + 30822, + 30823, + 30824, + 30825, + 30826, + 30827, + 30828, + 30829, + 30830, + 30831, + 30832, + 30833, + 30834, + 30835, + 30836, + 30837, + 30838, + 30839, + 30840, + 30841, + 30842, + 30843, + 30844, + 30845, + 30846, + 30847, + 30848, + 30849, + 30850, + 30851, + 30852, + 30853, + 30854, + 30855, + 30856, + 30857, + 30858, + 30859, + 30860, + 30861, + 30862, + 30863, + 30864, + 30865, + 30866, + 30867, + 30868, + 30869, + 30870, + 30871, + 30872, + 30873, + 30874, + 30875, + 30876, + 30877, + 30878, + 30879, + 30880, + 30881, + 30882, + 30883, + 30884, + 30885, + 30886, + 30887, + 30888, + 30889, + 30890, + 30891, + 30892, + 30893, + 30894, + 30895, + 30896, + 30897, + 30898, + 30899, + 30900, + 30901, + 30902, + 30903, + 30904, + 30905, + 30906, + 30907, + 30908, + 30909, + 30910, + 30911, + 30912, + 30913, + 30914, + 30915, + 30916, + 30917, + 30918, + 30919, + 30920, + 30921, + 30922, + 30923, + 30924, + 30925, + 30926, + 30927, + 30928, + 30929, + 30930, + 30931, + 30932, + 30933, + 30934, + 30935, + 30936, + 30937, + 30938, + 30939, + 30940, + 30941, + 30942, + 30943, + 30944, + 30945, + 30946, + 30947, + 30948, + 30949, + 30950, + 30951, + 30952, + 30953, + 30954, + 30955, + 30956, + 30957, + 30958, + 30959, + 30960, + 30961, + 30962, + 30963, + 30964, + 30965, + 30966, + 30967, + 30968, + 30969, + 30970, + 30971, + 30972, + 30973, + 30974, + 30975, + 30976, + 30977, + 30978, + 30979, + 
30980, + 30981, + 30982, + 30983, + 30984, + 30985, + 30986, + 30987, + 30988, + 30989, + 30990, + 30991, + 30992, + 30993, + 30994, + 30995, + 30996, + 30997, + 30998, + 30999, + 31000, + 31001, + 31002, + 31003, + 31004, + 31005, + 31006, + 31007, + 31008, + 31009, + 31010, + 31011, + 31012, + 31013, + 31014, + 31015, + 31016, + 31017, + 31018, + 31019, + 31020, + 31021, + 31022, + 31023, + 31024, + 31025, + 31026, + 31027, + 31028, + 31029, + 31030, + 31031, + 31032, + 31033, + 31034, + 31035, + 31036, + 31037, + 31038, + 31039, + 31040, + 31041, + 31042, + 31043, + 31044, + 31045, + 31046, + 31047, + 31048, + 31049, + 31050, + 31051, + 31052, + 31053, + 31054, + 31055, + 31056, + 31057, + 31058, + 31059, + 31060, + 31061, + 31062, + 31063, + 31064, + 31065, + 31066, + 31067, + 31068, + 31069, + 31070, + 31071, + 31072, + 31073, + 31074, + 31075, + 31076, + 31077, + 31078, + 31079, + 31080, + 31081, + 31082, + 31083, + 31084, + 31085, + 31086, + 31087, + 31088, + 31089, + 31090, + 31091, + 31092, + 31093, + 31094, + 31095, + 31096, + 31097, + 31098, + 31099, + 31100, + 31101, + 31102, + 31103, + 31104, + 31105, + 31106, + 31107, + 31108, + 31109, + 31110, + 31111, + 31112, + 31113, + 31114, + 31115, + 31116, + 31117, + 31118, + 31119, + 31120, + 31121, + 31122, + 31123, + 31124, + 31125, + 31126, + 31127, + 31128, + 31129, + 31130, + 31131, + 31132, + 31133, + 31134, + 31135, + 31136, + 31137, + 31138, + 31139, + 31140, + 31141, + 31142, + 31143, + 31144, + 31145, + 31146, + 31147, + 31148, + 31149, + 31150, + 31151, + 31152, + 31153, + 31154, + 31155, + 31156, + 31157, + 31158, + 31159, + 31160, + 31161, + 31162, + 31163, + 31164, + 31165, + 31166, + 31167, + 31168, + 31169, + 31170, + 31171, + 31172, + 31173, + 31174, + 31175, + 31176, + 31177, + 31178, + 31179, + 31180, + 31181, + 31182, + 31183, + 31184, + 31185, + 31186, + 31187, + 31188, + 31189, + 31190, + 31191, + 31192, + 31193, + 31194, + 31195, + 31196, + 31197, + 31198, + 31199, + 31200, + 31201, + 
31202, + 31203, + 31204, + 31205, + 31206, + 31207, + 31208, + 31209, + 31210, + 31211, + 31212, + 31213, + 31214, + 31215, + 31216, + 31217, + 31218, + 31219, + 31220, + 31221, + 31222, + 31223, + 31224, + 31225, + 31226, + 31227, + 31228, + 31229, + 31230, + 31231, + 31232, + 31233, + 31234, + 31235, + 31236, + 31237, + 31238, + 31239, + 31240, + 31241, + 31242, + 31243, + 31244, + 31245, + 31246, + 31247, + 31248, + 31249, + 31250, + 31251, + 31252, + 31253, + 31254, + 31255, + 31256, + 31257, + 31258, + 31259, + 31260, + 31261, + 31262, + 31263, + 31264, + 31265, + 31266, + 31267, + 31268, + 31269, + 31270, + 31271, + 31272, + 31273, + 31274, + 31275, + 31276, + 31277, + 31278, + 31279, + 31280, + 31281, + 31282, + 31283, + 31284, + 31285, + 31286, + 31287, + 31288, + 31289, + 31290, + 31291, + 31292, + 31293, + 31294, + 31295, + 31296, + 31297, + 31298, + 31299, + 31300, + 31301, + 31302, + 31303, + 31304, + 31305, + 31306, + 31307, + 31308, + 31309, + 31310, + 31311, + 31312, + 31313, + 31314, + 31315, + 31316, + 31317, + 31318, + 31319, + 31320, + 31321, + 31322, + 31323, + 31324, + 31325, + 31326, + 31327, + 31328, + 31329, + 31330, + 31331, + 31332, + 31333, + 31334, + 31335, + 31336, + 31337, + 31338, + 31339, + 31340, + 31341, + 31342, + 31343, + 31344, + 31345, + 31346, + 31347, + 31348, + 31349, + 31350, + 31351, + 31352, + 31353, + 31354, + 31355, + 31356, + 31357, + 31358, + 31359, + 31360, + 31361, + 31362, + 31363, + 31364, + 31365, + 31366, + 31367, + 31368, + 31369, + 31370, + 31371, + 31372, + 31373, + 31374, + 31375, + 31376, + 31377, + 31378, + 31379, + 31380, + 31381, + 31382, + 31383, + 31384, + 31385, + 31386, + 31387, + 31388, + 31389, + 31390, + 31391, + 31392, + 31393, + 31394, + 31395, + 31396, + 31397, + 31398, + 31399, + 31400, + 31401, + 31402, + 31403, + 31404, + 31405, + 31406, + 31407, + 31408, + 31409, + 31410, + 31411, + 31412, + 31413, + 31414, + 31415, + 31416, + 31417, + 31418, + 31419, + 31420, + 31421, + 31422, + 31423, + 
31424, + 31425, + 31426, + 31427, + 31428, + 31429, + 31430, + 31431, + 31432, + 31433, + 31434, + 31435, + 31436, + 31437, + 31438, + 31439, + 31440, + 31441, + 31442, + 31443, + 31444, + 31445, + 31446, + 31447, + 31448, + 31449, + 31450, + 31451, + 31452, + 31453, + 31454, + 31455, + 31456, + 31457, + 31458, + 31459, + 31460, + 31461, + 31462, + 31463, + 31464, + 31465, + 31466, + 31467, + 31468, + 31469, + 31470, + 31471, + 31472, + 31473, + 31474, + 31475, + 31476, + 31477, + 31478, + 31479, + 31480, + 31481, + 31482, + 31483, + 31484, + 31485, + 31486, + 31487, + 31488, + 31489, + 31490, + 31491, + 31492, + 31493, + 31494, + 31495, + 31496, + 31497, + 31498, + 31499, + 31500, + 31501, + 31502, + 31503, + 31504, + 31505, + 31506, + 31507, + 31508, + 31509, + 31510, + 31511, + 31512, + 31513, + 31514, + 31515, + 31516, + 31517, + 31518, + 31519, + 31520, + 31521, + 31522, + 31523, + 31524, + 31525, + 31526, + 31527, + 31528, + 31529, + 31530, + 31531, + 31532, + 31533, + 31534, + 31535, + 31536, + 31537, + 31538, + 31539, + 31540, + 31541, + 31542, + 31543, + 31544, + 31545, + 31546, + 31547, + 31548, + 31549, + 31550, + 31551, + 31552, + 31553, + 31554, + 31555, + 31556, + 31557, + 31558, + 31559, + 31560, + 31561, + 31562, + 31563, + 31564, + 31565, + 31566, + 31567, + 31568, + 31569, + 31570, + 31571, + 31572, + 31573, + 31574, + 31575, + 31576, + 31577, + 31578, + 31579, + 31580, + 31581, + 31582, + 31583, + 31584, + 31585, + 31586, + 31587, + 31588, + 31589, + 31590, + 31591, + 31592, + 31593, + 31594, + 31595, + 31596, + 31597, + 31598, + 31599, + 31600, + 31601, + 31602, + 31603, + 31604, + 31605, + 31606, + 31607, + 31608, + 31609, + 31610, + 31611, + 31612, + 31613, + 31614, + 31615, + 31616, + 31617, + 31618, + 31619, + 31620, + 31621, + 31622, + 31623, + 31624, + 31625, + 31626, + 31627, + 31628, + 31629, + 31630, + 31631, + 31632, + 31633, + 31634, + 31635, + 31636, + 31637, + 31638, + 31639, + 31640, + 31641, + 31642, + 31643, + 31644, + 31645, + 
31646, + 31647, + 31648, + 31649, + 31650, + 31651, + 31652, + 31653, + 31654, + 31655, + 31656, + 31657, + 31658, + 31659, + 31660, + 31661, + 31662, + 31663, + 31664, + 31665, + 31666, + 31667, + 31668, + 31669, + 31670, + 31671, + 31672, + 31673, + 31674, + 31675, + 31676, + 31677, + 31678, + 31679, + 31680, + 31681, + 31682, + 31683, + 31684, + 31685, + 31686, + 31687, + 31688, + 31689, + 31690, + 31691, + 31692, + 31693, + 31694, + 31695, + 31696, + 31697, + 31698, + 31699, + 31700, + 31701, + 31702, + 31703, + 31704, + 31705, + 31706, + 31707, + 31708, + 31709, + 31710, + 31711, + 31712, + 31713, + 31714, + 31715, + 31716, + 31717, + 31718, + 31719, + 31720, + 31721, + 31722, + 31723, + 31724, + 31725, + 31726, + 31727, + 31728, + 31729, + 31730, + 31731, + 31732, + 31733, + 31734, + 31735, + 31736, + 31737, + 31738, + 31739, + 31740, + 31741, + 31742, + 31743, + 31744, + 31745, + 31746, + 31747, + 31748, + 31749, + 31750, + 31751, + 31752, + 31753, + 31754, + 31755, + 31756, + 31757, + 31758, + 31759, + 31760, + 31761, + 31762, + 31763, + 31764, + 31765, + 31766, + 31767, + 31768, + 31769, + 31770, + 31771, + 31772, + 31773, + 31774, + 31775, + 31776, + 31777, + 31778, + 31779, + 31780, + 31781, + 31782, + 31783, + 31784, + 31785, + 31786, + 31787, + 31788, + 31789, + 31790, + 31791, + 31792, + 31793, + 31794, + 31795, + 31796, + 31797, + 31798, + 31799, + 31800, + 31801, + 31802, + 31803, + 31804, + 31805, + 31806, + 31807, + 31808, + 31809, + 31810, + 31811, + 31812, + 31813, + 31814, + 31815, + 31816, + 31817, + 31818, + 31819, + 31820, + 31821, + 31822, + 31823, + 31824, + 31825, + 31826, + 31827, + 31828, + 31829, + 31830, + 31831, + 31832, + 31833, + 31834, + 31835, + 31836, + 31837, + 31838, + 31839, + 31840, + 31841, + 31842, + 31843, + 31844, + 31845, + 31846, + 31847, + 31848, + 31849, + 31850, + 31851, + 31852, + 31853, + 31854, + 31855, + 31856, + 31857, + 31858, + 31859, + 31860, + 31861, + 31862, + 31863, + 31864, + 31865, + 31866, + 31867, + 
31868, + 31869, + 31870, + 31871, + 31872, + 31873, + 31874, + 31875, + 31876, + 31877, + 31878, + 31879, + 31880, + 31881, + 31882, + 31883, + 31884, + 31885, + 31886, + 31887, + 31888, + 31889, + 31890, + 31891, + 31892, + 31893, + 31894, + 31895, + 31896, + 31897, + 31898, + 31899, + 31900, + 31901, + 31902, + 31903, + 31904, + 31905, + 31906, + 31907, + 31908, + 31909, + 31910, + 31911, + 31912, + 31913, + 31914, + 31915, + 31916, + 31917, + 31918, + 31919, + 31920, + 31921, + 31922, + 31923, + 31924, + 31925, + 31926, + 31927, + 31928, + 31929, + 31930, + 31931, + 31932, + 31933, + 31934, + 31935, + 31936, + 31937, + 31938, + 31939, + 31940, + 31941, + 31942, + 31943, + 31944, + 31945, + 31946, + 31947, + 31948, + 31949, + 31950, + 31951, + 31952, + 31953, + 31954, + 31955, + 31956, + 31957, + 31958, + 31959, + 31960, + 31961, + 31962, + 31963, + 31964, + 31965, + 31966, + 31967, + 31968, + 31969, + 31970, + 31971, + 31972, + 31973, + 31974, + 31975, + 31976, + 31977, + 31978, + 31979, + 31980, + 31981, + 31982, + 31983, + 31984, + 31985, + 31986, + 31987, + 31988, + 31989, + 31990, + 31991, + 31992, + 31993, + 31994, + 31995, + 31996, + 31997, + 31998, + 31999, + 32000, + 32001, + 32002, + 32003, + 32004, + 32005, + 32006, + 32007, + 32008, + 32009, + 32010, + 32011, + 32012, + 32013, + 32014, + 32015, + 32016, + 32017, + 32018, + 32019, + 32020, + 32021, + 32022, + 32023, + 32024, + 32025, + 32026, + 32027, + 32028, + 32029, + 32030, + 32031, + 32032, + 32033, + 32034, + 32035, + 32036, + 32037, + 32038, + 32039, + 32040, + 32041, + 32042, + 32043, + 32044, + 32045, + 32046, + 32047, + 32048, + 32049, + 32050, + 32051, + 32052, + 32053, + 32054, + 32055, + 32056, + 32057, + 32058, + 32059, + 32060, + 32061, + 32062, + 32063, + 32064, + 32065, + 32066, + 32067, + 32068, + 32069, + 32070, + 32071, + 32072, + 32073, + 32074, + 32075, + 32076, + 32077, + 32078, + 32079, + 32080, + 32081, + 32082, + 32083, + 32084, + 32085, + 32086, + 32087, + 32088, + 32089, + 
32090, + 32091, + 32092, + 32093, + 32094, + 32095, + 32096, + 32097, + 32098, + 32099, + 32100, + 32101, + 32102, + 32103, + 32104, + 32105, + 32106, + 32107, + 32108, + 32109, + 32110, + 32111, + 32112, + 32113, + 32114, + 32115, + 32116, + 32117, + 32118, + 32119, + 32120, + 32121, + 32122, + 32123, + 32124, + 32125, + 32126, + 32127, + 32128, + 32129, + 32130, + 32131, + 32132, + 32133, + 32134, + 32135, + 32136, + 32137, + 32138, + 32139, + 32140, + 32141, + 32142, + 32143, + 32144, + 32145, + 32146, + 32147, + 32148, + 32149, + 32150, + 32151, + 32152, + 32153, + 32154, + 32155, + 32156, + 32157, + 32158, + 32159, + 32160, + 32161, + 32162, + 32163, + 32164, + 32165, + 32166, + 32167, + 32168, + 32169, + 32170, + 32171, + 32172, + 32173, + 32174, + 32175, + 32176, + 32177, + 32178, + 32179, + 32180, + 32181, + 32182, + 32183, + 32184, + 32185, + 32186, + 32187, + 32188, + 32189, + 32190, + 32191, + 32192, + 32193, + 32194, + 32195, + 32196, + 32197, + 32198, + 32199, + 32200, + 32201, + 32202, + 32203, + 32204, + 32205, + 32206, + 32207, + 32208, + 32209, + 32210, + 32211, + 32212, + 32213, + 32214, + 32215, + 32216, + 32217, + 32218, + 32219, + 32220, + 32221, + 32222, + 32223, + 32224, + 32225, + 32226, + 32227, + 32228, + 32229, + 32230, + 32231, + 32232, + 32233, + 32234, + 32235, + 32236, + 32237, + 32238, + 32239, + 32240, + 32241, + 32242, + 32243, + 32244, + 32245, + 32246, + 32247, + 32248, + 32249, + 32250, + 32251, + 32252, + 32253, + 32254, + 32255, + 32256, + 32257, + 32258, + 32259, + 32260, + 32261, + 32262, + 32263, + 32264, + 32265, + 32266, + 32267, + 32268, + 32269, + 32270, + 32271, + 32272, + 32273, + 32274, + 32275, + 32276, + 32277, + 32278, + 32279, + 32280, + 32281, + 32282, + 32283, + 32284, + 32285, + 32286, + 32287, + 32288, + 32289, + 32290, + 32291, + 32292, + 32293, + 32294, + 32295, + 32296, + 32297, + 32298, + 32299, + 32300, + 32301, + 32302, + 32303, + 32304, + 32305, + 32306, + 32307, + 32308, + 32309, + 32310, + 32311, + 
32312, + 32313, + 32314, + 32315, + 32316, + 32317, + 32318, + 32319, + 32320, + 32321, + 32322, + 32323, + 32324, + 32325, + 32326, + 32327, + 32328, + 32329, + 32330, + 32331, + 32332, + 32333, + 32334, + 32335, + 32336, + 32337, + 32338, + 32339, + 32340, + 32341, + 32342, + 32343, + 32344, + 32345, + 32346, + 32347, + 32348, + 32349, + 32350, + 32351, + 32352, + 32353, + 32354, + 32355, + 32356, + 32357, + 32358, + 32359, + 32360, + 32361, + 32362, + 32363, + 32364, + 32365, + 32366, + 32367, + 32368, + 32369, + 32370, + 32371, + 32372, + 32373, + 32374, + 32375, + 32376, + 32377, + 32378, + 32379, + 32380, + 32381, + 32382, + 32383, + 32384, + 32385, + 32386, + 32387, + 32388, + 32389, + 32390, + 32391, + 32392, + 32393, + 32394, + 32395, + 32396, + 32397, + 32398, + 32399, + 32400, + 32401, + 32402, + 32403, + 32404, + 32405, + 32406, + 32407, + 32408, + 32409, + 32410, + 32411, + 32412, + 32413, + 32414, + 32415, + 32416, + 32417, + 32418, + 32419, + 32420, + 32421, + 32422, + 32423, + 32424, + 32425, + 32426, + 32427, + 32428, + 32429, + 32430, + 32431, + 32432, + 32433, + 32434, + 32435, + 32436, + 32437, + 32438, + 32439, + 32440, + 32441, + 32442, + 32443, + 32444, + 32445, + 32446, + 32447, + 32448, + 32449, + 32450, + 32451, + 32452, + 32453, + 32454, + 32455, + 32456, + 32457, + 32458, + 32459, + 32460, + 32461, + 32462, + 32463, + 32464, + 32465, + 32466, + 32467, + 32468, + 32469, + 32470, + 32471, + 32472, + 32473, + 32474, + 32475, + 32476, + 32477, + 32478, + 32479, + 32480, + 32481, + 32482, + 32483, + 32484, + 32485, + 32486, + 32487, + 32488, + 32489, + 32490, + 32491, + 32492, + 32493, + 32494, + 32495, + 32496, + 32497, + 32498, + 32499, + 32500, + 32501, + 32502, + 32503, + 32504, + 32505, + 32506, + 32507, + 32508, + 32509, + 32510, + 32511, + 32512, + 32513, + 32514, + 32515, + 32516, + 32517, + 32518, + 32519, + 32520, + 32521, + 32522, + 32523, + 32524, + 32525, + 32526, + 32527, + 32528, + 32529, + 32530, + 32531, + 32532, + 32533, + 
32534, + 32535, + 32536, + 32537, + 32538, + 32539, + 32540, + 32541, + 32542, + 32543, + 32544, + 32545, + 32546, + 32547, + 32548, + 32549, + 32550, + 32551, + 32552, + 32553, + 32554, + 32555, + 32556, + 32557, + 32558, + 32559, + 32560, + 32561, + 32562, + 32563, + 32564, + 32565, + 32566, + 32567, + 32568, + 32569, + 32570, + 32571, + 32572, + 32573, + 32574, + 32575, + 32576, + 32577, + 32578, + 32579, + 32580, + 32581, + 32582, + 32583, + 32584, + 32585, + 32586, + 32587, + 32588, + 32589, + 32590, + 32591, + 32592, + 32593, + 32594, + 32595, + 32596, + 32597, + 32598, + 32599, + 32600, + 32601, + 32602, + 32603, + 32604, + 32605, + 32606, + 32607, + 32608, + 32609, + 32610, + 32611, + 32612, + 32613, + 32614, + 32615, + 32616, + 32617, + 32618, + 32619, + 32620, + 32621, + 32622, + 32623, + 32624, + 32625, + 32626, + 32627, + 32628, + 32629, + 32630, + 32631, + 32632, + 32633, + 32634, + 32635, + 32636, + 32637, + 32638, + 32639, + 32640, + 32641, + 32642, + 32643, + 32644, + 32645, + 32646, + 32647, + 32648, + 32649, + 32650, + 32651, + 32652, + 32653, + 32654, + 32655, + 32656, + 32657, + 32658, + 32659, + 32660, + 32661, + 32662, + 32663, + 32664, + 32665, + 32666, + 32667, + 32668, + 32669, + 32670, + 32671, + 32672, + 32673, + 32674, + 32675, + 32676, + 32677, + 32678, + 32679, + 32680, + 32681, + 32682, + 32683, + 32684, + 32685, + 32686, + 32687, + 32688, + 32689, + 32690, + 32691, + 32692, + 32693, + 32694, + 32695, + 32696, + 32697, + 32698, + 32699, + 32700, + 32701, + 32702, + 32703, + 32704, + 32705, + 32706, + 32707, + 32708, + 32709, + 32710, + 32711, + 32712, + 32713, + 32714, + 32715, + 32716, + 32717, + 32718, + 32719, + 32720, + 32721, + 32722, + 32723, + 32724, + 32725, + 32726, + 32727, + 32728, + 32729, + 32730, + 32731, + 32732, + 32733, + 32734, + 32735, + 32736, + 32737, + 32738, + 32739, + 32740, + 32741, + 32742, + 32743, + 32744, + 32745, + 32746, + 32747, + 32748, + 32749, + 32750, + 32751, + 32752, + 32753, + 32754, + 32755, + 
32756, + 32757, + 32758, + 32759, + 32760, + 32761, + 32762, + 32763, + 32764, + 32765, + 32766, + 32767, + 32768, + 32769, + 32770, + 32771, + 32772, + 32773, + 32774, + 32775, + 32776, + 32777, + 32778, + 32779, + 32780, + 32781, + 32782, + 32783, + 32784, + 32785, + 32786, + 32787, + 32788, + 32789, + 32790, + 32791, + 32792, + 32793, + 32794, + 32795, + 32796, + 32797, + 32798, + 32799, + 32800, + 32801, + 32802, + 32803, + 32804, + 32805, + 32806, + 32807, + 32808, + 32809, + 32810, + 32811, + 32812, + 32813, + 32814, + 32815, + 32816, + 32817, + 32818, + 32819, + 32820, + 32821, + 32822, + 32823, + 32824, + 32825, + 32826, + 32827, + 32828, + 32829, + 32830, + 32831, + 32832, + 32833, + 32834, + 32835, + 32836, + 32837, + 32838, + 32839, + 32840, + 32841, + 32842, + 32843, + 32844, + 32845, + 32846, + 32847, + 32848, + 32849, + 32850, + 32851, + 32852, + 32853, + 32854, + 32855, + 32856, + 32857, + 32858, + 32859, + 32860, + 32861, + 32862, + 32863, + 32864, + 32865, + 32866, + 32867, + 32868, + 32869, + 32870, + 32871, + 32872, + 32873, + 32874, + 32875, + 32876, + 32877, + 32878, + 32879, + 32880, + 32881, + 32882, + 32883, + 32884, + 32885, + 32886, + 32887, + 32888, + 32889, + 32890, + 32891, + 32892, + 32893, + 32894, + 32895, + 32896, + 32897, + 32898, + 32899, + 32900, + 32901, + 32902, + 32903, + 32904, + 32905, + 32906, + 32907, + 32908, + 32909, + 32910, + 32911, + 32912, + 32913, + 32914, + 32915, + 32916, + 32917, + 32918, + 32919, + 32920, + 32921, + 32922, + 32923, + 32924, + 32925, + 32926, + 32927, + 32928, + 32929, + 32930, + 32931, + 32932, + 32933, + 32934, + 32935, + 32936, + 32937, + 32938, + 32939, + 32940, + 32941, + 32942, + 32943, + 32944, + 32945, + 32946, + 32947, + 32948, + 32949, + 32950, + 32951, + 32952, + 32953, + 32954, + 32955, + 32956, + 32957, + 32958, + 32959, + 32960, + 32961, + 32962, + 32963, + 32964, + 32965, + 32966, + 32967, + 32968, + 32969, + 32970, + 32971, + 32972, + 32973, + 32974, + 32975, + 32976, + 32977, + 
32978, + 32979, + 32980, + 32981, + 32982, + 32983, + 32984, + 32985, + 32986, + 32987, + 32988, + 32989, + 32990, + 32991, + 32992, + 32993, + 32994, + 32995, + 32996, + 32997, + 32998, + 32999, + 33000, + 33001, + 33002, + 33003, + 33004, + 33005, + 33006, + 33007, + 33008, + 33009, + 33010, + 33011, + 33012, + 33013, + 33014, + 33015, + 33016, + 33017, + 33018, + 33019, + 33020, + 33021, + 33022, + 33023, + 33024, + 33025, + 33026, + 33027, + 33028, + 33029, + 33030, + 33031, + 33032, + 33033, + 33034, + 33035, + 33036, + 33037, + 33038, + 33039, + 33040, + 33041, + 33042, + 33043, + 33044, + 33045, + 33046, + 33047, + 33048, + 33049, + 33050, + 33051, + 33052, + 33053, + 33054, + 33055, + 33056, + 33057, + 33058, + 33059, + 33060, + 33061, + 33062, + 33063, + 33064, + 33065, + 33066, + 33067, + 33068, + 33069, + 33070, + 33071, + 33072, + 33073, + 33074, + 33075, + 33076, + 33077, + 33078, + 33079, + 33080, + 33081, + 33082, + 33083, + 33084, + 33085, + 33086, + 33087, + 33088, + 33089, + 33090, + 33091, + 33092, + 33093, + 33094, + 33095, + 33096, + 33097, + 33098, + 33099, + 33100, + 33101, + 33102, + 33103, + 33104, + 33105, + 33106, + 33107, + 33108, + 33109, + 33110, + 33111, + 33112, + 33113, + 33114, + 33115, + 33116, + 33117, + 33118, + 33119, + 33120, + 33121, + 33122, + 33123, + 33124, + 33125, + 33126, + 33127, + 33128, + 33129, + 33130, + 33131, + 33132, + 33133, + 33134, + 33135, + 33136, + 33137, + 33138, + 33139, + 33140, + 33141, + 33142, + 33143, + 33144, + 33145, + 33146, + 33147, + 33148, + 33149, + 33150, + 33151, + 33152, + 33153, + 33154, + 33155, + 33156, + 33157, + 33158, + 33159, + 33160, + 33161, + 33162, + 33163, + 33164, + 33165, + 33166, + 33167, + 33168, + 33169, + 33170, + 33171, + 33172, + 33173, + 33174, + 33175, + 33176, + 33177, + 33178, + 33179, + 33180, + 33181, + 33182, + 33183, + 33184, + 33185, + 33186, + 33187, + 33188, + 33189, + 33190, + 33191, + 33192, + 33193, + 33194, + 33195, + 33196, + 33197, + 33198, + 33199, + 
33200, + 33201, + 33202, + 33203, + 33204, + 33205, + 33206, + 33207, + 33208, + 33209, + 33210, + 33211, + 33212, + 33213, + 33214, + 33215, + 33216, + 33217, + 33218, + 33219, + 33220, + 33221, + 33222, + 33223, + 33224, + 33225, + 33226, + 33227, + 33228, + 33229, + 33230, + 33231, + 33232, + 33233, + 33234, + 33235, + 33236, + 33237, + 33238, + 33239, + 33240, + 33241, + 33242, + 33243, + 33244, + 33245, + 33246, + 33247, + 33248, + 33249, + 33250, + 33251, + 33252, + 33253, + 33254, + 33255, + 33256, + 33257, + 33258, + 33259, + 33260, + 33261, + 33262, + 33263, + 33264, + 33265, + 33266, + 33267, + 33268, + 33269, + 33270, + 33271, + 33272, + 33273, + 33274, + 33275, + 33276, + 33277, + 33278, + 33279, + 33280, + 33281, + 33282, + 33283, + 33284, + 33285, + 33286, + 33287, + 33288, + 33289, + 33290, + 33291, + 33292, + 33293, + 33294, + 33295, + 33296, + 33297, + 33298, + 33299, + 33300, + 33301, + 33302, + 33303, + 33304, + 33305, + 33306, + 33307, + 33308, + 33309, + 33310, + 33311, + 33312, + 33313, + 33314, + 33315, + 33316, + 33317, + 33318, + 33319, + 33320, + 33321, + 33322, + 33323, + 33324, + 33325, + 33326, + 33327, + 33328, + 33329, + 33330, + 33331, + 33332, + 33333, + 33334, + 33335, + 33336, + 33337, + 33338, + 33339, + 33340, + 33341, + 33342, + 33343, + 33344, + 33345, + 33346, + 33347, + 33348, + 33349, + 33350, + 33351, + 33352, + 33353, + 33354, + 33355, + 33356, + 33357, + 33358, + 33359, + 33360, + 33361, + 33362, + 33363, + 33364, + 33365, + 33366, + 33367, + 33368, + 33369, + 33370, + 33371, + 33372, + 33373, + 33374, + 33375, + 33376, + 33377, + 33378, + 33379, + 33380, + 33381, + 33382, + 33383, + 33384, + 33385, + 33386, + 33387, + 33388, + 33389, + 33390, + 33391, + 33392, + 33393, + 33394, + 33395, + 33396, + 33397, + 33398, + 33399, + 33400, + 33401, + 33402, + 33403, + 33404, + 33405, + 33406, + 33407, + 33408, + 33409, + 33410, + 33411, + 33412, + 33413, + 33414, + 33415, + 33416, + 33417, + 33418, + 33419, + 33420, + 33421, + 
33422, + 33423, + 33424, + 33425, + 33426, + 33427, + 33428, + 33429, + 33430, + 33431, + 33432, + 33433, + 33434, + 33435, + 33436, + 33437, + 33438, + 33439, + 33440, + 33441, + 33442, + 33443, + 33444, + 33445, + 33446, + 33447, + 33448, + 33449, + 33450, + 33451, + 33452, + 33453, + 33454, + 33455, + 33456, + 33457, + 33458, + 33459, + 33460, + 33461, + 33462, + 33463, + 33464, + 33465, + 33466, + 33467, + 33468, + 33469, + 33470, + 33471, + 33472, + 33473, + 33474, + 33475, + 33476, + 33477, + 33478, + 33479, + 33480, + 33481, + 33482, + 33483, + 33484, + 33485, + 33486, + 33487, + 33488, + 33489, + 33490, + 33491, + 33492, + 33493, + 33494, + 33495, + 33496, + 33497, + 33498, + 33499, + 33500, + 33501, + 33502, + 33503, + 33504, + 33505, + 33506, + 33507, + 33508, + 33509, + 33510, + 33511, + 33512, + 33513, + 33514, + 33515, + 33516, + 33517, + 33518, + 33519, + 33520, + 33521, + 33522, + 33523, + 33524, + 33525, + 33526, + 33527, + 33528, + 33529, + 33530, + 33531, + 33532, + 33533, + 33534, + 33535, + 33536, + 33537, + 33538, + 33539, + 33540, + 33541, + 33542, + 33543, + 33544, + 33545, + 33546, + 33547, + 33548, + 33549, + 33550, + 33551, + 33552, + 33553, + 33554, + 33555, + 33556, + 33557, + 33558, + 33559, + 33560, + 33561, + 33562, + 33563, + 33564, + 33565, + 33566, + 33567, + 33568, + 33569, + 33570, + 33571, + 33572, + 33573, + 33574, + 33575, + 33576, + 33577, + 33578, + 33579, + 33580, + 33581, + 33582, + 33583, + 33584, + 33585, + 33586, + 33587, + 33588, + 33589, + 33590, + 33591, + 33592, + 33593, + 33594, + 33595, + 33596, + 33597, + 33598, + 33599, + 33600, + 33601, + 33602, + 33603, + 33604, + 33605, + 33606, + 33607, + 33608, + 33609, + 33610, + 33611, + 33612, + 33613, + 33614, + 33615, + 33616, + 33617, + 33618, + 33619, + 33620, + 33621, + 33622, + 33623, + 33624, + 33625, + 33626, + 33627, + 33628, + 33629, + 33630, + 33631, + 33632, + 33633, + 33634, + 33635, + 33636, + 33637, + 33638, + 33639, + 33640, + 33641, + 33642, + 33643, + 
33644, + 33645, + 33646, + 33647, + 33648, + 33649, + 33650, + 33651, + 33652, + 33653, + 33654, + 33655, + 33656, + 33657, + 33658, + 33659, + 33660, + 33661, + 33662, + 33663, + 33664, + 33665, + 33666, + 33667, + 33668, + 33669, + 33670, + 33671, + 33672, + 33673, + 33674, + 33675, + 33676, + 33677, + 33678, + 33679, + 33680, + 33681, + 33682, + 33683, + 33684, + 33685, + 33686, + 33687, + 33688, + 33689, + 33690, + 33691, + 33692, + 33693, + 33694, + 33695, + 33696, + 33697, + 33698, + 33699, + 33700, + 33701, + 33702, + 33703, + 33704, + 33705, + 33706, + 33707, + 33708, + 33709, + 33710, + 33711, + 33712, + 33713, + 33714, + 33715, + 33716, + 33717, + 33718, + 33719, + 33720, + 33721, + 33722, + 33723, + 33724, + 33725, + 33726, + 33727, + 33728, + 33729, + 33730, + 33731, + 33732, + 33733, + 33734, + 33735, + 33736, + 33737, + 33738, + 33739, + 33740, + 33741, + 33742, + 33743, + 33744, + 33745, + 33746, + 33747, + 33748, + 33749, + 33750, + 33751, + 33752, + 33753, + 33754, + 33755, + 33756, + 33757, + 33758, + 33759, + 33760, + 33761, + 33762, + 33763, + 33764, + 33765, + 33766, + 33767, + 33768, + 33769, + 33770, + 33771, + 33772, + 33773, + 33774, + 33775, + 33776, + 33777, + 33778, + 33779, + 33780, + 33781, + 33782, + 33783, + 33784, + 33785, + 33786, + 33787, + 33788, + 33789, + 33790, + 33791, + 33792, + 33793, + 33794, + 33795, + 33796, + 33797, + 33798, + 33799, + 33800, + 33801, + 33802, + 33803, + 33804, + 33805, + 33806, + 33807, + 33808, + 33809, + 33810, + 33811, + 33812, + 33813, + 33814, + 33815, + 33816, + 33817, + 33818, + 33819, + 33820, + 33821, + 33822, + 33823, + 33824, + 33825, + 33826, + 33827, + 33828, + 33829, + 33830, + 33831, + 33832, + 33833, + 33834, + 33835, + 33836, + 33837, + 33838, + 33839, + 33840, + 33841, + 33842, + 33843, + 33844, + 33845, + 33846, + 33847, + 33848, + 33849, + 33850, + 33851, + 33852, + 33853, + 33854, + 33855, + 33856, + 33857, + 33858, + 33859, + 33860, + 33861, + 33862, + 33863, + 33864, + 33865, + 
33866, + 33867, + 33868, + 33869, + 33870, + 33871, + 33872, + 33873, + 33874, + 33875, + 33876, + 33877, + 33878, + 33879, + 33880, + 33881, + 33882, + 33883, + 33884, + 33885, + 33886, + 33887, + 33888, + 33889, + 33890, + 33891, + 33892, + 33893, + 33894, + 33895, + 33896, + 33897, + 33898, + 33899, + 33900, + 33901, + 33902, + 33903, + 33904, + 33905, + 33906, + 33907, + 33908, + 33909, + 33910, + 33911, + 33912, + 33913, + 33914, + 33915, + 33916, + 33917, + 33918, + 33919, + 33920, + 33921, + 33922, + 33923, + 33924, + 33925, + 33926, + 33927, + 33928, + 33929, + 33930, + 33931, + 33932, + 33933, + 33934, + 33935, + 33936, + 33937, + 33938, + 33939, + 33940, + 33941, + 33942, + 33943, + 33944, + 33945, + 33946, + 33947, + 33948, + 33949, + 33950, + 33951, + 33952, + 33953, + 33954, + 33955, + 33956, + 33957, + 33958, + 33959, + 33960, + 33961, + 33962, + 33963, + 33964, + 33965, + 33966, + 33967, + 33968, + 33969, + 33970, + 33971, + 33972, + 33973, + 33974, + 33975, + 33976, + 33977, + 33978, + 33979, + 33980, + 33981, + 33982, + 33983, + 33984, + 33985, + 33986, + 33987, + 33988, + 33989, + 33990, + 33991, + 33992, + 33993, + 33994, + 33995, + 33996, + 33997, + 33998, + 33999, + 34000, + 34001, + 34002, + 34003, + 34004, + 34005, + 34006, + 34007, + 34008, + 34009, + 34010, + 34011, + 34012, + 34013, + 34014, + 34015, + 34016, + 34017, + 34018, + 34019, + 34020, + 34021, + 34022, + 34023, + 34024, + 34025, + 34026, + 34027, + 34028, + 34029, + 34030, + 34031, + 34032, + 34033, + 34034, + 34035, + 34036, + 34037, + 34038, + 34039, + 34040, + 34041, + 34042, + 34043, + 34044, + 34045, + 34046, + 34047, + 34048, + 34049, + 34050, + 34051, + 34052, + 34053, + 34054, + 34055, + 34056, + 34057, + 34058, + 34059, + 34060, + 34061, + 34062, + 34063, + 34064, + 34065, + 34066, + 34067, + 34068, + 34069, + 34070, + 34071, + 34072, + 34073, + 34074, + 34075, + 34076, + 34077, + 34078, + 34079, + 34080, + 34081, + 34082, + 34083, + 34084, + 34085, + 34086, + 34087, + 
34088, + 34089, + 34090, + 34091, + 34092, + 34093, + 34094, + 34095, + 34096, + 34097, + 34098, + 34099, + 34100, + 34101, + 34102, + 34103, + 34104, + 34105, + 34106, + 34107, + 34108, + 34109, + 34110, + 34111, + 34112, + 34113, + 34114, + 34115, + 34116, + 34117, + 34118, + 34119, + 34120, + 34121, + 34122, + 34123, + 34124, + 34125, + 34126, + 34127, + 34128, + 34129, + 34130, + 34131, + 34132, + 34133, + 34134, + 34135, + 34136, + 34137, + 34138, + 34139, + 34140, + 34141, + 34142, + 34143, + 34144, + 34145, + 34146, + 34147, + 34148, + 34149, + 34150, + 34151, + 34152, + 34153, + 34154, + 34155, + 34156, + 34157, + 34158, + 34159, + 34160, + 34161, + 34162, + 34163, + 34164, + 34165, + 34166, + 34167, + 34168, + 34169, + 34170, + 34171, + 34172, + 34173, + 34174, + 34175, + 34176, + 34177, + 34178, + 34179, + 34180, + 34181, + 34182, + 34183, + 34184, + 34185, + 34186, + 34187, + 34188, + 34189, + 34190, + 34191, + 34192, + 34193, + 34194, + 34195, + 34196, + 34197, + 34198, + 34199, + 34200, + 34201, + 34202, + 34203, + 34204, + 34205, + 34206, + 34207, + 34208, + 34209, + 34210, + 34211, + 34212, + 34213, + 34214, + 34215, + 34216, + 34217, + 34218, + 34219, + 34220, + 34221, + 34222, + 34223, + 34224, + 34225, + 34226, + 34227, + 34228, + 34229, + 34230, + 34231, + 34232, + 34233, + 34234, + 34235, + 34236, + 34237, + 34238, + 34239, + 34240, + 34241, + 34242, + 34243, + 34244, + 34245, + 34246, + 34247, + 34248, + 34249, + 34250, + 34251, + 34252, + 34253, + 34254, + 34255, + 34256, + 34257, + 34258, + 34259, + 34260, + 34261, + 34262, + 34263, + 34264, + 34265, + 34266, + 34267, + 34268, + 34269, + 34270, + 34271, + 34272, + 34273, + 34274, + 34275, + 34276, + 34277, + 34278, + 34279, + 34280, + 34281, + 34282, + 34283, + 34284, + 34285, + 34286, + 34287, + 34288, + 34289, + 34290, + 34291, + 34292, + 34293, + 34294, + 34295, + 34296, + 34297, + 34298, + 34299, + 34300, + 34301, + 34302, + 34303, + 34304, + 34305, + 34306, + 34307, + 34308, + 34309, + 
34310, + 34311, + 34312, + 34313, + 34314, + 34315, + 34316, + 34317, + 34318, + 34319, + 34320, + 34321, + 34322, + 34323, + 34324, + 34325, + 34326, + 34327, + 34328, + 34329, + 34330, + 34331, + 34332, + 34333, + 34334, + 34335, + 34336, + 34337, + 34338, + 34339, + 34340, + 34341, + 34342, + 34343, + 34344, + 34345, + 34346, + 34347, + 34348, + 34349, + 34350, + 34351, + 34352, + 34353, + 34354, + 34355, + 34356, + 34357, + 34358, + 34359, + 34360, + 34361, + 34362, + 34363, + 34364, + 34365, + 34366, + 34367, + 34368, + 34369, + 34370, + 34371, + 34372, + 34373, + 34374, + 34375, + 34376, + 34377, + 34378, + 34379, + 34380, + 34381, + 34382, + 34383, + 34384, + 34385, + 34386, + 34387, + 34388, + 34389, + 34390, + 34391, + 34392, + 34393, + 34394, + 34395, + 34396, + 34397, + 34398, + 34399, + 34400, + 34401, + 34402, + 34403, + 34404, + 34405, + 34406, + 34407, + 34408, + 34409, + 34410, + 34411, + 34412, + 34413, + 34414, + 34415, + 34416, + 34417, + 34418, + 34419, + 34420, + 34421, + 34422, + 34423, + 34424, + 34425, + 34426, + 34427, + 34428, + 34429, + 34430, + 34431, + 34432, + 34433, + 34434, + 34435, + 34436, + 34437, + 34438, + 34439, + 34440, + 34441, + 34442, + 34443, + 34444, + 34445, + 34446, + 34447, + 34448, + 34449, + 34450, + 34451, + 34452, + 34453, + 34454, + 34455, + 34456, + 34457, + 34458, + 34459, + 34460, + 34461, + 34462, + 34463, + 34464, + 34465, + 34466, + 34467, + 34468, + 34469, + 34470, + 34471, + 34472, + 34473, + 34474, + 34475, + 34476, + 34477, + 34478, + 34479, + 34480, + 34481, + 34482, + 34483, + 34484, + 34485, + 34486, + 34487, + 34488, + 34489, + 34490, + 34491, + 34492, + 34493, + 34494, + 34495, + 34496, + 34497, + 34498, + 34499, + 34500, + 34501, + 34502, + 34503, + 34504, + 34505, + 34506, + 34507, + 34508, + 34509, + 34510, + 34511, + 34512, + 34513, + 34514, + 34515, + 34516, + 34517, + 34518, + 34519, + 34520, + 34521, + 34522, + 34523, + 34524, + 34525, + 34526, + 34527, + 34528, + 34529, + 34530, + 34531, + 
34532, + 34533, + 34534, + 34535, + 34536, + 34537, + 34538, + 34539, + 34540, + 34541, + 34542, + 34543, + 34544, + 34545, + 34546, + 34547, + 34548, + 34549, + 34550, + 34551, + 34552, + 34553, + 34554, + 34555, + 34556, + 34557, + 34558, + 34559, + 34560, + 34561, + 34562, + 34563, + 34564, + 34565, + 34566, + 34567, + 34568, + 34569, + 34570, + 34571, + 34572, + 34573, + 34574, + 34575, + 34576, + 34577, + 34578, + 34579, + 34580, + 34581, + 34582, + 34583, + 34584, + 34585, + 34586, + 34587, + 34588, + 34589, + 34590, + 34591, + 34592, + 34593, + 34594, + 34595, + 34596, + 34597, + 34598, + 34599, + 34600, + 34601, + 34602, + 34603, + 34604, + 34605, + 34606, + 34607, + 34608, + 34609, + 34610, + 34611, + 34612, + 34613, + 34614, + 34615, + 34616, + 34617, + 34618, + 34619, + 34620, + 34621, + 34622, + 34623, + 34624, + 34625, + 34626, + 34627, + 34628, + 34629, + 34630, + 34631, + 34632, + 34633, + 34634, + 34635, + 34636, + 34637, + 34638, + 34639, + 34640, + 34641, + 34642, + 34643, + 34644, + 34645, + 34646, + 34647, + 34648, + 34649, + 34650, + 34651, + 34652, + 34653, + 34654, + 34655, + 34656, + 34657, + 34658, + 34659, + 34660, + 34661, + 34662, + 34663, + 34664, + 34665, + 34666, + 34667, + 34668, + 34669, + 34670, + 34671, + 34672, + 34673, + 34674, + 34675, + 34676, + 34677, + 34678, + 34679, + 34680, + 34681, + 34682, + 34683, + 34684, + 34685, + 34686, + 34687, + 34688, + 34689, + 34690, + 34691, + 34692, + 34693, + 34694, + 34695, + 34696, + 34697, + 34698, + 34699, + 34700, + 34701, + 34702, + 34703, + 34704, + 34705, + 34706, + 34707, + 34708, + 34709, + 34710, + 34711, + 34712, + 34713, + 34714, + 34715, + 34716, + 34717, + 34718, + 34719, + 34720, + 34721, + 34722, + 34723, + 34724, + 34725, + 34726, + 34727, + 34728, + 34729, + 34730, + 34731, + 34732, + 34733, + 34734, + 34735, + 34736, + 34737, + 34738, + 34739, + 34740, + 34741, + 34742, + 34743, + 34744, + 34745, + 34746, + 34747, + 34748, + 34749, + 34750, + 34751, + 34752, + 34753, + 
34754, + 34755, + 34756, + 34757, + 34758, + 34759, + 34760, + 34761, + 34762, + 34763, + 34764, + 34765, + 34766, + 34767, + 34768, + 34769, + 34770, + 34771, + 34772, + 34773, + 34774, + 34775, + 34776, + 34777, + 34778, + 34779, + 34780, + 34781, + 34782, + 34783, + 34784, + 34785, + 34786, + 34787, + 34788, + 34789, + 34790, + 34791, + 34792, + 34793, + 34794, + 34795, + 34796, + 34797, + 34798, + 34799, + 34800, + 34801, + 34802, + 34803, + 34804, + 34805, + 34806, + 34807, + 34808, + 34809, + 34810, + 34811, + 34812, + 34813, + 34814, + 34815, + 34816, + 34817, + 34818, + 34819, + 34820, + 34821, + 34822, + 34823, + 34824, + 34825, + 34826, + 34827, + 34828, + 34829, + 34830, + 34831, + 34832, + 34833, + 34834, + 34835, + 34836, + 34837, + 34838, + 34839, + 34840, + 34841, + 34842, + 34843, + 34844, + 34845, + 34846, + 34847, + 34848, + 34849, + 34850, + 34851, + 34852, + 34853, + 34854, + 34855, + 34856, + 34857, + 34858, + 34859, + 34860, + 34861, + 34862, + 34863, + 34864, + 34865, + 34866, + 34867, + 34868, + 34869, + 34870, + 34871, + 34872, + 34873, + 34874, + 34875, + 34876, + 34877, + 34878, + 34879, + 34880, + 34881, + 34882, + 34883, + 34884, + 34885, + 34886, + 34887, + 34888, + 34889, + 34890, + 34891, + 34892, + 34893, + 34894, + 34895, + 34896, + 34897, + 34898, + 34899, + 34900, + 34901, + 34902, + 34903, + 34904, + 34905, + 34906, + 34907, + 34908, + 34909, + 34910, + 34911, + 34912, + 34913, + 34914, + 34915, + 34916, + 34917, + 34918, + 34919, + 34920, + 34921, + 34922, + 34923, + 34924, + 34925, + 34926, + 34927, + 34928, + 34929, + 34930, + 34931, + 34932, + 34933, + 34934, + 34935, + 34936, + 34937, + 34938, + 34939, + 34940, + 34941, + 34942, + 34943, + 34944, + 34945, + 34946, + 34947, + 34948, + 34949, + 34950, + 34951, + 34952, + 34953, + 34954, + 34955, + 34956, + 34957, + 34958, + 34959, + 34960, + 34961, + 34962, + 34963, + 34964, + 34965, + 34966, + 34967, + 34968, + 34969, + 34970, + 34971, + 34972, + 34973, + 34974, + 34975, + 
34976, + 34977, + 34978, + 34979, + 34980, + 34981, + 34982, + 34983, + 34984, + 34985, + 34986, + 34987, + 34988, + 34989, + 34990, + 34991, + 34992, + 34993, + 34994, + 34995, + 34996, + 34997, + 34998, + 34999, + 35000, + 35001, + 35002, + 35003, + 35004, + 35005, + 35006, + 35007, + 35008, + 35009, + 35010, + 35011, + 35012, + 35013, + 35014, + 35015, + 35016, + 35017, + 35018, + 35019, + 35020, + 35021, + 35022, + 35023, + 35024, + 35025, + 35026, + 35027, + 35028, + 35029, + 35030, + 35031, + 35032, + 35033, + 35034, + 35035, + 35036, + 35037, + 35038, + 35039, + 35040, + 35041, + 35042, + 35043, + 35044, + 35045, + 35046, + 35047, + 35048, + 35049, + 35050, + 35051, + 35052, + 35053, + 35054, + 35055, + 35056, + 35057, + 35058, + 35059, + 35060, + 35061, + 35062, + 35063, + 35064, + 35065, + 35066, + 35067, + 35068, + 35069, + 35070, + 35071, + 35072, + 35073, + 35074, + 35075, + 35076, + 35077, + 35078, + 35079, + 35080, + 35081, + 35082, + 35083, + 35084, + 35085, + 35086, + 35087, + 35088, + 35089, + 35090, + 35091, + 35092, + 35093, + 35094, + 35095, + 35096, + 35097, + 35098, + 35099, + 35100, + 35101, + 35102, + 35103, + 35104, + 35105, + 35106, + 35107, + 35108, + 35109, + 35110, + 35111, + 35112, + 35113, + 35114, + 35115, + 35116, + 35117, + 35118, + 35119, + 35120, + 35121, + 35122, + 35123, + 35124, + 35125, + 35126, + 35127, + 35128, + 35129, + 35130, + 35131, + 35132, + 35133, + 35134, + 35135, + 35136, + 35137, + 35138, + 35139, + 35140, + 35141, + 35142, + 35143, + 35144, + 35145, + 35146, + 35147, + 35148, + 35149, + 35150, + 35151, + 35152, + 35153, + 35154, + 35155, + 35156, + 35157, + 35158, + 35159, + 35160, + 35161, + 35162, + 35163, + 35164, + 35165, + 35166, + 35167, + 35168, + 35169, + 35170, + 35171, + 35172, + 35173, + 35174, + 35175, + 35176, + 35177, + 35178, + 35179, + 35180, + 35181, + 35182, + 35183, + 35184, + 35185, + 35186, + 35187, + 35188, + 35189, + 35190, + 35191, + 35192, + 35193, + 35194, + 35195, + 35196, + 35197, + 
35198, + 35199, + 35200, + 35201, + 35202, + 35203, + 35204, + 35205, + 35206, + 35207, + 35208, + 35209, + 35210, + 35211, + 35212, + 35213, + 35214, + 35215, + 35216, + 35217, + 35218, + 35219, + 35220, + 35221, + 35222, + 35223, + 35224, + 35225, + 35226, + 35227, + 35228, + 35229, + 35230, + 35231, + 35232, + 35233, + 35234, + 35235, + 35236, + 35237, + 35238, + 35239, + 35240, + 35241, + 35242, + 35243, + 35244, + 35245, + 35246, + 35247, + 35248, + 35249, + 35250, + 35251, + 35252, + 35253, + 35254, + 35255, + 35256, + 35257, + 35258, + 35259, + 35260, + 35261, + 35262, + 35263, + 35264, + 35265, + 35266, + 35267, + 35268, + 35269, + 35270, + 35271, + 35272, + 35273, + 35274, + 35275, + 35276, + 35277, + 35278, + 35279, + 35280, + 35281, + 35282, + 35283, + 35284, + 35285, + 35286, + 35287, + 35288, + 35289, + 35290, + 35291, + 35292, + 35293, + 35294, + 35295, + 35296, + 35297, + 35298, + 35299, + 35300, + 35301, + 35302, + 35303, + 35304, + 35305, + 35306, + 35307, + 35308, + 35309, + 35310, + 35311, + 35312, + 35313, + 35314, + 35315, + 35316, + 35317, + 35318, + 35319, + 35320, + 35321, + 35322, + 35323, + 35324, + 35325, + 35326, + 35327, + 35328, + 35329, + 35330, + 35331, + 35332, + 35333, + 35334, + 35335, + 35336, + 35337, + 35338, + 35339, + 35340, + 35341, + 35342, + 35343, + 35344, + 35345, + 35346, + 35347, + 35348, + 35349, + 35350, + 35351, + 35352, + 35353, + 35354, + 35355, + 35356, + 35357, + 35358, + 35359, + 35360, + 35361, + 35362, + 35363, + 35364, + 35365, + 35366, + 35367, + 35368, + 35369, + 35370, + 35371, + 35372, + 35373, + 35374, + 35375, + 35376, + 35377, + 35378, + 35379, + 35380, + 35381, + 35382, + 35383, + 35384, + 35385, + 35386, + 35387, + 35388, + 35389, + 35390, + 35391, + 35392, + 35393, + 35394, + 35395, + 35396, + 35397, + 35398, + 35399, + 35400, + 35401, + 35402, + 35403, + 35404, + 35405, + 35406, + 35407, + 35408, + 35409, + 35410, + 35411, + 35412, + 35413, + 35414, + 35415, + 35416, + 35417, + 35418, + 35419, + 
35420, + 35421, + 35422, + 35423, + 35424, + 35425, + 35426, + 35427, + 35428, + 35429, + 35430, + 35431, + 35432, + 35433, + 35434, + 35435, + 35436, + 35437, + 35438, + 35439, + 35440, + 35441, + 35442, + 35443, + 35444, + 35445, + 35446, + 35447, + 35448, + 35449, + 35450, + 35451, + 35452, + 35453, + 35454, + 35455, + 35456, + 35457, + 35458, + 35459, + 35460, + 35461, + 35462, + 35463, + 35464, + 35465, + 35466, + 35467, + 35468, + 35469, + 35470, + 35471, + 35472, + 35473, + 35474, + 35475, + 35476, + 35477, + 35478, + 35479, + 35480, + 35481, + 35482, + 35483, + 35484, + 35485, + 35486, + 35487, + 35488, + 35489, + 35490, + 35491, + 35492, + 35493, + 35494, + 35495, + 35496, + 35497, + 35498, + 35499, + 35500, + 35501, + 35502, + 35503, + 35504, + 35505, + 35506, + 35507, + 35508, + 35509, + 35510, + 35511, + 35512, + 35513, + 35514, + 35515, + 35516, + 35517, + 35518, + 35519, + 35520, + 35521, + 35522, + 35523, + 35524, + 35525, + 35526, + 35527, + 35528, + 35529, + 35530, + 35531, + 35532, + 35533, + 35534, + 35535, + 35536, + 35537, + 35538, + 35539, + 35540, + 35541, + 35542, + 35543, + 35544, + 35545, + 35546, + 35547, + 35548, + 35549, + 35550, + 35551, + 35552, + 35553, + 35554, + 35555, + 35556, + 35557, + 35558, + 35559, + 35560, + 35561, + 35562, + 35563, + 35564, + 35565, + 35566, + 35567, + 35568, + 35569, + 35570, + 35571, + 35572, + 35573, + 35574, + 35575, + 35576, + 35577, + 35578, + 35579, + 35580, + 35581, + 35582, + 35583, + 35584, + 35585, + 35586, + 35587, + 35588, + 35589, + 35590, + 35591, + 35592, + 35593, + 35594, + 35595, + 35596, + 35597, + 35598, + 35599, + 35600, + 35601, + 35602, + 35603, + 35604, + 35605, + 35606, + 35607, + 35608, + 35609, + 35610, + 35611, + 35612, + 35613, + 35614, + 35615, + 35616, + 35617, + 35618, + 35619, + 35620, + 35621, + 35622, + 35623, + 35624, + 35625, + 35626, + 35627, + 35628, + 35629, + 35630, + 35631, + 35632, + 35633, + 35634, + 35635, + 35636, + 35637, + 35638, + 35639, + 35640, + 35641, + 
35642, + 35643, + 35644, + 35645, + 35646, + 35647, + 35648, + 35649, + 35650, + 35651, + 35652, + 35653, + 35654, + 35655, + 35656, + 35657, + 35658, + 35659, + 35660, + 35661, + 35662, + 35663, + 35664, + 35665, + 35666, + 35667, + 35668, + 35669, + 35670, + 35671, + 35672, + 35673, + 35674, + 35675, + 35676, + 35677, + 35678, + 35679, + 35680, + 35681, + 35682, + 35683, + 35684, + 35685, + 35686, + 35687, + 35688, + 35689, + 35690, + 35691, + 35692, + 35693, + 35694, + 35695, + 35696, + 35697, + 35698, + 35699, + 35700, + 35701, + 35702, + 35703, + 35704, + 35705, + 35706, + 35707, + 35708, + 35709, + 35710, + 35711, + 35712, + 35713, + 35714, + 35715, + 35716, + 35717, + 35718, + 35719, + 35720, + 35721, + 35722, + 35723, + 35724, + 35725, + 35726, + 35727, + 35728, + 35729, + 35730, + 35731, + 35732, + 35733, + 35734, + 35735, + 35736, + 35737, + 35738, + 35739, + 35740, + 35741, + 35742, + 35743, + 35744, + 35745, + 35746, + 35747, + 35748, + 35749, + 35750, + 35751, + 35752, + 35753, + 35754, + 35755, + 35756, + 35757, + 35758, + 35759, + 35760, + 35761, + 35762, + 35763, + 35764, + 35765, + 35766, + 35767, + 35768, + 35769, + 35770, + 35771, + 35772, + 35773, + 35774, + 35775, + 35776, + 35777, + 35778, + 35779, + 35780, + 35781, + 35782, + 35783, + 35784, + 35785, + 35786, + 35787, + 35788, + 35789, + 35790, + 35791, + 35792, + 35793, + 35794, + 35795, + 35796, + 35797, + 35798, + 35799, + 35800, + 35801, + 35802, + 35803, + 35804, + 35805, + 35806, + 35807, + 35808, + 35809, + 35810, + 35811, + 35812, + 35813, + 35814, + 35815, + 35816, + 35817, + 35818, + 35819, + 35820, + 35821, + 35822, + 35823, + 35824, + 35825, + 35826, + 35827, + 35828, + 35829, + 35830, + 35831, + 35832, + 35833, + 35834, + 35835, + 35836, + 35837, + 35838, + 35839, + 35840, + 35841, + 35842, + 35843, + 35844, + 35845, + 35846, + 35847, + 35848, + 35849, + 35850, + 35851, + 35852, + 35853, + 35854, + 35855, + 35856, + 35857, + 35858, + 35859, + 35860, + 35861, + 35862, + 35863, + 
35864, + 35865, + 35866, + 35867, + 35868, + 35869, + 35870, + 35871, + 35872, + 35873, + 35874, + 35875, + 35876, + 35877, + 35878, + 35879, + 35880, + 35881, + 35882, + 35883, + 35884, + 35885, + 35886, + 35887, + 35888, + 35889, + 35890, + 35891, + 35892, + 35893, + 35894, + 35895, + 35896, + 35897, + 35898, + 35899, + 35900, + 35901, + 35902, + 35903, + 35904, + 35905, + 35906, + 35907, + 35908, + 35909, + 35910, + 35911, + 35912, + 35913, + 35914, + 35915, + 35916, + 35917, + 35918, + 35919, + 35920, + 35921, + 35922, + 35923, + 35924, + 35925, + 35926, + 35927, + 35928, + 35929, + 35930, + 35931, + 35932, + 35933, + 35934, + 35935, + 35936, + 35937, + 35938, + 35939, + 35940, + 35941, + 35942, + 35943, + 35944, + 35945, + 35946, + 35947, + 35948, + 35949, + 35950, + 35951, + 35952, + 35953, + 35954, + 35955, + 35956, + 35957, + 35958, + 35959, + 35960, + 35961, + 35962, + 35963, + 35964, + 35965, + 35966, + 35967, + 35968, + 35969, + 35970, + 35971, + 35972, + 35973, + 35974, + 35975, + 35976, + 35977, + 35978, + 35979, + 35980, + 35981, + 35982, + 35983, + 35984, + 35985, + 35986, + 35987, + 35988, + 35989, + 35990, + 35991, + 35992, + 35993, + 35994, + 35995, + 35996, + 35997, + 35998, + 35999, + 36000, + 36001, + 36002, + 36003, + 36004, + 36005, + 36006, + 36007, + 36008, + 36009, + 36010, + 36011, + 36012, + 36013, + 36014, + 36015, + 36016, + 36017, + 36018, + 36019, + 36020, + 36021, + 36022, + 36023, + 36024, + 36025, + 36026, + 36027, + 36028, + 36029, + 36030, + 36031, + 36032, + 36033, + 36034, + 36035, + 36036, + 36037, + 36038, + 36039, + 36040, + 36041, + 36042, + 36043, + 36044, + 36045, + 36046, + 36047, + 36048, + 36049, + 36050, + 36051, + 36052, + 36053, + 36054, + 36055, + 36056, + 36057, + 36058, + 36059, + 36060, + 36061, + 36062, + 36063, + 36064, + 36065, + 36066, + 36067, + 36068, + 36069, + 36070, + 36071, + 36072, + 36073, + 36074, + 36075, + 36076, + 36077, + 36078, + 36079, + 36080, + 36081, + 36082, + 36083, + 36084, + 36085, + 
36086, + 36087, + 36088, + 36089, + 36090, + 36091, + 36092, + 36093, + 36094, + 36095, + 36096, + 36097, + 36098, + 36099, + 36100, + 36101, + 36102, + 36103, + 36104, + 36105, + 36106, + 36107, + 36108, + 36109, + 36110, + 36111, + 36112, + 36113, + 36114, + 36115, + 36116, + 36117, + 36118, + 36119, + 36120, + 36121, + 36122, + 36123, + 36124, + 36125, + 36126, + 36127, + 36128, + 36129, + 36130, + 36131, + 36132, + 36133, + 36134, + 36135, + 36136, + 36137, + 36138, + 36139, + 36140, + 36141, + 36142, + 36143, + 36144, + 36145, + 36146, + 36147, + 36148, + 36149, + 36150, + 36151, + 36152, + 36153, + 36154, + 36155, + 36156, + 36157, + 36158, + 36159, + 36160, + 36161, + 36162, + 36163, + 36164, + 36165, + 36166, + 36167, + 36168, + 36169, + 36170, + 36171, + 36172, + 36173, + 36174, + 36175, + 36176, + 36177, + 36178, + 36179, + 36180, + 36181, + 36182, + 36183, + 36184, + 36185, + 36186, + 36187, + 36188, + 36189, + 36190, + 36191, + 36192, + 36193, + 36194, + 36195, + 36196, + 36197, + 36198, + 36199, + 36200, + 36201, + 36202, + 36203, + 36204, + 36205, + 36206, + 36207, + 36208, + 36209, + 36210, + 36211, + 36212, + 36213, + 36214, + 36215, + 36216, + 36217, + 36218, + 36219, + 36220, + 36221, + 36222, + 36223, + 36224, + 36225, + 36226, + 36227, + 36228, + 36229, + 36230, + 36231, + 36232, + 36233, + 36234, + 36235, + 36236, + 36237, + 36238, + 36239, + 36240, + 36241, + 36242, + 36243, + 36244, + 36245, + 36246, + 36247, + 36248, + 36249, + 36250, + 36251, + 36252, + 36253, + 36254, + 36255, + 36256, + 36257, + 36258, + 36259, + 36260, + 36261, + 36262, + 36263, + 36264, + 36265, + 36266, + 36267, + 36268, + 36269, + 36270, + 36271, + 36272, + 36273, + 36274, + 36275, + 36276, + 36277, + 36278, + 36279, + 36280, + 36281, + 36282, + 36283, + 36284, + 36285, + 36286, + 36287, + 36288, + 36289, + 36290, + 36291, + 36292, + 36293, + 36294, + 36295, + 36296, + 36297, + 36298, + 36299, + 36300, + 36301, + 36302, + 36303, + 36304, + 36305, + 36306, + 36307, + 
36308, + 36309, + 36310, + 36311, + 36312, + 36313, + 36314, + 36315, + 36316, + 36317, + 36318, + 36319, + 36320, + 36321, + 36322, + 36323, + 36324, + 36325, + 36326, + 36327, + 36328, + 36329, + 36330, + 36331, + 36332, + 36333, + 36334, + 36335, + 36336, + 36337, + 36338, + 36339, + 36340, + 36341, + 36342, + 36343, + 36344, + 36345, + 36346, + 36347, + 36348, + 36349, + 36350, + 36351, + 36352, + 36353, + 36354, + 36355, + 36356, + 36357, + 36358, + 36359, + 36360, + 36361, + 36362, + 36363, + 36364, + 36365, + 36366, + 36367, + 36368, + 36369, + 36370, + 36371, + 36372, + 36373, + 36374, + 36375, + 36376, + 36377, + 36378, + 36379, + 36380, + 36381, + 36382, + 36383, + 36384, + 36385, + 36386, + 36387, + 36388, + 36389, + 36390, + 36391, + 36392, + 36393, + 36394, + 36395, + 36396, + 36397, + 36398, + 36399, + 36400, + 36401, + 36402, + 36403, + 36404, + 36405, + 36406, + 36407, + 36408, + 36409, + 36410, + 36411, + 36412, + 36413, + 36414, + 36415, + 36416, + 36417, + 36418, + 36419, + 36420, + 36421, + 36422, + 36423, + 36424, + 36425, + 36426, + 36427, + 36428, + 36429, + 36430, + 36431, + 36432, + 36433, + 36434, + 36435, + 36436, + 36437, + 36438, + 36439, + 36440, + 36441, + 36442, + 36443, + 36444, + 36445, + 36446, + 36447, + 36448, + 36449, + 36450, + 36451, + 36452, + 36453, + 36454, + 36455, + 36456, + 36457, + 36458, + 36459, + 36460, + 36461, + 36462, + 36463, + 36464, + 36465, + 36466, + 36467, + 36468, + 36469, + 36470, + 36471, + 36472, + 36473, + 36474, + 36475, + 36476, + 36477, + 36478, + 36479, + 36480, + 36481, + 36482, + 36483, + 36484, + 36485, + 36486, + 36487, + 36488, + 36489, + 36490, + 36491, + 36492, + 36493, + 36494, + 36495, + 36496, + 36497, + 36498, + 36499, + 36500, + 36501, + 36502, + 36503, + 36504, + 36505, + 36506, + 36507, + 36508, + 36509, + 36510, + 36511, + 36512, + 36513, + 36514, + 36515, + 36516, + 36517, + 36518, + 36519, + 36520, + 36521, + 36522, + 36523, + 36524, + 36525, + 36526, + 36527, + 36528, + 36529, + 
36530, + 36531, + 36532, + 36533, + 36534, + 36535, + 36536, + 36537, + 36538, + 36539, + 36540, + 36541, + 36542, + 36543, + 36544, + 36545, + 36546, + 36547, + 36548, + 36549, + 36550, + 36551, + 36552, + 36553, + 36554, + 36555, + 36556, + 36557, + 36558, + 36559, + 36560, + 36561, + 36562, + 36563, + 36564, + 36565, + 36566, + 36567, + 36568, + 36569, + 36570, + 36571, + 36572, + 36573, + 36574, + 36575, + 36576, + 36577, + 36578, + 36579, + 36580, + 36581, + 36582, + 36583, + 36584, + 36585, + 36586, + 36587, + 36588, + 36589, + 36590, + 36591, + 36592, + 36593, + 36594, + 36595, + 36596, + 36597, + 36598, + 36599, + 36600, + 36601, + 36602, + 36603, + 36604, + 36605, + 36606, + 36607, + 36608, + 36609, + 36610, + 36611, + 36612, + 36613, + 36614, + 36615, + 36616, + 36617, + 36618, + 36619, + 36620, + 36621, + 36622, + 36623, + 36624, + 36625, + 36626, + 36627, + 36628, + 36629, + 36630, + 36631, + 36632, + 36633, + 36634, + 36635, + 36636, + 36637, + 36638, + 36639, + 36640, + 36641, + 36642, + 36643, + 36644, + 36645, + 36646, + 36647, + 36648, + 36649, + 36650, + 36651, + 36652, + 36653, + 36654, + 36655, + 36656, + 36657, + 36658, + 36659, + 36660, + 36661, + 36662, + 36663, + 36664, + 36665, + 36666, + 36667, + 36668, + 36669, + 36670, + 36671, + 36672, + 36673, + 36674, + 36675, + 36676, + 36677, + 36678, + 36679, + 36680, + 36681, + 36682, + 36683, + 36684, + 36685, + 36686, + 36687, + 36688, + 36689, + 36690, + 36691, + 36692, + 36693, + 36694, + 36695, + 36696, + 36697, + 36698, + 36699, + 36700, + 36701, + 36702, + 36703, + 36704, + 36705, + 36706, + 36707, + 36708, + 36709, + 36710, + 36711, + 36712, + 36713, + 36714, + 36715, + 36716, + 36717, + 36718, + 36719, + 36720, + 36721, + 36722, + 36723, + 36724, + 36725, + 36726, + 36727, + 36728, + 36729, + 36730, + 36731, + 36732, + 36733, + 36734, + 36735, + 36736, + 36737, + 36738, + 36739, + 36740, + 36741, + 36742, + 36743, + 36744, + 36745, + 36746, + 36747, + 36748, + 36749, + 36750, + 36751, + 
36752, + 36753, + 36754, + 36755, + 36756, + 36757, + 36758, + 36759, + 36760, + 36761, + 36762, + 36763, + 36764, + 36765, + 36766, + 36767, + 36768, + 36769, + 36770, + 36771, + 36772, + 36773, + 36774, + 36775, + 36776, + 36777, + 36778, + 36779, + 36780, + 36781, + 36782, + 36783, + 36784, + 36785, + 36786, + 36787, + 36788, + 36789, + 36790, + 36791, + 36792, + 36793, + 36794, + 36795, + 36796, + 36797, + 36798, + 36799, + 36800, + 36801, + 36802, + 36803, + 36804, + 36805, + 36806, + 36807, + 36808, + 36809, + 36810, + 36811, + 36812, + 36813, + 36814, + 36815, + 36816, + 36817, + 36818, + 36819, + 36820, + 36821, + 36822, + 36823, + 36824, + 36825, + 36826, + 36827, + 36828, + 36829, + 36830, + 36831, + 36832, + 36833, + 36834, + 36835, + 36836, + 36837, + 36838, + 36839, + 36840, + 36841, + 36842, + 36843, + 36844, + 36845, + 36846, + 36847, + 36848, + 36849, + 36850, + 36851, + 36852, + 36853, + 36854, + 36855, + 36856, + 36857, + 36858, + 36859, + 36860, + 36861, + 36862, + 36863, + 36864, + 36865, + 36866, + 36867, + 36868, + 36869, + 36870, + 36871, + 36872, + 36873, + 36874, + 36875, + 36876, + 36877, + 36878, + 36879, + 36880, + 36881, + 36882, + 36883, + 36884, + 36885, + 36886, + 36887, + 36888, + 36889, + 36890, + 36891, + 36892, + 36893, + 36894, + 36895, + 36896, + 36897, + 36898, + 36899, + 36900, + 36901, + 36902, + 36903, + 36904, + 36905, + 36906, + 36907, + 36908, + 36909, + 36910, + 36911, + 36912, + 36913, + 36914, + 36915, + 36916, + 36917, + 36918, + 36919, + 36920, + 36921, + 36922, + 36923, + 36924, + 36925, + 36926, + 36927, + 36928, + 36929, + 36930, + 36931, + 36932, + 36933, + 36934, + 36935, + 36936, + 36937, + 36938, + 36939, + 36940, + 36941, + 36942, + 36943, + 36944, + 36945, + 36946, + 36947, + 36948, + 36949, + 36950, + 36951, + 36952, + 36953, + 36954, + 36955, + 36956, + 36957, + 36958, + 36959, + 36960, + 36961, + 36962, + 36963, + 36964, + 36965, + 36966, + 36967, + 36968, + 36969, + 36970, + 36971, + 36972, + 36973, + 
36974, + 36975, + 36976, + 36977, + 36978, + 36979, + 36980, + 36981, + 36982, + 36983, + 36984, + 36985, + 36986, + 36987, + 36988, + 36989, + 36990, + 36991, + 36992, + 36993, + 36994, + 36995, + 36996, + 36997, + 36998, + 36999, + 37000, + 37001, + 37002, + 37003, + 37004, + 37005, + 37006, + 37007, + 37008, + 37009, + 37010, + 37011, + 37012, + 37013, + 37014, + 37015, + 37016, + 37017, + 37018, + 37019, + 37020, + 37021, + 37022, + 37023, + 37024, + 37025, + 37026, + 37027, + 37028, + 37029, + 37030, + 37031, + 37032, + 37033, + 37034, + 37035, + 37036, + 37037, + 37038, + 37039, + 37040, + 37041, + 37042, + 37043, + 37044, + 37045, + 37046, + 37047, + 37048, + 37049, + 37050, + 37051, + 37052, + 37053, + 37054, + 37055, + 37056, + 37057, + 37058, + 37059, + 37060, + 37061, + 37062, + 37063, + 37064, + 37065, + 37066, + 37067, + 37068, + 37069, + 37070, + 37071, + 37072, + 37073, + 37074, + 37075, + 37076, + 37077, + 37078, + 37079, + 37080, + 37081, + 37082, + 37083, + 37084, + 37085, + 37086, + 37087, + 37088, + 37089, + 37090, + 37091, + 37092, + 37093, + 37094, + 37095, + 37096, + 37097, + 37098, + 37099, + 37100, + 37101, + 37102, + 37103, + 37104, + 37105, + 37106, + 37107, + 37108, + 37109, + 37110, + 37111, + 37112, + 37113, + 37114, + 37115, + 37116, + 37117, + 37118, + 37119, + 37120, + 37121, + 37122, + 37123, + 37124, + 37125, + 37126, + 37127, + 37128, + 37129, + 37130, + 37131, + 37132, + 37133, + 37134, + 37135, + 37136, + 37137, + 37138, + 37139, + 37140, + 37141, + 37142, + 37143, + 37144, + 37145, + 37146, + 37147, + 37148, + 37149, + 37150, + 37151, + 37152, + 37153, + 37154, + 37155, + 37156, + 37157, + 37158, + 37159, + 37160, + 37161, + 37162, + 37163, + 37164, + 37165, + 37166, + 37167, + 37168, + 37169, + 37170, + 37171, + 37172, + 37173, + 37174, + 37175, + 37176, + 37177, + 37178, + 37179, + 37180, + 37181, + 37182, + 37183, + 37184, + 37185, + 37186, + 37187, + 37188, + 37189, + 37190, + 37191, + 37192, + 37193, + 37194, + 37195, + 
37196, + 37197, + 37198, + 37199, + 37200, + 37201, + 37202, + 37203, + 37204, + 37205, + 37206, + 37207, + 37208, + 37209, + 37210, + 37211, + 37212, + 37213, + 37214, + 37215, + 37216, + 37217, + 37218, + 37219, + 37220, + 37221, + 37222, + 37223, + 37224, + 37225, + 37226, + 37227, + 37228, + 37229, + 37230, + 37231, + 37232, + 37233, + 37234, + 37235, + 37236, + 37237, + 37238, + 37239, + 37240, + 37241, + 37242, + 37243, + 37244, + 37245, + 37246, + 37247, + 37248, + 37249, + 37250, + 37251, + 37252, + 37253, + 37254, + 37255, + 37256, + 37257, + 37258, + 37259, + 37260, + 37261, + 37262, + 37263, + 37264, + 37265, + 37266, + 37267, + 37268, + 37269, + 37270, + 37271, + 37272, + 37273, + 37274, + 37275, + 37276, + 37277, + 37278, + 37279, + 37280, + 37281, + 37282, + 37283, + 37284, + 37285, + 37286, + 37287, + 37288, + 37289, + 37290, + 37291, + 37292, + 37293, + 37294, + 37295, + 37296, + 37297, + 37298, + 37299, + 37300, + 37301, + 37302, + 37303, + 37304, + 37305, + 37306, + 37307, + 37308, + 37309, + 37310, + 37311, + 37312, + 37313, + 37314, + 37315, + 37316, + 37317, + 37318, + 37319, + 37320, + 37321, + 37322, + 37323, + 37324, + 37325, + 37326, + 37327, + 37328, + 37329, + 37330, + 37331, + 37332, + 37333, + 37334, + 37335, + 37336, + 37337, + 37338, + 37339, + 37340, + 37341, + 37342, + 37343, + 37344, + 37345, + 37346, + 37347, + 37348, + 37349, + 37350, + 37351, + 37352, + 37353, + 37354, + 37355, + 37356, + 37357, + 37358, + 37359, + 37360, + 37361, + 37362, + 37363, + 37364, + 37365, + 37366, + 37367, + 37368, + 37369, + 37370, + 37371, + 37372, + 37373, + 37374, + 37375, + 37376, + 37377, + 37378, + 37379, + 37380, + 37381, + 37382, + 37383, + 37384, + 37385, + 37386, + 37387, + 37388, + 37389, + 37390, + 37391, + 37392, + 37393, + 37394, + 37395, + 37396, + 37397, + 37398, + 37399, + 37400, + 37401, + 37402, + 37403, + 37404, + 37405, + 37406, + 37407, + 37408, + 37409, + 37410, + 37411, + 37412, + 37413, + 37414, + 37415, + 37416, + 37417, + 
37418, + 37419, + 37420, + 37421, + 37422, + 37423, + 37424, + 37425, + 37426, + 37427, + 37428, + 37429, + 37430, + 37431, + 37432, + 37433, + 37434, + 37435, + 37436, + 37437, + 37438, + 37439, + 37440, + 37441, + 37442, + 37443, + 37444, + 37445, + 37446, + 37447, + 37448, + 37449, + 37450, + 37451, + 37452, + 37453, + 37454, + 37455, + 37456, + 37457, + 37458, + 37459, + 37460, + 37461, + 37462, + 37463, + 37464, + 37465, + 37466, + 37467, + 37468, + 37469, + 37470, + 37471, + 37472, + 37473, + 37474, + 37475, + 37476, + 37477, + 37478, + 37479, + 37480, + 37481, + 37482, + 37483, + 37484, + 37485, + 37486, + 37487, + 37488, + 37489, + 37490, + 37491, + 37492, + 37493, + 37494, + 37495, + 37496, + 37497, + 37498, + 37499, + 37500, + 37501, + 37502, + 37503, + 37504, + 37505, + 37506, + 37507, + 37508, + 37509, + 37510, + 37511, + 37512, + 37513, + 37514, + 37515, + 37516, + 37517, + 37518, + 37519, + 37520, + 37521, + 37522, + 37523, + 37524, + 37525, + 37526, + 37527, + 37528, + 37529, + 37530, + 37531, + 37532, + 37533, + 37534, + 37535, + 37536, + 37537, + 37538, + 37539, + 37540, + 37541, + 37542, + 37543, + 37544, + 37545, + 37546, + 37547, + 37548, + 37549, + 37550, + 37551, + 37552, + 37553, + 37554, + 37555, + 37556, + 37557, + 37558, + 37559, + 37560, + 37561, + 37562, + 37563, + 37564, + 37565, + 37566, + 37567, + 37568, + 37569, + 37570, + 37571, + 37572, + 37573, + 37574, + 37575, + 37576, + 37577, + 37578, + 37579, + 37580, + 37581, + 37582, + 37583, + 37584, + 37585, + 37586, + 37587, + 37588, + 37589, + 37590, + 37591, + 37592, + 37593, + 37594, + 37595, + 37596, + 37597, + 37598, + 37599, + 37600, + 37601, + 37602, + 37603, + 37604, + 37605, + 37606, + 37607, + 37608, + 37609, + 37610, + 37611, + 37612, + 37613, + 37614, + 37615, + 37616, + 37617, + 37618, + 37619, + 37620, + 37621, + 37622, + 37623, + 37624, + 37625, + 37626, + 37627, + 37628, + 37629, + 37630, + 37631, + 37632, + 37633, + 37634, + 37635, + 37636, + 37637, + 37638, + 37639, + 
37640, + 37641, + 37642, + 37643, + 37644, + 37645, + 37646, + 37647, + 37648, + 37649, + 37650, + 37651, + 37652, + 37653, + 37654, + 37655, + 37656, + 37657, + 37658, + 37659, + 37660, + 37661, + 37662, + 37663, + 37664, + 37665, + 37666, + 37667, + 37668, + 37669, + 37670, + 37671, + 37672, + 37673, + 37674, + 37675, + 37676, + 37677, + 37678, + 37679, + 37680, + 37681, + 37682, + 37683, + 37684, + 37685, + 37686, + 37687, + 37688, + 37689, + 37690, + 37691, + 37692, + 37693, + 37694, + 37695, + 37696, + 37697, + 37698, + 37699, + 37700, + 37701, + 37702, + 37703, + 37704, + 37705, + 37706, + 37707, + 37708, + 37709, + 37710, + 37711, + 37712, + 37713, + 37714, + 37715, + 37716, + 37717, + 37718, + 37719, + 37720, + 37721, + 37722, + 37723, + 37724, + 37725, + 37726, + 37727, + 37728, + 37729, + 37730, + 37731, + 37732, + 37733, + 37734, + 37735, + 37736, + 37737, + 37738, + 37739, + 37740, + 37741, + 37742, + 37743, + 37744, + 37745, + 37746, + 37747, + 37748, + 37749, + 37750, + 37751, + 37752, + 37753, + 37754, + 37755, + 37756, + 37757, + 37758, + 37759, + 37760, + 37761, + 37762, + 37763, + 37764, + 37765, + 37766, + 37767, + 37768, + 37769, + 37770, + 37771, + 37772, + 37773, + 37774, + 37775, + 37776, + 37777, + 37778, + 37779, + 37780, + 37781, + 37782, + 37783, + 37784, + 37785, + 37786, + 37787, + 37788, + 37789, + 37790, + 37791, + 37792, + 37793, + 37794, + 37795, + 37796, + 37797, + 37798, + 37799, + 37800, + 37801, + 37802, + 37803, + 37804, + 37805, + 37806, + 37807, + 37808, + 37809, + 37810, + 37811, + 37812, + 37813, + 37814, + 37815, + 37816, + 37817, + 37818, + 37819, + 37820, + 37821, + 37822, + 37823, + 37824, + 37825, + 37826, + 37827, + 37828, + 37829, + 37830, + 37831, + 37832, + 37833, + 37834, + 37835, + 37836, + 37837, + 37838, + 37839, + 37840, + 37841, + 37842, + 37843, + 37844, + 37845, + 37846, + 37847, + 37848, + 37849, + 37850, + 37851, + 37852, + 37853, + 37854, + 37855, + 37856, + 37857, + 37858, + 37859, + 37860, + 37861, + 
37862, + 37863, + 37864, + 37865, + 37866, + 37867, + 37868, + 37869, + 37870, + 37871, + 37872, + 37873, + 37874, + 37875, + 37876, + 37877, + 37878, + 37879, + 37880, + 37881, + 37882, + 37883, + 37884, + 37885, + 37886, + 37887, + 37888, + 37889, + 37890, + 37891, + 37892, + 37893, + 37894, + 37895, + 37896, + 37897, + 37898, + 37899, + 37900, + 37901, + 37902, + 37903, + 37904, + 37905, + 37906, + 37907, + 37908, + 37909, + 37910, + 37911, + 37912, + 37913, + 37914, + 37915, + 37916, + 37917, + 37918, + 37919, + 37920, + 37921, + 37922, + 37923, + 37924, + 37925, + 37926, + 37927, + 37928, + 37929, + 37930, + 37931, + 37932, + 37933, + 37934, + 37935, + 37936, + 37937, + 37938, + 37939, + 37940, + 37941, + 37942, + 37943, + 37944, + 37945, + 37946, + 37947, + 37948, + 37949, + 37950, + 37951, + 37952, + 37953, + 37954, + 37955, + 37956, + 37957, + 37958, + 37959, + 37960, + 37961, + 37962, + 37963, + 37964, + 37965, + 37966, + 37967, + 37968, + 37969, + 37970, + 37971, + 37972, + 37973, + 37974, + 37975, + 37976, + 37977, + 37978, + 37979, + 37980, + 37981, + 37982, + 37983, + 37984, + 37985, + 37986, + 37987, + 37988, + 37989, + 37990, + 37991, + 37992, + 37993, + 37994, + 37995, + 37996, + 37997, + 37998, + 37999, + 38000, + 38001, + 38002, + 38003, + 38004, + 38005, + 38006, + 38007, + 38008, + 38009, + 38010, + 38011, + 38012, + 38013, + 38014, + 38015, + 38016, + 38017, + 38018, + 38019, + 38020, + 38021, + 38022, + 38023, + 38024, + 38025, + 38026, + 38027, + 38028, + 38029, + 38030, + 38031, + 38032, + 38033, + 38034, + 38035, + 38036, + 38037, + 38038, + 38039, + 38040, + 38041, + 38042, + 38043, + 38044, + 38045, + 38046, + 38047, + 38048, + 38049, + 38050, + 38051, + 38052, + 38053, + 38054, + 38055, + 38056, + 38057, + 38058, + 38059, + 38060, + 38061, + 38062, + 38063, + 38064, + 38065, + 38066, + 38067, + 38068, + 38069, + 38070, + 38071, + 38072, + 38073, + 38074, + 38075, + 38076, + 38077, + 38078, + 38079, + 38080, + 38081, + 38082, + 38083, + 
38084, + 38085, + 38086, + 38087, + 38088, + 38089, + 38090, + 38091, + 38092, + 38093, + 38094, + 38095, + 38096, + 38097, + 38098, + 38099, + 38100, + 38101, + 38102, + 38103, + 38104, + 38105, + 38106, + 38107, + 38108, + 38109, + 38110, + 38111, + 38112, + 38113, + 38114, + 38115, + 38116, + 38117, + 38118, + 38119, + 38120, + 38121, + 38122, + 38123, + 38124, + 38125, + 38126, + 38127, + 38128, + 38129, + 38130, + 38131, + 38132, + 38133, + 38134, + 38135, + 38136, + 38137, + 38138, + 38139, + 38140, + 38141, + 38142, + 38143, + 38144, + 38145, + 38146, + 38147, + 38148, + 38149, + 38150, + 38151, + 38152, + 38153, + 38154, + 38155, + 38156, + 38157, + 38158, + 38159, + 38160, + 38161, + 38162, + 38163, + 38164, + 38165, + 38166, + 38167, + 38168, + 38169, + 38170, + 38171, + 38172, + 38173, + 38174, + 38175, + 38176, + 38177, + 38178, + 38179, + 38180, + 38181, + 38182, + 38183, + 38184, + 38185, + 38186, + 38187, + 38188, + 38189, + 38190, + 38191, + 38192, + 38193, + 38194, + 38195, + 38196, + 38197, + 38198, + 38199, + 38200, + 38201, + 38202, + 38203, + 38204, + 38205, + 38206, + 38207, + 38208, + 38209, + 38210, + 38211, + 38212, + 38213, + 38214, + 38215, + 38216, + 38217, + 38218, + 38219, + 38220, + 38221, + 38222, + 38223, + 38224, + 38225, + 38226, + 38227, + 38228, + 38229, + 38230, + 38231, + 38232, + 38233, + 38234, + 38235, + 38236, + 38237, + 38238, + 38239, + 38240, + 38241, + 38242, + 38243, + 38244, + 38245, + 38246, + 38247, + 38248, + 38249, + 38250, + 38251, + 38252, + 38253, + 38254, + 38255, + 38256, + 38257, + 38258, + 38259, + 38260, + 38261, + 38262, + 38263, + 38264, + 38265, + 38266, + 38267, + 38268, + 38269, + 38270, + 38271, + 38272, + 38273, + 38274, + 38275, + 38276, + 38277, + 38278, + 38279, + 38280, + 38281, + 38282, + 38283, + 38284, + 38285, + 38286, + 38287, + 38288, + 38289, + 38290, + 38291, + 38292, + 38293, + 38294, + 38295, + 38296, + 38297, + 38298, + 38299, + 38300, + 38301, + 38302, + 38303, + 38304, + 38305, + 
38306, + 38307, + 38308, + 38309, + 38310, + 38311, + 38312, + 38313, + 38314, + 38315, + 38316, + 38317, + 38318, + 38319, + 38320, + 38321, + 38322, + 38323, + 38324, + 38325, + 38326, + 38327, + 38328, + 38329, + 38330, + 38331, + 38332, + 38333, + 38334, + 38335, + 38336, + 38337, + 38338, + 38339, + 38340, + 38341, + 38342, + 38343, + 38344, + 38345, + 38346, + 38347, + 38348, + 38349, + 38350, + 38351, + 38352, + 38353, + 38354, + 38355, + 38356, + 38357, + 38358, + 38359, + 38360, + 38361, + 38362, + 38363, + 38364, + 38365, + 38366, + 38367, + 38368, + 38369, + 38370, + 38371, + 38372, + 38373, + 38374, + 38375, + 38376, + 38377, + 38378, + 38379, + 38380, + 38381, + 38382, + 38383, + 38384, + 38385, + 38386, + 38387, + 38388, + 38389, + 38390, + 38391, + 38392, + 38393, + 38394, + 38395, + 38396, + 38397, + 38398, + 38399, + 38400, + 38401, + 38402, + 38403, + 38404, + 38405, + 38406, + 38407, + 38408, + 38409, + 38410, + 38411, + 38412, + 38413, + 38414, + 38415, + 38416, + 38417, + 38418, + 38419, + 38420, + 38421, + 38422, + 38423, + 38424, + 38425, + 38426, + 38427, + 38428, + 38429, + 38430, + 38431, + 38432, + 38433, + 38434, + 38435, + 38436, + 38437, + 38438, + 38439, + 38440, + 38441, + 38442, + 38443, + 38444, + 38445, + 38446, + 38447, + 38448, + 38449, + 38450, + 38451, + 38452, + 38453, + 38454, + 38455, + 38456, + 38457, + 38458, + 38459, + 38460, + 38461, + 38462, + 38463, + 38464, + 38465, + 38466, + 38467, + 38468, + 38469, + 38470, + 38471, + 38472, + 38473, + 38474, + 38475, + 38476, + 38477, + 38478, + 38479, + 38480, + 38481, + 38482, + 38483, + 38484, + 38485, + 38486, + 38487, + 38488, + 38489, + 38490, + 38491, + 38492, + 38493, + 38494, + 38495, + 38496, + 38497, + 38498, + 38499, + 38500, + 38501, + 38502, + 38503, + 38504, + 38505, + 38506, + 38507, + 38508, + 38509, + 38510, + 38511, + 38512, + 38513, + 38514, + 38515, + 38516, + 38517, + 38518, + 38519, + 38520, + 38521, + 38522, + 38523, + 38524, + 38525, + 38526, + 38527, + 
38528, + 38529, + 38530, + 38531, + 38532, + 38533, + 38534, + 38535, + 38536, + 38537, + 38538, + 38539, + 38540, + 38541, + 38542, + 38543, + 38544, + 38545, + 38546, + 38547, + 38548, + 38549, + 38550, + 38551, + 38552, + 38553, + 38554, + 38555, + 38556, + 38557, + 38558, + 38559, + 38560, + 38561, + 38562, + 38563, + 38564, + 38565, + 38566, + 38567, + 38568, + 38569, + 38570, + 38571, + 38572, + 38573, + 38574, + 38575, + 38576, + 38577, + 38578, + 38579, + 38580, + 38581, + 38582, + 38583, + 38584, + 38585, + 38586, + 38587, + 38588, + 38589, + 38590, + 38591, + 38592, + 38593, + 38594, + 38595, + 38596, + 38597, + 38598, + 38599, + 38600, + 38601, + 38602, + 38603, + 38604, + 38605, + 38606, + 38607, + 38608, + 38609, + 38610, + 38611, + 38612, + 38613, + 38614, + 38615, + 38616, + 38617, + 38618, + 38619, + 38620, + 38621, + 38622, + 38623, + 38624, + 38625, + 38626, + 38627, + 38628, + 38629, + 38630, + 38631, + 38632, + 38633, + 38634, + 38635, + 38636, + 38637, + 38638, + 38639, + 38640, + 38641, + 38642, + 38643, + 38644, + 38645, + 38646, + 38647, + 38648, + 38649, + 38650, + 38651, + 38652, + 38653, + 38654, + 38655, + 38656, + 38657, + 38658, + 38659, + 38660, + 38661, + 38662, + 38663, + 38664, + 38665, + 38666, + 38667, + 38668, + 38669, + 38670, + 38671, + 38672, + 38673, + 38674, + 38675, + 38676, + 38677, + 38678, + 38679, + 38680, + 38681, + 38682, + 38683, + 38684, + 38685, + 38686, + 38687, + 38688, + 38689, + 38690, + 38691, + 38692, + 38693, + 38694, + 38695, + 38696, + 38697, + 38698, + 38699, + 38700, + 38701, + 38702, + 38703, + 38704, + 38705, + 38706, + 38707, + 38708, + 38709, + 38710, + 38711, + 38712, + 38713, + 38714, + 38715, + 38716, + 38717, + 38718, + 38719, + 38720, + 38721, + 38722, + 38723, + 38724, + 38725, + 38726, + 38727, + 38728, + 38729, + 38730, + 38731, + 38732, + 38733, + 38734, + 38735, + 38736, + 38737, + 38738, + 38739, + 38740, + 38741, + 38742, + 38743, + 38744, + 38745, + 38746, + 38747, + 38748, + 38749, + 
38750, + 38751, + 38752, + 38753, + 38754, + 38755, + 38756, + 38757, + 38758, + 38759, + 38760, + 38761, + 38762, + 38763, + 38764, + 38765, + 38766, + 38767, + 38768, + 38769, + 38770, + 38771, + 38772, + 38773, + 38774, + 38775, + 38776, + 38777, + 38778, + 38779, + 38780, + 38781, + 38782, + 38783, + 38784, + 38785, + 38786, + 38787, + 38788, + 38789, + 38790, + 38791, + 38792, + 38793, + 38794, + 38795, + 38796, + 38797, + 38798, + 38799, + 38800, + 38801, + 38802, + 38803, + 38804, + 38805, + 38806, + 38807, + 38808, + 38809, + 38810, + 38811, + 38812, + 38813, + 38814, + 38815, + 38816, + 38817, + 38818, + 38819, + 38820, + 38821, + 38822, + 38823, + 38824, + 38825, + 38826, + 38827, + 38828, + 38829, + 38830, + 38831, + 38832, + 38833, + 38834, + 38835, + 38836, + 38837, + 38838, + 38839, + 38840, + 38841, + 38842, + 38843, + 38844, + 38845, + 38846, + 38847, + 38848, + 38849, + 38850, + 38851, + 38852, + 38853, + 38854, + 38855, + 38856, + 38857, + 38858, + 38859, + 38860, + 38861, + 38862, + 38863, + 38864, + 38865, + 38866, + 38867, + 38868, + 38869, + 38870, + 38871, + 38872, + 38873, + 38874, + 38875, + 38876, + 38877, + 38878, + 38879, + 38880, + 38881, + 38882, + 38883, + 38884, + 38885, + 38886, + 38887, + 38888, + 38889, + 38890, + 38891, + 38892, + 38893, + 38894, + 38895, + 38896, + 38897, + 38898, + 38899, + 38900, + 38901, + 38902, + 38903, + 38904, + 38905, + 38906, + 38907, + 38908, + 38909, + 38910, + 38911, + 38912, + 38913, + 38914, + 38915, + 38916, + 38917, + 38918, + 38919, + 38920, + 38921, + 38922, + 38923, + 38924, + 38925, + 38926, + 38927, + 38928, + 38929, + 38930, + 38931, + 38932, + 38933, + 38934, + 38935, + 38936, + 38937, + 38938, + 38939, + 38940, + 38941, + 38942, + 38943, + 38944, + 38945, + 38946, + 38947, + 38948, + 38949, + 38950, + 38951, + 38952, + 38953, + 38954, + 38955, + 38956, + 38957, + 38958, + 38959, + 38960, + 38961, + 38962, + 38963, + 38964, + 38965, + 38966, + 38967, + 38968, + 38969, + 38970, + 38971, + 
38972, + 38973, + 38974, + 38975, + 38976, + 38977, + 38978, + 38979, + 38980, + 38981, + 38982, + 38983, + 38984, + 38985, + 38986, + 38987, + 38988, + 38989, + 38990, + 38991, + 38992, + 38993, + 38994, + 38995, + 38996, + 38997, + 38998, + 38999, + 39000, + 39001, + 39002, + 39003, + 39004, + 39005, + 39006, + 39007, + 39008, + 39009, + 39010, + 39011, + 39012, + 39013, + 39014, + 39015, + 39016, + 39017, + 39018, + 39019, + 39020, + 39021, + 39022, + 39023, + 39024, + 39025, + 39026, + 39027, + 39028, + 39029, + 39030, + 39031, + 39032, + 39033, + 39034, + 39035, + 39036, + 39037, + 39038, + 39039, + 39040, + 39041, + 39042, + 39043, + 39044, + 39045, + 39046, + 39047, + 39048, + 39049, + 39050, + 39051, + 39052, + 39053, + 39054, + 39055, + 39056, + 39057, + 39058, + 39059, + 39060, + 39061, + 39062, + 39063, + 39064, + 39065, + 39066, + 39067, + 39068, + 39069, + 39070, + 39071, + 39072, + 39073, + 39074, + 39075, + 39076, + 39077, + 39078, + 39079, + 39080, + 39081, + 39082, + 39083, + 39084, + 39085, + 39086, + 39087, + 39088, + 39089, + 39090, + 39091, + 39092, + 39093, + 39094, + 39095, + 39096, + 39097, + 39098, + 39099, + 39100, + 39101, + 39102, + 39103, + 39104, + 39105, + 39106, + 39107, + 39108, + 39109, + 39110, + 39111, + 39112, + 39113, + 39114, + 39115, + 39116, + 39117, + 39118, + 39119, + 39120, + 39121, + 39122, + 39123, + 39124, + 39125, + 39126, + 39127, + 39128, + 39129, + 39130, + 39131, + 39132, + 39133, + 39134, + 39135, + 39136, + 39137, + 39138, + 39139, + 39140, + 39141, + 39142, + 39143, + 39144, + 39145, + 39146, + 39147, + 39148, + 39149, + 39150, + 39151, + 39152, + 39153, + 39154, + 39155, + 39156, + 39157, + 39158, + 39159, + 39160, + 39161, + 39162, + 39163, + 39164, + 39165, + 39166, + 39167, + 39168, + 39169, + 39170, + 39171, + 39172, + 39173, + 39174, + 39175, + 39176, + 39177, + 39178, + 39179, + 39180, + 39181, + 39182, + 39183, + 39184, + 39185, + 39186, + 39187, + 39188, + 39189, + 39190, + 39191, + 39192, + 39193, + 
39194, + 39195, + 39196, + 39197, + 39198, + 39199, + 39200, + 39201, + 39202, + 39203, + 39204, + 39205, + 39206, + 39207, + 39208, + 39209, + 39210, + 39211, + 39212, + 39213, + 39214, + 39215, + 39216, + 39217, + 39218, + 39219, + 39220, + 39221, + 39222, + 39223, + 39224, + 39225, + 39226, + 39227, + 39228, + 39229, + 39230, + 39231, + 39232, + 39233, + 39234, + 39235, + 39236, + 39237, + 39238, + 39239, + 39240, + 39241, + 39242, + 39243, + 39244, + 39245, + 39246, + 39247, + 39248, + 39249, + 39250, + 39251, + 39252, + 39253, + 39254, + 39255, + 39256, + 39257, + 39258, + 39259, + 39260, + 39261, + 39262, + 39263, + 39264, + 39265, + 39266, + 39267, + 39268, + 39269, + 39270, + 39271, + 39272, + 39273, + 39274, + 39275, + 39276, + 39277, + 39278, + 39279, + 39280, + 39281, + 39282, + 39283, + 39284, + 39285, + 39286, + 39287, + 39288, + 39289, + 39290, + 39291, + 39292, + 39293, + 39294, + 39295, + 39296, + 39297, + 39298, + 39299, + 39300, + 39301, + 39302, + 39303, + 39304, + 39305, + 39306, + 39307, + 39308, + 39309, + 39310, + 39311, + 39312, + 39313, + 39314, + 39315, + 39316, + 39317, + 39318, + 39319, + 39320, + 39321, + 39322, + 39323, + 39324, + 39325, + 39326, + 39327, + 39328, + 39329, + 39330, + 39331, + 39332, + 39333, + 39334, + 39335, + 39336, + 39337, + 39338, + 39339, + 39340, + 39341, + 39342, + 39343, + 39344, + 39345, + 39346, + 39347, + 39348, + 39349, + 39350, + 39351, + 39352, + 39353, + 39354, + 39355, + 39356, + 39357, + 39358, + 39359, + 39360, + 39361, + 39362, + 39363, + 39364, + 39365, + 39366, + 39367, + 39368, + 39369, + 39370, + 39371, + 39372, + 39373, + 39374, + 39375, + 39376, + 39377, + 39378, + 39379, + 39380, + 39381, + 39382, + 39383, + 39384, + 39385, + 39386, + 39387, + 39388, + 39389, + 39390, + 39391, + 39392, + 39393, + 39394, + 39395, + 39396, + 39397, + 39398, + 39399, + 39400, + 39401, + 39402, + 39403, + 39404, + 39405, + 39406, + 39407, + 39408, + 39409, + 39410, + 39411, + 39412, + 39413, + 39414, + 39415, + 
39416, + 39417, + 39418, + 39419, + 39420, + 39421, + 39422, + 39423, + 39424, + 39425, + 39426, + 39427, + 39428, + 39429, + 39430, + 39431, + 39432, + 39433, + 39434, + 39435, + 39436, + 39437, + 39438, + 39439, + 39440, + 39441, + 39442, + 39443, + 39444, + 39445, + 39446, + 39447, + 39448, + 39449, + 39450, + 39451, + 39452, + 39453, + 39454, + 39455, + 39456, + 39457, + 39458, + 39459, + 39460, + 39461, + 39462, + 39463, + 39464, + 39465, + 39466, + 39467, + 39468, + 39469, + 39470, + 39471, + 39472, + 39473, + 39474, + 39475, + 39476, + 39477, + 39478, + 39479, + 39480, + 39481, + 39482, + 39483, + 39484, + 39485, + 39486, + 39487, + 39488, + 39489, + 39490, + 39491, + 39492, + 39493, + 39494, + 39495, + 39496, + 39497, + 39498, + 39499, + 39500, + 39501, + 39502, + 39503, + 39504, + 39505, + 39506, + 39507, + 39508, + 39509, + 39510, + 39511, + 39512, + 39513, + 39514, + 39515, + 39516, + 39517, + 39518, + 39519, + 39520, + 39521, + 39522, + 39523, + 39524, + 39525, + 39526, + 39527, + 39528, + 39529, + 39530, + 39531, + 39532, + 39533, + 39534, + 39535, + 39536, + 39537, + 39538, + 39539, + 39540, + 39541, + 39542, + 39543, + 39544, + 39545, + 39546, + 39547, + 39548, + 39549, + 39550, + 39551, + 39552, + 39553, + 39554, + 39555, + 39556, + 39557, + 39558, + 39559, + 39560, + 39561, + 39562, + 39563, + 39564, + 39565, + 39566, + 39567, + 39568, + 39569, + 39570, + 39571, + 39572, + 39573, + 39574, + 39575, + 39576, + 39577, + 39578, + 39579, + 39580, + 39581, + 39582, + 39583, + 39584, + 39585, + 39586, + 39587, + 39588, + 39589, + 39590, + 39591, + 39592, + 39593, + 39594, + 39595, + 39596, + 39597, + 39598, + 39599, + 39600, + 39601, + 39602, + 39603, + 39604, + 39605, + 39606, + 39607, + 39608, + 39609, + 39610, + 39611, + 39612, + 39613, + 39614, + 39615, + 39616, + 39617, + 39618, + 39619, + 39620, + 39621, + 39622, + 39623, + 39624, + 39625, + 39626, + 39627, + 39628, + 39629, + 39630, + 39631, + 39632, + 39633, + 39634, + 39635, + 39636, + 39637, + 
39638, + 39639, + 39640, + 39641, + 39642, + 39643, + 39644, + 39645, + 39646, + 39647, + 39648, + 39649, + 39650, + 39651, + 39652, + 39653, + 39654, + 39655, + 39656, + 39657, + 39658, + 39659, + 39660, + 39661, + 39662, + 39663, + 39664, + 39665, + 39666, + 39667, + 39668, + 39669, + 39670, + 39671, + 39672, + 39673, + 39674, + 39675, + 39676, + 39677, + 39678, + 39679, + 39680, + 39681, + 39682, + 39683, + 39684, + 39685, + 39686, + 39687, + 39688, + 39689, + 39690, + 39691, + 39692, + 39693, + 39694, + 39695, + 39696, + 39697, + 39698, + 39699, + 39700, + 39701, + 39702, + 39703, + 39704, + 39705, + 39706, + 39707, + 39708, + 39709, + 39710, + 39711, + 39712, + 39713, + 39714, + 39715, + 39716, + 39717, + 39718, + 39719, + 39720, + 39721, + 39722, + 39723, + 39724, + 39725, + 39726, + 39727, + 39728, + 39729, + 39730, + 39731, + 39732, + 39733, + 39734, + 39735, + 39736, + 39737, + 39738, + 39739, + 39740, + 39741, + 39742, + 39743, + 39744, + 39745, + 39746, + 39747, + 39748, + 39749, + 39750, + 39751, + 39752, + 39753, + 39754, + 39755, + 39756, + 39757, + 39758, + 39759, + 39760, + 39761, + 39762, + 39763, + 39764, + 39765, + 39766, + 39767, + 39768, + 39769, + 39770, + 39771, + 39772, + 39773, + 39774, + 39775, + 39776, + 39777, + 39778, + 39779, + 39780, + 39781, + 39782, + 39783, + 39784, + 39785, + 39786, + 39787, + 39788, + 39789, + 39790, + 39791, + 39792, + 39793, + 39794, + 39795, + 39796, + 39797, + 39798, + 39799, + 39800, + 39801, + 39802, + 39803, + 39804, + 39805, + 39806, + 39807, + 39808, + 39809, + 39810, + 39811, + 39812, + 39813, + 39814, + 39815, + 39816, + 39817, + 39818, + 39819, + 39820, + 39821, + 39822, + 39823, + 39824, + 39825, + 39826, + 39827, + 39828, + 39829, + 39830, + 39831, + 39832, + 39833, + 39834, + 39835, + 39836, + 39837, + 39838, + 39839, + 39840, + 39841, + 39842, + 39843, + 39844, + 39845, + 39846, + 39847, + 39848, + 39849, + 39850, + 39851, + 39852, + 39853, + 39854, + 39855, + 39856, + 39857, + 39858, + 39859, + 
39860, + 39861, + 39862, + 39863, + 39864, + 39865, + 39866, + 39867, + 39868, + 39869, + 39870, + 39871, + 39872, + 39873, + 39874, + 39875, + 39876, + 39877, + 39878, + 39879, + 39880, + 39881, + 39882, + 39883, + 39884, + 39885, + 39886, + 39887, + 39888, + 39889, + 39890, + 39891, + 39892, + 39893, + 39894, + 39895, + 39896, + 39897, + 39898, + 39899, + 39900, + 39901, + 39902, + 39903, + 39904, + 39905, + 39906, + 39907, + 39908, + 39909, + 39910, + 39911, + 39912, + 39913, + 39914, + 39915, + 39916, + 39917, + 39918, + 39919, + 39920, + 39921, + 39922, + 39923, + 39924, + 39925, + 39926, + 39927, + 39928, + 39929, + 39930, + 39931, + 39932, + 39933, + 39934, + 39935, + 39936, + 39937, + 39938, + 39939, + 39940, + 39941, + 39942, + 39943, + 39944, + 39945, + 39946, + 39947, + 39948, + 39949, + 39950, + 39951, + 39952, + 39953, + 39954, + 39955, + 39956, + 39957, + 39958, + 39959, + 39960, + 39961, + 39962, + 39963, + 39964, + 39965, + 39966, + 39967, + 39968, + 39969, + 39970, + 39971, + 39972, + 39973, + 39974, + 39975, + 39976, + 39977, + 39978, + 39979, + 39980, + 39981, + 39982, + 39983, + 39984, + 39985, + 39986, + 39987, + 39988, + 39989, + 39990, + 39991, + 39992, + 39993, + 39994, + 39995, + 39996, + 39997, + 39998, + 39999, + 40000, + 40001, + 40002, + 40003, + 40004, + 40005, + 40006, + 40007, + 40008, + 40009, + 40010, + 40011, + 40012, + 40013, + 40014, + 40015, + 40016, + 40017, + 40018, + 40019, + 40020, + 40021, + 40022, + 40023, + 40024, + 40025, + 40026, + 40027, + 40028, + 40029, + 40030, + 40031, + 40032, + 40033, + 40034, + 40035, + 40036, + 40037, + 40038, + 40039, + 40040, + 40041, + 40042, + 40043, + 40044, + 40045, + 40046, + 40047, + 40048, + 40049, + 40050, + 40051, + 40052, + 40053, + 40054, + 40055, + 40056, + 40057, + 40058, + 40059, + 40060, + 40061, + 40062, + 40063, + 40064, + 40065, + 40066, + 40067, + 40068, + 40069, + 40070, + 40071, + 40072, + 40073, + 40074, + 40075, + 40076, + 40077, + 40078, + 40079, + 40080, + 40081, + 
40082, + 40083, + 40084, + 40085, + 40086, + 40087, + 40088, + 40089, + 40090, + 40091, + 40092, + 40093, + 40094, + 40095, + 40096, + 40097, + 40098, + 40099, + 40100, + 40101, + 40102, + 40103, + 40104, + 40105, + 40106, + 40107, + 40108, + 40109, + 40110, + 40111, + 40112, + 40113, + 40114, + 40115, + 40116, + 40117, + 40118, + 40119, + 40120, + 40121, + 40122, + 40123, + 40124, + 40125, + 40126, + 40127, + 40128, + 40129, + 40130, + 40131, + 40132, + 40133, + 40134, + 40135, + 40136, + 40137, + 40138, + 40139, + 40140, + 40141, + 40142, + 40143, + 40144, + 40145, + 40146, + 40147, + 40148, + 40149, + 40150, + 40151, + 40152, + 40153, + 40154, + 40155, + 40156, + 40157, + 40158, + 40159, + 40160, + 40161, + 40162, + 40163, + 40164, + 40165, + 40166, + 40167, + 40168, + 40169, + 40170, + 40171, + 40172, + 40173, + 40174, + 40175, + 40176, + 40177, + 40178, + 40179, + 40180, + 40181, + 40182, + 40183, + 40184, + 40185, + 40186, + 40187, + 40188, + 40189, + 40190, + 40191, + 40192, + 40193, + 40194, + 40195, + 40196, + 40197, + 40198, + 40199, + 40200, + 40201, + 40202, + 40203, + 40204, + 40205, + 40206, + 40207, + 40208, + 40209, + 40210, + 40211, + 40212, + 40213, + 40214, + 40215, + 40216, + 40217, + 40218, + 40219, + 40220, + 40221, + 40222, + 40223, + 40224, + 40225, + 40226, + 40227, + 40228, + 40229, + 40230, + 40231, + 40232, + 40233, + 40234, + 40235, + 40236, + 40237, + 40238, + 40239, + 40240, + 40241, + 40242, + 40243, + 40244, + 40245, + 40246, + 40247, + 40248, + 40249, + 40250, + 40251, + 40252, + 40253, + 40254, + 40255, + 40256, + 40257, + 40258, + 40259, + 40260, + 40261, + 40262, + 40263, + 40264, + 40265, + 40266, + 40267, + 40268, + 40269, + 40270, + 40271, + 40272, + 40273, + 40274, + 40275, + 40276, + 40277, + 40278, + 40279, + 40280, + 40281, + 40282, + 40283, + 40284, + 40285, + 40286, + 40287, + 40288, + 40289, + 40290, + 40291, + 40292, + 40293, + 40294, + 40295, + 40296, + 40297, + 40298, + 40299, + 40300, + 40301, + 40302, + 40303, + 
40304, + 40305, + 40306, + 40307, + 40308, + 40309, + 40310, + 40311, + 40312, + 40313, + 40314, + 40315, + 40316, + 40317, + 40318, + 40319, + 40320, + 40321, + 40322, + 40323, + 40324, + 40325, + 40326, + 40327, + 40328, + 40329, + 40330, + 40331, + 40332, + 40333, + 40334, + 40335, + 40336, + 40337, + 40338, + 40339, + 40340, + 40341, + 40342, + 40343, + 40344, + 40345, + 40346, + 40347, + 40348, + 40349, + 40350, + 40351, + 40352, + 40353, + 40354, + 40355, + 40356, + 40357, + 40358, + 40359, + 40360, + 40361, + 40362, + 40363, + 40364, + 40365, + 40366, + 40367, + 40368, + 40369, + 40370, + 40371, + 40372, + 40373, + 40374, + 40375, + 40376, + 40377, + 40378, + 40379, + 40380, + 40381, + 40382, + 40383, + 40384, + 40385, + 40386, + 40387, + 40388, + 40389, + 40390, + 40391, + 40392, + 40393, + 40394, + 40395, + 40396, + 40397, + 40398, + 40399, + 40400, + 40401, + 40402, + 40403, + 40404, + 40405, + 40406, + 40407, + 40408, + 40409, + 40410, + 40411, + 40412, + 40413, + 40414, + 40415, + 40416, + 40417, + 40418, + 40419, + 40420, + 40421, + 40422, + 40423, + 40424, + 40425, + 40426, + 40427, + 40428, + 40429, + 40430, + 40431, + 40432, + 40433, + 40434, + 40435, + 40436, + 40437, + 40438, + 40439, + 40440, + 40441, + 40442, + 40443, + 40444, + 40445, + 40446, + 40447, + 40448, + 40449, + 40450, + 40451, + 40452, + 40453, + 40454, + 40455, + 40456, + 40457, + 40458, + 40459, + 40460, + 40461, + 40462, + 40463, + 40464, + 40465, + 40466, + 40467, + 40468, + 40469, + 40470, + 40471, + 40472, + 40473, + 40474, + 40475, + 40476, + 40477, + 40478, + 40479, + 40480, + 40481, + 40482, + 40483, + 40484, + 40485, + 40486, + 40487, + 40488, + 40489, + 40490, + 40491, + 40492, + 40493, + 40494, + 40495, + 40496, + 40497, + 40498, + 40499, + 40500, + 40501, + 40502, + 40503, + 40504, + 40505, + 40506, + 40507, + 40508, + 40509, + 40510, + 40511, + 40512, + 40513, + 40514, + 40515, + 40516, + 40517, + 40518, + 40519, + 40520, + 40521, + 40522, + 40523, + 40524, + 40525, + 
40526, + 40527, + 40528, + 40529, + 40530, + 40531, + 40532, + 40533, + 40534, + 40535, + 40536, + 40537, + 40538, + 40539, + 40540, + 40541, + 40542, + 40543, + 40544, + 40545, + 40546, + 40547, + 40548, + 40549, + 40550, + 40551, + 40552, + 40553, + 40554, + 40555, + 40556, + 40557, + 40558, + 40559, + 40560, + 40561, + 40562, + 40563, + 40564, + 40565, + 40566, + 40567, + 40568, + 40569, + 40570, + 40571, + 40572, + 40573, + 40574, + 40575, + 40576, + 40577, + 40578, + 40579, + 40580, + 40581, + 40582, + 40583, + 40584, + 40585, + 40586, + 40587, + 40588, + 40589, + 40590, + 40591, + 40592, + 40593, + 40594, + 40595, + 40596, + 40597, + 40598, + 40599, + 40600, + 40601, + 40602, + 40603, + 40604, + 40605, + 40606, + 40607, + 40608, + 40609, + 40610, + 40611, + 40612, + 40613, + 40614, + 40615, + 40616, + 40617, + 40618, + 40619, + 40620, + 40621, + 40622, + 40623, + 40624, + 40625, + 40626, + 40627, + 40628, + 40629, + 40630, + 40631, + 40632, + 40633, + 40634, + 40635, + 40636, + 40637, + 40638, + 40639, + 40640, + 40641, + 40642, + 40643, + 40644, + 40645, + 40646, + 40647, + 40648, + 40649, + 40650, + 40651, + 40652, + 40653, + 40654, + 40655, + 40656, + 40657, + 40658, + 40659, + 40660, + 40661, + 40662, + 40663, + 40664, + 40665, + 40666, + 40667, + 40668, + 40669, + 40670, + 40671, + 40672, + 40673, + 40674, + 40675, + 40676, + 40677, + 40678, + 40679, + 40680, + 40681, + 40682, + 40683, + 40684, + 40685, + 40686, + 40687, + 40688, + 40689, + 40690, + 40691, + 40692, + 40693, + 40694, + 40695, + 40696, + 40697, + 40698, + 40699, + 40700, + 40701, + 40702, + 40703, + 40704, + 40705, + 40706, + 40707, + 40708, + 40709, + 40710, + 40711, + 40712, + 40713, + 40714, + 40715, + 40716, + 40717, + 40718, + 40719, + 40720, + 40721, + 40722, + 40723, + 40724, + 40725, + 40726, + 40727, + 40728, + 40729, + 40730, + 40731, + 40732, + 40733, + 40734, + 40735, + 40736, + 40737, + 40738, + 40739, + 40740, + 40741, + 40742, + 40743, + 40744, + 40745, + 40746, + 40747, + 
40748, + 40749, + 40750, + 40751, + 40752, + 40753, + 40754, + 40755, + 40756, + 40757, + 40758, + 40759, + 40760, + 40761, + 40762, + 40763, + 40764, + 40765, + 40766, + 40767, + 40768, + 40769, + 40770, + 40771, + 40772, + 40773, + 40774, + 40775, + 40776, + 40777, + 40778, + 40779, + 40780, + 40781, + 40782, + 40783, + 40784, + 40785, + 40786, + 40787, + 40788, + 40789, + 40790, + 40791, + 40792, + 40793, + 40794, + 40795, + 40796, + 40797, + 40798, + 40799, + 40800, + 40801, + 40802, + 40803, + 40804, + 40805, + 40806, + 40807, + 40808, + 40809, + 40810, + 40811, + 40812, + 40813, + 40814, + 40815, + 40816, + 40817, + 40818, + 40819, + 40820, + 40821, + 40822, + 40823, + 40824, + 40825, + 40826, + 40827, + 40828, + 40829, + 40830, + 40831, + 40832, + 40833, + 40834, + 40835, + 40836, + 40837, + 40838, + 40839, + 40840, + 40841, + 40842, + 40843, + 40844, + 40845, + 40846, + 40847, + 40848, + 40849, + 40850, + 40851, + 40852, + 40853, + 40854, + 40855, + 40856, + 40857, + 40858, + 40859, + 40860, + 40861, + 40862, + 40863, + 40864, + 40865, + 40866, + 40867, + 40868, + 40869, + 40870, + 40871, + 40872, + 40873, + 40874, + 40875, + 40876, + 40877, + 40878, + 40879, + 40880, + 40881, + 40882, + 40883, + 40884, + 40885, + 40886, + 40887, + 40888, + 40889, + 40890, + 40891, + 40892, + 40893, + 40894, + 40895, + 40896, + 40897, + 40898, + 40899, + 40900, + 40901, + 40902, + 40903, + 40904, + 40905, + 40906, + 40907, + 40908, + 40909, + 40910, + 40911, + 40912, + 40913, + 40914, + 40915, + 40916, + 40917, + 40918, + 40919, + 40920, + 40921, + 40922, + 40923, + 40924, + 40925, + 40926, + 40927, + 40928, + 40929, + 40930, + 40931, + 40932, + 40933, + 40934, + 40935, + 40936, + 40937, + 40938, + 40939, + 40940, + 40941, + 40942, + 40943, + 40944, + 40945, + 40946, + 40947, + 40948, + 40949, + 40950, + 40951, + 40952, + 40953, + 40954, + 40955, + 40956, + 40957, + 40958, + 40959, + 40960, + 40961, + 40962, + 40963, + 40964, + 40965, + 40966, + 40967, + 40968, + 40969, + 
40970, + 40971, + 40972, + 40973, + 40974, + 40975, + 40976, + 40977, + 40978, + 40979, + 40980, + 40981, + 40982, + 40983, + 40984, + 40985, + 40986, + 40987, + 40988, + 40989, + 40990, + 40991, + 40992, + 40993, + 40994, + 40995, + 40996, + 40997, + 40998, + 40999, + 41000, + 41001, + 41002, + 41003, + 41004, + 41005, + 41006, + 41007, + 41008, + 41009, + 41010, + 41011, + 41012, + 41013, + 41014, + 41015, + 41016, + 41017, + 41018, + 41019, + 41020, + 41021, + 41022, + 41023, + 41024, + 41025, + 41026, + 41027, + 41028, + 41029, + 41030, + 41031, + 41032, + 41033, + 41034, + 41035, + 41036, + 41037, + 41038, + 41039, + 41040, + 41041, + 41042, + 41043, + 41044, + 41045, + 41046, + 41047, + 41048, + 41049, + 41050, + 41051, + 41052, + 41053, + 41054, + 41055, + 41056, + 41057, + 41058, + 41059, + 41060, + 41061, + 41062, + 41063, + 41064, + 41065, + 41066, + 41067, + 41068, + 41069, + 41070, + 41071, + 41072, + 41073, + 41074, + 41075, + 41076, + 41077, + 41078, + 41079, + 41080, + 41081, + 41082, + 41083, + 41084, + 41085, + 41086, + 41087, + 41088, + 41089, + 41090, + 41091, + 41092, + 41093, + 41094, + 41095, + 41096, + 41097, + 41098, + 41099, + 41100, + 41101, + 41102, + 41103, + 41104, + 41105, + 41106, + 41107, + 41108, + 41109, + 41110, + 41111, + 41112, + 41113, + 41114, + 41115, + 41116, + 41117, + 41118, + 41119, + 41120, + 41121, + 41122, + 41123, + 41124, + 41125, + 41126, + 41127, + 41128, + 41129, + 41130, + 41131, + 41132, + 41133, + 41134, + 41135, + 41136, + 41137, + 41138, + 41139, + 41140, + 41141, + 41142, + 41143, + 41144, + 41145, + 41146, + 41147, + 41148, + 41149, + 41150, + 41151, + 41152, + 41153, + 41154, + 41155, + 41156, + 41157, + 41158, + 41159, + 41160, + 41161, + 41162, + 41163, + 41164, + 41165, + 41166, + 41167, + 41168, + 41169, + 41170, + 41171, + 41172, + 41173, + 41174, + 41175, + 41176, + 41177, + 41178, + 41179, + 41180, + 41181, + 41182, + 41183, + 41184, + 41185, + 41186, + 41187, + 41188, + 41189, + 41190, + 41191, + 
41192, + 41193, + 41194, + 41195, + 41196, + 41197, + 41198, + 41199, + 41200, + 41201, + 41202, + 41203, + 41204, + 41205, + 41206, + 41207, + 41208, + 41209, + 41210, + 41211, + 41212, + 41213, + 41214, + 41215, + 41216, + 41217, + 41218, + 41219, + 41220, + 41221, + 41222, + 41223, + 41224, + 41225, + 41226, + 41227, + 41228, + 41229, + 41230, + 41231, + 41232, + 41233, + 41234, + 41235, + 41236, + 41237, + 41238, + 41239, + 41240, + 41241, + 41242, + 41243, + 41244, + 41245, + 41246, + 41247, + 41248, + 41249, + 41250, + 41251, + 41252, + 41253, + 41254, + 41255, + 41256, + 41257, + 41258, + 41259, + 41260, + 41261, + 41262, + 41263, + 41264, + 41265, + 41266, + 41267, + 41268, + 41269, + 41270, + 41271, + 41272, + 41273, + 41274, + 41275, + 41276, + 41277, + 41278, + 41279, + 41280, + 41281, + 41282, + 41283, + 41284, + 41285, + 41286, + 41287, + 41288, + 41289, + 41290, + 41291, + 41292, + 41293, + 41294, + 41295, + 41296, + 41297, + 41298, + 41299, + 41300, + 41301, + 41302, + 41303, + 41304, + 41305, + 41306, + 41307, + 41308, + 41309, + 41310, + 41311, + 41312, + 41313, + 41314, + 41315, + 41316, + 41317, + 41318, + 41319, + 41320, + 41321, + 41322, + 41323, + 41324, + 41325, + 41326, + 41327, + 41328, + 41329, + 41330, + 41331, + 41332, + 41333, + 41334, + 41335, + 41336, + 41337, + 41338, + 41339, + 41340, + 41341, + 41342, + 41343, + 41344, + 41345, + 41346, + 41347, + 41348, + 41349, + 41350, + 41351, + 41352, + 41353, + 41354, + 41355, + 41356, + 41357, + 41358, + 41359, + 41360, + 41361, + 41362, + 41363, + 41364, + 41365, + 41366, + 41367, + 41368, + 41369, + 41370, + 41371, + 41372, + 41373, + 41374, + 41375, + 41376, + 41377, + 41378, + 41379, + 41380, + 41381, + 41382, + 41383, + 41384, + 41385, + 41386, + 41387, + 41388, + 41389, + 41390, + 41391, + 41392, + 41393, + 41394, + 41395, + 41396, + 41397, + 41398, + 41399, + 41400, + 41401, + 41402, + 41403, + 41404, + 41405, + 41406, + 41407, + 41408, + 41409, + 41410, + 41411, + 41412, + 41413, + 
41414, + 41415, + 41416, + 41417, + 41418, + 41419, + 41420, + 41421, + 41422, + 41423, + 41424, + 41425, + 41426, + 41427, + 41428, + 41429, + 41430, + 41431, + 41432, + 41433, + 41434, + 41435, + 41436, + 41437, + 41438, + 41439, + 41440, + 41441, + 41442, + 41443, + 41444, + 41445, + 41446, + 41447, + 41448, + 41449, + 41450, + 41451, + 41452, + 41453, + 41454, + 41455, + 41456, + 41457, + 41458, + 41459, + 41460, + 41461, + 41462, + 41463, + 41464, + 41465, + 41466, + 41467, + 41468, + 41469, + 41470, + 41471, + 41472, + 41473, + 41474, + 41475, + 41476, + 41477, + 41478, + 41479, + 41480, + 41481, + 41482, + 41483, + 41484, + 41485, + 41486, + 41487, + 41488, + 41489, + 41490, + 41491, + 41492, + 41493, + 41494, + 41495, + 41496, + 41497, + 41498, + 41499, + 41500, + 41501, + 41502, + 41503, + 41504, + 41505, + 41506, + 41507, + 41508, + 41509, + 41510, + 41511, + 41512, + 41513, + 41514, + 41515, + 41516, + 41517, + 41518, + 41519, + 41520, + 41521, + 41522, + 41523, + 41524, + 41525, + 41526, + 41527, + 41528, + 41529, + 41530, + 41531, + 41532, + 41533, + 41534, + 41535, + 41536, + 41537, + 41538, + 41539, + 41540, + 41541, + 41542, + 41543, + 41544, + 41545, + 41546, + 41547, + 41548, + 41549, + 41550, + 41551, + 41552, + 41553, + 41554, + 41555, + 41556, + 41557, + 41558, + 41559, + 41560, + 41561, + 41562, + 41563, + 41564, + 41565, + 41566, + 41567, + 41568, + 41569, + 41570, + 41571, + 41572, + 41573, + 41574, + 41575, + 41576, + 41577, + 41578, + 41579, + 41580, + 41581, + 41582, + 41583, + 41584, + 41585, + 41586, + 41587, + 41588, + 41589, + 41590, + 41591, + 41592, + 41593, + 41594, + 41595, + 41596, + 41597, + 41598, + 41599, + 41600, + 41601, + 41602, + 41603, + 41604, + 41605, + 41606, + 41607, + 41608, + 41609, + 41610, + 41611, + 41612, + 41613, + 41614, + 41615, + 41616, + 41617, + 41618, + 41619, + 41620, + 41621, + 41622, + 41623, + 41624, + 41625, + 41626, + 41627, + 41628, + 41629, + 41630, + 41631, + 41632, + 41633, + 41634, + 41635, + 
41636, + 41637, + 41638, + 41639, + 41640, + 41641, + 41642, + 41643, + 41644, + 41645, + 41646, + 41647, + 41648, + 41649, + 41650, + 41651, + 41652, + 41653, + 41654, + 41655, + 41656, + 41657, + 41658, + 41659, + 41660, + 41661, + 41662, + 41663, + 41664, + 41665, + 41666, + 41667, + 41668, + 41669, + 41670, + 41671, + 41672, + 41673, + 41674, + 41675, + 41676, + 41677, + 41678, + 41679, + 41680, + 41681, + 41682, + 41683, + 41684, + 41685, + 41686, + 41687, + 41688, + 41689, + 41690, + 41691, + 41692, + 41693, + 41694, + 41695, + 41696, + 41697, + 41698, + 41699, + 41700, + 41701, + 41702, + 41703, + 41704, + 41705, + 41706, + 41707, + 41708, + 41709, + 41710, + 41711, + 41712, + 41713, + 41714, + 41715, + 41716, + 41717, + 41718, + 41719, + 41720, + 41721, + 41722, + 41723, + 41724, + 41725, + 41726, + 41727, + 41728, + 41729, + 41730, + 41731, + 41732, + 41733, + 41734, + 41735, + 41736, + 41737, + 41738, + 41739, + 41740, + 41741, + 41742, + 41743, + 41744, + 41745, + 41746, + 41747, + 41748, + 41749, + 41750, + 41751, + 41752, + 41753, + 41754, + 41755, + 41756, + 41757, + 41758, + 41759, + 41760, + 41761, + 41762, + 41763, + 41764, + 41765, + 41766, + 41767, + 41768, + 41769, + 41770, + 41771, + 41772, + 41773, + 41774, + 41775, + 41776, + 41777, + 41778, + 41779, + 41780, + 41781, + 41782, + 41783, + 41784, + 41785, + 41786, + 41787, + 41788, + 41789, + 41790, + 41791, + 41792, + 41793, + 41794, + 41795, + 41796, + 41797, + 41798, + 41799, + 41800, + 41801, + 41802, + 41803, + 41804, + 41805, + 41806, + 41807, + 41808, + 41809, + 41810, + 41811, + 41812, + 41813, + 41814, + 41815, + 41816, + 41817, + 41818, + 41819, + 41820, + 41821, + 41822, + 41823, + 41824, + 41825, + 41826, + 41827, + 41828, + 41829, + 41830, + 41831, + 41832, + 41833, + 41834, + 41835, + 41836, + 41837, + 41838, + 41839, + 41840, + 41841, + 41842, + 41843, + 41844, + 41845, + 41846, + 41847, + 41848, + 41849, + 41850, + 41851, + 41852, + 41853, + 41854, + 41855, + 41856, + 41857, + 
41858, + 41859, + 41860, + 41861, + 41862, + 41863, + 41864, + 41865, + 41866, + 41867, + 41868, + 41869, + 41870, + 41871, + 41872, + 41873, + 41874, + 41875, + 41876, + 41877, + 41878, + 41879, + 41880, + 41881, + 41882, + 41883, + 41884, + 41885, + 41886, + 41887, + 41888, + 41889, + 41890, + 41891, + 41892, + 41893, + 41894, + 41895, + 41896, + 41897, + 41898, + 41899, + 41900, + 41901, + 41902, + 41903, + 41904, + 41905, + 41906, + 41907, + 41908, + 41909, + 41910, + 41911, + 41912, + 41913, + 41914, + 41915, + 41916, + 41917, + 41918, + 41919, + 41920, + 41921, + 41922, + 41923, + 41924, + 41925, + 41926, + 41927, + 41928, + 41929, + 41930, + 41931, + 41932, + 41933, + 41934, + 41935, + 41936, + 41937, + 41938, + 41939, + 41940, + 41941, + 41942, + 41943, + 41944, + 41945, + 41946, + 41947, + 41948, + 41949, + 41950, + 41951, + 41952, + 41953, + 41954, + 41955, + 41956, + 41957, + 41958, + 41959, + 41960, + 41961, + 41962, + 41963, + 41964, + 41965, + 41966, + 41967, + 41968, + 41969, + 41970, + 41971, + 41972, + 41973, + 41974, + 41975, + 41976, + 41977, + 41978, + 41979, + 41980, + 41981, + 41982, + 41983, + 41984, + 41985, + 41986, + 41987, + 41988, + 41989, + 41990, + 41991, + 41992, + 41993, + 41994, + 41995, + 41996, + 41997, + 41998, + 41999, + 42000, + 42001, + 42002, + 42003, + 42004, + 42005, + 42006, + 42007, + 42008, + 42009, + 42010, + 42011, + 42012, + 42013, + 42014, + 42015, + 42016, + 42017, + 42018, + 42019, + 42020, + 42021, + 42022, + 42023, + 42024, + 42025, + 42026, + 42027, + 42028, + 42029, + 42030, + 42031, + 42032, + 42033, + 42034, + 42035, + 42036, + 42037, + 42038, + 42039, + 42040, + 42041, + 42042, + 42043, + 42044, + 42045, + 42046, + 42047, + 42048, + 42049, + 42050, + 42051, + 42052, + 42053, + 42054, + 42055, + 42056, + 42057, + 42058, + 42059, + 42060, + 42061, + 42062, + 42063, + 42064, + 42065, + 42066, + 42067, + 42068, + 42069, + 42070, + 42071, + 42072, + 42073, + 42074, + 42075, + 42076, + 42077, + 42078, + 42079, + 
42080, + 42081, + 42082, + 42083, + 42084, + 42085, + 42086, + 42087, + 42088, + 42089, + 42090, + 42091, + 42092, + 42093, + 42094, + 42095, + 42096, + 42097, + 42098, + 42099, + 42100, + 42101, + 42102, + 42103, + 42104, + 42105, + 42106, + 42107, + 42108, + 42109, + 42110, + 42111, + 42112, + 42113, + 42114, + 42115, + 42116, + 42117, + 42118, + 42119, + 42120, + 42121, + 42122, + 42123, + 42124, + 42125, + 42126, + 42127, + 42128, + 42129, + 42130, + 42131, + 42132, + 42133, + 42134, + 42135, + 42136, + 42137, + 42138, + 42139, + 42140, + 42141, + 42142, + 42143, + 42144, + 42145, + 42146, + 42147, + 42148, + 42149, + 42150, + 42151, + 42152, + 42153, + 42154, + 42155, + 42156, + 42157, + 42158, + 42159, + 42160, + 42161, + 42162, + 42163, + 42164, + 42165, + 42166, + 42167, + 42168, + 42169, + 42170, + 42171, + 42172, + 42173, + 42174, + 42175, + 42176, + 42177, + 42178, + 42179, + 42180, + 42181, + 42182, + 42183, + 42184, + 42185, + 42186, + 42187, + 42188, + 42189, + 42190, + 42191, + 42192, + 42193, + 42194, + 42195, + 42196, + 42197, + 42198, + 42199, + 42200, + 42201, + 42202, + 42203, + 42204, + 42205, + 42206, + 42207, + 42208, + 42209, + 42210, + 42211, + 42212, + 42213, + 42214, + 42215, + 42216, + 42217, + 42218, + 42219, + 42220, + 42221, + 42222, + 42223, + 42224, + 42225, + 42226, + 42227, + 42228, + 42229, + 42230, + 42231, + 42232, + 42233, + 42234, + 42235, + 42236, + 42237, + 42238, + 42239, + 42240, + 42241, + 42242, + 42243, + 42244, + 42245, + 42246, + 42247, + 42248, + 42249, + 42250, + 42251, + 42252, + 42253, + 42254, + 42255, + 42256, + 42257, + 42258, + 42259, + 42260, + 42261, + 42262, + 42263, + 42264, + 42265, + 42266, + 42267, + 42268, + 42269, + 42270, + 42271, + 42272, + 42273, + 42274, + 42275, + 42276, + 42277, + 42278, + 42279, + 42280, + 42281, + 42282, + 42283, + 42284, + 42285, + 42286, + 42287, + 42288, + 42289, + 42290, + 42291, + 42292, + 42293, + 42294, + 42295, + 42296, + 42297, + 42298, + 42299, + 42300, + 42301, + 
42302, + 42303, + 42304, + 42305, + 42306, + 42307, + 42308, + 42309, + 42310, + 42311, + 42312, + 42313, + 42314, + 42315, + 42316, + 42317, + 42318, + 42319, + 42320, + 42321, + 42322, + 42323, + 42324, + 42325, + 42326, + 42327, + 42328, + 42329, + 42330, + 42331, + 42332, + 42333, + 42334, + 42335, + 42336, + 42337, + 42338, + 42339, + 42340, + 42341, + 42342, + 42343, + 42344, + 42345, + 42346, + 42347, + 42348, + 42349, + 42350, + 42351, + 42352, + 42353, + 42354, + 42355, + 42356, + 42357, + 42358, + 42359, + 42360, + 42361, + 42362, + 42363, + 42364, + 42365, + 42366, + 42367, + 42368, + 42369, + 42370, + 42371, + 42372, + 42373, + 42374, + 42375, + 42376, + 42377, + 42378, + 42379, + 42380, + 42381, + 42382, + 42383, + 42384, + 42385, + 42386, + 42387, + 42388, + 42389, + 42390, + 42391, + 42392, + 42393, + 42394, + 42395, + 42396, + 42397, + 42398, + 42399, + 42400, + 42401, + 42402, + 42403, + 42404, + 42405, + 42406, + 42407, + 42408, + 42409, + 42410, + 42411, + 42412, + 42413, + 42414, + 42415, + 42416, + 42417, + 42418, + 42419, + 42420, + 42421, + 42422, + 42423, + 42424, + 42425, + 42426, + 42427, + 42428, + 42429, + 42430, + 42431, + 42432, + 42433, + 42434, + 42435, + 42436, + 42437, + 42438, + 42439, + 42440, + 42441, + 42442, + 42443, + 42444, + 42445, + 42446, + 42447, + 42448, + 42449, + 42450, + 42451, + 42452, + 42453, + 42454, + 42455, + 42456, + 42457, + 42458, + 42459, + 42460, + 42461, + 42462, + 42463, + 42464, + 42465, + 42466, + 42467, + 42468, + 42469, + 42470, + 42471, + 42472, + 42473, + 42474, + 42475, + 42476, + 42477, + 42478, + 42479, + 42480, + 42481, + 42482, + 42483, + 42484, + 42485, + 42486, + 42487, + 42488, + 42489, + 42490, + 42491, + 42492, + 42493, + 42494, + 42495, + 42496, + 42497, + 42498, + 42499, + 42500, + 42501, + 42502, + 42503, + 42504, + 42505, + 42506, + 42507, + 42508, + 42509, + 42510, + 42511, + 42512, + 42513, + 42514, + 42515, + 42516, + 42517, + 42518, + 42519, + 42520, + 42521, + 42522, + 42523, + 
42524, + 42525, + 42526, + 42527, + 42528, + 42529, + 42530, + 42531, + 42532, + 42533, + 42534, + 42535, + 42536, + 42537, + 42538, + 42539, + 42540, + 42541, + 42542, + 42543, + 42544, + 42545, + 42546, + 42547, + 42548, + 42549, + 42550, + 42551, + 42552, + 42553, + 42554, + 42555, + 42556, + 42557, + 42558, + 42559, + 42560, + 42561, + 42562, + 42563, + 42564, + 42565, + 42566, + 42567, + 42568, + 42569, + 42570, + 42571, + 42572, + 42573, + 42574, + 42575, + 42576, + 42577, + 42578, + 42579, + 42580, + 42581, + 42582, + 42583, + 42584, + 42585, + 42586, + 42587, + 42588, + 42589, + 42590, + 42591, + 42592, + 42593, + 42594, + 42595, + 42596, + 42597, + 42598, + 42599, + 42600, + 42601, + 42602, + 42603, + 42604, + 42605, + 42606, + 42607, + 42608, + 42609, + 42610, + 42611, + 42612, + 42613, + 42614, + 42615, + 42616, + 42617, + 42618, + 42619, + 42620, + 42621, + 42622, + 42623, + 42624, + 42625, + 42626, + 42627, + 42628, + 42629, + 42630, + 42631, + 42632, + 42633, + 42634, + 42635, + 42636, + 42637, + 42638, + 42639, + 42640, + 42641, + 42642, + 42643, + 42644, + 42645, + 42646, + 42647, + 42648, + 42649, + 42650, + 42651, + 42652, + 42653, + 42654, + 42655, + 42656, + 42657, + 42658, + 42659, + 42660, + 42661, + 42662, + 42663, + 42664, + 42665, + 42666, + 42667, + 42668, + 42669, + 42670, + 42671, + 42672, + 42673, + 42674, + 42675, + 42676, + 42677, + 42678, + 42679, + 42680, + 42681, + 42682, + 42683, + 42684, + 42685, + 42686, + 42687, + 42688, + 42689, + 42690, + 42691, + 42692, + 42693, + 42694, + 42695, + 42696, + 42697, + 42698, + 42699, + 42700, + 42701, + 42702, + 42703, + 42704, + 42705, + 42706, + 42707, + 42708, + 42709, + 42710, + 42711, + 42712, + 42713, + 42714, + 42715, + 42716, + 42717, + 42718, + 42719, + 42720, + 42721, + 42722, + 42723, + 42724, + 42725, + 42726, + 42727, + 42728, + 42729, + 42730, + 42731, + 42732, + 42733, + 42734, + 42735, + 42736, + 42737, + 42738, + 42739, + 42740, + 42741, + 42742, + 42743, + 42744, + 42745, + 
42746, + 42747, + 42748, + 42749, + 42750, + 42751, + 42752, + 42753, + 42754, + 42755, + 42756, + 42757, + 42758, + 42759, + 42760, + 42761, + 42762, + 42763, + 42764, + 42765, + 42766, + 42767, + 42768, + 42769, + 42770, + 42771, + 42772, + 42773, + 42774, + 42775, + 42776, + 42777, + 42778, + 42779, + 42780, + 42781, + 42782, + 42783, + 42784, + 42785, + 42786, + 42787, + 42788, + 42789, + 42790, + 42791, + 42792, + 42793, + 42794, + 42795, + 42796, + 42797, + 42798, + 42799, + 42800, + 42801, + 42802, + 42803, + 42804, + 42805, + 42806, + 42807, + 42808, + 42809, + 42810, + 42811, + 42812, + 42813, + 42814, + 42815, + 42816, + 42817, + 42818, + 42819, + 42820, + 42821, + 42822, + 42823, + 42824, + 42825, + 42826, + 42827, + 42828, + 42829, + 42830, + 42831, + 42832, + 42833, + 42834, + 42835, + 42836, + 42837, + 42838, + 42839, + 42840, + 42841, + 42842, + 42843, + 42844, + 42845, + 42846, + 42847, + 42848, + 42849, + 42850, + 42851, + 42852, + 42853, + 42854, + 42855, + 42856, + 42857, + 42858, + 42859, + 42860, + 42861, + 42862, + 42863, + 42864, + 42865, + 42866, + 42867, + 42868, + 42869, + 42870, + 42871, + 42872, + 42873, + 42874, + 42875, + 42876, + 42877, + 42878, + 42879, + 42880, + 42881, + 42882, + 42883, + 42884, + 42885, + 42886, + 42887, + 42888, + 42889, + 42890, + 42891, + 42892, + 42893, + 42894, + 42895, + 42896, + 42897, + 42898, + 42899, + 42900, + 42901, + 42902, + 42903, + 42904, + 42905, + 42906, + 42907, + 42908, + 42909, + 42910, + 42911, + 42912, + 42913, + 42914, + 42915, + 42916, + 42917, + 42918, + 42919, + 42920, + 42921, + 42922, + 42923, + 42924, + 42925, + 42926, + 42927, + 42928, + 42929, + 42930, + 42931, + 42932, + 42933, + 42934, + 42935, + 42936, + 42937, + 42938, + 42939, + 42940, + 42941, + 42942, + 42943, + 42944, + 42945, + 42946, + 42947, + 42948, + 42949, + 42950, + 42951, + 42952, + 42953, + 42954, + 42955, + 42956, + 42957, + 42958, + 42959, + 42960, + 42961, + 42962, + 42963, + 42964, + 42965, + 42966, + 42967, + 
42968, + 42969, + 42970, + 42971, + 42972, + 42973, + 42974, + 42975, + 42976, + 42977, + 42978, + 42979, + 42980, + 42981, + 42982, + 42983, + 42984, + 42985, + 42986, + 42987, + 42988, + 42989, + 42990, + 42991, + 42992, + 42993, + 42994, + 42995, + 42996, + 42997, + 42998, + 42999, + 43000, + 43001, + 43002, + 43003, + 43004, + 43005, + 43006, + 43007, + 43008, + 43009, + 43010, + 43011, + 43012, + 43013, + 43014, + 43015, + 43016, + 43017, + 43018, + 43019, + 43020, + 43021, + 43022, + 43023, + 43024, + 43025, + 43026, + 43027, + 43028, + 43029, + 43030, + 43031, + 43032, + 43033, + 43034, + 43035, + 43036, + 43037, + 43038, + 43039, + 43040, + 43041, + 43042, + 43043, + 43044, + 43045, + 43046, + 43047, + 43048, + 43049, + 43050, + 43051, + 43052, + 43053, + 43054, + 43055, + 43056, + 43057, + 43058, + 43059, + 43060, + 43061, + 43062, + 43063, + 43064, + 43065, + 43066, + 43067, + 43068, + 43069, + 43070, + 43071, + 43072, + 43073, + 43074, + 43075, + 43076, + 43077, + 43078, + 43079, + 43080, + 43081, + 43082, + 43083, + 43084, + 43085, + 43086, + 43087, + 43088, + 43089, + 43090, + 43091, + 43092, + 43093, + 43094, + 43095, + 43096, + 43097, + 43098, + 43099, + 43100, + 43101, + 43102, + 43103, + 43104, + 43105, + 43106, + 43107, + 43108, + 43109, + 43110, + 43111, + 43112, + 43113, + 43114, + 43115, + 43116, + 43117, + 43118, + 43119, + 43120, + 43121, + 43122, + 43123, + 43124, + 43125, + 43126, + 43127, + 43128, + 43129, + 43130, + 43131, + 43132, + 43133, + 43134, + 43135, + 43136, + 43137, + 43138, + 43139, + 43140, + 43141, + 43142, + 43143, + 43144, + 43145, + 43146, + 43147, + 43148, + 43149, + 43150, + 43151, + 43152, + 43153, + 43154, + 43155, + 43156, + 43157, + 43158, + 43159, + 43160, + 43161, + 43162, + 43163, + 43164, + 43165, + 43166, + 43167, + 43168, + 43169, + 43170, + 43171, + 43172, + 43173, + 43174, + 43175, + 43176, + 43177, + 43178, + 43179, + 43180, + 43181, + 43182, + 43183, + 43184, + 43185, + 43186, + 43187, + 43188, + 43189, + 
43190, + 43191, + 43192, + 43193, + 43194, + 43195, + 43196, + 43197, + 43198, + 43199, + 43200, + 43201, + 43202, + 43203, + 43204, + 43205, + 43206, + 43207, + 43208, + 43209, + 43210, + 43211, + 43212, + 43213, + 43214, + 43215, + 43216, + 43217, + 43218, + 43219, + 43220, + 43221, + 43222, + 43223, + 43224, + 43225, + 43226, + 43227, + 43228, + 43229, + 43230, + 43231, + 43232, + 43233, + 43234, + 43235, + 43236, + 43237, + 43238, + 43239, + 43240, + 43241, + 43242, + 43243, + 43244, + 43245, + 43246, + 43247, + 43248, + 43249, + 43250, + 43251, + 43252, + 43253, + 43254, + 43255, + 43256, + 43257, + 43258, + 43259, + 43260, + 43261, + 43262, + 43263, + 43264, + 43265, + 43266, + 43267, + 43268, + 43269, + 43270, + 43271, + 43272, + 43273, + 43274, + 43275, + 43276, + 43277, + 43278, + 43279, + 43280, + 43281, + 43282, + 43283, + 43284, + 43285, + 43286, + 43287, + 43288, + 43289, + 43290, + 43291, + 43292, + 43293, + 43294, + 43295, + 43296, + 43297, + 43298, + 43299, + 43300, + 43301, + 43302, + 43303, + 43304, + 43305, + 43306, + 43307, + 43308, + 43309, + 43310, + 43311, + 43312, + 43313, + 43314, + 43315, + 43316, + 43317, + 43318, + 43319, + 43320, + 43321, + 43322, + 43323, + 43324, + 43325, + 43326, + 43327, + 43328, + 43329, + 43330, + 43331, + 43332, + 43333, + 43334, + 43335, + 43336, + 43337, + 43338, + 43339, + 43340, + 43341, + 43342, + 43343, + 43344, + 43345, + 43346, + 43347, + 43348, + 43349, + 43350, + 43351, + 43352, + 43353, + 43354, + 43355, + 43356, + 43357, + 43358, + 43359, + 43360, + 43361, + 43362, + 43363, + 43364, + 43365, + 43366, + 43367, + 43368, + 43369, + 43370, + 43371, + 43372, + 43373, + 43374, + 43375, + 43376, + 43377, + 43378, + 43379, + 43380, + 43381, + 43382, + 43383, + 43384, + 43385, + 43386, + 43387, + 43388, + 43389, + 43390, + 43391, + 43392, + 43393, + 43394, + 43395, + 43396, + 43397, + 43398, + 43399, + 43400, + 43401, + 43402, + 43403, + 43404, + 43405, + 43406, + 43407, + 43408, + 43409, + 43410, + 43411, + 
43412, + 43413, + 43414, + 43415, + 43416, + 43417, + 43418, + 43419, + 43420, + 43421, + 43422, + 43423, + 43424, + 43425, + 43426, + 43427, + 43428, + 43429, + 43430, + 43431, + 43432, + 43433, + 43434, + 43435, + 43436, + 43437, + 43438, + 43439, + 43440, + 43441, + 43442, + 43443, + 43444, + 43445, + 43446, + 43447, + 43448, + 43449, + 43450, + 43451, + 43452, + 43453, + 43454, + 43455, + 43456, + 43457, + 43458, + 43459, + 43460, + 43461, + 43462, + 43463, + 43464, + 43465, + 43466, + 43467, + 43468, + 43469, + 43470, + 43471, + 43472, + 43473, + 43474, + 43475, + 43476, + 43477, + 43478, + 43479, + 43480, + 43481, + 43482, + 43483, + 43484, + 43485, + 43486, + 43487, + 43488, + 43489, + 43490, + 43491, + 43492, + 43493, + 43494, + 43495, + 43496, + 43497, + 43498, + 43499, + 43500, + 43501, + 43502, + 43503, + 43504, + 43505, + 43506, + 43507, + 43508, + 43509, + 43510, + 43511, + 43512, + 43513, + 43514, + 43515, + 43516, + 43517, + 43518, + 43519, + 43520, + 43521, + 43522, + 43523, + 43524, + 43525, + 43526, + 43527, + 43528, + 43529, + 43530, + 43531, + 43532, + 43533, + 43534, + 43535, + 43536, + 43537, + 43538, + 43539, + 43540, + 43541, + 43542, + 43543, + 43544, + 43545, + 43546, + 43547, + 43548, + 43549, + 43550, + 43551, + 43552, + 43553, + 43554, + 43555, + 43556, + 43557, + 43558, + 43559, + 43560, + 43561, + 43562, + 43563, + 43564, + 43565, + 43566, + 43567, + 43568, + 43569, + 43570, + 43571, + 43572, + 43573, + 43574, + 43575, + 43576, + 43577, + 43578, + 43579, + 43580, + 43581, + 43582, + 43583, + 43584, + 43585, + 43586, + 43587, + 43588, + 43589, + 43590, + 43591, + 43592, + 43593, + 43594, + 43595, + 43596, + 43597, + 43598, + 43599, + 43600, + 43601, + 43602, + 43603, + 43604, + 43605, + 43606, + 43607, + 43608, + 43609, + 43610, + 43611, + 43612, + 43613, + 43614, + 43615, + 43616, + 43617, + 43618, + 43619, + 43620, + 43621, + 43622, + 43623, + 43624, + 43625, + 43626, + 43627, + 43628, + 43629, + 43630, + 43631, + 43632, + 43633, + 
43634, + 43635, + 43636, + 43637, + 43638, + 43639, + 43640, + 43641, + 43642, + 43643, + 43644, + 43645, + 43646, + 43647, + 43648, + 43649, + 43650, + 43651, + 43652, + 43653, + 43654, + 43655, + 43656, + 43657, + 43658, + 43659, + 43660, + 43661, + 43662, + 43663, + 43664, + 43665, + 43666, + 43667, + 43668, + 43669, + 43670, + 43671, + 43672, + 43673, + 43674, + 43675, + 43676, + 43677, + 43678, + 43679, + 43680, + 43681, + 43682, + 43683, + 43684, + 43685, + 43686, + 43687, + 43688, + 43689, + 43690, + 43691, + 43692, + 43693, + 43694, + 43695, + 43696, + 43697, + 43698, + 43699, + 43700, + 43701, + 43702, + 43703, + 43704, + 43705, + 43706, + 43707, + 43708, + 43709, + 43710, + 43711, + 43712, + 43713, + 43714, + 43715, + 43716, + 43717, + 43718, + 43719, + 43720, + 43721, + 43722, + 43723, + 43724, + 43725, + 43726, + 43727, + 43728, + 43729, + 43730, + 43731, + 43732, + 43733, + 43734, + 43735, + 43736, + 43737, + 43738, + 43739, + 43740, + 43741, + 43742, + 43743, + 43744, + 43745, + 43746, + 43747, + 43748, + 43749, + 43750, + 43751, + 43752, + 43753, + 43754, + 43755, + 43756, + 43757, + 43758, + 43759, + 43760, + 43761, + 43762, + 43763, + 43764, + 43765, + 43766, + 43767, + 43768, + 43769, + 43770, + 43771, + 43772, + 43773, + 43774, + 43775, + 43776, + 43777, + 43778, + 43779, + 43780, + 43781, + 43782, + 43783, + 43784, + 43785, + 43786, + 43787, + 43788, + 43789, + 43790, + 43791, + 43792, + 43793, + 43794, + 43795, + 43796, + 43797, + 43798, + 43799, + 43800, + 43801, + 43802, + 43803, + 43804, + 43805, + 43806, + 43807, + 43808, + 43809, + 43810, + 43811, + 43812, + 43813, + 43814, + 43815, + 43816, + 43817, + 43818, + 43819, + 43820, + 43821, + 43822, + 43823, + 43824, + 43825, + 43826, + 43827, + 43828, + 43829, + 43830, + 43831, + 43832, + 43833, + 43834, + 43835, + 43836, + 43837, + 43838, + 43839, + 43840, + 43841, + 43842, + 43843, + 43844, + 43845, + 43846, + 43847, + 43848, + 43849, + 43850, + 43851, + 43852, + 43853, + 43854, + 43855, + 
43856, + 43857, + 43858, + 43859, + 43860, + 43861, + 43862, + 43863, + 43864, + 43865, + 43866, + 43867, + 43868, + 43869, + 43870, + 43871, + 43872, + 43873, + 43874, + 43875, + 43876, + 43877, + 43878, + 43879, + 43880, + 43881, + 43882, + 43883, + 43884, + 43885, + 43886, + 43887, + 43888, + 43889, + 43890, + 43891, + 43892, + 43893, + 43894, + 43895, + 43896, + 43897, + 43898, + 43899, + 43900, + 43901, + 43902, + 43903, + 43904, + 43905, + 43906, + 43907, + 43908, + 43909, + 43910, + 43911, + 43912, + 43913, + 43914, + 43915, + 43916, + 43917, + 43918, + 43919, + 43920, + 43921, + 43922, + 43923, + 43924, + 43925, + 43926, + 43927, + 43928, + 43929, + 43930, + 43931, + 43932, + 43933, + 43934, + 43935, + 43936, + 43937, + 43938, + 43939, + 43940, + 43941, + 43942, + 43943, + 43944, + 43945, + 43946, + 43947, + 43948, + 43949, + 43950, + 43951, + 43952, + 43953, + 43954, + 43955, + 43956, + 43957, + 43958, + 43959, + 43960, + 43961, + 43962, + 43963, + 43964, + 43965, + 43966, + 43967, + 43968, + 43969, + 43970, + 43971, + 43972, + 43973, + 43974, + 43975, + 43976, + 43977, + 43978, + 43979, + 43980, + 43981, + 43982, + 43983, + 43984, + 43985, + 43986, + 43987, + 43988, + 43989, + 43990, + 43991, + 43992, + 43993, + 43994, + 43995, + 43996, + 43997, + 43998, + 43999, + 44000, + 44001, + 44002, + 44003, + 44004, + 44005, + 44006, + 44007, + 44008, + 44009, + 44010, + 44011, + 44012, + 44013, + 44014, + 44015, + 44016, + 44017, + 44018, + 44019, + 44020, + 44021, + 44022, + 44023, + 44024, + 44025, + 44026, + 44027, + 44028, + 44029, + 44030, + 44031, + 44032, + 44033, + 44034, + 44035, + 44036, + 44037, + 44038, + 44039, + 44040, + 44041, + 44042, + 44043, + 44044, + 44045, + 44046, + 44047, + 44048, + 44049, + 44050, + 44051, + 44052, + 44053, + 44054, + 44055, + 44056, + 44057, + 44058, + 44059, + 44060, + 44061, + 44062, + 44063, + 44064, + 44065, + 44066, + 44067, + 44068, + 44069, + 44070, + 44071, + 44072, + 44073, + 44074, + 44075, + 44076, + 44077, + 
44078, + 44079, + 44080, + 44081, + 44082, + 44083, + 44084, + 44085, + 44086, + 44087, + 44088, + 44089, + 44090, + 44091, + 44092, + 44093, + 44094, + 44095, + 44096, + 44097, + 44098, + 44099, + 44100, + 44101, + 44102, + 44103, + 44104, + 44105, + 44106, + 44107, + 44108, + 44109, + 44110, + 44111, + 44112, + 44113, + 44114, + 44115, + 44116, + 44117, + 44118, + 44119, + 44120, + 44121, + 44122, + 44123, + 44124, + 44125, + 44126, + 44127, + 44128, + 44129, + 44130, + 44131, + 44132, + 44133, + 44134, + 44135, + 44136, + 44137, + 44138, + 44139, + 44140, + 44141, + 44142, + 44143, + 44144, + 44145, + 44146, + 44147, + 44148, + 44149, + 44150, + 44151, + 44152, + 44153, + 44154, + 44155, + 44156, + 44157, + 44158, + 44159, + 44160, + 44161, + 44162, + 44163, + 44164, + 44165, + 44166, + 44167, + 44168, + 44169, + 44170, + 44171, + 44172, + 44173, + 44174, + 44175, + 44176, + 44177, + 44178, + 44179, + 44180, + 44181, + 44182, + 44183, + 44184, + 44185, + 44186, + 44187, + 44188, + 44189, + 44190, + 44191, + 44192, + 44193, + 44194, + 44195, + 44196, + 44197, + 44198, + 44199, + 44200, + 44201, + 44202, + 44203, + 44204, + 44205, + 44206, + 44207, + 44208, + 44209, + 44210, + 44211, + 44212, + 44213, + 44214, + 44215, + 44216, + 44217, + 44218, + 44219, + 44220, + 44221, + 44222, + 44223, + 44224, + 44225, + 44226, + 44227, + 44228, + 44229, + 44230, + 44231, + 44232, + 44233, + 44234, + 44235, + 44236, + 44237, + 44238, + 44239, + 44240, + 44241, + 44242, + 44243, + 44244, + 44245, + 44246, + 44247, + 44248, + 44249, + 44250, + 44251, + 44252, + 44253, + 44254, + 44255, + 44256, + 44257, + 44258, + 44259, + 44260, + 44261, + 44262, + 44263, + 44264, + 44265, + 44266, + 44267, + 44268, + 44269, + 44270, + 44271, + 44272, + 44273, + 44274, + 44275, + 44276, + 44277, + 44278, + 44279, + 44280, + 44281, + 44282, + 44283, + 44284, + 44285, + 44286, + 44287, + 44288, + 44289, + 44290, + 44291, + 44292, + 44293, + 44294, + 44295, + 44296, + 44297, + 44298, + 44299, + 
44300, + 44301, + 44302, + 44303, + 44304, + 44305, + 44306, + 44307, + 44308, + 44309, + 44310, + 44311, + 44312, + 44313, + 44314, + 44315, + 44316, + 44317, + 44318, + 44319, + 44320, + 44321, + 44322, + 44323, + 44324, + 44325, + 44326, + 44327, + 44328, + 44329, + 44330, + 44331, + 44332, + 44333, + 44334, + 44335, + 44336, + 44337, + 44338, + 44339, + 44340, + 44341, + 44342, + 44343, + 44344, + 44345, + 44346, + 44347, + 44348, + 44349, + 44350, + 44351, + 44352, + 44353, + 44354, + 44355, + 44356, + 44357, + 44358, + 44359, + 44360, + 44361, + 44362, + 44363, + 44364, + 44365, + 44366, + 44367, + 44368, + 44369, + 44370, + 44371, + 44372, + 44373, + 44374, + 44375, + 44376, + 44377, + 44378, + 44379, + 44380, + 44381, + 44382, + 44383, + 44384, + 44385, + 44386, + 44387, + 44388, + 44389, + 44390, + 44391, + 44392, + 44393, + 44394, + 44395, + 44396, + 44397, + 44398, + 44399, + 44400, + 44401, + 44402, + 44403, + 44404, + 44405, + 44406, + 44407, + 44408, + 44409, + 44410, + 44411, + 44412, + 44413, + 44414, + 44415, + 44416, + 44417, + 44418, + 44419, + 44420, + 44421, + 44422, + 44423, + 44424, + 44425, + 44426, + 44427, + 44428, + 44429, + 44430, + 44431, + 44432, + 44433, + 44434, + 44435, + 44436, + 44437, + 44438, + 44439, + 44440, + 44441, + 44442, + 44443, + 44444, + 44445, + 44446, + 44447, + 44448, + 44449, + 44450, + 44451, + 44452, + 44453, + 44454, + 44455, + 44456, + 44457, + 44458, + 44459, + 44460, + 44461, + 44462, + 44463, + 44464, + 44465, + 44466, + 44467, + 44468, + 44469, + 44470, + 44471, + 44472, + 44473, + 44474, + 44475, + 44476, + 44477, + 44478, + 44479, + 44480, + 44481, + 44482, + 44483, + 44484, + 44485, + 44486, + 44487, + 44488, + 44489, + 44490, + 44491, + 44492, + 44493, + 44494, + 44495, + 44496, + 44497, + 44498, + 44499, + 44500, + 44501, + 44502, + 44503, + 44504, + 44505, + 44506, + 44507, + 44508, + 44509, + 44510, + 44511, + 44512, + 44513, + 44514, + 44515, + 44516, + 44517, + 44518, + 44519, + 44520, + 44521, + 
44522, + 44523, + 44524, + 44525, + 44526, + 44527, + 44528, + 44529, + 44530, + 44531, + 44532, + 44533, + 44534, + 44535, + 44536, + 44537, + 44538, + 44539, + 44540, + 44541, + 44542, + 44543, + 44544, + 44545, + 44546, + 44547, + 44548, + 44549, + 44550, + 44551, + 44552, + 44553, + 44554, + 44555, + 44556, + 44557, + 44558, + 44559, + 44560, + 44561, + 44562, + 44563, + 44564, + 44565, + 44566, + 44567, + 44568, + 44569, + 44570, + 44571, + 44572, + 44573, + 44574, + 44575, + 44576, + 44577, + 44578, + 44579, + 44580, + 44581, + 44582, + 44583, + 44584, + 44585, + 44586, + 44587, + 44588, + 44589, + 44590, + 44591, + 44592, + 44593, + 44594, + 44595, + 44596, + 44597, + 44598, + 44599, + 44600, + 44601, + 44602, + 44603, + 44604, + 44605, + 44606, + 44607, + 44608, + 44609, + 44610, + 44611, + 44612, + 44613, + 44614, + 44615, + 44616, + 44617, + 44618, + 44619, + 44620, + 44621, + 44622, + 44623, + 44624, + 44625, + 44626, + 44627, + 44628, + 44629, + 44630, + 44631, + 44632, + 44633, + 44634, + 44635, + 44636, + 44637, + 44638, + 44639, + 44640, + 44641, + 44642, + 44643, + 44644, + 44645, + 44646, + 44647, + 44648, + 44649, + 44650, + 44651, + 44652, + 44653, + 44654, + 44655, + 44656, + 44657, + 44658, + 44659, + 44660, + 44661, + 44662, + 44663, + 44664, + 44665, + 44666, + 44667, + 44668, + 44669, + 44670, + 44671, + 44672, + 44673, + 44674, + 44675, + 44676, + 44677, + 44678, + 44679, + 44680, + 44681, + 44682, + 44683, + 44684, + 44685, + 44686, + 44687, + 44688, + 44689, + 44690, + 44691, + 44692, + 44693, + 44694, + 44695, + 44696, + 44697, + 44698, + 44699, + 44700, + 44701, + 44702, + 44703, + 44704, + 44705, + 44706, + 44707, + 44708, + 44709, + 44710, + 44711, + 44712, + 44713, + 44714, + 44715, + 44716, + 44717, + 44718, + 44719, + 44720, + 44721, + 44722, + 44723, + 44724, + 44725, + 44726, + 44727, + 44728, + 44729, + 44730, + 44731, + 44732, + 44733, + 44734, + 44735, + 44736, + 44737, + 44738, + 44739, + 44740, + 44741, + 44742, + 44743, + 
44744, + 44745, + 44746, + 44747, + 44748, + 44749, + 44750, + 44751, + 44752, + 44753, + 44754, + 44755, + 44756, + 44757, + 44758, + 44759, + 44760, + 44761, + 44762, + 44763, + 44764, + 44765, + 44766, + 44767, + 44768, + 44769, + 44770, + 44771, + 44772, + 44773, + 44774, + 44775, + 44776, + 44777, + 44778, + 44779, + 44780, + 44781, + 44782, + 44783, + 44784, + 44785, + 44786, + 44787, + 44788, + 44789, + 44790, + 44791, + 44792, + 44793, + 44794, + 44795, + 44796, + 44797, + 44798, + 44799, + 44800, + 44801, + 44802, + 44803, + 44804, + 44805, + 44806, + 44807, + 44808, + 44809, + 44810, + 44811, + 44812, + 44813, + 44814, + 44815, + 44816, + 44817, + 44818, + 44819, + 44820, + 44821, + 44822, + 44823, + 44824, + 44825, + 44826, + 44827, + 44828, + 44829, + 44830, + 44831, + 44832, + 44833, + 44834, + 44835, + 44836, + 44837, + 44838, + 44839, + 44840, + 44841, + 44842, + 44843, + 44844, + 44845, + 44846, + 44847, + 44848, + 44849, + 44850, + 44851, + 44852, + 44853, + 44854, + 44855, + 44856, + 44857, + 44858, + 44859, + 44860, + 44861, + 44862, + 44863, + 44864, + 44865, + 44866, + 44867, + 44868, + 44869, + 44870, + 44871, + 44872, + 44873, + 44874, + 44875, + 44876, + 44877, + 44878, + 44879, + 44880, + 44881, + 44882, + 44883, + 44884, + 44885, + 44886, + 44887, + 44888, + 44889, + 44890, + 44891, + 44892, + 44893, + 44894, + 44895, + 44896, + 44897, + 44898, + 44899, + 44900, + 44901, + 44902, + 44903, + 44904, + 44905, + 44906, + 44907, + 44908, + 44909, + 44910, + 44911, + 44912, + 44913, + 44914, + 44915, + 44916, + 44917, + 44918, + 44919, + 44920, + 44921, + 44922, + 44923, + 44924, + 44925, + 44926, + 44927, + 44928, + 44929, + 44930, + 44931, + 44932, + 44933, + 44934, + 44935, + 44936, + 44937, + 44938, + 44939, + 44940, + 44941, + 44942, + 44943, + 44944, + 44945, + 44946, + 44947, + 44948, + 44949, + 44950, + 44951, + 44952, + 44953, + 44954, + 44955, + 44956, + 44957, + 44958, + 44959, + 44960, + 44961, + 44962, + 44963, + 44964, + 44965, + 
44966, + 44967, + 44968, + 44969, + 44970, + 44971, + 44972, + 44973, + 44974, + 44975, + 44976, + 44977, + 44978, + 44979, + 44980, + 44981, + 44982, + 44983, + 44984, + 44985, + 44986, + 44987, + 44988, + 44989, + 44990, + 44991, + 44992, + 44993, + 44994, + 44995, + 44996, + 44997, + 44998, + 44999, + 45000, + 45001, + 45002, + 45003, + 45004, + 45005, + 45006, + 45007, + 45008, + 45009, + 45010, + 45011, + 45012, + 45013, + 45014, + 45015, + 45016, + 45017, + 45018, + 45019, + 45020, + 45021, + 45022, + 45023, + 45024, + 45025, + 45026, + 45027, + 45028, + 45029, + 45030, + 45031, + 45032, + 45033, + 45034, + 45035, + 45036, + 45037, + 45038, + 45039, + 45040, + 45041, + 45042, + 45043, + 45044, + 45045, + 45046, + 45047, + 45048, + 45049, + 45050, + 45051, + 45052, + 45053, + 45054, + 45055, + 45056, + 45057, + 45058, + 45059, + 45060, + 45061, + 45062, + 45063, + 45064, + 45065, + 45066, + 45067, + 45068, + 45069, + 45070, + 45071, + 45072, + 45073, + 45074, + 45075, + 45076, + 45077, + 45078, + 45079, + 45080, + 45081, + 45082, + 45083, + 45084, + 45085, + 45086, + 45087, + 45088, + 45089, + 45090, + 45091, + 45092, + 45093, + 45094, + 45095, + 45096, + 45097, + 45098, + 45099, + 45100, + 45101, + 45102, + 45103, + 45104, + 45105, + 45106, + 45107, + 45108, + 45109, + 45110, + 45111, + 45112, + 45113, + 45114, + 45115, + 45116, + 45117, + 45118, + 45119, + 45120, + 45121, + 45122, + 45123, + 45124, + 45125, + 45126, + 45127, + 45128, + 45129, + 45130, + 45131, + 45132, + 45133, + 45134, + 45135, + 45136, + 45137, + 45138, + 45139, + 45140, + 45141, + 45142, + 45143, + 45144, + 45145, + 45146, + 45147, + 45148, + 45149, + 45150, + 45151, + 45152, + 45153, + 45154, + 45155, + 45156, + 45157, + 45158, + 45159, + 45160, + 45161, + 45162, + 45163, + 45164, + 45165, + 45166, + 45167, + 45168, + 45169, + 45170, + 45171, + 45172, + 45173, + 45174, + 45175, + 45176, + 45177, + 45178, + 45179, + 45180, + 45181, + 45182, + 45183, + 45184, + 45185, + 45186, + 45187, + 
45188, + 45189, + 45190, + 45191, + 45192, + 45193, + 45194, + 45195, + 45196, + 45197, + 45198, + 45199, + 45200, + 45201, + 45202, + 45203, + 45204, + 45205, + 45206, + 45207, + 45208, + 45209, + 45210, + 45211, + 45212, + 45213, + 45214, + 45215, + 45216, + 45217, + 45218, + 45219, + 45220, + 45221, + 45222, + 45223, + 45224, + 45225, + 45226, + 45227, + 45228, + 45229, + 45230, + 45231, + 45232, + 45233, + 45234, + 45235, + 45236, + 45237, + 45238, + 45239, + 45240, + 45241, + 45242, + 45243, + 45244, + 45245, + 45246, + 45247, + 45248, + 45249, + 45250, + 45251, + 45252, + 45253, + 45254, + 45255, + 45256, + 45257, + 45258, + 45259, + 45260, + 45261, + 45262, + 45263, + 45264, + 45265, + 45266, + 45267, + 45268, + 45269, + 45270, + 45271, + 45272, + 45273, + 45274, + 45275, + 45276, + 45277, + 45278, + 45279, + 45280, + 45281, + 45282, + 45283, + 45284, + 45285, + 45286, + 45287, + 45288, + 45289, + 45290, + 45291, + 45292, + 45293, + 45294, + 45295, + 45296, + 45297, + 45298, + 45299, + 45300, + 45301, + 45302, + 45303, + 45304, + 45305, + 45306, + 45307, + 45308, + 45309, + 45310, + 45311, + 45312, + 45313, + 45314, + 45315, + 45316, + 45317, + 45318, + 45319, + 45320, + 45321, + 45322, + 45323, + 45324, + 45325, + 45326, + 45327, + 45328, + 45329, + 45330, + 45331, + 45332, + 45333, + 45334, + 45335, + 45336, + 45337, + 45338, + 45339, + 45340, + 45341, + 45342, + 45343, + 45344, + 45345, + 45346, + 45347, + 45348, + 45349, + 45350, + 45351, + 45352, + 45353, + 45354, + 45355, + 45356, + 45357, + 45358, + 45359, + 45360, + 45361, + 45362, + 45363, + 45364, + 45365, + 45366, + 45367, + 45368, + 45369, + 45370, + 45371, + 45372, + 45373, + 45374, + 45375, + 45376, + 45377, + 45378, + 45379, + 45380, + 45381, + 45382, + 45383, + 45384, + 45385, + 45386, + 45387, + 45388, + 45389, + 45390, + 45391, + 45392, + 45393, + 45394, + 45395, + 45396, + 45397, + 45398, + 45399, + 45400, + 45401, + 45402, + 45403, + 45404, + 45405, + 45406, + 45407, + 45408, + 45409, + 
45410, + 45411, + 45412, + 45413, + 45414, + 45415, + 45416, + 45417, + 45418, + 45419, + 45420, + 45421, + 45422, + 45423, + 45424, + 45425, + 45426, + 45427, + 45428, + 45429, + 45430, + 45431, + 45432, + 45433, + 45434, + 45435, + 45436, + 45437, + 45438, + 45439, + 45440, + 45441, + 45442, + 45443, + 45444, + 45445, + 45446, + 45447, + 45448, + 45449, + 45450, + 45451, + 45452, + 45453, + 45454, + 45455, + 45456, + 45457, + 45458, + 45459, + 45460, + 45461, + 45462, + 45463, + 45464, + 45465, + 45466, + 45467, + 45468, + 45469, + 45470, + 45471, + 45472, + 45473, + 45474, + 45475, + 45476, + 45477, + 45478, + 45479, + 45480, + 45481, + 45482, + 45483, + 45484, + 45485, + 45486, + 45487, + 45488, + 45489, + 45490, + 45491, + 45492, + 45493, + 45494, + 45495, + 45496, + 45497, + 45498, + 45499, + 45500, + 45501, + 45502, + 45503, + 45504, + 45505, + 45506, + 45507, + 45508, + 45509, + 45510, + 45511, + 45512, + 45513, + 45514, + 45515, + 45516, + 45517, + 45518, + 45519, + 45520, + 45521, + 45522, + 45523, + 45524, + 45525, + 45526, + 45527, + 45528, + 45529, + 45530, + 45531, + 45532, + 45533, + 45534, + 45535, + 45536, + 45537, + 45538, + 45539, + 45540, + 45541, + 45542, + 45543, + 45544, + 45545, + 45546, + 45547, + 45548, + 45549, + 45550, + 45551, + 45552, + 45553, + 45554, + 45555, + 45556, + 45557, + 45558, + 45559, + 45560, + 45561, + 45562, + 45563, + 45564, + 45565, + 45566, + 45567, + 45568, + 45569, + 45570, + 45571, + 45572, + 45573, + 45574, + 45575, + 45576, + 45577, + 45578, + 45579, + 45580, + 45581, + 45582, + 45583, + 45584, + 45585, + 45586, + 45587, + 45588, + 45589, + 45590, + 45591, + 45592, + 45593, + 45594, + 45595, + 45596, + 45597, + 45598, + 45599, + 45600, + 45601, + 45602, + 45603, + 45604, + 45605, + 45606, + 45607, + 45608, + 45609, + 45610, + 45611, + 45612, + 45613, + 45614, + 45615, + 45616, + 45617, + 45618, + 45619, + 45620, + 45621, + 45622, + 45623, + 45624, + 45625, + 45626, + 45627, + 45628, + 45629, + 45630, + 45631, + 
45632, + 45633, + 45634, + 45635, + 45636, + 45637, + 45638, + 45639, + 45640, + 45641, + 45642, + 45643, + 45644, + 45645, + 45646, + 45647, + 45648, + 45649, + 45650, + 45651, + 45652, + 45653, + 45654, + 45655, + 45656, + 45657, + 45658, + 45659, + 45660, + 45661, + 45662, + 45663, + 45664, + 45665, + 45666, + 45667, + 45668, + 45669, + 45670, + 45671, + 45672, + 45673, + 45674, + 45675, + 45676, + 45677, + 45678, + 45679, + 45680, + 45681, + 45682, + 45683, + 45684, + 45685, + 45686, + 45687, + 45688, + 45689, + 45690, + 45691, + 45692, + 45693, + 45694, + 45695, + 45696, + 45697, + 45698, + 45699, + 45700, + 45701, + 45702, + 45703, + 45704, + 45705, + 45706, + 45707, + 45708, + 45709, + 45710, + 45711, + 45712, + 45713, + 45714, + 45715, + 45716, + 45717, + 45718, + 45719, + 45720, + 45721, + 45722, + 45723, + 45724, + 45725, + 45726, + 45727, + 45728, + 45729, + 45730, + 45731, + 45732, + 45733, + 45734, + 45735, + 45736, + 45737, + 45738, + 45739, + 45740, + 45741, + 45742, + 45743, + 45744, + 45745, + 45746, + 45747, + 45748, + 45749, + 45750, + 45751, + 45752, + 45753, + 45754, + 45755, + 45756, + 45757, + 45758, + 45759, + 45760, + 45761, + 45762, + 45763, + 45764, + 45765, + 45766, + 45767, + 45768, + 45769, + 45770, + 45771, + 45772, + 45773, + 45774, + 45775, + 45776, + 45777, + 45778, + 45779, + 45780, + 45781, + 45782, + 45783, + 45784, + 45785, + 45786, + 45787, + 45788, + 45789, + 45790, + 45791, + 45792, + 45793, + 45794, + 45795, + 45796, + 45797, + 45798, + 45799, + 45800, + 45801, + 45802, + 45803, + 45804, + 45805, + 45806, + 45807, + 45808, + 45809, + 45810, + 45811, + 45812, + 45813, + 45814, + 45815, + 45816, + 45817, + 45818, + 45819, + 45820, + 45821, + 45822, + 45823, + 45824, + 45825, + 45826, + 45827, + 45828, + 45829, + 45830, + 45831, + 45832, + 45833, + 45834, + 45835, + 45836, + 45837, + 45838, + 45839, + 45840, + 45841, + 45842, + 45843, + 45844, + 45845, + 45846, + 45847, + 45848, + 45849, + 45850, + 45851, + 45852, + 45853, + 
45854, + 45855, + 45856, + 45857, + 45858, + 45859, + 45860, + 45861, + 45862, + 45863, + 45864, + 45865, + 45866, + 45867, + 45868, + 45869, + 45870, + 45871, + 45872, + 45873, + 45874, + 45875, + 45876, + 45877, + 45878, + 45879, + 45880, + 45881, + 45882, + 45883, + 45884, + 45885, + 45886, + 45887, + 45888, + 45889, + 45890, + 45891, + 45892, + 45893, + 45894, + 45895, + 45896, + 45897, + 45898, + 45899, + 45900, + 45901, + 45902, + 45903, + 45904, + 45905, + 45906, + 45907, + 45908, + 45909, + 45910, + 45911, + 45912, + 45913, + 45914, + 45915, + 45916, + 45917, + 45918, + 45919, + 45920, + 45921, + 45922, + 45923, + 45924, + 45925, + 45926, + 45927, + 45928, + 45929, + 45930, + 45931, + 45932, + 45933, + 45934, + 45935, + 45936, + 45937, + 45938, + 45939, + 45940, + 45941, + 45942, + 45943, + 45944, + 45945, + 45946, + 45947, + 45948, + 45949, + 45950, + 45951, + 45952, + 45953, + 45954, + 45955, + 45956, + 45957, + 45958, + 45959, + 45960, + 45961, + 45962, + 45963, + 45964, + 45965, + 45966, + 45967, + 45968, + 45969, + 45970, + 45971, + 45972, + 45973, + 45974, + 45975, + 45976, + 45977, + 45978, + 45979, + 45980, + 45981, + 45982, + 45983, + 45984, + 45985, + 45986, + 45987, + 45988, + 45989, + 45990, + 45991, + 45992, + 45993, + 45994, + 45995, + 45996, + 45997, + 45998, + 45999, + 46000, + 46001, + 46002, + 46003, + 46004, + 46005, + 46006, + 46007, + 46008, + 46009, + 46010, + 46011, + 46012, + 46013, + 46014, + 46015, + 46016, + 46017, + 46018, + 46019, + 46020, + 46021, + 46022, + 46023, + 46024, + 46025, + 46026, + 46027, + 46028, + 46029, + 46030, + 46031, + 46032, + 46033, + 46034, + 46035, + 46036, + 46037, + 46038, + 46039, + 46040, + 46041, + 46042, + 46043, + 46044, + 46045, + 46046, + 46047, + 46048, + 46049, + 46050, + 46051, + 46052, + 46053, + 46054, + 46055, + 46056, + 46057, + 46058, + 46059, + 46060, + 46061, + 46062, + 46063, + 46064, + 46065, + 46066, + 46067, + 46068, + 46069, + 46070, + 46071, + 46072, + 46073, + 46074, + 46075, + 
46076, + 46077, + 46078, + 46079, + 46080, + 46081, + 46082, + 46083, + 46084, + 46085, + 46086, + 46087, + 46088, + 46089, + 46090, + 46091, + 46092, + 46093, + 46094, + 46095, + 46096, + 46097, + 46098, + 46099, + 46100, + 46101, + 46102, + 46103, + 46104, + 46105, + 46106, + 46107, + 46108, + 46109, + 46110, + 46111, + 46112, + 46113, + 46114, + 46115, + 46116, + 46117, + 46118, + 46119, + 46120, + 46121, + 46122, + 46123, + 46124, + 46125, + 46126, + 46127, + 46128, + 46129, + 46130, + 46131, + 46132, + 46133, + 46134, + 46135, + 46136, + 46137, + 46138, + 46139, + 46140, + 46141, + 46142, + 46143, + 46144, + 46145, + 46146, + 46147, + 46148, + 46149, + 46150, + 46151, + 46152, + 46153, + 46154, + 46155, + 46156, + 46157, + 46158, + 46159, + 46160, + 46161, + 46162, + 46163, + 46164, + 46165, + 46166, + 46167, + 46168, + 46169, + 46170, + 46171, + 46172, + 46173, + 46174, + 46175, + 46176, + 46177, + 46178, + 46179, + 46180, + 46181, + 46182, + 46183, + 46184, + 46185, + 46186, + 46187, + 46188, + 46189, + 46190, + 46191, + 46192, + 46193, + 46194, + 46195, + 46196, + 46197, + 46198, + 46199, + 46200, + 46201, + 46202, + 46203, + 46204, + 46205, + 46206, + 46207, + 46208, + 46209, + 46210, + 46211, + 46212, + 46213, + 46214, + 46215, + 46216, + 46217, + 46218, + 46219, + 46220, + 46221, + 46222, + 46223, + 46224, + 46225, + 46226, + 46227, + 46228, + 46229, + 46230, + 46231, + 46232, + 46233, + 46234, + 46235, + 46236, + 46237, + 46238, + 46239, + 46240, + 46241, + 46242, + 46243, + 46244, + 46245, + 46246, + 46247, + 46248, + 46249, + 46250, + 46251, + 46252, + 46253, + 46254, + 46255, + 46256, + 46257, + 46258, + 46259, + 46260, + 46261, + 46262, + 46263, + 46264, + 46265, + 46266, + 46267, + 46268, + 46269, + 46270, + 46271, + 46272, + 46273, + 46274, + 46275, + 46276, + 46277, + 46278, + 46279, + 46280, + 46281, + 46282, + 46283, + 46284, + 46285, + 46286, + 46287, + 46288, + 46289, + 46290, + 46291, + 46292, + 46293, + 46294, + 46295, + 46296, + 46297, + 
46298, + 46299, + 46300, + 46301, + 46302, + 46303, + 46304, + 46305, + 46306, + 46307, + 46308, + 46309, + 46310, + 46311, + 46312, + 46313, + 46314, + 46315, + 46316, + 46317, + 46318, + 46319, + 46320, + 46321, + 46322, + 46323, + 46324, + 46325, + 46326, + 46327, + 46328, + 46329, + 46330, + 46331, + 46332, + 46333, + 46334, + 46335, + 46336, + 46337, + 46338, + 46339, + 46340, + 46341, + 46342, + 46343, + 46344, + 46345, + 46346, + 46347, + 46348, + 46349, + 46350, + 46351, + 46352, + 46353, + 46354, + 46355, + 46356, + 46357, + 46358, + 46359, + 46360, + 46361, + 46362, + 46363, + 46364, + 46365, + 46366, + 46367, + 46368, + 46369, + 46370, + 46371, + 46372, + 46373, + 46374, + 46375, + 46376, + 46377, + 46378, + 46379, + 46380, + 46381, + 46382, + 46383, + 46384, + 46385, + 46386, + 46387, + 46388, + 46389, + 46390, + 46391, + 46392, + 46393, + 46394, + 46395, + 46396, + 46397, + 46398, + 46399, + 46400, + 46401, + 46402, + 46403, + 46404, + 46405, + 46406, + 46407, + 46408, + 46409, + 46410, + 46411, + 46412, + 46413, + 46414, + 46415, + 46416, + 46417, + 46418, + 46419, + 46420, + 46421, + 46422, + 46423, + 46424, + 46425, + 46426, + 46427, + 46428, + 46429, + 46430, + 46431, + 46432, + 46433, + 46434, + 46435, + 46436, + 46437, + 46438, + 46439, + 46440, + 46441, + 46442, + 46443, + 46444, + 46445, + 46446, + 46447, + 46448, + 46449, + 46450, + 46451, + 46452, + 46453, + 46454, + 46455, + 46456, + 46457, + 46458, + 46459, + 46460, + 46461, + 46462, + 46463, + 46464, + 46465, + 46466, + 46467, + 46468, + 46469, + 46470, + 46471, + 46472, + 46473, + 46474, + 46475, + 46476, + 46477, + 46478, + 46479, + 46480, + 46481, + 46482, + 46483, + 46484, + 46485, + 46486, + 46487, + 46488, + 46489, + 46490, + 46491, + 46492, + 46493, + 46494, + 46495, + 46496, + 46497, + 46498, + 46499, + 46500, + 46501, + 46502, + 46503, + 46504, + 46505, + 46506, + 46507, + 46508, + 46509, + 46510, + 46511, + 46512, + 46513, + 46514, + 46515, + 46516, + 46517, + 46518, + 46519, + 
46520, + 46521, + 46522, + 46523, + 46524, + 46525, + 46526, + 46527, + 46528, + 46529, + 46530, + 46531, + 46532, + 46533, + 46534, + 46535, + 46536, + 46537, + 46538, + 46539, + 46540, + 46541, + 46542, + 46543, + 46544, + 46545, + 46546, + 46547, + 46548, + 46549, + 46550, + 46551, + 46552, + 46553, + 46554, + 46555, + 46556, + 46557, + 46558, + 46559, + 46560, + 46561, + 46562, + 46563, + 46564, + 46565, + 46566, + 46567, + 46568, + 46569, + 46570, + 46571, + 46572, + 46573, + 46574, + 46575, + 46576, + 46577, + 46578, + 46579, + 46580, + 46581, + 46582, + 46583, + 46584, + 46585, + 46586, + 46587, + 46588, + 46589, + 46590, + 46591, + 46592, + 46593, + 46594, + 46595, + 46596, + 46597, + 46598, + 46599, + 46600, + 46601, + 46602, + 46603, + 46604, + 46605, + 46606, + 46607, + 46608, + 46609, + 46610, + 46611, + 46612, + 46613, + 46614, + 46615, + 46616, + 46617, + 46618, + 46619, + 46620, + 46621, + 46622, + 46623, + 46624, + 46625, + 46626, + 46627, + 46628, + 46629, + 46630, + 46631, + 46632, + 46633, + 46634, + 46635, + 46636, + 46637, + 46638, + 46639, + 46640, + 46641, + 46642, + 46643, + 46644, + 46645, + 46646, + 46647, + 46648, + 46649, + 46650, + 46651, + 46652, + 46653, + 46654, + 46655, + 46656, + 46657, + 46658, + 46659, + 46660, + 46661, + 46662, + 46663, + 46664, + 46665, + 46666, + 46667, + 46668, + 46669, + 46670, + 46671, + 46672, + 46673, + 46674, + 46675, + 46676, + 46677, + 46678, + 46679, + 46680, + 46681, + 46682, + 46683, + 46684, + 46685, + 46686, + 46687, + 46688, + 46689, + 46690, + 46691, + 46692, + 46693, + 46694, + 46695, + 46696, + 46697, + 46698, + 46699, + 46700, + 46701, + 46702, + 46703, + 46704, + 46705, + 46706, + 46707, + 46708, + 46709, + 46710, + 46711, + 46712, + 46713, + 46714, + 46715, + 46716, + 46717, + 46718, + 46719, + 46720, + 46721, + 46722, + 46723, + 46724, + 46725, + 46726, + 46727, + 46728, + 46729, + 46730, + 46731, + 46732, + 46733, + 46734, + 46735, + 46736, + 46737, + 46738, + 46739, + 46740, + 46741, + 
46742, + 46743, + 46744, + 46745, + 46746, + 46747, + 46748, + 46749, + 46750, + 46751, + 46752, + 46753, + 46754, + 46755, + 46756, + 46757, + 46758, + 46759, + 46760, + 46761, + 46762, + 46763, + 46764, + 46765, + 46766, + 46767, + 46768, + 46769, + 46770, + 46771, + 46772, + 46773, + 46774, + 46775, + 46776, + 46777, + 46778, + 46779, + 46780, + 46781, + 46782, + 46783, + 46784, + 46785, + 46786, + 46787, + 46788, + 46789, + 46790, + 46791, + 46792, + 46793, + 46794, + 46795, + 46796, + 46797, + 46798, + 46799, + 46800, + 46801, + 46802, + 46803, + 46804, + 46805, + 46806, + 46807, + 46808, + 46809, + 46810, + 46811, + 46812, + 46813, + 46814, + 46815, + 46816, + 46817, + 46818, + 46819, + 46820, + 46821, + 46822, + 46823, + 46824, + 46825, + 46826, + 46827, + 46828, + 46829, + 46830, + 46831, + 46832, + 46833, + 46834, + 46835, + 46836, + 46837, + 46838, + 46839, + 46840, + 46841, + 46842, + 46843, + 46844, + 46845, + 46846, + 46847, + 46848, + 46849, + 46850, + 46851, + 46852, + 46853, + 46854, + 46855, + 46856, + 46857, + 46858, + 46859, + 46860, + 46861, + 46862, + 46863, + 46864, + 46865, + 46866, + 46867, + 46868, + 46869, + 46870, + 46871, + 46872, + 46873, + 46874, + 46875, + 46876, + 46877, + 46878, + 46879, + 46880, + 46881, + 46882, + 46883, + 46884, + 46885, + 46886, + 46887, + 46888, + 46889, + 46890, + 46891, + 46892, + 46893, + 46894, + 46895, + 46896, + 46897, + 46898, + 46899, + 46900, + 46901, + 46902, + 46903, + 46904, + 46905, + 46906, + 46907, + 46908, + 46909, + 46910, + 46911, + 46912, + 46913, + 46914, + 46915, + 46916, + 46917, + 46918, + 46919, + 46920, + 46921, + 46922, + 46923, + 46924, + 46925, + 46926, + 46927, + 46928, + 46929, + 46930, + 46931, + 46932, + 46933, + 46934, + 46935, + 46936, + 46937, + 46938, + 46939, + 46940, + 46941, + 46942, + 46943, + 46944, + 46945, + 46946, + 46947, + 46948, + 46949, + 46950, + 46951, + 46952, + 46953, + 46954, + 46955, + 46956, + 46957, + 46958, + 46959, + 46960, + 46961, + 46962, + 46963, + 
46964, + 46965, + 46966, + 46967, + 46968, + 46969, + 46970, + 46971, + 46972, + 46973, + 46974, + 46975, + 46976, + 46977, + 46978, + 46979, + 46980, + 46981, + 46982, + 46983, + 46984, + 46985, + 46986, + 46987, + 46988, + 46989, + 46990, + 46991, + 46992, + 46993, + 46994, + 46995, + 46996, + 46997, + 46998, + 46999, + 47000, + 47001, + 47002, + 47003, + 47004, + 47005, + 47006, + 47007, + 47008, + 47009, + 47010, + 47011, + 47012, + 47013, + 47014, + 47015, + 47016, + 47017, + 47018, + 47019, + 47020, + 47021, + 47022, + 47023, + 47024, + 47025, + 47026, + 47027, + 47028, + 47029, + 47030, + 47031, + 47032, + 47033, + 47034, + 47035, + 47036, + 47037, + 47038, + 47039, + 47040, + 47041, + 47042, + 47043, + 47044, + 47045, + 47046, + 47047, + 47048, + 47049, + 47050, + 47051, + 47052, + 47053, + 47054, + 47055, + 47056, + 47057, + 47058, + 47059, + 47060, + 47061, + 47062, + 47063, + 47064, + 47065, + 47066, + 47067, + 47068, + 47069, + 47070, + 47071, + 47072, + 47073, + 47074, + 47075, + 47076, + 47077, + 47078, + 47079, + 47080, + 47081, + 47082, + 47083, + 47084, + 47085, + 47086, + 47087, + 47088, + 47089, + 47090, + 47091, + 47092, + 47093, + 47094, + 47095, + 47096, + 47097, + 47098, + 47099, + 47100, + 47101, + 47102, + 47103, + 47104, + 47105, + 47106, + 47107, + 47108, + 47109, + 47110, + 47111, + 47112, + 47113, + 47114, + 47115, + 47116, + 47117, + 47118, + 47119, + 47120, + 47121, + 47122, + 47123, + 47124, + 47125, + 47126, + 47127, + 47128, + 47129, + 47130, + 47131, + 47132, + 47133, + 47134, + 47135, + 47136, + 47137, + 47138, + 47139, + 47140, + 47141, + 47142, + 47143, + 47144, + 47145, + 47146, + 47147, + 47148, + 47149, + 47150, + 47151, + 47152, + 47153, + 47154, + 47155, + 47156, + 47157, + 47158, + 47159, + 47160, + 47161, + 47162, + 47163, + 47164, + 47165, + 47166, + 47167, + 47168, + 47169, + 47170, + 47171, + 47172, + 47173, + 47174, + 47175, + 47176, + 47177, + 47178, + 47179, + 47180, + 47181, + 47182, + 47183, + 47184, + 47185, + 
47186, + 47187, + 47188, + 47189, + 47190, + 47191, + 47192, + 47193, + 47194, + 47195, + 47196, + 47197, + 47198, + 47199, + 47200, + 47201, + 47202, + 47203, + 47204, + 47205, + 47206, + 47207, + 47208, + 47209, + 47210, + 47211, + 47212, + 47213, + 47214, + 47215, + 47216, + 47217, + 47218, + 47219, + 47220, + 47221, + 47222, + 47223, + 47224, + 47225, + 47226, + 47227, + 47228, + 47229, + 47230, + 47231, + 47232, + 47233, + 47234, + 47235, + 47236, + 47237, + 47238, + 47239, + 47240, + 47241, + 47242, + 47243, + 47244, + 47245, + 47246, + 47247, + 47248, + 47249, + 47250, + 47251, + 47252, + 47253, + 47254, + 47255, + 47256, + 47257, + 47258, + 47259, + 47260, + 47261, + 47262, + 47263, + 47264, + 47265, + 47266, + 47267, + 47268, + 47269, + 47270, + 47271, + 47272, + 47273, + 47274, + 47275, + 47276, + 47277, + 47278, + 47279, + 47280, + 47281, + 47282, + 47283, + 47284, + 47285, + 47286, + 47287, + 47288, + 47289, + 47290, + 47291, + 47292, + 47293, + 47294, + 47295, + 47296, + 47297, + 47298, + 47299, + 47300, + 47301, + 47302, + 47303, + 47304, + 47305, + 47306, + 47307, + 47308, + 47309, + 47310, + 47311, + 47312, + 47313, + 47314, + 47315, + 47316, + 47317, + 47318, + 47319, + 47320, + 47321, + 47322, + 47323, + 47324, + 47325, + 47326, + 47327, + 47328, + 47329, + 47330, + 47331, + 47332, + 47333, + 47334, + 47335, + 47336, + 47337, + 47338, + 47339, + 47340, + 47341, + 47342, + 47343, + 47344, + 47345, + 47346, + 47347, + 47348, + 47349, + 47350, + 47351, + 47352, + 47353, + 47354, + 47355, + 47356, + 47357, + 47358, + 47359, + 47360, + 47361, + 47362, + 47363, + 47364, + 47365, + 47366, + 47367, + 47368, + 47369, + 47370, + 47371, + 47372, + 47373, + 47374, + 47375, + 47376, + 47377, + 47378, + 47379, + 47380, + 47381, + 47382, + 47383, + 47384, + 47385, + 47386, + 47387, + 47388, + 47389, + 47390, + 47391, + 47392, + 47393, + 47394, + 47395, + 47396, + 47397, + 47398, + 47399, + 47400, + 47401, + 47402, + 47403, + 47404, + 47405, + 47406, + 47407, + 
47408, + 47409, + 47410, + 47411, + 47412, + 47413, + 47414, + 47415, + 47416, + 47417, + 47418, + 47419, + 47420, + 47421, + 47422, + 47423, + 47424, + 47425, + 47426, + 47427, + 47428, + 47429, + 47430, + 47431, + 47432, + 47433, + 47434, + 47435, + 47436, + 47437, + 47438, + 47439, + 47440, + 47441, + 47442, + 47443, + 47444, + 47445, + 47446, + 47447, + 47448, + 47449, + 47450, + 47451, + 47452, + 47453, + 47454, + 47455, + 47456, + 47457, + 47458, + 47459, + 47460, + 47461, + 47462, + 47463, + 47464, + 47465, + 47466, + 47467, + 47468, + 47469, + 47470, + 47471, + 47472, + 47473, + 47474, + 47475, + 47476, + 47477, + 47478, + 47479, + 47480, + 47481, + 47482, + 47483, + 47484, + 47485, + 47486, + 47487, + 47488, + 47489, + 47490, + 47491, + 47492, + 47493, + 47494, + 47495, + 47496, + 47497, + 47498, + 47499, + 47500, + 47501, + 47502, + 47503, + 47504, + 47505, + 47506, + 47507, + 47508, + 47509, + 47510, + 47511, + 47512, + 47513, + 47514, + 47515, + 47516, + 47517, + 47518, + 47519, + 47520, + 47521, + 47522, + 47523, + 47524, + 47525, + 47526, + 47527, + 47528, + 47529, + 47530, + 47531, + 47532, + 47533, + 47534, + 47535, + 47536, + 47537, + 47538, + 47539, + 47540, + 47541, + 47542, + 47543, + 47544, + 47545, + 47546, + 47547, + 47548, + 47549, + 47550, + 47551, + 47552, + 47553, + 47554, + 47555, + 47556, + 47557, + 47558, + 47559, + 47560, + 47561, + 47562, + 47563, + 47564, + 47565, + 47566, + 47567, + 47568, + 47569, + 47570, + 47571, + 47572, + 47573, + 47574, + 47575, + 47576, + 47577, + 47578, + 47579, + 47580, + 47581, + 47582, + 47583, + 47584, + 47585, + 47586, + 47587, + 47588, + 47589, + 47590, + 47591, + 47592, + 47593, + 47594, + 47595, + 47596, + 47597, + 47598, + 47599, + 47600, + 47601, + 47602, + 47603, + 47604, + 47605, + 47606, + 47607, + 47608, + 47609, + 47610, + 47611, + 47612, + 47613, + 47614, + 47615, + 47616, + 47617, + 47618, + 47619, + 47620, + 47621, + 47622, + 47623, + 47624, + 47625, + 47626, + 47627, + 47628, + 47629, + 
47630, + 47631, + 47632, + 47633, + 47634, + 47635, + 47636, + 47637, + 47638, + 47639, + 47640, + 47641, + 47642, + 47643, + 47644, + 47645, + 47646, + 47647, + 47648, + 47649, + 47650, + 47651, + 47652, + 47653, + 47654, + 47655, + 47656, + 47657, + 47658, + 47659, + 47660, + 47661, + 47662, + 47663, + 47664, + 47665, + 47666, + 47667, + 47668, + 47669, + 47670, + 47671, + 47672, + 47673, + 47674, + 47675, + 47676, + 47677, + 47678, + 47679, + 47680, + 47681, + 47682, + 47683, + 47684, + 47685, + 47686, + 47687, + 47688, + 47689, + 47690, + 47691, + 47692, + 47693, + 47694, + 47695, + 47696, + 47697, + 47698, + 47699, + 47700, + 47701, + 47702, + 47703, + 47704, + 47705, + 47706, + 47707, + 47708, + 47709, + 47710, + 47711, + 47712, + 47713, + 47714, + 47715, + 47716, + 47717, + 47718, + 47719, + 47720, + 47721, + 47722, + 47723, + 47724, + 47725, + 47726, + 47727, + 47728, + 47729, + 47730, + 47731, + 47732, + 47733, + 47734, + 47735, + 47736, + 47737, + 47738, + 47739, + 47740, + 47741, + 47742, + 47743, + 47744, + 47745, + 47746, + 47747, + 47748, + 47749, + 47750, + 47751, + 47752, + 47753, + 47754, + 47755, + 47756, + 47757, + 47758, + 47759, + 47760, + 47761, + 47762, + 47763, + 47764, + 47765, + 47766, + 47767, + 47768, + 47769, + 47770, + 47771, + 47772, + 47773, + 47774, + 47775, + 47776, + 47777, + 47778, + 47779, + 47780, + 47781, + 47782, + 47783, + 47784, + 47785, + 47786, + 47787, + 47788, + 47789, + 47790, + 47791, + 47792, + 47793, + 47794, + 47795, + 47796, + 47797, + 47798, + 47799, + 47800, + 47801, + 47802, + 47803, + 47804, + 47805, + 47806, + 47807, + 47808, + 47809, + 47810, + 47811, + 47812, + 47813, + 47814, + 47815, + 47816, + 47817, + 47818, + 47819, + 47820, + 47821, + 47822, + 47823, + 47824, + 47825, + 47826, + 47827, + 47828, + 47829, + 47830, + 47831, + 47832, + 47833, + 47834, + 47835, + 47836, + 47837, + 47838, + 47839, + 47840, + 47841, + 47842, + 47843, + 47844, + 47845, + 47846, + 47847, + 47848, + 47849, + 47850, + 47851, + 
47852, + 47853, + 47854, + 47855, + 47856, + 47857, + 47858, + 47859, + 47860, + 47861, + 47862, + 47863, + 47864, + 47865, + 47866, + 47867, + 47868, + 47869, + 47870, + 47871, + 47872, + 47873, + 47874, + 47875, + 47876, + 47877, + 47878, + 47879, + 47880, + 47881, + 47882, + 47883, + 47884, + 47885, + 47886, + 47887, + 47888, + 47889, + 47890, + 47891, + 47892, + 47893, + 47894, + 47895, + 47896, + 47897, + 47898, + 47899, + 47900, + 47901, + 47902, + 47903, + 47904, + 47905, + 47906, + 47907, + 47908, + 47909, + 47910, + 47911, + 47912, + 47913, + 47914, + 47915, + 47916, + 47917, + 47918, + 47919, + 47920, + 47921, + 47922, + 47923, + 47924, + 47925, + 47926, + 47927, + 47928, + 47929, + 47930, + 47931, + 47932, + 47933, + 47934, + 47935, + 47936, + 47937, + 47938, + 47939, + 47940, + 47941, + 47942, + 47943, + 47944, + 47945, + 47946, + 47947, + 47948, + 47949, + 47950, + 47951, + 47952, + 47953, + 47954, + 47955, + 47956, + 47957, + 47958, + 47959, + 47960, + 47961, + 47962, + 47963, + 47964, + 47965, + 47966, + 47967, + 47968, + 47969, + 47970, + 47971, + 47972, + 47973, + 47974, + 47975, + 47976, + 47977, + 47978, + 47979, + 47980, + 47981, + 47982, + 47983, + 47984, + 47985, + 47986, + 47987, + 47988, + 47989, + 47990, + 47991, + 47992, + 47993, + 47994, + 47995, + 47996, + 47997, + 47998, + 47999, + 48000, + 48001, + 48002, + 48003, + 48004, + 48005, + 48006, + 48007, + 48008, + 48009, + 48010, + 48011, + 48012, + 48013, + 48014, + 48015, + 48016, + 48017, + 48018, + 48019, + 48020, + 48021, + 48022, + 48023, + 48024, + 48025, + 48026, + 48027, + 48028, + 48029, + 48030, + 48031, + 48032, + 48033, + 48034, + 48035, + 48036, + 48037, + 48038, + 48039, + 48040, + 48041, + 48042, + 48043, + 48044, + 48045, + 48046, + 48047, + 48048, + 48049, + 48050, + 48051, + 48052, + 48053, + 48054, + 48055, + 48056, + 48057, + 48058, + 48059, + 48060, + 48061, + 48062, + 48063, + 48064, + 48065, + 48066, + 48067, + 48068, + 48069, + 48070, + 48071, + 48072, + 48073, + 
48074, + 48075, + 48076, + 48077, + 48078, + 48079, + 48080, + 48081, + 48082, + 48083, + 48084, + 48085, + 48086, + 48087, + 48088, + 48089, + 48090, + 48091, + 48092, + 48093, + 48094, + 48095, + 48096, + 48097, + 48098, + 48099, + 48100, + 48101, + 48102, + 48103, + 48104, + 48105, + 48106, + 48107, + 48108, + 48109, + 48110, + 48111, + 48112, + 48113, + 48114, + 48115, + 48116, + 48117, + 48118, + 48119, + 48120, + 48121, + 48122, + 48123, + 48124, + 48125, + 48126, + 48127, + 48128, + 48129, + 48130, + 48131, + 48132, + 48133, + 48134, + 48135, + 48136, + 48137, + 48138, + 48139, + 48140, + 48141, + 48142, + 48143, + 48144, + 48145, + 48146, + 48147, + 48148, + 48149, + 48150, + 48151, + 48152, + 48153, + 48154, + 48155, + 48156, + 48157, + 48158, + 48159, + 48160, + 48161, + 48162, + 48163, + 48164, + 48165, + 48166, + 48167, + 48168, + 48169, + 48170, + 48171, + 48172, + 48173, + 48174, + 48175, + 48176, + 48177, + 48178, + 48179, + 48180, + 48181, + 48182, + 48183, + 48184, + 48185, + 48186, + 48187, + 48188, + 48189, + 48190, + 48191, + 48192, + 48193, + 48194, + 48195, + 48196, + 48197, + 48198, + 48199, + 48200, + 48201, + 48202, + 48203, + 48204, + 48205, + 48206, + 48207, + 48208, + 48209, + 48210, + 48211, + 48212, + 48213, + 48214, + 48215, + 48216, + 48217, + 48218, + 48219, + 48220, + 48221, + 48222, + 48223, + 48224, + 48225, + 48226, + 48227, + 48228, + 48229, + 48230, + 48231, + 48232, + 48233, + 48234, + 48235, + 48236, + 48237, + 48238, + 48239, + 48240, + 48241, + 48242, + 48243, + 48244, + 48245, + 48246, + 48247, + 48248, + 48249, + 48250, + 48251, + 48252, + 48253, + 48254, + 48255, + 48256, + 48257, + 48258, + 48259, + 48260, + 48261, + 48262, + 48263, + 48264, + 48265, + 48266, + 48267, + 48268, + 48269, + 48270, + 48271, + 48272, + 48273, + 48274, + 48275, + 48276, + 48277, + 48278, + 48279, + 48280, + 48281, + 48282, + 48283, + 48284, + 48285, + 48286, + 48287, + 48288, + 48289, + 48290, + 48291, + 48292, + 48293, + 48294, + 48295, + 
48296, + 48297, + 48298, + 48299, + 48300, + 48301, + 48302, + 48303, + 48304, + 48305, + 48306, + 48307, + 48308, + 48309, + 48310, + 48311, + 48312, + 48313, + 48314, + 48315, + 48316, + 48317, + 48318, + 48319, + 48320, + 48321, + 48322, + 48323, + 48324, + 48325, + 48326, + 48327, + 48328, + 48329, + 48330, + 48331, + 48332, + 48333, + 48334, + 48335, + 48336, + 48337, + 48338, + 48339, + 48340, + 48341, + 48342, + 48343, + 48344, + 48345, + 48346, + 48347, + 48348, + 48349, + 48350, + 48351, + 48352, + 48353, + 48354, + 48355, + 48356, + 48357, + 48358, + 48359, + 48360, + 48361, + 48362, + 48363, + 48364, + 48365, + 48366, + 48367, + 48368, + 48369, + 48370, + 48371, + 48372, + 48373, + 48374, + 48375, + 48376, + 48377, + 48378, + 48379, + 48380, + 48381, + 48382, + 48383, + 48384, + 48385, + 48386, + 48387, + 48388, + 48389, + 48390, + 48391, + 48392, + 48393, + 48394, + 48395, + 48396, + 48397, + 48398, + 48399, + 48400, + 48401, + 48402, + 48403, + 48404, + 48405, + 48406, + 48407, + 48408, + 48409, + 48410, + 48411, + 48412, + 48413, + 48414, + 48415, + 48416, + 48417, + 48418, + 48419, + 48420, + 48421, + 48422, + 48423, + 48424, + 48425, + 48426, + 48427, + 48428, + 48429, + 48430, + 48431, + 48432, + 48433, + 48434, + 48435, + 48436, + 48437, + 48438, + 48439, + 48440, + 48441, + 48442, + 48443, + 48444, + 48445, + 48446, + 48447, + 48448, + 48449, + 48450, + 48451, + 48452, + 48453, + 48454, + 48455, + 48456, + 48457, + 48458, + 48459, + 48460, + 48461, + 48462, + 48463, + 48464, + 48465, + 48466, + 48467, + 48468, + 48469, + 48470, + 48471, + 48472, + 48473, + 48474, + 48475, + 48476, + 48477, + 48478, + 48479, + 48480, + 48481, + 48482, + 48483, + 48484, + 48485, + 48486, + 48487, + 48488, + 48489, + 48490, + 48491, + 48492, + 48493, + 48494, + 48495, + 48496, + 48497, + 48498, + 48499, + 48500, + 48501, + 48502, + 48503, + 48504, + 48505, + 48506, + 48507, + 48508, + 48509, + 48510, + 48511, + 48512, + 48513, + 48514, + 48515, + 48516, + 48517, + 
48518, + 48519, + 48520, + 48521, + 48522, + 48523, + 48524, + 48525, + 48526, + 48527, + 48528, + 48529, + 48530, + 48531, + 48532, + 48533, + 48534, + 48535, + 48536, + 48537, + 48538, + 48539, + 48540, + 48541, + 48542, + 48543, + 48544, + 48545, + 48546, + 48547, + 48548, + 48549, + 48550, + 48551, + 48552, + 48553, + 48554, + 48555, + 48556, + 48557, + 48558, + 48559, + 48560, + 48561, + 48562, + 48563, + 48564, + 48565, + 48566, + 48567, + 48568, + 48569, + 48570, + 48571, + 48572, + 48573, + 48574, + 48575, + 48576, + 48577, + 48578, + 48579, + 48580, + 48581, + 48582, + 48583, + 48584, + 48585, + 48586, + 48587, + 48588, + 48589, + 48590, + 48591, + 48592, + 48593, + 48594, + 48595, + 48596, + 48597, + 48598, + 48599, + 48600, + 48601, + 48602, + 48603, + 48604, + 48605, + 48606, + 48607, + 48608, + 48609, + 48610, + 48611, + 48612, + 48613, + 48614, + 48615, + 48616, + 48617, + 48618, + 48619, + 48620, + 48621, + 48622, + 48623, + 48624, + 48625, + 48626, + 48627, + 48628, + 48629, + 48630, + 48631, + 48632, + 48633, + 48634, + 48635, + 48636, + 48637, + 48638, + 48639, + 48640, + 48641, + 48642, + 48643, + 48644, + 48645, + 48646, + 48647, + 48648, + 48649, + 48650, + 48651, + 48652, + 48653, + 48654, + 48655, + 48656, + 48657, + 48658, + 48659, + 48660, + 48661, + 48662, + 48663, + 48664, + 48665, + 48666, + 48667, + 48668, + 48669, + 48670, + 48671, + 48672, + 48673, + 48674, + 48675, + 48676, + 48677, + 48678, + 48679, + 48680, + 48681, + 48682, + 48683, + 48684, + 48685, + 48686, + 48687, + 48688, + 48689, + 48690, + 48691, + 48692, + 48693, + 48694, + 48695, + 48696, + 48697, + 48698, + 48699, + 48700, + 48701, + 48702, + 48703, + 48704, + 48705, + 48706, + 48707, + 48708, + 48709, + 48710, + 48711, + 48712, + 48713, + 48714, + 48715, + 48716, + 48717, + 48718, + 48719, + 48720, + 48721, + 48722, + 48723, + 48724, + 48725, + 48726, + 48727, + 48728, + 48729, + 48730, + 48731, + 48732, + 48733, + 48734, + 48735, + 48736, + 48737, + 48738, + 48739, + 
48740, + 48741, + 48742, + 48743, + 48744, + 48745, + 48746, + 48747, + 48748, + 48749, + 48750, + 48751, + 48752, + 48753, + 48754, + 48755, + 48756, + 48757, + 48758, + 48759, + 48760, + 48761, + 48762, + 48763, + 48764, + 48765, + 48766, + 48767, + 48768, + 48769, + 48770, + 48771, + 48772, + 48773, + 48774, + 48775, + 48776, + 48777, + 48778, + 48779, + 48780, + 48781, + 48782, + 48783, + 48784, + 48785, + 48786, + 48787, + 48788, + 48789, + 48790, + 48791, + 48792, + 48793, + 48794, + 48795, + 48796, + 48797, + 48798, + 48799, + 48800, + 48801, + 48802, + 48803, + 48804, + 48805, + 48806, + 48807, + 48808, + 48809, + 48810, + 48811, + 48812, + 48813, + 48814, + 48815, + 48816, + 48817, + 48818, + 48819, + 48820, + 48821, + 48822, + 48823, + 48824, + 48825, + 48826, + 48827, + 48828, + 48829, + 48830, + 48831, + 48832, + 48833, + 48834, + 48835, + 48836, + 48837, + 48838, + 48839, + 48840, + 48841, + 48842, + 48843, + 48844, + 48845, + 48846, + 48847, + 48848, + 48849, + 48850, + 48851, + 48852, + 48853, + 48854, + 48855, + 48856, + 48857, + 48858, + 48859, + 48860, + 48861, + 48862, + 48863, + 48864, + 48865, + 48866, + 48867, + 48868, + 48869, + 48870, + 48871, + 48872, + 48873, + 48874, + 48875, + 48876, + 48877, + 48878, + 48879, + 48880, + 48881, + 48882, + 48883, + 48884, + 48885, + 48886, + 48887, + 48888, + 48889, + 48890, + 48891, + 48892, + 48893, + 48894, + 48895, + 48896, + 48897, + 48898, + 48899, + 48900, + 48901, + 48902, + 48903, + 48904, + 48905, + 48906, + 48907, + 48908, + 48909, + 48910, + 48911, + 48912, + 48913, + 48914, + 48915, + 48916, + 48917, + 48918, + 48919, + 48920, + 48921, + 48922, + 48923, + 48924, + 48925, + 48926, + 48927, + 48928, + 48929, + 48930, + 48931, + 48932, + 48933, + 48934, + 48935, + 48936, + 48937, + 48938, + 48939, + 48940, + 48941, + 48942, + 48943, + 48944, + 48945, + 48946, + 48947, + 48948, + 48949, + 48950, + 48951, + 48952, + 48953, + 48954, + 48955, + 48956, + 48957, + 48958, + 48959, + 48960, + 48961, + 
48962, + 48963, + 48964, + 48965, + 48966, + 48967, + 48968, + 48969, + 48970, + 48971, + 48972, + 48973, + 48974, + 48975, + 48976, + 48977, + 48978, + 48979, + 48980, + 48981, + 48982, + 48983, + 48984, + 48985, + 48986, + 48987, + 48988, + 48989, + 48990, + 48991, + 48992, + 48993, + 48994, + 48995, + 48996, + 48997, + 48998, + 48999, + 49000, + 49001, + 49002, + 49003, + 49004, + 49005, + 49006, + 49007, + 49008, + 49009, + 49010, + 49011, + 49012, + 49013, + 49014, + 49015, + 49016, + 49017, + 49018, + 49019, + 49020, + 49021, + 49022, + 49023, + 49024, + 49025, + 49026, + 49027, + 49028, + 49029, + 49030, + 49031, + 49032, + 49033, + 49034, + 49035, + 49036, + 49037, + 49038, + 49039, + 49040, + 49041, + 49042, + 49043, + 49044, + 49045, + 49046, + 49047, + 49048, + 49049, + 49050, + 49051, + 49052, + 49053, + 49054, + 49055, + 49056, + 49057, + 49058, + 49059, + 49060, + 49061, + 49062, + 49063, + 49064, + 49065, + 49066, + 49067, + 49068, + 49069, + 49070, + 49071, + 49072, + 49073, + 49074, + 49075, + 49076, + 49077, + 49078, + 49079, + 49080, + 49081, + 49082, + 49083, + 49084, + 49085, + 49086, + 49087, + 49088, + 49089, + 49090, + 49091, + 49092, + 49093, + 49094, + 49095, + 49096, + 49097, + 49098, + 49099, + 49100, + 49101, + 49102, + 49103, + 49104, + 49105, + 49106, + 49107, + 49108, + 49109, + 49110, + 49111, + 49112, + 49113, + 49114, + 49115, + 49116, + 49117, + 49118, + 49119, + 49120, + 49121, + 49122, + 49123, + 49124, + 49125, + 49126, + 49127, + 49128, + 49129, + 49130, + 49131, + 49132, + 49133, + 49134, + 49135, + 49136, + 49137, + 49138, + 49139, + 49140, + 49141, + 49142, + 49143, + 49144, + 49145, + 49146, + 49147, + 49148, + 49149, + 49150, + 49151, + 49152, + 49153, + 49154, + 49155, + 49156, + 49157, + 49158, + 49159, + 49160, + 49161, + 49162, + 49163, + 49164, + 49165, + 49166, + 49167, + 49168, + 49169, + 49170, + 49171, + 49172, + 49173, + 49174, + 49175, + 49176, + 49177, + 49178, + 49179, + 49180, + 49181, + 49182, + 49183, + 
49184, + 49185, + 49186, + 49187, + 49188, + 49189, + 49190, + 49191, + 49192, + 49193, + 49194, + 49195, + 49196, + 49197, + 49198, + 49199, + 49200, + 49201, + 49202, + 49203, + 49204, + 49205, + 49206, + 49207, + 49208, + 49209, + 49210, + 49211, + 49212, + 49213, + 49214, + 49215, + 49216, + 49217, + 49218, + 49219, + 49220, + 49221, + 49222, + 49223, + 49224, + 49225, + 49226, + 49227, + 49228, + 49229, + 49230, + 49231, + 49232, + 49233, + 49234, + 49235, + 49236, + 49237, + 49238, + 49239, + 49240, + 49241, + 49242, + 49243, + 49244, + 49245, + 49246, + 49247, + 49248, + 49249, + 49250, + 49251, + 49252, + 49253, + 49254, + 49255, + 49256, + 49257, + 49258, + 49259, + 49260, + 49261, + 49262, + 49263, + 49264, + 49265, + 49266, + 49267, + 49268, + 49269, + 49270, + 49271, + 49272, + 49273, + 49274, + 49275, + 49276, + 49277, + 49278, + 49279, + 49280, + 49281, + 49282, + 49283, + 49284, + 49285, + 49286, + 49287, + 49288, + 49289, + 49290, + 49291, + 49292, + 49293, + 49294, + 49295, + 49296, + 49297, + 49298, + 49299, + 49300, + 49301, + 49302, + 49303, + 49304, + 49305, + 49306, + 49307, + 49308, + 49309, + 49310, + 49311, + 49312, + 49313, + 49314, + 49315, + 49316, + 49317, + 49318, + 49319, + 49320, + 49321, + 49322, + 49323, + 49324, + 49325, + 49326, + 49327, + 49328, + 49329, + 49330, + 49331, + 49332, + 49333, + 49334, + 49335, + 49336, + 49337, + 49338, + 49339, + 49340, + 49341, + 49342, + 49343, + 49344, + 49345, + 49346, + 49347, + 49348, + 49349, + 49350, + 49351, + 49352, + 49353, + 49354, + 49355, + 49356, + 49357, + 49358, + 49359, + 49360, + 49361, + 49362, + 49363, + 49364, + 49365, + 49366, + 49367, + 49368, + 49369, + 49370, + 49371, + 49372, + 49373, + 49374, + 49375, + 49376, + 49377, + 49378, + 49379, + 49380, + 49381, + 49382, + 49383, + 49384, + 49385, + 49386, + 49387, + 49388, + 49389, + 49390, + 49391, + 49392, + 49393, + 49394, + 49395, + 49396, + 49397, + 49398, + 49399, + 49400, + 49401, + 49402, + 49403, + 49404, + 49405, + 
49406, + 49407, + 49408, + 49409, + 49410, + 49411, + 49412, + 49413, + 49414, + 49415, + 49416, + 49417, + 49418, + 49419, + 49420, + 49421, + 49422, + 49423, + 49424, + 49425, + 49426, + 49427, + 49428, + 49429, + 49430, + 49431, + 49432, + 49433, + 49434, + 49435, + 49436, + 49437, + 49438, + 49439, + 49440, + 49441, + 49442, + 49443, + 49444, + 49445, + 49446, + 49447, + 49448, + 49449, + 49450, + 49451, + 49452, + 49453, + 49454, + 49455, + 49456, + 49457, + 49458, + 49459, + 49460, + 49461, + 49462, + 49463, + 49464, + 49465, + 49466, + 49467, + 49468, + 49469, + 49470, + 49471, + 49472, + 49473, + 49474, + 49475, + 49476, + 49477, + 49478, + 49479, + 49480, + 49481, + 49482, + 49483, + 49484, + 49485, + 49486, + 49487, + 49488, + 49489, + 49490, + 49491, + 49492, + 49493, + 49494, + 49495, + 49496, + 49497, + 49498, + 49499, + 49500, + 49501, + 49502, + 49503, + 49504, + 49505, + 49506, + 49507, + 49508, + 49509, + 49510, + 49511, + 49512, + 49513, + 49514, + 49515, + 49516, + 49517, + 49518, + 49519, + 49520, + 49521, + 49522, + 49523, + 49524, + 49525, + 49526, + 49527, + 49528, + 49529, + 49530, + 49531, + 49532, + 49533, + 49534, + 49535, + 49536, + 49537, + 49538, + 49539, + 49540, + 49541, + 49542, + 49543, + 49544, + 49545, + 49546, + 49547, + 49548, + 49549, + 49550, + 49551, + 49552, + 49553, + 49554, + 49555, + 49556, + 49557, + 49558, + 49559, + 49560, + 49561, + 49562, + 49563, + 49564, + 49565, + 49566, + 49567, + 49568, + 49569, + 49570, + 49571, + 49572, + 49573, + 49574, + 49575, + 49576, + 49577, + 49578, + 49579, + 49580, + 49581, + 49582, + 49583, + 49584, + 49585, + 49586, + 49587, + 49588, + 49589, + 49590, + 49591, + 49592, + 49593, + 49594, + 49595, + 49596, + 49597, + 49598, + 49599, + 49600, + 49601, + 49602, + 49603, + 49604, + 49605, + 49606, + 49607, + 49608, + 49609, + 49610, + 49611, + 49612, + 49613, + 49614, + 49615, + 49616, + 49617, + 49618, + 49619, + 49620, + 49621, + 49622, + 49623, + 49624, + 49625, + 49626, + 49627, + 
49628, + 49629, + 49630, + 49631, + 49632, + 49633, + 49634, + 49635, + 49636, + 49637, + 49638, + 49639, + 49640, + 49641, + 49642, + 49643, + 49644, + 49645, + 49646, + 49647, + 49648, + 49649, + 49650, + 49651, + 49652, + 49653, + 49654, + 49655, + 49656, + 49657, + 49658, + 49659, + 49660, + 49661, + 49662, + 49663, + 49664, + 49665, + 49666, + 49667, + 49668, + 49669, + 49670, + 49671, + 49672, + 49673, + 49674, + 49675, + 49676, + 49677, + 49678, + 49679, + 49680, + 49681, + 49682, + 49683, + 49684, + 49685, + 49686, + 49687, + 49688, + 49689, + 49690, + 49691, + 49692, + 49693, + 49694, + 49695, + 49696, + 49697, + 49698, + 49699, + 49700, + 49701, + 49702, + 49703, + 49704, + 49705, + 49706, + 49707, + 49708, + 49709, + 49710, + 49711, + 49712, + 49713, + 49714, + 49715, + 49716, + 49717, + 49718, + 49719, + 49720, + 49721, + 49722, + 49723, + 49724, + 49725, + 49726, + 49727, + 49728, + 49729, + 49730, + 49731, + 49732, + 49733, + 49734, + 49735, + 49736, + 49737, + 49738, + 49739, + 49740, + 49741, + 49742, + 49743, + 49744, + 49745, + 49746, + 49747, + 49748, + 49749, + 49750, + 49751, + 49752, + 49753, + 49754, + 49755, + 49756, + 49757, + 49758, + 49759, + 49760, + 49761, + 49762, + 49763, + 49764, + 49765, + 49766, + 49767, + 49768, + 49769, + 49770, + 49771, + 49772, + 49773, + 49774, + 49775, + 49776, + 49777, + 49778, + 49779, + 49780, + 49781, + 49782, + 49783, + 49784, + 49785, + 49786, + 49787, + 49788, + 49789, + 49790, + 49791, + 49792, + 49793, + 49794, + 49795, + 49796, + 49797, + 49798, + 49799, + 49800, + 49801, + 49802, + 49803, + 49804, + 49805, + 49806, + 49807, + 49808, + 49809, + 49810, + 49811, + 49812, + 49813, + 49814, + 49815, + 49816, + 49817, + 49818, + 49819, + 49820, + 49821, + 49822, + 49823, + 49824, + 49825, + 49826, + 49827, + 49828, + 49829, + 49830, + 49831, + 49832, + 49833, + 49834, + 49835, + 49836, + 49837, + 49838, + 49839, + 49840, + 49841, + 49842, + 49843, + 49844, + 49845, + 49846, + 49847, + 49848, + 49849, + 
49850, + 49851, + 49852, + 49853, + 49854, + 49855, + 49856, + 49857, + 49858, + 49859, + 49860, + 49861, + 49862, + 49863, + 49864, + 49865, + 49866, + 49867, + 49868, + 49869, + 49870, + 49871, + 49872, + 49873, + 49874, + 49875, + 49876, + 49877, + 49878, + 49879, + 49880, + 49881, + 49882, + 49883, + 49884, + 49885, + 49886, + 49887, + 49888, + 49889, + 49890, + 49891, + 49892, + 49893, + 49894, + 49895, + 49896, + 49897, + 49898, + 49899, + 49900, + 49901, + 49902, + 49903, + 49904, + 49905, + 49906, + 49907, + 49908, + 49909, + 49910, + 49911, + 49912, + 49913, + 49914, + 49915, + 49916, + 49917, + 49918, + 49919, + 49920, + 49921, + 49922, + 49923, + 49924, + 49925, + 49926, + 49927, + 49928, + 49929, + 49930, + 49931, + 49932, + 49933, + 49934, + 49935, + 49936, + 49937, + 49938, + 49939, + 49940, + 49941, + 49942, + 49943, + 49944, + 49945, + 49946, + 49947, + 49948, + 49949, + 49950, + 49951, + 49952, + 49953, + 49954, + 49955, + 49956, + 49957, + 49958, + 49959, + 49960, + 49961, + 49962, + 49963, + 49964, + 49965, + 49966, + 49967, + 49968, + 49969, + 49970, + 49971, + 49972, + 49973, + 49974, + 49975, + 49976, + 49977, + 49978, + 49979, + 49980, + 49981, + 49982, + 49983, + 49984, + 49985, + 49986, + 49987, + 49988, + 49989, + 49990, + 49991, + 49992, + 49993, + 49994, + 49995, + 49996, + 49997, + 49998, + 49999 + ], + "test": [], + "validation": [] +} \ No newline at end of file diff --git a/ResNet-CIFAR10/Classification-normal/dataset/info.json b/ResNet-CIFAR10/Classification-normal/dataset/info.json new file mode 100644 index 0000000000000000000000000000000000000000..e9906227f9926f9b42d8b2582e463f5e559b6229 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/dataset/info.json @@ -0,0 +1,4 @@ +{ + "model": "ResNet18", + "classes":["airplane", "automobile", "bird", "cat", "deer", "dog", "frog", "horse", "ship", "truck"] +} \ No newline at end of file diff --git a/ResNet-CIFAR10/Classification-normal/dataset/labels.npy 
b/ResNet-CIFAR10/Classification-normal/dataset/labels.npy new file mode 100644 index 0000000000000000000000000000000000000000..1bf2ef14b0937183818f5b93d26e18dcf376dd06 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/dataset/labels.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9dfee6f275bac0f14e63de8d1091cd1f4487a16d30c6d8726f61d1b8f999c745 +size 400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a299cb393db9ac2a4bc06edddb7b7ddbb5966215 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f221ffa7106ff2c59dab5967f958e1e4647fde14de5d3832225bea21d8bda49 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2f0a725cf2b24ac95f3cf833bb9291a7889537fc --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9321c5e70de5cec93d53d15e84eb6364a307101965c76cf03752a722c15a2807 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..c292d8edc839be632c797c0f969cad2f28187e22 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_1/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:444ff79612d3d5fc55402f24b112dc09ffacda378ad6be4c7d76ddbab44931e1 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/embeddings.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..f2ab3bb5b0bd5bdb7f97043c5a2177bc3e754413 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84fcddd08ad9ed3266577f4d4d34769c6f4fbd3df9fa10005a50a9df9742b980 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8398ec47797820c09de18f8be97010fc93cd21d3 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa35ca822a2dd71485b70eeaad1204145772b0353953585fe1e64e79e380d588 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..40043323e6d2bfafd741547363bb8fb7d27599aa --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_10/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:74992b9342087d18edf18786d99bad0047c291c104e539c11ba62e9053b9b7b4 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..be6003703998e70c95332023b4e827d398450fd3 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:535ea522718c672ff6d6b8054bb851bd08b75c8853acb3d2f11eadb788e5091c +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/model.pth 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..61283b1592d0f8ac654c768f1f0cdc646f5921be --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e439ce52c508ade5a4c88d1bda9c6f06a31900fb173d9d8e8f13e111fa387422 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..7d5aad1bb6b832f56c69a1d4c420c83caadf6e62 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_12/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:068582f86bf959a4a48cf5f50a476fd482f09bbba37ee2b541133ce99c09b01e +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2a240a09fdbdec53df5122e7e19339a4b8c532bb --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55c3cbec604afbdc644c7421c94c8e7ebfcf405f50ef748e1a61151ef8c2a65f +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5a653fa022f0e1ec038b422f03a95585478dac53 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a3a266953e3335a1c21b598721e30f77764685977bdd54c4300075eb076efd71 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/predictions.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..3e93b8c113ad0b6dac47324eb9d0326f67358612 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_14/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a3ad821b805954b3d7261c960671475d4a817e181c1dde8619ec254e58c311ab +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..7ac686f5357d977bbf96280ed079f0afd036c99f --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8e98de6fcf55cc5648c58af1969a0289e5bb42e7af2f4342389e8fa35b77792 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ac08304d2e7af5b82db2efa3e685da9085547f12 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:feac3b53d2c13eea52f824665a7dd0403c1abfb15af2f816f397de23a074c57a +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..687120d5f3da5bf5f427d32ddfab9a099c165b5d --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_16/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b03854447f8d2c47fb35c53033909e88028d37acf37264359c3c2b69d037ca53 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/embeddings.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a24d656b1069aa2383bd5055bc8ca464ebfda59b --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0d1af02da0a12114a9f5dfb19a5c59f6232733cdcc2b7f4569f3b754c5397fd +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..3856d18fffb6f3104be516afd4e92876d5cca239 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8136ca906dbe2132526ec283bc7e1acaf28a6acf1ebaea0aea27731b9cf99d7 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..186f1187476dcaf821f30904fc16c6c228738963 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_18/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:155e93eaa195de6f3a2a9379a47106384dc877b397bdd030daa31a21961f081e +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..2e38e8e21f854f8c305130228da9ac01f46c934b --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5823e66f8fab8b69965bed0c03c38eb6f9dc94ca5dc2d8ac768066f30e7b6a71 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/model.pth 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..9aeefa38c7c4b2bde32fb4ffc045c86905a71ba2 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5db1080007301432367189977b37205548dba9e08a3758f9f54b5cc167341cd4 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..cf8a990a7de5bbf4691e96ffcf37f7504fe2bb8f --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_2/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:30cc9e8ecdca9f69dc6a1db00ff740a793304c833cb10b59d81f45d0cf1bb627 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..8538e4a51ef82f4a970bd70ce583a76bd1c6dd1b --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e58ab4702b783a5bd194188e7e022f1cec82d02f9ef533d753101d5e88a8c29c +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..b738fc074c3229ef4b3bde04dadffa6e9a55cac8 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52b31bb1c250df07a4ff8b1bd0e198820c6ea680fe4f8a16de39f2fdd858b81f +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/predictions.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..0fe29d1237d57a5d5a6c902271fc1d09cbcf1dd5 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_20/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c5f1d86e4ebc7473bbf6081fb3c60389aaca9fe316714804c7b722bf2516e35 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..91814331ffbf80e58da2c39215539a840bab063b --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bebfbbc89406ba08d7738381ab8c2ecfcab2afb5f848f0235e56ac7c54e9dc8c +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4f99fc20b3fb3b00044a0f983cbb7095e1fdc1f2 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6ec48cc355581dc854b69e37c6a7da0983fbb095c750e006dce66fdd3d8d4291 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..e930cd56bf9a7ed15b4a3614f8a76e46db2d9668 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_22/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4231e68449e4f38c020317cb87006b4714a12a3aa908ac2c1905964a3ceebbf1 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/embeddings.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..0665552a7010c309c5ca3395c1c1e46230ee7238 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82092ca41bef0919df8f6a08dd20997bf916c1088a51641695824ccde17a950c +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d0b8ee15b8fb9dc8dee2a90b780f5e0b12456fce --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc35eb54ebaa15cf0050ce5ff120bcf4829740048e363b7e6af2ab19e6c62380 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..ec7e5d542e5197406a6737afa950af9fc3b303d9 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_24/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e38a6ce032e87e6352936ae1f25ecfa5f2dd53c0df41a48077b515fdc205eb63 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..a8dce1d15b12f9ea88223dc5f3e7c33cb4fe0519 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c6499b1dfc83728975694c7c2a5a0421d2cc5ed4e905352d8b779e3cd792bcfc +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/model.pth 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ed7b4f80159610943570516a95d432f7f7728165 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cee0de6d2868a43ad94289910873f381e36ee1bd340bea0971c81ccc8a77c135 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..21638e47ceea934115c95f580e85937f1fe566fa --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_26/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:54edcf9c36b504b0219d4cb16868699228b47982890fa51c1cd7724691a6683d +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..9e48fd74638b57f1cae7df70f7e8da0436e1d81b --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69b5d92bc134cef78137e60d503f04c23697fecee19533eb15318cbcadee6979 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..908d42aefaea0bfae0d61cb7f65d18a6c95291dc --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c38124d80f6bdf5d20835e19d3fa2d5ec8b7b4a6cad14a8be3f90d5ebcda81a +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/predictions.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..68b009cbbc4eca2dd16b1ffc5c185f5b099a9a67 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_28/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abb11208b91148c5549afcd1002acf2bab7d9ec4e05e7f1cdd4f4a5691c9de5e +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c5bd2fe594c2ed8479e79ef8e282601c4d8388f2 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bc83c27fef7f7a42f8fcc72e1b1ac285a5a28eb9b409862f0c74251fa2223c47 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ff3f25d854228231c7a1def8378215e2a416ee3e --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d71704901229f06b52a727381ad8df57b9a7f81d7e30637bc2c2be2525769c7 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..f7f228aad3f42dc718f7dbeed6bdc0b604d0e7c3 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_30/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a8ed77dca96cc0725000de699b8c6c5bc51421b2c4cd6b56797782c086b6de8 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/embeddings.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e1dd691b51c2ed9e2479581a209242103084126f --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:268788537df9b13fd4de53701212329a4339e93e288fe9adeccf737133b50059 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..304f17ae65ef3464a879d7764a99c28c5cb83d3a --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8a1a609a7e99c997cad36c05f9eecfb8ae38ca1c43156d56592d752ad9a298e +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..6ba3625e26ff7f0ddc32e1a11572dbcfab33e122 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_32/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec98fd1832aebf9da8a0763162e634e7c2c391bd2d0fde697a1cd63168014a06 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..fa80ca5d63fc8ef08026dfc3d41c1457b29aaff0 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9928dee95ce67726e1dbdf85723b70984f70a64cefc1702f83d12ed77ab12a49 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/model.pth 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c123305161b1938dbf5d3ab7e12e32b4e3fe1cbc --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:210ee2b5f72c88373fce78ef7110187489f2a9fc98886b8589e2d512a53f5392 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..d8d6a30f7ce857144d88ec8509e4b323a4c15694 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_34/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a8d52fe9411825e78a48d1006462a1c54c3efbf07abdd54c49dcb0ffcc68406 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..371d7eb9f3c36d881a76e838cedfd9c327680316 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ece85f72ea15a58bc4add15607eaef3f27c5a474354d5e287f46be31ab0613c8 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..5f62e09f4484d05e0f7d25ef3e6d7c76a24224ce --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79f4484aa7241149fded6412c659dddb20ffbd66345c24529a2f38af27348084 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/predictions.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..e572c2bc15c0864a048ed44c0fce1be4db1b5cf8 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_36/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:465435a87009122adcf9d0233ad1f02812bb7af334535358f13c5ab221f50d5f +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..581c0ea8e6a6cf5f27bef5004660427eba8f1227 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1103b5600152d02d7bed6a9b33037cc5e5aeadd432fd40b608a46d39140e22ed +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..19184a677cf77dd0993b91996d6599d7af292a41 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fcfb02edda9f11c073992beb1454b92e07d0395dbc2cdb44f569a6b9e6d557fd +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..ee49d95614d296feada0765bf2a78f71f9846a25 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_38/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:708e3ba0ed2635ddcd56fa649d7c847b4688653073ee611c7b781f981426a158 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/embeddings.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..c9db16156e94e79266c6a1424a8343124a1364db --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a735199dd94c8acdd028543b148ac9a0683dccfcbc987f7962aa43bde147b9c +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ac46ac2f75fcc83bc733891a6447fe28b9f4036e --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be87ccb483883ab9f9bbb438278f8a16d3e126954e47bcf892f6a8669d09ae31 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..e5ee0941c168ce0b773da3c0e62dc4b0c7cbe58f --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_4/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ca6db45f509253fbff43b7113ed3d707e29b2cacc609cffb19b9413e2c4038a +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..64edeac84989eaa6cf294ce652692878cb6adaf4 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3bbab19c28864a5b835429fa4ab711f5508dc5c014f23642e3ad9bf41c74e2fe +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/model.pth 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7a75d0c41c61389aadf0c8e0f6edff44b8964e8c --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8745a00372e62d341cf9a614db0d0dd338aee0eeec2894df4610e2f714d09c14 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..cd930cf6a6578d9a84f8539a3935670a4f5ac007 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_40/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67edcebf7e58658f37ca1d02d83cfd2c17d91731a0c682aa59aa79b79a05d870 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..e2360a7a160a3ab16cc8569c279a7235f541aca7 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d71c5655668ba58f70150363076ab76e9075b588d9d3f16577d81500c038ff0 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..642a6cff3503483e42b25c372a38e762a5cd48c0 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d76850b6195fa4666180faccb502295716e05143a9becb10e39443c17f86065b +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/predictions.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..8d631342b5bf1aac361b68a2148a913aeb7c626d --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_42/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2aea956bf3d973e9a95cbf723670175e3ff6cfe26ce7e79b1f0a1b6031a56929 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..47648e1979a12a9032571284f6f02d164354122a --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a41542176497e35919b1348cb483591e632d021f4f9e3755716403c5b93d647 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a0089ab02b03262a8f0bbdf8962f02b69c80af9b --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d55ff1ef867f4a2417c90a5b3d06f4107acaebcf7adf42ae6400b7efe32a9d99 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..cade7de4553543b8d533f53ad8f2ca1c1d835938 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_44/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52668a79041d8d5b5922327a167dcd52030ffa6693918318cae5f9ad2e2f6479 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/embeddings.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..b627905cc0de2f201fb45b3ff9ae37f8d8bfb060 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52d177bacda049f1479c2d4f210be8ce3bb8167b550503886df305979fa26265 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2c120685be0d0c55f8e7acea80930c34982fa808 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b936c64ff8b4e759c682c3e337897752c10ffc3ca5a73806999712591db52d2 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..1d22f3d497c0ceeb2727a84ba7892480e1bc00f1 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_46/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:180d99acd735d39325daf1a04ee7a2af16ad412bb9154c7a55ff743962262bc4 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..4ddc7dfe23da3bde9647ac8f3475b13ab6922e08 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:120ce4de7c781fef67b2ca73209071eba1a27ca2ed44b5041abc2f40bacc858a +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/model.pth 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..d4f6687c809e263274685e1767b969a8297914fd --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7eb3a97dbf144f7ccda7b61dec986493ba4ec336c780c0dbcc891be3989c2d9e +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..2a365fed69ca2ceb27963831706cc2931db5734a --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_48/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:73f83408a45494fbc5c8f1492ff1434b5fa424671b3e53c72c2a6a9303a2a750 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..36dfa251459d3ec749dc6eef77c010652dddd7d0 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:00bd503c2b15b4a24327b5ea56594bdfad525027fd4a6bfe192cc04987adabd3 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..8d5e8b2115df7adb6c980e04edf36c741e0ea082 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c68b4b71f1d4827495a2c8a0cabaeaed9633e0b9950e12181551ec7e70b04b2e +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/predictions.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..d856a92e0a6ee329a69e092af1de6a71399163e6 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_50/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:229030526d558d9ec69844d1c11e1a6bdd10f40dc7194230c1fe4781cc23615e +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/embeddings.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..697a84b6a80df1c24b32fe3e7b5f7582a6d6159c --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c77c6e74999b1f2ba850aa16d137cd8d6c36213cd83b2af00f13f4655dcd78c9 +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..12c811e4415b7a9e3c3e29cec32b451b115a98a6 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f222efaea18c7c07a948a652864fb2f1461bd4c94721f8c8d21c50d56ef76805 +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..fb791d5f5f66643506090694adb2b2aeb49d2fa5 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_6/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dd3c04ca3b47c18ec9154f8475c624c85aeeed058b6332e8ca73f3b6862ebd7e +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/embeddings.npy 
b/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/embeddings.npy new file mode 100644 index 0000000000000000000000000000000000000000..623848fdab4428a7cc358e0e769d9e2a2fe454ee --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/embeddings.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dd10804e6dbba1970b8d24346b9990ab8e9507d3c398ed22e684681c8a64b8bd +size 102400128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/model.pth b/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/model.pth new file mode 100644 index 0000000000000000000000000000000000000000..c483eb6e7c54f2784c1977db4848b18bff7d7eef --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6830bceef55ec5964a0259616864ac05ecbbef917287ed571a27314f4d903d1c +size 44769410 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/predictions.npy b/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/predictions.npy new file mode 100644 index 0000000000000000000000000000000000000000..ccfcd840d546cf0143405403f077961e4f2283b3 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/epoch_8/predictions.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8e6c40ebbaef61009897c3bee2bb6a24c096ec0698125eb021977a5cba418e3 +size 2000128 diff --git a/ResNet-CIFAR10/Classification-normal/epochs/layer_info.json b/ResNet-CIFAR10/Classification-normal/epochs/layer_info.json new file mode 100644 index 0000000000000000000000000000000000000000..831ec605ce2b201b006a9b80451f476a004570b1 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/layer_info.json @@ -0,0 +1 @@ +{"layer_id": "avg_pool", "dim": 512} \ No newline at end of file diff --git a/ResNet-CIFAR10/Classification-normal/epochs/train.log b/ResNet-CIFAR10/Classification-normal/epochs/train.log new file mode 100644 index 
0000000000000000000000000000000000000000..b1a7c2345932c933921989986f72aa880e752212 --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/epochs/train.log @@ -0,0 +1,53 @@ +2025-04-19 15:56:27,733 - train - INFO - 开始训练 ResNet18 +2025-04-19 15:56:27,734 - train - INFO - 总轮数: 50, 学习率: 0.1, 设备: cuda:0 +2025-04-19 15:56:38,756 - train - INFO - Epoch: 1 | Train Loss: 1.926 | Train Acc: 29.84% | Test Loss: 1.537 | Test Acc: 42.91% +2025-04-19 15:56:56,918 - train - INFO - Epoch: 2 | Train Loss: 1.391 | Train Acc: 49.07% | Test Loss: 1.202 | Test Acc: 56.37% +2025-04-19 15:57:15,338 - train - INFO - Epoch: 3 | Train Loss: 1.068 | Train Acc: 61.45% | Test Loss: 1.117 | Test Acc: 61.41% +2025-04-19 15:57:25,898 - train - INFO - Epoch: 4 | Train Loss: 0.828 | Train Acc: 70.92% | Test Loss: 0.827 | Test Acc: 71.07% +2025-04-19 15:57:44,323 - train - INFO - Epoch: 5 | Train Loss: 0.686 | Train Acc: 76.14% | Test Loss: 0.706 | Test Acc: 75.83% +2025-04-19 15:57:54,794 - train - INFO - Epoch: 6 | Train Loss: 0.605 | Train Acc: 79.05% | Test Loss: 0.666 | Test Acc: 78.02% +2025-04-19 15:58:13,400 - train - INFO - Epoch: 7 | Train Loss: 0.557 | Train Acc: 80.82% | Test Loss: 0.758 | Test Acc: 74.84% +2025-04-19 15:58:24,211 - train - INFO - Epoch: 8 | Train Loss: 0.523 | Train Acc: 82.10% | Test Loss: 0.561 | Test Acc: 81.16% +2025-04-19 15:58:42,583 - train - INFO - Epoch: 9 | Train Loss: 0.494 | Train Acc: 83.25% | Test Loss: 0.677 | Test Acc: 77.88% +2025-04-19 15:58:53,155 - train - INFO - Epoch: 10 | Train Loss: 0.468 | Train Acc: 83.98% | Test Loss: 0.674 | Test Acc: 78.17% +2025-04-19 15:59:11,517 - train - INFO - Epoch: 11 | Train Loss: 0.455 | Train Acc: 84.35% | Test Loss: 0.537 | Test Acc: 81.69% +2025-04-19 15:59:21,661 - train - INFO - Epoch: 12 | Train Loss: 0.431 | Train Acc: 85.27% | Test Loss: 0.696 | Test Acc: 77.96% +2025-04-19 15:59:39,672 - train - INFO - Epoch: 13 | Train Loss: 0.414 | Train Acc: 85.80% | Test Loss: 0.466 | Test Acc: 84.24% +2025-04-19 
15:59:50,271 - train - INFO - Epoch: 14 | Train Loss: 0.402 | Train Acc: 86.21% | Test Loss: 0.526 | Test Acc: 82.67% +2025-04-19 16:00:08,505 - train - INFO - Epoch: 15 | Train Loss: 0.387 | Train Acc: 86.76% | Test Loss: 0.494 | Test Acc: 83.42% +2025-04-19 16:00:19,057 - train - INFO - Epoch: 16 | Train Loss: 0.375 | Train Acc: 87.13% | Test Loss: 0.483 | Test Acc: 83.80% +2025-04-19 16:00:37,688 - train - INFO - Epoch: 17 | Train Loss: 0.356 | Train Acc: 87.79% | Test Loss: 0.479 | Test Acc: 83.95% +2025-04-19 16:00:48,191 - train - INFO - Epoch: 18 | Train Loss: 0.344 | Train Acc: 88.32% | Test Loss: 0.548 | Test Acc: 81.28% +2025-04-19 16:01:06,303 - train - INFO - Epoch: 19 | Train Loss: 0.337 | Train Acc: 88.39% | Test Loss: 0.646 | Test Acc: 80.06% +2025-04-19 16:01:17,008 - train - INFO - Epoch: 20 | Train Loss: 0.326 | Train Acc: 88.86% | Test Loss: 0.531 | Test Acc: 82.93% +2025-04-19 16:01:35,532 - train - INFO - Epoch: 21 | Train Loss: 0.312 | Train Acc: 89.34% | Test Loss: 0.488 | Test Acc: 83.91% +2025-04-19 16:01:46,199 - train - INFO - Epoch: 22 | Train Loss: 0.298 | Train Acc: 89.77% | Test Loss: 0.448 | Test Acc: 85.26% +2025-04-19 16:02:04,386 - train - INFO - Epoch: 23 | Train Loss: 0.286 | Train Acc: 90.17% | Test Loss: 0.441 | Test Acc: 85.51% +2025-04-19 16:02:14,888 - train - INFO - Epoch: 24 | Train Loss: 0.278 | Train Acc: 90.53% | Test Loss: 0.410 | Test Acc: 86.16% +2025-04-19 16:02:33,273 - train - INFO - Epoch: 25 | Train Loss: 0.262 | Train Acc: 91.04% | Test Loss: 0.391 | Test Acc: 86.95% +2025-04-19 16:02:43,650 - train - INFO - Epoch: 26 | Train Loss: 0.247 | Train Acc: 91.54% | Test Loss: 0.350 | Test Acc: 88.46% +2025-04-19 16:03:01,907 - train - INFO - Epoch: 27 | Train Loss: 0.235 | Train Acc: 91.90% | Test Loss: 0.384 | Test Acc: 87.65% +2025-04-19 16:03:12,565 - train - INFO - Epoch: 28 | Train Loss: 0.219 | Train Acc: 92.52% | Test Loss: 0.349 | Test Acc: 88.26% +2025-04-19 16:03:31,406 - train - INFO - Epoch: 29 | Train 
Loss: 0.201 | Train Acc: 93.11% | Test Loss: 0.461 | Test Acc: 85.69% +2025-04-19 16:03:42,204 - train - INFO - Epoch: 30 | Train Loss: 0.198 | Train Acc: 93.19% | Test Loss: 0.349 | Test Acc: 88.51% +2025-04-19 16:04:00,528 - train - INFO - Epoch: 31 | Train Loss: 0.180 | Train Acc: 93.77% | Test Loss: 0.345 | Test Acc: 88.90% +2025-04-19 16:04:11,210 - train - INFO - Epoch: 32 | Train Loss: 0.164 | Train Acc: 94.38% | Test Loss: 0.351 | Test Acc: 89.29% +2025-04-19 16:04:29,421 - train - INFO - Epoch: 33 | Train Loss: 0.147 | Train Acc: 94.94% | Test Loss: 0.309 | Test Acc: 90.04% +2025-04-19 16:04:39,909 - train - INFO - Epoch: 34 | Train Loss: 0.137 | Train Acc: 95.23% | Test Loss: 0.303 | Test Acc: 90.20% +2025-04-19 16:04:57,980 - train - INFO - Epoch: 35 | Train Loss: 0.118 | Train Acc: 96.02% | Test Loss: 0.275 | Test Acc: 91.58% +2025-04-19 16:05:08,411 - train - INFO - Epoch: 36 | Train Loss: 0.103 | Train Acc: 96.46% | Test Loss: 0.300 | Test Acc: 90.59% +2025-04-19 16:05:27,044 - train - INFO - Epoch: 37 | Train Loss: 0.086 | Train Acc: 97.24% | Test Loss: 0.249 | Test Acc: 92.38% +2025-04-19 16:05:37,653 - train - INFO - Epoch: 38 | Train Loss: 0.078 | Train Acc: 97.37% | Test Loss: 0.273 | Test Acc: 91.86% +2025-04-19 16:05:56,031 - train - INFO - Epoch: 39 | Train Loss: 0.060 | Train Acc: 98.01% | Test Loss: 0.277 | Test Acc: 91.82% +2025-04-19 16:06:06,632 - train - INFO - Epoch: 40 | Train Loss: 0.046 | Train Acc: 98.53% | Test Loss: 0.238 | Test Acc: 93.21% +2025-04-19 16:06:24,636 - train - INFO - Epoch: 41 | Train Loss: 0.036 | Train Acc: 98.82% | Test Loss: 0.248 | Test Acc: 93.00% +2025-04-19 16:06:35,138 - train - INFO - Epoch: 42 | Train Loss: 0.026 | Train Acc: 99.26% | Test Loss: 0.223 | Test Acc: 93.63% +2025-04-19 16:06:53,263 - train - INFO - Epoch: 43 | Train Loss: 0.019 | Train Acc: 99.45% | Test Loss: 0.231 | Test Acc: 93.49% +2025-04-19 16:07:03,814 - train - INFO - Epoch: 44 | Train Loss: 0.015 | Train Acc: 99.62% | Test Loss: 
0.219 | Test Acc: 93.88% +2025-04-19 16:07:22,127 - train - INFO - Epoch: 45 | Train Loss: 0.011 | Train Acc: 99.76% | Test Loss: 0.211 | Test Acc: 94.20% +2025-04-19 16:07:32,623 - train - INFO - Epoch: 46 | Train Loss: 0.010 | Train Acc: 99.78% | Test Loss: 0.208 | Test Acc: 94.17% +2025-04-19 16:07:51,118 - train - INFO - Epoch: 47 | Train Loss: 0.009 | Train Acc: 99.83% | Test Loss: 0.213 | Test Acc: 94.20% +2025-04-19 16:08:01,638 - train - INFO - Epoch: 48 | Train Loss: 0.008 | Train Acc: 99.85% | Test Loss: 0.209 | Test Acc: 94.21% +2025-04-19 16:08:19,934 - train - INFO - Epoch: 49 | Train Loss: 0.007 | Train Acc: 99.86% | Test Loss: 0.207 | Test Acc: 94.19% +2025-04-19 16:08:30,462 - train - INFO - Epoch: 50 | Train Loss: 0.007 | Train Acc: 99.86% | Test Loss: 0.210 | Test Acc: 94.21% +2025-04-19 16:08:37,994 - train - INFO - 训练完成! diff --git a/ResNet-CIFAR10/Classification-normal/readme.md b/ResNet-CIFAR10/Classification-normal/readme.md new file mode 100644 index 0000000000000000000000000000000000000000..8c611c0bd991ee78b89642d8ff65656fd88e463f --- /dev/null +++ b/ResNet-CIFAR10/Classification-normal/readme.md @@ -0,0 +1,54 @@ +# ResNet-CIFAR10 训练与特征提取 + +这个项目实现了ResNet模型在CIFAR10数据集上的训练,并集成了特征提取和可视化所需的功能。 + +## time_travel_saver数据提取器 +```python + #保存可视化训练过程所需要的文件 + if (epoch + 1) % interval == 0 or (epoch == 0): + # 创建一个专门用于收集embedding的顺序dataloader + ordered_trainloader = torch.utils.data.DataLoader( + trainloader.dataset, + batch_size=trainloader.batch_size, + shuffle=False, + num_workers=trainloader.num_workers + ) + epoch_save_dir = os.path.join(save_dir, f'epoch_{epoch+1}') #epoch保存路径 + save_model = time_travel_saver(model, ordered_trainloader, device, epoch_save_dir, model_name, + show=True, layer_name='avg_pool', auto_save_embedding=True) + #show:是否显示模型的维度信息 + #layer_name:选择要提取特征的层,如果为None,则提取符合维度范围的层 + #auto_save_embedding:是否自动保存特征向量 must be True + save_model.save_checkpoint_embeddings_predictions() #保存模型权重、特征向量和预测结果到epoch_x + if epoch == 0: + 
def get_cifar10_dataloaders(batch_size=128, num_workers=2, local_dataset_path=None, shuffle=False):
    """Build CIFAR-10 train/test DataLoaders.

    Args:
        batch_size: samples per batch for both loaders.
        num_workers: number of worker processes for data loading.
        local_dataset_path: directory expected to contain
            ``cifar-10-batches-py``; when given, the dataset is downloaded
            only if that folder is missing or empty. When omitted, the
            dataset is downloaded into ``../dataset``.
        shuffle: whether both loaders shuffle (default False so sample
            order stays stable, e.g. for embedding extraction).

    Returns:
        (trainloader, testloader): a pair of torch.utils.data.DataLoader.
    """
    # Training transform: standard CIFAR-10 augmentation plus per-channel
    # normalization with the dataset's mean/std.
    transform_train = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])

    # Test transform: normalization only, no augmentation.
    transform_test = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])

    # Resolve the dataset location and whether a download is needed.
    if local_dataset_path:
        print(f"使用本地数据集: {local_dataset_path}")
        # Download only when the extracted batches folder is absent or empty.
        cifar_path = os.path.join(local_dataset_path, 'cifar-10-batches-py')
        download = not os.path.exists(cifar_path) or not os.listdir(cifar_path)
        dataset_path = local_dataset_path
    else:
        print("未指定本地数据集路径,将下载数据集")
        download = True
        dataset_path = '../dataset'

    # exist_ok=True avoids the check-then-create race of the previous
    # `if not os.path.exists(...): os.makedirs(...)` pattern.
    os.makedirs(dataset_path, exist_ok=True)

    trainset = torchvision.datasets.CIFAR10(
        root=dataset_path, train=True, download=download, transform=transform_train)
    trainloader = torch.utils.data.DataLoader(
        trainset, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers)

    testset = torchvision.datasets.CIFAR10(
        root=dataset_path, train=False, download=download, transform=transform_test)
    testloader = torch.utils.data.DataLoader(
        testset, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers)

    return trainloader, testloader
arXiv:1512.03385 +''' +import torch +import torch.nn as nn + +class BasicBlock(nn.Module): + """基础残差块 + + 用于ResNet18/34等浅层网络。结构为: + x -> Conv -> BN -> ReLU -> Conv -> BN -> (+) -> ReLU + |------------------------------------------| + + Args: + in_channels: 输入通道数 + out_channels: 输出通道数 + stride: 步长,用于下采样,默认为1 + + 注意:基础模块没有通道压缩,expansion=1 + """ + expansion = 1 + + def __init__(self, in_channels, out_channels, stride=1): + super(BasicBlock,self).__init__() + self.features = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(True), + nn.Conv2d(out_channels,out_channels, kernel_size=3, stride=1, padding=1, bias=False), + nn.BatchNorm2d(out_channels) + ) + + # 如果输入输出维度不等,则使用1x1卷积层来改变维度 + self.shortcut = nn.Sequential() + if stride != 1 or in_channels != self.expansion * out_channels: + self.shortcut = nn.Sequential( + nn.Conv2d(in_channels, self.expansion * out_channels, kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(self.expansion * out_channels), + ) + + def forward(self, x): + out = self.features(x) + out += self.shortcut(x) + out = torch.relu(out) + return out + + +class Bottleneck(nn.Module): + """瓶颈残差块 + + 用于ResNet50/101/152等深层网络。结构为: + x -> 1x1Conv -> BN -> ReLU -> 3x3Conv -> BN -> ReLU -> 1x1Conv -> BN -> (+) -> ReLU + |-------------------------------------------------------------------| + + Args: + in_channels: 输入通道数 + zip_channels: 压缩后的通道数 + stride: 步长,用于下采样,默认为1 + + 注意:通过1x1卷积先压缩通道数,再还原,expansion=4 + """ + expansion = 4 + + def __init__(self, in_channels, zip_channels, stride=1): + super(Bottleneck, self).__init__() + out_channels = self.expansion * zip_channels + self.features = nn.Sequential( + # 1x1卷积压缩通道 + nn.Conv2d(in_channels, zip_channels, kernel_size=1, bias=False), + nn.BatchNorm2d(zip_channels), + nn.ReLU(inplace=True), + # 3x3卷积提取特征 + nn.Conv2d(zip_channels, zip_channels, kernel_size=3, stride=stride, padding=1, bias=False), + 
class Bottleneck(nn.Module):
    """Bottleneck residual block used by ResNet-50/101/152.

    Layout:
        x -> 1x1Conv -> BN -> ReLU -> 3x3Conv -> BN -> ReLU
          -> 1x1Conv -> BN -> (+ shortcut) -> ReLU

    Args:
        in_channels: number of input channels
        zip_channels: number of channels after the 1x1 squeeze
        stride: stride of the 3x3 convolution (>1 downsamples)

    The 1x1 convolutions squeeze then re-expand the channels; the final
    width is expansion * zip_channels with expansion = 4.
    """
    expansion = 4

    def __init__(self, in_channels, zip_channels, stride=1):
        super(Bottleneck, self).__init__()
        out_channels = self.expansion * zip_channels
        self.features = nn.Sequential(
            # squeeze channels
            nn.Conv2d(in_channels, zip_channels, kernel_size=1, bias=False),
            nn.BatchNorm2d(zip_channels),
            nn.ReLU(inplace=True),
            # 3x3 spatial convolution (carries the stride)
            nn.Conv2d(zip_channels, zip_channels, kernel_size=3,
                      stride=stride, padding=1, bias=False),
            nn.BatchNorm2d(zip_channels),
            nn.ReLU(inplace=True),
            # expand channels back out
            nn.Conv2d(zip_channels, out_channels, kernel_size=1, bias=False),
            nn.BatchNorm2d(out_channels),
        )

        # Project the residual only when shape or width changes.
        if stride != 1 or in_channels != out_channels:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, kernel_size=1,
                          stride=stride, bias=False),
                nn.BatchNorm2d(out_channels),
            )
        else:
            self.shortcut = nn.Sequential()

    def forward(self, x):
        return torch.relu(self.features(x) + self.shortcut(x))
class ResNet(nn.Module):
    """ResNet for CIFAR-10-sized inputs.

    Structure:
        1. a 3x3 convolutional stem
        2. four residual stages, each built from `block` instances
        3. average pooling followed by a linear classifier

    For CIFAR-10 the feature map evolves as:
        (32,32,3) -> stem -> (32,32,64) -> layer1 -> (32,32,64)
        -> layer2 -> (16,16,128) -> layer3 -> (8,8,256)
        -> layer4 -> (4,4,512) -> avg pool -> (1,1,512) -> fc

    Args:
        block: residual block class (BasicBlock or Bottleneck)
        num_blocks: list with the number of blocks per stage
        num_classes: number of output classes (default 10)
        verbose: print intermediate feature-map shapes when True
        init_weights: apply kaiming/constant/normal initialization when True
        dropout: insert Dropout(p=0.5) before the classifier when True
    """

    def __init__(self, block, num_blocks, num_classes=10, verbose=False, init_weights=True, dropout=False):
        super(ResNet, self).__init__()
        self.verbose = verbose
        self.in_channels = 64

        # Convolutional stem.
        self.features = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True)
        )

        # Four residual stages; stages 2-4 halve the spatial resolution.
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)

        # Classification head.
        self.avg_pool = nn.AvgPool2d(kernel_size=4)
        self.dropout = nn.Dropout(p=0.5) if dropout else nn.Identity()
        self.classifier = nn.Linear(512 * block.expansion, num_classes)

        if init_weights:
            self._initialize_weights()

    def _make_layer(self, block, out_channels, num_blocks, stride):
        """Build one residual stage.

        Args:
            block: residual block class
            out_channels: base output channels of the stage
            num_blocks: number of blocks in the stage
            stride: stride of the first block (later blocks use stride 1)

        Returns:
            nn.Sequential: the assembled stage
        """
        layers = []
        for s in [stride] + [1] * (num_blocks - 1):
            layers.append(block(self.in_channels, out_channels, s))
            # Track the running channel count for the next block/stage.
            self.in_channels = out_channels * block.expansion
        return nn.Sequential(*layers)

    def _backbone(self, x):
        """Run stem + residual stages + pooling, returning flat features."""
        out = x
        # Message literals kept identical to preserve verbose output.
        for stage, msg in (
            (self.features, 'block 1 output: {}'),
            (self.layer1, 'block 2 output: {}'),
            (self.layer2, 'block 3 output: {}'),
            (self.layer3, 'block 4 output: {}'),
            (self.layer4, 'block 5 output: {}'),
        ):
            out = stage(out)
            if self.verbose:
                print(msg.format(out.shape))
        out = self.avg_pool(out)
        return out.view(out.size(0), -1)

    def forward(self, x):
        """Forward pass.

        Args:
            x: input tensor, [N,3,32,32]

        Returns:
            out: logits, [N,num_classes]
        """
        flat = self._backbone(x)
        return self.classifier(self.dropout(flat))

    def feature(self, x):
        """Return the flattened pooled features (no dropout/classifier).

        Args:
            x: input tensor, [N,3,32,32]

        Returns:
            out: feature tensor, [N, 512 * block.expansion]
        """
        return self._backbone(x)

    def prediction(self, x):
        """Classify pre-extracted flat features."""
        return self.classifier(x)

    def _initialize_weights(self):
        """Initialize weights: kaiming for convs, constants for BN,
        small normal for linear layers."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.constant_(m.bias, 0)
def ResNet18(verbose=False, num_classes=10, dropout=False):
    """ResNet-18 model.

    Args:
        verbose: print intermediate feature-map shapes
        num_classes: number of output classes
        dropout: use dropout before the final fully-connected layer
    """
    return ResNet(BasicBlock, [2,2,2,2], num_classes=num_classes, verbose=verbose, dropout=dropout)

def ResNet34(verbose=False, num_classes=10, dropout=False):
    """ResNet-34 model."""
    return ResNet(BasicBlock, [3,4,6,3], num_classes=num_classes, verbose=verbose, dropout=dropout)

# num_classes/dropout were previously missing from the Bottleneck factories,
# making them inconsistent with ResNet18/34; the defaults preserve the old
# behavior (10 classes, no dropout) so existing callers are unaffected.
def ResNet50(verbose=False, num_classes=10, dropout=False):
    """ResNet-50 model."""
    return ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes, verbose=verbose, dropout=dropout)

def ResNet101(verbose=False, num_classes=10, dropout=False):
    """ResNet-101 model."""
    return ResNet(Bottleneck, [3,4,23,3], num_classes=num_classes, verbose=verbose, dropout=dropout)

def ResNet152(verbose=False, num_classes=10, dropout=False):
    """ResNet-152 model."""
    return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes, verbose=verbose, dropout=dropout)

def test():
    """Smoke test: run a random batch through ResNet-34 and print a summary."""
    net = ResNet34()
    x = torch.randn(2,3,32,32)
    y = net(x)
    print('Output shape:', y.size())

    # Print the model structure (requires the third-party torchinfo package).
    from torchinfo import summary
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    net = net.to(device)
    summary(net,(2,3,32,32))

if __name__ == '__main__':
    test()
def setup_logger(log_file):
    """Create (or reset) the 'train' logger; an existing log file is overwritten.

    Args:
        log_file: path of the log file to write to

    Returns:
        logging.Logger: logger emitting INFO records to both the file and the console
    """
    logger = logging.getLogger('train')
    logger.setLevel(logging.INFO)

    # Drop handlers left over from a previous call so records are not duplicated.
    if logger.hasHandlers():
        logger.handlers.clear()

    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    # mode='w' truncates any existing log file.
    file_handler = logging.FileHandler(log_file, mode='w')
    console_handler = logging.StreamHandler()
    for handler in (file_handler, console_handler):
        handler.setLevel(logging.INFO)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    return logger
def train_model(model, trainloader, testloader, epochs=200, lr=0.1, device='cuda:0',
               save_dir='./epochs', model_name='model', interval=1):
    """Generic model training loop with periodic checkpoint/embedding export.

    Args:
        model: the model to train
        trainloader: training data loader
        testloader: test data loader
        epochs: number of training epochs
        lr: learning rate
        device: training device as 'cuda:N' (N = GPU index) or 'cpu'
        save_dir: directory for saved checkpoints
        model_name: model name used in logs and saved artifacts
        interval: save a checkpoint every `interval` epochs
    """
    # Validate and normalize the requested device; fall back to CPU or GPU 0.
    if not torch.cuda.is_available():
        print("CUDA不可用,将使用CPU训练")
        device = 'cpu'
    elif not device.startswith('cuda:'):
        device = f'cuda:0'

    # Ensure the 'cuda:N' index actually exists on this machine.
    if device.startswith('cuda:'):
        gpu_id = int(device.split(':')[1])
        if gpu_id >= torch.cuda.device_count():
            print(f"GPU {gpu_id} 不可用,将使用GPU 0")
            device = 'cuda:0'

    # Create the checkpoint directory.
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    # Log file lives next to save_dir under an 'epochs' directory.
    # NOTE(review): with the default save_dir='./epochs' this resolves to
    # './epochs/train.log' (dirname('./epochs') is '.'); verify this is the
    # intended location for non-default save_dir values.
    log_file = os.path.join(os.path.dirname(save_dir),'epochs', 'train.log')
    if not os.path.exists(os.path.dirname(log_file)):
        os.makedirs(os.path.dirname(log_file))

    logger = setup_logger(log_file)

    # Loss, SGD with momentum/weight decay, cosine LR schedule.
    # NOTE(review): T_max is fixed at 50 regardless of `epochs` — confirm.
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=50)

    # Move the model to the selected device.
    model = model.to(device)
    best_acc = 0
    start_time = time.time()

    logger.info(f'开始训练 {model_name}')
    logger.info(f'总轮数: {epochs}, 学习率: {lr}, 设备: {device}')

    for epoch in range(epochs):
        # ---- Training phase ----
        model.train()
        train_loss = 0
        correct = 0
        total = 0

        train_pbar = tqdm(trainloader, desc=f'Epoch {epoch+1}/{epochs} [Train]')
        for batch_idx, (inputs, targets) in enumerate(train_pbar):
            inputs, targets = inputs.to(device), targets.to(device)
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            loss.backward()
            optimizer.step()

            train_loss += loss.item()
            _, predicted = outputs.max(1)
            total += targets.size(0)
            correct += predicted.eq(targets).sum().item()

            # Running loss/accuracy shown on the progress bar.
            train_pbar.set_postfix({
                'loss': f'{train_loss/(batch_idx+1):.3f}',
                'acc': f'{100.*correct/total:.2f}%'
            })

        # Snapshot training-phase accuracy before counters are reused below.
        train_acc = 100.*correct/total
        train_correct = correct
        train_total = total

        # ---- Evaluation phase ----
        model.eval()
        test_loss = 0
        correct = 0
        total = 0

        test_pbar = tqdm(testloader, desc=f'Epoch {epoch+1}/{epochs} [Test]')
        with torch.no_grad():
            for batch_idx, (inputs, targets) in enumerate(test_pbar):
                inputs, targets = inputs.to(device), targets.to(device)
                outputs = model(inputs)
                loss = criterion(outputs, targets)

                test_loss += loss.item()
                _, predicted = outputs.max(1)
                total += targets.size(0)
                correct += predicted.eq(targets).sum().item()

                test_pbar.set_postfix({
                    'loss': f'{test_loss/(batch_idx+1):.3f}',
                    'acc': f'{100.*correct/total:.2f}%'
                })

        # Test accuracy for this epoch.
        acc = 100.*correct/total

        # Log per-epoch train/test loss and accuracy.
        # (batch_idx here is the last test batch index, so batch_idx+1 == len(testloader).)
        logger.info(f'Epoch: {epoch+1} | Train Loss: {train_loss/(len(trainloader)):.3f} | Train Acc: {train_acc:.2f}% | '
                   f'Test Loss: {test_loss/(batch_idx+1):.3f} | Test Acc: {acc:.2f}%')

        # Export visualization artifacts every `interval` epochs (and on epoch 1).
        if (epoch + 1) % interval == 0 or (epoch == 0):
            # A dedicated sequential (non-shuffled) loader so embeddings keep
            # a stable sample order.
            ordered_trainloader = torch.utils.data.DataLoader(
                trainloader.dataset,
                batch_size=trainloader.batch_size,
                shuffle=False,
                num_workers=trainloader.num_workers
            )
            epoch_save_dir = os.path.join(save_dir, f'epoch_{epoch+1}')
            save_model = time_travel_saver(model, ordered_trainloader, device, epoch_save_dir, model_name,
                                          show=True, layer_name='avg_pool', auto_save_embedding=True)
            save_model.save_checkpoint_embeddings_predictions()
            if epoch == 0:
                # Labels/index only need to be written once.
                save_model.save_lables_index(path = "../dataset")

        scheduler.step()

    logger.info('训练完成!')
def main():
    """Entry point: load train.yaml, build ResNet18 and the CIFAR-10 loaders, then train."""
    # Load the YAML configuration next to this script.
    config_path = Path(__file__).parent / 'train.yaml'
    with open(config_path) as f:
        config = yaml.safe_load(f)

    # Build the model.
    model = ResNet18(num_classes=10)

    # Honor batch_size/num_workers from the config — train.yaml declares both
    # but they were previously hard-coded here; the .get() defaults keep the
    # old values (128 / 2) when the keys are absent.
    trainloader, testloader = get_cifar10_dataloaders(
        batch_size=config.get('batch_size', 128),
        num_workers=config.get('num_workers', 2),
        local_dataset_path=config['dataset_path'],
        shuffle=True
    )

    # Run training with the configured schedule and device.
    train_model(
        model=model,
        trainloader=trainloader,
        testloader=testloader,
        epochs=config['epochs'],
        lr=config['lr'],
        device=f'cuda:{config["gpu"]}',
        save_dir='../epochs',
        model_name='ResNet18',
        interval=config['interval']
    )

if __name__ == '__main__':
    main()
b/ttv_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..720b4c55409f6f685b4c0f90a32c03f3c4bae9d9 --- /dev/null +++ b/ttv_utils/__init__.py @@ -0,0 +1,57 @@ +"""ttv_utils包提供了用于特征预测 和 模型训练过程收集 的工具函数 + +主要组件: +1. FeaturePredictor: 用于从模型中间层特征向量预测预测结果的类 + 使用示例: + ```python + predictor = FeaturePredictor( + model_class=model, 模型类 + model_weights_path=weight_path,模型权重文件路径 + layer_info_path=layer_info_path,层信息文件路径 + device=device 运行设备 + ) + ``` + +2. predict_feature: 从模型中间层特征向量预测预测结果便捷函数 + 使用示例: + ```python + output = predict_feature( + model=model, # 模型类 + weight_path=weight_path, # 模型权重文件路径 + layer_info_path=layer_info_path, # 层信息文件路径 + feature=feature, # 特征向量 + device=device # 运行设备 + ) + ``` + +3. time_travel_saver: 用于在训练过程中保存模型权重、特征和预测结果的类 + 使用示例: + ```python + # 创建一个保存器实例 + saver = time_travel_saver( + model=model, # 模型实例 + dataloader=ordered_loader, # 顺序数据加载器 + device='cuda:0', # 计算设备 + save_dir='./checkpoints', # 保存根目录 + model_name='alexnet', # 模型名称 + interval=1 # 每隔多少个epoch保存一次 + ) + + # 在训练循环中调用save方法 + for epoch in range(epochs): + # 训练代码... 
    def __init__(self, model_class, model_weights_path, layer_info_path, device='cuda' if torch.cuda.is_available() else 'cpu'):
        """
        Initialize the feature predictor.

        Loads the model weights, reads which intermediate layer to inject
        into (and its flattened dimension) from the layer-info JSON,
        registers forward hooks on every submodule, and runs one dummy
        forward pass to record the target layer's output shape.

        Args:
            model_class: model class (called with no arguments to build the model)
            model_weights_path: path to the model weights file
            layer_info_path: path to the layer-info JSON file
                (expected keys: 'layer_id', 'dim')
            device: device to run on
        """
        self.device = device
        self.model = model_class().to(device)
        self.model.load_state_dict(torch.load(model_weights_path, map_location=device, weights_only=True))
        self.model.eval()
        # Load the target layer name and its flattened feature dimension.
        with open(layer_info_path, 'r') as f:
            layer_info = json.load(f)
        self.target_layer = layer_info['layer_id']
        self.feature_dim = layer_info['dim']

        # Runtime state: target-layer output shape, the feature to inject
        # during the next forward pass, hook handles, and module->name map.
        self.output_shape = None
        self.inject_feature = None
        self.handles = []
        self.layer_name_map = {}

        # Debugging helpers (currently only initialized, not read here).
        self.last_normalized_feature = None
        self.last_reshaped_feature = None
        self.last_layer_outputs = {}

        # Register hooks on every layer.
        self.register_hooks()

        # One dummy forward pass to capture the target layer's output shape.
        self._get_output_shape()
    def _get_output_shape(self):
        """Run one dummy forward pass to record the target layer's output shape."""
        def shape_hook(module, input, output):
            # Record everything but the batch dimension.
            self.output_shape = output.shape[1:]
            print(f"[Init] 获取到目标层输出形状: {self.output_shape}")
            return output

        # Recursively locate the target layer and attach a temporary hook.
        def find_layer(module, name=''):
            for n, child in module.named_children():
                current_name = f"{name}.{n}" if name else n
                if current_name == self.target_layer:
                    handle = child.register_forward_hook(shape_hook)
                    return handle, True
                else:
                    handle, found = find_layer(child, current_name)
                    if found:
                        return handle, True
            return None, False

        # Attach the temporary hook.
        handle, found = find_layer(self.model)
        if not found:
            raise ValueError(f"未找到目标层: {self.target_layer}")

        # One forward pass with a zero image (assumes 3x32x32 inputs,
        # i.e. CIFAR-shaped models — TODO confirm for other models).
        with torch.no_grad():
            dummy_input = torch.zeros(1, 3, 32, 32).to(self.device)
            self.model(dummy_input)

        # Remove the temporary hook.
        handle.remove()

        if self.output_shape is None:
            raise RuntimeError("无法获取目标层的输出形状")

    def register_hooks(self):
        """Register forward hooks on every layer: they log each layer's I/O
        and, on the target layer, replace the output with the injected feature."""
        def print_tensor_info(name, tensor):
            """Print summary statistics of a tensor (debug output)."""
            print(f"\n[Hook Debug] {name}:")
            print(f"- 形状: {tensor.shape}")
            print(f"- 数值范围: [{tensor.min().item():.4f}, {tensor.max().item():.4f}]")
            print(f"- 均值: {tensor.mean().item():.4f}")
            print(f"- 标准差: {tensor.std().item():.4f}")

        def hook_fn(module, input, output):
            """Hook: log layer info; on the target layer, inject the feature."""
            layer_name = self.layer_name_map.get(module, "未知层")
            print(f"\n[Hook Debug] 层: {layer_name}")
            print(f"- 类型: {type(module).__name__}")

            # Log the layer's input.
            if input and len(input) > 0:
                print_tensor_info("输入张量", input[0])

            # Log the layer's original output.
            print_tensor_info("输出张量", output)

            # On the target layer, and only while a feature is pending,
            # replace the output so it becomes the next layer's input.
            if layer_name == self.target_layer and self.inject_feature is not None:
                print("\n[Hook Debug] 正在注入特征...")
                print_tensor_info("注入特征", self.inject_feature)
                print(f"[Hook Debug] 将层 {layer_name} 的输出从 {output.shape} 替换为注入特征 {self.inject_feature.shape}")
                # Returning a value from a forward hook overrides the output.
                output = self.inject_feature
                print("[Hook Debug] 特征注入完成,将作为下一层的输入")
                return output

            return output

        def hook_layer(module, name=''):
            """Recursively register the hook on every child module."""
            for n, child in module.named_children():
                current_name = f"{name}.{n}" if name else n
                # Remember the dotted name of each module for hook_fn.
                self.layer_name_map[child] = current_name
                # Keep the handle so hooks can be removed later.
                handle = child.register_forward_hook(hook_fn)
                self.handles.append(handle)
                # Recurse into submodules.
                hook_layer(child, current_name)

        # Register hooks on the whole model.
        hook_layer(self.model)
        print(f"[Debug] 钩子注册完成,共注册了 {len(self.handles)} 个钩子")

    def reshape_feature(self, feature):
        """Reshape a flat feature batch to the target layer's output shape.

        Args:
            feature: tensor of shape [batch, flattened_dim]

        Returns:
            tensor reshaped to [batch, *target_layer_output_shape]

        Raises:
            RuntimeError: if the target layer's output shape is unknown
            ValueError: if the flattened dimension does not match
        """
        if self.output_shape is None:
            raise RuntimeError("目标层的输出形状未初始化")

        batch_size = feature.shape[0]
        expected_dim = np.prod(self.output_shape)

        # Validate the flattened dimension before reshaping.
        if feature.shape[1] != expected_dim:
            raise ValueError(f"特征维度不匹配:预期 {expected_dim},实际 {feature.shape[1]}")

        # Reshape to the shape captured during initialization.
        new_shape = (batch_size,) + self.output_shape
        print(f"[Debug] 调整特征形状: {feature.shape} -> {new_shape}")
        return feature.view(new_shape)

    def predict(self, feature):
        """Predict class scores from an intermediate-layer feature batch.

        A zero dummy image drives the forward pass; the hooks replace the
        target layer's output with `feature`, so all layers after the
        target layer operate on the injected feature.

        Args:
            feature: tensor of shape [batch, feature_dim]

        Returns:
            model output tensor (logits)
        """
        print(f"\n[Debug] 开始预测,输入特征形状: {feature.shape}")

        # Validate the declared feature dimension.
        if feature.shape[1] != self.feature_dim:
            raise ValueError(f"特征维度不匹配:预期 {self.feature_dim},实际 {feature.shape[1]}")

        # Move to the right device and reshape to the target layer's shape.
        feature = feature.to(self.device)
        self.inject_feature = self.reshape_feature(feature)

        # Dummy input only carries the batch size; its values are discarded
        # at the target layer.
        dummy_input = torch.zeros(feature.shape[0], 3, 32, 32).to(self.device)

        # Forward pass; the hook injects the feature at the target layer.
        with torch.no_grad():
            output = self.model(dummy_input)

        # Clear the pending feature so later forward passes are unaffected.
        self.inject_feature = None

        return output
def predict_feature(
    model: Type[nn.Module],
    weight_path: str,
    layer_info_path: str,
    feature: Union[torch.Tensor, np.ndarray],
    device: Optional[str] = None
) -> torch.Tensor:
    """
    Predict class scores for an intermediate-layer feature vector using a
    pretrained model.

    Args:
        model: PyTorch model class (not an instance)
        weight_path: path to the model weights file
        layer_info_path: path to the layer-info JSON file
        feature: input feature, a torch.Tensor or numpy.ndarray
        device: 'cuda' or 'cpu'; auto-selected when None

    Returns:
        torch.Tensor: the model's prediction output

    Raises:
        ValueError: if the input feature is of the wrong type or dimension
        FileNotFoundError: if the weights or layer-info file is missing
        RuntimeError: if model loading or prediction fails unexpectedly
    """
    # Input validation raises the documented exception types directly.
    # (Previously a blanket `except Exception` re-wrapped FileNotFoundError
    # and ValueError into RuntimeError, contradicting the Raises section.)
    if not os.path.exists(weight_path):
        raise FileNotFoundError(f"权重文件不存在: {weight_path}")
    if not os.path.exists(layer_info_path):
        raise FileNotFoundError(f"层信息文件不存在: {layer_info_path}")

    # Pick a device when none was given.
    if device is None:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'

    # Coerce the feature to a float tensor.
    if isinstance(feature, np.ndarray):
        feature = torch.from_numpy(feature).float()
    elif not isinstance(feature, torch.Tensor):
        raise ValueError("输入特征必须是numpy数组或torch张量")

    try:
        # Build the predictor (loads weights, registers hooks).
        predictor = FeaturePredictor(
            model_class=model,
            model_weights_path=weight_path,
            layer_info_path=layer_info_path,
            device=device
        )

        # Run the prediction.
        with torch.no_grad():
            output = predictor.predict(feature)

        return output

    except ValueError:
        # Dimension mismatches from predict()/reshape keep their documented type.
        raise
    except Exception as e:
        # Unexpected failures are wrapped, chaining the original cause so the
        # underlying traceback is preserved.
        raise RuntimeError(f"预测过程出错: {str(e)}") from e
def test_predictor_from_train_data():
    """Batch-prediction smoke test: run the saved training features through
    FeaturePredictor and print the predicted class distribution.

    Requires the AlexNet checkpoint, layer-info JSON, and train_data.npy
    to exist at the hard-coded paths below.
    """
    from AlexNet.code.model import AlexNet
    import numpy as np
    import torch

    print("\n开始处理训练数据集...")
    # Build the predictor from the saved checkpoint and layer info.
    predictor = FeaturePredictor(
        model_class=AlexNet,
        model_weights_path='AlexNet/model/0/epoch_195/subject_model.pth',
        layer_info_path='AlexNet/code/layer_info.json'
    )

    # Load the pre-extracted training features from disk.
    print("\n加载训练数据...")
    features = np.load('AlexNet/model/0/epoch_195/train_data.npy')
    print(f"数据形状: {features.shape}")

    # Convert to a float tensor.
    features = torch.from_numpy(features).float()

    # Process in fixed-size batches (ceil division for the last partial batch).
    batch_size = 100
    num_samples = len(features)
    num_batches = (num_samples + batch_size - 1) // batch_size

    # Accumulators for the overall prediction statistics.
    all_predictions = []
    class_counts = {}

    print("\n开始批量预测...")
    with torch.no_grad():
        for i in range(num_batches):
            start_idx = i * batch_size
            end_idx = min((i + 1) * batch_size, num_samples)
            batch_features = features[start_idx:end_idx]

            # Predict via feature injection and take the argmax class.
            outputs = predictor.predict(batch_features)
            predictions = outputs.argmax(dim=1).cpu().numpy()

            # Update the per-class counters.
            for pred in predictions:
                class_counts[int(pred)] = class_counts.get(int(pred), 0) + 1

            all_predictions.extend(predictions)

            # Every 10 batches, print progress and the batch's distribution.
            if (i + 1) % 10 == 0:
                print(f"\n已处理: {end_idx}/{num_samples} 个样本")
                batch_unique, batch_counts = np.unique(predictions, return_counts=True)
                print("当前批次预测分布:")
                for class_idx, count in zip(batch_unique, batch_counts):
                    print(f"类别 {class_idx}: {count} 个样本 ({count/len(predictions)*100:.2f}%)")

    # Print the overall class distribution.
    print("\n最终预测结果统计:")
    total_samples = len(all_predictions)
    for class_idx in sorted(class_counts.keys()):
        count = class_counts[class_idx]
        percentage = (count / total_samples) * 100
        print(f"类别 {class_idx}: {count} 个样本 ({percentage:.2f}%)")
def test_train_data():
    """Baseline check: feed the saved training features through the model's
    own `predict` path (no hook injection) and print the class distribution.

    Assumes AlexNet exposes a `predict` method and that each feature row
    reshapes to [16, 8, 8] — TODO confirm against the AlexNet definition.
    """
    from AlexNet.code.model import AlexNet
    import numpy as np
    import torch
    import torch.nn.functional as F

    print("\n开始处理训练数据集...")

    # Load the checkpointed model in eval mode.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    model = AlexNet().to(device)
    model.load_state_dict(torch.load('AlexNet/model/0/epoch_195/subject_model.pth',
                                   map_location=device, weights_only=True))
    model.eval()

    # Load the pre-extracted training features from disk.
    print("加载训练数据...")
    features = np.load('AlexNet/model/0/epoch_195/train_data.npy')
    print(f"数据形状: {features.shape}")

    # Move to the model's device as a float tensor.
    features = torch.from_numpy(features).float().to(device)

    # Process in fixed-size batches (ceil division for the last partial batch).
    batch_size = 100
    num_samples = len(features)
    num_batches = (num_samples + batch_size - 1) // batch_size

    # Accumulators for the overall prediction statistics.
    all_predictions = []
    class_counts = {}

    print("\n开始批量预测...")
    with torch.no_grad():
        for i in range(num_batches):
            start_idx = i * batch_size
            end_idx = min((i + 1) * batch_size, num_samples)
            batch_features = features[start_idx:end_idx]

            # Reshape flat features to [batch_size, 16, 8, 8] for the model.
            reshaped_features = batch_features.view(-1, 16, 8, 8)

            # Use the model's own predict entry point and take the argmax class.
            outputs = model.predict(reshaped_features)
            predictions = outputs.argmax(dim=1).cpu().numpy()

            # Update the per-class counters.
            for pred in predictions:
                class_counts[int(pred)] = class_counts.get(int(pred), 0) + 1

            all_predictions.extend(predictions)

            # Progress report every 10 batches.
            if (i + 1) % 10 == 0:
                print(f"已处理: {end_idx}/{num_samples} 个样本")

    # Print the overall class distribution.
    print("\n预测结果统计:")
    total_samples = len(all_predictions)
    for class_idx in sorted(class_counts.keys()):
        count = class_counts[class_idx]
        percentage = (count / total_samples) * 100
        print(f"类别 {class_idx}: {count} 个样本 ({percentage:.2f}%)")

    # Collect results for optional saving (saving is currently disabled).
    print("\n保存详细结果...")
    results = {
        'predictions': all_predictions,
        'class_counts': class_counts
    }
    # np.save('prediction_results.npy', results)
    # print("结果已保存到 prediction_results.npy")
    def __init__(self, model, dataloader, device, save_dir, model_name,
                 auto_save_embedding=False, layer_name=None,show = False):
        """Initialize the saver.

        Args:
            model: model instance to save
            dataloader: data loader (must iterate samples in a fixed order)
            device: compute device (cpu or gpu)
            save_dir: root directory for saved artifacts
            model_name: model name
            auto_save_embedding: also extract and save features/predictions
                when checkpoints are written
            layer_name: name of the layer to take features from; when None,
                a layer is auto-selected by dimension at extraction time
            show: when True and no layer_name is given, print every layer's
                name and flattened dimension to help pick one
        """
        self.model = model
        self.dataloader = dataloader
        self.device = device
        self.save_dir = save_dir
        self.model_name = model_name
        self.auto_save = auto_save_embedding
        self.layer_name = layer_name

        # Only list layer dimensions when asked and no layer was chosen yet.
        if show and not layer_name:
            layer_dimensions = self.show_dimensions()
            # print(layer_dimensions)

    def show_dimensions(self):
        """Print the name and output dimension of every layer in the model.

        Runs one batch through the model with forward hooks attached to
        record each layer's output, then prints a table of layer names and
        their flattened feature dimensions to help choose `layer_name`.

        Returns:
            layer_dimensions: dict mapping layer name -> flattened dimension
        """
        activation = {}
        layer_dimensions = {}

        def get_activation(name):
            def hook(model, input, output):
                activation[name] = output.detach()
            return hook

        # Attach a hook to every leaf-ish module (containers excluded).
        handles = []
        for name, module in self.model.named_modules():
            if isinstance(module, nn.Module) and not isinstance(module, nn.ModuleList) and not isinstance(module, nn.ModuleDict):
                handles.append(module.register_forward_hook(get_activation(name)))

        self.model.eval()
        with torch.no_grad():
            # One batch is enough to observe every layer's output shape.
            inputs, _ = next(iter(self.dataloader))
            inputs = inputs.to(self.device)
            _ = self.model(inputs)

        # Print a table of every layer's name and flattened dimension.
        print("\n模型各层的名称和维度:")
        print("-" * 50)
        print(f"{'层名称':<40} {'特征维度':<15} {'输出形状'}")
        print("-" * 50)

        for name, feat in activation.items():
            if feat is None:
                continue

            # Flattened (per-sample) feature dimension.
            feat_dim = feat.view(feat.size(0), -1).size(1)
            layer_dimensions[name] = feat_dim
            # Print one row per layer.
            shape_str = str(list(feat.shape))
            print(f"{name:<40} {feat_dim:<15} {shape_str}")

        print("-" * 50)
        print("注: 特征维度是将输出张量展平后的维度大小")
        print("你可以通过修改time_travel_saver的layer_name参数来选择不同的层")
        print("例如:layer_name='avg_pool'或layer_name='layer4'等")

        # Remove all temporary hooks.
        for handle in handles:
            handle.remove()

        return layer_dimensions
    def _extract_features_and_predictions(self):
        """Extract intermediate features and predictions for the whole dataset.

        A first batch is run to discover layer output shapes; the feature
        layer is either `self.layer_name` or the first layer whose flattened
        dimension falls in (256, 2048). The chosen layer's name/dimension is
        written to layer_info.json next to save_dir, then every batch is run
        to collect features and predictions.

        Returns:
            features: feature matrix [num_samples, feature_dim]
            predictions: prediction matrix [num_samples, num_classes]
        """
        features = []
        predictions = []
        indices = []  # NOTE(review): collected nowhere below; appears unused.
        activation = {}

        def get_activation(name):
            def hook(model, input, output):
                # Keep only the first activation per layer per forward pass
                # (the dict is cleared between batches below).
                if name not in activation or activation[name] is None:
                    activation[name] = output.detach()
            return hook

        # The feature layer is selected by name or by dimension below.

        # Attach a hook to every non-container module.
        handles = []
        for name, module in self.model.named_modules():
            if isinstance(module, nn.Module) and not isinstance(module, nn.ModuleList) and not isinstance(module, nn.ModuleDict):
                handles.append(module.register_forward_hook(get_activation(name)))

        self.model.eval()
        with torch.no_grad():
            # Probe one batch first to learn each layer's output dimension.
            inputs, _ = next(iter(self.dataloader))
            inputs = inputs.to(self.device)
            _ = self.model(inputs)

            # If a layer name was specified, use it directly.
            if self.layer_name is not None:
                if self.layer_name not in activation:
                    raise ValueError(f"指定的层 {self.layer_name} 不存在于模型中")

                feat = activation[self.layer_name]
                if feat is None:
                    raise ValueError(f"指定的层 {self.layer_name} 没有输出特征")

                suitable_layer_name = self.layer_name
                suitable_dim = feat.view(feat.size(0), -1).size(1)
                print(f"使用指定的特征层: {suitable_layer_name}, 特征维度: {suitable_dim}")
            else:
                # Otherwise pick the first layer whose flattened dimension
                # lies in the target range.
                target_dim_range = (256, 2048)
                suitable_layer_name = None
                suitable_dim = None

                for name, feat in activation.items():
                    if feat is None:
                        continue
                    feat_dim = feat.view(feat.size(0), -1).size(1)
                    if target_dim_range[0] <= feat_dim <= target_dim_range[1]:
                        suitable_layer_name = name
                        suitable_dim = feat_dim
                        break

                if suitable_layer_name is None:
                    raise ValueError("没有找到合适维度的特征层")

                print(f"自动选择的特征层: {suitable_layer_name}, 特征维度: {suitable_dim}")

            # Persist the chosen layer so FeaturePredictor can reuse it.
            layer_info = {
                'layer_id': suitable_layer_name,
                'dim': suitable_dim
            }
            layer_info_path = os.path.join(os.path.dirname(self.save_dir), 'layer_info.json')
            with open(layer_info_path, 'w') as f:
                json.dump(layer_info, f)

            # Discard activations from the probe batch.
            activation.clear()

            # Now run the full dataset, collecting features and predictions.
            for batch_idx, (inputs, _) in enumerate(tqdm(self.dataloader, desc="提取特征和预测结果")):
                inputs = inputs.to(self.device)
                outputs = self.model(inputs)  # model predictions for the batch

                # Grab and flatten the chosen layer's activation.
                feat = activation[suitable_layer_name]
                flat_features = torch.flatten(feat, start_dim=1)
                features.append(flat_features.cpu().numpy())
                predictions.append(outputs.cpu().numpy())

                # Clear activations before the next batch.
                activation.clear()

        # Remove all hooks.
        for handle in handles:
            handle.remove()

        if len(features) > 0:
            features = np.vstack(features)
            predictions = np.vstack(predictions)
            return features, predictions
        else:
            return np.array([]), np.array([])
    def save_lables_index(self, path):
        """Save the dataset's labels (labels.npy) and split index (index.json).

        Best-effort: any failure is printed rather than raised.
        (Method name spelling "lables" is kept — it is the public API used
        by train.py.)

        Args:
            path: directory to write labels.npy and index.json into
        """
        os.makedirs(path, exist_ok=True)
        labels_path = os.path.join(path, 'labels.npy')
        index_path = os.path.join(path, 'index.json')

        # Labels may live under different attribute names per dataset.
        try:
            if hasattr(self.dataloader.dataset, 'targets'):
                # CIFAR10/CIFAR100 expose labels via .targets
                labels = np.array(self.dataloader.dataset.targets)
            elif hasattr(self.dataloader.dataset, 'labels'):
                # some datasets expose labels via .labels
                labels = np.array(self.dataloader.dataset.labels)
            else:
                # Fallback: iterate the loader and collect batch labels.
                # (Assumes the loader is not shuffled — TODO confirm caller.)
                labels = []
                for _, batch_labels in self.dataloader:
                    labels.append(batch_labels.numpy())
                labels = np.concatenate(labels)

            # Write the label array.
            np.save(labels_path, labels)
            print(f"标签数据已保存到 {labels_path}")

            # Build a trivial split: every sample in "train".
            num_samples = len(labels)
            indices = list(range(num_samples))

            # Split dictionary consumed by downstream tooling.
            index_dict = {
                "train": indices,  # all samples default to the training split
                "test": [],  # initially empty
                "validation": []  # initially empty
            }

            # Write the split index as JSON.
            with open(index_path, 'w') as f:
                json.dump(index_dict, f, indent=4)

            print(f"数据集索引已保存到 {index_path}")

        except Exception as e:
            # Deliberately non-fatal: label export must not abort training.
            print(f"保存标签和索引时出错: {e}")
os.makedirs(self.save_dir, exist_ok=True) + model_path = os.path.join(self.save_dir,'model.pth') + torch.save(self.model.state_dict(), model_path) + + if self.auto_save: + # 提取并保存特征和预测结果 + features, predictions = self._extract_features_and_predictions() + + # 保存特征 + np.save(os.path.join(self.save_dir, 'embeddings.npy'), features) + # 保存预测结果 + np.save(os.path.join(self.save_dir, 'predictions.npy'), predictions) + print("\n保存了以下数据:") + print(f"- 模型权重: {model_path}") + print(f"- 特征向量: [样本数: {features.shape[0]}, 特征维度: {features.shape[1]}]") + print(f"- 预测结果: [样本数: {predictions.shape[0]}, 类别数: {predictions.shape[1]}]") \ No newline at end of file