RRFRRF2 committed
Commit 9e14a76 · 0 Parent(s):

Initial commit, history cleaned up

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .gitattributes +56 -0
  2. .gitignore +2 -0
  3. Image/AlexNet/code/backdoor_train.log +503 -0
  4. Image/AlexNet/code/model.py +81 -0
  5. Image/AlexNet/code/train.log +503 -0
  6. Image/AlexNet/code/train.py +42 -0
  7. Image/AlexNet/dataset/.gitkeep +0 -0
  8. Image/AlexNet/model/0/epoch1/embeddings.npy +3 -0
  9. Image/AlexNet/model/0/epoch1/subject_model.pth +3 -0
  10. Image/AlexNet/model/0/epoch10/embeddings.npy +3 -0
  11. Image/AlexNet/model/0/epoch10/subject_model.pth +3 -0
  12. Image/AlexNet/model/0/epoch11/embeddings.npy +3 -0
  13. Image/AlexNet/model/0/epoch11/subject_model.pth +3 -0
  14. Image/AlexNet/model/0/epoch12/embeddings.npy +3 -0
  15. Image/AlexNet/model/0/epoch12/subject_model.pth +3 -0
  16. Image/AlexNet/model/0/epoch13/embeddings.npy +3 -0
  17. Image/AlexNet/model/0/epoch13/subject_model.pth +3 -0
  18. Image/AlexNet/model/0/epoch14/embeddings.npy +3 -0
  19. Image/AlexNet/model/0/epoch14/subject_model.pth +3 -0
  20. Image/AlexNet/model/0/epoch15/embeddings.npy +3 -0
  21. Image/AlexNet/model/0/epoch15/subject_model.pth +3 -0
  22. Image/AlexNet/model/0/epoch16/embeddings.npy +3 -0
  23. Image/AlexNet/model/0/epoch16/subject_model.pth +3 -0
  24. Image/AlexNet/model/0/epoch17/embeddings.npy +3 -0
  25. Image/AlexNet/model/0/epoch17/subject_model.pth +3 -0
  26. Image/AlexNet/model/0/epoch18/embeddings.npy +3 -0
  27. Image/AlexNet/model/0/epoch18/subject_model.pth +3 -0
  28. Image/AlexNet/model/0/epoch19/embeddings.npy +3 -0
  29. Image/AlexNet/model/0/epoch19/subject_model.pth +3 -0
  30. Image/AlexNet/model/0/epoch2/embeddings.npy +3 -0
  31. Image/AlexNet/model/0/epoch2/subject_model.pth +3 -0
  32. Image/AlexNet/model/0/epoch20/embeddings.npy +3 -0
  33. Image/AlexNet/model/0/epoch20/subject_model.pth +3 -0
  34. Image/AlexNet/model/0/epoch21/embeddings.npy +3 -0
  35. Image/AlexNet/model/0/epoch21/subject_model.pth +3 -0
  36. Image/AlexNet/model/0/epoch22/embeddings.npy +3 -0
  37. Image/AlexNet/model/0/epoch22/subject_model.pth +3 -0
  38. Image/AlexNet/model/0/epoch23/embeddings.npy +3 -0
  39. Image/AlexNet/model/0/epoch23/subject_model.pth +3 -0
  40. Image/AlexNet/model/0/epoch24/embeddings.npy +3 -0
  41. Image/AlexNet/model/0/epoch24/subject_model.pth +3 -0
  42. Image/AlexNet/model/0/epoch25/embeddings.npy +3 -0
  43. Image/AlexNet/model/0/epoch25/subject_model.pth +3 -0
  44. Image/AlexNet/model/0/epoch3/embeddings.npy +3 -0
  45. Image/AlexNet/model/0/epoch3/subject_model.pth +3 -0
  46. Image/AlexNet/model/0/epoch4/embeddings.npy +3 -0
  47. Image/AlexNet/model/0/epoch4/subject_model.pth +3 -0
  48. Image/AlexNet/model/0/epoch5/embeddings.npy +3 -0
  49. Image/AlexNet/model/0/epoch5/subject_model.pth +3 -0
  50. Image/AlexNet/model/0/epoch6/embeddings.npy +3 -0
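Each epoch directory listed above pairs an embeddings.npy array with a subject_model.pth checkpoint. As a quick orientation aid (not part of the commit), here is a minimal Python sketch that walks those directories and prints the shape of each saved embedding array; the internal layout of embeddings.npy is not documented in this diff, so the shapes are simply whatever np.load reports.

# Hypothetical helper, run from the repository root. Assumes only that
# embeddings.npy files are ordinary NumPy arrays; their semantics are not
# specified anywhere in this commit.
import glob
import numpy as np

for path in sorted(glob.glob("Image/AlexNet/model/0/epoch*/embeddings.npy")):
    arr = np.load(path)
    # Note: lexicographic sort puts epoch10 before epoch2; fine for a quick look.
    print(path, arr.shape, arr.dtype)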
.gitattributes ADDED
@@ -0,0 +1,56 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.lz4 filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
+ # Audio files - uncompressed
+ *.pcm filter=lfs diff=lfs merge=lfs -text
+ *.sam filter=lfs diff=lfs merge=lfs -text
+ *.raw filter=lfs diff=lfs merge=lfs -text
+ # Audio files - compressed
+ *.aac filter=lfs diff=lfs merge=lfs -text
+ *.flac filter=lfs diff=lfs merge=lfs -text
+ *.mp3 filter=lfs diff=lfs merge=lfs -text
+ *.ogg filter=lfs diff=lfs merge=lfs -text
+ *.wav filter=lfs diff=lfs merge=lfs -text
+ # Image files - uncompressed
+ *.bmp filter=lfs diff=lfs merge=lfs -text
+ *.gif filter=lfs diff=lfs merge=lfs -text
+ *.png filter=lfs diff=lfs merge=lfs -text
+ *.tiff filter=lfs diff=lfs merge=lfs -text
+ # Image files - compressed
+ *.jpg filter=lfs diff=lfs merge=lfs -text
+ *.jpeg filter=lfs diff=lfs merge=lfs -text
+ *.webp filter=lfs diff=lfs merge=lfs -text
+ ResNet-CIFAR10/Classification-normal/dataset/cifar-10-batches-py/* filter=lfs diff=lfs merge=lfs -text
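These patterns route the repository's large binary artifacts (checkpoints, NumPy arrays, archives, media) through Git LFS rather than plain Git. The sketch below is a rough, non-authoritative way to check which committed paths would fall under them; it uses Python's fnmatch, which only approximates gitattributes glob semantics (notably for ** and directory separators), so treat the output as indicative.

# Rough illustration only: fnmatch approximates gitattributes pattern
# matching, it is not a faithful reimplementation.
from fnmatch import fnmatch

lfs_patterns = ["*.npy", "*.pth", "*.pt", "*.ckpt", "*.zip"]  # subset of the list above
paths = [
    "Image/AlexNet/model/0/epoch1/embeddings.npy",
    "Image/AlexNet/model/0/epoch1/subject_model.pth",
    "Image/AlexNet/code/train.py",
]

for p in paths:
    via_lfs = any(fnmatch(p, pattern) for pattern in lfs_patterns)
    print(f"{p}: {'Git LFS' if via_lfs else 'plain git'}")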
.gitignore ADDED
@@ -0,0 +1,2 @@
+ *.pyc
+ __pycache__
Image/AlexNet/code/backdoor_train.log ADDED
@@ -0,0 +1,503 @@
1
+ 2025-03-09 20:19:12,058 - train - INFO - Started training alexnet
2
+ 2025-03-09 20:19:12,058 - train - INFO - Total epochs: 100, learning rate: 0.1, device: cuda:2
3
+ 2025-03-09 20:19:12,675 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.303 | Acc: 16.41%
4
+ 2025-03-09 20:19:14,626 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.271 | Acc: 18.68%
5
+ 2025-03-09 20:19:16,523 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.251 | Acc: 19.08%
6
+ 2025-03-09 20:19:18,515 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 2.190 | Acc: 19.65%
7
+ 2025-03-09 20:19:21,395 - train - INFO - Epoch: 1 | Test Loss: 1.966 | Test Acc: 18.53%
8
+ 2025-03-09 20:19:21,531 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 2.000 | Acc: 20.31%
9
+ 2025-03-09 20:19:23,427 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.985 | Acc: 20.66%
10
+ 2025-03-09 20:19:25,413 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.976 | Acc: 20.93%
11
+ 2025-03-09 20:19:27,414 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.963 | Acc: 21.40%
12
+ 2025-03-09 20:19:30,365 - train - INFO - Epoch: 2 | Test Loss: 1.889 | Test Acc: 20.28%
13
+ 2025-03-09 20:19:30,566 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.928 | Acc: 26.56%
14
+ 2025-03-09 20:19:33,166 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.907 | Acc: 24.49%
15
+ 2025-03-09 20:19:35,301 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.885 | Acc: 25.66%
16
+ 2025-03-09 20:19:37,244 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.861 | Acc: 26.84%
17
+ 2025-03-09 20:19:40,542 - train - INFO - Epoch: 3 | Test Loss: 1.751 | Test Acc: 28.73%
18
+ 2025-03-09 20:19:40,699 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.752 | Acc: 31.25%
19
+ 2025-03-09 20:19:42,615 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.688 | Acc: 34.92%
20
+ 2025-03-09 20:19:44,581 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.674 | Acc: 35.46%
21
+ 2025-03-09 20:19:46,517 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.670 | Acc: 35.93%
22
+ 2025-03-09 20:19:49,443 - train - INFO - Epoch: 4 | Test Loss: 1.597 | Test Acc: 40.89%
23
+ 2025-03-09 20:19:57,639 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.450 | Acc: 47.66%
24
+ 2025-03-09 20:19:59,573 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.591 | Acc: 40.60%
25
+ 2025-03-09 20:20:01,613 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.572 | Acc: 41.28%
26
+ 2025-03-09 20:20:03,589 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.564 | Acc: 41.79%
27
+ 2025-03-09 20:20:06,878 - train - INFO - Epoch: 5 | Test Loss: 1.498 | Test Acc: 46.10%
28
+ 2025-03-09 20:20:07,049 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.323 | Acc: 53.12%
29
+ 2025-03-09 20:20:09,159 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.504 | Acc: 44.93%
30
+ 2025-03-09 20:20:11,263 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.507 | Acc: 45.20%
31
+ 2025-03-09 20:20:13,313 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.511 | Acc: 45.22%
32
+ 2025-03-09 20:20:16,255 - train - INFO - Epoch: 6 | Test Loss: 1.616 | Test Acc: 43.10%
33
+ 2025-03-09 20:20:16,412 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 1.604 | Acc: 40.62%
34
+ 2025-03-09 20:20:18,415 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.507 | Acc: 44.99%
35
+ 2025-03-09 20:20:20,351 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.481 | Acc: 46.18%
36
+ 2025-03-09 20:20:22,266 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.494 | Acc: 46.05%
37
+ 2025-03-09 20:20:25,325 - train - INFO - Epoch: 7 | Test Loss: 1.543 | Test Acc: 43.40%
38
+ 2025-03-09 20:20:25,492 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.399 | Acc: 49.22%
39
+ 2025-03-09 20:20:27,700 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.477 | Acc: 47.11%
40
+ 2025-03-09 20:20:29,710 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.464 | Acc: 47.44%
41
+ 2025-03-09 20:20:31,913 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.461 | Acc: 47.83%
42
+ 2025-03-09 20:20:35,491 - train - INFO - Epoch: 8 | Test Loss: 1.713 | Test Acc: 42.90%
43
+ 2025-03-09 20:20:44,721 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.387 | Acc: 50.78%
44
+ 2025-03-09 20:20:46,712 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.450 | Acc: 48.65%
45
+ 2025-03-09 20:20:48,602 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.446 | Acc: 48.56%
46
+ 2025-03-09 20:20:50,410 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.449 | Acc: 48.34%
47
+ 2025-03-09 20:20:53,243 - train - INFO - Epoch: 9 | Test Loss: 1.560 | Test Acc: 44.42%
48
+ 2025-03-09 20:20:53,419 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 1.467 | Acc: 47.66%
49
+ 2025-03-09 20:20:55,289 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 1.438 | Acc: 48.78%
50
+ 2025-03-09 20:20:57,200 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 1.437 | Acc: 49.22%
51
+ 2025-03-09 20:20:59,140 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 1.425 | Acc: 49.63%
52
+ 2025-03-09 20:21:02,190 - train - INFO - Epoch: 10 | Test Loss: 1.399 | Test Acc: 51.59%
53
+ 2025-03-09 20:21:02,359 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 1.214 | Acc: 60.94%
54
+ 2025-03-09 20:21:04,254 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 1.449 | Acc: 48.41%
55
+ 2025-03-09 20:21:06,162 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 1.457 | Acc: 48.33%
56
+ 2025-03-09 20:21:08,031 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 1.449 | Acc: 48.74%
57
+ 2025-03-09 20:21:11,037 - train - INFO - Epoch: 11 | Test Loss: 1.485 | Test Acc: 49.63%
58
+ 2025-03-09 20:21:11,206 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 1.293 | Acc: 51.56%
59
+ 2025-03-09 20:21:13,154 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 1.422 | Acc: 50.02%
60
+ 2025-03-09 20:21:15,086 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 1.406 | Acc: 50.85%
61
+ 2025-03-09 20:21:17,146 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 1.413 | Acc: 50.54%
62
+ 2025-03-09 20:21:20,196 - train - INFO - Epoch: 12 | Test Loss: 1.515 | Test Acc: 46.31%
63
+ 2025-03-09 20:21:28,840 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 1.368 | Acc: 46.88%
64
+ 2025-03-09 20:21:30,993 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 1.425 | Acc: 50.31%
65
+ 2025-03-09 20:21:33,129 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 1.418 | Acc: 50.51%
66
+ 2025-03-09 20:21:35,080 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 1.422 | Acc: 50.37%
67
+ 2025-03-09 20:21:38,299 - train - INFO - Epoch: 13 | Test Loss: 1.688 | Test Acc: 43.65%
68
+ 2025-03-09 20:21:38,495 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 1.597 | Acc: 46.88%
69
+ 2025-03-09 20:21:40,470 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 1.417 | Acc: 50.60%
70
+ 2025-03-09 20:21:42,443 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 1.441 | Acc: 49.58%
71
+ 2025-03-09 20:21:44,403 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 1.439 | Acc: 49.61%
72
+ 2025-03-09 20:21:47,387 - train - INFO - Epoch: 14 | Test Loss: 1.390 | Test Acc: 53.03%
73
+ 2025-03-09 20:21:47,552 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 1.497 | Acc: 50.78%
74
+ 2025-03-09 20:21:49,502 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 1.401 | Acc: 51.78%
75
+ 2025-03-09 20:21:51,470 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 1.405 | Acc: 51.27%
76
+ 2025-03-09 20:21:53,392 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 1.413 | Acc: 51.07%
77
+ 2025-03-09 20:21:56,330 - train - INFO - Epoch: 15 | Test Loss: 1.420 | Test Acc: 51.08%
78
+ 2025-03-09 20:21:56,494 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 1.452 | Acc: 45.31%
79
+ 2025-03-09 20:21:58,628 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 1.414 | Acc: 50.87%
80
+ 2025-03-09 20:22:00,738 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 1.409 | Acc: 51.10%
81
+ 2025-03-09 20:22:02,889 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 1.410 | Acc: 51.06%
82
+ 2025-03-09 20:22:06,141 - train - INFO - Epoch: 16 | Test Loss: 1.340 | Test Acc: 54.35%
83
+ 2025-03-09 20:22:15,047 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 1.409 | Acc: 50.00%
84
+ 2025-03-09 20:22:17,210 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 1.392 | Acc: 52.21%
85
+ 2025-03-09 20:22:19,647 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 1.388 | Acc: 52.15%
86
+ 2025-03-09 20:22:21,842 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 1.386 | Acc: 52.22%
87
+ 2025-03-09 20:22:25,119 - train - INFO - Epoch: 17 | Test Loss: 1.341 | Test Acc: 53.27%
88
+ 2025-03-09 20:22:25,281 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 1.476 | Acc: 46.09%
89
+ 2025-03-09 20:22:27,733 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 1.405 | Acc: 52.00%
90
+ 2025-03-09 20:22:29,947 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 1.408 | Acc: 51.39%
91
+ 2025-03-09 20:22:31,944 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 1.402 | Acc: 51.70%
92
+ 2025-03-09 20:22:35,223 - train - INFO - Epoch: 18 | Test Loss: 1.446 | Test Acc: 49.65%
93
+ 2025-03-09 20:22:35,389 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 1.405 | Acc: 50.78%
94
+ 2025-03-09 20:22:37,625 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 1.415 | Acc: 51.73%
95
+ 2025-03-09 20:22:39,692 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 1.413 | Acc: 51.99%
96
+ 2025-03-09 20:22:41,607 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 1.405 | Acc: 52.11%
97
+ 2025-03-09 20:22:44,549 - train - INFO - Epoch: 19 | Test Loss: 1.407 | Test Acc: 52.25%
98
+ 2025-03-09 20:22:44,717 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 1.496 | Acc: 52.34%
99
+ 2025-03-09 20:22:46,695 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 1.393 | Acc: 52.61%
100
+ 2025-03-09 20:22:48,558 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 1.391 | Acc: 52.68%
101
+ 2025-03-09 20:22:50,490 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 1.386 | Acc: 52.47%
102
+ 2025-03-09 20:22:53,448 - train - INFO - Epoch: 20 | Test Loss: 1.386 | Test Acc: 52.54%
103
+ 2025-03-09 20:23:02,248 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 1.271 | Acc: 53.91%
104
+ 2025-03-09 20:23:04,411 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 1.377 | Acc: 52.96%
105
+ 2025-03-09 20:23:06,656 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 1.362 | Acc: 53.40%
106
+ 2025-03-09 20:23:08,804 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 1.365 | Acc: 53.34%
107
+ 2025-03-09 20:23:11,945 - train - INFO - Epoch: 21 | Test Loss: 1.409 | Test Acc: 52.31%
108
+ 2025-03-09 20:23:12,122 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 1.437 | Acc: 52.34%
109
+ 2025-03-09 20:23:14,013 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 1.347 | Acc: 53.79%
110
+ 2025-03-09 20:23:15,873 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 1.363 | Acc: 52.99%
111
+ 2025-03-09 20:23:17,764 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 1.364 | Acc: 53.03%
112
+ 2025-03-09 20:23:20,673 - train - INFO - Epoch: 22 | Test Loss: 1.469 | Test Acc: 49.71%
113
+ 2025-03-09 20:23:20,830 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 1.354 | Acc: 49.22%
114
+ 2025-03-09 20:23:22,923 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 1.379 | Acc: 52.44%
115
+ 2025-03-09 20:23:24,848 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 1.379 | Acc: 52.37%
116
+ 2025-03-09 20:23:26,896 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 1.374 | Acc: 52.68%
117
+ 2025-03-09 20:23:30,039 - train - INFO - Epoch: 23 | Test Loss: 1.362 | Test Acc: 52.68%
118
+ 2025-03-09 20:23:30,193 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 1.161 | Acc: 61.72%
119
+ 2025-03-09 20:23:32,373 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 1.389 | Acc: 52.10%
120
+ 2025-03-09 20:23:34,420 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 1.380 | Acc: 52.79%
121
+ 2025-03-09 20:23:36,633 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 1.385 | Acc: 52.65%
122
+ 2025-03-09 20:23:39,821 - train - INFO - Epoch: 24 | Test Loss: 1.370 | Test Acc: 52.61%
123
+ 2025-03-09 20:23:48,489 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 1.393 | Acc: 53.12%
124
+ 2025-03-09 20:23:50,419 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 1.372 | Acc: 53.69%
125
+ 2025-03-09 20:23:52,355 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 1.400 | Acc: 52.04%
126
+ 2025-03-09 20:23:54,234 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 1.385 | Acc: 52.35%
127
+ 2025-03-09 20:23:57,196 - train - INFO - Epoch: 25 | Test Loss: 1.394 | Test Acc: 52.26%
128
+ 2025-03-09 20:23:57,352 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 1.450 | Acc: 59.38%
129
+ 2025-03-09 20:23:59,265 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 1.348 | Acc: 54.16%
130
+ 2025-03-09 20:24:01,145 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 1.334 | Acc: 54.64%
131
+ 2025-03-09 20:24:03,153 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 1.333 | Acc: 54.60%
132
+ 2025-03-09 20:24:06,282 - train - INFO - Epoch: 26 | Test Loss: 1.348 | Test Acc: 53.70%
133
+ 2025-03-09 20:24:06,508 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 1.219 | Acc: 58.59%
134
+ 2025-03-09 20:24:08,538 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 1.338 | Acc: 54.12%
135
+ 2025-03-09 20:24:10,668 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 1.350 | Acc: 53.35%
136
+ 2025-03-09 20:24:12,748 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 1.349 | Acc: 53.77%
137
+ 2025-03-09 20:24:15,793 - train - INFO - Epoch: 27 | Test Loss: 1.359 | Test Acc: 52.39%
138
+ 2025-03-09 20:24:15,977 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 1.496 | Acc: 47.66%
139
+ 2025-03-09 20:24:17,971 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 1.372 | Acc: 52.45%
140
+ 2025-03-09 20:24:19,834 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 1.352 | Acc: 53.57%
141
+ 2025-03-09 20:24:21,705 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 1.358 | Acc: 53.43%
142
+ 2025-03-09 20:24:24,689 - train - INFO - Epoch: 28 | Test Loss: 1.377 | Test Acc: 54.25%
143
+ 2025-03-09 20:24:33,155 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 1.412 | Acc: 48.44%
144
+ 2025-03-09 20:24:35,070 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 1.370 | Acc: 53.01%
145
+ 2025-03-09 20:24:37,032 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 1.367 | Acc: 53.06%
146
+ 2025-03-09 20:24:39,067 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 1.353 | Acc: 53.75%
147
+ 2025-03-09 20:24:42,247 - train - INFO - Epoch: 29 | Test Loss: 1.488 | Test Acc: 47.56%
148
+ 2025-03-09 20:24:42,420 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 1.462 | Acc: 48.44%
149
+ 2025-03-09 20:24:44,388 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 1.386 | Acc: 52.71%
150
+ 2025-03-09 20:24:46,476 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 1.375 | Acc: 53.26%
151
+ 2025-03-09 20:24:48,451 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 1.367 | Acc: 53.45%
152
+ 2025-03-09 20:24:51,659 - train - INFO - Epoch: 30 | Test Loss: 1.347 | Test Acc: 52.89%
153
+ 2025-03-09 20:24:51,817 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 1.475 | Acc: 51.56%
154
+ 2025-03-09 20:24:53,919 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 1.358 | Acc: 54.30%
155
+ 2025-03-09 20:24:56,164 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 1.349 | Acc: 54.70%
156
+ 2025-03-09 20:24:58,081 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 1.339 | Acc: 54.88%
157
+ 2025-03-09 20:25:00,872 - train - INFO - Epoch: 31 | Test Loss: 1.342 | Test Acc: 54.27%
158
+ 2025-03-09 20:25:01,010 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 1.281 | Acc: 56.25%
159
+ 2025-03-09 20:25:02,872 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 1.343 | Acc: 54.04%
160
+ 2025-03-09 20:25:04,904 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 1.345 | Acc: 53.87%
161
+ 2025-03-09 20:25:06,912 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 1.336 | Acc: 54.46%
162
+ 2025-03-09 20:25:09,820 - train - INFO - Epoch: 32 | Test Loss: 1.399 | Test Acc: 53.36%
163
+ 2025-03-09 20:25:18,309 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 1.352 | Acc: 57.81%
164
+ 2025-03-09 20:25:20,363 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 1.311 | Acc: 55.27%
165
+ 2025-03-09 20:25:22,301 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 1.311 | Acc: 55.33%
166
+ 2025-03-09 20:25:24,323 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 1.303 | Acc: 55.76%
167
+ 2025-03-09 20:25:27,526 - train - INFO - Epoch: 33 | Test Loss: 1.348 | Test Acc: 55.64%
168
+ 2025-03-09 20:25:27,697 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 1.294 | Acc: 52.34%
169
+ 2025-03-09 20:25:29,828 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 1.319 | Acc: 55.61%
170
+ 2025-03-09 20:25:31,968 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 1.313 | Acc: 55.77%
171
+ 2025-03-09 20:25:34,273 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 1.317 | Acc: 55.53%
172
+ 2025-03-09 20:25:37,696 - train - INFO - Epoch: 34 | Test Loss: 1.453 | Test Acc: 52.69%
173
+ 2025-03-09 20:25:37,860 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 1.607 | Acc: 48.44%
174
+ 2025-03-09 20:25:40,073 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 1.330 | Acc: 55.03%
175
+ 2025-03-09 20:25:42,446 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 1.314 | Acc: 55.59%
176
+ 2025-03-09 20:25:44,548 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 1.313 | Acc: 55.58%
177
+ 2025-03-09 20:25:47,695 - train - INFO - Epoch: 35 | Test Loss: 1.366 | Test Acc: 53.31%
178
+ 2025-03-09 20:25:47,852 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 1.387 | Acc: 53.91%
179
+ 2025-03-09 20:25:49,788 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 1.331 | Acc: 55.28%
180
+ 2025-03-09 20:25:51,705 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 1.331 | Acc: 55.26%
181
+ 2025-03-09 20:25:53,699 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 1.332 | Acc: 55.45%
182
+ 2025-03-09 20:25:56,774 - train - INFO - Epoch: 36 | Test Loss: 1.273 | Test Acc: 56.38%
183
+ 2025-03-09 20:26:05,606 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 1.296 | Acc: 58.59%
184
+ 2025-03-09 20:26:07,890 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 1.324 | Acc: 54.72%
185
+ 2025-03-09 20:26:09,936 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 1.321 | Acc: 55.64%
186
+ 2025-03-09 20:26:12,029 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 1.315 | Acc: 55.72%
187
+ 2025-03-09 20:26:15,068 - train - INFO - Epoch: 37 | Test Loss: 1.290 | Test Acc: 56.71%
188
+ 2025-03-09 20:26:15,238 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 1.452 | Acc: 51.56%
189
+ 2025-03-09 20:26:17,226 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 1.312 | Acc: 56.12%
190
+ 2025-03-09 20:26:19,216 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 1.300 | Acc: 56.18%
191
+ 2025-03-09 20:26:21,112 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 1.306 | Acc: 55.91%
192
+ 2025-03-09 20:26:23,992 - train - INFO - Epoch: 38 | Test Loss: 1.361 | Test Acc: 54.99%
193
+ 2025-03-09 20:26:24,159 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 1.445 | Acc: 52.34%
194
+ 2025-03-09 20:26:26,133 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 1.322 | Acc: 54.90%
195
+ 2025-03-09 20:26:28,117 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 1.314 | Acc: 55.84%
196
+ 2025-03-09 20:26:30,147 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 1.314 | Acc: 55.66%
197
+ 2025-03-09 20:26:33,109 - train - INFO - Epoch: 39 | Test Loss: 1.350 | Test Acc: 54.42%
198
+ 2025-03-09 20:26:33,294 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 1.216 | Acc: 60.16%
199
+ 2025-03-09 20:26:35,443 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 1.298 | Acc: 56.68%
200
+ 2025-03-09 20:26:38,192 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 1.292 | Acc: 56.84%
201
+ 2025-03-09 20:26:40,191 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 1.288 | Acc: 56.83%
202
+ 2025-03-09 20:26:43,736 - train - INFO - Epoch: 40 | Test Loss: 1.414 | Test Acc: 53.13%
203
+ 2025-03-09 20:26:52,109 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 1.310 | Acc: 50.78%
204
+ 2025-03-09 20:26:53,954 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 1.288 | Acc: 56.18%
205
+ 2025-03-09 20:26:55,875 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 1.305 | Acc: 55.85%
206
+ 2025-03-09 20:26:57,745 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 1.307 | Acc: 56.09%
207
+ 2025-03-09 20:27:00,677 - train - INFO - Epoch: 41 | Test Loss: 1.363 | Test Acc: 53.66%
208
+ 2025-03-09 20:27:00,850 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 1.274 | Acc: 55.47%
209
+ 2025-03-09 20:27:02,898 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 1.287 | Acc: 56.66%
210
+ 2025-03-09 20:27:04,913 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 1.295 | Acc: 56.11%
211
+ 2025-03-09 20:27:07,000 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 1.310 | Acc: 55.73%
212
+ 2025-03-09 20:27:10,353 - train - INFO - Epoch: 42 | Test Loss: 1.267 | Test Acc: 55.87%
213
+ 2025-03-09 20:27:10,566 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 1.068 | Acc: 61.72%
214
+ 2025-03-09 20:27:12,538 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 1.276 | Acc: 57.14%
215
+ 2025-03-09 20:27:14,555 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 1.281 | Acc: 56.92%
216
+ 2025-03-09 20:27:16,452 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 1.294 | Acc: 56.41%
217
+ 2025-03-09 20:27:19,499 - train - INFO - Epoch: 43 | Test Loss: 1.402 | Test Acc: 54.51%
218
+ 2025-03-09 20:27:19,668 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 1.519 | Acc: 47.66%
219
+ 2025-03-09 20:27:21,706 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 1.300 | Acc: 56.28%
220
+ 2025-03-09 20:27:23,755 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 1.310 | Acc: 55.94%
221
+ 2025-03-09 20:27:25,686 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 1.307 | Acc: 56.03%
222
+ 2025-03-09 20:27:28,811 - train - INFO - Epoch: 44 | Test Loss: 1.234 | Test Acc: 58.47%
223
+ 2025-03-09 20:27:37,172 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 1.269 | Acc: 58.59%
224
+ 2025-03-09 20:27:39,166 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 1.280 | Acc: 56.24%
225
+ 2025-03-09 20:27:41,156 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 1.286 | Acc: 56.27%
226
+ 2025-03-09 20:27:43,004 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 1.286 | Acc: 56.40%
227
+ 2025-03-09 20:27:45,868 - train - INFO - Epoch: 45 | Test Loss: 1.282 | Test Acc: 55.78%
228
+ 2025-03-09 20:27:46,023 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 1.239 | Acc: 51.56%
229
+ 2025-03-09 20:27:47,975 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 1.302 | Acc: 55.83%
230
+ 2025-03-09 20:27:49,950 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 1.266 | Acc: 57.14%
231
+ 2025-03-09 20:27:52,118 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 1.255 | Acc: 57.51%
232
+ 2025-03-09 20:27:55,154 - train - INFO - Epoch: 46 | Test Loss: 1.309 | Test Acc: 56.11%
233
+ 2025-03-09 20:27:55,469 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 1.226 | Acc: 57.03%
234
+ 2025-03-09 20:27:57,480 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 1.251 | Acc: 57.84%
235
+ 2025-03-09 20:27:59,499 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 1.258 | Acc: 57.76%
236
+ 2025-03-09 20:28:01,442 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 1.252 | Acc: 58.02%
237
+ 2025-03-09 20:28:04,566 - train - INFO - Epoch: 47 | Test Loss: 1.253 | Test Acc: 57.28%
238
+ 2025-03-09 20:28:04,739 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 1.394 | Acc: 53.12%
239
+ 2025-03-09 20:28:06,693 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 1.278 | Acc: 56.41%
240
+ 2025-03-09 20:28:08,637 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 1.268 | Acc: 57.14%
241
+ 2025-03-09 20:28:10,711 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 1.262 | Acc: 57.50%
242
+ 2025-03-09 20:28:13,848 - train - INFO - Epoch: 48 | Test Loss: 1.249 | Test Acc: 58.69%
243
+ 2025-03-09 20:28:22,849 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 1.235 | Acc: 60.94%
244
+ 2025-03-09 20:28:24,718 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 1.262 | Acc: 57.60%
245
+ 2025-03-09 20:28:26,655 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 1.252 | Acc: 57.92%
246
+ 2025-03-09 20:28:28,615 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 1.256 | Acc: 57.75%
247
+ 2025-03-09 20:28:31,510 - train - INFO - Epoch: 49 | Test Loss: 1.236 | Test Acc: 59.64%
248
+ 2025-03-09 20:28:31,684 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 1.417 | Acc: 52.34%
249
+ 2025-03-09 20:28:33,617 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 1.226 | Acc: 58.89%
250
+ 2025-03-09 20:28:35,516 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 1.262 | Acc: 57.55%
251
+ 2025-03-09 20:28:37,408 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 1.257 | Acc: 57.70%
252
+ 2025-03-09 20:28:40,309 - train - INFO - Epoch: 50 | Test Loss: 1.257 | Test Acc: 57.34%
253
+ 2025-03-09 20:28:40,473 - train - INFO - Epoch: 51 | Batch: 0 | Loss: 1.113 | Acc: 63.28%
254
+ 2025-03-09 20:28:42,519 - train - INFO - Epoch: 51 | Batch: 100 | Loss: 1.259 | Acc: 58.03%
255
+ 2025-03-09 20:28:44,502 - train - INFO - Epoch: 51 | Batch: 200 | Loss: 1.264 | Acc: 57.69%
256
+ 2025-03-09 20:28:46,631 - train - INFO - Epoch: 51 | Batch: 300 | Loss: 1.269 | Acc: 57.29%
257
+ 2025-03-09 20:28:49,638 - train - INFO - Epoch: 51 | Test Loss: 1.281 | Test Acc: 57.03%
258
+ 2025-03-09 20:28:49,812 - train - INFO - Epoch: 52 | Batch: 0 | Loss: 1.257 | Acc: 55.47%
259
+ 2025-03-09 20:28:51,845 - train - INFO - Epoch: 52 | Batch: 100 | Loss: 1.262 | Acc: 57.78%
260
+ 2025-03-09 20:28:53,837 - train - INFO - Epoch: 52 | Batch: 200 | Loss: 1.265 | Acc: 57.56%
261
+ 2025-03-09 20:28:55,995 - train - INFO - Epoch: 52 | Batch: 300 | Loss: 1.261 | Acc: 57.63%
262
+ 2025-03-09 20:28:59,048 - train - INFO - Epoch: 52 | Test Loss: 1.331 | Test Acc: 53.97%
263
+ 2025-03-09 20:29:07,981 - train - INFO - Epoch: 53 | Batch: 0 | Loss: 1.271 | Acc: 53.91%
264
+ 2025-03-09 20:29:09,928 - train - INFO - Epoch: 53 | Batch: 100 | Loss: 1.254 | Acc: 58.34%
265
+ 2025-03-09 20:29:11,910 - train - INFO - Epoch: 53 | Batch: 200 | Loss: 1.252 | Acc: 58.11%
266
+ 2025-03-09 20:29:13,867 - train - INFO - Epoch: 53 | Batch: 300 | Loss: 1.262 | Acc: 57.70%
267
+ 2025-03-09 20:29:16,853 - train - INFO - Epoch: 53 | Test Loss: 1.308 | Test Acc: 57.07%
268
+ 2025-03-09 20:29:17,032 - train - INFO - Epoch: 54 | Batch: 0 | Loss: 1.168 | Acc: 60.16%
269
+ 2025-03-09 20:29:18,963 - train - INFO - Epoch: 54 | Batch: 100 | Loss: 1.250 | Acc: 57.74%
270
+ 2025-03-09 20:29:20,860 - train - INFO - Epoch: 54 | Batch: 200 | Loss: 1.238 | Acc: 58.21%
271
+ 2025-03-09 20:29:22,743 - train - INFO - Epoch: 54 | Batch: 300 | Loss: 1.241 | Acc: 58.10%
272
+ 2025-03-09 20:29:25,703 - train - INFO - Epoch: 54 | Test Loss: 1.218 | Test Acc: 59.11%
273
+ 2025-03-09 20:29:25,847 - train - INFO - Epoch: 55 | Batch: 0 | Loss: 1.268 | Acc: 60.16%
274
+ 2025-03-09 20:29:27,805 - train - INFO - Epoch: 55 | Batch: 100 | Loss: 1.251 | Acc: 57.75%
275
+ 2025-03-09 20:29:29,824 - train - INFO - Epoch: 55 | Batch: 200 | Loss: 1.233 | Acc: 58.28%
276
+ 2025-03-09 20:29:31,744 - train - INFO - Epoch: 55 | Batch: 300 | Loss: 1.229 | Acc: 58.43%
277
+ 2025-03-09 20:29:34,830 - train - INFO - Epoch: 55 | Test Loss: 1.157 | Test Acc: 61.97%
278
+ 2025-03-09 20:29:35,053 - train - INFO - Epoch: 56 | Batch: 0 | Loss: 1.206 | Acc: 57.81%
279
+ 2025-03-09 20:29:37,115 - train - INFO - Epoch: 56 | Batch: 100 | Loss: 1.258 | Acc: 57.43%
280
+ 2025-03-09 20:29:39,236 - train - INFO - Epoch: 56 | Batch: 200 | Loss: 1.241 | Acc: 58.07%
281
+ 2025-03-09 20:29:41,226 - train - INFO - Epoch: 56 | Batch: 300 | Loss: 1.233 | Acc: 58.52%
282
+ 2025-03-09 20:29:44,437 - train - INFO - Epoch: 56 | Test Loss: 1.167 | Test Acc: 60.27%
283
+ 2025-03-09 20:29:52,970 - train - INFO - Epoch: 57 | Batch: 0 | Loss: 1.121 | Acc: 60.16%
284
+ 2025-03-09 20:29:55,013 - train - INFO - Epoch: 57 | Batch: 100 | Loss: 1.195 | Acc: 60.04%
285
+ 2025-03-09 20:29:57,034 - train - INFO - Epoch: 57 | Batch: 200 | Loss: 1.213 | Acc: 59.17%
286
+ 2025-03-09 20:29:58,980 - train - INFO - Epoch: 57 | Batch: 300 | Loss: 1.219 | Acc: 58.95%
287
+ 2025-03-09 20:30:01,938 - train - INFO - Epoch: 57 | Test Loss: 1.274 | Test Acc: 57.75%
288
+ 2025-03-09 20:30:02,116 - train - INFO - Epoch: 58 | Batch: 0 | Loss: 1.284 | Acc: 53.12%
289
+ 2025-03-09 20:30:04,029 - train - INFO - Epoch: 58 | Batch: 100 | Loss: 1.250 | Acc: 58.62%
290
+ 2025-03-09 20:30:06,039 - train - INFO - Epoch: 58 | Batch: 200 | Loss: 1.243 | Acc: 58.39%
291
+ 2025-03-09 20:30:08,038 - train - INFO - Epoch: 58 | Batch: 300 | Loss: 1.240 | Acc: 58.48%
292
+ 2025-03-09 20:30:11,145 - train - INFO - Epoch: 58 | Test Loss: 1.232 | Test Acc: 58.94%
293
+ 2025-03-09 20:30:11,327 - train - INFO - Epoch: 59 | Batch: 0 | Loss: 1.047 | Acc: 63.28%
294
+ 2025-03-09 20:30:13,348 - train - INFO - Epoch: 59 | Batch: 100 | Loss: 1.210 | Acc: 59.13%
295
+ 2025-03-09 20:30:15,556 - train - INFO - Epoch: 59 | Batch: 200 | Loss: 1.208 | Acc: 59.16%
296
+ 2025-03-09 20:30:17,594 - train - INFO - Epoch: 59 | Batch: 300 | Loss: 1.212 | Acc: 59.30%
297
+ 2025-03-09 20:30:20,606 - train - INFO - Epoch: 59 | Test Loss: 1.165 | Test Acc: 62.08%
298
+ 2025-03-09 20:30:20,783 - train - INFO - Epoch: 60 | Batch: 0 | Loss: 1.238 | Acc: 55.47%
299
+ 2025-03-09 20:30:22,724 - train - INFO - Epoch: 60 | Batch: 100 | Loss: 1.218 | Acc: 58.86%
300
+ 2025-03-09 20:30:24,630 - train - INFO - Epoch: 60 | Batch: 200 | Loss: 1.215 | Acc: 58.99%
301
+ 2025-03-09 20:30:26,703 - train - INFO - Epoch: 60 | Batch: 300 | Loss: 1.215 | Acc: 59.11%
302
+ 2025-03-09 20:30:29,708 - train - INFO - Epoch: 60 | Test Loss: 1.178 | Test Acc: 60.43%
303
+ 2025-03-09 20:30:38,516 - train - INFO - Epoch: 61 | Batch: 0 | Loss: 1.265 | Acc: 58.59%
304
+ 2025-03-09 20:30:40,669 - train - INFO - Epoch: 61 | Batch: 100 | Loss: 1.206 | Acc: 59.23%
305
+ 2025-03-09 20:30:42,615 - train - INFO - Epoch: 61 | Batch: 200 | Loss: 1.196 | Acc: 59.98%
306
+ 2025-03-09 20:30:44,531 - train - INFO - Epoch: 61 | Batch: 300 | Loss: 1.183 | Acc: 60.42%
307
+ 2025-03-09 20:30:47,435 - train - INFO - Epoch: 61 | Test Loss: 1.302 | Test Acc: 57.91%
308
+ 2025-03-09 20:30:47,597 - train - INFO - Epoch: 62 | Batch: 0 | Loss: 1.109 | Acc: 64.06%
309
+ 2025-03-09 20:30:49,444 - train - INFO - Epoch: 62 | Batch: 100 | Loss: 1.195 | Acc: 60.26%
310
+ 2025-03-09 20:30:51,346 - train - INFO - Epoch: 62 | Batch: 200 | Loss: 1.189 | Acc: 60.26%
311
+ 2025-03-09 20:30:53,324 - train - INFO - Epoch: 62 | Batch: 300 | Loss: 1.198 | Acc: 59.84%
312
+ 2025-03-09 20:30:56,425 - train - INFO - Epoch: 62 | Test Loss: 1.144 | Test Acc: 61.91%
313
+ 2025-03-09 20:30:56,594 - train - INFO - Epoch: 63 | Batch: 0 | Loss: 1.158 | Acc: 57.81%
314
+ 2025-03-09 20:30:58,593 - train - INFO - Epoch: 63 | Batch: 100 | Loss: 1.202 | Acc: 60.19%
315
+ 2025-03-09 20:31:00,631 - train - INFO - Epoch: 63 | Batch: 200 | Loss: 1.201 | Acc: 60.02%
316
+ 2025-03-09 20:31:02,630 - train - INFO - Epoch: 63 | Batch: 300 | Loss: 1.205 | Acc: 59.69%
317
+ 2025-03-09 20:31:06,449 - train - INFO - Epoch: 63 | Test Loss: 1.192 | Test Acc: 60.56%
318
+ 2025-03-09 20:31:06,659 - train - INFO - Epoch: 64 | Batch: 0 | Loss: 1.170 | Acc: 67.97%
319
+ 2025-03-09 20:31:08,897 - train - INFO - Epoch: 64 | Batch: 100 | Loss: 1.166 | Acc: 60.61%
320
+ 2025-03-09 20:31:11,221 - train - INFO - Epoch: 64 | Batch: 200 | Loss: 1.184 | Acc: 60.09%
321
+ 2025-03-09 20:31:13,224 - train - INFO - Epoch: 64 | Batch: 300 | Loss: 1.188 | Acc: 60.04%
322
+ 2025-03-09 20:31:16,234 - train - INFO - Epoch: 64 | Test Loss: 1.150 | Test Acc: 62.05%
323
+ 2025-03-09 20:31:24,778 - train - INFO - Epoch: 65 | Batch: 0 | Loss: 1.002 | Acc: 61.72%
324
+ 2025-03-09 20:31:26,799 - train - INFO - Epoch: 65 | Batch: 100 | Loss: 1.141 | Acc: 61.68%
325
+ 2025-03-09 20:31:28,890 - train - INFO - Epoch: 65 | Batch: 200 | Loss: 1.169 | Acc: 60.47%
326
+ 2025-03-09 20:31:30,932 - train - INFO - Epoch: 65 | Batch: 300 | Loss: 1.173 | Acc: 60.45%
327
+ 2025-03-09 20:31:34,033 - train - INFO - Epoch: 65 | Test Loss: 1.124 | Test Acc: 62.72%
328
+ 2025-03-09 20:31:34,227 - train - INFO - Epoch: 66 | Batch: 0 | Loss: 1.387 | Acc: 53.91%
329
+ 2025-03-09 20:31:36,268 - train - INFO - Epoch: 66 | Batch: 100 | Loss: 1.168 | Acc: 61.05%
330
+ 2025-03-09 20:31:38,189 - train - INFO - Epoch: 66 | Batch: 200 | Loss: 1.175 | Acc: 61.00%
331
+ 2025-03-09 20:31:40,070 - train - INFO - Epoch: 66 | Batch: 300 | Loss: 1.175 | Acc: 60.98%
332
+ 2025-03-09 20:31:42,970 - train - INFO - Epoch: 66 | Test Loss: 1.184 | Test Acc: 61.65%
333
+ 2025-03-09 20:31:43,120 - train - INFO - Epoch: 67 | Batch: 0 | Loss: 1.150 | Acc: 61.72%
334
+ 2025-03-09 20:31:45,207 - train - INFO - Epoch: 67 | Batch: 100 | Loss: 1.149 | Acc: 61.84%
335
+ 2025-03-09 20:31:47,200 - train - INFO - Epoch: 67 | Batch: 200 | Loss: 1.171 | Acc: 61.07%
336
+ 2025-03-09 20:31:49,096 - train - INFO - Epoch: 67 | Batch: 300 | Loss: 1.165 | Acc: 61.19%
337
+ 2025-03-09 20:31:52,455 - train - INFO - Epoch: 67 | Test Loss: 1.216 | Test Acc: 60.41%
338
+ 2025-03-09 20:31:52,629 - train - INFO - Epoch: 68 | Batch: 0 | Loss: 1.103 | Acc: 61.72%
339
+ 2025-03-09 20:31:54,772 - train - INFO - Epoch: 68 | Batch: 100 | Loss: 1.161 | Acc: 61.22%
340
+ 2025-03-09 20:31:56,740 - train - INFO - Epoch: 68 | Batch: 200 | Loss: 1.154 | Acc: 61.48%
341
+ 2025-03-09 20:31:59,231 - train - INFO - Epoch: 68 | Batch: 300 | Loss: 1.162 | Acc: 61.29%
342
+ 2025-03-09 20:32:02,617 - train - INFO - Epoch: 68 | Test Loss: 1.161 | Test Acc: 61.05%
343
+ 2025-03-09 20:32:10,865 - train - INFO - Epoch: 69 | Batch: 0 | Loss: 1.238 | Acc: 55.47%
344
+ 2025-03-09 20:32:12,884 - train - INFO - Epoch: 69 | Batch: 100 | Loss: 1.173 | Acc: 60.76%
345
+ 2025-03-09 20:32:14,815 - train - INFO - Epoch: 69 | Batch: 200 | Loss: 1.175 | Acc: 60.64%
346
+ 2025-03-09 20:32:16,725 - train - INFO - Epoch: 69 | Batch: 300 | Loss: 1.183 | Acc: 60.28%
347
+ 2025-03-09 20:32:19,765 - train - INFO - Epoch: 69 | Test Loss: 1.230 | Test Acc: 58.66%
348
+ 2025-03-09 20:32:19,932 - train - INFO - Epoch: 70 | Batch: 0 | Loss: 1.389 | Acc: 54.69%
349
+ 2025-03-09 20:32:22,033 - train - INFO - Epoch: 70 | Batch: 100 | Loss: 1.158 | Acc: 61.07%
350
+ 2025-03-09 20:32:24,089 - train - INFO - Epoch: 70 | Batch: 200 | Loss: 1.157 | Acc: 60.95%
351
+ 2025-03-09 20:32:26,117 - train - INFO - Epoch: 70 | Batch: 300 | Loss: 1.166 | Acc: 60.71%
352
+ 2025-03-09 20:32:29,298 - train - INFO - Epoch: 70 | Test Loss: 1.226 | Test Acc: 59.05%
353
+ 2025-03-09 20:32:29,467 - train - INFO - Epoch: 71 | Batch: 0 | Loss: 1.339 | Acc: 55.47%
354
+ 2025-03-09 20:32:31,438 - train - INFO - Epoch: 71 | Batch: 100 | Loss: 1.167 | Acc: 60.74%
355
+ 2025-03-09 20:32:33,454 - train - INFO - Epoch: 71 | Batch: 200 | Loss: 1.162 | Acc: 61.08%
356
+ 2025-03-09 20:32:35,462 - train - INFO - Epoch: 71 | Batch: 300 | Loss: 1.162 | Acc: 61.09%
357
+ 2025-03-09 20:32:38,625 - train - INFO - Epoch: 71 | Test Loss: 1.196 | Test Acc: 60.13%
358
+ 2025-03-09 20:32:38,825 - train - INFO - Epoch: 72 | Batch: 0 | Loss: 1.065 | Acc: 67.97%
359
+ 2025-03-09 20:32:41,013 - train - INFO - Epoch: 72 | Batch: 100 | Loss: 1.153 | Acc: 61.68%
360
+ 2025-03-09 20:32:43,148 - train - INFO - Epoch: 72 | Batch: 200 | Loss: 1.139 | Acc: 62.09%
361
+ 2025-03-09 20:32:45,102 - train - INFO - Epoch: 72 | Batch: 300 | Loss: 1.142 | Acc: 61.86%
362
+ 2025-03-09 20:32:48,052 - train - INFO - Epoch: 72 | Test Loss: 1.178 | Test Acc: 60.15%
363
+ 2025-03-09 20:32:56,719 - train - INFO - Epoch: 73 | Batch: 0 | Loss: 1.149 | Acc: 57.81%
364
+ 2025-03-09 20:32:58,840 - train - INFO - Epoch: 73 | Batch: 100 | Loss: 1.124 | Acc: 62.13%
365
+ 2025-03-09 20:33:01,060 - train - INFO - Epoch: 73 | Batch: 200 | Loss: 1.124 | Acc: 62.22%
366
+ 2025-03-09 20:33:02,959 - train - INFO - Epoch: 73 | Batch: 300 | Loss: 1.124 | Acc: 62.25%
367
+ 2025-03-09 20:33:06,081 - train - INFO - Epoch: 73 | Test Loss: 1.108 | Test Acc: 62.58%
368
+ 2025-03-09 20:33:06,242 - train - INFO - Epoch: 74 | Batch: 0 | Loss: 0.976 | Acc: 64.06%
369
+ 2025-03-09 20:33:08,117 - train - INFO - Epoch: 74 | Batch: 100 | Loss: 1.129 | Acc: 61.83%
370
+ 2025-03-09 20:33:10,070 - train - INFO - Epoch: 74 | Batch: 200 | Loss: 1.141 | Acc: 61.62%
371
+ 2025-03-09 20:33:12,009 - train - INFO - Epoch: 74 | Batch: 300 | Loss: 1.139 | Acc: 61.84%
372
+ 2025-03-09 20:33:14,980 - train - INFO - Epoch: 74 | Test Loss: 1.232 | Test Acc: 57.82%
373
+ 2025-03-09 20:33:15,160 - train - INFO - Epoch: 75 | Batch: 0 | Loss: 1.104 | Acc: 60.94%
374
+ 2025-03-09 20:33:17,074 - train - INFO - Epoch: 75 | Batch: 100 | Loss: 1.153 | Acc: 61.48%
375
+ 2025-03-09 20:33:18,961 - train - INFO - Epoch: 75 | Batch: 200 | Loss: 1.149 | Acc: 61.54%
376
+ 2025-03-09 20:33:20,780 - train - INFO - Epoch: 75 | Batch: 300 | Loss: 1.141 | Acc: 61.77%
377
+ 2025-03-09 20:33:23,699 - train - INFO - Epoch: 75 | Test Loss: 1.157 | Test Acc: 61.00%
378
+ 2025-03-09 20:33:23,864 - train - INFO - Epoch: 76 | Batch: 0 | Loss: 1.193 | Acc: 60.94%
379
+ 2025-03-09 20:33:25,763 - train - INFO - Epoch: 76 | Batch: 100 | Loss: 1.147 | Acc: 61.41%
380
+ 2025-03-09 20:33:27,728 - train - INFO - Epoch: 76 | Batch: 200 | Loss: 1.138 | Acc: 61.87%
381
+ 2025-03-09 20:33:29,731 - train - INFO - Epoch: 76 | Batch: 300 | Loss: 1.143 | Acc: 61.68%
382
+ 2025-03-09 20:33:32,727 - train - INFO - Epoch: 76 | Test Loss: 1.101 | Test Acc: 63.51%
383
+ 2025-03-09 20:33:41,051 - train - INFO - Epoch: 77 | Batch: 0 | Loss: 1.110 | Acc: 59.38%
384
+ 2025-03-09 20:33:42,990 - train - INFO - Epoch: 77 | Batch: 100 | Loss: 1.108 | Acc: 62.87%
385
+ 2025-03-09 20:33:44,886 - train - INFO - Epoch: 77 | Batch: 200 | Loss: 1.115 | Acc: 62.29%
386
+ 2025-03-09 20:33:46,815 - train - INFO - Epoch: 77 | Batch: 300 | Loss: 1.119 | Acc: 62.45%
387
+ 2025-03-09 20:33:49,764 - train - INFO - Epoch: 77 | Test Loss: 1.099 | Test Acc: 63.53%
388
+ 2025-03-09 20:33:49,916 - train - INFO - Epoch: 78 | Batch: 0 | Loss: 1.111 | Acc: 64.84%
389
+ 2025-03-09 20:33:51,904 - train - INFO - Epoch: 78 | Batch: 100 | Loss: 1.127 | Acc: 62.42%
390
+ 2025-03-09 20:33:53,862 - train - INFO - Epoch: 78 | Batch: 200 | Loss: 1.127 | Acc: 62.17%
391
+ 2025-03-09 20:33:55,835 - train - INFO - Epoch: 78 | Batch: 300 | Loss: 1.125 | Acc: 62.13%
392
+ 2025-03-09 20:33:58,919 - train - INFO - Epoch: 78 | Test Loss: 1.075 | Test Acc: 63.41%
393
+ 2025-03-09 20:33:59,092 - train - INFO - Epoch: 79 | Batch: 0 | Loss: 0.990 | Acc: 67.19%
394
+ 2025-03-09 20:34:01,066 - train - INFO - Epoch: 79 | Batch: 100 | Loss: 1.107 | Acc: 62.40%
395
+ 2025-03-09 20:34:03,169 - train - INFO - Epoch: 79 | Batch: 200 | Loss: 1.105 | Acc: 62.84%
396
+ 2025-03-09 20:34:05,127 - train - INFO - Epoch: 79 | Batch: 300 | Loss: 1.102 | Acc: 63.05%
397
+ 2025-03-09 20:34:08,053 - train - INFO - Epoch: 79 | Test Loss: 1.190 | Test Acc: 60.83%
398
+ 2025-03-09 20:34:08,244 - train - INFO - Epoch: 80 | Batch: 0 | Loss: 1.227 | Acc: 60.16%
399
+ 2025-03-09 20:34:10,084 - train - INFO - Epoch: 80 | Batch: 100 | Loss: 1.120 | Acc: 62.61%
400
+ 2025-03-09 20:34:11,957 - train - INFO - Epoch: 80 | Batch: 200 | Loss: 1.102 | Acc: 62.97%
401
+ 2025-03-09 20:34:13,864 - train - INFO - Epoch: 80 | Batch: 300 | Loss: 1.109 | Acc: 62.86%
402
+ 2025-03-09 20:34:16,933 - train - INFO - Epoch: 80 | Test Loss: 1.065 | Test Acc: 64.60%
403
+ 2025-03-09 20:34:25,319 - train - INFO - Epoch: 81 | Batch: 0 | Loss: 1.093 | Acc: 60.16%
404
+ 2025-03-09 20:34:27,214 - train - INFO - Epoch: 81 | Batch: 100 | Loss: 1.133 | Acc: 62.27%
405
+ 2025-03-09 20:34:29,084 - train - INFO - Epoch: 81 | Batch: 200 | Loss: 1.107 | Acc: 63.35%
406
+ 2025-03-09 20:34:31,102 - train - INFO - Epoch: 81 | Batch: 300 | Loss: 1.102 | Acc: 63.25%
407
+ 2025-03-09 20:34:34,224 - train - INFO - Epoch: 81 | Test Loss: 1.085 | Test Acc: 63.63%
408
+ 2025-03-09 20:34:34,386 - train - INFO - Epoch: 82 | Batch: 0 | Loss: 1.183 | Acc: 58.59%
409
+ 2025-03-09 20:34:36,276 - train - INFO - Epoch: 82 | Batch: 100 | Loss: 1.093 | Acc: 63.10%
410
+ 2025-03-09 20:34:38,370 - train - INFO - Epoch: 82 | Batch: 200 | Loss: 1.084 | Acc: 63.45%
411
+ 2025-03-09 20:34:40,329 - train - INFO - Epoch: 82 | Batch: 300 | Loss: 1.083 | Acc: 63.66%
412
+ 2025-03-09 20:34:43,386 - train - INFO - Epoch: 82 | Test Loss: 1.124 | Test Acc: 62.13%
413
+ 2025-03-09 20:34:43,538 - train - INFO - Epoch: 83 | Batch: 0 | Loss: 1.178 | Acc: 57.81%
414
+ 2025-03-09 20:34:45,536 - train - INFO - Epoch: 83 | Batch: 100 | Loss: 1.103 | Acc: 63.26%
415
+ 2025-03-09 20:34:47,538 - train - INFO - Epoch: 83 | Batch: 200 | Loss: 1.090 | Acc: 63.46%
416
+ 2025-03-09 20:34:49,608 - train - INFO - Epoch: 83 | Batch: 300 | Loss: 1.091 | Acc: 63.37%
417
+ 2025-03-09 20:34:52,626 - train - INFO - Epoch: 83 | Test Loss: 1.165 | Test Acc: 62.11%
418
+ 2025-03-09 20:34:52,802 - train - INFO - Epoch: 84 | Batch: 0 | Loss: 1.103 | Acc: 67.19%
419
+ 2025-03-09 20:34:54,966 - train - INFO - Epoch: 84 | Batch: 100 | Loss: 1.115 | Acc: 62.58%
420
+ 2025-03-09 20:34:57,031 - train - INFO - Epoch: 84 | Batch: 200 | Loss: 1.091 | Acc: 63.27%
421
+ 2025-03-09 20:34:59,134 - train - INFO - Epoch: 84 | Batch: 300 | Loss: 1.097 | Acc: 63.19%
422
+ 2025-03-09 20:35:02,352 - train - INFO - Epoch: 84 | Test Loss: 1.085 | Test Acc: 63.95%
423
+ 2025-03-09 20:35:10,685 - train - INFO - Epoch: 85 | Batch: 0 | Loss: 1.311 | Acc: 59.38%
424
+ 2025-03-09 20:35:12,612 - train - INFO - Epoch: 85 | Batch: 100 | Loss: 1.084 | Acc: 63.76%
425
+ 2025-03-09 20:35:14,479 - train - INFO - Epoch: 85 | Batch: 200 | Loss: 1.078 | Acc: 64.00%
426
+ 2025-03-09 20:35:16,456 - train - INFO - Epoch: 85 | Batch: 300 | Loss: 1.092 | Acc: 63.53%
427
+ 2025-03-09 20:35:19,502 - train - INFO - Epoch: 85 | Test Loss: 1.063 | Test Acc: 63.69%
428
+ 2025-03-09 20:35:19,675 - train - INFO - Epoch: 86 | Batch: 0 | Loss: 0.918 | Acc: 71.88%
429
+ 2025-03-09 20:35:21,627 - train - INFO - Epoch: 86 | Batch: 100 | Loss: 1.078 | Acc: 63.58%
430
+ 2025-03-09 20:35:23,603 - train - INFO - Epoch: 86 | Batch: 200 | Loss: 1.080 | Acc: 63.76%
431
+ 2025-03-09 20:35:25,467 - train - INFO - Epoch: 86 | Batch: 300 | Loss: 1.081 | Acc: 63.70%
432
+ 2025-03-09 20:35:28,448 - train - INFO - Epoch: 86 | Test Loss: 1.040 | Test Acc: 65.00%
433
+ 2025-03-09 20:35:28,618 - train - INFO - Epoch: 87 | Batch: 0 | Loss: 1.037 | Acc: 64.06%
434
+ 2025-03-09 20:35:30,607 - train - INFO - Epoch: 87 | Batch: 100 | Loss: 1.045 | Acc: 64.91%
435
+ 2025-03-09 20:35:32,555 - train - INFO - Epoch: 87 | Batch: 200 | Loss: 1.054 | Acc: 64.58%
436
+ 2025-03-09 20:35:34,461 - train - INFO - Epoch: 87 | Batch: 300 | Loss: 1.055 | Acc: 64.54%
437
+ 2025-03-09 20:35:37,534 - train - INFO - Epoch: 87 | Test Loss: 1.117 | Test Acc: 63.18%
438
+ 2025-03-09 20:35:37,728 - train - INFO - Epoch: 88 | Batch: 0 | Loss: 1.114 | Acc: 64.06%
439
+ 2025-03-09 20:35:39,667 - train - INFO - Epoch: 88 | Batch: 100 | Loss: 1.081 | Acc: 63.65%
440
+ 2025-03-09 20:35:41,724 - train - INFO - Epoch: 88 | Batch: 200 | Loss: 1.089 | Acc: 63.46%
441
+ 2025-03-09 20:35:43,693 - train - INFO - Epoch: 88 | Batch: 300 | Loss: 1.080 | Acc: 63.89%
442
+ 2025-03-09 20:35:46,704 - train - INFO - Epoch: 88 | Test Loss: 1.116 | Test Acc: 62.02%
443
+ 2025-03-09 20:35:55,157 - train - INFO - Epoch: 89 | Batch: 0 | Loss: 1.281 | Acc: 59.38%
444
+ 2025-03-09 20:35:57,082 - train - INFO - Epoch: 89 | Batch: 100 | Loss: 1.027 | Acc: 65.76%
445
+ 2025-03-09 20:35:59,072 - train - INFO - Epoch: 89 | Batch: 200 | Loss: 1.056 | Acc: 64.67%
446
+ 2025-03-09 20:36:00,922 - train - INFO - Epoch: 89 | Batch: 300 | Loss: 1.067 | Acc: 64.10%
447
+ 2025-03-09 20:36:03,799 - train - INFO - Epoch: 89 | Test Loss: 1.044 | Test Acc: 64.42%
448
+ 2025-03-09 20:36:03,955 - train - INFO - Epoch: 90 | Batch: 0 | Loss: 1.228 | Acc: 53.91%
449
+ 2025-03-09 20:36:05,961 - train - INFO - Epoch: 90 | Batch: 100 | Loss: 1.043 | Acc: 64.84%
450
+ 2025-03-09 20:36:48,800 - train - INFO - Epoch: 90 | Batch: 200 | Loss: 1.040 | Acc: 64.89%
451
+ 2025-03-09 20:36:50,792 - train - INFO - Epoch: 90 | Batch: 300 | Loss: 1.049 | Acc: 64.79%
452
+ 2025-03-09 20:36:53,914 - train - INFO - Epoch: 90 | Test Loss: 1.150 | Test Acc: 61.58%
453
+ 2025-03-09 20:36:54,094 - train - INFO - Epoch: 91 | Batch: 0 | Loss: 1.127 | Acc: 59.38%
454
+ 2025-03-09 20:36:56,089 - train - INFO - Epoch: 91 | Batch: 100 | Loss: 1.053 | Acc: 64.09%
455
+ 2025-03-09 20:36:58,254 - train - INFO - Epoch: 91 | Batch: 200 | Loss: 1.043 | Acc: 64.86%
456
+ 2025-03-09 20:37:00,272 - train - INFO - Epoch: 91 | Batch: 300 | Loss: 1.049 | Acc: 64.74%
457
+ 2025-03-09 20:37:50,812 - train - INFO - Epoch: 91 | Test Loss: 1.092 | Test Acc: 63.34%
458
+ 2025-03-09 20:37:50,972 - train - INFO - Epoch: 92 | Batch: 0 | Loss: 1.025 | Acc: 66.41%
459
+ 2025-03-09 20:37:52,835 - train - INFO - Epoch: 92 | Batch: 100 | Loss: 1.039 | Acc: 64.90%
460
+ 2025-03-09 20:37:54,790 - train - INFO - Epoch: 92 | Batch: 200 | Loss: 1.041 | Acc: 64.68%
461
+ 2025-03-09 20:37:56,666 - train - INFO - Epoch: 92 | Batch: 300 | Loss: 1.037 | Acc: 64.84%
462
+ 2025-03-09 20:37:59,681 - train - INFO - Epoch: 92 | Test Loss: 1.112 | Test Acc: 62.78%
463
+ 2025-03-09 20:38:53,132 - train - INFO - Epoch: 93 | Batch: 0 | Loss: 1.193 | Acc: 56.25%
464
+ 2025-03-09 20:38:55,019 - train - INFO - Epoch: 93 | Batch: 100 | Loss: 1.057 | Acc: 64.53%
465
+ 2025-03-09 20:38:56,952 - train - INFO - Epoch: 93 | Batch: 200 | Loss: 1.046 | Acc: 65.06%
466
+ 2025-03-09 20:38:58,885 - train - INFO - Epoch: 93 | Batch: 300 | Loss: 1.044 | Acc: 65.08%
467
+ 2025-03-09 20:39:02,028 - train - INFO - Epoch: 93 | Test Loss: 1.024 | Test Acc: 65.24%
468
+ 2025-03-09 20:39:02,185 - train - INFO - Epoch: 94 | Batch: 0 | Loss: 1.010 | Acc: 64.06%
469
+ 2025-03-09 20:39:04,323 - train - INFO - Epoch: 94 | Batch: 100 | Loss: 1.023 | Acc: 65.28%
470
+ 2025-03-09 20:39:49,435 - train - INFO - Epoch: 94 | Batch: 200 | Loss: 1.023 | Acc: 65.44%
471
+ 2025-03-09 20:39:51,309 - train - INFO - Epoch: 94 | Batch: 300 | Loss: 1.027 | Acc: 65.52%
472
+ 2025-03-09 20:39:54,281 - train - INFO - Epoch: 94 | Test Loss: 1.013 | Test Acc: 67.01%
473
+ 2025-03-09 20:39:54,472 - train - INFO - Epoch: 95 | Batch: 0 | Loss: 1.236 | Acc: 59.38%
474
+ 2025-03-09 20:39:56,452 - train - INFO - Epoch: 95 | Batch: 100 | Loss: 1.011 | Acc: 65.98%
475
+ 2025-03-09 20:39:58,416 - train - INFO - Epoch: 95 | Batch: 200 | Loss: 1.012 | Acc: 65.91%
476
+ 2025-03-09 20:40:48,850 - train - INFO - Epoch: 95 | Batch: 300 | Loss: 1.019 | Acc: 65.65%
477
+ 2025-03-09 20:40:51,907 - train - INFO - Epoch: 95 | Test Loss: 0.994 | Test Acc: 66.97%
478
+ 2025-03-09 20:40:52,057 - train - INFO - Epoch: 96 | Batch: 0 | Loss: 0.973 | Acc: 67.97%
479
+ 2025-03-09 20:40:54,040 - train - INFO - Epoch: 96 | Batch: 100 | Loss: 1.040 | Acc: 65.18%
480
+ 2025-03-09 20:40:56,141 - train - INFO - Epoch: 96 | Batch: 200 | Loss: 1.035 | Acc: 64.95%
481
+ 2025-03-09 20:40:58,151 - train - INFO - Epoch: 96 | Batch: 300 | Loss: 1.021 | Acc: 65.43%
482
+ 2025-03-09 20:41:48,845 - train - INFO - Epoch: 96 | Test Loss: 1.065 | Test Acc: 64.38%
483
+ 2025-03-09 20:41:57,437 - train - INFO - Epoch: 97 | Batch: 0 | Loss: 0.995 | Acc: 66.41%
484
+ 2025-03-09 20:41:59,481 - train - INFO - Epoch: 97 | Batch: 100 | Loss: 1.012 | Acc: 66.12%
485
+ 2025-03-09 20:42:01,488 - train - INFO - Epoch: 97 | Batch: 200 | Loss: 0.995 | Acc: 66.64%
486
+ 2025-03-09 20:42:03,533 - train - INFO - Epoch: 97 | Batch: 300 | Loss: 1.014 | Acc: 66.01%
487
+ 2025-03-09 20:42:06,717 - train - INFO - Epoch: 97 | Test Loss: 1.043 | Test Acc: 64.98%
488
+ 2025-03-09 20:42:06,871 - train - INFO - Epoch: 98 | Batch: 0 | Loss: 1.299 | Acc: 58.59%
489
+ 2025-03-09 20:42:09,116 - train - INFO - Epoch: 98 | Batch: 100 | Loss: 0.971 | Acc: 67.10%
490
+ 2025-03-09 20:42:50,612 - train - INFO - Epoch: 98 | Batch: 200 | Loss: 0.997 | Acc: 66.37%
491
+ 2025-03-09 20:42:52,541 - train - INFO - Epoch: 98 | Batch: 300 | Loss: 1.001 | Acc: 66.31%
492
+ 2025-03-09 20:42:55,458 - train - INFO - Epoch: 98 | Test Loss: 1.017 | Test Acc: 65.77%
493
+ 2025-03-09 20:42:55,635 - train - INFO - Epoch: 99 | Batch: 0 | Loss: 1.074 | Acc: 66.41%
494
+ 2025-03-09 20:42:57,563 - train - INFO - Epoch: 99 | Batch: 100 | Loss: 1.027 | Acc: 65.38%
495
+ 2025-03-09 20:42:59,650 - train - INFO - Epoch: 99 | Batch: 200 | Loss: 1.013 | Acc: 66.04%
496
+ 2025-03-09 20:43:49,927 - train - INFO - Epoch: 99 | Batch: 300 | Loss: 1.010 | Acc: 66.04%
497
+ 2025-03-09 20:43:53,133 - train - INFO - Epoch: 99 | Test Loss: 1.013 | Test Acc: 66.35%
498
+ 2025-03-09 20:43:53,291 - train - INFO - Epoch: 100 | Batch: 0 | Loss: 0.778 | Acc: 76.56%
499
+ 2025-03-09 20:43:55,285 - train - INFO - Epoch: 100 | Batch: 100 | Loss: 0.999 | Acc: 66.10%
500
+ 2025-03-09 20:43:57,360 - train - INFO - Epoch: 100 | Batch: 200 | Loss: 0.995 | Acc: 66.41%
501
+ 2025-03-09 20:43:59,416 - train - INFO - Epoch: 100 | Batch: 300 | Loss: 0.994 | Acc: 66.56%
502
+ 2025-03-09 20:44:49,933 - train - INFO - Epoch: 100 | Test Loss: 1.009 | Test Acc: 65.66%
503
+ 2025-03-09 20:44:58,423 - train - INFO - Training complete!
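backdoor_train.log (and train.log further below) follow a fixed line format: per-batch lines with running loss and accuracy, plus one test line per epoch. A minimal parsing sketch, assuming the format stays exactly as shown above, for pulling out the per-epoch test accuracy:

# Minimal log-parsing sketch. Assumes test lines look exactly like
# "... - train - INFO - Epoch: N | Test Loss: X | Test Acc: Y%".
import re

TEST_LINE = re.compile(r"Epoch: (\d+) \| Test Loss: ([\d.]+) \| Test Acc: ([\d.]+)%")

def test_accuracy_by_epoch(log_path):
    """Return {epoch: test accuracy in percent} for one training log."""
    acc = {}
    with open(log_path, encoding="utf-8") as f:
        for line in f:
            match = TEST_LINE.search(line)
            if match:
                acc[int(match.group(1))] = float(match.group(3))
    return acc

# Example with a path from this commit:
# acc = test_accuracy_by_epoch("Image/AlexNet/code/backdoor_train.log")
# best_epoch = max(acc, key=acc.get)  # compare against the final 65.66% at epoch 100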
Image/AlexNet/code/model.py ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ '''
2
+ AlexNet in Pytorch
3
+ '''
4
+
5
+ import torch
6
+ import torch.nn as nn
7
+
8
+ class AlexNet(nn.Module): # 训练 ALexNet
9
+ '''
10
+ AlexNet模型
11
+ '''
12
+ def __init__(self,num_classes=10):
13
+ super(AlexNet,self).__init__()
14
+ # 五个卷积层 输入 32 * 32 * 3
15
+ self.conv1 = nn.Sequential(
16
+ nn.Conv2d(in_channels=3, out_channels=6, kernel_size=3, stride=1, padding=1), # (32-3+2)/1+1 = 32
17
+ nn.ReLU(),
18
+ nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (32-2)/2+1 = 16
19
+ )
20
+ self.conv2 = nn.Sequential( # 输入 16 * 16 * 6
21
+ nn.Conv2d(in_channels=6, out_channels=16, kernel_size=3, stride=1, padding=1), # (16-3+2)/1+1 = 16
22
+ nn.ReLU(),
23
+ nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (16-2)/2+1 = 8
24
+ )
25
+ self.conv3 = nn.Sequential( # 输入 8 * 8 * 16
26
+ nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, stride=1, padding=1), # (8-3+2)/1+1 = 8
27
+ nn.ReLU(),
28
+ nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (8-2)/2+1 = 4
29
+ )
30
+ self.conv4 = nn.Sequential( # 输入 4 * 4 * 64
31
+ nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, stride=1, padding=1), # (4-3+2)/1+1 = 4
32
+ nn.ReLU(),
33
+ nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (4-2)/2+1 = 2
34
+ )
35
+ self.conv5 = nn.Sequential( # input 2 * 2 * 64
36
+ nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, stride=1, padding=1),# (2-3+2)/1+1 = 2
37
+ nn.ReLU(),
38
+ nn.MaxPool2d(kernel_size=2, stride=2, padding=0) # (2-2)/2+1 = 1
39
+ ) # last convolutional block, output 1 * 1 * 128
40
+ # fully connected layers
41
+ self.dense = nn.Sequential(
42
+ nn.Linear(128,120),
43
+ nn.ReLU(),
44
+ nn.Linear(120,84),
45
+ nn.ReLU(),
46
+ nn.Linear(84,num_classes)
47
+ )
48
+
49
+ # initialize weights
50
+ self._initialize_weights()
51
+
52
+ def forward(self,x):
53
+ x = self.conv1(x)
54
+ x = self.conv2(x)
55
+ x = self.conv3(x)
56
+ x = self.conv4(x)
57
+ x = self.conv5(x)
58
+ x = x.view(x.size()[0],-1)
59
+ x = self.dense(x)
60
+ return x
61
+
62
+ def _initialize_weights(self):
63
+ for m in self.modules():
64
+ if isinstance(m, nn.Conv2d):
65
+ nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
66
+ if m.bias is not None:
67
+ nn.init.constant_(m.bias, 0)
68
+ elif isinstance(m, nn.Linear):
69
+ nn.init.normal_(m.weight, 0, 0.01)
70
+ if m.bias is not None:
71
+ nn.init.constant_(m.bias, 0)
72
+
73
+ def test():
74
+ net = AlexNet()
75
+ x = torch.randn(2,3,32,32)
76
+ y = net(x)
77
+ print(y.size())
78
+ from torchinfo import summary
79
+ device = 'cuda' if torch.cuda.is_available() else 'cpu'
80
+ net = net.to(device)
81
+ summary(net, input_size=(1, 3, 32, 32)) # torchinfo expects the batch dimension in input_size
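
Note: train.py below passes layer_name='conv3.2' to the training utilities, and each per-epoch directory further down stores an embeddings.npy of 102,400,128 bytes, which matches 50,000 CIFAR-10 training images x 512 float32 features (the flattened 32 x 4 x 4 output of conv3's pooling layer) plus the .npy header. The following is a minimal sketch, not the repository's train_utils implementation, of how such embeddings could be captured with a forward hook; the loader argument and the save path are placeholders.

import numpy as np
import torch

from model import AlexNet

def extract_embeddings(model, loader, layer_name='conv3.2', device='cpu'):
    """Collect the flattened activations of `layer_name` for every sample in `loader`."""
    feats = []
    layer = dict(model.named_modules())[layer_name]  # 'conv3.2' is the MaxPool2d inside conv3
    hook = layer.register_forward_hook(
        lambda module, inputs, output: feats.append(output.flatten(1).detach().cpu())
    )
    model.to(device).eval()
    with torch.no_grad():
        for images, _ in loader:
            model(images.to(device))
    hook.remove()
    return torch.cat(feats).numpy()  # shape (N, 512) for 32x32 inputs

# Hypothetical usage (trainloader as in train.py below):
# np.save('embeddings.npy', extract_embeddings(AlexNet(), trainloader, device='cuda:2'))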
Image/AlexNet/code/train.log ADDED
@@ -0,0 +1,503 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-03-09 19:50:57,307 - train - INFO - Start training alexnet
2
+ 2025-03-09 19:50:57,308 - train - INFO - Total epochs: 100, learning rate: 0.1, device: cuda:2
3
+ 2025-03-09 19:50:57,941 - train - INFO - Epoch: 1 | Batch: 0 | Loss: 2.303 | Acc: 9.38%
4
+ 2025-03-09 19:50:59,844 - train - INFO - Epoch: 1 | Batch: 100 | Loss: 2.300 | Acc: 10.04%
5
+ 2025-03-09 19:51:01,901 - train - INFO - Epoch: 1 | Batch: 200 | Loss: 2.227 | Acc: 13.40%
6
+ 2025-03-09 19:51:03,899 - train - INFO - Epoch: 1 | Batch: 300 | Loss: 2.162 | Acc: 15.09%
7
+ 2025-03-09 19:51:07,107 - train - INFO - Epoch: 1 | Test Loss: 1.896 | Test Acc: 19.86%
8
+ 2025-03-09 19:51:07,264 - train - INFO - Epoch: 2 | Batch: 0 | Loss: 1.888 | Acc: 16.41%
9
+ 2025-03-09 19:51:09,345 - train - INFO - Epoch: 2 | Batch: 100 | Loss: 1.929 | Acc: 21.02%
10
+ 2025-03-09 19:51:11,452 - train - INFO - Epoch: 2 | Batch: 200 | Loss: 1.917 | Acc: 21.42%
11
+ 2025-03-09 19:51:13,434 - train - INFO - Epoch: 2 | Batch: 300 | Loss: 1.911 | Acc: 21.87%
12
+ 2025-03-09 19:51:16,541 - train - INFO - Epoch: 2 | Test Loss: 1.840 | Test Acc: 25.23%
13
+ 2025-03-09 19:51:16,704 - train - INFO - Epoch: 3 | Batch: 0 | Loss: 1.938 | Acc: 17.97%
14
+ 2025-03-09 19:51:18,746 - train - INFO - Epoch: 3 | Batch: 100 | Loss: 1.837 | Acc: 26.49%
15
+ 2025-03-09 19:51:20,674 - train - INFO - Epoch: 3 | Batch: 200 | Loss: 1.812 | Acc: 27.79%
16
+ 2025-03-09 19:51:22,577 - train - INFO - Epoch: 3 | Batch: 300 | Loss: 1.798 | Acc: 28.75%
17
+ 2025-03-09 19:51:25,741 - train - INFO - Epoch: 3 | Test Loss: 1.613 | Test Acc: 38.13%
18
+ 2025-03-09 19:51:25,920 - train - INFO - Epoch: 4 | Batch: 0 | Loss: 1.613 | Acc: 32.81%
19
+ 2025-03-09 19:51:28,016 - train - INFO - Epoch: 4 | Batch: 100 | Loss: 1.683 | Acc: 34.67%
20
+ 2025-03-09 19:51:30,038 - train - INFO - Epoch: 4 | Batch: 200 | Loss: 1.668 | Acc: 35.46%
21
+ 2025-03-09 19:51:32,131 - train - INFO - Epoch: 4 | Batch: 300 | Loss: 1.675 | Acc: 36.00%
22
+ 2025-03-09 19:51:35,398 - train - INFO - Epoch: 4 | Test Loss: 1.624 | Test Acc: 39.42%
23
+ 2025-03-09 19:51:43,699 - train - INFO - Epoch: 5 | Batch: 0 | Loss: 1.684 | Acc: 39.06%
24
+ 2025-03-09 19:51:45,670 - train - INFO - Epoch: 5 | Batch: 100 | Loss: 1.638 | Acc: 39.09%
25
+ 2025-03-09 19:51:47,561 - train - INFO - Epoch: 5 | Batch: 200 | Loss: 1.623 | Acc: 39.87%
26
+ 2025-03-09 19:51:49,399 - train - INFO - Epoch: 5 | Batch: 300 | Loss: 1.614 | Acc: 40.60%
27
+ 2025-03-09 19:51:52,227 - train - INFO - Epoch: 5 | Test Loss: 1.487 | Test Acc: 45.98%
28
+ 2025-03-09 19:51:52,392 - train - INFO - Epoch: 6 | Batch: 0 | Loss: 1.565 | Acc: 45.31%
29
+ 2025-03-09 19:51:54,259 - train - INFO - Epoch: 6 | Batch: 100 | Loss: 1.550 | Acc: 44.09%
30
+ 2025-03-09 19:51:56,191 - train - INFO - Epoch: 6 | Batch: 200 | Loss: 1.551 | Acc: 44.07%
31
+ 2025-03-09 19:51:58,102 - train - INFO - Epoch: 6 | Batch: 300 | Loss: 1.552 | Acc: 44.16%
32
+ 2025-03-09 19:52:00,982 - train - INFO - Epoch: 6 | Test Loss: 1.498 | Test Acc: 46.14%
33
+ 2025-03-09 19:52:01,147 - train - INFO - Epoch: 7 | Batch: 0 | Loss: 1.703 | Acc: 35.94%
34
+ 2025-03-09 19:52:03,112 - train - INFO - Epoch: 7 | Batch: 100 | Loss: 1.549 | Acc: 44.65%
35
+ 2025-03-09 19:52:05,091 - train - INFO - Epoch: 7 | Batch: 200 | Loss: 1.558 | Acc: 44.57%
36
+ 2025-03-09 19:52:07,124 - train - INFO - Epoch: 7 | Batch: 300 | Loss: 1.546 | Acc: 44.95%
37
+ 2025-03-09 19:52:10,122 - train - INFO - Epoch: 7 | Test Loss: 1.463 | Test Acc: 48.95%
38
+ 2025-03-09 19:52:10,282 - train - INFO - Epoch: 8 | Batch: 0 | Loss: 1.559 | Acc: 46.88%
39
+ 2025-03-09 19:52:12,269 - train - INFO - Epoch: 8 | Batch: 100 | Loss: 1.522 | Acc: 45.61%
40
+ 2025-03-09 19:52:14,191 - train - INFO - Epoch: 8 | Batch: 200 | Loss: 1.515 | Acc: 46.19%
41
+ 2025-03-09 19:52:16,026 - train - INFO - Epoch: 8 | Batch: 300 | Loss: 1.512 | Acc: 46.34%
42
+ 2025-03-09 19:52:18,972 - train - INFO - Epoch: 8 | Test Loss: 1.414 | Test Acc: 51.07%
43
+ 2025-03-09 19:52:27,269 - train - INFO - Epoch: 9 | Batch: 0 | Loss: 1.405 | Acc: 50.00%
44
+ 2025-03-09 19:52:29,140 - train - INFO - Epoch: 9 | Batch: 100 | Loss: 1.506 | Acc: 46.11%
45
+ 2025-03-09 19:52:31,111 - train - INFO - Epoch: 9 | Batch: 200 | Loss: 1.495 | Acc: 46.85%
46
+ 2025-03-09 19:52:33,061 - train - INFO - Epoch: 9 | Batch: 300 | Loss: 1.500 | Acc: 46.91%
47
+ 2025-03-09 19:52:36,118 - train - INFO - Epoch: 9 | Test Loss: 1.540 | Test Acc: 46.20%
48
+ 2025-03-09 19:52:36,307 - train - INFO - Epoch: 10 | Batch: 0 | Loss: 1.710 | Acc: 42.97%
49
+ 2025-03-09 19:52:38,571 - train - INFO - Epoch: 10 | Batch: 100 | Loss: 1.515 | Acc: 47.08%
50
+ 2025-03-09 19:52:40,615 - train - INFO - Epoch: 10 | Batch: 200 | Loss: 1.506 | Acc: 47.12%
51
+ 2025-03-09 19:52:42,631 - train - INFO - Epoch: 10 | Batch: 300 | Loss: 1.498 | Acc: 47.72%
52
+ 2025-03-09 19:52:45,697 - train - INFO - Epoch: 10 | Test Loss: 1.359 | Test Acc: 51.08%
53
+ 2025-03-09 19:52:45,908 - train - INFO - Epoch: 11 | Batch: 0 | Loss: 1.288 | Acc: 48.44%
54
+ 2025-03-09 19:52:48,156 - train - INFO - Epoch: 11 | Batch: 100 | Loss: 1.493 | Acc: 47.49%
55
+ 2025-03-09 19:52:50,122 - train - INFO - Epoch: 11 | Batch: 200 | Loss: 1.483 | Acc: 47.77%
56
+ 2025-03-09 19:52:52,086 - train - INFO - Epoch: 11 | Batch: 300 | Loss: 1.482 | Acc: 47.71%
57
+ 2025-03-09 19:52:55,093 - train - INFO - Epoch: 11 | Test Loss: 1.425 | Test Acc: 49.77%
58
+ 2025-03-09 19:52:55,255 - train - INFO - Epoch: 12 | Batch: 0 | Loss: 1.601 | Acc: 38.28%
59
+ 2025-03-09 19:52:57,285 - train - INFO - Epoch: 12 | Batch: 100 | Loss: 1.495 | Acc: 47.03%
60
+ 2025-03-09 19:52:59,361 - train - INFO - Epoch: 12 | Batch: 200 | Loss: 1.484 | Acc: 47.75%
61
+ 2025-03-09 19:53:01,286 - train - INFO - Epoch: 12 | Batch: 300 | Loss: 1.474 | Acc: 48.21%
62
+ 2025-03-09 19:53:04,233 - train - INFO - Epoch: 12 | Test Loss: 1.395 | Test Acc: 50.04%
63
+ 2025-03-09 19:53:13,654 - train - INFO - Epoch: 13 | Batch: 0 | Loss: 1.349 | Acc: 54.69%
64
+ 2025-03-09 19:53:16,010 - train - INFO - Epoch: 13 | Batch: 100 | Loss: 1.476 | Acc: 47.81%
65
+ 2025-03-09 19:53:18,014 - train - INFO - Epoch: 13 | Batch: 200 | Loss: 1.459 | Acc: 48.68%
66
+ 2025-03-09 19:53:19,910 - train - INFO - Epoch: 13 | Batch: 300 | Loss: 1.463 | Acc: 48.69%
67
+ 2025-03-09 19:53:22,970 - train - INFO - Epoch: 13 | Test Loss: 1.433 | Test Acc: 50.43%
68
+ 2025-03-09 19:53:23,123 - train - INFO - Epoch: 14 | Batch: 0 | Loss: 1.438 | Acc: 48.44%
69
+ 2025-03-09 19:53:25,027 - train - INFO - Epoch: 14 | Batch: 100 | Loss: 1.490 | Acc: 47.90%
70
+ 2025-03-09 19:53:27,032 - train - INFO - Epoch: 14 | Batch: 200 | Loss: 1.478 | Acc: 48.26%
71
+ 2025-03-09 19:53:28,990 - train - INFO - Epoch: 14 | Batch: 300 | Loss: 1.473 | Acc: 48.42%
72
+ 2025-03-09 19:53:31,930 - train - INFO - Epoch: 14 | Test Loss: 1.419 | Test Acc: 50.80%
73
+ 2025-03-09 19:53:32,132 - train - INFO - Epoch: 15 | Batch: 0 | Loss: 1.496 | Acc: 49.22%
74
+ 2025-03-09 19:53:34,072 - train - INFO - Epoch: 15 | Batch: 100 | Loss: 1.468 | Acc: 49.19%
75
+ 2025-03-09 19:53:36,080 - train - INFO - Epoch: 15 | Batch: 200 | Loss: 1.473 | Acc: 49.07%
76
+ 2025-03-09 19:53:38,079 - train - INFO - Epoch: 15 | Batch: 300 | Loss: 1.468 | Acc: 49.20%
77
+ 2025-03-09 19:53:41,161 - train - INFO - Epoch: 15 | Test Loss: 1.424 | Test Acc: 52.09%
78
+ 2025-03-09 19:53:41,356 - train - INFO - Epoch: 16 | Batch: 0 | Loss: 1.368 | Acc: 46.88%
79
+ 2025-03-09 19:53:43,498 - train - INFO - Epoch: 16 | Batch: 100 | Loss: 1.485 | Acc: 48.47%
80
+ 2025-03-09 19:53:45,625 - train - INFO - Epoch: 16 | Batch: 200 | Loss: 1.475 | Acc: 48.40%
81
+ 2025-03-09 19:53:47,626 - train - INFO - Epoch: 16 | Batch: 300 | Loss: 1.468 | Acc: 48.62%
82
+ 2025-03-09 19:53:50,536 - train - INFO - Epoch: 16 | Test Loss: 1.368 | Test Acc: 54.06%
83
+ 2025-03-09 19:53:58,981 - train - INFO - Epoch: 17 | Batch: 0 | Loss: 1.322 | Acc: 48.44%
84
+ 2025-03-09 19:54:01,050 - train - INFO - Epoch: 17 | Batch: 100 | Loss: 1.453 | Acc: 50.21%
85
+ 2025-03-09 19:54:03,003 - train - INFO - Epoch: 17 | Batch: 200 | Loss: 1.445 | Acc: 50.24%
86
+ 2025-03-09 19:54:05,016 - train - INFO - Epoch: 17 | Batch: 300 | Loss: 1.442 | Acc: 50.14%
87
+ 2025-03-09 19:54:08,407 - train - INFO - Epoch: 17 | Test Loss: 1.427 | Test Acc: 50.52%
88
+ 2025-03-09 19:54:08,577 - train - INFO - Epoch: 18 | Batch: 0 | Loss: 1.677 | Acc: 39.84%
89
+ 2025-03-09 19:54:11,118 - train - INFO - Epoch: 18 | Batch: 100 | Loss: 1.466 | Acc: 49.07%
90
+ 2025-03-09 19:54:13,136 - train - INFO - Epoch: 18 | Batch: 200 | Loss: 1.459 | Acc: 49.04%
91
+ 2025-03-09 19:54:15,032 - train - INFO - Epoch: 18 | Batch: 300 | Loss: 1.450 | Acc: 49.59%
92
+ 2025-03-09 19:54:18,113 - train - INFO - Epoch: 18 | Test Loss: 1.461 | Test Acc: 51.56%
93
+ 2025-03-09 19:54:18,278 - train - INFO - Epoch: 19 | Batch: 0 | Loss: 1.473 | Acc: 53.91%
94
+ 2025-03-09 19:54:20,263 - train - INFO - Epoch: 19 | Batch: 100 | Loss: 1.457 | Acc: 49.16%
95
+ 2025-03-09 19:54:22,414 - train - INFO - Epoch: 19 | Batch: 200 | Loss: 1.433 | Acc: 50.05%
96
+ 2025-03-09 19:54:24,518 - train - INFO - Epoch: 19 | Batch: 300 | Loss: 1.427 | Acc: 50.53%
97
+ 2025-03-09 19:54:27,682 - train - INFO - Epoch: 19 | Test Loss: 1.490 | Test Acc: 53.65%
98
+ 2025-03-09 19:54:27,849 - train - INFO - Epoch: 20 | Batch: 0 | Loss: 1.817 | Acc: 46.09%
99
+ 2025-03-09 19:54:29,974 - train - INFO - Epoch: 20 | Batch: 100 | Loss: 1.438 | Acc: 50.66%
100
+ 2025-03-09 19:54:32,009 - train - INFO - Epoch: 20 | Batch: 200 | Loss: 1.428 | Acc: 50.77%
101
+ 2025-03-09 19:54:34,136 - train - INFO - Epoch: 20 | Batch: 300 | Loss: 1.427 | Acc: 50.63%
102
+ 2025-03-09 19:54:37,134 - train - INFO - Epoch: 20 | Test Loss: 1.343 | Test Acc: 54.50%
103
+ 2025-03-09 19:54:45,895 - train - INFO - Epoch: 21 | Batch: 0 | Loss: 1.165 | Acc: 61.72%
104
+ 2025-03-09 19:54:48,130 - train - INFO - Epoch: 21 | Batch: 100 | Loss: 1.424 | Acc: 49.71%
105
+ 2025-03-09 19:54:50,095 - train - INFO - Epoch: 21 | Batch: 200 | Loss: 1.431 | Acc: 50.06%
106
+ 2025-03-09 19:54:52,237 - train - INFO - Epoch: 21 | Batch: 300 | Loss: 1.431 | Acc: 50.08%
107
+ 2025-03-09 19:54:55,342 - train - INFO - Epoch: 21 | Test Loss: 1.527 | Test Acc: 53.13%
108
+ 2025-03-09 19:54:55,542 - train - INFO - Epoch: 22 | Batch: 0 | Loss: 1.576 | Acc: 50.78%
109
+ 2025-03-09 19:54:57,589 - train - INFO - Epoch: 22 | Batch: 100 | Loss: 1.439 | Acc: 50.59%
110
+ 2025-03-09 19:54:59,618 - train - INFO - Epoch: 22 | Batch: 200 | Loss: 1.443 | Acc: 50.26%
111
+ 2025-03-09 19:55:01,786 - train - INFO - Epoch: 22 | Batch: 300 | Loss: 1.447 | Acc: 50.06%
112
+ 2025-03-09 19:55:04,883 - train - INFO - Epoch: 22 | Test Loss: 1.385 | Test Acc: 52.66%
113
+ 2025-03-09 19:55:05,110 - train - INFO - Epoch: 23 | Batch: 0 | Loss: 1.361 | Acc: 49.22%
114
+ 2025-03-09 19:55:07,153 - train - INFO - Epoch: 23 | Batch: 100 | Loss: 1.446 | Acc: 48.96%
115
+ 2025-03-09 19:55:09,130 - train - INFO - Epoch: 23 | Batch: 200 | Loss: 1.446 | Acc: 49.48%
116
+ 2025-03-09 19:55:11,103 - train - INFO - Epoch: 23 | Batch: 300 | Loss: 1.436 | Acc: 50.26%
117
+ 2025-03-09 19:55:14,018 - train - INFO - Epoch: 23 | Test Loss: 1.312 | Test Acc: 54.84%
118
+ 2025-03-09 19:55:14,202 - train - INFO - Epoch: 24 | Batch: 0 | Loss: 1.346 | Acc: 50.78%
119
+ 2025-03-09 19:55:16,194 - train - INFO - Epoch: 24 | Batch: 100 | Loss: 1.455 | Acc: 48.91%
120
+ 2025-03-09 19:55:18,163 - train - INFO - Epoch: 24 | Batch: 200 | Loss: 1.440 | Acc: 49.81%
121
+ 2025-03-09 19:55:20,197 - train - INFO - Epoch: 24 | Batch: 300 | Loss: 1.437 | Acc: 50.04%
122
+ 2025-03-09 19:55:23,241 - train - INFO - Epoch: 24 | Test Loss: 1.458 | Test Acc: 51.68%
123
+ 2025-03-09 19:55:32,077 - train - INFO - Epoch: 25 | Batch: 0 | Loss: 1.432 | Acc: 54.69%
124
+ 2025-03-09 19:55:34,182 - train - INFO - Epoch: 25 | Batch: 100 | Loss: 1.410 | Acc: 51.34%
125
+ 2025-03-09 19:55:36,201 - train - INFO - Epoch: 25 | Batch: 200 | Loss: 1.422 | Acc: 51.18%
126
+ 2025-03-09 19:55:38,082 - train - INFO - Epoch: 25 | Batch: 300 | Loss: 1.414 | Acc: 51.21%
127
+ 2025-03-09 19:55:41,003 - train - INFO - Epoch: 25 | Test Loss: 1.407 | Test Acc: 53.10%
128
+ 2025-03-09 19:55:41,169 - train - INFO - Epoch: 26 | Batch: 0 | Loss: 1.408 | Acc: 53.91%
129
+ 2025-03-09 19:55:43,071 - train - INFO - Epoch: 26 | Batch: 100 | Loss: 1.397 | Acc: 51.52%
130
+ 2025-03-09 19:55:45,032 - train - INFO - Epoch: 26 | Batch: 200 | Loss: 1.416 | Acc: 51.12%
131
+ 2025-03-09 19:55:47,033 - train - INFO - Epoch: 26 | Batch: 300 | Loss: 1.431 | Acc: 50.55%
132
+ 2025-03-09 19:55:50,303 - train - INFO - Epoch: 26 | Test Loss: 1.337 | Test Acc: 53.87%
133
+ 2025-03-09 19:55:50,467 - train - INFO - Epoch: 27 | Batch: 0 | Loss: 1.436 | Acc: 52.34%
134
+ 2025-03-09 19:55:52,603 - train - INFO - Epoch: 27 | Batch: 100 | Loss: 1.400 | Acc: 51.96%
135
+ 2025-03-09 19:55:54,700 - train - INFO - Epoch: 27 | Batch: 200 | Loss: 1.415 | Acc: 51.58%
136
+ 2025-03-09 19:55:56,931 - train - INFO - Epoch: 27 | Batch: 300 | Loss: 1.403 | Acc: 51.88%
137
+ 2025-03-09 19:56:00,315 - train - INFO - Epoch: 27 | Test Loss: 1.284 | Test Acc: 57.01%
138
+ 2025-03-09 19:56:00,490 - train - INFO - Epoch: 28 | Batch: 0 | Loss: 1.196 | Acc: 53.91%
139
+ 2025-03-09 19:56:02,614 - train - INFO - Epoch: 28 | Batch: 100 | Loss: 1.402 | Acc: 51.76%
140
+ 2025-03-09 19:56:04,933 - train - INFO - Epoch: 28 | Batch: 200 | Loss: 1.408 | Acc: 51.64%
141
+ 2025-03-09 19:56:06,826 - train - INFO - Epoch: 28 | Batch: 300 | Loss: 1.415 | Acc: 51.37%
142
+ 2025-03-09 19:56:09,723 - train - INFO - Epoch: 28 | Test Loss: 1.512 | Test Acc: 49.97%
143
+ 2025-03-09 19:56:18,095 - train - INFO - Epoch: 29 | Batch: 0 | Loss: 1.663 | Acc: 49.22%
144
+ 2025-03-09 19:56:20,071 - train - INFO - Epoch: 29 | Batch: 100 | Loss: 1.450 | Acc: 49.74%
145
+ 2025-03-09 19:56:22,046 - train - INFO - Epoch: 29 | Batch: 200 | Loss: 1.418 | Acc: 51.07%
146
+ 2025-03-09 19:56:23,941 - train - INFO - Epoch: 29 | Batch: 300 | Loss: 1.418 | Acc: 51.21%
147
+ 2025-03-09 19:56:27,201 - train - INFO - Epoch: 29 | Test Loss: 1.399 | Test Acc: 50.57%
148
+ 2025-03-09 19:56:27,418 - train - INFO - Epoch: 30 | Batch: 0 | Loss: 1.487 | Acc: 50.78%
149
+ 2025-03-09 19:56:29,631 - train - INFO - Epoch: 30 | Batch: 100 | Loss: 1.407 | Acc: 52.09%
150
+ 2025-03-09 19:56:31,749 - train - INFO - Epoch: 30 | Batch: 200 | Loss: 1.405 | Acc: 52.03%
151
+ 2025-03-09 19:56:34,079 - train - INFO - Epoch: 30 | Batch: 300 | Loss: 1.409 | Acc: 51.79%
152
+ 2025-03-09 19:56:37,224 - train - INFO - Epoch: 30 | Test Loss: 1.353 | Test Acc: 51.54%
153
+ 2025-03-09 19:56:37,407 - train - INFO - Epoch: 31 | Batch: 0 | Loss: 1.319 | Acc: 50.78%
154
+ 2025-03-09 19:56:39,290 - train - INFO - Epoch: 31 | Batch: 100 | Loss: 1.408 | Acc: 52.58%
155
+ 2025-03-09 19:56:41,278 - train - INFO - Epoch: 31 | Batch: 200 | Loss: 1.403 | Acc: 52.27%
156
+ 2025-03-09 19:56:43,239 - train - INFO - Epoch: 31 | Batch: 300 | Loss: 1.405 | Acc: 52.06%
157
+ 2025-03-09 19:56:46,411 - train - INFO - Epoch: 31 | Test Loss: 1.437 | Test Acc: 50.89%
158
+ 2025-03-09 19:56:46,625 - train - INFO - Epoch: 32 | Batch: 0 | Loss: 1.625 | Acc: 38.28%
159
+ 2025-03-09 19:56:48,617 - train - INFO - Epoch: 32 | Batch: 100 | Loss: 1.408 | Acc: 52.27%
160
+ 2025-03-09 19:56:50,612 - train - INFO - Epoch: 32 | Batch: 200 | Loss: 1.404 | Acc: 52.34%
161
+ 2025-03-09 19:56:52,563 - train - INFO - Epoch: 32 | Batch: 300 | Loss: 1.419 | Acc: 51.68%
162
+ 2025-03-09 19:56:55,759 - train - INFO - Epoch: 32 | Test Loss: 1.462 | Test Acc: 49.63%
163
+ 2025-03-09 19:57:04,788 - train - INFO - Epoch: 33 | Batch: 0 | Loss: 1.380 | Acc: 50.78%
164
+ 2025-03-09 19:57:06,775 - train - INFO - Epoch: 33 | Batch: 100 | Loss: 1.404 | Acc: 52.44%
165
+ 2025-03-09 19:57:08,717 - train - INFO - Epoch: 33 | Batch: 200 | Loss: 1.401 | Acc: 52.40%
166
+ 2025-03-09 19:57:10,621 - train - INFO - Epoch: 33 | Batch: 300 | Loss: 1.402 | Acc: 52.39%
167
+ 2025-03-09 19:57:13,611 - train - INFO - Epoch: 33 | Test Loss: 1.429 | Test Acc: 50.51%
168
+ 2025-03-09 19:57:13,780 - train - INFO - Epoch: 34 | Batch: 0 | Loss: 1.457 | Acc: 45.31%
169
+ 2025-03-09 19:57:15,716 - train - INFO - Epoch: 34 | Batch: 100 | Loss: 1.382 | Acc: 52.42%
170
+ 2025-03-09 19:57:17,568 - train - INFO - Epoch: 34 | Batch: 200 | Loss: 1.369 | Acc: 53.09%
171
+ 2025-03-09 19:57:19,467 - train - INFO - Epoch: 34 | Batch: 300 | Loss: 1.385 | Acc: 52.48%
172
+ 2025-03-09 19:57:22,494 - train - INFO - Epoch: 34 | Test Loss: 1.269 | Test Acc: 58.16%
173
+ 2025-03-09 19:57:22,652 - train - INFO - Epoch: 35 | Batch: 0 | Loss: 1.363 | Acc: 50.00%
174
+ 2025-03-09 19:57:24,637 - train - INFO - Epoch: 35 | Batch: 100 | Loss: 1.370 | Acc: 53.33%
175
+ 2025-03-09 19:57:26,607 - train - INFO - Epoch: 35 | Batch: 200 | Loss: 1.357 | Acc: 53.85%
176
+ 2025-03-09 19:57:28,609 - train - INFO - Epoch: 35 | Batch: 300 | Loss: 1.376 | Acc: 53.20%
177
+ 2025-03-09 19:57:31,872 - train - INFO - Epoch: 35 | Test Loss: 1.374 | Test Acc: 52.82%
178
+ 2025-03-09 19:57:32,038 - train - INFO - Epoch: 36 | Batch: 0 | Loss: 1.409 | Acc: 48.44%
179
+ 2025-03-09 19:57:34,309 - train - INFO - Epoch: 36 | Batch: 100 | Loss: 1.380 | Acc: 53.10%
180
+ 2025-03-09 19:57:36,495 - train - INFO - Epoch: 36 | Batch: 200 | Loss: 1.412 | Acc: 52.32%
181
+ 2025-03-09 19:57:38,858 - train - INFO - Epoch: 36 | Batch: 300 | Loss: 1.407 | Acc: 52.43%
182
+ 2025-03-09 19:57:41,965 - train - INFO - Epoch: 36 | Test Loss: 1.315 | Test Acc: 53.89%
183
+ 2025-03-09 19:57:50,440 - train - INFO - Epoch: 37 | Batch: 0 | Loss: 1.449 | Acc: 54.69%
184
+ 2025-03-09 19:57:52,335 - train - INFO - Epoch: 37 | Batch: 100 | Loss: 1.406 | Acc: 52.27%
185
+ 2025-03-09 19:57:54,375 - train - INFO - Epoch: 37 | Batch: 200 | Loss: 1.402 | Acc: 52.22%
186
+ 2025-03-09 19:57:56,397 - train - INFO - Epoch: 37 | Batch: 300 | Loss: 1.402 | Acc: 51.95%
187
+ 2025-03-09 19:57:59,642 - train - INFO - Epoch: 37 | Test Loss: 1.317 | Test Acc: 54.21%
188
+ 2025-03-09 19:57:59,815 - train - INFO - Epoch: 38 | Batch: 0 | Loss: 1.396 | Acc: 53.91%
189
+ 2025-03-09 19:58:01,888 - train - INFO - Epoch: 38 | Batch: 100 | Loss: 1.402 | Acc: 52.91%
190
+ 2025-03-09 19:58:04,248 - train - INFO - Epoch: 38 | Batch: 200 | Loss: 1.390 | Acc: 53.00%
191
+ 2025-03-09 19:58:06,220 - train - INFO - Epoch: 38 | Batch: 300 | Loss: 1.384 | Acc: 52.99%
192
+ 2025-03-09 19:58:09,163 - train - INFO - Epoch: 38 | Test Loss: 1.377 | Test Acc: 53.56%
193
+ 2025-03-09 19:58:09,344 - train - INFO - Epoch: 39 | Batch: 0 | Loss: 1.318 | Acc: 56.25%
194
+ 2025-03-09 19:58:11,213 - train - INFO - Epoch: 39 | Batch: 100 | Loss: 1.357 | Acc: 53.60%
195
+ 2025-03-09 19:58:13,162 - train - INFO - Epoch: 39 | Batch: 200 | Loss: 1.359 | Acc: 53.66%
196
+ 2025-03-09 19:58:15,049 - train - INFO - Epoch: 39 | Batch: 300 | Loss: 1.365 | Acc: 53.45%
197
+ 2025-03-09 19:58:17,934 - train - INFO - Epoch: 39 | Test Loss: 1.387 | Test Acc: 53.77%
198
+ 2025-03-09 19:58:18,107 - train - INFO - Epoch: 40 | Batch: 0 | Loss: 1.412 | Acc: 48.44%
199
+ 2025-03-09 19:58:20,130 - train - INFO - Epoch: 40 | Batch: 100 | Loss: 1.366 | Acc: 53.23%
200
+ 2025-03-09 19:58:22,068 - train - INFO - Epoch: 40 | Batch: 200 | Loss: 1.351 | Acc: 53.61%
201
+ 2025-03-09 19:58:24,230 - train - INFO - Epoch: 40 | Batch: 300 | Loss: 1.354 | Acc: 53.64%
202
+ 2025-03-09 19:58:27,405 - train - INFO - Epoch: 40 | Test Loss: 1.372 | Test Acc: 53.59%
203
+ 2025-03-09 19:58:36,177 - train - INFO - Epoch: 41 | Batch: 0 | Loss: 1.538 | Acc: 44.53%
204
+ 2025-03-09 19:58:38,243 - train - INFO - Epoch: 41 | Batch: 100 | Loss: 1.372 | Acc: 53.30%
205
+ 2025-03-09 19:58:40,234 - train - INFO - Epoch: 41 | Batch: 200 | Loss: 1.364 | Acc: 53.72%
206
+ 2025-03-09 19:58:42,245 - train - INFO - Epoch: 41 | Batch: 300 | Loss: 1.369 | Acc: 53.57%
207
+ 2025-03-09 19:58:45,634 - train - INFO - Epoch: 41 | Test Loss: 1.296 | Test Acc: 54.74%
208
+ 2025-03-09 19:58:45,798 - train - INFO - Epoch: 42 | Batch: 0 | Loss: 1.475 | Acc: 44.53%
209
+ 2025-03-09 19:58:47,867 - train - INFO - Epoch: 42 | Batch: 100 | Loss: 1.368 | Acc: 52.49%
210
+ 2025-03-09 19:58:50,126 - train - INFO - Epoch: 42 | Batch: 200 | Loss: 1.368 | Acc: 52.68%
211
+ 2025-03-09 19:58:52,366 - train - INFO - Epoch: 42 | Batch: 300 | Loss: 1.377 | Acc: 52.70%
212
+ 2025-03-09 19:58:55,621 - train - INFO - Epoch: 42 | Test Loss: 1.309 | Test Acc: 58.19%
213
+ 2025-03-09 19:58:55,774 - train - INFO - Epoch: 43 | Batch: 0 | Loss: 1.291 | Acc: 61.72%
214
+ 2025-03-09 19:58:57,733 - train - INFO - Epoch: 43 | Batch: 100 | Loss: 1.340 | Acc: 54.46%
215
+ 2025-03-09 19:58:59,676 - train - INFO - Epoch: 43 | Batch: 200 | Loss: 1.329 | Acc: 54.83%
216
+ 2025-03-09 19:59:01,636 - train - INFO - Epoch: 43 | Batch: 300 | Loss: 1.331 | Acc: 55.12%
217
+ 2025-03-09 19:59:04,691 - train - INFO - Epoch: 43 | Test Loss: 1.373 | Test Acc: 53.69%
218
+ 2025-03-09 19:59:04,836 - train - INFO - Epoch: 44 | Batch: 0 | Loss: 1.310 | Acc: 52.34%
219
+ 2025-03-09 19:59:06,796 - train - INFO - Epoch: 44 | Batch: 100 | Loss: 1.373 | Acc: 53.46%
220
+ 2025-03-09 19:59:08,711 - train - INFO - Epoch: 44 | Batch: 200 | Loss: 1.368 | Acc: 53.84%
221
+ 2025-03-09 19:59:10,613 - train - INFO - Epoch: 44 | Batch: 300 | Loss: 1.362 | Acc: 53.90%
222
+ 2025-03-09 19:59:13,399 - train - INFO - Epoch: 44 | Test Loss: 1.378 | Test Acc: 53.67%
223
+ 2025-03-09 19:59:21,620 - train - INFO - Epoch: 45 | Batch: 0 | Loss: 1.383 | Acc: 58.59%
224
+ 2025-03-09 19:59:23,458 - train - INFO - Epoch: 45 | Batch: 100 | Loss: 1.325 | Acc: 55.49%
225
+ 2025-03-09 19:59:25,416 - train - INFO - Epoch: 45 | Batch: 200 | Loss: 1.345 | Acc: 54.83%
226
+ 2025-03-09 19:59:27,390 - train - INFO - Epoch: 45 | Batch: 300 | Loss: 1.361 | Acc: 54.17%
227
+ 2025-03-09 19:59:30,307 - train - INFO - Epoch: 45 | Test Loss: 1.345 | Test Acc: 54.53%
228
+ 2025-03-09 19:59:30,478 - train - INFO - Epoch: 46 | Batch: 0 | Loss: 1.303 | Acc: 55.47%
229
+ 2025-03-09 19:59:32,427 - train - INFO - Epoch: 46 | Batch: 100 | Loss: 1.342 | Acc: 54.19%
230
+ 2025-03-09 19:59:34,336 - train - INFO - Epoch: 46 | Batch: 200 | Loss: 1.337 | Acc: 54.64%
231
+ 2025-03-09 19:59:36,228 - train - INFO - Epoch: 46 | Batch: 300 | Loss: 1.351 | Acc: 54.22%
232
+ 2025-03-09 19:59:39,262 - train - INFO - Epoch: 46 | Test Loss: 1.345 | Test Acc: 56.16%
233
+ 2025-03-09 19:59:39,452 - train - INFO - Epoch: 47 | Batch: 0 | Loss: 1.489 | Acc: 51.56%
234
+ 2025-03-09 19:59:41,279 - train - INFO - Epoch: 47 | Batch: 100 | Loss: 1.328 | Acc: 54.90%
235
+ 2025-03-09 19:59:43,204 - train - INFO - Epoch: 47 | Batch: 200 | Loss: 1.327 | Acc: 55.05%
236
+ 2025-03-09 19:59:45,120 - train - INFO - Epoch: 47 | Batch: 300 | Loss: 1.333 | Acc: 55.00%
237
+ 2025-03-09 19:59:48,254 - train - INFO - Epoch: 47 | Test Loss: 1.314 | Test Acc: 56.75%
238
+ 2025-03-09 19:59:48,434 - train - INFO - Epoch: 48 | Batch: 0 | Loss: 1.208 | Acc: 59.38%
239
+ 2025-03-09 19:59:50,454 - train - INFO - Epoch: 48 | Batch: 100 | Loss: 1.317 | Acc: 55.10%
240
+ 2025-03-09 19:59:52,374 - train - INFO - Epoch: 48 | Batch: 200 | Loss: 1.312 | Acc: 55.74%
241
+ 2025-03-09 19:59:54,256 - train - INFO - Epoch: 48 | Batch: 300 | Loss: 1.309 | Acc: 55.89%
242
+ 2025-03-09 19:59:57,327 - train - INFO - Epoch: 48 | Test Loss: 1.271 | Test Acc: 58.09%
243
+ 2025-03-09 20:00:06,506 - train - INFO - Epoch: 49 | Batch: 0 | Loss: 1.332 | Acc: 57.81%
244
+ 2025-03-09 20:00:08,781 - train - INFO - Epoch: 49 | Batch: 100 | Loss: 1.298 | Acc: 55.98%
245
+ 2025-03-09 20:00:11,349 - train - INFO - Epoch: 49 | Batch: 200 | Loss: 1.302 | Acc: 56.07%
246
+ 2025-03-09 20:00:13,500 - train - INFO - Epoch: 49 | Batch: 300 | Loss: 1.307 | Acc: 55.87%
247
+ 2025-03-09 20:00:16,544 - train - INFO - Epoch: 49 | Test Loss: 1.286 | Test Acc: 54.88%
248
+ 2025-03-09 20:00:16,712 - train - INFO - Epoch: 50 | Batch: 0 | Loss: 1.354 | Acc: 55.47%
249
+ 2025-03-09 20:00:18,651 - train - INFO - Epoch: 50 | Batch: 100 | Loss: 1.353 | Acc: 53.57%
250
+ 2025-03-09 20:00:20,584 - train - INFO - Epoch: 50 | Batch: 200 | Loss: 1.335 | Acc: 54.65%
251
+ 2025-03-09 20:00:22,491 - train - INFO - Epoch: 50 | Batch: 300 | Loss: 1.333 | Acc: 54.62%
252
+ 2025-03-09 20:00:25,512 - train - INFO - Epoch: 50 | Test Loss: 1.264 | Test Acc: 56.42%
253
+ 2025-03-09 20:00:25,692 - train - INFO - Epoch: 51 | Batch: 0 | Loss: 1.259 | Acc: 57.81%
254
+ 2025-03-09 20:00:27,737 - train - INFO - Epoch: 51 | Batch: 100 | Loss: 1.293 | Acc: 56.82%
255
+ 2025-03-09 20:00:29,766 - train - INFO - Epoch: 51 | Batch: 200 | Loss: 1.300 | Acc: 56.34%
256
+ 2025-03-09 20:00:31,756 - train - INFO - Epoch: 51 | Batch: 300 | Loss: 1.297 | Acc: 56.14%
257
+ 2025-03-09 20:00:34,929 - train - INFO - Epoch: 51 | Test Loss: 1.223 | Test Acc: 57.27%
258
+ 2025-03-09 20:00:35,118 - train - INFO - Epoch: 52 | Batch: 0 | Loss: 1.323 | Acc: 55.47%
259
+ 2025-03-09 20:00:37,282 - train - INFO - Epoch: 52 | Batch: 100 | Loss: 1.299 | Acc: 55.91%
260
+ 2025-03-09 20:00:39,484 - train - INFO - Epoch: 52 | Batch: 200 | Loss: 1.289 | Acc: 56.01%
261
+ 2025-03-09 20:00:41,515 - train - INFO - Epoch: 52 | Batch: 300 | Loss: 1.293 | Acc: 55.98%
262
+ 2025-03-09 20:00:44,482 - train - INFO - Epoch: 52 | Test Loss: 1.310 | Test Acc: 55.00%
263
+ 2025-03-09 20:00:52,924 - train - INFO - Epoch: 53 | Batch: 0 | Loss: 1.438 | Acc: 50.00%
264
+ 2025-03-09 20:00:55,121 - train - INFO - Epoch: 53 | Batch: 100 | Loss: 1.297 | Acc: 56.12%
265
+ 2025-03-09 20:00:57,170 - train - INFO - Epoch: 53 | Batch: 200 | Loss: 1.306 | Acc: 55.61%
266
+ 2025-03-09 20:00:59,086 - train - INFO - Epoch: 53 | Batch: 300 | Loss: 1.310 | Acc: 55.32%
267
+ 2025-03-09 20:01:01,976 - train - INFO - Epoch: 53 | Test Loss: 1.293 | Test Acc: 55.28%
268
+ 2025-03-09 20:01:02,134 - train - INFO - Epoch: 54 | Batch: 0 | Loss: 1.311 | Acc: 53.91%
269
+ 2025-03-09 20:01:04,118 - train - INFO - Epoch: 54 | Batch: 100 | Loss: 1.335 | Acc: 54.83%
270
+ 2025-03-09 20:01:06,123 - train - INFO - Epoch: 54 | Batch: 200 | Loss: 1.312 | Acc: 55.50%
271
+ 2025-03-09 20:01:08,143 - train - INFO - Epoch: 54 | Batch: 300 | Loss: 1.296 | Acc: 55.97%
272
+ 2025-03-09 20:01:11,377 - train - INFO - Epoch: 54 | Test Loss: 1.293 | Test Acc: 57.74%
273
+ 2025-03-09 20:01:11,543 - train - INFO - Epoch: 55 | Batch: 0 | Loss: 1.194 | Acc: 58.59%
274
+ 2025-03-09 20:01:13,621 - train - INFO - Epoch: 55 | Batch: 100 | Loss: 1.266 | Acc: 56.81%
275
+ 2025-03-09 20:01:15,599 - train - INFO - Epoch: 55 | Batch: 200 | Loss: 1.267 | Acc: 57.00%
276
+ 2025-03-09 20:01:17,646 - train - INFO - Epoch: 55 | Batch: 300 | Loss: 1.297 | Acc: 56.35%
277
+ 2025-03-09 20:01:20,601 - train - INFO - Epoch: 55 | Test Loss: 1.288 | Test Acc: 56.27%
278
+ 2025-03-09 20:01:20,778 - train - INFO - Epoch: 56 | Batch: 0 | Loss: 1.295 | Acc: 54.69%
279
+ 2025-03-09 20:01:22,676 - train - INFO - Epoch: 56 | Batch: 100 | Loss: 1.306 | Acc: 55.91%
280
+ 2025-03-09 20:01:24,633 - train - INFO - Epoch: 56 | Batch: 200 | Loss: 1.289 | Acc: 56.60%
281
+ 2025-03-09 20:01:26,625 - train - INFO - Epoch: 56 | Batch: 300 | Loss: 1.299 | Acc: 56.09%
282
+ 2025-03-09 20:01:29,991 - train - INFO - Epoch: 56 | Test Loss: 1.264 | Test Acc: 56.79%
283
+ 2025-03-09 20:01:38,741 - train - INFO - Epoch: 57 | Batch: 0 | Loss: 1.413 | Acc: 53.12%
284
+ 2025-03-09 20:01:40,895 - train - INFO - Epoch: 57 | Batch: 100 | Loss: 1.295 | Acc: 56.21%
285
+ 2025-03-09 20:01:43,111 - train - INFO - Epoch: 57 | Batch: 200 | Loss: 1.287 | Acc: 56.34%
286
+ 2025-03-09 20:01:45,114 - train - INFO - Epoch: 57 | Batch: 300 | Loss: 1.276 | Acc: 56.62%
287
+ 2025-03-09 20:01:48,333 - train - INFO - Epoch: 57 | Test Loss: 1.283 | Test Acc: 56.47%
288
+ 2025-03-09 20:01:48,483 - train - INFO - Epoch: 58 | Batch: 0 | Loss: 1.203 | Acc: 60.94%
289
+ 2025-03-09 20:01:50,350 - train - INFO - Epoch: 58 | Batch: 100 | Loss: 1.265 | Acc: 57.16%
290
+ 2025-03-09 20:01:52,319 - train - INFO - Epoch: 58 | Batch: 200 | Loss: 1.278 | Acc: 56.86%
291
+ 2025-03-09 20:01:54,253 - train - INFO - Epoch: 58 | Batch: 300 | Loss: 1.278 | Acc: 56.76%
292
+ 2025-03-09 20:01:57,139 - train - INFO - Epoch: 58 | Test Loss: 1.250 | Test Acc: 55.98%
293
+ 2025-03-09 20:01:57,294 - train - INFO - Epoch: 59 | Batch: 0 | Loss: 1.299 | Acc: 54.69%
294
+ 2025-03-09 20:01:59,177 - train - INFO - Epoch: 59 | Batch: 100 | Loss: 1.275 | Acc: 56.53%
295
+ 2025-03-09 20:02:01,061 - train - INFO - Epoch: 59 | Batch: 200 | Loss: 1.264 | Acc: 57.11%
296
+ 2025-03-09 20:02:03,013 - train - INFO - Epoch: 59 | Batch: 300 | Loss: 1.270 | Acc: 57.01%
297
+ 2025-03-09 20:02:05,873 - train - INFO - Epoch: 59 | Test Loss: 1.175 | Test Acc: 60.14%
298
+ 2025-03-09 20:02:06,084 - train - INFO - Epoch: 60 | Batch: 0 | Loss: 1.176 | Acc: 60.16%
299
+ 2025-03-09 20:02:08,156 - train - INFO - Epoch: 60 | Batch: 100 | Loss: 1.276 | Acc: 56.69%
300
+ 2025-03-09 20:02:10,235 - train - INFO - Epoch: 60 | Batch: 200 | Loss: 1.261 | Acc: 57.39%
301
+ 2025-03-09 20:02:12,262 - train - INFO - Epoch: 60 | Batch: 300 | Loss: 1.272 | Acc: 56.96%
302
+ 2025-03-09 20:02:15,547 - train - INFO - Epoch: 60 | Test Loss: 1.258 | Test Acc: 57.63%
303
+ 2025-03-09 20:02:24,176 - train - INFO - Epoch: 61 | Batch: 0 | Loss: 1.181 | Acc: 63.28%
304
+ 2025-03-09 20:02:26,274 - train - INFO - Epoch: 61 | Batch: 100 | Loss: 1.283 | Acc: 56.98%
305
+ 2025-03-09 20:02:28,240 - train - INFO - Epoch: 61 | Batch: 200 | Loss: 1.267 | Acc: 57.38%
306
+ 2025-03-09 20:02:30,146 - train - INFO - Epoch: 61 | Batch: 300 | Loss: 1.266 | Acc: 57.71%
307
+ 2025-03-09 20:02:33,184 - train - INFO - Epoch: 61 | Test Loss: 1.185 | Test Acc: 57.88%
308
+ 2025-03-09 20:02:33,361 - train - INFO - Epoch: 62 | Batch: 0 | Loss: 1.181 | Acc: 55.47%
309
+ 2025-03-09 20:02:35,405 - train - INFO - Epoch: 62 | Batch: 100 | Loss: 1.237 | Acc: 58.36%
310
+ 2025-03-09 20:02:37,431 - train - INFO - Epoch: 62 | Batch: 200 | Loss: 1.222 | Acc: 58.83%
311
+ 2025-03-09 20:02:39,492 - train - INFO - Epoch: 62 | Batch: 300 | Loss: 1.225 | Acc: 58.54%
312
+ 2025-03-09 20:02:42,962 - train - INFO - Epoch: 62 | Test Loss: 1.201 | Test Acc: 59.17%
313
+ 2025-03-09 20:02:43,194 - train - INFO - Epoch: 63 | Batch: 0 | Loss: 1.167 | Acc: 57.03%
314
+ 2025-03-09 20:02:45,381 - train - INFO - Epoch: 63 | Batch: 100 | Loss: 1.255 | Acc: 57.75%
315
+ 2025-03-09 20:02:47,427 - train - INFO - Epoch: 63 | Batch: 200 | Loss: 1.251 | Acc: 57.57%
316
+ 2025-03-09 20:02:49,782 - train - INFO - Epoch: 63 | Batch: 300 | Loss: 1.247 | Acc: 57.64%
317
+ 2025-03-09 20:02:52,930 - train - INFO - Epoch: 63 | Test Loss: 1.212 | Test Acc: 59.46%
318
+ 2025-03-09 20:02:53,093 - train - INFO - Epoch: 64 | Batch: 0 | Loss: 1.213 | Acc: 60.16%
319
+ 2025-03-09 20:02:54,979 - train - INFO - Epoch: 64 | Batch: 100 | Loss: 1.238 | Acc: 58.62%
320
+ 2025-03-09 20:02:56,924 - train - INFO - Epoch: 64 | Batch: 200 | Loss: 1.231 | Acc: 58.56%
321
+ 2025-03-09 20:02:59,004 - train - INFO - Epoch: 64 | Batch: 300 | Loss: 1.235 | Acc: 58.42%
322
+ 2025-03-09 20:03:02,027 - train - INFO - Epoch: 64 | Test Loss: 1.301 | Test Acc: 56.76%
323
+ 2025-03-09 20:03:11,492 - train - INFO - Epoch: 65 | Batch: 0 | Loss: 1.440 | Acc: 59.38%
324
+ 2025-03-09 20:03:13,589 - train - INFO - Epoch: 65 | Batch: 100 | Loss: 1.219 | Acc: 57.99%
325
+ 2025-03-09 20:03:15,758 - train - INFO - Epoch: 65 | Batch: 200 | Loss: 1.214 | Acc: 58.48%
326
+ 2025-03-09 20:03:17,811 - train - INFO - Epoch: 65 | Batch: 300 | Loss: 1.220 | Acc: 58.31%
327
+ 2025-03-09 20:03:20,962 - train - INFO - Epoch: 65 | Test Loss: 1.133 | Test Acc: 62.39%
328
+ 2025-03-09 20:03:21,151 - train - INFO - Epoch: 66 | Batch: 0 | Loss: 1.090 | Acc: 64.84%
329
+ 2025-03-09 20:03:23,279 - train - INFO - Epoch: 66 | Batch: 100 | Loss: 1.212 | Acc: 58.54%
330
+ 2025-03-09 20:03:25,314 - train - INFO - Epoch: 66 | Batch: 200 | Loss: 1.221 | Acc: 58.45%
331
+ 2025-03-09 20:03:27,262 - train - INFO - Epoch: 66 | Batch: 300 | Loss: 1.220 | Acc: 58.67%
332
+ 2025-03-09 20:03:30,250 - train - INFO - Epoch: 66 | Test Loss: 1.128 | Test Acc: 62.20%
333
+ 2025-03-09 20:03:30,427 - train - INFO - Epoch: 67 | Batch: 0 | Loss: 1.194 | Acc: 62.50%
334
+ 2025-03-09 20:03:32,409 - train - INFO - Epoch: 67 | Batch: 100 | Loss: 1.205 | Acc: 59.23%
335
+ 2025-03-09 20:03:34,525 - train - INFO - Epoch: 67 | Batch: 200 | Loss: 1.219 | Acc: 58.96%
336
+ 2025-03-09 20:03:36,669 - train - INFO - Epoch: 67 | Batch: 300 | Loss: 1.224 | Acc: 58.88%
337
+ 2025-03-09 20:03:40,213 - train - INFO - Epoch: 67 | Test Loss: 1.198 | Test Acc: 60.10%
338
+ 2025-03-09 20:03:40,438 - train - INFO - Epoch: 68 | Batch: 0 | Loss: 1.071 | Acc: 59.38%
339
+ 2025-03-09 20:03:42,654 - train - INFO - Epoch: 68 | Batch: 100 | Loss: 1.195 | Acc: 59.75%
340
+ 2025-03-09 20:03:44,633 - train - INFO - Epoch: 68 | Batch: 200 | Loss: 1.193 | Acc: 59.75%
341
+ 2025-03-09 20:03:46,611 - train - INFO - Epoch: 68 | Batch: 300 | Loss: 1.193 | Acc: 59.72%
342
+ 2025-03-09 20:03:49,635 - train - INFO - Epoch: 68 | Test Loss: 1.170 | Test Acc: 60.74%
343
+ 2025-03-09 20:03:58,030 - train - INFO - Epoch: 69 | Batch: 0 | Loss: 1.065 | Acc: 57.81%
344
+ 2025-03-09 20:04:00,061 - train - INFO - Epoch: 69 | Batch: 100 | Loss: 1.217 | Acc: 58.83%
345
+ 2025-03-09 20:04:01,977 - train - INFO - Epoch: 69 | Batch: 200 | Loss: 1.226 | Acc: 58.59%
346
+ 2025-03-09 20:04:03,999 - train - INFO - Epoch: 69 | Batch: 300 | Loss: 1.229 | Acc: 58.67%
347
+ 2025-03-09 20:04:07,214 - train - INFO - Epoch: 69 | Test Loss: 1.170 | Test Acc: 61.05%
348
+ 2025-03-09 20:04:07,420 - train - INFO - Epoch: 70 | Batch: 0 | Loss: 1.327 | Acc: 55.47%
349
+ 2025-03-09 20:04:09,428 - train - INFO - Epoch: 70 | Batch: 100 | Loss: 1.212 | Acc: 59.03%
350
+ 2025-03-09 20:04:11,490 - train - INFO - Epoch: 70 | Batch: 200 | Loss: 1.210 | Acc: 59.22%
351
+ 2025-03-09 20:04:13,652 - train - INFO - Epoch: 70 | Batch: 300 | Loss: 1.201 | Acc: 59.55%
352
+ 2025-03-09 20:04:16,600 - train - INFO - Epoch: 70 | Test Loss: 1.083 | Test Acc: 63.70%
353
+ 2025-03-09 20:04:16,780 - train - INFO - Epoch: 71 | Batch: 0 | Loss: 1.090 | Acc: 64.84%
354
+ 2025-03-09 20:04:18,739 - train - INFO - Epoch: 71 | Batch: 100 | Loss: 1.180 | Acc: 60.05%
355
+ 2025-03-09 20:04:20,670 - train - INFO - Epoch: 71 | Batch: 200 | Loss: 1.185 | Acc: 59.98%
356
+ 2025-03-09 20:04:22,543 - train - INFO - Epoch: 71 | Batch: 300 | Loss: 1.183 | Acc: 60.13%
357
+ 2025-03-09 20:04:25,494 - train - INFO - Epoch: 71 | Test Loss: 1.122 | Test Acc: 63.08%
358
+ 2025-03-09 20:04:25,672 - train - INFO - Epoch: 72 | Batch: 0 | Loss: 1.234 | Acc: 57.81%
359
+ 2025-03-09 20:04:27,737 - train - INFO - Epoch: 72 | Batch: 100 | Loss: 1.229 | Acc: 58.80%
360
+ 2025-03-09 20:04:29,724 - train - INFO - Epoch: 72 | Batch: 200 | Loss: 1.214 | Acc: 59.19%
361
+ 2025-03-09 20:04:31,711 - train - INFO - Epoch: 72 | Batch: 300 | Loss: 1.205 | Acc: 59.47%
362
+ 2025-03-09 20:04:35,205 - train - INFO - Epoch: 72 | Test Loss: 1.286 | Test Acc: 58.20%
363
+ 2025-03-09 20:04:44,301 - train - INFO - Epoch: 73 | Batch: 0 | Loss: 1.294 | Acc: 57.81%
364
+ 2025-03-09 20:04:46,184 - train - INFO - Epoch: 73 | Batch: 100 | Loss: 1.195 | Acc: 59.38%
365
+ 2025-03-09 20:04:48,292 - train - INFO - Epoch: 73 | Batch: 200 | Loss: 1.186 | Acc: 59.92%
366
+ 2025-03-09 20:04:50,218 - train - INFO - Epoch: 73 | Batch: 300 | Loss: 1.186 | Acc: 59.97%
367
+ 2025-03-09 20:04:53,168 - train - INFO - Epoch: 73 | Test Loss: 1.130 | Test Acc: 62.16%
368
+ 2025-03-09 20:04:53,370 - train - INFO - Epoch: 74 | Batch: 0 | Loss: 1.022 | Acc: 66.41%
369
+ 2025-03-09 20:04:55,408 - train - INFO - Epoch: 74 | Batch: 100 | Loss: 1.219 | Acc: 58.83%
370
+ 2025-03-09 20:04:57,367 - train - INFO - Epoch: 74 | Batch: 200 | Loss: 1.196 | Acc: 59.82%
371
+ 2025-03-09 20:04:59,411 - train - INFO - Epoch: 74 | Batch: 300 | Loss: 1.184 | Acc: 60.10%
372
+ 2025-03-09 20:05:02,780 - train - INFO - Epoch: 74 | Test Loss: 1.169 | Test Acc: 59.53%
373
+ 2025-03-09 20:05:02,956 - train - INFO - Epoch: 75 | Batch: 0 | Loss: 1.042 | Acc: 64.06%
374
+ 2025-03-09 20:05:04,928 - train - INFO - Epoch: 75 | Batch: 100 | Loss: 1.179 | Acc: 59.97%
375
+ 2025-03-09 20:05:07,026 - train - INFO - Epoch: 75 | Batch: 200 | Loss: 1.164 | Acc: 60.59%
376
+ 2025-03-09 20:05:09,052 - train - INFO - Epoch: 75 | Batch: 300 | Loss: 1.168 | Acc: 60.56%
377
+ 2025-03-09 20:05:12,486 - train - INFO - Epoch: 75 | Test Loss: 1.086 | Test Acc: 64.61%
378
+ 2025-03-09 20:05:12,668 - train - INFO - Epoch: 76 | Batch: 0 | Loss: 1.005 | Acc: 67.97%
379
+ 2025-03-09 20:05:14,872 - train - INFO - Epoch: 76 | Batch: 100 | Loss: 1.188 | Acc: 59.54%
380
+ 2025-03-09 20:05:17,020 - train - INFO - Epoch: 76 | Batch: 200 | Loss: 1.168 | Acc: 60.31%
381
+ 2025-03-09 20:05:18,884 - train - INFO - Epoch: 76 | Batch: 300 | Loss: 1.171 | Acc: 60.22%
382
+ 2025-03-09 20:05:21,857 - train - INFO - Epoch: 76 | Test Loss: 1.077 | Test Acc: 64.56%
383
+ 2025-03-09 20:05:30,426 - train - INFO - Epoch: 77 | Batch: 0 | Loss: 0.993 | Acc: 67.97%
384
+ 2025-03-09 20:05:32,481 - train - INFO - Epoch: 77 | Batch: 100 | Loss: 1.187 | Acc: 59.47%
385
+ 2025-03-09 20:05:34,515 - train - INFO - Epoch: 77 | Batch: 200 | Loss: 1.162 | Acc: 60.40%
386
+ 2025-03-09 20:05:36,603 - train - INFO - Epoch: 77 | Batch: 300 | Loss: 1.156 | Acc: 60.49%
387
+ 2025-03-09 20:05:39,781 - train - INFO - Epoch: 77 | Test Loss: 1.148 | Test Acc: 61.77%
388
+ 2025-03-09 20:05:39,985 - train - INFO - Epoch: 78 | Batch: 0 | Loss: 1.126 | Acc: 60.16%
389
+ 2025-03-09 20:05:41,942 - train - INFO - Epoch: 78 | Batch: 100 | Loss: 1.135 | Acc: 61.81%
390
+ 2025-03-09 20:05:43,948 - train - INFO - Epoch: 78 | Batch: 200 | Loss: 1.130 | Acc: 61.72%
391
+ 2025-03-09 20:05:46,120 - train - INFO - Epoch: 78 | Batch: 300 | Loss: 1.144 | Acc: 61.18%
392
+ 2025-03-09 20:05:49,313 - train - INFO - Epoch: 78 | Test Loss: 1.187 | Test Acc: 60.45%
393
+ 2025-03-09 20:05:49,505 - train - INFO - Epoch: 79 | Batch: 0 | Loss: 1.205 | Acc: 60.94%
394
+ 2025-03-09 20:05:51,476 - train - INFO - Epoch: 79 | Batch: 100 | Loss: 1.161 | Acc: 60.95%
395
+ 2025-03-09 20:05:53,457 - train - INFO - Epoch: 79 | Batch: 200 | Loss: 1.157 | Acc: 61.07%
396
+ 2025-03-09 20:05:55,418 - train - INFO - Epoch: 79 | Batch: 300 | Loss: 1.156 | Acc: 61.12%
397
+ 2025-03-09 20:05:58,509 - train - INFO - Epoch: 79 | Test Loss: 1.056 | Test Acc: 63.68%
398
+ 2025-03-09 20:05:58,686 - train - INFO - Epoch: 80 | Batch: 0 | Loss: 1.065 | Acc: 64.84%
399
+ 2025-03-09 20:06:00,797 - train - INFO - Epoch: 80 | Batch: 100 | Loss: 1.121 | Acc: 62.57%
400
+ 2025-03-09 20:06:02,773 - train - INFO - Epoch: 80 | Batch: 200 | Loss: 1.133 | Acc: 61.80%
401
+ 2025-03-09 20:06:04,652 - train - INFO - Epoch: 80 | Batch: 300 | Loss: 1.145 | Acc: 61.24%
402
+ 2025-03-09 20:06:07,896 - train - INFO - Epoch: 80 | Test Loss: 1.118 | Test Acc: 62.60%
403
+ 2025-03-09 20:06:17,187 - train - INFO - Epoch: 81 | Batch: 0 | Loss: 0.981 | Acc: 64.06%
404
+ 2025-03-09 20:06:19,096 - train - INFO - Epoch: 81 | Batch: 100 | Loss: 1.167 | Acc: 61.12%
405
+ 2025-03-09 20:06:21,115 - train - INFO - Epoch: 81 | Batch: 200 | Loss: 1.154 | Acc: 61.34%
406
+ 2025-03-09 20:06:23,095 - train - INFO - Epoch: 81 | Batch: 300 | Loss: 1.144 | Acc: 61.42%
407
+ 2025-03-09 20:06:26,187 - train - INFO - Epoch: 81 | Test Loss: 1.173 | Test Acc: 60.44%
408
+ 2025-03-09 20:06:26,358 - train - INFO - Epoch: 82 | Batch: 0 | Loss: 1.302 | Acc: 53.91%
409
+ 2025-03-09 20:06:28,368 - train - INFO - Epoch: 82 | Batch: 100 | Loss: 1.138 | Acc: 61.95%
410
+ 2025-03-09 20:06:30,300 - train - INFO - Epoch: 82 | Batch: 200 | Loss: 1.133 | Acc: 62.14%
411
+ 2025-03-09 20:06:32,316 - train - INFO - Epoch: 82 | Batch: 300 | Loss: 1.134 | Acc: 62.13%
412
+ 2025-03-09 20:06:35,449 - train - INFO - Epoch: 82 | Test Loss: 1.126 | Test Acc: 61.16%
413
+ 2025-03-09 20:06:35,669 - train - INFO - Epoch: 83 | Batch: 0 | Loss: 1.332 | Acc: 53.12%
414
+ 2025-03-09 20:06:37,637 - train - INFO - Epoch: 83 | Batch: 100 | Loss: 1.145 | Acc: 60.94%
415
+ 2025-03-09 20:06:39,716 - train - INFO - Epoch: 83 | Batch: 200 | Loss: 1.138 | Acc: 61.66%
416
+ 2025-03-09 20:06:41,791 - train - INFO - Epoch: 83 | Batch: 300 | Loss: 1.143 | Acc: 61.64%
417
+ 2025-03-09 20:06:44,690 - train - INFO - Epoch: 83 | Test Loss: 1.201 | Test Acc: 61.08%
418
+ 2025-03-09 20:06:44,856 - train - INFO - Epoch: 84 | Batch: 0 | Loss: 1.367 | Acc: 61.72%
419
+ 2025-03-09 20:06:46,837 - train - INFO - Epoch: 84 | Batch: 100 | Loss: 1.157 | Acc: 61.66%
420
+ 2025-03-09 20:06:48,817 - train - INFO - Epoch: 84 | Batch: 200 | Loss: 1.129 | Acc: 62.19%
421
+ 2025-03-09 20:06:50,922 - train - INFO - Epoch: 84 | Batch: 300 | Loss: 1.124 | Acc: 62.21%
422
+ 2025-03-09 20:06:54,051 - train - INFO - Epoch: 84 | Test Loss: 1.030 | Test Acc: 65.83%
423
+ 2025-03-09 20:07:03,364 - train - INFO - Epoch: 85 | Batch: 0 | Loss: 0.997 | Acc: 66.41%
424
+ 2025-03-09 20:07:06,169 - train - INFO - Epoch: 85 | Batch: 100 | Loss: 1.102 | Acc: 62.93%
425
+ 2025-03-09 20:07:08,295 - train - INFO - Epoch: 85 | Batch: 200 | Loss: 1.099 | Acc: 62.92%
426
+ 2025-03-09 20:07:10,316 - train - INFO - Epoch: 85 | Batch: 300 | Loss: 1.100 | Acc: 62.74%
427
+ 2025-03-09 20:07:13,443 - train - INFO - Epoch: 85 | Test Loss: 0.993 | Test Acc: 66.24%
428
+ 2025-03-09 20:07:13,613 - train - INFO - Epoch: 86 | Batch: 0 | Loss: 0.879 | Acc: 66.41%
429
+ 2025-03-09 20:07:15,530 - train - INFO - Epoch: 86 | Batch: 100 | Loss: 1.070 | Acc: 63.68%
430
+ 2025-03-09 20:07:17,620 - train - INFO - Epoch: 86 | Batch: 200 | Loss: 1.082 | Acc: 63.56%
431
+ 2025-03-09 20:07:19,568 - train - INFO - Epoch: 86 | Batch: 300 | Loss: 1.095 | Acc: 63.20%
432
+ 2025-03-09 20:07:22,857 - train - INFO - Epoch: 86 | Test Loss: 1.084 | Test Acc: 64.14%
433
+ 2025-03-09 20:07:23,032 - train - INFO - Epoch: 87 | Batch: 0 | Loss: 1.100 | Acc: 59.38%
434
+ 2025-03-09 20:07:25,120 - train - INFO - Epoch: 87 | Batch: 100 | Loss: 1.086 | Acc: 62.84%
435
+ 2025-03-09 20:07:27,174 - train - INFO - Epoch: 87 | Batch: 200 | Loss: 1.096 | Acc: 62.75%
436
+ 2025-03-09 20:07:29,361 - train - INFO - Epoch: 87 | Batch: 300 | Loss: 1.107 | Acc: 62.41%
437
+ 2025-03-09 20:07:32,701 - train - INFO - Epoch: 87 | Test Loss: 1.017 | Test Acc: 66.01%
438
+ 2025-03-09 20:07:32,864 - train - INFO - Epoch: 88 | Batch: 0 | Loss: 0.993 | Acc: 71.88%
439
+ 2025-03-09 20:07:34,869 - train - INFO - Epoch: 88 | Batch: 100 | Loss: 1.064 | Acc: 64.05%
440
+ 2025-03-09 20:07:36,860 - train - INFO - Epoch: 88 | Batch: 200 | Loss: 1.078 | Acc: 63.53%
441
+ 2025-03-09 20:07:38,850 - train - INFO - Epoch: 88 | Batch: 300 | Loss: 1.093 | Acc: 63.05%
442
+ 2025-03-09 20:07:41,861 - train - INFO - Epoch: 88 | Test Loss: 0.984 | Test Acc: 67.59%
443
+ 2025-03-09 20:07:50,269 - train - INFO - Epoch: 89 | Batch: 0 | Loss: 1.033 | Acc: 64.06%
444
+ 2025-03-09 20:07:52,348 - train - INFO - Epoch: 89 | Batch: 100 | Loss: 1.060 | Acc: 64.22%
445
+ 2025-03-09 20:07:54,441 - train - INFO - Epoch: 89 | Batch: 200 | Loss: 1.060 | Acc: 64.06%
446
+ 2025-03-09 20:07:56,483 - train - INFO - Epoch: 89 | Batch: 300 | Loss: 1.066 | Acc: 63.70%
447
+ 2025-03-09 20:07:59,622 - train - INFO - Epoch: 89 | Test Loss: 1.052 | Test Acc: 65.01%
448
+ 2025-03-09 20:07:59,816 - train - INFO - Epoch: 90 | Batch: 0 | Loss: 1.035 | Acc: 65.62%
449
+ 2025-03-09 20:08:02,021 - train - INFO - Epoch: 90 | Batch: 100 | Loss: 1.072 | Acc: 64.50%
450
+ 2025-03-09 20:08:04,109 - train - INFO - Epoch: 90 | Batch: 200 | Loss: 1.073 | Acc: 63.96%
451
+ 2025-03-09 20:08:06,098 - train - INFO - Epoch: 90 | Batch: 300 | Loss: 1.079 | Acc: 63.72%
452
+ 2025-03-09 20:08:09,147 - train - INFO - Epoch: 90 | Test Loss: 0.991 | Test Acc: 67.06%
453
+ 2025-03-09 20:08:09,323 - train - INFO - Epoch: 91 | Batch: 0 | Loss: 1.019 | Acc: 66.41%
454
+ 2025-03-09 20:08:11,288 - train - INFO - Epoch: 91 | Batch: 100 | Loss: 1.040 | Acc: 64.88%
455
+ 2025-03-09 20:08:13,266 - train - INFO - Epoch: 91 | Batch: 200 | Loss: 1.043 | Acc: 64.61%
456
+ 2025-03-09 20:08:15,248 - train - INFO - Epoch: 91 | Batch: 300 | Loss: 1.056 | Acc: 64.13%
457
+ 2025-03-09 20:08:18,420 - train - INFO - Epoch: 91 | Test Loss: 1.099 | Test Acc: 63.08%
458
+ 2025-03-09 20:08:18,624 - train - INFO - Epoch: 92 | Batch: 0 | Loss: 1.044 | Acc: 68.75%
459
+ 2025-03-09 20:08:20,632 - train - INFO - Epoch: 92 | Batch: 100 | Loss: 1.055 | Acc: 64.34%
460
+ 2025-03-09 20:08:22,645 - train - INFO - Epoch: 92 | Batch: 200 | Loss: 1.051 | Acc: 64.52%
461
+ 2025-03-09 20:08:24,710 - train - INFO - Epoch: 92 | Batch: 300 | Loss: 1.063 | Acc: 63.84%
462
+ 2025-03-09 20:08:27,778 - train - INFO - Epoch: 92 | Test Loss: 1.023 | Test Acc: 65.02%
463
+ 2025-03-09 20:08:36,292 - train - INFO - Epoch: 93 | Batch: 0 | Loss: 1.010 | Acc: 64.06%
464
+ 2025-03-09 20:08:38,381 - train - INFO - Epoch: 93 | Batch: 100 | Loss: 1.084 | Acc: 63.50%
465
+ 2025-03-09 20:08:40,403 - train - INFO - Epoch: 93 | Batch: 200 | Loss: 1.055 | Acc: 64.34%
466
+ 2025-03-09 20:08:42,384 - train - INFO - Epoch: 93 | Batch: 300 | Loss: 1.055 | Acc: 64.37%
467
+ 2025-03-09 20:08:45,302 - train - INFO - Epoch: 93 | Test Loss: 1.040 | Test Acc: 65.43%
468
+ 2025-03-09 20:08:45,476 - train - INFO - Epoch: 94 | Batch: 0 | Loss: 1.013 | Acc: 66.41%
469
+ 2025-03-09 20:08:47,512 - train - INFO - Epoch: 94 | Batch: 100 | Loss: 1.048 | Acc: 64.67%
470
+ 2025-03-09 20:08:49,355 - train - INFO - Epoch: 94 | Batch: 200 | Loss: 1.034 | Acc: 64.90%
471
+ 2025-03-09 20:08:51,320 - train - INFO - Epoch: 94 | Batch: 300 | Loss: 1.032 | Acc: 65.03%
472
+ 2025-03-09 20:08:54,352 - train - INFO - Epoch: 94 | Test Loss: 1.007 | Test Acc: 65.77%
473
+ 2025-03-09 20:08:54,532 - train - INFO - Epoch: 95 | Batch: 0 | Loss: 0.963 | Acc: 68.75%
474
+ 2025-03-09 20:08:56,486 - train - INFO - Epoch: 95 | Batch: 100 | Loss: 1.044 | Acc: 64.52%
475
+ 2025-03-09 20:08:58,522 - train - INFO - Epoch: 95 | Batch: 200 | Loss: 1.059 | Acc: 64.16%
476
+ 2025-03-09 20:09:00,641 - train - INFO - Epoch: 95 | Batch: 300 | Loss: 1.046 | Acc: 64.58%
477
+ 2025-03-09 20:09:03,741 - train - INFO - Epoch: 95 | Test Loss: 1.032 | Test Acc: 65.96%
478
+ 2025-03-09 20:09:03,950 - train - INFO - Epoch: 96 | Batch: 0 | Loss: 0.983 | Acc: 65.62%
479
+ 2025-03-09 20:09:05,855 - train - INFO - Epoch: 96 | Batch: 100 | Loss: 1.032 | Acc: 64.93%
480
+ 2025-03-09 20:09:07,774 - train - INFO - Epoch: 96 | Batch: 200 | Loss: 1.017 | Acc: 65.41%
481
+ 2025-03-09 20:09:09,825 - train - INFO - Epoch: 96 | Batch: 300 | Loss: 1.017 | Acc: 65.49%
482
+ 2025-03-09 20:09:12,898 - train - INFO - Epoch: 96 | Test Loss: 1.003 | Test Acc: 65.83%
483
+ 2025-03-09 20:09:21,539 - train - INFO - Epoch: 97 | Batch: 0 | Loss: 0.992 | Acc: 66.41%
484
+ 2025-03-09 20:09:23,513 - train - INFO - Epoch: 97 | Batch: 100 | Loss: 1.008 | Acc: 65.87%
485
+ 2025-03-09 20:09:25,738 - train - INFO - Epoch: 97 | Batch: 200 | Loss: 1.015 | Acc: 65.68%
486
+ 2025-03-09 20:09:27,822 - train - INFO - Epoch: 97 | Batch: 300 | Loss: 1.024 | Acc: 65.47%
487
+ 2025-03-09 20:09:31,131 - train - INFO - Epoch: 97 | Test Loss: 0.978 | Test Acc: 67.57%
488
+ 2025-03-09 20:09:31,294 - train - INFO - Epoch: 98 | Batch: 0 | Loss: 0.893 | Acc: 67.97%
489
+ 2025-03-09 20:09:33,256 - train - INFO - Epoch: 98 | Batch: 100 | Loss: 1.005 | Acc: 65.52%
490
+ 2025-03-09 20:09:35,255 - train - INFO - Epoch: 98 | Batch: 200 | Loss: 1.003 | Acc: 65.77%
491
+ 2025-03-09 20:09:37,222 - train - INFO - Epoch: 98 | Batch: 300 | Loss: 1.001 | Acc: 66.07%
492
+ 2025-03-09 20:09:40,328 - train - INFO - Epoch: 98 | Test Loss: 0.932 | Test Acc: 68.67%
493
+ 2025-03-09 20:09:40,520 - train - INFO - Epoch: 99 | Batch: 0 | Loss: 0.896 | Acc: 66.41%
494
+ 2025-03-09 20:09:42,505 - train - INFO - Epoch: 99 | Batch: 100 | Loss: 1.014 | Acc: 65.79%
495
+ 2025-03-09 20:09:44,453 - train - INFO - Epoch: 99 | Batch: 200 | Loss: 1.009 | Acc: 65.97%
496
+ 2025-03-09 20:09:46,594 - train - INFO - Epoch: 99 | Batch: 300 | Loss: 1.004 | Acc: 65.94%
497
+ 2025-03-09 20:09:49,671 - train - INFO - Epoch: 99 | Test Loss: 0.956 | Test Acc: 68.30%
498
+ 2025-03-09 20:09:49,866 - train - INFO - Epoch: 100 | Batch: 0 | Loss: 0.916 | Acc: 69.53%
499
+ 2025-03-09 20:09:52,093 - train - INFO - Epoch: 100 | Batch: 100 | Loss: 1.005 | Acc: 65.68%
500
+ 2025-03-09 20:09:54,201 - train - INFO - Epoch: 100 | Batch: 200 | Loss: 0.994 | Acc: 66.23%
501
+ 2025-03-09 20:09:56,269 - train - INFO - Epoch: 100 | Batch: 300 | Loss: 0.995 | Acc: 66.23%
502
+ 2025-03-09 20:09:59,443 - train - INFO - Epoch: 100 | Test Loss: 1.019 | Test Acc: 66.05%
503
+ 2025-03-09 20:10:08,141 - train - INFO - Training complete!
Image/AlexNet/code/train.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import os
3
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
4
+ from utils.dataset_utils import get_cifar10_dataloaders
5
+ from utils.train_utils import train_model, train_model_data_augmentation, train_model_backdoor
6
+ from utils.parse_args import parse_args
7
+ from model import AlexNet
8
+ # args.train_type: 0 = normal training, 1 = data-augmentation training, 2 = backdoor training
9
+
10
+ def main():
11
+ # parse command-line arguments
12
+ args = parse_args()
13
+ # create the model
14
+ model = AlexNet()
15
+ if args.train_type == '0':
16
+ # get the data loaders
17
+ trainloader, testloader = get_cifar10_dataloaders(batch_size=args.batch_size, local_dataset_path=args.dataset_path)
18
+ # train the model
19
+ train_model(
20
+ model=model,
21
+ trainloader=trainloader,
22
+ testloader=testloader,
23
+ epochs=args.epochs,
24
+ lr=args.lr,
25
+ device=f'cuda:{args.gpu}',
26
+ save_dir='../model',
27
+ model_name='alexnet',
28
+ layer_name='conv3.2'
29
+ )
30
+ elif args.train_type == '1':
31
+ train_model_data_augmentation(model, epochs=args.epochs, lr=args.lr, device=f'cuda:{args.gpu}',
32
+ save_dir='../model', model_name='alexnet',
33
+ batch_size=args.batch_size, num_workers=args.num_workers,
34
+ local_dataset_path=args.dataset_path)
35
+ elif args.train_type == '2':
36
+ train_model_backdoor(model, poison_ratio=0.1, target_label=0, epochs=args.epochs, lr=args.lr,
37
+ device=f'cuda:{args.gpu}', save_dir='../model', model_name='alexnet',
38
+ batch_size=args.batch_size, num_workers=args.num_workers,
39
+ local_dataset_path=args.dataset_path, layer_name='conv3.2')
40
+
41
+ if __name__ == '__main__':
42
+ main()
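
Note: the per-epoch directories listed below (Image/AlexNet/model/0/epochN/) each hold a subject_model.pth and an embeddings.npy. A minimal inspection sketch is shown here; it assumes subject_model.pth is a plain state_dict of the AlexNet above and embeddings.npy an (N, 512) float32 array, both inferred from the file sizes rather than from documented behaviour of train_utils.

import numpy as np
import torch

from model import AlexNet

net = AlexNet()
# Assumption: the checkpoint is a state_dict; load_state_dict will raise if it is not.
state = torch.load('Image/AlexNet/model/0/epoch25/subject_model.pth', map_location='cpu')
net.load_state_dict(state)
net.eval()

emb = np.load('Image/AlexNet/model/0/epoch25/embeddings.npy')
print(emb.shape, emb.dtype)  # expected: (50000, 512) float32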
Image/AlexNet/dataset/.gitkeep ADDED
File without changes
Image/AlexNet/model/0/epoch1/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d6821ced877c4142fb43869d0c012cc021a0b345481a956e674f1ca23cf562dd
3
+ size 102400128
Image/AlexNet/model/0/epoch1/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:627be8aeffac84936d668c55f53d81bde77a4adccb8cc7b9fb5298c24db2377c
3
+ size 504030
Image/AlexNet/model/0/epoch10/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fa2fc49313edc663ca2d2f5f30ad503e4bd3bd327ee79ef750c672dc99e14117
3
+ size 102400128
Image/AlexNet/model/0/epoch10/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8e7833a1f521ddeb7e339f405f5a9eabc1b72ae65e07cb04e7382b4157f3a524
3
+ size 504030
Image/AlexNet/model/0/epoch11/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c183185a4010aaa9b8d95d8b55c39d81d676d8cfe59bf7ab53d03f032d79a2e6
3
+ size 102400128
Image/AlexNet/model/0/epoch11/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9d9ebc99ef61325c7fc2d918b93213dff60b007e78ff8c0e6b742a3780376445
3
+ size 504030
Image/AlexNet/model/0/epoch12/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7feafa977c4b00e19952c97a3bfda6be0cfda676452ceb76dd90e4c46a0ec8c4
3
+ size 102400128
Image/AlexNet/model/0/epoch12/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1c4a1d959572699b4c3d11ad82ea2c38c93c9d4fca4378ecf517d84290581d61
3
+ size 504030
Image/AlexNet/model/0/epoch13/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d168848025d937363ef62aee6e34687866f23066aaa833ae1ae5def57fa167ab
3
+ size 102400128
Image/AlexNet/model/0/epoch13/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:344989713106a9417acf7843ac665278f00ee668a27f6a86583304dc662eec55
3
+ size 504030
Image/AlexNet/model/0/epoch14/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1313f0b51793784f0f9dac212866d89e4d5100815bdfac64e478cd8294db245a
3
+ size 102400128
Image/AlexNet/model/0/epoch14/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8b602e19274664678c893a86a4ab009351ef30e1bdcc7b849e92c230ce1d3fbe
3
+ size 504030
Image/AlexNet/model/0/epoch15/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4bae98bee14fde7e418d9170696bf1d58d7f6dc660029aa3ca398ebe36fe3a2b
3
+ size 102400128
Image/AlexNet/model/0/epoch15/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1ac906242fc7fcf3d4caad304ceb0e513aaf9d07d9aa81b295e2e608e3770d92
3
+ size 504030
Image/AlexNet/model/0/epoch16/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:18d92f53bf3de4f7990a36788dac7f2fe278e2a2fe910892c02927730bb4d157
3
+ size 102400128
Image/AlexNet/model/0/epoch16/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:71e3dde3dd78fdefd0103332ccfed0f1f6caaef52591f400211db0c2a5f75159
3
+ size 504030
Image/AlexNet/model/0/epoch17/embeddings.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:63153e8eaa2813364e8274eed93c798b53fd83d648b3a3cb02d614da4249c7a9
3
+ size 102400128
Image/AlexNet/model/0/epoch17/subject_model.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4763ff84b8d47ec9168335351c43235bb16db45eed50b88e4cfb20f7830ba15a
3
+ size 504030
Image/AlexNet/model/0/epoch18/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ab17898773f2697e0d27efb3e001cdb36b16d1e50c4120254a7516484fdb852
+ size 102400128
Image/AlexNet/model/0/epoch18/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:890aa5282eceb12688bb2962d3efcb183f0ea9133763edfa4538c795f70a4f35
+ size 504030
Image/AlexNet/model/0/epoch19/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e216289ef89446d6036cb2c43fbdb6c422fb3a4494a266f37225e68ca9a212e
+ size 102400128
Image/AlexNet/model/0/epoch19/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4654d5eb2f0c48ccfc9401b39df7ccb1b05b71ee2fff4b82324b4fa36c06295a
+ size 504030
Image/AlexNet/model/0/epoch2/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdd4967f08295d1a6cb6f754f2bde1d184ff98a5ee53d0927d516022c68b6e74
+ size 102400128
Image/AlexNet/model/0/epoch2/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4eda7982c92730cd281f998564592591d9ecbdaec5872cad7e48d1016a699cdf
+ size 504030
Image/AlexNet/model/0/epoch20/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e24f5e27e0af90a5e05d219ac1e8b4145ff09891b9a96c4cdc7db888cb301da5
+ size 102400128
Image/AlexNet/model/0/epoch20/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa5a282151c770473b2d2fba2d2d93bf7b0df419645752967f2847a66d69ba20
+ size 504030
Image/AlexNet/model/0/epoch21/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0cca779bd795f02b5dcfadbc0d94f242ba3b71a9595a9ecddd25dfe382809a8
+ size 102400128
Image/AlexNet/model/0/epoch21/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f75c8cd907e636860b42eb9e48c255275b35ffea8d796588e98933fbc63407e6
+ size 504030
Image/AlexNet/model/0/epoch22/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:102862fc18fb57c1b80f5567edf3184b57f47941166871cf79224916ace6bfa9
+ size 102400128
Image/AlexNet/model/0/epoch22/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83373d9958fa6dfea29a706fc915adff916d27ea4df4a43374a9d1b65ef10631
+ size 504030
Image/AlexNet/model/0/epoch23/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f727d422d39de56d9ae9a3c702a1de543e51f2cfaa8f1b1e8c14e114f96380a8
+ size 102400128
Image/AlexNet/model/0/epoch23/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2783b7549f6773cf6795b6b5289aab01b6bc3435a5abf76058903609a26e1500
+ size 504030
Image/AlexNet/model/0/epoch24/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:616994b0ca2995e7414d9f115dbf59d90beed0094dd7300e54c7ea55710ec4e3
+ size 102400128
Image/AlexNet/model/0/epoch24/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9901c77a9a23683436f90a0445550597ba5d4dcb3614b90ddf13ef3d7ba626a4
+ size 504030
Image/AlexNet/model/0/epoch25/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:58f45051955a9872fd6be62caf1459b0c0c1f78516a925037c57b163257aa73c
+ size 102400128
Image/AlexNet/model/0/epoch25/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1490c924fed9da15f400a73abb5a4592c807277997de473b9075b4f9c2cc178e
+ size 504030
Image/AlexNet/model/0/epoch3/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e64bef991238315064614a7a4e9c0654b361466b76db179d51bd6983c91f21d
+ size 102400128
Image/AlexNet/model/0/epoch3/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e071afd5db9bf4992a8456350cc14f7448f72e9c8bd2633af60f4be2c46544e4
+ size 504030
Image/AlexNet/model/0/epoch4/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b3bed11a3e5243cb7885899e78d3516d2b1be888964ec55dc9941e2c6536c275
+ size 102400128
Image/AlexNet/model/0/epoch4/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1601ea4d88a2e8c8b91d41260caaf1e0aa5ff2f3c7eec70c78834cbb18e5b09d
+ size 504030
Image/AlexNet/model/0/epoch5/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:113a78b111dcfed47bc133ab4c6a8938edb9d2c4602a46503f51ce1a5bb2bc4c
+ size 102400128
Image/AlexNet/model/0/epoch5/subject_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb5adc11bb4ba0b31461c0aadb7360d06dc1aec22e76d7edcc34058d4d3e8ed6
+ size 504030
Image/AlexNet/model/0/epoch6/embeddings.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf1b170704da827cdb96b00a47738201319d6396ce5077fd7458b922dc72a624
+ size 102400128