NORLIE JHON MALAGDAO committed
Commit 6c13748 · verified · 1 Parent(s): 83e4be4

Update app.py

Files changed (1):
  1. app.py +57 -85
app.py CHANGED
@@ -8,18 +8,11 @@ import tensorflow as tf
 from tensorflow import keras
 from tensorflow.keras import layers
 from tensorflow.keras.models import Sequential
-
-
 from PIL import Image
 import gdown
 import zipfile
-
 import pathlib
 
-
-
-
-
 # Define the Google Drive shareable link
 gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'
 
@@ -59,92 +52,55 @@ for root, dirs, files in os.walk(extracted_path):
     for f in files:
         print(f"{subindent}{f}")
 
-import pathlib
 # Path to the dataset directory
 data_dir = pathlib.Path('extracted_files/Pest_Dataset')
-data_dir = pathlib.Path(data_dir)
-
-
-bees = list(data_dir.glob('bees/*'))
-print(bees[0])
-PIL.Image.open(str(bees[0]))
-
 
-bees = list(data_dir.glob('bees/*'))
-print(bees[0])
-PIL.Image.open(str(bees[0]))
+img_height, img_width = 180, 180
+batch_size = 32
 
-
-img_height,img_width=180,180
-batch_size=32
+# Load training and validation datasets
 train_ds = tf.keras.preprocessing.image_dataset_from_directory(
-  data_dir,
-  validation_split=0.2,
-  subset="training",
-  seed=123,
-  image_size=(img_height, img_width),
-  batch_size=batch_size)
-
+    data_dir,
+    validation_split=0.2,
+    subset="training",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size
+)
 
 val_ds = tf.keras.preprocessing.image_dataset_from_directory(
-  data_dir,
-  validation_split=0.2,
-  subset="validation",
-  seed=123,
-  image_size=(img_height, img_width),
-  batch_size=batch_size)
-
-
-class_names = train_ds.class_names
-print(class_names)
-
-
-import matplotlib.pyplot as plt
-
-plt.figure(figsize=(10, 10))
-for images, labels in train_ds.take(1):
-  for i in range(9):
-    ax = plt.subplot(3, 3, i + 1)
-    plt.imshow(images[i].numpy().astype("uint8"))
-    plt.title(class_names[labels[i]])
-    plt.axis("off")
-
+    data_dir,
+    validation_split=0.2,
+    subset="validation",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size
+)
 
+# Define data augmentation layers
 data_augmentation = keras.Sequential(
-  [
-    layers.RandomFlip("horizontal",
-                      input_shape=(img_height,
-                                   img_width,
-                                   3)),
-    layers.RandomRotation(0.1),
-    layers.RandomZoom(0.1),
-  ]
+    [
+        layers.RandomFlip("horizontal", input_shape=(img_height, img_width, 3)),
+        layers.RandomRotation(0.1),
+        layers.RandomZoom(0.1),
+    ]
 )
 
-
-plt.figure(figsize=(10, 10))
-for images, _ in train_ds.take(1):
-  for i in range(9):
-    augmented_images = data_augmentation(images)
-    ax = plt.subplot(3, 3, i + 1)
-    plt.imshow(augmented_images[0].numpy().astype("uint8"))
-    plt.axis("off")
-
-
-num_classes = len(class_names)
+# Define the model
+num_classes = len(train_ds.class_names)
 model = Sequential([
-  data_augmentation,
-  layers.Rescaling(1./255),
-  layers.Conv2D(16, 3, padding='same', activation='relu'),
-  layers.MaxPooling2D(),
-  layers.Conv2D(32, 3, padding='same', activation='relu'),
-  layers.MaxPooling2D(),
-  layers.Conv2D(64, 3, padding='same', activation='relu'),
-  layers.MaxPooling2D(),
-  layers.Dropout(0.2),
-  layers.Flatten(),
-  layers.Dense(128, activation='relu'),
-  layers.Dense(num_classes, activation='softmax', name="outputs")  # Use softmax here
+    data_augmentation,
+    layers.Rescaling(1./255),
+    layers.Conv2D(16, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(32, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(64, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Dropout(0.2),
+    layers.Flatten(),
+    layers.Dense(128, activation='relu'),
+    layers.Dense(num_classes, activation='softmax', name="outputs")  # Use softmax here
 ])
 
 model.compile(optimizer='adam',
@@ -153,14 +109,30 @@ model.compile(optimizer='adam',
 
 model.summary()
 
-
+# Train the model
 epochs = 15
 history = model.fit(
-  train_ds,
-  validation_data=val_ds,
-  epochs=epochs
+    train_ds,
+    validation_data=val_ds,
+    epochs=epochs
 )
 
+# Plot training history
+plt.plot(history.history['accuracy'], label='accuracy')
+plt.plot(history.history['val_accuracy'], label='val_accuracy')
+plt.xlabel('Epoch')
+plt.ylabel('Accuracy')
+plt.legend()
+plt.show()
+
+plt.plot(history.history['loss'], label='loss')
+plt.plot(history.history['val_loss'], label='val_loss')
+plt.xlabel('Epoch')
+plt.ylabel('Loss')
+plt.legend()
+plt.show()
+
+
 
 
  # Define category descriptions
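
A few notes on the new code in this commit. The augmentation block defined above is only stochastic while training: Keras preprocessing layers such as RandomFlip, RandomRotation, and RandomZoom act as identity functions at inference time. A minimal sketch illustrating this with the names from the diff (the sketch itself is not part of the commit):

    # Sketch, not part of the commit: the augmentation pipeline is random
    # only when training=True; otherwise it passes images through unchanged.
    sample_batch, _ = next(iter(train_ds))
    augmented = data_augmentation(sample_batch, training=True)   # random flip/rotation/zoom
    unchanged = data_augmentation(sample_batch, training=False)  # identity pass-through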
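On the model.summary() output: with 180×180 inputs, 'same' padding, and three MaxPooling2D stages (180 → 90 → 45 → 22), Flatten emits 22 × 22 × 64 = 30,976 features, so the Dense(128) layer alone holds 30,976 × 128 + 128 ≈ 3.97M parameters. Nearly all of the model's size sits in that layer rather than in the convolutions.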
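The model.compile(...) arguments are cut off at the hunk boundary above. Because the output layer already applies softmax (see the "# Use softmax here" comment), the loss must treat the outputs as probabilities rather than logits. A hypothetical reconstruction of a compatible call (the commit's actual arguments are not visible in this diff):

    # Hypothetical reconstruction; only "model.compile(optimizer='adam'," is
    # shown in the diff. With softmax in the final layer, from_logits=False.
    model.compile(optimizer='adam',
                  loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
                  metrics=['accuracy'])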
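One caveat: the commit removes the old "import matplotlib.pyplot as plt" line, yet the newly added history plots still call plt. Unless matplotlib is already imported in the portion of app.py above line 8 (not shown in this diff), the plotting block would raise a NameError. The import the added code assumes:

    # Assumed to exist earlier in app.py; this diff removes the import from
    # this section and the added lines do not restore it.
    import matplotlib.pyplot as plt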