NORLIE JHON MALAGDAO committed on
Commit 5e42be8 · verified · 1 Parent(s): 20b17af

Update app.py

Files changed (1)
  1. app.py +43 -80
app.py CHANGED
@@ -8,15 +8,11 @@ import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.models import Sequential
-
-
from PIL import Image
import gdown
import zipfile
-
import pathlib

-
# Define the Google Drive shareable link
gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'

@@ -56,28 +52,18 @@ for root, dirs, files in os.walk(extracted_path):
    for f in files:
        print(f"{subindent}{f}")

-
# Path to the dataset directory
data_dir = pathlib.Path('extracted_files/Pest_Dataset')
data_dir = pathlib.Path(data_dir)

-
bees = list(data_dir.glob('bees/*'))
print(bees[0])
PIL.Image.open(str(bees[0]))

-
-bees = list(data_dir.glob('bees/*'))
-print(bees[0])
-PIL.Image.open(str(bees[0]))
-
-
-
batch_size = 32
img_height = 180
img_width = 180

-
train_ds = tf.keras.utils.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
@@ -86,7 +72,6 @@ train_ds = tf.keras.utils.image_dataset_from_directory(
    image_size=(img_height, img_width),
    batch_size=batch_size)

-
val_ds = tf.keras.utils.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
@@ -95,58 +80,71 @@ val_ds = tf.keras.utils.image_dataset_from_directory(
    image_size=(img_height, img_width),
    batch_size=batch_size)

-
class_names = train_ds.class_names
print(class_names)

-
-import matplotlib.pyplot as plt
-
-plt.figure(figsize=(10, 10))
-for images, labels in train_ds.take(1):
-    for i in range(9):
-        ax = plt.subplot(3, 3, i + 1)
-        plt.imshow(images[i].numpy().astype("uint8"))
-        plt.title(class_names[labels[i]])
-        plt.axis("off")
-
-
-
-for image_batch, labels_batch in train_ds:
-    print(image_batch.shape)
-    print(labels_batch.shape)
-    break
-
-
AUTOTUNE = tf.data.AUTOTUNE

train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size=AUTOTUNE)
val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)

-
normalization_layer = layers.Rescaling(1./255)

-
-
-
-
-
normalized_ds = train_ds.map(lambda x, y: (normalization_layer(x), y))
image_batch, labels_batch = next(iter(normalized_ds))
first_image = image_batch[0]
# Notice the pixel values are now in `[0,1]`.
print(np.min(first_image), np.max(first_image))

-num_classes = len(class_names)
+num_classes = len(class_names)

+model = Sequential([
+    layers.Rescaling(1./255, input_shape=(img_height, img_width, 3)),
+    layers.Conv2D(16, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(32, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(64, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Flatten(),
+    layers.Dense(128, activation='relu'),
+    layers.Dense(num_classes)
+])

+model.compile(optimizer='adam',
+              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
+              metrics=['accuracy'])

+model.summary()

+epochs=10
+history = model.fit(
+    train_ds,
+    validation_data=val_ds,
+    epochs=epochs
+)

+acc = history.history['accuracy']
+val_acc = history.history['val_accuracy']

+loss = history.history['loss']
+val_loss = history.history['val_loss']

+epochs_range = range(epochs)

+plt.figure(figsize=(8, 8))
+plt.subplot(1, 2, 1)
+plt.plot(epochs_range, acc, label='Training Accuracy')
+plt.plot(epochs_range, val_acc, label='Validation Accuracy')
+plt.legend(loc='lower right')
+plt.title('Training and Validation Accuracy')

+plt.subplot(1, 2, 2)
+plt.plot(epochs_range, loss, label='Training Loss')
+plt.plot(epochs_range, val_loss, label='Validation Loss')
+plt.legend(loc='upper right')
+plt.title('Training and Validation Loss')
+plt.show()

data_augmentation = keras.Sequential(
    [
@@ -159,8 +157,6 @@ data_augmentation = keras.Sequential(
    ]
)

-
-
plt.figure(figsize=(10, 10))
for images, _ in train_ds.take(1):
    for i in range(9):
@@ -169,9 +165,6 @@ for images, _ in train_ds.take(1):
        plt.imshow(augmented_images[0].numpy().astype("uint8"))
        plt.axis("off")

-
-
-
model = Sequential([
    data_augmentation,
    layers.Rescaling(1./255),
@@ -187,18 +180,12 @@ model = Sequential([
    layers.Dense(num_classes, name="outputs")
])

-
-
-
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])

-
model.summary()

-
-
epochs = 15
history = model.fit(
    train_ds,
@@ -206,41 +193,17 @@ history = model.fit(
    epochs=epochs
)

-
-
def predict_image(img):
    img = np.array(img)
    img_resized = tf.image.resize(img, (180, 180))
    img_4d = tf.expand_dims(img_resized, axis=0)
    prediction = model.predict(img_4d)[0]
-    return {class_names[i]: float(prediction[i]) for i in range(len(class_names))}
+    probabilities = tf.nn.softmax(prediction).numpy()
+    class_probabilities = {class_names[i]: probabilities[i] * 100 for i in range(len(class_names))}
+    return class_probabilities

image = gr.Image()
label = gr.Label(num_top_classes=1)

# Define custom CSS for background image
custom_css = """
-body {
-    background-image: url('extracted_files/Pest_Dataset/bees/bees (444).jpg');
-    background-size: cover;
-    background-repeat: no-repeat;
-    background-attachment: fixed;
-    color: white;
-}
-"""
-
-gr.Interface(
-    fn=predict_image,
-    inputs=image,
-    outputs=label,
-    title="Welcome to Agricultural Pest Image Classification",
-    description="The image data set used was obtained from Kaggle and has a collection of 12 different types of agricultural pests: Ants, Bees, Beetles, Caterpillars, Earthworms, Earwigs, Grasshoppers, Moths, Slugs, Snails, Wasps, and Weevils",
-    css=custom_css
-).launch(debug=True)
-
-
-
-
-
-
-