NORLIE JHON MALAGDAO committed
Commit 41cbe95 · verified · 1 Parent(s): 79cd26d

Update app.py

Files changed (1):
  app.py +45 -60
app.py CHANGED
@@ -3,10 +3,12 @@ import zipfile
 import gdown
 import pathlib
 import tensorflow as tf
-from tensorflow import keras
+from tensorflow.keras.preprocessing import image_dataset_from_directory
+
 from tensorflow.keras import layers
 from tensorflow.keras.models import Sequential
-import matplotlib.pyplot as plt
+from tensorflow.keras.layers import Conv2D, MaxPooling2D, Dropout, Flatten, Dense, BatchNormalization, Rescaling
+from tensorflow.keras.callbacks import EarlyStopping, LearningRateScheduler
 import gradio as gr
 import numpy as np
 
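Note: this hunk drops the unused matplotlib import and the bare `from tensorflow import keras`, switching to direct submodule imports. A minimal sketch of the import paths, assuming TF 2.x (on TF >= 2.9 the same function is also exposed under tf.keras.utils):

    import tensorflow as tf
    # Older location, as used in this commit:
    from tensorflow.keras.preprocessing import image_dataset_from_directory
    # Newer alias on TF >= 2.9 (assumption about the installed version):
    # from tensorflow.keras.utils import image_dataset_from_directory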
@@ -40,21 +42,14 @@ os.remove(local_zip_file)
 # Convert the extracted directory path to a pathlib.Path object
 data_dir = pathlib.Path('extracted_files/Pest_Dataset')
 
-# Verify the directory structure
-for root, dirs, files in os.walk(extracted_path):
-    level = root.replace(extracted_path, '').count(os.sep)
-    indent = ' ' * 4 * (level)
-    print(f"{indent}{os.path.basename(root)}/")
-    subindent = ' ' * 4 * (level + 1)
-    for f in files:
-        print(f"{subindent}{f}")
+
 
 # Set image dimensions and batch size
 img_height, img_width = 180, 180
 batch_size = 32
 
 # Create training and validation datasets
-train_ds = tf.keras.preprocessing.image_dataset_from_directory(
+train_ds = image_dataset_from_directory(
     data_dir,
     validation_split=0.2,
     subset="training",
@@ -63,7 +58,7 @@ train_ds = tf.keras.preprocessing.image_dataset_from_directory(
     batch_size=batch_size
 )
 
-val_ds = tf.keras.preprocessing.image_dataset_from_directory(
+val_ds = image_dataset_from_directory(
     data_dir,
     validation_split=0.2,
     subset="validation",
@@ -75,17 +70,12 @@ val_ds = tf.keras.preprocessing.image_dataset_from_directory(
 class_names = train_ds.class_names
 print(class_names)
 
-# Display some sample images
-plt.figure(figsize=(10, 10))
-for images, labels in train_ds.take(1):
-    for i in range(9):
-        ax = plt.subplot(3, 3, i + 1)
-        plt.imshow(images[i].numpy().astype("uint8"))
-        plt.title(class_names[labels[i]])
-        plt.axis("off")
-
-# Enhanced data augmentation
-data_augmentation = keras.Sequential(
+
+
+
+
+
+data_augmentation = tf.keras.Sequential(
     [
         layers.RandomFlip("horizontal", input_shape=(img_height, img_width, 3)),
         layers.RandomRotation(0.2),
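Note: Keras preprocessing layers such as RandomFlip and RandomRotation are only active when called with training=True; inside the model they act as identity ops at inference time, so keeping data_augmentation in the Sequential model is safe for the Gradio predict path. A quick sanity check, assuming a batch `images` taken from train_ds:

    augmented = data_augmentation(images, training=True)    # randomly perturbed
    unchanged = data_augmentation(images, training=False)   # passes through as-is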
@@ -95,59 +85,52 @@ data_augmentation = keras.Sequential(
     ]
 )
 
-# Display augmented images
-plt.figure(figsize=(10, 10))
-for images, _ in train_ds.take(1):
-    for i in range(9):
-        augmented_images = data_augmentation(images)
-        ax = plt.subplot(3, 3, i + 1)
-        plt.imshow(augmented_images[0].numpy().astype("uint8"))
-        plt.axis("off")
 
-# Define a deeper CNN model with more regularization techniques
+
+
 num_classes = len(class_names)
 model = Sequential()
 
 model.add(data_augmentation)
-model.add(layers.Rescaling(1./255))
+model.add(Rescaling(1./255))
 
-model.add(layers.Conv2D(32, 3, padding='same', activation='relu'))
-model.add(layers.BatchNormalization())
-model.add(layers.MaxPooling2D())
+model.add(Conv2D(32, 3, padding='same', activation='relu'))
+model.add(BatchNormalization())
+model.add(MaxPooling2D())
 
-model.add(layers.Conv2D(64, 3, padding='same', activation='relu'))
-model.add(layers.BatchNormalization())
-model.add(layers.MaxPooling2D())
+model.add(Conv2D(64, 3, padding='same', activation='relu'))
+model.add(BatchNormalization())
+model.add(MaxPooling2D())
 
-model.add(layers.Conv2D(128, 3, padding='same', activation='relu'))
-model.add(layers.BatchNormalization())
-model.add(layers.MaxPooling2D())
+model.add(Conv2D(128, 3, padding='same', activation='relu'))
+model.add(BatchNormalization())
+model.add(MaxPooling2D())
 
-model.add(layers.Conv2D(256, 3, padding='same', activation='relu'))
-model.add(layers.BatchNormalization())
-model.add(layers.MaxPooling2D())
+model.add(Conv2D(256, 3, padding='same', activation='relu'))
+model.add(BatchNormalization())
+model.add(MaxPooling2D())
 
-model.add(layers.Conv2D(512, 3, padding='same', activation='relu'))
-model.add(layers.BatchNormalization())
-model.add(layers.MaxPooling2D())
+model.add(Conv2D(512, 3, padding='same', activation='relu'))
+model.add(BatchNormalization())
+model.add(MaxPooling2D())
 
-model.add(layers.Dropout(0.5))
-model.add(layers.Flatten())
+model.add(Dropout(0.5))
+model.add(Flatten())
 
-model.add(layers.Dense(256, activation='relu'))
-model.add(layers.Dropout(0.5))
+model.add(Dense(256, activation='relu'))
+model.add(Dropout(0.5))
 
-model.add(layers.Dense(num_classes, activation='softmax', name="outputs"))
+model.add(Dense(num_classes, activation='softmax', name="outputs"))
 
-model.compile(optimizer=keras.optimizers.Adam(learning_rate=1e-4),
+model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-4),
               loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
               metrics=['accuracy'])
 
 model.summary()
 
-# Implement early stopping
-from tensorflow.keras.callbacks import EarlyStopping
 
+
+# Implement early stopping
 early_stopping = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
 
 # Learning rate scheduler
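Note: the softmax on the output layer pairs correctly with from_logits=False in the loss; if the activation were ever removed, the loss would need from_logits=True. A sketch of the equivalent logits-based pairing (not what this commit does, just the consistent alternative):

    model.add(Dense(num_classes, name="outputs"))   # no softmax here
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=1e-4),
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=['accuracy'],
    )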
@@ -157,7 +140,7 @@ def scheduler(epoch, lr):
     else:
         return lr * tf.math.exp(-0.1)
 
-lr_scheduler = keras.callbacks.LearningRateScheduler(scheduler)
+lr_scheduler = LearningRateScheduler(scheduler)
 
 # Train the model
 epochs = 30
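Note: the hunk shows only the tail of scheduler(). A common shape for this hold-then-decay pattern is sketched below; the epoch threshold of 10 is an assumption, as the real value sits in the elided context:

    def scheduler(epoch, lr):
        if epoch < 10:                   # threshold is assumed, not from the diff
            return lr
        return lr * tf.math.exp(-0.1)    # exponential decay afterwards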
@@ -168,17 +151,17 @@ history = model.fit(
     callbacks=[early_stopping, lr_scheduler]
 )
 
-# Define the prediction function
+
+
 def predict_image(img):
     img = np.array(img)
-    img_resized = tf.image.resize(img, (180, 180))
+    img_resized = tf.image.resize(img, (img_height, img_width))
     img_4d = tf.expand_dims(img_resized, axis=0)
     prediction = model.predict(img_4d)[0]
     predicted_class = np.argmax(prediction)
     predicted_label = class_names[predicted_class]
     return {predicted_label: f"{float(prediction[predicted_class]):.2f}"}
 
-# Set up Gradio interface
 image = gr.Image()
 label = gr.Label(num_top_classes=1)
 
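Note: switching the resize to (img_height, img_width) keeps inference consistent with the training pipeline if the dimensions ever change. Separately, gr.Label expects confidences as floats; returning the raw probability instead of a formatted string is the more idiomatic shape, e.g. this sketch:

    def predict_image(img):
        img_4d = tf.expand_dims(tf.image.resize(np.array(img), (img_height, img_width)), axis=0)
        probs = model.predict(img_4d)[0]
        # Float confidences let gr.Label render its built-in confidence display
        return {class_names[i]: float(probs[i]) for i in range(len(class_names))}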
@@ -201,3 +184,5 @@ gr.Interface(
     description="The image data set used was obtained from Kaggle and has a collection of 12 different types of agricultural pests: Ants, Bees, Beetles, Caterpillars, Earthworms, Earwigs, Grasshoppers, Moths, Slugs, Snails, Wasps, and Weevils",
     css=custom_css
 ).launch(debug=True)
+
+