NORLIE JHON MALAGDAO committed on
Commit f8c68bd · verified · 1 Parent(s): 41cbe95

Update app.py

Files changed (1)
  1. app.py +98 -96
app.py CHANGED
@@ -1,16 +1,20 @@
+import gradio as gr
+import matplotlib.pyplot as plt
+import numpy as np
 import os
-import zipfile
-import gdown
-import pathlib
+import PIL
 import tensorflow as tf
-from tensorflow.keras.preprocessing import image_dataset_from_directory

+from tensorflow import keras
 from tensorflow.keras import layers
 from tensorflow.keras.models import Sequential
-from tensorflow.keras.layers import Conv2D, MaxPooling2D, Dropout, Flatten, Dense, BatchNormalization, Rescaling
-from tensorflow.keras.callbacks import EarlyStopping, LearningRateScheduler
-import gradio as gr
-import numpy as np
+
+
+from PIL import Image
+import gdown
+import zipfile
+
+import pathlib

 # Define the Google Drive shareable link
 gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'
@@ -40,127 +44,127 @@ except zipfile.BadZipFile:
 os.remove(local_zip_file)

 # Convert the extracted directory path to a pathlib.Path object
-data_dir = pathlib.Path('extracted_files/Pest_Dataset')
-
-
-
-# Set image dimensions and batch size
-img_height, img_width = 180, 180
-batch_size = 32
-
-# Create training and validation datasets
-train_ds = image_dataset_from_directory(
-    data_dir,
-    validation_split=0.2,
-    subset="training",
-    seed=123,
-    image_size=(img_height, img_width),
-    batch_size=batch_size
-)
-
-val_ds = image_dataset_from_directory(
-    data_dir,
-    validation_split=0.2,
-    subset="validation",
-    seed=123,
-    image_size=(img_height, img_width),
-    batch_size=batch_size
-)
+data_dir = pathlib.Path(extracted_path)

-class_names = train_ds.class_names
-print(class_names)
+# Print the directory structure to debug
+for root, dirs, files in os.walk(extracted_path):
+    level = root.replace(extracted_path, '').count(os.sep)
+    indent = ' ' * 4 * (level)
+    print(f"{indent}{os.path.basename(root)}/")
+    subindent = ' ' * 4 * (level + 1)
+    for f in files:
+        print(f"{subindent}{f}")

+import pathlib
+# Path to the dataset directory
+data_dir = pathlib.Path('extracted_files/Pest_Dataset')
+data_dir = pathlib.Path(data_dir)


+bees = list(data_dir.glob('bees/*'))
+print(bees[0])
+PIL.Image.open(str(bees[0]))


+bees = list(data_dir.glob('bees/*'))
+print(bees[0])
+PIL.Image.open(str(bees[0]))

-data_augmentation = tf.keras.Sequential(
-    [
-        layers.RandomFlip("horizontal", input_shape=(img_height, img_width, 3)),
-        layers.RandomRotation(0.2),
-        layers.RandomZoom(0.2),
-        layers.RandomContrast(0.2),
-        layers.RandomBrightness(0.2),
-    ]
-)

+img_height, img_width = 180, 180
+batch_size = 32
+train_ds = tf.keras.preprocessing.image_dataset_from_directory(
+    data_dir,
+    validation_split=0.2,
+    subset="training",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size)


+val_ds = tf.keras.preprocessing.image_dataset_from_directory(
+    data_dir,
+    validation_split=0.2,
+    subset="validation",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size)

-num_classes = len(class_names)
-model = Sequential()

-model.add(data_augmentation)
-model.add(Rescaling(1./255))
+class_names = train_ds.class_names
+print(class_names)

-model.add(Conv2D(32, 3, padding='same', activation='relu'))
-model.add(BatchNormalization())
-model.add(MaxPooling2D())

-model.add(Conv2D(64, 3, padding='same', activation='relu'))
-model.add(BatchNormalization())
-model.add(MaxPooling2D())
+import matplotlib.pyplot as plt

-model.add(Conv2D(128, 3, padding='same', activation='relu'))
-model.add(BatchNormalization())
-model.add(MaxPooling2D())
+plt.figure(figsize=(10, 10))
+for images, labels in train_ds.take(1):
+    for i in range(9):
+        ax = plt.subplot(3, 3, i + 1)
+        plt.imshow(images[i].numpy().astype("uint8"))
+        plt.title(class_names[labels[i]])
+        plt.axis("off")

-model.add(Conv2D(256, 3, padding='same', activation='relu'))
-model.add(BatchNormalization())
-model.add(MaxPooling2D())

-model.add(Conv2D(512, 3, padding='same', activation='relu'))
-model.add(BatchNormalization())
-model.add(MaxPooling2D())
+data_augmentation = keras.Sequential(
+    [
+        layers.RandomFlip("horizontal", input_shape=(img_height, img_width, 3)),
+        layers.RandomRotation(0.1),
+        layers.RandomZoom(0.1),
+    ]
+)

-model.add(Dropout(0.5))
-model.add(Flatten())

-model.add(Dense(256, activation='relu'))
-model.add(Dropout(0.5))
+plt.figure(figsize=(10, 10))
+for images, _ in train_ds.take(1):
+    for i in range(9):
+        augmented_images = data_augmentation(images)
+        ax = plt.subplot(3, 3, i + 1)
+        plt.imshow(augmented_images[0].numpy().astype("uint8"))
+        plt.axis("off")

-model.add(Dense(num_classes, activation='softmax', name="outputs"))

-model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-4),
-              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
+num_classes = len(class_names)
+model = Sequential([
+    data_augmentation,
+    layers.Rescaling(1./255),
+    layers.Conv2D(16, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(32, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(64, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Dropout(0.2),
+    layers.Flatten(),
+    layers.Dense(128, activation='relu'),
+    layers.Dense(num_classes, activation='softmax', name="outputs")  # Use softmax here
+])
+
+model.compile(optimizer='adam',
+              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),  # Change from_logits to False
               metrics=['accuracy'])

 model.summary()


-
-# Implement early stopping
-early_stopping = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
-
-# Learning rate scheduler
-def scheduler(epoch, lr):
-    if epoch < 10:
-        return lr
-    else:
-        return lr * tf.math.exp(-0.1)
-
-lr_scheduler = LearningRateScheduler(scheduler)
-
-# Train the model
-epochs = 30
+epochs = 15
 history = model.fit(
-    train_ds,
-    validation_data=val_ds,
-    epochs=epochs,
-    callbacks=[early_stopping, lr_scheduler]
+    train_ds,
+    validation_data=val_ds,
+    epochs=epochs
 )


+import gradio as gr
+import numpy as np
+import tensorflow as tf

 def predict_image(img):
     img = np.array(img)
-    img_resized = tf.image.resize(img, (img_height, img_width))
+    img_resized = tf.image.resize(img, (180, 180))
     img_4d = tf.expand_dims(img_resized, axis=0)
     prediction = model.predict(img_4d)[0]
-    predicted_class = np.argmax(prediction)
-    predicted_label = class_names[predicted_class]
-    return {predicted_label: f"{float(prediction[predicted_class]):.2f}"}
+    return {class_names[i]: float(prediction[i]) for i in range(len(class_names))}

 image = gr.Image()
 label = gr.Label(num_top_classes=1)
@@ -184,5 +188,3 @@ gr.Interface(
     description="The image data set used was obtained from Kaggle and has a collection of 12 different types of agricultural pests: Ants, Bees, Beetles, Caterpillars, Earthworms, Earwigs, Grasshoppers, Moths, Slugs, Snails, Wasps, and Weevils",
     css=custom_css
 ).launch(debug=True)
-
-
 