NORLIE JHON MALAGDAO committed
Commit fd3cb72 · verified · 1 Parent(s): 5d4f9ed

Update app.py
Files changed (1):
  1. app.py +77 -110
app.py CHANGED
@@ -4,24 +4,17 @@ import numpy as np
 import os
 import PIL
 import tensorflow as tf
-
-from tensorflow import keras
-from tensorflow.keras import layers
-from tensorflow.keras.models import Sequential
-
-
-from PIL import Image
 import gdown
 import zipfile
-
 import pathlib
-
-
+from tensorflow import keras
+from tensorflow.keras import layers, callbacks
+from tensorflow.keras.models import Sequential  # still used by the model definition below
+import matplotlib.pyplot as plt  # still used by the history plots below
+import gradio as gr  # still used by the interface below
 
 # Define the Google Drive shareable link
 gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'
-
-# Extract the file ID from the URL
 file_id = gdrive_url.split('/d/')[1].split('/view')[0]
 direct_download_url = f'https://drive.google.com/uc?id={file_id}'
 
@@ -46,137 +39,115 @@ except zipfile.BadZipFile:
 os.remove(local_zip_file)
 
 # Convert the extracted directory path to a pathlib.Path object
-data_dir = pathlib.Path(extracted_path)
+data_dir = pathlib.Path(extracted_path) / 'Pest_Dataset'
 
-# Print the directory structure to debug
-for root, dirs, files in os.walk(extracted_path):
-    level = root.replace(extracted_path, '').count(os.sep)
-    indent = ' ' * 4 * (level)
-    print(f"{indent}{os.path.basename(root)}/")
-    subindent = ' ' * 4 * (level + 1)
-    for f in files:
-        print(f"{subindent}{f}")
-
-import pathlib
-# Path to the dataset directory
-data_dir = pathlib.Path('extracted_files/Pest_Dataset')
-data_dir = pathlib.Path(data_dir)
-
-
-
-bees = list(data_dir.glob('bees/*'))
-print(bees[0])
-PIL.Image.open(str(bees[0]))
-
-img_height,img_width=180,180
-batch_size=32
+# Load and preprocess data
+img_height, img_width = 180, 180
+batch_size = 32
+
 train_ds = tf.keras.preprocessing.image_dataset_from_directory(
-    data_dir,
-    validation_split=0.2,
-    subset="training",
-    seed=123,
-    image_size=(img_height, img_width),
-    batch_size=batch_size)
+    data_dir,
+    validation_split=0.2,
+    subset="training",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size
+)
 
 val_ds = tf.keras.preprocessing.image_dataset_from_directory(
-    data_dir,
-    validation_split=0.2,
-    subset="validation",
-    seed=123,
-    image_size=(img_height, img_width),
-    batch_size=batch_size)
-
+    data_dir,
+    validation_split=0.2,
+    subset="validation",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size
+)
 
+# Class names
 class_names = train_ds.class_names
-print(class_names)
-
-import matplotlib.pyplot as plt
 
-plt.figure(figsize=(10, 10))
-for images, labels in train_ds.take(1):
-    for i in range(9):
-        ax = plt.subplot(3, 3, i + 1)
-        plt.imshow(images[i].numpy().astype("uint8"))
-        plt.title(class_names[labels[i]])
-        plt.axis("off")
-
-data_augmentation = keras.Sequential(
-    [
-        layers.RandomFlip("horizontal",
-                          input_shape=(img_height,
-                                       img_width,
-                                       3)),
+# Data augmentation
+data_augmentation = keras.Sequential([
+    layers.RandomFlip("horizontal"),
     layers.RandomRotation(0.1),
     layers.RandomZoom(0.1),
-    ]
-)
-
-plt.figure(figsize=(10, 10))
-for images, _ in train_ds.take(1):
-    for i in range(9):
-        augmented_images = data_augmentation(images)
-        ax = plt.subplot(3, 3, i + 1)
-        plt.imshow(augmented_images[0].numpy().astype("uint8"))
-        plt.axis("off")
-
+])
 
+# Model
 num_classes = len(class_names)
 model = Sequential([
-    data_augmentation,
-    layers.Rescaling(1./255),
-    layers.Conv2D(16, 3, padding='same', activation='relu'),
-    layers.MaxPooling2D(),
-    layers.Conv2D(32, 3, padding='same', activation='relu'),
-    layers.MaxPooling2D(),
-    layers.Conv2D(64, 3, padding='same', activation='relu'),
-    layers.MaxPooling2D(),
-    layers.Dropout(0.2),
-    layers.Flatten(),
-    layers.Dense(128, activation='relu'),
-    layers.Dense(num_classes, name="outputs")
+    data_augmentation,
+    layers.Rescaling(1./255),
+    layers.Conv2D(16, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(32, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(64, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Dropout(0.2),
+    layers.Flatten(),
+    layers.Dense(128, activation='relu'),
+    layers.Dense(num_classes, name="outputs")
 ])
 
-
+# Compile the model
 model.compile(optimizer='adam',
               loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
               metrics=['accuracy'])
 
-model.summary()
-
+# Early stopping callback
+early_stopping = callbacks.EarlyStopping(
+    monitor='val_loss', patience=5, restore_best_weights=True
+)
+
+# Train the model
 epochs = 50
 history = model.fit(
-    train_ds,
-    validation_data=val_ds,
-    epochs=epochs
+    train_ds,
+    validation_data=val_ds,
+    epochs=epochs,
+    callbacks=[early_stopping]
 )
 
+# Evaluate the model on validation data
 results = model.evaluate(val_ds, verbose=0)
-
 print("Validation Loss: {:.5f}".format(results[0]))
 print("Validation Accuracy: {:.2f}%".format(results[1] * 100))
 
-import gradio as gr
-import numpy as np
-import tensorflow as tf
-
+# Plot training history
+plt.figure(figsize=(12, 6))
+plt.subplot(1, 2, 1)
+plt.plot(history.history['loss'], label='Training Loss')
+plt.plot(history.history['val_loss'], label='Validation Loss')
+plt.xlabel('Epoch')
+plt.ylabel('Loss')
+plt.legend()
+plt.title('Training and Validation Loss')
+
+plt.subplot(1, 2, 2)
+plt.plot(history.history['accuracy'], label='Training Accuracy')
+plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
+plt.xlabel('Epoch')
+plt.ylabel('Accuracy')
+plt.legend()
+plt.title('Training and Validation Accuracy')
+plt.show()
+
+# Prediction function
 def predict_image(img):
     img = np.array(img)
-    img_resized = tf.image.resize(img, (180, 180))
+    img_resized = tf.image.resize(img, (img_height, img_width))
     img_4d = tf.expand_dims(img_resized, axis=0)
     prediction = model.predict(img_4d)[0]
-    return {class_names[i]: float(prediction[i]) for i in range(len(class_names))}
-
-
+    return {class_names[i]: float(prediction[i]) for i in range(num_classes)}
 
+# Interface
 image = gr.Image()
-label = gr.Label(num_top_classes=12)
+label = gr.Label(num_top_classes=num_classes)
 
-# Define custom CSS for background image
 custom_css = """
 body {
-    background-image: url('\extracted_files\Pest_Dataset\bees\bees (444).jpg');
+    background-image: url('extracted_files/Pest_Dataset/bees/bees (444).jpg');
     background-size: cover;
     background-repeat: no-repeat;
    background-attachment: fixed;

@@ -192,7 +163,3 @@ gr.Interface(
     description="Upload an image of a pest to classify it into one of the predefined categories.",
     css=custom_css
 ).launch(debug=True)
-
-
-
-
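
Note: the refactored app.py compiles with SparseCategoricalCrossentropy(from_logits=True) and its final Dense layer has no activation, so predict_image passes raw logits to gr.Label rather than probabilities. A minimal sketch of a softmax-normalized variant, assuming the same model, class_names, img_height/img_width, and num_classes as in the file above:

def predict_image(img):
    img = np.array(img)
    img_resized = tf.image.resize(img, (img_height, img_width))
    img_4d = tf.expand_dims(img_resized, axis=0)
    logits = model.predict(img_4d)[0]
    probs = tf.nn.softmax(logits).numpy()  # map logits to probabilities for gr.Label
    return {class_names[i]: float(probs[i]) for i in range(num_classes)}

With this, the label widget shows per-class confidences that sum to 1 instead of unbounded logit values.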