NORLIE JHON MALAGDAO committed
Commit c0cb430 · verified · 1 Parent(s): 5e0f68f

Update app.py

Files changed (1)
  1. app.py +54 -79
app.py CHANGED
@@ -1,4 +1,3 @@
-
 import gradio as gr
 import matplotlib.pyplot as plt
 import numpy as np
@@ -15,8 +14,6 @@ import gdown
 import zipfile
 import pathlib
 
-
-
 # Define the Google Drive shareable link
 gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'
 
@@ -55,114 +52,92 @@ for root, dirs, files in os.walk(extracted_path):
     subindent = ' ' * 4 * (level + 1)
     for f in files:
         print(f"{subindent}{f}")
-
 
-# Path to the dataset directory
-import pathlib
 # Path to the dataset directory
 data_dir = pathlib.Path('extracted_files/Pest_Dataset')
-data_dir = pathlib.Path(data_dir)
 
-bees = list(data_dir.glob('bees/*'))
-print(bees[0])
-PIL.Image.open(str(bees[0]))
+img_height, img_width = 180, 180
+batch_size = 32
 
-img_height,img_width=180,180
-batch_size=32
 train_ds = tf.keras.preprocessing.image_dataset_from_directory(
-    data_dir,
-    validation_split=0.2,
-    subset="training",
-    seed=123,
-    image_size=(img_height, img_width),
-    batch_size=batch_size)
-
+    data_dir,
+    validation_split=0.2,
+    subset="training",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size
+)
 
 val_ds = tf.keras.preprocessing.image_dataset_from_directory(
-    data_dir,
-    validation_split=0.2,
-    subset="validation",
-    seed=123,
-    image_size=(img_height, img_width),
-    batch_size=batch_size)
-
-
+    data_dir,
+    validation_split=0.2,
+    subset="validation",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size
+)
 
 class_names = train_ds.class_names
 print(class_names)
 
-
-import matplotlib.pyplot as plt
-
 plt.figure(figsize=(10, 10))
 for images, labels in train_ds.take(1):
-    for i in range(9):
-        ax = plt.subplot(3, 3, i + 1)
-        plt.imshow(images[i].numpy().astype("uint8"))
-        plt.title(class_names[labels[i]])
-        plt.axis("off")
-
+    for i in range(9):
+        ax = plt.subplot(3, 3, i + 1)
+        plt.imshow(images[i].numpy().astype("uint8"))
+        plt.title(class_names[labels[i]])
+        plt.axis("off")
+
+# Define data augmentation
+data_augmentation = keras.Sequential([
+    layers.RandomFlip("horizontal", input_shape=(img_height, img_width, 3)),
+    layers.RandomRotation(0.1),
+    layers.RandomZoom(0.1),
+    layers.RandomContrast(0.1),
+])
 
-num_classes = 12
+# Load a pretrained model and fine-tune it
+base_model = tf.keras.applications.MobileNetV2(input_shape=(img_height, img_width, 3),
+                                               include_top=False,
+                                               weights='imagenet')
+base_model.trainable = False  # Freeze the base model
 
+# Add custom layers on top of the pretrained model
 model = Sequential([
-    layers.experimental.preprocessing.Rescaling(1./255, input_shape=(img_height, img_width, 3)),
-    layers.Conv2D(16, 3, padding='same', activation='relu'),
-    layers.MaxPooling2D(),
-    layers.Conv2D(32, 3, padding='same', activation='relu'),
-    layers.MaxPooling2D(),
-    layers.Conv2D(64, 3, padding='same', activation='relu'),
-    layers.MaxPooling2D(),
-    layers.Flatten(),
-    layers.Dense(128, activation='relu'),
-    layers.Dense(num_classes,activation='softmax')
+    data_augmentation,
+    layers.Rescaling(1./255),
+    base_model,
+    layers.GlobalAveragePooling2D(),
+    layers.Dropout(0.2),
+    layers.Dense(128, activation='relu'),
+    layers.Dense(len(class_names), name="outputs")
 ])
 
-
 model.compile(optimizer='adam',
               loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
               metrics=['accuracy'])
 
-epochs=10
+epochs = 10
 history = model.fit(
-    train_ds,
-    validation_data=val_ds,
-    epochs=epochs
+    train_ds,
+    validation_data=val_ds,
+    epochs=epochs
 )
 
-
-
-import gradio as gr
-import numpy as np
-import tensorflow as tf
-
 def predict_image(img):
     img = np.array(img)
-    img_resized = tf.image.resize(img, (180, 180))
+    img_resized = tf.image.resize(img, (img_height, img_width))
     img_4d = tf.expand_dims(img_resized, axis=0)
     prediction = model.predict(img_4d)[0]
     return {class_names[i]: float(prediction[i]) for i in range(len(class_names))}
 
-
-image = gr.Image()
-label = gr.Label(num_top_classes=5)
-
-# Define custom CSS for background image
-custom_css = """
-body {
-    background-image: url('\extracted_files\Pest_Dataset\bees\bees (444).jpg');
-    background-size: cover;
-    background-repeat: no-repeat;
-    background-attachment: fixed;
-    color: white;
-}
-"""
+image = gr.Image()
+label = gr.Label(num_top_classes=12)
 
 gr.Interface(
-    fn=predict_image,
-    inputs=image,
-    outputs=label,
+    fn=predict_image,
+    inputs=image,
+    outputs=label,
     title="Pest Classification",
-    description="Upload an image of a pest to classify it into one of the predefined categories.",
-    css=custom_css
-).launch(debug=True)
+    description="Upload an image of a pest to classify it into one of the predefined categories."
+).launch(debug=True)
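
Note on the frozen backbone: the new code's comment says "Load a pretrained model and fine-tune it", but base_model.trainable = False keeps MobileNetV2 entirely frozen, so it acts only as a fixed feature extractor. If genuine fine-tuning is intended, the usual Keras pattern is a short second training phase after the initial fit. The sketch below reuses base_model, model, train_ds and val_ds from app.py; the unfrozen-layer cut-off, learning rate and epoch count are illustrative assumptions, not values from this commit.

# Hypothetical fine-tuning phase (not part of this commit); assumes app.py has already run.
base_model.trainable = True
for layer in base_model.layers[:-20]:  # assumed cut-off: unfreeze only the last 20 layers
    layer.trainable = False

model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-5),  # low LR so pretrained weights change slowly
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])

history_fine = model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=5  # illustrative epoch count
)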
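
Note on the prediction head: the new output layer layers.Dense(len(class_names), name="outputs") has no activation and the loss is built with from_logits=True, so model.predict() returns raw logits and predict_image passes those straight to gr.Label. If probability-style confidences are preferred in the UI, one option is to apply a softmax before building the result dict. This is a sketch of that variant (reusing model, class_names, img_height and img_width from app.py), not part of the commit.

def predict_image(img):
    img = np.array(img)
    img_resized = tf.image.resize(img, (img_height, img_width))
    img_4d = tf.expand_dims(img_resized, axis=0)
    logits = model.predict(img_4d)[0]
    probs = tf.nn.softmax(logits).numpy()  # convert logits to probabilities that sum to 1
    return {class_names[i]: float(probs[i]) for i in range(len(class_names))}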