NORLIE JHON MALAGDAO committed (verified)
Commit 83e4be4 · 1 Parent(s): b19977f

Update app.py

Files changed (1)
app.py  +88 -24
app.py CHANGED
@@ -4,13 +4,22 @@ import numpy as np
 import os
 import PIL
 import tensorflow as tf
+
 from tensorflow import keras
-from tensorflow.keras import layers, Sequential
+from tensorflow.keras import layers
+from tensorflow.keras.models import Sequential
+
+
 from PIL import Image
 import gdown
 import zipfile
+
 import pathlib
 
+
+
+
+
 # Define the Google Drive shareable link
 gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'
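The code that actually downloads and unpacks the archive sits between this hunk and the next, so the commit leaves it untouched and it is not shown here. For orientation, a minimal sketch of that step, assuming gdown's fuzzy handling of Drive share links; the local_zip_file and extracted_path names are assumptions, with only os.remove(local_zip_file) in the next hunk header and the extracted_files path later in the diff hinting at them:

local_zip_file = 'file.zip'         # assumed name, not visible in this diff
extracted_path = 'extracted_files'  # consistent with the path used later in the diff

# gdown can resolve a Drive "view" share link directly when fuzzy=True
gdown.download(gdrive_url, local_zip_file, quiet=False, fuzzy=True)

# Unpack the archive, then delete it (the next hunk's context starts
# right after os.remove(local_zip_file))
with zipfile.ZipFile(local_zip_file, 'r') as zip_ref:
    zip_ref.extractall(extracted_path)
os.remove(local_zip_file)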
 
@@ -41,37 +50,88 @@ os.remove(local_zip_file)
 # Convert the extracted directory path to a pathlib.Path object
 data_dir = pathlib.Path(extracted_path)
 
+# Print the directory structure to debug
+for root, dirs, files in os.walk(extracted_path):
+    level = root.replace(extracted_path, '').count(os.sep)
+    indent = ' ' * 4 * (level)
+    print(f"{indent}{os.path.basename(root)}/")
+    subindent = ' ' * 4 * (level + 1)
+    for f in files:
+        print(f"{subindent}{f}")
+
+import pathlib
 # Path to the dataset directory
-data_dir = data_dir / 'Pest_Dataset'
+data_dir = pathlib.Path('extracted_files/Pest_Dataset')
+data_dir = pathlib.Path(data_dir)
+
+
+bees = list(data_dir.glob('bees/*'))
+print(bees[0])
+PIL.Image.open(str(bees[0]))
+
+
+bees = list(data_dir.glob('bees/*'))
+print(bees[0])
+PIL.Image.open(str(bees[0]))
+
 
-# Load dataset
+img_height,img_width=180,180
+batch_size=32
 train_ds = tf.keras.preprocessing.image_dataset_from_directory(
-    data_dir,
-    validation_split=0.2,
-    subset="training",
-    seed=123,
-    image_size=(180, 180),
-    batch_size=32)
+    data_dir,
+    validation_split=0.2,
+    subset="training",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size)
+
 
 val_ds = tf.keras.preprocessing.image_dataset_from_directory(
-    data_dir,
-    validation_split=0.2,
-    subset="validation",
-    seed=123,
-    image_size=(180, 180),
-    batch_size=32)
+    data_dir,
+    validation_split=0.2,
+    subset="validation",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size)
+
 
 class_names = train_ds.class_names
+print(class_names)
 
-data_augmentation = Sequential([
-    layers.RandomFlip("horizontal"),
+
+import matplotlib.pyplot as plt
+
+plt.figure(figsize=(10, 10))
+for images, labels in train_ds.take(1):
+    for i in range(9):
+        ax = plt.subplot(3, 3, i + 1)
+        plt.imshow(images[i].numpy().astype("uint8"))
+        plt.title(class_names[labels[i]])
+        plt.axis("off")
+
+
+data_augmentation = keras.Sequential(
+    [
+        layers.RandomFlip("horizontal",
+                          input_shape=(img_height,
+                                       img_width,
+                                       3)),
     layers.RandomRotation(0.1),
     layers.RandomZoom(0.1),
-])
+    ]
+)
 
-num_classes = len(class_names)
+
+plt.figure(figsize=(10, 10))
+for images, _ in train_ds.take(1):
+    for i in range(9):
+        augmented_images = data_augmentation(images)
+        ax = plt.subplot(3, 3, i + 1)
+        plt.imshow(augmented_images[0].numpy().astype("uint8"))
+        plt.axis("off")
 
 
+num_classes = len(class_names)
 model = Sequential([
     data_augmentation,
     layers.Rescaling(1./255),
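Both dataset calls share the same directory, validation_split and seed, so the training and validation subsets are complementary rather than overlapping. One thing neither the old nor the new code does is tune the input pipeline; the usual follow-up from the Keras image-classification tutorial (not part of this commit) would be caching and prefetching once the datasets exist:

# Optional, not in this diff: overlap preprocessing with training
AUTOTUNE = tf.data.AUTOTUNE
train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size=AUTOTUNE)
val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)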
@@ -84,22 +144,25 @@ model = Sequential([
     layers.Dropout(0.2),
     layers.Flatten(),
     layers.Dense(128, activation='relu'),
-    layers.Dense(num_classes, name="outputs")
+    layers.Dense(num_classes, activation='softmax', name="outputs")  # Use softmax here
 ])
 
 model.compile(optimizer='adam',
-              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
+              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),  # Change from_logits to False
               metrics=['accuracy'])
 
 model.summary()
 
+
 epochs = 15
 history = model.fit(
-    train_ds,
-    validation_data=val_ds,
-    epochs=epochs
+    train_ds,
+    validation_data=val_ds,
+    epochs=epochs
 )
 
+
+
 # Define category descriptions
 category_descriptions = {
     "Ants": "Ants are small insects known for their complex social structures and teamwork.",
@@ -150,3 +213,4 @@ gr.Interface(
     description="The image data set used was obtained from Kaggle and has a collection of 12 different types of agricultural pests: Ants, Bees, Beetles, Caterpillars, Earthworms, Earwigs, Grasshoppers, Moths, Slugs, Snails, Wasps, and Weevils",
     css=custom_css
 ).launch(debug=True)
+
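Only the tail of the gr.Interface(...) call is visible in this final hunk; the prediction function, the input/output components, and the title are defined earlier in the file and are unchanged by the commit. A hypothetical sketch of how the pieces that do appear in this diff (the trained model, class_names, category_descriptions, custom_css, the 180x180 input size) are typically wired into such an interface; none of the names or component choices below are taken from the commit:

import numpy as np
import gradio as gr

def predict(image):
    # The model already rescales pixels (Rescaling(1./255)), so raw 0-255 input is fine;
    # the augmentation layers are inactive at inference time.
    img = tf.image.resize(image, (180, 180))
    img = tf.expand_dims(img, 0)
    probs = model.predict(img)[0]
    top = class_names[int(np.argmax(probs))]
    return (
        {class_names[i]: float(probs[i]) for i in range(len(class_names))},
        category_descriptions.get(top, ""),
    )

gr.Interface(
    fn=predict,
    inputs=gr.Image(type="numpy"),
    outputs=[gr.Label(num_top_classes=3), gr.Textbox(label="About this pest")],
    title="Pest Classification",  # placeholder title, not visible in the diff
    description="...",            # shortened; the full text is in the hunk above
    css=custom_css
).launch(debug=True)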
 
 