NORLIE JHON MALAGDAO committed on
Commit
621e88d
·
verified ·
1 Parent(s): 6c74c1b

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +200 -0
app.py ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# NOTE(fix): the original first line was "!pip install gradio", which is
# IPython/notebook shell syntax and a SyntaxError in a plain .py file.
# Install dependencies via requirements.txt (gradio, tensorflow, gdown) instead.

# Standard library
import os
import pathlib
import zipfile

# Third-party
import gdown
import gradio as gr
import matplotlib.pyplot as plt
import numpy as np
import PIL
from PIL import Image
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.models import Sequential
# --- Dataset download and extraction ---------------------------------------

# Google Drive shareable link to the zipped pest dataset.
gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'

# Extract the file ID from the share URL and build a direct-download URL
# (gdown needs the uc?id= form, not the /view page).
file_id = gdrive_url.split('/d/')[1].split('/view')[0]
direct_download_url = f'https://drive.google.com/uc?id={file_id}'

# Local filename for the downloaded archive.
local_zip_file = 'file.zip'

# Download the ZIP file.
gdown.download(direct_download_url, local_zip_file, quiet=False)

# Directory the archive is unpacked into.
extracted_path = 'extracted_files'

# Verify the downloaded file really is a ZIP archive before extracting.
try:
    with zipfile.ZipFile(local_zip_file, 'r') as zip_ref:
        zip_ref.extractall(extracted_path)
    print("Extraction successful!")
except zipfile.BadZipFile:
    print("Error: The downloaded file is not a valid ZIP file.")

# Delete the archive after extraction. BUG FIX: the original removed the
# file unconditionally, which raises FileNotFoundError when the download
# failed and no file exists; guard with an existence check.
if os.path.exists(local_zip_file):
    os.remove(local_zip_file)

# Convert the extracted directory path to a pathlib.Path object.
data_dir = pathlib.Path(extracted_path)

# Print the extracted directory tree to help debug dataset-layout issues.
for root, dirs, files in os.walk(extracted_path):
    level = root.replace(extracted_path, '').count(os.sep)
    indent = ' ' * 4 * level
    print(f"{indent}{os.path.basename(root)}/")
    subindent = ' ' * 4 * (level + 1)
    for f in files:
        print(f"{subindent}{f}")
# Root of the actual dataset inside the extracted archive.
# (Removed: a duplicate "import pathlib" and a redundant second
# pathlib.Path() conversion of an already-Path value.)
data_dir = pathlib.Path('extracted_files/Pest_Dataset')

# Sanity check: show one sample from the "bees" class, if present.
# BUG FIX: the original indexed bees[0] unconditionally, which raises
# IndexError when the glob matches nothing (e.g. wrong archive layout).
bees = list(data_dir.glob('bees/*'))
if bees:
    print(bees[0])
    PIL.Image.open(str(bees[0]))
else:
    print("Warning: no images found under bees/ — check the dataset layout.")
# Image geometry and batching shared by both dataset splits.
img_height, img_width = 180, 180
batch_size = 32

# 80/20 train/validation split; the identical seed on both calls keeps the
# two subsets disjoint and reproducible.
# NOTE(fix): tf.keras.preprocessing.image_dataset_from_directory is the
# deprecated alias; tf.keras.utils.image_dataset_from_directory is the
# supported TF2 path with identical behavior.
train_ds = tf.keras.utils.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="training",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size)

val_ds = tf.keras.utils.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="validation",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size)

# Class names are inferred from the class sub-directory names.
class_names = train_ds.class_names
print(class_names)
# Visualize a 3x3 grid of training samples with their class labels.
# (Removed: a duplicate "import matplotlib.pyplot as plt".)
plt.figure(figsize=(10, 10))
for images, labels in train_ds.take(1):
    # BUG FIX: guard against a first batch smaller than 9 images, which
    # would make images[i] raise an out-of-range error.
    for i in range(min(9, int(images.shape[0]))):
        ax = plt.subplot(3, 3, i + 1)
        plt.imshow(images[i].numpy().astype("uint8"))
        plt.title(class_names[labels[i]])
        plt.axis("off")
# One output unit per pest class discovered in the dataset directory.
# Generalized from the hard-coded 12 so the model tracks the actual data.
num_classes = len(class_names)

model = Sequential([
    # Normalize uint8 pixels to [0, 1] inside the model, so inference
    # receives exactly the same preprocessing as training.
    # NOTE(fix): layers.experimental.preprocessing.Rescaling is the
    # deprecated path; layers.Rescaling is the supported TF2 name.
    layers.Rescaling(1./255, input_shape=(img_height, img_width, 3)),
    layers.Conv2D(16, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(32, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(64, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Flatten(),
    layers.Dense(128, activation='relu'),
    # Softmax head: the model emits probabilities, not raw logits.
    layers.Dense(num_classes, activation='softmax')
])

# BUG FIX: the final layer already applies softmax, so the loss must use
# from_logits=False. The original passed from_logits=True, which applies
# softmax a second time and silently mis-scales the loss and gradients.
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
              metrics=['accuracy'])

epochs = 10
history = model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=epochs
)
# (Removed: duplicate re-imports of gradio, numpy, and tensorflow —
# all are already imported at the top of the file.)

def predict_image(img):
    """Classify one uploaded image; return {class_name: probability}.

    Gradio supplies the image as an array-like (H, W, C). It is resized to
    the 180x180 geometry the model was trained on, given a batch axis, and
    run through the softmax model, so the returned values are probabilities
    suitable for gr.Label's confidence display.
    """
    img = np.array(img)
    img_resized = tf.image.resize(img, (180, 180))
    img_4d = tf.expand_dims(img_resized, axis=0)  # add batch dimension
    prediction = model.predict(img_4d)[0]
    return {class_names[i]: float(prediction[i]) for i in range(len(class_names))}
image = gr.Image()
label = gr.Label(num_top_classes=5)

# Custom CSS giving the app a background image.
# BUG FIX: the original URL used backslashes
# ('\extracted_files\Pest_Dataset\bees\bees (444).jpg'), which both embeds a
# literal backspace character via the \b escape in the Python string and uses
# separators that are invalid in a URL; forward slashes are required.
custom_css = """
body {
    background-image: url('extracted_files/Pest_Dataset/bees/bees (444).jpg');
    background-size: cover;
    background-repeat: no-repeat;
    background-attachment: fixed;
    color: white;
}
"""

gr.Interface(
    fn=predict_image,
    inputs=image,
    outputs=label,
    title="Pest Classification",
    description="Upload an image of a pest to classify it into one of the predefined categories.",
    css=custom_css
).launch(debug=True)