NORLIE JHON MALAGDAO committed on
Commit
68d5b48
·
verified ·
1 Parent(s): 442b326

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +143 -0
app.py ADDED
@@ -0,0 +1,143 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import gradio as gr
3
+ import matplotlib.pyplot as plt
4
+ import numpy as np
5
+ import os
6
+ import PIL
7
+ import tensorflow as tf
8
+
9
+ from tensorflow import keras
10
+ from tensorflow.keras import layers
11
+ from tensorflow.keras.models import Sequential
12
+
13
+ from PIL import Image
14
+ import gdown
15
+ import zipfile
16
+ import pathlib
17
+
18
# --- Download and extract the dataset ZIP from Google Drive ---

# Shareable Google Drive link to the dataset archive.
gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'

# Extract the file ID from the shareable URL and build a direct-download URL.
file_id = gdrive_url.split('/d/')[1].split('/view')[0]
direct_download_url = f'https://drive.google.com/uc?id={file_id}'

# Local filename for the downloaded archive.
local_zip_file = 'file.zip'

# Download the ZIP file (gdown handles Google Drive's interstitial pages).
gdown.download(direct_download_url, local_zip_file, quiet=False)

# Directory into which the archive is unpacked.
extracted_path = 'extracted_files'

# Verify the downloaded file is a valid ZIP archive and extract it.
try:
    with zipfile.ZipFile(local_zip_file, 'r') as zip_ref:
        zip_ref.extractall(extracted_path)
    print("Extraction successful!")
except zipfile.BadZipFile:
    print("Error: The downloaded file is not a valid ZIP file.")

# Remove the archive after extraction. Guard the removal: if the download
# failed, the file may not exist and a bare os.remove would raise
# FileNotFoundError and kill the app at import time.
if os.path.exists(local_zip_file):
    os.remove(local_zip_file)

# Path object for the extraction root (re-pointed at the dataset subfolder
# later in the script).
data_dir = pathlib.Path(extracted_path)

# Print the extracted directory tree to help debug the dataset layout.
for root, dirs, files in os.walk(extracted_path):
    level = root.replace(extracted_path, '').count(os.sep)
    indent = ' ' * 4 * level
    print(f"{indent}{os.path.basename(root)}/")
    subindent = ' ' * 4 * (level + 1)
    for f in files:
        print(f"{subindent}{f}")
56
+
57
# Location of the unpacked pest image dataset.
data_dir = pathlib.Path('extracted_files/Pest_Dataset')

# Target image resolution and mini-batch size for the input pipelines.
img_height, img_width = 180, 180
batch_size = 32

# Options shared by the training and validation splits. Using the same
# seed and split fraction guarantees the two subsets partition the data
# consistently (no overlap, no omission).
_split_kwargs = dict(
    validation_split=0.2,
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size,
)

# 80% of the images for training, the held-out 20% for validation.
train_ds = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir, subset="training", **_split_kwargs
)
val_ds = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir, subset="validation", **_split_kwargs
)
80
+
81
# Class labels are inferred from the dataset's sub-directory names.
class_names = train_ds.class_names
print(class_names)

# Preview a 3x3 grid of training images with their labels (debug aid;
# the figure is never shown in the hosted app).
plt.figure(figsize=(10, 10))
for batch_images, batch_labels in train_ds.take(1):
    for idx in range(9):
        ax = plt.subplot(3, 3, idx + 1)
        plt.imshow(batch_images[idx].numpy().astype("uint8"))
        plt.title(class_names[batch_labels[idx]])
        plt.axis("off")
91
+
92
# Train-time data augmentation: random horizontal flips plus small random
# rotations and zooms. These layers are active only during training and
# pass inputs through unchanged at inference.
_augmentation_layers = [
    layers.RandomFlip("horizontal", input_shape=(img_height, img_width, 3)),
    layers.RandomRotation(0.1),
    layers.RandomZoom(0.1),
]
data_augmentation = keras.Sequential(_augmentation_layers)
98
+
99
# Number of output classes, derived from the dataset's folder names
# instead of a hard-coded 12, so the model cannot silently drift out of
# sync with the data on disk.
num_classes = len(class_names)

# Simple CNN: augmentation -> rescale pixels to [0, 1] -> three
# conv/max-pool stages of increasing width -> dropout -> dense classifier.
# The final Dense layer emits raw logits (no softmax), matching the
# from_logits=True loss configured at compile time.
model = Sequential([
    data_augmentation,
    layers.Rescaling(1./255),
    layers.Conv2D(16, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(32, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(64, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Dropout(0.2),
    layers.Flatten(),
    layers.Dense(128, activation='relu'),
    layers.Dense(num_classes, name="outputs")
])
115
+
116
# Adam optimizer with sparse categorical cross-entropy over raw logits
# (labels are integer class indices, not one-hot vectors).
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(
    optimizer='adam',
    loss=loss_fn,
    metrics=['accuracy'],
)
119
+
120
# Train for a fixed number of epochs, evaluating on the validation split
# after each one; the returned History records per-epoch metrics.
epochs = 10
history = model.fit(train_ds, validation_data=val_ds, epochs=epochs)
126
+
127
def predict_image(img):
    """Classify a pest image and return per-class confidence scores.

    Parameters
    ----------
    img : PIL.Image or array-like
        Image uploaded through the Gradio UI; converted to an array and
        resized to the model's 180x180 input resolution.

    Returns
    -------
    dict[str, float]
        Mapping of class name -> probability, the format gr.Label expects.
    """
    img = np.array(img)
    img_resized = tf.image.resize(img, (180, 180))
    img_4d = tf.expand_dims(img_resized, axis=0)
    # The model's last layer emits raw logits (its loss was compiled with
    # from_logits=True), so apply softmax here — otherwise the UI would
    # display unnormalized, possibly negative "confidences".
    logits = model.predict(img_4d)[0]
    probs = tf.nn.softmax(logits).numpy()
    return {class_names[i]: float(probs[i]) for i in range(len(class_names))}
133
+
134
# --- Gradio UI ---
# One image input; the output shows the five highest-scoring classes.
image = gr.Image()
label = gr.Label(num_top_classes=5)

demo = gr.Interface(
    fn=predict_image,
    inputs=image,
    outputs=label,
    title="Pest Classification",
    description="Upload an image of a pest to classify it into one of the predefined categories.",
)
demo.launch(debug=True)