import matplotlib.pyplot as plt
import numpy as np
import os
import PIL
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.models import Sequential
import pathlib
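# Download and extract the flower_photos dataset (five flower classes) into the Keras cache.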
dataset_url = "https://storage.googleapis.com/download.tensorflow.org/example_images/flower_photos.tgz"
data_dir = tf.keras.utils.get_file('flower_photos', origin=dataset_url, untar=True)
data_dir = pathlib.Path(data_dir)
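# Count the JPEG images and list the class sub-directories.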
image_count = len(list(data_dir.glob('*/*.jpg')))
print(image_count)
print(os.listdir(data_dir))
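# Preview one sample image each from the roses and daisy folders.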
roses = list(data_dir.glob('roses/*'))
PIL.Image.open(str(roses[1]))
daisy = list(data_dir.glob('daisy/*'))
PIL.Image.open(str(daisy[2]))
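# Loading parameters: batch size and the target image size used as the model input.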
batch_size = 32
img_height = 180
img_width = 180
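# Build training and validation datasets with an 80/20 split (same seed so the subsets don't overlap).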
train_ds = tf.keras.utils.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="training",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size)

val_ds = tf.keras.utils.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="validation",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size)
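# Class names are inferred from the sub-directory names.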
class_names = train_ds.class_names
print(class_names)
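# Visualize a sample batch of training images with their labels.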
plt.figure(figsize=(12, 12))
for images, labels in train_ds.take(1):
    for i in range(12):
        ax = plt.subplot(3, 4, i + 1)
        plt.imshow(images[i].numpy().astype("uint8"))
        plt.title(class_names[labels[i]])
        plt.axis("off")
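# A simple CNN: rescale pixels to [0, 1], three Conv2D/MaxPooling blocks, then a dense classifier head.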
num_classes = len(class_names)

model = Sequential([
    layers.Rescaling(1./255, input_shape=(img_height, img_width, 3)),
    layers.Conv2D(16, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(32, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(64, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Flatten(),
    layers.Dense(128, activation='relu'),
    layers.Dense(num_classes, activation='softmax')
])
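# Compile with Adam and sparse categorical cross-entropy; the final softmax layer already outputs probabilities.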
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
              metrics=['accuracy'])
model.summary()
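# Train for a fixed number of epochs, validating on the held-out split after each epoch.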
epochs = 15
history = model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=epochs
)
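# Plot training/validation accuracy and loss curves side by side.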
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)

plt.figure(figsize=(12, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
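# Resize an arbitrary input image to the 180x180 size the model expects.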
def resize_image(input_image):
    img = PIL.Image.fromarray(input_image)
    resized_img = img.resize((180, 180))
    resized_array = np.array(resized_img)
    return resized_array
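# Run a single image through the model and return a {class name: confidence} mapping for Gradio.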
def predict_input_image(img):
    img = resize_image(img)
    img_4d = img.reshape(-1, 180, 180, 3)
    prediction = model.predict(img_4d)[0]
    return {class_names[i]: float(prediction[i]) for i in range(len(class_names))}

#!pip install gradio
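# Serve the classifier through a simple Gradio interface: an image input and a top-5 label output.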
import gradio as gr

gr.Interface(fn=predict_input_image,
             inputs=gr.Image(),
             outputs=gr.Label(num_top_classes=5),
             live=False).launch(debug=True)