import cv2
import numpy as np
import gradio as gr
from PIL import Image
import tensorflow as tf
from keras.models import Model
from keras.layers import Input, Conv2D, MaxPooling2D, Conv2DTranspose, concatenate
from keras.optimizers import Adam
def build_model(input_shape):
    # NOTE: the network body is not shown in the original listing. The compact
    # U-Net below is a placeholder reconstruction inferred from the imports;
    # the real layer configuration must match BreastCancerSegmentation.h5.
    inputs = Input(input_shape)
    c1 = Conv2D(16, 3, activation="relu", padding="same")(inputs)
    p1 = MaxPooling2D()(c1)
    c2 = Conv2D(32, 3, activation="relu", padding="same")(p1)
    p2 = MaxPooling2D()(c2)
    b = Conv2D(64, 3, activation="relu", padding="same")(p2)
    u2 = concatenate([Conv2DTranspose(32, 2, strides=2, padding="same")(b), c2])
    c3 = Conv2D(32, 3, activation="relu", padding="same")(u2)
    u1 = concatenate([Conv2DTranspose(16, 2, strides=2, padding="same")(c3), c1])
    c4 = Conv2D(16, 3, activation="relu", padding="same")(u1)
    outputs = Conv2D(1, 1, activation="sigmoid")(c4)
    model = Model(inputs, outputs)
    model.compile(optimizer=Adam(), loss="binary_crossentropy")
    return model

size = 128
model = build_model(input_shape=(size, size, 1))
model.load_weights('BreastCancerSegmentation.h5')
def preprocess_image(image, size: int = 128):
    # Resize to the model's input resolution, convert RGB -> grayscale, scale to [0, 1].
    image = cv2.resize(image, (size, size))
    image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
    image = image / 255.0
    return image
def segment(image):
    # Gradio passes the uploaded image as an RGB numpy array.
    image = preprocess_image(image, size=size)
    image = np.expand_dims(image, axis=(0, -1))  # add batch and channel dimensions
    output = model.predict(image, verbose=0)
    mask_image = output[0]
    mask_image = np.squeeze(mask_image, -1)      # drop the channel dimension
    mask_image *= 255                            # scale [0, 1] probabilities to [0, 255]
    mask_image = mask_image.astype(np.uint8)
    mask_image = Image.fromarray(mask_image).convert("L")
    return mask_image
if __name__ == "__main__":
gr.Interface(
fn=segment,
inputs="image",
outputs=gr.Image(type="pil", label="Breast Cancer Mask"),
examples = [["/content/benign(10).png"], ["/content/benign(109).png"]],
title = "Breast Cancer Ultrasound Image Segmentation",
description = "Check out this exciting development in the field of breast cancer diagnosis and treatment! A demo of Breast Cancer Ultrasound Image Segmentation has been developed. Upload image file, or try out one of the examples below!"
).launch(share=True, debug=True) |
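
As a quick sanity check outside the Gradio UI, the segment function can also be called directly. The snippet below is a hedged usage sketch, not part of the original Space: it assumes the listing above is saved as app.py next to BreastCancerSegmentation.h5, and the example file name is illustrative.

import numpy as np
from PIL import Image

from app import segment  # importing app builds the model and loads the weights

# "benign(10).png" is an illustrative file name; substitute any local ultrasound image.
rgb = np.array(Image.open("benign(10).png").convert("RGB"))
mask = segment(rgb)               # returns a PIL "L" (grayscale) mask image
mask.save("predicted_mask.png")
print("mask size:", mask.size)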