import gradio as gr
from huggingface_hub import from_pretrained_keras
import numpy as np
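
# Load the two pretrained Keras models from the Hugging Face Hub:
# one predicts age and gender, the other predicts ethnicity.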
reloaded_model = from_pretrained_keras('jmparejaz/Facial_Age-gender-eth_Recognition')
reloaded_model_eth = from_pretrained_keras('jmparejaz/Facial_eth_recognition')
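
# Convert an RGB image to grayscale using standard luminance weights.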
def rgb2gray(rgb):
    return np.dot(rgb[..., :3], [0.2989, 0.5870, 0.1140])
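
# Run both models on a batch of 48x48 grayscale faces.
# model_1 returns [gender, age] predictions; model_2 returns ethnicity class scores.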
def predict_model(x_, model_1, model_2):
    pred = model_1.predict(x_.reshape(x_.shape[0], 48, 48, 1))
    pred_eth = model_2.predict(x_.reshape(x_.shape[0], 48, 48, 1))
    pred_gender = [round(pred[0][x][0]) for x in range(x_.shape[0])]
    pred_age = [round(pred[1][x][0]) for x in range(x_.shape[0])]
    pred_eth = [np.argmax(pred_eth[x]) for x in range(x_.shape[0])]
    return pred_gender, pred_age, pred_eth
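
# Gradio callback: grayscale the uploaded image, run both models,
# and map the raw predictions to human-readable labels.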
def image_classifier(input_img):
    gray = rgb2gray(input_img)
    g, a, e = predict_model(gray.reshape(1, 48, 48, 1), reloaded_model, reloaded_model_eth)
    # predict_model returns one-element lists for a single image, so take index 0.
    dict_gender = {0: 'Male', 1: 'Female'}
    g = dict_gender[g[0]]
    dict_eth = {0: "White", 1: "Black", 2: "Asian", 3: "Indian", 4: "Hispanic"}
    e = dict_eth[e[0]]
    return "The predicted gender is {}, the predicted age is {} and the predicted ethnicity is {}".format(g, a[0], e)
iface = gr.Interface(
    image_classifier,
    gr.inputs.Image(shape=(48, 48)),
    outputs=['text'],
    capture_session=True,
    interpretation="default",
)
if __name__ == "__main__":
    iface.launch(share=True)