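"""Gradio app for predicting LiDAR sensor contamination levels from environmental factors.

A stacked ensemble (Random Forest and Keras MLP base models with a meta model on top)
estimates contamination for six sensor zones on a car from velocity, temperature,
precipitation, and humidity.
"""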
import gradio as gr
import numpy as np
import json
import joblib
import tensorflow as tf
import pandas as pd
from joblib import load
from tensorflow.keras.models import load_model
from sklearn.preprocessing import MinMaxScaler
import os
import sklearn  # imported only to report the scikit-learn version below
# Display library versions
print(f"Gradio version: {gr.__version__}")
print(f"NumPy version: {np.__version__}")
print(f"Scikit-learn version: {sklearn.__version__}")
print(f"Joblib version: {joblib.__version__}")
print(f"TensorFlow version: {tf.__version__}")
print(f"Pandas version: {pd.__version__}")
# Directory paths for the saved models
script_dir = os.path.dirname(os.path.abspath(__file__))
scaler_path = os.path.join(script_dir, 'toolkit', 'scaler_X.json')
rf_model_path = os.path.join(script_dir, 'toolkit', 'rf_model.joblib')
mlp_model_path = os.path.join(script_dir, 'toolkit', 'mlp_model.keras')
meta_model_path = os.path.join(script_dir, 'toolkit', 'meta_model.joblib')
image_path = os.path.join(script_dir, 'toolkit', 'car.png')
# Load the scaler and models
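# The MinMaxScaler is stored as plain JSON parameters rather than a pickle and is
# rebuilt attribute by attribute below; presumably this keeps the artifact readable
# and independent of the scikit-learn version that produced it.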
try:
    # Load the scaler parameters and restore the fitted state
    with open(scaler_path, 'r') as f:
        scaler_params = json.load(f)
    scaler_X = MinMaxScaler()
    scaler_X.scale_ = np.array(scaler_params["scale_"])
    scaler_X.min_ = np.array(scaler_params["min_"])
    scaler_X.data_min_ = np.array(scaler_params["data_min_"])
    scaler_X.data_max_ = np.array(scaler_params["data_max_"])
    scaler_X.data_range_ = np.array(scaler_params["data_range_"])
    scaler_X.n_features_in_ = scaler_params["n_features_in_"]
    scaler_X.feature_names_in_ = np.array(scaler_params["feature_names_in_"])

    # Load the models
    loaded_rf_model = load(rf_model_path)
    print("Random Forest model loaded successfully.")
    loaded_mlp_model = load_model(mlp_model_path)
    print("MLP model loaded successfully.")
    loaded_meta_model = load(meta_model_path)
    print("Meta model loaded successfully.")
except Exception as e:
    print(f"Error loading models or scaler: {e}")
def predict_new_values(new_input_data):
    try:
        # Ensure the new input data is in the correct format
        print(f"Raw Input Data: {new_input_data}")
        new_input_data = np.array(new_input_data).reshape(1, -1)

        # Scale the new input data
        new_input_scaled = scaler_X.transform(new_input_data)
        print(f"Scaled Input Data: {new_input_scaled}")

        # Make predictions with both base models
        mlp_predictions_new = loaded_mlp_model.predict(new_input_scaled)
        rf_predictions_new = loaded_rf_model.predict(new_input_scaled)

        # Combine the base-model predictions into one feature vector
        combined_features_new = np.concatenate([mlp_predictions_new, rf_predictions_new], axis=1)
        print(f"Combined Features: {combined_features_new}")

        # Use the loaded meta model to make predictions on the new data
        loaded_meta_predictions_new = loaded_meta_model.predict(combined_features_new)
        print(f"Meta Model Predictions: {loaded_meta_predictions_new}")

        return loaded_meta_predictions_new[0]
    except Exception as e:
        print(f"Error in prediction: {e}")
        return ["Error"] * 6
def gradio_interface(velocity, temperature, precipitation, humidity):
    try:
        input_data = [velocity, temperature, precipitation, humidity]
        print(f"Input Data: {input_data}")
        predictions = predict_new_values(input_data)
        print(f"Predictions: {predictions}")
        # Format each of the six predictions as a percentage, passing error markers through
        return [
            f"{p * 100:.2f}%" if p != "Error" else "Error"
            for p in predictions
        ]
    except Exception as e:
        print(f"Error in Gradio interface: {e}")
        return ["Error"] * 6
inputs = [
    gr.Slider(minimum=0, maximum=100, value=50, step=0.05, label="Velocity (mph)"),
    gr.Slider(minimum=-2, maximum=30, value=0, step=0.5, label="Temperature (°C)"),
    gr.Slider(minimum=0, maximum=1, value=0, step=0.01, label="Precipitation (inch)"),
    gr.Slider(minimum=0, maximum=100, value=50, label="Humidity (%)")
]
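# Note: the slider order must match the feature order the scaler was fitted with
# (velocity, temperature, precipitation, humidity); gradio_interface passes the
# values positionally to predict_new_values.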
outputs = [
    gr.Textbox(label="Front Left"),
    gr.Textbox(label="Front Right"),
    gr.Textbox(label="Left"),
    gr.Textbox(label="Right"),
    gr.Textbox(label="Roof"),
    gr.Textbox(label="Rear")
]
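# The six textboxes correspond, in order, to the six values returned by the meta
# model, presumably the LiDAR sensor zones illustrated in car.png.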
with gr.Blocks() as demo:
    gr.Markdown("<h1 style='text-align: center;'>Environmental Factor-Based Contamination Level Prediction</h1>")
    gr.Markdown("This application predicts the contamination levels on different parts of a car's LiDAR system based on environmental factors such as velocity, temperature, precipitation, and humidity.")

    # Layout with two columns
    with gr.Row():
        with gr.Column():
            gr.Markdown("### Input Parameters")
            for inp in inputs:
                inp.render()

            # Centered image display
            with gr.Row():
                with gr.Column(scale=1, min_width=0, elem_id="center-column"):
                    gr.Image(image_path)  # Ensure the image is centered

            gr.Button(value="Submit", variant="primary").click(fn=gradio_interface, inputs=inputs, outputs=outputs)
            # Clear resets all six prediction textboxes
            gr.Button(value="Clear").click(fn=lambda: [""] * 6, outputs=outputs)

        with gr.Column():
            gr.Markdown("### Output Predictions ± 7.1%")
            for out in outputs:
                out.render()

demo.launch()