import gradio as gr
import numpy as np
import json
import joblib
import tensorflow as tf
import pandas as pd
from joblib import load
from tensorflow.keras.models import load_model
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt
import os
import sklearn

# Log library versions to help diagnose environment mismatches, since the
# serialized models and scaler are sensitive to the versions that produced them.
print(f"Gradio version: {gr.__version__}")
print(f"NumPy version: {np.__version__}")
print(f"Scikit-learn version: {sklearn.__version__}")
print(f"Joblib version: {joblib.__version__}")
print(f"TensorFlow version: {tf.__version__}")
print(f"Pandas version: {pd.__version__}")

# Resolve artifact paths relative to this script so the app works no matter
# which directory it is launched from.
script_dir = os.path.dirname(os.path.abspath(__file__))
scaler_path = os.path.join(script_dir, 'toolkit', 'scaler_X.json')
rf_model_path = os.path.join(script_dir, 'toolkit', 'rf_model.joblib')
mlp_model_path = os.path.join(script_dir, 'toolkit', 'mlp_model.keras')
meta_model_path = os.path.join(script_dir, 'toolkit', 'meta_model.joblib')
image_path = os.path.join(script_dir, 'toolkit', 'car.png')
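# Expected contents of the toolkit/ directory, inferred from the paths above
# (car.png is presumably shown in the UI):
#   scaler_X.json     - exported MinMaxScaler parameters
#   rf_model.joblib   - Random Forest base model
#   mlp_model.keras   - Keras MLP base model
#   meta_model.joblib - stacking meta-model
#   car.png           - illustration image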
try:
    # Rebuild the fitted MinMaxScaler from its exported attributes instead of
    # unpickling it, which keeps the scaler portable across library versions.
    with open(scaler_path, 'r') as f:
        scaler_params = json.load(f)
    scaler_X = MinMaxScaler()
    scaler_X.scale_ = np.array(scaler_params["scale_"])
    scaler_X.min_ = np.array(scaler_params["min_"])
    scaler_X.data_min_ = np.array(scaler_params["data_min_"])
    scaler_X.data_max_ = np.array(scaler_params["data_max_"])
    scaler_X.data_range_ = np.array(scaler_params["data_range_"])
    scaler_X.n_features_in_ = scaler_params["n_features_in_"]
    scaler_X.feature_names_in_ = np.array(scaler_params["feature_names_in_"])

    # Load the two base models and the stacking meta-model.
    loaded_rf_model = load(rf_model_path)
    print("Random Forest model loaded successfully.")
    loaded_mlp_model = load_model(mlp_model_path)
    print("MLP model loaded successfully.")
    loaded_meta_model = load(meta_model_path)
    print("Meta model loaded successfully.")
except Exception as e:
    print(f"Error loading models or scaler: {e}")
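# For reference, scaler_X.json is assumed to hold the scaler attributes as
# plain JSON lists under exactly the keys read above, e.g. (values hypothetical):
# {
#   "scale_": [0.008, 0.01, 1.0, 0.01],
#   "min_": [0.0, 0.2, 0.0, 0.0],
#   "data_min_": [0.0, -20.0, 0.0, 0.0],
#   "data_max_": [120.0, 80.0, 1.0, 100.0],
#   "data_range_": [120.0, 100.0, 1.0, 100.0],
#   "n_features_in_": 4,
#   "feature_names_in_": ["Velocity(mph)", "Temperature", "Precipitation", "Humidity"]
# }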
def predict_and_plot(velocity, temperature, precipitation, humidity):
    try:
        # Build a single-row DataFrame whose column names match the feature
        # names the scaler was fitted with.
        example_data = pd.DataFrame({
            'Velocity(mph)': [velocity],
            'Temperature': [temperature],
            'Precipitation': [precipitation],
            'Humidity': [humidity]
        })

        # Apply the training-time scaling to the raw inputs.
        example_data_scaled = scaler_X.transform(example_data)
        def predict_contamination(example_data_scaled):
            # Base learner 1: the Keras MLP has two output heads, one for
            # contamination levels and one for their gradients.
            mlp_predictions_contamination, mlp_predictions_gradients = loaded_mlp_model.predict(example_data_scaled)

            # Base learner 2: the Random Forest.
            rf_predictions = loaded_rf_model.predict(example_data_scaled)

            # Stack all base-model outputs side by side and let the meta-model
            # combine them (stacked generalization).
            combined_features = np.concatenate(
                [mlp_predictions_contamination, mlp_predictions_gradients, rf_predictions], axis=1)
            meta_predictions = loaded_meta_model.predict(combined_features)

            # Keep only the first six meta-model outputs: the contamination levels.
            return meta_predictions[:, :6]

        contamination_levels = predict_contamination(example_data_scaled)
        # Ten-minute horizon sampled every 60 seconds: 0, 60, ..., 600.
        time_intervals = np.arange(0, 601, 60)

        # Simulate contamination over time: each sensor's level is assumed to
        # grow linearly from its predicted value to twice that value.
        simulated_contamination_levels = np.array([
            np.linspace(contamination_levels[0][i], contamination_levels[0][i] * 2, len(time_intervals))
            for i in range(contamination_levels.shape[1])
        ]).T
        def calculate_cleaning_time(time_intervals, contamination_levels, threshold=0.4):
            # For each sensor, find the first interval in which the curve
            # crosses the threshold and linearly interpolate between the two
            # surrounding samples to estimate the exact crossing time.
            cleaning_times = []
            for i in range(contamination_levels.shape[1]):
                levels = contamination_levels[:, i]
                for j in range(1, len(levels)):
                    if levels[j-1] <= threshold <= levels[j]:
                        t1, t2 = time_intervals[j-1], time_intervals[j]
                        c1, c2 = levels[j-1], levels[j]
                        cleaning_time = t1 + (threshold - c1) * (t2 - t1) / (c2 - c1)
                        cleaning_times.append(cleaning_time)
                        break
            return cleaning_times
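        # Worked example of the interpolation (illustrative numbers): if a
        # sensor rises from 0.35 at t=120 s to 0.45 at t=180 s, the 0.4
        # threshold is crossed at 120 + (0.4 - 0.35) * (180 - 120) / (0.45 - 0.35) = 150 s.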
        cleaning_times = calculate_cleaning_time(time_intervals, simulated_contamination_levels)
        # NOTE: the source is truncated mid-list here; six labels are assumed to
        # match the six meta-model outputs, and the last three names are guesses.
        lidar_names = ['F/L', 'F/R', 'Left', 'Right', 'Roof', 'Rear']
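# The remainder of predict_and_plot (the matplotlib plot and the function's
# except clause) and the Gradio wiring are missing from this excerpt. A minimal
# sketch of how the interface might be launched, assuming four numeric inputs
# and a plot output (labels hypothetical):
#
# demo = gr.Interface(
#     fn=predict_and_plot,
#     inputs=[
#         gr.Number(label="Velocity (mph)"),
#         gr.Number(label="Temperature"),
#         gr.Number(label="Precipitation"),
#         gr.Number(label="Humidity"),
#     ],
#     outputs=gr.Plot(),
# )
# demo.launch()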