import gradio as gr
import numpy as np
import json
import joblib
import tensorflow as tf
import pandas as pd
from tensorflow.keras.models import load_model
from sklearn.preprocessing import MinMaxScaler
import matplotlib.pyplot as plt
import os
import sklearn

# Display library versions
print(f"Gradio version: {gr.__version__}")
print(f"NumPy version: {np.__version__}")
print(f"Scikit-learn version: {sklearn.__version__}")
print(f"Joblib version: {joblib.__version__}")
print(f"TensorFlow version: {tf.__version__}")
print(f"Pandas version: {pd.__version__}")

# Directory paths for the saved models
script_dir = os.path.dirname(os.path.abspath(__file__))
scaler_path = os.path.join(script_dir, 'toolkit', 'scaler_X.json')
rf_model_path = os.path.join(script_dir, 'toolkit', 'rf_model.joblib')
mlp_model_path = os.path.join(script_dir, 'toolkit', 'mlp_model.keras')
meta_model_path = os.path.join(script_dir, 'toolkit', 'meta_model.joblib')
image_path = os.path.join(script_dir, 'toolkit', 'car.png')

# Load the scaler and models
try:
    # Load the scaler
    with open(scaler_path, 'r') as f:
        scaler_params = json.load(f)
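    # Rebuild the fitted MinMaxScaler from the exported JSON parameters;
    # setting these attributes by hand reproduces a fitted scaler without
    # relying on a pickled object that may break across scikit-learn versions.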
    scaler_X = MinMaxScaler()
    scaler_X.scale_ = np.array(scaler_params["scale_"])
    scaler_X.min_ = np.array(scaler_params["min_"])
    scaler_X.data_min_ = np.array(scaler_params["data_min_"])
    scaler_X.data_max_ = np.array(scaler_params["data_max_"])
    scaler_X.data_range_ = np.array(scaler_params["data_range_"])
    scaler_X.n_features_in_ = scaler_params["n_features_in_"]
    scaler_X.feature_names_in_ = np.array(scaler_params["feature_names_in_"])
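    # Note: the DataFrame built in predict_and_plot must use the same column
    # names and order as feature_names_in_, or transform() will complain
    # about mismatched feature names.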

    # Load the models
    loaded_rf_model = joblib.load(rf_model_path)
    print("Random Forest model loaded successfully.")
    loaded_mlp_model = load_model(mlp_model_path)
    print("MLP model loaded successfully.")
    loaded_meta_model = joblib.load(meta_model_path)
    print("Meta model loaded successfully.")
except Exception as e:
    print(f"Error loading models or scaler: {e}")
    raise  # Fail fast rather than continuing with undefined model variables

def predict_and_plot(velocity, temperature, precipitation, humidity):
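    """Predict contamination levels for the six lidar sensors, simulate their
    growth over time, and estimate when each needs cleaning."""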
    try:
        # Prepare the example data
        example_data = pd.DataFrame({
            'Velocity(mph)': [velocity],
            'Temperature': [temperature],
            'Precipitation': [precipitation],
            'Humidity': [humidity]
        })

        # Scale the example data
        example_data_scaled = scaler_X.transform(example_data)

        # Function to predict contamination levels
        def predict_contamination(example_data_scaled):
            # Predict using MLP model
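            # The Keras model is assumed to have two output heads
            # (contamination and gradients), so predict() returns a pair
            # of arrays that can be unpacked directly.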
            mlp_predictions_contamination, mlp_predictions_gradients = loaded_mlp_model.predict(example_data_scaled)

            # Predict using RF model
            rf_predictions = loaded_rf_model.predict(example_data_scaled)

            # Combine predictions for meta model
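            # The column order must match what the meta model was trained on:
            # MLP contamination, then MLP gradients, then RF outputs.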
            combined_features = np.concatenate([np.concatenate([mlp_predictions_contamination, mlp_predictions_gradients], axis=1), rf_predictions], axis=1)

            # Predict using meta model
            meta_predictions = loaded_meta_model.predict(combined_features)

            return meta_predictions[:, :6]  # Assuming the first 6 columns are contamination predictions

        # Predict contamination levels for the single example
        contamination_levels = predict_contamination(example_data_scaled)

        # Simulate contamination levels at multiple time intervals
        time_intervals = np.arange(0, 601, 60)  # 0 to 600 seconds in 60 s steps (11 samples)

        # Generate simulated contamination levels (linear ramp from each predicted value to twice that value)
        simulated_contamination_levels = np.array([
            np.linspace(contamination_levels[0][i], contamination_levels[0][i] * 2, len(time_intervals))
            for i in range(contamination_levels.shape[1])
        ]).T
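        # Resulting shape: (len(time_intervals), n_outputs), one column per
        # lidar, each ramping linearly over the simulated window.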

        # Function to calculate cleaning time using linear interpolation
        def calculate_cleaning_time(time_intervals, contamination_levels, threshold=0.4):
            cleaning_times = []
            for i in range(contamination_levels.shape[1]):
                levels = contamination_levels[:, i]
                crossing_time = None
                for j in range(1, len(levels)):
                    if levels[j-1] <= threshold <= levels[j]:
                        # Linear interpolation between the two samples bracketing the threshold
                        t1, t2 = time_intervals[j-1], time_intervals[j]
                        c1, c2 = levels[j-1], levels[j]
                        # Guard against a zero denominator when both samples equal the threshold
                        crossing_time = t1 if c2 == c1 else t1 + (threshold - c1) * (t2 - t1) / (c2 - c1)
                        break
                # Append None when the threshold is never crossed so the list
                # stays aligned with the lidar order
                cleaning_times.append(crossing_time)
            return cleaning_times

        # Calculate cleaning times for all 6 lidars
        cleaning_times = calculate_cleaning_time(time_intervals, simulated_contamination_levels)
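        # Entries are None for lidars whose simulated contamination never
        # reaches the threshold within the time window.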

        # Lidar names (the last three labels are assumed; adjust to the actual sensor layout)
        lidar_names = ['F/L', 'F/R', 'Left', 'Right', 'Roof', 'Rear']