Env_ContaminationLevel / App_Env&CL.py
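"""Environmental Factor-Based Contamination Level Prediction (Gradio app).

Downloads a fitted input scaler, a Random Forest base model, an MLP base model,
and a stacking meta-model from this Hugging Face Space, then serves a Gradio UI
that maps four weather inputs (velocity, temperature, precipitation, humidity)
to predicted contamination levels for six LiDAR positions.
"""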
import os
import requests
import gradio as gr
import numpy as np
import joblib
from tensorflow.keras.models import load_model
# URLs to the saved models and scaler.
# Note: '/resolve/main/' serves the raw files; '/blob/main/' returns the HTML viewer page.
scaler_url = 'https://huggingface.co/spaces/saba000farahani/Env_ContaminationLevel/resolve/main/scaler_X.pkl'
rf_model_url = 'https://huggingface.co/spaces/saba000farahani/Env_ContaminationLevel/resolve/main/rf_model.pkl'
mlp_model_url = 'https://huggingface.co/spaces/saba000farahani/Env_ContaminationLevel/resolve/main/mlp_model.h5'
meta_model_url = 'https://huggingface.co/spaces/saba000farahani/Env_ContaminationLevel/resolve/main/meta_model.pkl'
# Paths to save the downloaded models
scaler_path = 'scaler_X.pkl'
rf_model_path = 'rf_model.pkl'
mlp_model_path = 'mlp_model.h5'
meta_model_path = 'meta_model.pkl'
def download_model(url, path):
    """Download a file to `path` if it is not already present locally."""
    if not os.path.exists(path):
        print(f"Downloading {path}...")
        response = requests.get(url)
        response.raise_for_status()  # fail loudly on a bad download instead of saving an error page
        with open(path, 'wb') as f:
            f.write(response.content)
        print(f"Downloaded {path}.")
    else:
        print(f"{path} already exists.")
# Download the models and scaler if they don't exist locally
download_model(scaler_url, scaler_path)
download_model(rf_model_url, rf_model_path)
download_model(mlp_model_url, mlp_model_path)
download_model(meta_model_url, meta_model_path)
try:
    # Load the scaler, the two base models, and the stacking meta-model
    scaler_X = joblib.load(scaler_path)
    loaded_rf_model = joblib.load(rf_model_path)
    loaded_mlp_model = load_model(mlp_model_path)
    loaded_meta_model = joblib.load(meta_model_path)
    print("Models and scaler loaded successfully.")
except Exception as e:
    print(f"Error loading models or scaler: {e}")
def predict_new_values(new_input_data):
    try:
        # Ensure the new input data is a single-row 2D array
        print(f"Raw Input Data: {new_input_data}")
        new_input_data = np.array(new_input_data).reshape(1, -1)

        # Scale the new input data with the fitted scaler
        new_input_scaled = scaler_X.transform(new_input_data)
        print(f"Scaled Input Data: {new_input_scaled}")

        # Make predictions with both base models
        mlp_predictions_new = loaded_mlp_model.predict(new_input_scaled)
        rf_predictions_new = loaded_rf_model.predict(new_input_scaled)

        # Stack the base-model predictions as features for the meta-model
        combined_features_new = np.concatenate([mlp_predictions_new, rf_predictions_new], axis=1)
        print(f"Combined Features: {combined_features_new}")

        # Use the loaded meta-model to make the final prediction
        loaded_meta_predictions_new = loaded_meta_model.predict(combined_features_new)
        print(f"Meta Model Predictions: {loaded_meta_predictions_new}")

        return loaded_meta_predictions_new[0]
    except Exception as e:
        print(f"Error in prediction: {e}")
        return ["Error", "Error", "Error", "Error", "Error", "Error"]
def gradio_interface(velocity, temperature, precipitation, humidity):
    try:
        input_data = [velocity, temperature, precipitation, humidity]
        print(f"Input Data: {input_data}")

        predictions = predict_new_values(input_data)
        print(f"Predictions: {predictions}")

        # Format each of the six outputs as a percentage, passing any error markers through
        return [
            f"{p * 100:.2f}%" if p != "Error" else "Error"
            for p in predictions
        ]
    except Exception as e:
        print(f"Error in Gradio interface: {e}")
        return ["Error", "Error", "Error", "Error", "Error", "Error"]
inputs = [
    gr.Slider(minimum=0, maximum=100, value=50, step=0.5, label="Velocity (mph)"),
    gr.Slider(minimum=-30, maximum=50, value=0, step=0.5, label="Temperature (°C)"),
    gr.Slider(minimum=0, maximum=10, value=0, step=0.01, label="Precipitation (inch)"),
    gr.Slider(minimum=0, maximum=100, value=50, label="Humidity (%)")
]
outputs = [
    gr.Textbox(label="Front Left"),
    gr.Textbox(label="Front Right"),
    gr.Textbox(label="Left"),
    gr.Textbox(label="Right"),
    gr.Textbox(label="Roof"),
    gr.Textbox(label="Rear")
]
gr.Interface(
    fn=gradio_interface,
    inputs=inputs,
    outputs=outputs,
    title="Environmental Factor-Based Contamination Level Prediction",
    description="Enter the environmental factors to get the contamination levels for the Front Left, Front Right, Left, Right, Roof, and Rear LiDARs."
).launch()