Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
import numpy as np
|
3 |
+
import joblib
|
4 |
+
from tensorflow.keras.models import load_model
|
5 |
+
import requests
|
6 |
+
import os
|
7 |
+
|
8 |
+
# Hugging Face Hub locations of the trained artifacts.
# NOTE: "resolve" (not "blob") is required to fetch the raw binary file;
# a "blob" URL returns the HTML file-viewer page, which cannot be
# deserialized by joblib / Keras.
scaler_url = 'https://huggingface.co/spaces/saba000farahani/Env_ContaminationLevel/resolve/main/scaler_X.pkl'
rf_model_url = 'https://huggingface.co/spaces/saba000farahani/Env_ContaminationLevel/resolve/main/rf_model.pkl'
mlp_model_url = 'https://huggingface.co/spaces/saba000farahani/Env_ContaminationLevel/resolve/main/mlp_model.h5'
meta_model_url = 'https://huggingface.co/spaces/saba000farahani/Env_ContaminationLevel/resolve/main/meta_model.pkl'

# Local cache paths the artifacts are downloaded to.
scaler_path = 'scaler_X.pkl'
rf_model_path = 'rf_model.pkl'
mlp_model_path = 'mlp_model.h5'
meta_model_path = 'meta_model.pkl'
|
19 |
+
|
20 |
+
def download_model(url, path):
    """Download *url* to *path* unless the file is already cached locally.

    Parameters
    ----------
    url : str
        Direct-download URL of the artifact.
    path : str
        Local filesystem path to store the file at.

    Raises
    ------
    requests.HTTPError
        If the server responds with an error status, so a 404/error page is
        never silently written to disk and later mistaken for a model file.
    """
    if os.path.exists(path):
        print(f"{path} already exists.")
        return
    print(f"Downloading {path}...")
    # Stream the body so large model files are not held fully in memory,
    # and bound the wait so a hung server cannot block startup forever.
    response = requests.get(url, stream=True, timeout=60)
    response.raise_for_status()
    with open(path, 'wb') as f:
        for chunk in response.iter_content(chunk_size=1 << 20):
            f.write(chunk)
    print(f"Downloaded {path}.")
|
29 |
+
|
30 |
+
# Fetch every artifact that is not already cached on disk.
for _url, _path in (
    (scaler_url, scaler_path),
    (rf_model_url, rf_model_path),
    (mlp_model_url, mlp_model_path),
    (meta_model_url, meta_model_path),
):
    download_model(_url, _path)
|
35 |
+
|
36 |
+
# Load the cached scaler and the three trained models once, at import time.
# NOTE(review): on failure this only prints the error and continues, so the
# four globals below stay undefined and predict_new_values will later fail
# with a NameError — consider re-raising here so the app fails fast.
try:
    # Load the scaler and models
    scaler_X = joblib.load(scaler_path)            # feature scaler fitted on training inputs
    loaded_rf_model = joblib.load(rf_model_path)   # random-forest base model
    loaded_mlp_model = load_model(mlp_model_path)  # Keras MLP base model
    loaded_meta_model = joblib.load(meta_model_path)  # stacking meta-model
    print("Models and scaler loaded successfully.")
except Exception as e:
    print(f"Error loading models or scaler: {e}")
|
45 |
+
|
46 |
+
def predict_new_values(new_input_data):
    """Run the stacked ensemble on one sample of environmental features.

    The sample is scaled, fed to both base models (MLP and random forest),
    and their concatenated outputs are passed to the meta-model.

    Returns the meta-model's first prediction row on success, or a list of
    six "Error" sentinels if anything fails.
    """
    try:
        print(f"Raw Input Data: {new_input_data}")
        # Shape the sample as a single-row 2-D array for the scaler.
        features = np.asarray(new_input_data).reshape(1, -1)

        scaled = scaler_X.transform(features)
        print(f"Scaled Input Data: {scaled}")

        # Base-model predictions.
        mlp_out = loaded_mlp_model.predict(scaled)
        rf_out = loaded_rf_model.predict(scaled)

        # Stack the base predictions column-wise as meta-model features.
        stacked = np.concatenate([mlp_out, rf_out], axis=1)
        print(f"Combined Features: {stacked}")

        meta_out = loaded_meta_model.predict(stacked)
        print(f"Meta Model Predictions: {meta_out}")
        return meta_out[0]
    except Exception as e:
        print(f"Error in prediction: {e}")
        return ["Error"] * 6
|
72 |
+
|
73 |
+
def gradio_interface(velocity, temperature, precipitation, humidity):
    """Gradio callback: map four environmental inputs to six LiDAR
    contamination-level strings.

    Parameters are the four slider values (velocity, temperature,
    precipitation, humidity). Returns a list of six percentage strings,
    one per sensor position, or "Error" entries if prediction failed.
    """
    try:
        input_data = [velocity, temperature, precipitation, humidity]
        print(f"Input Data: {input_data}")
        predictions = predict_new_values(input_data)
        print(f"Predictions: {predictions}")
        # Format each of the six sensor outputs as a percentage, passing the
        # "Error" sentinel through unchanged. Indexing 0..5 preserves the
        # original behavior of falling into the except branch (and returning
        # all-"Error") when fewer than six predictions come back.
        return [
            f"{predictions[i] * 100:.2f}%" if predictions[i] != "Error" else "Error"
            for i in range(6)
        ]
    except Exception as e:
        print(f"Error in Gradio interface: {e}")
        return ["Error", "Error", "Error", "Error", "Error", "Error"]
|
90 |
+
|
91 |
+
# Slider controls for the four environmental input features.
inputs = [
    gr.Slider(minimum=0, maximum=100, value=50, step=0.5, label="Velocity (mph)"),
    gr.Slider(minimum=-30, maximum=50, value=0, step=0.5, label="Temperature (°C)"),
    gr.Slider(minimum=0, maximum=10, value=0, step=0.01, label="Precipitation (inch)"),
    gr.Slider(minimum=0, maximum=100, value=50, label="Humidity (%)"),
]

# One read-only textbox per LiDAR sensor position.
outputs = [
    gr.Textbox(label=position)
    for position in ("Front Left", "Front Right", "Left", "Right", "Roof", "Rear")
]

# Build the app and start the Gradio server.
gr.Interface(
    fn=gradio_interface,
    inputs=inputs,
    outputs=outputs,
    title="Environmental Factor-Based Contamination Level Prediction",
    description="Enter the environmental factors to get the contamination levels for Front Left, Front Right, Left, Right, Roof, and Rear LiDARs.",
).launch()
|