import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from pathlib import Path
from keras.models import model_from_json
import joblib
import requests
import json
from datetime import datetime
def load_model(name):
    # Load the architecture from JSON and rebuild the model
    with open(f"{name}.json", "r") as json_file:
        loaded_model_json = json_file.read()
    loaded_model = model_from_json(loaded_model_json)
    # Check if the weights file exists before loading
    weights_file = f"{name}.weights.h5"
    if not Path(weights_file).is_file():
        raise FileNotFoundError(f"Weight file {weights_file} not found.")
    # Load weights into the new model
    loaded_model.load_weights(weights_file)
    print("Loaded model from disk")
    return loaded_model

model = load_model("3_day_forecast_AQI_v5")
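# The call above expects 3_day_forecast_AQI_v5.json (architecture) and
# 3_day_forecast_AQI_v5.weights.h5 (weights) to be present in the working directory.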
####################################
# Load the scalers
scaler_X = joblib.load('scaler_X_AQI.pkl')
scaler_y = joblib.load('scaler_y_AQI.pkl')
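# Note: scaler_X and scaler_y are assumed to be scikit-learn scalers saved at training time,
# fitted on the same feature layout that is rebuilt below.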
import os

# Reference coordinates for other cities:
# Delhi      28.639638713652012, 77.19002000205269
# Bhopal     23.23731292701139, 77.44433463788636
# Ahmedabad  23.0364012974141, 72.58238347964425
# Ankleshwar 21.62880896774956, 73.0043990197163
# Jamnagar   22.3033564155508, 70.8012921707898
# 21.22050672027795, 72.83355967457062
# 21.236796371788703, 72.8665479925569

# Define API parameters
api_key = "26daca1b78f44099a755b921be4bfcf1"  # Weatherbit API key
latitude = 21.236796371788703   # Example latitude
longitude = 72.8665479925569    # Example longitude
base_url = f"https://api.weatherbit.io/v2.0/current/airquality?lat={latitude}&lon={longitude}&key={api_key}"
# Make the API request
response = requests.get(base_url)
if response.status_code == 200:
    data = response.json()
    # Extract the current observation
    dx = [data['data'][0]]
    test = pd.DataFrame(dx)
    # Add time-based features
    now = datetime.now()
    current_time = now.strftime("%Y-%m-%d %H:%M:%S")
    test = test[['pm25', 'pm10', 'no2', 'so2', 'co', 'aqi']]
    test['Date'] = pd.to_datetime(current_time)
    test['Day'] = test['Date'].dt.day
    test['Month'] = test['Date'].dt.month
    test['Hour'] = test['Date'].dt.hour
    test = test[['pm25', 'pm10', 'no2', 'so2', 'co', 'aqi', 'Day', 'Month', 'Hour']]
    test.columns = ['PM2.5', 'PM10', 'NO2', 'SO2', 'CO', 'AQI', 'Day', 'Month', 'Hour']
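    # The renamed columns must match the feature names and order used when scaler_X was fitted,
    # otherwise transform() below would scale the wrong columns.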
    # Scale the features and make predictions
    data_normalized = scaler_X.transform(test)
    prediction_Test = model.predict(data_normalized)
    predictions_actual = scaler_y.inverse_transform(prediction_Test)
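    # predictions_actual has shape (1, 3): the next three daily AQI values, mapped back to the
    # original AQI scale by the inverse transform.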
    test['lat'] = latitude
    test['lon'] = longitude
    # Create a DataFrame for predictions
    pred = pd.DataFrame(predictions_actual, columns=['AQI_step_1', 'AQI_step_2', 'AQI_step_3'])
    df = pd.concat([test, pred], axis=1)
    # Define the CSV file path
    csv_file_path = "aqi_data.csv"
    # Create the CSV file with headers if it doesn't exist
    if not os.path.exists(csv_file_path):
        columns = ['PM2.5', 'PM10', 'NO2', 'SO2', 'CO', 'AQI', 'Day', 'Month', 'Hour', 'lat', 'lon', 'AQI_step_1', 'AQI_step_2', 'AQI_step_3']
        df_empty = pd.DataFrame(columns=columns)
        df_empty.to_csv(csv_file_path, index=False)
    # Append new data to the existing CSV
    df.to_csv(csv_file_path, mode='a', index=False, header=False)
    print(f"Data appended to {csv_file_path}")
####################################
# Define API parameters
api_key = "26daca1b78f44099a755b921be4bfcf1"  # Weatherbit API key
# Reference coordinates:
# 21.195069775800516, 72.79324648126439
# 21.22050672027795, 72.83355967457062
# 28.639638713652012, 77.19002000205269
# 23.23731292701139, 77.44433463788636
# 23.0364012974141, 72.58238347964425
# 21.62880896774956, 73.0043990197163
# Jamnagar   22.3033564155508, 70.8012921707898
# New Delhi  28.619913380208967, 77.20633325621425
latitude = 21.236796371788703   # Example latitude
longitude = 72.8665479925569    # Example longitude
base_url = f"https://api.weatherbit.io/v2.0/forecast/airquality?lat={latitude}&lon={longitude}&key={api_key}"
# Make the API request
response = requests.get(base_url)
if response.status_code == 200:
    # Parse the returned JSON data
    data = response.json()

data = data['data']
from collections import defaultdict

# Group AQI values by date (ignoring the hour)
grouped_aqi = defaultdict(list)
for entry in data:
    # Extract the date part of the datetime (the text before the colon)
    date = entry['datetime'].split(':')[0]
    aqi = entry['aqi']
    grouped_aqi[date].append(aqi)

# Convert defaultdict to a regular dictionary
grouped_aqi = dict(grouped_aqi)
# Display the result
print(grouped_aqi)
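# index=11 picks the twelfth hourly reading of each day as that day's representative AQI,
# presumably a roughly midday value; days with fewer entries fall back to the last reading.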
index = 11
keys = grouped_aqi.keys()
samp = {}
for i in keys:
    print(i)
    ls = grouped_aqi[i]
    if index < len(ls):
        print(ls[index])
        samp[i] = ls[index]
    else:
        print(ls[-1])
        samp[i] = ls[-1]
print(samp)

df = pd.DataFrame([samp])
df.columns = ['AQI_current', 'AQI_step_1', 'AQI_step_2', 'AQI_step_3']
print(df)
# Define the CSV file path
csv_file_path = "aqi_data_actual_api.csv"
# Create the CSV file with headers if it doesn't exist
if not os.path.exists(csv_file_path):
    columns = ['AQI_current_API', 'AQI_step_1_API', 'AQI_step_2_API', 'AQI_step_3_API']
    df_empty = pd.DataFrame(columns=columns)
    df_empty.to_csv(csv_file_path, index=False)
# Append new data to the existing CSV
df.to_csv(csv_file_path, mode='a', index=False, header=False)
##########################################################
import folium
from io import BytesIO
import base64

df1 = pd.read_csv('aqi_data.csv')
df2 = pd.read_csv('aqi_data_actual_api.csv')
data = pd.concat([df1, df2], axis=1)
data = data.head(3)
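# The model CSV and the API CSV are joined column-wise by row position here, which assumes the
# two blocks above always append one row each per run so the rows stay aligned.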
# Create the Folium map
map_center = [data['lat'].mean(), data['lon'].mean()]
m = folium.Map(location=map_center, zoom_start=10)

# AQI Color Legend
legend_html = """
<div style="
    position: fixed;
    bottom: 20px; left: 20px; width: 350px; height: 225px;
    background-color: white;
    z-index:9999; font-size:14px; border:2px solid grey;
    padding: 10px; overflow-y: auto;">
    <b>AQI Color Legend</b>
    <table style="width: 100%; border-collapse: collapse; text-align: left;">
        <thead>
            <tr style="border-bottom: 2px solid grey;">
                <th style="padding: 5px;">Color</th>
                <th style="padding: 5px;">Remark</th>
                <th style="padding: 5px;">Range</th>
            </tr>
        </thead>
        <tbody>
            <tr>
                <td><i style="background:green; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
                <td>Good</td>
                <td>0-50</td>
            </tr>
            <tr>
                <td><i style="background:yellow; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
                <td>Moderate</td>
                <td>51-100</td>
            </tr>
            <tr>
                <td><i style="background:orange; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
                <td>Unhealthy for Sensitive Groups</td>
                <td>101-150</td>
            </tr>
            <tr>
                <td><i style="background:red; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
                <td>Unhealthy</td>
                <td>151-200</td>
            </tr>
            <tr>
                <td><i style="background:purple; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
                <td>Very Unhealthy</td>
                <td>201-300</td>
            </tr>
            <tr>
                <td><i style="background:maroon; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
                <td>Hazardous</td>
                <td>301+</td>
            </tr>
        </tbody>
    </table>
</div>
"""
# Add the legend to the map
legend = folium.Element(legend_html)
m.get_root().html.add_child(legend)
# Function to generate a horizontal bar plot comparing model and API AQI forecasts
def create_aqi_comparison_plot(data):
    fig, ax = plt.subplots(figsize=(5, 2))
    categories = ['DAY 1', 'DAY 2', 'DAY 3']
    actual_values = [data['AQI_step_1'], data['AQI_step_2'], data['AQI_step_3']]
    api_values = [data['AQI_step_1_API'], data['AQI_step_2_API'], data['AQI_step_3_API']]
    bar_width = 0.35
    index = range(len(categories))

    # Plot horizontal bars
    bars_actual = ax.barh(index, actual_values, bar_width, label="Model AQI", color='blue')
    bars_api = ax.barh([i + bar_width for i in index], api_values, bar_width, label="API AQI", color='green')

    # Annotate each bar with its value
    max_value = 0  # Track the maximum value for axis limit adjustment
    for bar in bars_actual:
        value = bar.get_width()
        ax.text(value + 2, bar.get_y() + bar.get_height() / 2,
                f'{value:.1f}', va='center', fontsize=10)
        max_value = max(max_value, value)
    for bar in bars_api:
        value = bar.get_width()
        ax.text(value + 2, bar.get_y() + bar.get_height() / 2,
                f'{value:.1f}', va='center', fontsize=10)
        max_value = max(max_value, value)

    # Adjust x-axis limits to accommodate the annotations
    ax.set_xlim(0, max_value * 1.2)

    # Customize y-ticks and labels
    ax.set_yticks([i + bar_width / 2 for i in index])
    ax.set_yticklabels(categories)
    ax.set_xlabel('AQI')
    ax.set_title('AQI Comparison')

    # Place the legend outside the plot area
    ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), frameon=False)
    plt.tight_layout()

    # Save the plot to a PNG image in memory
    buffer = BytesIO()
    plt.savefig(buffer, format="png", bbox_inches='tight')
    plt.close(fig)
    buffer.seek(0)

    # Encode the image as base64 so it can be embedded in the popup HTML
    image_base64 = base64.b64encode(buffer.read()).decode()
    return f'<img src="data:image/png;base64,{image_base64}">'
# Function to determine AQI marker color
def get_color_for_aqi(aqi_value):
    if aqi_value <= 50:
        return 'green'
    elif aqi_value <= 100:
        return 'yellow'
    elif aqi_value <= 150:
        return 'orange'
    elif aqi_value <= 200:
        return 'red'
    elif aqi_value <= 300:
        return 'purple'
    else:
        return 'maroon'
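# These thresholds follow the standard US EPA AQI category breakpoints, matching the legend
# table added to the map above. Note that folium.Icon supports only a fixed marker palette,
# so 'yellow' and 'maroon' may not be honored and can fall back to a default marker color.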
# Add a marker for each row, with the AQI comparison plot in its popup
for _, row in data.iterrows():
    color = get_color_for_aqi(row['AQI_step_1'])
    popup_html = create_aqi_comparison_plot(row)
    folium.Marker(
        location=[row["lat"], row["lon"]],
        popup=folium.Popup(html=popup_html, max_width=500),
        # tooltip=row["name"],
        icon=folium.Icon(color=color)
    ).add_to(m)

# Save the map
m.save("aqi_forecast_with_legend.html")
m