import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from pathlib import Path
from keras.models import model_from_json
import joblib
import requests
import json
from datetime import datetime
def load_model(name):
    # Load the model architecture from JSON
    with open(f"{name}.json", "r") as json_file:
        loaded_model_json = json_file.read()
    loaded_model = model_from_json(loaded_model_json)
    # Check that the weights file exists before loading
    weights_file = f"{name}.weights.h5"
    if not Path(weights_file).is_file():
        raise FileNotFoundError(f"Weight file {weights_file} not found.")
    # Load weights into the reconstructed model
    loaded_model.load_weights(weights_file)
    print("Loaded model from disk")
    return loaded_model
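# Hedged sketch (assumption, not part of the original pipeline): a companion saver that
# could have produced the "<name>.json" / "<name>.weights.h5" pair read by load_model(),
# using standard Keras serialization calls. The name save_model is hypothetical.
def save_model(model_to_save, name):
    # Serialize the architecture to JSON, matching the filename pattern load_model() expects
    with open(f"{name}.json", "w") as json_file:
        json_file.write(model_to_save.to_json())
    # Save the weights to HDF5 next to the JSON file
    model_to_save.save_weights(f"{name}.weights.h5")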
model = load_model("3_day_forecast_AQI_v5")
####################################
# Load the scalers
scaler_X = joblib.load('scaler_X_AQI.pkl')
scaler_y = joblib.load('scaler_y_AQI.pkl')
import os
# Candidate locations (lat, lon):
# Delhi       28.639638713652012, 77.19002000205269
# Bhopal      23.23731292701139, 77.44433463788636
# Ahmedabad   23.0364012974141, 72.58238347964425
# Ankleshwar  21.62880896774956, 73.0043990197163
# Jamnagar    22.3033564155508, 70.8012921707898
# (unnamed)   21.22050672027795, 72.83355967457062
# (unnamed)   21.236796371788703, 72.8665479925569
# Define API parameters
api_key = "26daca1b78f44099a755b921be4bfcf1"  # Weatherbit API key
latitude = 21.236796371788703   # Example latitude
longitude = 72.8665479925569    # Example longitude
base_url = f"https://api.weatherbit.io/v2.0/current/airquality?lat={latitude}&lon={longitude}&key={api_key}"
# Make the API request
response = requests.get(base_url)
if response.status_code == 200:
    data = response.json()
    # The current-conditions endpoint returns a single record under 'data'
    dx = [data['data'][0]]
    test = pd.DataFrame(dx)
    # Add time-based features
    now = datetime.now()
    current_time = now.strftime("%Y-%m-%d %H:%M:%S")
    test = test[['pm25', 'pm10', 'no2', 'so2', 'co', 'aqi']]
    test['Date'] = pd.to_datetime(current_time)
    test['Day'] = test['Date'].dt.day
    test['Month'] = test['Date'].dt.month
    test['Hour'] = test['Date'].dt.hour
    test = test[['pm25', 'pm10', 'no2', 'so2', 'co', 'aqi', 'Day', 'Month', 'Hour']]
    test.columns = ['PM2.5', 'PM10', 'NO2', 'SO2', 'CO', 'AQI', 'Day', 'Month', 'Hour']
    # Standardize the features with the scalers loaded above and predict the 3-day AQI
    data_normalized = scaler_X.transform(test)
    prediction_Test = model.predict(data_normalized)
    predictions_actual = scaler_y.inverse_transform(prediction_Test)
    test['lat'] = latitude
    test['lon'] = longitude
    # Combine the input features with the three forecast steps
    pred = pd.DataFrame(predictions_actual, columns=['AQI_step_1', 'AQI_step_2', 'AQI_step_3'])
    df = pd.concat([test, pred], axis=1)
    # Define the CSV file path
    csv_file_path = "aqi_data.csv"
    # Create the CSV file with headers if it doesn't exist
    if not os.path.exists(csv_file_path):
        columns = ['PM2.5', 'PM10', 'NO2', 'SO2', 'CO', 'AQI', 'Day', 'Month', 'Hour',
                   'lat', 'lon', 'AQI_step_1', 'AQI_step_2', 'AQI_step_3']
        df_empty = pd.DataFrame(columns=columns)
        df_empty.to_csv(csv_file_path, index=False)
    # Append the new row to the existing CSV
    df.to_csv(csv_file_path, mode='a', index=False, header=False)
    print(f"Data appended to {csv_file_path}")
else:
    print(f"Current air quality request failed with status {response.status_code}")
####################################
# Fetch the Weatherbit air quality forecast for the same location to use as reference values
# Define API parameters
api_key = "26daca1b78f44099a755b921be4bfcf1"  # Weatherbit API key
# Candidate locations (lat, lon):
# 21.195069775800516, 72.79324648126439
# 21.22050672027795, 72.83355967457062
# Delhi      28.639638713652012, 77.19002000205269
# Bhopal     23.23731292701139, 77.44433463788636
# Ahmedabad  23.0364012974141, 72.58238347964425
# Ankleshwar 21.62880896774956, 73.0043990197163
# Jamnagar   22.3033564155508, 70.8012921707898
# New Delhi  28.619913380208967, 77.20633325621425
latitude = 21.236796371788703   # Example latitude
longitude = 72.8665479925569    # Example longitude
base_url = f"https://api.weatherbit.io/v2.0/forecast/airquality?lat={latitude}&lon={longitude}&key={api_key}"
# Make the API request
response = requests.get(base_url)
if response.status_code == 200:
    # Parse the returned JSON and keep only the hourly forecast records
    data = response.json()
    data = data['data']
else:
    raise RuntimeError(f"Forecast request failed with status {response.status_code}")

from collections import defaultdict

# Group hourly AQI values by date (ignoring the hour)
grouped_aqi = defaultdict(list)
for entry in data:
    # Extract the date part only from the datetime (before the colon)
    date = entry['datetime'].split(':')[0]
    aqi = entry['aqi']
    grouped_aqi[date].append(aqi)
# Convert the defaultdict to a regular dictionary
grouped_aqi = dict(grouped_aqi)
# Display the result
print(grouped_aqi)
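# The forecast endpoint appears to return hourly records, so each date in grouped_aqi maps
# to a list of hourly AQI values; index 11 below picks roughly the midday reading for each
# day, falling back to the last available hour when a day is only partially covered.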
# Pick one representative AQI reading per forecast day
index = 11
samp = {}
for day in grouped_aqi:
    ls = grouped_aqi[day]
    # Use the value at the chosen hour index if available, otherwise the last hour of that day
    if index < len(ls):
        samp[day] = ls[index]
    else:
        samp[day] = ls[-1]
print(samp)
df = pd.DataFrame([samp])
df.columns = ['AQI_current', 'AQI_step_1', 'AQI_step_2', 'AQI_step_3']
print(df)
# Define the CSV file path
csv_file_path = "aqi_data_actual_api.csv"
# Create the CSV file with headers if it doesn't exist
if not os.path.exists(csv_file_path):
    columns = ['AQI_current_API', 'AQI_step_1_API', 'AQI_step_2_API', 'AQI_step_3_API']
    df_empty = pd.DataFrame(columns=columns)
    df_empty.to_csv(csv_file_path, index=False)
# Append the new row to the existing CSV
df.to_csv(csv_file_path, mode='a', index=False, header=False)
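# Note: aqi_data.csv and aqi_data_actual_api.csv are appended to within the same run, so
# the side-by-side concat below assumes their rows stay aligned one-to-one.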
##########################################################
import folium
from io import BytesIO
import base64
# Load the model predictions and the API reference values recorded above
df1 = pd.read_csv('aqi_data.csv')
df2 = pd.read_csv('aqi_data_actual_api.csv')
data = pd.concat([df1, df2], axis=1)
data = data.head(3)
# Create the Folium map
map_center = [data['lat'].mean(), data['lon'].mean()]
m = folium.Map(location=map_center, zoom_start=10)
# AQI Color Legend
legend_html = """
<div style="
position: fixed;
bottom: 20px; left: 20px; width: 350px; height: 225px;
background-color: white;
z-index:9999; font-size:14px; border:2px solid grey;
padding: 10px; overflow-y: auto;">
<b>AQI Color Legend</b>
<table style="width: 100%; border-collapse: collapse; text-align: left;">
<thead>
<tr style="border-bottom: 2px solid grey;">
<th style="padding: 5px;">Color</th>
<th style="padding: 5px;">Remark</th>
<th style="padding: 5px;">Range</th>
</tr>
</thead>
<tbody>
<tr>
<td><i style="background:green; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
<td>Good</td>
<td>0-50</td>
</tr>
<tr>
<td><i style="background:yellow; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
<td>Moderate</td>
<td>51-100</td>
</tr>
<tr>
<td><i style="background:orange; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
<td>Unhealthy for Sensitive Groups</td>
<td>101-150</td>
</tr>
<tr>
<td><i style="background:red; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
<td>Unhealthy</td>
<td>151-200</td>
</tr>
<tr>
<td><i style="background:purple; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
<td>Very Unhealthy</td>
<td>201-300</td>
</tr>
<tr>
<td><i style="background:maroon; width:15px; height:15px; display:inline-block; border:1px solid black;"></i></td>
<td>Hazardous</td>
<td>301+</td>
</tr>
</tbody>
</table>
</div>
"""
# Add the legend to the map
legend = folium.Element(legend_html)
m.get_root().html.add_child(legend)
# Function to generate a horizontal bar plot comparing model and API AQI forecasts
def create_aqi_comparison_plot(data):
    fig, ax = plt.subplots(figsize=(5, 2))
    categories = ['DAY 1', 'DAY 2', 'DAY 3']
    actual_values = [data['AQI_step_1'], data['AQI_step_2'], data['AQI_step_3']]
    api_values = [data['AQI_step_1_API'], data['AQI_step_2_API'], data['AQI_step_3_API']]
    bar_width = 0.35
    index = range(len(categories))
    # Plot horizontal bars for the model forecast and the API reference
    bars_actual = ax.barh(index, actual_values, bar_width, label="Model AQI", color='blue')
    bars_api = ax.barh([i + bar_width for i in index], api_values, bar_width, label="API AQI", color='green')
    # Annotate each bar with its value
    max_value = 0  # Track the maximum value for axis limit adjustment
    for bar in bars_actual:
        value = bar.get_width()
        ax.text(value + 2, bar.get_y() + bar.get_height() / 2,
                f'{value:.1f}', va='center', fontsize=10)
        max_value = max(max_value, value)
    for bar in bars_api:
        value = bar.get_width()
        ax.text(value + 2, bar.get_y() + bar.get_height() / 2,
                f'{value:.1f}', va='center', fontsize=10)
        max_value = max(max_value, value)
    # Adjust x-axis limits to accommodate the annotations
    ax.set_xlim(0, max_value * 1.2)
    # Customize y-ticks and labels
    ax.set_yticks([i + bar_width / 2 for i in index])
    ax.set_yticklabels(categories)
    ax.set_xlabel('AQI')
    ax.set_title('AQI Comparison')
    # Place the legend outside the plot area
    ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), frameon=False)
    plt.tight_layout()
    # Save the plot to a PNG image in memory
    buffer = BytesIO()
    plt.savefig(buffer, format="png", bbox_inches='tight')
    plt.close(fig)
    buffer.seek(0)
    # Encode the image to base64 so it can be embedded in the popup HTML
    image_base64 = base64.b64encode(buffer.read()).decode()
    return f'<img src="data:image/png;base64,{image_base64}">'
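# Design note: the comparison chart is embedded in the Folium popup as a base64-encoded
# PNG data URI, so the exported HTML map stays self-contained with no external image files.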
# Function to determine the AQI marker color
def get_color_for_aqi(aqi_value):
    if aqi_value <= 50:
        return 'green'
    elif aqi_value <= 100:
        return 'yellow'
    elif aqi_value <= 150:
        return 'orange'
    elif aqi_value <= 200:
        return 'red'
    elif aqi_value <= 300:
        return 'purple'
    else:
        return 'maroon'
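# These thresholds mirror the AQI legend defined above (0-50 Good through 301+ Hazardous),
# i.e. the standard US EPA AQI colour bands.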
# Add a marker per row, with the AQI comparison plot embedded in its popup
for _, row in data.iterrows():
    color = get_color_for_aqi(row['AQI_step_1'])
    popup_html = create_aqi_comparison_plot(row)
    folium.Marker(
        location=[row["lat"], row["lon"]],
        popup=folium.Popup(html=popup_html, max_width=500),
        # tooltip=row["name"],
        icon=folium.Icon(color=color)
    ).add_to(m)
# Save the map
m.save("aqi_forecast_with_legend.html")
m