WCarlW committed
Commit 8ffc352 · 1 Parent(s): 4f801de

Upload 3 files

Files changed (3)
  1. app.py +140 -0
  2. functions.py +212 -0
  3. requirements.txt +10 -0
app.py ADDED
@@ -0,0 +1,140 @@
+ import streamlit as st
+ import hopsworks
+ import joblib
+ import pandas as pd
+
+ import numpy as np
+ import folium
+ from streamlit_folium import st_folium, folium_static
+ import json
+ import time
+ from datetime import timedelta, datetime
+ from branca.element import Figure
+
+ from functions import decode_features, get_model
+
+
+ def fancy_header(text, font_size=24):
+     res = f'<span style="color:#ff5f27; font-size: {font_size}px;">{text}</span>'
+     st.markdown(res, unsafe_allow_html=True)
+
+
+ st.title('⛅️Air Quality Prediction Project🌩')
+
+ st.sidebar.header('⚙️ Working Progress')
+ progress_bar = st.sidebar.progress(0)
+ st.write(36 * "-")
+ fancy_header('\n📡 Connecting to Hopsworks Feature Store...')
+
+ project = hopsworks.login()
+ fs = project.get_feature_store()
+ feature_view = fs.get_feature_view(
+     name='air_quality_fv',
+     version=1
+ )
+
+ st.write("Successfully connected!✔️")
+ progress_bar.progress(20)
+
+ st.write(36 * "-")
+ fancy_header('\n☁️ Getting batch data from Feature Store...')
+
+ start_date = datetime.now() - timedelta(days=1)
+ start_time = int(start_date.timestamp()) * 1000  # epoch milliseconds
+
+ X = feature_view.get_batch_data(start_time=start_time)
+ progress_bar.progress(50)
+
+ latest_date_unix = str(X.date.values[0])[:10]
+ latest_date = time.ctime(int(latest_date_unix))
+
+ st.write(f"⏱ Data for {latest_date}")
+
+ X = X.drop(columns=["date"]).fillna(0)
+
+ data_to_display = decode_features(X, feature_view=feature_view)
+
+ progress_bar.progress(60)
+
+ st.write(36 * "-")
+ fancy_header("🗺 Processing the map...")
+
+ fig = Figure(width=550, height=350)
+
+ my_map = folium.Map(location=[58, 20], zoom_start=3.71)
+ fig.add_child(my_map)
+ folium.TileLayer('Stamen Terrain').add_to(my_map)
+ folium.TileLayer('Stamen Toner').add_to(my_map)
+ folium.TileLayer('Stamen Water Color').add_to(my_map)
+ folium.TileLayer('cartodbpositron').add_to(my_map)
+ folium.TileLayer('cartodbdark_matter').add_to(my_map)
+ folium.LayerControl().add_to(my_map)
+
+ data_to_display = data_to_display[["city", "temp", "humidity",
+                                    "conditions", "aqi"]]
+
+ cities_coords = {("Sundsvall", "Sweden"): [62.390811, 17.306927],
+                  ("Stockholm", "Sweden"): [59.334591, 18.063240],
+                  ("Malmo", "Sweden"): [55.604981, 13.003822]}
+
+ if "Kyiv" in data_to_display["city"].values:
+     cities_coords[("Kyiv", "Ukraine")] = [50.450001, 30.523333]
+
+ data_to_display = data_to_display.set_index("city")
+
+ cols_names_dict = {"temp": "Temperature",
+                    "humidity": "Humidity",
+                    "conditions": "Conditions",
+                    "aqi": "AQI"}
+
+ data_to_display = data_to_display.rename(columns=cols_names_dict)
+
+ cols_ = ["Temperature", "Humidity", "AQI"]
+ data_to_display[cols_] = data_to_display[cols_].apply(lambda x: round(x, 1))
+
+ for city, country in cities_coords:
+     text = f"""
+     <h4 style="color:green;">{city}</h4>
+     <h5 style="color:green;">
+     <table style="text-align: right;">
+     <tr>
+         <th>Country:</th>
+         <td><b>{country}</b></td>
+     </tr>
+     """
+     for column in data_to_display.columns:
+         text += f"""
+         <tr>
+             <th>{column}:</th>
+             <td>{data_to_display.loc[city][column]}</td>
+         </tr>"""
+     text += """</table>
+     </h5>"""
+
+     folium.Marker(
+         cities_coords[(city, country)], popup=text, tooltip=f"<strong>{city}</strong>"
+     ).add_to(my_map)
+
+
+ # call to render Folium map in Streamlit
+ folium_static(my_map)
+ progress_bar.progress(80)
+ st.sidebar.write("-" * 36)
+
+
+ model = get_model(project=project,
+                   model_name="gradient_boost_model",
+                   evaluation_metric="f1_score",
+                   sort_metrics_by="max")
+
+ preds = model.predict(X)
+
+ cities = [city_tuple[0] for city_tuple in cities_coords.keys()]
+
+ next_day_date = datetime.today() + timedelta(days=1)
+ next_day = next_day_date.strftime('%d/%m/%Y')
+ df = pd.DataFrame(data=preds, index=cities, columns=[f"AQI Predictions for {next_day}"], dtype=int)
+
+ st.sidebar.write(df)
+ progress_bar.progress(100)
+ st.button("Re-run")
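A note on the final step above: `model.predict(X)` yields one AQI prediction per batch row, and the sidebar table pairs those predictions with the keys of `cities_coords` by position, so the batch rows are assumed to arrive in the same city order. A minimal standalone sketch of that table-building step, with stub predictions in place of the registry model (the values are illustrative, not real output):

    import pandas as pd
    from datetime import datetime, timedelta

    preds = [42, 35, 28]  # stub predictions, one per city, in cities_coords order
    cities = ["Sundsvall", "Stockholm", "Malmo"]
    next_day = (datetime.today() + timedelta(days=1)).strftime('%d/%m/%Y')
    # same construction as in app.py: predictions indexed by city name
    df = pd.DataFrame(data=preds, index=cities,
                      columns=[f"AQI Predictions for {next_day}"], dtype=int)
    print(df)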
functions.py ADDED
@@ -0,0 +1,212 @@
+ # The air quality data in this file are requested from the World Air Quality Index (waqi.info) API
+ # The weather data in this file are requested from the VisualCrossing API
+
+ from datetime import datetime
+ import requests
+ import os
+ import joblib
+ import pandas as pd
+
+ from dotenv import load_dotenv
+ load_dotenv()
+
+ # API key requested from the IQAir website
+ # AIR_QUALITY_API_KEY = "794ac24a-edae-4d19-8ce9-7faf880cc303"
+
+ # API key requested from VisualCrossing website
+ # WEATHER_API_KEY = "582P2T7XUZWW6FGWEQK2W68LF"
+
+ def decode_features(df, feature_view):
+     """Decodes features in the input DataFrame using the corresponding Hopsworks Feature Store transformation functions."""
+     df_res = df.copy()
+
+     import inspect
+
+
+     td_transformation_functions = feature_view._batch_scoring_server._transformation_functions
+
+     # invert each transformation that was applied to a batch column
+     for feature_name in td_transformation_functions:
+         if feature_name in df_res.columns:
+             td_transformation_function = td_transformation_functions[feature_name]
+             sig = inspect.signature(td_transformation_function.transformation_fn)
+             param_dict = {param.name: param.default for param in sig.parameters.values() if param.default is not inspect.Parameter.empty}
+             if td_transformation_function.name == "min_max_scaler":
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: x * (param_dict["max_value"] - param_dict["min_value"]) + param_dict["min_value"])
+
+             elif td_transformation_function.name == "standard_scaler":
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: x * param_dict['std_dev'] + param_dict["mean"])
+             elif td_transformation_function.name == "label_encoder":
+                 dictionary = param_dict['value_to_index']
+                 dictionary_ = {v: k for k, v in dictionary.items()}
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: dictionary_[x])
+     return df_res
+
+
+ def get_model(project, model_name, evaluation_metric, sort_metrics_by):
+     """Retrieve the desired model from the local directory, or download it
+     from the Hopsworks Model Registry if no local copy is found."""
+     TARGET_FILE = "model.pkl"
+     list_of_files = [os.path.join(dirpath, filename) for dirpath, _, filenames
+                      in os.walk('.') for filename in filenames if filename == TARGET_FILE]
+
+     if list_of_files:
+         model_path = list_of_files[0]
+         model = joblib.load(model_path)
+     else:
+         # no local model.pkl found, so fetch it from the Model Registry
+         mr = project.get_model_registry()
+         # get best model based on custom metrics
+         model = mr.get_best_model(model_name,
+                                   evaluation_metric,
+                                   sort_metrics_by)
+         model_dir = model.download()
+         model = joblib.load(model_dir + "/model.pkl")
+
+     return model
+
+
+ def get_air_json(city_name, AIR_QUALITY_API_KEY):
+     return requests.get(f'https://api.waqi.info/feed/{city_name}/?token={AIR_QUALITY_API_KEY}').json()['data']
+
+ def get_air_quality_data(city_name):
+     AIR_QUALITY_API_KEY = os.getenv('AIR_QUALITY_API_KEY')
+     json_data = get_air_json(city_name, AIR_QUALITY_API_KEY)
+     iaqi = json_data['iaqi']
+     forecast = json_data['forecast']['daily']
+     return [
+         city_name,
+         json_data['aqi'],             # AQI
+         json_data['time']['s'][:10],  # date
+         iaqi['h']['v'],
+         iaqi['p']['v'],
+         iaqi['pm10']['v'],
+         iaqi['t']['v'],
+         forecast['o3'][0]['avg'],
+         forecast['o3'][0]['max'],
+         forecast['o3'][0]['min'],
+         forecast['pm10'][0]['avg'],
+         forecast['pm10'][0]['max'],
+         forecast['pm10'][0]['min'],
+         forecast['pm25'][0]['avg'],
+         forecast['pm25'][0]['max'],
+         forecast['pm25'][0]['min'],
+         # forecast['uvi'][0]['avg'],
+         # forecast['uvi'][0]['max'],
+         # forecast['uvi'][0]['min']
+     ]
+
+ def get_air_quality_df(data):
+     col_names = [
+         'city',
+         'aqi',
+         'date&time',
+         'iaqi_h',
+         'iaqi_p',
+         'iaqi_pm10',
+         'iaqi_t',
+         'o3_avg',
+         'o3_max',
+         'o3_min',
+         'pm10_avg',
+         'pm10_max',
+         'pm10_min',
+         'pm25_avg',
+         'pm25_max',
+         'pm25_min',
+         # 'uvi_avg',
+         # 'uvi_max',
+         # 'uvi_min',
+     ]
+
+     new_data = pd.DataFrame(
+         data,
+         columns=col_names
+     )
+     new_data['date&time'] = new_data['date&time'].apply(timestamp_2_time)
+
+     return new_data
+
+
+ def get_weather_json(city, date, WEATHER_API_KEY):
+     return requests.get(f'https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/{city.lower()}/{date}?unitGroup=metric&include=days&key={WEATHER_API_KEY}&contentType=json').json()
+
+
+ def get_weather_data(city_name, date):
+     WEATHER_API_KEY = os.getenv('WEATHER_API_KEY')
+     json_data = get_weather_json(city_name, date, WEATHER_API_KEY)
+     data = json_data['days'][0]
+
+     return [
+         json_data['address'].capitalize(),
+         data['datetime'],
+         data['tempmax'],
+         data['tempmin'],
+         data['temp'],
+         data['feelslikemax'],
+         data['feelslikemin'],
+         data['feelslike'],
+         data['dew'],
+         data['humidity'],
+         data['precip'],
+         data['precipprob'],
+         data['precipcover'],
+         data['snow'],
+         data['snowdepth'],
+         data['windgust'],
+         data['windspeed'],
+         data['winddir'],
+         data['pressure'],
+         data['cloudcover'],
+         data['visibility'],
+         data['solarradiation'],
+         data['solarenergy'],
+         data['uvindex'],
+         data['conditions']
+     ]
+
+
+ def get_weather_df(data):
+     col_names = [
+         'city',
+         'date',
+         'tempmax',
+         'tempmin',
+         'temp',
+         'feelslikemax',
+         'feelslikemin',
+         'feelslike',
+         'dew',
+         'humidity',
+         'precip',
+         'precipprob',
+         'precipcover',
+         'snow',
+         'snowdepth',
+         'windgust',
+         'windspeed',
+         'winddir',
+         'pressure',
+         'cloudcover',
+         'visibility',
+         'solarradiation',
+         'solarenergy',
+         'uvindex',
+         'conditions'
+     ]
+
+     new_data = pd.DataFrame(
+         data,
+         columns=col_names
+     )
+     new_data['date'] = new_data['date'].apply(timestamp_2_time)
+
+     return new_data
+
+ def timestamp_2_time(x):
+     dt_obj = datetime.strptime(str(x), '%Y-%m-%d')
+     dt_obj = dt_obj.timestamp() * 1000  # epoch milliseconds
+     return int(dt_obj)
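`decode_features` above inverts each transformation by reading the scaler parameters from the default arguments of the transformation function's signature. A minimal standalone sketch of the `min_max_scaler` branch (the bounds here are illustrative, not taken from a real feature view): the scaler encodes x as (x - min) / (max - min), so decoding multiplies by the range and adds the minimum back:

    # illustrative bounds; in decode_features these come from param_dict
    min_value, max_value = 0.0, 50.0
    encoded = (21.0 - min_value) / (max_value - min_value)   # forward: 0.42
    decoded = encoded * (max_value - min_value) + min_value  # inverse: 21.0
    assert abs(decoded - 21.0) < 1e-9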
requirements.txt ADDED
@@ -0,0 +1,10 @@
+ branca==0.6.0
+ folium==0.14.0
+ hopsworks==3.0.5
+ joblib==1.2.0
+ numpy==1.23.5
+ pandas==1.5.2
+ python-dotenv==0.21.0
+ requests==2.28.1
+ streamlit==1.17.0
+ streamlit_folium==0.10.0
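(The stray `dotenv` entry is dropped: `from dotenv import load_dotenv` is provided by python-dotenv, already pinned above, while the PyPI package named `dotenv` is a different project.)

functions.py loads both API keys from the environment via python-dotenv, so a local run needs AIR_QUALITY_API_KEY and WEATHER_API_KEY defined, for example in a .env file next to the code. For a quick experiment they can also be set in-process before importing functions (placeholder values shown, not real keys):

    import os
    os.environ["AIR_QUALITY_API_KEY"] = "<your-waqi-token>"          # placeholder
    os.environ["WEATHER_API_KEY"] = "<your-visualcrossing-key>"      # placeholder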