anofa committed
Commit baa380e · verified · 1 Parent(s): 1eeb366

Upload engine.py.old

Files changed (1)
  1. demo2/pages/engine.py.old +334 -0
demo2/pages/engine.py.old ADDED
@@ -0,0 +1,334 @@
+ #%%
+ import warnings
+ import json
+ import sys
+ import argparse
+ import io
+ import os
+ import pandas as pd
+ import psycopg2
+ import geopandas as gpd
+ import numpy as np
+ from scipy.stats import norm
+ from scipy.interpolate import interp1d
+
+ # Damage States
+ DS_NO = 1
+ DS_SLIGHT = 2
+ DS_MODERATE = 3
+ DS_EXTENSIVE = 4
+ DS_COLLAPSED = 5
+
+ # Hazard Types
+ HAZARD_EARTHQUAKE = "earthquake"
+ HAZARD_FLOOD = "flood"
+ HAZARD_DEBRIS = "debris"
+
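+ # Per-hazard weights: each metric's raw count is multiplied by the matching
+ # weight in the final loop of run_engine() before the metrics are returned.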
+ weights = {
+     "earthquake": {"metric1": 0.2,
+                    "metric2": 0.2,
+                    "metric3": 0.2,
+                    "metric4": 0.2,
+                    "metric5": 0.2,
+                    "metric6": 0.2,
+                    "metric7": 0.2,
+                    },
+     "flood": {"metric1": 20,
+               "metric2": 20,
+               "metric3": 20,
+               "metric4": 20,
+               "metric5": 20,
+               "metric6": 20,
+               "metric7": 20,
+               },
+     "debris": {"metric1": 200,
+                "metric2": 200,
+                "metric3": 200,
+                "metric4": 200,
+                "metric5": 200,
+                "metric6": 200,
+                "metric7": 200,
+                }
+ }
+
+ def run_engine(hazard_type, scenario, gdf_intensity, policies=None):
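+     """Compute impact metrics (metric1..metric7) for one hazard scenario.
+
+     hazard_type is one of HAZARD_EARTHQUAKE, HAZARD_FLOOD or HAZARD_DEBRIS;
+     scenario selects the *_tv50_<scenario> building/household/individual files;
+     gdf_intensity is a point GeoDataFrame carrying an 'im' intensity column;
+     policies is an optional list of policy IDs (1-6) that raise damage-state
+     thresholds. Returns a dict with a desc/value/max_value entry per metric.
+     """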
+     policies = policies or []
+
+     building_file = f"building_tv50_{scenario}.geojson"
+     household_file = f"household_tv50_{scenario}.json"
+     individual_file = f"individual_tv50_{scenario}.json"
+
+     threshold = 1
+     threshold_flood = 0.2
+     threshold_flood_distance = 40
+     epsg = 32645  # Kathmandu (UTM zone 45N)
+
+     df_buildings = gpd.read_file(building_file)
+     df_household = pd.read_json(household_file)
+     df_individual = pd.read_json(individual_file)
+     #gdf_intensity = intensity_df.copy()
+
+     # Read vulnerability from this table if hazard is earthquake
+     if hazard_type == HAZARD_EARTHQUAKE:
+         df_eq = pd.read_csv('earthquake_fragility.csv')
+
+     elif hazard_type == HAZARD_FLOOD or hazard_type == HAZARD_DEBRIS:
+         df_flood = pd.read_csv('flood_vulnerability.csv')
+
+     # TODO: Fix the geometry/geography confusion
+     gdf_buildings = gpd.GeoDataFrame(df_buildings,
+                         geometry=gpd.points_from_xy(df_buildings.xcoord, df_buildings.ycoord))
+
+     # We assume all input is EPSG:4326
+     gdf_buildings = gdf_buildings.set_crs("EPSG:4326", allow_override=True)
+     #gdf_intensity = gdf_intensity.set_crs("EPSG:4326", allow_override=True)
+
+     # Convert both layers to the same target coordinate system
+     print(epsg)
+     gdf_buildings = gdf_buildings.to_crs(f"EPSG:{epsg}")
+     #gdf_intensity = gdf_intensity.to_crs(f"EPSG:{epsg}")
+
+     #%%
+     gdf_building_intensity = gpd.sjoin_nearest(gdf_buildings, gdf_intensity,
+                                 how='left', rsuffix='intensity', distance_col='distance')
+     gdf_building_intensity = gdf_building_intensity.drop_duplicates(subset=['bldid'], keep='first')
+
+     # TODO: Check if the logic makes sense
+     # Buildings farther than threshold_flood_distance from the nearest intensity
+     # point are treated as unaffected (intensity set to zero).
+     if hazard_type == HAZARD_FLOOD or hazard_type == HAZARD_DEBRIS:
+         away_from_flood = gdf_building_intensity['distance'] > threshold_flood_distance
+         print('threshold_flood_distance', threshold_flood_distance)
+         print('number of distant buildings', len(gdf_building_intensity.loc[away_from_flood, 'im']))
+         gdf_building_intensity.loc[away_from_flood, 'im'] = 0
+
+     #%%
+     gdf_building_intensity[['material', 'code_level', 'storeys', 'occupancy']] = \
+         gdf_building_intensity['expstr'].str.split('+', expand=True)
+     gdf_building_intensity['height'] = gdf_building_intensity['storeys'].str.extract(r'([0-9]+)s').astype('int')
+     #%%
+     # Map the number of storeys to a low/mid/high-rise class
+     lr = (gdf_building_intensity['height'] <= 4)
+     mr = (gdf_building_intensity['height'] >= 5) & (gdf_building_intensity['height'] <= 8)
+     hr = (gdf_building_intensity['height'] >= 9)
+     gdf_building_intensity.loc[lr, 'height_level'] = 'LR'
+     gdf_building_intensity.loc[mr, 'height_level'] = 'MR'
+     gdf_building_intensity.loc[hr, 'height_level'] = 'HR'
+
+     #%%
+     gdf_building_intensity['vulnstreq'] = \
+         gdf_building_intensity[['material', 'code_level', 'height_level']] \
+         .agg('+'.join, axis=1)
+
+     #%%
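+     # Lognormal fragility: for each damage state i, the exceedance probability is
+     # P(DS >= i | im) = norm.cdf((ln(im/9.81) - ln(mu_i)) / sigma_i), i.e. the
+     # intensity is converted to g and compared against the median mu_i. The
+     # discrete state probabilities ds_1..ds_5 are the differences between
+     # consecutive exceedance curves (with prob_ds0 = 1 and prob_ds5 = 0), and the
+     # most likely state is kept as eq_ds.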
+     if hazard_type == HAZARD_EARTHQUAKE:
+         bld_eq = gdf_building_intensity.merge(df_eq, on='vulnstreq', how='left')
+         # Fall back to default fragility parameters where no curve was matched
+         nulls = bld_eq['muds1_g'].isna()
+         bld_eq.loc[nulls, ['muds1_g', 'muds2_g', 'muds3_g', 'muds4_g']] = [0.048, 0.203, 0.313, 0.314]
+         bld_eq.loc[nulls, ['sigmads1', 'sigmads2', 'sigmads3', 'sigmads4']] = [0.301, 0.276, 0.252, 0.253]
+         bld_eq['logim'] = np.log(bld_eq['im']/9.81)
+         for m in ['muds1_g', 'muds2_g', 'muds3_g', 'muds4_g']:
+             bld_eq[m] = np.log(bld_eq[m])
+
+         for i in [1, 2, 3, 4]:
+             bld_eq[f'prob_ds{i}'] = norm.cdf(bld_eq['logim'], bld_eq[f'muds{i}_g'], bld_eq[f'sigmads{i}'])
+         bld_eq[['prob_ds0', 'prob_ds5']] = [1, 0]
+         for i in [1, 2, 3, 4, 5]:
+             bld_eq[f'ds_{i}'] = np.abs(bld_eq[f'prob_ds{i-1}'] - bld_eq[f'prob_ds{i}'])
+         df_ds = bld_eq[['ds_1', 'ds_2', 'ds_3', 'ds_4', 'ds_5']]
+         bld_eq['eq_ds'] = df_ds.idxmax(axis='columns').str.extract(r'ds_([0-9]+)').astype('int')
+
+         # Create a simplified building-hazard relation
+         bld_hazard = bld_eq[['bldid', 'occupancy', 'eq_ds']].copy()
+         bld_hazard = bld_hazard.rename(columns={'eq_ds': 'ds'})
+
+         ds_str = {1: 'No Damage', 2: 'Low', 3: 'Medium', 4: 'High', 5: 'Collapsed'}
+
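+     # Flood/debris: each row of the vulnerability table gives a vulnerability curve
+     # sampled at water depths 0..6 m (columns hw0..hw6); interp1d builds one curve
+     # per building and the building's own depth 'im' is evaluated on it
+     # (np.diag keeps the matching row of the interpolated matrix).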
+     elif hazard_type == HAZARD_FLOOD or hazard_type == HAZARD_DEBRIS:
+         bld_flood = gdf_building_intensity.merge(df_flood, on='expstr', how='left')
+         x = np.array([0, 0.5, 1, 1.5, 2, 3, 4, 5, 6])
+         y = bld_flood[['hw0', 'hw0.5', 'hw1', 'hw1.5', 'hw2', 'hw3', 'hw4', 'hw5', 'hw6']].to_numpy()
+         xnew = bld_flood['im'].to_numpy()
+         flood_mapping = interp1d(x, y, axis=1, kind='linear', bounds_error=False, fill_value=(0, 1))
+         # TODO: find another way to do the vectorized interpolation
+         bld_flood['fl_prob'] = np.diag(flood_mapping(xnew))
+         bld_flood['fl_ds'] = 0
+         bld_flood.loc[bld_flood['fl_prob'] > threshold_flood, 'fl_ds'] = 1
+
+         # Create a simplified building-hazard relation
+         bld_hazard = bld_flood[['bldid', 'occupancy', 'fl_ds']].copy()
+         bld_hazard = bld_hazard.rename(columns={'fl_ds': 'ds'})
+
+         ds_str = {0: 'No Damage', 1: 'Flooded'}
+
+     bld_hazard['occupancy'] = pd.Categorical(bld_hazard['occupancy'])
+     for key, value in ds_str.items():
+         bld_hazard.loc[bld_hazard['ds'] == key, 'damage_level'] = value
+     bld_hazard['damage_level'] = pd.Categorical(bld_hazard['damage_level'], list(ds_str.values()))
+
+     #%% Find the damage state of the building that the household is in
+     df_household_bld = df_household.merge(bld_hazard[['bldid', 'ds']], on='bldid', how='left', validate='many_to_one')
+     #%% Find the damage state of the hospital that the household is associated with
+     df_hospitals = df_household.merge(bld_hazard[['bldid', 'damage_level', 'ds']],
+                         how='left', left_on='commfacid', right_on='bldid', suffixes=['', '_comm'],
+                         validate='many_to_one')
+     #%% Damage state of each individual's workplace
+     df_workers = df_individual.merge(bld_hazard[['bldid', 'damage_level', 'ds']],
+                         how='left', left_on='indivfacid_2', right_on='bldid',
+                         suffixes=['_l', '_r'], validate='many_to_one')
+
+     #%% Damage state of each individual's school
+     df_students = df_individual.merge(bld_hazard[['bldid', 'damage_level', 'ds']],
+                         how='left', left_on='indivfacid_1', right_on='bldid',
+                         suffixes=['_l', '_r'], validate='many_to_one')
+     #%% Damage state of the hospital associated with each individual's household
+     df_indiv_hosp = df_individual.merge(df_hospitals[['hhid', 'ds', 'bldid']],
+                         how='left', on='hhid', validate='many_to_one')
+     #%%
+     # Get the ds of the household that the individual lives in
+     df_indiv_household = df_individual[['hhid', 'individ']].merge(df_household_bld[['hhid', 'ds']], on='hhid')
+
+     # Combine the household/workplace/school/hospital damage states per individual
+     df_displaced_indiv = df_indiv_hosp.rename(columns={'ds': 'ds_hospital'})\
+         .merge(df_workers[['individ', 'ds']].rename(columns={'ds': 'ds_workplace'}), on='individ', how='inner')\
+         .merge(df_students[['individ', 'ds']].rename(columns={'ds': 'ds_school'}), on='individ', how='inner')\
+         .merge(df_indiv_household[['individ', 'ds']].rename(columns={'ds': 'ds_household'}), on='individ', how='left')
+     #%%
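+     # An impact is counted for a metric only when the relevant damage state is
+     # strictly greater than that metric's threshold; the policies below raise
+     # individual thresholds so that fewer buildings/households/individuals count.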
+     if hazard_type == HAZARD_EARTHQUAKE:
+         # Effect of policies on thresholds
+         # First set every metric to the global threshold
+         thresholds = {f'metric{i}': threshold for i in range(8)}
+     else:
+         # Default thresholds for flood and debris
+         # For flood there are only two states (0 or 1), so the threshold is 0.
+         thresholds = {f'metric{i}': 0 for i in range(8)}
+
+     # Policy-6 is valid for all three hazard types
+     # Policy-6: Compulsory content insurance for schools and hospitals
+     # increases the thresholds for loss of education/health access
+     # from slight to moderate in all hazard types
+     if 6 in policies and thresholds['metric3'] == DS_NO:
+         thresholds['metric3'] = DS_SLIGHT
+     if 6 in policies and thresholds['metric2'] == DS_NO:
+         thresholds['metric2'] = DS_SLIGHT
+
+     if hazard_type == HAZARD_EARTHQUAKE:
+         # Policy-1: Loans for reconstruction for minor to moderate damages
+         # Changes: damage state thresholds for “displacement”
+         # Increase thresholds from “slight to moderate” as fewer people will be displaced.
+         if 1 in policies and thresholds['metric7'] == DS_NO:
+             thresholds['metric7'] = DS_SLIGHT
+
+         # Policy-3: Cat-bond agreement for education and health facilities
+         # Changes: damage state thresholds for “loss of access to hospitals” and “loss of access to schools”
+         # Increase thresholds from “slight to moderate” as fewer people will be displaced.
+         if 3 in policies and thresholds['metric3'] == DS_NO:
+             thresholds['metric3'] = DS_SLIGHT
+         if 3 in policies and thresholds['metric2'] == DS_NO:
+             thresholds['metric2'] = DS_SLIGHT
+
+         # Policy-2: Knowledge sharing about DRR in public and private schools
+         # Changes: damage state thresholds for “loss of school access”
+         # Increase the loss-of-school-access threshold beyond the current scale,
+         # so that this impact is downgraded to zero.
+         if 2 in policies:
+             thresholds['metric2'] = DS_COLLAPSED
+
+     if hazard_type == HAZARD_FLOOD:
+         # Policy-4: Repair loan assistance for flooding
+         # There are only two states (0/1), so if this policy is effective,
+         # raise the threshold to 1, meaning population displacement is solved.
+         if 4 in policies:
+             thresholds['metric6'] = 1
+
+     if hazard_type == HAZARD_FLOOD or hazard_type == HAZARD_DEBRIS:
+         # Policy-5: Technical assistance for debris removal in education facilities
+         # Loss of education is solved via this policy; for both flood and debris
+         # the loss-of-education metric is fixed.
+         if 5 in policies:
+             thresholds['metric2'] = 1
+
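+     # Each metric below follows the same pattern: filter rows whose damage state
+     # exceeds the metric's threshold, group by building, count (or sum nind),
+     # then merge the counts back onto bld_hazard and fill missing values with 0.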
+     #%% metric 1: number of unemployed workers in each building
+     df_workers_per_building = df_workers[df_workers['ds'] > thresholds['metric1']].groupby('bldid', as_index=False).agg({'individ': 'count'})
+     df_metric1 = bld_hazard.merge(df_workers_per_building, how='left', on='bldid')[['bldid', 'individ']]
+     df_metric1.rename(columns={'individ': 'metric1'}, inplace=True)
+     df_metric1['metric1'] = df_metric1['metric1'].fillna(0).astype(int)
+
+     #%% metric 2: number of students in each building with no access to schools
+     df_students_per_building = df_students[df_students['ds'] > thresholds['metric2']].groupby('bldid', as_index=False).agg({'individ': 'count'})
+     df_metric2 = bld_hazard.merge(df_students_per_building, how='left', on='bldid')[['bldid', 'individ']]
+     df_metric2.rename(columns={'individ': 'metric2'}, inplace=True)
+     df_metric2['metric2'] = df_metric2['metric2'].fillna(0).astype(int)
+
+     #%% metric 3: number of households in each building with no access to hospitals
+     df_hospitals_per_household = df_hospitals[df_hospitals['ds'] > thresholds['metric3']].groupby('bldid', as_index=False).agg({'hhid': 'count'})
+     df_metric3 = bld_hazard.merge(df_hospitals_per_household, how='left', on='bldid')[['bldid', 'hhid']]
+     df_metric3.rename(columns={'hhid': 'metric3'}, inplace=True)
+     df_metric3['metric3'] = df_metric3['metric3'].fillna(0).astype(int)
+
+     #%% metric 4: number of individuals in each building with no access to hospitals
+     df_hospitals_per_individual = df_hospitals[df_hospitals['ds'] > thresholds['metric4']].groupby('bldid', as_index=False).agg({'nind': 'sum'})
+     df_metric4 = bld_hazard.merge(df_hospitals_per_individual, how='left', on='bldid')[['bldid', 'nind']]
+     df_metric4.rename(columns={'nind': 'metric4'}, inplace=True)
+     df_metric4['metric4'] = df_metric4['metric4'].fillna(0).astype(int)
+
+     #%% metric 5: number of damaged households in each building
+     df_homeless_households = df_household_bld[df_household_bld['ds'] > thresholds['metric5']].groupby('bldid', as_index=False).agg({'hhid': 'count'})
+     df_metric5 = bld_hazard.merge(df_homeless_households, how='left', on='bldid')[['bldid', 'hhid']]
+     df_metric5.rename(columns={'hhid': 'metric5'}, inplace=True)
+     df_metric5['metric5'] = df_metric5['metric5'].fillna(0).astype(int)
+
+     #%% metric 6: number of homeless individuals in each building
+     df_homeless_individuals = df_household_bld[df_household_bld['ds'] > thresholds['metric6']].groupby('bldid', as_index=False).agg({'nind': 'sum'})
+     df_metric6 = bld_hazard.merge(df_homeless_individuals, how='left', on='bldid')[['bldid', 'nind']]
+     df_metric6.rename(columns={'nind': 'metric6'}, inplace=True)
+     df_metric6['metric6'] = df_metric6['metric6'].fillna(0).astype(int)
+
+     #%% metric 7: the number of displaced individuals in each building
+     # An individual is displaced if at least one of the conditions below holds.
+     df_disp_per_bld = df_displaced_indiv[(df_displaced_indiv['ds_household'] > thresholds['metric6']) |
+                         (df_displaced_indiv['ds_school'] > thresholds['metric7']) |
+                         (df_displaced_indiv['ds_workplace'] > thresholds['metric7']) |
+                         (df_displaced_indiv['ds_hospital'] > thresholds['metric7'])].groupby('bldid', as_index=False).agg({'individ': 'count'})
+     df_metric7 = bld_hazard.merge(df_disp_per_bld, how='left', on='bldid')[['bldid', 'individ']]
+     df_metric7.rename(columns={'individ': 'metric7'}, inplace=True)
+     df_metric7['metric7'] = df_metric7['metric7'].fillna(0).astype(int)
+
+     df_metrics = {'metric1': df_metric1,
+                   'metric2': df_metric2,
+                   'metric3': df_metric3,
+                   'metric4': df_metric4,
+                   'metric5': df_metric5,
+                   'metric6': df_metric6,
+                   'metric7': df_metric7}
+
+     #%%
+     number_of_workers = len(df_workers.loc[df_workers['indivfacid_2'] > 0])
+     print('number of workers', number_of_workers)
+
+     number_of_students = len(df_students.loc[df_students['indivfacid_1'] > 0])
+     print('number of students', number_of_students)
+
+     number_of_households = len(df_household)
+     print('number of households', number_of_households)
+
+     number_of_individuals = len(df_individual)
+     print('number of individuals', number_of_individuals)
+     metrics = {"metric1": {"desc": "Number of workers unemployed", "value": 0, "max_value": number_of_individuals},
+                "metric2": {"desc": "Number of children with no access to education", "value": 0, "max_value": number_of_individuals},
+                "metric3": {"desc": "Number of households with no access to hospital", "value": 0, "max_value": number_of_individuals},
+                "metric4": {"desc": "Number of individuals with no access to hospital", "value": 0, "max_value": number_of_individuals},
+                "metric5": {"desc": "Number of homeless households", "value": 0, "max_value": number_of_individuals},
+                "metric6": {"desc": "Number of homeless individuals", "value": 0, "max_value": number_of_individuals},
+                "metric7": {"desc": "Population displacement", "value": 0, "max_value": number_of_individuals}}
+     metrics["metric1"]["value"] = int(df_metric1['metric1'].sum())
+     metrics["metric2"]["value"] = int(df_metric2['metric2'].sum())
+     metrics["metric3"]["value"] = int(df_metric3['metric3'].sum())
+     metrics["metric4"]["value"] = int(df_metric4['metric4'].sum())
+     metrics["metric5"]["value"] = int(df_metric5['metric5'].sum())
+     metrics["metric6"]["value"] = int(df_metric6['metric6'].sum())
+     metrics["metric7"]["value"] = int(df_metric7['metric7'].sum())
+
+     # Scale each metric by the hazard-specific weight
+     for key in metrics:
+         metrics[key]["value"] = int(metrics[key]["value"] * weights[hazard_type][key])
+
+     # Return the weighted metrics
+     return metrics
+
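+ #%%
+ # Example usage (a minimal sketch; the intensity file name, scenario label and
+ # policy list below are hypothetical):
+ #
+ # gdf_intensity = gpd.read_file("intensity_tv50_scenario1.geojson")
+ # metrics = run_engine(HAZARD_EARTHQUAKE, "scenario1", gdf_intensity, policies=[1, 6])
+ # print(json.dumps(metrics, indent=2))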