Update imports_utils.py
imports_utils.py CHANGED (+52 -56)
@@ -17,10 +17,6 @@ import json
 from notion_client import Client
 import os
 
-notionToken = os.getenv('notionToken')
-notion = Client(auth=notionToken)
-speckleToken = os.getenv('speckleToken')
-
 from config import landuseColumnName
 from config import subdomainColumnName
 from config import sqmPerEmployeeColumnName
@@ -33,6 +29,14 @@ from config import landuseDatabaseId , streamId, dmBranchName, dmCommitId, luBr
 import speckle_utils
 import data_utils
 
+
+notionToken = os.getenv('notionToken')
+notion = Client(auth=notionToken)
+speckleToken = os.getenv('speckleToken')
+
+
+
+
 # ----------------------------------------------------------------------------------
 
 
@@ -127,7 +131,7 @@ def get_page_by_id(notion_db_pages, page_id):
         if pg["id"] == page_id:
             return pg
 
-
+"""
 
 def fetchDomainMapper (luAttributePages):
 
@@ -175,7 +179,7 @@ def fetchSubdomainMapper (livabilityAttributePages):
     #domains_unique = list(set(domains_unique))
     return attribute_mapper
 
-
+"""
 # --------------------------------------------------------------------------------------------- #
 
 
@@ -266,59 +270,51 @@ def getDataFromSpeckle(
 
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-            continue
-
-        matrix_data = getattr(matrix, matrix_name)
-        originUUID = matrix_data["@originUUID"]
-        destinationUUID = matrix_data["@destinationUUID"]
-
-        processed_rows = []
-        for chunk in matrix_data["@chunks"]:
-            for row in chunk["@rows"]:
-                processed_rows.append(row["@row"])
-
-        matrix_array = np.array(processed_rows)
-        matrix_df = pd.DataFrame(matrix_array, index=originUUID, columns=destinationUUID)
-        matrices[matrix_name] = matrix_df
-    except KeyError:
-        data_part = streamObj["@Data"].__dict__
-        print(data_part.keys())
-
-        for k, v in data_part.items():
-            if "matrix" in k:
-                matrix_name = k
-                matrix_data = v
-                originUUID = matrix_data["@originUUID"]
-                destinationUUID = matrix_data["@destinationUUID"]
-
-                processed_rows = []
-                for chunk in matrix_data["@chunks"]:
-                    for row in chunk["@rows"]:
-                        processed_rows.append(row["@row"])
-
-                matrix_array = np.array(processed_rows)
-                matrix_df = pd.DataFrame(matrix_array, index=originUUID, columns=destinationUUID)
-                matrices[matrix_name] = matrix_df
-
-    return matrices
+def getDataFromNotion(
+    notion,
+    notionToken,
+    landuseDatabaseID,
+    subdomainDatabaseID,
+    landuseColumnName="LANDUSE",
+    subdomainColumnName="SUBDOMAIN_LIVABILITY",
+    sqmPerEmployeeColumnName="SQM PER EMPL",
+    thresholdsColumnName="MANHATTAN THRESHOLD",
+    maxPointsColumnName="LIVABILITY MAX POINT",
+    domainColumnName="DOMAIN_LIVABILITY",
+):
+
+    landuse_attributes = fetch_all_database_pages(notion, landuseDatabaseID)
+    livability_attributes = fetch_all_database_pages(notion, subdomainDatabaseID)
+
+    landuseMapperDict = {}
+    livabilityMapperDict = {}
+
+    for page in landuse_attributes:
+        value_landuse = get_property_value(page, landuseColumnName)
+        value_subdomain = get_property_value(page, subdomainColumnName)
+        origin = "false" if not get_property_value(page, "is_origin_mask") else get_property_value(page, "is_origin_mask")
+        if value_subdomain and value_landuse:
+            landuseMapperDict[value_landuse] = {
+                'subdomain livability': value_subdomain,
+                'is origin': origin
+            }
+
+    for page in livability_attributes:
+        subdomain = get_property_value(page, subdomainColumnName)
+        sqm_per_employee = get_property_value(page, sqmPerEmployeeColumnName)
+        thresholds = get_property_value(page, thresholdsColumnName)
+        max_points = get_property_value(page, maxPointsColumnName)
+        domain = get_property_value(page, domainColumnName)
+        if thresholds:
+            livabilityMapperDict[subdomain] = {
+                'sqmPerEmpl': sqm_per_employee if sqm_per_employee != "" else 0,
+                'thresholds': thresholds,
+                'max_points': max_points,
+                'domain': [domain if domain != "" else 0]
+            }
+
+    return landuseMapperDict, livabilityMapperDict
 
 
 
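Note on the relocated client setup: the commit moves the module-level notionToken / speckleToken reads and the Client construction below the imports. A minimal sketch of that setup with an explicit fail-fast guard (the guard is illustrative and not part of this commit; notion_client.Client takes the integration token via auth=):

import os
from notion_client import Client

# Read the tokens that imports_utils.py expects as environment variables.
notionToken = os.getenv("notionToken")
speckleToken = os.getenv("speckleToken")

# Illustrative guard (not in the commit): fail early instead of passing None to the clients.
if not notionToken or not speckleToken:
    raise RuntimeError("Both 'notionToken' and 'speckleToken' environment variables must be set.")

# Notion client; the Speckle token is consumed later by speckle_utils.
notion = Client(auth=notionToken)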
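For reference, the block removed from getDataFromSpeckle converted Speckle matrix objects into pandas DataFrames. A standalone sketch of that conversion based on the deleted lines (the helper name chunks_to_dataframe is hypothetical; the "@originUUID", "@destinationUUID", "@chunks", "@rows" and "@row" keys come from the removed code):

import numpy as np
import pandas as pd

def chunks_to_dataframe(matrix_data):
    # Origin/destination UUID lists become the DataFrame index and columns.
    originUUID = matrix_data["@originUUID"]
    destinationUUID = matrix_data["@destinationUUID"]

    # Speckle stores the matrix rows in chunks; flatten them into one list of rows.
    processed_rows = []
    for chunk in matrix_data["@chunks"]:
        for row in chunk["@rows"]:
            processed_rows.append(row["@row"])

    matrix_array = np.array(processed_rows)
    return pd.DataFrame(matrix_array, index=originUUID, columns=destinationUUID)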
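The new getDataFromNotion packages the Notion lookups that fetchDomainMapper and fetchSubdomainMapper (now wrapped in a triple-quoted string) used to perform, returning the land-use and livability mapper dictionaries in one call. A usage sketch, assuming subdomainDatabaseId exists in config alongside landuseDatabaseId (only landuseDatabaseId is visible in the hunk context) and that fetch_all_database_pages and get_property_value are the helpers defined earlier in imports_utils.py:

import os
from notion_client import Client
from config import landuseDatabaseId, subdomainDatabaseId  # subdomainDatabaseId is assumed here
from imports_utils import getDataFromNotion

notionToken = os.getenv("notionToken")
notion = Client(auth=notionToken)

# Build the two lookup dictionaries; the column-name keyword arguments keep
# the defaults declared in the new signature ("LANDUSE", "SUBDOMAIN_LIVABILITY", ...).
landuseMapperDict, livabilityMapperDict = getDataFromNotion(
    notion,
    notionToken,
    landuseDatabaseID=landuseDatabaseId,
    subdomainDatabaseID=subdomainDatabaseId,
)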