Commit b9729f2
Parent: 851525f

Upload to_mongo_4326reproj.py

Files changed: to_mongo_4326reproj.py (+231, -0)

to_mongo_4326reproj.py (ADDED)
@@ -0,0 +1,231 @@
import asyncio
import json
import os
import random
import uuid
from collections.abc import MutableMapping
from glob import glob
from typing import AsyncGenerator

import aiofiles
import nest_asyncio
from motor.motor_asyncio import AsyncIOMotorClient
from pymongo.server_api import ServerApi
from pyproj import CRS, Proj, Transformer
from pyproj.exceptions import CRSError
from shapely.errors import GEOSException
from shapely.geometry import Polygon

nest_asyncio.apply()


def create_uuid(input_str: str) -> str:
    # Consistent random UUIDs based on input string
    # https://nathanielknight.ca/articles/consistent_random_uuids_in_python.html
    random.seed(input_str)
    return str(
        uuid.UUID(bytes=bytes(random.getrandbits(8) for _ in range(16)), version=4),
    )
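# Note: seeding the module-level random generator is a deliberate side effect
# here, so the same input string always maps to the same UUID. For example
# (hypothetical URL), create_uuid("https://example.com/arcgis") returns the
# identical string on every run, giving stable document ids across reloads.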


async def reproject_to_4326_and_convert_to_geojson(
    bbox: dict,
) -> dict:
    """
    Reprojects a bounding box from its source spatial reference (typically
    EPSG:102100 / 3857) to EPSG:4326 and converts it to GeoJSON.

    Parameters:
        bbox (dict): A dictionary containing the keys 'xmin', 'ymin', 'xmax', 'ymax', and 'spatialReference'.

    Returns:
        dict: A GeoJSON object representing the reprojected bounding box.
    """

    # Define the source and destination coordinate systems
    def get_src_proj() -> Proj:
        sr = bbox["spatialReference"]
        for wkid in ["latestWkid", "wkid"]:
            if wkid in sr:
                for authority in ["EPSG", "ESRI"]:
                    try:
                        return Proj(f"{authority}:{sr[wkid]}")
                    except CRSError:
                        pass
        if "wkt" in sr:
            return Proj(CRS.from_wkt(sr["wkt"]))
        raise ValueError("no spatialReference found")

    src_proj = get_src_proj()
    dst_proj = Proj("epsg:4326")
    transformer = Transformer.from_proj(src_proj, dst_proj)

    # Extract coordinates
    xmin, ymin, xmax, ymax = bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]

    # Transform the coordinates
    xmin_trans, ymin_trans = transformer.transform(xmin, ymin)
    xmax_trans, ymax_trans = transformer.transform(xmax, ymax)

    # Create a polygon from the transformed coordinates
    # Ensure that the polygon is closed by repeating the first point at the end
    polygon = Polygon(
        [
            (xmin_trans, ymin_trans),
            (xmax_trans, ymin_trans),
            (xmax_trans, ymax_trans),
            (xmin_trans, ymax_trans),
            (xmin_trans, ymin_trans),
        ],
    )
    # Convert the polygon to GeoJSON format
    geojson = {
        "type": "Polygon",
        "coordinates": [list(polygon.exterior.coords)],
    }

    return geojson
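# A typical call (all values hypothetical) looks like:
#   await reproject_to_4326_and_convert_to_geojson(
#       {"xmin": -13.0e6, "ymin": 4.0e6, "xmax": -12.9e6, "ymax": 4.1e6,
#        "spatialReference": {"wkid": 102100, "latestWkid": 3857}},
#   )
# and returns a GeoJSON Polygon whose single ring is the closed sequence of
# the four transformed corners.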


async def process_nested_entries_v2(
    input_data: dict,
) -> dict:
    required_keys = {"xmin", "ymin", "xmax", "ymax", "spatialReference"}

    if not isinstance(input_data, (dict, list)):
        raise ValueError("Input data must be a dictionary or list")

    stack = [(None, input_data, None)]  # (parent, current, key/index)

    while stack:
        parent, current, key = stack.pop()

        if (
            isinstance(current, dict)
            and required_keys.issubset(current.keys())
            and not any(
                str(current.get(coord, "nan")).lower() == "nan"
                for coord in ["xmin", "ymin", "xmax", "ymax"]
            )
        ):
            try:
                result = await reproject_to_4326_and_convert_to_geojson(current)
            except (ValueError, GEOSException, CRSError) as e:
                print(current, e)
                result = current  # Optionally keep the original item on error
            if parent is not None:
                if isinstance(parent, MutableMapping):
                    parent[key] = result
                else:  # List
                    parent[key] = result
        elif isinstance(current, MutableMapping):
            for k, v in current.items():
                stack.append((current, v, k))  # type: ignore
        elif isinstance(current, list):
            for i, item in enumerate(current):
                stack.append((current, item, i))

    return input_data
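# The traversal above is an explicit-stack depth-first walk rather than
# recursion, so deeply nested extent dicts cannot hit the recursion limit.
# Matching bbox dicts are swapped for their GeoJSON form in place via the
# parent container, which is why the function can simply return the
# (mutated) input_data.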


def process_metadata(metadata: dict, additional_fields: dict = {}) -> dict:
    # Process metadata and add any additional fields
    processed_md = {
        k: v for k, v in metadata.items() if k not in ["folders", "services", "layers"]
    }
    processed_md.update(additional_fields)
    processed_md["original_id"] = processed_md.get("id", None)
    processed_md["id"] = processed_md["hash"]
    del processed_md["hash"]

    return processed_md
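# Example (hypothetical values): {"id": 0, "hash": "<uuid>", "name": "Parcels"}
# comes back as {"original_id": 0, "id": "<uuid>", "name": "Parcels"}, so the
# deterministic URL-based UUID becomes the Mongo document id while the
# source system's id is preserved.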


def get_type(layer: dict) -> str:
    return layer.get("type", "unknown").lower().replace(" ", "_").strip()


async def read_data(jsonfiles: list[str]) -> AsyncGenerator:
    # Async generator to yield file content one by one
    for f in jsonfiles:
        async with aiofiles.open(f, "r") as infile:
            content = await infile.read()
            yield json.loads(content)


async def process_server(server, services_collection, db):
    server_services = server.pop("services")
    server = await process_nested_entries_v2(server)
    server_md = process_metadata(
        server["metadata"],
        {
            "url": server["metadata"]["url"],
            "hash": create_uuid(server["metadata"]["url"]),
        },
    )

    services_to_insert = []
    layer_insert_tasks = []

    for service in server_services:
        service_layers = service["metadata"].pop("layers")
        service = await process_nested_entries_v2(service)

        service_md = process_metadata(
            service["metadata"],
            {
                "url": service["url"],
                "hash": create_uuid(service["url"]),
                "server": server_md,
            },
        )
        service_md["layers"] = []

        layer_dict = {}

        for layer in service_layers:
            layer = await process_nested_entries_v2(layer)
            layer_md = process_metadata(
                layer,
                {
                    "url": layer["url"],
                    "hash": create_uuid(layer["url"]),
                    "service": service_md["id"],
                },
            )
            layer_type = get_type(layer)
            service_md["layers"].append(
                dict(type=layer_type, layer_id=layer_md["id"]),
            )
            if layer_type not in layer_dict:
                layer_dict[layer_type] = []
            layer_dict[layer_type].append(layer_md)

        services_to_insert.append(service_md)

        for layer_type, layers in layer_dict.items():
            if layers:
                layer_insert_tasks.append(db[layer_type].insert_many(layers))

    # Insert services in batch
    if services_to_insert:
        await services_collection.insert_many(services_to_insert)

    # Wait for all layer insert tasks to complete
    await asyncio.gather(*layer_insert_tasks)
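# Expected input shape, as inferred from the accesses above (keys abridged):
#   server  = {"metadata": {"url": ...}, "services": [service, ...]}
#   service = {"url": ..., "metadata": {"layers": [layer, ...], ...}}
#   layer   = {"url": ..., "type": ..., ...}
# Layers are grouped by normalized type and written to one Mongo collection
# per type; those insert_many coroutines are awaited together at the end.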


async def main() -> None:
    output_dir = os.path.abspath("./output_tryagain")
    jsonfiles = glob(os.path.join(output_dir, "*.json"))

    client = AsyncIOMotorClient(
        r"mongodb://root:example@mongo:27017/",
        server_api=ServerApi("1"),
    )

    db = client["govgis-nov2023"]
    services_collection = db.services

    # Process each server sequentially as its file is read
    async for server in read_data(jsonfiles):
        await process_server(server, services_collection, db)
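Note that the file defines main() but never invokes it; together with the
nest_asyncio.apply() call at the top, this suggests it is driven from an
already-running event loop such as a notebook. A minimal standalone entry
point (an assumption, not part of the commit) would be:

if __name__ == "__main__":
    # Hypothetical entry point: run the loader under a fresh event loop.
    asyncio.run(main())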