File size: 8,063 Bytes
b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 4f59df9 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 4f59df9 b9729f2 4f59df9 b9729f2 4f59df9 b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e 4f59df9 b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 50e151e b9729f2 4f59df9 50e151e b9729f2 50e151e |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 |
import asyncio
import json
import os
import random
import uuid
from glob import glob
from typing import AsyncGenerator, Optional
import aiofiles
import nest_asyncio
from motor.motor_asyncio import AsyncIOMotorClient
from motor.motor_tornado import MotorCollection
from pymongo.server_api import ServerApi
from pyproj import CRS, Proj, Transformer
from pyproj.exceptions import CRSError
from shapely.errors import GEOSException
from shapely.geometry import Polygon
nest_asyncio.apply()
async def safe_insert_many(collection: MotorCollection, documents: list[dict]) -> None:
    """Bulk-insert documents, bisecting the batch on encoding overflow.

    Motor/PyMongo raises OverflowError when a value in the batch cannot be
    BSON-encoded; recursively splitting the batch isolates the offending
    document, which is then reported via ValueError.
    """
    if not documents:
        return
    try:
        await collection.insert_many(documents)
    except OverflowError:
        if len(documents) == 1:
            # Splitting cannot help a single unencodable/oversized document.
            raise ValueError(
                "A document exceeds the maximum BSON size or dtype conflict.",
            )
        # Bisect and retry each half independently.
        half = len(documents) // 2
        await safe_insert_many(collection, documents[:half])
        await safe_insert_many(collection, documents[half:])
def create_uuid(input_str: str) -> str:
    """Return a deterministic UUID4-formatted string derived from *input_str*.

    The same input always produces the same UUID.
    https://nathanielknight.ca/articles/consistent_random_uuids_in_python.html

    Fix: the original seeded the module-global ``random`` state, silently
    perturbing every other user of ``random`` in the process. A private
    ``random.Random`` instance uses the same Mersenne Twister seeding, so
    outputs are identical, without the global side effect.
    """
    rng = random.Random(input_str)
    return str(
        uuid.UUID(bytes=bytes(rng.getrandbits(8) for _ in range(16)), version=4),
    )
def validate_coordinate(lon: float, lat: float) -> tuple:
    """Clamp a longitude/latitude pair into the valid WGS84 range.

    Parameters:
        lon (float): Longitude value.
        lat (float): Latitude value.

    Returns:
        tuple: The (longitude, latitude) pair, with longitude clamped to
        [-180, 180] and latitude clamped to [-90, 90].
    """
    clamped_lon = min(max(lon, -180), 180)
    clamped_lat = min(max(lat, -90), 90)
    return clamped_lon, clamped_lat
def reproject_to_4326_and_convert_to_geojson(
    bbox: dict,
) -> dict:
    """Reproject a bounding box to EPSG:4326 and return it as GeoJSON.

    Parameters:
        bbox (dict): Must contain 'xmin', 'ymin', 'xmax', 'ymax' and a
            'spatialReference' dict (wkid/latestWkid code or wkt string).

    Returns:
        dict: A GeoJSON Polygon covering the reprojected extent.

    Raises:
        ValueError: If no usable spatial reference is found, or if the
            transformed extent is degenerate (a point or a line).
    """

    def resolve_source_projection() -> Proj:
        # Prefer latestWkid over wkid; for each code try EPSG before ESRI.
        sr = bbox["spatialReference"]
        for key in ("latestWkid", "wkid"):
            code = sr.get(key)
            if code is None:
                continue
            for authority in ("EPSG", "ESRI"):
                try:
                    return Proj(f"{authority}:{code}")
                except CRSError:
                    continue
        # Fall back to a raw WKT definition when no code resolves.
        wkt = sr.get("wkt")
        if wkt is not None:
            return Proj(CRS.from_wkt(wkt))
        raise ValueError("no spatialReference found")

    transformer = Transformer.from_proj(
        resolve_source_projection(),
        Proj("epsg:4326"),
        always_xy=True,
    )
    x0, y0 = validate_coordinate(
        *transformer.transform(bbox["xmin"], bbox["ymin"]),
    )
    x1, y1 = validate_coordinate(
        *transformer.transform(bbox["xmax"], bbox["ymax"]),
    )
    # Closed ring: the first vertex is repeated at the end.
    ring = [(x0, y0), (x1, y0), (x1, y1), (x0, y1), (x0, y0)]
    if len(set(ring)) < 3:
        # Fewer than three distinct vertices cannot form a polygon.
        raise ValueError("invalid extent")
    polygon = Polygon(ring)
    return {
        "type": "Polygon",
        "coordinates": [list(polygon.exterior.coords)],
    }
# Whitelist of metadata keys retained by process_metadata(); any key not
# listed here is dropped before the document is written to MongoDB.
keepkeys = {
    "id",
    "associatedlayers",
    "domains",
    "copyrighttext",
    "description",
    "documentinfo",
    "fields",
    "mapname",
    "name",
    "parentlayer",
    "servicedescription",
    "subLayers",
    "tables",
    "version",
    "currentversion",
    "geometrytype",
    "extent",
    "type",
    "url",
    "server",
    "layers",
    "service",
}
async def process_metadata(
    metadata: dict,
    additional_fields: Optional[dict] = None,
) -> dict:
    """Normalize a raw metadata dict into a slim, keepkeys-filtered document.

    Merges *additional_fields* over the metadata (minus the bulky
    'folders'/'services'/'layers' entries), promotes the precomputed
    'hash' to 'id', drops keys outside ``keepkeys``, and reprojects the
    'extent' to GeoJSON when it looks usable (dropping it otherwise).
    """
    extra = additional_fields or {}
    md = {
        key: value
        for key, value in metadata.items()
        if key not in ("folders", "services", "layers")
    }
    md.update(extra)
    # Preserve the source id, then replace it with the content hash.
    # NOTE(review): "original_id" is not in keepkeys, so the filter below
    # discards it — confirm whether it should be retained.
    md["original_id"] = md.get("id")
    md["id"] = md.pop("hash")
    md = {key: value for key, value in md.items() if key in keepkeys}
    if "extent" in md:
        extent = md.get("extent")
        usable = (
            extent is not None
            and extent.get("spatialReference") is not None
            and all(
                str(value).lower() not in {"nan", "none", "null"}
                for value in extent.values()
            )
        )
        if usable:
            try:
                md["extent"] = reproject_to_4326_and_convert_to_geojson(
                    extent,
                )
            except (ValueError, GEOSException, CRSError):
                # Unprojectable extents are dropped rather than failing the doc.
                del md["extent"]
        else:
            del md["extent"]
    return md
def get_type(layer: dict) -> str:
    """Return the layer's normalized type slug (e.g. "feature_layer").

    Lowercases the 'type' value, trims surrounding whitespace, and replaces
    interior spaces with underscores; defaults to "unknown" when absent.

    Fix: the original called .strip() AFTER .replace(" ", "_"), so
    leading/trailing spaces had already become underscores and the strip
    was a no-op (" Feature Layer " -> "_feature_layer_"). Stripping first
    yields the intended slug.
    """
    return layer.get("type", "unknown").lower().strip().replace(" ", "_")
async def read_data(jsonfiles: list[str]) -> AsyncGenerator[dict, None]:
    """Asynchronously yield the parsed JSON content of each file in turn."""
    for path in jsonfiles:
        async with aiofiles.open(path, "r") as handle:
            yield json.loads(await handle.read())
# Flattens one scraped server record into layer documents.
async def process_server(server: dict, layers_collection: MotorCollection):
    """Build server/service metadata, process all layers concurrently, and insert them."""
    services = server.pop("services")
    server_url = server["metadata"]["url"]
    server_md = await process_metadata(
        server["metadata"],
        {
            "url": server_url,
            "hash": create_uuid(server_url),
        },
    )
    tasks = []
    for service in services:
        service_url = service["url"]
        # Each service document embeds its parent server's metadata.
        service_md = await process_metadata(
            service["metadata"],
            {
                "url": service_url,
                "hash": create_uuid(service_url),
                "server": server_md,
            },
        )
        for layer in service["metadata"].pop("layers"):
            tasks.append(asyncio.create_task(process_layer(layer, service_md)))
    layer_docs = await asyncio.gather(*tasks)
    await safe_insert_many(layers_collection, layer_docs)
# Builds a single layer document.
async def process_layer(layer: dict, service: dict) -> dict:
    """Return the layer's metadata document with the parent service embedded."""
    layer_url = layer["url"]
    return await process_metadata(
        layer,
        {
            "url": layer_url,
            "hash": create_uuid(layer_url),
            # Shallow copy so later mutation of the layer doc can't alias the service dict.
            "service": dict(service),
        },
    )
async def main() -> None:
    """Load every scraped JSON file and insert its layers into MongoDB."""
    data_dir = os.path.abspath("/home/appuser/restgdf_api/lab/output_tryagain")
    json_paths = glob(os.path.join(data_dir, "*.json"))
    mongo = AsyncIOMotorClient(
        r"mongodb://root:example@mongo:27017/",
        server_api=ServerApi("1"),
    )
    layers_collection = mongo["govgis-nov2023-slim-spatial"].layers
    # Servers are consumed sequentially; layers within a server run concurrently.
    async for server in read_data(json_paths):
        await process_server(server, layers_collection)


if __name__ == "__main__":
    asyncio.run(main())
|