[refactor] move core instance segmentation (segment anything) functionality to samgis_core, bump to version 1.2.0
- .idea/vcs.xml +1 -0
- docs/Changelog.md +4 -0
- docs/conf.py +2 -2
- docs/modules.rst +1 -0
- docs/samgis.prediction_api.rst +0 -8
- docs/samgis.utilities.rst +0 -24
- docs/specs/openapi.yaml +3 -2
- poetry.lock +82 -62
- pyproject.toml +6 -5
- requirements.txt +0 -18
- requirements_dev.txt +0 -17
- requirements_dockerfile.txt +0 -14
- samgis/__init__.py +1 -1
- samgis/__version__.py +1 -1
- samgis/io/coordinates_pixel_conversion.py +3 -1
- samgis/io/geo_helpers.py +1 -1
- samgis/io/raster_helpers.py +4 -4
- samgis/io/tms2geotiff.py +4 -1
- samgis/io/wrappers_helpers.py +1 -1
- samgis/prediction_api/predictors.py +7 -45
- samgis/prediction_api/sam_onnx.py +0 -246
- samgis/utilities/constants.py +0 -3
- samgis/utilities/fastapi_logger.py +0 -24
- samgis/utilities/serialize.py +0 -84
- samgis/utilities/type_hints.py +0 -13
- samgis/utilities/utilities.py +0 -92
- tests/io/test_coordinates_pixel_conversion.py +1 -28
- tests/io/test_geo_helpers.py +0 -1
- tests/io/test_lambda_helpers.py +0 -133
- tests/io/test_raster_helpers.py +2 -2
- tests/io/test_tms2geotiff.py +42 -35
- tests/io/test_wrappers_helpers.py +136 -0
- tests/prediction_api/test_sam_onnx.py +0 -90
- tests/test_fastapi_app.py +20 -16
- tests/utilities/__init__.py +0 -0
- tests/utilities/test_serialize.py +0 -96
- wrappers/fastapi_wrapper.py +8 -4
.idea/vcs.xml
CHANGED
@@ -2,5 +2,6 @@
 <project version="4">
   <component name="VcsDirectoryMappings">
     <mapping directory="" vcs="Git" />
+    <mapping directory="$PROJECT_DIR$/../samgis_core" vcs="Git" />
   </component>
 </project>
docs/Changelog.md
CHANGED
@@ -1,5 +1,9 @@
 # Changelog

+## Version 1.2.0
+- code refactor to separate core functionality (instance segmentation) from other code
+- updated test coverage
+
 ## Version 1.1.0
 - Added this changelog
 - specific backend branch code uses terrain providers like nextzen and MapBox Terrain-RGB v1
docs/conf.py
CHANGED
@@ -9,9 +9,9 @@ import os
 import sys

 project = 'SamGIS'
-copyright = '2023-…'
+copyright = '2023-now, alessandro trinca tornidor'
 author = 'alessandro trinca tornidor'
-release = '1.1.0'
+release = '1.2.0'

 # -- General configuration ---------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
docs/modules.rst
CHANGED
@@ -5,3 +5,4 @@ samgis
    :maxdepth: 4

    samgis
+   samgis_core
docs/samgis.prediction_api.rst
CHANGED
@@ -12,14 +12,6 @@ samgis.prediction\_api.predictors module
    :undoc-members:
    :show-inheritance:

-samgis.prediction\_api.sam\_onnx module
----------------------------------------
-
-.. automodule:: samgis.prediction_api.sam_onnx
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
 Module contents
 ---------------

docs/samgis.utilities.rst
CHANGED
@@ -12,22 +12,6 @@ samgis.utilities.constants module
    :undoc-members:
    :show-inheritance:

-samgis.utilities.fastapi\_logger module
----------------------------------------
-
-.. automodule:: samgis.utilities.fastapi_logger
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-samgis.utilities.serialize module
----------------------------------
-
-.. automodule:: samgis.utilities.serialize
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
 samgis.utilities.type\_hints module
 -----------------------------------

@@ -36,14 +20,6 @@ samgis.utilities.type\_hints module
    :undoc-members:
    :show-inheritance:

-samgis.utilities.utilities module
----------------------------------
-
-.. automodule:: samgis.utilities.utilities
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
 Module contents
 ---------------

docs/specs/openapi.yaml
CHANGED
@@ -8,11 +8,12 @@ info:
     - [SamGIS documentation](https://docs.ml-trinca.tornidor.com)
     - [My blog](https://trinca.tornidor.com)
   contact:
-
+    name: alessandro's website
+    url: https://trinca.tornidor.com
   license:
     name: MIT License
     url: https://opensource.org/license/mit/
-  version: "1.1.0"
+  version: "1.2.0"
 servers:
   - url: https://localhost:8000/
 tags:
poetry.lock
CHANGED
@@ -435,63 +435,63 @@ test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"]

 [[package]]
 name = "coverage"
-version = "7.4.…"
+version = "7.4.1"
 description = "Code coverage measurement for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "coverage-7.4.…"},  # 52 coverage-7.4.x entries, truncated in the source
+    {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"},
+    {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"},
+    {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"},
+    {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"},
+    {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"},
+    {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"},
+    {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"},
+    {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"},
+    {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"},
+    {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"},
+    {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"},
+    {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"},
+    {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"},
+    {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"},
+    {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"},
+    {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"},
+    {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"},
+    {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"},
+    {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"},
+    {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"},
+    {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"},
+    {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"},
+    {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"},
+    {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"},
+    {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"},
+    {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"},
+    {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"},
+    {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"},
+    {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"},
+    {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"},
+    {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"},
+    {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"},
+    {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"},
+    {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"},
+    {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"},
+    {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"},
+    {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"},
+    {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"},
+    {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"},
+    {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"},
+    {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"},
+    {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"},
+    {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"},
+    {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"},
+    {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"},
+    {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"},
+    {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"},
+    {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"},
+    {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"},
+    {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"},
+    {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"},
+    {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"},
 ]

 [package.extras]
@@ -1568,13 +1568,13 @@ xmp = ["defusedxml"]

 [[package]]
 name = "pluggy"
-version = "1.…"
+version = "1.4.0"
 description = "plugin and hook calling mechanisms for python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pluggy-1.…"},
-    {file = "pluggy-1.…"},
+    {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
+    {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
 ]

 [package.extras]
@@ -1918,6 +1918,7 @@ files = [
     {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
     {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
     {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
     {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -2145,6 +2146,25 @@ files = [
     {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"},
 ]

+[[package]]
+name = "samgis-core"
+version = "1.0.0"
+description = "SamGIS CORE"
+optional = false
+python-versions = ">=3.11,<4.0"
+files = [
+    {file = "samgis_core-1.0.0-py3-none-any.whl", hash = "sha256:bc468492383ff3587d248f18123b60fa185a1e68ad5ccc18ed8b79f106636572"},
+    {file = "samgis_core-1.0.0.tar.gz", hash = "sha256:2f9cb142c7950f12e9ccd23d2ce2106ddbc280a95a9e32f0fda3147884d9e6bf"},
+]
+
+[package.dependencies]
+bson = ">=0.5.10,<0.6.0"
+loguru = ">=0.7.2,<0.8.0"
+numpy = ">=1.26.3,<2.0.0"
+onnxruntime = ">=1.16.3,<2.0.0"
+opencv-python-headless = ">=4.9.0.80,<5.0.0.0"
+pillow = ">=10.2.0,<11.0.0"
+
 [[package]]
 name = "setuptools"
 version = "69.0.3"
@@ -2359,22 +2379,22 @@ test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools

 [[package]]
 name = "sphinx-autodoc-typehints"
-version = "1.25.…"
+version = "1.25.3"
 description = "Type hints (PEP 484) support for the Sphinx autodoc extension"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "sphinx_autodoc_typehints-1.25.…"},
-    {file = "sphinx_autodoc_typehints-1.25.…"},
+    {file = "sphinx_autodoc_typehints-1.25.3-py3-none-any.whl", hash = "sha256:d3da7fa9a9761eff6ff09f8b1956ae3090a2d4f4ad54aebcade8e458d6340835"},
+    {file = "sphinx_autodoc_typehints-1.25.3.tar.gz", hash = "sha256:70db10b391acf4e772019765991d2de0ff30ec0899b9ba137706dc0b3c4835e0"},
 ]

 [package.dependencies]
 sphinx = ">=7.1.2"

 [package.extras]
-docs = ["furo (>=2023.…)"]
+docs = ["furo (>=2023.9.10)"]
 numpy = ["nptyping (>=2.5)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=…)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.8)"]

 [[package]]
 name = "sphinx-mdinclude"
@@ -2636,4 +2656,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "…"
+content-hash = "db415b27fa26250177185aa8c701f45176523e7d6f783096d9c6e46709032002"
pyproject.toml
CHANGED
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "samgis"
-version = "1.1.0"
+version = "1.2.0"
 description = "A backend for machine learning instance segmentation on geospatial data even without dedicated graphics cards."
 authors = ["alessandro trinca tornidor <[email protected]>"]
 license = "MIT license"
@@ -8,17 +8,18 @@ readme = "README.md"

 [tool.poetry.dependencies]
 bson = "^0.5.10"
-contextily = "^1.…"
-geopandas = "^0.14.…"
+contextily = "^1.5.0"
+geopandas = "^0.14.2"
+loguru = "^0.7.2"
 numpy = "^1.26.2"
 onnxruntime = "^1.16.3"
 opencv-python-headless = "^4.8.1.78"
+pillow = "^10.2.0"
 python = "^3.11"
 python-dotenv = "^1.0.0"
 rasterio = "^1.3.9"
 requests = "^2.31.0"
-
-loguru = "^0.7.2"
+samgis-core = "^1.0.0"

 [tool.poetry.group.aws_lambda]
 optional = true
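With samgis-core = "^1.0.0" in place, the non-core code pulls the relocated pieces from the new package. For reference, a minimal sketch of the import surface this commit relies on — the paths are exactly the ones the diffs below introduce, not a complete listing of the samgis_core API:

    from samgis_core.prediction_api.sam_onnx import SegmentAnythingONNX, get_raster_inference
    from samgis_core.utilities.constants import MODEL_ENCODER_NAME, MODEL_DECODER_NAME, DEFAULT_INPUT_SHAPE
    from samgis_core.utilities.fastapi_logger import setup_logging
    from samgis_core.utilities.type_hints import dict_str_int, list_float, llist_float, tuple_float
    from samgis_core.utilities.utilities import base64_decode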
requirements.txt
DELETED
@@ -1,18 +0,0 @@
-aws-lambda-powertools
-awslambdaric
-bson
-contextily
-geopandas
-jmespath
-myst-parser
-numpy
-onnxruntime
-opencv-python-headless
-pillow
-pydantic>=2.0.3
-python-dotenv
-rasterio
-requests
-sphinx
-sphinx-autodoc-typehints
-sphinxcontrib-openapi
requirements_dev.txt
DELETED
@@ -1,17 +0,0 @@
-aws-lambda-powertools
-awslambdaric
-bson
-contextily
-geopandas
-jmespath
-myst-parser
-numpy
-onnxruntime
-opencv-python-headless
-pillow
-pydantic>=2.0.3
-pytest
-pytest-cov
-python-dotenv
-rasterio
-requests
requirements_dockerfile.txt
DELETED
@@ -1,14 +0,0 @@
-aws-lambda-powertools
-awslambdaric
-bson
-contextily
-geopandas
-jmespath
-numpy
-onnxruntime
-opencv-python-headless
-pillow
-pydantic>=2.0.3
-python-dotenv
-rasterio
-requests
samgis/__init__.py
CHANGED
@@ -12,6 +12,6 @@ try:

     app_logger = Logger(service=SERVICE_NAME)
 except ModuleNotFoundError:
-    from samgis.utilities.fastapi_logger import setup_logging
+    from samgis_core.utilities.fastapi_logger import setup_logging

     app_logger = setup_logging(debug=True)
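The fallback preserves the existing behavior: inside AWS Lambda the aws_lambda_powertools Logger is used, elsewhere the loguru-based logger now shipped by samgis_core. A minimal sketch of the pattern, assuming samgis_core's setup_logging keeps the signature of the module removed later in this commit:

    try:
        from aws_lambda_powertools import Logger
        app_logger = Logger(service="samgis")  # "samgis" stands in for SERVICE_NAME here
    except ModuleNotFoundError:
        from samgis_core.utilities.fastapi_logger import setup_logging
        app_logger = setup_logging(debug=True)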
samgis/__version__.py
CHANGED
@@ -1 +1 @@
-__version__ = "1.1.0"
+__version__ = "1.2.0"
samgis/io/coordinates_pixel_conversion.py
CHANGED
@@ -1,7 +1,9 @@
 """functions useful to convert to/from latitude-longitude coordinates to pixel image coordinates"""
+from samgis_core.utilities.type_hints import tuple_float, tuple_float_any
+
 from samgis import app_logger
 from samgis.utilities.constants import TILE_SIZE, EARTH_EQUATORIAL_RADIUS
-from samgis.utilities.type_hints import ImagePixelCoordinates
+from samgis.utilities.type_hints import ImagePixelCoordinates
 from samgis.utilities.type_hints import LatLngDict

samgis/io/geo_helpers.py
CHANGED
@@ -2,8 +2,8 @@
 from affine import Affine
 from numpy import ndarray as np_ndarray

+from samgis_core.utilities.type_hints import list_float, tuple_float, dict_str_int
 from samgis import app_logger
-from samgis.utilities.type_hints import list_float, tuple_float, dict_str_int


 def load_affine_transformation_from_matrix(matrix_source_coefficients: list_float) -> Affine:
samgis/io/raster_helpers.py
CHANGED
@@ -87,7 +87,7 @@ def get_rgb_prediction_image(raster_cropped: ndarray, slope_cellsize: int, inver

     channel0 = raster_cropped
     channel1 = normalize_array_list(
-        [raster_cropped, slope, curvature], CHANNEL_EXAGGERATIONS_LIST, title=…
+        [raster_cropped, slope, curvature], CHANNEL_EXAGGERATIONS_LIST, title="channel1_normlist")
     channel2 = curvature

     return get_rgb_image(channel0, channel1, channel2, invert_image=invert_image)
@@ -121,11 +121,11 @@ def get_rgb_image(arr_channel0: ndarray, arr_channel1: ndarray, arr_channel2: nd
     data_rgb = np.zeros((arr_channel0.shape[0], arr_channel0.shape[1], 3), dtype=np.uint8)
     app_logger.debug(f"arr_container data_rgb, type:{type(data_rgb)}, arr_shape:{data_rgb.shape}.")
     data_rgb[:, :, 0] = normalize_array(
-        arr_channel0.astype(float), high=1, norm_type="float", title=…
+        arr_channel0.astype(float), high=1, norm_type="float", title="RGB:channel0") * 64
     data_rgb[:, :, 1] = normalize_array(
-        arr_channel1.astype(float), high=1, norm_type="float", title=…
+        arr_channel1.astype(float), high=1, norm_type="float", title="RGB:channel1") * 128
     data_rgb[:, :, 2] = normalize_array(
-        arr_channel2.astype(float), high=1, norm_type="float", title=…
+        arr_channel2.astype(float), high=1, norm_type="float", title="RGB:channel2") * 192
     if invert_image:
         app_logger.debug(f"data_rgb:{type(data_rgb)}, {data_rgb.dtype}.")
         data_rgb = bitwise_not(data_rgb)
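The three changed lines normalize each channel and then scale it into a distinct range (64/128/192). A small worked sketch of that computation, assuming normalize_array with high=1 and norm_type="float" maps its input into [0, 1]; normalize01 below is a stand-in, not the real helper:

    import numpy as np

    def normalize01(arr: np.ndarray) -> np.ndarray:
        # stand-in for normalize_array(..., high=1, norm_type="float")
        span = arr.max() - arr.min()
        return (arr - arr.min()) / span if span else np.zeros_like(arr, dtype=float)

    channel = np.array([[0.0, 50.0], [100.0, 200.0]])
    print(normalize01(channel) * 64)   # red channel lands in [0, 64]
    print(normalize01(channel) * 128)  # green channel lands in [0, 128]
    print(normalize01(channel) * 192)  # blue channel lands in [0, 192]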
samgis/io/tms2geotiff.py
CHANGED
@@ -1,11 +1,14 @@
 import os
+
 from numpy import ndarray
+from samgis_core.utilities.type_hints import tuple_float
 from xyzservices import TileProvider

 from samgis import app_logger
 from samgis.utilities.constants import (OUTPUT_CRS_STRING, DRIVER_RASTERIO_GTIFF, N_MAX_RETRIES, N_CONNECTION, N_WAIT,
                                         ZOOM_AUTO, BOOL_USE_CACHE)
-from samgis.utilities.type_hints import tuple_ndarray_transform
+from samgis.utilities.type_hints import tuple_ndarray_transform
+

 bool_use_cache = int(os.getenv("BOOL_USE_CACHE", BOOL_USE_CACHE))
 n_connection = int(os.getenv("N_CONNECTION", N_CONNECTION))
samgis/io/wrappers_helpers.py
CHANGED
@@ -6,7 +6,7 @@ from samgis import app_logger
 from samgis.io.coordinates_pixel_conversion import get_latlng_to_pixel_coordinates
 from samgis.utilities.constants import COMPLETE_URL_TILES_MAPBOX, COMPLETE_URL_TILES_NEXTZEN, CUSTOM_RESPONSE_MESSAGES
 from samgis.utilities.type_hints import ApiRequestBody, ContentTypes, XYZTerrainProvidersNames, XYZDefaultProvidersNames
-from samgis.utilities.utilities import base64_decode
+from samgis_core.utilities.utilities import base64_decode


 def get_response(status: int, start_time: float, request_id: str, response_body: Dict = None) -> str:
samgis/prediction_api/predictors.py
CHANGED
@@ -1,15 +1,15 @@
 """functions using machine learning instance model(s)"""
-from numpy import array as np_array, uint8, zeros, ndarray
-
 from samgis import app_logger, MODEL_FOLDER
 from samgis.io.geo_helpers import get_vectorized_raster_as_geojson
 from samgis.io.raster_helpers import get_raster_terrain_rgb_like, get_rgb_prediction_image
 from samgis.io.tms2geotiff import download_extent
 from samgis.io.wrappers_helpers import check_source_type_is_terrain
-from samgis.…
-from …
-
-from …
+from samgis.utilities.constants import DEFAULT_URL_TILES, SLOPE_CELLSIZE
+from samgis_core.prediction_api.sam_onnx import SegmentAnythingONNX
+from samgis_core.prediction_api.sam_onnx import get_raster_inference
+from samgis_core.utilities.constants import MODEL_ENCODER_NAME, MODEL_DECODER_NAME, DEFAULT_INPUT_SHAPE
+from samgis_core.utilities.type_hints import llist_float, dict_str_int, list_dict
+

 models_dict = {"fastsam": {"instance": None}}
@@ -53,7 +53,7 @@ def samexporter_predict(
     app_logger.info(f"tile_source: {source}: downloading geo-referenced raster with bbox {bbox}, zoom {zoom}.")
     img, transform = download_extent(w=pt1[1], s=pt1[0], e=pt0[1], n=pt0[0], zoom=zoom, source=source)
     if check_source_type_is_terrain(source):
-        app_logger.info(…
+        app_logger.info("terrain-rgb like raster: transforms it into a DEM")
         dem = get_raster_terrain_rgb_like(img, source.name)
         # set a slope cell size proportional to the image width
         slope_cellsize = int(img.shape[1] * SLOPE_CELLSIZE / DEFAULT_INPUT_SHAPE[1])
@@ -69,41 +69,3 @@ def samexporter_predict(
         "n_predictions": n_predictions,
         **get_vectorized_raster_as_geojson(mask, transform)
     }
-
-
-def get_raster_inference(
-        img: PIL_Image or ndarray, prompt: list_dict, models_instance: SegmentAnythingONNX, model_name: str
-) -> tuple_ndarr_int:
-    """
-    Wrapper for rasterio Affine from_gdal method
-
-    Args:
-        img: input PIL Image
-        prompt: list of prompt dict
-        models_instance: SegmentAnythingONNX instance model
-        model_name: model name string
-
-    Returns:
-        raster prediction mask, prediction number
-    """
-    np_img = np_array(img)
-    app_logger.info(f"img type {type(np_img)}, prompt:{prompt}.")
-    app_logger.debug(f"onnxruntime input shape/size (shape if PIL) {np_img.size}.")
-    try:
-        app_logger.debug(f"onnxruntime input shape (NUMPY) {np_img.shape}.")
-    except Exception as e_shape:
-        app_logger.error(f"e_shape:{e_shape}.")
-    app_logger.info(f"instantiated model {model_name}, ENCODER {MODEL_ENCODER_NAME}, "
-                    f"DECODER {MODEL_DECODER_NAME} from {MODEL_FOLDER}: Creating embedding...")
-    embedding = models_instance.encode(np_img)
-    app_logger.debug(f"embedding created, running predict_masks with prompt {prompt}...")
-    inference_out = models_instance.predict_masks(embedding, prompt)
-    len_inference_out = len(inference_out[0, :, :, :])
-    app_logger.info(f"Created {len_inference_out} prediction_masks,"
-                    f"shape:{inference_out.shape}, dtype:{inference_out.dtype}.")
-    mask = zeros((inference_out.shape[2], inference_out.shape[3]), dtype=uint8)
-    for n, m in enumerate(inference_out[0, :, :, :]):
-        app_logger.debug(f"{n}th of prediction_masks shape {inference_out.shape}"
-                         f" => mask shape:{mask.shape}, {mask.dtype}.")
-        mask[m > 0.0] = 255
-    return mask, len_inference_out
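Call sites keep the same shape after the move: get_raster_inference now comes from samgis_core, assuming it keeps the signature of the wrapper removed above. A hypothetical call, with illustrative model paths and a prompt built the way the (also moved) SegmentAnythingONNX.get_input_points expects:

    from PIL import Image
    from samgis_core.prediction_api.sam_onnx import SegmentAnythingONNX, get_raster_inference

    models_instance = SegmentAnythingONNX(
        encoder_model_path="/models/mobile_sam.encoder.onnx",        # illustrative path
        decoder_model_path="/models/sam_vit_h_4b8939.decoder.onnx",  # illustrative path
    )
    img = Image.new("RGB", (1024, 684))  # dummy tile sized like DEFAULT_INPUT_SHAPE
    prompt = [{"type": "point", "data": [512, 342], "label": 1}]
    mask, n_predictions = get_raster_inference(img, prompt, models_instance, "mobile_sam")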
samgis/prediction_api/sam_onnx.py
DELETED
@@ -1,246 +0,0 @@
-"""
-Define a machine learning model executed by ONNX Runtime (https://onnxruntime.ai/)
-for Segment Anything (https://segment-anything.com).
-Modified from https://github.com/vietanhdev/samexporter/
-
-Copyright (c) 2023 Viet Anh Nguyen
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-"""
-from copy import deepcopy
-from numpy import array as np_array, concatenate, float32, linalg, matmul, ndarray, ones, zeros
-from cv2 import INTER_LINEAR, warpAffine
-from onnxruntime import get_available_providers, InferenceSession
-
-from samgis import app_logger
-from samgis.utilities.constants import DEFAULT_INPUT_SHAPE
-
-
-class SegmentAnythingONNX:
-    """Segmentation model using SegmentAnything"""
-
-    def __init__(self, encoder_model_path, decoder_model_path) -> None:
-        self.target_size = DEFAULT_INPUT_SHAPE[1]
-        self.input_size = DEFAULT_INPUT_SHAPE
-
-        # Load models
-        providers = get_available_providers()
-
-        # Pop TensorRT Runtime due to crashing issues
-        # TODO: Add back when TensorRT backend is stable
-        providers = [p for p in providers if p != "TensorrtExecutionProvider"]
-
-        if providers:
-            app_logger.info(
-                "Available providers for ONNXRuntime: %s", ", ".join(providers)
-            )
-        else:
-            app_logger.warning("No available providers for ONNXRuntime")
-        self.encoder_session = InferenceSession(
-            encoder_model_path, providers=providers
-        )
-        self.encoder_input_name = self.encoder_session.get_inputs()[0].name
-        self.decoder_session = InferenceSession(
-            decoder_model_path, providers=providers
-        )
-
-    @staticmethod
-    def get_input_points(prompt):
-        """Get input points"""
-        points = []
-        labels = []
-        for mark in prompt:
-            if mark["type"] == "point":
-                points.append(mark["data"])
-                labels.append(mark["label"])
-            elif mark["type"] == "rectangle":
-                points.append([mark["data"][0], mark["data"][1]])  # top left
-                points.append(
-                    [mark["data"][2], mark["data"][3]]
-                )  # bottom right
-                labels.append(2)
-                labels.append(3)
-        points, labels = np_array(points), np_array(labels)
-        return points, labels
-
-    def run_encoder(self, encoder_inputs):
-        """Run encoder"""
-        output = self.encoder_session.run(None, encoder_inputs)
-        image_embedding = output[0]
-        return image_embedding
-
-    @staticmethod
-    def get_preprocess_shape(old_h: int, old_w: int, long_side_length: int):
-        """
-        Compute the output size given input size and target long side length.
-        """
-        scale = long_side_length * 1.0 / max(old_h, old_w)
-        new_h, new_w = old_h * scale, old_w * scale
-        new_w = int(new_w + 0.5)
-        new_h = int(new_h + 0.5)
-        return new_h, new_w
-
-    def apply_coords(self, coords: ndarray, original_size, target_length):
-        """
-        Expects a numpy np_array of length 2 in the final dimension. Requires the
-        original image size in (H, W) format.
-        """
-        old_h, old_w = original_size
-        new_h, new_w = self.get_preprocess_shape(
-            original_size[0], original_size[1], target_length
-        )
-        coords = deepcopy(coords).astype(float)
-        coords[..., 0] = coords[..., 0] * (new_w / old_w)
-        coords[..., 1] = coords[..., 1] * (new_h / old_h)
-        return coords
-
-    def run_decoder(
-            self, image_embedding, original_size, transform_matrix, prompt
-    ):
-        """Run decoder"""
-        input_points, input_labels = self.get_input_points(prompt)
-
-        # Add a batch index, concatenate a padding point, and transform.
-        onnx_coord = concatenate(
-            [input_points, np_array([[0.0, 0.0]])], axis=0
-        )[None, :, :]
-        onnx_label = concatenate([input_labels, np_array([-1])], axis=0)[
-            None, :
-        ].astype(float32)
-        onnx_coord = self.apply_coords(
-            onnx_coord, self.input_size, self.target_size
-        ).astype(float32)
-
-        # Apply the transformation matrix to the coordinates.
-        onnx_coord = concatenate(
-            [
-                onnx_coord,
-                ones((1, onnx_coord.shape[1], 1), dtype=float32),
-            ],
-            axis=2,
-        )
-        onnx_coord = matmul(onnx_coord, transform_matrix.T)
-        onnx_coord = onnx_coord[:, :, :2].astype(float32)
-
-        # Create an empty mask input and an indicator for no mask.
-        onnx_mask_input = zeros((1, 1, 256, 256), dtype=float32)
-        onnx_has_mask_input = zeros(1, dtype=float32)
-
-        decoder_inputs = {
-            "image_embeddings": image_embedding,
-            "point_coords": onnx_coord,
-            "point_labels": onnx_label,
-            "mask_input": onnx_mask_input,
-            "has_mask_input": onnx_has_mask_input,
-            "orig_im_size": np_array(self.input_size, dtype=float32),
-        }
-        masks, _, _ = self.decoder_session.run(None, decoder_inputs)
-
-        # Transform the masks back to the original image size.
-        inv_transform_matrix = linalg.inv(transform_matrix)
-        transformed_masks = self.transform_masks(
-            masks, original_size, inv_transform_matrix
-        )
-
-        return transformed_masks
-
-    @staticmethod
-    def transform_masks(masks, original_size, transform_matrix):
-        """Transform masks
-        Transform the masks back to the original image size.
-        """
-        output_masks = []
-        for batch in range(masks.shape[0]):
-            batch_masks = []
-            for mask_id in range(masks.shape[1]):
-                mask = masks[batch, mask_id]
-                try:
-                    try:
-                        app_logger.debug(f"mask_shape transform_masks:{mask.shape}, dtype:{mask.dtype}.")
-                    except Exception as e_mask_shape_transform_masks:
-                        app_logger.error(f"e_mask_shape_transform_masks:{e_mask_shape_transform_masks}.")
-                    mask = warpAffine(
-                        mask,
-                        transform_matrix[:2],
-                        (original_size[1], original_size[0]),
-                        flags=INTER_LINEAR,
-                    )
-                except Exception as e_warp_affine1:
-                    app_logger.error(f"e_warp_affine1 mask shape:{mask.shape}, dtype:{mask.dtype}.")
-                    app_logger.error(f"e_warp_affine1 transform_matrix:{transform_matrix}, [:2] {transform_matrix[:2]}.")
-                    app_logger.error(f"e_warp_affine1 original_size:{original_size}.")
-                    raise e_warp_affine1
-                batch_masks.append(mask)
-            output_masks.append(batch_masks)
-        return np_array(output_masks)
-
-    def encode(self, cv_image):
-        """
-        Calculate embedding and metadata for a single image.
-        """
-        original_size = cv_image.shape[:2]
-
-        # Calculate a transformation matrix to convert to self.input_size
-        scale_x = self.input_size[1] / cv_image.shape[1]
-        scale_y = self.input_size[0] / cv_image.shape[0]
-        scale = min(scale_x, scale_y)
-        transform_matrix = np_array(
-            [
-                [scale, 0, 0],
-                [0, scale, 0],
-                [0, 0, 1],
-            ]
-        )
-        try:
-            cv_image = warpAffine(
-                cv_image,
-                transform_matrix[:2],
-                (self.input_size[1], self.input_size[0]),
-                flags=INTER_LINEAR,
-            )
-        except Exception as e_warp_affine2:
-            app_logger.error(f"e_warp_affine2:{e_warp_affine2}.")
-            np_cv_image = np_array(cv_image)
-            app_logger.error(f"e_warp_affine2 cv_image shape:{np_cv_image.shape}, dtype:{np_cv_image.dtype}.")
-            app_logger.error(f"e_warp_affine2 transform_matrix:{transform_matrix}, [:2] {transform_matrix[:2]}")
-            app_logger.error(f"e_warp_affine2 self.input_size:{self.input_size}.")
-            raise e_warp_affine2
-
-        encoder_inputs = {
-            self.encoder_input_name: cv_image.astype(float32),
-        }
-        image_embedding = self.run_encoder(encoder_inputs)
-        return {
-            "image_embedding": image_embedding,
-            "original_size": original_size,
-            "transform_matrix": transform_matrix,
-        }
-
-    def predict_masks(self, embedding, prompt):
-        """
-        Predict masks for a single image.
-        """
-        masks = self.run_decoder(
-            embedding["image_embedding"],
-            embedding["original_size"],
-            embedding["transform_matrix"],
-            prompt,
-        )
-
-        return masks
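The class moves to samgis_core in two-step form: encode() computes the image embedding once, predict_masks() reuses it per prompt. A usage sketch assuming the moved class keeps the API shown above; model paths are illustrative:

    import numpy as np
    from samgis_core.prediction_api.sam_onnx import SegmentAnythingONNX

    model = SegmentAnythingONNX("encoder.onnx", "decoder.onnx")     # illustrative paths
    image = np.zeros((684, 1024, 3), dtype=np.uint8)                # DEFAULT_INPUT_SHAPE-sized dummy
    embedding = model.encode(image)  # dict: image_embedding, original_size, transform_matrix
    prompt = [{"type": "rectangle", "data": [100, 100, 300, 250]}]  # x0, y0, x1, y1
    masks = model.predict_masks(embedding, prompt)                  # ndarray of per-prompt masks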
samgis/utilities/constants.py
CHANGED
@@ -9,8 +9,6 @@ CUSTOM_RESPONSE_MESSAGES = {
     422: "Missing required parameter",
     500: "Internal server error"
 }
-MODEL_ENCODER_NAME = "mobile_sam.encoder.onnx"
-MODEL_DECODER_NAME = "sam_vit_h_4b8939.decoder.onnx"
 TILE_SIZE = 256
 EARTH_EQUATORIAL_RADIUS = 6378137.0
 WKT_3857 = 'PROJCS["WGS 84 / Pseudo-Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'
@@ -39,5 +37,4 @@ DOMAIN_URL_TILES_NEXTZEN = "s3.amazonaws.com"
 RELATIVE_URL_TILES_NEXTZEN = "elevation-tiles-prod/terrarium/{z}/{x}/{y}.png"  # "terrarium/{z}/{x}/{y}.png"
 COMPLETE_URL_TILES_NEXTZEN = f"https://{DOMAIN_URL_TILES_NEXTZEN}/{RELATIVE_URL_TILES_NEXTZEN}"
 CHANNEL_EXAGGERATIONS_LIST = [2.5, 1.1, 2.0]
-DEFAULT_INPUT_SHAPE = 684, 1024
 SLOPE_CELLSIZE = 61
samgis/utilities/fastapi_logger.py
DELETED
@@ -1,24 +0,0 @@
-import loguru
-
-
-def setup_logging(debug: bool = False, formatter: str = "{time} - {level} - ({extra[request_id]}) {message} "
-                  ) -> loguru.logger:
-    """
-    Create a logging instance with log string formatter.
-
-    Args:
-        debug: logging debug argument
-        formatter: log string formatter
-
-    Returns:
-        Logger
-
-    """
-    import sys
-
-    logger = loguru.logger
-    logger.remove()
-    level_logger = "DEBUG" if debug else "INFO"
-    logger.add(sys.stdout, format=formatter, level=level_logger)
-    logger.info(f"type_logger:{type(logger)}, logger:{logger}.")
-    return logger
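setup_logging survives in samgis_core (see samgis/__init__.py above). Assuming the relocated function keeps this signature, note that its default formatter references {extra[request_id]}, so callers should bind a request_id before logging:

    from samgis_core.utilities.fastapi_logger import setup_logging

    app_logger = setup_logging(debug=True)
    app_logger.bind(request_id="local-test").info("logger ready")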
samgis/utilities/serialize.py
DELETED
@@ -1,84 +0,0 @@
-"""Serialize objects"""
-from typing import Mapping
-
-from samgis import app_logger
-from samgis.utilities.type_hints import dict_str, dict_str_any
-
-
-def serialize(obj: any, include_none: bool = False):
-    """
-    Return the input object into a serializable one
-
-    Args:
-        obj: Object to serialize
-        include_none: bool to indicate if include also keys with None values during dict serialization
-
-    Returns:
-        serialized object
-    """
-    return _serialize(obj, include_none)
-
-
-def _serialize(obj: any, include_none: bool):
-    from numpy import ndarray as np_ndarray, floating as np_floating, integer as np_integer
-
-    primitive = (int, float, str, bool)
-    # print(type(obj))
-    try:
-        if obj is None:
-            return None
-        elif isinstance(obj, np_integer):
-            return int(obj)
-        elif isinstance(obj, np_floating):
-            return float(obj)
-        elif isinstance(obj, np_ndarray):
-            return obj.tolist()
-        elif isinstance(obj, primitive):
-            return obj
-        elif type(obj) is list:
-            return _serialize_list(obj, include_none)
-        elif type(obj) is tuple:
-            return list(obj)
-        elif type(obj) is bytes:
-            return _serialize_bytes(obj)
-        elif isinstance(obj, Exception):
-            return _serialize_exception(obj)
-        # elif isinstance(obj, object):
-        #     return _serialize_object(obj, include_none)
-        else:
-            return _serialize_object(obj, include_none)
-    except Exception as e_serialize:
-        app_logger.error(f"e_serialize::{e_serialize}, type_obj:{type(obj)}, obj:{obj}.")
-        return f"object_name:{str(obj)}__object_type_str:{str(type(obj))}."
-
-
-def _serialize_object(obj: Mapping[any, object], include_none: bool) -> dict[any]:
-    from bson import ObjectId
-
-    res = {}
-    if type(obj) is not dict:
-        keys = [i for i in obj.__dict__.keys() if (getattr(obj, i) is not None) or include_none]
-    else:
-        keys = [i for i in obj.keys() if (obj[i] is not None) or include_none]
-    for key in keys:
-        if type(obj) is not dict:
-            res[key] = _serialize(getattr(obj, key), include_none)
-        elif isinstance(obj[key], ObjectId):
-            continue
-        else:
-            res[key] = _serialize(obj[key], include_none)
-    return res
-
-
-def _serialize_list(ls: list, include_none: bool) -> list:
-    return [_serialize(elem, include_none) for elem in ls]
-
-
-def _serialize_bytes(b: bytes) -> dict_str:
-    import base64
-    encoded = base64.b64encode(b)
-    return {"value": encoded.decode('ascii'), "type": "bytes"}
-
-
-def _serialize_exception(e: Exception) -> dict_str_any:
-    return {"msg": str(e), "type": str(type(e)), **e.__dict__}
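A behavior sketch for serialize(), based on the removed code above: numpy scalars and arrays collapse to plain Python values and None-valued keys are dropped by default, so the result is json-serializable. The samgis_core.utilities.serialize module path is an assumption (only fastapi_logger, type_hints, utilities, constants and sam_onnx are confirmed by this diff):

    import json
    import numpy as np
    from samgis_core.utilities.serialize import serialize  # assumed new location

    payload = {"mask": np.array([[0, 255]], dtype=np.uint8), "score": np.float32(0.87), "note": None}
    print(json.dumps(serialize(payload)))  # {"mask": [[0, 255]], "score": 0.87...}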
samgis/utilities/type_hints.py
CHANGED
@@ -2,24 +2,11 @@
 from enum import IntEnum, Enum, StrEnum
 from typing import TypedDict

-from PIL.Image import Image
 from affine import Affine
 from numpy import ndarray
 from pydantic import BaseModel


-dict_str_int = dict[str, int]
-dict_str = dict[str]
-dict_str_any = dict[str, any]
-list_dict = list[dict]
-list_float = list[float]
-list_int = list[int]
-tuple_int = tuple[int]
-tuple_ndarr_int = tuple[ndarray, int]
-llist_float = list[list_float]
-tuple_float = tuple[float]
-tuple_float_any = tuple[float, any]
-PIL_Image = Image
 tuple_ndarray_transform = tuple[ndarray, Affine]

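The dropped aliases now come from samgis_core, matching the imports this commit adds in coordinates_pixel_conversion.py, geo_helpers.py, tms2geotiff.py and predictors.py:

    from samgis_core.utilities.type_hints import (
        dict_str_int, list_dict, list_float, llist_float, tuple_float, tuple_float_any,
    )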
samgis/utilities/utilities.py
DELETED
@@ -1,92 +0,0 @@
-"""Various utilities (logger, time benchmark, args dump, numerical and stats info)"""
-from samgis import app_logger
-from samgis.utilities.serialize import serialize
-
-
-def _prepare_base64_input(sb):
-    if isinstance(sb, str):
-        # If there's any unicode here, an exception will be thrown and the function will return false
-        return bytes(sb, 'ascii')
-    elif isinstance(sb, bytes):
-        return sb
-    raise ValueError("Argument must be string or bytes")
-
-
-def _is_base64(sb: str or bytes):
-    import base64
-
-    try:
-        sb_bytes = _prepare_base64_input(sb)
-        decoded = base64.b64decode(sb_bytes, validate=True)
-        return base64.b64encode(decoded).decode("utf-8") == sb_bytes.decode("utf-8")
-    except ValueError:
-        return False
-
-
-def base64_decode(s):
-    """
-    Decode base64 strings
-
-    Args:
-        s: input string
-
-    Returns:
-        decoded string
-    """
-    import base64
-
-    if isinstance(s, str) and _is_base64(s):
-        return base64.b64decode(s, validate=True).decode("utf-8")
-
-    return s
-
-
-def base64_encode(sb: str or bytes) -> bytes:
-    """
-    Encode input strings or bytes as base64
-
-    Args:
-        sb: input string or bytes
-
-    Returns:
-        base64 encoded bytes
-    """
-    import base64
-
-    sb_bytes = _prepare_base64_input(sb)
-    return base64.b64encode(sb_bytes)
-
-
-def hash_calculate(arr) -> str or bytes:
-    """
-    Return computed hash from input variable (typically a numpy array).
-
-    Args:
-        arr: input variable
-
-    Returns:
-        computed hash from input variable
-    """
-    from hashlib import sha256
-    from base64 import b64encode
-    from numpy import ndarray as np_ndarray
-
-    if isinstance(arr, np_ndarray):
-        hash_fn = sha256(arr.data)
-    elif isinstance(arr, dict):
-        import json
-
-        serialized = serialize(arr)
-        variable_to_hash = json.dumps(serialized, sort_keys=True).encode('utf-8')
-        hash_fn = sha256(variable_to_hash)
-    elif isinstance(arr, str):
-        try:
-            hash_fn = sha256(arr)
-        except TypeError:
-            app_logger.warning(f"TypeError, re-try encoding arg:{arr},type:{type(arr)}.")
-            hash_fn = sha256(arr.encode('utf-8'))
-    elif isinstance(arr, bytes):
-        hash_fn = sha256(arr)
-    else:
-        raise ValueError(f"variable 'arr':{arr} not yet handled.")
-    return b64encode(hash_fn.digest())
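Note: these deleted helpers keep their names in samgis_core; the updated tests later in this commit import base64_encode and hash_calculate from samgis_core.utilities.utilities. A quick usage sketch:

from samgis_core.utilities.utilities import base64_encode, hash_calculate

import numpy as np

print(base64_encode("hello"))        # b'aGVsbG8=' (strings are coerced to ascii bytes first)
print(hash_calculate(np.arange(4)))  # base64-encoded sha256 digest of the array buffer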
tests/io/test_coordinates_pixel_conversion.py
CHANGED
@@ -1,37 +1,10 @@
 import json
 
-from samgis.io.coordinates_pixel_conversion import _get_latlng2pixel_projection, _get_point_latlng_to_pixel_coordinates, \
-    get_latlng_to_pixel_coordinates
+from samgis.io.coordinates_pixel_conversion import get_latlng_to_pixel_coordinates
 from samgis.utilities.type_hints import LatLngDict
 from tests import TEST_EVENTS_FOLDER
 
 
-def test_get_latlng2pixel_projection():
-    name_fn = "get_latlng2pixel_projection"
-
-    with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
-        inputs_outputs = json.load(tst_json)
-        for k, input_output in inputs_outputs.items():
-            print(f"k:{k}")
-            current_input = input_output["input"]
-            latlng_input = LatLngDict.model_validate(current_input["latlng"])
-            output = _get_latlng2pixel_projection(latlng_input)
-            assert output == input_output["output"]
-
-
-def test_get_point_latlng_to_pixel_coordinates():
-    name_fn = "get_point_latlng_to_pixel_coordinates"
-
-    with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
-        inputs_outputs = json.load(tst_json)
-        for k, input_output in inputs_outputs.items():
-            print(f"k:{k}")
-            current_input = input_output["input"]
-            latlng_input = LatLngDict.model_validate(current_input["latlng"])
-            output = _get_point_latlng_to_pixel_coordinates(latlng=latlng_input, zoom=current_input["zoom"])
-            assert output == input_output["output"]
-
-
 def test_get_latlng_to_pixel_coordinates():
     name_fn = "get_latlng_to_pixel_coordinates"
 
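Note: the two deleted tests covered the private Web Mercator helpers now owned by the core package. For reference, the lat/lng-to-pixel conversion they exercised follows the standard formula below (a standalone sketch, not the samgis implementation; the 256px tile size is an assumption):

from math import log, pi, sin

TILE_SIZE = 256  # assumed tile size

def latlng_to_pixel(lat: float, lng: float, zoom: int) -> tuple[float, float]:
    # clamp sin(lat) away from +/-1 to avoid infinities at the poles,
    # then scale world coordinates by 2**zoom
    siny = min(max(sin(lat * pi / 180), -0.9999), 0.9999)
    x = TILE_SIZE * (0.5 + lng / 360)
    y = TILE_SIZE * (0.5 - log((1 + siny) / (1 - siny)) / (4 * pi))
    scale = 2 ** zoom
    return x * scale, y * scale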
tests/io/test_geo_helpers.py
CHANGED
@@ -1,6 +1,5 @@
 import json
 import unittest
-
 import numpy as np
 import shapely
 
tests/io/test_lambda_helpers.py
DELETED
@@ -1,133 +0,0 @@
-import json
-import time
-from http import HTTPStatus
-from unittest.mock import patch
-
-from samgis.io import wrappers_helpers
-from samgis.io.wrappers_helpers import get_parsed_bbox_points, get_parsed_request_body, get_response
-from samgis.utilities.type_hints import ApiRequestBody
-from samgis.utilities import utilities
-from tests import TEST_EVENTS_FOLDER
-
-
-@patch.object(time, "time")
-def test_get_response(time_mocked):
-    time_diff = 108
-    end_run = 1000
-    time_mocked.return_value = end_run
-    start_time = end_run - time_diff
-    aws_request_id = "test_invoke_id"
-
-    with open(TEST_EVENTS_FOLDER / "get_response.json") as tst_json:
-        inputs_outputs = json.load(tst_json)
-
-    response_type = "200"
-    body_response = inputs_outputs[response_type]["input"]
-    output = get_response(HTTPStatus.OK.value, start_time, aws_request_id, body_response)
-    assert json.loads(output) == inputs_outputs[response_type]["output"]
-
-    response_type = "400"
-    response_400 = get_response(HTTPStatus.BAD_REQUEST.value, start_time, aws_request_id, {})
-    assert response_400 == inputs_outputs[response_type]["output"]
-
-    response_type = "422"
-    response_422 = get_response(HTTPStatus.UNPROCESSABLE_ENTITY.value, start_time, aws_request_id, {})
-    assert response_422 == inputs_outputs[response_type]["output"]
-
-    response_type = "500"
-    response_500 = get_response(HTTPStatus.INTERNAL_SERVER_ERROR.value, start_time, aws_request_id, {})
-    assert response_500 == inputs_outputs[response_type]["output"]
-
-
-def test_get_parsed_bbox_points():
-    with open(TEST_EVENTS_FOLDER / "get_parsed_bbox_prompts_single_point.json") as tst_json:
-        inputs_outputs = json.load(tst_json)
-        for k, input_output in inputs_outputs.items():
-            print(f"k:{k}.")
-            raw_body = get_parsed_request_body(**input_output["input"])
-            output = get_parsed_bbox_points(raw_body)
-            assert output == input_output["output"]
-
-
-def test_get_parsed_bbox_other_inputs():
-    for json_filename in ["single_rectangle", "multi_prompt"]:
-        with open(TEST_EVENTS_FOLDER / f"get_parsed_bbox_prompts_{json_filename}.json") as tst_json:
-            inputs_outputs = json.load(tst_json)
-            parsed_input = ApiRequestBody.model_validate(inputs_outputs["input"])
-            output = get_parsed_bbox_points(parsed_input)
-            assert output == inputs_outputs["output"]
-
-
-def test_get_parsed_request_body():
-    input_event = {
-        "event": {
-            "bbox": {
-                "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
-                "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
-            },
-            "prompt": [{"type": "point", "data": {"lat": 37.0, "lng": 15.0}, "label": 0}],
-            "zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True
-        }
-    }
-    expected_output_dict = {
-        "bbox": {
-            "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
-            "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
-        },
-        "prompt": [{"type": "point", "data": {"lat": 37.0, "lng": 15.0}, "label": 0}],
-        "zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True
-    }
-    output = get_parsed_request_body(input_event["event"])
-    assert output == ApiRequestBody.model_validate(input_event["event"])
-
-    input_event_str = json.dumps(input_event["event"])
-    output = get_parsed_request_body(input_event_str)
-    assert output == ApiRequestBody.model_validate(expected_output_dict)
-
-    event = {"body": utilities.base64_encode(input_event_str).decode("utf-8")}
-    output = get_parsed_request_body(event)
-    assert output == ApiRequestBody.model_validate(expected_output_dict)
-
-
-@patch.object(wrappers_helpers, "providers")
-def test_get_url_tile(providers_mocked):
-    import xyzservices
-    from samgis.io.wrappers_helpers import get_url_tile
-
-    from tests import LOCAL_URL_TILE
-
-    local_tile_provider = xyzservices.TileProvider(name="local_tile_provider", url=LOCAL_URL_TILE, attribution="")
-    expected_output = {'name': 'local_tile_provider', 'url': LOCAL_URL_TILE, 'attribution': ''}
-    providers_mocked.query_name.return_value = local_tile_provider
-    assert get_url_tile("OpenStreetMap") == expected_output
-
-    local_url = 'http://localhost:8000/{parameter}/{z}/{x}/{y}.png'
-    local_tile_provider = xyzservices.TileProvider(
-        name="local_tile_provider_param", url=local_url, attribution="", parameter="lamda_handler"
-    )
-    providers_mocked.query_name.return_value = local_tile_provider
-    assert get_url_tile("OpenStreetMap.HOT") == {
-        "parameter": "lamda_handler", 'name': 'local_tile_provider_param', 'url': local_url, 'attribution': ''
-    }
-
-
-def test_get_url_tile_real():
-    from samgis.io.wrappers_helpers import get_url_tile
-
-    assert get_url_tile("OpenStreetMap") == {
-        'url': 'https://tile.openstreetmap.org/{z}/{x}/{y}.png', 'max_zoom': 19,
-        'html_attribution': '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors',
-        'attribution': '(C) OpenStreetMap contributors',
-        'name': 'OpenStreetMap.Mapnik'}
-
-    html_attribution_hot = '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, '
-    html_attribution_hot += 'Tiles style by <a href="https://www.hotosm.org/" target="_blank">Humanitarian '
-    html_attribution_hot += 'OpenStreetMap Team</a> hosted by <a href="https://openstreetmap.fr/" target="_blank">'
-    html_attribution_hot += 'OpenStreetMap France</a>'
-    attribution_hot = '(C) OpenStreetMap contributors, Tiles style by Humanitarian OpenStreetMap Team hosted by '
-    attribution_hot += 'OpenStreetMap France'
-    assert get_url_tile("OpenStreetMap.HOT") == {
-        'url': 'https://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png', 'max_zoom': 19,
-        'html_attribution': html_attribution_hot, 'attribution': attribution_hot, 'name': 'OpenStreetMap.HOT'
-    }
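Note: this module is not lost — it reappears below as tests/io/test_wrappers_helpers.py, with the module-level functions converted to unittest.TestCase methods. One detail in that conversion, sketched here: a @patch.object decorator on a method injects the mock after self.

import time
import unittest
from unittest.mock import patch

class ExampleTest(unittest.TestCase):
    @patch.object(time, "time")
    def test_frozen_clock(self, time_mocked):  # mock arrives after self
        time_mocked.return_value = 1000
        assert time.time() == 1000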
tests/io/test_raster_helpers.py
CHANGED
@@ -1,9 +1,9 @@
-import numpy as np
 import unittest
 from unittest.mock import patch
+import numpy as np
 
+from samgis_core.utilities.utilities import hash_calculate
 from samgis.io import raster_helpers
-from samgis.utilities.utilities import hash_calculate
 
 
 def get_three_channels(size=5, param1=1000, param2=3, param3=-88):
tests/io/test_tms2geotiff.py
CHANGED
@@ -1,40 +1,41 @@
 import unittest
 
 import numpy as np
+from samgis_core.utilities.utilities import hash_calculate
 
 from samgis import app_logger
 from samgis.io.tms2geotiff import download_extent
-from samgis.utilities.utilities import hash_calculate
 from tests import LOCAL_URL_TILE, TEST_EVENTS_FOLDER
 
+
 input_bbox = [[39.036252959636606, 15.040283203125002], [38.302869955150044, 13.634033203125002]]
 
 
 class TestTms2geotiff(unittest.TestCase):
-    def test_download_extent_simple_source(self):
-        from rasterio import Affine
-        from xyzservices import TileProvider
-        from tests.local_tiles_http_server import LocalTilesHttpServer
-
-        listen_port = 8000
-
-        with LocalTilesHttpServer.http_server("localhost", listen_port, directory=TEST_EVENTS_FOLDER):
-            pt0, pt1 = input_bbox
-            zoom = 10
-
-            n_lat = pt0[0]
-            e_lng = pt0[1]
-            s_lat = pt1[0]
-            w_lng = pt1[1]
-
-            source = TileProvider(name="local_tile_provider", url=LOCAL_URL_TILE, attribution="")
-            img, matrix = download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=zoom, source=source)
-            app_logger.info(f"# DOWNLOAD ENDED, shape: {img.shape} #")
-            np_img = np.ascontiguousarray(img)
-            output_hash = hash_calculate(np_img)
-            assert output_hash == b'UmbkwbPJpRT1XXcLnLUapUDP320w7YhS/AmT3H7u+b4='
-            assert Affine.to_gdal(matrix) == (
-                1517657.1966021745, 152.8740565703525, 0.0, 4726942.266183584, 0.0, -152.87405657034955)
+    # def test_download_extent_simple_source(self):
+    #     from rasterio import Affine
+    #     from xyzservices import TileProvider
+    #     from tests.local_tiles_http_server import LocalTilesHttpServer
+    #
+    #     listen_port = 8000
+    #
+    #     with LocalTilesHttpServer.http_server("localhost", listen_port, directory=TEST_EVENTS_FOLDER):
+    #         pt0, pt1 = input_bbox
+    #         zoom = 10
+    #
+    #         n_lat = pt0[0]
+    #         e_lng = pt0[1]
+    #         s_lat = pt1[0]
+    #         w_lng = pt1[1]
+    #
+    #         source = TileProvider(name="local_tile_provider", url=LOCAL_URL_TILE, attribution="")
+    #         img, matrix = download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=zoom, source=source)
+    #         app_logger.info(f"# DOWNLOAD ENDED, shape: {img.shape} #")
+    #         np_img = np.ascontiguousarray(img)
+    #         output_hash = hash_calculate(np_img)
+    #         assert output_hash == b'UmbkwbPJpRT1XXcLnLUapUDP320w7YhS/AmT3H7u+b4='
+    #         assert Affine.to_gdal(matrix) == (
+    #             1517657.1966021745, 152.8740565703525, 0.0, 4726942.266183584, 0.0, -152.87405657034955)
 
     def test_download_extent_source_with_parameter(self):
         from rasterio import Affine
@@ -52,16 +53,22 @@ class TestTms2geotiff(unittest.TestCase):
         s_lat = pt1[0]
         w_lng = pt1[1]
 
-
-
-
-
-
-
-
-
-
-
+        local_url = "http://localhost:8000/{parameter}/{z}/{x}/{y}.png"
+        download_extent_args_no_parameter = {"name": "local_tile_provider", "url": LOCAL_URL_TILE, "attribution": ""}
+        download_extent_args = {
+            "no_parameter": download_extent_args_no_parameter,
+            "with_parameter": {"url": local_url, "parameter": "lambda_handler", **download_extent_args_no_parameter}
+        }
+        for _args_names, _args in download_extent_args.items():
+            app_logger.info(f"args_names:{_args_names}.")
+            source = TileProvider(**_args)
+            img, matrix = download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=zoom, source=source)
+            app_logger.info(f"# DOWNLOAD ENDED, shape: {img.shape} #")
+            np_img = np.ascontiguousarray(img)
+            output_hash = hash_calculate(np_img)
+            assert output_hash == b'UmbkwbPJpRT1XXcLnLUapUDP320w7YhS/AmT3H7u+b4='
+            assert Affine.to_gdal(matrix) == (
+                1517657.1966021745, 152.8740565703525, 0.0, 4726942.266183584, 0.0, -152.87405657034955)
 
     def test_download_extent_source_with_parameter_key_error(self):
         from xyzservices import TileProvider
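Note: the rewritten test loops over a provider with and without a parameter attribute because xyzservices fills extra {placeholder} tokens in the url from the TileProvider's own attributes; a small sketch with build_url (the public xyzservices API). Worth flagging: in the committed "with_parameter" dict above, the trailing **download_extent_args_no_parameter unpack overrides the url key again, so both loop iterations appear to fetch from LOCAL_URL_TILE — presumably why a single hash assertion holds for both.

from xyzservices import TileProvider

provider = TileProvider(
    name="local_tile_provider_param",
    url="http://localhost:8000/{parameter}/{z}/{x}/{y}.png",
    attribution="",
    parameter="lambda_handler",
)
# extra placeholders like {parameter} resolve from the provider's attributes
print(provider.build_url(x=1, y=2, z=3))
# -> http://localhost:8000/lambda_handler/3/1/2.png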
tests/io/test_wrappers_helpers.py
ADDED
@@ -0,0 +1,136 @@
+import json
+import time
+import unittest
+
+from http import HTTPStatus
+from unittest.mock import patch
+
+from samgis.io import wrappers_helpers
+from samgis.io.wrappers_helpers import get_parsed_bbox_points, get_parsed_request_body, get_response
+from samgis.utilities.type_hints import ApiRequestBody
+from tests import TEST_EVENTS_FOLDER
+
+
+class WrappersHelpersTest(unittest.TestCase):
+    @patch.object(time, "time")
+    def test_get_response(self, time_mocked):
+        time_diff = 108
+        end_run = 1000
+        time_mocked.return_value = end_run
+        start_time = end_run - time_diff
+        aws_request_id = "test_invoke_id"
+
+        with open(TEST_EVENTS_FOLDER / "get_response.json") as tst_json:
+            inputs_outputs = json.load(tst_json)
+
+        response_type = "200"
+        body_response = inputs_outputs[response_type]["input"]
+        output = get_response(HTTPStatus.OK.value, start_time, aws_request_id, body_response)
+        assert json.loads(output) == inputs_outputs[response_type]["output"]
+
+        response_type = "400"
+        response_400 = get_response(HTTPStatus.BAD_REQUEST.value, start_time, aws_request_id, {})
+        assert response_400 == inputs_outputs[response_type]["output"]
+
+        response_type = "422"
+        response_422 = get_response(HTTPStatus.UNPROCESSABLE_ENTITY.value, start_time, aws_request_id, {})
+        assert response_422 == inputs_outputs[response_type]["output"]
+
+        response_type = "500"
+        response_500 = get_response(HTTPStatus.INTERNAL_SERVER_ERROR.value, start_time, aws_request_id, {})
+        assert response_500 == inputs_outputs[response_type]["output"]
+
+    @staticmethod
+    def test_get_parsed_bbox_points():
+        with open(TEST_EVENTS_FOLDER / "get_parsed_bbox_prompts_single_point.json") as tst_json:
+            inputs_outputs = json.load(tst_json)
+            for k, input_output in inputs_outputs.items():
+                print(f"k:{k}.")
+                raw_body = get_parsed_request_body(**input_output["input"])
+                output = get_parsed_bbox_points(raw_body)
+                assert output == input_output["output"]
+
+    @staticmethod
+    def test_get_parsed_bbox_other_inputs():
+        for json_filename in ["single_rectangle", "multi_prompt"]:
+            with open(TEST_EVENTS_FOLDER / f"get_parsed_bbox_prompts_{json_filename}.json") as tst_json:
+                inputs_outputs = json.load(tst_json)
+                parsed_input = ApiRequestBody.model_validate(inputs_outputs["input"])
+                output = get_parsed_bbox_points(parsed_input)
+                assert output == inputs_outputs["output"]
+
+    @staticmethod
+    def test_get_parsed_request_body():
+        from samgis_core.utilities.utilities import base64_encode
+
+        input_event = {
+            "event": {
+                "bbox": {
+                    "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
+                    "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
+                },
+                "prompt": [{"type": "point", "data": {"lat": 37.0, "lng": 15.0}, "label": 0}],
+                "zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True
+            }
+        }
+        expected_output_dict = {
+            "bbox": {
+                "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
+                "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
+            },
+            "prompt": [{"type": "point", "data": {"lat": 37.0, "lng": 15.0}, "label": 0}],
+            "zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True
+        }
+        output = get_parsed_request_body(input_event["event"])
+        assert output == ApiRequestBody.model_validate(input_event["event"])
+
+        input_event_str = json.dumps(input_event["event"])
+        output = get_parsed_request_body(input_event_str)
+        assert output == ApiRequestBody.model_validate(expected_output_dict)
+
+        event = {"body": base64_encode(input_event_str).decode("utf-8")}
+        output = get_parsed_request_body(event)
+        assert output == ApiRequestBody.model_validate(expected_output_dict)
+
+    @patch.object(wrappers_helpers, "providers")
+    def test_get_url_tile(self, providers_mocked):
+        import xyzservices
+        from samgis.io.wrappers_helpers import get_url_tile
+
+        from tests import LOCAL_URL_TILE
+
+        local_tile_provider = xyzservices.TileProvider(name="local_tile_provider", url=LOCAL_URL_TILE, attribution="")
+        expected_output = {'name': 'local_tile_provider', 'url': LOCAL_URL_TILE, 'attribution': ''}
+        providers_mocked.query_name.return_value = local_tile_provider
+        assert get_url_tile("OpenStreetMap") == expected_output
+
+        local_url = 'http://localhost:8000/{parameter}/{z}/{x}/{y}.png'
+        local_tile_provider = xyzservices.TileProvider(
+            name="local_tile_provider_param", url=local_url, attribution="", parameter="lamda_handler"
+        )
+        providers_mocked.query_name.return_value = local_tile_provider
+        assert get_url_tile("OpenStreetMap.HOT") == {
+            "parameter": "lamda_handler", 'name': 'local_tile_provider_param', 'url': local_url, 'attribution': ''
+        }
+
+    @staticmethod
+    def test_get_url_tile_real():
+        from samgis.io.wrappers_helpers import get_url_tile
+
+        assert get_url_tile("OpenStreetMap") == {
+            'url': 'https://tile.openstreetmap.org/{z}/{x}/{y}.png', 'max_zoom': 19,
+            'html_attribution': '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors',
+            'attribution': '(C) OpenStreetMap contributors',
+            'name': 'OpenStreetMap.Mapnik'}
+
+        html_attribution_hot = '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, '
+        html_attribution_hot += 'Tiles style by <a href="https://www.hotosm.org/" target="_blank">Humanitarian '
+        html_attribution_hot += 'OpenStreetMap Team</a> hosted by <a href="https://openstreetmap.fr/" target="_blank">'
+        html_attribution_hot += 'OpenStreetMap France</a>'
+        attribution_hot = '(C) OpenStreetMap contributors, Tiles style by Humanitarian OpenStreetMap Team hosted by '
+        attribution_hot += 'OpenStreetMap France'
+        assert get_url_tile("OpenStreetMap.HOT") == {
+            'url': 'https://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png', 'max_zoom': 19,
+            'html_attribution': html_attribution_hot, 'attribution': attribution_hot, 'name': 'OpenStreetMap.HOT'
+        }
tests/prediction_api/test_sam_onnx.py
DELETED
@@ -1,90 +0,0 @@
-import logging
-import unittest
-
-import numpy as np
-
-from samgis import MODEL_FOLDER
-from samgis.prediction_api.sam_onnx import SegmentAnythingONNX
-from samgis.utilities.constants import MODEL_ENCODER_NAME, MODEL_DECODER_NAME
-from samgis.utilities.utilities import hash_calculate
-from tests import TEST_EVENTS_FOLDER
-
-
-instance_sam_onnx = SegmentAnythingONNX(
-    encoder_model_path=MODEL_FOLDER / MODEL_ENCODER_NAME,
-    decoder_model_path=MODEL_FOLDER / MODEL_DECODER_NAME
-)
-np_img = np.load(TEST_EVENTS_FOLDER / "samexporter_predict" / "oceania" / "img.npy")
-prompt = [{
-    "type": "point",
-    "data": [934, 510],
-    "label": 0
-}]
-
-
-class TestSegmentAnythingONNX(unittest.TestCase):
-    def test_encode_predict_masks_ok(self):
-        embedding = instance_sam_onnx.encode(np_img)
-        try:
-            assert hash_calculate(embedding) == b"m2O3y7pNUwlLuAZhBHkRIu8cDIIej0oOmWOXevs39r4="
-        except AssertionError as ae1:
-            logging.warning(f"ae1:{ae1}.")
-        inference_mask = instance_sam_onnx.predict_masks(embedding, prompt)
-        try:
-            assert hash_calculate(inference_mask) == b'YSKKNCs3AMpbeDUVwqIwNQqJ365OG4239hxjFnW7XTM='
-        except AssertionError as ae2:
-            logging.warning(f"ae2:{ae2}.")
-        mask_output = np.zeros((inference_mask.shape[2], inference_mask.shape[3]), dtype=np.uint8)
-        for n, m in enumerate(inference_mask[0, :, :, :]):
-            logging.debug(f"{n}th of prediction_masks shape {inference_mask.shape}"
-                          f" => mask shape:{mask_output.shape}, {mask_output.dtype}.")
-            mask_output[m > 0.0] = 255
-        mask_expected = np.load(TEST_EVENTS_FOLDER / "SegmentAnythingONNX" / "mask_output.npy")
-
-        # assert MAP (mean average precision) is 100%
-        # sum expected mask to output mask:
-        # - asserted "good" inference values are 2 (matched object) or 0 (matched background)
-        # - "bad" inference value is 1 (there are differences between expected and output mask)
-        sum_mask_output_vs_expected = mask_expected / 255 + mask_output / 255
-        unique_values__output_vs_expected = np.unique(sum_mask_output_vs_expected, return_counts=True)
-        tot = sum_mask_output_vs_expected.size
-        perc = {
-            k: 100 * v / tot for
-            k, v in
-            zip(unique_values__output_vs_expected[0], unique_values__output_vs_expected[1])
-        }
-        try:
-            assert 1 not in perc
-        except AssertionError:
-            n_pixels = perc[1]
-            logging.error(f"found {n_pixels:.2%} different pixels between expected masks and output mask.")
-            # try to assert that the % of different pixels are minor than 5%
-            assert perc[1] < 5
-
-    def test_encode_predict_masks_ex1(self):
-        with self.assertRaises(Exception):
-            try:
-                np_input = np.zeros((10, 10))
-                instance_sam_onnx.encode(np_input)
-            except Exception as e:
-                logging.error(f"e:{e}.")
-                msg = "[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Invalid rank for input: input_image "
-                msg += "Got: 2 Expected: 3 Please fix either the inputs or the model."
-                assert str(e) == msg
-                raise e
-
-    def test_encode_predict_masks_ex2(self):
-        wrong_prompt = [{
-            "type": "rectangle",
-            "data": [934, 510],
-            "label": 0
-        }]
-        embedding = instance_sam_onnx.encode(np_img)
-
-        with self.assertRaises(IndexError):
-            try:
-                instance_sam_onnx.predict_masks(embedding, wrong_prompt)
-            except IndexError as ie:
-                print(ie)
-                assert str(ie) == "list index out of range"
-                raise ie
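Note: the deleted test's mask-agreement check is worth keeping as a pattern. With two binary masks scaled to 0/1, their sum is 2 where both mark the object, 0 where both mark background, and exactly 1 where they disagree. A standalone sketch of that check:

import numpy as np

def disagreement_percent(mask_a: np.ndarray, mask_b: np.ndarray) -> float:
    # masks are expected as uint8 images with values in {0, 255}
    summed = mask_a / 255 + mask_b / 255
    values, counts = np.unique(summed, return_counts=True)
    perc = {v: 100 * c / summed.size for v, c in zip(values, counts)}
    return perc.get(1.0, 0.0)  # % of pixels where the two masks differ

assert disagreement_percent(np.full((4, 4), 255, np.uint8), np.full((4, 4), 255, np.uint8)) == 0.0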
tests/test_fastapi_app.py
CHANGED
@@ -12,6 +12,10 @@ from tests.local_tiles_http_server import LocalTilesHttpServer
 from wrappers import fastapi_wrapper
 from wrappers.fastapi_wrapper import app
 
+
+infer_samgis = "/infer_samgis"
+response_status_code = "response.status_code:{}."
+response_body_loaded = "response.body_loaded:{}."
 client = TestClient(app)
 source = {
     'url': 'https://tile.openstreetmap.org/{z}/{x}/{y}.png', 'max_zoom': 19,
@@ -95,11 +99,11 @@ class TestFastapiApp(unittest.TestCase):
         assert response.status_code == 404
 
     def test_infer_samgis_422(self):
-        response = client.post(
-        print(
+        response = client.post(infer_samgis, json={})
+        print(response_status_code.format(response.status_code))
         assert response.status_code == 422
         body_loaded = response.json()
-        print(
+        print(response_body_loaded.format(body_loaded))
         assert body_loaded == {"msg": "Error - Unprocessable Entity"}
 
     def test_infer_samgis_middleware_500(self):
@@ -107,11 +111,11 @@ class TestFastapiApp(unittest.TestCase):
         local_event = deepcopy(event)
 
         local_event["source_type"] = "source_fake"
-        response = client.post(
-        print(
+        response = client.post(infer_samgis, json=local_event)
+        print(response_status_code.format(response.status_code))
         assert response.status_code == 500
         body_loaded = response.json()
-        print(
+        print(response_body_loaded.format(body_loaded))
         assert body_loaded == {'success': False}
 
     @patch.object(time, "time")
@@ -120,11 +124,11 @@ class TestFastapiApp(unittest.TestCase):
         time_mocked.return_value = 0
        samexporter_predict_mocked.side_effect = ValueError("I raise a value error!")
 
-        response = client.post(
-        print(
+        response = client.post(infer_samgis, json=event)
+        print(response_status_code.format(response.status_code))
         assert response.status_code == 500
         body = response.json()
-        print(
+        print(response_body_loaded.format(body))
         assert body == {'msg': 'Error - Internal Server Error'}
 
     @patch.object(wrappers_helpers, "get_url_tile")
@@ -141,19 +145,19 @@ class TestFastapiApp(unittest.TestCase):
         get_url_tile_mocked.return_value = local_tile_provider
 
         with LocalTilesHttpServer.http_server("localhost", listen_port, directory=TEST_EVENTS_FOLDER):
-            response = client.post(
-            print(
+            response = client.post(infer_samgis, json=event)
+            print(response_status_code.format(response.status_code))
             assert response.status_code == 200
             body_string = response.json()["body"]
             body_loaded = json.loads(body_string)
-            print(
+            print(response_body_loaded.format(body_loaded))
             assert "duration_run" in body_loaded
             output = body_loaded["output"]
             assert 'n_predictions' in output
             assert "n_shapes_geojson" in output
             geojson = output["geojson"]
            output_geojson = shapely.from_geojson(geojson)
-            print("output_geojson::"
+            print("output_geojson::{}.".format(output_geojson))
             assert isinstance(output_geojson, shapely.GeometryCollection)
             assert len(output_geojson.geoms) == 3
 
@@ -173,10 +177,10 @@ class TestFastapiApp(unittest.TestCase):
         }
         samexporter_predict_mocked.return_value = samexporter_output
 
-        response = client.post(
-        print(
+        response = client.post(infer_samgis, json=event)
+        print(response_status_code.format(response.status_code))
         assert response.status_code == 200
         response_json = response.json()
         body_loaded = json.loads(response_json["body"])
-        print(
+        print(response_body_loaded.format(body_loaded))
         self.assertDictEqual(body_loaded, {'duration_run': 0, 'output': samexporter_output})
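Note: the extracted constants above (infer_samgis plus the two format strings) replace literals that were repeated across five tests. A minimal self-contained sketch of the same TestClient pattern against a toy app:

from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()

@app.post("/infer_samgis")
def infer() -> dict:
    return {"msg": "ok"}

infer_samgis = "/infer_samgis"
response_status_code = "response.status_code:{}."

client = TestClient(app)
response = client.post(infer_samgis, json={})
print(response_status_code.format(response.status_code))  # response.status_code:200.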
tests/utilities/__init__.py
DELETED
File without changes
tests/utilities/test_serialize.py
DELETED
@@ -1,96 +0,0 @@
-import unittest
-
-import numpy as np
-
-from samgis.utilities.serialize import serialize
-
-test_dict_list_dict = {
-    "type": "FeatureCollection",
-    "name": "volcanoes",
-    "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}},
-    "features": [
-        {"type": "Feature", "properties": {"Volcano_Number": 283010, "Volcano_Name": "Izu-Tobu", "prop_none": None},
-         "geometry": {"type": "Point", "coordinates": [139.098, 34.9]}},
-        {"type": "Feature",
-         "properties": {"Volcano_Number": 283020, "Volcano_Name": "Hakoneyama", "ndarray": np.array([1])},
-         "geometry": {"type": "Point", "coordinates": [139.021, 35.233]}}
-    ]
-}
-
-
-class TestSerialize(unittest.TestCase):
-    def test_serialize(self):
-        from bson import ObjectId
-
-        # remove keys with values as bson.ObjectId
-        d1 = {"_id": ObjectId()}
-        self.assertDictEqual(serialize(d1), dict())
-
-        # test: serialize nd.float*, number as key => str
-        np_int_4 = np.asarray([87], dtype=np.integer)[0]
-        d2 = {"b": np.float32(45.0), 3: 33, 1.56: np_int_4, 3.5: 44.0, "d": "b", "tuple": (1, 2)}
-        expected_d2 = {
-            'b': 45.0,
-            3: 33,
-            1.56: 87,
-            3.5: 44.0,
-            'd': 'b',
-            "tuple": [1, 2]
-        }
-        serialized_d2 = serialize(d2)
-        self.assertDictEqual(serialized_d2, expected_d2)
-
-        # # nested dict of list of dict, serialize nd.array
-        d3 = {"e": [{"q": 123}, {"q": 456}], "a": np.arange(1.1, 16.88).reshape(4, 4)}
-        expected_d3 = {
-            "e": [{"q": 123}, {"q": 456}],
-            'a': [[1.1, 2.1, 3.1, 4.1], [5.1, 6.1, 7.1, 8.1], [9.1, 10.1, 11.1, 12.1], [13.1, 14.1, 15.1, 16.1]]
-        }
-        self.assertDictEqual(serialize(d3), expected_d3)
-
-    def test_serialize_dict_exception(self):
-        from json import JSONDecodeError
-
-        e = JSONDecodeError(msg="x", doc="what we are?", pos=111)
-        exception = serialize({"k": e})
-        self.assertDictEqual(
-            exception,
-            {'k': {'msg': 'x', 'type': "<class 'json.decoder.JSONDecodeError'>", 'doc': 'what we are?', 'pos': 111,
-                   'lineno': 1, 'colno': 112}}
-        )
-
-    def test_serialize_bytes(self):
-        self.assertDictEqual(
-            serialize({"k": b"x"}),
-            {'k': {'value': 'eA==', 'type': 'bytes'}}
-        )
-
-    def test_serialize_dict_list_dict(self):
-        serialized_dict_no_none = serialize(test_dict_list_dict, include_none=False)
-        self.assertDictEqual(serialized_dict_no_none, {
-            'type': 'FeatureCollection',
-            'name': 'volcanoes',
-            'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:OGC:1.3:CRS84'}},
-            'features': [
-                {'type': 'Feature', 'properties': {'Volcano_Number': 283010, 'Volcano_Name': 'Izu-Tobu'},
-                 'geometry': {'type': 'Point', 'coordinates': [139.098, 34.9]}},
-                {'type': 'Feature',
-                 'properties': {'Volcano_Number': 283020, 'Volcano_Name': 'Hakoneyama', 'ndarray': [1]},
-                 'geometry': {'type': 'Point', 'coordinates': [139.021, 35.233]}}
-            ]
-        })
-
-        serialized_dict_wiht_none = serialize(test_dict_list_dict, include_none=True)
-        self.assertDictEqual(serialized_dict_wiht_none, {
-            'type': 'FeatureCollection',
-            'name': 'volcanoes',
-            'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:OGC:1.3:CRS84'}},
-            'features': [
-                {'type': 'Feature',
-                 'properties': {'Volcano_Number': 283010, 'Volcano_Name': 'Izu-Tobu', 'prop_none': None},
-                 'geometry': {'type': 'Point', 'coordinates': [139.098, 34.9]}},
-                {'type': 'Feature',
-                 'properties': {'Volcano_Number': 283020, 'Volcano_Name': 'Hakoneyama', 'ndarray': [1]},
-                 'geometry': {'type': 'Point', 'coordinates': [139.021, 35.233]}}
-            ]
-        })
wrappers/fastapi_wrapper.py
CHANGED
@@ -10,7 +10,7 @@ from pydantic import ValidationError
 from samgis import PROJECT_ROOT_FOLDER
 from samgis.io.wrappers_helpers import get_parsed_bbox_points
 from samgis.utilities.type_hints import ApiRequestBody
-from samgis.utilities.fastapi_logger import setup_logging
+from samgis_core.utilities.fastapi_logger import setup_logging
 from samgis.prediction_api.predictors import samexporter_predict
 
 
@@ -33,8 +33,9 @@ async def request_middleware(request, call_next):
 
     finally:
         response.headers["X-Request-ID"] = request_id
-        app_logger.info(
-
+        app_logger.info("Request ended")
+
+    return response
 
 
 @app.post("/post_test")
@@ -49,7 +50,10 @@ async def post_test(request_input: ApiRequestBody) -> JSONResponse:
 
 @app.get("/health")
 async def health() -> JSONResponse:
-
+    from samgis.__version__ import __version__ as version
+    from samgis_core.__version__ import __version__ as version_core
+
+    app_logger.info(f"still alive, version:{version}, version_core:{version_core}.")
     return JSONResponse(status_code=200, content={"msg": "still alive..."})
 
 
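Note: two behavioral fixes hide in these hunks: the middleware now explicitly returns the response (without it the client would get no body back), and /health logs both package versions, making it easy to confirm which samgis_core a deployment resolved. A minimal sketch of the middleware shape (the header value here is illustrative):

from fastapi import FastAPI, Request

app = FastAPI()

@app.middleware("http")
async def request_middleware(request: Request, call_next):
    response = await call_next(request)
    response.headers["X-Request-ID"] = "example-id"  # illustrative value
    return response  # the explicit return this commit adds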