From 51ca4e8cf84aae2d7e25823bac9dc8c3f5e8cd5e Mon Sep 17 00:00:00 2001
From: Marc Lichtman
Date: Fri, 30 Aug 2024 13:52:28 -0400
Subject: [PATCH] Fixed searchid bug, added unit tests, improved main readme (#236)

* fixed searchid bug and added unit tests
* readme tweaks
* black line length
* change line length in flake settings
* revert line length change, going to do it in separate PR
* autoformatter
* retrigger github actions
* Delete scripts/test-mosaic
* hack for not needing secrets AZURITE_ACCOUNT_KEY
* shorten command
* testing output
* remove grep
* quotes
* trying another method
* trying another method
* remove extra echo
* tweaks
* remove echo
* use env
* test failure
* fail step
* try removing azurite from api validator
* autoformatter
* pipeline tweak
* Delete storage-use-azurite?tabs=visual-studio,blob-storage

---------

Co-authored-by: Marc Lichtman
---
 .github/workflows/pr.yml                  | 14 ++--
 README.md                                 | 25 ++++---
 pctiler/pctiler/endpoints/pg_mosaic.py    |  6 +-
 pctiler/tests/endpoints/test_pg_mosaic.py | 85 +++++++++++++++++++++--
 scripts/format-local                      | 19 +++++
 5 files changed, 123 insertions(+), 26 deletions(-)
 create mode 100755 scripts/format-local

diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml
index 71480a36..e31975b8 100644
--- a/.github/workflows/pr.yml
+++ b/.github/workflows/pr.yml
@@ -12,9 +12,11 @@ jobs:
       - uses: actions/checkout@v3

       - name: Set Azurite Default Key
-        run: |
-          echo "AZURITE_ACCOUNT_KEY=${{ secrets.AZURITE_ACCOUNT_KEY }}" >> $GITHUB_ENV
-          echo "Using Azurite default key: $AZURITE_ACCOUNT_KEY"
+        run: echo "AZURITE_ACCOUNT_KEY=$(curl https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azurite | grep "Account key:" | cut -b 24-111)" >> $GITHUB_ENV
+
+      - name: Verify Azurite Key was retrieved correctly
+        if: "!startsWith(env.AZURITE_ACCOUNT_KEY, 'Eby8')"
+        run: echo Failed to find key at learn.microsoft.com && exit 1

       - name: Run cibuild
         run: ./scripts/cibuild
@@ -29,9 +31,7 @@ jobs:
           cache: "pip"

       - name: Set Azurite Default Key
-        run: |
-          echo "AZURITE_ACCOUNT_KEY=${{ secrets.AZURITE_ACCOUNT_KEY }}" >> $GITHUB_ENV
-          echo "Using Azurite default key: $AZURITE_ACCOUNT_KEY"
+        run: echo "AZURITE_ACCOUNT_KEY=$(curl https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azurite | grep "Account key:" | cut -b 24-111)" >> $GITHUB_ENV

-      - name: Validate
+      - name: API Validator
         run: ./scripts/validate
diff --git a/README.md b/README.md
index 30a7bbf3..4cea8d0b 100644
--- a/README.md
+++ b/README.md
@@ -11,9 +11,9 @@ This repository contains two components of the Planetary Computer APIs: the STAC

 The `pcstac` project provides a STAC API which indexes Microsoft's publicly available [geospatial data](https://planetarycomputer.microsoft.com/catalog) and an API for searching through this large collection. The `pctiler` provides visualization and data access capabilities for the data in the Planetary Computer.

-## Azure Functions
+## Azure Functions (a.k.a. Funcs)

-This repository also contains Azure Functions that provide additional endpoints for working with Planetary Computer data and metadata. This includes Function endpoints for generating images and animations based on STAC searches, using the tiler to render mosaiced data from Collections
+This repository also contains Azure Functions that provide additional endpoints for working with Planetary Computer data and metadata. This includes Function endpoints for generating images and animations based on STAC searches, using the tiler to render mosaiced data from Collections.
 ## Collection configuration

@@ -61,26 +61,27 @@ This project uses a variation on [scripts to rule them all](https://github.com/g

 #### Environment setup and building images

-Before setting up the local environment, ensure that you have set the AZURITE_ACCOUNT_KEY environment variable.
-The account key can be found in the [Azurite GitHub repository](https://github.com/Azure/Azurite?tab=readme-ov-file#usage-with-azure-storage-sdks-or-tools)
+Before setting up the local environment, you must set the AZURITE_ACCOUNT_KEY environment variable to the Azurite default
+account key, a string that can be [found here](https://github.com/Azure/Azurite?tab=readme-ov-file#usage-with-azure-storage-sdks-or-tools).

 For example, you can set the environment variable in your terminal with:
+
 ```console
-> export AZURITE_ACCOUNT_KEY=
+export AZURITE_ACCOUNT_KEY=
 ```

-To set up a local environment, use
+To set up a local environment, use:

 ```console
-> ./scripts/setup
+./scripts/setup
 ```

 This will build containers, apply database migrations, and load the development data.

-After migrations and development database loading are in place, you can rebuild the docker images with
+After migrations and development database loading are in place, you can rebuild the docker images with:

 ```console
-> ./scripts/update
+./scripts/update
 ```

 `pip` dependencies in `setup.py` are collected and installed through requirements files.
@@ -99,11 +100,13 @@ az login
 To run the servers, use

 ```console
-> ./scripts/server
+./scripts/server
 ```

 This will bring up the development database, STAC API, Tiler, Azure Functions, and other services.

+To test the tiler, try going to .
+
 #### Testing and formatting

 To run tests, use
@@ -148,4 +151,4 @@ See the [Helm chart repository](https://microsoft.github.io/planetary-computer-a

 ### Functions

-See the [Function package repository](https://microsoft.github.io/planetary-computer-apis) published to GitHub pages for the published Azure Functions.
+See the [Function package repository](https://microsoft.github.io/planetary-computer-apis) published to GitHub pages for the published Azure Functions.
\ No newline at end of file
diff --git a/pctiler/pctiler/endpoints/pg_mosaic.py b/pctiler/pctiler/endpoints/pg_mosaic.py
index 3dd77839..f8e692fb 100644
--- a/pctiler/pctiler/endpoints/pg_mosaic.py
+++ b/pctiler/pctiler/endpoints/pg_mosaic.py
@@ -10,6 +10,7 @@
 from titiler.core.factory import img_endpoint_params
 from titiler.core.resources.enums import ImageType
 from titiler.pgstac.dependencies import SearchIdParams, TmsTileParams
+from titiler.pgstac.extensions import searchInfoExtension
 from titiler.pgstac.factory import MosaicTilerFactory

 from pccommon.config import get_collection_config
@@ -22,7 +23,6 @@

 @dataclass
 class AssetsBidxExprParams(dependencies.AssetsBidxExprParams):
-
     collection: str = Query(None, description="STAC Collection ID")


@@ -45,9 +45,11 @@ def __init__(self, request: Request):
     colormap_dependency=PCColorMapParams,
     layer_dependency=AssetsBidxExprParams,
     reader_dependency=ReaderParams,
-    router_prefix=get_settings().mosaic_endpoint_prefix + "/{search_id}",
+    router_prefix=get_settings().mosaic_endpoint_prefix
+    + "/{search_id}",  # reverts /searches back to /mosaic
     backend_dependency=BackendParams,
     add_statistics=False,
+    extensions=[searchInfoExtension()],
 )
diff --git a/pctiler/tests/endpoints/test_pg_mosaic.py b/pctiler/tests/endpoints/test_pg_mosaic.py
index 508bb5aa..a17e7150 100644
--- a/pctiler/tests/endpoints/test_pg_mosaic.py
+++ b/pctiler/tests/endpoints/test_pg_mosaic.py
@@ -10,7 +10,6 @@

 @pytest.fixture
 async def register_search(client: AsyncClient) -> REGISTER_TYPE:
-
     cql = {
         "filter-lang": "cql2-json",
         "filter": {
@@ -18,13 +17,40 @@ async def register_search(client: AsyncClient) -> REGISTER_TYPE:
             "args": [{"op": "=", "args": [{"property": "collection"}, "naip"]}],
         },
     }
-    expected_content_hash = "8b989f86a149628eabfde894fb965982"
     response = await client.post("/mosaic/register", json=cql)
+    expected_content_hash = response.json()["searchid"]
     resp = response.json()
-
     return (expected_content_hash, resp)


+async def test_register_search_with_geom(client: AsyncClient) -> None:
+    geom = {
+        "type": "Polygon",
+        "coordinates": [
+            [
+                [-79.09062791441062, 43.08554661560049],
+                [-79.0629876337021, 43.08554661560049],
+                [-79.0629876337021, 43.067969831431895],
+                [-79.09062791441062, 43.067969831431895],
+                [-79.09062791441062, 43.08554661560049],
+            ]
+        ],
+    }
+    cql = {
+        "filter-lang": "cql2-json",
+        "filter": {
+            "op": "and",
+            "args": [
+                {"op": "=", "args": [{"property": "collection"}, "naip"]},
+                {"op": "s_intersects", "args": [{"property": "geometry"}, geom]},
+            ],
+        },
+    }
+    response = await client.post("/mosaic/register", json=cql)
+    assert response.status_code == 200
+    assert response.json()["searchid"] == "2607eab51afd5d626da8d50d9df3bbf0"
+
+
 @pytest.mark.asyncio
 async def test_mosaic_info(client: AsyncClient) -> None:
     response = await client.get("/mosaic/info?collection=naip")
@@ -36,18 +62,20 @@ async def test_mosaic_info(client: AsyncClient) -> None:

 @pytest.mark.asyncio
 async def test_register(client: AsyncClient, register_search: REGISTER_TYPE) -> None:
-
     expected_content_hash, register_resp = register_search

     # Test that `searchid` which has been removed in titiler remains in pctiler,
     # and that the search hash remains consistent
     assert register_resp["searchid"] == expected_content_hash

     # Test that the links have had the {tileMatrixSetId} template string removed
-    assert len(register_resp["links"]) == 2
+    assert len(register_resp["links"]) == 3
     assert register_resp["links"][0]["href"].endswith(
+        f"/mosaic/{expected_content_hash}/info"
+    )  # gets added by searchInfoExtension
+    assert register_resp["links"][1]["href"].endswith(
         f"/mosaic/{expected_content_hash}/tilejson.json"
     )
-    assert register_resp["links"][1]["href"].endswith(
+    assert register_resp["links"][2]["href"].endswith(
         f"/mosaic/{expected_content_hash}/WMTSCapabilities.xml"
     )
@@ -101,6 +129,51 @@ async def test_mosaic_tile_routes(
     assert response.status_code == 200


+async def test_info_path_with_searchid(
+    client: AsyncClient, register_search: REGISTER_TYPE
+) -> None:
+    # route source code is in titiler/mosaic/titiler/mosaic/factory.py#L157
+    # the searchId functionality is added by titiler-pgstac
+    route = "mosaic/{searchId}/info"
+    expected_content_hash, _ = register_search
+    formatted_route = route.format(searchId=expected_content_hash)
+    url = (
+        f"/{formatted_route}?asset_bidx=image%7C1%2C2%2C3&assets=image&collection=naip"
+    )
+    response = await client.get(url)
+    assert response.status_code == 200
+
+
+async def test_info_path_with_bad_searchid(client: AsyncClient) -> None:
+    route = "mosaic/{searchId}/info"
+
+    # does not match the one we registered in the fixture
+    expected_content_hash = "9b989f86a149628eabfde894fb965982"
+
+    formatted_route = route.format(searchId=expected_content_hash)
+    url = (
+        f"/{formatted_route}?asset_bidx=image%7C1%2C2%2C3&assets=image&collection=naip"
+    )
+    response = await client.get(url)
+    assert response.status_code == 404
+
+
+async def test_bad_searchid(client: AsyncClient) -> None:
+    route = "mosaic/tiles/{searchId}/{z}/{x}/{y}"
+
+    # does not match the one we registered in the fixture
+    expected_content_hash = "9b989f86a149628eabfde894fb965982"
+
+    formatted_route = route.format(
+        searchId=expected_content_hash, z=16, x=17218, y=26838
+    )
+    url = (
+        f"/{formatted_route}?asset_bidx=image%7C1%2C2%2C3&assets=image&collection=naip"
+    )
+    response = await client.get(url)
+    assert response.status_code == 404
+
+
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
     "route",
diff --git a/scripts/format-local b/scripts/format-local
new file mode 100755
index 00000000..5a80bb3f
--- /dev/null
+++ b/scripts/format-local
@@ -0,0 +1,19 @@
+#!/bin/bash
+isort --overwrite-in-place pccommon
+isort --overwrite-in-place pccommon/pccommon
+isort --overwrite-in-place pccommon/tests
+isort --overwrite-in-place pcfuncs
+isort --overwrite-in-place scripts/bin
+isort --overwrite-in-place pctiler/pctiler
+isort --overwrite-in-place pctiler/tests
+isort --overwrite-in-place pcstac/pcstac
+isort --overwrite-in-place pcstac/tests
+black pccommon/pccommon
+black pccommon/tests
+black pcfuncs
+black scripts/bin
+black pcstac/pcstac
+black pcstac/tests
+black pccommon
+black pctiler/pctiler
+black pctiler/tests
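
To try the fixed `searchid` flow outside of pytest, the sketch below mirrors what the new tests exercise: it registers the same CQL2 search as the `register_search` fixture, reads back the `searchid` that pctiler preserves, and follows the `/mosaic/{search_id}/info` route contributed by `searchInfoExtension`. It is an illustration only, not part of the patch; the base URL is an assumption about the local dev stack started by `./scripts/server`, so adjust it for your environment.

```python
# Minimal sketch (not part of the patch): register a mosaic search and follow
# the /info link added by searchInfoExtension. Assumes the dev stack from
# ./scripts/server is running; BASE_URL is a placeholder for your local tiler.
import httpx

BASE_URL = "http://localhost:8080"  # hypothetical local address -- adjust as needed

cql = {
    "filter-lang": "cql2-json",
    "filter": {
        "op": "and",
        "args": [{"op": "=", "args": [{"property": "collection"}, "naip"]}],
    },
}

with httpx.Client(base_url=BASE_URL) as client:
    # pctiler keeps the `searchid` key that upstream titiler has dropped,
    # which is exactly what test_register asserts.
    registered = client.post("/mosaic/register", json=cql).json()
    search_id = registered["searchid"]

    # The first link in the register response is the /info href added by
    # searchInfoExtension; an unknown search_id here should return a 404.
    info = client.get(
        f"/mosaic/{search_id}/info",
        params={"assets": "image", "asset_bidx": "image|1,2,3", "collection": "naip"},
    )
    print(info.status_code)
    print(info.json())
```

The query parameters (`assets`, `asset_bidx`, `collection`) match the ones used in `test_info_path_with_searchid`, so a 200 for a registered hash and a 404 for a bad one reproduce the behaviour the new tests pin down.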
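The pr.yml change swaps the AZURITE_ACCOUNT_KEY secret for a lookup of the well-known Azurite development key on the Microsoft Learn page, followed by a check that the value starts with `Eby8`. For local troubleshooting, a rough Python equivalent of that `curl | grep | cut` step is sketched below; it is an illustration only, and the regex is an assumption about how the key is rendered on the page.

```python
# Illustration only: approximates the workflow's
#   curl <storage-use-azurite page> | grep "Account key:" | cut -b 24-111
# step, including the "Eby8" prefix sanity check from the Verify step.
import re
import urllib.request

DOCS_URL = "https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azurite"

html = urllib.request.urlopen(DOCS_URL).read().decode("utf-8", errors="replace")

# Assumes the page still lists the key right after the text "Account key:".
match = re.search(r"Account key:\s*([A-Za-z0-9+/=]{80,100})", html)
if match is None or not match.group(1).startswith("Eby8"):
    raise SystemExit("Failed to find the Azurite default key at learn.microsoft.com")

print(f"AZURITE_ACCOUNT_KEY={match.group(1)}")
```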