From ab89453ab7c307a8edebcbda17bebfdde6594f68 Mon Sep 17 00:00:00 2001
From: Antoine Lavenant
Date: Fri, 28 Jun 2024 11:00:44 +0200
Subject: [PATCH 01/13] add function to remove dimensions from las files

---
 .gitignore                         |  1 +
 pdaltools/las_remove_dimensions.py | 59 +++++++++++++++++++++++++++++++
 test/test_las_remove_dimensions.py | 51 ++++++++++++++++++++++++++
 3 files changed, 111 insertions(+)
 create mode 100644 pdaltools/las_remove_dimensions.py
 create mode 100644 test/test_las_remove_dimensions.py

diff --git a/.gitignore b/.gitignore
index b5f6a44..08af87f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,4 @@ tmp
 __pycache__
 ign_pdal_tools.egg-info
 dist
+.idea*
diff --git a/pdaltools/las_remove_dimensions.py b/pdaltools/las_remove_dimensions.py
new file mode 100644
index 0000000..0063d0a
--- /dev/null
+++ b/pdaltools/las_remove_dimensions.py
@@ -0,0 +1,59 @@
+import argparse
+import os
+from typing import List
+
+import pdal
+from pdaltools.las_info import get_writer_parameters_from_reader_metadata
+
+def remove_dimensions_from_las(input_las: str, dimensions: List[str], output_las: str):
+    """
+    Export a new las file, without the given dimensions
+    """
+    pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
+    pipeline.execute()
+    points = pipeline.arrays[0]
+    input_dimensions = list(points.dtype.fields.keys())
+    output_dimensions = [dim for dim in input_dimensions if dim not in dimensions]
+    points_pruned = points[output_dimensions]
+    params = get_writer_parameters_from_reader_metadata(pipeline.metadata)
+    pipeline_end = pdal.Pipeline(arrays=[points_pruned])
+    pipeline_end |= pdal.Writer.las(output_las, forward="all", **params)
+    pipeline_end.execute()
+
+
+def parse_args():
+    parser = argparse.ArgumentParser("Remove dimensions from las")
+    parser.add_argument(
+        "--input_las",
+        "-i",
+        type=str,
+        required=True,
+        help="Path to the las file from which the dimensions will be removed",
+    )
+    parser.add_argument(
+        "--output_las",
+        "-o",
+        type=str,
+        required=False,
+        help="Path to the output las file; if not provided, the input las is overwritten",
+    )
+    parser.add_argument(
+        "--dimensions",
+        "-d",
+        type=str,
+        required=True,
+        nargs="+",
+        help="The dimensions to remove from the point cloud file; be careful not to remove "
+        "mandatory las dimensions"
+    )
+
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    remove_dimensions_from_las(
+        input_las=args.input_las,
+        dimensions=args.dimensions,
+        output_las=args.input_las if args.output_las is None else args.output_las,
+    )
diff --git a/test/test_las_remove_dimensions.py b/test/test_las_remove_dimensions.py
new file mode 100644
index 0000000..825931a
--- /dev/null
+++ b/test/test_las_remove_dimensions.py
@@ -0,0 +1,51 @@
+import tempfile
+import pdal
+import numpy
+import os
+import logging
+import pytest
+
+from pdaltools import las_remove_dimensions
+
+TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+INPUT_DIR = os.path.join(TEST_PATH, "data")
+
+
+def get_points(input_las):
+    pipeline_read_ini = pdal.Pipeline() | pdal.Reader.las(input_las)
+    pipeline_read_ini.execute()
+    return pipeline_read_ini.arrays[0]
+
+
+def test_remove_dimension():
+    ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
+    added_dimensions = ["DIM_1", "DIM_2"]
+
+    # get initial data
+    points_ini = get_points(ini_las)
+
+    with tempfile.NamedTemporaryFile(suffix="_add.las") as tmp_las:
+        # append dimensions
+        pipeline = pdal.Pipeline()
+        pipeline |= pdal.Reader.las(ini_las)
+        pipeline |= pdal.Filter.ferry(dimensions="=>" + ", 
=>".join(added_dimensions)) + pipeline |= pdal.Writer.las(tmp_las.name, extra_dims="all", forward="all", ) + pipeline.execute() + + # remove all dimensions + with tempfile.NamedTemporaryFile(suffix="_rm.las") as tmp_las_rm: + las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, added_dimensions, tmp_las_rm.name) + points_end = get_points(tmp_las_rm.name) + assert numpy.array_equal(points_ini, points_end) # output data should be the same + + # remove one dimension + with tempfile.NamedTemporaryFile(suffix="_rm.las") as tmp_las_rm: + las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, added_dimensions[:1] , tmp_las_rm.name) + points_end = get_points(tmp_las_rm.name) + with pytest.raises(Exception): + numpy.array_equal(points_ini, points_end) # output data should not be the same + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + test_remove_dimension() From 962978d84ca5acb638e5fdec8981f0f385c2d408 Mon Sep 17 00:00:00 2001 From: Antoine Lavenant Date: Fri, 28 Jun 2024 11:17:28 +0200 Subject: [PATCH 02/13] update gitignore --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 08af87f..b0e7f80 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,4 @@ tmp __pycache__ ign_pdal_tools.egg-info dist -.idea* +*.idea* From cff006b493077405d756e047acaeb337bf322310 Mon Sep 17 00:00:00 2001 From: Antoine Lavenant Date: Fri, 28 Jun 2024 11:49:38 +0200 Subject: [PATCH 03/13] split tests --- CHANGELOG.md | 3 +++ test/test_las_remove_dimensions.py | 23 +++++++++++++++++++---- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 04278b2..79319ad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,6 @@ +# 1.7.0 +- las_remove_dimension: new tool to remove one or many dimensions + # 1.6.0 - color: choose streams for RGB colorization, and IRC colorization (doc https://geoservices.ign.fr/services-web-experts-ortho) - color: detect white images. 
diff --git a/test/test_las_remove_dimensions.py b/test/test_las_remove_dimensions.py index 825931a..1532630 100644 --- a/test/test_las_remove_dimensions.py +++ b/test/test_las_remove_dimensions.py @@ -10,6 +10,8 @@ TEST_PATH = os.path.dirname(os.path.abspath(__file__)) INPUT_DIR = os.path.join(TEST_PATH, "data") +ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz") +added_dimensions = ["DIM_1", "DIM_2"] def get_points(input_las): pipeline_read_ini = pdal.Pipeline() | pdal.Reader.las(input_las) @@ -17,9 +19,7 @@ def get_points(input_las): return pipeline_read_ini.arrays[0] -def test_remove_dimension(): - ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz") - added_dimensions = ["DIM_1", "DIM_2"] +def test_remove_all_dimension(): # get initial data points_ini = get_points(ini_las) @@ -38,6 +38,20 @@ def test_remove_dimension(): points_end = get_points(tmp_las_rm.name) assert numpy.array_equal(points_ini, points_end) # output data should be the same + +def test_remove_one_dimension(): + + # get initial data + points_ini = get_points(ini_las) + + with tempfile.NamedTemporaryFile(suffix="_add.las") as tmp_las: + # append dimensions + pipeline = pdal.Pipeline() + pipeline |= pdal.Reader.las(ini_las) + pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions)) + pipeline |= pdal.Writer.las(tmp_las.name, extra_dims="all", forward="all", ) + pipeline.execute() + # remove one dimension with tempfile.NamedTemporaryFile(suffix="_rm.las") as tmp_las_rm: las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, added_dimensions[:1] , tmp_las_rm.name) @@ -48,4 +62,5 @@ def test_remove_dimension(): if __name__ == "__main__": logging.basicConfig(level=logging.INFO) - test_remove_dimension() + test_remove_all_dimension() + test_remove_one_dimension() From ce9ab5eedde4c4a68a24f080acacd8fceb6ea711 Mon Sep 17 00:00:00 2001 From: Antoine Lavenant Date: Fri, 28 Jun 2024 15:34:43 +0200 Subject: [PATCH 04/13] update tests --- CHANGELOG.md | 4 +-- test/test_las_remove_dimensions.py | 39 +++++++++++++++--------------- 2 files changed, 21 insertions(+), 22 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 79319ad..a35552f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,7 @@ -# 1.7.0 -- las_remove_dimension: new tool to remove one or many dimensions - # 1.6.0 - color: choose streams for RGB colorization, and IRC colorization (doc https://geoservices.ign.fr/services-web-experts-ortho) - color: detect white images. 
- las_remove_dimension: new tool to remove one or many dimensions
 
 # 1.5.2
 - refactor tool to propagate header infos from one pipeline to another to use it by itself
diff --git a/test/test_las_remove_dimensions.py b/test/test_las_remove_dimensions.py
index 1532630..00eb282 100644
--- a/test/test_las_remove_dimensions.py
+++ b/test/test_las_remove_dimensions.py
@@ -13,11 +13,18 @@
 ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
 added_dimensions = ["DIM_1", "DIM_2"]
 
-def get_points(input_las):
+def get_points(input_las: str):
     pipeline_read_ini = pdal.Pipeline() | pdal.Reader.las(input_las)
     pipeline_read_ini.execute()
     return pipeline_read_ini.arrays[0]
 
+def append_dimension(input_las: str, output_las: str):
+    pipeline = pdal.Pipeline()
+    pipeline |= pdal.Reader.las(input_las)
+    pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))
+    pipeline |= pdal.Writer.las(output_las, extra_dims="all", forward="all")
+    pipeline.execute()
+
 
 def test_remove_all_dimension():
 
@@ -25,15 +32,9 @@ def test_remove_all_dimension():
     points_ini = get_points(ini_las)
 
     with tempfile.NamedTemporaryFile(suffix="_add.las") as tmp_las:
-        # append dimensions
-        pipeline = pdal.Pipeline()
-        pipeline |= pdal.Reader.las(ini_las)
-        pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))
-        pipeline |= pdal.Writer.las(tmp_las.name, extra_dims="all", forward="all", )
-        pipeline.execute()
-
-        # remove all dimensions
+        append_dimension(ini_las, tmp_las.name)
         with tempfile.NamedTemporaryFile(suffix="_rm.las") as tmp_las_rm:
+            # remove all dimensions
             las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, added_dimensions, tmp_las_rm.name)
             points_end = get_points(tmp_las_rm.name)
             assert numpy.array_equal(points_ini, points_end) # output data should be the same
 
@@ -45,18 +46,18 @@ def test_remove_one_dimension():
 
     # get initial data
     points_ini = get_points(ini_las)
 
     with tempfile.NamedTemporaryFile(suffix="_add.las") as tmp_las:
-        # append dimensions
-        pipeline = pdal.Pipeline()
-        pipeline |= pdal.Reader.las(ini_las)
-        pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))
-        pipeline |= pdal.Writer.las(tmp_las.name, extra_dims="all", forward="all", )
-        pipeline.execute()
-
-        # remove one dimension
+        append_dimension(ini_las, tmp_las.name)
         with tempfile.NamedTemporaryFile(suffix="_rm.las") as tmp_las_rm:
-            las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, added_dimensions[:1] , tmp_las_rm.name)
+            # remove one dimension
+            las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, ["DIM_1"], tmp_las_rm.name)
             points_end = get_points(tmp_las_rm.name)
-            with pytest.raises(Exception):
+
+            assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0  # should still contain DIM_2
+
+            with pytest.raises(ValueError):
+                list(points_end.dtype.fields.keys()).index("DIM_1")  # should not have DIM_1
+
+            with pytest.raises(TypeError):
                 numpy.array_equal(points_ini, points_end) # output data should not be the same

From 71f73ea7e1d4ac8a7edbbdb65ae3216622a57919 Mon Sep 17 00:00:00 2001
From: Antoine Lavenant
Date: Fri, 28 Jun 2024 23:25:14 +0200
Subject: [PATCH 05/13] update changelog

---
 CHANGELOG.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a35552f..62e1336 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,8 @@
+- las_remove_dimension: new tool to remove one or many dimensions
+
 # 1.6.0
 - color: choose streams for RGB colorization, and IRC colorization (doc 
https://geoservices.ign.fr/services-web-experts-ortho) - color: detect white images. -- las_remove_dimension: new tool to remove one or many dimensions # 1.5.2 - refactor tool to propagate header infos from one pipeline to another to use it by itself From 6830c31509d4e92a3dd8f694e37659a157c6d904 Mon Sep 17 00:00:00 2001 From: Lea Vauchier Date: Tue, 16 Jul 2024 15:20:12 +0200 Subject: [PATCH 06/13] Deploy on ghcr.io instead of dockerhub --- .github/workflows/cicd_deploy.yml | 65 +++++++++++++++++-------------- .github/workflows/cicd_full.yml | 40 +++++++------------ CHANGELOG.md | 1 + Makefile | 14 ++++--- 4 files changed, 61 insertions(+), 59 deletions(-) diff --git a/.github/workflows/cicd_deploy.yml b/.github/workflows/cicd_deploy.yml index 52d4ec3..9728eba 100644 --- a/.github/workflows/cicd_deploy.yml +++ b/.github/workflows/cicd_deploy.yml @@ -5,68 +5,75 @@ on: # Also run when the pull request merges (which generates a push) # So that we can tag the docker image appropriately. push: - branches: - - master + branches: [ "master" ] + tags: [ 'v*.*.*' ] env: - DOCKER_REPO: ignimagelidar/ign-pdal-tools + IMAGE_NAME: ${{ github.repository }} + REGISTRY: ghcr.io + TEST_TAG: ${{ github.repository }}:test jobs: deploy_docker: runs-on: ubuntu-latest + permissions: + packages: write + steps: - name: Checkout branch - uses: actions/checkout@v3 + uses: actions/checkout@v4 - - name: Login to Docker Hub - uses: docker/login-action@v2 + - name: Build the Docker image + id: build + uses: docker/build-push-action@v5 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Build and export to Docker - uses: docker/build-push-action@v4 - with: - context: . load: true - tags: ${{ env.DOCKER_REPO }}:test + tags: ${{ env.TEST_TAG }} - - name: Set version number - run: | - echo "VERSION=$(docker run ${{ env.DOCKER_REPO }}:test python -m pdaltools._version)" >> $GITHUB_ENV - - - name: Run pytest + # run the test on the docker image + - name: Run tests in docker image run: > docker run --ipc=host - ${{ env.DOCKER_REPO }}:test + ${{ env.TEST_TAG }} python -m pytest ./test -s --log-cli-level DEBUG - - - name: Build and push - uses: docker/build-push-action@v4 + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + if: github.event_name != 'pull_request' + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + # Build an Docker image with Buildx (don't on PR) + # https://github.com/docker/build-push-action + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@v5 with: context: . 
push: true - tags: ${{ env.DOCKER_REPO }}:latest,${{ env.DOCKER_REPO }}:${{ env.VERSION }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} deploy-pypi: runs-on: ubuntu-latest + environment: name: pypi url: https://pypi.org/p/ign-pdal-tools + permissions: contents: read packages: write id-token: write # IMPORTANT: this permission is mandatory for trusted publishing - steps: - name: Checkout branch - uses: actions/checkout@v3 + uses: actions/checkout@v4 # See https://github.com/marketplace/actions/setup-micromamba - name: setup-micromamba diff --git a/.github/workflows/cicd_full.yml b/.github/workflows/cicd_full.yml index d51bfdb..e1754b3 100644 --- a/.github/workflows/cicd_full.yml +++ b/.github/workflows/cicd_full.yml @@ -13,44 +13,36 @@ on: - dev env: - DOCKER_REPO: ignimagelidar/ign-pdal-tools + IMAGE_NAME: ${{ github.repository }} + REGISTRY: ghcr.io + TEST_TAG: ${{ github.repository }}:test jobs: - test_docker: + deploy_docker: runs-on: ubuntu-latest + permissions: + packages: write + steps: - name: Checkout branch - uses: actions/checkout@v3 + uses: actions/checkout@v4 - - name: Login to Docker Hub - uses: docker/login-action@v2 + - name: Build the Docker image + id: build + uses: docker/build-push-action@v5 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Build and export to Docker - uses: docker/build-push-action@v4 - with: - context: . load: true - tags: ${{ env.DOCKER_REPO }}:test - - - name: Set version number - run: | - echo "VERSION=$(docker run ${{ env.DOCKER_REPO }}:test python -m pdaltools._version)" >> $GITHUB_ENV + tags: ${{ env.TEST_TAG }} - - name: Run pytest + # run the test on the docker image + - name: Run tests in docker image run: > docker run --ipc=host - ${{ env.DOCKER_REPO }}:test + ${{ env.TEST_TAG }} python -m pytest ./test -s --log-cli-level DEBUG - test_local: runs-on: ubuntu-latest environment: @@ -59,8 +51,6 @@ jobs: permissions: contents: read packages: write - id-token: write # IMPORTANT: this permission is mandatory for trusted publishing - steps: - name: Checkout branch diff --git a/CHANGELOG.md b/CHANGELOG.md index 62e1336..d8c4e2c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,5 @@ - las_remove_dimension: new tool to remove one or many dimensions +- deploy on ghcr.io instead of dockerhub # 1.6.0 - color: choose streams for RGB colorization, and IRC colorization (doc https://geoservices.ign.fr/services-web-experts-ortho) diff --git a/Makefile b/Makefile index 04bcec9..edc090a 100644 --- a/Makefile +++ b/Makefile @@ -53,17 +53,21 @@ clean: # Build/deploy Docker image ############################## -PROJECT_NAME=ignimagelidar/ign-pdal-tools +REGISTRY=ghcr.io +NAMESPACE=ignf +IMAGE_NAME=ign-pdal-tools VERSION=`python -m pdaltools._version` +FULL_IMAGE_NAME=${REGISTRY}/${NAMESPACE}/${IMAGE_NAME}:${VERSION} docker-build: clean - docker build --no-cache -t ${PROJECT_NAME}:${VERSION} -f Dockerfile . + docker build --no-cache -t ${IMAGE_NAME}:${VERSION} -f Dockerfile . 
 docker-test:
-	docker run --rm -it ${PROJECT_NAME}:${VERSION} python -m pytest -s
+	docker run --rm -it ${IMAGE_NAME}:${VERSION} python -m pytest -s
 
 docker-remove:
-	docker rmi -f `docker images | grep ${PROJECT_NAME} | tr -s ' ' | cut -d ' ' -f 3`
+	docker rmi -f `docker images | grep ${IMAGE_NAME}:${VERSION} | tr -s ' ' | cut -d ' ' -f 3`
 
 docker-deploy:
-	docker push ${PROJECT_NAME}:${VERSION}
+	docker tag ${IMAGE_NAME}:${VERSION} ${FULL_IMAGE_NAME}
+	docker push ${FULL_IMAGE_NAME}

From 909bff5130af1e172a8fbe7350a48b7a7d1372c0 Mon Sep 17 00:00:00 2001
From: Lea Vauchier
Date: Tue, 16 Jul 2024 15:22:46 +0200
Subject: [PATCH 07/13] Update github actions versions

---
 .github/workflows/cicd_deploy.yml | 2 +-
 .github/workflows/cicd_full.yml   | 2 +-
 .github/workflows/cicd_light.yml  | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/cicd_deploy.yml b/.github/workflows/cicd_deploy.yml
index 9728eba..bea78c1 100644
--- a/.github/workflows/cicd_deploy.yml
+++ b/.github/workflows/cicd_deploy.yml
@@ -77,7 +77,7 @@ jobs:
 
       # See https://github.com/marketplace/actions/setup-micromamba
       - name: setup-micromamba
-        uses: mamba-org/setup-micromamba@v1.4.3
+        uses: mamba-org/setup-micromamba@v1.9.0
        with:
           environment-file: environment.yml
           environment-name: pdaltools  # activate the environment
diff --git a/.github/workflows/cicd_full.yml b/.github/workflows/cicd_full.yml
index e1754b3..b0a6033 100644
--- a/.github/workflows/cicd_full.yml
+++ b/.github/workflows/cicd_full.yml
@@ -58,7 +58,7 @@ jobs:
 
       # See https://github.com/marketplace/actions/setup-micromamba
       - name: setup-micromamba
-        uses: mamba-org/setup-micromamba@v1.4.3
+        uses: mamba-org/setup-micromamba@v1.9.0
         with:
           environment-file: environment.yml
           environment-name: pdaltools  # activate the environment
diff --git a/.github/workflows/cicd_light.yml b/.github/workflows/cicd_light.yml
index a77f55d..eb3f3f4 100644
--- a/.github/workflows/cicd_light.yml
+++ b/.github/workflows/cicd_light.yml
@@ -16,11 +16,11 @@ jobs:
 
     steps:
     - name: Checkout branch
-      uses: actions/checkout@v3
+      uses: actions/checkout@v4
 
     # See https://github.com/marketplace/actions/setup-micromamba
     - name: setup-micromamba
-      uses: mamba-org/setup-micromamba@v1.4.3
+      uses: mamba-org/setup-micromamba@v1.9.0
      with:
        environment-file: environment.yml
        environment-name: pdaltools  # activate the environment

From 971c9fb2f4d5654b5ab003a034635b35ff67e955 Mon Sep 17 00:00:00 2001
From: Lea Vauchier
Date: Tue, 16 Jul 2024 15:40:28 +0200
Subject: [PATCH 08/13] Temporarily fix micromamba version in github action

Prevents hanging indefinitely during the setup-micromamba action
---
 .github/workflows/cicd_deploy.yml | 1 +
 .github/workflows/cicd_full.yml   | 1 +
 .github/workflows/cicd_light.yml  | 1 +
 3 files changed, 3 insertions(+)

diff --git a/.github/workflows/cicd_deploy.yml b/.github/workflows/cicd_deploy.yml
index bea78c1..b913dce 100644
--- a/.github/workflows/cicd_deploy.yml
+++ b/.github/workflows/cicd_deploy.yml
@@ -79,6 +79,7 @@ jobs:
       - name: setup-micromamba
         uses: mamba-org/setup-micromamba@v1.9.0
         with:
+          micromamba-version: '1.5.8-0' # temporary fix as setup-micromamba hangs with later mamba version
           environment-file: environment.yml
           environment-name: pdaltools  # activate the environment
           cache-environment: true
diff --git a/.github/workflows/cicd_full.yml b/.github/workflows/cicd_full.yml
index b0a6033..6952358 100644
--- a/.github/workflows/cicd_full.yml
+++ b/.github/workflows/cicd_full.yml
@@ -60,6 +60,7 @@ jobs:
       - name: setup-micromamba
         uses: 
mamba-org/setup-micromamba@v1.9.0
         with:
+          micromamba-version: '1.5.8-0' # temporary fix as setup-micromamba hangs with later mamba version
           environment-file: environment.yml
           environment-name: pdaltools  # activate the environment
           cache-environment: true
diff --git a/.github/workflows/cicd_light.yml b/.github/workflows/cicd_light.yml
index eb3f3f4..c632b6c 100644
--- a/.github/workflows/cicd_light.yml
+++ b/.github/workflows/cicd_light.yml
@@ -22,6 +22,7 @@ jobs:
     - name: setup-micromamba
       uses: mamba-org/setup-micromamba@v1.9.0
       with:
+        micromamba-version: '1.5.8-0' # temporary fix as setup-micromamba hangs with later mamba version
         environment-file: environment.yml
         environment-name: pdaltools  # activate the environment
         cache-environment: true

From 2aa29e7b0b659aee891108af8b6ca13b06e9aad6 Mon Sep 17 00:00:00 2001
From: Lea Vauchier
Date: Wed, 10 Jul 2024 18:13:24 +0200
Subject: [PATCH 09/13] Add optional dimension to tag points added as a buffer and a tool to remove them

---
 pdaltools/las_add_buffer.py | 87 ++++++++++++++++++++++++++++----------
 test/test_las_add_buffer.py | 92 ++++++++++++++++++++++++++++++++++++-
 2 files changed, 157 insertions(+), 22 deletions(-)

diff --git a/pdaltools/las_add_buffer.py b/pdaltools/las_add_buffer.py
index 9328cdf..91ec7fa 100644
--- a/pdaltools/las_add_buffer.py
+++ b/pdaltools/las_add_buffer.py
@@ -1,6 +1,7 @@
 import argparse
 import logging
 import os
+import tempfile
 from typing import List
 
 import pdal
@@ -10,6 +11,9 @@
     get_writer_parameters_from_reader_metadata,
 )
 from pdaltools.las_merge import create_list
+from pdaltools.las_remove_dimensions import remove_dimensions_from_las
+
+ORIGINAL_TILE_TAG = "is_in_original"
 
 
 def create_las_with_buffer(
@@ -20,19 +24,33 @@ def create_las_with_buffer(
     spatial_ref: str = "EPSG:2154",
     tile_width: int = 1000,
     tile_coord_scale: int = 1000,
+    tag_original_tile: bool = False,
 ):
     """Merge lidar tiles around the queried tile and crop them in order to add a buffer
     to the tile (usually 100m).
+
     Args:
-    input_dir (str): directory of pointclouds (where you look for neigbors)
-    tile_filename (str): full path to the queried LIDAR tile
-    output_filename (str) : full path to the saved cropped tile
-    buffer_width (int): width of the border to add to the tile (in pixels)
-    spatial_ref (str): Spatial reference to use to override the one from input las.
-    tile width (int): width of tiles in meters (usually 1000m)
-    tile_coord_scale (int) : scale used in the filename to describe coordinates in meters
-    (usually 1000m)
+        input_dir (str): directory of pointclouds (where you look for neighbors)
+        tile_filename (str): full path to the queried LIDAR tile
+        output_filename (str): full path to the saved cropped tile
+        buffer_width (int, optional): width of the border to add to the tile (in meters).
+        Defaults to 100.
+        spatial_ref (str, optional): Spatial reference to use to override the one from input las.
+        Defaults to "EPSG:2154".
+        tile_width (int, optional): width of tiles in meters. Defaults to 1000.
+        tile_coord_scale (int, optional): scale used in the filename to describe coordinates
+        in meters. Defaults to 1000.
+        tag_original_tile (bool, optional): if true, add a new "is_in_original" dimension
+        to the output las, equal to 1 on points that belong to the original tile, 0 on points
+        that belong to the added buffer. Defaults to False. 
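+
+        Example (illustrative sketch, not part of the API; paths and the tile naming
+        convention are taken from this repo's test data, where tile coordinates are
+        encoded in the filename in units of tile_coord_scale meters):
+            create_las_with_buffer("test/data", "test/data/test_data_77055_627755_LA93_IGN69.laz",
+                                   "test/tmp/buffered.las", buffer_width=5, tile_width=50, tile_coord_scale=10)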
""" + bounds = get_buffered_bounds_from_filename( tile_filename, buffer_width=buffer_width, tile_width=tile_width, tile_coord_scale=tile_coord_scale ) @@ -46,6 +58,7 @@ def create_las_with_buffer( spatial_ref, tile_width=tile_width, tile_coord_scale=tile_coord_scale, + tag_original_tile=tag_original_tile, ) @@ -57,6 +70,7 @@ def las_merge_and_crop( spatial_ref: str = "EPSG:2154", tile_width=1000, tile_coord_scale=1000, + tag_original_tile: bool = False, ): """Merge and crop las in a single pipeline (for buffer addition) @@ -65,29 +79,40 @@ def las_merge_and_crop( - For each file: - read it - crop it according to the bounds + - optionally add a dimension to differentiate points from the central pointscloud + from those added as a buffer - keep the crop in memory - delete the pipeline object to release the memory taken by the las reader - Merge the already cropped data Args: - input_dir (str): directory of pointclouds (where you look for neigbors) + input_dir (str): directory of pointclouds (where you look for neighbors) tile_filename (str): full path to the queried LIDAR tile - bounds : 2D bounding box to crop to : provided as ([xmin, xmax], [ymin, ymax]) - output_filename (str) : full path to the saved cropped tile - spatial_ref (str): spatial reference for the writer - tile width (int): width of tiles in meters (usually 1000m) - tile_coord_scale (int) : scale used in the filename to describe coordinates in meters - (usually 1000m) + bounds (List): 2D bounding box to crop to : provided as ([xmin, xmax], [ymin, ymax]) + output_filename (str): full path to the saved cropped tile + spatial_ref (_type_, optional): spatial reference for the writer. Defaults to "EPSG:2154". + tile_width (int, optional): width of tiles in meters (usually 1000m). Defaults to 1000. + tile_coord_scale (int, optional): scale used in the filename to describe coordinates in meters. + Defaults to 1000. + tag_original_tile (bool, optional): if true, add a new "is_in_original" dimension + to the output las, equal to 1 on points that belong to the original tile, 0 on points + that belong to the added buffer. Defaults to False. 
+ Raises: + ValueError: if the list of tiles to merge is empty """ + # List files to merge files_to_merge = create_list(input_dir, tile_filename, tile_width, tile_coord_scale) - + central_file = files_to_merge[-1] if len(files_to_merge) > 0: # Read and crop each file crops = [] for f in files_to_merge: pipeline = pdal.Pipeline() pipeline |= pdal.Reader.las(filename=f, override_srs=spatial_ref) + if tag_original_tile: + pipeline |= pdal.Filter.ferry(dimensions=f"=>{ORIGINAL_TILE_TAG}") + pipeline |= pdal.Filter.assign(value=f"{ORIGINAL_TILE_TAG}={int(f == central_file)}") pipeline |= pdal.Filter.crop(bounds=str(bounds)) pipeline.execute() if len(pipeline.arrays[0]) == 0: @@ -95,10 +120,9 @@ def las_merge_and_crop( else: crops.append(pipeline.arrays[0]) - # Retrieve metadata before the pipeline is deleted - # As the last file of files_to_merge is the central one, metadata will contain the info - # from the central file after the last iteration of the for loop - metadata = pipeline.metadata + if f == central_file: + # Retrieve metadata before the pipeline is deleted + metadata = pipeline.metadata del pipeline params = get_writer_parameters_from_reader_metadata(metadata, a_srs=spatial_ref) @@ -116,6 +140,23 @@ def las_merge_and_crop( pass +def remove_points_from_buffer(input_file: str, output_file: str): + """Remove the points that were added as a buffer to a las file using the "is_in_original" + dimension that has been added by create_las_with_buffer + + Args: + input_file (str): path to the input file containing the "is_in_original" dimension + output_file (str): path to the output_file + """ + with tempfile.NamedTemporaryFile(suffix="_with_additional_dim.las") as tmp_las: + pipeline = pdal.Pipeline() | pdal.Reader.las(input_file) + pipeline |= pdal.Filter.range(limits=f"{ORIGINAL_TILE_TAG}[1:1]") + pipeline |= pdal.Writer.las(filename=tmp_las.name, forward="all", extra_dims="all") + pipeline.execute() + + remove_dimensions_from_las(tmp_las.name, dimensions=[ORIGINAL_TILE_TAG], output_las=output_file) + + def parse_args(): parser = argparse.ArgumentParser("Add a buffer to a las tile by stitching with its neighbors") parser.add_argument( diff --git a/test/test_las_add_buffer.py b/test/test_las_add_buffer.py index 25c9564..b1cb1f0 100644 --- a/test/test_las_add_buffer.py +++ b/test/test_las_add_buffer.py @@ -10,7 +10,7 @@ from pdaltools.count_occurences.count_occurences_for_attribute import ( compute_count_one_file, ) -from pdaltools.las_add_buffer import create_las_with_buffer +from pdaltools.las_add_buffer import create_las_with_buffer, remove_points_from_buffer TEST_PATH = os.path.dirname(os.path.abspath(__file__)) TMP_PATH = os.path.join(TEST_PATH, "tmp") @@ -44,7 +44,6 @@ def get_2d_bounding_box(path): return mins[:2], maxs[:2] -# Tests def test_create_las_with_buffer(): output_file = os.path.join(TMP_PATH, "buffer.las") # Note: this tile does not have a tile at its bottom @@ -88,6 +87,11 @@ def test_create_las_with_buffer(): # Check the input header infos are preserved in the output assert_header_info_are_similar(output_file, input_file) + # Check output dimensions are the same as input dimensions + output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"] + input_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"] + assert output_dimensions == input_dimensions + # Check that classes are preserved (in particular classes over 31) # Warning: classification values > 31 exist only for las 1.4 with dataformat_id >= 6 classes_counts = 
compute_count_one_file(output_file) @@ -95,6 +99,90 @@ def test_create_las_with_buffer(): assert set(classes_counts.keys()) == {"1", "2", "3", "4", "5", "6", "64"} +def test_create_las_with_buffer_with_tag(): + output_file = os.path.join(TMP_PATH, "buffer_with_tag.las") + # Note: this tile does not have a tile at its bottom + # And its left-side tile has been crop to have no data in the buffer area. This case must not generate any error + input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69.laz") + tile_width = 50 + tile_coord_scale = 10 + input_nb_points = 72770 + expected_out_mins = [770545.0, 6277500.0] + expected_out_maxs = [770605.0, 6277555.0] + + buffer_width = 5 + create_las_with_buffer( + INPUT_DIR, + input_file, + output_file, + buffer_width=buffer_width, + tile_width=tile_width, + tile_coord_scale=tile_coord_scale, + tag_original_tile=True, + ) + logging.info(get_nb_points(input_file)) + # check file exists + assert os.path.isfile(output_file) + + # check number of points with the additional tag + assert get_nb_points(output_file) > input_nb_points + count_points_from_original = compute_count_one_file(output_file, attribute="is_in_original") + print(count_points_from_original) + assert count_points_from_original["1"] == input_nb_points + + # Check boundaries + out_mins, out_maxs = get_2d_bounding_box(output_file) + assert np.all(out_mins == expected_out_mins) + assert np.all(out_maxs == expected_out_maxs) + + # Check the input header infos are preserved in the output + assert_header_info_are_similar(output_file, input_file) + + # Check output dimensions are the same as input dimensions with one additional dimension + output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"] + input_dimensions = tu.get_pdal_infos_summary(input_file)["summary"]["dimensions"] + assert output_dimensions == input_dimensions + ", is_in_original" + + # Check that classes are preserved (in particular classes over 31) + # Warning: classification values > 31 exist only for las 1.4 with dataformat_id >= 6 + classes_counts = compute_count_one_file(output_file) + + assert set(classes_counts.keys()) == {"1", "2", "3", "4", "5", "6", "64"} + + +def test_remove_points_from_buffer(): + input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69.laz") + buffered_file = os.path.join(TMP_PATH, "remove_points_from_buffer_intermediate.las") + output_file = os.path.join(TMP_PATH, "remove_points_from_buffer_output.las") + tile_width = 50 + tile_coord_scale = 10 + + buffer_width = 5 + create_las_with_buffer( + INPUT_DIR, + input_file, + buffered_file, + buffer_width=buffer_width, + tile_width=tile_width, + tile_coord_scale=tile_coord_scale, + tag_original_tile=True, + ) + + remove_points_from_buffer(buffered_file, output_file) + assert os.path.isfile(output_file) + assert get_nb_points(output_file) == get_nb_points(input_file) + + # Check output dimensions are the same as input dimensions + output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"] + buffered_dimensions = tu.get_pdal_infos_summary(buffered_file)["summary"]["dimensions"] + input_dimensions = tu.get_pdal_infos_summary(input_file)["summary"]["dimensions"] + assert buffered_dimensions == input_dimensions + ", is_in_original" + assert output_dimensions == input_dimensions + + # Check the input header infos are preserved in the output + assert_header_info_are_similar(output_file, input_file) + + if __name__ == "__main__": logging.basicConfig(level=logging.INFO) 
     test_create_las_with_buffer()

From aeb957bd8daea26786a25ae549421674e71f450d Mon Sep 17 00:00:00 2001
From: Lea Vauchier
Date: Tue, 16 Jul 2024 13:49:31 +0200
Subject: [PATCH 10/13] Add decorator to run a function on a buffered las

---
 pdaltools/las_add_buffer.py | 78 +++++++++++++++++++++++++++++++++++-
 test/test_las_add_buffer.py | 36 +++++++++++++---
 2 files changed, 107 insertions(+), 7 deletions(-)

diff --git a/pdaltools/las_add_buffer.py b/pdaltools/las_add_buffer.py
index 91ec7fa..9ead97a 100644
--- a/pdaltools/las_add_buffer.py
+++ b/pdaltools/las_add_buffer.py
@@ -2,7 +2,9 @@
 import argparse
 import logging
 import os
 import tempfile
-from typing import List
+from functools import wraps
+from pathlib import Path
+from typing import Callable, List
 
 import pdal
@@ -96,7 +98,7 @@ def las_merge_and_crop(
     tile_filename (str): full path to the queried LIDAR tile
         bounds (List): 2D bounding box to crop to : provided as ([xmin, xmax], [ymin, ymax])
         output_filename (str): full path to the saved cropped tile
-        spatial_ref (_type_, optional): spatial reference for the writer. Defaults to "EPSG:2154".
+        spatial_ref (str, optional): spatial reference for the writer. Defaults to "EPSG:2154".
         tile_width (int, optional): width of tiles in meters (usually 1000m). Defaults to 1000.
         tile_coord_scale (int, optional): scale used in the filename to describe coordinates in meters.
         Defaults to 1000.
@@ -163,6 +165,78 @@ def remove_points_from_buffer(input_file: str, output_file: str):
         remove_dimensions_from_las(tmp_las.name, dimensions=[ORIGINAL_TILE_TAG], output_las=output_file)
 
 
+def run_on_buffered_las(
+    buffer_width: int, spatial_ref: str, tile_width: int = 1000, tile_coord_scale: int = 1000
+) -> Callable:
+    """Decorator to apply a function that takes a las/laz as input and returns a las/laz output
+    on an input with an additional buffer, then remove the buffer points from the output
+
+    The first argument of the decorated function must be an input path
+    The second argument of the decorated function must be an output path
+
+    The buffer is added by merging lidar tiles around the queried tile and cropping them based
+    on their filenames
+
+    Args:
+        buffer_width (int): width of the border to add to the tile (in meters)
+        spatial_ref (str): spatial reference for the writer. Example: "EPSG:2154".
+        tile_width (int, optional): width of tiles in meters (usually 1000m). Defaults to 1000.
+        tile_coord_scale (int, optional): scale used in the filename to describe coordinates in meters.
+        Defaults to 1000.
+
+    Raises:
+        FileNotFoundError: when the first argument of the decorated function is not an existing
+        file
+        FileNotFoundError: when the second argument of the decorated function is not a path
+        with an existing parent folder
+
+    Returns:
+        Callable: decorated function
+    """
+
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            input_file = args[0]
+            output_file = args[1]
+            if not Path(input_file).is_file():
+                raise FileNotFoundError(
+                    f"File {args[0]} not found. The first argument of a function decorated by "
+                    "'run_on_buffered_las' is expected to be the path to an existing input file."
+                )
+
+            if not Path(output_file).parent.is_dir():
+                raise FileNotFoundError(
+                    f"Parent folder for file {args[1]} not found. 
The second argument of a function " + "decorated by 'run_on_buffered_las' is expected to be the path to an output " + "file in an existing folder" + ) + + with ( + tempfile.NamedTemporaryFile(suffix="_buffered_input.laz", dir=".") as buf_in, + tempfile.NamedTemporaryFile(suffix="_buffered_output.laz", dir=".") as buf_out, + ): + create_las_with_buffer( + Path(input_file).parent, + input_file, + buf_in.name, + buffer_width=buffer_width, + spatial_ref=spatial_ref, + tile_width=tile_width, + tile_coord_scale=tile_coord_scale, + tag_original_tile=True, + ) + func(buf_in.name, buf_out.name, *args[2:], **kwargs) + + remove_points_from_buffer(buf_out.name, output_file) + + return + + return wrapper + + return decorator + + def parse_args(): parser = argparse.ArgumentParser("Add a buffer to a las tile by stitching with its neighbors") parser.add_argument( diff --git a/test/test_las_add_buffer.py b/test/test_las_add_buffer.py index b1cb1f0..160aaf8 100644 --- a/test/test_las_add_buffer.py +++ b/test/test_las_add_buffer.py @@ -10,7 +10,11 @@ from pdaltools.count_occurences.count_occurences_for_attribute import ( compute_count_one_file, ) -from pdaltools.las_add_buffer import create_las_with_buffer, remove_points_from_buffer +from pdaltools.las_add_buffer import ( + create_las_with_buffer, + remove_points_from_buffer, + run_on_buffered_las, +) TEST_PATH = os.path.dirname(os.path.abspath(__file__)) TMP_PATH = os.path.join(TEST_PATH, "tmp") @@ -127,7 +131,6 @@ def test_create_las_with_buffer_with_tag(): # check number of points with the additional tag assert get_nb_points(output_file) > input_nb_points count_points_from_original = compute_count_one_file(output_file, attribute="is_in_original") - print(count_points_from_original) assert count_points_from_original["1"] == input_nb_points # Check boundaries @@ -170,6 +173,7 @@ def test_remove_points_from_buffer(): remove_points_from_buffer(buffered_file, output_file) assert os.path.isfile(output_file) + assert get_nb_points(buffered_file) > get_nb_points(input_file) assert get_nb_points(output_file) == get_nb_points(input_file) # Check output dimensions are the same as input dimensions @@ -183,6 +187,28 @@ def test_remove_points_from_buffer(): assert_header_info_are_similar(output_file, input_file) -if __name__ == "__main__": - logging.basicConfig(level=logging.INFO) - test_create_las_with_buffer() +def test_run_on_buffered_las(): + # Dummy example with copy only + buffer_width = 5 + tile_width = 50 + tile_coord_scale = 10 + spatial_ref = "EPSG:2154" + + input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69.laz") + output_dir = os.path.join(TMP_PATH, "run_with_buffer") + os.makedirs(output_dir) + output_file = os.path.join(output_dir, "copied.laz") + decorated_copy = run_on_buffered_las( + buffer_width, spatial_ref=spatial_ref, tile_width=tile_width, tile_coord_scale=tile_coord_scale + )(shutil.copy) + + decorated_copy(input_file, output_file) + + # Check output dimensions are the same as input dimensions + output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"] + input_dimensions = tu.get_pdal_infos_summary(input_file)["summary"]["dimensions"] + assert output_dimensions == input_dimensions + + # Check las content + assert get_nb_points(output_file) == get_nb_points(input_file) + assert compute_count_one_file(output_file) == compute_count_one_file(input_file) From cdf64754c88e63cc41ef802f555d661006dc1498 Mon Sep 17 00:00:00 2001 From: Lea Vauchier Date: Tue, 16 Jul 2024 13:54:08 +0200 Subject: [PATCH 11/13] 
Update changelog

---
 CHANGELOG.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d8c4e2c..9ab0d80 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
 - las_remove_dimension: new tool to remove one or many dimensions
 - deploy on ghcr.io instead of dockerhub
+- Add tools to run functions on buffered las:
+  - update create_las_with_buffer so it can tag, in a new dimension, the points that come from the central las
+  - add remove_points_from_buffer to remove the points whose new dimension is not set to 1
+  - add a decorator to run a function on a buffered las and return an output las containing only the points from the original input
 
 # 1.6.0
 - color: choose streams for RGB colorization, and IRC colorization (doc https://geoservices.ign.fr/services-web-experts-ortho)

From cac2eb584d07089dc4eeb20ce1c9aeb217121845 Mon Sep 17 00:00:00 2001
From: Lea Vauchier
Date: Tue, 16 Jul 2024 14:20:56 +0200
Subject: [PATCH 12/13] Update documentation for buffer points removal

---
 pdaltools/las_add_buffer.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/pdaltools/las_add_buffer.py b/pdaltools/las_add_buffer.py
index 9ead97a..5f07a32 100644
--- a/pdaltools/las_add_buffer.py
+++ b/pdaltools/las_add_buffer.py
@@ -152,6 +152,9 @@ def remove_points_from_buffer(input_file: str, output_file: str):
     """Remove the points that were added as a buffer to a las file using the "is_in_original"
     dimension that has been added by create_las_with_buffer
 
+    Limitation: if any point has been added to the point cloud after adding the buffer, it
+    won't be preserved by this operation (only points from the original file are kept)
+
     Args:
         input_file (str): path to the input file containing the "is_in_original" dimension
         output_file (str): path to the output file
@@ -175,7 +178,11 @@ def run_on_buffered_las(
     The second argument of the decorated function must be an output path
 
     The buffer is added by merging lidar tiles around the queried tile and cropping them based
-    on their filenames
+    on their filenames.
+
+    Limitation: if any point has been added to the point cloud by the decorated function, it
+    won't be preserved by this operation (only points from the original file are kept)
+
 
     Args:
         buffer_width (int): width of the border to add to the tile (in meters)

From 300c24a48236307d310e30c80e1590b543eace1c Mon Sep 17 00:00:00 2001
From: Lea Vauchier
Date: Tue, 16 Jul 2024 16:55:56 +0200
Subject: [PATCH 13/13] Bump version to 1.7.0

---
 CHANGELOG.md          | 1 +
 pdaltools/_version.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9ab0d80..47de613 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,4 @@
+# 1.7.0
 - las_remove_dimension: new tool to remove one or many dimensions
 - deploy on ghcr.io instead of dockerhub
 - Add tools to run functions on buffered las:
diff --git a/pdaltools/_version.py b/pdaltools/_version.py
index 6a32fb0..121c53d 100644
--- a/pdaltools/_version.py
+++ b/pdaltools/_version.py
@@ -1,4 +1,4 @@
-__version__ = "1.6.0"
+__version__ = "1.7.0"
 
 
 if __name__ == "__main__":