Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add fn to remove points during standardisation #62

Merged
merged 3 commits into from
Sep 26, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# 1.7.4
gliegard marked this conversation as resolved.
Show resolved Hide resolved
- Add possibility to remove points of some classes in standardize

# 1.7.3
- Add method to get a point cloud origin

Expand Down
3 changes: 2 additions & 1 deletion pdaltools/las_remove_dimensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import pdal
from pdaltools.las_info import get_writer_parameters_from_reader_metadata


def remove_dimensions_from_las(input_las: str, dimensions: [str], output_las: str):
"""
export new las without some dimensions
Expand Down Expand Up @@ -43,7 +44,7 @@ def parse_args():
required=True,
nargs="+",
help="The dimension we would like to remove from the point cloud file ; be aware to not remove mandatory "
"dimensions of las"
"dimensions of las",
)

return parser.parse_args()
Expand Down
62 changes: 51 additions & 11 deletions pdaltools/standardize_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,14 @@
import os
import subprocess as sp
import tempfile
import platform
import numpy as np
from typing import Dict

import pdal

from pdaltools.unlock_file import copy_and_hack_decorator
from pdaltools.las_info import get_writer_parameters_from_reader_metadata

STANDARD_PARAMETERS = dict(
major_version="1",
Expand All @@ -32,6 +35,7 @@
offset_z=0,
dataformat_id=6, # No color by default
a_srs="EPSG:2154",
class_points_removed=[], # remove points from class
)


Expand All @@ -43,6 +47,13 @@ def parse_args():
"--record_format", choices=[6, 8], type=int, help="Record format: 6 (no color) or 8 (4 color channels)"
)
parser.add_argument("--projection", default="EPSG:2154", type=str, help="Projection, eg. EPSG:2154")
parser.add_argument(
"--class_points_removed",
default=[],
nargs="*",
type=str,
help="List of classes number. Points of this classes will be removed from the file",
)
parser.add_argument(
"--extra_dims",
default=[],
Expand All @@ -51,7 +62,6 @@ def parse_args():
help="List of extra dims to keep in the output (default=[], use 'all' to keep all extra dims), "
"extra_dims must be specified with their type (see pdal.writers.las documentation, eg 'dim1=double')",
)

return parser.parse_args()


Expand All @@ -61,20 +71,46 @@ def get_writer_parameters(new_parameters: Dict) -> Dict:
override the standard ones
"""
params = STANDARD_PARAMETERS | new_parameters

return params


def rewrite_with_pdal(input_file: str, output_file: str, params_from_parser: Dict) -> None:
def remove_points_from_class(points, class_points_removed: list):
    """Return a copy of `points` without the points whose classification is listed.

    Args:
        points: structured numpy array read from a LAS/LAZ file; must contain a
            "Classification" field.
        class_points_removed: classification codes to drop. Values may be ints or
            strings (as received from the command line via argparse); they are
            cast to int so they compare correctly with the numeric dimension.

    Returns:
        A new structured array containing only the preserved points.

    Raises:
        Exception: if every point would be removed.
    """
    # Cast to int: the CLI provides the classes as strings ("2"), which would
    # never match the numeric Classification values.
    classes = [int(classe) for classe in class_points_removed]

    # Vectorized membership test instead of a per-point Python loop.
    to_delete = np.isin(points["Classification"], classes)
    points_preserved = points[~to_delete]

    if len(points_preserved) == 0:
        raise Exception("All points removed !")

    return points_preserved


def rewrite_with_pdal(input_file: str, output_file: str, params_from_parser: Dict, class_points_removed: list) -> None:
    """Rewrite a LAS/LAZ file with standardized writer parameters, optionally removing classes.

    Args:
        input_file: path to the input LAS/LAZ file.
        output_file: path to the output LAS/LAZ file.
        params_from_parser: writer parameters that override STANDARD_PARAMETERS.
        class_points_removed: classification codes whose points are removed;
            empty list keeps every point.
    """
    # Update parameters with command line values (computed once, used by either path).
    params = get_writer_parameters(params_from_parser)

    if not class_points_removed:
        # Single reader->writer pipeline: forward="all" can propagate the input
        # header values because the reader and writer share the same pipeline.
        pipeline = pdal.Reader.las(input_file)
        pipeline |= pdal.Writer(filename=output_file, forward="all", **params)
        pipeline.execute()
        return

    read_pipeline = pdal.Pipeline()
    read_pipeline |= pdal.Reader.las(input_file)
    read_pipeline.execute()
    points = remove_points_from_class(read_pipeline.arrays[0], class_points_removed)

    # NOTE: forward="all" cannot forward reader metadata here, because the writer
    # runs in a new pipeline built from the in-memory array (see project ToDo).
    write_pipeline = pdal.Pipeline(arrays=[points])
    write_pipeline |= pdal.Writer.las(output_file, forward="all", **params)
    write_pipeline.execute()


def exec_las2las(input_file: str, output_file: str):
r = sp.run(["las2las", "-i", input_file, "-o", output_file], stderr=sp.PIPE, stdout=sp.PIPE)
if platform.processor() == "arm" and platform.architecture()[0] == "64bit":
las2las = "las2las64"
else:
las2las = "las2las"
r = sp.run([las2las, "-i", input_file, "-o", output_file], stderr=sp.PIPE, stdout=sp.PIPE)
if r.returncode == 1:
msg = r.stderr.decode()
print(msg)
Expand All @@ -86,14 +122,18 @@ def exec_las2las(input_file: str, output_file: str):


@copy_and_hack_decorator
def standardize(input_file: str, output_file: str, params_from_parser: Dict, class_points_removed: list) -> None:
    """Standardize a LAS/LAZ file: rewrite it with pdal, then post-process with las2las.

    The pdal rewrite goes to a temporary file (named after the output file) that
    is deleted automatically once las2las has produced the final output.
    """
    tmp_suffix = os.path.basename(output_file)
    with tempfile.NamedTemporaryFile(suffix=tmp_suffix) as tmp_las:
        rewrite_with_pdal(input_file, tmp_las.name, params_from_parser, class_points_removed)
        exec_las2las(tmp_las.name, output_file)


if __name__ == "__main__":
    args = parse_args()
    # Writer parameters overriding STANDARD_PARAMETERS (merged in get_writer_parameters).
    params_from_parser = dict(
        dataformat_id=args.record_format,
        a_srs=args.projection,
        extra_dims=args.extra_dims,
    )
    standardize(args.input_file, args.output_file, params_from_parser, args.class_points_removed)
5 changes: 5 additions & 0 deletions script/test/test_run_remove_classes_in_las.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Smoke test: run standardize_format from the command line, removing class 2.
python -m pdaltools.standardize_format \
--input_file test/data/classified_laz/test_data_77050_627755_LA93_IGN69.laz \
--output_file test/tmp/replaced_cmdline.laz \
--record_format 6 \
--class_points_removed 2
17 changes: 11 additions & 6 deletions test/test_las_remove_dimensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,22 @@
ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
added_dimensions = ["DIM_1", "DIM_2"]

def get_points(input_las : str):

def get_points(input_las: str):
    """Read `input_las` with pdal and return its point array."""
    reader = pdal.Pipeline() | pdal.Reader.las(input_las)
    reader.execute()
    return reader.arrays[0]

def append_dimension(input_las : str, output_las : str):

def append_dimension(input_las: str, output_las: str):
    """Copy `input_las` to `output_las`, adding the extra dimensions listed in `added_dimensions`."""
    stages = pdal.Pipeline()
    stages |= pdal.Reader.las(input_las)
    # ferry with "=>DIM" creates each new (empty) dimension on the way through.
    stages |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))
    stages |= pdal.Writer.las(output_las, extra_dims="all", forward="all")
    stages.execute()


Expand Down Expand Up @@ -52,10 +58,9 @@ def test_remove_one_dimension():
las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, ["DIM_1"], tmp_las_rm.name)
points_end = get_points(tmp_las_rm.name)

assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0# should still contains DIM_2
assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0 # should still contains DIM_2

with pytest.raises(ValueError):
list(points_end.dtype.fields.keys()).index("DIM_1") # should not have DIM_1
assert "DIM_1" not in points_end.dtype.fields.keys(), "LAS should not have dimension DIM_1"

with pytest.raises(TypeError):
numpy.array_equal(points_ini, points_end) # output data should not be the same
Expand Down
73 changes: 68 additions & 5 deletions test/test_standardize_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,14 @@
import os
import shutil
import subprocess as sp
import platform
import json
from test.utils import EXPECTED_DIMS_BY_DATAFORMAT, get_pdal_infos_summary

import pdal
import pytest

from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize
from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize, remove_points_from_class

TEST_PATH = os.path.dirname(os.path.abspath(__file__))
TMP_PATH = os.path.join(TEST_PATH, "tmp")
Expand All @@ -31,7 +33,7 @@ def setup_module(module):


def _test_standardize_format_one_params_set(input_file, output_file, params):
rewrite_with_pdal(input_file, output_file, params)
rewrite_with_pdal(input_file, output_file, params, [])
# check file exists
assert os.path.isfile(output_file)
# check values from metadata
Expand Down Expand Up @@ -66,7 +68,11 @@ def test_standardize_format():


def exec_lasinfo(input_file: str):
r = sp.run(["lasinfo", "-stdout", input_file], stderr=sp.PIPE, stdout=sp.PIPE)
if platform.processor() == "arm" and platform.architecture()[0] == "64bit":
lasinfo = "lasinfo64"
else:
lasinfo = "lasinfo"
r = sp.run([lasinfo, "-stdout", input_file], stderr=sp.PIPE, stdout=sp.PIPE)
if r.returncode == 1:
msg = r.stderr.decode()
print(msg)
Expand Down Expand Up @@ -102,17 +108,74 @@ def test_standardize_does_NOT_produce_any_warning_with_Lasinfo():
# if you want to see input_file warnings
# assert_lasinfo_no_warning(input_file)

standardize(input_file, output_file, MUTLIPLE_PARAMS[0])
standardize(input_file, output_file, MUTLIPLE_PARAMS[0], [])
assert_lasinfo_no_warning(output_file)


def test_standardize_malformed_laz():
    """Standardization must still succeed on a LAZ file known to make a plain pdal read fail."""
    malformed_file = os.path.join(TEST_PATH, "data/test_pdalfail_0643_6319_LA93_IGN69.laz")
    result_file = os.path.join(TMP_PATH, "standardize_pdalfail_0643_6319_LA93_IGN69.laz")
    standardize(malformed_file, result_file, MUTLIPLE_PARAMS[0], [])
    assert os.path.isfile(result_file)


def get_pipeline_metadata_cross_plateform(pipeline):
    """Return the pipeline metadata as a dict, whatever form pdal exposes it in.

    Depending on the platform / pdal version, `pipeline.metadata` is either a
    JSON string or an already-decoded object; both are normalized to a dict.
    """
    raw_metadata = pipeline.metadata
    try:
        return json.loads(raw_metadata)
    except TypeError:
        # Already a decoded object: round-trip through JSON text to get a plain dict.
        return json.loads(json.dumps(raw_metadata))

def get_statistics_from_las_points(points):
    """Return (point count, enumerated Classification values) for a point array.

    Runs a pdal filters.stats stage on the in-memory array and reads the result
    back from the pipeline metadata.
    """
    stats_pipeline = pdal.Pipeline(arrays=[points])
    stats_pipeline |= pdal.Filter.stats(dimensions="Classification", enumerate="Classification")
    stats_pipeline.execute()
    metadata = get_pipeline_metadata_cross_plateform(stats_pipeline)
    first_stat = metadata["metadata"]["filters.stats"]["statistic"][0]
    return first_stat["count"], first_stat["values"]

@pytest.mark.parametrize(
    "classes_to_remove",
    [
        [2, 3],
        [2, 3, 4],
        [0, 1, 2, 3, 4, 5, 6],
    ],
)
def test_remove_points_from_class(classes_to_remove):
    """remove_points_from_class drops exactly the points whose Classification is listed."""
    input_file = os.path.join(TEST_PATH, "data/classified_laz/test_data_77050_627755_LA93_IGN69.laz")

    # Count points of classes not in classes_to_remove (the points we should keep in fine).
    pipeline = pdal.Pipeline() | pdal.Reader.las(input_file)

    # Bugfix: the dimension name was misspelled "CLassification", so the `where`
    # clause did not count the expected points.
    where = " && ".join("Classification != " + str(cl) for cl in classes_to_remove)
    pipeline |= pdal.Filter.stats(dimensions="Classification", enumerate="Classification", where=where)
    pipeline.execute()

    points = pipeline.arrays[0]
    nb_points_before, class_before = get_statistics_from_las_points(points)

    metadata = get_pipeline_metadata_cross_plateform(pipeline)
    statistic = metadata["metadata"]["filters.stats"]["statistic"]
    nb_points_to_get = statistic[0]["count"]

    try:
        points = remove_points_from_class(points, classes_to_remove)
    except Exception:  # raised when every point is removed
        assert nb_points_to_get == 0
        return

    nb_points_after, class_after = get_statistics_from_las_points(points)

    assert nb_points_before > 0
    assert nb_points_before > nb_points_after
    assert set(classes_to_remove).issubset(set(class_before))
    assert not set(classes_to_remove).issubset(set(class_after))
    assert nb_points_after == nb_points_to_get


if __name__ == "__main__":
    # Allow running this test module directly (without pytest) for quick debugging.
    logging.basicConfig(level=logging.INFO)
    test_standardize_format()