diff --git a/CHANGELOG.md b/CHANGELOG.md index 3aba1e9..4867850 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,6 @@ +# 1.7.4 +- Add possibility to remove points of some classes in standardize + # 1.7.3 - Add method to get a point cloud origin diff --git a/pdaltools/_version.py b/pdaltools/_version.py index d868670..31890a2 100644 --- a/pdaltools/_version.py +++ b/pdaltools/_version.py @@ -1,4 +1,4 @@ -__version__ = "1.7.3" +__version__ = "1.7.4" if __name__ == "__main__": diff --git a/pdaltools/standardize_format.py b/pdaltools/standardize_format.py index fb55046..595437a 100644 --- a/pdaltools/standardize_format.py +++ b/pdaltools/standardize_format.py @@ -35,7 +35,7 @@ offset_z=0, dataformat_id=6, # No color by default a_srs="EPSG:2154", - remove_class=[], # Save no extra_dims + remove_points_from_class=[], # remove points from class ) @@ -48,7 +48,7 @@ def parse_args(): ) parser.add_argument("--projection", default="EPSG:2154", type=str, help="Projection, eg. EPSG:2154") parser.add_argument( - "--remove_class", + "--remove_points_from_class", default=[], nargs="*", type=str, @@ -75,19 +75,18 @@ def get_writer_parameters(new_parameters: Dict) -> Dict: return params -def remove_points_from_las(input_file: str, output_file: str, class_removed) -> None: +def remove_points_from_class(input_file: str, output_file: str, class_removed) -> None: pipeline = pdal.Pipeline() | pdal.Reader.las(input_file) pipeline.execute() points = pipeline.arrays[0] input_dimensions = list(points.dtype.fields.keys()) dim_class = input_dimensions.index("Classification") - indice_pnts_delete = [] - for i in range(0, len(points)): - if points[i][dim_class] in class_removed: - indice_pnts_delete.append(i) + indice_pts_delete = [id for id in range(0, len(points)) if points[id][dim_class] in class_removed] + points_pruned = np.delete(points, indice_pts_delete) - points_pruned = np.delete(points, indice_pnts_delete) + if len(points_pruned) == 0: + raise Exception("All points removed!") 
params = get_writer_parameters_from_reader_metadata(pipeline.metadata) pipeline_end = pdal.Pipeline(arrays=[points_pruned]) @@ -138,7 +137,7 @@ def standardize(input_file: str, output_file: str, params_from_parser: Dict) -> - if args.remove_class: + if args.remove_points_from_class: filename = os.path.basename(args.input_file) with tempfile.NamedTemporaryFile(suffix=filename) as tmp: - remove_points_from_las(args.input_file, tmp.name, args.remove_class) + remove_points_from_class(args.input_file, tmp.name, args.remove_points_from_class) standardize(tmp.name, args.output_file, params_from_parser) else: standardize(args.input_file, args.output_file, params_from_parser) diff --git a/test/test_las_remove_dimensions.py b/test/test_las_remove_dimensions.py index 00eb282..1914d55 100644 --- a/test/test_las_remove_dimensions.py +++ b/test/test_las_remove_dimensions.py @@ -13,16 +13,22 @@ ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz") added_dimensions = ["DIM_1", "DIM_2"] -def get_points(input_las : str): + +def get_points(input_las: str): pipeline_read_ini = pdal.Pipeline() | pdal.Reader.las(input_las) pipeline_read_ini.execute() return pipeline_read_ini.arrays[0] -def append_dimension(input_las : str, output_las : str): + +def append_dimension(input_las: str, output_las: str): pipeline = pdal.Pipeline() pipeline |= pdal.Reader.las(input_las) pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions)) - pipeline |= pdal.Writer.las(output_las, extra_dims="all", forward="all", ) + pipeline |= pdal.Writer.las( + output_las, + extra_dims="all", + forward="all", + ) pipeline.execute() @@ -52,10 +58,10 @@ def test_remove_one_dimension(): las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, ["DIM_1"], tmp_las_rm.name) points_end = get_points(tmp_las_rm.name) - assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0# should still contains DIM_2 + assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0 # should still contain DIM_2 with 
pytest.raises(ValueError): - list(points_end.dtype.fields.keys()).index("DIM_1") # should not have DIM_1 + list(points_end.dtype.fields.keys()).index("DIM_1") # should not have DIM_1 with pytest.raises(TypeError): numpy.array_equal(points_ini, points_end) # output data should not be the same diff --git a/test/test_standardize_format.py b/test/test_standardize_format.py index 97c8a05..8858369 100644 --- a/test/test_standardize_format.py +++ b/test/test_standardize_format.py @@ -3,12 +3,13 @@ import shutil import subprocess as sp import platform +import json from test.utils import EXPECTED_DIMS_BY_DATAFORMAT, get_pdal_infos_summary import pdal import pytest -from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize, remove_points_from_las +from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize, remove_points_from_class TEST_PATH = os.path.dirname(os.path.abspath(__file__)) TMP_PATH = os.path.join(TEST_PATH, "tmp") @@ -117,30 +118,64 @@ def test_standardize_malformed_laz(): standardize(input_file, output_file, MUTLIPLE_PARAMS[0]) assert os.path.isfile(output_file) -def test_remove_points_from_class(): + +@pytest.mark.parametrize( + "classes_to_remove", + [ + [2, 3], + [2, 3, 4], + [0, 1, 2, 3, 4, 5, 6], + ], +) +def test_remove_points_from_class(classes_to_remove): input_file = os.path.join(TEST_PATH, "data/classified_laz/test_data_77050_627755_LA93_IGN69.laz") output_file = os.path.join(TMP_PATH, "test_remove_points_from_class.laz") - def count_points_by_classes(las_file: str, classes) -> int: - pipeline = pdal.Pipeline() | pdal.Reader.las(las_file) - pipeline.execute() - points = pipeline.arrays[0] - input_dimensions = list(points.dtype.fields.keys()) - dim_class = input_dimensions.index('Classification') - nbPts = 0 - for pt in points: - if pt[dim_class] in classes: - nbPts +=1 - return nbPts - - classes_to_remove = [2, 3] + def get_pipeline_metadata(pipeline): + try: + metadata = 
json.loads(pipeline.metadata) + except TypeError: + d_metadata = json.dumps(pipeline.metadata) + metadata = json.loads(d_metadata) + return metadata + + # count points whose class is not in classes_to_remove (i.e. the points that should remain) + pipeline = pdal.Pipeline() | pdal.Reader.las(input_file) + where = "" + for cl in classes_to_remove: + where += "Classification != " + str(cl) + if cl != classes_to_remove[-1]: + where += " && " + pipeline |= pdal.Filter.stats(dimensions="Classification", enumerate="Classification", where=where) + pipeline.execute() + metadata = get_pipeline_metadata(pipeline) + statistic = metadata["metadata"]["filters.stats"]["statistic"] + nb_points_to_get = statistic[0]["count"] - remove_points_from_las(input_file, output_file, classes_to_remove) + try: + remove_points_from_class(input_file, output_file, classes_to_remove) + except Exception as error: # an error is expected when all points are removed + assert nb_points_to_get == 0 + return assert os.path.isfile(output_file) - assert count_points_by_classes(input_file, classes_to_remove) > 0 - assert count_points_by_classes(output_file, classes_to_remove) == 0 + def get_statistics(las_file: str): + pipeline = pdal.Pipeline() | pdal.Reader.las(las_file) + pipeline |= pdal.Filter.stats(dimensions="Classification", enumerate="Classification") + pipeline.execute() + metadata = get_pipeline_metadata(pipeline) + statistic = metadata["metadata"]["filters.stats"]["statistic"] + return statistic[0]["count"], statistic[0]["values"] + + nb_points_before, class_before = get_statistics(input_file) + nb_points_after, class_after = get_statistics(output_file) + + assert nb_points_before > 0 + assert nb_points_before > nb_points_after + assert set(classes_to_remove).issubset(set(class_before)) + assert not set(classes_to_remove).issubset(set(class_after)) + assert nb_points_after == nb_points_to_get if __name__ == "__main__":