Skip to content

Commit

Permalink
update code, test and version number
Browse files Browse the repository at this point in the history
  • Loading branch information
alavenant committed Sep 17, 2024
1 parent f9df8f1 commit d1332ac
Show file tree
Hide file tree
Showing 5 changed files with 76 additions and 33 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# 1.7.4
- Add possibility to remove points of some classes in standardize

# 1.7.3
- Add method to get a point cloud origin

Expand Down
2 changes: 1 addition & 1 deletion pdaltools/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "1.7.3"
__version__ = "1.7.4"


if __name__ == "__main__":
Expand Down
17 changes: 8 additions & 9 deletions pdaltools/standardize_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
offset_z=0,
dataformat_id=6, # No color by default
a_srs="EPSG:2154",
remove_class=[], # Save no extra_dims
remove_points_from_class=[], # remove points from class
)


Expand All @@ -48,7 +48,7 @@ def parse_args():
)
parser.add_argument("--projection", default="EPSG:2154", type=str, help="Projection, eg. EPSG:2154")
parser.add_argument(
"--remove_class",
"--remove_points_from_class",
default=[],
nargs="*",
type=str,
Expand All @@ -75,19 +75,18 @@ def get_writer_parameters(new_parameters: Dict) -> Dict:
return params


def remove_points_from_las(input_file: str, output_file: str, class_removed) -> None:
def remove_points_from_class(input_file: str, output_file: str, class_removed) -> None:
pipeline = pdal.Pipeline() | pdal.Reader.las(input_file)
pipeline.execute()
points = pipeline.arrays[0]
input_dimensions = list(points.dtype.fields.keys())
dim_class = input_dimensions.index("Classification")

indice_pnts_delete = []
for i in range(0, len(points)):
if points[i][dim_class] in class_removed:
indice_pnts_delete.append(i)
indice_pts_delete = [id for id in range(0, len(points)) if points[id][dim_class] in class_removed]
points_pruned = np.delete(points, indice_pts_delete)

points_pruned = np.delete(points, indice_pnts_delete)
if len(points_pruned) == 0:
raise Exception("All points removed !")

params = get_writer_parameters_from_reader_metadata(pipeline.metadata)
pipeline_end = pdal.Pipeline(arrays=[points_pruned])
Expand Down Expand Up @@ -138,7 +137,7 @@ def standardize(input_file: str, output_file: str, params_from_parser: Dict) ->
if args.remove_class:
filename = os.path.basename(args.input_file)
with tempfile.NamedTemporaryFile(suffix=filename) as tmp:
remove_points_from_las(args.input_file, tmp.name, args.remove_class)
remove_points_from_class(args.input_file, tmp.name, args.remove_points_from_class)
standardize(tmp.name, args.output_file, params_from_parser)
else:
standardize(args.input_file, args.output_file, params_from_parser)
16 changes: 11 additions & 5 deletions test/test_las_remove_dimensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,22 @@
ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
added_dimensions = ["DIM_1", "DIM_2"]

def get_points(input_las : str):

def get_points(input_las: str):
    """Read *input_las* with PDAL and return its point array (a numpy structured array)."""
    reader = pdal.Pipeline() | pdal.Reader.las(input_las)
    reader.execute()
    # A single LAS reader produces exactly one array in the pipeline output.
    return reader.arrays[0]

def append_dimension(input_las : str, output_las : str):

def append_dimension(input_las: str, output_las: str):
    """Copy *input_las* to *output_las*, adding the extra dimensions listed in ``added_dimensions``.

    The ferry filter creates each dimension (initialised by PDAL) via the
    ``=>DIM`` syntax; the writer forwards all header fields and keeps every
    extra dimension in the output file.
    """
    # Build e.g. "=>DIM_1, =>DIM_2" from the module-level dimension list.
    ferry_spec = ", ".join("=>" + dim for dim in added_dimensions)
    pipeline = (
        pdal.Pipeline()
        | pdal.Reader.las(input_las)
        | pdal.Filter.ferry(dimensions=ferry_spec)
        | pdal.Writer.las(
            output_las,
            extra_dims="all",
            forward="all",
        )
    )
    pipeline.execute()


Expand Down Expand Up @@ -52,10 +58,10 @@ def test_remove_one_dimension():
las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, ["DIM_1"], tmp_las_rm.name)
points_end = get_points(tmp_las_rm.name)

assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0# should still contains DIM_2
assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0 # should still contains DIM_2

with pytest.raises(ValueError):
list(points_end.dtype.fields.keys()).index("DIM_1") # should not have DIM_1
list(points_end.dtype.fields.keys()).index("DIM_1") # should not have DIM_1

with pytest.raises(TypeError):
numpy.array_equal(points_ini, points_end) # output data should not be the same
Expand Down
71 changes: 53 additions & 18 deletions test/test_standardize_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,13 @@
import shutil
import subprocess as sp
import platform
import json
from test.utils import EXPECTED_DIMS_BY_DATAFORMAT, get_pdal_infos_summary

import pdal
import pytest

from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize, remove_points_from_las
from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize, remove_points_from_class

TEST_PATH = os.path.dirname(os.path.abspath(__file__))
TMP_PATH = os.path.join(TEST_PATH, "tmp")
Expand Down Expand Up @@ -117,30 +118,64 @@ def test_standardize_malformed_laz():
standardize(input_file, output_file, MUTLIPLE_PARAMS[0])
assert os.path.isfile(output_file)

def test_remove_points_from_class():

@pytest.mark.parametrize(
"classes_to_remove",
[
[2, 3],
[2, 3, 4],
[0, 1, 2, 3, 4, 5, 6],
],
)
def test_remove_points_from_class(classes_to_remove):
input_file = os.path.join(TEST_PATH, "data/classified_laz/test_data_77050_627755_LA93_IGN69.laz")
output_file = os.path.join(TMP_PATH, "test_remove_points_from_class.laz")

def count_points_by_classes(las_file: str, classes) -> int:
pipeline = pdal.Pipeline() | pdal.Reader.las(las_file)
pipeline.execute()
points = pipeline.arrays[0]
input_dimensions = list(points.dtype.fields.keys())
dim_class = input_dimensions.index('Classification')
nbPts = 0
for pt in points:
if pt[dim_class] in classes:
nbPts +=1
return nbPts

classes_to_remove = [2, 3]
def get_pipeline_metadata(pipeline):
try:
metadata = json.loads(pipeline.metadata)
except TypeError:
d_metadata = json.dumps(pipeline.metadata)
metadata = json.loads(d_metadata)
return metadata

# count points of class not in classes_to_remove (get the point we should have in fine)
pipeline = pdal.Pipeline() | pdal.Reader.las(input_file)
where = ""
for cl in classes_to_remove:
where += "Classification != " + str(cl)
if cl != classes_to_remove[-1]:
where += " && "
pipeline |= pdal.Filter.stats(dimensions="Classification", enumerate="Classification", where=where)
pipeline.execute()
metadata = get_pipeline_metadata(pipeline)
statistic = metadata["metadata"]["filters.stats"]["statistic"]
nb_points_to_get = statistic[0]["count"]

remove_points_from_las(input_file, output_file, classes_to_remove)
try:
remove_points_from_class(input_file, output_file, classes_to_remove)
except Exception as error: # error because all points are removed
assert nb_points_to_get == 0
return

assert os.path.isfile(output_file)
assert count_points_by_classes(input_file, classes_to_remove) > 0
assert count_points_by_classes(output_file, classes_to_remove) == 0

def get_statistics(las_file: str):
pipeline = pdal.Pipeline() | pdal.Reader.las(las_file)
pipeline |= pdal.Filter.stats(dimensions="Classification", enumerate="Classification")
pipeline.execute()
metadata = get_pipeline_metadata(pipeline)
statistic = metadata["metadata"]["filters.stats"]["statistic"]
return statistic[0]["count"], statistic[0]["values"]

nb_points_before, class_before = get_statistics(input_file)
nb_points_after, class_after = get_statistics(output_file)

assert nb_points_before > 0
assert nb_points_before > nb_points_after
assert set(classes_to_remove).issubset(set(class_before))
assert not set(classes_to_remove).issubset(set(class_after))
assert nb_points_after == nb_points_to_get


if __name__ == "__main__":
Expand Down

0 comments on commit d1332ac

Please sign in to comment.