add_buffer: use input header infos as output parameters
leavauchier committed Jan 24, 2024
1 parent bb29d04 commit b223715
Showing 4 changed files with 73 additions and 26 deletions.
CHANGELOG.md (3 changes: 3 additions & 0 deletions)
@@ -1,3 +1,6 @@
+# dev
+- fix add_buffer: propagate header infos from input to the output
+
# 1.5.0
- switch colorisation source from Geoportail to Geoplateforme
- use absolute value comparison in tests
pdaltools/las_add_buffer.py (50 changes: 45 additions & 5 deletions)
@@ -1,7 +1,7 @@
import argparse
import logging
import os
-from typing import List
+from typing import Dict, List

import pdal

@@ -46,6 +46,40 @@ def create_las_with_buffer(
    )


+def get_writer_parameters_from_reader_metadata(metadata: Dict, a_srs=None) -> Dict:
+    """As pdal las writers do not make it easy to pass metadata from one file as
+    parameters for a writer, use a trick to generate writer parameters from the
+    reader metadata of a previous pipeline:
+    this function uses the metadata from the reader of one pipeline to provide
+    parameters to pass to the writer of another pipeline.
+    To be removed once https://github.com/PDAL/python/issues/147 is solved.
+    Args:
+        metadata (Dict): metadata of an executed pipeline (can be accessed using pipeline.metadata)
+        a_srs (str, optional): spatial reference to use in the writer; defaults to the one
+            found in the reader metadata
+    Returns:
+        Dict: parameters to pass to a pdal writer
+    """
+    reader_metadata = metadata["metadata"]["readers.las"]
+
+    params = {
+        "major_version": reader_metadata["major_version"],
+        "minor_version": reader_metadata["minor_version"],
+        "global_encoding": reader_metadata["global_encoding"],
+        "extra_dims": "all",
+        "scale_x": reader_metadata["scale_x"],
+        "scale_y": reader_metadata["scale_y"],
+        "scale_z": reader_metadata["scale_z"],
+        "offset_x": reader_metadata["offset_x"],
+        "offset_y": reader_metadata["offset_y"],
+        "offset_z": reader_metadata["offset_z"],
+        "dataformat_id": reader_metadata["dataformat_id"],
+        "a_srs": a_srs if a_srs else reader_metadata["comp_spatialreference"],
+    }
+    return params


def las_merge_and_crop(
    input_dir: str,
    tile_filename: str,
@@ -77,12 +111,12 @@ def las_merge_and_crop(
        (usually 1000m)
    """
    # List files to merge
-    Listfiles = create_list(input_dir, tile_filename, tile_width, tile_coord_scale)
+    files_to_merge = create_list(input_dir, tile_filename, tile_width, tile_coord_scale)

-    if len(Listfiles) > 0:
+    if len(files_to_merge) > 0:
        # Read and crop each file
        crops = []
-        for f in Listfiles:
+        for f in files_to_merge:
            pipeline = pdal.Pipeline()
            pipeline |= pdal.Reader.las(filename=f, override_srs=spatial_ref)
            pipeline |= pdal.Filter.crop(bounds=str(bounds))
@@ -92,13 +126,19 @@
            else:
                crops.append(pipeline.arrays[0])

+        # Retrieve metadata before the pipeline is deleted
+        # As the last file of files_to_merge is the central one, metadata will contain the info
+        # from the central file after the last iteration of the for loop
+        metadata = pipeline.metadata
+        del pipeline
+
+        params = get_writer_parameters_from_reader_metadata(metadata, a_srs=spatial_ref)

        # Merge
        pipeline = pdal.Filter.merge().pipeline(*crops)

        # Write
-        pipeline |= pdal.Writer(filename=output_filename, a_srs=spatial_ref)
+        pipeline |= pdal.Writer(filename=output_filename, forward="all", **params)

        logging.info(pipeline.toJSON())
        pipeline.execute()
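
Taken together, the new helper and the rewritten writer form a reusable recipe for carrying header info across pipelines. A minimal sketch of that round trip, outside the context of this commit, assuming a recent python-pdal (where pipeline.metadata is a dict) and hypothetical file names input.las / copy.las:

    import pdal

    from pdaltools.las_add_buffer import get_writer_parameters_from_reader_metadata

    # Execute a first pipeline so that pdal populates the reader metadata
    read = pdal.Pipeline() | pdal.Reader.las(filename="input.las")
    read.execute()

    # Turn the reader metadata (las version, scales, offsets, point format, srs)
    # into keyword arguments for a writer in a second pipeline
    params = get_writer_parameters_from_reader_metadata(read.metadata)

    # Write the points back with the same header info as the input
    write = pdal.Writer.las(filename="copy.las", **params).pipeline(read.arrays[0])
    write.execute()

Note that the forward="all" option on the writer should only take effect when the same pipeline also contains a las reader; the merge pipeline here is built from in-memory arrays, so it is the explicit **params that actually carry the header info.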
Binary file modified test/data/test_data_77055_627760_LA93_IGN69_ground.las
test/test_las_add_buffer.py (46 changes: 25 additions & 21 deletions)
@@ -1,37 +1,25 @@
import logging
import os
import shutil
+import test.utils as tu

import laspy
import numpy as np

from pdaltools.las_add_buffer import create_las_with_buffer

-test_path = os.path.dirname(os.path.abspath(__file__))
-tmp_path = os.path.join(test_path, "tmp")
-input_dir = os.path.join(test_path, "data")
-output_file = os.path.join(tmp_path, "cropped.las")
-
-coord_x = 77055
-coord_y = 627760
-# Note: neighbor tile 77050_627760 is cropped to simulate missing data in neighbors during merge
-input_file = os.path.join(input_dir, f"test_data_{coord_x}_{coord_y}_LA93_IGN69_ground.las")
-tile_width = 50
-tile_coord_scale = 10
-
-input_nb_points = 22343
-expected_output_nb_points = 40177
-expected_out_mins = [770540.01, 6277540.0]
-expected_out_maxs = [770610.0, 6277600.0]
+TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+TMP_PATH = os.path.join(TEST_PATH, "tmp")
+INPUT_DIR = os.path.join(TEST_PATH, "data")


def setup_module(module):
    try:
-        shutil.rmtree(tmp_path)
+        shutil.rmtree(TMP_PATH)
    except FileNotFoundError:
        pass
-    os.mkdir(tmp_path)
+    os.mkdir(TMP_PATH)


# Utils functions
@@ -54,9 +42,21 @@ def get_2d_bounding_box(path):

# Tests
def test_create_las_with_buffer():
+    output_file = os.path.join(TMP_PATH, "buffer.las")
+
+    coord_x = 77055
+    coord_y = 627760
+    # Note: neighbor tile 77050_627760 is cropped to simulate missing data in neighbors during merge
+    input_file = os.path.join(INPUT_DIR, f"test_data_{coord_x}_{coord_y}_LA93_IGN69_ground.las")
+    tile_width = 50
+    tile_coord_scale = 10
+    expected_output_nb_points = 40177
+    expected_out_mins = [770540.01, 6277540.0]
+    expected_out_maxs = [770610.0, 6277600.0]
+
    buffer_width = 10
    create_las_with_buffer(
-        input_dir,
+        INPUT_DIR,
        input_file,
        output_file,
        buffer_width=buffer_width,
@@ -73,8 +73,8 @@ def test_create_las_with_buffer():

    # The following test does not work on the current test case as there is no tile on the left
    # and the top of the tile
-    assert np.all(np.isclose(out_mins, in_mins - buffer_width))
-    assert np.all(np.isclose(out_maxs, in_maxs + buffer_width))
+    tu.allclose_absolute(out_mins, in_mins - buffer_width, 1e-3)
+    tu.allclose_absolute(out_maxs, in_maxs + buffer_width, 1e-3)

    # check number of points
    assert get_nb_points(output_file) == expected_output_nb_points
@@ -83,6 +83,10 @@ def test_create_las_with_buffer():
    assert np.all(out_mins == expected_out_mins)
    assert np.all(out_maxs == expected_out_maxs)

+    # Check output las version (input is 1.4)
+    json_info = tu.get_pdal_infos_summary(output_file)
+    assert json_info["summary"]["metadata"]["minor_version"] == 4


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
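
The test relies on two helpers from test/utils.py that are not part of this diff, so their exact implementations are not shown here. A plausible sketch, assuming get_pdal_infos_summary parses the JSON printed by the `pdal info --summary` CLI and allclose_absolute asserts internally with an absolute tolerance:

    import json
    import subprocess

    import numpy as np


    def get_pdal_infos_summary(filename: str) -> dict:
        # Assumed behaviour: return the parsed JSON output of `pdal info --summary`
        ret = subprocess.run(["pdal", "info", "--summary", filename], capture_output=True, check=True)
        return json.loads(ret.stdout.decode())


    def allclose_absolute(actual, expected, atol: float) -> None:
        # Assumed behaviour: element-wise comparison with an absolute tolerance only
        assert np.allclose(actual, expected, atol=atol, rtol=0)

Wrapping numpy.allclose with rtol=0 matches the CHANGELOG's stated intent of comparing by absolute value in tests, though the real helper may differ in detail.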
