LAStools merge in split-merge

pull/1057/head
Piero Toffanin 2019-10-29 14:25:12 -04:00
parent 677370dd50
commit 8a8d51a695
5 changed files with 77 additions and 84 deletions

SuperBuild/CMakeLists.txt (View file)

@@ -168,3 +168,16 @@ externalproject_add(dem2points
   BUILD_COMMAND     make
   INSTALL_COMMAND   ""
 )
+
+externalproject_add(lastools
+  GIT_REPOSITORY    https://github.com/LAStools/LAStools.git
+  GIT_TAG           2ef44281645999ec7217facec84a5913bbbbe165
+  SOURCE_DIR        ${SB_SOURCE_DIR}/lastools
+  CONFIGURE_COMMAND ""
+  CMAKE_COMMAND     ""
+  CMAKE_GENERATOR   ""
+  UPDATE_COMMAND    ""
+  BUILD_IN_SOURCE   1
+  BUILD_COMMAND     make -C LASlib -j$(nproc) CXXFLAGS='-std=c++11' && make -C src -j$(nproc) CXXFLAGS='-std=c++11' lasmerge
+  INSTALL_COMMAND   mv ${SB_SOURCE_DIR}/lastools/bin/lasmerge ${SB_INSTALL_DIR}/bin
+)
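The lasmerge binary installed here is what the pipeline shells out to when concatenating per-submodel point clouds (see the point_cloud.merge addition below). A minimal sketch of the equivalent call, with hypothetical file names:

import subprocess
from pipes import quote

# Hypothetical inputs; lasmerge concatenates several LAS/LAZ files into one.
inputs = ["submodel_0000.laz", "submodel_0001.laz"]
cmd = "lasmerge -i %s -o %s" % (" ".join(map(quote, inputs)), quote("merged.laz"))
subprocess.check_call(cmd, shell=True)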

opendm/entwine.py (View file)

@@ -1,33 +1,10 @@
 import os
-import json
 import shutil
 from pipes import quote
 from opendm import io
 from opendm import log
 from opendm import system
 from opendm import concurrency
-import math
-
-def closest_power_of_4(x):
-    if x <= 0:
-        return 1
-
-    n = 1
-    while n < x:
-        n *= 4
-    return n
-
-def get_num_points(scan_file):
-    if not os.path.exists(scan_file):
-        log.ODM_WARNING("%s does not exist, cannot get number of points." % scan_file)
-        return 0
-
-    with open(scan_file, "r") as f:
-        scan = json.loads(f.read())
-        if not 'points' in scan:
-            log.ODM_WARNING("Cannot find number of points in point clouds (points key missing from scan.json). Returning 0")
-            return 0
-        return scan['points']
 
 def build(input_point_cloud_files, output_path, max_concurrency=8, rerun=False):
     num_files = len(input_point_cloud_files)
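For context on the deletion above: Entwine's --subset option expects the total number of subsets to be a perfect power of four, which is why the removed closest_power_of_4 helper rounded the file count up. Its behavior in isolation (helper copied from the removed code, assertions added here as an illustration):

def closest_power_of_4(x):
    # Smallest power of 4 greater than or equal to x
    if x <= 0:
        return 1
    n = 1
    while n < x:
        n *= 4
    return n

assert closest_power_of_4(1) == 1
assert closest_power_of_4(5) == 16   # rounds past 4 up to 4**2
assert closest_power_of_4(64) == 64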
@@ -45,29 +22,20 @@ def build(input_point_cloud_files, output_path, max_concurrency=8, rerun=False):
         'threads': max_concurrency,
         'tmpdir': tmpdir,
         'all_inputs': "-i " + " ".join(map(quote, input_point_cloud_files)),
-        'outputdir': output_path,
-        'scan_file': os.path.join(output_path, "scan.json")
+        'outputdir': output_path
     }
 
-    # Run scan to compute number of points
+    # Run scan to compute dataset bounds
     system.run('entwine scan --threads {threads} --tmp "{tmpdir}" {all_inputs} -o "{outputdir}"'.format(**kwargs))
-    num_points = get_num_points(kwargs['scan_file'])
+    scan_json = os.path.join(output_path, "scan.json")
 
-    # TODO: choose subset
-
-    entwine_cmd = "entwine build --threads {threads} --tmp {tmpdir} -i {scan_file} -o {outputdir}".format(**kwargs)
-
-    # Need to split into subsets?
-    if num_files > 1:
-        subsets = closest_power_of_4(num_files)
-        for s in range(1, subsets + 1):
-            system.run(entwine_cmd + " --subset %s %s" % (s, subsets))
-
-        # Merge
-        system.run("entwine merge --threads {threads} --tmp {tmpdir} -o {outputdir}".format(**kwargs))
+    if os.path.exists(scan_json):
+        kwargs['input'] = scan_json
+
+        for _ in range(num_files):
+            # One at a time
+            system.run('entwine build --threads {threads} --tmp "{tmpdir}" -i "{input}" -o "{outputdir}" --run 1'.format(**kwargs))
     else:
-        # Single run
-        system.run(entwine_cmd)
+        log.ODM_WARNING("%s does not exist, no point cloud will be built." % scan_json)
 
     if os.path.exists(tmpdir):
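The rework above drops the subset/merge dance in favor of one entwine scan over all inputs followed by repeated entwine build --run 1 calls, each of which (per Entwine's --run option) inserts at most one file and resumes where the previous invocation stopped. Roughly, for two inputs the issued commands become (a sketch with hypothetical paths, not part of the diff):

# Hypothetical paths illustrating the new sequential flow:
commands = [
    'entwine scan --threads 8 --tmp "/tmp/ept" -i a.laz b.laz -o "/out/entwine_pointcloud"',
    # one build per input file, each processing a single run of the scan:
    'entwine build --threads 8 --tmp "/tmp/ept" -i "/out/entwine_pointcloud/scan.json" -o "/out/entwine_pointcloud" --run 1',
    'entwine build --threads 8 --tmp "/tmp/ept" -i "/out/entwine_pointcloud/scan.json" -o "/out/entwine_pointcloud" --run 1',
]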

opendm/point_cloud.py (View file)

@@ -3,6 +3,9 @@ from opendm import system
 from opendm import log
 from opendm import context
 from opendm.system import run
+from opendm import entwine
+from opendm import io
+from pipes import quote
 
 def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=16, confidence=None, verbose=False):
     """
@@ -95,3 +98,47 @@ def get_extent(input_point_cloud):
 
     return bounds
 
+def merge(input_point_cloud_files, output_file, rerun=False):
+    num_files = len(input_point_cloud_files)
+    if num_files == 0:
+        log.ODM_WARNING("No input point cloud files to process")
+        return
+
+    if rerun and io.file_exists(output_file):
+        log.ODM_WARNING("Removing previous point cloud: %s" % output_file)
+        os.remove(output_file)
+
+    kwargs = {
+        'all_inputs': " ".join(map(quote, input_point_cloud_files)),
+        'output': output_file
+    }
+
+    system.run('lasmerge -i {all_inputs} -o "{output}"'.format(**kwargs))
+
+
+def post_point_cloud_steps(args, tree):
+    # XYZ point cloud output
+    if args.pc_csv:
+        log.ODM_INFO("Creating geo-referenced CSV file (XYZ format)")
+
+        system.run("pdal translate -i \"{}\" "
+            "-o \"{}\" "
+            "--writers.text.format=csv "
+            "--writers.text.order=\"X,Y,Z\" "
+            "--writers.text.keep_unspecified=false ".format(
+                tree.odm_georeferencing_model_laz,
+                tree.odm_georeferencing_xyz_file))
+
+    # LAS point cloud output
+    if args.pc_las:
+        log.ODM_INFO("Creating geo-referenced LAS file")
+
+        system.run("pdal translate -i \"{}\" "
+            "-o \"{}\" ".format(
+                tree.odm_georeferencing_model_laz,
+                tree.odm_georeferencing_model_las))
+
+    # EPT point cloud output
+    if args.pc_ept:
+        log.ODM_INFO("Creating geo-referenced Entwine Point Tile output")
+        entwine.build([tree.odm_georeferencing_model_laz], tree.entwine_pointcloud, max_concurrency=args.max_concurrency, rerun=False)
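A usage sketch of the two new helpers as the merge stage below calls them; the paths are hypothetical and args/tree stand in for the pipeline's option and path objects:

from opendm import point_cloud

# Hypothetical per-submodel outputs to concatenate with lasmerge:
submodel_lazs = [
    "/project/submodels/submodel_0000/odm_georeferencing/odm_georeferenced_model.laz",
    "/project/submodels/submodel_0001/odm_georeferencing/odm_georeferenced_model.laz",
]
point_cloud.merge(submodel_lazs, "/project/odm_georeferenced_model.laz")

# CSV / LAS / EPT derivatives are then produced from the merged LAZ:
# point_cloud.post_point_cloud_steps(args, tree)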

stages/odm_georeferencing.py (View file)

@@ -9,7 +9,6 @@ from opendm import system
 from opendm import context
 from opendm.cropper import Cropper
 from opendm import point_cloud
-from opendm import entwine
 
 class ODMGeoreferencingStage(types.ODM_Stage):
     def process(self, args, outputs):
@@ -97,32 +96,7 @@ class ODMGeoreferencingStage(types.ODM_Stage):
 
             if doPointCloudGeo:
                 reconstruction.georef.extract_offsets(odm_georeferencing_model_txt_geo_file)
-
-                # XYZ point cloud output
-                if args.pc_csv:
-                    log.ODM_INFO("Creating geo-referenced CSV file (XYZ format)")
-
-                    system.run("pdal translate -i \"{}\" "
-                        "-o \"{}\" "
-                        "--writers.text.format=csv "
-                        "--writers.text.order=\"X,Y,Z\" "
-                        "--writers.text.keep_unspecified=false ".format(
-                            tree.odm_georeferencing_model_laz,
-                            tree.odm_georeferencing_xyz_file))
-
-                # LAS point cloud output
-                if args.pc_las:
-                    log.ODM_INFO("Creating geo-referenced LAS file")
-
-                    system.run("pdal translate -i \"{}\" "
-                        "-o \"{}\" ".format(
-                            tree.odm_georeferencing_model_laz,
-                            tree.odm_georeferencing_model_las))
-
-                # EPT point cloud output
-                if args.pc_ept:
-                    log.ODM_INFO("Creating geo-referenced Entwine Point Tile output")
-                    entwine.build([tree.odm_georeferencing_model_laz], tree.entwine_pointcloud, max_concurrency=args.max_concurrency, rerun=self.rerun())
+                point_cloud.post_point_cloud_steps(args, tree)
 
                 if args.crop > 0:
                     log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")

stages/splitmerge.py (View file)

@@ -13,7 +13,7 @@ from opensfm.large import metadataset
 from opendm.cropper import Cropper
 from opendm.concurrency import get_max_memory
 from opendm.remote import LocalRemoteExecutor
-from opendm import entwine
+from opendm import point_cloud
 from pipes import quote
 
 class ODMSplitStage(types.ODM_Stage):
@@ -175,27 +175,18 @@ class ODMMergeStage(types.ODM_Stage):
 
         # Merge point clouds
         if args.merge in ['all', 'pointcloud']:
-            if not io.dir_exists(tree.entwine_pointcloud) or self.rerun():
+            if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
                 all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")
+
                 try:
-                    entwine.build(all_point_clouds, tree.entwine_pointcloud, max_concurrency=args.max_concurrency, rerun=self.rerun())
+                    point_cloud.merge(all_point_clouds, tree.odm_georeferencing_model_laz, rerun=self.rerun())
+                    point_cloud.post_point_cloud_steps(args, tree)
                 except Exception as e:
-                    log.ODM_WARNING("Could not merge EPT point cloud: %s (skipping)" % str(e))
-            else:
-                log.ODM_WARNING("Found merged EPT point cloud in %s" % tree.entwine_pointcloud)
-
-            if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
-                if io.dir_exists(tree.entwine_pointcloud):
-                    try:
-                        system.run('pdal translate "ept://{}" "{}"'.format(tree.entwine_pointcloud, tree.odm_georeferencing_model_laz))
-                    except Exception as e:
-                        log.ODM_WARNING("Cannot export EPT dataset to LAZ: %s" % str(e))
-                else:
-                    log.ODM_WARNING("No EPT point cloud found (%s), skipping LAZ conversion)" % tree.entwine_pointcloud)
+                    log.ODM_WARNING("Could not merge point cloud: %s (skipping)" % str(e))
             else:
                 log.ODM_WARNING("Found merged point cloud in %s" % tree.odm_georeferencing_model_laz)
 
         self.update_progress(25)
 
         # Merge crop bounds
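Taken together, these hunks invert the merge stage's data flow; sketched (pseudocode, not part of the diff):

# Before: EPT-first; the merged LAZ was exported from the Entwine index
#   entwine.build(all_point_clouds, tree.entwine_pointcloud)   # subset builds + merge
#   pdal translate "ept://..." odm_georeferenced_model.laz
#
# After: LAZ-first; lasmerge concatenates, derivatives follow
#   point_cloud.merge(all_point_clouds, tree.odm_georeferencing_model_laz)
#   point_cloud.post_point_cloud_steps(args, tree)   # CSV / LAS / EPT as requested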