Removed lidar2dems, refactored DEM code, added --pc-classify flag

Former-commit-id: 5a4a552d0a
pull/1161/head
Piero Toffanin 2018-01-16 14:46:30 -05:00
parent 008aef0b1e
commit 87de9f513d
9 changed files with 470 additions and 110 deletions

View file

@@ -121,7 +121,6 @@ set(custom_libs OpenGV
Ecto
PDAL
MvsTexturing
Lidar2dems
)
# Dependencies of the SLAM module

View file

@@ -1,24 +0,0 @@
set(_proj_name lidar2dems)
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
ExternalProject_Add(${_proj_name}
PREFIX ${_SB_BINARY_DIR}
TMP_DIR ${_SB_BINARY_DIR}/tmp
STAMP_DIR ${_SB_BINARY_DIR}/stamp
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
URL https://github.com/OpenDroneMap/lidar2dems/archive/master.zip
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CONFIGURE_COMMAND ""
#--Build step-----------------
BUILD_COMMAND ""
#--Install step---------------
INSTALL_COMMAND "${SB_SOURCE_DIR}/${_proj_name}/install.sh" "${SB_INSTALL_DIR}"
#--Output logging-------------
LOG_DOWNLOAD OFF
LOG_CONFIGURE OFF
LOG_BUILD OFF
)

View file

@@ -285,6 +285,17 @@ def config():
'Use 0 to disable cropping. '
'Default: %(default)s'))
parser.add_argument('--pc-classify',
metavar='<string>',
default='none',
choices=['none', 'smrf', 'pmf'],
help='Classify the .LAS point cloud output using either '
'a Simple Morphological Filter or a Progressive Morphological Filter. '
'If --dtm is set this parameter defaults to smrf. '
'You can control the behavior of both smrf and pmf by tweaking the --dem-* parameters. '
'Default: '
'%(default)s')
parser.add_argument('--texturing-data-term',
metavar='<string>',
default='gmi',
@@ -519,4 +530,8 @@ def config():
log.ODM_INFO('Fast orthophoto is turned on, cannot use pmvs (removing --use-pmvs)')
args.use_pmvs = False
if args.dtm and args.pc_classify == 'none':
log.ODM_INFO("DTM is turned on, automatically turning on point cloud classification")
args.pc_classify = "smrf"
return args
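For illustration, a minimal self-contained sketch (not part of the commit) of how the --dtm promotion rule above behaves; the parser here is a hypothetical stand-in, not the real config():

import argparse

# Stand-in parser covering only the two options involved (illustrative only).
parser = argparse.ArgumentParser()
parser.add_argument('--dtm', action='store_true')
parser.add_argument('--pc-classify', metavar='<string>', default='none',
                    choices=['none', 'smrf', 'pmf'])

args = parser.parse_args(['--dtm'])           # a DTM is requested, no classifier given
if args.dtm and args.pc_classify == 'none':   # same promotion rule as in config()
    args.pc_classify = 'smrf'

print(args.pc_classify)  # -> smrf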

View file

@@ -1,13 +1,9 @@
from opendm import context
from opendm import system
from opendm.system import run
from opendm import log
from osgeo import ogr
import json, os
def run(command):
env_paths = [context.superbuild_bin_path]
return system.run(command, env_paths)
class Cropper:
def __init__(self, storage_dir, files_prefix = "crop"):
self.storage_dir = storage_dir

View file

View file

@@ -0,0 +1,132 @@
import os, glob
import gippy
import numpy
from datetime import datetime
from . import pdal
def classify(lasFile, smrf=False, slope=1, cellsize=3, maxWindowSize=10, maxDistance=1,
approximate=False, initialDistance=0.7, verbose=False):
start = datetime.now()
try:
if smrf:
pdal.run_pdaltranslate_smrf(lasFile, lasFile, slope, cellsize, maxWindowSize, verbose)
else:
pdal.run_pdalground(lasFile, lasFile, slope, cellsize, maxWindowSize, maxDistance, approximate=approximate, initialDistance=initialDistance, verbose=verbose)
except:
raise Exception("Error creating classified file %s" % fout)
print 'Created %s in %s' % (os.path.relpath(lasFile), datetime.now() - start)
return lasFile
def create_dems(filenames, demtype, radius=['0.56'], gapfill=False,
outdir='', suffix='', resolution=0.1, **kwargs):
""" Create DEMS for multiple radii, and optionally gapfill """
fouts = []
for rad in radius:
fouts.append(
create_dem(filenames, demtype,
radius=rad, outdir=outdir, suffix=suffix, resolution=resolution, **kwargs))
fnames = {}
# convert from list of dicts, to dict of lists
for product in fouts[0].keys():
fnames[product] = [f[product] for f in fouts]
fouts = fnames
# gapfill all products
_fouts = {}
if gapfill:
for product in fouts.keys():
# output filename
fout = os.path.join(outdir, '%s%s.tif' % (demtype, suffix))
gap_fill(fouts[product], fout)
_fouts[product] = fout
else:
# only return single filename (first radius run)
for product in fouts.keys():
_fouts[product] = fouts[product][0]
return _fouts
def create_dem(filenames, demtype, radius='0.56', decimation=None,
maxsd=None, maxz=None, maxangle=None, returnnum=None,
products=['idw'], outdir='', suffix='', verbose=False, resolution=0.1):
""" Create DEM from collection of LAS files """
start = datetime.now()
# filename based on demtype, radius, and optional suffix
bname = os.path.join(os.path.abspath(outdir), '%s_r%s%s' % (demtype, radius, suffix))
ext = 'tif'
fouts = {o: bname + '.%s.%s' % (o, ext) for o in products}
prettyname = os.path.relpath(bname) + ' [%s]' % (' '.join(products))
# run if any products missing (any extension version is ok, i.e. vrt or tif)
run = False
for f in fouts.values():
if len(glob.glob(f[:-3] + '*')) == 0:
run = True
if run:
print 'Creating %s from %s files' % (prettyname, len(filenames))
# JSON pipeline
json = pdal.json_gdal_base(bname, products, radius, resolution)
json = pdal.json_add_filters(json, maxsd, maxz, maxangle, returnnum)
if demtype == 'dsm':
json = pdal.json_add_classification_filter(json, 2, equality='max')
elif demtype == 'dtm':
json = pdal.json_add_classification_filter(json, 2)
if decimation is not None:
json = pdal.json_add_decimation_filter(json, decimation)
pdal.json_add_readers(json, filenames)
pdal.run_pipeline(json, verbose=verbose)
# verify existence of fout
exists = True
for f in fouts.values():
if not os.path.exists(f):
exists = False
if not exists:
raise Exception("Error creating dems: %s" % ' '.join(fouts))
print 'Completed %s in %s' % (prettyname, datetime.now() - start)
return fouts
def gap_fill(filenames, fout, interpolation='nearest'):
""" Gap fill from higher radius DTMs, then fill remainder with interpolation """
start = datetime.now()
from scipy.interpolate import griddata
if len(filenames) == 0:
raise Exception('No filenames provided!')
filenames = sorted(filenames)
imgs = gippy.GeoImages(filenames)
nodata = imgs[0][0].NoDataValue()
arr = imgs[0][0].Read()
for i in range(1, imgs.size()):
locs = numpy.where(arr == nodata)
arr[locs] = imgs[i][0].Read()[locs]
# interpolation at bad points
goodlocs = numpy.where(arr != nodata)
badlocs = numpy.where(arr == nodata)
arr[badlocs] = griddata(goodlocs, arr[goodlocs], badlocs, method=interpolation)
# write output
imgout = gippy.GeoImage(fout, imgs[0])
imgout.SetNoData(nodata)
imgout[0].Write(arr)
fout = imgout.Filename()
imgout = None
print 'Completed gap-filling to create %s in %s' % (os.path.relpath(fout), datetime.now() - start)
return fout
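A hedged usage sketch of the new opendm.dem.commands module (paths are hypothetical; the call signatures are the ones defined above, and PDAL plus gippy must be available for this to actually run):

from opendm.dem import commands

las = '/projects/demo/odm_georeferencing/odm_georeferenced_model.las'  # hypothetical path

# Ground-classify the point cloud in place with SMRF (slope/cellsize as in the
# 'flatnonforest' terrain preset used by the DEM cell).
commands.classify(las, smrf=True, slope=1, cellsize=3, verbose=True)

# Build a gap-filled DTM over two search radii; with the default products=['idw']
# this returns {'idw': '/projects/demo/odm_dem/dtm.tif'}.
commands.create_dems([las], 'dtm',
                     radius=['0.56', '1.68'], gapfill=True,
                     outdir='/projects/demo/odm_dem', resolution=0.1,
                     maxsd=2.5, maxangle=20, decimation=1, verbose=False)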

opendm/dem/pdal.py (new file, mode 100644, 273 additions)
View file

@@ -0,0 +1,273 @@
#!/usr/bin/env python
################################################################################
# lidar2dems - utilties for creating DEMs from LiDAR data
#
# AUTHOR: Matthew Hanson, matt.a.hanson@gmail.com
#
# Copyright (C) 2015 Applied Geosolutions LLC, oss@appliedgeosolutions.com
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
################################################################################
# Library functions for creating DEMs from Lidar data
import os
import json as jsonlib
import tempfile
from opendm import system
import glob
from datetime import datetime
import uuid
""" JSON Functions """
def json_base():
""" Create initial JSON for PDAL pipeline """
return {'pipeline': []}
def json_gdal_base(fout, output, radius, resolution=1):
""" Create initial JSON for PDAL pipeline containing a Writer element """
json = json_base()
if len(output) > 1:
# TODO: we might want to create a multiband raster with max/min/idw
# in the future
print "More than 1 output, will only create {0}".format(output[0])
output = [output[0]]
json['pipeline'].insert(0, {
'type': 'writers.gdal',
'resolution': resolution,
'radius': radius,
'filename': '{0}.{1}.tif'.format(fout, output[0]),
'output_type': output[0]
})
return json
def json_las_base(fout):
""" Create initial JSON for writing to a LAS file """
json = json_base()
json['pipeline'].insert(0, {
'type': 'writers.las',
'filename': fout
})
return json
def json_add_decimation_filter(json, step):
""" Add decimation Filter element and return """
json['pipeline'].insert(0, {
'type': 'filters.decimation',
'step': step
})
return json
def json_add_classification_filter(json, classification, equality="equals"):
""" Add classification Filter element and return """
limits = 'Classification[{0}:{0}]'.format(classification)
if equality == 'max':
limits = 'Classification[:{0}]'.format(classification)
json['pipeline'].insert(0, {
'type': 'filters.range',
'limits': limits
})
return json
def json_add_maxsd_filter(json, meank=20, thresh=3.0):
""" Add outlier Filter element and return """
json['pipeline'].insert(0, {
'type': 'filters.outlier',
'method': 'statistical',
'mean_k': meank,
'multiplier': thresh
})
return json
def json_add_maxz_filter(json, maxz):
""" Add max elevation Filter element and return """
json['pipeline'].insert(0, {
'type': 'filters.range',
'limits': 'Z[:{0}]'.format(maxz)
})
return json
def json_add_maxangle_filter(json, maxabsangle):
""" Add scan angle Filter element and return """
json['pipeline'].insert(0, {
'type': 'filters.range',
'limits': 'ScanAngleRank[{0}:{1}]'.format(str(-float(maxabsangle)), maxabsangle)
})
return json
def json_add_scanedge_filter(json, value):
""" Add EdgeOfFlightLine Filter element and return """
json['pipeline'].insert(0, {
'type': 'filters.range',
'limits': 'EdgeOfFlightLine[{0}:{0}]'.format(value)
})
return json
def json_add_returnnum_filter(json, value):
""" Add ReturnNum Filter element and return """
json['pipeline'].insert(0, {
'type': 'filters.range',
'limits': 'ReturnNum[{0}:{0}]'.format(value)
})
return json
def json_add_filters(json, maxsd=None, maxz=None, maxangle=None, returnnum=None):
if maxsd is not None:
json = json_add_maxsd_filter(json, thresh=maxsd)
if maxz is not None:
json = json_add_maxz_filter(json, maxz)
if maxangle is not None:
json = json_add_maxangle_filter(json, maxangle)
if returnnum is not None:
json = json_add_returnnum_filter(json, returnnum)
return json
def json_add_crop_filter(json, wkt):
""" Add cropping polygon as Filter Element and return """
json['pipeline'].insert(0, {
'type': 'filters.crop',
'polygon': wkt
})
return json
def json_add_reader(json, filename):
""" Add LAS Reader Element and return """
json['pipeline'].insert(0, {
'type': 'readers.las',
'filename': os.path.abspath(filename)
})
return json
def json_add_readers(json, filenames):
""" Add merge Filter element and readers to a Writer element and return Filter element """
for f in filenames:
json_add_reader(json, f)
if len(filenames) > 1:
json['pipeline'].insert(0, {
'type': 'filters.merge'
})
return json
def json_print(json):
""" Pretty print JSON """
print jsonlib.dumps(json, indent=4, separators=(',', ': '))
""" Run PDAL commands """
def run_pipeline(json, verbose=False):
""" Run PDAL Pipeline with provided JSON """
if verbose:
json_print(json)
# write to temp file
f, jsonfile = tempfile.mkstemp(suffix='.json')
if verbose:
print 'Pipeline file: %s' % jsonfile
os.write(f, jsonlib.dumps(json))
os.close(f)
cmd = [
'pdal',
'pipeline',
'-i %s' % jsonfile
]
if verbose:
out = system.run(' '.join(cmd))
else:
out = system.run(' '.join(cmd) + ' > /dev/null 2>&1')
os.remove(jsonfile)
def run_pdalground(fin, fout, slope, cellsize, maxWindowSize, maxDistance, approximate=False, initialDistance=0.7, verbose=False):
""" Run PDAL ground """
cmd = [
'pdal',
'ground',
'-i %s' % fin,
'-o %s' % fout,
'--slope %s' % slope,
'--cell_size %s' % cellsize,
'--initial_distance %s' % initialDistance
]
if maxWindowSize is not None:
cmd.append('--max_window_size %s' %maxWindowSize)
if maxDistance is not None:
cmd.append('--max_distance %s' %maxDistance)
if approximate:
cmd.append('--approximate')
if verbose:
cmd.append('--developer-debug')
print ' '.join(cmd)
print ' '.join(cmd)
out = system.run(' '.join(cmd))
if verbose:
print out
def run_pdaltranslate_smrf(fin, fout, slope, cellsize, maxWindowSize, verbose=False):
""" Run PDAL translate """
cmd = [
'pdal',
'translate',
'-i %s' % fin,
'-o %s' % fout,
'smrf',
'--filters.smrf.cell=%s' % cellsize,
'--filters.smrf.slope=%s' % slope,
]
if maxWindowSize is not None:
cmd.append('--filters.smrf.window=%s' % maxWindowSize)
if verbose:
print ' '.join(cmd)
out = system.run(' '.join(cmd))
if verbose:
print out
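For illustration, a minimal sketch (not part of the commit) of how these helpers compose a PDAL pipeline for a DTM; paths and parameter values are hypothetical. Each helper inserts its stage at the front of the list, so the readers end up first and the GDAL writer last:

from opendm.dem import pdal

json = pdal.json_gdal_base('/tmp/dtm_r0.56', ['idw'], 0.56, resolution=0.1)
json = pdal.json_add_filters(json, maxsd=2.5, maxangle=20)
json = pdal.json_add_classification_filter(json, 2)   # keep only ground points (class 2)
pdal.json_add_readers(json, ['/tmp/model.las'])

# Resulting stage order: readers.las -> filters.range (Classification[2:2])
#   -> filters.range (ScanAngleRank[-20.0:20]) -> filters.outlier
#   -> writers.gdal (/tmp/dtm_r0.56.idw.tif)
pdal.json_print(json)        # pretty-print the assembled pipeline
# pdal.run_pipeline(json)    # would write it to a temp file and invoke `pdal pipeline`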

View file

@@ -17,7 +17,7 @@ def get_ccd_widths():
return dict(zip(map(string.lower, sensor_data.keys()), sensor_data.values()))
def run(cmd, env_paths=[]):
def run(cmd, env_paths=[context.superbuild_bin_path]):
"""Run a system command"""
log.ODM_DEBUG('running %s' % cmd)

View file

@@ -6,6 +6,8 @@ from opendm import log
from opendm import system
from opendm import context
from opendm import types
from opendm.dem import commands
from opendm.cropper import Cropper
class ODMDEMCell(ecto.Cell):
@@ -27,22 +29,35 @@ class ODMDEMCell(ecto.Cell):
args = self.inputs.args
tree = self.inputs.tree
las_model_found = io.file_exists(tree.odm_georeferencing_model_las)
env_paths = [context.superbuild_bin_path]
# Just to make sure
l2d_module_installed = True
try:
system.run('l2d_classify --help > /dev/null', env_paths)
except:
log.ODM_WARNING('lidar2dems is not installed properly')
l2d_module_installed = False
log.ODM_INFO('Classify: ' + str(args.pc_classify != "none"))
log.ODM_INFO('Create DSM: ' + str(args.dsm))
log.ODM_INFO('Create DTM: ' + str(args.dtm))
log.ODM_INFO('DEM input file {0} found: {1}'.format(tree.odm_georeferencing_model_las, str(las_model_found)))
# Setup terrain parameters
terrain_params_map = {
'flatnonforest': (1, 3),
'flatforest': (1, 2),
'complexnonforest': (5, 2),
'complexforest': (10, 2)
}
terrain_params = terrain_params_map[args.dem_terrain_type.lower()]
slope, cellsize = terrain_params
if args.pc_classify != "none" and las_model_found:
log.ODM_INFO("Classifying {} using {}".format(tree.odm_georeferencing_model_las, args.pc_classify))
commands.classify(tree.odm_georeferencing_model_las,
args.pc_classify == "smrf",
slope,
cellsize,
approximate=args.dem_approximate,
initialDistance=args.dem_initial_distance,
verbose=args.verbose
)
# Do we need to process anything here?
if (args.dsm or args.dtm) and las_model_found and l2d_module_installed:
if (args.dsm or args.dtm) and las_model_found:
# define paths and create working directories
odm_dem_root = tree.path('odm_dem')
@@ -61,48 +76,6 @@ class ODMDEMCell(ecto.Cell):
if (args.dtm and not io.file_exists(dtm_output_filename)) or \
(args.dsm and not io.file_exists(dsm_output_filename)) or \
rerun_cell:
# Process with lidar2dems
terrain_params_map = {
'flatnonforest': (1, 3),
'flatforest': (1, 2),
'complexnonforest': (5, 2),
'complexforest': (10, 2)
}
terrain_params = terrain_params_map[args.dem_terrain_type.lower()]
kwargs = {
'verbose': '-v' if self.params.verbose else '',
'slope': terrain_params[0],
'cellsize': terrain_params[1],
'outdir': odm_dem_root,
'site': ''
}
if args.crop > 0:
bounds_shapefile_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.shp')
if os.path.exists(bounds_shapefile_path):
kwargs['site'] = '-s {}'.format(bounds_shapefile_path)
l2d_params = '--slope {slope} --cellsize {cellsize} ' \
'{verbose} ' \
'-o {site} ' \
'--outdir {outdir}'.format(**kwargs)
approximate = '--approximate' if args.dem_approximate else ''
# Classify only if we need a DTM
run_classification = args.dtm
if run_classification:
system.run('l2d_classify {0} --decimation {1} '
'{2} --initialDistance {3} {4}'.format(
l2d_params, args.dem_decimation, approximate,
args.dem_initial_distance, tree.odm_georeferencing), env_paths)
else:
log.ODM_INFO("Will skip classification, only DSM is needed")
l2d_classified_pattern = 'odm_georeferenced_model.bounds-0_l2d_s{slope}c{cellsize}.las' if args.crop > 0 else 'l2d_s{slope}c{cellsize}.las'
copyfile(tree.odm_georeferencing_model_las, os.path.join(odm_dem_root, l2d_classified_pattern.format(**kwargs)))
products = []
if args.dsm: products.append('dsm')
@@ -113,34 +86,30 @@ class ODMDEMCell(ecto.Cell):
radius_steps.append(radius_steps[-1] * 3) # 3 is arbitrary, maybe there's a better value?
for product in products:
demargs = {
'product': product,
'indir': odm_dem_root,
'l2d_params': l2d_params,
'maxsd': args.dem_maxsd,
'maxangle': args.dem_maxangle,
'resolution': args.dem_resolution,
'radius_steps': ' '.join(map(str, radius_steps)),
'gapfill': '--gapfill' if args.dem_gapfill_steps > 0 else '',
# If we didn't run a classification, we should pass the decimate parameter here
'decimation': '--decimation {0}'.format(args.dem_decimation) if not run_classification else ''
}
system.run('l2d_dems {product} {indir} {l2d_params} '
'--maxsd {maxsd} --maxangle {maxangle} '
'--resolution {resolution} --radius {radius_steps} '
'{decimation} '
'{gapfill} '.format(**demargs), env_paths)
# Rename final output
if product == 'dsm':
dsm_pattern = 'odm_georeferenced_model.bounds-0_dsm.idw.tif' if args.crop > 0 else 'dsm.idw.tif'
os.rename(os.path.join(odm_dem_root, dsm_pattern), dsm_output_filename)
elif product == 'dtm':
dtm_pattern = 'odm_georeferenced_model.bounds-0_dsm.idw.tif' if args.crop > 0 else 'dtm.idw.tif'
os.rename(os.path.join(odm_dem_root, dtm_pattern), dtm_output_filename)
commands.create_dems(
[tree.odm_georeferencing_model_las],
product,
radius=map(str, radius_steps),
gapfill=True,
outdir=odm_dem_root,
resolution=args.dem_resolution,
maxsd=args.dem_maxsd,
maxangle=args.dem_maxangle,
decimation=args.dem_decimation,
verbose=args.verbose
)
if args.crop > 0:
bounds_shapefile_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.shp')
if os.path.exists(bounds_shapefile_path):
Cropper.crop(bounds_shapefile_path, os.path.join(odm_dem_root, "{}.tif".format(product)), {
'TILED': 'YES',
'COMPRESS': 'LZW',
'PREDICTOR': '2',
'BLOCKXSIZE': 512,
'BLOCKYSIZE': 512,
'NUM_THREADS': 'ALL_CPUS'
})
else:
log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
else: