DEM merging complete

pull/979/head
Piero Toffanin 2019-04-30 16:09:03 -04:00
parent 80c6670a35
commit 6ec75d4970
8 changed files with 183 additions and 113 deletions

View file

@@ -430,9 +430,8 @@ def config():
default=False,
help='Computes an euclidean raster map for each DEM. '
'The map reports the distance from each cell to the nearest '
'NODATA value (before any nearest neighbor hole filling takes place). '
'This can be useful to isolate the areas that have been filled with '
'nearest neighbor interpolation. '
'NODATA value (before any hole filling takes place). '
'This can be useful to isolate the areas that have been filled. '
'Default: '
'%(default)s')
@@ -526,6 +525,15 @@ def config():
'performed independently on the submodels instead of being computed at once. '
'Default: %(default)s')
parser.add_argument('--merge',
metavar='<string>',
default='all',
choices=['all', 'pointcloud', 'orthophoto', 'dem'],
help=('Choose what to merge in the merge step in a split dataset. '
'By default all available outputs are merged. '
'Default: '
'%(default)s'))
args = parser.parse_args()
# check that the project path setting has been set properly
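For context, the new flag is meant to be used on split datasets; a hypothetical invocation that re-merges only the DEMs could look like this (dataset name and paths are placeholders):

    python run.py --project-path /datasets brighton --split 200 --dsm --merge dem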

View file

@@ -22,6 +22,8 @@ class Cropper:
log.ODM_WARNING("Either {} or {} does not exist, will skip cropping.".format(gpkg_path, geotiff_path))
return geotiff_path
log.ODM_INFO("Cropping %s" % geotiff_path)
# Rename original file
# path/to/odm_orthophoto.tif --> path/to/odm_orthophoto.original.tif

View file

@@ -6,6 +6,7 @@ import math
import time
from opendm.system import run
from opendm import point_cloud
from opendm import io
from opendm.concurrency import get_max_memory
from scipy import ndimage, signal
from datetime import datetime
@@ -33,7 +34,7 @@ error = None
def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,
outdir='', resolution=0.1, max_workers=1, max_tile_size=2048,
verbose=False, decimation=None):
verbose=False, decimation=None, keep_unfilled_copy=False):
""" Create DEM from multiple radii, and optionally gapfill """
global error
error = None
@@ -215,11 +216,17 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
'-co NUM_THREADS={threads} '
'--config GDAL_CACHEMAX {max_memory}% '
'{vrt} {geotiff}'.format(**kwargs))
post_process(geotiff_path, output_path)
os.remove(geotiff_path)
if os.path.exists(geotiff_tmp_path): os.remove(geotiff_tmp_path)
if os.path.exists(geotiff_tmp_path):
if not keep_unfilled_copy:
os.remove(geotiff_tmp_path)
else:
os.rename(geotiff_tmp_path, io.related_file_path(output_path, postfix=".unfilled"))
if os.path.exists(vrt_path): os.remove(vrt_path)
for t in tiles:
if os.path.exists(t['filename']): os.remove(t['filename'])
@@ -227,6 +234,28 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
log.ODM_INFO('Completed %s in %s' % (output_file, datetime.now() - start))
def compute_euclidean_map(geotiff_path, output_path, overwrite=False):
if not os.path.exists(geotiff_path):
log.ODM_WARNING("Cannot compute euclidean map (file does not exist: %s)" % geotiff_path)
return
nodata = -9999
with rasterio.open(geotiff_path) as f:
nodata = f.nodatavals[0]
if not os.path.exists(output_path) or overwrite:
log.ODM_INFO("Computing euclidean distance: %s" % output_path)
run('gdal_proximity.py "%s" "%s" -values %s' % (geotiff_path, output_path, nodata))
if os.path.exists(output_path):
return output_path
else:
log.ODM_WARNING("Cannot compute euclidean distance file: %s" % output_path)
else:
log.ODM_WARNING("Already found a euclidean distance map: %s" % output_path)
return output_path
def post_process(geotiff_path, output_path, smoothing_iterations=1):
""" Apply median smoothing """
start = datetime.now()
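A minimal usage sketch for the new compute_euclidean_map helper above (paths are hypothetical; the output raster stores, for each cell, the distance to the nearest NODATA cell of the input DEM):

    from opendm.dem.commands import compute_euclidean_map

    eumap = compute_euclidean_map("/project/odm_dem/dsm.unfilled.tif",
                                  "/project/odm_dem/dsm.euclideand.tif",
                                  overwrite=False)
    # returns the output path on success, or None if the map could not be produced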

View file

@@ -3,6 +3,7 @@ import numpy as np
import rasterio
from rasterio.transform import Affine, rowcol
from opendm import system
from opendm.dem.commands import compute_euclidean_map
from opendm import log
from opendm import io
import os
@@ -15,9 +16,9 @@ def euclidean_merge_dems(input_dems, output_dem, creation_options={}):
and ideas from Anna Petrasova
implementation by Piero Toffanin
Computes a merged DEM by computing a euclidean distance map for all DEMs
to all NODATA cells (how far from the edge of the DEM cells are) and then blending
all overlapping DEM cells by a weighted average based on such euclidean distance.
Computes a merged DEM by computing/using a euclidean
distance to NODATA cells map for all DEMs and then blending all overlapping DEM cells
by a weighted average based on such euclidean distance.
"""
inputs = []
bounds=None
@@ -47,23 +48,9 @@ def euclidean_merge_dems(input_dems, output_dem, creation_options={}):
profile = first.profile
for dem in existing_dems:
# Compute euclidean distance support files
path, filename = os.path.split(dem)
# path = path/to
# filename = dsm.tif
basename, ext = os.path.splitext(filename)
# basename = dsm
# ext = .tif
euclidean_geotiff = os.path.join(path, "{}.euclideand{}".format(basename, ext))
log.ODM_INFO("Computing euclidean distance: %s" % euclidean_geotiff)
system.run('gdal_proximity.py "%s" "%s" -values -9999' % (dem, euclidean_geotiff))
if io.file_exists(euclidean_geotiff):
inputs.append((dem, euclidean_geotiff))
else:
log.ODM_WARNING("Cannot compute euclidean distance file: %s" % euclidean_geotiff)
eumap = compute_euclidean_map(dem, io.related_file_path(dem, postfix=".euclideand"), overwrite=False)
if eumap and io.file_exists(eumap):
inputs.append((dem, eumap))
log.ODM_INFO("%s valid DEM rasters to merge" % len(inputs))
@@ -164,20 +151,24 @@ def euclidean_merge_dems(input_dems, output_dem, creation_options={}):
out=temp_e, window=src_window, boundless=True, masked=False
)
# Set NODATA areas in the euclidean map to a very low value
# so that:
# - Areas with overlap prioritize DEM layers' cells that
# are far away from NODATA areas
# - Areas that have no overlap are included in the final result
# even if they are very close to a NODATA cell
temp_e[temp_e==0] = 0.001953125
temp_e[temp_d==nodata] = 0
np.multiply(temp_d, temp_e, out=temp_d)
np.add(dstarr, temp_d, out=dstarr)
np.add(distsum, temp_e, out=distsum)
np.divide(dstarr, distsum, out=dstarr, where=distsum[0] != 0.0)
dstarr[dstarr == 0.0] = src_nodata
dstrast.write(dstarr, window=dst_window)
# Cleanup
for _, euclidean_geotiff in inputs:
if io.file_exists(euclidean_geotiff):
os.remove(euclidean_geotiff)
# Restore logging
if debug_enabled:
logging.disable(logging.NOTSET)
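To make the weighting concrete, here is a small NumPy sketch of the same blend over two overlapping one-dimensional DEM strips (illustrative only; values, shapes and the nodata value are made up):

    import numpy as np

    nodata = -9999.0
    # two overlapping DEM strips and their euclidean distance-to-NODATA maps
    dem_a, dist_a = np.array([10.0, 11.0, 12.0, nodata]), np.array([3.0, 2.0, 1.0, 0.0])
    dem_b, dist_b = np.array([nodata, 11.5, 12.5, 13.0]), np.array([0.0, 1.0, 2.0, 3.0])

    dstarr, distsum = np.zeros(4), np.zeros(4)
    for dem, dist in ((dem_a, dist_a), (dem_b, dist_b)):
        w = dist.copy()
        w[w == 0] = 0.001953125   # keep non-overlapping cells in the result...
        w[dem == nodata] = 0      # ...but never blend NODATA values in
        dstarr += dem * w
        distsum += w

    merged = np.divide(dstarr, distsum, out=np.full(4, nodata), where=distsum != 0)
    # merged -> [10.0, 11.17, 12.33, 13.0]: cells near a DEM's NODATA edge are
    # dominated by the other DEM, matching the windowed loop above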

View file

@@ -56,3 +56,20 @@ def rename_file(src, dst):
def find(filename, folder):
for root, dirs, files in os.walk(folder):
return '/'.join((root, filename)) if filename in files else None
def related_file_path(input_file_path, prefix="", postfix=""):
"""
For example: related_file_path("/path/to/file.ext", "a.", ".b")
--> "/path/to/a.file.b.ext"
"""
path, filename = os.path.split(input_file_path)
# path = path/to
# filename = file.ext
basename, ext = os.path.splitext(filename)
# basename = file
# ext = .ext
return os.path.join(path, "{}{}{}{}".format(prefix, basename, postfix, ext))
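A couple of calls showing how the DEM code above derives its side-car filenames from this helper (paths are illustrative):

    related_file_path("/project/odm_dem/dsm.tif", postfix=".unfilled")
    # --> "/project/odm_dem/dsm.unfilled.tif"
    related_file_path("/project/odm_dem/dsm.tif", postfix=".euclideand")
    # --> "/project/odm_dem/dsm.euclideand.tif"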

View file

@@ -186,6 +186,7 @@ def get_submodel_argv(args, submodels_path, submodel_name):
setting/replacing --project-path and name
removing --rerun-from, --rerun, --rerun-all
adding --compute-cutline
adding --skip-3dmodel (split-merge does not support 3D model merging)
"""
argv = sys.argv
@@ -194,6 +195,7 @@ def get_submodel_argv(args, submodels_path, submodel_name):
project_path_found = False
project_name_added = False
orthophoto_cutline_found = False
skip_3dmodel_found = False
# TODO: what about GCP paths?
@@ -218,6 +220,10 @@ def get_submodel_argv(args, submodels_path, submodel_name):
result.append(arg)
orthophoto_cutline_found = True
i += 1
elif arg == '--skip-3dmodel':
result.append(arg)
skip_3dmodel_found = True
i += 1
elif arg == '--split':
i += 2
elif arg == '--rerun-from':
@@ -240,6 +246,9 @@ def get_submodel_argv(args, submodels_path, submodel_name):
if not orthophoto_cutline_found:
result.append("--orthophoto-cutline")
if not skip_3dmodel_found:
result.append("--skip-3dmodel")
return result
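The effect of the new branch is easiest to see on the flag alone (hypothetical argv fragments; the other rewriting rules are omitted):

    # '--skip-3dmodel' already passed by the user: forwarded as-is
    #   [..., '--skip-3dmodel', ...]  -->  result contains '--skip-3dmodel' once
    # not passed: appended at the end, so submodels never build a 3D model
    # that could not be merged back anyway
    #   [...]                         -->  result ends with '--skip-3dmodel'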

View file

@@ -74,13 +74,27 @@ class ODMDEMStage(types.ODM_Stage):
resolution=resolution / 100.0,
decimation=args.dem_decimation,
verbose=args.verbose,
max_workers=args.max_concurrency
max_workers=args.max_concurrency,
keep_unfilled_copy=args.dem_euclidean_map
)
dem_geotiff_path = os.path.join(odm_dem_root, "{}.tif".format(product))
bounds_file_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
if args.crop > 0:
bounds_file_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
if os.path.exists(bounds_file_path):
Cropper.crop(bounds_file_path, os.path.join(odm_dem_root, "{}.tif".format(product)), utils.get_dem_vars(args))
# Crop DEM
Cropper.crop(bounds_file_path, dem_geotiff_path, utils.get_dem_vars(args))
if args.dem_euclidean_map:
unfilled_dem_path = io.related_file_path(dem_geotiff_path, postfix=".unfilled")
if args.crop > 0:
# Crop unfilled DEM
Cropper.crop(bounds_file_path, unfilled_dem_path, utils.get_dem_vars(args))
commands.compute_euclidean_map(unfilled_dem_path,
io.related_file_path(dem_geotiff_path, postfix=".euclideand"),
overwrite=True)
else:
log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
else:
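With --dem-euclidean-map enabled, the odm_dem folder would then contain files along these lines (illustrative names, derived from the helpers above):

    dsm.tif            # gap-filled (and, with --crop, cropped) DSM
    dsm.unfilled.tif   # copy preserved before hole filling
    dsm.euclideand.tif # distance-to-NODATA map computed from the unfilled copy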

View file

@@ -133,12 +133,12 @@ class ODMMergeStage(types.ODM_Stage):
if outputs['large']:
# Merge point clouds
if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
pass
# all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")
# pdal.merge_point_clouds(all_point_clouds, tree.odm_georeferencing_model_laz, args.verbose)
else:
log.ODM_WARNING("Found merged point cloud in %s" % tree.odm_georeferencing_model_laz)
if args.merge in ['all', 'pointcloud']:
if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")
pdal.merge_point_clouds(all_point_clouds, tree.odm_georeferencing_model_laz, args.verbose)
else:
log.ODM_WARNING("Found merged point cloud in %s" % tree.odm_georeferencing_model_laz)
# Merge crop bounds
merged_bounds_file = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
@@ -154,92 +154,92 @@ class ODMMergeStage(types.ODM_Stage):
log.ODM_WARNING("No bounds found for any submodel.")
# Merge orthophotos
if not io.file_exists(tree.odm_orthophoto_tif) or self.rerun():
all_orthos_and_cutlines = get_all_submodel_paths(tree.submodels_path,
os.path.join("odm_orthophoto", "odm_orthophoto.tif"),
os.path.join("odm_orthophoto", "cutline.gpkg"),
)
if args.merge in ['all', 'orthophoto']:
if not io.file_exists(tree.odm_orthophoto_tif) or self.rerun():
all_orthos_and_cutlines = get_all_submodel_paths(tree.submodels_path,
os.path.join("odm_orthophoto", "odm_orthophoto.tif"),
os.path.join("odm_orthophoto", "cutline.gpkg"),
)
if len(all_orthos_and_cutlines) > 1:
log.ODM_DEBUG("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_cutlines))
# TODO: histogram matching via rasterio
# currently parts have different color tones
if len(all_orthos_and_cutlines) > 1:
log.ODM_DEBUG("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_cutlines))
# TODO: histogram matching via rasterio
# currently parts have different color tones
merged_geotiff = os.path.join(tree.odm_orthophoto, "odm_orthophoto.merged.tif")
merged_geotiff = os.path.join(tree.odm_orthophoto, "odm_orthophoto.merged.tif")
kwargs = {
'orthophoto_merged': merged_geotiff,
'input_files': ' '.join(map(lambda i: quote(i[0]), all_orthos_and_cutlines)),
'max_memory': get_max_memory(),
'max_memory_mb': get_max_memory_mb(300)
}
kwargs = {
'orthophoto_merged': merged_geotiff,
'input_files': ' '.join(map(lambda i: quote(i[0]), all_orthos_and_cutlines)),
'max_memory': get_max_memory(),
'max_memory_mb': get_max_memory_mb(300)
}
# use bounds as cutlines (blending)
if io.file_exists(merged_geotiff):
os.remove(merged_geotiff)
# use bounds as cutlines (blending)
if io.file_exists(merged_geotiff):
os.remove(merged_geotiff)
system.run('gdal_merge.py -o {orthophoto_merged} '
#'-createonly '
'-co "BIGTIFF=YES" '
'-co "BLOCKXSIZE=512" '
'-co "BLOCKYSIZE=512" '
'--config GDAL_CACHEMAX {max_memory}%'
'{input_files} '.format(**kwargs)
system.run('gdal_merge.py -o {orthophoto_merged} '
#'-createonly '
'-co "BIGTIFF=YES" '
'-co "BLOCKXSIZE=512" '
'-co "BLOCKYSIZE=512" '
'--config GDAL_CACHEMAX {max_memory}%'
'{input_files} '.format(**kwargs)
)
for ortho_cutline in all_orthos_and_cutlines:
kwargs['input_file'], kwargs['cutline'] = ortho_cutline
# Note: cblend has a high performance penalty
system.run('gdalwarp -cutline {cutline} '
'-cblend 20 '
'-r lanczos -multi '
'-wm {max_memory_mb} '
'--config GDAL_CACHEMAX {max_memory}%'
' {input_file} {orthophoto_merged}'.format(**kwargs)
)
for ortho_cutline in all_orthos_and_cutlines:
kwargs['input_file'], kwargs['cutline'] = ortho_cutline
# Apply orthophoto settings (compression, tiling, etc.)
orthophoto_vars = orthophoto.get_orthophoto_vars(args)
# Note: cblend has a high performance penalty
system.run('gdalwarp -cutline {cutline} '
'-cblend 20 '
'-r lanczos -multi '
'-wm {max_memory_mb} '
'--config GDAL_CACHEMAX {max_memory}%'
' {input_file} {orthophoto_merged}'.format(**kwargs)
)
if io.file_exists(tree.odm_orthophoto_tif):
os.remove(tree.odm_orthophoto_tif)
# Apply orthophoto settings (compression, tiling, etc.)
orthophoto_vars = orthophoto.get_orthophoto_vars(args)
kwargs = {
'vars': ' '.join(['-co %s=%s' % (k, orthophoto_vars[k]) for k in orthophoto_vars]),
'max_memory': get_max_memory(),
'merged': merged_geotiff,
'log': tree.odm_orthophoto_tif_log,
'orthophoto': tree.odm_orthophoto_tif,
}
if io.file_exists(tree.odm_orthophoto_tif):
os.remove(tree.odm_orthophoto_tif)
system.run('gdal_translate '
'{vars} '
'--config GDAL_CACHEMAX {max_memory}% '
'{merged} {orthophoto} > {log}'.format(**kwargs))
kwargs = {
'vars': ' '.join(['-co %s=%s' % (k, orthophoto_vars[k]) for k in orthophoto_vars]),
'max_memory': get_max_memory(),
'merged': merged_geotiff,
'log': tree.odm_orthophoto_tif_log,
'orthophoto': tree.odm_orthophoto_tif,
}
os.remove(merged_geotiff)
system.run('gdal_translate '
'{vars} '
'--config GDAL_CACHEMAX {max_memory}% '
'{merged} {orthophoto} > {log}'.format(**kwargs))
# Crop
if args.crop > 0:
Cropper.crop(merged_bounds_file, tree.odm_orthophoto_tif, orthophoto_vars)
os.remove(merged_geotiff)
# Crop
if args.crop > 0:
Cropper.crop(merged_bounds_file, tree.odm_orthophoto_tif, orthophoto_vars)
# Overviews
if args.build_overviews:
orthophoto.build_overviews(tree.odm_orthophoto_tif)
elif len(all_orthos_and_cutlines) == 1:
# Simply copy
log.ODM_WARNING("A single orthophoto/cutline pair was found between all submodels.")
shutil.copyfile(all_orthos_and_cutlines[0][0], tree.odm_orthophoto_tif)
# Overviews
if args.build_overviews:
orthophoto.build_overviews(tree.odm_orthophoto_tif)
elif len(all_orthos_and_cutlines) == 1:
# Simply copy
log.ODM_WARNING("A single orthophoto/cutline pair was found between all submodels.")
shutil.copyfile(all_orthos_and_cutlines[0][0], tree.odm_orthophoto_tif)
else:
log.ODM_WARNING("No orthophoto/cutline pairs were found in any of the submodels. No orthophoto will be generated.")
else:
log.ODM_WARNING("No orthophoto/cutline pairs were found in any of the submodels. No orthophoto will be generated.")
else:
log.ODM_WARNING("Found merged orthophoto in %s" % tree.odm_orthophoto_tif)
log.ODM_WARNING("Found merged orthophoto in %s" % tree.odm_orthophoto_tif)
# Merge DEMs
def merge_dems(dem_filename, human_name):
dem_file = tree.path("odm_dem", dem_filename)
if not io.file_exists(dem_file) or self.rerun():
@@ -260,10 +260,10 @@ class ODMMergeStage(types.ODM_Stage):
else:
log.ODM_WARNING("Found merged %s in %s" % (human_name, dsm_file))
if args.dsm:
if args.merge in ['all', 'dem'] and args.dsm:
merge_dems("dsm.tif", "DSM")
if args.dtm:
if args.merge in ['all', 'dem'] and args.dtm:
merge_dems("dtm.tif", "DTM")
# Stop the pipeline short! We're done.