2019-04-22 19:14:39 +00:00
|
|
|
import os, json
|
2017-06-27 17:14:09 +00:00
|
|
|
from shutil import copyfile
|
2017-06-23 15:20:46 +00:00
|
|
|
|
|
|
|
from opendm import io
|
|
|
|
from opendm import log
|
|
|
|
from opendm import system
|
|
|
|
from opendm import context
|
|
|
|
from opendm import types
|
2018-08-08 19:41:08 +00:00
|
|
|
from opendm import gsd
|
2019-04-29 18:01:55 +00:00
|
|
|
from opendm.dem import commands, utils
|
2018-01-16 19:46:30 +00:00
|
|
|
from opendm.cropper import Cropper
|
2020-02-04 17:56:20 +00:00
|
|
|
from opendm import pseudogeo
|
2020-09-17 15:28:03 +00:00
|
|
|
from opendm.tiles.tiler import generate_dem_tiles
|
2021-06-04 19:35:56 +00:00
|
|
|
from opendm.cogeo import convert_to_cogeo
|
2017-06-23 15:20:46 +00:00
|
|
|
|
2021-08-23 14:14:55 +00:00
|
|
|
|
2019-04-22 19:14:39 +00:00
|
|
|
class ODMDEMStage(types.ODM_Stage):
    """Pipeline stage that builds DSM/DTM rasters from the georeferenced point cloud.

    Optionally classifies ground points (SMRF), rectifies the point cloud,
    crops the resulting DEMs, computes euclidean distance maps, and produces
    map tiles and Cloud Optimized GeoTIFFs.
    """

    def process(self, args, outputs):
        """Run DEM generation.

        :param args: parsed pipeline arguments (dsm/dtm flags, SMRF parameters,
                     resolution, crop/boundary, tiling and COG options, ...)
        :param outputs: shared pipeline state dict; reads 'tree' (paths) and
                        'reconstruction' (georeferencing info)
        """
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        # The DEM is interpolated from the georeferenced point cloud (LAZ).
        dem_input = tree.odm_georeferencing_model_laz
        pc_model_found = io.file_exists(dem_input)
        ignore_resolution = False
        pseudo_georeference = False

        if not reconstruction.is_georeferenced():
            # No real CRS available: still produce DEMs, but skip GSD-based
            # resolution capping and tag outputs with a fake georeference later.
            log.ODM_WARNING("Not georeferenced, using ungeoreferenced point cloud...")
            ignore_resolution = True
            pseudo_georeference = True

        # It is probably not reasonable to have accurate DEMs at the same resolution as the source photos, so reduce it
        # by a factor!
        gsd_scaling = 2.0

        # Cap the requested DEM resolution (cm/px) against the estimated
        # ground sampling distance of the reconstruction.
        resolution = gsd.cap_resolution(args.dem_resolution, tree.opensfm_reconstruction,
                                        gsd_scaling=gsd_scaling,
                                        ignore_gsd=args.ignore_gsd,
                                        ignore_resolution=ignore_resolution and args.ignore_gsd,
                                        has_gcp=reconstruction.has_gcp())

        log.ODM_INFO('Classify: ' + str(args.pc_classify))
        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(dem_input, str(pc_model_found)))

        # define paths and create working directories
        odm_dem_root = tree.path('odm_dem')
        if not io.dir_exists(odm_dem_root):
            system.mkdir_p(odm_dem_root)

        if args.pc_classify and pc_model_found:
            # Marker file records that (and how) classification already ran,
            # so reruns of the pipeline can skip this expensive step.
            pc_classify_marker = os.path.join(odm_dem_root, 'pc_classify_done.txt')

            if not io.file_exists(pc_classify_marker) or self.rerun():
                log.ODM_INFO("Classifying {} using Simple Morphological Filter".format(dem_input))
                commands.classify(dem_input,
                                  args.smrf_scalar,
                                  args.smrf_slope,
                                  args.smrf_threshold,
                                  args.smrf_window,
                                  verbose=args.verbose
                                )

                # Persist the SMRF parameters used, for traceability on rerun.
                with open(pc_classify_marker, 'w') as f:
                    f.write('Classify: smrf\n')
                    f.write('Scalar: {}\n'.format(args.smrf_scalar))
                    f.write('Slope: {}\n'.format(args.smrf_slope))
                    f.write('Threshold: {}\n'.format(args.smrf_threshold))
                    f.write('Window: {}\n'.format(args.smrf_window))

        progress = 20
        self.update_progress(progress)

        if args.pc_rectify:
            commands.rectify(dem_input, args.debug)

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and pc_model_found:
            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            # Only (re)generate if a requested product is missing or a rerun
            # was explicitly requested.
            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                self.rerun():

                products = []

                # The euclidean map is derived from the DSM, so a DTM +
                # euclidean-map request also needs the DSM product.
                if args.dsm or (args.dtm and args.dem_euclidean_map): products.append('dsm')
                if args.dtm: products.append('dtm')

                # Search radii for gap filling, in meters (resolution is cm/px);
                # each step doubles the previous radius.
                radius_steps = [(resolution / 100.0) / 2.0]

                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary, maybe there's a better value?

                for product in products:
                    # DTMs use IDW interpolation of ground points; DSMs take
                    # the max elevation per cell.
                    commands.create_dem(
                            dem_input,
                            product,
                            output_type='idw' if product == 'dtm' else 'max',
                            radiuses=list(map(str, radius_steps)),
                            gapfill=args.dem_gapfill_steps > 0,
                            outdir=odm_dem_root,
                            resolution=resolution / 100.0,
                            decimation=args.dem_decimation,
                            verbose=args.verbose,
                            max_workers=args.max_concurrency,
                            keep_unfilled_copy=args.dem_euclidean_map
                        )

                    dem_geotiff_path = os.path.join(odm_dem_root, "{}.tif".format(product))
                    bounds_file_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')

                    if args.crop > 0 or args.boundary:
                        # Crop DEM
                        Cropper.crop(bounds_file_path, dem_geotiff_path, utils.get_dem_vars(args), keep_original=not args.optimize_disk_space)

                    if args.dem_euclidean_map:
                        # The unfilled copy was kept by create_dem above
                        # (keep_unfilled_copy); the euclidean map measures
                        # distance from gap-filled (NODATA) areas.
                        unfilled_dem_path = io.related_file_path(dem_geotiff_path, postfix=".unfilled")

                        if args.crop > 0 or args.boundary:
                            # Crop unfilled DEM
                            Cropper.crop(bounds_file_path, unfilled_dem_path, utils.get_dem_vars(args), keep_original=not args.optimize_disk_space)

                        commands.compute_euclidean_map(unfilled_dem_path,
                                            io.related_file_path(dem_geotiff_path, postfix=".euclideand"),
                                            overwrite=True)

                    if pseudo_georeference:
                        pseudogeo.add_pseudo_georeferencing(dem_geotiff_path)

                    if args.tiles:
                        generate_dem_tiles(dem_geotiff_path, tree.path("%s_tiles" % product), args.max_concurrency)

                    if args.cog:
                        convert_to_cogeo(dem_geotiff_path, max_workers=args.max_concurrency)

                    # Advance overall stage progress per finished product.
                    progress += 30
                    self.update_progress(progress)
            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')