Faster crop, crop merging algo, fixed opensfm pipeline

Former-commit-id: 078a6a0678
pull/1161/head
Piero Toffanin 2019-04-27 15:51:13 -04:00
parent 3448801acf
commit 92db0e95c3
8 changed files with 120 additions and 61 deletions

View file

@@ -17,9 +17,9 @@ class Cropper:
         return os.path.join(self.storage_dir, '{}.{}'.format(self.files_prefix, suffix))
 
     @staticmethod
-    def crop(shapefile_path, geotiff_path, gdal_options, keep_original=True):
-        if not os.path.exists(shapefile_path) or not os.path.exists(geotiff_path):
-            log.ODM_WARNING("Either {} or {} does not exist, will skip cropping.".format(shapefile_path, geotiff_path))
+    def crop(gpkg_path, geotiff_path, gdal_options, keep_original=True):
+        if not os.path.exists(gpkg_path) or not os.path.exists(geotiff_path):
+            log.ODM_WARNING("Either {} or {} does not exist, will skip cropping.".format(gpkg_path, geotiff_path))
             return geotiff_path
 
         # Rename original file
@@ -38,14 +38,14 @@ class Cropper:
 
         try:
             kwargs = {
-                'shapefile_path': shapefile_path,
+                'gpkg_path': gpkg_path,
                 'geotiffInput': original_geotiff,
                 'geotiffOutput': geotiff_path,
                 'options': ' '.join(map(lambda k: '-co {}={}'.format(k, gdal_options[k]), gdal_options)),
                 'max_memory': get_max_memory()
             }
 
-            run('gdalwarp -cutline {shapefile_path} '
+            run('gdalwarp -cutline {gpkg_path} '
                 '-crop_to_cutline '
                 '{options} '
                 '{geotiffInput} '
@@ -63,7 +63,58 @@ class Cropper:
 
         return geotiff_path
 
-    def create_bounds_geojson(self, pointcloud_path, buffer_distance = 0, decimation_step=40, outlier_radius=20):
+    @staticmethod
+    def merge_bounds(input_bound_files, output_bounds, buffer_distance = 0):
+        """
+        Merge multiple bound files into a single bound computed from the convex hull
+        of all bounds (minus a buffer distance in meters)
+        """
+        geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
+
+        driver = ogr.GetDriverByName('GPKG')
+
+        for input_bound_file in input_bound_files:
+            ds = driver.Open(input_bound_file, 0) # read-only
+            layer = ds.GetLayer()
+
+            # Collect all Geometry
+            for feature in layer:
+                geomcol.AddGeometry(feature.GetGeometryRef())
+
+            # Close
+            ds = None
+
+        # Calculate convex hull
+        convexhull = geomcol.ConvexHull()
+
+        # If a buffer distance is specified,
+        # create two buffers: one shrunk by
+        # N + 3 and then that buffer expanded by 3
+        # so that we get smooth corners. \m/
+        BUFFER_SMOOTH_DISTANCE = 3
+
+        if buffer_distance > 0:
+            convexhull = convexhull.Buffer(-(buffer_distance + BUFFER_SMOOTH_DISTANCE))
+            convexhull = convexhull.Buffer(BUFFER_SMOOTH_DISTANCE)
+
+        # Save to a new file
+        if os.path.exists(output_bounds):
+            driver.DeleteDataSource(output_bounds)
+
+        out_ds = driver.CreateDataSource(output_bounds)
+        layer = out_ds.CreateLayer("convexhull", geom_type=ogr.wkbPolygon)
+
+        feature_def = layer.GetLayerDefn()
+        feature = ogr.Feature(feature_def)
+        feature.SetGeometry(convexhull)
+        layer.CreateFeature(feature)
+        feature = None
+
+        # Save and close output data source
+        out_ds = None
+
+    def create_bounds_geojson(self, pointcloud_path, buffer_distance = 0, decimation_step=40):
         """
         Compute a buffered polygon around the data extents (not just a bounding box)
         of the given point cloud.
@@ -71,23 +122,19 @@ class Cropper:
 
         @return filename to GeoJSON containing the polygon
         """
         if not os.path.exists(pointcloud_path):
-            log.ODM_WARNING('Point cloud does not exist, cannot generate shapefile bounds {}'.format(pointcloud_path))
+            log.ODM_WARNING('Point cloud does not exist, cannot generate bounds {}'.format(pointcloud_path))
             return ''
 
-        # Do basic outlier removal prior to extracting boundary information
-        filtered_pointcloud_path = self.path('filtered.las')
+        # Do decimation prior to extracting boundary information
+        decimated_pointcloud_path = self.path('decimated.las')
 
         run("pdal translate -i \"{}\" "
             "-o \"{}\" "
-            "decimation outlier range "
-            "--filters.decimation.step={} "
-            "--filters.outlier.method=radius "
-            "--filters.outlier.radius={} "
-            "--filters.outlier.min_k=2 "
-            "--filters.range.limits='Classification![7:7]'".format(pointcloud_path, filtered_pointcloud_path, decimation_step, outlier_radius))
+            "decimation "
+            "--filters.decimation.step={} ".format(pointcloud_path, decimated_pointcloud_path, decimation_step))
 
-        if not os.path.exists(filtered_pointcloud_path):
-            log.ODM_WARNING('Could not filter point cloud, cannot generate shapefile bounds {}'.format(filtered_pointcloud_path))
+        if not os.path.exists(decimated_pointcloud_path):
+            log.ODM_WARNING('Could not decimate point cloud, thus cannot generate GPKG bounds {}'.format(decimated_pointcloud_path))
             return ''
 
         # Use PDAL to dump boundary information
@@ -95,7 +142,7 @@ class Cropper:
 
         boundary_file_path = self.path('boundary.json')
-        run('pdal info --boundary --filters.hexbin.edge_length=1 --filters.hexbin.threshold=0 {0} > {1}'.format(filtered_pointcloud_path, boundary_file_path))
+        run('pdal info --boundary --filters.hexbin.edge_length=1 --filters.hexbin.threshold=0 {0} > {1}'.format(decimated_pointcloud_path, boundary_file_path))
 
         pc_geojson_boundary_feature = None
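
With the outlier and range filters gone, bounds extraction only decimates the cloud before handing it to the hexbin boundary filter, which is where the "faster crop" in the commit title comes from. A sketch of what the two run() calls above expand to, assuming the default decimation_step of 40 (file names are illustrative, not from this commit):

    run('pdal translate -i "odm_georeferenced_model.laz" '
        '-o "decimated.las" '
        'decimation '
        '--filters.decimation.step=40')

    run('pdal info --boundary --filters.hexbin.edge_length=1 '
        '--filters.hexbin.threshold=0 decimated.las > boundary.json')

Keeping only every 40th point is much cheaper than running radius-based outlier removal and a classification range filter over the full cloud, at the cost of a slightly coarser boundary polygon.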
@@ -157,25 +204,25 @@ class Cropper:
 
         # Save and close data sources
         out_ds = ds = None
 
-        # Remove filtered point cloud
-        if os.path.exists(filtered_pointcloud_path):
-            os.remove(filtered_pointcloud_path)
+        # Remove decimated point cloud
+        if os.path.exists(decimated_pointcloud_path):
+            os.remove(decimated_pointcloud_path)
 
         return bounds_geojson_path
 
-    def create_bounds_shapefile(self, pointcloud_path, buffer_distance = 0, decimation_step=40, outlier_radius=20):
+    def create_bounds_gpkg(self, pointcloud_path, buffer_distance = 0, decimation_step=40):
         """
         Compute a buffered polygon around the data extents (not just a bounding box)
         of the given point cloud.
 
-        @return filename to Shapefile containing the polygon
+        @return filename to Geopackage containing the polygon
         """
         if not os.path.exists(pointcloud_path):
-            log.ODM_WARNING('Point cloud does not exist, cannot generate shapefile bounds {}'.format(pointcloud_path))
+            log.ODM_WARNING('Point cloud does not exist, cannot generate GPKG bounds {}'.format(pointcloud_path))
             return ''
 
-        bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance, decimation_step, outlier_radius)
+        bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance, decimation_step)
 
         summary_file_path = os.path.join(self.storage_dir, '{}.summary.json'.format(self.files_prefix))
         run('pdal info --summary {0} > {1}'.format(pointcloud_path, summary_file_path))
@@ -187,16 +234,16 @@ class Cropper:
 
         if pc_proj4 is None: raise RuntimeError("Could not determine point cloud proj4 declaration")
 
-        bounds_shapefile_path = os.path.join(self.storage_dir, '{}.bounds.shp'.format(self.files_prefix))
+        bounds_gpkg_path = os.path.join(self.storage_dir, '{}.bounds.gpkg'.format(self.files_prefix))
 
-        # Convert bounds to Shapefile
+        # Convert bounds to GPKG
         kwargs = {
             'input': bounds_geojson_path,
-            'output': bounds_shapefile_path,
+            'output': bounds_gpkg_path,
             'proj4': pc_proj4
         }
 
-        run('ogr2ogr -overwrite -a_srs "{proj4}" {output} {input}'.format(**kwargs))
+        run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.format(**kwargs))
 
-        return bounds_shapefile_path
+        return bounds_gpkg_path
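
Taken together, the cropper.py changes replace ESRI Shapefile outputs with GeoPackage (.gpkg) end to end and add merge_bounds() for combining per-submodel boundaries. A minimal sketch of how the new pieces fit together; the import path and the assumption that the constructor takes the storage directory and file prefix used by self.path() are mine, and all paths and buffer values are illustrative:

    from opendm.cropper import Cropper  # import path assumed

    cropper = Cropper('/project/odm_georeferencing', 'odm_georeferenced_model')

    # Boundary polygon of the point cloud, saved as a GPKG
    bounds = cropper.create_bounds_gpkg('/project/odm_georeferenced_model.laz',
                                        buffer_distance=3, decimation_step=40)

    # Crop a raster to that boundary; gdalwarp reads GPKG cutlines directly,
    # so no .shp sidecar files are needed anymore
    Cropper.crop(bounds, '/project/odm_orthophoto/odm_orthophoto.tif',
                 {'COMPRESS': 'LZW', 'TILED': 'YES'})

    # On split-merge runs, combine per-submodel boundaries into one
    # smoothed convex hull
    Cropper.merge_bounds(['/project/submodel_0000.bounds.gpkg',
                          '/project/submodel_0001.bounds.gpkg'],
                         '/project/merged.bounds.gpkg', buffer_distance=3)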

View file

@@ -152,7 +152,7 @@ def get_submodel_argv(args, submodels_path, submodel_name):
     """
     :return the same as argv, but removing references to --split,
         setting/replacing --project-path and name
-        setting/replacing --crop to 0 (never crop on submodels)
+        setting/replacing --crop to 0.01 (always crop on submodels)
         removing --rerun-from, --rerun, --rerun-all
     """
     argv = sys.argv
@@ -184,7 +184,7 @@ def get_submodel_argv(args, submodels_path, submodel_name):
             i += 2
         elif arg == '--crop':
             result.append(arg)
-            result.append('0')
+            result.append("0.01")
             crop_found = True
             i += 2
         elif arg == '--split':
@@ -205,7 +205,7 @@ def get_submodel_argv(args, submodels_path, submodel_name):
     if not crop_found:
         result.append('--crop')
-        result.append('0')
+        result.append('0.01')
 
     if not project_name_added:
         result.append(submodel_name)
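
The 0 → 0.01 change means every submodel now gets at least a hair of cropping (a 0.01 m buffer), so each one produces a clean GPKG boundary that Cropper.merge_bounds() can consume when the results are merged. A hypothetical illustration of the argv rewrite, following the docstring above (flag values are mine, not from this commit):

    # Parent run:
    #   ['run.py', '--split', '200', '--crop', '0', '--project-path', '/data', 'brighton']
    # get_submodel_argv() would then yield, for submodel_0000:
    #   ['run.py', '--crop', '0.01', '--project-path', '/data/brighton/submodels', 'submodel_0000']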

run.py
View file

@@ -43,6 +43,7 @@ if __name__ == '__main__':
     app.execute()
 
     # Do not show ASCII art for local submodels runs
+    # TODO: remove false
    if False and not "submodels/submodel_" in args.project_path:
        log.ODM_INFO('MMMMMMMMMMMNNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNNMMMMMMMMMMM')
        log.ODM_INFO('MMMMMMdo:..---../sNMMMMMMMMMMMMMMMMMMMMMMMMMMNs/..---..:odMMMMMM')

View file

@@ -78,9 +78,9 @@ class ODMDEMStage(types.ODM_Stage):
                 )
 
             if args.crop > 0:
-                bounds_shapefile_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.shp')
-                if os.path.exists(bounds_shapefile_path):
-                    Cropper.crop(bounds_shapefile_path, os.path.join(odm_dem_root, "{}.tif".format(product)), {
+                bounds_file_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
+                if os.path.exists(bounds_file_path):
+                    Cropper.crop(bounds_file_path, os.path.join(odm_dem_root, "{}.tif".format(product)), {
                         'TILED': 'YES',
                         'COMPRESS': 'LZW',
                         'BLOCKXSIZE': 512,
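
For reference, the options dict passed to Cropper.crop() is turned into GDAL creation-option flags by the join shown in the cropper.py hunk above; a quick check of that mapping with the values from this hunk:

    gdal_options = {'TILED': 'YES', 'COMPRESS': 'LZW', 'BLOCKXSIZE': 512}
    print(' '.join(map(lambda k: '-co {}={}'.format(k, gdal_options[k]), gdal_options)))
    # -> -co TILED=YES -co COMPRESS=LZW -co BLOCKXSIZE=512
    # (flag order may vary, since plain dicts are unordered on older Pythons)

These flags end up in the gdalwarp -cutline ... -crop_to_cutline call, and gdalwarp accepts the GPKG cutline directly.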

View file

@@ -147,9 +147,8 @@ class ODMGeoreferencingStage(types.ODM_Stage):
                 if not args.fast_orthophoto:
                     decimation_step *= int(len(reconstruction.photos) / 1000) + 1
 
-                cropper.create_bounds_shapefile(tree.odm_georeferencing_model_laz, args.crop,
-                    decimation_step=decimation_step,
-                    outlier_radius=20 if args.fast_orthophoto else 2)
+                cropper.create_bounds_gpkg(tree.odm_georeferencing_model_laz, args.crop,
+                    decimation_step=decimation_step)
 
             # Do not execute a second time, since
             # We might be doing georeferencing for
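
The multiplier above trades boundary fidelity for speed on large datasets. As a worked example, assuming decimation_step starts at its default of 40:

    # 2500 photos: 40 * (int(2500 / 1000) + 1) = 40 * 3 = 120
    # i.e. only every 120th point is kept when tracing the bounds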

View file

@@ -115,8 +115,8 @@ class ODMOrthoPhotoStage(types.ODM_Stage):
                         '{png} {tiff} > {log}'.format(**kwargs))
 
                 if args.crop > 0:
-                    shapefile_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.shp')
-                    Cropper.crop(shapefile_path, tree.odm_orthophoto_tif, {
+                    bounds_file_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
+                    Cropper.crop(bounds_file_path, tree.odm_orthophoto_tif, {
                         'TILED': 'NO' if self.params.get('no_tiled') else 'YES',
                         'COMPRESS': self.params.get('compress'),
                         'PREDICTOR': '2' if self.params.get('compress') in ['LZW', 'DEFLATE'] else '1',

View file

@@ -32,39 +32,49 @@ class ODMOpenSfMStage(types.ODM_Stage):
         else:
             output_file = tree.opensfm_reconstruction
 
-        if not io.file_exists(output_file) or self.rerun():
-            # Always export VisualSFM's reconstruction and undistort images
-            # as we'll use these for texturing (after GSD estimation and resizing)
-            if not args.ignore_gsd:
-                image_scale = gsd.image_scale_factor(args.orthophoto_resolution, tree.opensfm_reconstruction)
-            else:
-                image_scale = 1.0
+        # Always export VisualSFM's reconstruction and undistort images
+        # as we'll use these for texturing (after GSD estimation and resizing)
+        if not args.ignore_gsd:
+            image_scale = gsd.image_scale_factor(args.orthophoto_resolution, tree.opensfm_reconstruction)
+        else:
+            image_scale = 1.0
 
-            if not io.file_exists(tree.opensfm_reconstruction_nvm) or self.rerun():
-                octx.run('export_visualsfm --image_extension png --scale_focal %s' % image_scale)
-            else:
-                log.ODM_WARNING('Found a valid OpenSfM NVM reconstruction file in: %s' %
-                                tree.opensfm_reconstruction_nvm)
+        if not io.file_exists(tree.opensfm_reconstruction_nvm) or self.rerun():
+            octx.run('export_visualsfm --image_extension png --scale_focal %s' % image_scale)
+        else:
+            log.ODM_WARNING('Found a valid OpenSfM NVM reconstruction file in: %s' %
+                            tree.opensfm_reconstruction_nvm)
 
-            # These will be used for texturing
+        # These will be used for texturing
         undistorted_images_path = octx.path("undistorted")
 
         if not io.dir_exists(undistorted_images_path) or self.rerun():
             octx.run('undistort --image_format png --image_scale %s' % image_scale)
         else:
             log.ODM_WARNING("Found an undistorted directory in %s" % undistorted_images_path)
 
         # Skip dense reconstruction if necessary and export
         # sparse reconstruction instead
         if args.fast_orthophoto:
-            octx.run('export_ply --no-cameras' % image_scale)
+            if not io.file_exists(output_file) or self.rerun():
+                octx.run('export_ply --no-cameras' % image_scale)
+            else:
+                log.ODM_WARNING("Found a valid PLY reconstruction in %s" % output_file)
         elif args.use_opensfm_dense:
             # Undistort images at full scale in JPG
             # (TODO: we could compare the size of the PNGs if they are < than depthmap_resolution
             # and use those instead of re-exporting full resolution JPGs)
-            octx.run('undistort')
-            octx.run('compute_depthmaps')
-        else:
-            log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
-                            tree.opensfm_reconstruction)
+            if not io.file_exists(output_file) or self.rerun():
+                octx.run('undistort')
+                octx.run('compute_depthmaps')
+            else:
+                log.ODM_WARNING("Found a valid dense reconstruction in %s" % output_file)
 
         # check if reconstruction was exported to bundler before
         octx.export_bundler(tree.opensfm_bundle_list, self.rerun())
 
-        if reconstruction.georef:
+        if reconstruction.georef and (not io.file_exists(tree.opensfm_transformation) or self.rerun()):
             octx.run('export_geocoords --transformation --proj \'%s\'' % reconstruction.georef.projection.srs)
+        else:
+            log.ODM_WARNING("Will skip exporting %s" % tree.opensfm_transformation)

View file

@@ -131,6 +131,8 @@ class ODMMergeStage(types.ODM_Stage):
             # all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")
             # pdal.merge_point_clouds(all_point_clouds, tree.odm_georeferencing_model_laz, args.verbose)
             #
+
+            # Merge orthophotos
             all_orthophotos = get_submodel_paths(tree.submodels_path, "odm_orthophoto", "odm_orthophoto.tif")
             if len(all_orthophotos) > 1: