2018-01-04 17:08:59 +00:00
|
|
|
from opendm import context
|
2018-01-16 19:46:30 +00:00
|
|
|
from opendm.system import run
|
2018-01-04 17:08:59 +00:00
|
|
|
from opendm import log
|
2021-01-11 19:41:54 +00:00
|
|
|
from opendm.point_cloud import export_summary_json
|
2018-01-04 17:08:59 +00:00
|
|
|
from osgeo import ogr
|
|
|
|
import json, os
|
2018-12-12 19:24:27 +00:00
|
|
|
from opendm.concurrency import get_max_memory
|
2021-10-12 18:05:07 +00:00
|
|
|
from opendm.utils import double_quote
|
2018-01-04 17:08:59 +00:00
|
|
|
|
|
|
|
class Cropper:
    """
    Computes crop boundaries from point clouds and applies them to rasters.

    All intermediate/output files are written inside storage_dir and share
    the files_prefix naming scheme (e.g. "crop.bounds.gpkg").
    """

    def __init__(self, storage_dir, files_prefix = "crop"):
        # Directory where all generated files (bounds, temp files) are stored
        self.storage_dir = storage_dir
        # Prefix applied to every generated filename
        self.files_prefix = files_prefix

    def path(self, suffix):
        """
        @return a path relative to storage_dir and prefixed with files_prefix
        """
        return os.path.join(self.storage_dir, '{}.{}'.format(self.files_prefix, suffix))

    @staticmethod
    def crop(gpkg_path, geotiff_path, gdal_options, keep_original=True, warp_options=None):
        """
        Crop a GeoTIFF in place to the cutline stored in a GeoPackage, via gdalwarp.

        :param gpkg_path: path to GeoPackage containing the crop cutline polygon
        :param geotiff_path: path of the GeoTIFF to crop (replaced with the cropped result)
        :param gdal_options: dict of GDAL creation options, emitted as -co KEY=VALUE
        :param keep_original: when True, keep the uncropped input as <name>.original<ext>
        :param warp_options: optional list of extra gdalwarp arguments
        :return: geotiff_path (cropped on success; the original file is restored on failure)
        """
        # NOTE: mutable default ([]) replaced with None sentinel; behavior is unchanged.
        if warp_options is None:
            warp_options = []

        if not os.path.exists(gpkg_path) or not os.path.exists(geotiff_path):
            log.ODM_WARNING("Either {} or {} does not exist, will skip cropping.".format(gpkg_path, geotiff_path))
            return geotiff_path

        log.ODM_INFO("Cropping %s" % geotiff_path)

        # Rename original file
        # path/to/odm_orthophoto.tif --> path/to/odm_orthophoto.original.tif

        path, filename = os.path.split(geotiff_path)
        # path = path/to
        # filename = odm_orthophoto.tif

        basename, ext = os.path.splitext(filename)
        # basename = odm_orthophoto
        # ext = .tif

        original_geotiff = os.path.join(path, "{}.original{}".format(basename, ext))
        os.replace(geotiff_path, original_geotiff)

        try:
            kwargs = {
                'gpkg_path': double_quote(gpkg_path),
                'geotiffInput': double_quote(original_geotiff),
                'geotiffOutput': double_quote(geotiff_path),
                # Expand creation options dict into "-co KEY=VALUE" pairs
                'options': ' '.join('-co {}={}'.format(k, v) for k, v in gdal_options.items()),
                'warpOptions': ' '.join(warp_options),
                'max_memory': get_max_memory()
            }

            run('gdalwarp -cutline {gpkg_path} '
                '-crop_to_cutline '
                '{options} '
                '{warpOptions} '
                '{geotiffInput} '
                '{geotiffOutput} '
                '--config GDAL_CACHEMAX {max_memory}%'.format(**kwargs))

            if not keep_original:
                os.remove(original_geotiff)

        except Exception as e:
            log.ODM_WARNING('Something went wrong while cropping: {}'.format(e))

            # Revert rename so the caller still finds a valid raster at geotiff_path
            os.replace(original_geotiff, geotiff_path)

        return geotiff_path

    @staticmethod
    def merge_bounds(input_bound_files, output_bounds, buffer_distance = 0):
        """
        Merge multiple bound files into a single bound computed from the convex hull
        of all bounds (minus a buffer distance in meters)

        :param input_bound_files: iterable of GPKG file paths to merge
        :param output_bounds: output GPKG path (overwritten if it exists)
        :param buffer_distance: inward buffer (meters) applied to the merged hull
        """
        geomcol = ogr.Geometry(ogr.wkbGeometryCollection)

        driver = ogr.GetDriverByName('GPKG')
        # SRS of the last input layer is reused for the output layer
        srs = None

        for input_bound_file in input_bound_files:
            ds = driver.Open(input_bound_file, 0) # read-only

            layer = ds.GetLayer()
            srs = layer.GetSpatialRef()

            # Collect all Geometry
            for feature in layer:
                geomcol.AddGeometry(feature.GetGeometryRef())

            # Close data source (OGR closes on dereference)
            ds = None

        # Calculate convex hull
        convexhull = geomcol.ConvexHull()

        # If buffer distance is specified
        # Create two buffers, one shrunk by
        # N + 3 and then that buffer expanded by 3
        # so that we get smooth corners. \m/
        BUFFER_SMOOTH_DISTANCE = 3

        if buffer_distance > 0:
            convexhull = convexhull.Buffer(-(buffer_distance + BUFFER_SMOOTH_DISTANCE))
            convexhull = convexhull.Buffer(BUFFER_SMOOTH_DISTANCE)

        # Save to a new file
        if os.path.exists(output_bounds):
            driver.DeleteDataSource(output_bounds)

        out_ds = driver.CreateDataSource(output_bounds)
        layer = out_ds.CreateLayer("convexhull", srs=srs, geom_type=ogr.wkbPolygon)

        feature_def = layer.GetLayerDefn()
        feature = ogr.Feature(feature_def)
        feature.SetGeometry(convexhull)
        layer.CreateFeature(feature)
        feature = None

        # Save and close output data source
        out_ds = None

    def create_bounds_geojson(self, pointcloud_path, buffer_distance = 0, decimation_step=40):
        """
        Compute a buffered polygon around the data extents (not just a bounding box)
        of the given point cloud.

        :param pointcloud_path: path to the input point cloud
        :param buffer_distance: inward buffer (meters) applied to the convex hull
        :param decimation_step: PDAL decimation step used before boundary extraction
        @return filename to GeoJSON containing the polygon (empty string on failure)
        """
        if not os.path.exists(pointcloud_path):
            log.ODM_WARNING('Point cloud does not exist, cannot generate bounds {}'.format(pointcloud_path))
            return ''

        # Do decimation prior to extracting boundary information
        decimated_pointcloud_path = self.path('decimated.las')

        run("pdal translate -i \"{}\" "
            "-o \"{}\" "
            "decimation "
            "--filters.decimation.step={} ".format(pointcloud_path, decimated_pointcloud_path, decimation_step))

        if not os.path.exists(decimated_pointcloud_path):
            log.ODM_WARNING('Could not decimate point cloud, thus cannot generate GPKG bounds {}'.format(decimated_pointcloud_path))
            return ''

        # Use PDAL to dump boundary information
        # then read the information back

        boundary_file_path = self.path('boundary.json')

        run('pdal info --boundary --filters.hexbin.edge_size=1 --filters.hexbin.threshold=0 "{0}" > "{1}"'.format(decimated_pointcloud_path, boundary_file_path))

        pc_geojson_boundary_feature = None

        with open(boundary_file_path, 'r') as f:
            json_f = json.loads(f.read())
            pc_geojson_boundary_feature = json_f['boundary']['boundary_json']

        if pc_geojson_boundary_feature is None: raise RuntimeError("Could not determine point cloud boundaries")

        # Write bounds to GeoJSON
        tmp_bounds_geojson_path = self.path('tmp-bounds.geojson')
        with open(tmp_bounds_geojson_path, "w") as f:
            f.write(json.dumps({
                "type": "FeatureCollection",
                "features": [{
                    "type": "Feature",
                    "geometry": pc_geojson_boundary_feature
                }]
            }))

        # Create a convex hull around the boundary
        # as to encompass the entire area (no holes)
        driver = ogr.GetDriverByName('GeoJSON')
        ds = driver.Open(tmp_bounds_geojson_path, 0) # read-only
        layer = ds.GetLayer()

        # Collect all Geometry
        geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
        for feature in layer:
            geomcol.AddGeometry(feature.GetGeometryRef())

        # Calculate convex hull
        convexhull = geomcol.ConvexHull()

        # If buffer distance is specified
        # Create two buffers, one shrunk by
        # N + 3 and then that buffer expanded by 3
        # so that we get smooth corners. \m/
        BUFFER_SMOOTH_DISTANCE = 3

        if buffer_distance > 0:
            # For small areas, check that buffering doesn't obliterate
            # our hull
            tmp = convexhull.Buffer(-(buffer_distance + BUFFER_SMOOTH_DISTANCE))
            tmp = tmp.Buffer(BUFFER_SMOOTH_DISTANCE)
            if tmp.Area() > 0:
                convexhull = tmp
            else:
                log.ODM_WARNING("Very small crop area detected, we will not smooth it.")

        # Save to a new file
        bounds_geojson_path = self.path('bounds.geojson')
        if os.path.exists(bounds_geojson_path):
            os.remove(bounds_geojson_path)

        out_ds = driver.CreateDataSource(bounds_geojson_path)
        layer = out_ds.CreateLayer("convexhull", geom_type=ogr.wkbPolygon)

        feature_def = layer.GetLayerDefn()
        feature = ogr.Feature(feature_def)
        feature.SetGeometry(convexhull)
        layer.CreateFeature(feature)
        feature = None

        # Save and close data sources
        out_ds = ds = None

        # Remove decimated point cloud
        if os.path.exists(decimated_pointcloud_path):
            os.remove(decimated_pointcloud_path)

        # Remove tmp bounds
        if os.path.exists(tmp_bounds_geojson_path):
            os.remove(tmp_bounds_geojson_path)

        return bounds_geojson_path

    def create_bounds_gpkg(self, pointcloud_path, buffer_distance = 0, decimation_step=40):
        """
        Compute a buffered polygon around the data extents (not just a bounding box)
        of the given point cloud.

        :param pointcloud_path: path to the input point cloud
        :param buffer_distance: inward buffer (meters) applied to the convex hull
        :param decimation_step: PDAL decimation step used before boundary extraction
        @return filename to Geopackage containing the polygon (empty string on failure)
        """
        if not os.path.exists(pointcloud_path):
            log.ODM_WARNING('Point cloud does not exist, cannot generate GPKG bounds {}'.format(pointcloud_path))
            return ''

        bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance, decimation_step)

        # Extract the point cloud's SRS (proj4) from a PDAL summary dump
        summary_file_path = os.path.join(self.storage_dir, '{}.summary.json'.format(self.files_prefix))
        export_summary_json(pointcloud_path, summary_file_path)

        pc_proj4 = None
        with open(summary_file_path, 'r') as f:
            json_f = json.loads(f.read())
            pc_proj4 = json_f['summary']['srs']['proj4']

        if pc_proj4 is None: raise RuntimeError("Could not determine point cloud proj4 declaration")

        bounds_gpkg_path = os.path.join(self.storage_dir, '{}.bounds.gpkg'.format(self.files_prefix))

        if os.path.isfile(bounds_gpkg_path):
            os.remove(bounds_gpkg_path)

        # Convert bounds to GPKG
        kwargs = {
            'input': double_quote(bounds_geojson_path),
            'output': double_quote(bounds_gpkg_path),
            'proj4': pc_proj4
        }

        run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.format(**kwargs))

        return bounds_gpkg_path