Mirror of https://github.com/OpenDroneMap/ODM
Add postprocess stage, embed GCP info in point clouds, rasters
parent 335802b563
commit 92d868e33e
VERSION
@@ -1 +1 @@
-2.5.8
+2.5.9
opendm/config.py
@@ -11,7 +11,7 @@ import sys

 # parse arguments
 processopts = ['dataset', 'split', 'merge', 'opensfm', 'openmvs', 'odm_filterpoints',
                'odm_meshing', 'mvs_texturing', 'odm_georeferencing',
-               'odm_dem', 'odm_orthophoto', 'odm_report']
+               'odm_dem', 'odm_orthophoto', 'odm_report', 'odm_postprocess']

 with open(os.path.join(context.root_path, 'VERSION')) as version_file:
     __version__ = version_file.read().strip()

@@ -95,7 +95,7 @@ def config(argv=None, parser=None):
     parser.add_argument('--end-with', '-e',
                         metavar='<string>',
                         action=StoreValue,
-                        default='odm_report',
+                        default='odm_postprocess',
                         choices=processopts,
                         help='End processing at this stage. Can be one of: %(choices)s. Default: %(default)s')
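The new stage is appended to processopts and becomes the default --end-with target, so a default run now finishes with odm_postprocess instead of odm_report. For illustration only, a minimal sketch of how a start/end pair selects a contiguous slice of this list; the helper name is made up here and ODM's real gating lives in its stage classes:

    # Illustrative helper (not ODM code): pick the stages between two markers.
    processopts = ['dataset', 'split', 'merge', 'opensfm', 'openmvs', 'odm_filterpoints',
                   'odm_meshing', 'mvs_texturing', 'odm_georeferencing',
                   'odm_dem', 'odm_orthophoto', 'odm_report', 'odm_postprocess']

    def stages_between(start_with, end_with):
        return processopts[processopts.index(start_with):processopts.index(end_with) + 1]

    print(stages_between('odm_dem', 'odm_postprocess'))
    # ['odm_dem', 'odm_orthophoto', 'odm_report', 'odm_postprocess']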
opendm/osfm.py
@@ -488,9 +488,11 @@ class OSFMContext:

             if triangulated is None:
                 continue

-            coordinates = np.array(gcp.coordinates.value)
-            coordinates = np.dot(A, coordinates) + b
+            triangulated_topocentric = np.dot(A.T, triangulated)
+
+            coordinates_topocentric = np.array(gcp.coordinates.value)
+            coordinates = np.dot(A, coordinates_topocentric) + b
+            triangulated = triangulated + b

             result.append({
@@ -498,7 +500,7 @@ class OSFMContext:
                 'observations': [obs.shot_id for obs in gcp.observations],
                 'triangulated': triangulated,
                 'coordinates': coordinates,
-                'error': np.abs(triangulated - coordinates)
+                'error': np.abs(triangulated_topocentric - coordinates_topocentric)
             })

         return result
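The change above measures GCP error in the topocentric frame (comparing like with like) while still returning georeferenced coordinates for reporting. The same arithmetic as a standalone sketch, with placeholder values for the alignment matrix A and offset b (both are assumptions here; in the real code they come from the OpenSfM reconstruction):

    import numpy as np

    # Placeholder alignment: identity rotation and an arbitrary UTM-style offset.
    A = np.eye(3)
    b = np.array([435000.0, 4532000.0, 120.0])

    triangulated = np.array([10.0, 5.0, 2.0])             # triangulated GCP position
    coordinates_topocentric = np.array([10.2, 4.9, 2.1])  # surveyed GCP, topocentric frame

    triangulated_topocentric = np.dot(A.T, triangulated)
    error = np.abs(triangulated_topocentric - coordinates_topocentric)  # frame-consistent error
    coordinates = np.dot(A, coordinates_topocentric) + b                # georeferenced, for output
    triangulated_geo = triangulated + b

    print(error)  # per-axis absolute error in local units (meters)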
opendm/types.py
@@ -359,6 +359,13 @@ class ODM_Stage:
         progressbc.send_update(self.previous_stages_progress() +
                             (self.delta_progress() / 100.0) * float(progress))

+    def last_stage(self):
+        if self.next_stage:
+            return self.next_stage.last_stage()
+        else:
+            return self
+
+
     def process(self, args, outputs):
         raise NotImplementedError
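Stages form a singly linked list through next_stage, so last_stage() simply walks to the tail; the split-merge change further down uses it to jump straight to the final postprocess stage instead of terminating the pipeline. A self-contained toy (not ODM code) showing the pattern and why .connect() calls chain:

    class Stage:
        def __init__(self, name):
            self.name = name
            self.next_stage = None

        def connect(self, stage):
            # Returning the connected stage is what makes .connect(a).connect(b) chains work.
            self.next_stage = stage
            return stage

        def last_stage(self):
            # Follow next_stage links until the tail of the pipeline.
            return self.next_stage.last_stage() if self.next_stage else self

    report = Stage('odm_report')
    postprocess = Stage('odm_postprocess')
    report.connect(postprocess)

    assert report.last_stage() is postprocess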
stages/odm_app.py
@@ -18,6 +18,8 @@ from stages.odm_filterpoints import ODMFilterPoints
 from stages.splitmerge import ODMSplitStage, ODMMergeStage

 from stages.odm_report import ODMReport
+from stages.odm_postprocess import ODMPostProcess
+

 class ODMApp:
     def __init__(self, args):
@@ -61,7 +63,9 @@ class ODMApp:
                           max_concurrency=args.max_concurrency,
                           verbose=args.verbose)
         orthophoto = ODMOrthoPhotoStage('odm_orthophoto', args, progress=98.0)
-        report = ODMReport('odm_report', args, progress=100.0)
+        report = ODMReport('odm_report', args, progress=99.0)
+        postprocess = ODMPostProcess('odm_postprocess', args, progress=100.0)
+

         # Normal pipeline
         self.first_stage = dataset
@@ -82,7 +86,8 @@ class ODMApp:
                     .connect(georeferencing) \
                     .connect(dem) \
                     .connect(orthophoto) \
-                    .connect(report)
+                    .connect(report) \
+                    .connect(postprocess)

     def execute(self):
         try:
stages/odm_georeferencing.py
@@ -12,70 +12,20 @@ from opendm import system
 from opendm import context
 from opendm.cropper import Cropper
 from opendm import point_cloud
-from opendm.osfm import OSFMContext
+from opendm.multispectral import get_primary_band_name
+from opendm.osfm import OSFMContext

 class ODMGeoreferencingStage(types.ODM_Stage):
     def process(self, args, outputs):
         tree = outputs['tree']
         reconstruction = outputs['reconstruction']

-        # if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
-        #     cmd = ('pdal translate -i "%s" -o \"%s\"' % (tree.filtered_point_cloud, tree.odm_georeferencing_model_laz))
-        #     stages = ["ferry"]
-        #     params = [
-        #         '--filters.ferry.dimensions="views => UserData"',
-        #         '--writers.las.compression="lazip"',
-        #     ]
-
-        #     if reconstruction.is_georeferenced():
-        #         log.ODM_INFO("Georeferencing point cloud")
-
-        #         stages.append("transformation")
-        #         params += [
-        #             '--filters.transformation.matrix="1 0 0 %s 0 1 0 %s 0 0 1 0 0 0 0 1"' % reconstruction.georef.utm_offset(),
-        #             '--writers.las.offset_x=%s' % reconstruction.georef.utm_east_offset,
-        #             '--writers.las.offset_y=%s' % reconstruction.georef.utm_north_offset,
-        #             '--writers.las.offset_z=0',
-        #             '--writers.las.a_srs="%s"' % reconstruction.georef.proj4()
-        #         ]
-
-        #         system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
-
-        #         self.update_progress(50)
-
-        #         if args.crop > 0:
-        #             log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
-        #             cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')
-
-        #             if args.fast_orthophoto:
-        #                 decimation_step = 10
-        #             else:
-        #                 decimation_step = 40
-
-        #             # More aggressive decimation for large datasets
-        #             if not args.fast_orthophoto:
-        #                 decimation_step *= int(len(reconstruction.photos) / 1000) + 1
-        #                 decimation_step = min(decimation_step, 95)
-
-        #             try:
-        #                 cropper.create_bounds_gpkg(tree.odm_georeferencing_model_laz, args.crop,
-        #                                            decimation_step=decimation_step)
-        #             except:
-        #                 log.ODM_WARNING("Cannot calculate crop bounds! We will skip cropping")
-        #                 args.crop = 0
-        #     else:
-        #         log.ODM_INFO("Converting point cloud (non-georeferenced)")
-        #         system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
-
-        #     point_cloud.post_point_cloud_steps(args, tree, self.rerun())
-        # else:
-        #     log.ODM_WARNING('Found a valid georeferenced model in: %s'
-        #                     % tree.odm_georeferencing_model_laz)

         # Export GCP information if available

         gcp_export_file = tree.path("odm_georeferencing", "ground_control_points.gpkg")
-        if (reconstruction.has_gcp() and not io.file_exists(gcp_export_file)) or self.rerun():
+        gcp_gml_export_file = tree.path("odm_georeferencing", "ground_control_points.gml")
+
+        if reconstruction.has_gcp() and (not io.file_exists(gcp_export_file) or self.rerun()):
             octx = OSFMContext(tree.opensfm)
             gcps = octx.ground_control_points(reconstruction.georef.proj4())

@@ -117,9 +67,75 @@ class ODMGeoreferencingStage(types.ODM_Stage):
                             ('error_z', gcp['error'][2]),
                         ])
                     })

+                # Write GML
+                try:
+                    system.run('ogr2ogr -of GML "{}" "{}"'.format(gcp_gml_export_file, gcp_export_file))
+                except Exception as e:
+                    log.ODM_WARNING("Cannot generate ground control points GML file: %s" % str(e))
             else:
                 log.ODM_WARNING("GCPs could not be loaded for writing to %s" % gcp_export_file)

+        if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
+            cmd = ('pdal translate -i "%s" -o \"%s\"' % (tree.filtered_point_cloud, tree.odm_georeferencing_model_laz))
+            stages = ["ferry"]
+            params = [
+                '--filters.ferry.dimensions="views => UserData"',
+                '--writers.las.compression="lazip"',
+            ]
+
+            if reconstruction.is_georeferenced():
+                log.ODM_INFO("Georeferencing point cloud")
+
+                stages.append("transformation")
+                params += [
+                    '--filters.transformation.matrix="1 0 0 %s 0 1 0 %s 0 0 1 0 0 0 0 1"' % reconstruction.georef.utm_offset(),
+                    '--writers.las.offset_x=%s' % reconstruction.georef.utm_east_offset,
+                    '--writers.las.offset_y=%s' % reconstruction.georef.utm_north_offset,
+                    '--writers.las.offset_z=0',
+                    '--writers.las.a_srs="%s"' % reconstruction.georef.proj4()
+                ]
+
+                if reconstruction.has_gcp() and io.file_exists(gcp_gml_export_file):
+                    log.ODM_INFO("Embedding GCP info in point cloud")
+                    params += [
+                        '--writers.las.vlrs="{\\\"filename\\\": \\\"%s\\\", \\\"user_id\\\": \\\"ODM_GCP\\\", \\\"description\\\": \\\"Ground Control Points (GML)\\\"}"' % gcp_gml_export_file
+                    ]
+
+                system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
+
+                self.update_progress(50)
+
+                if args.crop > 0:
+                    log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
+                    cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')
+
+                    if args.fast_orthophoto:
+                        decimation_step = 10
+                    else:
+                        decimation_step = 40
+
+                    # More aggressive decimation for large datasets
+                    if not args.fast_orthophoto:
+                        decimation_step *= int(len(reconstruction.photos) / 1000) + 1
+                        decimation_step = min(decimation_step, 95)
+
+                    try:
+                        cropper.create_bounds_gpkg(tree.odm_georeferencing_model_laz, args.crop,
+                                                   decimation_step=decimation_step)
+                    except:
+                        log.ODM_WARNING("Cannot calculate crop bounds! We will skip cropping")
+                        args.crop = 0
+            else:
+                log.ODM_INFO("Converting point cloud (non-georeferenced)")
+                system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
+
+            point_cloud.post_point_cloud_steps(args, tree, self.rerun())
+        else:
+            log.ODM_WARNING('Found a valid georeferenced model in: %s'
+                            % tree.odm_georeferencing_model_laz)

         if args.optimize_disk_space and io.file_exists(tree.odm_georeferencing_model_laz) and io.file_exists(tree.filtered_point_cloud):
             os.remove(tree.filtered_point_cloud)
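With this ordering the GML file exists before the pdal translate call runs, so the GCPs can be attached to the output LAZ as a VLR with user_id ODM_GCP. One way to check the result from Python, as a sketch only (assumes laspy 2.x with a LAZ backend such as lazrs is installed; the file path is illustrative):

    import laspy

    las = laspy.read("odm_georeferencing/odm_georeferenced_model.laz")  # illustrative path
    for vlr in las.header.vlrs:
        if vlr.user_id == "ODM_GCP":
            print(vlr.description)               # "Ground Control Points (GML)"
            print(bytes(vlr.record_data)[:200])  # start of the embedded GML payload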
stages/odm_postprocess.py (new file)
@@ -0,0 +1,49 @@
+import os
+
+from osgeo import gdal
+from opendm import io
+from opendm import log
+from opendm import types
+from opendm.utils import copy_paths, get_processing_results_paths
+
+class ODMPostProcess(types.ODM_Stage):
+    def process(self, args, outputs):
+        tree = outputs['tree']
+        reconstruction = outputs['reconstruction']
+
+        log.ODM_INFO("Post Processing")
+
+        if not outputs['large']:
+            # TODO: support for split-merge?
+
+            # Embed GCP info in 2D results via
+            # XML metadata fields
+            gcp_gml_export_file = tree.path("odm_georeferencing", "ground_control_points.gml")
+
+            if reconstruction.has_gcp() and io.file_exists(gcp_gml_export_file):
+                skip_embed_gcp = False
+                with open(gcp_gml_export_file) as f:
+                    gcp_xml = f.read()
+
+                for product in [tree.odm_orthophoto_tif,
+                                tree.path("odm_dem", "dsm.tif"),
+                                tree.path("odm_dem", "dtm.tif")]:
+                    if os.path.isfile(product):
+                        ds = gdal.Open(product)
+                        if ds is not None:
+                            if ds.GetMetadata('xml:GROUND_CONTROL_POINTS') is None or self.rerun():
+                                ds.SetMetadata(gcp_xml, 'xml:GROUND_CONTROL_POINTS')
+                                ds = None
+                                log.ODM_INFO("Wrote xml:GROUND_CONTROL_POINTS metadata to %s" % product)
+                            else:
+                                skip_embed_gcp = True
+                                log.ODM_WARNING("Already embedded ground control point information.")
+                                break
+                        else:
+                            log.ODM_WARNING("Cannot open %s for writing, skipping GCP embedding" % product)
+
+        if args.copy_to:
+            try:
+                copy_paths([os.path.join(args.project_path, p) for p in get_processing_results_paths()], args.copy_to, self.rerun())
+            except Exception as e:
+                log.ODM_WARNING("Cannot copy to %s: %s" % (args.copy_to, str(e)))
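The GCP GML document is stored in the rasters' xml:GROUND_CONTROL_POINTS metadata domain, which downstream tools can query with GDAL. A minimal read-back sketch (the path is illustrative; with the GDAL Python bindings, GetMetadata on an xml: domain returns the document as a single-element list when present):

    from osgeo import gdal

    ds = gdal.Open("odm_orthophoto/odm_orthophoto.tif")  # illustrative path
    if ds is not None:
        gcp_md = ds.GetMetadata('xml:GROUND_CONTROL_POINTS')
        if gcp_md:
            print(gcp_md[0])  # the embedded GCP GML document
        else:
            print("No GCP metadata embedded")
        ds = None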
stages/odm_report.py
@@ -14,7 +14,7 @@ from opendm.point_cloud import export_info_json
 from opendm.cropper import Cropper
 from opendm.orthophoto import get_orthophoto_vars, get_max_memory, generate_png
 from opendm.tiles.tiler import generate_colored_hillshade
-from opendm.utils import get_raster_stats, copy_paths, get_processing_results_paths
+from opendm.utils import get_raster_stats

 def hms(seconds):
     h = seconds // 3600
@@ -196,10 +196,3 @@ class ODMReport(types.ODM_Stage):
             log.ODM_WARNING("Cannot generate overlap diagram, point cloud stats missing")

         octx.export_report(os.path.join(tree.odm_report, "report.pdf"), odm_stats, self.rerun())
-
-        # TODO: does this warrant a new stage?
-        if args.copy_to:
-            try:
-                copy_paths([os.path.join(args.project_path, p) for p in get_processing_results_paths()], args.copy_to, self.rerun())
-            except Exception as e:
-                log.ODM_WARNING("Cannot copy to %s: %s" % (args.copy_to, str(e)))
stages/run_opensfm.py
@@ -55,6 +55,18 @@ class ODMOpenSfMStage(types.ODM_Stage):
             cleanup_disk_space()
             return

+        # Stats are computed in the local CRS (before geoprojection)
+        if not args.skip_report:
+
+            # TODO: this will fail to compute proper statistics if
+            # the pipeline is run with --skip-report and is subsequently
+            # rerun without --skip-report and a --rerun-* parameter (due to the
+            # reconstruction.json file being replaced below). It's an isolated use case.
+
+            octx.export_stats(self.rerun())
+
+        self.update_progress(75)
+
         # We now switch to a geographic CRS
         geocoords_flag_file = octx.path("exported_geocoords.txt")
@@ -67,12 +79,6 @@ class ODMOpenSfMStage(types.ODM_Stage):
         else:
             log.ODM_WARNING("Will skip exporting %s" % tree.opensfm_geocoords_reconstruction)

-        self.update_progress(75)
-
-        # Stats are computed in the geographic CRS
-        if not args.skip_report:
-            octx.export_stats(self.rerun())
-
         self.update_progress(80)

         updated_config_flag_file = octx.path('updated_config.txt')
stages/splitmerge.py
@@ -367,8 +367,9 @@ class ODMMergeStage(types.ODM_Stage):
             else:
                 log.ODM_WARNING("Found merged shots.geojson in %s" % tree.odm_report)

-            # Stop the pipeline short! We're done.
-            self.next_stage = None
+            # Stop the pipeline short by skipping to the postprocess stage.
+            # Afterwards, we're done.
+            self.next_stage = self.last_stage()
         else:
             log.ODM_INFO("Normal dataset, nothing to merge.")
             self.progress = 0.0