Merge pull request #1318 from pierotofy/gcpimp

Embed GCP information in outputs
pull/1325/head
Piero Toffanin 2021-08-02 09:49:27 -05:00 committed by GitHub
commit a16505861c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 215 additions and 16 deletions

View file

@@ -1 +1 @@
2.5.8
2.5.9

View file

@@ -11,7 +11,7 @@ import sys
# parse arguments
processopts = ['dataset', 'split', 'merge', 'opensfm', 'openmvs', 'odm_filterpoints',
               'odm_meshing', 'mvs_texturing', 'odm_georeferencing',
               'odm_dem', 'odm_orthophoto', 'odm_report']
               'odm_dem', 'odm_orthophoto', 'odm_report', 'odm_postprocess']

with open(os.path.join(context.root_path, 'VERSION')) as version_file:
    __version__ = version_file.read().strip()
@@ -95,7 +95,7 @@ def config(argv=None, parser=None):
    parser.add_argument('--end-with', '-e',
                        metavar='<string>',
                        action=StoreValue,
                        default='odm_report',
                        default='odm_postprocess',
                        choices=processopts,
                        help='End processing at this stage. Can be one of: %(choices)s. Default: %(default)s')

View file

@@ -4,11 +4,15 @@ OpenSfM related utils
import os, shutil, sys, json, argparse
import yaml
import numpy as np
import pyproj
from pyproj import CRS
from opendm import io
from opendm import log
from opendm import system
from opendm import context
from opendm import camera
from opendm import location
from opendm.utils import get_depthmap_resolution
from opendm.photo import find_largest_photo_dim
from opensfm.large import metadataset
@@ -18,6 +22,8 @@ from opensfm.dataset import DataSet
from opensfm import report
from opendm.multispectral import get_photos_by_band
from opendm.gpu import has_gpus
from opensfm import multiview
from opensfm.actions.export_geocoords import _get_transformation

class OSFMContext:
    def __init__(self, opensfm_project_path):
@@ -255,6 +261,10 @@ class OSFMContext:
            config_filename = self.get_config_file_path()
            with open(config_filename, 'w') as fout:
                fout.write("\n".join(config))

            # We impose our own reference_lla
            if reconstruction.is_georeferenced():
                self.write_reference_lla(reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset, reconstruction.georef.proj4())
        else:
            log.ODM_WARNING("%s already exists, not rerunning OpenSfM setup" % list_path)
@@ -428,6 +438,71 @@ class OSFMContext:
                log.ODM_WARNING("Report could not be generated")
        else:
            log.ODM_WARNING("Report %s already exported" % report_path)

    def write_reference_lla(self, offset_x, offset_y, proj4):
        reference_lla = self.path("reference_lla.json")

        longlat = CRS.from_epsg("4326")
        lon, lat = location.transform2(CRS.from_proj4(proj4), longlat, offset_x, offset_y)

        with open(reference_lla, 'w') as f:
            f.write(json.dumps({
                'latitude': lat,
                'longitude': lon,
                'altitude': 0.0
            }, indent=4))

        log.ODM_INFO("Wrote reference_lla.json")

    def ground_control_points(self, proj4):
        """
        Load ground control point information.
        """
        ds = DataSet(self.opensfm_project_path)
        gcps = ds.load_ground_control_points()
        if not gcps:
            return []

        reconstructions = ds.load_reconstruction()
        reference = ds.load_reference()
        projection = pyproj.Proj(proj4)

        t = _get_transformation(reference, projection, (0, 0))
        A, b = t[:3, :3], t[:3, 3]

        result = []

        for gcp in gcps:
            if not gcp.coordinates.has_value:
                continue

            triangulated = None

            for rec in reconstructions:
                triangulated = multiview.triangulate_gcp(gcp, rec.shots, 1.0, 0.1)
                if triangulated is None:
                    continue
                else:
                    break

            if triangulated is None:
                continue

            triangulated_topocentric = np.dot(A.T, triangulated)
            coordinates_topocentric = np.array(gcp.coordinates.value)
            coordinates = np.dot(A, coordinates_topocentric) + b
            triangulated = triangulated + b

            result.append({
                'id': gcp.id,
                'observations': [obs.shot_id for obs in gcp.observations],
                'triangulated': triangulated,
                'coordinates': coordinates,
                'error': np.abs(triangulated_topocentric - coordinates_topocentric)
            })

        return result

    def name(self):
        return os.path.basename(os.path.abspath(self.path("..")))
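
As a usage illustration (a sketch, not part of this commit): the list returned by ground_control_points() can be reduced to a quick accuracy summary. The helper below is hypothetical and assumes only the dict shape built above (per-axis absolute errors in topocentric units):

import numpy as np

def summarize_gcp_errors(gcps):
    # Each entry from ground_control_points() carries a per-axis absolute error.
    if not gcps:
        return None
    errors = np.array([g['error'] for g in gcps])
    return {
        'count': len(gcps),
        'mean_xyz': errors.mean(axis=0),
        'max_xyz': errors.max(axis=0),
    }

# Example (hypothetical project path and proj4 string):
# octx = OSFMContext('/path/to/project/opensfm')
# gcps = octx.ground_control_points("+proj=utm +zone=32 +datum=WGS84 +units=m +no_defs")
# print(summarize_gcp_errors(gcps))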

View file

@@ -359,6 +359,13 @@ class ODM_Stage:
        progressbc.send_update(self.previous_stages_progress() +
                               (self.delta_progress() / 100.0) * float(progress))

    def last_stage(self):
        if self.next_stage:
            return self.next_stage.last_stage()
        else:
            return self

    def process(self, args, outputs):
        raise NotImplementedError
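
For context, last_stage() simply walks the chain of connected stages and returns the tail; ODMMergeStage relies on this further down to jump straight to postprocessing. A minimal standalone sketch (not from this commit), using a stripped-down stand-in rather than the real ODM_Stage:

# Stand-in for ODM_Stage, for illustration only; the real class takes more arguments.
class Stage:
    def __init__(self, name):
        self.name = name
        self.next_stage = None

    def connect(self, stage):
        # Link the next stage and return it so calls can be chained.
        self.next_stage = stage
        return stage

    def last_stage(self):
        # Follow next_stage links until the tail of the pipeline is reached.
        if self.next_stage:
            return self.next_stage.last_stage()
        return self

dataset = Stage('dataset')
dataset.connect(Stage('odm_report')).connect(Stage('odm_postprocess'))
print(dataset.last_stage().name)  # -> odm_postprocess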

View file

@@ -18,6 +18,8 @@ from stages.odm_filterpoints import ODMFilterPoints
from stages.splitmerge import ODMSplitStage, ODMMergeStage
from stages.odm_report import ODMReport
from stages.odm_postprocess import ODMPostProcess

class ODMApp:
    def __init__(self, args):
@@ -61,7 +63,9 @@ class ODMApp:
                          max_concurrency=args.max_concurrency,
                          verbose=args.verbose)
        orthophoto = ODMOrthoPhotoStage('odm_orthophoto', args, progress=98.0)
        report = ODMReport('odm_report', args, progress=100.0)
        report = ODMReport('odm_report', args, progress=99.0)
        postprocess = ODMPostProcess('odm_postprocess', args, progress=100.0)

        # Normal pipeline
        self.first_stage = dataset
@@ -82,7 +86,8 @@ class ODMApp:
                .connect(georeferencing) \
                .connect(dem) \
                .connect(orthophoto) \
                .connect(report)
                .connect(report) \
                .connect(postprocess)

    def execute(self):
        try:

View file

@@ -1,6 +1,9 @@
import os
import struct
import pipes
import fiona
import fiona.crs
from collections import OrderedDict
from opendm import io
from opendm import log
@@ -10,12 +13,70 @@ from opendm import context
from opendm.cropper import Cropper
from opendm import point_cloud
from opendm.multispectral import get_primary_band_name
from opendm.osfm import OSFMContext

class ODMGeoreferencingStage(types.ODM_Stage):
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        # Export GCP information if available
        gcp_export_file = tree.path("odm_georeferencing", "ground_control_points.gpkg")
        gcp_gml_export_file = tree.path("odm_georeferencing", "ground_control_points.gml")

        if reconstruction.has_gcp() and (not io.file_exists(gcp_export_file) or self.rerun()):
            octx = OSFMContext(tree.opensfm)
            gcps = octx.ground_control_points(reconstruction.georef.proj4())

            if len(gcps):
                gcp_schema = {
                    'geometry': 'Point',
                    'properties': OrderedDict([
                        ('id', 'str'),
                        ('observations_count', 'int'),
                        ('observations_list', 'str'),
                        ('triangulated_x', 'float'),
                        ('triangulated_y', 'float'),
                        ('triangulated_z', 'float'),
                        ('error_x', 'float'),
                        ('error_y', 'float'),
                        ('error_z', 'float'),
                    ])
                }

                # Write GeoPackage
                with fiona.open(gcp_export_file, 'w', driver="GPKG",
                                crs=fiona.crs.from_string(reconstruction.georef.proj4()),
                                schema=gcp_schema) as f:
                    for gcp in gcps:
                        f.write({
                            'geometry': {
                                'type': 'Point',
                                'coordinates': gcp['coordinates'],
                            },
                            'properties': OrderedDict([
                                ('id', gcp['id']),
                                ('observations_count', len(gcp['observations'])),
                                ('observations_list', ",".join(gcp['observations'])),
                                ('triangulated_x', gcp['triangulated'][0]),
                                ('triangulated_y', gcp['triangulated'][1]),
                                ('triangulated_z', gcp['triangulated'][2]),
                                ('error_x', gcp['error'][0]),
                                ('error_y', gcp['error'][1]),
                                ('error_z', gcp['error'][2]),
                            ])
                        })

                # Write GML
                try:
                    system.run('ogr2ogr -of GML "{}" "{}"'.format(gcp_gml_export_file, gcp_export_file))
                except Exception as e:
                    log.ODM_WARNING("Cannot generate ground control points GML file: %s" % str(e))
            else:
                log.ODM_WARNING("GCPs could not be loaded for writing to %s" % gcp_export_file)

        if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
            cmd = ('pdal translate -i "%s" -o \"%s\"' % (tree.filtered_point_cloud, tree.odm_georeferencing_model_laz))
            stages = ["ferry"]
@@ -35,6 +96,12 @@ class ODMGeoreferencingStage(types.ODM_Stage):
                    '--writers.las.offset_z=0',
                    '--writers.las.a_srs="%s"' % reconstruction.georef.proj4()
                ]

                if reconstruction.has_gcp() and io.file_exists(gcp_gml_export_file):
                    log.ODM_INFO("Embedding GCP info in point cloud")
                    params += [
                        '--writers.las.vlrs="{\\\"filename\\\": \\\"%s\\\", \\\"user_id\\\": \\\"ODM_GCP\\\", \\\"description\\\": \\\"Ground Control Points (GML)\\\"}"' % gcp_gml_export_file
                    ]

                system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
@@ -68,7 +135,7 @@ class ODMGeoreferencingStage(types.ODM_Stage):
        else:
            log.ODM_WARNING('Found a valid georeferenced model in: %s'
                            % tree.odm_georeferencing_model_laz)

        if args.optimize_disk_space and io.file_exists(tree.odm_georeferencing_model_laz) and io.file_exists(tree.filtered_point_cloud):
            os.remove(tree.filtered_point_cloud)
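
To check that the ODM_GCP VLR actually made it into the georeferenced LAZ, something along these lines could be used (a sketch, not part of this commit; it assumes pdal is on PATH and that pdal info --metadata reports the reader's VLRs, which is why the JSON is walked generically instead of relying on exact key names):

import json
import subprocess

def find_gcp_vlrs(laz_path):
    # Ask PDAL for the file's metadata as JSON.
    out = subprocess.check_output(['pdal', 'info', '--metadata', laz_path])
    meta = json.loads(out)

    # Walk the nested metadata and collect any entry tagged with the ODM_GCP user id.
    def walk(node):
        if isinstance(node, dict):
            if node.get('user_id') == 'ODM_GCP':
                yield node
            for value in node.values():
                yield from walk(value)
        elif isinstance(node, list):
            for value in node:
                yield from walk(value)

    return list(walk(meta))

# Example (hypothetical path):
# vlrs = find_gcp_vlrs('odm_georeferencing/odm_georeferenced_model.laz')
# print("Found %d ODM_GCP VLR(s)" % len(vlrs))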

View file

@@ -0,0 +1,51 @@
import os

from osgeo import gdal

from opendm import io
from opendm import log
from opendm import types
from opendm.utils import copy_paths, get_processing_results_paths

class ODMPostProcess(types.ODM_Stage):
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        log.ODM_INFO("Post Processing")

        if not outputs['large']:
            # TODO: support for split-merge?

            # Embed GCP info in 2D results via
            # XML metadata fields
            gcp_gml_export_file = tree.path("odm_georeferencing", "ground_control_points.gml")

            if reconstruction.has_gcp() and io.file_exists(gcp_gml_export_file):
                skip_embed_gcp = False
                gcp_xml = ""

                with open(gcp_gml_export_file) as f:
                    gcp_xml = f.read()

                for product in [tree.odm_orthophoto_tif,
                                tree.path("odm_dem", "dsm.tif"),
                                tree.path("odm_dem", "dtm.tif")]:
                    if os.path.isfile(product):
                        ds = gdal.Open(product)
                        if ds is not None:
                            if ds.GetMetadata('xml:GROUND_CONTROL_POINTS') is None or self.rerun():
                                ds.SetMetadata(gcp_xml, 'xml:GROUND_CONTROL_POINTS')
                                ds = None
                                log.ODM_INFO("Wrote xml:GROUND_CONTROL_POINTS metadata to %s" % product)
                            else:
                                skip_embed_gcp = True
                                log.ODM_WARNING("Already embedded ground control point information")
                                break
                        else:
                            log.ODM_WARNING("Cannot open %s for writing, skipping GCP embedding" % product)

        if args.copy_to:
            try:
                copy_paths([os.path.join(args.project_path, p) for p in get_processing_results_paths()], args.copy_to, self.rerun())
            except Exception as e:
                log.ODM_WARNING("Cannot copy to %s: %s" % (args.copy_to, str(e)))

View file

@@ -14,7 +14,7 @@ from opendm.point_cloud import export_info_json
from opendm.cropper import Cropper
from opendm.orthophoto import get_orthophoto_vars, get_max_memory, generate_png
from opendm.tiles.tiler import generate_colored_hillshade
from opendm.utils import get_raster_stats, copy_paths, get_processing_results_paths
from opendm.utils import get_raster_stats

def hms(seconds):
    h = seconds // 3600
@@ -196,10 +196,3 @@ class ODMReport(types.ODM_Stage):
                log.ODM_WARNING("Cannot generate overlap diagram, point cloud stats missing")

            octx.export_report(os.path.join(tree.odm_report, "report.pdf"), odm_stats, self.rerun())

        # TODO: does this warrant a new stage?
        if args.copy_to:
            try:
                copy_paths([os.path.join(args.project_path, p) for p in get_processing_results_paths()], args.copy_to, self.rerun())
            except Exception as e:
                log.ODM_WARNING("Cannot copy to %s: %s" % (args.copy_to, str(e)))

View file

@@ -367,8 +367,9 @@ class ODMMergeStage(types.ODM_Stage):
            else:
                log.ODM_WARNING("Found merged shots.geojson in %s" % tree.odm_report)

            # Stop the pipeline short! We're done.
            self.next_stage = None
            # Stop the pipeline short by skipping to the postprocess stage.
            # Afterwards, we're done.
            self.next_stage = self.last_stage()
        else:
            log.ODM_INFO("Normal dataset, nothing to merge.")
            self.progress = 0.0