diff --git a/.github/workflows/publish-docker-and-wsl.yaml b/.github/workflows/publish-docker-and-wsl.yaml
index 45618e6d..07e9612b 100644
--- a/.github/workflows/publish-docker-and-wsl.yaml
+++ b/.github/workflows/publish-docker-and-wsl.yaml
@@ -85,4 +85,8 @@ jobs:
       run: |
         echo "Docker image digest: ${{ steps.docker_build.outputs.digest }}"
         echo "WSL AMD64 rootfs URL: ${{ steps.upload-amd64-wsl-rootfs.browser_download_url }}"
-
+    # Trigger NodeODM build
+    - name: Dispatch NodeODM Build Event
+      id: nodeodm_dispatch
+      run: |
+        curl -X POST -u "${{secrets.PAT_USERNAME}}:${{secrets.PAT_TOKEN}}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker.yaml/dispatches --data '{"ref": "master"}'
\ No newline at end of file
diff --git a/.github/workflows/publish-docker-gpu.yaml b/.github/workflows/publish-docker-gpu.yaml
index 06f284b5..3ef72f89 100644
--- a/.github/workflows/publish-docker-gpu.yaml
+++ b/.github/workflows/publish-docker-gpu.yaml
@@ -34,3 +34,8 @@ jobs:
         platforms: linux/amd64
         push: true
         tags: opendronemap/odm:gpu
+    # Trigger NodeODM build
+    - name: Dispatch NodeODM Build Event
+      id: nodeodm_dispatch
+      run: |
+        curl -X POST -u "${{secrets.PAT_USERNAME}}:${{secrets.PAT_TOKEN}}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker-gpu.yaml/dispatches --data '{"ref": "master"}'
\ No newline at end of file
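Note (illustration, not part of the patch): both workflow additions hit the GitHub REST API's workflow_dispatch endpoint with a personal access token. Below is a rough Python equivalent of the curl call, assuming hypothetical GH_USER/GH_TOKEN environment variables standing in for the PAT_USERNAME/PAT_TOKEN repository secrets.

# Minimal sketch: trigger a workflow_dispatch event, mirroring the curl call above.
# GH_USER / GH_TOKEN are hypothetical environment variables, not repository secrets.
import os
import requests

url = "https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker.yaml/dispatches"
resp = requests.post(
    url,
    auth=(os.environ["GH_USER"], os.environ["GH_TOKEN"]),
    headers={"Accept": "application/vnd.github.everest-preview+json"},
    json={"ref": "master"},  # branch the NodeODM workflow should run on
)
resp.raise_for_status()  # GitHub answers 204 No Content on success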
diff --git a/SuperBuild/cmake/External-OpenSfM.cmake b/SuperBuild/cmake/External-OpenSfM.cmake
index 19772065..548082ca 100644
--- a/SuperBuild/cmake/External-OpenSfM.cmake
+++ b/SuperBuild/cmake/External-OpenSfM.cmake
@@ -19,7 +19,7 @@ ExternalProject_Add(${_proj_name}
   #--Download step--------------
   DOWNLOAD_DIR    ${SB_DOWNLOAD_DIR}
   GIT_REPOSITORY  https://github.com/OpenDroneMap/OpenSfM/
-  GIT_TAG         257
+  GIT_TAG         261
   #--Update/Patch step----------
   UPDATE_COMMAND  git submodule update --init --recursive
   #--Configure step-------------
diff --git a/VERSION b/VERSION
index 35d16fb1..6a6a3d8e 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-2.5.7
+2.6.1
diff --git a/opendm/config.py b/opendm/config.py
index bd462717..c064cae6 100755
--- a/opendm/config.py
+++ b/opendm/config.py
@@ -11,7 +11,7 @@ import sys
 # parse arguments
 processopts = ['dataset', 'split', 'merge', 'opensfm', 'openmvs', 'odm_filterpoints',
                'odm_meshing', 'mvs_texturing', 'odm_georeferencing',
-               'odm_dem', 'odm_orthophoto', 'odm_report']
+               'odm_dem', 'odm_orthophoto', 'odm_report', 'odm_postprocess']
 
 with open(os.path.join(context.root_path, 'VERSION')) as version_file:
     __version__ = version_file.read().strip()
@@ -95,7 +95,7 @@ def config(argv=None, parser=None):
     parser.add_argument('--end-with', '-e',
                         metavar='<string>',
                         action=StoreValue,
-                        default='odm_report',
+                        default='odm_postprocess',
                         choices=processopts,
                         help='End processing at this stage. Can be one of: %(choices)s. Default: %(default)s')
 
diff --git a/opendm/osfm.py b/opendm/osfm.py
index 9794b77f..3fdec0ab 100644
--- a/opendm/osfm.py
+++ b/opendm/osfm.py
@@ -4,11 +4,15 @@ OpenSfM related utils
 
 import os, shutil, sys, json, argparse
 import yaml
+import numpy as np
+import pyproj
+from pyproj import CRS
 from opendm import io
 from opendm import log
 from opendm import system
 from opendm import context
 from opendm import camera
+from opendm import location
 from opendm.utils import get_depthmap_resolution
 from opendm.photo import find_largest_photo_dim
 from opensfm.large import metadataset
@@ -18,6 +22,8 @@ from opensfm.dataset import DataSet
 from opensfm import report
 from opendm.multispectral import get_photos_by_band
 from opendm.gpu import has_gpus
+from opensfm import multiview
+from opensfm.actions.export_geocoords import _get_transformation
 
 class OSFMContext:
     def __init__(self, opensfm_project_path):
@@ -255,6 +261,10 @@ class OSFMContext:
             config_filename = self.get_config_file_path()
             with open(config_filename, 'w') as fout:
                 fout.write("\n".join(config))
+
+            # We impose our own reference_lla
+            if reconstruction.is_georeferenced():
+                self.write_reference_lla(reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset, reconstruction.georef.proj4())
         else:
             log.ODM_WARNING("%s already exists, not rerunning OpenSfM setup" % list_path)
 
@@ -428,6 +438,71 @@ class OSFMContext:
                 log.ODM_WARNING("Report could not be generated")
         else:
             log.ODM_WARNING("Report %s already exported" % report_path)
+
+    def write_reference_lla(self, offset_x, offset_y, proj4):
+        reference_lla = self.path("reference_lla.json")
+
+        longlat = CRS.from_epsg("4326")
+        lon, lat = location.transform2(CRS.from_proj4(proj4), longlat, offset_x, offset_y)
+
+        with open(reference_lla, 'w') as f:
+            f.write(json.dumps({
+                'latitude': lat,
+                'longitude': lon,
+                'altitude': 0.0
+            }, indent=4))
+
+        log.ODM_INFO("Wrote reference_lla.json")
+
+    def ground_control_points(self, proj4):
+        """
+        Load ground control point information.
+        """
+        ds = DataSet(self.opensfm_project_path)
+        gcps = ds.load_ground_control_points()
+        if not gcps:
+            return []
+
+        reconstructions = ds.load_reconstruction()
+        reference = ds.load_reference()
+
+        projection = pyproj.Proj(proj4)
+        t = _get_transformation(reference, projection, (0, 0))
+        A, b = t[:3, :3], t[:3, 3]
+
+        result = []
+
+        for gcp in gcps:
+            if not gcp.coordinates.has_value:
+                continue
+            triangulated = None
+
+            for rec in reconstructions:
+                triangulated = multiview.triangulate_gcp(gcp, rec.shots, 1.0, 0.1)
+                if triangulated is None:
+                    continue
+                else:
+                    break
+
+            if triangulated is None:
+                continue
+
+            triangulated_topocentric = np.dot(A.T, triangulated)
+
+            coordinates_topocentric = np.array(gcp.coordinates.value)
+            coordinates = np.dot(A, coordinates_topocentric) + b
+            triangulated = triangulated + b
+
+            result.append({
+                'id': gcp.id,
+                'observations': [obs.shot_id for obs in gcp.observations],
+                'triangulated': triangulated,
+                'coordinates': coordinates,
+                'error': np.abs(triangulated_topocentric - coordinates_topocentric)
+            })
+
+        return result
+
     def name(self):
         return os.path.basename(os.path.abspath(self.path("..")))
 
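Note (illustration, not part of the patch): write_reference_lla converts the reconstruction's UTM offsets into the WGS84 latitude/longitude pair that reference_lla.json stores, delegating to opendm.location.transform2. A rough stand-alone equivalent using pyproj directly, with made-up offsets and projection string:

# Sketch only: reproject a UTM offset to WGS84 lon/lat with pyproj.
# The proj4 string and offsets below are illustrative values, not ODM output.
from pyproj import CRS, Transformer

utm = CRS.from_proj4("+proj=utm +zone=32 +datum=WGS84 +units=m +no_defs")
wgs84 = CRS.from_epsg(4326)

# always_xy=True keeps the (easting, northing) -> (lon, lat) axis order
transformer = Transformer.from_crs(utm, wgs84, always_xy=True)
lon, lat = transformer.transform(434000.0, 4622000.0)

print({"latitude": lat, "longitude": lon, "altitude": 0.0})  # shape of reference_lla.json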
diff --git a/opendm/photo.py b/opendm/photo.py
index 3f6fc2b0..580c5b52 100644
--- a/opendm/photo.py
+++ b/opendm/photo.py
@@ -201,6 +201,7 @@ class ODM_Photo:
             '@Camera:RigCameraIndex', # Parrot Sequoia, Sentera 21244-00_3.2MP-GS-0001
             'Camera:RigCameraIndex', # MicaSense Altum
         ])
+
         self.set_attr_from_xmp_tag('radiometric_calibration', tags, [
             'MicaSense:RadiometricCalibration',
         ])
@@ -233,7 +234,8 @@ class ODM_Photo:
             ], float)
 
             self.set_attr_from_xmp_tag('capture_uuid', tags, [
-                '@drone-dji:CaptureUUID'
+                '@drone-dji:CaptureUUID', # DJI
+                '@Camera:ImageUniqueID', # sentera 6x
             ])
 
             # Phantom 4 RTK
diff --git a/opendm/types.py b/opendm/types.py
index 8a673552..568d101d 100644
--- a/opendm/types.py
+++ b/opendm/types.py
@@ -359,6 +359,13 @@ class ODM_Stage:
         progressbc.send_update(self.previous_stages_progress() +
                             (self.delta_progress() / 100.0) * float(progress))
 
+    def last_stage(self):
+        if self.next_stage:
+            return self.next_stage.last_stage()
+        else:
+            return self
+
+
     def process(self, args, outputs):
         raise NotImplementedError
 
diff --git a/stages/odm_app.py b/stages/odm_app.py
index f488f380..15b11a07 100644
--- a/stages/odm_app.py
+++ b/stages/odm_app.py
@@ -18,6 +18,8 @@ from stages.odm_filterpoints import ODMFilterPoints
 from stages.splitmerge import ODMSplitStage, ODMMergeStage
 from stages.odm_report import ODMReport
 
+from stages.odm_postprocess import ODMPostProcess
+
 
 class ODMApp:
     def __init__(self, args):
@@ -61,7 +63,9 @@ class ODMApp:
                                          max_concurrency=args.max_concurrency,
                                          verbose=args.verbose)
         orthophoto = ODMOrthoPhotoStage('odm_orthophoto', args, progress=98.0)
-        report = ODMReport('odm_report', args, progress=100.0)
+        report = ODMReport('odm_report', args, progress=99.0)
+        postprocess = ODMPostProcess('odm_postprocess', args, progress=100.0)
+
 
         # Normal pipeline
         self.first_stage = dataset
@@ -82,7 +86,8 @@ class ODMApp:
                 .connect(georeferencing) \
                 .connect(dem) \
                 .connect(orthophoto) \
-                .connect(report)
+                .connect(report) \
+                .connect(postprocess)
 
     def execute(self):
         try:
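Note (illustration, not part of the patch): the new ODM_Stage.last_stage() walks the next_stage chain to its tail, which is what lets the split-merge change at the end of this patch jump straight to odm_postprocess instead of terminating the pipeline. A self-contained toy version of the idea (simplified names, not the real ODM_Stage API):

# Toy illustration of a linked stage list with last_stage(); the class and
# stage names are stand-ins, not ODM's actual implementation.
class Stage:
    def __init__(self, name):
        self.name = name
        self.next_stage = None

    def connect(self, stage):
        # Link and return the connected stage so calls can be chained.
        self.next_stage = stage
        return stage

    def last_stage(self):
        # Walk the chain until a stage with no successor is found.
        return self.next_stage.last_stage() if self.next_stage else self

merge = Stage("merge")
report = Stage("odm_report")
postprocess = Stage("odm_postprocess")
merge.connect(report).connect(postprocess)

# Instead of ending the pipeline (next_stage = None), jump to the final stage.
merge.next_stage = merge.last_stage()
print(merge.next_stage.name)  # odm_postprocess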
diff --git a/stages/odm_georeferencing.py b/stages/odm_georeferencing.py
index 99888b82..6e908466 100644
--- a/stages/odm_georeferencing.py
+++ b/stages/odm_georeferencing.py
@@ -1,6 +1,9 @@
 import os
 import struct
 import pipes
+import fiona
+import fiona.crs
+from collections import OrderedDict
 
 from opendm import io
 from opendm import log
@@ -10,12 +13,70 @@ from opendm import context
 from opendm.cropper import Cropper
 from opendm import point_cloud
 from opendm.multispectral import get_primary_band_name
+from opendm.osfm import OSFMContext
 
 
 class ODMGeoreferencingStage(types.ODM_Stage):
     def process(self, args, outputs):
         tree = outputs['tree']
         reconstruction = outputs['reconstruction']
 
+        # Export GCP information if available
+        gcp_export_file = tree.path("odm_georeferencing", "ground_control_points.gpkg")
+        gcp_gml_export_file = tree.path("odm_georeferencing", "ground_control_points.gml")
+
+        if reconstruction.has_gcp() and (not io.file_exists(gcp_export_file) or self.rerun()):
+            octx = OSFMContext(tree.opensfm)
+            gcps = octx.ground_control_points(reconstruction.georef.proj4())
+
+            if len(gcps):
+                gcp_schema = {
+                    'geometry': 'Point',
+                    'properties': OrderedDict([
+                        ('id', 'str'),
+                        ('observations_count', 'int'),
+                        ('observations_list', 'str'),
+                        ('triangulated_x', 'float'),
+                        ('triangulated_y', 'float'),
+                        ('triangulated_z', 'float'),
+                        ('error_x', 'float'),
+                        ('error_y', 'float'),
+                        ('error_z', 'float'),
+                    ])
+                }
+
+                # Write GeoPackage
+                with fiona.open(gcp_export_file, 'w', driver="GPKG",
+                                crs=fiona.crs.from_string(reconstruction.georef.proj4()),
+                                schema=gcp_schema) as f:
+                    for gcp in gcps:
+                        f.write({
+                            'geometry': {
+                                'type': 'Point',
+                                'coordinates': gcp['coordinates'],
+                            },
+                            'properties': OrderedDict([
+                                ('id', gcp['id']),
+                                ('observations_count', len(gcp['observations'])),
+                                ('observations_list', ",".join(gcp['observations'])),
+                                ('triangulated_x', gcp['triangulated'][0]),
+                                ('triangulated_y', gcp['triangulated'][1]),
+                                ('triangulated_z', gcp['triangulated'][2]),
+                                ('error_x', gcp['error'][0]),
+                                ('error_y', gcp['error'][1]),
+                                ('error_z', gcp['error'][2]),
+                            ])
+                        })
+
+                # Write GML
+                try:
+                    system.run('ogr2ogr -of GML "{}" "{}"'.format(gcp_gml_export_file, gcp_export_file))
+                except Exception as e:
+                    log.ODM_WARNING("Cannot generate ground control points GML file: %s" % str(e))
+            else:
+                log.ODM_WARNING("GCPs could not be loaded for writing to %s" % gcp_export_file)
+
         if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
             cmd = ('pdal translate -i "%s" -o \"%s\"' % (tree.filtered_point_cloud, tree.odm_georeferencing_model_laz))
             stages = ["ferry"]
@@ -35,6 +96,12 @@ class ODMGeoreferencingStage(types.ODM_Stage):
                     '--writers.las.offset_z=0',
                     '--writers.las.a_srs="%s"' % reconstruction.georef.proj4()
                 ]
+
+                if reconstruction.has_gcp() and io.file_exists(gcp_gml_export_file):
+                    log.ODM_INFO("Embedding GCP info in point cloud")
+                    params += [
+                        '--writers.las.vlrs="{\\\"filename\\\": \\\"%s\\\", \\\"user_id\\\": \\\"ODM_GCP\\\", \\\"description\\\": \\\"Ground Control Points (GML)\\\"}"' % gcp_gml_export_file
+                    ]
 
             system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
 
@@ -68,7 +135,7 @@ class ODMGeoreferencingStage(types.ODM_Stage):
         else:
             log.ODM_WARNING('Found a valid georeferenced model in: %s'
                             % tree.odm_georeferencing_model_laz)
-        
+
         if args.optimize_disk_space and io.file_exists(tree.odm_georeferencing_model_laz) and io.file_exists(tree.filtered_point_cloud):
             os.remove(tree.filtered_point_cloud)
 
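Note (illustration, not part of the patch): the exported ground_control_points.gpkg can be inspected with the same fiona API the stage uses to write it. A minimal read-back sketch, with a placeholder project path:

# Sketch: read the exported GCP GeoPackage back and print the per-axis error.
# "/path/to/project" is a placeholder for an actual ODM project directory.
import fiona

path = "/path/to/project/odm_georeferencing/ground_control_points.gpkg"
with fiona.open(path) as src:
    print(src.crs)  # CRS the GCP coordinates are expressed in
    for feature in src:
        props = feature['properties']
        x, y = feature['geometry']['coordinates'][:2]
        print(props['id'], (x, y), (props['error_x'], props['error_y'], props['error_z']))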
diff --git a/stages/odm_postprocess.py b/stages/odm_postprocess.py
new file mode 100644
index 00000000..703b6586
--- /dev/null
+++ b/stages/odm_postprocess.py
@@ -0,0 +1,51 @@
+import os
+
+from osgeo import gdal
+from opendm import io
+from opendm import log
+from opendm import types
+from opendm.utils import copy_paths, get_processing_results_paths
+
+class ODMPostProcess(types.ODM_Stage):
+    def process(self, args, outputs):
+        tree = outputs['tree']
+        reconstruction = outputs['reconstruction']
+
+        log.ODM_INFO("Post Processing")
+
+        if not outputs['large']:
+            # TODO: support for split-merge?
+
+            # Embed GCP info in 2D results via
+            # XML metadata fields
+            gcp_gml_export_file = tree.path("odm_georeferencing", "ground_control_points.gml")
+
+            if reconstruction.has_gcp() and io.file_exists(gcp_gml_export_file):
+                skip_embed_gcp = False
+                gcp_xml = ""
+
+                with open(gcp_gml_export_file) as f:
+                    gcp_xml = f.read()
+
+                for product in [tree.odm_orthophoto_tif,
+                                tree.path("odm_dem", "dsm.tif"),
+                                tree.path("odm_dem", "dtm.tif")]:
+                    if os.path.isfile(product):
+                        ds = gdal.Open(product)
+                        if ds is not None:
+                            if ds.GetMetadata('xml:GROUND_CONTROL_POINTS') is None or self.rerun():
+                                ds.SetMetadata(gcp_xml, 'xml:GROUND_CONTROL_POINTS')
+                                ds = None
+                                log.ODM_INFO("Wrote xml:GROUND_CONTROL_POINTS metadata to %s" % product)
+                            else:
+                                skip_embed_gcp = True
+                                log.ODM_WARNING("Already embedded ground control point information")
+                                break
+                        else:
+                            log.ODM_WARNING("Cannot open %s for writing, skipping GCP embedding" % product)
+
+        if args.copy_to:
+            try:
+                copy_paths([os.path.join(args.project_path, p) for p in get_processing_results_paths()], args.copy_to, self.rerun())
+            except Exception as e:
+                log.ODM_WARNING("Cannot copy to %s: %s" % (args.copy_to, str(e)))
diff --git a/stages/odm_report.py b/stages/odm_report.py
index 9e07c4f1..552a4664 100644
--- a/stages/odm_report.py
+++ b/stages/odm_report.py
@@ -14,7 +14,7 @@ from opendm.point_cloud import export_info_json
 from opendm.cropper import Cropper
 from opendm.orthophoto import get_orthophoto_vars, get_max_memory, generate_png
 from opendm.tiles.tiler import generate_colored_hillshade
-from opendm.utils import get_raster_stats, copy_paths, get_processing_results_paths
+from opendm.utils import get_raster_stats
 
 def hms(seconds):
     h = seconds // 3600
@@ -196,10 +196,3 @@ class ODMReport(types.ODM_Stage):
                 log.ODM_WARNING("Cannot generate overlap diagram, point cloud stats missing")
 
         octx.export_report(os.path.join(tree.odm_report, "report.pdf"), odm_stats, self.rerun())
-
-        # TODO: does this warrant a new stage?
-        if args.copy_to:
-            try:
-                copy_paths([os.path.join(args.project_path, p) for p in get_processing_results_paths()], args.copy_to, self.rerun())
-            except Exception as e:
-                log.ODM_WARNING("Cannot copy to %s: %s" % (args.copy_to, str(e)))
diff --git a/stages/run_opensfm.py b/stages/run_opensfm.py
index 25f32564..7aa8411f 100644
--- a/stages/run_opensfm.py
+++ b/stages/run_opensfm.py
@@ -40,7 +40,7 @@ class ODMOpenSfMStage(types.ODM_Stage):
 
         def cleanup_disk_space():
             if args.optimize_disk_space:
-                for folder in ["features", "matches", "exif", "reports"]:
+                for folder in ["features", "matches", "reports"]:
                     folder_path = octx.path(folder)
                     if os.path.exists(folder_path):
                         if os.path.islink(folder_path):
diff --git a/stages/splitmerge.py b/stages/splitmerge.py
index 68b9babd..7d1b7c1f 100644
--- a/stages/splitmerge.py
+++ b/stages/splitmerge.py
@@ -367,8 +367,9 @@ class ODMMergeStage(types.ODM_Stage):
             else:
                 log.ODM_WARNING("Found merged shots.geojson in %s" % tree.odm_report)
 
-            # Stop the pipeline short! We're done.
-            self.next_stage = None
+            # Stop the pipeline short by skipping to the postprocess stage.
+            # Afterwards, we're done.
+            self.next_stage = self.last_stage()
         else:
             log.ODM_INFO("Normal dataset, nothing to merge.")
             self.progress = 0.0
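Note (illustration, not part of the patch): odm_postprocess embeds the GCP GML document into the orthophoto and DEM GeoTIFFs under the xml:GROUND_CONTROL_POINTS metadata domain. A minimal GDAL sketch for reading it back, with a placeholder project path:

# Sketch: retrieve the embedded GCP GML from a product GeoTIFF.
# "/path/to/project" is a placeholder for an actual ODM project directory.
from osgeo import gdal

ds = gdal.Open("/path/to/project/odm_orthophoto/odm_orthophoto.tif")
gcp_xml = ds.GetMetadata('xml:GROUND_CONTROL_POINTS')
ds = None  # close the dataset

if gcp_xml:
    # "xml:" metadata domains typically come back as a list holding the XML text
    print(gcp_xml[0] if isinstance(gcp_xml, list) else gcp_xml)
else:
    print("No ground control point metadata embedded")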