Mirror of https://github.com/OpenDroneMap/ODM
Merge branch 'master' of https://github.com/OpenDroneMap/ODM into m1
commit ea12d56880
@@ -85,4 +85,8 @@ jobs:
      run: |
        echo "Docker image digest: ${{ steps.docker_build.outputs.digest }}"
        echo "WSL AMD64 rootfs URL: ${{ steps.upload-amd64-wsl-rootfs.browser_download_url }}"

    # Trigger NodeODM build
    - name: Dispatch NodeODM Build Event
      id: nodeodm_dispatch
      run: |
        curl -X POST -u "${{secrets.PAT_USERNAME}}:${{secrets.PAT_TOKEN}}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker.yaml/dispatches --data '{"ref": "master"}'
@@ -34,3 +34,8 @@ jobs:
        platforms: linux/amd64
        push: true
        tags: opendronemap/odm:gpu
    # Trigger NodeODM build
    - name: Dispatch NodeODM Build Event
      id: nodeodm_dispatch
      run: |
        curl -X POST -u "${{secrets.PAT_USERNAME}}:${{secrets.PAT_TOKEN}}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker-gpu.yaml/dispatches --data '{"ref": "master"}'
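Note: both workflow hunks above fire NodeODM image builds through GitHub's workflow dispatch REST endpoint. As a hedged illustration only (not part of the ODM workflows), the same request can be issued from Python with the third-party requests package; the credentials below are placeholders for a personal access token:

# Illustrative sketch, not ODM code: POST a workflow_dispatch event.
# "PAT_USERNAME"/"PAT_TOKEN" are placeholders for a personal access token.
import requests

url = ("https://api.github.com/repos/OpenDroneMap/NodeODM"
       "/actions/workflows/publish-docker.yaml/dispatches")

resp = requests.post(
    url,
    auth=("PAT_USERNAME", "PAT_TOKEN"),
    headers={
        "Accept": "application/vnd.github.everest-preview+json",
        "Content-Type": "application/json",
    },
    json={"ref": "master"},  # branch or tag the dispatched workflow runs against
)
resp.raise_for_status()  # GitHub answers 204 No Content on success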
@@ -19,7 +19,7 @@ ExternalProject_Add(${_proj_name}
  #--Download step--------------
  DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
  GIT_REPOSITORY https://github.com/OpenDroneMap/OpenSfM/
- GIT_TAG 257
+ GIT_TAG 261
  #--Update/Patch step----------
  UPDATE_COMMAND git submodule update --init --recursive
  #--Configure step-------------
VERSION
@@ -1 +1 @@
- 2.5.7
+ 2.6.1
@@ -11,7 +11,7 @@ import sys
 # parse arguments
 processopts = ['dataset', 'split', 'merge', 'opensfm', 'openmvs', 'odm_filterpoints',
                'odm_meshing', 'mvs_texturing', 'odm_georeferencing',
-               'odm_dem', 'odm_orthophoto', 'odm_report']
+               'odm_dem', 'odm_orthophoto', 'odm_report', 'odm_postprocess']

 with open(os.path.join(context.root_path, 'VERSION')) as version_file:
     __version__ = version_file.read().strip()
@@ -95,7 +95,7 @@ def config(argv=None, parser=None):
     parser.add_argument('--end-with', '-e',
                         metavar='<string>',
                         action=StoreValue,
-                        default='odm_report',
+                        default='odm_postprocess',
                         choices=processopts,
                         help='End processing at this stage. Can be one of: %(choices)s. Default: %(default)s')

@@ -4,11 +4,15 @@ OpenSfM related utils

import os, shutil, sys, json, argparse
import yaml
import numpy as np
import pyproj
from pyproj import CRS
from opendm import io
from opendm import log
from opendm import system
from opendm import context
from opendm import camera
from opendm import location
from opendm.utils import get_depthmap_resolution
from opendm.photo import find_largest_photo_dim
from opensfm.large import metadataset
@@ -18,6 +22,8 @@ from opensfm.dataset import DataSet
from opensfm import report
from opendm.multispectral import get_photos_by_band
from opendm.gpu import has_gpus
from opensfm import multiview
from opensfm.actions.export_geocoords import _get_transformation

class OSFMContext:
    def __init__(self, opensfm_project_path):
@@ -255,6 +261,10 @@ class OSFMContext:
            config_filename = self.get_config_file_path()
            with open(config_filename, 'w') as fout:
                fout.write("\n".join(config))

            # We impose our own reference_lla
            if reconstruction.is_georeferenced():
                self.write_reference_lla(reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset, reconstruction.georef.proj4())
        else:
            log.ODM_WARNING("%s already exists, not rerunning OpenSfM setup" % list_path)

@@ -428,6 +438,71 @@ class OSFMContext:
                log.ODM_WARNING("Report could not be generated")
        else:
            log.ODM_WARNING("Report %s already exported" % report_path)

    def write_reference_lla(self, offset_x, offset_y, proj4):
        reference_lla = self.path("reference_lla.json")

        longlat = CRS.from_epsg("4326")
        lon, lat = location.transform2(CRS.from_proj4(proj4), longlat, offset_x, offset_y)

        with open(reference_lla, 'w') as f:
            f.write(json.dumps({
                'latitude': lat,
                'longitude': lon,
                'altitude': 0.0
            }, indent=4))

        log.ODM_INFO("Wrote reference_lla.json")

    def ground_control_points(self, proj4):
        """
        Load ground control point information.
        """
        ds = DataSet(self.opensfm_project_path)
        gcps = ds.load_ground_control_points()
        if not gcps:
            return []

        reconstructions = ds.load_reconstruction()
        reference = ds.load_reference()

        projection = pyproj.Proj(proj4)
        t = _get_transformation(reference, projection, (0, 0))
        A, b = t[:3, :3], t[:3, 3]

        result = []

        for gcp in gcps:
            if not gcp.coordinates.has_value:
                continue
            triangulated = None

            for rec in reconstructions:
                triangulated = multiview.triangulate_gcp(gcp, rec.shots, 1.0, 0.1)
                if triangulated is None:
                    continue
                else:
                    break

            if triangulated is None:
                continue

            triangulated_topocentric = np.dot(A.T, triangulated)

            coordinates_topocentric = np.array(gcp.coordinates.value)
            coordinates = np.dot(A, coordinates_topocentric) + b
            triangulated = triangulated + b

            result.append({
                'id': gcp.id,
                'observations': [obs.shot_id for obs in gcp.observations],
                'triangulated': triangulated,
                'coordinates': coordinates,
                'error': np.abs(triangulated_topocentric - coordinates_topocentric)
            })

        return result


    def name(self):
        return os.path.basename(os.path.abspath(self.path("..")))
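Note: ground_control_points() above works with the 4x4 affine returned by _get_transformation, splitting it into a linear part A and a translation b. A minimal numpy sketch of that pattern (illustrative values, not ODM code): topocentric points map to the projected CRS with A·x + b, and back with the inverse of A (A.T when A is a pure rotation), which is what lets errors be compared in the topocentric frame:

# Minimal sketch (not ODM code): applying a 4x4 topocentric-to-projected
# transform T. A is the 3x3 linear part, b the translation; all values are
# hypothetical and only chosen for illustration.
import numpy as np

T = np.array([
    [1.0, 0.0, 0.0, 322263.0],
    [0.0, 1.0, 0.0, 5157982.0],
    [0.0, 0.0, 1.0, 10.0],
    [0.0, 0.0, 0.0, 1.0],
])
A, b = T[:3, :3], T[:3, 3]

topocentric_point = np.array([12.3, -4.5, 1.2])

# Forward: topocentric -> projected CRS
projected = A.dot(topocentric_point) + b

# Inverse: projected -> topocentric. For a pure rotation A the inverse is A.T,
# which is why the hunk above can use np.dot(A.T, ...) to compare errors in
# the topocentric frame.
back = np.linalg.inv(A).dot(projected - b)

print(projected, np.abs(back - topocentric_point))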
@@ -201,6 +201,7 @@ class ODM_Photo:
            '@Camera:RigCameraIndex', # Parrot Sequoia, Sentera 21244-00_3.2MP-GS-0001
+           'Camera:RigCameraIndex', # MicaSense Altum
        ])

        self.set_attr_from_xmp_tag('radiometric_calibration', tags, [
            'MicaSense:RadiometricCalibration',
        ])
@@ -233,7 +234,8 @@ class ODM_Photo:
        ], float)

        self.set_attr_from_xmp_tag('capture_uuid', tags, [
-           '@drone-dji:CaptureUUID'
+           '@drone-dji:CaptureUUID', # DJI
+           '@Camera:ImageUniqueID', # sentera 6x
        ])

        # Phantom 4 RTK
@@ -359,6 +359,13 @@ class ODM_Stage:
        progressbc.send_update(self.previous_stages_progress() +
                               (self.delta_progress() / 100.0) * float(progress))

    def last_stage(self):
        if self.next_stage:
            return self.next_stage.last_stage()
        else:
            return self


    def process(self, args, outputs):
        raise NotImplementedError

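Note: last_stage() added above walks the singly linked chain of stages to its tail; the split-merge hunk near the end of this diff uses it to jump straight to the final stage. A minimal self-contained sketch (hypothetical Stage class, not ODM's):

# Illustrative sketch of a singly linked stage chain and last_stage() traversal.
class Stage:
    def __init__(self, name):
        self.name = name
        self.next_stage = None

    def connect(self, stage):
        self.next_stage = stage
        return stage  # returning the connected stage allows chaining

    def last_stage(self):
        if self.next_stage:
            return self.next_stage.last_stage()
        return self

first = Stage("dataset")
first.connect(Stage("odm_report")).connect(Stage("odm_postprocess"))
print(first.last_stage().name)  # -> odm_postprocess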
@@ -18,6 +18,8 @@ from stages.odm_filterpoints import ODMFilterPoints
from stages.splitmerge import ODMSplitStage, ODMMergeStage

from stages.odm_report import ODMReport
from stages.odm_postprocess import ODMPostProcess


class ODMApp:
    def __init__(self, args):
@@ -61,7 +63,9 @@ class ODMApp:
                          max_concurrency=args.max_concurrency,
                          verbose=args.verbose)
        orthophoto = ODMOrthoPhotoStage('odm_orthophoto', args, progress=98.0)
-       report = ODMReport('odm_report', args, progress=100.0)
+       report = ODMReport('odm_report', args, progress=99.0)
+       postprocess = ODMPostProcess('odm_postprocess', args, progress=100.0)


        # Normal pipeline
        self.first_stage = dataset
@@ -82,7 +86,8 @@ class ODMApp:
                 .connect(georeferencing) \
                 .connect(dem) \
                 .connect(orthophoto) \
-                .connect(report)
+                .connect(report) \
+                .connect(postprocess)

    def execute(self):
        try:
@@ -1,6 +1,9 @@
import os
import struct
import pipes
+ import fiona
+ import fiona.crs
+ from collections import OrderedDict

from opendm import io
from opendm import log
@@ -10,12 +13,70 @@ from opendm import context
from opendm.cropper import Cropper
from opendm import point_cloud
from opendm.multispectral import get_primary_band_name
from opendm.osfm import OSFMContext

class ODMGeoreferencingStage(types.ODM_Stage):
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        # Export GCP information if available

        gcp_export_file = tree.path("odm_georeferencing", "ground_control_points.gpkg")
        gcp_gml_export_file = tree.path("odm_georeferencing", "ground_control_points.gml")

        if reconstruction.has_gcp() and (not io.file_exists(gcp_export_file) or self.rerun()):
            octx = OSFMContext(tree.opensfm)
            gcps = octx.ground_control_points(reconstruction.georef.proj4())

            if len(gcps):
                gcp_schema = {
                    'geometry': 'Point',
                    'properties': OrderedDict([
                        ('id', 'str'),
                        ('observations_count', 'int'),
                        ('observations_list', 'str'),
                        ('triangulated_x', 'float'),
                        ('triangulated_y', 'float'),
                        ('triangulated_z', 'float'),
                        ('error_x', 'float'),
                        ('error_y', 'float'),
                        ('error_z', 'float'),
                    ])
                }

                # Write GeoPackage
                with fiona.open(gcp_export_file, 'w', driver="GPKG",
                                crs=fiona.crs.from_string(reconstruction.georef.proj4()),
                                schema=gcp_schema) as f:
                    for gcp in gcps:
                        f.write({
                            'geometry': {
                                'type': 'Point',
                                'coordinates': gcp['coordinates'],
                            },
                            'properties': OrderedDict([
                                ('id', gcp['id']),
                                ('observations_count', len(gcp['observations'])),
                                ('observations_list', ",".join(gcp['observations'])),
                                ('triangulated_x', gcp['triangulated'][0]),
                                ('triangulated_y', gcp['triangulated'][1]),
                                ('triangulated_z', gcp['triangulated'][2]),
                                ('error_x', gcp['error'][0]),
                                ('error_y', gcp['error'][1]),
                                ('error_z', gcp['error'][2]),
                            ])
                        })

                # Write GML
                try:
                    system.run('ogr2ogr -of GML "{}" "{}"'.format(gcp_gml_export_file, gcp_export_file))
                except Exception as e:
                    log.ODM_WARNING("Cannot generate ground control points GML file: %s" % str(e))
            else:
                log.ODM_WARNING("GCPs could not be loaded for writing to %s" % gcp_export_file)


        if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
            cmd = ('pdal translate -i "%s" -o \"%s\"' % (tree.filtered_point_cloud, tree.odm_georeferencing_model_laz))
            stages = ["ferry"]
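Note: the GeoPackage export above follows the standard fiona pattern of declaring a schema and writing one record per feature. A minimal standalone sketch of that pattern (made-up CRS string, file name and values; not ODM code):

# Illustrative sketch: write a single point feature to a GeoPackage with fiona.
import fiona
import fiona.crs
from collections import OrderedDict

schema = {
    'geometry': 'Point',
    'properties': OrderedDict([('id', 'str'), ('error_x', 'float')]),
}

with fiona.open("points.gpkg", 'w', driver="GPKG",
                crs=fiona.crs.from_string("+proj=utm +zone=32 +datum=WGS84 +units=m +no_defs"),
                schema=schema) as f:
    f.write({
        'geometry': {'type': 'Point', 'coordinates': (402000.0, 5757000.0)},
        'properties': OrderedDict([('id', 'gcp1'), ('error_x', 0.03)]),
    })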
@@ -35,6 +96,12 @@ class ODMGeoreferencingStage(types.ODM_Stage):
                '--writers.las.offset_z=0',
                '--writers.las.a_srs="%s"' % reconstruction.georef.proj4()
            ]

            if reconstruction.has_gcp() and io.file_exists(gcp_gml_export_file):
                log.ODM_INFO("Embedding GCP info in point cloud")
                params += [
                    '--writers.las.vlrs="{\\\"filename\\\": \\\"%s\\\", \\\"user_id\\\": \\\"ODM_GCP\\\", \\\"description\\\": \\\"Ground Control Points (GML)\\\"}"' % gcp_gml_export_file
                ]

            system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))

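Note: the --writers.las.vlrs value above is a JSON object that has to survive both the Python string literal and the shell, hence the stacked backslashes. A hedged sketch of building the same argument with json.dumps and shlex.quote instead of hand escaping (illustrative only, not the code ODM uses; the path is a placeholder):

# Illustrative sketch: construct the PDAL las-writer VLR argument from a dict
# and quote it for a shell command line.
import json
import shlex

gcp_gml_export_file = "/path/to/ground_control_points.gml"  # placeholder path

vlr = {
    "filename": gcp_gml_export_file,
    "user_id": "ODM_GCP",
    "description": "Ground Control Points (GML)",
}

param = '--writers.las.vlrs=' + shlex.quote(json.dumps(vlr))
print(param)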
@@ -68,7 +135,7 @@ class ODMGeoreferencingStage(types.ODM_Stage):
        else:
            log.ODM_WARNING('Found a valid georeferenced model in: %s'
                            % tree.odm_georeferencing_model_laz)


        if args.optimize_disk_space and io.file_exists(tree.odm_georeferencing_model_laz) and io.file_exists(tree.filtered_point_cloud):
            os.remove(tree.filtered_point_cloud)
@@ -0,0 +1,51 @@
import os

from osgeo import gdal
from opendm import io
from opendm import log
from opendm import types
from opendm.utils import copy_paths, get_processing_results_paths

class ODMPostProcess(types.ODM_Stage):
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        log.ODM_INFO("Post Processing")

        if not outputs['large']:
            # TODO: support for split-merge?

            # Embed GCP info in 2D results via
            # XML metadata fields
            gcp_gml_export_file = tree.path("odm_georeferencing", "ground_control_points.gml")

            if reconstruction.has_gcp() and io.file_exists(gcp_gml_export_file):
                skip_embed_gcp = False
                gcp_xml = ""

                with open(gcp_gml_export_file) as f:
                    gcp_xml = f.read()

                for product in [tree.odm_orthophoto_tif,
                                tree.path("odm_dem", "dsm.tif"),
                                tree.path("odm_dem", "dtm.tif")]:
                    if os.path.isfile(product):
                        ds = gdal.Open(product)
                        if ds is not None:
                            if ds.GetMetadata('xml:GROUND_CONTROL_POINTS') is None or self.rerun():
                                ds.SetMetadata(gcp_xml, 'xml:GROUND_CONTROL_POINTS')
                                ds = None
                                log.ODM_INFO("Wrote xml:GROUND_CONTROL_POINTS metadata to %s" % product)
                            else:
                                skip_embed_gcp = True
                                log.ODM_WARNING("Already embedded ground control point information")
                                break
                        else:
                            log.ODM_WARNING("Cannot open %s for writing, skipping GCP embedding" % product)

        if args.copy_to:
            try:
                copy_paths([os.path.join(args.project_path, p) for p in get_processing_results_paths()], args.copy_to, self.rerun())
            except Exception as e:
                log.ODM_WARNING("Cannot copy to %s: %s" % (args.copy_to, str(e)))
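Note: the new stage above embeds the GCP GML into rasters through GDAL metadata domains. A minimal sketch of that round trip (placeholder file name and XML; whether GetMetadata returns None or an empty result for a missing xml: domain can vary by GDAL version, so treat the check as illustrative):

# Illustrative sketch: write and read an xml: metadata domain on a GeoTIFF.
# "orthophoto.tif" is a placeholder for an existing raster.
from osgeo import gdal

gcp_xml = "<GroundControlPoints/>"  # placeholder content
path = "orthophoto.tif"

ds = gdal.Open(path, gdal.GA_Update)
if ds is not None:
    # xml: domains take a single string rather than a key/value dict
    ds.SetMetadata(gcp_xml, 'xml:GROUND_CONTROL_POINTS')
    ds = None  # closing the dataset flushes the metadata to disk

    ds = gdal.Open(path)
    print(ds.GetMetadata('xml:GROUND_CONTROL_POINTS'))
    ds = None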
@@ -14,7 +14,7 @@ from opendm.point_cloud import export_info_json
from opendm.cropper import Cropper
from opendm.orthophoto import get_orthophoto_vars, get_max_memory, generate_png
from opendm.tiles.tiler import generate_colored_hillshade
- from opendm.utils import get_raster_stats, copy_paths, get_processing_results_paths
+ from opendm.utils import get_raster_stats

def hms(seconds):
    h = seconds // 3600
@@ -196,10 +196,3 @@ class ODMReport(types.ODM_Stage):
            log.ODM_WARNING("Cannot generate overlap diagram, point cloud stats missing")

        octx.export_report(os.path.join(tree.odm_report, "report.pdf"), odm_stats, self.rerun())
-
-       # TODO: does this warrant a new stage?
-       if args.copy_to:
-           try:
-               copy_paths([os.path.join(args.project_path, p) for p in get_processing_results_paths()], args.copy_to, self.rerun())
-           except Exception as e:
-               log.ODM_WARNING("Cannot copy to %s: %s" % (args.copy_to, str(e)))
@@ -40,7 +40,7 @@ class ODMOpenSfMStage(types.ODM_Stage):

        def cleanup_disk_space():
            if args.optimize_disk_space:
-               for folder in ["features", "matches", "exif", "reports"]:
+               for folder in ["features", "matches", "reports"]:
                    folder_path = octx.path(folder)
                    if os.path.exists(folder_path):
                        if os.path.islink(folder_path):
@@ -367,8 +367,9 @@ class ODMMergeStage(types.ODM_Stage):
            else:
                log.ODM_WARNING("Found merged shots.geojson in %s" % tree.odm_report)

-           # Stop the pipeline short! We're done.
-           self.next_stage = None
+           # Stop the pipeline short by skipping to the postprocess stage.
+           # Afterwards, we're done.
+           self.next_stage = self.last_stage()
        else:
            log.ODM_INFO("Normal dataset, nothing to merge.")
            self.progress = 0.0