Mirror of https://github.com/OpenDroneMap/ODM

Started writing GCP export functionality

parent 4e9f53e0a2
commit 335802b563
@@ -4,11 +4,15 @@ OpenSfM related utils
 import os, shutil, sys, json, argparse
 import yaml
+import numpy as np
+import pyproj
+from pyproj import CRS
 from opendm import io
 from opendm import log
 from opendm import system
 from opendm import context
 from opendm import camera
+from opendm import location
 from opendm.utils import get_depthmap_resolution
 from opendm.photo import find_largest_photo_dim
 from opensfm.large import metadataset
@@ -18,6 +22,8 @@ from opensfm.dataset import DataSet
 from opensfm import report
 from opendm.multispectral import get_photos_by_band
 from opendm.gpu import has_gpus
+from opensfm import multiview
+from opensfm.actions.export_geocoords import _get_transformation

 class OSFMContext:
     def __init__(self, opensfm_project_path):
@@ -255,6 +261,10 @@ class OSFMContext:
             config_filename = self.get_config_file_path()
             with open(config_filename, 'w') as fout:
                 fout.write("\n".join(config))

+            # We impose our own reference_lla
+            if reconstruction.is_georeferenced():
+                self.write_reference_lla(reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset, reconstruction.georef.proj4())
+
         else:
             log.ODM_WARNING("%s already exists, not rerunning OpenSfM setup" % list_path)
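The write_reference_lla() call introduced here is defined further down in this diff; it stores the reconstruction origin as geographic coordinates in a reference_lla.json file inside the OpenSfM project. Based on the json.dumps call added below, the file it produces looks roughly like this (values are made up for illustration):

    {
        "latitude": 46.842,
        "longitude": -91.994,
        "altitude": 0.0
    }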
@@ -428,6 +438,71 @@ class OSFMContext:
                 log.ODM_WARNING("Report could not be generated")
         else:
             log.ODM_WARNING("Report %s already exported" % report_path)

+    def write_reference_lla(self, offset_x, offset_y, proj4):
+        reference_lla = self.path("reference_lla.json")
+
+        longlat = CRS.from_epsg("4326")
+        lon, lat = location.transform2(CRS.from_proj4(proj4), longlat, offset_x, offset_y)
+
+        with open(reference_lla, 'w') as f:
+            f.write(json.dumps({
+                'latitude': lat,
+                'longitude': lon,
+                'altitude': 0.0
+            }, indent=4))
+
+        log.ODM_INFO("Wrote reference_lla.json")
+
+    def ground_control_points(self, proj4):
+        """
+        Load ground control point information.
+        Make sure this function is called *after* update_reference_lla()
+        has been called.
+        """
+        ds = DataSet(self.opensfm_project_path)
+        gcps = ds.load_ground_control_points()
+        if not gcps:
+            return []
+
+        reconstructions = ds.load_reconstruction()
+        reference = ds.load_reference()
+
+        projection = pyproj.Proj(proj4)
+        t = _get_transformation(reference, projection, (0, 0))
+        A, b = t[:3, :3], t[:3, 3]
+
+        result = []
+
+        for gcp in gcps:
+            if not gcp.coordinates.has_value:
+                continue
+            triangulated = None
+
+            for rec in reconstructions:
+                triangulated = multiview.triangulate_gcp(gcp, rec.shots, 1.0, 0.1)
+                if triangulated is None:
+                    continue
+                else:
+                    break
+
+            if triangulated is None:
+                continue
+
+            coordinates = np.array(gcp.coordinates.value)
+            coordinates = np.dot(A, coordinates) + b
+            triangulated = triangulated + b
+
+            result.append({
+                'id': gcp.id,
+                'observations': [obs.shot_id for obs in gcp.observations],
+                'triangulated': triangulated,
+                'coordinates': coordinates,
+                'error': np.abs(triangulated - coordinates)
+            })
+
+        return result
+
     def name(self):
         return os.path.basename(os.path.abspath(self.path("..")))
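A minimal sketch of how the two new OSFMContext methods could be exercised outside the pipeline (not part of this commit; the project path, proj4 string and UTM offsets below are placeholder values):

    from opendm.osfm import OSFMContext

    # Hypothetical standalone usage of the methods added above.
    octx = OSFMContext("/path/to/project/opensfm")
    proj4 = "+proj=utm +zone=32 +datum=WGS84 +units=m +no_defs"

    # Persist the reconstruction origin as lat/lon (offsets are UTM meters).
    octx.write_reference_lla(576000.0, 5078000.0, proj4)

    # Each entry carries the triangulated position, the surveyed coordinates
    # and the per-axis absolute error, all expressed in the projected CRS.
    for gcp in octx.ground_control_points(proj4):
        print(gcp['id'], gcp['error'])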
@@ -1,6 +1,9 @@
 import os
 import struct
 import pipes
+import fiona
+import fiona.crs
+from collections import OrderedDict

 from opendm import io
 from opendm import log
@@ -9,6 +12,7 @@ from opendm import system
 from opendm import context
 from opendm.cropper import Cropper
 from opendm import point_cloud
+from opendm.osfm import OSFMContext
 from opendm.multispectral import get_primary_band_name

 class ODMGeoreferencingStage(types.ODM_Stage):
@@ -16,58 +20,105 @@ class ODMGeoreferencingStage(types.ODM_Stage):
         tree = outputs['tree']
         reconstruction = outputs['reconstruction']

-        if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
-            cmd = ('pdal translate -i "%s" -o \"%s\"' % (tree.filtered_point_cloud, tree.odm_georeferencing_model_laz))
-            stages = ["ferry"]
-            params = [
-                '--filters.ferry.dimensions="views => UserData"',
-                '--writers.las.compression="lazip"',
-            ]
-
-            if reconstruction.is_georeferenced():
-                log.ODM_INFO("Georeferencing point cloud")
-
-                stages.append("transformation")
-                params += [
-                    '--filters.transformation.matrix="1 0 0 %s 0 1 0 %s 0 0 1 0 0 0 0 1"' % reconstruction.georef.utm_offset(),
-                    '--writers.las.offset_x=%s' % reconstruction.georef.utm_east_offset,
-                    '--writers.las.offset_y=%s' % reconstruction.georef.utm_north_offset,
-                    '--writers.las.offset_z=0',
-                    '--writers.las.a_srs="%s"' % reconstruction.georef.proj4()
-                ]
-
-            system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
-
-            self.update_progress(50)
-
-            if args.crop > 0:
-                log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
-                cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')
-
-                if args.fast_orthophoto:
-                    decimation_step = 10
-                else:
-                    decimation_step = 40
-
-                # More aggressive decimation for large datasets
-                if not args.fast_orthophoto:
-                    decimation_step *= int(len(reconstruction.photos) / 1000) + 1
-                    decimation_step = min(decimation_step, 95)
-
-                try:
-                    cropper.create_bounds_gpkg(tree.odm_georeferencing_model_laz, args.crop,
-                                               decimation_step=decimation_step)
-                except:
-                    log.ODM_WARNING("Cannot calculate crop bounds! We will skip cropping")
-                    args.crop = 0
-            else:
-                log.ODM_INFO("Converting point cloud (non-georeferenced)")
-                system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
-
-            point_cloud.post_point_cloud_steps(args, tree, self.rerun())
-        else:
-            log.ODM_WARNING('Found a valid georeferenced model in: %s'
-                            % tree.odm_georeferencing_model_laz)
+        # if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
+        #     cmd = ('pdal translate -i "%s" -o \"%s\"' % (tree.filtered_point_cloud, tree.odm_georeferencing_model_laz))
+        #     stages = ["ferry"]
+        #     params = [
+        #         '--filters.ferry.dimensions="views => UserData"',
+        #         '--writers.las.compression="lazip"',
+        #     ]
+
+        #     if reconstruction.is_georeferenced():
+        #         log.ODM_INFO("Georeferencing point cloud")
+
+        #         stages.append("transformation")
+        #         params += [
+        #             '--filters.transformation.matrix="1 0 0 %s 0 1 0 %s 0 0 1 0 0 0 0 1"' % reconstruction.georef.utm_offset(),
+        #             '--writers.las.offset_x=%s' % reconstruction.georef.utm_east_offset,
+        #             '--writers.las.offset_y=%s' % reconstruction.georef.utm_north_offset,
+        #             '--writers.las.offset_z=0',
+        #             '--writers.las.a_srs="%s"' % reconstruction.georef.proj4()
+        #         ]
+
+        #     system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
+
+        #     self.update_progress(50)
+
+        #     if args.crop > 0:
+        #         log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
+        #         cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')
+
+        #         if args.fast_orthophoto:
+        #             decimation_step = 10
+        #         else:
+        #             decimation_step = 40
+
+        #         # More aggressive decimation for large datasets
+        #         if not args.fast_orthophoto:
+        #             decimation_step *= int(len(reconstruction.photos) / 1000) + 1
+        #             decimation_step = min(decimation_step, 95)
+
+        #         try:
+        #             cropper.create_bounds_gpkg(tree.odm_georeferencing_model_laz, args.crop,
+        #                                        decimation_step=decimation_step)
+        #         except:
+        #             log.ODM_WARNING("Cannot calculate crop bounds! We will skip cropping")
+        #             args.crop = 0
+        #     else:
+        #         log.ODM_INFO("Converting point cloud (non-georeferenced)")
+        #         system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
+
+        #     point_cloud.post_point_cloud_steps(args, tree, self.rerun())
+        # else:
+        #     log.ODM_WARNING('Found a valid georeferenced model in: %s'
+        #                     % tree.odm_georeferencing_model_laz)
+
+        # Export GCP information if available
+        gcp_export_file = tree.path("odm_georeferencing", "ground_control_points.gpkg")
+        if (reconstruction.has_gcp() and not io.file_exists(gcp_export_file)) or self.rerun():
+            octx = OSFMContext(tree.opensfm)
+            gcps = octx.ground_control_points(reconstruction.georef.proj4())
+
+            if len(gcps):
+                gcp_schema = {
+                    'geometry': 'Point',
+                    'properties': OrderedDict([
+                        ('id', 'str'),
+                        ('observations_count', 'int'),
+                        ('observations_list', 'str'),
+                        ('triangulated_x', 'float'),
+                        ('triangulated_y', 'float'),
+                        ('triangulated_z', 'float'),
+                        ('error_x', 'float'),
+                        ('error_y', 'float'),
+                        ('error_z', 'float'),
+                    ])
+                }
+
+                # Write GeoPackage
+                with fiona.open(gcp_export_file, 'w', driver="GPKG",
+                                crs=fiona.crs.from_string(reconstruction.georef.proj4()),
+                                schema=gcp_schema) as f:
+                    for gcp in gcps:
+                        f.write({
+                            'geometry': {
+                                'type': 'Point',
+                                'coordinates': gcp['coordinates'],
+                            },
+                            'properties': OrderedDict([
+                                ('id', gcp['id']),
+                                ('observations_count', len(gcp['observations'])),
+                                ('observations_list', ",".join(gcp['observations'])),
+                                ('triangulated_x', gcp['triangulated'][0]),
+                                ('triangulated_y', gcp['triangulated'][1]),
+                                ('triangulated_z', gcp['triangulated'][2]),
+                                ('error_x', gcp['error'][0]),
+                                ('error_y', gcp['error'][1]),
+                                ('error_z', gcp['error'][2]),
+                            ])
+                        })
+            else:
+                log.ODM_WARNING("GCPs could not be loaded for writing to %s" % gcp_export_file)
+
         if args.optimize_disk_space and io.file_exists(tree.odm_georeferencing_model_laz) and io.file_exists(tree.filtered_point_cloud):
             os.remove(tree.filtered_point_cloud)
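One way to sanity-check the exported GeoPackage after this stage runs (a hedged sketch, not part of this commit; the relative path matches the gcp_export_file location used above and should be adjusted to the actual project directory):

    import fiona

    # Read back the GCP layer and print the per-axis errors
    # (meters, assuming a projected/UTM CRS was used for export).
    with fiona.open("odm_georeferencing/ground_control_points.gpkg") as src:
        for feat in src:
            props = feat['properties']
            print(props['id'], props['error_x'], props['error_y'], props['error_z'])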
@@ -55,18 +55,6 @@ class ODMOpenSfMStage(types.ODM_Stage):
             cleanup_disk_space()
             return

-        # Stats are computed in the local CRS (before geoprojection)
-        if not args.skip_report:
-
-            # TODO: this will fail to compute proper statistics if
-            # the pipeline is run with --skip-report and is subsequently
-            # rerun without --skip-report a --rerun-* parameter (due to the reconstruction.json file)
-            # being replaced below. It's an isolated use case.
-
-            octx.export_stats(self.rerun())
-
-        self.update_progress(75)
-
         # We now switch to a geographic CRS
         geocoords_flag_file = octx.path("exported_geocoords.txt")
@@ -79,6 +67,12 @@ class ODMOpenSfMStage(types.ODM_Stage):
         else:
             log.ODM_WARNING("Will skip exporting %s" % tree.opensfm_geocoords_reconstruction)

+        self.update_progress(75)
+
+        # Stats are computed in the geographic CRS
+        if not args.skip_report:
+            octx.export_stats(self.rerun())
+
         self.update_progress(80)

         updated_config_flag_file = octx.path('updated_config.txt')