Mirror of https://github.com/OpenDroneMap/ODM

WIP: fixing geo coord exporting accuracy

parent 6114e5e934
commit 9e1e2496f4

@@ -25,9 +25,9 @@ check_version(){
 }

 if [[ $2 =~ ^[0-9]+$ ]] ; then
-    processes=$2
+    processes=2
 else
-    processes=$(nproc)
+    processes=2
 fi

 ensure_prereqs() {

@@ -0,0 +1,49 @@
+import numpy as np
+from numpy import ndarray
+from typing import Tuple
+from pyproj import Proj
+import argparse
+from opensfm.geo import TopocentricConverter
+
+def parse_pdal_args(pdal_args: dict) -> argparse.Namespace:
+    def validate_arg(name: str, data_type: type):
+        if name not in pdal_args:
+            raise ValueError(f"PDAL arguments should contain {name}.")
+        try:
+            data_type(pdal_args[name])
+        except ValueError:
+            raise ValueError(f"PDAL argument {name} should be of type {data_type}.")
+        return data_type(pdal_args[name])
+    return argparse.Namespace(
+        reflat=validate_arg('reflat', float),
+        reflon=validate_arg('reflon', float),
+        refalt=validate_arg('refalt', float),
+        x_offset=validate_arg('x_offset', float),
+        y_offset=validate_arg('y_offset', float),
+        a_srs=validate_arg('a_srs', str)
+    )
+
+def topocentric_to_georef_pdal(ins, outs):
+    args = parse_pdal_args(ins)
+    outs['X'], outs['Y'], outs['Z'] = topocentric_to_georef(args.reflat, args.reflon, args.refalt, args.a_srs, ins['X'], ins['Y'], ins['Z'], args.x_offset, args.y_offset)
+    return True
+
+def topocentric_to_georef(
+    reflat: float,
+    reflon: float,
+    refalt: float,
+    a_srs: str,
+    x: ndarray,
+    y: ndarray,
+    z: ndarray,
+    x_offset: float = 0,
+    y_offset: float = 0,
+) -> Tuple[ndarray, ndarray, ndarray]:
+    reference = TopocentricConverter(reflat, reflon, refalt)
+    projection = Proj(a_srs)
+    lat, lon, alt = reference.to_lla(x, y, z)
+    easting, northing = projection(lon, lat)
+    return easting - x_offset, northing - y_offset, alt

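The new helper above converts OpenSfM's topocentric coordinates into a projected CRS; it is wired into PDAL further down as opendm.georef. A minimal usage sketch, with made-up reference coordinates, proj string and point values (not taken from the commit) and assuming the ODM environment is active, since TopocentricConverter comes from OpenSfM:

    import numpy as np
    from opendm.georef import topocentric_to_georef

    # Example topocentric coordinates (meters east/north/up of the reference point)
    x = np.array([0.0, 10.0])
    y = np.array([0.0, 10.0])
    z = np.array([100.0, 101.0])

    easting, northing, alt = topocentric_to_georef(
        reflat=46.84, reflon=-91.99, refalt=198.0,                    # example reference LLA
        a_srs="+proj=utm +zone=15 +datum=WGS84 +units=m +no_defs",    # example UTM proj string
        x=x, y=y, z=z,
        x_offset=0, y_offset=0,                                       # keep absolute UTM coordinates
    )
    print(easting, northing, alt)
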
@@ -629,9 +629,12 @@ class OSFMContext:

         return result

+    def reference(self):
+        ds = DataSet(self.opensfm_project_path)
+        return ds.load_reference_lla()
+
     def name(self):
         return os.path.basename(os.path.abspath(self.path("..")))

 def get_submodel_argv(args, submodels_path = None, submodel_name = None):
     """

@@ -361,16 +361,20 @@ class ODM_Tree(object):
         self.openmvs_model = os.path.join(self.openmvs, 'scene_dense_dense_filtered.ply')

         # filter points
+        self.filtered_point_cloud_topo = os.path.join(self.odm_filterpoints, "point_cloud_topocentric.ply")
         self.filtered_point_cloud = os.path.join(self.odm_filterpoints, "point_cloud.ply")
         self.filtered_point_cloud_stats = os.path.join(self.odm_filterpoints, "point_cloud_stats.json")

         # odm_meshing
+        self.odm_mesh_topo = os.path.join(self.odm_meshing, 'odm_mesh_topocentric.ply')
         self.odm_mesh = os.path.join(self.odm_meshing, 'odm_mesh.ply')
         self.odm_meshing_log = os.path.join(self.odm_meshing, 'odm_meshing_log.txt')
+        self.odm_25dmesh_topo = os.path.join(self.odm_meshing, 'odm_25dmesh_topocentric.ply')
         self.odm_25dmesh = os.path.join(self.odm_meshing, 'odm_25dmesh.ply')
         self.odm_25dmeshing_log = os.path.join(self.odm_meshing, 'odm_25dmeshing_log.txt')

         # texturing
+        self.odm_textured_model_obj_topo = os.path.join(self.odm_texturing, 'odm_textured_model_topocentric.obj')
         self.odm_textured_model_obj = 'odm_textured_model_geo.obj'
         self.odm_textured_model_glb = 'odm_textured_model_geo.glb'

@@ -33,7 +33,7 @@ class ODMMvsTexStage(types.ODM_Stage):
         if not args.skip_3dmodel and (primary or args.use_3dmesh):
             nonloc.runs += [{
                 'out_dir': os.path.join(tree.odm_texturing, subdir),
-                'model': tree.odm_mesh,
+                'model': tree.odm_mesh_topo,
                 'nadir': False,
                 'primary': primary,
                 'nvm_file': nvm_file,

@@ -43,7 +43,7 @@ class ODMMvsTexStage(types.ODM_Stage):
         if not args.use_3dmesh:
             nonloc.runs += [{
                 'out_dir': os.path.join(tree.odm_25dtexturing, subdir),
-                'model': tree.odm_25dmesh,
+                'model': tree.odm_25dmesh_topo,
                 'nadir': True,
                 'primary': primary,
                 'nvm_file': nvm_file,

@@ -69,7 +69,7 @@ class ODMMvsTexStage(types.ODM_Stage):
             if not io.dir_exists(r['out_dir']):
                 system.mkdir_p(r['out_dir'])

-            odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj)
+            odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj_topo)
             unaligned_obj = io.related_file_path(odm_textured_model_obj, postfix="_unaligned")

             if not io.file_exists(odm_textured_model_obj) or self.rerun():

@@ -147,7 +147,7 @@ class ODMMvsTexStage(types.ODM_Stage):
                     shutil.rmtree(packed_dir)

                 try:
-                    obj_pack(os.path.join(r['out_dir'], tree.odm_textured_model_obj), packed_dir, _info=log.ODM_INFO)
+                    obj_pack(os.path.join(r['out_dir'], tree.odm_textured_model_obj_topo), packed_dir, _info=log.ODM_INFO)

                     # Move packed/* into texturing folder
                     system.delete_files(r['out_dir'], (".vec", ))

@@ -19,7 +19,7 @@ class ODMFilterPoints(types.ODM_Stage):
         inputPointCloud = ""

         # check if reconstruction was done before
-        if not io.file_exists(tree.filtered_point_cloud) or self.rerun():
+        if not io.file_exists(tree.filtered_point_cloud_topo) or self.rerun():
             if args.fast_orthophoto:
                 inputPointCloud = os.path.join(tree.opensfm, 'reconstruction.ply')
             else:

@@ -49,14 +49,14 @@ class ODMFilterPoints(types.ODM_Stage):
                 else:
                     log.ODM_WARNING("Not a georeferenced reconstruction, will ignore --auto-boundary")

-            point_cloud.filter(inputPointCloud, tree.filtered_point_cloud, tree.filtered_point_cloud_stats,
+            point_cloud.filter(inputPointCloud, tree.filtered_point_cloud_topo, tree.filtered_point_cloud_stats,
                                 standard_deviation=args.pc_filter,
                                 sample_radius=args.pc_sample,
                                 boundary=boundary_offset(outputs.get('boundary'), reconstruction.get_proj_offset()),
                                 max_concurrency=args.max_concurrency)

             # Quick check
-            info = point_cloud.ply_info(tree.filtered_point_cloud)
+            info = point_cloud.ply_info(tree.filtered_point_cloud_topo)
             if info["vertex_count"] == 0:
                 extra_msg = ''
                 if 'boundary' in outputs:

@@ -64,7 +64,7 @@ class ODMFilterPoints(types.ODM_Stage):
                 raise system.ExitException("Uh oh! We ended up with an empty point cloud. This means that the reconstruction did not succeed. Have you followed best practices for data acquisition? See https://docs.opendronemap.org/flying/%s" % extra_msg)
         else:
             log.ODM_WARNING('Found a valid point cloud file in: %s' %
-                            tree.filtered_point_cloud)
+                            tree.filtered_point_cloud_topo)

         if args.optimize_disk_space and inputPointCloud:
             if os.path.isfile(inputPointCloud):

@@ -8,7 +8,9 @@ import json
 import zipfile
 import math
 from collections import OrderedDict
+from numpy import rec
 from pyproj import CRS
+import pdal

 from opendm import io
 from opendm import log

@@ -114,18 +116,83 @@ class ODMGeoreferencingStage(types.ODM_Stage):
             else:
                 log.ODM_WARNING("GCPs could not be loaded for writing to %s" % gcp_export_file)

-        if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
-            cmd = f'pdal translate -i "{tree.filtered_point_cloud}" -o "{tree.odm_georeferencing_model_laz}"'
-            stages = ["ferry"]
-            params = [
-                '--filters.ferry.dimensions="views => UserData"'
-            ]
+        if reconstruction.is_georeferenced():
+            # prepare pipeline stage for topocentric to georeferenced conversion
+            octx = OSFMContext(tree.opensfm)
+            reference = octx.reference()
+            pdalargs = {
+                'reflat': reference.lat,
+                'reflon': reference.lon,
+                'refalt': reference.alt,
+                'a_srs': reconstruction.georef.proj4(),
+                'x_offset': reconstruction.georef.utm_east_offset,
+                'y_offset': reconstruction.georef.utm_north_offset
+            }
+            topo_to_georef_def = {
+                "type": "filters.python",
+                "module": "opendm.georef",
+                "function": "topocentric_to_georef_pdal",
+                "pdalargs": json.dumps(pdalargs)
+            }
+
+        if not io.file_exists(tree.filtered_point_cloud) or self.rerun():
+            log.ODM_INFO("Georeferencing filtered point cloud")
+            if reconstruction.is_georeferenced():
+                ply_georeferencing_pipeline = [
+                    {
+                        "type": "readers.ply",
+                        "filename": tree.filtered_point_cloud_topo
+                    },
+                    topo_to_georef_def,
+                    {
+                        "type": "writers.ply",
+                        "filename": tree.filtered_point_cloud
+                    }
+                ]
+                pipeline = pdal.Pipeline(json.dumps(ply_georeferencing_pipeline))
+                pipeline.execute()
+            else:
+                shutil.copy(tree.filtered_point_cloud_topo, tree.filtered_point_cloud)
+
+        if not io.file_exists(tree.odm_textured_model_geo_obj) or self.rerun():
+            log.ODM_INFO("Georeferencing textured model")
+            if reconstruction.is_georeferenced():
+                obj_georeferencing_pipeline = [
+                    {
+                        "type": "readers.obj",
+                        "filename": tree.odm_textured_model_obj
+                    },
+                    topo_to_georef_def,
+                    {
+                        "type": "writers.obj",
+                        "filename": tree.odm_textured_model_geo_obj
+                    }
+                ]
+                pipeline = pdal.Pipeline(json.dumps(obj_georeferencing_pipeline))
+                pipeline.execute()
+            else:
+                shutil.copy(tree.odm_textured_model_obj, tree.odm_textured_model_geo_obj)
+
+        if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
+            las_georeferencing_pipeline = [
+                {
+                    "type": "readers.las",
+                    "filename": tree.filtered_point_cloud
+                },
+                {
+                    "type": "filters.ferry",
+                    "dimensions": "views => UserData"
+                }
+            ]

             if reconstruction.is_georeferenced():
                 log.ODM_INFO("Georeferencing point cloud")

-                stages.append("transformation")
                 utmoffset = reconstruction.georef.utm_offset()
+                las_georeferencing_pipeline.append({
+                    "type": "filters.transformation",
+                    "matrix": f"1 0 0 {utmoffset[0]} 0 1 0 {utmoffset[1]} 0 0 1 0 0 0 0 1"
+                })

                 # Establish appropriate las scale for export
                 las_scale = 0.001

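For reference, a self-contained sketch of the filters.python mechanism used above: the "pdalargs" option is a JSON string in the stage definition, and PDAL exposes it to the script as a global dict named pdalargs. This sketch uses inline source and placeholder file names rather than the installed opendm.georef module, and assumes a PDAL build with its Python plugin:

    import json
    import pdal

    shift_src = """
    def shift_z(ins, outs):
        # pdalargs is injected by PDAL as a dict parsed from the stage's JSON string
        outs['Z'] = ins['Z'] + float(pdalargs['z_shift'])
        return True
    """

    pipeline_def = [
        {"type": "readers.ply", "filename": "point_cloud_topocentric.ply"},   # placeholder input
        {
            "type": "filters.python",
            "source": shift_src,
            "module": "anything",
            "function": "shift_z",
            "pdalargs": json.dumps({"z_shift": 10.0}),
        },
        {"type": "writers.ply", "filename": "point_cloud_shifted.ply"},       # placeholder output
    ]

    pdal.Pipeline(json.dumps(pipeline_def)).execute()
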
@@ -148,27 +215,40 @@ class ODMGeoreferencingStage(types.ODM_Stage):
                 else:
                     log.ODM_INFO("No point_cloud_stats.json found. Using default las scale: %s" % las_scale)

-                params += [
-                    f'--filters.transformation.matrix="1 0 0 {utmoffset[0]} 0 1 0 {utmoffset[1]} 0 0 1 0 0 0 0 1"',
-                    f'--writers.las.offset_x={reconstruction.georef.utm_east_offset}' ,
-                    f'--writers.las.offset_y={reconstruction.georef.utm_north_offset}',
-                    f'--writers.las.scale_x={las_scale}',
-                    f'--writers.las.scale_y={las_scale}',
-                    f'--writers.las.scale_z={las_scale}',
-                    '--writers.las.offset_z=0',
-                    f'--writers.las.a_srs="{reconstruction.georef.proj4()}"' # HOBU this should maybe be WKT
-                ]

+                las_writer_def = {
+                    "type": "writers.las",
+                    "filename": tree.odm_georeferencing_model_laz,
+                    "a_srs": reconstruction.georef.proj4(),
+                    "offset_x": utmoffset[0],
+                    "offset_y": utmoffset[1],
+                    "offset_z": 0,
+                    "scale_x": las_scale,
+                    "scale_y": las_scale,
+                    "scale_z": las_scale,
+                }

                 if reconstruction.has_gcp() and io.file_exists(gcp_geojson_zip_export_file):
                     if os.path.getsize(gcp_geojson_zip_export_file) <= 65535:
                         log.ODM_INFO("Embedding GCP info in point cloud")
-                        params += [
-                            '--writers.las.vlrs="{\\\"filename\\\": \\\"%s\\\", \\\"user_id\\\": \\\"ODM\\\", \\\"record_id\\\": 2, \\\"description\\\": \\\"Ground Control Points (zip)\\\"}"' % gcp_geojson_zip_export_file.replace(os.sep, "/")
-                        ]
+                        las_writer_def["vlrs"] = json.dumps(
+                            {
+                                "filename": gcp_geojson_zip_export_file.replace(os.sep, "/"),
+                                "user_id": "ODM",
+                                "record_id": 2,
+                                "description": "Ground Control Points (zip)"
+                            }
+                        )

                     else:
                         log.ODM_WARNING("Cannot embed GCP info in point cloud, %s is too large" % gcp_geojson_zip_export_file)

-                system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
+                las_georeferencing_pipeline.append(las_writer_def)
+
+                pipeline = pdal.Pipeline(json.dumps(las_georeferencing_pipeline))
+                pipeline.execute()

         self.update_progress(50)

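A note on the scale/offset values carried into writers.las above: LAS stores each coordinate as a 32-bit integer, with stored = round((value - offset) / scale), so the 0.001 scale plus the UTM offsets gives roughly millimeter resolution while keeping the integers small. A quick arithmetic sketch with illustrative numbers (not taken from the commit):

    offset_x = 576000.0          # e.g. an offset near the project's UTM easting
    scale_x = 0.001              # millimeter resolution, as set above
    X = 576123.4567              # one georeferenced easting value

    stored = round((X - offset_x) / scale_x)
    assert abs(stored) < 2**31                      # fits a signed 32-bit integer
    print(stored * scale_x + offset_x)              # 576123.457, precision bounded by the scale
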
@@ -202,7 +282,12 @@ class ODMGeoreferencingStage(types.ODM_Stage):
                     export_to_bounds_files(outputs['boundary'], reconstruction.get_proj_srs(), bounds_json, bounds_gpkg)
             else:
                 log.ODM_INFO("Converting point cloud (non-georeferenced)")
-                system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params))
+                las_georeferencing_pipeline.append({
+                    "type": "writers.las",
+                    "filename": tree.odm_georeferencing_model_laz
+                })
+                pipeline = pdal.Pipeline(json.dumps(las_georeferencing_pipeline))
+                pipeline.execute()


             stats_dir = tree.path("opensfm", "stats", "codem")

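After either branch writes the LAZ, the result can be spot-checked by reading it back with the same Python bindings; a sketch with a placeholder path (not part of the commit):

    import json
    import pdal

    check = pdal.Pipeline(json.dumps([
        {"type": "readers.las", "filename": "odm_georeferenced_model.laz"}    # placeholder path
    ]))
    check.execute()
    points = check.arrays[0]                                 # numpy structured array
    print(points["X"].min(), points["X"].max())              # should be UTM-magnitude eastings
    print(points["Y"].min(), points["Y"].max())              # and northings, not topocentric meters
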
@@ -19,11 +19,11 @@ class ODMeshingStage(types.ODM_Stage):

         # Create full 3D model unless --skip-3dmodel is set
         if not args.skip_3dmodel:
-            if not io.file_exists(tree.odm_mesh) or self.rerun():
-                log.ODM_INFO('Writing ODM Mesh file in: %s' % tree.odm_mesh)
+            if not io.file_exists(tree.odm_mesh_topo) or self.rerun():
+                log.ODM_INFO('Writing ODM Mesh file in: %s' % tree.odm_mesh_topo)

-                mesh.screened_poisson_reconstruction(tree.filtered_point_cloud,
-                                                     tree.odm_mesh,
+                mesh.screened_poisson_reconstruction(tree.filtered_point_cloud_topo,
+                                                     tree.odm_mesh_topo,
                                                      depth=self.params.get('oct_tree'),
                                                      samples=self.params.get('samples'),
                                                      maxVertexCount=self.params.get('max_vertex'),

@@ -31,16 +31,16 @@ class ODMeshingStage(types.ODM_Stage):
                                                      threads=max(1, self.params.get('max_concurrency') - 1)), # poissonrecon can get stuck on some machines if --threads == all cores
             else:
                 log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
-                                tree.odm_mesh)
+                                tree.odm_mesh_topo)

         self.update_progress(50)

         # Always generate a 2.5D mesh
         # unless --use-3dmesh is set.
         if not args.use_3dmesh:
-            if not io.file_exists(tree.odm_25dmesh) or self.rerun():
+            if not io.file_exists(tree.odm_25dmesh_topo) or self.rerun():

-                log.ODM_INFO('Writing ODM 2.5D Mesh file in: %s' % tree.odm_25dmesh)
+                log.ODM_INFO('Writing ODM 2.5D Mesh file in: %s' % tree.odm_25dmesh_topo)

                 multiplier = math.pi / 2.0
                 radius_steps = commands.get_dem_radius_steps(tree.filtered_point_cloud_stats, 3, args.orthophoto_resolution, multiplier=multiplier)

@@ -51,7 +51,7 @@ class ODMeshingStage(types.ODM_Stage):
                 if args.fast_orthophoto:
                     dsm_resolution *= 8.0

-                mesh.create_25dmesh(tree.filtered_point_cloud, tree.odm_25dmesh,
+                mesh.create_25dmesh(tree.filtered_point_cloud_topo, tree.odm_25dmesh_topo,
                                     radius_steps,
                                     dsm_resolution=dsm_resolution,
                                     depth=self.params.get('oct_tree'),

@@ -63,5 +63,5 @@ class ODMeshingStage(types.ODM_Stage):
                                     max_tiles=None if reconstruction.has_geotagged_photos() else math.ceil(len(reconstruction.photos) / 2))
             else:
                 log.ODM_WARNING('Found a valid ODM 2.5D Mesh file in: %s' %
-                                tree.odm_25dmesh)
+                                tree.odm_25dmesh_topo)

@@ -69,13 +69,13 @@ class ODMOpenSfMStage(types.ODM_Stage):
         self.update_progress(75)

         # We now switch to a geographic CRS
-        if reconstruction.is_georeferenced() and (not io.file_exists(tree.opensfm_topocentric_reconstruction) or self.rerun()):
-            octx.run('export_geocoords --reconstruction --proj "%s" --offset-x %s --offset-y %s' %
-                (reconstruction.georef.proj4(), reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset))
-            shutil.move(tree.opensfm_reconstruction, tree.opensfm_topocentric_reconstruction)
-            shutil.move(tree.opensfm_geocoords_reconstruction, tree.opensfm_reconstruction)
-        else:
-            log.ODM_WARNING("Will skip exporting %s" % tree.opensfm_geocoords_reconstruction)
+        # if reconstruction.is_georeferenced() and (not io.file_exists(tree.opensfm_topocentric_reconstruction) or self.rerun()):
+        #     octx.run('export_geocoords --reconstruction --proj "%s" --offset-x %s --offset-y %s' %
+        #         (reconstruction.georef.proj4(), reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset))
+        #     shutil.move(tree.opensfm_reconstruction, tree.opensfm_topocentric_reconstruction)
+        #     shutil.move(tree.opensfm_geocoords_reconstruction, tree.opensfm_reconstruction)
+        # else:
+        #     log.ODM_WARNING("Will skip exporting %s" % tree.opensfm_geocoords_reconstruction)

         self.update_progress(80)