import os
import shutil

from opensfm.large import metadataset

from opendm import log
from opendm import types
from opendm import io
from opendm import system
from opendm import orthophoto
from opendm import point_cloud
from opendm import multispectral
from opendm.osfm import OSFMContext, get_submodel_argv, get_submodel_paths, get_all_submodel_paths
from opendm.dem import utils
from opendm.dem.merge import euclidean_merge_dems
from opendm.cropper import Cropper
from opendm.remote import LocalRemoteExecutor
from opendm.shots import merge_geojson_shots, merge_cameras
from opendm.utils import double_quote
from opendm.tiles.tiler import generate_dem_tiles
from opendm.cogeo import convert_to_cogeo

2019-04-23 17:59:54 +00:00
class ODMSplitStage(types.ODM_Stage):
    """Split a large dataset into submodels and reconstruct each one.

    Sets ``outputs['large']`` so downstream stages (notably ODMMergeStage)
    know whether a split-merge workflow is in effect. When it is, the stage
    partitions the images into overlapping submodels, reconstructs each
    submodel (locally or via a remote cluster), aligns the partial
    reconstructions, and re-runs the ODM toolchain on every submodel.
    """

    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']
        photos = reconstruction.photos

        outputs['large'] = False

        # A dataset is treated as "large" when the user supplied an
        # image_groups.txt file, or when the photo count exceeds --split
        # (provided the photos are geotagged, which partitioning requires).
        image_groups_file = os.path.join(args.project_path, "image_groups.txt")
        if 'split_image_groups_is_set' in args:
            image_groups_file = os.path.abspath(args.split_image_groups)

        if io.file_exists(image_groups_file):
            outputs['large'] = True
        elif len(photos) > args.split:
            # check for availability of geotagged photos
            if reconstruction.has_geotagged_photos():
                outputs['large'] = True
            else:
                log.ODM_WARNING('Could not perform split-merge as GPS information in photos or image_groups.txt is missing.')

        if outputs['large']:
            # If we have a cluster address, we'll use a distributed workflow
            local_workflow = not bool(args.sm_cluster)

            octx = OSFMContext(tree.opensfm)
            split_done_file = octx.path("split_done.txt")

            if not io.file_exists(split_done_file) or self.rerun():
                # Remember the user's setting so it can be restored at the end;
                # remote workflows temporarily reserve one unit of concurrency.
                orig_max_concurrency = args.max_concurrency
                if not local_workflow:
                    args.max_concurrency = max(1, args.max_concurrency - 1)
                    log.ODM_INFO("Setting max-concurrency to %s to better handle remote splits" % args.max_concurrency)

                log.ODM_INFO("Large dataset detected (%s photos) and split set at %s. Preparing split merge." % (len(photos), args.split))

                # For multi-camera datasets, scale the submodel size down by
                # the number of bands (presumably because `photos` holds one
                # image per band per shot — TODO confirm against caller).
                multiplier = (1.0 / len(reconstruction.multi_camera)) if reconstruction.multi_camera else 1.0

                config = [
                    "submodels_relpath: " + os.path.join("..", "submodels", "opensfm"),
                    "submodel_relpath_template: " + os.path.join("..", "submodels", "submodel_%04d", "opensfm"),
                    "submodel_images_relpath_template: " + os.path.join("..", "submodels", "submodel_%04d", "images"),
                    "submodel_size: %s" % max(2, int(float(args.split) * multiplier)),
                    "submodel_overlap: %s" % args.split_overlap,
                ]

                octx.setup(args, tree.dataset_raw, reconstruction=reconstruction, append_config=config, rerun=self.rerun())
                octx.photos_to_metadata(photos, args.rolling_shutter, args.rolling_shutter_readout, self.rerun())

                self.update_progress(5)

                if local_workflow:
                    octx.feature_matching(self.rerun())

                self.update_progress(20)

                # Create submodels
                if not io.dir_exists(tree.submodels_path) or self.rerun():
                    if io.dir_exists(tree.submodels_path):
                        log.ODM_WARNING("Removing existing submodels directory: %s" % tree.submodels_path)
                        shutil.rmtree(tree.submodels_path)

                    octx.run("create_submodels")
                else:
                    log.ODM_WARNING("Submodels directory already exist at: %s" % tree.submodels_path)

                # Find paths of all submodels
                mds = metadataset.MetaDataSet(tree.opensfm)
                submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]

                # Seed each submodel project with the auxiliary inputs it
                # needs (GCPs, geo file, secondary multispectral bands).
                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)
                    submodel_images_dir = os.path.abspath(sp_octx.path("..", "images"))

                    # Copy filtered GCP file if needed
                    # One in OpenSfM's directory, one in the submodel project directory
                    if reconstruction.gcp and reconstruction.gcp.exists():
                        submodel_gcp_file = os.path.abspath(sp_octx.path("..", "gcp_list.txt"))

                        if reconstruction.gcp.make_filtered_copy(submodel_gcp_file, submodel_images_dir):
                            log.ODM_INFO("Copied filtered GCP file to %s" % submodel_gcp_file)
                            io.copy(submodel_gcp_file, os.path.abspath(sp_octx.path("gcp_list.txt")))
                        else:
                            log.ODM_INFO("No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP" % sp_octx.name())

                    # Copy GEO file if needed (one for each submodel project directory)
                    if tree.odm_geo_file is not None and os.path.isfile(tree.odm_geo_file):
                        geo_dst_path = os.path.abspath(sp_octx.path("..", "geo.txt"))
                        io.copy(tree.odm_geo_file, geo_dst_path)
                        log.ODM_INFO("Copied GEO file to %s" % geo_dst_path)

                    # If this is a multispectral dataset,
                    # we need to link the multispectral images
                    if reconstruction.multi_camera:
                        submodel_images = os.listdir(submodel_images_dir)

                        primary_band_name = multispectral.get_primary_band_name(reconstruction.multi_camera, args.primary_band)
                        _, p2s = multispectral.compute_band_maps(reconstruction.multi_camera, primary_band_name)
                        # For every primary-band photo assigned to this
                        # submodel, link its secondary-band counterparts.
                        for filename in p2s:
                            if filename in submodel_images:
                                secondary_band_photos = p2s[filename]
                                for p in secondary_band_photos:
                                    system.link_file(os.path.join(tree.dataset_raw, p.filename), submodel_images_dir)

                # Reconstruct each submodel
                log.ODM_INFO("Dataset has been split into %s submodels. Reconstructing each submodel..." % len(submodel_paths))
                self.update_progress(25)

                if local_workflow:
                    for sp in submodel_paths:
                        log.ODM_INFO("Reconstructing %s" % sp)
                        local_sp_octx = OSFMContext(sp)
                        local_sp_octx.create_tracks(self.rerun())
                        local_sp_octx.reconstruct(args.rolling_shutter, not args.sfm_no_partial, self.rerun())
                else:
                    lre = LocalRemoteExecutor(args.sm_cluster, args.rolling_shutter, self.rerun())
                    lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                    lre.run_reconstruction()

                self.update_progress(50)

                # Submodels that fail to produce an aligned reconstruction
                # are collected here and dropped from further processing.
                remove_paths = []

                # Align
                if not args.sm_no_align:
                    octx.align_reconstructions(self.rerun())

                    self.update_progress(55)

                    # Aligned reconstruction is in reconstruction.aligned.json
                    # We need to rename it to reconstruction.json
                    for sp in submodel_paths:
                        sp_octx = OSFMContext(sp)

                        aligned_recon = sp_octx.path('reconstruction.aligned.json')
                        unaligned_recon = sp_octx.path('reconstruction.unaligned.json')
                        main_recon = sp_octx.path('reconstruction.json')

                        # The presence of the .unaligned backup means the swap
                        # below already happened on a previous run.
                        if io.file_exists(main_recon) and io.file_exists(unaligned_recon) and not self.rerun():
                            log.ODM_INFO("Submodel %s has already been aligned." % sp_octx.name())
                            continue

                        if not io.file_exists(aligned_recon):
                            log.ODM_WARNING("Submodel %s does not have an aligned reconstruction (%s). "
                                            "This could mean that the submodel could not be reconstructed "
                                            "(are there enough features to reconstruct it?). Skipping." % (sp_octx.name(), aligned_recon))
                            remove_paths.append(sp)
                            continue

                        # Keep the unaligned reconstruction as a backup, then
                        # promote the aligned one to reconstruction.json.
                        if io.file_exists(main_recon):
                            shutil.move(main_recon, unaligned_recon)

                        shutil.move(aligned_recon, main_recon)
                        log.ODM_INFO("%s is now %s" % (aligned_recon, main_recon))

                # Remove invalid submodels
                submodel_paths = [p for p in submodel_paths if not p in remove_paths]

                # Run ODM toolchain for each submodel
                if local_workflow:
                    for sp in submodel_paths:
                        sp_octx = OSFMContext(sp)

                        log.ODM_INFO("========================")
                        log.ODM_INFO("Processing %s" % sp_octx.name())
                        log.ODM_INFO("========================")

                        argv = get_submodel_argv(args, tree.submodels_path, sp_octx.name())

                        # Re-run the ODM toolchain on the submodel
                        system.run(" ".join(map(double_quote, map(str, argv))), env_vars=os.environ.copy())
                else:
                    lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                    lre.run_toolchain()

                # Restore max_concurrency value
                args.max_concurrency = orig_max_concurrency

                octx.touch(split_done_file)
            else:
                log.ODM_WARNING('Found a split done file in: %s' % split_done_file)
        else:
            log.ODM_INFO("Normal dataset, will process all at once.")
            self.progress = 0.0
2019-04-23 17:59:54 +00:00
2019-04-23 22:01:14 +00:00
2019-04-24 19:15:22 +00:00
class ODMMergeStage(types.ODM_Stage):
    """Merge the per-submodel outputs produced after ODMSplitStage.

    Depending on ``args.merge`` ('all', 'pointcloud', 'orthophoto', 'dem'),
    merges point clouds, crop bounds, orthophotos, DEMs, report shots and
    camera files into the top-level project, then skips the pipeline ahead
    to the final stage. For normal (non-split) datasets this is a no-op.
    """

    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        if outputs['large']:
            if not os.path.exists(tree.submodels_path):
                raise system.ExitException("We reached the merge stage, but %s folder does not exist. Something must have gone wrong at an earlier stage. Check the log and fix possible problem before restarting?" % tree.submodels_path)

            # Merge point clouds
            if args.merge in ['all', 'pointcloud']:
                if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
                    all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")
                    # Best-effort: a failed point cloud merge is logged and
                    # skipped rather than aborting the remaining merges.
                    try:
                        point_cloud.merge(all_point_clouds, tree.odm_georeferencing_model_laz, rerun=self.rerun())
                        point_cloud.post_point_cloud_steps(args, tree, self.rerun())
                    except Exception as e:
                        log.ODM_WARNING("Could not merge point cloud: %s (skipping)" % str(e))
                else:
                    log.ODM_WARNING("Found merged point cloud in %s" % tree.odm_georeferencing_model_laz)

            self.update_progress(25)

            # Merge crop bounds
            # (always performed: both the orthophoto and DEM merges below
            # use merged_bounds_file for cropping)
            merged_bounds_file = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
            if not io.file_exists(merged_bounds_file) or self.rerun():
                all_bounds = get_submodel_paths(tree.submodels_path, 'odm_georeferencing', 'odm_georeferenced_model.bounds.gpkg')
                log.ODM_INFO("Merging all crop bounds: %s" % all_bounds)
                if len(all_bounds) > 0:
                    # Calculate a new crop area
                    # based on the convex hull of all crop areas of all submodels
                    # (without a buffer, otherwise we are double-cropping)
                    Cropper.merge_bounds(all_bounds, merged_bounds_file, 0)
                else:
                    log.ODM_WARNING("No bounds found for any submodel.")

            # Merge orthophotos
            if args.merge in ['all', 'orthophoto']:
                if not io.dir_exists(tree.odm_orthophoto):
                    system.mkdir_p(tree.odm_orthophoto)

                if not io.file_exists(tree.odm_orthophoto_tif) or self.rerun():
                    # Pairs of (feathered orthophoto, cutline) per submodel
                    all_orthos_and_ortho_cuts = get_all_submodel_paths(tree.submodels_path,
                        os.path.join("odm_orthophoto", "odm_orthophoto_feathered.tif"),
                        os.path.join("odm_orthophoto", "odm_orthophoto_cut.tif"),
                    )

                    if len(all_orthos_and_ortho_cuts) > 1:
                        log.ODM_INFO("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_ortho_cuts))
                        # TODO: histogram matching via rasterio
                        # currently parts have different color tones
                        if io.file_exists(tree.odm_orthophoto_tif):
                            os.remove(tree.odm_orthophoto_tif)

                        orthophoto_vars = orthophoto.get_orthophoto_vars(args)
                        orthophoto.merge(all_orthos_and_ortho_cuts, tree.odm_orthophoto_tif, orthophoto_vars)
                        orthophoto.post_orthophoto_steps(args, merged_bounds_file, tree.odm_orthophoto_tif, tree.orthophoto_tiles, args.orthophoto_resolution)
                    elif len(all_orthos_and_ortho_cuts) == 1:
                        # Simply copy
                        log.ODM_WARNING("A single orthophoto/cutline pair was found between all submodels.")
                        shutil.copyfile(all_orthos_and_ortho_cuts[0][0], tree.odm_orthophoto_tif)
                    else:
                        log.ODM_WARNING("No orthophoto/cutline pairs were found in any of the submodels. No orthophoto will be generated.")
                else:
                    log.ODM_WARNING("Found merged orthophoto in %s" % tree.odm_orthophoto_tif)

            self.update_progress(75)

            # Merge DEMs
            # Closure over tree/args/merged_bounds_file; used for both DSM
            # and DTM below.
            def merge_dems(dem_filename, human_name):
                if not io.dir_exists(tree.path('odm_dem')):
                    system.mkdir_p(tree.path('odm_dem'))

                dem_file = tree.path("odm_dem", dem_filename)
                if not io.file_exists(dem_file) or self.rerun():
                    all_dems = get_submodel_paths(tree.submodels_path, "odm_dem", dem_filename)
                    log.ODM_INFO("Merging %ss" % human_name)

                    # Merge
                    dem_vars = utils.get_dem_vars(args)
                    eu_map_source = None # Default

                    # Use DSM's euclidean map for DTMs
                    # (requires the DSM to be computed)
                    if human_name == "DTM":
                        eu_map_source = "dsm"

                    euclidean_merge_dems(all_dems, dem_file, dem_vars, euclidean_map_source=eu_map_source)

                    if io.file_exists(dem_file):
                        # Crop
                        if args.crop > 0 or args.boundary:
                            Cropper.crop(merged_bounds_file, dem_file, dem_vars, keep_original=not args.optimize_disk_space)
                        log.ODM_INFO("Created %s" % dem_file)

                        if args.tiles:
                            generate_dem_tiles(dem_file, tree.path("%s_tiles" % human_name.lower()), args.max_concurrency, args.dem_resolution)

                        if args.cog:
                            convert_to_cogeo(dem_file, max_workers=args.max_concurrency)
                    else:
                        log.ODM_WARNING("Cannot merge %s, %s was not created" % (human_name, dem_file))
                else:
                    log.ODM_WARNING("Found merged %s in %s" % (human_name, dem_filename))

            if args.merge in ['all', 'dem'] and args.dsm:
                merge_dems("dsm.tif", "DSM")

            if args.merge in ['all', 'dem'] and args.dtm:
                merge_dems("dtm.tif", "DTM")

            self.update_progress(95)

            # Merge reports
            if not io.dir_exists(tree.odm_report):
                system.mkdir_p(tree.odm_report)

            geojson_shots = tree.path(tree.odm_report, "shots.geojson")
            if not io.file_exists(geojson_shots) or self.rerun():
                geojson_shots_files = get_submodel_paths(tree.submodels_path, "odm_report", "shots.geojson")
                log.ODM_INFO("Merging %s shots.geojson files" % len(geojson_shots_files))
                merge_geojson_shots(geojson_shots_files, geojson_shots)
            else:
                log.ODM_WARNING("Found merged shots.geojson in %s" % tree.odm_report)

            # Merge cameras
            cameras_json = tree.path("cameras.json")
            if not io.file_exists(cameras_json) or self.rerun():
                cameras_json_files = get_submodel_paths(tree.submodels_path, "cameras.json")
                log.ODM_INFO("Merging %s cameras.json files" % len(cameras_json_files))
                merge_cameras(cameras_json_files, cameras_json)
            else:
                log.ODM_WARNING("Found merged cameras.json in %s" % tree.root_path)

            # Stop the pipeline short by skipping to the postprocess stage.
            # Afterwards, we're done.
            self.next_stage = self.last_stage()
        else:
            log.ODM_INFO("Normal dataset, nothing to merge.")
            self.progress = 0.0
2019-04-24 21:36:45 +00:00
2022-07-29 04:13:50 +00:00