import os
import shutil
from opendm import log
from opendm.osfm import OSFMContext, get_submodel_argv, get_submodel_paths, get_all_submodel_paths
from opendm import types
from opendm import io
from opendm import system
from opendm import orthophoto
from opendm.gcp import GCPFile
from opendm.dem import pdal, utils
from opendm.dem.merge import euclidean_merge_dems
from opensfm.large import metadataset
from opendm.cropper import Cropper
from opendm.concurrency import get_max_memory
from opendm.remote import LocalRemoteExecutor
from opendm import entwine
from pipes import quote

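# Split-merge overview: ODMSplitStage partitions a large dataset (more photos
# than --split) into overlapping submodels, reconstructs each one locally or
# on a cluster (--sm-cluster), aligns the partial reconstructions and re-runs
# the ODM toolchain on each submodel. ODMMergeStage then merges the
# per-submodel point clouds, orthophotos and DEMs into single outputs.
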
class ODMSplitStage(types.ODM_Stage):
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']
        photos = reconstruction.photos

        outputs['large'] = len(photos) > args.split

        if outputs['large']:
            # Split-merge is on, always use --use_fixed_camera_params
            log.ODM_INFO("--use-fixed-camera-params will be turned ON")
            args.use_fixed_camera_params = True

            # If we have a cluster address, we'll use a distributed workflow
            local_workflow = not bool(args.sm_cluster)

            octx = OSFMContext(tree.opensfm)
            split_done_file = octx.path("split_done.txt")

            if not io.file_exists(split_done_file) or self.rerun():
                orig_max_concurrency = args.max_concurrency

                if not local_workflow:
                    args.max_concurrency = max(1, args.max_concurrency - 1)
                    log.ODM_INFO("Setting max-concurrency to %s to better handle remote splits" % args.max_concurrency)

                log.ODM_INFO("Large dataset detected (%s photos) and split set at %s. Preparing split merge." % (len(photos), args.split))
                config = [
                    "submodels_relpath: ../submodels/opensfm",
                    "submodel_relpath_template: ../submodels/submodel_%04d/opensfm",
                    "submodel_images_relpath_template: ../submodels/submodel_%04d/images",
                    "submodel_size: %s" % args.split,
                    "submodel_overlap: %s" % args.split_overlap,
                ]
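                # These keys are appended to OpenSfM's config.yaml and drive
                # create_submodels below: submodel_size is the target number of
                # images per submodel and submodel_overlap is the radius (in
                # meters, per OpenSfM's large-scale reconstruction docs) of the
                # region shared between neighboring submodels.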

                octx.setup(args, tree.dataset_raw, photos, gcp_path=reconstruction.gcp.gcp_path if reconstruction.gcp else None, append_config=config, rerun=self.rerun())
                octx.extract_metadata(self.rerun())

                self.update_progress(5)

                if local_workflow:
                    octx.feature_matching(self.rerun())

                self.update_progress(20)

                # Create submodels
                if not io.dir_exists(tree.submodels_path) or self.rerun():
                    if io.dir_exists(tree.submodels_path):
                        log.ODM_WARNING("Removing existing submodels directory: %s" % tree.submodels_path)
                        shutil.rmtree(tree.submodels_path)

                    octx.run("create_submodels")
                else:
                    log.ODM_WARNING("Submodels directory already exists at: %s" % tree.submodels_path)

                # Find paths of all submodels
                mds = metadataset.MetaDataSet(tree.opensfm)
                submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]
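                # MetaDataSet reads the clustering computed by create_submodels;
                # each path points at a submodel's own opensfm directory.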

                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    # Copy filtered GCP file if needed
                    # One in OpenSfM's directory, one in the submodel project directory
                    if reconstruction.gcp and reconstruction.gcp.exists():
                        submodel_gcp_file = os.path.abspath(sp_octx.path("..", "gcp_list.txt"))
                        submodel_images_dir = os.path.abspath(sp_octx.path("..", "images"))

                        if reconstruction.gcp.make_filtered_copy(submodel_gcp_file, submodel_images_dir):
                            log.ODM_DEBUG("Copied filtered GCP file to %s" % submodel_gcp_file)
                            io.copy(submodel_gcp_file, os.path.abspath(sp_octx.path("gcp_list.txt")))
                        else:
                            log.ODM_DEBUG("No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP" % sp_octx.name())

                # Reconstruct each submodel
                log.ODM_INFO("Dataset has been split into %s submodels. Reconstructing each submodel..." % len(submodel_paths))
                self.update_progress(25)

                if local_workflow:
                    for sp in submodel_paths:
                        log.ODM_INFO("Reconstructing %s" % sp)
                        OSFMContext(sp).reconstruct(self.rerun())
                else:
                    lre = LocalRemoteExecutor(args.sm_cluster, self.rerun())
                    lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                    lre.run_reconstruction()
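                    # LocalRemoteExecutor farms the submodel projects out to the
                    # cluster pointed at by --sm-cluster (processing some locally
                    # as well); set_projects receives the project roots, one
                    # level above each submodel's opensfm directory.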

                self.update_progress(50)

                # Align
                octx.align_reconstructions(self.rerun())
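                # Alignment (OpenSfM's align_submodels step) computes a
                # transformation that brings every submodel reconstruction into
                # a common reference frame, writing the result to each
                # submodel's reconstruction.aligned.json.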

                self.update_progress(55)

                # Aligned reconstruction is in reconstruction.aligned.json
                # We need to rename it to reconstruction.json
                remove_paths = []

                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    aligned_recon = sp_octx.path('reconstruction.aligned.json')
                    unaligned_recon = sp_octx.path('reconstruction.unaligned.json')
                    main_recon = sp_octx.path('reconstruction.json')

                    if io.file_exists(main_recon) and io.file_exists(unaligned_recon) and not self.rerun():
                        log.ODM_INFO("Submodel %s has already been aligned." % sp_octx.name())
                        continue

                    if not io.file_exists(aligned_recon):
                        log.ODM_WARNING("Submodel %s does not have an aligned reconstruction (%s). "
                                        "This could mean that the submodel could not be reconstructed "
                                        "(are there enough features to reconstruct it?). Skipping." % (sp_octx.name(), aligned_recon))
                        remove_paths.append(sp)
                        continue

                    if io.file_exists(main_recon):
                        shutil.move(main_recon, unaligned_recon)

                    shutil.move(aligned_recon, main_recon)
                    log.ODM_DEBUG("%s is now %s" % (aligned_recon, main_recon))

                # Remove invalid submodels
                submodel_paths = [p for p in submodel_paths if p not in remove_paths]

                # Run ODM toolchain for each submodel
                if local_workflow:
                    for sp in submodel_paths:
                        sp_octx = OSFMContext(sp)

                        log.ODM_INFO("========================")
                        log.ODM_INFO("Processing %s" % sp_octx.name())
                        log.ODM_INFO("========================")

                        argv = get_submodel_argv(args.name, tree.submodels_path, sp_octx.name())

                        # Re-run the ODM toolchain on the submodel
                        system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy())
                else:
                    lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                    lre.run_toolchain()
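                # In both branches each submodel is processed as if it were a
                # standalone ODM project: get_submodel_argv rewrites the original
                # command line so it targets the submodel project, dropping
                # split-related options so the submodel isn't split again.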

                # Restore max_concurrency value
                args.max_concurrency = orig_max_concurrency

                octx.touch(split_done_file)
            else:
                log.ODM_WARNING('Found a split done file in: %s' % split_done_file)
        else:
            log.ODM_INFO("Normal dataset, will process all at once.")
            self.progress = 0.0


class ODMMergeStage(types.ODM_Stage):
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        if outputs['large']:
            if not os.path.exists(tree.submodels_path):
                log.ODM_ERROR("We reached the merge stage, but %s folder does not exist. Something must have gone wrong at an earlier stage. Check the log and fix possible problems before restarting." % tree.submodels_path)
                exit(1)

            # Merge point clouds
            if args.merge in ['all', 'pointcloud']:
                if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
                    all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")

                    try:
                        # pdal.merge_point_clouds(all_point_clouds, tree.odm_georeferencing_model_laz, args.verbose)
                        entwine.build(all_point_clouds, tree.entwine_pointcloud, max_concurrency=args.max_concurrency, rerun=self.rerun())
                    except Exception as e:
                        log.ODM_WARNING("Could not merge point cloud: %s (skipping)" % str(e))

                    if io.dir_exists(tree.entwine_pointcloud):
                        try:
                            system.run('pdal translate "ept://{}" "{}"'.format(tree.entwine_pointcloud, tree.odm_georeferencing_model_laz))
                        except Exception as e:
                            log.ODM_WARNING("Cannot export EPT dataset to LAZ: %s" % str(e))
                else:
                    log.ODM_WARNING("Found merged point cloud in %s" % tree.odm_georeferencing_model_laz)

            self.update_progress(25)

            # Merge crop bounds
            merged_bounds_file = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
            if not io.file_exists(merged_bounds_file) or self.rerun():
                all_bounds = get_submodel_paths(tree.submodels_path, 'odm_georeferencing', 'odm_georeferenced_model.bounds.gpkg')
                log.ODM_DEBUG("Merging all crop bounds: %s" % all_bounds)
                if len(all_bounds) > 0:
                    # Calculate a new crop area
                    # based on the convex hull of all crop areas of all submodels
                    # (without a buffer, otherwise we are double-cropping)
                    Cropper.merge_bounds(all_bounds, merged_bounds_file, 0)
                else:
                    log.ODM_WARNING("No bounds found for any submodel.")

            # Merge orthophotos
            if args.merge in ['all', 'orthophoto']:
                if not io.dir_exists(tree.odm_orthophoto):
                    system.mkdir_p(tree.odm_orthophoto)

                if not io.file_exists(tree.odm_orthophoto_tif) or self.rerun():
                    all_orthos_and_cutlines = get_all_submodel_paths(tree.submodels_path,
                                                                     os.path.join("odm_orthophoto", "odm_orthophoto.tif"),
                                                                     os.path.join("odm_orthophoto", "cutline.gpkg"),
                                                                     )
                    if len(all_orthos_and_cutlines) > 1:
                        log.ODM_DEBUG("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_cutlines))
                        # TODO: histogram matching via rasterio
                        # currently parts have different color tones

                        merged_geotiff = os.path.join(tree.odm_orthophoto, "odm_orthophoto.merged.tif")

                        kwargs = {
                            'orthophoto_merged': merged_geotiff,
                            'input_files': ' '.join(map(lambda i: quote(i[0]), all_orthos_and_cutlines)),
                            'max_memory': get_max_memory(),
                            'threads': args.max_concurrency,
                        }

                        # use bounds as cutlines (blending)
                        if io.file_exists(merged_geotiff):
                            os.remove(merged_geotiff)

                        system.run('gdal_merge.py -o {orthophoto_merged} '
                                   #'-createonly '
                                   '-co "BIGTIFF=YES" '
                                   '-co "BLOCKXSIZE=512" '
                                   '-co "BLOCKYSIZE=512" '
                                   '--config GDAL_CACHEMAX {max_memory}% '
                                   '{input_files}'.format(**kwargs)
                                   )

                        for ortho_cutline in all_orthos_and_cutlines:
                            kwargs['input_file'], kwargs['cutline'] = ortho_cutline

                            # Note: cblend has a high performance penalty
                            system.run('gdalwarp -cutline {cutline} '
                                       '-cblend 20 '
                                       '-r bilinear -multi '
                                       '-wo NUM_THREADS={threads} '
                                       '--config GDAL_CACHEMAX {max_memory}% '
                                       '{input_file} {orthophoto_merged}'.format(**kwargs)
                                       )
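                        # Two-pass merge: gdal_merge.py first mosaics all
                        # orthophotos into one canvas, then each orthophoto is
                        # warped back in clipped to its cutline, with -cblend 20
                        # feathering a 20 pixel band along the cutline edges to
                        # hide the seams between submodels.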

                        # Apply orthophoto settings (compression, tiling, etc.)
                        orthophoto_vars = orthophoto.get_orthophoto_vars(args)

                        if io.file_exists(tree.odm_orthophoto_tif):
                            os.remove(tree.odm_orthophoto_tif)

                        kwargs = {
                            'vars': ' '.join(['-co %s=%s' % (k, orthophoto_vars[k]) for k in orthophoto_vars]),
                            'max_memory': get_max_memory(),
                            'merged': merged_geotiff,
                            'log': tree.odm_orthophoto_tif_log,
                            'orthophoto': tree.odm_orthophoto_tif,
                        }

                        system.run('gdal_translate '
                                   '{vars} '
                                   '--config GDAL_CACHEMAX {max_memory}% '
                                   '{merged} {orthophoto} > {log}'.format(**kwargs))

                        os.remove(merged_geotiff)

                        # Crop
                        if args.crop > 0:
                            Cropper.crop(merged_bounds_file, tree.odm_orthophoto_tif, orthophoto_vars)

                        # Overviews
                        if args.build_overviews:
                            orthophoto.build_overviews(tree.odm_orthophoto_tif)

                    elif len(all_orthos_and_cutlines) == 1:
                        # Simply copy
                        log.ODM_WARNING("A single orthophoto/cutline pair was found among all submodels.")
                        shutil.copyfile(all_orthos_and_cutlines[0][0], tree.odm_orthophoto_tif)
                    else:
                        log.ODM_WARNING("No orthophoto/cutline pairs were found in any of the submodels. No orthophoto will be generated.")
                else:
                    log.ODM_WARNING("Found merged orthophoto in %s" % tree.odm_orthophoto_tif)

            self.update_progress(75)

            # Merge DEMs
            def merge_dems(dem_filename, human_name):
                if not io.dir_exists(tree.path('odm_dem')):
                    system.mkdir_p(tree.path('odm_dem'))

                dem_file = tree.path("odm_dem", dem_filename)

                if not io.file_exists(dem_file) or self.rerun():
                    all_dems = get_submodel_paths(tree.submodels_path, "odm_dem", dem_filename)
                    log.ODM_INFO("Merging %ss" % human_name)

                    # Merge
                    dem_vars = utils.get_dem_vars(args)
                    euclidean_merge_dems(all_dems, dem_file, dem_vars)
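                    # euclidean_merge_dems blends overlapping submodel DEMs,
                    # weighting pixels by their distance from each DEM's edges
                    # (a euclidean distance transform) so elevation transitions
                    # stay smooth instead of showing hard seams.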
                    if io.file_exists(dem_file):
                        # Crop
                        if args.crop > 0:
                            Cropper.crop(merged_bounds_file, dem_file, dem_vars)
                        log.ODM_INFO("Created %s" % dem_file)
                    else:
                        log.ODM_WARNING("Cannot merge %s, %s was not created" % (human_name, dem_file))
                else:
                    log.ODM_WARNING("Found merged %s in %s" % (human_name, dem_file))

            if args.merge in ['all', 'dem'] and args.dsm:
                merge_dems("dsm.tif", "DSM")

            if args.merge in ['all', 'dem'] and args.dtm:
                merge_dems("dtm.tif", "DTM")

            # Stop the pipeline short! We're done.
            self.next_stage = None
        else:
            log.ODM_INFO("Normal dataset, nothing to merge.")
            self.progress = 0.0