import os
import shutil
import json
import yaml

from opendm import log
from opendm.osfm import OSFMContext, get_submodel_argv, get_submodel_paths, get_all_submodel_paths
from opendm import types
from opendm import io
from opendm import system
from opendm import orthophoto
from opendm.gcp import GCPFile
from opendm.dem import pdal, utils
from opendm.dem.merge import euclidean_merge_dems
from opensfm.large import metadataset
from opendm.cropper import Cropper
from opendm.concurrency import get_max_memory
from opendm.remote import LocalRemoteExecutor
from opendm import point_cloud
from pipes import quote
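
# Split-merge workflow: ODMSplitStage partitions a large dataset into
# overlapping submodels and reconstructs each one (locally, or remotely when
# --sm-cluster is set), while ODMMergeStage recombines the per-submodel
# outputs (point clouds, orthophotos, DEMs) into a single set of results.
# A typical invocation (hypothetical values) adds flags roughly like:
# --split 400 --split-overlap 150 [--sm-cluster http://<node-odm-proxy>]
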
class ODMSplitStage(types.ODM_Stage):
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']
        photos = reconstruction.photos

        outputs['large'] = len(photos) > args.split

        if outputs['large']:
            # If we have a cluster address, we'll use a distributed workflow
            local_workflow = not bool(args.sm_cluster)

            octx = OSFMContext(tree.opensfm)
            split_done_file = octx.path("split_done.txt")
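
            # split_done.txt is a checkpoint marker: when present (and no rerun
            # was requested), the entire split workflow below is skipped.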
            if not io.file_exists(split_done_file) or self.rerun():
                orig_max_concurrency = args.max_concurrency

                if not local_workflow:
                    args.max_concurrency = max(1, args.max_concurrency - 1)
                    log.ODM_INFO("Setting max-concurrency to %s to better handle remote splits" % args.max_concurrency)

                log.ODM_INFO("Large dataset detected (%s photos) and split set at %s. Preparing split merge." % (len(photos), args.split))
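
                # OpenSfM config overrides controlling the partitioning:
                # submodel_size is the target number of images per submodel and
                # submodel_overlap (in meters) pulls nearby images into adjacent
                # submodels so they share enough imagery to be aligned later.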
                config = [
                    "submodels_relpath: ../submodels/opensfm",
                    "submodel_relpath_template: ../submodels/submodel_%04d/opensfm",
                    "submodel_images_relpath_template: ../submodels/submodel_%04d/images",
                    "submodel_size: %s" % args.split,
                    "submodel_overlap: %s" % args.split_overlap,
                ]

                octx.setup(args, tree.dataset_raw, photos, reconstruction=reconstruction, append_config=config, rerun=self.rerun())
                octx.extract_metadata(self.rerun())

                self.update_progress(5)

                if local_workflow:
                    octx.feature_matching(self.rerun())

                self.update_progress(20)

                # Create submodels
                if not io.dir_exists(tree.submodels_path) or self.rerun():
                    if io.dir_exists(tree.submodels_path):
                        log.ODM_WARNING("Removing existing submodels directory: %s" % tree.submodels_path)
                        shutil.rmtree(tree.submodels_path)
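
                    # "create_submodels" is OpenSfM's clustering command; it
                    # groups images into the submodel directories laid out by
                    # the config above.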
                    octx.run("create_submodels")
                else:
                    log.ODM_WARNING("Submodels directory already exists at: %s" % tree.submodels_path)

                # Find paths of all submodels
                mds = metadataset.MetaDataSet(tree.opensfm)
                submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]

                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    # Copy filtered GCP file if needed
                    # One in OpenSfM's directory, one in the submodel project directory
                    if reconstruction.gcp and reconstruction.gcp.exists():
                        submodel_gcp_file = os.path.abspath(sp_octx.path("..", "gcp_list.txt"))
                        submodel_images_dir = os.path.abspath(sp_octx.path("..", "images"))

                        if reconstruction.gcp.make_filtered_copy(submodel_gcp_file, submodel_images_dir):
                            log.ODM_INFO("Copied filtered GCP file to %s" % submodel_gcp_file)
                            io.copy(submodel_gcp_file, os.path.abspath(sp_octx.path("gcp_list.txt")))
                        else:
                            log.ODM_INFO("No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP" % sp_octx.name())

                # Reconstruct each submodel
                log.ODM_INFO("Dataset has been split into %s submodels. Reconstructing each submodel..." % len(submodel_paths))

                self.update_progress(25)

                if local_workflow:
                    for sp in submodel_paths:
                        log.ODM_INFO("Reconstructing %s" % sp)
                        OSFMContext(sp).reconstruct(self.rerun())
                else:
                    lre = LocalRemoteExecutor(args.sm_cluster, self.rerun())
                    lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                    lre.run_reconstruction()

                self.update_progress(50)
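
                # With --split-multitracks, a submodel whose reconstruction.json
                # contains multiple disconnected components ("tracks") is split
                # further: each component becomes its own submodel.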
                resplit_done_file = octx.path('resplit_done.txt')
                if not io.file_exists(resplit_done_file) and bool(args.split_multitracks):
                    submodels = mds.get_submodel_paths()
                    i = 0
                    for s in submodels:
                        template = octx.path("../aligned_submodels/submodel_%04d")
                        with open(s + "/reconstruction.json", "r") as f:
                            j = json.load(f)
                        for k in range(0, len(j)):
                            v = j[k]
                            path = template % i

                            # Create the submodel path up to opensfm
                            os.makedirs(path + "/opensfm")
                            os.makedirs(path + "/images")

                            # Symlinks for common data
                            images = os.listdir(octx.path("../images"))
                            for image in images:
                                os.symlink("../../../images/" + image, path + "/images/" + image)
                            os.symlink("../../../opensfm/exif", path + "/opensfm/exif")
                            os.symlink("../../../opensfm/features", path + "/opensfm/features")
                            os.symlink("../../../opensfm/matches", path + "/opensfm/matches")
                            os.symlink("../../../opensfm/reference_lla.json", path + "/opensfm/reference_lla.json")
                            os.symlink("../../../opensfm/camera_models.json", path + "/opensfm/camera_models.json")

                            shutil.copy(s + "/../cameras.json", path + "/cameras.json")
                            shutil.copy(s + "/../images.json", path + "/images.json")

                            with open(octx.path("config.yaml")) as f:
                                doc = yaml.safe_load(f)

                            dmcv = "depthmap_min_consistent_views"
                            if dmcv in doc:
                                if len(v["shots"]) < doc[dmcv]:
                                    doc[dmcv] = len(v["shots"])
                                    print("WARNING: Reduced " + dmcv + " to accommodate short track")

                            with open(path + "/opensfm/config.yaml", "w") as f:
                                yaml.dump(doc, f)

                            # We need the original tracks file for the visualsfm export, since
                            # there may still be point matches between the tracks
                            shutil.copy(s + "/tracks.csv", path + "/opensfm/tracks.csv")

                            # Create our new reconstruction file with only the relevant track
                            with open(path + "/opensfm/reconstruction.json", "w") as o:
                                json.dump([v], o)

                            # Create image lists
                            with open(path + "/opensfm/image_list.txt", "w") as o:
                                o.writelines(map(lambda x: "../images/" + x + '\n', v["shots"].keys()))
                            with open(path + "/img_list.txt", "w") as o:
                                o.writelines(map(lambda x: x + '\n', v["shots"].keys()))

                            i += 1

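                    # Swap the resplit submodels in place of the originals and
                    # refresh the submodel list accordingly.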
                    os.rename(octx.path("../submodels"), octx.path("../unaligned_submodels"))
                    os.rename(octx.path("../aligned_submodels"), octx.path("../submodels"))
                    octx.touch(resplit_done_file)

                    mds = metadataset.MetaDataSet(tree.opensfm)
                    submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]

                # Align
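                # align_reconstructions brings the per-submodel reconstructions
                # into a common reference frame (roughly, using the imagery the
                # submodels share); the result goes to reconstruction.aligned.json.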
                octx.align_reconstructions(self.rerun())

                self.update_progress(55)

                # Aligned reconstruction is in reconstruction.aligned.json
                # We need to rename it to reconstruction.json
                remove_paths = []
                for sp in submodel_paths:
                    sp_octx = OSFMContext(sp)

                    aligned_recon = sp_octx.path('reconstruction.aligned.json')
                    unaligned_recon = sp_octx.path('reconstruction.unaligned.json')
                    main_recon = sp_octx.path('reconstruction.json')

                    if io.file_exists(main_recon) and io.file_exists(unaligned_recon) and not self.rerun():
                        log.ODM_INFO("Submodel %s has already been aligned." % sp_octx.name())
                        continue

                    if not io.file_exists(aligned_recon):
                        log.ODM_WARNING("Submodel %s does not have an aligned reconstruction (%s). "
                                        "This could mean that the submodel could not be reconstructed "
                                        "(are there enough features to reconstruct it?). Skipping." % (sp_octx.name(), aligned_recon))
                        remove_paths.append(sp)
                        continue

                    if io.file_exists(main_recon):
                        shutil.move(main_recon, unaligned_recon)

                    shutil.move(aligned_recon, main_recon)
                    log.ODM_INFO("%s is now %s" % (aligned_recon, main_recon))

                # Remove invalid submodels
                submodel_paths = [p for p in submodel_paths if p not in remove_paths]

                # Run ODM toolchain for each submodel
                if local_workflow:
                    for sp in submodel_paths:
                        sp_octx = OSFMContext(sp)

                        log.ODM_INFO("========================")
                        log.ODM_INFO("Processing %s" % sp_octx.name())
                        log.ODM_INFO("========================")

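                        # get_submodel_argv rewrites the current command line so
                        # the submodel runs as a standalone ODM project (notably
                        # dropping --split, so the submodel isn't split again).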
                        argv = get_submodel_argv(args, tree.submodels_path, sp_octx.name())

                        # Re-run the ODM toolchain on the submodel
                        system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy())
                else:
                    lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
                    lre.run_toolchain()

                # Restore max_concurrency value
                args.max_concurrency = orig_max_concurrency

                octx.touch(split_done_file)
            else:
                log.ODM_WARNING('Found a split done file in: %s' % split_done_file)
        else:
            log.ODM_INFO("Normal dataset, will process all at once.")
            self.progress = 0.0


class ODMMergeStage(types.ODM_Stage):
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        if outputs['large']:
            if not os.path.exists(tree.submodels_path):
                log.ODM_ERROR("We reached the merge stage, but the %s folder does not exist. Something must have gone wrong at an earlier stage. Check the logs and fix the problem before restarting." % tree.submodels_path)
                exit(1)

            # Merge point clouds
            if args.merge in ['all', 'pointcloud']:
                if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun():
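                    # get_submodel_paths collects this artifact's path from every
                    # submodel that produced it, skipping submodels where it is
                    # missing rather than aborting the merge.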
                    all_point_clouds = get_submodel_paths(tree.submodels_path, "odm_georeferencing", "odm_georeferenced_model.laz")

                    try:
                        point_cloud.merge(all_point_clouds, tree.odm_georeferencing_model_laz, rerun=self.rerun())
                        point_cloud.post_point_cloud_steps(args, tree)
                    except Exception as e:
                        log.ODM_WARNING("Could not merge point cloud: %s (skipping)" % str(e))
                else:
                    log.ODM_WARNING("Found merged point cloud in %s" % tree.odm_georeferencing_model_laz)

            self.update_progress(25)

            # Merge crop bounds
            merged_bounds_file = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
            if not io.file_exists(merged_bounds_file) or self.rerun():
                all_bounds = get_submodel_paths(tree.submodels_path, 'odm_georeferencing', 'odm_georeferenced_model.bounds.gpkg')
                log.ODM_INFO("Merging all crop bounds: %s" % all_bounds)

                if len(all_bounds) > 0:
                    # Calculate a new crop area
                    # based on the convex hull of all crop areas of all submodels
                    # (without a buffer, otherwise we are double-cropping)
                    Cropper.merge_bounds(all_bounds, merged_bounds_file, 0)
                else:
                    log.ODM_WARNING("No bounds found for any submodel.")

            # Merge orthophotos
            if args.merge in ['all', 'orthophoto']:
                if not io.dir_exists(tree.odm_orthophoto):
                    system.mkdir_p(tree.odm_orthophoto)

                if not io.file_exists(tree.odm_orthophoto_tif) or self.rerun():
                    all_orthos_and_ortho_cuts = get_all_submodel_paths(tree.submodels_path,
                        os.path.join("odm_orthophoto", "odm_orthophoto_feathered.tif"),
                        os.path.join("odm_orthophoto", "odm_orthophoto_cut.tif"),
                    )

                    if len(all_orthos_and_ortho_cuts) > 1:
                        log.ODM_INFO("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_ortho_cuts))
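
                        # Each pair is a feathered orthophoto (edges blended to
                        # transparent) plus a cutline polygon; merging blends the
                        # feathered rasters along the cutlines so that seams
                        # between submodels are hidden.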
                        # TODO: histogram matching via rasterio
                        # currently parts have different color tones

                        if io.file_exists(tree.odm_orthophoto_tif):
                            os.remove(tree.odm_orthophoto_tif)

                        orthophoto_vars = orthophoto.get_orthophoto_vars(args)
                        orthophoto.merge(all_orthos_and_ortho_cuts, tree.odm_orthophoto_tif, orthophoto_vars)
                        orthophoto.post_orthophoto_steps(args, merged_bounds_file, tree.odm_orthophoto_tif)
                    elif len(all_orthos_and_ortho_cuts) == 1:
                        # Simply copy
                        log.ODM_WARNING("A single orthophoto/cutline pair was found among all submodels.")
                        shutil.copyfile(all_orthos_and_ortho_cuts[0][0], tree.odm_orthophoto_tif)
                    else:
                        log.ODM_WARNING("No orthophoto/cutline pairs were found in any of the submodels. No orthophoto will be generated.")
                else:
                    log.ODM_WARNING("Found merged orthophoto in %s" % tree.odm_orthophoto_tif)

            self.update_progress(75)

            # Merge DEMs
            def merge_dems(dem_filename, human_name):
                if not io.dir_exists(tree.path('odm_dem')):
                    system.mkdir_p(tree.path('odm_dem'))

                dem_file = tree.path("odm_dem", dem_filename)
                if not io.file_exists(dem_file) or self.rerun():
                    all_dems = get_submodel_paths(tree.submodels_path, "odm_dem", dem_filename)
                    log.ODM_INFO("Merging %ss" % human_name)

                    # Merge
                    dem_vars = utils.get_dem_vars(args)

                    eu_map_source = None  # Default

                    # Use DSM's euclidean map for DTMs
                    # (requires the DSM to be computed)
                    if human_name == "DTM":
                        eu_map_source = "dsm"

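                    # In broad strokes, euclidean_merge_dems blends overlapping
                    # DEMs, weighting each raster by its euclidean
                    # distance-to-nodata map so that cells far from a submodel's
                    # edge dominate the blend.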
                    euclidean_merge_dems(all_dems, dem_file, dem_vars, euclidean_map_source=eu_map_source)

                    if io.file_exists(dem_file):
                        # Crop
                        if args.crop > 0:
                            Cropper.crop(merged_bounds_file, dem_file, dem_vars, keep_original=not args.optimize_disk_space)
                        log.ODM_INFO("Created %s" % dem_file)
                    else:
                        log.ODM_WARNING("Cannot merge %s, %s was not created" % (human_name, dem_file))
                else:
                    log.ODM_WARNING("Found merged %s in %s" % (human_name, dem_filename))

            if args.merge in ['all', 'dem'] and args.dsm:
                merge_dems("dsm.tif", "DSM")

            if args.merge in ['all', 'dem'] and args.dtm:
                merge_dems("dtm.tif", "DTM")

            # Stop the pipeline short! We're done.
            self.next_stage = None
        else:
            log.ODM_INFO("Normal dataset, nothing to merge.")
            self.progress = 0.0