"""
OpenSfM related utils
"""

import os, shutil, sys, json, argparse, copy
import yaml
import numpy as np
import pyproj
from pyproj import CRS

from opendm import io
from opendm import log
from opendm import system
from opendm import context
from opendm import camera
from opendm import location
from opendm.photo import find_largest_photo_dims, find_largest_photo
from opensfm.large import metadataset
from opensfm.large import tools
from opensfm.actions import undistort
from opensfm.dataset import DataSet
from opensfm.types import Reconstruction
from opensfm import report
from opendm.multispectral import get_photos_by_band
from opendm.gpu import has_popsift_and_can_handle_texsize, has_gpu
from opensfm import multiview, exif
from opensfm.actions.export_geocoords import _transform


class OSFMContext:
    def __init__(self, opensfm_project_path):
        self.opensfm_project_path = opensfm_project_path

    def run(self, command):
        osfm_bin = os.path.join(context.opensfm_path, 'bin', 'opensfm')
        system.run('"%s" %s "%s"' %
                   (osfm_bin, command, self.opensfm_project_path))
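
    # Usage sketch (hypothetical dataset path):
    #   OSFMContext("/datasets/brighton/opensfm").run("detect_features")
    # expands to: "<context.opensfm_path>/bin/opensfm" detect_features "/datasets/brighton/opensfm"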

    def is_reconstruction_done(self):
        tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
        reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json')

        return io.file_exists(tracks_file) and io.file_exists(reconstruction_file)

    def create_tracks(self, rerun=False):
        tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')

        if not io.file_exists(tracks_file) or rerun:
            self.run('create_tracks')
        else:
            log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' % tracks_file)

    def reconstruct(self, rolling_shutter_correct=False, merge_partial=False, rerun=False):
        reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json')
        if not io.file_exists(reconstruction_file) or rerun:
            self.run('reconstruct')
            if merge_partial:
                self.check_merge_partial_reconstructions()
        else:
            log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' % reconstruction_file)

        # Check that a reconstruction file has been created
        if not self.reconstructed():
            raise system.ExitException("The program could not process this dataset using the current settings. "
                                       "Check that the images have enough overlap, "
                                       "that there are enough recognizable features "
                                       "and that the images are in focus. "
                                       "The program will now exit.")

        if rolling_shutter_correct:
            rs_file = self.path('rs_done.txt')

            if not io.file_exists(rs_file) or rerun:
                self.run('rs_correct')

                log.ODM_INFO("Re-running the reconstruction pipeline")

                self.match_features(True)
                self.create_tracks(True)
                self.reconstruct(rolling_shutter_correct=False, merge_partial=merge_partial, rerun=True)

                self.touch(rs_file)
            else:
                log.ODM_WARNING("Rolling shutter correction already applied")

    def check_merge_partial_reconstructions(self):
        if self.reconstructed():
            data = DataSet(self.opensfm_project_path)
            reconstructions = data.load_reconstruction()
            tracks_manager = data.load_tracks_manager()

            if len(reconstructions) > 1:
                log.ODM_WARNING("Multiple reconstructions detected (%s), this might be an indicator that some areas did not have sufficient overlap" % len(reconstructions))
                log.ODM_INFO("Attempting merge")

                merged = Reconstruction()
                merged.set_reference(reconstructions[0].reference)

                for ix_r, rec in enumerate(reconstructions):
                    if merged.reference != rec.reference:
                        # Should never happen
                        continue

                    log.ODM_INFO("Merging reconstruction %s" % ix_r)

                    for camera in rec.cameras.values():
                        merged.add_camera(camera)

                    for point in rec.points.values():
                        try:
                            new_point = merged.create_point(point.id, point.coordinates)
                            new_point.color = point.color
                        except RuntimeError as e:
                            log.ODM_WARNING("Cannot merge point id %s (%s)" % (point.id, str(e)))
                            continue

                    for shot in rec.shots.values():
                        merged.add_shot(shot)
                        try:
                            obsdict = tracks_manager.get_shot_observations(shot.id)
                        except RuntimeError:
                            log.ODM_WARNING("Shot id %s missing from tracks_manager!" % shot.id)
                            continue

                        for track_id, obs in obsdict.items():
                            if track_id in merged.points:
                                merged.add_observation(shot.id, track_id, obs)

                data.save_reconstruction([merged])

    def setup(self, args, images_path, reconstruction, append_config=[], rerun=False):
        """
        Set up an OpenSfM project
        """
        if rerun and io.dir_exists(self.opensfm_project_path):
            shutil.rmtree(self.opensfm_project_path)

        if not io.dir_exists(self.opensfm_project_path):
            system.mkdir_p(self.opensfm_project_path)

        list_path = os.path.join(self.opensfm_project_path, 'image_list.txt')
        if not io.file_exists(list_path) or rerun:
            if reconstruction.multi_camera:
                photos = get_photos_by_band(reconstruction.multi_camera, args.primary_band)
                if len(photos) < 1:
                    raise Exception("Not enough images in selected band %s" % args.primary_band.lower())
                log.ODM_INFO("Reconstruction will use %s images from %s band" % (len(photos), args.primary_band.lower()))
            else:
                photos = reconstruction.photos

            # create file list
            num_zero_alt = 0
            has_alt = True
            has_gps = False
            with open(list_path, 'w') as fout:
                for photo in photos:
                    if photo.altitude is None:
                        has_alt = False
                    elif photo.altitude == 0:
                        num_zero_alt += 1

                    if photo.latitude is not None and photo.longitude is not None:
                        has_gps = True

                    fout.write('%s\n' % os.path.join(images_path, photo.filename))

            # check 0 altitude images percentage when has_alt is True
            if has_alt and num_zero_alt / len(photos) > 0.05:
                log.ODM_WARNING("More than 5% of images have zero altitude, this might be an indicator that the images have no altitude information")
                has_alt = False

            # check for image_groups.txt (split-merge)
            image_groups_file = os.path.join(args.project_path, "image_groups.txt")
            if 'split_image_groups_is_set' in args:
                image_groups_file = os.path.abspath(args.split_image_groups)

            if io.file_exists(image_groups_file):
                dst_groups_file = os.path.join(self.opensfm_project_path, "image_groups.txt")
                io.copy(image_groups_file, dst_groups_file)
                log.ODM_INFO("Copied %s to %s" % (image_groups_file, dst_groups_file))

            # check for cameras
            if args.cameras:
                try:
                    camera_overrides = camera.get_opensfm_camera_models(args.cameras)
                    with open(os.path.join(self.opensfm_project_path, "camera_models_overrides.json"), 'w') as f:
                        f.write(json.dumps(camera_overrides))
                    log.ODM_INFO("Wrote camera_models_overrides.json to OpenSfM directory")
                except Exception as e:
                    log.ODM_WARNING("Cannot set camera_models_overrides.json: %s" % str(e))

            # Check image masks
            masks = []
            for p in photos:
                if p.mask is not None:
                    masks.append((p.filename, os.path.join(images_path, p.mask)))

            if masks:
                log.ODM_INFO("Found %s image masks" % len(masks))
                with open(os.path.join(self.opensfm_project_path, "mask_list.txt"), 'w') as f:
                    for fname, mask in masks:
                        f.write("{} {}\n".format(fname, mask))

            # Compute feature_process_size
            feature_process_size = 2048  # default

            feature_quality_scale = {
                'ultra': 1,
                'high': 0.5,
                'medium': 0.25,
                'low': 0.125,
                'lowest': 0.0675
            }

            max_dims = find_largest_photo_dims(photos)

            if max_dims is not None:
                w, h = max_dims
                max_dim = max(w, h)
                log.ODM_INFO("Maximum photo dimensions: %spx" % str(max_dim))

                lower_limit = 320
                upper_limit = 4480
                megapixels = (w * h) / 1e6
                multiplier = 1

                if megapixels < 2:
                    multiplier = 2
                elif megapixels > 42:
                    multiplier = 0.5

                factor = min(1, feature_quality_scale[args.feature_quality] * multiplier)
                feature_process_size = min(upper_limit, max(lower_limit, int(max_dim * factor)))

                log.ODM_INFO("Photo dimensions for feature extraction: %ipx" % feature_process_size)
            else:
                log.ODM_WARNING("Cannot compute max image dimensions, going with defaults")

            # create config file for OpenSfM
            if args.matcher_neighbors > 0:
                matcher_graph_rounds = 0
                matcher_neighbors = args.matcher_neighbors
            else:
                matcher_graph_rounds = 50
                matcher_neighbors = 0

            # Always use matcher-neighbors if less than 4 pictures
            if len(photos) <= 3:
                matcher_graph_rounds = 0
                matcher_neighbors = 3

            config = [
                "use_exif_size: no",
                "flann_algorithm: KDTREE",  # more stable, faster than KMEANS
                "feature_process_size: %s" % feature_process_size,
                "feature_min_frames: %s" % args.min_num_features,
                "processes: %s" % args.max_concurrency,
                "matching_gps_neighbors: %s" % matcher_neighbors,
                "matching_gps_distance: 0",
                "matching_graph_rounds: %s" % matcher_graph_rounds,
                "optimize_camera_parameters: %s" % ('no' if args.use_fixed_camera_params else 'yes'),
                "reconstruction_algorithm: %s" % (args.sfm_algorithm),
                "undistorted_image_format: tif",
                "bundle_outlier_filtering_type: AUTO",
                "sift_peak_threshold: 0.066",
                "align_orientation_prior: vertical",
                "triangulation_type: ROBUST",
                "retriangulation_ratio: 2",
            ]
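
            # These entries are joined with newlines at the end of this method and written
            # verbatim to OpenSfM's config.yaml. Illustrative excerpt (values vary per dataset):
            #   use_exif_size: no
            #   feature_process_size: 2048
            #   matching_gps_neighbors: 0
            #   matching_graph_rounds: 50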

            if args.matcher_order > 0:
                if not reconstruction.is_georeferenced():
                    config.append("matching_order_neighbors: %s" % args.matcher_order)
                else:
                    log.ODM_WARNING("Georeferenced reconstruction, ignoring --matcher-order")

            if args.camera_lens != 'auto':
                config.append("camera_projection_type: %s" % args.camera_lens.upper())

            matcher_type = args.matcher_type
            feature_type = args.feature_type.upper()

            osfm_matchers = {
                "bow": "WORDS",
                "flann": "FLANN",
                "bruteforce": "BRUTEFORCE"
            }

            if not has_gps and 'matcher_type_is_set' not in args:
                log.ODM_INFO("No GPS information, using BOW matching by default (you can override this by setting --matcher-type explicitly)")
                matcher_type = "bow"

            if matcher_type == "bow":
                # Cannot use anything other than HAHOG with BOW
                if feature_type != "HAHOG":
                    log.ODM_WARNING("Using BOW matching, will use HAHOG feature type, not SIFT")
                    feature_type = "HAHOG"

            config.append("matcher_type: %s" % osfm_matchers[matcher_type])

            # GPU acceleration?
            if has_gpu(args) and max_dims is not None:
                w, h = max_dims
                if w > h:
                    h = int((h / w) * feature_process_size)
                    w = int(feature_process_size)
                else:
                    w = int((w / h) * feature_process_size)
                    h = int(feature_process_size)
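
                # Worked example of the scaling above (hypothetical 6000x4000 landscape photo,
                # feature_process_size = 2048): w > h, so h = int((4000/6000) * 2048) = 1365, w = 2048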

                if has_popsift_and_can_handle_texsize(w, h) and feature_type == "SIFT":
                    log.ODM_INFO("Using GPU for extracting SIFT features")
                    feature_type = "SIFT_GPU"
                    self.gpu_sift_feature_extraction = True

            config.append("feature_type: %s" % feature_type)

            if has_alt:
                log.ODM_INFO("Altitude data detected, enabling it for GPS alignment")
                config.append("use_altitude_tag: yes")

            gcp_path = reconstruction.gcp.gcp_path
            if has_alt or gcp_path:
                config.append("align_method: auto")
            else:
                config.append("align_method: orientation_prior")

            if args.use_hybrid_bundle_adjustment:
                log.ODM_INFO("Enabling hybrid bundle adjustment")
                config.append("bundle_interval: 100")          # Bundle after adding 'bundle_interval' cameras
                config.append("bundle_new_points_ratio: 1.2")  # Bundle when (new points) / (bundled points) > bundle_new_points_ratio
                config.append("local_bundle_radius: 1")        # Max image graph distance for images to be included in local bundle adjustment
            else:
                config.append("local_bundle_radius: 0")

            if gcp_path:
                config.append("bundle_use_gcp: yes")
                if not args.force_gps:
                    config.append("bundle_use_gps: no")
                else:
                    config.append("bundle_compensate_gps_bias: yes")

                io.copy(gcp_path, self.path("gcp_list.txt"))

            config = config + append_config

            # write config file
            log.ODM_INFO(config)
            config_filename = self.get_config_file_path()
            with open(config_filename, 'w') as fout:
                fout.write("\n".join(config))

            # We impose our own reference_lla
            if reconstruction.is_georeferenced():
                self.write_reference_lla(reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset, reconstruction.georef.proj4())
        else:
            log.ODM_WARNING("%s already exists, not rerunning OpenSfM setup" % list_path)

    def get_config_file_path(self):
        return os.path.join(self.opensfm_project_path, 'config.yaml')

    def reconstructed(self):
        if not io.file_exists(self.path("reconstruction.json")):
            return False

        with open(self.path("reconstruction.json"), 'r') as f:
            # An empty reconstruction serializes to "[]"; checking just the
            # first line avoids parsing a potentially large JSON file
            return f.readline().strip() != "[]"

    def extract_metadata(self, rerun=False):
        metadata_dir = self.path("exif")
        if not io.dir_exists(metadata_dir) or rerun:
            self.run('extract_metadata')

    def photos_to_metadata(self, photos, rolling_shutter, rolling_shutter_readout, rerun=False):
        metadata_dir = self.path("exif")

        if io.dir_exists(metadata_dir) and not rerun:
            log.ODM_WARNING("%s already exists, not rerunning photo to metadata" % metadata_dir)
            return

        if io.dir_exists(metadata_dir):
            shutil.rmtree(metadata_dir)
        os.makedirs(metadata_dir, exist_ok=True)

        camera_models = {}
        data = DataSet(self.opensfm_project_path)

        for p in photos:
            d = p.to_opensfm_exif(rolling_shutter, rolling_shutter_readout)
            with open(os.path.join(metadata_dir, "%s.exif" % p.filename), 'w') as f:
                f.write(json.dumps(d, indent=4))

            camera_id = p.camera_id()
            if camera_id not in camera_models:
                camera = exif.camera_from_exif_metadata(d, data)
                camera_models[camera_id] = camera

        # Override any camera specified in the camera models overrides file.
        if data.camera_models_overrides_exists():
            overrides = data.load_camera_models_overrides()
            if "all" in overrides:
                for key in camera_models:
                    camera_models[key] = copy.copy(overrides["all"])
                    camera_models[key].id = key
            else:
                for key, value in overrides.items():
                    camera_models[key] = value

        data.save_camera_models(camera_models)

    def is_feature_matching_done(self):
        features_dir = self.path("features")
        matches_dir = self.path("matches")

        return io.dir_exists(features_dir) and io.dir_exists(matches_dir)

    def feature_matching(self, rerun=False):
        features_dir = self.path("features")

        if not io.dir_exists(features_dir) or rerun:
            try:
                self.run('detect_features')
            except system.SubprocessException as e:
                # Sometimes feature extraction by GPU can fail
                # for various reasons, so before giving up
                # we try to fall back to CPU
                if hasattr(self, 'gpu_sift_feature_extraction'):
                    log.ODM_WARNING("GPU SIFT extraction failed, maybe the graphics card is not supported? Attempting fallback to CPU")
                    self.update_config({'feature_type': "SIFT"})
                    if os.path.exists(features_dir):
                        shutil.rmtree(features_dir)
                    self.run('detect_features')
                else:
                    raise e
        else:
            log.ODM_WARNING('Detect features already done: %s exists' % features_dir)

        self.match_features(rerun)

    def match_features(self, rerun=False):
        matches_dir = self.path("matches")
        if not io.dir_exists(matches_dir) or rerun:
            self.run('match_features')
        else:
            log.ODM_WARNING('Match features already done: %s exists' % matches_dir)

    def align_reconstructions(self, rerun):
        alignment_file = self.path('alignment_done.txt')
        if not io.file_exists(alignment_file) or rerun:
            log.ODM_INFO("Aligning submodels...")
            meta_data = metadataset.MetaDataSet(self.opensfm_project_path)
            reconstruction_shots = tools.load_reconstruction_shots(meta_data)
            transformations = tools.align_reconstructions(reconstruction_shots,
                                                          tools.partial_reconstruction_name,
                                                          False)
            tools.apply_transformations(transformations)

            self.touch(alignment_file)
        else:
            log.ODM_WARNING('Found an alignment done progress file in: %s' % alignment_file)

    def touch(self, file):
        with open(file, 'w') as fout:
            fout.write("Done!\n")

    def path(self, *paths):
        return os.path.join(self.opensfm_project_path, *paths)
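
    # path() is a small helper used throughout the class,
    # e.g. self.path("stats", "stats.json") -> <opensfm_project_path>/stats/stats.json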

    def extract_cameras(self, output, rerun=False):
        if not os.path.exists(output) or rerun:
            try:
                reconstruction_file = self.path("reconstruction.json")
                with open(output, 'w') as fout:
                    fout.write(json.dumps(camera.get_cameras_from_opensfm(reconstruction_file), indent=4))
            except Exception as e:
                log.ODM_WARNING("Cannot export cameras to %s. %s." % (output, str(e)))
        else:
            log.ODM_INFO("Already extracted cameras")

    def convert_and_undistort(self, rerun=False, imageFilter=None, image_list=None, runId="nominal"):
        log.ODM_INFO("Undistorting %s ..." % self.opensfm_project_path)
        done_flag_file = self.path("undistorted", "%s_done.txt" % runId)

        if not io.file_exists(done_flag_file) or rerun:
            ds = DataSet(self.opensfm_project_path)

            if image_list is not None:
                ds._set_image_list(image_list)

            undistort.run_dataset(ds, "reconstruction.json",
                                  0, None, "undistorted", imageFilter)

            self.touch(done_flag_file)
        else:
            log.ODM_WARNING("Already undistorted (%s)" % runId)

    def restore_reconstruction_backup(self):
        if os.path.exists(self.recon_backup_file()):
            # This time export the actual reconstruction.json
            # (containing only the primary band)
            if os.path.exists(self.recon_file()):
                os.remove(self.recon_file())
            os.replace(self.recon_backup_file(), self.recon_file())
            log.ODM_INFO("Restored reconstruction.json")

    def backup_reconstruction(self):
        if os.path.exists(self.recon_backup_file()):
            os.remove(self.recon_backup_file())

        log.ODM_INFO("Backing up reconstruction")
        shutil.copyfile(self.recon_file(), self.recon_backup_file())

    def recon_backup_file(self):
        return self.path("reconstruction.backup.json")

    def recon_file(self):
        return self.path("reconstruction.json")

    def add_shots_to_reconstruction(self, p2s):
        with open(self.recon_file()) as f:
            reconstruction = json.loads(f.read())

        # Augment reconstruction.json
        for recon in reconstruction:
            shots = recon['shots']
            sids = list(shots)

            for shot_id in sids:
                secondary_photos = p2s.get(shot_id)
                if secondary_photos is None:
                    log.ODM_WARNING("Cannot find secondary photos for %s" % shot_id)
                    continue

                for p in secondary_photos:
                    shots[p.filename] = shots[shot_id]

        with open(self.recon_file(), 'w') as f:
            f.write(json.dumps(reconstruction))

    def update_config(self, cfg_dict):
        cfg_file = self.get_config_file_path()
        log.ODM_INFO("Updating %s" % cfg_file)
        if os.path.exists(cfg_file):
            try:
                with open(cfg_file) as fin:
                    cfg = yaml.safe_load(fin)
                for k, v in cfg_dict.items():
                    cfg[k] = v
                    log.ODM_INFO("%s: %s" % (k, v))
                with open(cfg_file, 'w') as fout:
                    fout.write(yaml.dump(cfg, default_flow_style=False))
            except Exception as e:
                log.ODM_WARNING("Cannot update configuration file %s: %s" % (cfg_file, str(e)))
        else:
            log.ODM_WARNING("Tried to update configuration, but %s does not exist." % cfg_file)

    def export_stats(self, rerun=False):
        log.ODM_INFO("Export reconstruction stats")
        stats_path = self.path("stats", "stats.json")
        if not os.path.exists(stats_path) or rerun:
            self.run("compute_statistics --diagram_max_points 100000")
        else:
            log.ODM_WARNING("Found existing reconstruction stats %s" % stats_path)

    def export_report(self, report_path, odm_stats, rerun=False):
        log.ODM_INFO("Exporting report to %s" % report_path)

        osfm_report_path = self.path("stats", "report.pdf")
        if not os.path.exists(report_path) or rerun:
            data = DataSet(self.opensfm_project_path)
            pdf_report = report.Report(data, odm_stats)
            pdf_report.generate_report()
            pdf_report.save_report("report.pdf")

            if os.path.exists(osfm_report_path):
                if os.path.exists(report_path):
                    os.unlink(report_path)
                shutil.move(osfm_report_path, report_path)
            else:
                log.ODM_WARNING("Report could not be generated")
        else:
            log.ODM_WARNING("Report %s already exported" % report_path)

    def write_reference_lla(self, offset_x, offset_y, proj4):
        reference_lla = self.path("reference_lla.json")

        longlat = CRS.from_epsg("4326")
        lon, lat = location.transform2(CRS.from_proj4(proj4), longlat, offset_x, offset_y)

        with open(reference_lla, 'w') as f:
            f.write(json.dumps({
                'latitude': lat,
                'longitude': lon,
                'altitude': 0.0
            }, indent=4))

        log.ODM_INFO("Wrote reference_lla.json")

    def ground_control_points(self, proj4):
        """
        Load ground control point information.
        """
        gcp_stats_file = self.path("stats", "ground_control_points.json")

        if not io.file_exists(gcp_stats_file):
            return []

        gcps_stats = {}
        try:
            with open(gcp_stats_file) as f:
                gcps_stats = json.loads(f.read())
        except Exception:
            log.ODM_INFO("Cannot parse %s" % gcp_stats_file)

        if not gcps_stats:
            return []

        ds = DataSet(self.opensfm_project_path)
        reference = ds.load_reference()
        projection = pyproj.Proj(proj4)

        result = []
        for gcp in gcps_stats:
            geocoords = _transform(gcp['coordinates'], reference, projection)
            result.append({
                'id': gcp['id'],
                'observations': gcp['observations'],
                'coordinates': geocoords,
                'error': gcp['error']
            })

        return result

    def name(self):
        return os.path.basename(os.path.abspath(self.path("..")))


def get_submodel_argv(args, submodels_path=None, submodel_name=None):
    """
    Gets argv for a submodel starting from the args passed to the application startup.
    Additionally, if project_name, submodels_path and submodel_name are passed, the function
    handles the <project name> value and --project-path detection / override.
    When all arguments are set to None, --project-path and project name are always removed.

    :return the same as argv, but removing references to --split,
        setting/replacing --project-path and name
        removing --rerun-from, --rerun, --rerun-all, --sm-cluster
        removing --pc-las, --pc-csv, --pc-ept, --tiles flags (processing these is wasteful)
        adding --orthophoto-cutline
        adding --dem-euclidean-map
        adding --skip-3dmodel (split-merge does not support 3D model merging)
        tweaking --crop if necessary (DEM merging makes assumptions about the area of DEMs and their
            euclidean maps that require cropping; if cropping is skipped, this leads to errors)
        removing --gcp (the GCP path, if specified, is always "gcp_list.txt")
        reading the contents of --cameras
        reading the contents of --boundary
    """
    assure_always = ['orthophoto_cutline', 'dem_euclidean_map', 'skip_3dmodel', 'skip_report']
    remove_always = ['split', 'split_overlap', 'rerun_from', 'rerun', 'gcp', 'end_with', 'sm_cluster', 'rerun_all', 'pc_csv', 'pc_las', 'pc_ept', 'tiles', 'copy_to', 'cog']
    read_json_always = ['cameras', 'boundary']

    argv = sys.argv

    # Startup script (/path/to/run.py)
    startup_script = argv[0]

    # On Windows, make sure we always invoke the "run.bat" file
    if sys.platform == 'win32':
        startup_script_dir = os.path.dirname(startup_script)
        startup_script = os.path.join(startup_script_dir, "run")

    result = [startup_script]

    args_dict = vars(args).copy()
    set_keys = [k[:-len("_is_set")] for k in args_dict.keys() if k.endswith("_is_set")]

    # Handle project name and project path (special case)
    if "name" in set_keys:
        del args_dict["name"]
        set_keys.remove("name")

    if "project_path" in set_keys:
        del args_dict["project_path"]
        set_keys.remove("project_path")

    # Remove parameters
    set_keys = [k for k in set_keys if k not in remove_always]

    # Assure parameters
    for k in assure_always:
        if k not in set_keys:
            set_keys.append(k)
            args_dict[k] = True

    # Read JSON always
    for k in read_json_always:
        if k in set_keys:
            try:
                if isinstance(args_dict[k], str):
                    args_dict[k] = io.path_or_json_string_to_dict(args_dict[k])
                if isinstance(args_dict[k], dict):
                    args_dict[k] = json.dumps(args_dict[k])
            except ValueError as e:
                log.ODM_WARNING("Cannot parse/read JSON: {}".format(str(e)))

    # Handle crop (cannot be zero for split/merge)
    if "crop" in set_keys:
        crop_value = float(args_dict["crop"])
        if crop_value == 0:
            crop_value = 0.015625
        args_dict["crop"] = crop_value

    # Populate result
    for k in set_keys:
        result.append("--%s" % k.replace("_", "-"))

        # No second value for booleans
        if isinstance(args_dict[k], bool) and args_dict[k]:
            continue

        result.append(str(args_dict[k]))

    if submodels_path:
        result.append("--project-path")
        result.append(submodels_path)

    if submodel_name:
        result.append(submodel_name)

    return result
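
# Illustrative transformation (hypothetical flags; exact ordering follows the args dict):
#   original invocation: run.py --split 200 --crop 0 --project-path /datasets brighton
#   get_submodel_argv(args, "/datasets/brighton/submodels", "submodel_0000") ->
#     [".../run.py", "--crop", "0.015625", "--orthophoto-cutline", "--dem-euclidean-map",
#      "--skip-3dmodel", "--skip-report", "--project-path", "/datasets/brighton/submodels", "submodel_0000"]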


def get_submodel_args_dict(args):
    submodel_argv = get_submodel_argv(args)

    result = {}
    i = 0
    while i < len(submodel_argv):
        arg = submodel_argv[i]
        next_arg = None if i == len(submodel_argv) - 1 else submodel_argv[i + 1]

        if next_arg and arg.startswith("--"):
            if next_arg.startswith("--"):
                result[arg[2:]] = True
            else:
                result[arg[2:]] = next_arg
                i += 1
        elif arg.startswith("--"):
            result[arg[2:]] = True

        i += 1

    return result
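
# e.g. ["run.py", "--crop", "0.015625", "--skip-3dmodel"] ->
#   {"crop": "0.015625", "skip-3dmodel": True}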


def get_submodel_paths(submodels_path, *paths):
    """
    :return Existing paths for all submodels
    """
    result = []
    if not os.path.exists(submodels_path):
        return result

    for f in os.listdir(submodels_path):
        if f.startswith('submodel'):
            p = os.path.join(submodels_path, f, *paths)
            if os.path.exists(p):
                result.append(p)
            else:
                log.ODM_WARNING("Missing %s from submodel %s" % (p, f))

    return result


def get_all_submodel_paths(submodels_path, *all_paths):
    """
    :return Existing, multiple paths for all submodels as a nested list (all or nothing for each submodel).
        If a single file is missing from the submodel, no files are returned for that submodel.

        (i.e. get_all_submodel_paths("path/", "odm_orthophoto.tif", "dem.tif")) -->
            [["path/submodel_0000/odm_orthophoto.tif", "path/submodel_0000/dem.tif"],
             ["path/submodel_0001/odm_orthophoto.tif", "path/submodel_0001/dem.tif"]]
    """
    result = []
    if not os.path.exists(submodels_path):
        return result

    for f in os.listdir(submodels_path):
        if f.startswith('submodel'):
            all_found = True

            for ap in all_paths:
                p = os.path.join(submodels_path, f, ap)
                if not os.path.exists(p):
                    log.ODM_WARNING("Missing %s from submodel %s" % (p, f))
                    all_found = False

            if all_found:
                result.append([os.path.join(submodels_path, f, ap) for ap in all_paths])

    return result


def is_submodel(opensfm_root):
    # A bit hackish, but works without introducing additional markers / flags
    # Look at the path of the opensfm directory and see if "submodel_" is part of it
    parts = os.path.abspath(opensfm_root).split(os.path.sep)

    return (len(parts) >= 2 and parts[-2][:9] == "submodel_") or \
           os.path.isfile(os.path.join(opensfm_root, "split_merge_stop_at_reconstruction.txt")) or \
           os.path.isfile(os.path.join(opensfm_root, "features", "empty"))