Better reruns on split-merge

Former-commit-id: 05385fe677
pull/1161/head
Piero Toffanin 2019-06-06 17:35:14 -04:00
parent 64566a9337
commit f18f4cbf8b
3 changed files with 80 additions and 35 deletions

View file

@@ -27,6 +27,12 @@ class OSFMContext:
else:
log.ODM_WARNING('Found a valid Bundler file in: %s' % destination_bundle_file)
def is_reconstruction_done(self):
    """Return True when the OpenSfM project already holds both reconstruction
    outputs (tracks.csv and reconstruction.json)."""
    required = ('tracks.csv', 'reconstruction.json')
    return all(io.file_exists(os.path.join(self.opensfm_project_path, name))
               for name in required)
def reconstruct(self, rerun=False):
tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json')
@@ -131,6 +137,12 @@ class OSFMContext:
if not io.dir_exists(metadata_dir) or rerun:
self.run('extract_metadata')
def is_feature_matching_done(self):
    """Return True when both the features and matches directories exist,
    i.e. feature extraction and matching have already run."""
    return all(io.dir_exists(self.path(d)) for d in ("features", "matches"))
def feature_matching(self, rerun=False):
features_dir = self.path("features")
matches_dir = self.path("matches")

View file

@@ -27,11 +27,12 @@ class LocalRemoteExecutor:
to use the processing power of the current machine as well as offloading tasks to a
network node.
"""
def __init__(self, nodeUrl):
def __init__(self, nodeUrl, rerun = False):
self.node = Node.from_url(nodeUrl)
self.params = {
'tasks': [],
'threads': []
'threads': [],
'rerun': rerun
}
self.node_online = True
@@ -287,6 +288,10 @@ class Task:
def path(self, *paths):
return os.path.join(self.project_path, *paths)
def touch(self, file):
    """Create (or overwrite) *file* as a small completion-marker file."""
    marker = open(file, 'w')
    try:
        marker.write("Done!\n")
    finally:
        marker.close()
def create_seed_payload(self, paths, touch_files=[]):
paths = filter(os.path.exists, map(lambda p: self.path(p), paths))
outfile = self.path("seed.zip")
@@ -321,7 +326,7 @@ class Task:
except Exception as e:
done(e)
def execute_remote_task(self, done, seed_files = [], seed_touch_files = [], outputs = []):
def execute_remote_task(self, done, seed_files = [], seed_touch_files = [], outputs = [], ):
"""
Run a task by creating a seed file with all files in seed_files, optionally
creating empty files (for flag checks) specified in seed_touch_files
@@ -436,42 +441,67 @@ class ReconstructionTask(Task):
log.ODM_INFO("==================================")
log.ODM_INFO("Local Reconstruction %s" % octx.name())
log.ODM_INFO("==================================")
octx.feature_matching()
octx.reconstruct()
octx.feature_matching(self.params['rerun'])
octx.reconstruct(self.params['rerun'])
def process_remote(self, done):
    """Run feature matching and reconstruction for this submodel on the
    remote node, skipping the (expensive) upload entirely when the OpenSfM
    outputs already exist locally and no rerun was requested.

    :param done: completion callback, invoked (optionally with an error)
                 when the task finishes or is skipped.
    """
    # NOTE: the original span interleaved the removed unconditional
    # execute_remote_task call with the added guarded version; this is the
    # resolved (post-commit) form.
    octx = OSFMContext(self.path("opensfm"))
    if not octx.is_feature_matching_done() or not octx.is_reconstruction_done() or self.params['rerun']:
        self.execute_remote_task(done, seed_files=["opensfm/exif",
                                                   "opensfm/camera_models.json",
                                                   "opensfm/reference_lla.json"],
                                 seed_touch_files=["opensfm/split_merge_stop_at_reconstruction.txt"],
                                 outputs=["opensfm/matches", "opensfm/features",
                                          "opensfm/reconstruction.json",
                                          "opensfm/tracks.csv"])
    else:
        log.ODM_INFO("Already processed feature matching and reconstruction for %s" % octx.name())
        done()
class ToolchainTask(Task):
    """Runs the full ODM toolchain for a submodel, either locally or on a
    remote processing node. A "toolchain_completed.txt" marker file makes
    reruns cheap: work is skipped when the marker exists and no rerun was
    requested.

    NOTE: the original span interleaved removed and added diff lines; this
    is the resolved (post-commit) form of both methods.
    """

    def process_local(self):
        completed_file = self.path("toolchain_completed.txt")
        submodel_name = os.path.basename(self.project_path)
        submodels_path = os.path.abspath(self.path(".."))
        project_name = os.path.basename(os.path.abspath(os.path.join(submodels_path, "..")))
        argv = get_submodel_argv(project_name, submodels_path, submodel_name)

        if not os.path.exists(completed_file) or self.params['rerun']:
            log.ODM_INFO("=============================")
            log.ODM_INFO("Local Toolchain %s" % self)
            log.ODM_INFO("=============================")

            # Re-run the ODM toolchain on the submodel
            system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy())

            # This will only get executed if the command above succeeds
            self.touch(completed_file)
        else:
            log.ODM_INFO("Already processed toolchain for %s" % submodel_name)

    def process_remote(self, done):
        completed_file = self.path("toolchain_completed.txt")
        submodel_name = os.path.basename(self.project_path)

        def handle_result(error=None):
            # Mark task as completed if no error
            if error is None:
                self.touch(completed_file)
            done(error=error)

        if not os.path.exists(completed_file) or self.params['rerun']:
            self.execute_remote_task(handle_result, seed_files=["opensfm/camera_models.json",
                                                                "opensfm/reference_lla.json",
                                                                "opensfm/reconstruction.json",
                                                                "opensfm/tracks.csv"],
                                     seed_touch_files=["opensfm/features/empty",
                                                       "opensfm/matches/empty",
                                                       "opensfm/exif/empty"],
                                     outputs=["odm_orthophoto/odm_orthophoto.tif",
                                              "odm_orthophoto/cutline.gpkg",
                                              "odm_dem",
                                              "odm_georeferencing"])
        else:
            log.ODM_INFO("Already processed toolchain for %s" % submodel_name)
            handle_result()

View file

@@ -96,7 +96,7 @@ class ODMSplitStage(types.ODM_Stage):
log.ODM_INFO("Reconstructing %s" % sp)
OSFMContext(sp).reconstruct(self.rerun())
else:
lre = LocalRemoteExecutor(args.sm_cluster)
lre = LocalRemoteExecutor(args.sm_cluster, self.rerun())
lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths])
lre.run_reconstruction()
@@ -117,6 +117,10 @@ class ODMSplitStage(types.ODM_Stage):
unaligned_recon = sp_octx.path('reconstruction.unaligned.json')
main_recon = sp_octx.path('reconstruction.json')
if io.file_exists(main_recon) and io.file_exists(unaligned_recon) and not self.rerun():
log.ODM_INFO("Submodel %s has already been aligned." % sp_octx.name())
continue
if not io.file_exists(aligned_recon):
log.ODM_WARNING("Submodel %s does not have an aligned reconstruction (%s). "
"This could mean that the submodel could not be reconstructed "
@@ -153,8 +157,7 @@ class ODMSplitStage(types.ODM_Stage):
# Restore max_concurrency value
args.max_concurrency = orig_max_concurrency
with open(split_done_file, 'w') as fout:
fout.write("Split done!\n")
octx.touch(split_done_file)
else:
log.ODM_WARNING('Found a split done file in: %s' % split_done_file)
else: