Add scripts to setup/split/reconstruct/align a metadataset

pull/753/head
Pau Gargallo 2017-05-16 15:38:12 +02:00 committed by Dakota Benjamin
parent 6d14d145d1
commit 3c20a25084
5 changed files with 247 additions and 0 deletions

View file

@@ -0,0 +1,25 @@
#!/usr/bin/env python

import argparse
import os
import subprocess

from opendm import context


def run_command(args):
    result = subprocess.Popen(args).wait()
    if result != 0:
        raise RuntimeError(result)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Align metadataset submodels')

    parser.add_argument('dataset',
                        help='path to the dataset to be processed')

    args = parser.parse_args()

    command = os.path.join(context.opensfm_path, 'bin', 'opensfm')
    path = os.path.join(args.dataset, 'opensfm')

    run_command([command, 'align_submodels', path])
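
The run_command helper above (repeated verbatim in the other scripts in this commit) waits on the child process and raises a bare RuntimeError carrying only the exit code. For reference, a minimal sketch of the same behaviour using the standard library's check_call, which raises CalledProcessError including the failing command, would be:

    import subprocess

    def run_command(args):
        # check_call waits for the process and raises CalledProcessError
        # (with the command and return code) on a non-zero exit status.
        subprocess.check_call(args)

This is only an illustrative alternative, not what the commit uses.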

View file

@@ -0,0 +1,27 @@
#!/usr/bin/env python

import argparse
import os
import subprocess

from opendm import context


def run_command(args):
    result = subprocess.Popen(args).wait()
    if result != 0:
        raise RuntimeError(result)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Run matching on a metadataset')

    parser.add_argument('dataset',
                        help='path to the dataset to be processed')

    args = parser.parse_args()

    command = os.path.join(context.opensfm_path, 'bin', 'opensfm')
    path = os.path.join(args.dataset, 'opensfm')

    run_command([command, 'extract_metadata', path])
    run_command([command, 'detect_features', path])
    run_command([command, 'match_features', path])
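
These three OpenSfM commands write their results into subfolders of the dataset's opensfm directory. A quick sanity check could look like the sketch below; the folder names exif, features and matches are OpenSfM internals and an assumption here, not something this script defines:

    import os

    opensfm_dir = '/path/to/dataset/opensfm'  # placeholder path
    for sub in ('exif', 'features', 'matches'):
        print(sub, os.path.isdir(os.path.join(opensfm_dir, sub)))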

View file

@@ -0,0 +1,69 @@
#!/usr/bin/env python

import argparse
import logging
import multiprocessing
import os
import subprocess

from opensfm.large import metadataset

from opendm import context

logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)


def run_command(args):
    result = subprocess.Popen(args).wait()
    if result != 0:
        raise RuntimeError(result)


class Reconstructor:
    def __init__(self, command, run_matching):
        self.command = command
        self.run_matching = run_matching

    def __call__(self, submodel_path):
        logger.info("=======================================================")
        logger.info("Reconstructing submodel {}".format(submodel_path))
        logger.info("=======================================================")

        if self.run_matching:
            run_command([self.command, 'extract_metadata', submodel_path])
            run_command([self.command, 'detect_features', submodel_path])
            run_command([self.command, 'match_features', submodel_path])

        run_command([self.command, 'create_tracks', submodel_path])
        run_command([self.command, 'reconstruct', submodel_path])

        logger.info("=======================================================")
        logger.info("Submodel {} reconstructed".format(submodel_path))
        logger.info("=======================================================")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Reconstruct all submodels')

    parser.add_argument('dataset',
                        help='path to the dataset to be processed')
    parser.add_argument('--run-matching',
                        help='Run matching for each submodel',
                        action='store_true')

    args = parser.parse_args()

    path = os.path.join(args.dataset, 'opensfm')
    meta_data = metadataset.MetaDataSet(path)
    command = os.path.join(context.opensfm_path, 'bin', 'opensfm')

    submodel_paths = meta_data.get_submodel_paths()
    reconstructor = Reconstructor(command, args.run_matching)
    processes = meta_data.config['processes']

    if processes == 1:
        for submodel_path in submodel_paths:
            reconstructor(submodel_path)
    else:
        p = multiprocessing.Pool(processes)
        p.map(reconstructor, submodel_paths)
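
When processes (read from the config.yaml written by the setup script below) is greater than one, the submodels are distributed over a multiprocessing.Pool, and p.map blocks until every reconstruction has finished. A small sketch of the same dispatch with an explicit pool shutdown, purely as a stylistic variant and not part of this commit:

    import multiprocessing

    def reconstruct_all(reconstructor, submodel_paths, processes):
        if processes == 1:
            for submodel_path in submodel_paths:
                reconstructor(submodel_path)
        else:
            pool = multiprocessing.Pool(processes)
            try:
                pool.map(reconstructor, submodel_paths)
            finally:
                pool.close()  # no further tasks will be submitted
                pool.join()   # wait for the worker processes to exit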

View file

@@ -0,0 +1,101 @@
#!/usr/bin/env python
"""Setup an ODM metadataset.

A metadataset will be split into multiple submodel folders.
Each submodel is reconstructed independently. Before dense
reconstruction the different submodels are aligned to each
other.
"""

import argparse
import os
import yaml

from opensfm.io import mkdir_p


def is_image_file(filename):
    extensions = {'jpg', 'jpeg', 'png', 'tif', 'tiff', 'pgm', 'pnm', 'gif'}
    return filename.split('.')[-1].lower() in extensions


def create_image_list(image_path, opensfm_path):
    image_files = filter(is_image_file, os.listdir(image_path))

    lines = []
    relpath = os.path.relpath(image_path, opensfm_path)
    for image in image_files:
        lines.append(os.path.join(relpath, image))

    with open(os.path.join(opensfm_path, 'image_list.txt'), 'w') as fout:
        fout.write("\n".join(lines))


def create_config(opensfm_path, args):
    config = {
        "submodels_relpath": "../submodels/opensfm",
        "submodel_relpath_template": "../submodels/submodel_%04d/opensfm",
        "feature_process_size": args.resize_to,
        "feature_min_frames": args.min_num_features,
        "processes": args.num_cores,
        "matching_gps_neighbors": args.matcher_neighbors,
    }
    with open(os.path.join(opensfm_path, 'config.yaml'), 'w') as fout:
        yaml.dump(config, fout, default_flow_style=False)


def parse_command_line():
    parser = argparse.ArgumentParser(description='Setup an ODM metadataset')

    parser.add_argument('dataset',
                        help='path to the dataset to be processed')

    # TODO(pau): reduce redundancy with OpenDroneMap/opendm/config.py

    parser.add_argument('--resize-to',  # currently doesn't support 'orig'
                        metavar='<integer>',
                        default=2400,
                        type=int,
                        help='resizes images by the largest side')

    parser.add_argument('--min-num-features',
                        metavar='<integer>',
                        default=4000,
                        type=int,
                        help=('Minimum number of features to extract per image. '
                              'More features lead to better results but slower '
                              'execution. Default: %(default)s'))

    parser.add_argument('--num-cores',
                        metavar='<positive integer>',
                        default=4,
                        type=int,
                        help=('The maximum number of cores to use. '
                              'Default: %(default)s'))

    parser.add_argument('--matcher-neighbors',
                        type=int,
                        metavar='<integer>',
                        default=8,
                        help='Number of nearest images to pre-match based on GPS '
                             'exif data. Set to 0 to skip pre-matching. '
                             'Neighbors works together with the Distance parameter; '
                             'set both to 0 to not use pre-matching. OpenSfM '
                             'uses both parameters at the same time, while Bundler '
                             'uses only the one that has a value, preferring the '
                             'Neighbors parameter. Default: %(default)s')

    return parser.parse_args()


if __name__ == '__main__':
    args = parse_command_line()

    data_path = args.dataset

    image_path = os.path.join(data_path, 'images')
    opensfm_path = os.path.join(data_path, 'opensfm')

    mkdir_p(opensfm_path)
    create_image_list(image_path, opensfm_path)
    create_config(opensfm_path, args)
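
With the default argument values, the generated opensfm/config.yaml would look roughly like this (yaml.dump writes block style and sorts keys alphabetically; shown only as an illustration of the output):

    feature_min_frames: 4000
    feature_process_size: 2400
    matching_gps_neighbors: 8
    processes: 4
    submodel_relpath_template: ../submodels/submodel_%04d/opensfm
    submodels_relpath: ../submodels/opensfm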

View file

@@ -0,0 +1,25 @@
#!/usr/bin/env python

import argparse
import os
import subprocess

from opendm import context


def run_command(args):
    result = subprocess.Popen(args).wait()
    if result != 0:
        raise RuntimeError(result)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Split metadataset into submodels')

    parser.add_argument('dataset',
                        help='path to the dataset to be processed')

    args = parser.parse_args()

    command = os.path.join(context.opensfm_path, 'bin', 'opensfm')
    path = os.path.join(args.dataset, 'opensfm')

    run_command([command, 'create_submodels', path, '--size', '20', '--dist', '20'])
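
Taken together, the five scripts cover setup, metadataset-level matching, splitting, per-submodel reconstruction and alignment. A minimal end-to-end sketch of the underlying OpenSfM calls, in one plausible order (the ordering and the /path/to/dataset placeholder are assumptions; the commit only defines the individual steps, not how they are chained):

    import os
    import subprocess

    from opendm import context

    dataset = '/path/to/dataset'                     # placeholder
    opensfm_bin = os.path.join(context.opensfm_path, 'bin', 'opensfm')
    path = os.path.join(dataset, 'opensfm')          # created by the setup script

    # metadataset-level matching
    for step in ('extract_metadata', 'detect_features', 'match_features'):
        subprocess.check_call([opensfm_bin, step, path])

    # split into submodels, then align them after each submodel has been
    # reconstructed (see the reconstruction script above for that loop)
    subprocess.check_call([opensfm_bin, 'create_submodels', path, '--size', '20', '--dist', '20'])
    subprocess.check_call([opensfm_bin, 'align_submodels', path])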