Update split-merge to latest changes

pull/753/head
Dakota Benjamin 2018-02-05 12:46:59 -05:00
rodzic f6881a769e
commit eb016aecb9
8 zmienionych plików z 211 dodań i 10 usunięć

Wyświetl plik

@@ -1,16 +1,23 @@
#!/usr/bin/env python
import argparse
import logging
import os
import subprocess
from opendm import context
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO)
def run_command(args):
    """Run an external command and block until it finishes.

    args -- argument list handed directly to subprocess.Popen (no shell).
    Raises RuntimeError carrying the exit code when the command fails.
    """
    result = subprocess.Popen(args).wait()
    if result != 0:
        # Log before raising: in the original the error log sat after the
        # raise statement and could never execute.
        logger.error("The command '{}' exited with return value {}".format(
            ' '.join(args), result))
        raise RuntimeError(result)
if __name__ == "__main__":

Wyświetl plik

@@ -0,0 +1,140 @@
from opendm import io
from opendm import log
from opendm import system
import argparse
from osgeo import ogr
import os
from opensfm.large import metadataset
def create_bounds_file(clusters_geojson_path):
    """Write a bounds.geojson of one convex hull per submodel cluster.

    Reads the cluster points from *clusters_geojson_path* (each feature is
    expected to carry an integer 'submodel' field -- TODO confirm against the
    OpenSfM output), computes the convex hull of each cluster so the covered
    area has no holes, and writes the hulls (with an integer 'ID' field) to
    bounds.geojson next to the input file.

    Returns the path of the generated bounds.geojson.
    """
    driver = ogr.GetDriverByName('GeoJSON')
    ds = driver.Open(clusters_geojson_path, 0)  # 0 = read-only
    in_layer = ds.GetLayer()

    # Output goes to <dir of input>/bounds.geojson; recreate it from scratch.
    out_path = io.extract_path_from_file(clusters_geojson_path)
    bounds_geojson_path = os.path.join(out_path, 'bounds.geojson')
    if os.path.exists(bounds_geojson_path):
        driver.DeleteDataSource(bounds_geojson_path)
    out_ds = driver.CreateDataSource(bounds_geojson_path)
    out_layer = out_ds.CreateLayer("bounds.geojson", geom_type=ogr.wkbPolygon)
    out_layer.CreateField(ogr.FieldDefn('ID', ogr.OFTInteger))
    # NOTE: the original also bound an identical, unused 'layer_def'; dropped.
    feature_def = in_layer.GetLayerDefn()

    # Number of submodels = highest 'submodel' id seen + 1 (ids are 0-based).
    num_clusters = 0
    for in_feat in in_layer:
        x = in_feat.GetFieldAsInteger('submodel')
        if x > num_clusters:
            num_clusters = x
    num_clusters += 1
    log.ODM_DEBUG("Number of clusters: {}".format(num_clusters))
    in_layer.ResetReading()

    # One convex hull per cluster, collected into a single geometry collection.
    hull_collection = ogr.Geometry(ogr.wkbGeometryCollection)
    for i in range(num_clusters):
        geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
        for in_feat in in_layer:
            if in_feat.GetFieldAsInteger('submodel') == i:
                geomcol.AddGeometry(in_feat.GetGeometryRef())
        in_layer.ResetReading()
        hull_collection.AddGeometry(geomcol.ConvexHull())

    # Emit each hull as a feature whose first field ('ID') is the cluster id.
    for feat_iter, feat in enumerate(hull_collection):
        out_feat = ogr.Feature(feature_def)
        out_feat.SetGeometry(feat)
        out_feat.SetField(0, feat_iter)
        out_layer.CreateFeature(out_feat)
        out_feat = None  # release the feature reference

    # Dropping the dataset references flushes and closes both files (OGR idiom).
    out_ds = ds = None
    return bounds_geojson_path
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Align metadaset submodels')
    parser.add_argument('dataset',
                        help='path to the dataset to be processed')
    parser.add_argument('--overwrite', '-o',
                        action='store_true',
                        default=False,
                        help='Force overwrite of generated files')
    args = parser.parse_args()

    submodels_path = io.join_paths(args.dataset, 'submodels')
    path = os.path.join(args.dataset, 'opensfm')
    meta_data = metadataset.MetaDataSet(path)
    # NOTE(review): 'data' is never used below -- kept in case the
    # constructor has side effects; confirm and remove if not.
    data = metadataset.DataSet(path)

    # Build the per-submodel bounds file from the clustering output.
    bounds_file = None
    clusters_file = os.path.join(args.dataset, "submodels/opensfm/clusters_with_neighbors.geojson")
    if io.file_exists(clusters_file):
        log.ODM_DEBUG("Creating cluster bounds")
        bounds_file = create_bounds_file(clusters_file)
    else:
        log.ODM_ERROR("Clusters file not found")
        # Original used bare exit(), which terminates with status 0 and so
        # signals success to calling scripts; exit non-zero on this error.
        import sys
        sys.exit(1)

    if not io.file_exists(bounds_file):
        log.ODM_ERROR("Bounds file not created. Exiting...")
    else:
        # Collect the orthophoto of every submodel, keyed by its number
        # (the suffix of the submodel_XXXX folder name).
        ortho_tifs = {}
        for folder in os.listdir(io.join_paths(args.dataset, 'submodels')):
            if 'submodel' in folder:
                folder_number = folder.split('_')[1]  # string-extract number
                tif_file = io.join_paths(submodels_path, folder + "/odm_orthophoto/odm_orthophoto.tif")
                if io.file_exists(tif_file):
                    ortho_tifs[folder_number] = tif_file

        kwargs = {
            'f_out': io.join_paths(submodels_path, 'big-ole-tiff.tif'),
            'files': ' '.join(ortho_tifs.values()),
            'clusters': bounds_file
        }
        if io.file_exists(kwargs['f_out']) and not args.overwrite:
            log.ODM_ERROR("File {f_out} exists, use --overwrite to force overwrite of file.".format(**kwargs))
        else:
            # Create the (empty) merged mosaic, then warp each submodel
            # orthophoto into it, cut to its cluster's bounds polygon.
            system.run('gdal_merge.py -o {f_out} '
                       '-createonly '
                       '-co "BIGTIFF=YES" '
                       '-co "BLOCKXSIZE=512" '
                       '-co "BLOCKYSIZE=512" {files}'.format(**kwargs)
                       )
            for tif in ortho_tifs:
                # 'tif' is the zero-padded folder-number key; the cutline
                # NAME attribute has the padding stripped ('0000' -> '0').
                kwargs['name'] = '0' if tif == '0000' else tif.lstrip('0')
                kwargs['file'] = ortho_tifs[tif]
                system.run('gdalwarp -cutline {clusters} '
                           '-cwhere "NAME = \'{name}\'" '
                           '-r lanczos -multi -wo NUM_THREADS=ALL_CPUS '
                           '{file} {f_out}'.format(**kwargs)
                           )

Wyświetl plik

@@ -8,7 +8,7 @@ set -e
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
python $DIR/setup.py $1
python $DIR/setup.py "$@"
python $DIR/run_matching.py $1
python $DIR/split.py $1
python $DIR/run_reconstructions.py $1

Wyświetl plik

@@ -12,13 +12,15 @@ from opendm import context
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO)
def run_command(args):
    """Run an external command and block until it finishes.

    args -- argument list handed directly to subprocess.Popen (no shell).
    Raises RuntimeError carrying the exit code when the command fails.
    """
    result = subprocess.Popen(args).wait()
    if result != 0:
        # Log before raising: in the original the error log sat after the
        # raise statement and could never execute.
        logger.error("The command '{}' exited with return value {}".format(
            ' '.join(args), result))
        raise RuntimeError(result)
class DenseReconstructor:

Wyświetl plik

@@ -1,16 +1,23 @@
#!/usr/bin/env python
import argparse
import logging
import os
import subprocess
from opendm import context
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO)
def run_command(args):
    """Run an external command and block until it finishes.

    args -- argument list handed directly to subprocess.Popen (no shell).
    Raises RuntimeError carrying the exit code when the command fails.
    """
    result = subprocess.Popen(args).wait()
    if result != 0:
        # Log before raising: in the original the error log sat after the
        # raise statement and could never execute.
        logger.error("The command '{}' exited with return value {}".format(
            ' '.join(args), result))
        raise RuntimeError(result)
if __name__ == "__main__":

Wyświetl plik

@@ -12,13 +12,15 @@ from opendm import context
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO)
def run_command(args):
    """Run an external command and block until it finishes.

    args -- argument list handed directly to subprocess.Popen (no shell).
    Raises RuntimeError carrying the exit code when the command fails.
    """
    result = subprocess.Popen(args).wait()
    if result != 0:
        # Log before raising: in the original the error log sat after the
        # raise statement and could never execute.
        logger.error("The command '{}' exited with return value {}".format(
            ' '.join(args), result))
        raise RuntimeError(result)
class Reconstructor:

Wyświetl plik

@@ -10,6 +10,7 @@ other.
import argparse
import os
import logging
import subprocess
import yaml
@@ -18,11 +19,17 @@ from opensfm.io import mkdir_p
from opendm import context
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO)
def run_command(args):
    """Run an external command and block until it finishes.

    args -- argument list handed directly to subprocess.Popen (no shell).
    Raises RuntimeError carrying the exit code when the command fails.
    """
    result = subprocess.Popen(args).wait()
    if result != 0:
        # Log before raising: in the original the error log sat after the
        # raise statement and could never execute.
        logger.error("The command '{}' exited with return value {}".format(
            ' '.join(args), result))
        raise RuntimeError(result)
def resize_images(data_path, args):
@@ -59,6 +66,8 @@ def create_config(opensfm_path, args):
"submodels_relpath": "../submodels/opensfm",
"submodel_relpath_template": "../submodels/submodel_%04d/opensfm",
"submodel_images_relpath_template": "../submodels/submodel_%04d/images",
"submodel_size": args.submodel_size,
"submodel_overlap": args.submodel_overlap,
"feature_process_size": args.resize_to,
"feature_min_frames": args.min_num_features,
@@ -69,6 +78,13 @@ def create_config(opensfm_path, args):
yaml.dump(config, fout, default_flow_style=False)
def link_image_groups(data_path, opensfm_path):
    """Symlink image_groups.txt from the dataset root into the OpenSfM dir.

    Does nothing when the source file is absent or the destination file
    already exists.
    """
    source = os.path.join(data_path, 'image_groups.txt')
    target = os.path.join(opensfm_path, 'image_groups.txt')
    if not os.path.isfile(source):
        return
    if os.path.isfile(target):
        return
    os.symlink(source, target)
def parse_command_line():
parser = argparse.ArgumentParser(description='Setup an ODM metadataset')
parser.add_argument('dataset',
@@ -109,6 +125,25 @@ def parse_command_line():
'uses only one which has value, prefering the '
'Neighbors parameter. Default: %(default)s')
parser.add_argument('--submodel-size',
type=int,
default=80,
help='Average number of images per submodel. When '
'splitting a large dataset into smaller '
'submodels, images are grouped into clusters. '
'This value regulates the number of images that '
'each cluster should have on average.')
parser.add_argument('--submodel-overlap',
type=float,
metavar='<positive integer>',
default=150,
help='Radius of the overlap between submodels. '
'After grouping images into clusters, images '
'that are closer than this radius to a cluster '
'are added to the cluster. This is done to ensure '
'that neighboring submodels overlap.')
return parser.parse_args()
@@ -124,3 +159,4 @@ if __name__ == '__main__':
mkdir_p(opensfm_path)
create_image_list(image_path, opensfm_path)
create_config(opensfm_path, args)
link_image_groups(data_path, opensfm_path)

Wyświetl plik

@@ -1,16 +1,23 @@
#!/usr/bin/env python
import argparse
import logging
import os
import subprocess
from opendm import context
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO)
def run_command(args):
    """Run an external command and block until it finishes.

    args -- argument list handed directly to subprocess.Popen (no shell).
    Raises RuntimeError carrying the exit code when the command fails.
    """
    result = subprocess.Popen(args).wait()
    if result != 0:
        # Log before raising: in the original the error log sat after the
        # raise statement and could never execute.
        logger.error("The command '{}' exited with return value {}".format(
            ' '.join(args), result))
        raise RuntimeError(result)
if __name__ == "__main__":
@@ -22,4 +29,4 @@ if __name__ == "__main__":
command = os.path.join(context.opensfm_path, 'bin', 'opensfm')
path = os.path.join(args.dataset, 'opensfm')
run_command([command, 'create_submodels', path, '--dist', '150'])
run_command([command, 'create_submodels', path])