Create better merge.py

Former-commit-id: b726ecd746
pull/1161/head
Dakota Benjamin 2018-03-03 11:48:43 -05:00
parent 1ad82173e3
commit 1aa2904267
9 changed files with 144 additions and 121 deletions

View file

@@ -21,7 +21,7 @@ libexiv2-dev liblas-bin python-matplotlib libatlas-base-dev swig2.0 python-wheel
RUN apt-get remove libdc1394-22-dev
RUN pip install --upgrade pip
RUN pip install setuptools
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/OpenDroneMap/gippy/archive/v0.3.9.tar.gz loky
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/OpenDroneMap/gippy/archive/v0.3.9.tar.gz loky scipy shapely numpy pyproj
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages"
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm"

View file

@@ -97,6 +97,9 @@ install() {
python-wheel \
libboost-log-dev
echo "Installing split-merge Dependencies"
pip install -U scipy shapely numpy pyproj
pip install -U https://github.com/OpenDroneMap/gippy/archive/v0.3.9.tar.gz
echo "Compiling SuperBuild"

View file

@@ -19,7 +19,7 @@ libexiv2-dev liblas-bin python-matplotlib libatlas-base-dev swig2.0 python-wheel
RUN apt-get remove libdc1394-22-dev
RUN pip install --upgrade pip
RUN pip install setuptools
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/OpenDroneMap/gippy/archive/v0.3.9.tar.gz loky
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/OpenDroneMap/gippy/archive/v0.3.9.tar.gz loky scipy shapely numpy pyproj
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages"
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm"

View file

@@ -448,12 +448,13 @@ class ODM_Tree(object):
self.odm_georeferencing_latlon = io.join_paths(
self.odm_georeferencing, 'latlon.txt')
self.odm_georeferencing_coords = io.join_paths(
self.root_path, 'coords.txt') # Todo put this somewhere better
self.odm_georeferencing, 'coords.txt')
self.odm_georeferencing_gcp = gcp_file or io.find('gcp_list.txt', self.root_path)
self.odm_georeferencing_utm_log = io.join_paths(
self.odm_georeferencing, 'odm_georeferencing_utm_log.txt')
self.odm_georeferencing_log = 'odm_georeferencing_log.txt'
self.odm_georeferencing_transform_file = 'odm_georeferencing_transform.txt'
self.odm_georeferencing_proj = 'proj.txt'
self.odm_georeferencing_model_txt_geo = 'odm_georeferencing_model_geo.txt'
self.odm_georeferencing_model_ply_geo = 'odm_georeferenced_model.ply'
self.odm_georeferencing_model_obj_geo = 'odm_textured_model_geo.obj'
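
The new proj.txt entry is stored as a bare filename, like the log and transform files above it, so callers join it onto the odm_georeferencing directory themselves. A one-line sketch of the pattern, matching what the dataset cell below does:

    # Compose the full path to the saved projection file:
    proj_path = io.join_paths(tree.odm_georeferencing, tree.odm_georeferencing_proj)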

View file

@@ -57,6 +57,10 @@ class ODMLoadDatasetCell(ecto.Cell):
system.mkdir_p(images_dir)
copied = [copyfile(io.join_paths(input_dir, f), io.join_paths(images_dir, f)) for f in get_images(input_dir)]
# define paths and create working directories
system.mkdir_p(tree.odm_georeferencing)
if args.use_25dmesh: system.mkdir_p(tree.odm_25dgeoreferencing)
log.ODM_DEBUG('Loading dataset from: %s' % images_dir)
files = get_images(images_dir)
@@ -107,5 +111,9 @@ class ODMLoadDatasetCell(ecto.Cell):
else:
outputs.reconstruction = types.ODM_Reconstruction(photos, projstring=self.params.proj)
# Save proj to file for future use
with open(io.join_paths(tree.odm_georeferencing, tree.odm_georeferencing_proj), 'w') as f:
f.write(outputs.reconstruction.projection.srs)
log.ODM_INFO('Running ODM Load Dataset Cell - Finished')
return ecto.OK if args.end_with != 'dataset' else ecto.QUIT
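
Saving the projection here is the hand-off to the new merge.py: the SRS string from outputs.reconstruction.projection.srs is written once, then read back later to rebuild the projection. A minimal round-trip sketch, assuming proj_path points at the file written above:

    # Rebuild the projection from the saved SRS (merge.py does the same below):
    import pyproj
    with open(proj_path) as f:
        transform_proj = pyproj.Proj(f.read())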

View file

@@ -2,78 +2,15 @@ from opendm import io
from opendm import log
from opendm import system
import argparse
from osgeo import ogr
from functools import partial
import os
from opensfm.large import metadataset
def create_bounds_file(clusters_geojson_path):
# Create a convex hull around the boundary
# so as to encompass the entire area (no holes)
driver = ogr.GetDriverByName('GeoJSON')
ds = driver.Open(clusters_geojson_path, 0) # read-only
in_layer = ds.GetLayer()
# Save to a new file
out_path = io.extract_path_from_file(clusters_geojson_path)
bounds_geojson_path = os.path.join(out_path, 'bounds.geojson')
if os.path.exists(bounds_geojson_path):
driver.DeleteDataSource(bounds_geojson_path)
out_ds = driver.CreateDataSource(bounds_geojson_path)
out_layer = out_ds.CreateLayer("bounds.geojson", geom_type=ogr.wkbPolygon)
out_layer.CreateField(ogr.FieldDefn('ID', ogr.OFTInteger))
layer_def = in_layer.GetLayerDefn()
feature_def = in_layer.GetLayerDefn()
# For each submodel, create a convex hull
num_clusters = 0
# get number of submodels
for in_feat in in_layer:
x = in_feat.GetFieldAsInteger('submodel')
if x > num_clusters:
num_clusters = x
num_clusters += 1
log.ODM_DEBUG("Number of clusters: {}".format(num_clusters))
in_layer.ResetReading()
hull_collection = ogr.Geometry(ogr.wkbGeometryCollection)
for i in range(num_clusters):
# Collect all Geometry
geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
for in_feat in in_layer:
if in_feat.GetFieldAsInteger('submodel') == i:
# add point to geometry feature
geomcol.AddGeometry(in_feat.GetGeometryRef())
in_layer.ResetReading()
# Calculate convex hull for each feature
convexhull = geomcol.ConvexHull()
hull_collection.AddGeometry(convexhull)
## geomcol.Destroy()
feat_iter = 0
for feat in hull_collection:
out_feat = ogr.Feature(feature_def)
out_feat.SetGeometry(feat)
# add ID
out_feat.SetField(0, feat_iter)
feat_iter += 1
out_layer.CreateFeature(out_feat)
out_feat = None
# Save and close data sources
out_ds = ds = None
return bounds_geojson_path
from scipy.spatial import Voronoi
from shapely.geometry import shape, LineString, Point
import shapely.ops
import numpy as np
import json
import pyproj
if __name__ == "__main__":
@@ -87,54 +24,129 @@ if __name__ == "__main__":
args = parser.parse_args()
submodels_path = io.join_paths(args.dataset, 'submodels')
sfm_path = io.join_paths(args.dataset, 'opensfm')
meta_data = metadataset.MetaDataSet(sfm_path)
data = metadataset.DataSet(sfm_path)
voronoi_file = io.join_paths(meta_data.data_path, 'voronoi.geojson')
proj_path = io.join_paths(args.dataset, "odm_georeferencing/proj.txt")
out_tif = io.join_paths(args.dataset, "merged.tif")
addo_log = io.join_paths(args.dataset, "gdal_addo.log")
path = os.path.join(args.dataset, 'opensfm')
meta_data = metadataset.MetaDataSet(path)
data = metadataset.DataSet(path)
bounds_file = None
bounds_files = {}
for folder in os.listdir(io.join_paths(args.dataset, 'submodels')):
if 'submodel' in folder:
folder_number = '0' if folder.split('_')[1] == '0000' else folder.split('_')[1].lstrip('0')
bounds_file = io.join_paths(submodels_path, folder +
"/odm_georeferencing/odm_georeferenced_model.bounds.geojson")
if io.file_exists(bounds_file):
bounds_files[folder_number] = bounds_file
clusters_file = os.path.join(args.dataset, "submodels/opensfm/clusters_with_neighbors.geojson")
if io.file_exists(clusters_file):
log.ODM_DEBUG("Creating cluster bounds")
bounds_file = create_bounds_file(clusters_file)
else:
log.ODM_ERROR("Clusters file not found")
exit()
# Do voronoi calcs
# # load clusters
images, positions, labels, centers = meta_data.load_clusters()
cluster_proj = pyproj.Proj(init='epsg:4326')
with open(proj_path, 'r') as fr:
transform_proj = pyproj.Proj(fr.read())
if not io.file_exists(bounds_file):
log.ODM_ERROR("Bounds file not created. Exiting...")
else:
# List of tifs paths to merge
ortho_tifs = {}
for folder in os.listdir(io.join_paths(args.dataset, 'submodels')):
if 'submodel' in folder:
folder_number = folder.split('_')[1] # string extract number
tif_file = io.join_paths(submodels_path, folder + "/odm_orthophoto/odm_orthophoto.tif")
if io.file_exists(tif_file):
ortho_tifs[folder_number] = tif_file
# projection transformation
project = partial(
pyproj.transform,
cluster_proj,
transform_proj)
kwargs = {
'f_out': io.join_paths(submodels_path, 'big-ole-tiff.tif'),
# turn this into a list of points
pos_transformed = [shapely.ops.transform(project, Point(x[1], x[0])) for x in positions]
# back to ndarray
positions = np.array([pos_transformed[x].coords[0] for x in range(len(pos_transformed))])
clust = np.concatenate((images, labels, positions), 1)
# Run voronoi on the whole cluster
vor = Voronoi(clust[:, [2, 3]].astype(float))
lines = [
LineString(vor.vertices[line])
for line in vor.ridge_vertices
if -1 not in line
]
# # For each part, build a boundary polygon
v_poly_dis_intersected = {}
for subnum in np.unique(clust[:, 1]):
submodel = clust[clust[:, 1] == subnum]
polygons = []
for poly in shapely.ops.polygonize(lines):
for point in submodel:
if poly.contains(Point(point[[2, 3]])):
polygons.append(poly) # Todo: this is expensive
break
# Dissolve list of polygons
voronoi_polygons_dissolved = shapely.ops.unary_union(polygons)
# intersect with bounds
with open(bounds_files[subnum]) as f:
# There should only be one polygon here
bounds = shape(json.loads(f.read())['features'][0]['geometry'])
v_poly_dis_intersected[subnum] = voronoi_polygons_dissolved.intersection(bounds)
features = []
for submodel in v_poly_dis_intersected:
features.append({
"type": "Feature",
"geometry": shapely.geometry.mapping(v_poly_dis_intersected[submodel]),
"properties": {
"submodel": int(submodel)
}
})
polygons_layer = {
"type": "FeatureCollection",
"features": features,
"crs": {"type": "name", "properties": {"name": transform_proj.srs, "type": "proj4"}}
}
with open(voronoi_file, "w") as f:
json.dump(polygons_layer, f)
ortho_tifs = {}
for folder in os.listdir(io.join_paths(args.dataset, 'submodels')):
if 'submodel' in folder:
folder_number = folder.split('_')[1] # string extract number
tif_file = io.join_paths(submodels_path, folder + "/odm_orthophoto/odm_orthophoto.tif")
if io.file_exists(tif_file):
ortho_tifs[folder_number] = tif_file
kwargs = {
'f_out': out_tif,
'files': ' '.join(ortho_tifs.values()),
'clusters': bounds_file
'clusters': voronoi_file
}
if io.file_exists(kwargs['f_out']) and not args.overwrite:
log.ODM_ERROR("File {f_out} exists, use --overwrite to force overwrite of file.".format(**kwargs))
else:
# use bounds as cutlines (blending)
system.run('gdal_merge.py -o {f_out} '
'-createonly '
'-co "BIGTIFF=YES" '
'-co "BLOCKXSIZE=512" '
'-co "BLOCKYSIZE=512" {files}'.format(**kwargs)
)
if io.file_exists(kwargs['f_out']) and not args.overwrite:
log.ODM_ERROR("File {f_out} exists, use --overwrite to force overwrite of file.".format(**kwargs))
else:
# use bounds as cutlines (blending)
system.run('gdal_merge.py -o {f_out} '
'-createonly '
'-co "BIGTIFF=YES" '
'-co "BLOCKXSIZE=512" '
'-co "BLOCKYSIZE=512" {files}'.format(**kwargs)
)
for tif in ortho_tifs:
kwargs['name'] = '0' if tif == '0000' else tif.lstrip('0') # is tif a tuple?
kwargs['file'] = ortho_tifs[tif]
system.run('gdalwarp -cutline {clusters} '
'-cwhere "NAME = \'{name}\'" '
'-r lanczos -multi -wo NUM_THREADS=ALL_CPUS '
'{file} {f_out}'.format(**kwargs)
)
for tif in ortho_tifs:
kwargs['name'] = '0' if tif == '0000' else tif.lstrip('0') # is tif a tuple?
kwargs['file'] = ortho_tifs[tif]
system.run('gdalwarp -cutline {clusters} '
'-cwhere "submodel = \'{name}\'" '
'-r lanczos -multi -wo NUM_THREADS=ALL_CPUS '
' {file} {f_out}'.format(**kwargs)
)
log.ODM_INFO("Building Overviews")
kwargs = {
'orthophoto': out_tif,
'log': addo_log
}
# Run gdaladdo
system.run('gdaladdo -ro -r average '
'--config BIGTIFF_OVERVIEW IF_SAFER '
'--config COMPRESS_OVERVIEW JPEG '
'{orthophoto} 2 4 8 16 > {log}'.format(**kwargs))
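
The heart of the new merge.py is the Voronoi cutline step: reprojected camera positions are partitioned into Voronoi cells, the finite ridges are polygonized into closed cells, each submodel's cells are dissolved into a single polygon, and those polygons (intersected with the submodel bounds) become the gdalwarp cutlines selected by -cwhere "submodel = '<n>'". A condensed, self-contained sketch of that step with toy points (assumed data, not the production inputs):

    import numpy as np
    from scipy.spatial import Voronoi
    from shapely.geometry import LineString, Point
    import shapely.ops

    # Toy camera positions on a 4x4 grid; left half is submodel 0, right half 1.
    points = np.array([[x, y] for x in range(4) for y in range(4)], dtype=float)
    labels = np.array([0 if x < 2 else 1 for x, _ in points])

    # Voronoi diagram; keep only finite ridges (an index of -1 marks a ridge
    # that runs to infinity).
    vor = Voronoi(points)
    lines = [LineString(vor.vertices[ridge])
             for ridge in vor.ridge_vertices if -1 not in ridge]

    # polygonize yields only the closed (bounded) cells; the production code
    # then intersects each dissolved polygon with that submodel's bounds.
    cells = list(shapely.ops.polygonize(lines))
    for submodel in np.unique(labels):
        owned = [cell for cell in cells
                 if any(cell.contains(Point(p)) for p in points[labels == submodel])]
        cutline = shapely.ops.unary_union(owned)  # dissolve into one polygon
        print(submodel, cutline.wkt)

Each dissolved polygon corresponds to one feature in voronoi.geojson, keyed by its submodel property, which is what the gdalwarp -cwhere clause matches against.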

View file

@@ -14,3 +14,4 @@ python $DIR/split.py $1
python $DIR/run_reconstructions.py $1
python $DIR/align.py $1
python $DIR/run_dense.py $1
python $DIR/merge.py $1

View file

@@ -40,10 +40,6 @@ class ODMGeoreferencingCell(ecto.Cell):
doPointCloudGeo = True
verbose = '-verbose' if self.params.verbose else ''
# define paths and create working directories
system.mkdir_p(tree.odm_georeferencing)
if args.use_25dmesh: system.mkdir_p(tree.odm_25dgeoreferencing)
# check if we rerun cell or not
rerun_cell = (args.rerun is not None and
args.rerun == 'odm_georeferencing') or \

View file

@@ -76,7 +76,9 @@ class ODMOpenSfMCell(ecto.Cell):
"feature_min_frames: %s" % self.params.feature_min_frames,
"processes: %s" % self.params.processes,
"matching_gps_neighbors: %s" % self.params.matching_gps_neighbors,
"depthmap_resolution: 640",
# "depthmap_resolution: 2560",
# "depthmap_min_patch_sd: 4.0",
# "depthmap_min_consistent_views: 3",
"optimize_camera_parameters: %s" % ('no' if self.params.fixed_camera_params else 'yes')
]