import os
import math

import numpy as np
import rasterio
import fiona
from edt import edt
from rasterio.transform import Affine, rowcol
from rasterio.mask import mask
from osgeo import gdal

from opendm import io
from opendm import log
from opendm import system
from opendm.concurrency import get_max_memory
from opendm.cropper import Cropper
from opendm.cogeo import convert_to_cogeo
from opendm.tiles.tiler import generate_orthophoto_tiles

def get_orthophoto_vars(args):
    return {
        'TILED': 'NO' if args.orthophoto_no_tiled else 'YES',
        'COMPRESS': args.orthophoto_compression,
        'PREDICTOR': '2' if args.orthophoto_compression in ['LZW', 'DEFLATE'] else '1',
        'BIGTIFF': 'IF_SAFER',
        'BLOCKXSIZE': 512,
        'BLOCKYSIZE': 512,
        'NUM_THREADS': args.max_concurrency
    }
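
# The dictionary returned above holds GDAL GTiff creation options (TILED, COMPRESS,
# PREDICTOR, BIGTIFF, BLOCKXSIZE/BLOCKYSIZE, NUM_THREADS). As a rough sketch of how
# such a mapping would translate to command-line GDAL usage, each entry corresponds
# to a "-co KEY=VALUE" flag for tools like gdal_translate or gdalwarp. The helper
# below is illustrative only, its name is hypothetical, and it is not used anywhere
# in the pipeline.
def _creation_options_to_co_flags(creation_options):
    # e.g. {'TILED': 'YES', 'COMPRESS': 'DEFLATE'} -> '-co TILED=YES -co COMPRESS=DEFLATE'
    return " ".join("-co %s=%s" % (k, v) for k, v in creation_options.items())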

def build_overviews(orthophoto_file):
    log.ODM_INFO("Building Overviews")
    kwargs = {'orthophoto': orthophoto_file}

    # Run gdaladdo
    system.run('gdaladdo -r average '
               '--config BIGTIFF_OVERVIEW IF_SAFER '
               '--config COMPRESS_OVERVIEW JPEG '
               '{orthophoto} 2 4 8 16'.format(**kwargs))

def generate_png(orthophoto_file, output_file=None, outsize=None):
    if output_file is None:
        base, ext = os.path.splitext(orthophoto_file)
        output_file = base + '.png'

    # See if we need to select top three bands
    bandparam = ""

    gtif = gdal.Open(orthophoto_file)
    if gtif.RasterCount > 4:
        bands = []
        for idx in range(1, gtif.RasterCount + 1):
            bands.append(gtif.GetRasterBand(idx).GetColorInterpretation())
        bands = dict(zip(bands, range(1, len(bands) + 1)))

        try:
            red = bands.get(gdal.GCI_RedBand)
            green = bands.get(gdal.GCI_GreenBand)
            blue = bands.get(gdal.GCI_BlueBand)
            if red is None or green is None or blue is None:
                raise Exception("Cannot find bands")

            bandparam = "-b %s -b %s -b %s -a_nodata 0" % (red, green, blue)
        except:
            bandparam = "-b 1 -b 2 -b 3 -a_nodata 0"
    gtif = None

    osparam = ""
    if outsize is not None:
        osparam = "-outsize %s 0" % outsize

    system.run('gdal_translate -of png "%s" "%s" %s %s '
               '--config GDAL_CACHEMAX %s%% ' % (orthophoto_file, output_file, osparam, bandparam, get_max_memory()))
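
# Note on the band selection above: for rasters with more than four bands (e.g.
# multispectral orthophotos), gdal_translate is told which three bands to use for
# the PNG via "-b" flags. When red/green/blue color interpretations are tagged in
# the file, those band indexes are used (for example, R=1, G=2, B=3 yields
# "-b 1 -b 2 -b 3 -a_nodata 0"); otherwise the first three bands are used as a
# fallback.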

def generate_kmz(orthophoto_file, output_file=None, outsize=None):
    if output_file is None:
        base, ext = os.path.splitext(orthophoto_file)
        output_file = base + '.kmz'

    # See if we need to select top three bands
    bandparam = ""
    gtif = gdal.Open(orthophoto_file)
    if gtif.RasterCount > 4:
        bandparam = "-b 1 -b 2 -b 3 -a_nodata 0"

    system.run('gdal_translate -of KMLSUPEROVERLAY -co FORMAT=PNG "%s" "%s" %s '
               '--config GDAL_CACHEMAX %s%% ' % (orthophoto_file, output_file, bandparam, get_max_memory()))

def post_orthophoto_steps(args, bounds_file_path, orthophoto_file, orthophoto_tiles_dir, resolution):
    if args.crop > 0 or args.boundary:
        Cropper.crop(bounds_file_path, orthophoto_file, get_orthophoto_vars(args), keep_original=not args.optimize_disk_space, warp_options=['-dstalpha'])

    if args.build_overviews and not args.cog:
        build_overviews(orthophoto_file)

    if args.orthophoto_png:
        generate_png(orthophoto_file)

    if args.orthophoto_kmz:
        generate_kmz(orthophoto_file)

    if args.tiles:
        generate_orthophoto_tiles(orthophoto_file, orthophoto_tiles_dir, args.max_concurrency, resolution)

    if args.cog:
        convert_to_cogeo(orthophoto_file, max_workers=args.max_concurrency, compression=args.orthophoto_compression)
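
# Illustrative call sequence (not executed here): post_orthophoto_steps is meant to
# run after the orthophoto GeoTIFF has been produced, with the parsed ODM arguments
# and pipeline paths. The paths and resolution below are hypothetical placeholders.
#
#   post_orthophoto_steps(args,
#                         bounds_file_path="odm_georeferencing/odm_georeferenced_model.bounds.gpkg",
#                         orthophoto_file="odm_orthophoto/odm_orthophoto.tif",
#                         orthophoto_tiles_dir="orthophoto_tiles",
#                         resolution=5.0)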

def compute_mask_raster(input_raster, vector_mask, output_raster, blend_distance=20, only_max_coords_feature=False):
    if not os.path.exists(input_raster):
        log.ODM_WARNING("Cannot mask raster, %s does not exist" % input_raster)
        return

    if not os.path.exists(vector_mask):
        log.ODM_WARNING("Cannot mask raster, %s does not exist" % vector_mask)
        return

    log.ODM_INFO("Computing mask raster: %s" % output_raster)

    with rasterio.open(input_raster, 'r') as rast:
        with fiona.open(vector_mask) as src:
            burn_features = src

            if only_max_coords_feature:
                max_coords_count = 0
                max_coords_feature = None
                for feature in src:
                    if feature is not None:
                        # No complex shapes
                        if len(feature['geometry']['coordinates'][0]) > max_coords_count:
                            max_coords_count = len(feature['geometry']['coordinates'][0])
                            max_coords_feature = feature
                if max_coords_feature is not None:
                    burn_features = [max_coords_feature]

            shapes = [feature["geometry"] for feature in burn_features]
            out_image, out_transform = mask(rast, shapes, nodata=0)

            if blend_distance > 0:
                if out_image.shape[0] >= 4:
                    # alpha_band = rast.dataset_mask()
                    alpha_band = out_image[-1]
                    dist_t = edt(alpha_band, black_border=True, parallel=0)
                    dist_t[dist_t <= blend_distance] /= blend_distance
                    dist_t[dist_t > blend_distance] = 1
                    np.multiply(alpha_band, dist_t, out=alpha_band, casting="unsafe")
                else:
                    log.ODM_WARNING("%s does not have an alpha band, cannot blend cutline!" % input_raster)

            with rasterio.open(output_raster, 'w', BIGTIFF="IF_SAFER", **rast.profile) as dst:
                dst.colorinterp = rast.colorinterp
                dst.write(out_image)

            return output_raster
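
# The cutline blending above feathers the alpha channel: the Euclidean distance
# transform (edt) gives each valid pixel its distance in pixels from the mask edge,
# and that distance is turned into a 0..1 ramp over blend_distance pixels before
# being multiplied into the alpha band. A minimal sketch of the per-pixel weight,
# mirroring the ramp above (hypothetical helper, not used by the pipeline):
def _blend_weight(distance_px, blend_distance=20):
    # 5 px from the edge with blend_distance=20 -> 0.25 of full opacity;
    # 20 px or deeper inside the valid area -> fully opaque (1.0).
    return min(distance_px / float(blend_distance), 1.0)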

def feather_raster(input_raster, output_raster, blend_distance=20):
    if not os.path.exists(input_raster):
        log.ODM_WARNING("Cannot feather raster, %s does not exist" % input_raster)
        return

    log.ODM_INFO("Computing feather raster: %s" % output_raster)

    with rasterio.open(input_raster, 'r') as rast:
        out_image = rast.read()
        if blend_distance > 0:
            if out_image.shape[0] >= 4:
                alpha_band = out_image[-1]
                dist_t = edt(alpha_band, black_border=True, parallel=0)
                dist_t[dist_t <= blend_distance] /= blend_distance
                dist_t[dist_t > blend_distance] = 1
                np.multiply(alpha_band, dist_t, out=alpha_band, casting="unsafe")
            else:
                log.ODM_WARNING("%s does not have an alpha band, cannot feather raster!" % input_raster)

        with rasterio.open(output_raster, 'w', BIGTIFF="IF_SAFER", **rast.profile) as dst:
            dst.colorinterp = rast.colorinterp
            dst.write(out_image)

        return output_raster

def merge(input_ortho_and_ortho_cuts, output_orthophoto, orthophoto_vars={}):
    """
    Based on https://github.com/mapbox/rio-merge-rgba/
    Merge orthophotos around cutlines using a blend buffer.
    """
    inputs = []
    bounds = None
    precision = 7

    for o, c in input_ortho_and_ortho_cuts:
        if not io.file_exists(o):
            log.ODM_WARNING("%s does not exist. Will skip from merged orthophoto." % o)
            continue
        if not io.file_exists(c):
            log.ODM_WARNING("%s does not exist. Will skip from merged orthophoto." % c)
            continue
        inputs.append((o, c))

    if len(inputs) == 0:
        log.ODM_WARNING("No input orthophotos, skipping merge.")
        return

    with rasterio.open(inputs[0][0]) as first:
        res = first.res
        dtype = first.dtypes[0]
        profile = first.profile
        num_bands = first.meta['count'] - 1  # minus alpha
        colorinterp = first.colorinterp

    log.ODM_INFO("%s valid orthophoto rasters to merge" % len(inputs))
    sources = [(rasterio.open(o), rasterio.open(c)) for o, c in inputs]

    # scan input files.
    # while we're at it, validate assumptions about inputs
    xs = []
    ys = []
    for src, _ in sources:
        left, bottom, right, top = src.bounds
        xs.extend([left, right])
        ys.extend([bottom, top])
        if src.profile["count"] < 2:
            raise ValueError("Inputs must be at least 2-band rasters")

    dst_w, dst_s, dst_e, dst_n = min(xs), min(ys), max(xs), max(ys)
    log.ODM_INFO("Output bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))

    output_transform = Affine.translation(dst_w, dst_n)
    output_transform *= Affine.scale(res[0], -res[1])

    # Compute output array shape. We guarantee it will cover the output
    # bounds completely.
    output_width = int(math.ceil((dst_e - dst_w) / res[0]))
    output_height = int(math.ceil((dst_n - dst_s) / res[1]))

    # Adjust bounds to fit.
    dst_e, dst_s = output_transform * (output_width, output_height)
    log.ODM_INFO("Output width: %d, height: %d" % (output_width, output_height))
    log.ODM_INFO("Adjusted bounds: %r %r %r %r" % (dst_w, dst_s, dst_e, dst_n))

    profile["transform"] = output_transform
    profile["height"] = output_height
    profile["width"] = output_width
    profile["tiled"] = orthophoto_vars.get('TILED', 'YES') == 'YES'
    profile["blockxsize"] = orthophoto_vars.get('BLOCKXSIZE', 512)
    profile["blockysize"] = orthophoto_vars.get('BLOCKYSIZE', 512)
    profile["compress"] = orthophoto_vars.get('COMPRESS', 'LZW')
    profile["predictor"] = orthophoto_vars.get('PREDICTOR', '2')
    profile["bigtiff"] = orthophoto_vars.get('BIGTIFF', 'IF_SAFER')
    profile.update()

    # create destination file
    with rasterio.open(output_orthophoto, "w", **profile) as dstrast:
        dstrast.colorinterp = colorinterp
        for idx, dst_window in dstrast.block_windows():
            left, bottom, right, top = dstrast.window_bounds(dst_window)

            blocksize = dst_window.width
            dst_rows, dst_cols = (dst_window.height, dst_window.width)

            # initialize array destined for the block
            dst_count = first.count
            dst_shape = (dst_count, dst_rows, dst_cols)

            dstarr = np.zeros(dst_shape, dtype=dtype)

            # First pass, write all rasters naively without blending
            for src, _ in sources:
                src_window = tuple(zip(rowcol(
                    src.transform, left, top, op=round, precision=precision
                ), rowcol(
                    src.transform, right, bottom, op=round, precision=precision
                )))

                temp = np.zeros(dst_shape, dtype=dtype)
                temp = src.read(
                    out=temp, window=src_window, boundless=True, masked=False
                )

                # pixels without data yet are available to write
                write_region = np.logical_and(
                    (dstarr[-1] == 0), (temp[-1] != 0)  # 0 is nodata
                )
                np.copyto(dstarr, temp, where=write_region)

                # check if dest has any nodata pixels available
                if np.count_nonzero(dstarr[-1]) == blocksize:
                    break

            # Second pass, write all feathered rasters
            # blending the edges
            for src, _ in sources:
                src_window = tuple(zip(rowcol(
                    src.transform, left, top, op=round, precision=precision
                ), rowcol(
                    src.transform, right, bottom, op=round, precision=precision
                )))

                temp = np.zeros(dst_shape, dtype=dtype)
                temp = src.read(
                    out=temp, window=src_window, boundless=True, masked=False
                )

                where = temp[-1] != 0
                for b in range(0, num_bands):
                    blended = temp[-1] / 255.0 * temp[b] + (1 - temp[-1] / 255.0) * dstarr[b]
                    np.copyto(dstarr[b], blended, casting='unsafe', where=where)
                dstarr[-1][where] = 255.0

                # check if dest has any nodata pixels available
                if np.count_nonzero(dstarr[-1]) == blocksize:
                    break

            # Third pass, write cut rasters
            # blending the cutlines
            for _, cut in sources:
                src_window = tuple(zip(rowcol(
                    cut.transform, left, top, op=round, precision=precision
                ), rowcol(
                    cut.transform, right, bottom, op=round, precision=precision
                )))

                temp = np.zeros(dst_shape, dtype=dtype)
                temp = cut.read(
                    out=temp, window=src_window, boundless=True, masked=False
                )

                # For each band, average alpha values between
                # destination raster and cut raster
                for b in range(0, num_bands):
                    blended = temp[-1] / 255.0 * temp[b] + (1 - temp[-1] / 255.0) * dstarr[b]
                    np.copyto(dstarr[b], blended, casting='unsafe', where=temp[-1] != 0)

            dstrast.write(dstarr, window=dst_window)

    return output_orthophoto
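
# Illustrative usage (hypothetical paths): merge() consumes pairs of
# (feathered orthophoto, cutline-masked orthophoto), such as outputs of
# feather_raster() and compute_mask_raster(), and writes a single blended mosaic
# using the same creation options as the rest of the pipeline.
#
#   merge([("submodel_0000/odm_orthophoto/odm_orthophoto_feathered.tif",
#           "submodel_0000/odm_orthophoto/odm_orthophoto_cut.tif"),
#          ("submodel_0001/odm_orthophoto/odm_orthophoto_feathered.tif",
#           "submodel_0001/odm_orthophoto/odm_orthophoto_cut.tif")],
#         "odm_orthophoto/odm_orthophoto.tif",
#         orthophoto_vars=get_orthophoto_vars(args))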