import os
import sys
import rasterio
import numpy
import math
import time
import shutil
from opendm.system import run
from opendm import point_cloud
from opendm import io
from opendm import system
from opendm.concurrency import get_max_memory, parallel_map
from scipy import ndimage
from datetime import datetime
from opendm.vendor.gdal_fillnodata import main as gdal_fillnodata
from opendm import log

# Python 2/3 compatibility for the Queue module
try:
    import Queue as queue
except ImportError:
    import queue
import threading

from .ground_rectification.rectify import run_rectification
from . import pdal

try:
    # GDAL >= 3.3
    from osgeo_utils.gdal_proximity import main as gdal_proximity
except ModuleNotFoundError:
    # GDAL <= 3.2
    try:
        from osgeo.utils.gdal_proximity import main as gdal_proximity
    except ImportError:
        # Leave a sentinel so compute_euclidean_map can detect the missing tool
        gdal_proximity = None

def classify(lasFile, scalar, slope, threshold, window, verbose=False):
    start = datetime.now()

    try:
        pdal.run_pdaltranslate_smrf(lasFile, lasFile, scalar, slope, threshold, window, verbose)
    except:
        log.ODM_WARNING("Error creating classified file %s" % lasFile)

    log.ODM_INFO('Created %s in %s' % (lasFile, datetime.now() - start))
    return lasFile
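
# Usage sketch (hypothetical path; the SMRF parameter values below are
# illustrative, not necessarily the pipeline defaults):
#   classify("point_cloud.las", scalar=1.25, slope=0.15, threshold=0.5, window=18.0)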

def rectify(lasFile, debug=False, reclassify_threshold=5, min_area=750, min_points=500):
    start = datetime.now()

    try:
        # Currently no Python 2 lib supports reading and writing LAZ, so we do the
        # conversion manually until ODM is migrated to Python 3.
        # When the migration is done, we can move to pylas and avoid using PDAL for conversion.
        tempLasFile = os.path.join(os.path.dirname(lasFile), 'tmp.las')

        # Convert LAZ to LAS
        cmd = [
            'pdal',
            'translate',
            '-i %s' % lasFile,
            '-o %s' % tempLasFile
        ]
        system.run(' '.join(cmd))

        log.ODM_INFO("Rectifying {} using [reclassify threshold: {}, min area: {}, min points: {}]".format(lasFile, reclassify_threshold, min_area, min_points))
        run_rectification(
            input=tempLasFile, output=tempLasFile, debug=debug,
            reclassify_plan='median', reclassify_threshold=reclassify_threshold,
            extend_plan='surrounding', extend_grid_distance=5,
            min_area=min_area, min_points=min_points)

        # Convert LAS back to LAZ
        cmd = [
            'pdal',
            'translate',
            '-i %s' % tempLasFile,
            '-o %s' % lasFile
        ]
        system.run(' '.join(cmd))
        os.remove(tempLasFile)
    except Exception as e:
        raise Exception("Error rectifying ground in file %s: %s" % (lasFile, str(e)))

    log.ODM_INFO('Created %s in %s' % (lasFile, datetime.now() - start))
    return lasFile
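
# Usage sketch (hypothetical path; requires the `pdal` CLI on the PATH, since
# the LAZ <-> LAS conversion shells out to `pdal translate`):
#   rectify("point_cloud.laz", reclassify_threshold=5, min_area=750, min_points=500)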

error = None

def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,
               outdir='', resolution=0.1, max_workers=1, max_tile_size=4096,
               verbose=False, decimation=None, keep_unfilled_copy=False,
               apply_smoothing=True):
    """ Create DEM from multiple radii, and optionally gapfill """

    global error
    error = None

    start = datetime.now()

    if not os.path.exists(outdir):
        log.ODM_INFO("Creating %s" % outdir)
        os.mkdir(outdir)

    extent = point_cloud.get_extent(input_point_cloud)
    log.ODM_INFO("Point cloud bounds are [minx: %s, maxx: %s] [miny: %s, maxy: %s]" % (extent['minx'], extent['maxx'], extent['miny'], extent['maxy']))
    ext_width = extent['maxx'] - extent['minx']
    ext_height = extent['maxy'] - extent['miny']
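
    # Output raster dimensions (in pixels) implied by the extent and the requested resolution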
    w, h = (int(math.ceil(ext_width / float(resolution))),
            int(math.ceil(ext_height / float(resolution))))

    # Set a floor, no matter the resolution parameter
    # (sometimes a wrongly estimated scale of the model can cause the resolution
    # to be set unrealistically low, causing errors)
    RES_FLOOR = 64
    if w < RES_FLOOR and h < RES_FLOOR:
        prev_w, prev_h = w, h

        if w >= h:
            w, h = (RES_FLOOR, int(math.ceil(ext_height / ext_width * RES_FLOOR)))
        else:
            w, h = (int(math.ceil(ext_width / ext_height * RES_FLOOR)), RES_FLOOR)

        floor_ratio = prev_w / float(w)
        resolution *= floor_ratio
        radiuses = [str(float(r) * floor_ratio) for r in radiuses]

        log.ODM_WARNING("Really low resolution DEM requested (%s); setting a floor at %s pixels. Resolution changed to %s. The scale of this reconstruction might be off." % ((prev_w, prev_h), RES_FLOOR, resolution))

    final_dem_pixels = w * h
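
    # num_splits is the number of tile divisions per axis; it grows
    # logarithmically with the ratio of total DEM pixels to the maximum
    # tile area (max_tile_size^2), so large DEMs are rendered in pieces.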
    num_splits = int(max(1, math.ceil(math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size))) / math.log(2))))
    num_tiles = num_splits * num_splits
    log.ODM_INFO("DEM resolution is %s, max tile size is %s, will split DEM generation into %s tiles" % ((h, w), max_tile_size, num_tiles))

    tile_bounds_width = ext_width / float(num_splits)
    tile_bounds_height = ext_height / float(num_splits)

    tiles = []

    for r in radiuses:
        minx = extent['minx']

        for x in range(num_splits):
            miny = extent['miny']
            if x == num_splits - 1:
                maxx = extent['maxx']
            else:
                maxx = minx + tile_bounds_width

            for y in range(num_splits):
                if y == num_splits - 1:
                    maxy = extent['maxy']
                else:
                    maxy = miny + tile_bounds_height

                filename = os.path.join(os.path.abspath(outdir), '%s_r%s_x%s_y%s.tif' % (dem_type, r, x, y))

                tiles.append({
                    'radius': r,
                    'bounds': {
                        'minx': minx,
                        'maxx': maxx,
                        'miny': miny,
                        'maxy': maxy
                    },
                    'filename': filename
                })

                miny = maxy
            minx = maxx

    # Sort tiles by decreasing radius
    tiles.sort(key=lambda t: float(t['radius']), reverse=True)
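    # Largest radius first: in the VRT mosaic built below, sources listed later
    # take precedence where tiles overlap, so the finer-radius tiles likely end
    # up on top (an assumption; the ordering rationale isn't documented here).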

    def process_tile(q):
        log.ODM_INFO("Generating %s (%s, radius: %s, resolution: %s)" % (q['filename'], output_type, q['radius'], resolution))

        d = pdal.json_gdal_base(q['filename'], output_type, q['radius'], resolution, q['bounds'])

        if dem_type == 'dtm':
            d = pdal.json_add_classification_filter(d, 2)

        if decimation is not None:
            d = pdal.json_add_decimation_filter(d, decimation)

        pdal.json_add_readers(d, [input_point_cloud])
        pdal.run_pipeline(d, verbose=verbose)

    parallel_map(process_tile, tiles, max_workers)

    output_file = "%s.tif" % dem_type
    output_path = os.path.abspath(os.path.join(outdir, output_file))

    # Verify tile results
    for t in tiles:
        if not os.path.exists(t['filename']):
            raise Exception("Error creating %s, %s failed to be created" % (output_file, t['filename']))

    # Create virtual raster
    tiles_vrt_path = os.path.abspath(os.path.join(outdir, "tiles.vrt"))
    tiles_file_list = os.path.abspath(os.path.join(outdir, "tiles_list.txt"))
    with open(tiles_file_list, 'w') as f:
        for t in tiles:
            f.write(t['filename'] + '\n')

    run('gdalbuildvrt -input_file_list "%s" "%s"' % (tiles_file_list, tiles_vrt_path))
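    # Note: passing the tile paths via -input_file_list (rather than as command
    # line arguments) keeps the command short when there are many tiles, which
    # likely avoids OS command-length limits (assumption).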

    merged_vrt_path = os.path.abspath(os.path.join(outdir, "merged.vrt"))
    geotiff_tmp_path = os.path.abspath(os.path.join(outdir, 'tiles.tmp.tif'))
    geotiff_small_path = os.path.abspath(os.path.join(outdir, 'tiles.small.tif'))
    geotiff_small_filled_path = os.path.abspath(os.path.join(outdir, 'tiles.small_filled.tif'))
    geotiff_path = os.path.abspath(os.path.join(outdir, 'tiles.tif'))

    # Build GeoTIFF
    kwargs = {
        'max_memory': get_max_memory(),
        'threads': max_workers if max_workers else 'ALL_CPUS',
        'tiles_vrt': tiles_vrt_path,
        'merged_vrt': merged_vrt_path,
        'geotiff': geotiff_path,
        'geotiff_tmp': geotiff_tmp_path,
        'geotiff_small': geotiff_small_path,
        'geotiff_small_filled': geotiff_small_filled_path
    }

    if gapfill:
        # Sometimes, for some reason, gdal_fillnodata.py
        # behaves strangely when reading data directly from a .VRT,
        # so we need to convert to GeoTIFF first.
        run('gdal_translate '
            '-co NUM_THREADS={threads} '
            '-co BIGTIFF=IF_SAFER '
            '--config GDAL_CACHEMAX {max_memory}% '
            '"{tiles_vrt}" "{geotiff_tmp}"'.format(**kwargs))

        # Scale to 10% size
        run('gdal_translate '
            '-co NUM_THREADS={threads} '
            '-co BIGTIFF=IF_SAFER '
            '--config GDAL_CACHEMAX {max_memory}% '
            '-outsize 10% 0 '
            '"{geotiff_tmp}" "{geotiff_small}"'.format(**kwargs))

        # Fill scaled
        gdal_fillnodata(['.',
                         '-co', 'NUM_THREADS=%s' % kwargs['threads'],
                         '-co', 'BIGTIFF=IF_SAFER',
                         '--config', 'GDAL_CACHEMAX', str(kwargs['max_memory']) + '%',
                         '-b', '1',
                         '-of', 'GTiff',
                         kwargs['geotiff_small'], kwargs['geotiff_small_filled']])
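        # For reference, the call above is roughly equivalent to this CLI
        # invocation (paths refer to the temp files created above):
        #   gdal_fillnodata.py -co NUM_THREADS=<n> -co BIGTIFF=IF_SAFER \
        #       --config GDAL_CACHEMAX <m>% -b 1 -of GTiff \
        #       tiles.small.tif tiles.small_filled.tif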

        # Merge filled scaled DEM with unfilled DEM using bilinear interpolation
        run('gdalbuildvrt -resolution highest -r bilinear "%s" "%s" "%s"' % (merged_vrt_path, geotiff_small_filled_path, geotiff_tmp_path))
        run('gdal_translate '
            '-co NUM_THREADS={threads} '
            '-co TILED=YES '
            '-co BIGTIFF=IF_SAFER '
            '-co COMPRESS=DEFLATE '
            '--config GDAL_CACHEMAX {max_memory}% '
            '"{merged_vrt}" "{geotiff}"'.format(**kwargs))
    else:
        run('gdal_translate '
            '-co NUM_THREADS={threads} '
            '-co TILED=YES '
            '-co BIGTIFF=IF_SAFER '
            '-co COMPRESS=DEFLATE '
            '--config GDAL_CACHEMAX {max_memory}% '
            '"{tiles_vrt}" "{geotiff}"'.format(**kwargs))

    if apply_smoothing:
        median_smoothing(geotiff_path, output_path)
        os.remove(geotiff_path)
    else:
        os.replace(geotiff_path, output_path)

    if os.path.exists(geotiff_tmp_path):
        if not keep_unfilled_copy:
            os.remove(geotiff_tmp_path)
        else:
            os.replace(geotiff_tmp_path, io.related_file_path(output_path, postfix=".unfilled"))

    for cleanup_file in [tiles_vrt_path, tiles_file_list, merged_vrt_path, geotiff_small_path, geotiff_small_filled_path]:
        if os.path.exists(cleanup_file): os.remove(cleanup_file)
    for t in tiles:
        if os.path.exists(t['filename']): os.remove(t['filename'])

    log.ODM_INFO('Completed %s in %s' % (output_file, datetime.now() - start))

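# Usage sketch (hypothetical paths and values):
#   create_dem("point_cloud.laz", "dsm", output_type="max",
#              radiuses=["0.56", "0.84"], gapfill=True,
#              outdir="odm_dem", resolution=0.05, max_workers=4)
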
def compute_euclidean_map(geotiff_path, output_path, overwrite=False):
    if not os.path.exists(geotiff_path):
        log.ODM_WARNING("Cannot compute euclidean map (file does not exist: %s)" % geotiff_path)
        return

    with rasterio.open(geotiff_path) as f:
        nodata = f.nodatavals[0]

    if not os.path.exists(output_path) or overwrite:
        log.ODM_INFO("Computing euclidean distance: %s" % output_path)

        if gdal_proximity is not None:
            try:
                gdal_proximity(['gdal_proximity.py', geotiff_path, output_path, '-values', str(nodata)])
            except Exception as e:
                log.ODM_WARNING("Cannot compute euclidean distance: %s" % str(e))

            if os.path.exists(output_path):
                return output_path
            else:
                log.ODM_WARNING("Cannot compute euclidean distance file: %s" % output_path)
        else:
            log.ODM_WARNING("Cannot compute euclidean map, gdal_proximity is missing")
    else:
        log.ODM_INFO("Found a euclidean distance map: %s" % output_path)
        return output_path
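
# Usage sketch (hypothetical paths): computes, for each pixel, the distance to
# the nearest nodata cell of a raster (useful, e.g., for blending/feathering):
#   compute_euclidean_map("odm_dem/dsm.tif", "odm_dem/dsm_distance.tif", overwrite=True)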

def median_smoothing(geotiff_path, output_path, smoothing_iterations=1):
    """ Apply median smoothing """
    start = datetime.now()

    if not os.path.exists(geotiff_path):
        raise Exception('File %s does not exist!' % geotiff_path)

    log.ODM_INFO('Starting smoothing...')

    with rasterio.open(geotiff_path) as img:
        nodata = img.nodatavals[0]
        dtype = img.dtypes[0]
        arr = img.read()[0]

        nodata_locs = numpy.where(arr == nodata)

        # Median filter (careful: changing the window size might require tweaking
        # the lines below). There's another numpy function that takes care of
        # these edge cases, but it's slower.
        for i in range(smoothing_iterations):
            log.ODM_INFO("Smoothing iteration %s" % str(i + 1))
            arr = ndimage.median_filter(arr, size=9, output=dtype, mode='nearest')

        # Median filter leaves a bunch of zeros in nodata areas
        arr[nodata_locs] = nodata

        # write output
        with rasterio.open(output_path, 'w', **img.profile) as imgout:
            imgout.write(arr, 1)

    log.ODM_INFO('Completed smoothing to create %s in %s' % (output_path, datetime.now() - start))

    return output_path
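
# Usage sketch (hypothetical paths): smooth a DEM with two median-filter passes
#   median_smoothing("odm_dem/dsm.tif", "odm_dem/dsm_smoothed.tif", smoothing_iterations=2)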