Merge pull request #1551 from pierotofy/300

3.0.0 release
pull/1555/head
Piero Toffanin 2022-11-23 14:25:24 -05:00 committed by GitHub
commit c1cd2456b1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
23 changed files with 118 additions and 266 deletions

View file

@ -209,7 +209,7 @@ externalproject_add(poissonrecon
externalproject_add(dem2mesh
GIT_REPOSITORY https://github.com/OpenDroneMap/dem2mesh.git
-GIT_TAG master
+GIT_TAG 300
PREFIX ${SB_BINARY_DIR}/dem2mesh
SOURCE_DIR ${SB_SOURCE_DIR}/dem2mesh
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}

View file

@ -53,7 +53,7 @@ ExternalProject_Add(${_proj_name}
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
GIT_REPOSITORY https://github.com/OpenDroneMap/openMVS
-GIT_TAG 291
+GIT_TAG 300
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------

View file

@ -1 +1 @@
-2.9.2
+3.0.0

View file

@ -51,7 +51,6 @@ commands.create_dem(args.point_cloud,
outdir=outdir,
resolution=args.resolution,
decimation=1,
-verbose=True,
max_workers=multiprocessing.cpu_count(),
keep_unfilled_copy=False
)

View file

@ -83,15 +83,6 @@ def config(argv=None, parser=None):
nargs='?',
help='Name of dataset (i.e subfolder name within project folder). Default: %(default)s')
-parser.add_argument('--resize-to',
-metavar='<integer>',
-action=StoreValue,
-default=2048,
-type=int,
-help='Legacy option (use --feature-quality instead). Resizes images by the largest side for feature extraction purposes only. '
-'Set to -1 to disable. This does not affect the final orthophoto '
-'resolution quality and will not resize the original images. Default: %(default)s')
parser.add_argument('--end-with', '-e',
metavar='<string>',
action=StoreValue,
@ -212,15 +203,6 @@ def config(argv=None, parser=None):
'processes. Peak memory requirement is ~1GB per '
'thread and 2 megapixel image resolution. Default: %(default)s'))
-parser.add_argument('--depthmap-resolution',
-metavar='<positive float>',
-action=StoreValue,
-type=float,
-default=640,
-help=('Controls the density of the point cloud by setting the resolution of the depthmap images. Higher values take longer to compute '
-'but produce denser point clouds. Overrides the value calculated by --pc-quality.'
-'Default: %(default)s'))
parser.add_argument('--use-hybrid-bundle-adjustment',
action=StoreTrue,
nargs=0,
@ -414,13 +396,6 @@ def config(argv=None, parser=None):
help='Reduce the memory usage needed for depthmap fusion by splitting large scenes into tiles. Turn this on if your machine doesn\'t have much RAM and/or you\'ve set --pc-quality to high or ultra. Experimental. '
'Default: %(default)s')
-parser.add_argument('--pc-geometric',
-action=StoreTrue,
-nargs=0,
-default=False,
-help='Improve the accuracy of the point cloud by computing geometrically consistent depthmaps. This increases processing time, but can improve results in urban scenes. '
-'Default: %(default)s')
parser.add_argument('--smrf-scalar',
metavar='<positive float>',
action=StoreValue,
@ -453,20 +428,6 @@ def config(argv=None, parser=None):
help='Simple Morphological Filter window radius parameter (meters). '
'Default: %(default)s')
-parser.add_argument('--texturing-data-term',
-metavar='<string>',
-action=StoreValue,
-default='gmi',
-choices=['gmi', 'area'],
-help=('When texturing the 3D mesh, for each triangle, choose to prioritize images with sharp features (gmi) or those that cover the largest area (area). Default: %(default)s'))
-parser.add_argument('--texturing-outlier-removal-type',
-metavar='<string>',
-action=StoreValue,
-default='gauss_clamping',
-choices=['none', 'gauss_clamping', 'gauss_damping'],
-help=('Type of photometric outlier removal method. Can be one of: %(choices)s. Default: %(default)s'))
parser.add_argument('--texturing-skip-global-seam-leveling',
action=StoreTrue,
nargs=0,
@ -486,15 +447,6 @@ def config(argv=None, parser=None):
help=('Keep faces in the mesh that are not seen in any camera. '
'Default: %(default)s'))
-parser.add_argument('--texturing-tone-mapping',
-metavar='<string>',
-action=StoreValue,
-choices=['none', 'gamma'],
-default='none',
-help='Turn on gamma tone mapping or none for no tone '
-'mapping. Can be one of %(choices)s. '
-'Default: %(default)s ')
parser.add_argument('--gcp',
metavar='<path string>',
action=StoreValue,
@ -512,7 +464,7 @@ def config(argv=None, parser=None):
action=StoreValue,
default=None,
help=('Path to the image geolocation file containing the camera center coordinates used for georeferencing. '
-'Note that omega/phi/kappa are currently not supported (you can set them to 0). '
+'If you don\'t have values for omega/phi/kappa you can set them to 0. '
'The file needs to '
'use the following format: \n'
'EPSG:<code> or <+proj definition>\n'
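As an illustration, a geo file following the format described above might look like the sketch below. The values are invented, and the per-image column layout (image name, x, y, z and optional omega/phi/kappa angles) is an assumption based on the help text, not something shown in this diff:

EPSG:4326
DJI_0001.JPG 11.2490269 46.6491103 322.6 0 0 0
DJI_0002.JPG 11.2491284 46.6491567 322.8 0 0 0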
@ -592,7 +544,7 @@ def config(argv=None, parser=None):
default=False,
help='Set this parameter if you want a striped GeoTIFF. '
'Default: %(default)s')
parser.add_argument('--orthophoto-png',
action=StoreTrue,
nargs=0,
@ -606,7 +558,6 @@ def config(argv=None, parser=None):
default=False,
help='Set this parameter if you want to generate a Google Earth (KMZ) rendering of the orthophoto. '
'Default: %(default)s')
parser.add_argument('--orthophoto-compression',
metavar='<string>',
@ -670,32 +621,11 @@ def config(argv=None, parser=None):
default=False,
help='Create Cloud-Optimized GeoTIFFs instead of normal GeoTIFFs. Default: %(default)s')
-parser.add_argument('--verbose', '-v',
-action=StoreTrue,
-nargs=0,
-default=False,
-help='Print additional messages to the console. '
-'Default: %(default)s')
parser.add_argument('--copy-to',
metavar='<path>',
action=StoreValue,
help='Copy output results to this folder after processing.')
-parser.add_argument('--time',
-action=StoreTrue,
-nargs=0,
-default=False,
-help='Generates a benchmark file with runtime info. '
-'Default: %(default)s')
-parser.add_argument('--debug',
-action=StoreTrue,
-nargs=0,
-default=False,
-help='Print debug messages. Default: %(default)s')
parser.add_argument('--version',
action='version',
version='ODM {0}'.format(__version__),
@ -811,7 +741,15 @@ def config(argv=None, parser=None):
'If the images have been postprocessed and are already aligned, use this option. '
'Default: %(default)s'))
-args = parser.parse_args(argv)
+args, unknown = parser.parse_known_args(argv)
+DEPRECATED = ["--verbose", "--debug", "--time", "--resize-to", "--depthmap-resolution", "--pc-geometric", "--texturing-data-term", "--texturing-outlier-removal-type", "--texturing-tone-mapping"]
+unknown_e = [p for p in unknown if p not in DEPRECATED]
+if len(unknown_e) > 0:
+raise parser.error("unrecognized arguments: %s" % " ".join(unknown_e))
+for p in unknown:
+if p in DEPRECATED:
+log.ODM_WARNING("%s is no longer a valid argument and will be ignored!" % p)
# check that the project path setting has been set properly
if not args.project_path:
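The switch above from parse_args to parse_known_args is what lets 3.0.0 accept removed flags gracefully instead of failing outright. A minimal standalone sketch of the same pattern (the parser and flag names here are illustrative, not ODM's real ones):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--project-path")

DEPRECATED = ["--verbose", "--debug", "--time"]  # flag-style options only, for illustration

# parse_known_args() returns the leftovers instead of erroring out on them
args, unknown = parser.parse_known_args(["--project-path", "/data", "--verbose"])

# Leftovers that are not known deprecated flags are still fatal
unknown_e = [p for p in unknown if p not in DEPRECATED]
if len(unknown_e) > 0:
    parser.error("unrecognized arguments: %s" % " ".join(unknown_e))

# Deprecated flags are accepted with a warning and otherwise ignored
for p in unknown:
    if p in DEPRECATED:
        print("%s is no longer a valid argument and will be ignored!" % p)

Note that a deprecated option passed with a value (e.g. --resize-to 2048) leaves the value itself in the unknown list, which a plain membership test like this does not account for.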

View file

@ -35,11 +35,11 @@ except ModuleNotFoundError:
except:
pass
-def classify(lasFile, scalar, slope, threshold, window, verbose=False):
+def classify(lasFile, scalar, slope, threshold, window):
start = datetime.now()
try:
-pdal.run_pdaltranslate_smrf(lasFile, lasFile, scalar, slope, threshold, window, verbose)
+pdal.run_pdaltranslate_smrf(lasFile, lasFile, scalar, slope, threshold, window)
except:
log.ODM_WARNING("Error creating classified file %s" % lasFile)
@ -90,7 +90,7 @@ error = None
def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,
outdir='', resolution=0.1, max_workers=1, max_tile_size=4096,
-verbose=False, decimation=None, keep_unfilled_copy=False,
+decimation=None, keep_unfilled_copy=False,
apply_smoothing=True):
""" Create DEM from multiple radii, and optionally gapfill """
@ -187,7 +187,7 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
d = pdal.json_add_decimation_filter(d, decimation)
pdal.json_add_readers(d, [input_point_cloud])
-pdal.run_pipeline(d, verbose=verbose)
+pdal.run_pipeline(d)
parallel_map(process_tile, tiles, max_workers)
@ -380,5 +380,4 @@ def window_filter_2d(arr, nodata, window, kernel_size, filter):
win_arr = filter(win_arr)
win_arr[nodata_locs] = nodata
win_arr = win_arr[window[0] - expanded_window[0] : window[2] - expanded_window[0], window[1] - expanded_window[1] : window[3] - expanded_window[1]]
log.ODM_DEBUG("Filtered window: %s" % str(window))
return win_arr

View file

@ -133,22 +133,13 @@ def json_add_readers(json, filenames):
return json
-def json_print(json):
-""" Pretty print JSON """
-log.ODM_DEBUG(jsonlib.dumps(json, indent=4, separators=(',', ': ')))
""" Run PDAL commands """
-def run_pipeline(json, verbose=False):
+def run_pipeline(json):
""" Run PDAL Pipeline with provided JSON """
-if verbose:
-json_print(json)
# write to temp file
f, jsonfile = tempfile.mkstemp(suffix='.json')
-if verbose:
-log.ODM_INFO('Pipeline file: %s' % jsonfile)
os.write(f, jsonlib.dumps(json).encode('utf8'))
os.close(f)
@ -157,14 +148,11 @@ def run_pipeline(json, verbose=False):
'pipeline',
'-i %s' % double_quote(jsonfile)
]
-if verbose or sys.platform == 'win32':
-system.run(' '.join(cmd))
-else:
-system.run(' '.join(cmd) + ' > /dev/null 2>&1')
+system.run(' '.join(cmd))
os.remove(jsonfile)
-def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window, verbose=False):
+def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window):
""" Run PDAL translate """
cmd = [
'pdal',
@ -178,12 +166,9 @@ def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window, verbose=
'--filters.smrf.window=%s' % window,
]
-if verbose:
-log.ODM_INFO(' '.join(cmd))
system.run(' '.join(cmd))
-def merge_point_clouds(input_files, output_file, verbose=False):
+def merge_point_clouds(input_files, output_file):
if len(input_files) == 0:
log.ODM_WARNING("Cannot merge point clouds, no point clouds to merge.")
return
@ -194,8 +179,5 @@ def merge_point_clouds(input_files, output_file, verbose=False):
' '.join(map(double_quote, input_files + [output_file])),
]
-if verbose:
-log.ODM_INFO(' '.join(cmd))
system.run(' '.join(cmd))

View file

@ -43,7 +43,6 @@ def memory():
class ODMLogger:
def __init__(self):
-self.show_debug = False
self.json = None
self.json_output_file = None
self.start_time = datetime.datetime.now()
@ -134,10 +133,6 @@ class ODMLogger:
def exception(self, msg):
self.log(FAIL, msg, "EXCEPTION")
-def debug(self, msg):
-if self.show_debug:
-self.log(OKGREEN, msg, "DEBUG")
def close(self):
if self.json is not None and self.json_output_file is not None:
try:
@ -154,4 +149,3 @@ ODM_INFO = logger.info
ODM_WARNING = logger.warning
ODM_ERROR = logger.error
ODM_EXCEPTION = logger.exception
-ODM_DEBUG = logger.debug

View file

@ -8,7 +8,7 @@ from opendm import concurrency
from scipy import signal
import numpy as np
-def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05, depth=8, samples=1, maxVertexCount=100000, verbose=False, available_cores=None, method='gridded', smooth_dsm=True):
+def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05, depth=8, samples=1, maxVertexCount=100000, available_cores=None, method='gridded', smooth_dsm=True):
# Create DSM from point cloud
# Create temporary directory
@ -21,7 +21,7 @@ def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05,
radius_steps = [dsm_radius]
for _ in range(2):
-radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary
+radius_steps.append(radius_steps[-1] * math.sqrt(2)) # sqrt(2) is arbitrary
log.ODM_INFO('Creating DSM for 2.5D mesh')
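The change above from doubling to multiplying by sqrt(2) makes the DSM gapfill radii grow more gradually. A quick sketch of the effect, using the dsm_radius default from the create_25dmesh signature:

import math

radius_steps = [0.07]
for _ in range(2):
    radius_steps.append(radius_steps[-1] * math.sqrt(2))

print(radius_steps)  # approx [0.07, 0.099, 0.14]; the old doubling gave [0.07, 0.14, 0.28]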
@ -33,20 +33,18 @@ def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05,
gapfill=True,
outdir=tmp_directory,
resolution=dsm_resolution,
-verbose=verbose,
max_workers=available_cores,
apply_smoothing=smooth_dsm
)
if method == 'gridded':
-mesh = dem_to_mesh_gridded(os.path.join(tmp_directory, 'mesh_dsm.tif'), outMesh, maxVertexCount, verbose, maxConcurrency=max(1, available_cores))
+mesh = dem_to_mesh_gridded(os.path.join(tmp_directory, 'mesh_dsm.tif'), outMesh, maxVertexCount, maxConcurrency=max(1, available_cores))
elif method == 'poisson':
-dsm_points = dem_to_points(os.path.join(tmp_directory, 'mesh_dsm.tif'), os.path.join(tmp_directory, 'dsm_points.ply'), verbose)
+dsm_points = dem_to_points(os.path.join(tmp_directory, 'mesh_dsm.tif'), os.path.join(tmp_directory, 'dsm_points.ply'))
mesh = screened_poisson_reconstruction(dsm_points, outMesh, depth=depth,
samples=samples,
maxVertexCount=maxVertexCount,
-threads=max(1, available_cores - 1), # poissonrecon can get stuck on some machines if --threads == all cores
-verbose=verbose)
+threads=max(1, available_cores - 1)) # poissonrecon can get stuck on some machines if --threads == all cores
else:
raise 'Not a valid method: ' + method
@ -57,14 +55,13 @@ def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05,
return mesh
-def dem_to_points(inGeotiff, outPointCloud, verbose=False):
+def dem_to_points(inGeotiff, outPointCloud):
log.ODM_INFO('Sampling points from DSM: %s' % inGeotiff)
kwargs = {
'bin': context.dem2points_path,
'outfile': outPointCloud,
-'infile': inGeotiff,
-'verbose': '-verbose' if verbose else ''
+'infile': inGeotiff
}
system.run('"{bin}" -inputFile "{infile}" '
@ -72,12 +69,12 @@ def dem_to_points(inGeotiff, outPointCloud, verbose=False):
'-skirtHeightThreshold 1.5 '
'-skirtIncrements 0.2 '
'-skirtHeightCap 100 '
-' {verbose} '.format(**kwargs))
+'-verbose '.format(**kwargs))
return outPointCloud
-def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxConcurrency=1):
+def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, maxConcurrency=1):
log.ODM_INFO('Creating mesh from DSM: %s' % inGeotiff)
mesh_path, mesh_filename = os.path.split(outMesh)
@ -99,8 +96,7 @@ def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxCo
'outfile': outMeshDirty,
'infile': inGeotiff,
'maxVertexCount': maxVertexCount,
-'maxConcurrency': maxConcurrency,
-'verbose': '-verbose' if verbose else ''
+'maxConcurrency': maxConcurrency
}
system.run('"{bin}" -inputFile "{infile}" '
'-outputFile "{outfile}" '
@ -108,7 +104,7 @@ def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxCo
'-maxVertexCount {maxVertexCount} '
'-maxConcurrency {maxConcurrency} '
'-edgeSwapThreshold 0.15 '
-' {verbose} '.format(**kwargs))
+'-verbose '.format(**kwargs))
break
except Exception as e:
maxConcurrency = math.floor(maxConcurrency / 2)
@ -138,7 +134,7 @@ def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxCo
return outMesh
-def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples = 1, maxVertexCount=100000, pointWeight=4, threads=context.num_cores, verbose=False):
+def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples = 1, maxVertexCount=100000, pointWeight=4, threads=context.num_cores):
mesh_path, mesh_filename = os.path.split(outMesh)
# mesh_path = path/to
@ -165,8 +161,7 @@ def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples =
'depth': depth,
'samples': samples,
'pointWeight': pointWeight,
-'threads': int(threads),
-'verbose': '--verbose' if verbose else ''
+'threads': int(threads)
}
# Run PoissonRecon
@ -178,8 +173,7 @@ def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples =
'--samplesPerNode {samples} '
'--threads {threads} '
'--bType 2 '
-'--linearFit '
-'{verbose}'.format(**poissonReconArgs))
+'--linearFit '.format(**poissonReconArgs))
except Exception as e:
log.ODM_WARNING(str(e))

View file

@ -202,27 +202,22 @@ class OSFMContext:
# Compute feature_process_size
feature_process_size = 2048 # default
-        if ('resize_to_is_set' in args) and args.resize_to > 0:
-            # Legacy
-            log.ODM_WARNING("Legacy option --resize-to (this might be removed in a future version). Use --feature-quality instead.")
-            feature_process_size = int(args.resize_to)
-        else:
-            feature_quality_scale = {
-                'ultra': 1,
-                'high': 0.5,
-                'medium': 0.25,
-                'low': 0.125,
-                'lowest': 0.0675,
-            }
-            max_dim = find_largest_photo_dim(photos)
-            if max_dim > 0:
-                log.ODM_INFO("Maximum photo dimensions: %spx" % str(max_dim))
-                feature_process_size = int(max_dim * feature_quality_scale[args.feature_quality])
-                log.ODM_INFO("Photo dimensions for feature extraction: %ipx" % feature_process_size)
-            else:
-                log.ODM_WARNING("Cannot compute max image dimensions, going with defaults")
+        feature_quality_scale = {
+            'ultra': 1,
+            'high': 0.5,
+            'medium': 0.25,
+            'low': 0.125,
+            'lowest': 0.0675,
+        }
+        max_dim = find_largest_photo_dim(photos)
+        if max_dim > 0:
+            log.ODM_INFO("Maximum photo dimensions: %spx" % str(max_dim))
+            feature_process_size = int(max_dim * feature_quality_scale[args.feature_quality])
+            log.ODM_INFO("Photo dimensions for feature extraction: %ipx" % feature_process_size)
+        else:
+            log.ODM_WARNING("Cannot compute max image dimensions, going with defaults")
# create config file for OpenSfM
if args.matcher_neighbors > 0:
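As a worked example of the feature_quality_scale table above (the photo size is hypothetical):

feature_quality_scale = {'ultra': 1, 'high': 0.5, 'medium': 0.25, 'low': 0.125, 'lowest': 0.0675}

max_dim = 5472  # e.g. the largest side of a 20 MP photo
print(int(max_dim * feature_quality_scale['high']))    # 2736
print(int(max_dim * feature_quality_scale['medium']))  # 1368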

View file

@ -166,10 +166,6 @@ class ODM_Photo:
# parse values from metadata
self.parse_exif_values(path_file)
-# print log message
-log.ODM_DEBUG('Loaded {}'.format(self))
def __str__(self):
return '{} | camera: {} {} | dimensions: {} x {} | lat: {} | lon: {} | alt: {} | band: {} ({})'.format(
self.filename, self.camera_make, self.camera_model, self.width, self.height,

View file

@ -71,7 +71,7 @@ def split(input_point_cloud, outdir, filename_template, capacity, dims=None):
return [os.path.join(outdir, f) for f in os.listdir(outdir)]
-def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=16, sample_radius=0, boundary=None, verbose=False, max_concurrency=1):
+def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=16, sample_radius=0, boundary=None, max_concurrency=1):
"""
Filters a point cloud
"""
@ -82,8 +82,7 @@ def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=
args = [
'--input "%s"' % input_point_cloud,
'--output "%s"' % output_point_cloud,
-'--concurrency %s' % max_concurrency,
-'--verbose' if verbose else '',
+'--concurrency %s' % max_concurrency
]
if sample_radius > 0:

View file

@ -41,7 +41,7 @@ def extract_temperatures_dji(photo, image, dataset_tree):
except ValueError as e:
log.ODM_ERROR("Error during extracting temperature values for file %s : %s" % photo.filename, e)
else:
log.ODM_DEBUG("Only DJI M2EA currently supported, please wait for new updates")
log.ODM_WARNING("Only DJI M2EA currently supported, please wait for new updates")
return image
# Extract raw sensor values from generated image into numpy array
raw_sensor_np = np.array(img)

View file

@ -364,8 +364,10 @@ class ODM_Stage:
if outputs.get('tree') is None:
raise Exception("Assert violation: tree variable is missing from outputs dictionary.")
-if self.args.time:
+try:
system.benchmark(start_time, outputs['tree'].benchmarking, self.name)
+except Exception as e:
+log.ODM_WARNING("Cannot write benchmark file: %s" % str(e))
log.ODM_INFO('Finished %s stage' % self.name)
self.update_progress_end()

View file

@ -5,37 +5,33 @@ from osgeo import gdal
from opendm.loghelpers import double_quote
def get_depthmap_resolution(args, photos):
-    if 'depthmap_resolution_is_set' in args:
-        # Override pc-quality
-        return int(args.depthmap_resolution)
-    else:
-        max_dims = find_largest_photo_dims(photos)
-        min_dim = 320 # Never go lower than this
-        if max_dims is not None:
-            w, h = max_dims
-            max_dim = max(w, h)
-            megapixels = (w * h) / 1e6
-            multiplier = 1
-            if megapixels < 6:
-                multiplier = 2
-            elif megapixels > 42:
-                multiplier = 0.5
-            pc_quality_scale = {
-                'ultra': 0.5,
-                'high': 0.25,
-                'medium': 0.125,
-                'low': 0.0675,
-                'lowest': 0.03375
-            }
-            return max(min_dim, int(max_dim * pc_quality_scale[args.pc_quality] * multiplier))
-        else:
-            log.ODM_WARNING("Cannot compute max image dimensions, going with default depthmap_resolution of 640")
-            return 640 # Sensible default
+    max_dims = find_largest_photo_dims(photos)
+    min_dim = 320 # Never go lower than this
+    if max_dims is not None:
+        w, h = max_dims
+        max_dim = max(w, h)
+        megapixels = (w * h) / 1e6
+        multiplier = 1
+        if megapixels < 6:
+            multiplier = 2
+        elif megapixels > 42:
+            multiplier = 0.5
+        pc_quality_scale = {
+            'ultra': 0.5,
+            'high': 0.25,
+            'medium': 0.125,
+            'low': 0.0675,
+            'lowest': 0.03375
+        }
+        return max(min_dim, int(max_dim * pc_quality_scale[args.pc_quality] * multiplier))
+    else:
+        log.ODM_WARNING("Cannot compute max image dimensions, going with default depthmap_resolution of 640")
+        return 640 # Sensible default
def get_raster_stats(geotiff):
stats = []
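To make the sizing logic above concrete, here is a self-contained sketch of the same computation with the args plumbing stripped out (the photo dimensions in the examples are hypothetical):

def depthmap_resolution(w, h, pc_quality):
    min_dim = 320  # never go lower than this
    max_dim = max(w, h)
    megapixels = (w * h) / 1e6
    multiplier = 2 if megapixels < 6 else (0.5 if megapixels > 42 else 1)
    pc_quality_scale = {'ultra': 0.5, 'high': 0.25, 'medium': 0.125, 'low': 0.0675, 'lowest': 0.03375}
    return max(min_dim, int(max_dim * pc_quality_scale[pc_quality] * multiplier))

print(depthmap_resolution(5472, 3648, 'medium'))  # 20 MP: 5472 * 0.125 * 1 = 684
print(depthmap_resolution(1280, 720, 'lowest'))   # 0.9 MP: clamped up to the 320 floor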

View file

@ -49,12 +49,15 @@ class ODMLoadDatasetStage(types.ODM_Stage):
tree = types.ODM_Tree(args.project_path, args.gcp, args.geo)
outputs['tree'] = tree
-if args.time and io.file_exists(tree.benchmarking):
+if io.file_exists(tree.benchmarking):
# Delete the previously made file
-os.remove(tree.benchmarking)
-with open(tree.benchmarking, 'a') as b:
-b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))
+try:
+os.remove(tree.benchmarking)
+with open(tree.benchmarking, 'a') as b:
+b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))
+except Exception as e:
+log.ODM_WARNING("Cannot write benchmark file: %s" % str(e))
# check if the image filename is supported
def valid_image_filename(filename):
(pathfn, ext) = os.path.splitext(filename)
@ -62,7 +65,6 @@ class ODMLoadDatasetStage(types.ODM_Stage):
# Get supported images from dir
def get_images(in_dir):
-log.ODM_DEBUG(in_dir)
entries = os.listdir(in_dir)
valid, rejects = [], []
for f in entries:

View file

@ -93,12 +93,12 @@ class ODMMvsTexStage(types.ODM_Stage):
'bin': context.mvstex_path,
'out_dir': os.path.join(r['out_dir'], "odm_textured_model_geo"),
'model': r['model'],
-'dataTerm': args.texturing_data_term,
-'outlierRemovalType': args.texturing_outlier_removal_type,
+'dataTerm': 'gmi',
+'outlierRemovalType': 'gauss_clamping',
'skipGlobalSeamLeveling': skipGlobalSeamLeveling,
'skipLocalSeamLeveling': skipLocalSeamLeveling,
'keepUnseenFaces': keepUnseenFaces,
-'toneMapping': args.texturing_tone_mapping,
+'toneMapping': 'none',
'nadirMode': nadir,
'maxTextureSize': '--max_texture_size=%s' % max_texture_size,
'nvm_file': r['nvm_file'],

View file

@ -26,17 +26,13 @@ class ODMApp:
"""
Initializes the application and defines the ODM application pipeline stages
"""
-if args.debug:
-log.logger.show_debug = True
json_log_paths = [os.path.join(args.project_path, "log.json")]
if args.copy_to:
json_log_paths.append(args.copy_to)
log.logger.init_json_output(json_log_paths, args)
-dataset = ODMLoadDatasetStage('dataset', args, progress=5.0,
-verbose=args.verbose)
+dataset = ODMLoadDatasetStage('dataset', args, progress=5.0)
split = ODMSplitStage('split', args, progress=75.0)
merge = ODMMergeStage('merge', args, progress=100.0)
opensfm = ODMOpenSfMStage('opensfm', args, progress=25.0)
@ -47,15 +43,12 @@ class ODMApp:
oct_tree=max(1, min(14, args.mesh_octree_depth)),
samples=1.0,
point_weight=4.0,
-max_concurrency=args.max_concurrency,
-verbose=args.verbose)
+max_concurrency=args.max_concurrency)
texturing = ODMMvsTexStage('mvs_texturing', args, progress=70.0)
georeferencing = ODMGeoreferencingStage('odm_georeferencing', args, progress=80.0,
-gcp_file=args.gcp,
-verbose=args.verbose)
+gcp_file=args.gcp)
dem = ODMDEMStage('odm_dem', args, progress=90.0,
-max_concurrency=args.max_concurrency,
-verbose=args.verbose)
+max_concurrency=args.max_concurrency)
orthophoto = ODMOrthoPhotoStage('odm_orthophoto', args, progress=98.0)
report = ODMReport('odm_report', args, progress=99.0)
postprocess = ODMPostProcess('odm_postprocess', args, progress=100.0)

View file

@ -1,4 +1,4 @@
-import os, json
+import os, json, math
from shutil import copyfile
from opendm import io
@ -58,8 +58,7 @@ class ODMDEMStage(types.ODM_Stage):
args.smrf_scalar,
args.smrf_slope,
args.smrf_threshold,
-args.smrf_window,
-verbose=args.verbose
+args.smrf_window
)
with open(pc_classify_marker, 'w') as f:
@ -91,7 +90,7 @@ class ODMDEMStage(types.ODM_Stage):
radius_steps = [(resolution / 100.0) / 2.0]
for _ in range(args.dem_gapfill_steps - 1):
-radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary, maybe there's a better value?
+radius_steps.append(radius_steps[-1] * math.sqrt(2)) # sqrt(2) is arbitrary, maybe there's a better value?
for product in products:
commands.create_dem(
@ -103,7 +102,6 @@ class ODMDEMStage(types.ODM_Stage):
outdir=odm_dem_root,
resolution=resolution / 100.0,
decimation=args.dem_decimation,
-verbose=args.verbose,
max_workers=args.max_concurrency,
keep_unfilled_copy=args.dem_euclidean_map
)

View file

@ -53,7 +53,6 @@ class ODMFilterPoints(types.ODM_Stage):
standard_deviation=args.pc_filter,
sample_radius=args.pc_sample,
boundary=boundary_offset(outputs.get('boundary'), reconstruction.get_proj_offset()),
-verbose=args.verbose,
max_concurrency=args.max_concurrency)
# Quick check

View file

@ -27,9 +27,7 @@ class ODMeshingStage(types.ODM_Stage):
samples=self.params.get('samples'),
maxVertexCount=self.params.get('max_vertex'),
pointWeight=self.params.get('point_weight'),
-threads=max(1, self.params.get('max_concurrency') - 1), # poissonrecon can get stuck on some machines if --threads == all cores
-verbose=self.params.get('verbose'))
+threads=max(1, self.params.get('max_concurrency') - 1)) # poissonrecon can get stuck on some machines if --threads == all cores
else:
log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
tree.odm_mesh)
@ -68,7 +66,6 @@ class ODMeshingStage(types.ODM_Stage):
depth=self.params.get('oct_tree'),
maxVertexCount=self.params.get('max_vertex'),
samples=self.params.get('samples'),
-verbose=self.params.get('verbose'),
available_cores=args.max_concurrency,
method='poisson' if args.fast_orthophoto else 'gridded',
smooth_dsm=True)

View file

@ -18,7 +18,6 @@ class ODMOrthoPhotoStage(types.ODM_Stage):
def process(self, args, outputs):
tree = outputs['tree']
reconstruction = outputs['reconstruction']
-verbose = '-verbose' if args.verbose else ''
# define paths and create working directories
system.mkdir_p(tree.odm_orthophoto)
@ -42,8 +41,7 @@ class ODMOrthoPhotoStage(types.ODM_Stage):
'corners': tree.odm_orthophoto_corners,
'res': resolution,
'bands': '',
-'depth_idx': '',
-'verbose': verbose
+'depth_idx': ''
}
models = []
@ -85,7 +83,7 @@ class ODMOrthoPhotoStage(types.ODM_Stage):
# run odm_orthophoto
system.run('"{odm_ortho_bin}" -inputFiles {models} '
-'-logFile "{log}" -outputFile "{ortho}" -resolution {res} {verbose} '
+'-logFile "{log}" -outputFile "{ortho}" -resolution {res} -verbose '
'-outputCornerFile "{corners}" {bands} {depth_idx}'.format(**kwargs))
# Create georeferenced GeoTiff

View file

@ -77,8 +77,8 @@ class ODMOpenMVSStage(types.ODM_Stage):
gpu_config = []
use_gpu = has_gpu(args)
if use_gpu:
# gpu_config.append("--cuda-device -3")
gpu_config.append("--cuda-device -1")
gpu_config.append("--cuda-device -3")
# gpu_config.append("--cuda-device -1")
else:
gpu_config.append("--cuda-device -2")
@ -87,17 +87,13 @@ class ODMOpenMVSStage(types.ODM_Stage):
extra_config = []
-if not args.pc_geometric:
-extra_config.append("--geometric-iters 0")
masks_dir = os.path.join(tree.opensfm, "undistorted", "masks")
masks = os.path.exists(masks_dir) and len(os.listdir(masks_dir)) > 0
if masks:
extra_config.append("--ignore-mask-label 0")
-sharp = args.pc_geometric
with open(densify_ini_file, 'w+') as f:
-f.write("Optimize = %s\n" % (7 if sharp else 3))
+f.write("Optimize = 7\n")
def run_densify():
system.run('"%s" "%s" %s' % (context.omvs_densify_path,
@ -173,27 +169,22 @@ class ODMOpenMVSStage(types.ODM_Stage):
try:
system.run('"%s" "%s" %s' % (context.omvs_densify_path, sf, ' '.join(config + gpu_config + extra_config)))
-# Filter
-if args.pc_filter > 0:
-system.run('"%s" "%s" --filter-point-cloud -1 -v 0 %s' % (context.omvs_densify_path, scene_dense_mvs, ' '.join(gpu_config)))
-else:
-# Just rename
-log.ODM_INFO("Skipped filtering, %s --> %s" % (scene_ply_unfiltered, scene_ply))
-os.rename(scene_ply_unfiltered, scene_ply)
except:
log.ODM_WARNING("Sub-scene %s could not be reconstructed, skipping..." % sf)
-if not io.file_exists(scene_ply):
+if not io.file_exists(scene_ply_unfiltered):
scene_ply_files.pop()
log.ODM_WARNING("Could not compute PLY for subscene %s" % sf)
+else:
+# Do not filter
+os.rename(scene_ply_unfiltered, scene_ply)
else:
log.ODM_WARNING("Found existing dense scene file %s" % scene_ply)
# Merge
log.ODM_INFO("Merging %s scene files" % len(scene_ply_files))
if len(scene_ply_files) == 0:
log.ODM_ERROR("Could not compute dense point cloud (no PLY files available).")
raise system.ExitException("Could not compute dense point cloud (no PLY files available).")
if len(scene_ply_files) == 1:
# Simply rename
os.replace(scene_ply_files[0], tree.openmvs_model)
@ -202,32 +193,12 @@ class ODMOpenMVSStage(types.ODM_Stage):
# Merge
fast_merge_ply(scene_ply_files, tree.openmvs_model)
else:
-def skip_filtering():
-# Just rename
-scene_dense_ply = os.path.join(tree.openmvs, 'scene_dense.ply')
-log.ODM_INFO("Skipped filtering, %s --> %s" % (scene_dense_ply, tree.openmvs_model))
-os.rename(scene_dense_ply, tree.openmvs_model)
-# Filter all at once
-if args.pc_filter > 0:
-if os.path.exists(scene_dense):
-config = [
-"--filter-point-cloud -1",
-'-i "%s"' % scene_dense,
-"-v 0"
-]
-try:
-system.run('"%s" %s' % (context.omvs_densify_path, ' '.join(config + gpu_config + extra_config)))
-except system.SubprocessException as e:
-if e.errorCode == 137 or e.errorCode == 3221226505:
-log.ODM_WARNING("OpenMVS filtering ran out of memory, visibility checks will be skipped.")
-skip_filtering()
-else:
-raise e
-else:
-raise system.ExitException("Cannot find scene_dense.mvs, dense reconstruction probably failed. Exiting...")
-else:
-skip_filtering()
+scene_dense_ply = os.path.join(tree.openmvs, 'scene_dense.ply')
+if not os.path.exists(scene_dense_ply):
+raise system.ExitException("Dense reconstruction failed. This could be due to poor georeferencing or insufficient image overlap.")
+os.rename(scene_dense_ply, tree.openmvs_model)
self.update_progress(95)