mirror of https://github.com/OpenDroneMap/ODM
split-merge camera models handling, ODM_DEBUG --> ODM_INFO, debug flag
Former-commit-id: 02370b0a5c
pull/1161/head
parent 52f82bb20f
commit 9322b176f5
@@ -25,23 +25,10 @@ def alphanumeric_string(string):
     return string
 
 def path_or_json_string(string):
-    if string == "":
-        return {}
-
-    if string.startswith("[") or string.startswith("{"):
-        try:
-            return json.loads(string)
-        except:
-            raise argparse.ArgumentTypeError("{0} is not a valid JSON string.".format(string))
-    elif io.file_exists(string):
-        try:
-            with open(string, 'r') as f:
-                return json.loads(f.read())
-        except:
-            raise argparse.ArgumentTypeError("{0} is not a valid JSON file.".format(string))
-    else:
-        raise argparse.ArgumentTypeError("{0} is not a valid JSON file or string.".format(string))
+    try:
+        return io.path_or_json_string_to_dict(string)
+    except ValueError as e:
+        raise argparse.ArgumentTypeError("{0}".format(str(e)))
 
 # Django URL validation regex
 def url_string(string):

@@ -541,6 +528,12 @@ def config():
                         default=False,
                         help='Generates a benchmark file with runtime info\n'
                              'Default: %(default)s')
 
+    parser.add_argument('--debug',
+                        action='store_true',
+                        default=False,
+                        help='Print debug messages\n'
+                             'Default: %(default)s')
+
     parser.add_argument('--version',
                         action='version',
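For context, a rough sketch of what the refactored argparse type does now (the parser wiring below is illustrative, not part of this commit; the heavy lifting moves to opendm/io.py and config.py only translates ValueError into an argparse error):

    import argparse
    from opendm import config

    parser = argparse.ArgumentParser()
    # --cameras accepts inline JSON or a path to a JSON file
    parser.add_argument('--cameras', default='', type=config.path_or_json_string)

    args = parser.parse_args(['--cameras', '{"v2 mycam": {"projection_type": "brown"}}'])
    # args.cameras -> {'v2 mycam': {'projection_type': 'brown'}}
    # malformed input now surfaces as a clean ArgumentTypeError
    # ("... is not a valid JSON string.") instead of a raw traceback
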
@@ -10,7 +10,7 @@ import math
 def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrency=1, tmpdir=None, scale=1):
     if io.file_exists(orthophoto_file) and io.file_exists(crop_area_file):
         from opendm.grass_engine import grass
-        log.ODM_DEBUG("Computing cutline")
+        log.ODM_INFO("Computing cutline")
 
         if tmpdir and not io.dir_exists(tmpdir):
             system.mkdir_p(tmpdir)

@@ -19,7 +19,7 @@ def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrenc
         scaled_orthophoto = None
 
         if scale < 1:
-            log.ODM_DEBUG("Scaling orthophoto to %s%% to compute cutline" % (scale * 100))
+            log.ODM_INFO("Scaling orthophoto to %s%% to compute cutline" % (scale * 100))
 
             scaled_orthophoto = os.path.join(tmpdir, os.path.basename(io.related_file_path(orthophoto_file, postfix=".scaled")))
             # Scale orthophoto before computing cutline

@@ -37,13 +37,13 @@ def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrenc
 
         try:
             ortho_width,ortho_height = get_image_size.get_image_size(orthophoto_file, fallback_on_error=False)
-            log.ODM_DEBUG("Orthophoto dimensions are %sx%s" % (ortho_width, ortho_height))
+            log.ODM_INFO("Orthophoto dimensions are %sx%s" % (ortho_width, ortho_height))
             number_lines = int(max(8, math.ceil(min(ortho_width, ortho_height) / 256.0)))
         except:
-            log.ODM_DEBUG("Cannot compute orthophoto dimensions, setting arbitrary number of lines.")
+            log.ODM_INFO("Cannot compute orthophoto dimensions, setting arbitrary number of lines.")
             number_lines = 32
 
-        log.ODM_DEBUG("Number of lines: %s" % number_lines)
+        log.ODM_INFO("Number of lines: %s" % number_lines)
 
         gctx = grass.create_context({'auto_cleanup' : False, 'tmpdir': tmpdir})
         gctx.add_param('orthophoto_file', orthophoto_file)
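As a quick sanity check of the number_lines formula above (dimensions are illustrative):

    import math

    # e.g. a 10000x8000 px orthophoto: min(10000, 8000) / 256.0 = 31.25
    int(max(8, math.ceil(min(10000, 8000) / 256.0)))   # -> 32
    # a small 500x400 px orthophoto is clamped to the floor of 8 lines
    int(max(8, math.ceil(min(500, 400) / 256.0)))      # -> 8
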
@@ -178,7 +178,7 @@ def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window, verbose=
     ]
 
     if verbose:
-        log.ODM_DEBUG(' '.join(cmd))
+        log.ODM_INFO(' '.join(cmd))
 
     system.run(' '.join(cmd))

@@ -194,7 +194,7 @@ def merge_point_clouds(input_files, output_file, verbose=False):
     ]
 
     if verbose:
-        log.ODM_DEBUG(' '.join(cmd))
+        log.ODM_INFO(' '.join(cmd))
 
     system.run(' '.join(cmd))
opendm/io.py (22 changed lines)

@@ -1,6 +1,6 @@
 import os
 import shutil, errno
+import json
 
 def get_files_list(path_dir):
     return os.listdir(path_dir)

@@ -72,4 +72,22 @@ def related_file_path(input_file_path, prefix="", postfix=""):
     # basename = file
     # ext = .ext
 
-    return os.path.join(path, "{}{}{}{}".format(prefix, basename, postfix, ext))
+    return os.path.join(path, "{}{}{}{}".format(prefix, basename, postfix, ext))
+
+def path_or_json_string_to_dict(string):
+    if string == "":
+        return {}
+
+    if string.startswith("[") or string.startswith("{"):
+        try:
+            return json.loads(string)
+        except:
+            raise ValueError("{0} is not a valid JSON string.".format(string))
+    elif file_exists(string):
+        try:
+            with open(string, 'r') as f:
+                return json.loads(f.read())
+        except:
+            raise ValueError("{0} is not a valid JSON file.".format(string))
+    else:
+        raise ValueError("{0} is not a valid JSON file or string.".format(string))
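A quick usage sketch of the new helper (return values shown as comments; the file path is hypothetical):

    from opendm import io

    io.path_or_json_string_to_dict('')                   # -> {}
    io.path_or_json_string_to_dict('{"focal": 0.85}')    # -> {'focal': 0.85}
    io.path_or_json_string_to_dict('cameras.json')       # -> parsed file contents, if the file exists and is valid JSON
    io.path_or_json_string_to_dict('nope')               # -> ValueError: nope is not a valid JSON file or string.
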
@@ -116,7 +116,7 @@ def parse_srs_header(header):
     :param header (str) line
     :return Proj object
     """
-    log.ODM_DEBUG('Parsing SRS header: %s' % header)
+    log.ODM_INFO('Parsing SRS header: %s' % header)
     header = header.strip()
     ref = header.split(' ')
     try:
@@ -2,19 +2,23 @@ import sys
 HEADER = '\033[95m'
 OKBLUE = '\033[94m'
 OKGREEN = '\033[92m'
+DEFAULT = '\033[39m'
 WARNING = '\033[93m'
 FAIL = '\033[91m'
 ENDC = '\033[0m'
 
 # logging has too many quirks...
 class ODMLogger:
+    def __init__(self):
+        self.show_debug = False
+
     def log(self, startc, msg, level_name):
         level = ("[" + level_name + "]").ljust(9)
         print("%s%s %s%s" % (startc, level, msg, ENDC))
         sys.stdout.flush()
 
     def info(self, msg):
-        self.log(OKBLUE, msg, "INFO")
+        self.log(DEFAULT, msg, "INFO")
 
     def warning(self, msg):
         self.log(WARNING, msg, "WARNING")

@@ -26,7 +30,8 @@ class ODMLogger:
         self.log(FAIL, msg, "EXCEPTION")
 
     def debug(self, msg):
-        self.log(OKGREEN, msg, "DEBUG")
+        if self.show_debug:
+            self.log(OKGREEN, msg, "DEBUG")
 
 logger = ODMLogger()
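A behavior sketch for the logger change (output rendering is approximate): debug output is now opt-in, which is what the new --debug flag and the ODMApp hook further below are for.

    from opendm import log

    log.logger.debug("quiet")       # prints nothing; show_debug defaults to False
    log.logger.show_debug = True    # what ODMApp does when --debug is passed
    log.logger.debug("loud")        # now prints the message with a green [DEBUG] prefix
    log.logger.info("hello")        # INFO now uses the terminal's default color instead of blue
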
@@ -13,7 +13,7 @@ def get_orthophoto_vars(args):
     }
 
 def build_overviews(orthophoto_file):
-    log.ODM_DEBUG("Building Overviews")
+    log.ODM_INFO("Building Overviews")
     kwargs = {'orthophoto': orthophoto_file}
 
     # Run gdaladdo
@@ -83,7 +83,7 @@ class OSFMContext:
             # check for image_groups.txt (split-merge)
             image_groups_file = os.path.join(args.project_path, "image_groups.txt")
             if io.file_exists(image_groups_file):
-                log.ODM_DEBUG("Copied image_groups.txt to OpenSfM directory")
+                log.ODM_INFO("Copied image_groups.txt to OpenSfM directory")
                 io.copy(image_groups_file, os.path.join(self.opensfm_project_path, "image_groups.txt"))
 
             # check for cameras

@@ -92,7 +92,7 @@ class OSFMContext:
                     camera_overrides = camera.get_opensfm_camera_models(args.cameras)
                     with open(os.path.join(self.opensfm_project_path, "camera_models_overrides.json"), 'w') as f:
                         f.write(json.dumps(camera_overrides))
-                    log.ODM_DEBUG("Wrote camera_models_overrides.json to OpenSfM directory")
+                    log.ODM_INFO("Wrote camera_models_overrides.json to OpenSfM directory")
                 except Exception as e:
                     log.ODM_WARNING("Cannot set camera_models_overrides.json: %s" % str(e))
 

@@ -116,7 +116,7 @@ class OSFMContext:
             # TODO: add BOW matching when dataset is not georeferenced (no gps)
 
             if has_alt:
-                log.ODM_DEBUG("Altitude data detected, enabling it for GPS alignment")
+                log.ODM_INFO("Altitude data detected, enabling it for GPS alignment")
                 config.append("use_altitude_tag: yes")
 
             if has_alt or gcp_path:

@@ -126,7 +126,7 @@ class OSFMContext:
                 config.append("align_orientation_prior: vertical")
 
             if args.use_hybrid_bundle_adjustment:
-                log.ODM_DEBUG("Enabling hybrid bundle adjustment")
+                log.ODM_INFO("Enabling hybrid bundle adjustment")
                 config.append("bundle_interval: 100")          # Bundle after adding 'bundle_interval' cameras
                 config.append("bundle_new_points_ratio: 1.2")  # Bundle when (new points) / (bundled points) > bundle_new_points_ratio
                 config.append("local_bundle_radius: 1")        # Max image graph distance for images to be included in local bundle adjustment

@@ -139,7 +139,7 @@ class OSFMContext:
             config = config + append_config
 
             # write config file
-            log.ODM_DEBUG(config)
+            log.ODM_INFO(config)
             config_filename = self.get_config_file_path()
             with open(config_filename, 'w') as fout:
                 fout.write("\n".join(config))

@@ -209,14 +209,14 @@ class OSFMContext:
 
     def update_config(self, cfg_dict):
         cfg_file = self.get_config_file_path()
-        log.ODM_DEBUG("Updating %s" % cfg_file)
+        log.ODM_INFO("Updating %s" % cfg_file)
         if os.path.exists(cfg_file):
             try:
                 with open(cfg_file) as fin:
                     cfg = yaml.safe_load(fin)
                 for k, v in cfg_dict.items():
                     cfg[k] = v
-                    log.ODM_DEBUG("%s: %s" % (k, v))
+                    log.ODM_INFO("%s: %s" % (k, v))
                 with open(cfg_file, 'w') as fout:
                     fout.write(yaml.dump(cfg, default_flow_style=False))
             except Exception as e:

@@ -244,7 +244,7 @@ class OSFMContext:
             with open(file, 'w') as f:
                 f.write("\n".join(lines))
 
-            log.ODM_DEBUG("Wrote %s with absolute paths" % file)
+            log.ODM_INFO("Wrote %s with absolute paths" % file)
         else:
             log.ODM_WARNING("No %s found, cannot create %s" % (image_list_file, file))
 

@@ -266,10 +266,12 @@ def get_submodel_argv(project_name = None, submodels_path = None, submodel_name
         adding --dem-euclidean-map
         adding --skip-3dmodel (split-merge does not support 3D model merging)
         removing --gcp (the GCP path if specified is always "gcp_list.txt")
+        reading the contents of --cameras
     """
     assure_always = ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel']
     remove_always_2 = ['--split', '--split-overlap', '--rerun-from', '--rerun', '--gcp', '--end-with', '--sm-cluster']
     remove_always_1 = ['--rerun-all', '--pc-csv', '--pc-las', '--pc-ept']
+    read_json_always = ['--cameras']
 
     argv = sys.argv
 

@@ -300,6 +302,17 @@ def get_submodel_argv(project_name = None, submodels_path = None, submodel_name
             result.append(arg)
             found_args[arg] = True
             i += 1
+        elif arg in read_json_always:
+            try:
+                jsond = io.path_or_json_string_to_dict(argv[i + 1])
+                result.append(arg)
+                result.append(json.dumps(jsond))
+                found_args[arg] = True
+            except ValueError as e:
+                log.ODM_WARNING("Cannot parse/read JSON: {}".format(str(e)))
+                pass
+            finally:
+                i += 2
         elif arg in remove_always_2:
             i += 2
         elif arg in remove_always_1:

@@ -317,7 +330,7 @@ def get_submodel_argv(project_name = None, submodels_path = None, submodel_name
             result.append(arg)
 
     if not found_args.get('project_name') and submodel_name:
-        result.append(submodel_name)
+        result.append(submodel_name)
 
     return result
 
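An illustrative walk-through of the new read_json_always handling (argv and file contents are hypothetical): when the parent process was started with a --cameras value, get_submodel_argv now inlines the parsed JSON rather than forwarding a path, presumably so each submodel run is self-contained and does not depend on a path that may not exist where the submodel is processed.

    # parent invocation (hypothetical):
    #   ['run.py', '--cameras', '/data/project/cameras.json', ..., 'myproject']
    # submodel argv (roughly):
    #   [..., '--cameras', '{"v2 mycam": {"projection_type": "brown"}}', ...]
    # i.e. io.path_or_json_string_to_dict() reads the file, json.dumps()
    # re-serializes it inline, and a parse failure only logs an ODM_WARNING
    # and drops the flag (the finally block still advances past flag + value).
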
@@ -73,7 +73,7 @@ class LocalRemoteExecutor:
         # Create queue
         q = queue.Queue()
         for pp in self.project_paths:
-            log.ODM_DEBUG("LRE: Adding to queue %s" % pp)
+            log.ODM_INFO("LRE: Adding to queue %s" % pp)
             q.put(taskClass(pp, self.node, self.params))
 
         def remove_task_safe(task):

@@ -90,12 +90,12 @@ class LocalRemoteExecutor:
                 log.ODM_INFO("LRE: No remote tasks left to cleanup")
 
             for task in self.params['tasks']:
-                log.ODM_DEBUG("LRE: Removing remote task %s... %s" % (task.uuid, 'OK' if remove_task_safe(task) else 'NO'))
+                log.ODM_INFO("LRE: Removing remote task %s... %s" % (task.uuid, 'OK' if remove_task_safe(task) else 'NO'))
 
         def handle_result(task, local, error = None, partial=False):
             def cleanup_remote():
                 if not partial and task.remote_task:
-                    log.ODM_DEBUG("LRE: Cleaning up remote task (%s)... %s" % (task.remote_task.uuid, 'OK' if remove_task_safe(task.remote_task) else 'NO'))
+                    log.ODM_INFO("LRE: Cleaning up remote task (%s)... %s" % (task.remote_task.uuid, 'OK' if remove_task_safe(task.remote_task) else 'NO'))
                     self.params['tasks'].remove(task.remote_task)
                     task.remote_task = None
 

@@ -124,7 +124,7 @@ class LocalRemoteExecutor:
                         pass
 
                     nonloc.max_remote_tasks = max(1, node_task_limit)
-                    log.ODM_DEBUG("LRE: Node task limit reached. Setting max remote tasks to %s" % node_task_limit)
+                    log.ODM_INFO("LRE: Node task limit reached. Setting max remote tasks to %s" % node_task_limit)
 
 
                 # Retry, but only if the error is not related to a task failure

@@ -138,7 +138,7 @@ class LocalRemoteExecutor:
                     cleanup_remote()
                     q.task_done()
 
-                    log.ODM_DEBUG("LRE: Re-queueing %s (retries: %s)" % (task, task.retries))
+                    log.ODM_INFO("LRE: Re-queueing %s (retries: %s)" % (task, task.retries))
                     q.put(task)
                     if not local: remote_running_tasks.increment(-1)
                     return

@@ -185,7 +185,7 @@ class LocalRemoteExecutor:
 
                 # Yield to local processing
                 if not nonloc.local_processing:
-                    log.ODM_DEBUG("LRE: Yielding to local processing, sending %s back to the queue" % task)
+                    log.ODM_INFO("LRE: Yielding to local processing, sending %s back to the queue" % task)
                     q.put(task)
                     q.task_done()
                     time.sleep(0.05)

@@ -277,7 +277,7 @@ class Task:
         now = datetime.datetime.now()
         if self.wait_until > now:
             wait_for = (self.wait_until - now).seconds + 1
-            log.ODM_DEBUG("LRE: Waiting %s seconds before processing %s" % (wait_for, self))
+            log.ODM_INFO("LRE: Waiting %s seconds before processing %s" % (wait_for, self))
             time.sleep(wait_for)
 
         # TODO: we could consider uploading multiple tasks

@@ -349,7 +349,7 @@ class Task:
 
         def print_progress(percentage):
             if (time.time() - nonloc.last_update >= 2) or int(percentage) == 100:
-                log.ODM_DEBUG("LRE: Upload of %s at [%s%%]" % (self, int(percentage)))
+                log.ODM_INFO("LRE: Upload of %s at [%s%%]" % (self, int(percentage)))
                 nonloc.last_update = time.time()
 
         # Upload task

@@ -384,18 +384,18 @@ class Task:
                 # Print a status message once in a while
                 nonloc.status_callback_calls += 1
                 if nonloc.status_callback_calls > 30:
-                    log.ODM_DEBUG("LRE: %s (%s) is still running" % (self, task.uuid))
+                    log.ODM_INFO("LRE: %s (%s) is still running" % (self, task.uuid))
                     nonloc.status_callback_calls = 0
             try:
                 def print_progress(percentage):
                     if (time.time() - nonloc.last_update >= 2) or int(percentage) == 100:
-                        log.ODM_DEBUG("LRE: Download of %s at [%s%%]" % (self, int(percentage)))
+                        log.ODM_INFO("LRE: Download of %s at [%s%%]" % (self, int(percentage)))
                         nonloc.last_update = time.time()
 
                 task.wait_for_completion(status_callback=status_callback)
-                log.ODM_DEBUG("LRE: Downloading assets for %s" % self)
+                log.ODM_INFO("LRE: Downloading assets for %s" % self)
                 task.download_assets(self.project_path, progress_callback=print_progress)
-                log.ODM_DEBUG("LRE: Downloaded and extracted assets for %s" % self)
+                log.ODM_INFO("LRE: Downloaded and extracted assets for %s" % self)
                 done()
             except exceptions.TaskFailedError as e:
                 # Try to get output
@@ -57,7 +57,7 @@ def run(cmd, env_paths=[context.superbuild_bin_path], env_vars={}):
     """Run a system command"""
     global running_subprocesses
 
-    log.ODM_DEBUG('running %s' % cmd)
+    log.ODM_INFO('running %s' % cmd)
 
     env = os.environ.copy()
     if len(env_paths) > 0:
@@ -106,7 +106,7 @@ class ODM_Reconstruction(object):
             with open(output_coords_file, 'w') as f:
                 coords_header = gcp.wgs84_utm_zone()
                 f.write(coords_header + "\n")
-                log.ODM_DEBUG("Generated coords file from GCP: %s" % coords_header)
+                log.ODM_INFO("Generated coords file from GCP: %s" % coords_header)
 
             # Convert GCP file to a UTM projection since the rest of the pipeline
             # does not handle other SRS well.
run.py (10 changed lines)

@@ -18,14 +18,14 @@ if __name__ == '__main__':
 
     # Print args
     args_dict = vars(args)
-    log.ODM_DEBUG('==============')
+    log.ODM_INFO('==============')
     for k in sorted(args_dict.keys()):
         # Don't leak token
         if k == 'sm_cluster' and args_dict[k] is not None:
-            log.ODM_DEBUG('%s: True' % k)
+            log.ODM_INFO('%s: True' % k)
         else:
-            log.ODM_DEBUG('%s: %s' % (k, args_dict[k]))
-    log.ODM_DEBUG('==============')
+            log.ODM_INFO('%s: %s' % (k, args_dict[k]))
+    log.ODM_INFO('==============')
 
     progressbc.set_project_name(args.name)

@@ -37,7 +37,7 @@ if __name__ == '__main__':
 
     # If user asks to rerun everything, delete all of the existing progress directories.
     if args.rerun_all:
-        log.ODM_DEBUG("Rerun all -- Removing old data")
+        log.ODM_INFO("Rerun all -- Removing old data")
         os.system("rm -rf " +
                   " ".join([
                       quote(os.path.join(args.project_path, "odm_georeferencing")),
@@ -72,7 +72,7 @@ class ODMLoadDatasetStage(types.ODM_Stage):
         system.mkdir_p(tree.odm_georeferencing)
         if not args.use_3dmesh: system.mkdir_p(tree.odm_25dgeoreferencing)
 
-        log.ODM_DEBUG('Loading dataset from: %s' % images_dir)
+        log.ODM_INFO('Loading dataset from: %s' % images_dir)
 
         # check if we rerun cell or not
         images_database_file = io.join_paths(tree.root_path, 'images.json')

@@ -84,6 +84,7 @@ class ODMLoadDatasetStage(types.ODM_Stage):
 
             photos = []
             with open(tree.dataset_list, 'w') as dataset_list:
+                log.ODM_INFO("Loading %s images" % len(path_files))
                 for f in path_files:
                     photos += [types.ODM_Photo(f)]
                     dataset_list.write(photos[-1].filename + '\n')
@@ -35,7 +35,7 @@ class ODMMvsTexStage(types.ODM_Stage):
             odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj)
 
             if not io.file_exists(odm_textured_model_obj) or self.rerun():
-                log.ODM_DEBUG('Writing MVS Textured file in: %s'
+                log.ODM_INFO('Writing MVS Textured file in: %s'
                               % odm_textured_model_obj)
 
                 # Format arguments to fit Mvs-Texturing app
@@ -24,6 +24,8 @@ class ODMApp:
         """
         Initializes the application and defines the ODM application pipeline stages
         """
+        if args.debug:
+            log.logger.show_debug = True
 
         dataset = ODMLoadDatasetStage('dataset', args, progress=5.0,
                                       verbose=args.verbose)
@@ -19,7 +19,7 @@ class ODMeshingStage(types.ODM_Stage):
         # Create full 3D model unless --skip-3dmodel is set
         if not args.skip_3dmodel:
             if not io.file_exists(tree.odm_mesh) or self.rerun():
-                log.ODM_DEBUG('Writing ODM Mesh file in: %s' % tree.odm_mesh)
+                log.ODM_INFO('Writing ODM Mesh file in: %s' % tree.odm_mesh)
 
                 mesh.screened_poisson_reconstruction(tree.filtered_point_cloud,
                                                      tree.odm_mesh,

@@ -41,7 +41,7 @@ class ODMeshingStage(types.ODM_Stage):
         if not args.use_3dmesh:
             if not io.file_exists(tree.odm_25dmesh) or self.rerun():
 
-                log.ODM_DEBUG('Writing ODM 2.5D Mesh file in: %s' % tree.odm_25dmesh)
+                log.ODM_INFO('Writing ODM 2.5D Mesh file in: %s' % tree.odm_25dmesh)
                 ortho_resolution = gsd.cap_resolution(args.orthophoto_resolution, tree.opensfm_reconstruction, ignore_gsd=args.ignore_gsd) / 100.0
 
                 dsm_multiplier = max(1.0, gsd.rounded_gsd(tree.opensfm_reconstruction, default_value=4, ndigits=3, ignore_gsd=args.ignore_gsd))

@@ -58,7 +58,7 @@ class ODMeshingStage(types.ODM_Stage):
                 if args.fast_orthophoto:
                     dsm_radius *= 2
 
-                log.ODM_DEBUG('ODM 2.5D DSM resolution: %s' % dsm_resolution)
+                log.ODM_INFO('ODM 2.5D DSM resolution: %s' % dsm_resolution)
 
                 mesh.create_25dmesh(tree.filtered_point_cloud, tree.odm_25dmesh,
                                     dsm_radius=dsm_radius,
@@ -80,10 +80,10 @@ class ODMSplitStage(types.ODM_Stage):
                     submodel_images_dir = os.path.abspath(sp_octx.path("..", "images"))
 
                     if reconstruction.gcp.make_filtered_copy(submodel_gcp_file, submodel_images_dir):
-                        log.ODM_DEBUG("Copied filtered GCP file to %s" % submodel_gcp_file)
+                        log.ODM_INFO("Copied filtered GCP file to %s" % submodel_gcp_file)
                         io.copy(submodel_gcp_file, os.path.abspath(sp_octx.path("gcp_list.txt")))
                     else:
-                        log.ODM_DEBUG("No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP" % sp_octx.name())
+                        log.ODM_INFO("No GCP will be copied for %s, not enough images in the submodel are referenced by the GCP" % sp_octx.name())
 
             # Reconstruct each submodel
             log.ODM_INFO("Dataset has been split into %s submodels. Reconstructing each submodel..." % len(submodel_paths))

@@ -130,7 +130,7 @@ class ODMSplitStage(types.ODM_Stage):
                     shutil.move(main_recon, unaligned_recon)
 
                     shutil.move(aligned_recon, main_recon)
-                    log.ODM_DEBUG("%s is now %s" % (aligned_recon, main_recon))
+                    log.ODM_INFO("%s is now %s" % (aligned_recon, main_recon))
 
             # Remove invalid submodels
             submodel_paths = [p for p in submodel_paths if not p in remove_paths]

@@ -141,7 +141,7 @@ class ODMSplitStage(types.ODM_Stage):
                 sp_octx = OSFMContext(sp)
 
                 log.ODM_INFO("========================")
-                log.ODM_INFO("Processing %s" % sp_octx.name())
+                log.ODM_INFO("Processing %s" % sp_octx.name())
                 log.ODM_INFO("========================")
 
                 argv = get_submodel_argv(args.name, tree.submodels_path, sp_octx.name())

@@ -198,7 +198,7 @@ class ODMMergeStage(types.ODM_Stage):
         merged_bounds_file = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')
         if not io.file_exists(merged_bounds_file) or self.rerun():
             all_bounds = get_submodel_paths(tree.submodels_path, 'odm_georeferencing', 'odm_georeferenced_model.bounds.gpkg')
-            log.ODM_DEBUG("Merging all crop bounds: %s" % all_bounds)
+            log.ODM_INFO("Merging all crop bounds: %s" % all_bounds)
             if len(all_bounds) > 0:
                 # Calculate a new crop area
                 # based on the convex hull of all crop areas of all submodels

@@ -219,7 +219,7 @@ class ODMMergeStage(types.ODM_Stage):
             )
 
             if len(all_orthos_and_cutlines) > 1:
-                log.ODM_DEBUG("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_cutlines))
+                log.ODM_INFO("Found %s submodels with valid orthophotos and cutlines" % len(all_orthos_and_cutlines))
 
                 # TODO: histogram matching via rasterio
                 # currently parts have different color tones