Mirror of https://github.com/OpenDroneMap/ODM

OSFM get_submodel_argv refactoring, testing, radiometric calibration for 3-channel images

parent 7b94d25c0c
commit 59edf1fd0f

opendm/config.py (173 changed lines)
@@ -48,19 +48,38 @@ def url_string(string):
class RerunFrom(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, processopts[processopts.index(values):])
setattr(namespace, self.dest + '_is_set', True)
class StoreTrue(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, True)
setattr(namespace, self.dest + '_is_set', True)
parser = SettingsParser(description='OpenDroneMap',
class StoreValue(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)
setattr(namespace, self.dest + '_is_set', True)
args = None
def config(argv=None, settings_yaml=context.settings_path):
global args
if args is not None and argv is None:
return args
parser = SettingsParser(description='OpenDroneMap',
usage='%(prog)s [options] <project name>',
yaml_file=open(context.settings_path))
yaml_file=open(settings_yaml))
def config():
parser.add_argument('--project-path',
metavar='<path>',
action=StoreValue,
help='Path to the project folder')
parser.add_argument('name',
metavar='<project name>',
action=StoreValue,
type=alphanumeric_string,
default='code',
nargs='?',

@@ -68,6 +87,7 @@ def config():
parser.add_argument('--resize-to',
metavar='<integer>',
action=StoreValue,
default=2048,
type=int,
help='Resizes images by the largest side for feature extraction purposes only. '

@@ -76,6 +96,7 @@ def config():
parser.add_argument('--end-with', '-e',
metavar='<string>',
action=StoreValue,
default='odm_orthophoto',
choices=processopts,
help=('Can be one of:' + ' | '.join(processopts)))

@@ -84,11 +105,13 @@ def config():
rerun.add_argument('--rerun', '-r',
metavar='<string>',
action=StoreValue,
choices=processopts,
help=('Can be one of:' + ' | '.join(processopts)))
rerun.add_argument('--rerun-all',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='force rerun of all tasks')

@@ -108,6 +131,7 @@ def config():
parser.add_argument('--min-num-features',
metavar='<integer>',
action=StoreValue,
default=8000,
type=int,
help=('Minimum number of features to extract per image. '

@@ -115,17 +139,19 @@ def config():
'execution. Default: %(default)s'))
parser.add_argument('--feature-type',
metavar='<string>',
default='sift',
choices=['sift', 'hahog'],
help=('Choose the algorithm for extracting keypoints and computing descriptors. '
'Can be one of: [sift, hahog]. Default: '
'%(default)s'))
metavar='<string>',
action=StoreValue,
default='sift',
choices=['sift', 'hahog'],
help=('Choose the algorithm for extracting keypoints and computing descriptors. '
'Can be one of: [sift, hahog]. Default: '
'%(default)s'))
parser.add_argument('--matcher-neighbors',
type=int,
metavar='<integer>',
action=StoreValue,
default=8,
type=int,
help='Number of nearest images to pre-match based on GPS '
'exif data. Set to 0 to skip pre-matching. '
'Neighbors works together with Distance parameter, '

@@ -136,6 +162,7 @@ def config():
parser.add_argument('--matcher-distance',
metavar='<integer>',
action=StoreValue,
default=0,
type=int,
help='Distance threshold in meters to find pre-matching '

@@ -144,13 +171,15 @@ def config():
'pre-matching. Default: %(default)s')
parser.add_argument('--use-fixed-camera-params',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Turn off camera parameter optimization during bundler')
parser.add_argument('--cameras',
default='',
metavar='<json>',
action=StoreValue,
type=path_or_json_string,
help='Use the camera parameters computed from '
'another dataset instead of calculating them. '

@@ -160,6 +189,7 @@ def config():
parser.add_argument('--camera-lens',
metavar='<string>',
action=StoreValue,
default='auto',
choices=['auto', 'perspective', 'brown', 'fisheye', 'spherical'],
help=('Set a camera projection type. Manually setting a value '

@@ -170,6 +200,7 @@ def config():
parser.add_argument('--radiometric-calibration',
metavar='<string>',
action=StoreValue,
default='none',
choices=['none', 'camera', 'camera+sun'],
help=('Set the radiometric calibration to perform on images. '

@@ -182,6 +213,7 @@ def config():
parser.add_argument('--max-concurrency',
metavar='<positive integer>',
action=StoreValue,
default=context.num_cores,
type=int,
help=('The maximum number of processes to use in various '

@@ -190,6 +222,7 @@ def config():
parser.add_argument('--depthmap-resolution',
metavar='<positive float>',
action=StoreValue,
type=float,
default=640,
help=('Controls the density of the point cloud by setting the resolution of the depthmap images. Higher values take longer to compute '

@@ -198,6 +231,7 @@ def config():
parser.add_argument('--opensfm-depthmap-min-consistent-views',
metavar='<integer: 2 <= x <= 9>',
action=StoreValue,
type=int,
default=3,
help=('Minimum number of views that should reconstruct a point for it to be valid. Use lower values '

@@ -207,6 +241,7 @@ def config():
parser.add_argument('--opensfm-depthmap-method',
metavar='<string>',
action=StoreValue,
default='PATCH_MATCH',
choices=['PATCH_MATCH', 'BRUTE_FORCE', 'PATCH_MATCH_SAMPLE'],
help=('Raw depthmap computation algorithm. '

@@ -216,6 +251,7 @@ def config():
parser.add_argument('--opensfm-depthmap-min-patch-sd',
metavar='<positive float>',
action=StoreValue,
type=float,
default=1,
help=('When using PATCH_MATCH or PATCH_MATCH_SAMPLE, controls the standard deviation threshold to include patches. '

@@ -223,13 +259,15 @@ def config():
'Default: %(default)s'))
parser.add_argument('--use-hybrid-bundle-adjustment',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Run local bundle adjustment for every image added to the reconstruction and a global '
'adjustment every 100 images. Speeds up reconstruction for very large datasets.')
parser.add_argument('--mve-confidence',
metavar='<float: 0 <= x <= 1>',
action=StoreValue,
type=float,
default=0.60,
help=('Discard points that have less than a certain confidence threshold. '

@@ -238,22 +276,26 @@ def config():
'Default: %(default)s'))
parser.add_argument('--use-3dmesh',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Use a full 3D mesh to compute the orthophoto instead of a 2.5D mesh. This option is a bit faster and provides similar results in planar areas.')
parser.add_argument('--skip-3dmodel',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Skip generation of a full 3D model. This can save time if you only need 2D results such as orthophotos and DEMs.')
parser.add_argument('--use-opensfm-dense',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Use opensfm to compute dense point cloud alternatively')
parser.add_argument('--ignore-gsd',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Ignore Ground Sampling Distance (GSD). GSD '
'caps the maximum resolution of image outputs and '

@@ -262,6 +304,7 @@ def config():
parser.add_argument('--mesh-size',
metavar='<positive integer>',
action=StoreValue,
default=200000,
type=int,
help=('The maximum vertex count of the output mesh. '

@@ -269,6 +312,7 @@ def config():
parser.add_argument('--mesh-octree-depth',
metavar='<positive integer>',
action=StoreValue,
default=10,
type=int,
help=('Oct-tree depth used in the mesh reconstruction, '

@@ -277,6 +321,7 @@ def config():
parser.add_argument('--mesh-samples',
metavar='<float >= 1.0>',
action=StoreValue,
default=1.0,
type=float,
help=('Number of points per octree node, recommended '

@@ -284,6 +329,7 @@ def config():
parser.add_argument('--mesh-point-weight',
metavar='<positive float>',
action=StoreValue,
default=4,
type=float,
help=('This floating point value specifies the importance'

@@ -294,7 +340,8 @@ def config():
'Default= %(default)s'))
parser.add_argument('--fast-orthophoto',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Skips dense reconstruction and 3D model generation. '
'It generates an orthophoto directly from the sparse reconstruction. '

@@ -302,6 +349,7 @@ def config():
parser.add_argument('--crop',
metavar='<positive float>',
action=StoreValue,
default=3,
type=float,
help=('Automatically crop image outputs by creating a smooth buffer '

@@ -310,7 +358,8 @@ def config():
'Default: %(default)s'))
parser.add_argument('--pc-classify',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Classify the point cloud outputs using a Simple Morphological Filter. '
'You can control the behavior of this option by tweaking the --dem-* parameters. '

@@ -318,22 +367,26 @@ def config():
'%(default)s')
parser.add_argument('--pc-csv',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Export the georeferenced point cloud in CSV format. Default: %(default)s')
parser.add_argument('--pc-las',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Export the georeferenced point cloud in LAS format. Default: %(default)s')
parser.add_argument('--pc-ept',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Export the georeferenced point cloud in Entwine Point Tile (EPT) format. Default: %(default)s')
parser.add_argument('--pc-filter',
metavar='<positive float>',
action=StoreValue,
type=float,
default=2.5,
help='Filters the point cloud by removing points that deviate more than N standard deviations from the local mean. Set to 0 to disable filtering.'

@@ -342,6 +395,7 @@ def config():
parser.add_argument('--pc-sample',
metavar='<positive float>',
action=StoreValue,
type=float,
default=0,
help='Filters the point cloud by keeping only a single point around a radius N (in meters). This can be useful to limit the output resolution of the point cloud. Set to 0 to disable sampling.'

@@ -350,6 +404,7 @@ def config():
parser.add_argument('--smrf-scalar',
metavar='<positive float>',
action=StoreValue,
type=float,
default=1.25,
help='Simple Morphological Filter elevation scalar parameter. '

@@ -358,6 +413,7 @@ def config():
parser.add_argument('--smrf-slope',
metavar='<positive float>',
action=StoreValue,
type=float,
default=0.15,
help='Simple Morphological Filter slope parameter (rise over run). '

@@ -366,6 +422,7 @@ def config():
parser.add_argument('--smrf-threshold',
metavar='<positive float>',
action=StoreValue,
type=float,
default=0.5,
help='Simple Morphological Filter elevation threshold parameter (meters). '

@@ -374,6 +431,7 @@ def config():
parser.add_argument('--smrf-window',
metavar='<positive float>',
action=StoreValue,
type=float,
default=18.0,
help='Simple Morphological Filter window radius parameter (meters). '

@@ -382,6 +440,7 @@ def config():
parser.add_argument('--texturing-data-term',
metavar='<string>',
action=StoreValue,
default='gmi',
choices=['gmi', 'area'],
help=('Data term: [area, gmi]. Default: '

@@ -389,6 +448,7 @@ def config():
parser.add_argument('--texturing-nadir-weight',
metavar='<integer: 0 <= x <= 32>',
action=StoreValue,
default=16,
type=int,
help=('Affects orthophotos only. '

@@ -399,6 +459,7 @@ def config():
parser.add_argument('--texturing-outlier-removal-type',
metavar='<string>',
action=StoreValue,
default='gauss_clamping',
choices=['none', 'gauss_clamping', 'gauss_damping'],
help=('Type of photometric outlier removal method: '

@@ -406,36 +467,42 @@ def config():
'%(default)s'))
parser.add_argument('--texturing-skip-visibility-test',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help=('Skip geometric visibility test. Default: '
' %(default)s'))
parser.add_argument('--texturing-skip-global-seam-leveling',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help=('Skip global seam leveling. Useful for IR data.'
'Default: %(default)s'))
parser.add_argument('--texturing-skip-local-seam-leveling',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Skip local seam blending. Default: %(default)s')
parser.add_argument('--texturing-skip-hole-filling',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help=('Skip filling of holes in the mesh. Default: '
' %(default)s'))
parser.add_argument('--texturing-keep-unseen-faces',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help=('Keep faces in the mesh that are not seen in any camera. '
'Default: %(default)s'))
parser.add_argument('--texturing-tone-mapping',
metavar='<string>',
action=StoreValue,
choices=['none', 'gamma'],
default='none',
help='Turn on gamma tone mapping or none for no tone '

@@ -444,6 +511,7 @@ def config():
parser.add_argument('--gcp',
metavar='<path string>',
action=StoreValue,
default=None,
help=('path to the file containing the ground control '
'points used for georeferencing. Default: '

@@ -452,25 +520,29 @@ def config():
'northing height pixelrow pixelcol imagename'))
parser.add_argument('--use-exif',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help=('Use this tag if you have a gcp_list.txt but '
'want to use the exif geotags instead'))
parser.add_argument('--dtm',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Use this tag to build a DTM (Digital Terrain Model, ground only) using a simple '
'morphological filter. Check the --dem* and --smrf* parameters for finer tuning.')
parser.add_argument('--dsm',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Use this tag to build a DSM (Digital Surface Model, ground + objects) using a progressive '
'morphological filter. Check the --dem* parameters for finer tuning.')
parser.add_argument('--dem-gapfill-steps',
metavar='<positive integer>',
action=StoreValue,
default=3,
type=int,
help='Number of steps used to fill areas with gaps. Set to 0 to disable gap filling. '

@@ -481,6 +553,7 @@ def config():
parser.add_argument('--dem-resolution',
metavar='<float>',
action=StoreValue,
type=float,
default=5,
help='DSM/DTM resolution in cm / pixel. Note that this value is capped by a ground sampling distance (GSD) estimate. To remove the cap, check --ignore-gsd also.'

@@ -488,6 +561,7 @@ def config():
parser.add_argument('--dem-decimation',
metavar='<positive integer>',
action=StoreValue,
default=1,
type=int,
help='Decimate the points before generating the DEM. 1 is no decimation (full quality). '

@@ -495,7 +569,8 @@ def config():
'generation.\nDefault=%(default)s')
parser.add_argument('--dem-euclidean-map',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Computes an euclidean raster map for each DEM. '
'The map reports the distance from each cell to the nearest '

@@ -506,25 +581,29 @@ def config():
parser.add_argument('--orthophoto-resolution',
metavar='<float > 0.0>',
action=StoreValue,
default=5,
type=float,
help=('Orthophoto resolution in cm / pixel. Note that this value is capped by a ground sampling distance (GSD) estimate. To remove the cap, check --ignore-gsd also.\n'
'Default: %(default)s'))
parser.add_argument('--orthophoto-no-tiled',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Set this parameter if you want a stripped geoTIFF.\n'
'Default: %(default)s')
parser.add_argument('--orthophoto-png',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Set this parameter if you want to generate a PNG rendering of the orthophoto.\n'
'Default: %(default)s')
parser.add_argument('--orthophoto-compression',
metavar='<string>',
action=StoreValue,
type=str,
choices=['JPEG', 'LZW', 'PACKBITS', 'DEFLATE', 'LZMA', 'NONE'],
default='DEFLATE',

@@ -533,7 +612,8 @@ def config():
'are doing. Options: %(choices)s.\nDefault: %(default)s')
parser.add_argument('--orthophoto-cutline',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Generates a polygon around the cropping area '
'that cuts the orthophoto around the edges of features. This polygon '

@@ -542,24 +622,28 @@ def config():
'%(default)s')
parser.add_argument('--build-overviews',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Build orthophoto overviews using gdaladdo.')
parser.add_argument('--verbose', '-v',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Print additional messages to the console\n'
'Default: %(default)s')
parser.add_argument('--time',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Generates a benchmark file with runtime info\n'
'Default: %(default)s')
parser.add_argument('--debug',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help='Print debug messages\n'
'Default: %(default)s')

@@ -571,6 +655,7 @@ def config():
parser.add_argument('--split',
type=int,
action=StoreValue,
default=999999,
metavar='<positive integer>',
help='Average number of images per submodel. When '

@@ -581,6 +666,7 @@ def config():
parser.add_argument('--split-overlap',
type=float,
action=StoreValue,
metavar='<positive integer>',
default=150,
help='Radius of the overlap between submodels. '

@@ -591,6 +677,7 @@ def config():
parser.add_argument('--sm-cluster',
metavar='<string>',
action=StoreValue,
type=url_string,
default=None,
help='URL to a ClusterODM instance '

@@ -600,6 +687,7 @@ def config():
parser.add_argument('--merge',
metavar='<string>',
action=StoreValue,
default='all',
choices=['all', 'pointcloud', 'orthophoto', 'dem'],
help=('Choose what to merge in the merge step in a split dataset. '

@@ -608,20 +696,22 @@ def config():
'%(default)s'))
parser.add_argument('--force-gps',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help=('Use images\' GPS exif data for reconstruction, even if there are GCPs present.'
'This flag is useful if you have high precision GPS measurements. '
'If there are no GCPs, this flag does nothing. Default: %(default)s'))
parser.add_argument('--pc-rectify',
action='store_true',
action=StoreTrue,
nargs=0,
default=False,
help=('Perform ground rectification on the point cloud. This means that wrongly classified ground '
'points will be re-classified and gaps will be filled. Useful for generating DTMs. '
'Default: %(default)s'))
args = parser.parse_args()
args = parser.parse_args(argv)
# check that the project path setting has been set properly
if not args.project_path:

@@ -662,5 +752,4 @@ def config():
# log.ODM_WARNING("radiometric-calibration is turned on, automatically setting --texturing-skip-global-seam-leveling")
# args.texturing_skip_global_seam_leveling = True
return args
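Most of the opendm/config.py churn above swaps argparse's built-in 'store' / 'store_true' actions for the new StoreValue / StoreTrue / RerunFrom classes, so that every option a user passes explicitly also records a companion <dest>_is_set attribute on the namespace. A minimal, self-contained sketch (not part of the commit) of what that bookkeeping enables:

    import argparse

    class StoreValue(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            setattr(namespace, self.dest, values)
            setattr(namespace, self.dest + '_is_set', True)

    parser = argparse.ArgumentParser()
    parser.add_argument('--crop', action=StoreValue, type=float, default=3)

    args = parser.parse_args(['--crop', '0'])
    print(args.crop)                            # 0.0
    print(getattr(args, 'crop_is_set', False))  # True, because --crop was passed explicitly

Defaults never set the marker, which is what later lets get_submodel_argv() forward only the options the user actually typed.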
opendm/osfm.py (121 changed lines)
@@ -277,9 +277,9 @@ class OSFMContext:
def name(self):
return os.path.basename(os.path.abspath(self.path("..")))
def get_submodel_argv(project_name = None, submodels_path = None, submodel_name = None):
def get_submodel_argv(args, submodels_path = None, submodel_name = None):
"""
Gets argv for a submodel starting from the argv passed to the application startup.
Gets argv for a submodel starting from the args passed to the application startup.
Additionally, if project_name, submodels_path and submodel_name are passed, the function
handles the <project name> value and --project-path detection / override.
When all arguments are set to None, --project-path and project name are always removed.

@@ -295,82 +295,73 @@ def get_submodel_argv(project_name = None, submodels_path = None, submodel_name
removing --gcp (the GCP path if specified is always "gcp_list.txt")
reading the contents of --cameras
"""
assure_always = ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel']
remove_always_2 = ['--split', '--split-overlap', '--rerun-from', '--rerun', '--gcp', '--end-with', '--sm-cluster']
remove_always_1 = ['--rerun-all', '--pc-csv', '--pc-las', '--pc-ept']
read_json_always = ['--cameras']
assure_always = ['orthophoto_cutline', 'dem_euclidean_map', 'skip_3dmodel']
remove_always = ['split', 'split_overlap', 'rerun_from', 'rerun', 'gcp', 'end_with', 'sm_cluster', 'rerun_all', 'pc_csv', 'pc_las', 'pc_ept']
read_json_always = ['cameras']
argv = sys.argv
result = [argv[0]] # Startup script (/path/to/run.py)
result = [argv[0]]
i = 1
found_args = {}
args_dict = vars(args).copy()
set_keys = [k[:-len("_is_set")] for k in args_dict.keys() if k.endswith("_is_set")]
while i < len(argv):
arg = argv[i]
if i == 1 and project_name and submodel_name and arg == project_name:
i += 1
continue
elif i == len(argv) - 1:
# Project name?
if project_name and submodel_name and arg == project_name:
result.append(submodel_name)
found_args['project_name'] = True
i += 1
continue
if arg == '--project-path':
if submodels_path:
result.append(arg)
result.append(submodels_path)
found_args[arg] = True
i += 2
elif arg in assure_always:
result.append(arg)
found_args[arg] = True
i += 1
elif arg == '--crop':
result.append(arg)
crop_value = float(argv[i + 1])
if crop_value == 0:
crop_value = 0.015625
result.append(str(crop_value))
found_args[arg] = True
i += 2
elif arg in read_json_always:
# Handle project name and project path (special case)
if "name" in set_keys:
del args_dict["name"]
set_keys.remove("name")
if "project_path" in set_keys:
del args_dict["project_path"]
set_keys.remove("project_path")
# Remove parameters
set_keys = [k for k in set_keys if k not in remove_always]
# Assure parameters
for k in assure_always:
if not k in set_keys:
set_keys.append(k)
args_dict[k] = True
# Read JSON always
for k in read_json_always:
if k in set_keys:
try:
jsond = io.path_or_json_string_to_dict(argv[i + 1])
result.append(arg)
result.append(json.dumps(jsond))
found_args[arg] = True
if isinstance(args_dict[k], str):
args_dict[k] = io.path_or_json_string_to_dict(args_dict[k])
if isinstance(args_dict[k], dict):
args_dict[k] = json.dumps(args_dict[k])
except ValueError as e:
log.ODM_WARNING("Cannot parse/read JSON: {}".format(str(e)))
finally:
i += 2
elif arg in remove_always_2:
i += 2
elif arg in remove_always_1:
i += 1
else:
result.append(arg)
i += 1
# Handle crop (cannot be zero for split/merge)
if "crop" in set_keys:
crop_value = float(args_dict["crop"])
if crop_value == 0:
crop_value = 0.015625
args_dict["crop"] = crop_value
# Populate result
for k in set_keys:
result.append("--%s" % k.replace("_", "-"))
# No second value for booleans
if isinstance(args_dict[k], bool) and args_dict[k] == True:
continue
result.append(str(args_dict[k]))
if not found_args.get('--project-path') and submodels_path:
result.append('--project-path')
if submodels_path:
result.append("--project-path")
result.append(submodels_path)
for arg in assure_always:
if not found_args.get(arg):
result.append(arg)
if not found_args.get('project_name') and submodel_name:
if submodel_name:
result.append(submodel_name)
return result
def get_submodel_args_dict():
submodel_argv = get_submodel_argv()
def get_submodel_args_dict(args):
submodel_argv = get_submodel_argv(args)
result = {}
i = 0
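The refactored get_submodel_argv() above no longer walks sys.argv; it starts from the parsed args object, keeps only the options that carry a <dest>_is_set marker, drops the split/rerun/export keys, forces the assure_always options on, and renders what is left back into command-line flags. A rough standalone sketch of that core idea (the helper name args_to_argv and its signature are illustrative, not the actual opendm.osfm API):

    def args_to_argv(args_dict, remove=(), assure=()):
        # Keys explicitly set by the user are marked with a companion "<key>_is_set" entry.
        set_keys = [k[:-len('_is_set')] for k in args_dict if k.endswith('_is_set')]
        set_keys = [k for k in set_keys if k not in remove]
        for k in assure:
            if k not in set_keys:
                set_keys.append(k)
                args_dict[k] = True

        argv = []
        for k in set_keys:
            argv.append('--%s' % k.replace('_', '-'))
            if not (isinstance(args_dict[k], bool) and args_dict[k]):
                argv.append(str(args_dict[k]))   # booleans become bare flags
        return argv

    print(args_to_argv({'crop': 0.015625, 'crop_is_set': True}, assure=['skip_3dmodel']))
    # ['--crop', '0.015625', '--skip-3dmodel']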
@@ -199,7 +199,7 @@ class ODM_Photo:
# ], float)
self.width, self.height = get_image_size.get_image_size(_path_file)
# Sanitize band name since we use it in folder paths
self.band_name = re.sub('[^A-Za-z0-9]+', '', self.band_name)

@@ -286,7 +286,7 @@ class ODM_Photo:
return " ".join(map(str, tag.values))
def get_radiometric_calibration(self):
if self.radiometric_calibration:
if isinstance(self.radiometric_calibration, str):
parts = self.radiometric_calibration.split(" ")
if len(parts) == 3:
return list(map(float, parts))
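For context on the ODM_Photo.get_radiometric_calibration() hunk above: when the radiometric calibration tag arrives as a plain string of three coefficients, it is split on spaces and converted to a list of floats. A tiny standalone example, with made-up coefficient values:

    radiometric_calibration = "0.000312 0.0000041 0.52"   # made-up tag contents
    parts = radiometric_calibration.split(" ")
    if len(parts) == 3:
        print(list(map(float, parts)))   # [0.000312, 4.1e-06, 0.52]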
@@ -8,6 +8,7 @@ import zipfile
import glob
from opendm import log
from opendm import system
from opendm import config
from pyodm import Node, exceptions
from pyodm.utils import AtomicCounter
from pyodm.types import TaskStatus

@@ -354,7 +355,7 @@ class Task:
# Upload task
task = self.node.create_task(images,
get_submodel_args_dict(),
get_submodel_args_dict(config.config()),
progress_callback=print_progress,
skip_post_processing=True,
outputs=outputs)

@@ -470,8 +471,7 @@ class ToolchainTask(Task):
log.ODM_INFO("=============================")
submodels_path = os.path.abspath(self.path(".."))
project_name = os.path.basename(os.path.abspath(os.path.join(submodels_path, "..")))
argv = get_submodel_argv(project_name, submodels_path, submodel_name)
argv = get_submodel_argv(config.config(), submodels_path, submodel_name)
# Re-run the ODM toolchain on the submodel
system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy())
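In this file the submodel options are now derived from the parsed configuration (get_submodel_args_dict(config.config())) and handed to pyodm when the task is created. A hedged sketch of that hand-off; the node address and image list below are placeholders, not something this commit defines:

    from pyodm import Node
    from opendm import config
    from opendm.osfm import get_submodel_args_dict

    node = Node("localhost", 3000)   # hypothetical NodeODM/ClusterODM endpoint
    images = ["/submodels/submodel_0000/images/IMG_0001.JPG"]   # placeholder image list
    options = get_submodel_args_dict(config.config())   # e.g. {'orthophoto-cutline': True, ...}

    # Mirrors the create_task call shown in the hunk above.
    task = node.create_task(images, options, skip_post_processing=True)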
run.py (6 changed lines)
@@ -14,12 +14,16 @@ from stages.odm_app import ODMApp
if __name__ == '__main__':
args = config.config()
log.ODM_INFO('Initializing OpenDroneMap app - %s' % system.now())
log.ODM_INFO('Initializing ODM - %s' % system.now())
# Print args
args_dict = vars(args)
log.ODM_INFO('==============')
for k in sorted(args_dict.keys()):
# Skip _is_set keys
if k.endswith("_is_set"):
continue
# Don't leak token
if k == 'sm_cluster' and args_dict[k] is not None:
log.ODM_INFO('%s: True' % k)
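The argument dump added to run.py skips the *_is_set bookkeeping keys and deliberately avoids echoing the --sm-cluster value, since the ClusterODM URL may embed an access token. A standalone sketch of that loop over an invented dictionary:

    # Invented example values; the loop mirrors the one added to run.py.
    args_dict = {
        'dsm': True,
        'dsm_is_set': True,
        'sm_cluster': 'http://localhost:3000?token=secret',
        'sm_cluster_is_set': True,
    }
    for k in sorted(args_dict.keys()):
        if k.endswith('_is_set'):       # skip bookkeeping keys
            continue
        if k == 'sm_cluster' and args_dict[k] is not None:
            print('%s: True' % k)       # never print the token-bearing URL
        else:
            print('%s: %s' % (k, args_dict[k]))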
@@ -9,6 +9,7 @@ from opendm import system
from shutil import copyfile
from opendm import progress
def save_images_database(photos, database_file):
with open(database_file, 'w') as f:
f.write(json.dumps(map(lambda p: p.__dict__, photos)))

@@ -38,7 +39,6 @@ def load_images_database(database_file):
class ODMLoadDatasetStage(types.ODM_Stage):
def process(self, args, outputs):
# Load tree
tree = types.ODM_Tree(args.project_path, args.gcp)
outputs['tree'] = tree

@@ -61,9 +61,6 @@ class ODMOpenSfMStage(types.ODM_Stage):
if args.radiometric_calibration == "none":
octx.convert_and_undistort(self.rerun())
else:
# TODO: does this work for RGB images?
def radiometric_calibrate(shot_id, image):
photo = reconstruction.get_photo(shot_id)
return multispectral.dn_to_reflectance(photo, image, use_sun_sensor=args.radiometric_calibration=="camera+sun")

@@ -144,7 +144,7 @@ class ODMSplitStage(types.ODM_Stage):
log.ODM_INFO("Processing %s" % sp_octx.name())
log.ODM_INFO("========================")
argv = get_submodel_argv(args.name, tree.submodels_path, sp_octx.name())
argv = get_submodel_argv(tree.submodels_path, sp_octx.name())
# Re-run the ODM toolchain on the submodel
system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy())
@@ -0,0 +1,3 @@
{
"test": "1"
}

@@ -0,0 +1,2 @@
---
project_path: '/test'
@@ -0,0 +1,72 @@
import unittest
import os
from opendm.osfm import get_submodel_argv, get_submodel_args_dict
from opendm import config

class TestOSFM(unittest.TestCase):
    def setUp(self):
        pass

    def test_get_submodel_argv(self):
        # Base
        args = config.config(["--project-path", "/datasets"])

        self.assertEqual(get_submodel_argv(args)[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])

        # Base + project name
        args = config.config(["--project-path", "/datasets", "brighton"])
        self.assertEqual(get_submodel_argv(args)[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])

        # Project name + base
        args = config.config(["brighton", "--project-path", "/datasets"])
        self.assertEqual(get_submodel_argv(args)[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])

        # Crop
        args = config.config(["brighton", "--project-path", "/datasets", "--crop", "0"])
        self.assertEqual(get_submodel_argv(args)[1:],
            ['--crop', '0.015625', '--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:],
            ['--crop', '0.015625', '--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])

        # Using settings.yaml with project-path
        args = config.config(["brighton"], os.path.join(os.path.dirname(os.path.realpath(__file__)), "assets", "settings.yaml"))
        self.assertEqual(get_submodel_argv(args)[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])

        # With sm-cluster, pc-csv and others
        args = config.config(["--project-path", "/datasets", "--split", "200", "--pc-csv"])
        self.assertEqual(get_submodel_argv(args)[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:],
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])

        # Cameras JSON
        args = config.config(["--project-path", "/datasets", "--cameras", os.path.join(os.path.dirname(os.path.realpath(__file__)), "assets", "sample.json")])
        self.assertEqual(get_submodel_argv(args)[1:],
            ['--cameras', '{"test": "1"}', '--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])

        # Camera JSON string
        args = config.config(["--project-path", "/datasets", "--cameras", '{"test": "1"}'])
        self.assertEqual(get_submodel_argv(args)[1:],
            ['--cameras', '{"test": "1"}', '--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])

    def test_get_submodel_argv_dict(self):
        # Base
        args = config.config(["--project-path", "/datasets"])

        self.assertEqual(get_submodel_args_dict(args),
            {'orthophoto-cutline': True, 'skip-3dmodel': True, 'dem-euclidean-map': True})

if __name__ == '__main__':
    unittest.main()