2015-12-02 14:24:38 +00:00
|
|
|
import cv2
|
2018-09-13 20:56:49 +00:00
|
|
|
import exifread
|
2015-12-11 21:26:04 +00:00
|
|
|
import re
|
|
|
|
from fractions import Fraction
|
2016-03-04 19:35:35 +00:00
|
|
|
from opensfm.exif import sensor_string
|
2018-01-26 19:38:26 +00:00
|
|
|
from pyproj import Proj
|
2015-11-17 17:17:56 +00:00
|
|
|
|
|
|
|
import log
|
2015-11-26 12:15:02 +00:00
|
|
|
import io
|
2015-11-18 16:39:38 +00:00
|
|
|
import system
|
2015-12-11 21:26:04 +00:00
|
|
|
import context
|
2018-09-13 20:56:49 +00:00
|
|
|
import logging
|
2016-02-26 18:50:12 +00:00
|
|
|
|
2015-12-10 17:17:39 +00:00
|
|
|
class ODM_Photo:
    """ODMPhoto - a class for ODMPhotos

    Wraps a single input image: reads EXIF metadata (camera make/model,
    focal length, GPS position) with exifread, measures the true pixel
    dimensions with OpenCV, and derives the focal length in pixels from
    the CCD width when both are known.
    """

    def __init__(self, path_file, force_focal, force_ccd):
        # general purpose
        self.path_file = path_file
        self.filename = io.extract_file_from_path_file(path_file)
        # useful attributes
        self.width = None            # image width in pixels (from cv2, not EXIF)
        self.height = None           # image height in pixels (from cv2, not EXIF)
        self.ccd_width = None        # sensor width in mm
        self.focal_length = None     # focal length in mm
        self.focal_length_px = None  # focal length in pixels (derived)
        # other attributes
        self.camera_make = ''
        self.camera_model = ''
        self.make_model = ''
        self.latitude = None
        self.longitude = None
        self.altitude = None
        # parse values from metadata
        self.parse_exif_values(self.path_file, force_focal, force_ccd)
        # compute focal length into pixels
        self.update_focal()
        # print log message
        log.ODM_DEBUG('Loaded {}'.format(self))

    def __str__(self):
        return '{} | camera: {} | dimensions: {} x {} | focal: {} | ccd: {} | lat: {} | lon: {} | alt: {}'.format(
            self.filename, self.make_model, self.width, self.height, self.focal_length,
            self.ccd_width, self.latitude, self.longitude, self.altitude)

    def update_focal(self):
        """Derive focal_length_px from focal_length (mm) and ccd_width (mm)."""
        # compute focal length in pixels
        if self.focal_length and self.ccd_width:
            # take width or height as reference
            if self.width > self.height:
                # f(px) = w(px) * f(mm) / ccd(mm)
                self.focal_length_px = \
                    self.width * (self.focal_length / self.ccd_width)
            else:
                # f(px) = h(px) * f(mm) / ccd(mm)
                self.focal_length_px = \
                    self.height * (self.focal_length / self.ccd_width)

    def parse_exif_values(self, _path_file, _force_focal, _force_ccd):
        """Populate camera, focal, GPS and dimension attributes.

        EXIF values may be overridden by the _force_focal / _force_ccd
        user parameters; missing ccd_width is looked up by camera model
        in the project's sensor database.
        """
        # Disable exifread log
        logging.getLogger('exifread').setLevel(logging.CRITICAL)

        with open(_path_file, 'rb') as f:
            tags = exifread.process_file(f, details=False)

        try:
            if 'Image Make' in tags:
                # NOTE(review): .encode('utf8') is a Python 2 idiom; on
                # Python 3 this produces bytes — confirm downstream use.
                self.camera_make = tags['Image Make'].values.encode('utf8')
            if 'Image Model' in tags:
                self.camera_model = tags['Image Model'].values.encode('utf8')
            if 'EXIF FocalLength' in tags:
                self.focal_length = self.float_values(tags['EXIF FocalLength'])[0]
            if 'GPS GPSAltitude' in tags:
                self.altitude = self.float_values(tags['GPS GPSAltitude'])[0]
                # a non-zero altitude reference means "below sea level"
                if 'GPS GPSAltitudeRef' in tags and self.int_values(tags['GPS GPSAltitudeRef'])[0] > 0:
                    self.altitude *= -1
            if 'GPS GPSLatitude' in tags and 'GPS GPSLatitudeRef' in tags:
                self.latitude = self.dms_to_decimal(tags['GPS GPSLatitude'], tags['GPS GPSLatitudeRef'])
            if 'GPS GPSLongitude' in tags and 'GPS GPSLongitudeRef' in tags:
                self.longitude = self.dms_to_decimal(tags['GPS GPSLongitude'], tags['GPS GPSLongitudeRef'])
        except IndexError as e:
            # BUGFIX: e.message is Python-2-only; str(e) works on 2 and 3.
            log.ODM_WARNING("Cannot read EXIF tags for %s: %s" % (_path_file, str(e)))

        if self.camera_make and self.camera_model:
            self.make_model = sensor_string(self.camera_make, self.camera_model)

        # needed to do that since sometimes metadata contains wrong data
        img = cv2.imread(_path_file)
        self.width = img.shape[1]
        self.height = img.shape[0]

        # force focal and ccd_width with user parameter
        if _force_focal:
            self.focal_length = _force_focal
        if _force_ccd:
            self.ccd_width = _force_ccd

        # find ccd_width from file if needed
        if self.ccd_width is None and self.camera_model is not None:
            # load ccd_widths from file
            ccd_widths = system.get_ccd_widths()
            # search ccd by camera model
            key = [x for x in ccd_widths.keys() if self.make_model in x]
            # convert to float if found
            if key:
                self.ccd_width = float(ccd_widths[key[0]])
            else:
                log.ODM_WARNING('Could not find ccd_width in file. Use --force-ccd or edit the sensor_data.json '
                                'file to manually input ccd width')

    def dms_to_decimal(self, dms, sign):
        """Converts dms coords to decimal degrees"""
        degrees, minutes, seconds = self.float_values(dms)

        return (-1 if sign.values[0] in 'SWsw' else 1) * (
            degrees +
            minutes / 60 +
            seconds / 3600
        )

    def float_values(self, tag):
        """Return the tag's rational values as a list of floats.

        BUGFIX: on Python 3 `map` returns an iterator, which breaks the
        `[0]` subscripting done by callers; a list comprehension is
        identical on Python 2 and correct on Python 3.
        """
        return [float(v.num) / float(v.den) for v in tag.values]

    def int_values(self, tag):
        # BUGFIX: list instead of map object, for the same reason as above.
        return [int(v) for v in tag.values]
|
2015-11-27 10:00:08 +00:00
|
|
|
|
|
|
|
# TODO: finish this class
|
2015-12-10 17:17:39 +00:00
|
|
|
class ODM_Reconstruction(object):
    """Holds the reconstruction inputs: the list of photos plus the
    projection (CRS) and georeferencing information the project will be
    expressed in.
    """

    def __init__(self, photos, projstring=None, coords_file=None):
        self.photos = photos        # list of ODM_Photos
        self.projection = None      # Projection system the whole project will be in
        self.georef = None
        if projstring:
            # an explicit proj4 string takes precedence over the coords file
            self.projection = self.set_projection(projstring)
            self.georef = ODM_GeoRef(self.projection)
        else:
            self.projection = self.parse_coordinate_system(coords_file)
            if self.projection:
                self.georef = ODM_GeoRef(self.projection)

    def parse_coordinate_system(self, _file):
        """Return a pyproj Proj parsed from the first line of the coords file.

        Accepts 'WGS84 UTM <zone><N|S>', a proj4 string, or an EPSG code.
        Returns None when the file is missing or the CRS is unrecognized;
        re-raises RuntimeError when pyproj rejects the supplied CRS.
        """
        # check for coordinate file existence
        if not io.file_exists(_file):
            log.ODM_WARNING('Could not find file %s' % _file)
            return

        with open(_file) as f:
            # extract reference system and utm zone from first line.
            # We will assume the following format:
            # 'WGS84 UTM 17N' or 'WGS84 UTM 17N \n'
            line = f.readline().rstrip()
            log.ODM_DEBUG('Line: %s' % line)
            ref = line.split(' ')
            try:
                if ref[0] == 'WGS84' and ref[1] == 'UTM':
                    datum = ref[0]
                    utm_pole = ref[2][-1]
                    utm_zone = int(ref[2][:-1])
                    proj_args = dict(proj="utm", zone=utm_zone, datum=datum, no_defs=True)
                    # BUGFIX: the pole was parsed but never used, so southern
                    # hemisphere zones produced a northern CRS. Pass +south
                    # only when needed (a False kwarg would still emit
                    # '+south=False' into the proj string).
                    if utm_pole in 'Ss':
                        proj_args['south'] = True
                    return Proj(**proj_args)
                elif '+proj' in line:
                    return Proj(line.strip('\''))
                elif 'epsg' in line.lower():
                    return Proj(init=line)
                else:
                    log.ODM_ERROR('Could not parse coordinates. Bad CRS supplied: %s' % line)
            except RuntimeError:
                log.ODM_ERROR('Uh oh! There seems to be a problem with your GCP file.\n\n'
                              'The line: %s\n\n'
                              'Is not valid. Projections that are valid include:\n'
                              ' - EPSG:*****\n'
                              ' - WGS84 UTM **(N|S)\n'
                              ' - Any valid proj4 string (for example, +proj=utm +zone=32 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs)\n\n'
                              'Modify your GCP file and try again.' % line)
                # BUGFIX: re-raise the original exception; wrapping it in a
                # fresh RuntimeError discarded the original traceback.
                raise

    def set_projection(self, projstring):
        """Return a Proj built from projstring, or None if pyproj rejects it."""
        try:
            return Proj(projstring)
        except RuntimeError:
            log.ODM_EXCEPTION('Could not set projection. Please use a proj4 string')
|
2015-12-10 17:17:39 +00:00
|
|
|
|
|
|
|
|
|
|
|
class ODM_GCPoint(object):
    """A single ground control point holding plain (x, y, z) coordinates."""

    def __init__(self, x, y, z):
        # coordinates are stored verbatim; no validation or conversion
        self.x, self.y, self.z = x, y, z
|
|
|
|
|
|
|
|
|
|
|
|
class ODM_GeoRef(object):
    """Georeferencing state for a reconstruction: projection, UTM offsets,
    transformation matrix and ground control points, plus helpers to export
    a georeferenced LAZ point cloud through PDAL.
    """

    def __init__(self, projection):
        self.projection = projection   # pyproj Proj of the project CRS
        self.datum = 'WGS84'
        self.epsg = None
        self.utm_zone = 0
        self.utm_pole = 'N'
        self.utm_east_offset = 0
        self.utm_north_offset = 0
        self.transform = []            # rows of the 4x4 geocoords transform
        self.gcps = []                 # list of ODM_GCPoint

    def calculate_EPSG(self, proj):
        # NOTE: an earlier overload that computed 326xx/327xx EPSG codes from
        # (utm_zone, pole) was silently shadowed by this later definition and
        # was therefore dead code; it has been removed. Behavior is unchanged.
        return proj

    def coord_to_fractions(self, coord, refs):
        """Convert a decimal coordinate to an EXIF-style DMS rational string.

        Returns (dms_string, hemisphere_ref): dms_string looks like
        'deg/1 min/1 sec_num/1000'; hemisphere_ref is refs[0] for
        non-negative coordinates and refs[1] otherwise.
        """
        deg_dec = abs(float(coord))
        deg = int(deg_dec)
        minute_dec = (deg_dec - deg) * 60
        minute = int(minute_dec)

        sec_dec = (minute_dec - minute) * 60
        # keep millisecond-of-arc precision in the rational representation
        sec_dec = round(sec_dec, 3)
        sec_denominator = 1000
        sec_numerator = int(sec_dec * sec_denominator)
        if float(coord) >= 0:
            latRef = refs[0]
        else:
            latRef = refs[1]

        output = str(deg) + '/1 ' + str(minute) + '/1 ' + str(sec_numerator) + '/' + str(sec_denominator)
        return output, latRef

    def convert_to_las(self, _file, _file_out, json_file):
        """Run a PDAL pipeline converting the PLY cloud _file to LAZ _file_out.

        Writes the pipeline definition to json_file first; requires a
        non-empty CRS on self.projection.
        """
        if not self.projection.srs:
            log.ODM_ERROR('Empty CRS: Could not convert to LAS')
            return

        kwargs = {'bin': context.pdal_path,
                  'f_in': _file,
                  'f_out': _file_out,
                  'east': self.utm_east_offset,
                  'north': self.utm_north_offset,
                  'srs': self.projection.srs,
                  'json': json_file}

        # create pipeline file las.json to write odm_georeferenced_model.laz point cloud
        pipeline = '{{' \
                   ' "pipeline":[' \
                   ' "untransformed.ply",' \
                   ' {{' \
                   ' "type":"writers.las",' \
                   ' "a_srs":"{srs}",' \
                   ' "offset_x":"{east}",' \
                   ' "offset_y":"{north}",' \
                   ' "offset_z":"0",' \
                   ' "compression":"laszip",' \
                   ' "filename":"{f_out}"' \
                   ' }}' \
                   ' ]' \
                   '}}'.format(**kwargs)

        with open(json_file, 'w') as f:
            f.write(pipeline)

        # call pdal
        system.run('{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in}'.format(**kwargs))

    def extract_offsets(self, _file):
        """Read the UTM east/north offsets from the second line of _file."""
        if not io.file_exists(_file):
            log.ODM_ERROR('Could not find file %s' % _file)
            return

        with open(_file) as f:
            offsets = f.readlines()[1].split(' ')
            self.utm_east_offset = float(offsets[0])
            self.utm_north_offset = float(offsets[1])

    def create_gcps(self, _file):
        """Load ground control points (lines of 'x y [z]') from _file."""
        if not io.file_exists(_file):
            log.ODM_ERROR('Could not find file %s' % _file)
            return

        with open(_file) as f:
            # parse coordinates, skipping the two header lines
            lines = f.readlines()[2:]
            for l in lines:
                xyz = l.split(' ')
                if len(xyz) == 3:
                    x, y, z = xyz[:3]
                elif len(xyz) == 2:
                    x, y = xyz[:2]
                    z = 0
                else:
                    # BUGFIX: a malformed line previously reused stale values
                    # from the prior iteration (or raised NameError when it
                    # was the first line); skip it instead.
                    continue
                self.gcps.append(ODM_GCPoint(float(x), float(y), float(z)))
        # Write to json file

    def parse_transformation_matrix(self, _file):
        """Read a 4x4 transformation matrix from _file and derive the UTM
        offsets from its translation column."""
        if not io.file_exists(_file):
            log.ODM_ERROR('Could not find file %s' % _file)
            return

        # Create a nested list for the transformation matrix
        with open(_file) as f:
            for line in f:
                # Handle matrix formats that either
                # have leading or trailing brakets or just plain numbers.
                line = re.sub(r"[\[\],]", "", line).strip()
                self.transform += [[float(i) for i in line.split()]]

        self.utm_east_offset = self.transform[0][3]
        self.utm_north_offset = self.transform[1][3]
|
|
|
|
|
2016-02-25 20:02:48 +00:00
|
|
|
|
2015-12-10 17:17:39 +00:00
|
|
|
class ODM_Tree(object):
    """Central registry of every directory and file path used during an ODM
    reconstruction run, all rooted at root_path."""

    def __init__(self, root_path, images_path, gcp_file=None):
        # root path to the project
        self.root_path = io.absolute_path_file(root_path)
        self.input_images = (io.join_paths(self.root_path, 'images')
                             if not images_path
                             else io.absolute_path_file(images_path))

        # shorthand used for every derived path below
        join = io.join_paths

        # modules paths: where each processing stage keeps its files and
        # directories during the whole reconstruction process
        self.dataset_raw = join(self.root_path, 'images')
        self.opensfm = join(self.root_path, 'opensfm')
        self.smvs = join(self.root_path, 'smvs')
        self.odm_meshing = join(self.root_path, 'odm_meshing')
        self.odm_texturing = join(self.root_path, 'odm_texturing')
        self.odm_25dtexturing = join(self.root_path, 'odm_texturing_25d')
        self.odm_georeferencing = join(self.root_path, 'odm_georeferencing')
        self.odm_25dgeoreferencing = join(self.root_path, 'odm_25dgeoreferencing')
        self.odm_orthophoto = join(self.root_path, 'odm_orthophoto')
        self.odm_pdal = join(self.root_path, 'pdal')

        # important files: benchmarking and the image list
        self.benchmarking = join(self.root_path, 'benchmark.txt')
        self.dataset_list = join(self.root_path, 'img_list.txt')

        # opensfm outputs
        self.opensfm_tracks = join(self.opensfm, 'tracks.csv')
        self.opensfm_bundle = join(self.opensfm, 'bundle_r000.out')
        self.opensfm_bundle_list = join(self.opensfm, 'list_r000.out')
        self.opensfm_image_list = join(self.opensfm, 'image_list.txt')
        self.opensfm_reconstruction = join(self.opensfm, 'reconstruction.json')
        self.opensfm_reconstruction_nvm = join(self.opensfm, 'reconstruction.nvm')
        self.opensfm_model = join(self.opensfm, 'depthmaps/merged.ply')
        self.opensfm_transformation = join(self.opensfm, 'geocoords_transformation.txt')

        # smvs / mve densification outputs
        self.smvs_model = join(self.smvs, 'smvs_dense_point_cloud.ply')
        self.mve_path = join(self.opensfm, 'mve')
        self.mve_image_list = join(self.mve_path, 'list.txt')
        self.mve_bundle = join(self.mve_path, 'bundle/bundle.out')
        self.mve_views = join(self.smvs, 'views')

        # odm_meshing outputs
        self.odm_mesh = join(self.odm_meshing, 'odm_mesh.ply')
        self.odm_meshing_log = join(self.odm_meshing, 'odm_meshing_log.txt')
        self.odm_25dmesh = join(self.odm_meshing, 'odm_25dmesh.ply')
        self.odm_25dmeshing_log = join(self.odm_meshing, 'odm_25dmeshing_log.txt')

        # texturing outputs
        self.odm_texturing_undistorted_image_path = join(
            self.odm_texturing, 'undistorted')
        self.odm_textured_model_obj = 'odm_textured_model.obj'
        self.odm_textured_model_mtl = 'odm_textured_model.mtl'
        # Log is only used by old odm_texturing
        self.odm_texuring_log = 'odm_texturing_log.txt'

        # odm_georeferencing outputs
        self.odm_georeferencing_latlon = join(
            self.odm_georeferencing, 'latlon.txt')
        self.odm_georeferencing_coords = join(
            self.odm_georeferencing, 'coords.txt')
        self.odm_georeferencing_gcp = gcp_file or io.find('gcp_list.txt', self.root_path)
        self.odm_georeferencing_utm_log = join(
            self.odm_georeferencing, 'odm_georeferencing_utm_log.txt')
        self.odm_georeferencing_log = 'odm_georeferencing_log.txt'
        self.odm_georeferencing_transform_file = 'odm_georeferencing_transform.txt'
        self.odm_georeferencing_proj = 'proj.txt'
        self.odm_georeferencing_model_txt_geo = 'odm_georeferencing_model_geo.txt'
        self.odm_georeferencing_model_ply_geo = 'odm_georeferenced_model.ply'
        self.odm_georeferencing_model_obj_geo = 'odm_textured_model_geo.obj'
        self.odm_georeferencing_xyz_file = join(
            self.odm_georeferencing, 'odm_georeferenced_model.csv')
        self.odm_georeferencing_las_json = join(
            self.odm_georeferencing, 'las.json')
        self.odm_georeferencing_model_laz = join(
            self.odm_georeferencing, 'odm_georeferenced_model.laz')
        self.odm_georeferencing_dem = join(
            self.odm_georeferencing, 'odm_georeferencing_model_dem.tif')

        # odm_orthophoto outputs
        self.odm_orthophoto_file = join(self.odm_orthophoto, 'odm_orthophoto.png')
        self.odm_orthophoto_tif = join(self.odm_orthophoto, 'odm_orthophoto.tif')
        self.odm_orthophoto_corners = join(self.odm_orthophoto, 'odm_orthophoto_corners.txt')
        self.odm_orthophoto_log = join(self.odm_orthophoto, 'odm_orthophoto_log.txt')
        self.odm_orthophoto_tif_log = join(self.odm_orthophoto, 'gdal_translate_log.txt')
        self.odm_orthophoto_gdaladdo_log = join(self.odm_orthophoto, 'gdaladdo_log.txt')

    def path(self, *args):
        """Join args onto the project root path."""
        return io.join_paths(self.root_path, *args)
|