OpenDroneMap-ODM/opendm/types.py

462 lines
18 KiB
Python
Raw · Plain view · History

2015-12-02 14:24:38 +00:00
import cv2
import pyexiv2
2015-12-11 21:26:04 +00:00
import re
from fractions import Fraction
from opensfm.exif import sensor_string
2015-11-17 17:17:56 +00:00
import log
2015-11-26 12:15:02 +00:00
import io
2015-11-18 16:39:38 +00:00
import system
2015-12-11 21:26:04 +00:00
import context
2015-11-17 17:17:56 +00:00
2015-12-10 17:17:39 +00:00
class ODM_Photo:
    """A single input photograph plus the metadata needed for reconstruction.

    Camera make/model and focal length are read from EXIF via pyexiv2,
    the pixel dimensions are taken from the decoded image itself (EXIF
    dimensions are sometimes wrong), and the sensor (CCD) width is
    resolved from the bundled sensor database so the focal length can be
    expressed in pixels.
    """

    def __init__(self, path_file, force_focal, force_ccd):
        # general purpose
        self.path_file = path_file
        self.filename = io.extract_file_from_path_file(path_file)
        # useful attributes
        self.width = None             # image width in pixels
        self.height = None            # image height in pixels
        self.ccd_width = None         # sensor width in mm
        self.focal_length = None      # focal length in mm
        self.focal_length_px = None   # focal length in pixels
        # other attributes
        self.camera_make = ''
        self.camera_model = ''
        self.make_model = ''
        # parse values from metadata
        self.parse_pyexiv2_values(self.path_file, force_focal, force_ccd)
        # compute focal length into pixels
        self.update_focal()

        # print log message
        log.ODM_DEBUG('Loaded %s | camera: %s | dimensions: %s x %s | focal: %s | ccd: %s' %
                      (self.filename, self.make_model, self.width, self.height,
                       self.focal_length, self.ccd_width))

    def update_focal(self):
        """Derive the focal length in pixels, if both mm focal length and
        ccd width are known; otherwise leave focal_length_px as None.
        """
        if self.focal_length and self.ccd_width:
            # take the larger image dimension as reference:
            # f(px) = max(w, h)(px) * f(mm) / ccd(mm)
            reference = max(self.width, self.height)
            self.focal_length_px = reference * (self.focal_length / self.ccd_width)

    def parse_pyexiv2_values(self, _path_file, _force_focal, _force_ccd):
        """Populate camera, focal, ccd and dimension attributes.

        :param _path_file: path to the image on disk
        :param _force_focal: user-supplied focal length in mm, or falsy
        :param _force_ccd: user-supplied ccd width in mm, or falsy
        """
        # read image metadata
        metadata = pyexiv2.ImageMetadata(_path_file)
        metadata.read()
        # loop over image tags
        for key in metadata:
            # try/catch tag value due to weird bug in pyexiv2
            # ValueError: invalid literal for int() with base 10: ''
            try:
                val = metadata[key].value
                # parse tag names
                if key == 'Exif.Image.Make':
                    self.camera_make = val
                elif key == 'Exif.Image.Model':
                    self.camera_model = val
                elif key == 'Exif.Photo.FocalLength':
                    self.focal_length = float(val)
            except (pyexiv2.ExifValueError, ValueError):
                pass
            except NotImplementedError:
                pass

        if self.camera_make and self.camera_model:
            self.make_model = sensor_string(self.camera_make, self.camera_model)

        # needed to do that since sometimes metadata contains wrong data
        # NOTE(review): cv2.imread returns None for unreadable files, which
        # would raise AttributeError here — presumably inputs are validated
        # upstream; confirm against callers.
        img = cv2.imread(_path_file)
        self.width = img.shape[1]
        self.height = img.shape[0]

        # force focal and ccd_width with user parameter
        if _force_focal:
            self.focal_length = _force_focal
        if _force_ccd:
            self.ccd_width = _force_ccd

        # find ccd_width from file if needed
        if self.ccd_width is None:
            # BUGFIX: the original tested `self.camera_model is not None`,
            # which is always true (it is initialised to ''); with an empty
            # make_model the substring search below matched *every* entry
            # and silently picked an arbitrary sensor width.
            if self.make_model:
                # load ccd_widths from file
                ccd_widths = system.get_ccd_widths()
                # search ccd by camera model (substring match against keys)
                key = [x for x in ccd_widths.keys() if self.make_model in x]
                # convert to float if found
                if key:
                    self.ccd_width = float(ccd_widths[key[0]])
            if self.ccd_width is None:
                log.ODM_WARNING('Could not find ccd_width in file. Use --force-ccd or edit the sensor_data.json '
                                'file to manually input ccd width')
# TODO: finish this class
class ODM_Reconstruction(object):
    """Placeholder container for reconstruction data (not yet implemented)."""

    def __init__(self, arg):
        # BUGFIX: the original called super(ODMReconstruction, ...) — a
        # misspelling of the class name that raised NameError on every
        # instantiation.
        super(ODM_Reconstruction, self).__init__()
        self.arg = arg
class ODM_GCPoint(object):
    """A single ground control point in 3-D space."""

    def __init__(self, x, y, z):
        # coordinate components (easting, northing, elevation)
        self.x, self.y, self.z = x, y, z
class ODM_GeoRef(object):
    """Georeferencing context for a reconstruction.

    Tracks the datum / UTM zone, the derived EPSG code, the local UTM
    offsets applied to model coordinates, and the ground control points.
    Also wraps the external PDAL / GDAL invocations used to produce a
    georeferenced LAS point cloud, a DEM, and EXIF GPS tags.
    """

    def __init__(self):
        self.datum = 'WGS84'        # reference datum name
        self.epsg = None            # EPSG code once known
        self.utm_zone = 0           # UTM zone number
        self.utm_pole = 'N'         # hemisphere: 'N' or 'S'
        self.utm_east_offset = 0    # local easting offset
        self.utm_north_offset = 0   # local northing offset
        self.gcps = []              # list of ODM_GCPoint

    def calculate_EPSG(self, _utm_zone, _pole):
        """Return the WGS84/UTM EPSG code for a zone and hemisphere.

        Returns None (after logging an error) for an unknown pole letter.
        """
        if _pole == 'S':
            return 32700 + _utm_zone
        elif _pole == 'N':
            return 32600 + _utm_zone
        else:
            log.ODM_ERROR('Unknown pole format %s' % _pole)
            return

    def coord_to_fractions(self, coord, refs):
        """Convert a decimal coordinate to an EXIF rational DMS string.

        :param coord: decimal degrees (number or numeric string)
        :param refs: pair of hemisphere letters, positive first
                     (e.g. ['N', 'S'] or ['E', 'W'])
        :return: ('deg/1 min/1 millisec/1000' string, hemisphere letter)
        """
        deg_dec = abs(float(coord))
        deg = int(deg_dec)
        minute_dec = (deg_dec - deg) * 60
        minute = int(minute_dec)
        sec_dec = (minute_dec - minute) * 60
        # seconds are stored with 1/1000 precision as an EXIF rational
        sec_dec = round(sec_dec, 3)
        sec_denominator = 1000
        sec_numerator = int(sec_dec * sec_denominator)
        # positive coordinate -> first ref (N/E), negative -> second (S/W)
        if float(coord) >= 0:
            latRef = refs[0]
        else:
            latRef = refs[1]
        output = str(deg) + '/1 ' + str(minute) + '/1 ' + str(sec_numerator) + '/' + str(sec_denominator)
        return output, latRef

    def convert_to_las(self, _file, _file_out, json_file):
        """Convert a PLY point cloud to a georeferenced LAS file via PDAL.

        Writes a pipeline JSON to json_file, then runs it; the placeholder
        filenames inside the JSON are overridden on the command line.
        """
        if not self.epsg:
            log.ODM_ERROR('Empty EPSG: Could not convert to LAS')
            return

        kwargs = {'bin': context.pdal_path,
                  'f_in': _file,
                  'f_out': _file_out,
                  'east': self.utm_east_offset,
                  'north': self.utm_north_offset,
                  'epsg': self.epsg,
                  'json': json_file}

        # pipeline: translate local coordinates back by the UTM offsets,
        # then tag the output with the spatial reference system
        pipeline = '{{' \
                   ' "pipeline":[' \
                   ' "untransformed.ply",' \
                   ' {{' \
                   ' "type":"filters.transformation",' \
                   ' "matrix":"1 0 0 {east} 0 1 0 {north} 0 0 1 0 0 0 0 1"' \
                   ' }},' \
                   ' {{' \
                   ' "a_srs":"EPSG:{epsg}",' \
                   ' "filename":"transformed.las"' \
                   ' }}' \
                   ' ]' \
                   '}}'.format(**kwargs)

        with open(json_file, 'w') as f:
            f.write(pipeline)

        # call pdal
        system.run('{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in} '
                   '--writers.las.filename={f_out}'.format(**kwargs))

    def convert_to_dem(self, _file, _file_out, pdalJSON, sample_radius, gdal_res, gdal_radius):
        """Build a DEM GeoTIFF from a LAS cloud via a PDAL pipeline.

        :param _file: input LAS path
        :param _file_out: output GeoTIFF path
        :param pdalJSON: path where the pipeline JSON is written
        :param sample_radius: filters.sample radius
        :param gdal_res: output raster resolution
        :param gdal_radius: writers.gdal interpolation radius
        :return: True if the output file exists afterwards, else False
        """
        # Check if exists f_in
        if not io.file_exists(_file):
            log.ODM_ERROR('LAS file does not exist')
            return False

        kwargs = {
            'bin': context.pdal_path,
            'f_in': _file,
            'sample_radius': sample_radius,
            'gdal_res': gdal_res,
            'gdal_radius': gdal_radius,
            'f_out': _file_out,
            'json': pdalJSON
        }

        # sample -> ground classification (pmf) -> idw raster
        pipelineJSON = '{{' \
                       ' "pipeline":[' \
                       ' "input.las",' \
                       ' {{' \
                       ' "type":"filters.sample",' \
                       ' "radius":"{sample_radius}"' \
                       ' }},' \
                       ' {{' \
                       ' "type":"filters.pmf",' \
                       ' "extract":"true"' \
                       ' }},' \
                       ' {{' \
                       ' "resolution": {gdal_res},' \
                       ' "radius": {gdal_radius},' \
                       ' "output_type":"idw",' \
                       ' "filename":"outputfile.tif"' \
                       ' }}' \
                       ' ]' \
                       '}}'.format(**kwargs)

        with open(pdalJSON, 'w') as f:
            f.write(pipelineJSON)

        system.run('{bin}/pdal pipeline {json} --readers.las.filename={f_in} '
                   '--writers.gdal.filename={f_out}'.format(**kwargs))

        return io.file_exists(kwargs['f_out'])

    def utm_to_latlon(self, _file, _photo, idx):
        """Convert GCP number idx back to lat/lon with gdaltransform and
        write the result into the photo's EXIF GPS tags.
        """
        gcp = self.gcps[idx]
        kwargs = {'epsg': self.epsg,
                  'file': _file,
                  'x': gcp.x + self.utm_east_offset,
                  'y': gcp.y + self.utm_north_offset,
                  'z': gcp.z}

        latlon = system.run_and_return('echo {x} {y} {z} '.format(**kwargs),
                                       'gdaltransform -s_srs \"EPSG:{epsg}\" '
                                       '-t_srs \"EPSG:4326\"'.format(**kwargs)).split()

        # gdaltransform prints "lon lat [alt]", e.g.:
        #   83d18'16.285"W  41d2'11.789"N  0.998
        if len(latlon) == 3:
            lon_str, lat_str, alt_str = latlon
        elif len(latlon) == 2:
            lon_str, lat_str = latlon
            # BUGFIX: the original set '' here and later indexed latlon[2],
            # which raised IndexError for two-token output
            alt_str = '0'
        else:
            log.ODM_ERROR('Something went wrong %s' % latlon)
            # BUGFIX: the original fell through and crashed on bad output
            return

        lat_frac = self.coord_to_fractions(lat_str, ['N', 'S'])
        lon_frac = self.coord_to_fractions(lon_str, ['E', 'W'])

        # read image metadata
        metadata = pyexiv2.ImageMetadata(_photo.path_file)
        metadata.read()

        # GPS latitude
        key = 'Exif.GPSInfo.GPSLatitude'
        value = lat_frac[0].split(' ')
        log.ODM_DEBUG('lat_frac: %s %s %s' % (value[0], value[1], value[2]))
        metadata[key] = pyexiv2.ExifTag(key,
                                        [Fraction(value[0]),
                                         Fraction(value[1]),
                                         Fraction(value[2])])
        key = 'Exif.GPSInfo.GPSLatitudeRef'
        metadata[key] = pyexiv2.ExifTag(key, lat_frac[1])

        # GPS longitude
        key = 'Exif.GPSInfo.GPSLongitude'
        value = lon_frac[0].split(' ')
        metadata[key] = pyexiv2.ExifTag(key,
                                        [Fraction(value[0]),
                                         Fraction(value[1]),
                                         Fraction(value[2])])
        key = 'Exif.GPSInfo.GPSLongitudeRef'
        metadata[key] = pyexiv2.ExifTag(key, lon_frac[1])

        # GPS altitude: stored in centimetres as an unsigned rational
        alt = float(alt_str)
        altitude = abs(int(alt * 100))
        key = 'Exif.GPSInfo.GPSAltitude'
        metadata[key] = pyexiv2.ExifTag(key, Fraction(altitude, 1))

        # BUGFIX: the original compared the *string* latlon[2] with 0, which
        # in Python 2 is always True, so below-sea-level points were always
        # tagged as above sea level. AltitudeRef: '0' = above, '1' = below.
        altref = '0' if alt >= 0 else '1'
        key = 'Exif.GPSInfo.GPSAltitudeRef'
        metadata[key] = pyexiv2.ExifTag(key, altref)

        # write values
        metadata.write()

    def parse_coordinate_system(self, _file):
        """Populate datum/zone/EPSG, UTM offsets and GCPs from a coord file.

        Expected layout:
          line 1: CRS, either 'WGS84 UTM 17N' or 'EPSG:<code>'
          line 2: east and north offsets, e.g. '440143 4588391'
          rest  : one GCP per line, 'x y [z]'
        """
        # check for coordinate file existence
        if not io.file_exists(_file):
            log.ODM_ERROR('Could not find file %s' % _file)
            return

        with open(_file) as f:
            # extract reference system and utm zone from first line
            line = f.readline()
            log.ODM_DEBUG('Line: %s' % line)
            ref = line.split(' ')
            if ref[0] == 'WGS84' and ref[1] == 'UTM':
                self.datum = ref[0]
                zone_token = ref[2].strip()
                # BUGFIX: the original indexed with len(ref) - 1 (the length
                # of the token *list*), which only worked by accident for
                # two-digit zones and raised ValueError for e.g. '7N'.
                self.utm_pole = zone_token[-1].upper()
                self.utm_zone = int(zone_token[:-1])
                # update EPSG
                self.epsg = self.calculate_EPSG(self.utm_zone, self.utm_pole)
            # If the first line looks like "EPSG:n" or "epsg:n"
            elif ref[0].split(':')[0].lower() == 'epsg':
                # BUGFIX: convert to int — the original stored the raw string
                # including the trailing newline, which corrupted later
                # 'EPSG:{epsg}' command strings
                self.epsg = int(line.split(':')[1])
            else:
                log.ODM_ERROR('Could not parse coordinates. Bad CRS supplied: %s' % line)
                return

            # east and north offsets from the second line
            offsets = f.readline().split(' ')
            self.utm_east_offset = int(offsets[0])
            self.utm_north_offset = int(offsets[1])

            # parse coordinates
            for l in f.readlines():
                xyz = l.split(' ')
                if len(xyz) == 3:
                    x, y, z = xyz[:3]
                elif len(xyz) == 2:
                    x, y = xyz[:2]
                    z = 0
                else:
                    # BUGFIX: a malformed line previously reused stale x/y/z
                    # from the prior iteration (or raised NameError)
                    continue
                self.gcps.append(ODM_GCPoint(float(x), float(y), float(z)))
        # TODO: write parsed values to a json file
2015-12-10 17:17:39 +00:00
2016-02-25 20:02:48 +00:00
2015-12-10 17:17:39 +00:00
class ODM_Tree(object):
def __init__(self, root_path, images_path):
# root path to the project
2015-12-02 14:24:38 +00:00
self.root_path = io.absolute_path_file(root_path)
if not images_path:
self.input_images = io.join_paths(self.root_path, 'images')
else:
self.input_images = io.absolute_path_file(images_path)
2015-12-02 14:24:38 +00:00
# modules paths
2015-12-02 14:24:38 +00:00
# here are defined where all modules should be located in
# order to keep track all files al directories during the
# whole reconstruction process.
self.dataset_raw = io.join_paths(self.root_path, 'images')
self.dataset_resize = io.join_paths(self.root_path, 'images_resize')
2015-12-02 14:24:38 +00:00
self.opensfm = io.join_paths(self.root_path, 'opensfm')
self.pmvs = io.join_paths(self.root_path, 'pmvs')
self.odm_meshing = io.join_paths(self.root_path, 'odm_meshing')
self.odm_texturing = io.join_paths(self.root_path, 'odm_texturing')
self.odm_georeferencing = io.join_paths(self.root_path, 'odm_georeferencing')
self.odm_orthophoto = io.join_paths(self.root_path, 'odm_orthophoto')
2016-02-25 16:29:00 +00:00
self.odm_pdal = io.join_paths(self.root_path, 'pdal')
2015-12-02 14:24:38 +00:00
# important files paths
2016-02-29 14:45:00 +00:00
# benchmarking
self.benchmarking = io.join_paths(self.root_path, 'benchmark.txt')
2015-12-02 14:24:38 +00:00
# opensfm
self.opensfm_bundle = io.join_paths(self.opensfm, 'bundle_r000.out')
self.opensfm_bundle_list = io.join_paths(self.opensfm, 'list_r000.out')
self.opensfm_image_list = io.join_paths(self.opensfm, 'image_list.txt')
self.opensfm_reconstruction = io.join_paths(self.opensfm, 'reconstruction.json')
self.opensfm_model = io.join_paths(self.opensfm, 'depthmaps/merged.ply')
2015-12-02 14:24:38 +00:00
# pmvs
self.pmvs_rec_path = io.join_paths(self.pmvs, 'recon0')
self.pmvs_bundle = io.join_paths(self.pmvs_rec_path, 'bundle.rd.out')
self.pmvs_visdat = io.join_paths(self.pmvs_rec_path, 'vis.dat')
self.pmvs_options = io.join_paths(self.pmvs_rec_path, 'pmvs_options.txt')
self.pmvs_model = io.join_paths(self.pmvs_rec_path, 'models/option-0000.ply')
2015-12-02 14:24:38 +00:00
# odm_meshing
self.odm_mesh = io.join_paths(self.odm_meshing, 'odm_mesh.ply')
self.odm_meshing_log = io.join_paths(self.odm_meshing, 'odm_meshing_log.txt')
2016-03-24 17:35:29 +00:00
# texturing
self.odm_texturing_undistorted_image_path = io.join_paths(
self.odm_texturing, 'undistorted')
self.odm_textured_model_obj = io.join_paths(
self.odm_texturing, 'odm_textured_model.obj')
self.odm_textured_model_mtl = io.join_paths(
self.odm_texturing, 'odm_textured_model.mtl')
2017-04-06 17:59:26 +00:00
# Log is only used by old odm_texturing
self.odm_texuring_log = io.join_paths(
self.odm_texturing, 'odm_texturing_log.txt')
# odm_georeferencing
2015-12-10 17:17:39 +00:00
self.odm_georeferencing_latlon = io.join_paths(
self.odm_georeferencing, 'latlon.txt')
self.odm_georeferencing_coords = io.join_paths(
self.odm_georeferencing, 'coords.txt')
self.odm_georeferencing_gcp = io.join_paths(
self.odm_georeferencing, 'gcp_list.txt')
self.odm_georeferencing_utm_log = io.join_paths(
self.odm_georeferencing, 'odm_georeferencing_utm_log.txt')
self.odm_georeferencing_log = io.join_paths(
self.odm_georeferencing, 'odm_georeferencing_log.txt')
2016-02-25 19:51:03 +00:00
self.odm_georeferencing_model_txt_geo = io.join_paths(
self.odm_georeferencing, 'odm_georeferencing_model_geo.txt')
self.odm_georeferencing_model_ply_geo = io.join_paths(
self.odm_georeferencing, 'odm_georeferenced_model.ply')
self.odm_georeferencing_model_obj_geo = io.join_paths(
self.odm_texturing, 'odm_textured_model_geo.obj') # these files will be kept in odm_texturing/
2016-02-25 19:51:03 +00:00
self.odm_georeferencing_model_mtl_geo = io.join_paths(
self.odm_texturing, 'odm_textured_model_geo.mtl') # these files will be kept in odm_texturing/
2016-02-25 19:51:03 +00:00
self.odm_georeferencing_xyz_file = io.join_paths(
2016-02-25 20:02:48 +00:00
self.odm_georeferencing, 'odm_georeferenced_model.csv')
2017-04-06 17:59:26 +00:00
self.odm_georeferencing_las_json = io.join_paths(
self.odm_georeferencing, 'las.json')
2017-04-06 13:06:09 +00:00
self.odm_georeferencing_model_las = io.join_paths(
self.odm_georeferencing, 'odm_georeferenced_model.las')
self.odm_georeferencing_dem = io.join_paths(
self.odm_georeferencing, 'odm_georeferencing_model_dem.tif')
self.odm_georeferencing_dem_json = io.join_paths(
self.odm_georeferencing, 'dem.json')
2015-12-02 14:24:38 +00:00
# odm_orthophoto
self.odm_orthophoto_file = io.join_paths(self.odm_orthophoto, 'odm_orthophoto.png')
2016-02-23 17:47:43 +00:00
self.odm_orthophoto_tif = io.join_paths(self.odm_orthophoto, 'odm_orthophoto.tif')
self.odm_orthophoto_corners = io.join_paths(self.odm_orthophoto, 'odm_orthophoto_corners.txt')
self.odm_orthophoto_log = io.join_paths(self.odm_orthophoto, 'odm_orthophoto_log.txt')
self.odm_orthophoto_tif_log = io.join_paths(self.odm_orthophoto, 'gdal_translate_log.txt')