fix georef, add silent flag to dmrecon, add flag for confidence level to scene2pset

Former-commit-id: ca43e539c4
pull/1161/head
Stephen Mather 2019-01-17 11:29:57 +00:00
commit a51fabd663
10 changed files with 136 additions and 150 deletions

View File

@@ -6,7 +6,7 @@ ExternalProject_Add(${_proj_name}
TMP_DIR ${_SB_BINARY_DIR}/tmp
STAMP_DIR ${_SB_BINARY_DIR}/stamp
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
-URL http://www.exiv2.org/builds/exiv2-0.26-trunk.tar.gz
+URL http://www.exiv2.org/builds/exiv2-0.27.0-Source.tar.gz
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX=${SB_INSTALL_DIR}
@@ -17,4 +17,4 @@ ExternalProject_Add(${_proj_name}
LOG_DOWNLOAD OFF
LOG_CONFIGURE OFF
LOG_BUILD OFF
-)
+)

View File

@@ -4,7 +4,7 @@ cmake_minimum_required(VERSION 2.8)
set(PROJ4_INCLUDE_DIR "/usr/include/" CACHE "PROJ4_INCLUDE_DIR" "Path to the proj4 inlcude directory")
find_library(PROJ4_LIBRARY "libproj.so" PATHS "/usr/lib" "/usr/lib/x86_64-linux-gnu")
-find_library(EXIV2_LIBRARY "libexiv2.so.26" PATHS "${PROJECT_SOURCE_DIR}/../../SuperBuild/install/lib")
+find_library(EXIV2_LIBRARY "libexiv2.so.27" PATHS "${PROJECT_SOURCE_DIR}/../../SuperBuild/install/lib")
include_directories("${PROJECT_SOURCE_DIR}/../../SuperBuild/install/include")
# Add compiler options.

View File

@@ -26,6 +26,13 @@ set(OpenCV_LIBS opencv_core opencv_imgproc opencv_highgui)
include_directories(${PCL_ROOT}/include/pcl-${PCL_VERSION_MAJOR}.${PCL_VERSION_MINOR})
include_directories(${EIGEN_ROOT})
+# PDAL and jsoncpp
+find_package(PDAL REQUIRED CONFIG)
+include_directories(${PDAL_INCLUDE_DIRS})
+include_directories("${PROJECT_SOURCE_DIR}/../../SuperBuild/src/pdal/vendor/jsoncpp/dist")
+link_directories(${PDAL_LIBRARY_DIRS})
+add_definitions(${PDAL_DEFINITIONS})
# Add source directory
aux_source_directory("./src" SRC_LIST)
@@ -33,4 +40,4 @@ aux_source_directory("./src" SRC_LIST)
add_executable(${PROJECT_NAME} ${SRC_LIST})
# Link
-target_link_libraries(${PROJECT_NAME} ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES} ${PROJ4_LIBRARY} ${OpenCV_LIBS})
+target_link_libraries(${PROJECT_NAME} ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES} ${PROJ4_LIBRARY} ${OpenCV_LIBS} jsoncpp ${PDAL_LIBRARIES})

View File

@@ -13,8 +13,6 @@ using namespace std;
// This
#include "Georef.hpp"
-#define IS_BIG_ENDIAN (*(uint16_t *)"\0\xff" < 0x100)
std::ostream& operator<<(std::ostream &os, const GeorefSystem &geo)
{
return os << setiosflags(ios::fixed) << setprecision(7) << geo.system_ << "\n" << geo.eastingOffset_ << " " << geo.northingOffset_;
@@ -413,6 +411,16 @@ void Georef::parseArguments(int argc, char *argv[])
log_ << "Reading point cloud from: " << inputPointCloudFilename_ << "\n";
georeferencePointCloud_ = true;
}
else if(argument == "-outputPointCloudSrs" && argIndex < argc)
{
argIndex++;
if (argIndex >= argc)
{
throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
outputPointCloudSrs_ = std::string(argv[argIndex]);
log_ << "Using SRS for point cloud: " << outputPointCloudSrs_ << "\n";
}
else if(argument == "-gcpFile" && argIndex < argc)
{
argIndex++;
@@ -590,10 +598,13 @@ void Georef::printHelp()
// log_ << "The resized resolution used in bundler.\n\n";
log_ << "\"-outputFile <path>\" (optional, default <inputFile>_geo)" << "\n";
log_ << "\"Output obj file that will contain the georeferenced texture mesh.\n\n";
log_ << "Output obj file that will contain the georeferenced texture mesh.\n\n";
log_ << "\"-outputPointCloudFile <path>\" (mandatory if georeferencing a point cloud)" << "\n";
log_ << "\"Output ply file that will contain the georeferenced point cloud.\n\n";
log_ << "Output las/laz file that will contain the georeferenced point cloud.\n\n";
log_ << "\"-outputPointCloudSrs <srs>\" (recommended if georeferencing a point cloud)" << "\n";
log_ << "Output spatial reference system to use for output point cloud.\n\n";
log_.setIsPrintingInCout(printInCoutPop);
}
@@ -1406,72 +1417,43 @@ void Georef::performFinalTransform(Mat4 &transMat, pcl::TextureMesh &mesh, pcl::
template <typename Scalar>
void Georef::transformPointCloud(const char *inputFile, const Eigen::Transform<Scalar, 3, Eigen::Affine> &transform, const char *outputFile){
try{
-pcl::PointCloud<pcl::PointXYZRGBNormal> pointCloud;
-if(pcl::io::loadPLYFile<pcl::PointXYZRGBNormal> (inputFile, pointCloud) == -1) {
-throw GeorefException("Error when reading point cloud:\n" + std::string(inputFile) + "\n");
-}
-else
-{
-log_ << "Successfully loaded " << pointCloud.size() << " points with corresponding normals from file.\n";
-}
-log_ << "Writing transformed point cloud to " << outputFile << "...\n";
+log_ << "Transforming point cloud...\n";
-// We don't use PCL's built-in functions
-// because PCL does not support double coordinates
-// precision
-std::ofstream f (outputFile);
-f << "ply" << std::endl;
+// PDAL pipeline: ply reader --> matrix transform --> las writer.
-if (IS_BIG_ENDIAN){
-f << "format binary_big_endian 1.0" << std::endl;
+pdal::Options inPlyOpts;
+inPlyOpts.add("filename", inputFile);
+pdal::PointTable table;
+pdal::PlyReader plyReader;
+plyReader.setOptions(inPlyOpts);
+pdal::MatrixTransformFilter<Scalar> transformFilter(transform);
+transformFilter.setInput(plyReader);
+pdal::Options outLasOpts;
+outLasOpts.add("filename", outputFile);
+std::string s(outputFile);
+if (s.substr(s.find_last_of(".") + 1) == "laz"){
+outLasOpts.add("compression", "laszip");
+}
+outLasOpts.add("offset_x", georefSystem_.eastingOffset_);
+outLasOpts.add("offset_y", georefSystem_.northingOffset_);
+outLasOpts.add("offset_z", 0);
+if (!outputPointCloudSrs_.empty()){
+outLasOpts.add("a_srs", outputPointCloudSrs_);
}else{
-f << "format binary_little_endian 1.0" << std::endl;
+log_ << "WARNING! A point cloud will be generated without a SRS, because -outputPointCloudSrs was not specified!\n";
}
-const char *type = "double";
-if (sizeof(Scalar) == sizeof(float)){
-type = "float";
-}
-f << "element vertex " << pointCloud.size() << std::endl
-<< "property " << type << " x" << std::endl
-<< "property " << type << " y" << std::endl
-<< "property " << type << " z" << std::endl
-<< "property uchar red" << std::endl
-<< "property uchar green" << std::endl
-<< "property uchar blue" << std::endl
-<< "end_header" << std::endl;
-struct PlyPoint{
-Scalar x;
-Scalar y;
-Scalar z;
-uint8_t r;
-uint8_t g;
-uint8_t b;
-} p;
-size_t psize = sizeof(Scalar) * 3 + sizeof(uint8_t) * 3;
-// Transform
-for (unsigned int i = 0; i < pointCloud.size(); i++){
-Scalar x = static_cast<Scalar>(pointCloud[i].x);
-Scalar y = static_cast<Scalar>(pointCloud[i].y);
-Scalar z = static_cast<Scalar>(pointCloud[i].z);
-p.r = pointCloud[i].r;
-p.g = pointCloud[i].g;
-p.b = pointCloud[i].b;
-p.x = static_cast<Scalar> (transform (0, 0) * x + transform (0, 1) * y + transform (0, 2) * z + transform (0, 3));
-p.y = static_cast<Scalar> (transform (1, 0) * x + transform (1, 1) * y + transform (1, 2) * z + transform (1, 3));
-p.z = static_cast<Scalar> (transform (2, 0) * x + transform (2, 1) * y + transform (2, 2) * z + transform (2, 3));
-f.write(reinterpret_cast<char*>(&p), psize);
-// TODO: normals can be computed using the inverse transpose
-// https://paroj.github.io/gltut/Illumination/Tut09%20Normal%20Transformation.html
-}
-f.close();
+pdal::LasWriter lasWriter;
+lasWriter.setOptions(outLasOpts);
+lasWriter.setInput(transformFilter);
+lasWriter.prepare(table);
+lasWriter.execute(table);
log_ << "Point cloud file saved.\n";
}
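
The hunk above replaces the hand-rolled binary PLY writer with a three-stage PDAL pipeline. For reference, here is a minimal, self-contained sketch of that same reader -> filter -> writer pattern against the public PDAL C++ API; the file names and the EPSG code are illustrative placeholders (not values from this commit), and the custom transform filter is omitted to keep the skeleton small.

    // Sketch only: minimal PDAL pipeline mirroring the pattern used above.
    #include <pdal/PointTable.hpp>
    #include <pdal/Options.hpp>
    #include <pdal/io/PlyReader.hpp>
    #include <pdal/io/LasWriter.hpp>

    int main()
    {
        pdal::Options readerOpts;
        readerOpts.add("filename", "input.ply");      // placeholder input path

        pdal::PlyReader reader;
        reader.setOptions(readerOpts);

        pdal::Options writerOpts;
        writerOpts.add("filename", "output.laz");     // placeholder output path
        writerOpts.add("compression", "laszip");      // emit compressed LAZ
        writerOpts.add("a_srs", "EPSG:32610");        // placeholder SRS

        pdal::LasWriter writer;
        writer.setOptions(writerOpts);
        writer.setInput(reader);                      // chain: reader -> writer

        pdal::PointTable table;                       // shared dimension layout
        writer.prepare(table);                        // resolve dimensions through the chain
        writer.execute(table);                        // run the whole pipeline
        return 0;
    }

Calling prepare() and execute() on the terminal stage drives every upstream stage, which is why the new code above only invokes them on lasWriter.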

View File

@@ -18,6 +18,9 @@
// Transformation
#include "FindTransform.hpp"
+// PDAL matrix transform filter
+#include "MatrixTransformFilter.hpp"
/*!
* \brief The GeorefSystem struct is used to store information about a georeference system.
*/
@@ -275,7 +278,8 @@ private:
std::string outputObjFilename_; /**< The path to the output mesh obj file. **/
std::string inputPointCloudFilename_; /**< The path to the input point cloud file. **/
std::string outputPointCloudFilename_; /**< The path to the output point cloud file. **/
-std::string georefFilename_; /**< The path to the output offset file. **/
+std::string georefFilename_; /**< The path to the output offset file. **/
+std::string outputPointCloudSrs_; /**< The spatial reference system of the point cloud file to be written. Can be an EPSG string (e.g. "EPSG:26910") or a WKT string. **/
bool georeferencePointCloud_;
bool exportCoordinateFile_;

View File

@@ -0,0 +1,2 @@
+#include "MatrixTransformFilter.hpp"

View File

@@ -0,0 +1,38 @@
+#pragma once
+#include <pcl/common/eigen.h>
+#include <pdal/PointTable.hpp>
+#include <pdal/PointView.hpp>
+#include <pdal/io/PlyReader.hpp>
+#include <pdal/io/LasWriter.hpp>
+#include <pdal/Options.hpp>
+#include <pdal/Filter.hpp>
+namespace pdal{
+template <typename Scalar>
+class MatrixTransformFilter : public Filter{
+Eigen::Transform<Scalar, 3, Eigen::Affine> transform;
+public:
+MatrixTransformFilter(const Eigen::Transform<Scalar, 3, Eigen::Affine> &transform)
+: transform(transform){};
+std::string getName() const { return "MatrixTransformFilter"; }
+virtual void filter(PointView &view)
+{
+for (PointId id = 0; id < view.size(); ++id)
+{
+Scalar x = view.getFieldAs<Scalar>(Dimension::Id::X, id);
+Scalar y = view.getFieldAs<Scalar>(Dimension::Id::Y, id);
+Scalar z = view.getFieldAs<Scalar>(Dimension::Id::Z, id);
+view.setField(pdal::Dimension::Id::X, id, transform (0, 0) * x + transform (0, 1) * y + transform (0, 2) * z + transform (0, 3));
+view.setField(pdal::Dimension::Id::Y, id, transform (1, 0) * x + transform (1, 1) * y + transform (1, 2) * z + transform (1, 3));
+view.setField(pdal::Dimension::Id::Z, id, transform (2, 0) * x + transform (2, 1) * y + transform (2, 2) * z + transform (2, 3));
+}
+}
+// TODO: implement streaming mode
+};
+}
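
The TODO above notes that streaming mode is unimplemented, so this filter forces whole-cloud, PointView-based execution. One plausible way to address it, assuming a PDAL version that ships the Streamable interface (roughly 1.8 and later), is sketched below; the class name and exact override set are hypothetical, not part of this commit.

    // Hypothetical streaming variant: stages opt into stream mode by also
    // deriving from pdal::Streamable and implementing processOne(), which
    // PDAL calls once per point.
    #include <pdal/Streamable.hpp>
    #include <pdal/PointRef.hpp>

    namespace pdal{
    template <typename Scalar>
    class StreamingMatrixTransformFilter : public Filter, public Streamable{
        Eigen::Transform<Scalar, 3, Eigen::Affine> transform;
    public:
        StreamingMatrixTransformFilter(const Eigen::Transform<Scalar, 3, Eigen::Affine> &transform)
            : transform(transform){}
        std::string getName() const { return "StreamingMatrixTransformFilter"; }

        // Same affine math as filter() above, applied to one point at a time.
        virtual bool processOne(PointRef &point)
        {
            Scalar x = point.getFieldAs<Scalar>(Dimension::Id::X);
            Scalar y = point.getFieldAs<Scalar>(Dimension::Id::Y);
            Scalar z = point.getFieldAs<Scalar>(Dimension::Id::Z);
            point.setField(Dimension::Id::X, transform(0, 0) * x + transform(0, 1) * y + transform(0, 2) * z + transform(0, 3));
            point.setField(Dimension::Id::Y, transform(1, 0) * x + transform(1, 1) * y + transform(1, 2) * z + transform(1, 3));
            point.setField(Dimension::Id::Z, transform(2, 0) * x + transform(2, 1) * y + transform(2, 2) * z + transform(2, 3));
            return true;   // false would drop the point from the stream
        }
    };
    }

Executing in stream mode would then mean passing a pdal::FixedPointTable (a fixed-capacity table) to the terminal stage's execute() instead of a pdal::PointTable.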

View File

@@ -25,8 +25,8 @@ orb_slam2_path = os.path.join(superbuild_path, "src/orb_slam2")
# define mve join_paths
makescene_path = os.path.join(superbuild_path, 'src', 'elibs', 'mve', 'apps', 'makescene', 'makescene') #TODO: don't install in source
-mve_path = os.path.join(superbuild_path, 'src', 'elibs', 'mve', 'apps', 'dmrecon', 'dmrecon')
-mve_path_pc = os.path.join(superbuild_path, 'src', 'elibs', 'mve', 'apps', 'scene2pset', 'scene2pset -F2')
+mve_path = os.path.join(superbuild_path, 'src', 'elibs', 'mve', 'apps', 'dmrecon', 'dmrecon --progress=silent')
+mve_path_pc = os.path.join(superbuild_path, 'src', 'elibs', 'mve', 'apps', 'scene2pset', 'scene2pset -F2 --with-conf')
poisson_recon_path = os.path.join(superbuild_path, 'src', 'PoissonRecon', 'Bin', 'Linux', 'PoissonRecon')
dem2mesh_path = os.path.join(superbuild_path, 'src', 'dem2mesh', 'dem2mesh')

View File

@@ -242,43 +242,6 @@ class ODM_GeoRef(object):
output = str(deg) + '/1 ' + str(minute) + '/1 ' + str(sec_numerator) + '/' + str(sec_denominator)
return output, latRef
-def convert_to_las(self, _file, _file_out, json_file):
-if not self.projection.srs:
-log.ODM_ERROR('Empty CRS: Could not convert to LAS')
-return
-kwargs = {'bin': context.pdal_path,
-'f_in': _file,
-'f_out': _file_out,
-'east': self.utm_east_offset,
-'north': self.utm_north_offset,
-'srs': self.projection.srs,
-'json': json_file}
-# create pipeline file las.json to write odm_georeferenced_model.laz point cloud
-pipeline = '{{' \
-' "pipeline":[' \
-' "untransformed.ply",' \
-' {{' \
-' "type":"writers.las",' \
-' "a_srs":"{srs}",' \
-' "offset_x":"{east}",' \
-' "offset_y":"{north}",' \
-' "offset_z":"0",' \
-' "compression":"laszip",' \
-' "filename":"{f_out}"' \
-' }}' \
-' ]' \
-'}}'.format(**kwargs)
-with open(json_file, 'w') as f:
-f.write(pipeline)
-# call pdal
-system.run('{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in}'.format(**kwargs))
def extract_offsets(self, _file):
if not io.file_exists(_file):
log.ODM_ERROR('Could not find file %s' % _file)
@@ -398,7 +361,6 @@ class ODM_Tree(object):
self.odm_georeferencing_transform_file = 'odm_georeferencing_transform.txt'
self.odm_georeferencing_proj = 'proj.txt'
self.odm_georeferencing_model_txt_geo = 'odm_georeferencing_model_geo.txt'
-self.odm_georeferencing_model_ply_geo = 'odm_georeferenced_model.ply'
self.odm_georeferencing_model_obj_geo = 'odm_textured_model_geo.obj'
self.odm_georeferencing_xyz_file = io.join_paths(
self.odm_georeferencing, 'odm_georeferenced_model.csv')

View File

@@ -1,7 +1,7 @@
import ecto
import csv
import os
-import struct
+import pipes
from opendm import io
from opendm import log
@@ -41,6 +41,7 @@ class ODMGeoreferencingCell(ecto.Cell):
doPointCloudGeo = True
transformPointCloud = True
verbose = '-verbose' if self.params.verbose else ''
+geo_ref = reconstruction.georef
# check if we rerun cell or not
rerun_cell = (args.rerun is not None and
@@ -67,13 +68,12 @@
for r in runs:
odm_georeferencing_model_obj_geo = os.path.join(r['texturing_dir'], tree.odm_georeferencing_model_obj_geo)
-odm_georeferencing_model_ply_geo = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_model_ply_geo)
odm_georeferencing_log = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_log)
odm_georeferencing_transform_file = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_transform_file)
odm_georeferencing_model_txt_geo_file = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_model_txt_geo)
if not io.file_exists(odm_georeferencing_model_obj_geo) or \
-not io.file_exists(odm_georeferencing_model_ply_geo) or rerun_cell:
+not io.file_exists(tree.odm_georeferencing_model_laz) or rerun_cell:
# odm_georeference definitions
kwargs = {
@@ -86,26 +86,30 @@
'input_trans_file': tree.opensfm_transformation,
'transform_file': odm_georeferencing_transform_file,
'coords': tree.odm_georeferencing_coords,
-'pc_geo': odm_georeferencing_model_ply_geo,
+'output_pc_file': tree.odm_georeferencing_model_laz,
'geo_sys': odm_georeferencing_model_txt_geo_file,
'model_geo': odm_georeferencing_model_obj_geo,
'gcp': gcpfile,
'verbose': verbose
}
if args.fast_orthophoto:
-kwargs['pc'] = os.path.join(tree.opensfm, 'reconstruction.ply')
+kwargs['input_pc_file'] = os.path.join(tree.opensfm, 'reconstruction.ply')
elif args.use_opensfm_dense:
-kwargs['pc'] = tree.opensfm_model
+kwargs['input_pc_file'] = tree.opensfm_model
else:
-kwargs['pc'] = tree.mve_model
+kwargs['input_pc_file'] = tree.mve_model
if transformPointCloud:
-kwargs['pc_params'] = '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo}'.format(**kwargs)
+kwargs['pc_params'] = '-inputPointCloudFile {input_pc_file} -outputPointCloudFile {output_pc_file}'.format(**kwargs)
+if geo_ref.projection.srs:
+kwargs['pc_params'] += ' -outputPointCloudSrs %s' % pipes.quote(geo_ref.projection.srs)
+else:
+log.ODM_WARNING('NO SRS: The output point cloud will not have a SRS.')
else:
kwargs['pc_params'] = ''
# Check to see if the GCP file exists
if not self.params.use_exif and (self.params.gcp_file or tree.odm_georeferencing_gcp):
@@ -139,46 +143,33 @@
if doPointCloudGeo:
# update images metadata
-geo_ref = reconstruction.georef
geo_ref.extract_offsets(odm_georeferencing_model_txt_geo_file)
-# convert ply model to LAS reference system
-geo_ref.convert_to_las(odm_georeferencing_model_ply_geo,
-tree.odm_georeferencing_model_laz,
-tree.odm_georeferencing_las_json)
reconstruction.georef = geo_ref
# XYZ point cloud output
if args.pc_csv:
log.ODM_INFO("Creating geo-referenced CSV file (XYZ format)")
-with open(tree.odm_georeferencing_xyz_file, "wb") as csvfile:
-csvfile_writer = csv.writer(csvfile, delimiter=",")
-with open(odm_georeferencing_model_ply_geo) as f:
-endianess = '<' # little endian
-while True:
-line = f.readline()
-if "binary_big_endian" in line:
-endianess = '>'
-if line.startswith("end_header"):
-break
-fmt = '{}dddBBB'.format(endianess)
-while True:
-chunk = f.read(27) # 3 doubles, 3 uints
-if len(chunk) < 27:
-break
-tokens = struct.unpack(fmt, chunk)
-csv_line = [float(tokens[0]),
-float(tokens[1]),
-tokens[2]]
-csvfile_writer.writerow(csv_line)
+system.run("pdal translate -i \"{}\" "
+"-o \"{}\" "
+"--writers.text.format=csv "
+"--writers.text.order=\"X,Y,Z\" "
+"--writers.text.keep_unspecified=false ".format(
+tree.odm_georeferencing_model_laz,
+tree.odm_georeferencing_xyz_file))
if args.crop > 0:
log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')
+decimation_step = 40 if args.fast_orthophoto or args.use_opensfm_dense else 90
+# More aggressive decimation for large datasets
+if not args.fast_orthophoto:
+decimation_step *= int(len(reconstruction.photos) / 1000) + 1
cropper.create_bounds_shapefile(tree.odm_georeferencing_model_laz, args.crop,
-decimation_step=40 if args.fast_orthophoto or args.use_opensfm_dense else 90,
+decimation_step=decimation_step,
outlier_radius=20 if args.fast_orthophoto else 2)
# Do not execute a second time, since
@@ -188,7 +179,7 @@ class ODMGeoreferencingCell(ecto.Cell):
transformPointCloud = False
else:
log.ODM_WARNING('Found a valid georeferenced model in: %s'
-% odm_georeferencing_model_ply_geo)
+% tree.odm_georeferencing_model_laz)
outputs.reconstruction = reconstruction