Merge branch 'master' into libexiv

pull/892/head
Piero Toffanin 2018-09-13 14:28:52 -04:00
commit 7da9eda0e6
59 changed files with 2039 additions and 2900 deletions

View file

@@ -6,12 +6,9 @@ SuperBuild/install
SuperBuild/src
build
opensfm
pmvs
odm_orthophoto
odm_texturing
odm_meshing
odm_georeferencing
images_resize
.git

View file

@@ -21,7 +21,7 @@ libexiv2-dev liblas-bin python-matplotlib libatlas-base-dev swig2.0 python-wheel
RUN apt-get remove libdc1394-22-dev
RUN pip install --upgrade pip
RUN pip install setuptools
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/gipit/gippy/archive/v1.0.0.zip loky shapely numpy pyproj psutil && pip install -U scipy --ignore-installed
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/gipit/gippy/archive/1.0.0.zip loky shapely numpy pyproj psutil repoze.lru && pip install -U scipy --ignore-installed
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages"
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm"
@@ -60,7 +60,7 @@ RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Clean Superbuild
RUN rm -rf /code/SuperBuild/download /code/SuperBuild/src/vtk7 /code/SuperBuild/src/opencv /code/SuperBuild/src/pcl /code/SuperBuild/src/pdal /code/SuperBuild/src/opengv /code/SuperBuild/src/mvstexturing /code/SuperBuild/src/ceres /code/SuperBuild/build/vtk7 /code/SuperBuild/build/opencv
RUN rm -rf /code/SuperBuild/download /code/SuperBuild/src/opencv /code/SuperBuild/src/pcl /code/SuperBuild/src/pdal /code/SuperBuild/src/opengv /code/SuperBuild/src/mvstexturing /code/SuperBuild/src/ceres /code/SuperBuild/build/opencv
# Entry point
ENTRYPOINT ["python", "/code/run.py", "code"]

View file

@@ -105,7 +105,7 @@ or
python run.py --rerun-from odm_meshing project-name
The options for rerunning are: 'resize', 'opensfm', 'slam', 'cmvs', 'pmvs', 'odm_meshing', 'mvs_texturing', 'odm_georeferencing', 'odm_orthophoto'
The options for rerunning are: 'resize', 'opensfm', 'slam', 'smvs', 'odm_meshing', 'mvs_texturing', 'odm_georeferencing', 'odm_orthophoto'
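As a quick illustration of the updated list (the 'smvs' stage stands in for the removed 'cmvs'/'pmvs' stages), rerunning from the new dense matching stage follows the same pattern as above; `project-name` is a placeholder:

python run.py --rerun-from smvs project-name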
### View Results
@@ -151,7 +151,7 @@ You can also view the orthophoto GeoTIFF in [QGIS](http://www.qgis.org/) or othe
## Build and Run Using Docker
(Instructions below apply to Ubuntu 14.04, but the Docker image workflow
has equivalent procedures for Mac OS X and Windows. See [docs.docker.com](https://docs.docker.com/))
OpenDroneMap is Dockerized, meaning you can use containerization to build and run it without tampering with the configuration of libraries and packages already
@@ -177,7 +177,7 @@ If you want to build your own Docker image from sources, type:
Using this method, the containerized ODM will process the images in the OpenDroneMap/images directory and output results
to the OpenDroneMap/odm_orthophoto and OpenDroneMap/odm_texturing directories as described in the [Viewing Results](https://github.com/OpenDroneMap/OpenDroneMap/wiki/Output-and-Results) section.
If you want to view other results outside the Docker image, simply add the directories you're interested in to the run command in the same pattern
established above. For example, if you're interested in the dense cloud results generated by PMVS and in the orthophoto,
established above. For example, if you're interested in the dense cloud results generated by OpenSfM and in the orthophoto,
simply use the following `docker run` command after building the image:
docker run -it --rm \
@@ -195,7 +195,7 @@ If you want to get all intermediate outputs, run the following command:
-v "$(pwd)/odm_orthophoto:/code/odm_orthophoto" \
-v "$(pwd)/odm_texturing:/code/odm_texturing" \
-v "$(pwd)/opensfm:/code/opensfm" \
-v "$(pwd)/pmvs:/code/pmvs" \
-v "$(pwd)/smvs:/code/smvs" \
opendronemap/opendronemap
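Stitched together from the mount pattern described above (the images mount and its /code/images container path are assumed here, following the same convention as the other directories), the complete command would look roughly like:

docker run -it --rm \
    -v "$(pwd)/images:/code/images" \
    -v "$(pwd)/odm_orthophoto:/code/odm_orthophoto" \
    -v "$(pwd)/odm_texturing:/code/odm_texturing" \
    -v "$(pwd)/opensfm:/code/opensfm" \
    -v "$(pwd)/smvs:/code/smvs" \
    opendronemap/opendronemap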
To pass in custom parameters to the run.py script, simply pass it as arguments to the `docker run` command. For example:
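The README's own example sits outside the excerpted hunk; the sketch below is an illustrative stand-in that reuses the --rerun-from option shown earlier, with the volume mounts abbreviated:

docker run -it --rm \
    -v "$(pwd)/images:/code/images" \
    -v "$(pwd)/odm_orthophoto:/code/odm_orthophoto" \
    opendronemap/opendronemap \
    --rerun-from odm_meshing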
@@ -222,7 +222,7 @@ When building your own Docker image, if image size is of importance to you, you
This will clean up intermediate steps in the Docker build process, resulting in a significantly smaller image (about half the size).
Experimental flags need to be enabled in Docker to use the ```--squash``` flag. To enable this, insert the following into the file ```/etc/docker/daemon.json```:
{
"experimental": true
}
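Changes to daemon.json only take effect after the Docker daemon is restarted; the build itself then just adds the flag (the image tag below is a placeholder):

docker build --squash -t my_odm_image .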
@@ -244,7 +244,7 @@ Coming soon...
## Documentation:
For documentation, everything is being moved to [http://docs.opendronemap.org/](http://docs.opendronemap.org/) but you can also take a look at our [wiki](https://github.com/OpenDroneMap/OpenDroneMap/wiki). Check those places first if you are having problems. There's also help at [community forum](http://community.opendronemap.org/), and if you still need help and think you've found a bug or need an enhancement, look through the issue queue or create one.
## Developers

View file

@@ -95,17 +95,6 @@ option(ODM_BUILD_Ceres "Force to build Ceres library" OFF)
SETUP_EXTERNAL_PROJECT(Ceres ${ODM_Ceres_Version} ${ODM_BUILD_Ceres})
# ---------------------------------------------------------------------------------------------
# VTK7
# We need to build VTK from sources because Debian packages
# are built with DVTK_SMP_IMPLEMENTATION_TYPE set to
# "Sequential" which means no multithread support.
set(ODM_VTK7_Version 7.1.1)
option(ODM_BUILD_VTK7 "Force to build VTK7 library" OFF)
SETUP_EXTERNAL_PROJECT(VTK7 ${ODM_VTK7_Version} ${ODM_BUILD_VTK7})
# ---------------------------------------------------------------------------------------------
# Hexer
#
@@ -114,14 +103,12 @@ SETUP_EXTERNAL_PROJECT(Hexer 1.4 ON)
# ---------------------------------------------------------------------------------------------
# Open Geometric Vision (OpenGV)
# Open Structure from Motion (OpenSfM)
# Clustering Views for Multi-view Stereo (CMVS)
# Catkin
# Ecto
#
set(custom_libs OpenGV
OpenSfM
CMVS
Catkin
Ecto
LASzip
@@ -141,3 +128,38 @@ foreach(lib ${custom_libs})
SETUP_EXTERNAL_PROJECT_CUSTOM(${lib})
endforeach()
## Add smvs Build
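# MVE (Multi-View Environment) provides the scene and depth-map framework that SMVS
# depends on (see DEPENDS below); SMVS (Shading-aware Multi-View Stereo) takes over
# dense reconstruction from the removed CMVS/PMVS step, and PoissonRecon is pulled in,
# presumably for the surface reconstruction step.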
externalproject_add(mve
GIT_REPOSITORY https://github.com/simonfuhrmann/mve.git
GIT_TAG 2106a5b0aef61a7f744551510b1b4c5d8e3be594
UPDATE_COMMAND ""
SOURCE_DIR ${SB_SOURCE_DIR}/elibs/mve
CONFIGURE_COMMAND ""
BUILD_IN_SOURCE 1
BUILD_COMMAND make
INSTALL_COMMAND ""
)
externalproject_add(smvs
DEPENDS mve
GIT_REPOSITORY https://github.com/flanggut/smvs.git
GIT_TAG 6a7d0c095aa66ab98c5b285c2bc04e34d8993353
UPDATE_COMMAND ""
SOURCE_DIR ${SB_SOURCE_DIR}/elibs/smvs
CONFIGURE_COMMAND ""
BUILD_IN_SOURCE 1
BUILD_COMMAND make
INSTALL_COMMAND ""
)
externalproject_add(poissonrecon
GIT_REPOSITORY https://github.com/mkazhdan/PoissonRecon.git
GIT_TAG ce5005ae3094d902d551a65a8b3131e06f45e7cf
SOURCE_DIR ${SB_SOURCE_DIR}/PoissonRecon
UPDATE_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_IN_SOURCE 1
BUILD_COMMAND make poissonrecon
INSTALL_COMMAND ""
)

View file

@@ -1,28 +0,0 @@
set(_proj_name cmvs)
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
ExternalProject_Add(${_proj_name}
PREFIX ${_SB_BINARY_DIR}
TMP_DIR ${_SB_BINARY_DIR}/tmp
STAMP_DIR ${_SB_BINARY_DIR}/stamp
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
URL https://github.com/edgarriba/CMVS-PMVS/archive/master.zip
URL_MD5 dbb1493f49ca099b4208381bd20d1435
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CONFIGURE_COMMAND cmake <SOURCE_DIR>/program
-DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${SB_INSTALL_DIR}/bin
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
#--Build step-----------------
BINARY_DIR ${_SB_BINARY_DIR}
#--Install step---------------
INSTALL_DIR ${SB_INSTALL_DIR}
#--Output logging-------------
LOG_DOWNLOAD OFF
LOG_CONFIGURE OFF
LOG_BUILD OFF
)

View file

@@ -8,8 +8,7 @@ ExternalProject_Add(${_proj_name}
STAMP_DIR ${_SB_BINARY_DIR}/stamp
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
URL https://github.com/mapillary/OpenSfM/archive/93ae099862297c36ae94dd56fca1c062aa2bb60d.zip
URL_MD5 f0d8ec8a8dc9e0f6fd55f77d5407e50f
URL https://github.com/OpenDroneMap/OpenSfM/archive/9b4b17f238a3762c4267cdaeb5f64173c0f704a6.zip
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------

View file

@@ -1,29 +0,0 @@
set(_proj_name vtk7)
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
ExternalProject_Add(${_proj_name}
PREFIX ${_SB_BINARY_DIR}
TMP_DIR ${_SB_BINARY_DIR}/tmp
STAMP_DIR ${_SB_BINARY_DIR}/stamp
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
URL https://github.com/Kitware/VTK/archive/v7.1.1.zip
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
-DVTK_SMP_IMPLEMENTATION_TYPE=TBB
-DCMAKE_BUILD_TYPE=Release
-DVTK_Group_Rendering=OFF
-DBUILD_TESTING=OFF
#--Build step-----------------
BINARY_DIR ${_SB_BINARY_DIR}
#--Install step---------------
INSTALL_DIR ${SB_INSTALL_DIR}
#--Output logging-------------
LOG_DOWNLOAD OFF
LOG_CONFIGURE OFF
LOG_BUILD OFF
)

View file

@@ -1 +1 @@
0.3.1
0.4.0

View file

@@ -77,7 +77,8 @@ install() {
gpxpy \
xmltodict \
appsettings \
loky
loky \
repoze.lru
echo "Installing Ecto Dependencies"
pip install -U catkin-pkg
@@ -99,7 +100,8 @@ install() {
echo "Installing split-merge Dependencies"
pip install -U scipy shapely numpy pyproj
pip install -U https://github.com/gipit/gippy/archive/v1.0.0.zip psutil
pip install -U https://github.com/gipit/gippy/archive/1.0.0.zip psutil
echo "Compiling SuperBuild"
cd ${RUNPATH}/SuperBuild

View file

@@ -19,7 +19,7 @@ libexiv2-dev liblas-bin python-matplotlib libatlas-base-dev swig2.0 python-wheel
RUN apt-get remove libdc1394-22-dev
RUN pip install --upgrade pip
RUN pip install setuptools
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/gipit/gippy/archive/v1.0.0.zip loky shapely numpy pyproj psutil && pip install -U scipy --ignore-installed
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/gipit/gippy/archive/1.0.0.zip loky shapely numpy pyproj psutil repoze.lru && pip install -U scipy --ignore-installed
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages"
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm"
@@ -61,7 +61,7 @@ RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Clean Superbuild
RUN rm -rf /code/SuperBuild/download /code/SuperBuild/src/vtk7 /code/SuperBuild/src/opencv /code/SuperBuild/src/pcl /code/SuperBuild/src/pdal /code/SuperBuild/src/opengv /code/SuperBuild/src/mvstexturing /code/SuperBuild/src/ceres /code/SuperBuild/build/vtk7 /code/SuperBuild/build/opencv
RUN rm -rf /code/SuperBuild/download /code/SuperBuild/src/opencv /code/SuperBuild/src/pcl /code/SuperBuild/src/pdal /code/SuperBuild/src/opengv /code/SuperBuild/src/mvstexturing /code/SuperBuild/src/ceres /code/SuperBuild/build/opencv
# Entry point
ENTRYPOINT ["python", "/code/run.py", "code"]

View file

@@ -10,8 +10,6 @@ Licensing for portions of OpenDroneMap are as follows:
* libcv - BSD - http://opencv.org/license.html
* libcvaux - BSD - http://opencv.org/license.html
* bundler - GPLv3 - http://www.gnu.org/copyleft/gpl.html
* cmvs - GPLv3 - http://www.gnu.org/copyleft/gpl.html
* pmvs2 - GPLv3 - http://www.gnu.org/copyleft/gpl.html
* parallel - GPLv3 - http://www.gnu.org/copyleft/gpl.html
* PoissonRecon - BSD - http://www.cs.jhu.edu/~misha/Code/PoissonRecon/license.txt
* vlfeat - BSD - http://www.vlfeat.org/license.html

View file

@@ -6,9 +6,9 @@ endif()
# Add ODM sub-modules
add_subdirectory(odm_extract_utm)
add_subdirectory(odm_georef)
add_subdirectory(odm_meshing)
add_subdirectory(odm_orthophoto)
add_subdirectory(odm_25dmeshing)
add_subdirectory(odm_cleanmesh)
add_subdirectory(odm_dem2points)
if (ODM_BUILD_SLAM)
add_subdirectory(odm_slam)
endif ()

View file

@@ -1,31 +0,0 @@
#include "Logger.hpp"
Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout)
{
}
Logger::~Logger()
{
}
void Logger::printToFile(std::string filePath)
{
std::ofstream file(filePath.c_str(), std::ios::binary);
file << logStream_.str();
file.close();
}
bool Logger::isPrintingInCout() const
{
return isPrintingInCout_;
}
void Logger::setIsPrintingInCout(bool isPrintingInCout)
{
isPrintingInCout_ = isPrintingInCout;
}

View file

@@ -1,67 +0,0 @@
#pragma once
#include <string>
#include <sstream>
#include <fstream>
#include <iostream>
/*!
* \brief The Logger class is used to store program messages in a log file.
* \details By using the << operator while printInCout is set, the class writes both to
* cout and to file, if the flag is not set, output is written to file only.
*/
class Logger
{
public:
/*!
* \brief Logger Contains functionality for printing and displaying log information.
* \param printInCout Flag toggling if operator << also writes to cout.
*/
Logger(bool isPrintingInCout = true);
/*!
* \brief Destructor.
*/
~Logger();
/*!
* \brief print Prints the contents of the log to file.
* \param filePath Path specifying where to write the log.
*/
void printToFile(std::string filePath);
/*!
* \brief isPrintingInCout Check if console printing flag is set.
* \return Console printing flag.
*/
bool isPrintingInCout() const;
/*!
* \brief setIsPrintingInCout Set console printing flag.
* \param isPrintingInCout Value, if true, messages added to the log are also printed in cout.
*/
void setIsPrintingInCout(bool isPrintingInCout);
/*!
* Operator for printing messages to log and in the standard output stream if desired.
*/
template<class T>
friend Logger& operator<< (Logger &log, T t)
{
// If console printing is enabled.
if (log.isPrintingInCout_)
{
std::cout << t;
std::cout.flush();
}
// Write to log.
log.logStream_ << t;
return log;
}
private:
bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */
std::stringstream logStream_; /*!< Stream for storing the log. */
};
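As a usage sketch of the Logger interface declared above (removed in this commit; the file name and messages here are illustrative only), the intended pattern is to stream messages, optionally echo them to stdout, and flush everything to disk at the end:

#include "Logger.hpp"

int main()
{
    Logger log(false);                    // quiet: messages go to the internal buffer only
    log << "Loaded " << 42 << " points\n";
    log.setIsPrintingInCout(true);        // from here on, also echo to std::cout
    log << "Done\n";
    log.printToFile("odm_log.txt");       // write the accumulated buffer to disk
    return 0;
}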

View file

@@ -1,450 +0,0 @@
#include "Odm25dMeshing.hpp"
int Odm25dMeshing::run(int argc, char **argv) {
log << logFilePath << "\n";
// If no arguments were passed, print help and return early.
if (argc <= 1) {
printHelp();
return EXIT_SUCCESS;
}
try {
parseArguments(argc, argv);
loadPointCloud();
buildMesh();
} catch (const Odm25dMeshingException& e) {
log.setIsPrintingInCout(true);
log << e.what() << "\n";
log.printToFile(logFilePath);
log << "For more detailed information, see log file." << "\n";
return EXIT_FAILURE;
} catch (const std::exception& e) {
log.setIsPrintingInCout(true);
log << "Error in OdmMeshing:\n";
log << e.what() << "\n";
log.printToFile(logFilePath);
log << "For more detailed information, see log file." << "\n";
return EXIT_FAILURE;
}
log.printToFile(logFilePath);
return EXIT_SUCCESS;
}
void Odm25dMeshing::loadPointCloud() {
log << "Loading point cloud... ";
try{
std::ifstream ss(inputFile, std::ios::binary);
if (ss.fail()) throw Odm25dMeshingException("Failed to open " + inputFile);
PlyFile file;
file.parse_header(ss);
std::shared_ptr<PlyData> vertices = file.request_properties_from_element("vertex", { "x", "y", "z" });
file.read(ss);
const size_t numVerticesBytes = vertices->buffer.size_bytes();
struct float3 { float x, y, z; };
struct double3 { double x, y, z; };
if (vertices->t == tinyply::Type::FLOAT32) {
std::vector<float3> verts(vertices->count);
std::memcpy(verts.data(), vertices->buffer.get(), numVerticesBytes);
for (float3 &v : verts){
points->InsertNextPoint(v.x, v.y, v.z);
}
}else if (vertices->t == tinyply::Type::FLOAT64) {
std::vector<double3> verts(vertices->count);
std::memcpy(verts.data(), vertices->buffer.get(), numVerticesBytes);
for (double3 &v : verts){
points->InsertNextPoint(v.x, v.y, v.z);
}
}else{
throw Odm25dMeshingException("Invalid data type (only float32 and float64 are supported): " + std::to_string((int)vertices->t));
}
}
catch (const std::exception & e)
{
throw Odm25dMeshingException("Error while loading point cloud: " + std::string(e.what()));
}
log << "loaded " << points->GetNumberOfPoints() << " points\n";
}
void Odm25dMeshing::buildMesh(){
vtkThreadedImageAlgorithm::SetGlobalDefaultEnableSMP(true);
log << "Remove outliers... ";
vtkSmartPointer<vtkPolyData> polyPoints =
vtkSmartPointer<vtkPolyData>::New();
polyPoints->SetPoints(points);
vtkSmartPointer<vtkOctreePointLocator> locator = vtkSmartPointer<vtkOctreePointLocator>::New();
vtkSmartPointer<vtkRadiusOutlierRemoval> radiusRemoval =
vtkSmartPointer<vtkRadiusOutlierRemoval>::New();
radiusRemoval->SetInputData(polyPoints);
radiusRemoval->SetLocator(locator);
radiusRemoval->SetRadius(20); // 20 meters
radiusRemoval->SetNumberOfNeighbors(2);
vtkSmartPointer<vtkStatisticalOutlierRemoval> statsRemoval =
vtkSmartPointer<vtkStatisticalOutlierRemoval>::New();
statsRemoval->SetInputConnection(radiusRemoval->GetOutputPort());
statsRemoval->SetLocator(locator);
statsRemoval->SetSampleSize(neighbors);
statsRemoval->SetStandardDeviationFactor(1.5);
statsRemoval->GenerateOutliersOff();
statsRemoval->Update();
log << (radiusRemoval->GetNumberOfPointsRemoved() + statsRemoval->GetNumberOfPointsRemoved()) << " points removed\n";
vtkSmartPointer<vtkPoints> cleanedPoints = statsRemoval->GetOutput()->GetPoints();
statsRemoval = nullptr;
radiusRemoval = nullptr;
polyPoints = nullptr;
log << "Squash point cloud to plane... ";
vtkSmartPointer<vtkFloatArray> elevation = vtkSmartPointer<vtkFloatArray>::New();
elevation->SetName("elevation");
elevation->SetNumberOfComponents(1);
double p[2];
for (vtkIdType i = 0; i < cleanedPoints->GetNumberOfPoints(); i++){
cleanedPoints->GetPoint(i, p);
elevation->InsertNextValue(p[2]);
p[2] = 0.0;
cleanedPoints->SetPoint(i, p);
}
log << "OK\n";
vtkSmartPointer<vtkPolyData> polydataToProcess =
vtkSmartPointer<vtkPolyData>::New();
polydataToProcess->SetPoints(cleanedPoints);
polydataToProcess->GetPointData()->SetScalars(elevation);
const float NODATA = -9999;
double *bounds = polydataToProcess->GetBounds();
double centerX = polydataToProcess->GetCenter()[0];
double centerY = polydataToProcess->GetCenter()[1];
double centerZ = polydataToProcess->GetCenter()[2];
double extentX = bounds[1] - bounds[0];
double extentY = bounds[3] - bounds[2];
if (resolution == 0.0){
resolution = (double)maxVertexCount / (sqrt(extentX * extentY) * 75.0);
log << "Automatically set resolution to " << std::fixed << resolution << "\n";
}
int width = ceil(extentX * resolution);
int height = ceil(extentY * resolution);
log << "Plane extentX: " << extentX <<
", extentY: " << extentY << "\n";
double planeCenter[3];
planeCenter[0] = centerX;
planeCenter[1] = centerY;
planeCenter[2] = centerZ;
vtkSmartPointer<vtkPlaneSource> plane =
vtkSmartPointer<vtkPlaneSource>::New();
plane->SetResolution(width, height);
plane->SetOrigin(0.0, 0.0, 0.0);
plane->SetPoint1(extentX, 0.0, 0.0);
plane->SetPoint2(0.0, extentY, 0);
plane->SetCenter(planeCenter);
plane->SetNormal(0.0, 0.0, 1.0);
vtkSmartPointer<vtkShepardKernel> shepardKernel =
vtkSmartPointer<vtkShepardKernel>::New();
shepardKernel->SetPowerParameter(2.0);
shepardKernel->SetKernelFootprintToNClosest();
shepardKernel->SetNumberOfPoints(neighbors);
vtkSmartPointer<vtkImageData> image =
vtkSmartPointer<vtkImageData>::New();
image->SetDimensions(width, height, 1);
log << "DSM size is " << width << "x" << height << " (" << ceil(width * height * sizeof(float) * 1e-6) << " MB) \n";
image->AllocateScalars(VTK_FLOAT, 1);
log << "Point interpolation using shepard's kernel... ";
vtkSmartPointer<vtkPointInterpolator> interpolator =
vtkSmartPointer<vtkPointInterpolator>::New();
interpolator->SetInputConnection(plane->GetOutputPort());
interpolator->SetSourceData(polydataToProcess);
interpolator->SetKernel(shepardKernel);
interpolator->SetLocator(locator);
interpolator->SetNullValue(NODATA);
interpolator->Update();
vtkSmartPointer<vtkPolyData> interpolatedPoly =
interpolator->GetPolyDataOutput();
log << "OK\nTransfering interpolation results to DSM... ";
interpolator = nullptr;
polydataToProcess = nullptr;
elevation = nullptr;
cleanedPoints = nullptr;
plane = nullptr;
shepardKernel = nullptr;
locator = nullptr;
vtkSmartPointer<vtkFloatArray> interpolatedElevation =
vtkFloatArray::SafeDownCast(interpolatedPoly->GetPointData()->GetArray("elevation"));
for (int i = 0; i < width; i++){
for (int j = 0; j < height; j++){
float* pixel = static_cast<float*>(image->GetScalarPointer(i,j,0));
vtkIdType cellId = interpolatedPoly->GetCell(j * width + i)->GetPointId(0);
pixel[0] = interpolatedElevation->GetValue(cellId);
}
}
log << "OK\nMedian filter...";
vtkSmartPointer<vtkImageMedian3D> medianFilter =
vtkSmartPointer<vtkImageMedian3D>::New();
medianFilter->SetInputData(image);
medianFilter->SetKernelSize(
std::max(1.0, resolution),
std::max(1.0, resolution),
1);
medianFilter->Update();
log << "OK\n";
// double diffuseIterations = std::max(1.0, resolution / 2.0);
// vtkSmartPointer<vtkImageAnisotropicDiffusion2D> diffuse1 =
// vtkSmartPointer<vtkImageAnisotropicDiffusion2D>::New();
// diffuse1->SetInputConnection(medianFilter->GetOutputPort());
// diffuse1->FacesOn();
// diffuse1->EdgesOn();
// diffuse1->CornersOn();
// diffuse1->SetDiffusionFactor(1); // Full strength
// diffuse1->GradientMagnitudeThresholdOn();
// diffuse1->SetDiffusionThreshold(0.2); // Don't smooth jumps in elevation > than 0.20m
// diffuse1->SetNumberOfIterations(diffuseIterations);
// diffuse1->Update();
if (outputDsmFile != ""){
log << "Saving DSM to file... ";
vtkSmartPointer<vtkTIFFWriter> tiffWriter =
vtkSmartPointer<vtkTIFFWriter>::New();
tiffWriter->SetFileName(outputDsmFile.c_str());
tiffWriter->SetInputData(medianFilter->GetOutput());
tiffWriter->Write();
log << "OK\n";
}
log << "Triangulate... ";
vtkSmartPointer<vtkGreedyTerrainDecimation> terrain =
vtkSmartPointer<vtkGreedyTerrainDecimation>::New();
terrain->SetErrorMeasureToNumberOfTriangles();
terrain->SetNumberOfTriangles(maxVertexCount * 2); // Approximate
terrain->SetInputData(medianFilter->GetOutput());
terrain->BoundaryVertexDeletionOn();
log << "OK\nTransform... ";
vtkSmartPointer<vtkTransform> transform =
vtkSmartPointer<vtkTransform>::New();
transform->Translate(-extentX / 2.0 + centerX,
-extentY / 2.0 + centerY, 0);
transform->Scale(extentX / width, extentY / height, 1);
vtkSmartPointer<vtkTransformFilter> transformFilter =
vtkSmartPointer<vtkTransformFilter>::New();
transformFilter->SetInputConnection(terrain->GetOutputPort());
transformFilter->SetTransform(transform);
log << "OK\n";
log << "Saving mesh to file... ";
vtkSmartPointer<vtkPLYWriter> plyWriter =
vtkSmartPointer<vtkPLYWriter>::New();
plyWriter->SetFileName(outputFile.c_str());
plyWriter->SetInputConnection(transformFilter->GetOutputPort());
plyWriter->SetFileTypeToASCII();
plyWriter->Write();
log << "OK\n";
#ifdef SUPPORTDEBUGWINDOW
if (showDebugWindow){
vtkSmartPointer<vtkPolyDataMapper> mapper =
vtkSmartPointer<vtkPolyDataMapper>::New();
mapper->SetInputConnection(transformFilter->GetOutputPort());
mapper->SetScalarRange(150, 170);
// vtkSmartPointer<vtkDataSetMapper> mapper =
// vtkSmartPointer<vtkDataSetMapper>::New();
// mapper->SetInputData(image);
// mapper->SetScalarRange(150, 170);
vtkSmartPointer<vtkActor> actor =
vtkSmartPointer<vtkActor>::New();
actor->SetMapper(mapper);
actor->GetProperty()->SetPointSize(5);
vtkSmartPointer<vtkRenderer> renderer =
vtkSmartPointer<vtkRenderer>::New();
vtkSmartPointer<vtkRenderWindow> renderWindow =
vtkSmartPointer<vtkRenderWindow>::New();
renderWindow->AddRenderer(renderer);
vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor =
vtkSmartPointer<vtkRenderWindowInteractor>::New();
renderWindowInteractor->SetRenderWindow(renderWindow);
renderer->AddActor(actor);
renderer->SetBackground(0.1804,0.5451,0.3412); // Sea green
renderWindow->Render();
renderWindowInteractor->Start();
}
#endif
}
void Odm25dMeshing::parseArguments(int argc, char **argv) {
for (int argIndex = 1; argIndex < argc; ++argIndex) {
// The argument to be parsed.
std::string argument = std::string(argv[argIndex]);
if (argument == "-help") {
printHelp();
exit(0);
} else if (argument == "-verbose") {
log.setIsPrintingInCout(true);
} else if (argument == "-maxVertexCount" && argIndex < argc) {
++argIndex;
if (argIndex >= argc) throw Odm25dMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
std::stringstream ss(argv[argIndex]);
ss >> maxVertexCount;
if (ss.bad()) throw Odm25dMeshingException("Argument '" + argument + "' has a bad value (wrong type).");
maxVertexCount = std::max<unsigned int>(maxVertexCount, 0);
log << "Vertex count was manually set to: " << maxVertexCount << "\n";
} else if (argument == "-resolution" && argIndex < argc) {
++argIndex;
if (argIndex >= argc) throw Odm25dMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
std::stringstream ss(argv[argIndex]);
ss >> resolution;
if (ss.bad()) throw Odm25dMeshingException("Argument '" + argument + "' has a bad value (wrong type).");
resolution = std::min<double>(100000, std::max<double>(resolution, 0));
log << "Resolution was manually set to: " << resolution << "\n";
} else if (argument == "-neighbors" && argIndex < argc) {
++argIndex;
if (argIndex >= argc) throw Odm25dMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
std::stringstream ss(argv[argIndex]);
ss >> neighbors;
if (ss.bad()) throw Odm25dMeshingException("Argument '" + argument + "' has a bad value (wrong type).");
neighbors = std::min<unsigned int>(1000, std::max<unsigned int>(neighbors, 1));
log << "Neighbors was manually set to: " << neighbors << "\n";
} else if (argument == "-inputFile" && argIndex < argc) {
++argIndex;
if (argIndex >= argc) {
throw Odm25dMeshingException(
"Argument '" + argument
+ "' expects 1 more input following it, but no more inputs were provided.");
}
inputFile = std::string(argv[argIndex]);
std::ifstream testFile(inputFile.c_str(), std::ios::binary);
if (!testFile.is_open()) {
throw Odm25dMeshingException(
"Argument '" + argument + "' has a bad value. (file not accessible)");
}
testFile.close();
log << "Reading point cloud at: " << inputFile << "\n";
} else if (argument == "-outputFile" && argIndex < argc) {
++argIndex;
if (argIndex >= argc) {
throw Odm25dMeshingException(
"Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
outputFile = std::string(argv[argIndex]);
std::ofstream testFile(outputFile.c_str());
if (!testFile.is_open()) {
throw Odm25dMeshingException(
"Argument '" + argument + "' has a bad value.");
}
testFile.close();
log << "Writing output to: " << outputFile << "\n";
}else if (argument == "-outputDsmFile" && argIndex < argc) {
++argIndex;
if (argIndex >= argc) {
throw Odm25dMeshingException(
"Argument '" + argument
+ "' expects 1 more input following it, but no more inputs were provided.");
}
outputDsmFile = std::string(argv[argIndex]);
std::ofstream testFile(outputDsmFile.c_str());
if (!testFile.is_open()) {
throw Odm25dMeshingException(
"Argument '" + argument + "' has a bad value. (file not accessible)");
}
testFile.close();
log << "Saving DSM output to: " << outputDsmFile << "\n";
} else if (argument == "-showDebugWindow") {
showDebugWindow = true;
} else if (argument == "-logFile" && argIndex < argc) {
++argIndex;
if (argIndex >= argc) {
throw Odm25dMeshingException(
"Argument '" + argument
+ "' expects 1 more input following it, but no more inputs were provided.");
}
logFilePath = std::string(argv[argIndex]);
std::ofstream testFile(outputFile.c_str());
if (!testFile.is_open()) {
throw Odm25dMeshingException(
"Argument '" + argument + "' has a bad value.");
}
testFile.close();
log << "Writing log information to: " << logFilePath << "\n";
} else {
printHelp();
throw Odm25dMeshingException("Unrecognised argument '" + argument + "'");
}
}
}
void Odm25dMeshing::printHelp() {
bool printInCoutPop = log.isPrintingInCout();
log.setIsPrintingInCout(true);
log << "Usage: odm_25dmeshing -inputFile [plyFile] [optional-parameters]\n";
log << "Create a 2.5D mesh from a point cloud. "
<< "The program requires a path to an input PLY point cloud file, all other input parameters are optional.\n\n";
log << " -inputFile <path> to PLY point cloud\n"
<< " -outputFile <path> where the output PLY 2.5D mesh should be saved (default: " << outputFile << ")\n"
<< " -outputDsmFile <path> Optionally output the Digital Surface Model (DSM) computed for generating the mesh. (default: " << outputDsmFile << ")\n"
<< " -logFile <path> log file path (default: " << logFilePath << ")\n"
<< " -verbose whether to print verbose output (default: " << (printInCoutPop ? "true" : "false") << ")\n"
<< " -maxVertexCount <0 - N> Maximum number of vertices in the output mesh. The mesh might have fewer vertices, but will not exceed this limit. (default: " << maxVertexCount << ")\n"
<< " -neighbors <1 - 1000> Number of nearest neighbors to consider when doing shepard's interpolation and outlier removal. Higher values lead to smoother meshes but take longer to process. (default: " << neighbors << ")\n"
<< " -resolution <0 - N> Size of the interpolated digital surface model (DSM) used for deriving the 2.5D mesh, expressed in pixels per meter unit. When set to zero, the program automatically attempts to find a good value based on the point cloud extent and target vertex count. (default: " << resolution << ")\n"
<< "\n";
log.setIsPrintingInCout(printInCoutPop);
}
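Tying the help text above to a concrete call (all paths and values here are hypothetical), an invocation of this now-removed tool looked like:

odm_25dmeshing -inputFile point_cloud.ply -outputFile odm_25dmesh.ply -outputDsmFile dsm.tif -neighbors 24 -verbose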

View file

@@ -1,111 +0,0 @@
#pragma once
//#define SUPPORTDEBUGWINDOW 1
#ifdef SUPPORTDEBUGWINDOW
#include <vtkActor.h>
#include <vtkCamera.h>
#include <vtkPolyDataMapper.h>
#include <vtkProperty.h>
#include <vtkRenderer.h>
#include <vtkRenderWindow.h>
#include <vtkRenderWindowInteractor.h>
#include <vtkDataSetMapper.h>
#endif
#include <vtkShepardKernel.h>
#include <vtkPointData.h>
#include <vtkImageData.h>
#include <vtkGreedyTerrainDecimation.h>
#include <vtkPLYWriter.h>
#include <vtkSmartPointer.h>
#include <vtkFloatArray.h>
#include <vtkOctreePointLocator.h>
#include <vtkPointInterpolator.h>
#include <vtkPlaneSource.h>
#include <vtkTransform.h>
#include <vtkTransformFilter.h>
#include <vtkImageAnisotropicDiffusion2D.h>
#include <vtkTIFFWriter.h>
#include <vtkStatisticalOutlierRemoval.h>
#include <vtkImageMedian3D.h>
#include <vtkRadiusOutlierRemoval.h>
// For compatibility with new VTK generic data arrays
#ifdef vtkGenericDataArray_h
#define InsertNextTupleValue InsertNextTypedTuple
#endif
#include <cstring>
#include "tinyply.h"
using namespace tinyply;
#include "Logger.hpp"
class Odm25dMeshing {
public:
Odm25dMeshing() :
log(false) {};
~Odm25dMeshing() {};
/*!
* \brief run Runs the meshing functionality using the provided input arguments.
* For a list of accepted arguments, please see the main page documentation or
* call the program with parameter "-help".
* \param argc Application argument count.
* \param argv Argument values.
* \return 0 If successful.
*/
int run(int argc, char **argv);
private:
/*!
* \brief parseArguments Parses command line arguments.
* \param argc Application argument count.
* \param argv Argument values.
*/
void parseArguments(int argc, char** argv);
/*!
* \brief printHelp Prints help, explaining usage. Can be shown by calling the program with argument: "-help".
*/
void printHelp();
void loadPointCloud();
void buildMesh();
Logger log;
std::string inputFile = "";
std::string outputFile = "odm_25dmesh.ply";
std::string logFilePath = "odm_25dmeshing_log.txt";
int maxVertexCount = 100000;
double resolution = 0;
unsigned int neighbors = 24;
std::string outputDsmFile = "";
bool showDebugWindow = false;
vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
};
class Odm25dMeshingException: public std::exception {
public:
Odm25dMeshingException() :
message("Error in Odm25dMeshing") {
}
Odm25dMeshingException(std::string msgInit) :
message("Error in Odm25dMeshing:\n" + msgInit) {
}
~Odm25dMeshingException() throw () {
}
virtual const char* what() const throw () {
return message.c_str();
}
private:
std::string message; /**< The error message **/
};

View file

@@ -1,31 +0,0 @@
/*
OpenDroneMap - https://www.opendronemap.org
Copyright (C) 2017 OpenDroneMap Contributors
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "Odm25dMeshing.hpp"
/*!
* \mainpage main OpenDroneMap 2.5D Meshing Module
*
* The OpenDroneMap 2.5D Meshing Module generates a 2.5D mesh using a constrained
* delaunay triangulation from any point cloud (points with corresponding normals).
*/
int main(int argc, char** argv)
{
Odm25dMeshing om;
return om.run(argc, argv);
}

View file

@@ -1,621 +0,0 @@
// This software is in the public domain. Where that dedication is not
// recognized, you are granted a perpetual, irrevocable license to copy,
// distribute, and modify this file as you see fit.
// Authored in 2015 by Dimitri Diakopoulos (http://www.dimitridiakopoulos.com)
// https://github.com/ddiakopoulos/tinyply
// Version 2.0
#include "tinyply.h"
#include <algorithm>
#include <functional>
#include <type_traits>
#include <iostream>
#include <cstring>
using namespace tinyply;
using namespace std;
//////////////////
// Endian Utils //
//////////////////
template<typename T> T endian_swap(const T & v) { return v; }
template<> inline uint16_t endian_swap(const uint16_t & v) { return (v << 8) | (v >> 8); }
template<> inline uint32_t endian_swap(const uint32_t & v) { return (v << 24) | ((v << 8) & 0x00ff0000) | ((v >> 8) & 0x0000ff00) | (v >> 24); }
template<> inline uint64_t endian_swap(const uint64_t & v)
{
return (((v & 0x00000000000000ffLL) << 56) |
((v & 0x000000000000ff00LL) << 40) |
((v & 0x0000000000ff0000LL) << 24) |
((v & 0x00000000ff000000LL) << 8) |
((v & 0x000000ff00000000LL) >> 8) |
((v & 0x0000ff0000000000LL) >> 24) |
((v & 0x00ff000000000000LL) >> 40) |
((v & 0xff00000000000000LL) >> 56));
}
template<> inline int16_t endian_swap(const int16_t & v) { uint16_t r = endian_swap(*(uint16_t*)&v); return *(int16_t*)&r; }
template<> inline int32_t endian_swap(const int32_t & v) { uint32_t r = endian_swap(*(uint32_t*)&v); return *(int32_t*)&r; }
template<> inline int64_t endian_swap(const int64_t & v) { uint64_t r = endian_swap(*(uint64_t*)&v); return *(int64_t*)&r; }
inline float endian_swap_float(const uint32_t & v) { union { float f; uint32_t i; }; i = endian_swap(v); return f; }
inline double endian_swap_double(const uint64_t & v) { union { double d; uint64_t i; }; i = endian_swap(v); return d; }
/////////////////////////////
// Internal Implementation //
/////////////////////////////
inline Type property_type_from_string(const std::string & t)
{
if (t == "int8" || t == "char") return Type::INT8;
else if (t == "uint8" || t == "uchar") return Type::UINT8;
else if (t == "int16" || t == "short") return Type::INT16;
else if (t == "uint16" || t == "ushort") return Type::UINT16;
else if (t == "int32" || t == "int") return Type::INT32;
else if (t == "uint32" || t == "uint") return Type::UINT32;
else if (t == "float32" || t == "float") return Type::FLOAT32;
else if (t == "float64" || t == "double") return Type::FLOAT64;
return Type::INVALID;
}
struct PlyFile::PlyFileImpl
{
struct PlyCursor
{
size_t byteOffset;
size_t totalSizeBytes;
};
struct ParsingHelper
{
std::shared_ptr<PlyData> data;
std::shared_ptr<PlyCursor> cursor;
};
std::map<std::string, ParsingHelper> userData;
bool isBinary = false;
bool isBigEndian = false;
std::vector<PlyElement> elements;
std::vector<std::string> comments;
std::vector<std::string> objInfo;
void read(std::istream & is);
void write(std::ostream & os, bool isBinary);
std::shared_ptr<PlyData> request_properties_from_element(const std::string & elementKey, const std::initializer_list<std::string> propertyKeys);
void add_properties_to_element(const std::string & elementKey, const std::initializer_list<std::string> propertyKeys, const Type type, const size_t count, uint8_t * data, const Type listType, const size_t listCount);
size_t read_property_binary(const Type t, void * dest, size_t & destOffset, std::istream & is);
size_t read_property_ascii(const Type t, void * dest, size_t & destOffset, std::istream & is);
size_t skip_property_binary(const PlyProperty & property, std::istream & is);
size_t skip_property_ascii(const PlyProperty & property, std::istream & is);
bool parse_header(std::istream & is);
void parse_data(std::istream & is, bool firstPass);
void read_header_format(std::istream & is);
void read_header_element(std::istream & is);
void read_header_property(std::istream & is);
void read_header_text(std::string line, std::vector<std::string> & place, int erase = 0);
void write_header(std::ostream & os);
void write_ascii_internal(std::ostream & os);
void write_binary_internal(std::ostream & os);
void write_property_ascii(Type t, std::ostream & os, uint8_t * src, size_t & srcOffset);
void write_property_binary(Type t, std::ostream & os, uint8_t * src, size_t & srcOffset);
};
//////////////////
// PLY Property //
//////////////////
PlyProperty::PlyProperty(std::istream & is) : isList(false)
{
std::string type;
is >> type;
if (type == "list")
{
std::string countType;
is >> countType >> type;
listType = property_type_from_string(countType);
isList = true;
}
propertyType = property_type_from_string(type);
is >> name;
}
/////////////////
// PLY Element //
/////////////////
PlyElement::PlyElement(std::istream & is)
{
is >> name >> size;
}
///////////
// Utils //
///////////
std::string make_key(const std::string & a, const std::string & b)
{
return (a + "-" + b);
}
template<typename T> void ply_cast(void * dest, const char * src, bool be)
{
*(static_cast<T *>(dest)) = (be) ? endian_swap(*(reinterpret_cast<const T *>(src))) : *(reinterpret_cast<const T *>(src));
}
template<typename T> void ply_cast_float(void * dest, const char * src, bool be)
{
*(static_cast<T *>(dest)) = (be) ? endian_swap_float(*(reinterpret_cast<const uint32_t *>(src))) : *(reinterpret_cast<const T *>(src));
}
template<typename T> void ply_cast_double(void * dest, const char * src, bool be)
{
*(static_cast<T *>(dest)) = (be) ? endian_swap_double(*(reinterpret_cast<const uint64_t *>(src))) : *(reinterpret_cast<const T *>(src));
}
template<typename T> T ply_read_ascii(std::istream & is)
{
T data;
is >> data;
return data;
}
template<typename T> void ply_cast_ascii(void * dest, std::istream & is)
{
*(static_cast<T *>(dest)) = ply_read_ascii<T>(is);
}
size_t find_element(const std::string & key, const std::vector<PlyElement> & list)
{
for (size_t i = 0; i < list.size(); i++) if (list[i].name == key) return i;
return -1;
}
size_t find_property(const std::string & key, const std::vector<PlyProperty> & list)
{
for (size_t i = 0; i < list.size(); ++i) if (list[i].name == key) return i;
return -1;
}
//////////////
// PLY File //
//////////////
bool PlyFile::PlyFileImpl::parse_header(std::istream & is)
{
std::string line;
while (std::getline(is, line))
{
std::istringstream ls(line);
std::string token;
ls >> token;
if (token == "ply" || token == "PLY" || token == "") continue;
else if (token == "comment") read_header_text(line, comments, 8);
else if (token == "format") read_header_format(ls);
else if (token == "element") read_header_element(ls);
else if (token == "property") read_header_property(ls);
else if (token == "obj_info") read_header_text(line, objInfo, 9);
else if (token == "end_header") break;
else return false;
}
return true;
}
void PlyFile::PlyFileImpl::read_header_text(std::string line, std::vector<std::string>& place, int erase)
{
place.push_back((erase > 0) ? line.erase(0, erase) : line);
}
void PlyFile::PlyFileImpl::read_header_format(std::istream & is)
{
std::string s;
(is >> s);
if (s == "binary_little_endian") isBinary = true;
else if (s == "binary_big_endian") isBinary = isBigEndian = true;
}
void PlyFile::PlyFileImpl::read_header_element(std::istream & is)
{
elements.emplace_back(is);
}
void PlyFile::PlyFileImpl::read_header_property(std::istream & is)
{
elements.back().properties.emplace_back(is);
}
size_t PlyFile::PlyFileImpl::skip_property_binary(const PlyProperty & p, std::istream & is)
{
static std::vector<char> skip(PropertyTable[p.propertyType].stride);
if (p.isList)
{
size_t listSize = 0;
size_t dummyCount = 0;
read_property_binary(p.listType, &listSize, dummyCount, is);
for (size_t i = 0; i < listSize; ++i) is.read(skip.data(), PropertyTable[p.propertyType].stride);
return listSize * PropertyTable[p.propertyType].stride; // in bytes
}
else
{
is.read(skip.data(), PropertyTable[p.propertyType].stride);
return PropertyTable[p.propertyType].stride;
}
}
size_t PlyFile::PlyFileImpl::skip_property_ascii(const PlyProperty & p, std::istream & is)
{
std::string skip;
if (p.isList)
{
size_t listSize = 0;
size_t dummyCount = 0;
read_property_ascii(p.listType, &listSize, dummyCount, is);
for (size_t i = 0; i < listSize; ++i) is >> skip;
return listSize * PropertyTable[p.propertyType].stride; // in bytes
}
else
{
is >> skip;
return PropertyTable[p.propertyType].stride;
}
}
size_t PlyFile::PlyFileImpl::read_property_binary(const Type t, void * dest, size_t & destOffset, std::istream & is)
{
destOffset += PropertyTable[t].stride;
std::vector<char> src(PropertyTable[t].stride);
is.read(src.data(), PropertyTable[t].stride);
switch (t)
{
case Type::INT8: ply_cast<int8_t>(dest, src.data(), isBigEndian); break;
case Type::UINT8: ply_cast<uint8_t>(dest, src.data(), isBigEndian); break;
case Type::INT16: ply_cast<int16_t>(dest, src.data(), isBigEndian); break;
case Type::UINT16: ply_cast<uint16_t>(dest, src.data(), isBigEndian); break;
case Type::INT32: ply_cast<int32_t>(dest, src.data(), isBigEndian); break;
case Type::UINT32: ply_cast<uint32_t>(dest, src.data(), isBigEndian); break;
case Type::FLOAT32: ply_cast_float<float>(dest, src.data(), isBigEndian); break;
case Type::FLOAT64: ply_cast_double<double>(dest, src.data(), isBigEndian); break;
case Type::INVALID: throw std::invalid_argument("invalid ply property");
}
return PropertyTable[t].stride;
}
size_t PlyFile::PlyFileImpl::read_property_ascii(const Type t, void * dest, size_t & destOffset, std::istream & is)
{
destOffset += PropertyTable[t].stride;
switch (t)
{
case Type::INT8: *((int8_t *)dest) = ply_read_ascii<int32_t>(is); break;
case Type::UINT8: *((uint8_t *)dest) = ply_read_ascii<uint32_t>(is); break;
case Type::INT16: ply_cast_ascii<int16_t>(dest, is); break;
case Type::UINT16: ply_cast_ascii<uint16_t>(dest, is); break;
case Type::INT32: ply_cast_ascii<int32_t>(dest, is); break;
case Type::UINT32: ply_cast_ascii<uint32_t>(dest, is); break;
case Type::FLOAT32: ply_cast_ascii<float>(dest, is); break;
case Type::FLOAT64: ply_cast_ascii<double>(dest, is); break;
case Type::INVALID: throw std::invalid_argument("invalid ply property");
}
return PropertyTable[t].stride;
}
void PlyFile::PlyFileImpl::write_property_ascii(Type t, std::ostream & os, uint8_t * src, size_t & srcOffset)
{
switch (t)
{
case Type::INT8: os << static_cast<int32_t>(*reinterpret_cast<int8_t*>(src)); break;
case Type::UINT8: os << static_cast<uint32_t>(*reinterpret_cast<uint8_t*>(src)); break;
case Type::INT16: os << *reinterpret_cast<int16_t*>(src); break;
case Type::UINT16: os << *reinterpret_cast<uint16_t*>(src); break;
case Type::INT32: os << *reinterpret_cast<int32_t*>(src); break;
case Type::UINT32: os << *reinterpret_cast<uint32_t*>(src); break;
case Type::FLOAT32: os << *reinterpret_cast<float*>(src); break;
case Type::FLOAT64: os << *reinterpret_cast<double*>(src); break;
case Type::INVALID: throw std::invalid_argument("invalid ply property");
}
os << " ";
srcOffset += PropertyTable[t].stride;
}
void PlyFile::PlyFileImpl::write_property_binary(Type t, std::ostream & os, uint8_t * src, size_t & srcOffset)
{
os.write((char *)src, PropertyTable[t].stride);
srcOffset += PropertyTable[t].stride;
}
void PlyFile::PlyFileImpl::read(std::istream & is)
{
// Parse but only get the data size
parse_data(is, true);
std::vector<std::shared_ptr<PlyData>> buffers;
for (auto & entry : userData) buffers.push_back(entry.second.data);
// Since group-requested properties share the same cursor, we need to find unique cursors so we only allocate once
std::sort(buffers.begin(), buffers.end());
buffers.erase(std::unique(buffers.begin(), buffers.end()), buffers.end());
// Not great, but since we sorted by ptrs on PlyData, need to remap back onto its cursor in the userData table
for (auto & b : buffers)
{
for (auto & entry : userData)
{
if (entry.second.data == b && b->buffer.get() == nullptr)
{
b->buffer = Buffer(entry.second.cursor->totalSizeBytes);
}
}
}
// Populate the data
parse_data(is, false);
}
void PlyFile::PlyFileImpl::write(std::ostream & os, bool _isBinary)
{
if (_isBinary) write_binary_internal(os);
else write_ascii_internal(os);
}
void PlyFile::PlyFileImpl::write_binary_internal(std::ostream & os)
{
isBinary = true;
write_header(os);
for (auto & e : elements)
{
for (size_t i = 0; i < e.size; ++i)
{
for (auto & p : e.properties)
{
auto & helper = userData[make_key(e.name, p.name)];
if (p.isList)
{
uint8_t listSize[4] = {0, 0, 0, 0};
std::memcpy(listSize, &p.listCount, sizeof(uint32_t));
size_t dummyCount = 0;
write_property_binary(p.listType, os, listSize, dummyCount);
for (int j = 0; j < p.listCount; ++j)
{
write_property_binary(p.propertyType, os, (helper.data->buffer.get() + helper.cursor->byteOffset), helper.cursor->byteOffset);
}
}
else
{
write_property_binary(p.propertyType, os, (helper.data->buffer.get() + helper.cursor->byteOffset), helper.cursor->byteOffset);
}
}
}
}
}
void PlyFile::PlyFileImpl::write_ascii_internal(std::ostream & os)
{
write_header(os);
for (auto & e : elements)
{
for (size_t i = 0; i < e.size; ++i)
{
for (auto & p : e.properties)
{
auto & helper = userData[make_key(e.name, p.name)];
if (p.isList)
{
os << p.listCount << " ";
for (int j = 0; j < p.listCount; ++j)
{
write_property_ascii(p.propertyType, os, (helper.data->buffer.get() + helper.cursor->byteOffset), helper.cursor->byteOffset);
}
}
else
{
write_property_ascii(p.propertyType, os, (helper.data->buffer.get() + helper.cursor->byteOffset), helper.cursor->byteOffset);
}
}
os << "\n";
}
}
}
void PlyFile::PlyFileImpl::write_header(std::ostream & os)
{
const std::locale & fixLoc = std::locale("C");
os.imbue(fixLoc);
os << "ply\n";
if (isBinary) os << ((isBigEndian) ? "format binary_big_endian 1.0" : "format binary_little_endian 1.0") << "\n";
else os << "format ascii 1.0\n";
for (const auto & comment : comments) os << "comment " << comment << "\n";
for (auto & e : elements)
{
os << "element " << e.name << " " << e.size << "\n";
for (const auto & p : e.properties)
{
if (p.isList)
{
os << "property list " << PropertyTable[p.listType].str << " "
<< PropertyTable[p.propertyType].str << " " << p.name << "\n";
}
else
{
os << "property " << PropertyTable[p.propertyType].str << " " << p.name << "\n";
}
}
}
os << "end_header\n";
}
// Returns the size (in bytes)
std::shared_ptr<PlyData> PlyFile::PlyFileImpl::request_properties_from_element(const std::string & elementKey, const std::initializer_list<std::string> propertyKeys)
{
// All requested properties in the userDataTable share the same cursor (thrown into the same flat array)
ParsingHelper helper;
helper.data = std::make_shared<PlyData>();
helper.data->count = 0;
helper.data->t = Type::INVALID;
helper.cursor = std::make_shared<PlyCursor>();
helper.cursor->byteOffset = 0;
helper.cursor->totalSizeBytes = 0;
if (elements.size() == 0) throw std::runtime_error("parsed header had no elements defined. malformed file?");
if (!propertyKeys.size()) throw std::invalid_argument("`propertyKeys` argument is empty");
if (elementKey.size() == 0) throw std::invalid_argument("`elementKey` argument is empty");
const int elementIndex = find_element(elementKey, elements);
// Sanity check if the user requested element is in the pre-parsed header
if (elementIndex >= 0)
{
// We found the element
const PlyElement & element = elements[elementIndex];
helper.data->count = element.size;
// Find each of the keys
for (auto key : propertyKeys)
{
const int propertyIndex = find_property(key, element.properties);
if (propertyIndex >= 0)
{
// We found the property
const PlyProperty & property = element.properties[propertyIndex];
helper.data->t = property.propertyType; // hmm....
auto result = userData.insert(std::pair<std::string, ParsingHelper>(make_key(element.name, property.name), helper));
if (result.second == false) throw std::invalid_argument("element-property key has already been requested: " + make_key(element.name, property.name));
}
else throw std::invalid_argument("one of the property keys was not found in the header: " + key);
}
}
else throw std::invalid_argument("the element key was not found in the header: " + elementKey);
return helper.data;
}
void PlyFile::PlyFileImpl::add_properties_to_element(const std::string & elementKey, const std::initializer_list<std::string> propertyKeys, const Type type, const size_t count, uint8_t * data, const Type listType, const size_t listCount)
{
ParsingHelper helper;
helper.data = std::make_shared<PlyData>();
helper.data->count = count;
helper.data->t = type;
helper.data->buffer = Buffer(data);
helper.cursor = std::make_shared<PlyCursor>();
helper.cursor->byteOffset = 0;
helper.cursor->totalSizeBytes = 0;
auto create_property_on_element = [&](PlyElement & e)
{
for (auto key : propertyKeys)
{
PlyProperty newProp = (listType == Type::INVALID) ? PlyProperty(type, key) : PlyProperty(listType, type, key, listCount);
/* auto result = */userData.insert(std::pair<std::string, ParsingHelper>(make_key(elementKey, key), helper));
e.properties.push_back(newProp);
}
};
int idx = find_element(elementKey, elements);
if (idx >= 0)
{
PlyElement & e = elements[idx];
create_property_on_element(e);
}
else
{
PlyElement newElement = (listType == Type::INVALID) ? PlyElement(elementKey, count / propertyKeys.size()) : PlyElement(elementKey, count / listCount);
create_property_on_element(newElement);
elements.push_back(newElement);
}
}
void PlyFile::PlyFileImpl::parse_data(std::istream & is, bool firstPass)
{
std::function<size_t(const Type t, void * dest, size_t & destOffset, std::istream & is)> read;
std::function<size_t(const PlyProperty & p, std::istream & is)> skip;
const auto start = is.tellg();
if (isBinary)
{
read = [&](const Type t, void * dest, size_t & destOffset, std::istream & _is) { return read_property_binary(t, dest, destOffset, _is); };
skip = [&](const PlyProperty & p, std::istream & _is) { return skip_property_binary(p, _is); };
}
else
{
read = [&](const Type t, void * dest, size_t & destOffset, std::istream & _is) { return read_property_ascii(t, dest, destOffset, _is); };
skip = [&](const PlyProperty & p, std::istream & _is) { return skip_property_ascii(p, _is); };
}
for (auto & element : elements)
{
for (size_t count = 0; count < element.size; ++count)
{
for (auto & property : element.properties)
{
auto cursorIt = userData.find(make_key(element.name, property.name));
if (cursorIt != userData.end())
{
auto & helper = cursorIt->second;
if (!firstPass)
{
if (property.isList)
{
size_t listSize = 0;
size_t dummyCount = 0;
read(property.listType, &listSize, dummyCount, is);
for (size_t i = 0; i < listSize; ++i)
{
read(property.propertyType, (helper.data->buffer.get() + helper.cursor->byteOffset), helper.cursor->byteOffset, is);
}
}
else
{
read(property.propertyType, (helper.data->buffer.get() + helper.cursor->byteOffset), helper.cursor->byteOffset, is);
}
}
else
{
helper.cursor->totalSizeBytes += skip(property, is);
}
}
else
{
skip(property, is);
}
}
}
}
// Reset istream reader to the beginning
if (firstPass) is.seekg(start, is.beg);
}
///////////////////////////////////
// Pass-Through Public Interface //
///////////////////////////////////
PlyFile::PlyFile() { impl.reset(new PlyFileImpl()); };
PlyFile::~PlyFile() { };
bool PlyFile::parse_header(std::istream & is) { return impl->parse_header(is); }
void PlyFile::read(std::istream & is) { return impl->read(is); }
void PlyFile::write(std::ostream & os, bool isBinary) { return impl->write(os, isBinary); }
std::vector<PlyElement> PlyFile::get_elements() const { return impl->elements; }
std::vector<std::string> & PlyFile::get_comments() { return impl->comments; }
std::vector<std::string> PlyFile::get_info() const { return impl->objInfo; }
std::shared_ptr<PlyData> PlyFile::request_properties_from_element(const std::string & elementKey, const std::initializer_list<std::string> propertyKeys)
{
return impl->request_properties_from_element(elementKey, propertyKeys);
}
void PlyFile::add_properties_to_element(const std::string & elementKey, const std::initializer_list<std::string> propertyKeys, const Type type, const size_t count, uint8_t * data, const Type listType, const size_t listCount)
{
return impl->add_properties_to_element(elementKey, propertyKeys, type, count, data, listType, listCount);
}

View file

@@ -1,119 +0,0 @@
// This software is in the public domain. Where that dedication is not
// recognized, you are granted a perpetual, irrevocable license to copy,
// distribute, and modify this file as you see fit.
// Authored in 2015 by Dimitri Diakopoulos (http://www.dimitridiakopoulos.com)
// https://github.com/ddiakopoulos/tinyply
// Version 2.0
#ifndef tinyply_h
#define tinyply_h
#include <vector>
#include <string>
#include <stdint.h>
#include <sstream>
#include <memory>
#include <map>
namespace tinyply
{
enum class Type : uint8_t
{
INVALID,
INT8,
UINT8,
INT16,
UINT16,
INT32,
UINT32,
FLOAT32,
FLOAT64
};
struct PropertyInfo
{
int stride;
std::string str;
};
static std::map<Type, PropertyInfo> PropertyTable
{
{ Type::INT8,{ 1, "char" } },
{ Type::UINT8,{ 1, "uchar" } },
{ Type::INT16,{ 2, "short" } },
{ Type::UINT16,{ 2, "ushort" } },
{ Type::INT32,{ 4, "int" } },
{ Type::UINT32,{ 4, "uint" } },
{ Type::FLOAT32,{ 4, "float" } },
{ Type::FLOAT64,{ 8, "double" } },
{ Type::INVALID,{ 0, "INVALID" } }
};
class Buffer
{
uint8_t * alias{ nullptr };
struct delete_array { void operator()(uint8_t * p) { delete[] p; } };
std::unique_ptr<uint8_t, decltype(Buffer::delete_array())> data;
size_t size;
public:
Buffer() {};
Buffer(const size_t size) : data(new uint8_t[size], delete_array()), size(size) { alias = data.get(); } // allocating
Buffer(uint8_t * ptr) { alias = ptr; } // non-allocating, fixme: set size?
uint8_t * get() { return alias; }
size_t size_bytes() const { return size; }
};
struct PlyData
{
Type t;
size_t count;
Buffer buffer;
};
struct PlyProperty
{
PlyProperty(std::istream & is);
PlyProperty(Type type, std::string & _name) : name(_name), propertyType(type) {}
PlyProperty(Type list_type, Type prop_type, std::string & _name, int list_count) : name(_name), propertyType(prop_type), isList(true), listType(list_type), listCount(list_count) {}
std::string name;
Type propertyType;
bool isList{ false };
Type listType{ Type::INVALID };
int listCount{ 0 };
};
struct PlyElement
{
PlyElement(std::istream & istream);
PlyElement(const std::string & _name, size_t count) : name(_name), size(count) {}
std::string name;
size_t size;
std::vector<PlyProperty> properties;
};
struct PlyFile
{
struct PlyFileImpl;
std::unique_ptr<PlyFileImpl> impl;
PlyFile();
~PlyFile();
bool parse_header(std::istream & is);
void read(std::istream & is);
void write(std::ostream & os, bool isBinary);
std::vector<PlyElement> get_elements() const;
std::vector<std::string> & get_comments();
std::vector<std::string> get_info() const;
std::shared_ptr<PlyData> request_properties_from_element(const std::string & elementKey, const std::initializer_list<std::string> propertyKeys);
void add_properties_to_element(const std::string & elementKey, const std::initializer_list<std::string> propertyKeys, const Type type, const size_t count, uint8_t * data, const Type listType, const size_t listCount);
};
} // namespace tinyply
#endif // tinyply_h
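
For context, a minimal sketch of how this tinyply 2.0 API is normally driven (the file name and the "vertex"/"x"/"y"/"z" names below are just the conventional PLY ones, not something this commit defines):

    #include <fstream>
    #include <memory>
    #include "tinyply.h"

    int main()
    {
        std::ifstream ss("model.ply", std::ios::binary);   // hypothetical input file
        tinyply::PlyFile file;
        file.parse_header(ss);
        // Request the x/y/z properties of the "vertex" element before reading the body.
        std::shared_ptr<tinyply::PlyData> vertices =
            file.request_properties_from_element("vertex", { "x", "y", "z" });
        file.read(ss);
        // vertices->count values of type vertices->t are now packed in vertices->buffer.get().
        return 0;
    }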

Wyświetl plik

@ -1,16 +1,13 @@
project(odm_25dmeshing)
project(odm_cleanmesh)
cmake_minimum_required(VERSION 2.8)
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR})
set (CMAKE_CXX_STANDARD 11)
#set(VTK_DIR "/data/packages/VTK-7.1.1-build")
set(VTK_SMP_IMPLEMENTATION_TYPE TBB)
find_package(VTK 7.1.1 REQUIRED)
find_package(VTK REQUIRED)
include(${VTK_USE_FILE})
# Add compiler options.
#add_definitions(-Wall -Wextra -O0 -g3)
add_definitions(-Wall -Wextra)
# Add source directory
@ -19,5 +16,4 @@ aux_source_directory("./src" SRC_LIST)
# Add executable
add_executable(${PROJECT_NAME} ${SRC_LIST})
target_link_libraries(odm_25dmeshing ${VTK_LIBRARIES})
target_link_libraries(${PROJECT_NAME} ${VTK_LIBRARIES})

Wyświetl plik

@ -0,0 +1,106 @@
/*
Copyright (c) 2006, Michael Kazhdan and Matthew Bolitho
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer. Redistributions in binary form must reproduce
the above copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the distribution.
Neither the name of the Johns Hopkins University nor the names of its contributors
may be used to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
*/
#ifndef CMD_LINE_PARSER_INCLUDED
#define CMD_LINE_PARSER_INCLUDED
#include <stdarg.h>
#include <cstring>
#include <cstdlib>
#include <string>
#include <vector>
#ifdef WIN32
int strcasecmp( const char* c1 , const char* c2 );
#endif // WIN32
class cmdLineReadable
{
public:
bool set;
char *name;
cmdLineReadable( const char *name );
virtual ~cmdLineReadable( void );
virtual int read( char** argv , int argc );
virtual void writeValue( char* str ) const;
};
template< class Type > void cmdLineWriteValue( Type t , char* str );
template< class Type > void cmdLineCleanUp( Type* t );
template< class Type > Type cmdLineInitialize( void );
template< class Type > Type cmdLineCopy( Type t );
template< class Type > Type cmdLineStringToType( const char* str );
template< class Type >
class cmdLineParameter : public cmdLineReadable
{
public:
Type value;
cmdLineParameter( const char *name );
cmdLineParameter( const char *name , Type v );
~cmdLineParameter( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
template< class Type , int Dim >
class cmdLineParameterArray : public cmdLineReadable
{
public:
Type values[Dim];
cmdLineParameterArray( const char *name, const Type* v=NULL );
~cmdLineParameterArray( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
template< class Type >
class cmdLineParameters : public cmdLineReadable
{
public:
int count;
Type *values;
cmdLineParameters( const char* name );
~cmdLineParameters( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
void cmdLineParse( int argc , char **argv, cmdLineReadable** params );
char* FileExtension( char* fileName );
char* LocalFileName( char* fileName );
char* DirectoryName( char* fileName );
char* GetFileExtension( const char* fileName );
char* GetLocalFileName( const char* fileName );
char** ReadWords( const char* fileName , int& cnt );
#include "CmdLineParser.inl"
#endif // CMD_LINE_PARSER_INCLUDED
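
A minimal usage sketch of this parser, with made-up parameter names (the odm_cleanmesh and odm_dem2points mains below show the real ones):

    #include <iostream>
    #include "CmdLineParser.h"

    cmdLineParameter< char* > Input( "input" );     // consumes "-input <value>"
    cmdLineParameter< int >   Count( "count" );     // consumes "-count <integer>"
    cmdLineReadable           Verbose( "verbose" ); // boolean flag, "-verbose"
    cmdLineReadable* params[] = { &Input , &Count , &Verbose , NULL };

    int main(int argc, char **argv)
    {
        // Skip argv[0]; option names are matched case-insensitively without the leading '-'.
        cmdLineParse( argc-1 , &argv[1] , params );
        if( Input.set ) std::cout << "input: " << Input.value << std::endl;
        if( Count.set ) std::cout << "count: " << Count.value << std::endl;
        std::cout << "verbose: " << ( Verbose.set ? "yes" : "no" ) << std::endl;
        return 0;
    }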

Wyświetl plik

@ -0,0 +1,300 @@
/* -*- C++ -*-
Copyright (c) 2006, Michael Kazhdan and Matthew Bolitho
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer. Redistributions in binary form must reproduce
the above copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the distribution.
Neither the name of the Johns Hopkins University nor the names of its contributors
may be used to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
*/
#include <cassert>
#include <string.h>
#if defined( WIN32 ) || defined( _WIN64 )
inline int strcasecmp( const char* c1 , const char* c2 ){ return _stricmp( c1 , c2 ); }
#endif // WIN32 || _WIN64
template< > void cmdLineCleanUp< int >( int* t ){ *t = 0; }
template< > void cmdLineCleanUp< float >( float* t ){ *t = 0; }
template< > void cmdLineCleanUp< double >( double* t ){ *t = 0; }
template< > void cmdLineCleanUp< char* >( char** t ){ if( *t ) free( *t ) ; *t = NULL; }
template< > int cmdLineInitialize< int >( void ){ return 0; }
template< > float cmdLineInitialize< float >( void ){ return 0.f; }
template< > double cmdLineInitialize< double >( void ){ return 0.; }
template< > char* cmdLineInitialize< char* >( void ){ return NULL; }
template< > void cmdLineWriteValue< int >( int t , char* str ){ sprintf( str , "%d" , t ); }
template< > void cmdLineWriteValue< float >( float t , char* str ){ sprintf( str , "%f" , t ); }
template< > void cmdLineWriteValue< double >( double t , char* str ){ sprintf( str , "%f" , t ); }
template< > void cmdLineWriteValue< char* >( char* t , char* str ){ if( t ) sprintf( str , "%s" , t ) ; else str[0]=0; }
template< > int cmdLineCopy( int t ){ return t; }
template< > float cmdLineCopy( float t ){ return t; }
template< > double cmdLineCopy( double t ){ return t; }
#if defined( WIN32 ) || defined( _WIN64 )
template< > char* cmdLineCopy( char* t ){ return _strdup( t ); }
#else // !WIN32 && !_WIN64
template< > char* cmdLineCopy( char* t ){ return strdup( t ); }
#endif // WIN32 || _WIN64
template< > int cmdLineStringToType( const char* str ){ return atoi( str ); }
template< > float cmdLineStringToType( const char* str ){ return float( atof( str ) ); }
template< > double cmdLineStringToType( const char* str ){ return double( atof( str ) ); }
#if defined( WIN32 ) || defined( _WIN64 )
template< > char* cmdLineStringToType( const char* str ){ return _strdup( str ); }
#else // !WIN32 && !_WIN64
template< > char* cmdLineStringToType( const char* str ){ return strdup( str ); }
#endif // WIN32 || _WIN64
/////////////////////
// cmdLineReadable //
/////////////////////
#if defined( WIN32 ) || defined( _WIN64 )
inline cmdLineReadable::cmdLineReadable( const char *name ) : set(false) { this->name = _strdup( name ); }
#else // !WIN32 && !_WIN64
inline cmdLineReadable::cmdLineReadable( const char *name ) : set(false) { this->name = strdup( name ); }
#endif // WIN32 || _WIN64
inline cmdLineReadable::~cmdLineReadable( void ){ if( name ) free( name ) ; name = NULL; }
inline int cmdLineReadable::read( char** , int ){ set = true ; return 0; }
inline void cmdLineReadable::writeValue( char* str ) const { str[0] = 0; }
//////////////////////
// cmdLineParameter //
//////////////////////
template< class Type > cmdLineParameter< Type >::~cmdLineParameter( void ) { cmdLineCleanUp( &value ); }
template< class Type > cmdLineParameter< Type >::cmdLineParameter( const char *name ) : cmdLineReadable( name ){ value = cmdLineInitialize< Type >(); }
template< class Type > cmdLineParameter< Type >::cmdLineParameter( const char *name , Type v ) : cmdLineReadable( name ){ value = cmdLineCopy< Type >( v ); }
template< class Type >
int cmdLineParameter< Type >::read( char** argv , int argc )
{
if( argc>0 )
{
cmdLineCleanUp< Type >( &value ) , value = cmdLineStringToType< Type >( argv[0] );
set = true;
return 1;
}
else return 0;
}
template< class Type >
void cmdLineParameter< Type >::writeValue( char* str ) const { cmdLineWriteValue< Type >( value , str ); }
///////////////////////////
// cmdLineParameterArray //
///////////////////////////
template< class Type , int Dim >
cmdLineParameterArray< Type , Dim >::cmdLineParameterArray( const char *name , const Type* v ) : cmdLineReadable( name )
{
if( v ) for( int i=0 ; i<Dim ; i++ ) values[i] = cmdLineCopy< Type >( v[i] );
else for( int i=0 ; i<Dim ; i++ ) values[i] = cmdLineInitialize< Type >();
}
template< class Type , int Dim >
cmdLineParameterArray< Type , Dim >::~cmdLineParameterArray( void ){ for( int i=0 ; i<Dim ; i++ ) cmdLineCleanUp< Type >( values+i ); }
template< class Type , int Dim >
int cmdLineParameterArray< Type , Dim >::read( char** argv , int argc )
{
if( argc>=Dim )
{
for( int i=0 ; i<Dim ; i++ ) cmdLineCleanUp< Type >( values+i ) , values[i] = cmdLineStringToType< Type >( argv[i] );
set = true;
return Dim;
}
else return 0;
}
template< class Type , int Dim >
void cmdLineParameterArray< Type , Dim >::writeValue( char* str ) const
{
char* temp=str;
for( int i=0 ; i<Dim ; i++ )
{
cmdLineWriteValue< Type >( values[i] , temp );
temp = str+strlen( str );
}
}
///////////////////////
// cmdLineParameters //
///////////////////////
template< class Type >
cmdLineParameters< Type >::cmdLineParameters( const char* name ) : cmdLineReadable( name ) , values(NULL) , count(0) { }
template< class Type >
cmdLineParameters< Type >::~cmdLineParameters( void )
{
if( values ) delete[] values;
values = NULL;
count = 0;
}
template< class Type >
int cmdLineParameters< Type >::read( char** argv , int argc )
{
if( values ) delete[] values;
values = NULL;
if( argc>0 )
{
count = atoi(argv[0]);
if( count <= 0 || argc <= count ) return 1;
values = new Type[count];
if( !values ) return 0;
for( int i=0 ; i<count ; i++ ) values[i] = cmdLineStringToType< Type >( argv[i+1] );
set = true;
return count+1;
}
else return 0;
}
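// Note: cmdLineParameters expects a count followed by that many values, e.g. a
// hypothetical "-weights 3 0.1 0.5 0.4" sets count to 3 and fills values[0..2];
// read() consumes the count token plus the values and returns count+1.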
template< class Type >
void cmdLineParameters< Type >::writeValue( char* str ) const
{
char* temp=str;
for( int i=0 ; i<count ; i++ )
{
cmdLineWriteValue< Type >( values[i] , temp );
temp = str+strlen( str );
}
}
inline char* FileExtension( char* fileName )
{
char* temp = fileName;
for( unsigned int i=0 ; i<strlen(fileName) ; i++ ) if( fileName[i]=='.' ) temp = &fileName[i+1];
return temp;
}
inline char* GetFileExtension( const char* fileName )
{
char* fileNameCopy;
char* ext=NULL;
char* temp;
fileNameCopy=new char[strlen(fileName)+1];
assert(fileNameCopy);
strcpy(fileNameCopy,fileName);
temp=strtok(fileNameCopy,".");
while(temp!=NULL)
{
if(ext!=NULL){delete[] ext;}
ext=new char[strlen(temp)+1];
assert(ext);
strcpy(ext,temp);
temp=strtok(NULL,".");
}
delete[] fileNameCopy;
return ext;
}
inline char* GetLocalFileName( const char* fileName )
{
char* fileNameCopy;
char* name=NULL;
char* temp;
fileNameCopy=new char[strlen(fileName)+1];
assert(fileNameCopy);
strcpy(fileNameCopy,fileName);
temp=strtok(fileNameCopy,"\\");
while(temp!=NULL){
if(name!=NULL){delete[] name;}
name=new char[strlen(temp)+1];
assert(name);
strcpy(name,temp);
temp=strtok(NULL,"\\");
}
delete[] fileNameCopy;
return name;
}
inline char* LocalFileName( char* fileName )
{
char* temp = fileName;
for( int i=0 ; i<(int)strlen(fileName) ; i++ ) if( fileName[i] =='\\' ) temp = &fileName[i+1];
return temp;
}
inline char* DirectoryName( char* fileName )
{
for( int i=int( strlen(fileName) )-1 ; i>=0 ; i-- )
if( fileName[i] =='\\' )
{
fileName[i] = 0;
return fileName;
}
fileName[0] = 0;
return fileName;
}
inline void cmdLineParse( int argc , char **argv , cmdLineReadable** params )
{
while( argc>0 )
{
if( argv[0][0]=='-' )
{
cmdLineReadable* readable=NULL;
for( int i=0 ; params[i]!=NULL && readable==NULL ; i++ ) if( !strcasecmp( params[i]->name , argv[0]+1 ) ) readable = params[i];
if( readable )
{
int j = readable->read( argv+1 , argc-1 );
argv += j , argc -= j;
}
else
{
fprintf( stderr , "[WARNING] Invalid option: %s\n" , argv[0] );
for( int i=0 ; params[i]!=NULL ; i++ ) printf( "\t-%s\n" , params[i]->name );
}
}
else fprintf( stderr , "[WARNING] Parameter name should be of the form -<name>: %s\n" , argv[0] );
++argv , --argc;
}
}
inline char** ReadWords(const char* fileName,int& cnt)
{
char** names;
char temp[500];
FILE* fp;
fp=fopen(fileName,"r");
if(!fp){return NULL;}
cnt=0;
while(fscanf(fp," %s ",temp)==1){cnt++;}
fclose(fp);
names=new char*[cnt];
if(!names){return NULL;}
fp=fopen(fileName,"r");
if(!fp){
delete[] names;
cnt=0;
return NULL;
}
cnt=0;
while(fscanf(fp," %s ",temp)==1){
names[cnt]=new char[strlen(temp)+1];
if(!names){
for(int j=0;j<cnt;j++){delete[] names[j];}
delete[] names;
cnt=0;
fclose(fp);
return NULL;
}
strcpy(names[cnt],temp);
cnt++;
}
fclose(fp);
return names;
}

Wyświetl plik

@ -0,0 +1,33 @@
#include <cstdio>
#include <cstdarg>
#include "CmdLineParser.h"
struct Logger{
bool verbose;
const char* outputFile;
Logger(){
this->verbose = false;
this->outputFile = NULL;
}
void operator() ( const char* format , ... )
{
if( outputFile )
{
FILE* fp = fopen( outputFile , "a" );
va_list args;
va_start( args , format );
vfprintf( fp , format , args );
fclose( fp );
va_end( args );
}
if( verbose )
{
va_list args;
va_start( args , format );
vprintf( format , args );
va_end( args );
}
}
};
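
As a small illustration (the file name and message are made up), the struct above acts as a printf-style callable:

    #include "Logger.h"

    int main()
    {
        Logger log;
        log.verbose = true;               // echo messages to stdout
        log.outputFile = "run_log.txt";   // hypothetical path; messages are appended here
        log("Processed %d of %d cells\n", 10, 100);
        return 0;
    }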

Wyświetl plik

@ -0,0 +1,114 @@
#include <iostream>
#include <string>
#include <fstream>
#include <vtkPolyDataConnectivityFilter.h>
#include <vtkSmartPointer.h>
#include <vtkPLYReader.h>
#include <vtkPLYWriter.h>
#include <vtkAlgorithmOutput.h>
#include <vtkQuadricDecimation.h>
#include "CmdLineParser.h"
#include "Logger.h"
Logger logWriter;
cmdLineParameter< char* >
InputFile( "inputFile" ) ,
OutputFile( "outputFile" );
cmdLineParameter< int >
DecimateMesh( "decimateMesh" );
cmdLineReadable
RemoveIslands( "removeIslands" ) ,
Verbose( "verbose" );
cmdLineReadable* params[] = {
&InputFile , &OutputFile , &DecimateMesh, &RemoveIslands, &Verbose ,
NULL
};
void help(char *ex){
std::cout << "Usage: " << ex << std::endl
<< "\t -" << InputFile.name << " <input polygon mesh>" << std::endl
<< "\t -" << OutputFile.name << " <output polygon mesh>" << std::endl
<< "\t [-" << DecimateMesh.name << " <target number of vertices>]" << std::endl
<< "\t [-" << RemoveIslands.name << "]" << std::endl
<< "\t [-" << Verbose.name << "]" << std::endl;
exit(EXIT_FAILURE);
}
void logArgs(cmdLineReadable* params[], Logger& logWriter){
logWriter("Running with parameters:\n");
char str[1024];
for( int i=0 ; params[i] ; i++ ){
if( params[i]->set ){
params[i]->writeValue( str );
if( strlen( str ) ) logWriter( "\t--%s %s\n" , params[i]->name , str );
else logWriter( "\t--%s\n" , params[i]->name );
}
}
}
int main(int argc, char **argv) {
cmdLineParse( argc-1 , &argv[1] , params );
if( !InputFile.set || !OutputFile.set ) help(argv[0]);
if( !RemoveIslands.set && !DecimateMesh.set ) help (argv[0]);
logWriter.verbose = Verbose.set;
// logWriter.outputFile = "odm_cleanmesh_log.txt";
logArgs(params, logWriter);
vtkSmartPointer<vtkPLYReader> reader =
vtkSmartPointer<vtkPLYReader>::New();
reader->SetFileName ( InputFile.value );
reader->Update();
vtkPolyData *nextOutput = reader->GetOutput();
vtkSmartPointer<vtkPolyDataConnectivityFilter> connectivityFilter =
vtkSmartPointer<vtkPolyDataConnectivityFilter>::New();
connectivityFilter->SetExtractionModeToLargestRegion();
vtkSmartPointer<vtkQuadricDecimation> decimationFilter =
vtkSmartPointer<vtkQuadricDecimation>::New();
if (RemoveIslands.set){
logWriter("Removing islands\n");
connectivityFilter->SetInputData(nextOutput);
connectivityFilter->Update();
nextOutput = connectivityFilter->GetOutput();
}
if (DecimateMesh.set){
logWriter("Decimating mesh\n");
int vertexCount = nextOutput->GetNumberOfPoints();
logWriter("Current vertex count: %d\n", vertexCount);
logWriter("Wanted vertex count: %d\n", DecimateMesh.value);
if (vertexCount > DecimateMesh.value){
double targetReduction = 1.0 - static_cast<double>(DecimateMesh.value) / static_cast<double>(vertexCount);
logWriter("Target reduction set to %f\n", targetReduction);
decimationFilter->SetTargetReduction(targetReduction);
decimationFilter->SetInputData(nextOutput);
decimationFilter->Update();
nextOutput = decimationFilter->GetOutput();
}else{
logWriter("Skipping decimation\n");
}
}
logWriter("Saving cleaned mesh to file... \n");
vtkSmartPointer<vtkPLYWriter> plyWriter =
vtkSmartPointer<vtkPLYWriter>::New();
plyWriter->SetFileName(OutputFile.value);
plyWriter->SetFileTypeToBinary();
plyWriter->SetInputData(nextOutput);
plyWriter->Write();
logWriter("OK\n");
}
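
To make the decimation step above concrete: with a hypothetical 1,000,000-vertex input and -decimateMesh 200000, the target reduction is 1.0 - 200000/1000000 = 0.8, i.e. vtkQuadricDecimation is asked to drop about 80% of the vertices. A typical invocation (file names are illustrative) looks like:

    ./odm_cleanmesh -inputFile odm_mesh.ply -outputFile odm_mesh_cleaned.ply -removeIslands -decimateMesh 200000 -verbose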

Wyświetl plik

@ -0,0 +1,19 @@
project(odm_dem2points)
cmake_minimum_required(VERSION 2.8)
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR})
set (CMAKE_CXX_STANDARD 11)
find_package(GDAL REQUIRED)
include_directories(${GDAL_INCLUDE_DIR})
# Add compiler options.
add_definitions(-Wall -Wextra)
# Add source directory
aux_source_directory("./src" SRC_LIST)
# Add executable
add_executable(${PROJECT_NAME} ${SRC_LIST})
target_link_libraries(${PROJECT_NAME} ${GDAL_LIBRARY})

Wyświetl plik

@ -0,0 +1,106 @@
/*
Copyright (c) 2006, Michael Kazhdan and Matthew Bolitho
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer. Redistributions in binary form must reproduce
the above copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the distribution.
Neither the name of the Johns Hopkins University nor the names of its contributors
may be used to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
*/
#ifndef CMD_LINE_PARSER_INCLUDED
#define CMD_LINE_PARSER_INCLUDED
#include <stdarg.h>
#include <cstring>
#include <cstdlib>
#include <string>
#include <vector>
#ifdef WIN32
int strcasecmp( const char* c1 , const char* c2 );
#endif // WIN32
class cmdLineReadable
{
public:
bool set;
char *name;
cmdLineReadable( const char *name );
virtual ~cmdLineReadable( void );
virtual int read( char** argv , int argc );
virtual void writeValue( char* str ) const;
};
template< class Type > void cmdLineWriteValue( Type t , char* str );
template< class Type > void cmdLineCleanUp( Type* t );
template< class Type > Type cmdLineInitialize( void );
template< class Type > Type cmdLineCopy( Type t );
template< class Type > Type cmdLineStringToType( const char* str );
template< class Type >
class cmdLineParameter : public cmdLineReadable
{
public:
Type value;
cmdLineParameter( const char *name );
cmdLineParameter( const char *name , Type v );
~cmdLineParameter( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
template< class Type , int Dim >
class cmdLineParameterArray : public cmdLineReadable
{
public:
Type values[Dim];
cmdLineParameterArray( const char *name, const Type* v=NULL );
~cmdLineParameterArray( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
template< class Type >
class cmdLineParameters : public cmdLineReadable
{
public:
int count;
Type *values;
cmdLineParameters( const char* name );
~cmdLineParameters( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
void cmdLineParse( int argc , char **argv, cmdLineReadable** params );
char* FileExtension( char* fileName );
char* LocalFileName( char* fileName );
char* DirectoryName( char* fileName );
char* GetFileExtension( const char* fileName );
char* GetLocalFileName( const char* fileName );
char** ReadWords( const char* fileName , int& cnt );
#include "CmdLineParser.inl"
#endif // CMD_LINE_PARSER_INCLUDED

Wyświetl plik

@ -0,0 +1,300 @@
/* -*- C++ -*-
Copyright (c) 2006, Michael Kazhdan and Matthew Bolitho
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer. Redistributions in binary form must reproduce
the above copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the distribution.
Neither the name of the Johns Hopkins University nor the names of its contributors
may be used to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
*/
#include <cassert>
#include <string.h>
#if defined( WIN32 ) || defined( _WIN64 )
inline int strcasecmp( const char* c1 , const char* c2 ){ return _stricmp( c1 , c2 ); }
#endif // WIN32 || _WIN64
template< > void cmdLineCleanUp< int >( int* t ){ *t = 0; }
template< > void cmdLineCleanUp< float >( float* t ){ *t = 0; }
template< > void cmdLineCleanUp< double >( double* t ){ *t = 0; }
template< > void cmdLineCleanUp< char* >( char** t ){ if( *t ) free( *t ) ; *t = NULL; }
template< > int cmdLineInitialize< int >( void ){ return 0; }
template< > float cmdLineInitialize< float >( void ){ return 0.f; }
template< > double cmdLineInitialize< double >( void ){ return 0.; }
template< > char* cmdLineInitialize< char* >( void ){ return NULL; }
template< > void cmdLineWriteValue< int >( int t , char* str ){ sprintf( str , "%d" , t ); }
template< > void cmdLineWriteValue< float >( float t , char* str ){ sprintf( str , "%f" , t ); }
template< > void cmdLineWriteValue< double >( double t , char* str ){ sprintf( str , "%f" , t ); }
template< > void cmdLineWriteValue< char* >( char* t , char* str ){ if( t ) sprintf( str , "%s" , t ) ; else str[0]=0; }
template< > int cmdLineCopy( int t ){ return t; }
template< > float cmdLineCopy( float t ){ return t; }
template< > double cmdLineCopy( double t ){ return t; }
#if defined( WIN32 ) || defined( _WIN64 )
template< > char* cmdLineCopy( char* t ){ return _strdup( t ); }
#else // !WIN32 && !_WIN64
template< > char* cmdLineCopy( char* t ){ return strdup( t ); }
#endif // WIN32 || _WIN64
template< > int cmdLineStringToType( const char* str ){ return atoi( str ); }
template< > float cmdLineStringToType( const char* str ){ return float( atof( str ) ); }
template< > double cmdLineStringToType( const char* str ){ return double( atof( str ) ); }
#if defined( WIN32 ) || defined( _WIN64 )
template< > char* cmdLineStringToType( const char* str ){ return _strdup( str ); }
#else // !WIN32 && !_WIN64
template< > char* cmdLineStringToType( const char* str ){ return strdup( str ); }
#endif // WIN32 || _WIN64
/////////////////////
// cmdLineReadable //
/////////////////////
#if defined( WIN32 ) || defined( _WIN64 )
inline cmdLineReadable::cmdLineReadable( const char *name ) : set(false) { this->name = _strdup( name ); }
#else // !WIN32 && !_WIN64
inline cmdLineReadable::cmdLineReadable( const char *name ) : set(false) { this->name = strdup( name ); }
#endif // WIN32 || _WIN64
inline cmdLineReadable::~cmdLineReadable( void ){ if( name ) free( name ) ; name = NULL; }
inline int cmdLineReadable::read( char** , int ){ set = true ; return 0; }
inline void cmdLineReadable::writeValue( char* str ) const { str[0] = 0; }
//////////////////////
// cmdLineParameter //
//////////////////////
template< class Type > cmdLineParameter< Type >::~cmdLineParameter( void ) { cmdLineCleanUp( &value ); }
template< class Type > cmdLineParameter< Type >::cmdLineParameter( const char *name ) : cmdLineReadable( name ){ value = cmdLineInitialize< Type >(); }
template< class Type > cmdLineParameter< Type >::cmdLineParameter( const char *name , Type v ) : cmdLineReadable( name ){ value = cmdLineCopy< Type >( v ); }
template< class Type >
int cmdLineParameter< Type >::read( char** argv , int argc )
{
if( argc>0 )
{
cmdLineCleanUp< Type >( &value ) , value = cmdLineStringToType< Type >( argv[0] );
set = true;
return 1;
}
else return 0;
}
template< class Type >
void cmdLineParameter< Type >::writeValue( char* str ) const { cmdLineWriteValue< Type >( value , str ); }
///////////////////////////
// cmdLineParameterArray //
///////////////////////////
template< class Type , int Dim >
cmdLineParameterArray< Type , Dim >::cmdLineParameterArray( const char *name , const Type* v ) : cmdLineReadable( name )
{
if( v ) for( int i=0 ; i<Dim ; i++ ) values[i] = cmdLineCopy< Type >( v[i] );
else for( int i=0 ; i<Dim ; i++ ) values[i] = cmdLineInitialize< Type >();
}
template< class Type , int Dim >
cmdLineParameterArray< Type , Dim >::~cmdLineParameterArray( void ){ for( int i=0 ; i<Dim ; i++ ) cmdLineCleanUp< Type >( values+i ); }
template< class Type , int Dim >
int cmdLineParameterArray< Type , Dim >::read( char** argv , int argc )
{
if( argc>=Dim )
{
for( int i=0 ; i<Dim ; i++ ) cmdLineCleanUp< Type >( values+i ) , values[i] = cmdLineStringToType< Type >( argv[i] );
set = true;
return Dim;
}
else return 0;
}
template< class Type , int Dim >
void cmdLineParameterArray< Type , Dim >::writeValue( char* str ) const
{
char* temp=str;
for( int i=0 ; i<Dim ; i++ )
{
cmdLineWriteValue< Type >( values[i] , temp );
temp = str+strlen( str );
}
}
///////////////////////
// cmdLineParameters //
///////////////////////
template< class Type >
cmdLineParameters< Type >::cmdLineParameters( const char* name ) : cmdLineReadable( name ) , values(NULL) , count(0) { }
template< class Type >
cmdLineParameters< Type >::~cmdLineParameters( void )
{
if( values ) delete[] values;
values = NULL;
count = 0;
}
template< class Type >
int cmdLineParameters< Type >::read( char** argv , int argc )
{
if( values ) delete[] values;
values = NULL;
if( argc>0 )
{
count = atoi(argv[0]);
if( count <= 0 || argc <= count ) return 1;
values = new Type[count];
if( !values ) return 0;
for( int i=0 ; i<count ; i++ ) values[i] = cmdLineStringToType< Type >( argv[i+1] );
set = true;
return count+1;
}
else return 0;
}
template< class Type >
void cmdLineParameters< Type >::writeValue( char* str ) const
{
char* temp=str;
for( int i=0 ; i<count ; i++ )
{
cmdLineWriteValue< Type >( values[i] , temp );
temp = str+strlen( str );
}
}
inline char* FileExtension( char* fileName )
{
char* temp = fileName;
for( unsigned int i=0 ; i<strlen(fileName) ; i++ ) if( fileName[i]=='.' ) temp = &fileName[i+1];
return temp;
}
inline char* GetFileExtension( const char* fileName )
{
char* fileNameCopy;
char* ext=NULL;
char* temp;
fileNameCopy=new char[strlen(fileName)+1];
assert(fileNameCopy);
strcpy(fileNameCopy,fileName);
temp=strtok(fileNameCopy,".");
while(temp!=NULL)
{
if(ext!=NULL){delete[] ext;}
ext=new char[strlen(temp)+1];
assert(ext);
strcpy(ext,temp);
temp=strtok(NULL,".");
}
delete[] fileNameCopy;
return ext;
}
inline char* GetLocalFileName( const char* fileName )
{
char* fileNameCopy;
char* name=NULL;
char* temp;
fileNameCopy=new char[strlen(fileName)+1];
assert(fileNameCopy);
strcpy(fileNameCopy,fileName);
temp=strtok(fileNameCopy,"\\");
while(temp!=NULL){
if(name!=NULL){delete[] name;}
name=new char[strlen(temp)+1];
assert(name);
strcpy(name,temp);
temp=strtok(NULL,"\\");
}
delete[] fileNameCopy;
return name;
}
inline char* LocalFileName( char* fileName )
{
char* temp = fileName;
for( int i=0 ; i<(int)strlen(fileName) ; i++ ) if( fileName[i] =='\\' ) temp = &fileName[i+1];
return temp;
}
inline char* DirectoryName( char* fileName )
{
for( int i=int( strlen(fileName) )-1 ; i>=0 ; i-- )
if( fileName[i] =='\\' )
{
fileName[i] = 0;
return fileName;
}
fileName[0] = 0;
return fileName;
}
inline void cmdLineParse( int argc , char **argv , cmdLineReadable** params )
{
while( argc>0 )
{
if( argv[0][0]=='-' )
{
cmdLineReadable* readable=NULL;
for( int i=0 ; params[i]!=NULL && readable==NULL ; i++ ) if( !strcasecmp( params[i]->name , argv[0]+1 ) ) readable = params[i];
if( readable )
{
int j = readable->read( argv+1 , argc-1 );
argv += j , argc -= j;
}
else
{
fprintf( stderr , "[WARNING] Invalid option: %s\n" , argv[0] );
for( int i=0 ; params[i]!=NULL ; i++ ) printf( "\t-%s\n" , params[i]->name );
}
}
else fprintf( stderr , "[WARNING] Parameter name should be of the form -<name>: %s\n" , argv[0] );
++argv , --argc;
}
}
inline char** ReadWords(const char* fileName,int& cnt)
{
char** names;
char temp[500];
FILE* fp;
fp=fopen(fileName,"r");
if(!fp){return NULL;}
cnt=0;
while(fscanf(fp," %s ",temp)==1){cnt++;}
fclose(fp);
names=new char*[cnt];
if(!names){return NULL;}
fp=fopen(fileName,"r");
if(!fp){
delete[] names;
cnt=0;
return NULL;
}
cnt=0;
while(fscanf(fp," %s ",temp)==1){
names[cnt]=new char[strlen(temp)+1];
if(!names){
for(int j=0;j<cnt;j++){delete[] names[j];}
delete[] names;
cnt=0;
fclose(fp);
return NULL;
}
strcpy(names[cnt],temp);
cnt++;
}
fclose(fp);
return names;
}

Wyświetl plik

@ -0,0 +1,33 @@
#include <cstdio>
#include <cstdarg>
#include "CmdLineParser.h"
struct Logger{
bool verbose;
const char* outputFile;
Logger(){
this->verbose = false;
this->outputFile = NULL;
}
void operator() ( const char* format , ... )
{
if( outputFile )
{
FILE* fp = fopen( outputFile , "a" );
va_list args;
va_start( args , format );
vfprintf( fp , format , args );
fclose( fp );
va_end( args );
}
if( verbose )
{
va_list args;
va_start( args , format );
vprintf( format , args );
va_end( args );
}
}
};

Wyświetl plik

@ -0,0 +1,234 @@
#include <iostream>
#include <string>
#include <fstream>
#include "CmdLineParser.h"
#include "Logger.h"
#include "gdal_priv.h"
#include "cpl_conv.h" // for CPLMalloc()
Logger logWriter;
typedef struct Point2D{
double x;
double y;
Point2D(): x(0.0), y(0.0){}
Point2D(double x, double y): x(x), y(y){}
} Point2D;
typedef struct BoundingBox{
Point2D max;
Point2D min;
BoundingBox(): max(Point2D()), min(Point2D()){}
BoundingBox(Point2D min, Point2D max): max(max), min(min){}
} BoundingBox;
struct PlyPoint{
float x;
float y;
float z;
float nx;
float ny;
float nz;
} p;
size_t psize = sizeof(float) * 6;
float *rasterData;
int arr_width, arr_height;
#define IS_BIG_ENDIAN (*(uint16_t *)"\0\xff" < 0x100)
cmdLineParameter< char* >
InputFile( "inputFile" ) ,
OutputFile( "outputFile" );
cmdLineParameter< float >
SkirtHeightThreshold( "skirtHeightThreshold" ),
SkirtIncrements("skirtIncrements"),
SkirtHeightCap( "skirtHeightCap");
cmdLineReadable
Verbose( "verbose" );
cmdLineReadable* params[] = {
&InputFile , &OutputFile , &SkirtHeightThreshold, &SkirtIncrements, &SkirtHeightCap, &Verbose ,
NULL
};
void help(char *ex){
std::cout << "Usage: " << ex << std::endl
<< "\t -" << InputFile.name << " <input DSM raster>" << std::endl
<< "\t -" << OutputFile.name << " <output PLY points>" << std::endl
<< "\t [-" << SkirtHeightThreshold.name << " <Height threshold between cells that triggers the creation of a skirt>]" << std::endl
<< "\t [-" << SkirtIncrements.name << " <Skirt height increments when adding a new skirt>]" << std::endl
<< "\t [-" << SkirtHeightCap.name << " <Height cap that blocks the creation of a skirt>]" << std::endl
<< "\t [-" << Verbose.name << "]" << std::endl;
exit(EXIT_FAILURE);
}
void logArgs(cmdLineReadable* params[], Logger& logWriter){
logWriter("Running with parameters:\n");
char str[1024];
for( int i=0 ; params[i] ; i++ ){
if( params[i]->set ){
params[i]->writeValue( str );
if( strlen( str ) ) logWriter( "\t--%s %s\n" , params[i]->name , str );
else logWriter( "\t--%s\n" , params[i]->name );
}
}
}
Point2D geoLoc(float xloc, float yloc, double *affine){
return Point2D(affine[0] + xloc*affine[1] + yloc*affine[2], affine[3] + xloc*affine[4] + yloc*affine[5]);
}
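// For reference, the 6-element geotransform returned by GDAL and used above is:
//   affine[0], affine[3] -> x/y of the raster's top-left corner
//   affine[1], affine[5] -> pixel width / pixel height (affine[5] is negative for north-up rasters)
//   affine[2], affine[4] -> row/column rotation terms (0 for an axis-aligned raster)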
BoundingBox getExtent(GDALDataset *dataset){
double affine[6];
dataset->GetGeoTransform(affine);
return BoundingBox(geoLoc(0, dataset->GetRasterYSize(), affine), geoLoc(dataset->GetRasterXSize(), 0, affine));
}
int countSkirts(int nx, int ny){
float neighbor_z = rasterData[ny * arr_width + nx];
float current_z = p.z;
int result = 0;
if (current_z - neighbor_z > SkirtHeightThreshold.value && current_z - neighbor_z < SkirtHeightCap.value){
while (current_z > neighbor_z){
current_z -= SkirtIncrements.value;
result++;
}
result++;
}
return result;
}
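// In other words, a cell that sits dz above a neighbour (with
// skirtHeightThreshold < dz < skirtHeightCap) contributes roughly
// dz / skirtIncrements + 1 skirt vertices; e.g. dz = 2.0 with the default
// 0.1 increment adds about 21 points around that edge.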
void writeSkirts(std::ofstream &f, int nx, int ny){
float neighbor_z = rasterData[ny * arr_width + nx];
float z_val = p.z;
if (p.z - neighbor_z > SkirtHeightThreshold.value && p.z - neighbor_z < SkirtHeightCap.value){
while (p.z > neighbor_z){
p.z -= SkirtIncrements.value;
f.write(reinterpret_cast<char*>(&p), psize);
}
p.z = neighbor_z;
f.write(reinterpret_cast<char*>(&p), psize);
}
// Restore original p.z value
p.z = z_val;
}
int main(int argc, char **argv) {
cmdLineParse( argc-1 , &argv[1] , params );
if( !InputFile.set || !OutputFile.set ) help(argv[0]);
if (!SkirtHeightThreshold.set) SkirtHeightThreshold.value = 1.5f;
if (!SkirtIncrements.set) SkirtIncrements.value = 0.1f;
if (!SkirtHeightCap.set) SkirtHeightCap.value = 100.0f;
logWriter.verbose = Verbose.set;
// logWriter.outputFile = "odm_dem2points_log.txt";
logArgs(params, logWriter);
GDALDataset *dataset;
GDALAllRegister();
dataset = (GDALDataset *) GDALOpen( InputFile.value, GA_ReadOnly );
if( dataset != NULL )
{
arr_width = dataset->GetRasterXSize();
arr_height = dataset->GetRasterYSize();
logWriter("Raster Size is %dx%d\n", arr_width, arr_height);
BoundingBox extent = getExtent(dataset);
logWriter("Extent is (%f, %f), (%f, %f)\n", extent.min.x, extent.max.x, extent.min.y, extent.max.y);
float ext_width = extent.max.x - extent.min.x;
float ext_height = extent.max.y - extent.min.y;
int vertex_count = (arr_height - 4) * (arr_width - 4);
int skirt_points = 0;
GDALRasterBand *band = dataset->GetRasterBand(1);
rasterData = new float[arr_width * arr_height];
if (band->RasterIO( GF_Read, 0, 0, arr_width, arr_height,
rasterData, arr_width, arr_height, GDT_Float32, 0, 0 ) == CE_Failure){
std::cerr << "Cannot access raster data" << std::endl;
exit(1);
}
// On the first pass we calculate the number of skirts
// on the second pass we sample and write the points
logWriter("Calculating skirts... ");
for (int y = 2; y < arr_height - 2; y++){
for (int x = 2; x < arr_width - 2; x++){
p.z = rasterData[y * arr_width + x];
skirt_points += countSkirts(x, y + 1);
skirt_points += countSkirts(x, y - 1);
skirt_points += countSkirts(x + 1, y);
skirt_points += countSkirts(x - 1, y);
}
}
logWriter("%d vertices will be added\n", skirt_points);
logWriter("Total vertices: %d\n", (skirt_points + vertex_count));
logWriter("Sampling and writing to file...");
// Start writing the PLY file
std::ofstream f (OutputFile.value);
f << "ply" << std::endl;
if (IS_BIG_ENDIAN){
f << "format binary_big_endian 1.0" << std::endl;
}else{
f << "format binary_little_endian 1.0" << std::endl;
}
f << "element vertex " << (vertex_count + skirt_points) << std::endl
<< "property float x" << std::endl
<< "property float y" << std::endl
<< "property float z" << std::endl
<< "property float nx" << std::endl
<< "property float ny" << std::endl
<< "property float nz" << std::endl
<< "end_header" << std::endl;
// Normals are always pointing up
p.nx = 0;
p.ny = 0;
p.nz = 1;
for (int y = 2; y < arr_height - 2; y++){
for (int x = 2; x < arr_width - 2; x++){
p.z = rasterData[y * arr_width + x];
p.x = extent.min.x + (static_cast<float>(x) / static_cast<float>(arr_width)) * ext_width;
p.y = extent.max.y - (static_cast<float>(y) / static_cast<float>(arr_height)) * ext_height;
f.write(reinterpret_cast<char*>(&p), psize);
writeSkirts(f, x, y + 1);
writeSkirts(f, x, y - 1);
writeSkirts(f, x + 1, y);
writeSkirts(f, x - 1, y);
}
}
logWriter(" done!\n");
f.close();
GDALClose(dataset);
}else{
std::cerr << "Cannot open " << InputFile.value << std::endl;
}
}
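
When the skirt flags are omitted, the defaults applied above are a 1.5 height threshold, 0.1 increments and a 100 cap, all in the raster's height units. A typical invocation (file names are illustrative) looks like:

    ./odm_dem2points -inputFile dsm.tif -outputFile dem_points.ply -verbose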

Wyświetl plik

@ -1,27 +0,0 @@
project(odm_meshing)
cmake_minimum_required(VERSION 2.8)
# Set pcl dir to the input specified with option -DPCL_DIR="path"
set(PCL_DIR "PCL_DIR-NOTFOUND" CACHE "PCL_DIR" "Path to the pcl installation directory")
# Add compiler options.
add_definitions(-Wall -Wextra)
# Find pcl at the location specified by PCL_DIR
find_package(VTK 6.0 REQUIRED)
find_package(PCL 1.8 HINTS "${PCL_DIR}/share/pcl-1.8")
# Add the PCL and Eigen include dirs.
# Necessary since the PCL_INCLUDE_DIR variable set by find_package is broken.
include_directories(${PCL_ROOT}/include/pcl-${PCL_VERSION_MAJOR}.${PCL_VERSION_MINOR})
include_directories(${EIGEN_ROOT})
# Add source directory
aux_source_directory("./src" SRC_LIST)
# Add executable
add_executable(${PROJECT_NAME} ${SRC_LIST})
# Link
target_link_libraries(odm_meshing ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES})

Wyświetl plik

@ -1,31 +0,0 @@
#include "Logger.hpp"
Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout)
{
}
Logger::~Logger()
{
}
void Logger::printToFile(std::string filePath)
{
std::ofstream file(filePath.c_str(), std::ios::binary);
file << logStream_.str();
file.close();
}
bool Logger::isPrintingInCout() const
{
return isPrintingInCout_;
}
void Logger::setIsPrintingInCout(bool isPrintingInCout)
{
isPrintingInCout_ = isPrintingInCout;
}

Wyświetl plik

@ -1,68 +0,0 @@
#pragma once
// STL
#include <string>
#include <sstream>
#include <fstream>
#include <iostream>
/*!
* \brief The Logger class is used to store program messages in a log file.
* \details By using the << operator while printInCout is set, the class writes both to
* cout and to file, if the flag is not set, output is written to file only.
*/
class Logger
{
public:
/*!
* \brief Logger Contains functionality for printing and displaying log information.
* \param printInCout Flag toggling if operator << also writes to cout.
*/
Logger(bool isPrintingInCout = true);
/*!
* \brief Destructor.
*/
~Logger();
/*!
* \brief print Prints the contents of the log to file.
* \param filePath Path specifying where to write the log.
*/
void printToFile(std::string filePath);
/*!
* \brief isPrintingInCout Check if console printing flag is set.
* \return Console printing flag.
*/
bool isPrintingInCout() const;
/*!
* \brief setIsPrintingInCout Set console printing flag.
* \param isPrintingInCout Value, if true, messages added to the log are also printed in cout.
*/
void setIsPrintingInCout(bool isPrintingInCout);
/*!
* Operator for printing messages to log and in the standard output stream if desired.
*/
template<class T>
friend Logger& operator<< (Logger &log, T t)
{
// If console printing is enabled.
if (log.isPrintingInCout_)
{
std::cout << t;
std::cout.flush();
}
// Write to log.
log.logStream_ << t;
return log;
}
private:
bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */
std::stringstream logStream_; /*!< Stream for storing the log. */
};

Wyświetl plik

@ -1,361 +0,0 @@
#include "OdmMeshing.hpp"
OdmMeshing::OdmMeshing() : log_(false)
{
meshCreator_ = pcl::Poisson<pcl::PointNormal>::Ptr(new pcl::Poisson<pcl::PointNormal>());
points_ = pcl::PointCloud<pcl::PointNormal>::Ptr(new pcl::PointCloud<pcl::PointNormal>());
mesh_ = pcl::PolygonMeshPtr(new pcl::PolygonMesh);
decimatedMesh_ = pcl::PolygonMeshPtr(new pcl::PolygonMesh);
// Set default values
outputFile_ = "";
logFilePath_ = "";
maxVertexCount_ = 0;
treeDepth_ = 0;
solverDivide_ = 9.0;
samplesPerNode_ = 1.0;
decimationFactor_ = 0.0;
logFilePath_ = "odm_meshing_log.txt";
log_ << logFilePath_ << "\n";
}
OdmMeshing::~OdmMeshing()
{
}
int OdmMeshing::run(int argc, char **argv)
{
// If no arguments were passed, print help and return early.
if (argc <= 1)
{
printHelp();
return EXIT_SUCCESS;
}
try
{
parseArguments(argc, argv);
loadPoints();
createMesh();
decimateMesh();
writePlyFile();
}
catch (const OdmMeshingException& e)
{
log_.setIsPrintingInCout(true);
log_ << e.what() << "\n";
log_.printToFile(logFilePath_);
log_ << "For more detailed information, see log file." << "\n";
return EXIT_FAILURE;
}
catch (const std::exception& e)
{
log_.setIsPrintingInCout(true);
log_ << "Error in OdmMeshing:\n";
log_ << e.what() << "\n";
log_.printToFile(logFilePath_);
log_ << "For more detailed information, see log file." << "\n";
return EXIT_FAILURE;
}
catch (...)
{
log_.setIsPrintingInCout(true);
log_ << "Unknwon error in OdmMeshing:\n";
log_.printToFile(logFilePath_);
log_ << "For more detailed information, see log file." << "\n";
return EXIT_FAILURE;
}
log_.printToFile(logFilePath_);
return EXIT_SUCCESS;
}
void OdmMeshing::parseArguments(int argc, char **argv)
{
for(int argIndex = 1; argIndex < argc; ++argIndex)
{
// The argument to be parsed.
std::string argument = std::string(argv[argIndex]);
if(argument == "-help")
{
printHelp();
}
else if(argument == "-verbose")
{
log_.setIsPrintingInCout(true);
}
else if(argument == "-maxVertexCount" && argIndex < argc)
{
++argIndex;
if (argIndex >= argc)
{
throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
std::stringstream ss(argv[argIndex]);
ss >> maxVertexCount_;
if (ss.bad())
{
throw OdmMeshingException("Argument '" + argument + "' has a bad value (wrong type).");
}
log_ << "Vertex count was manually set to: " << maxVertexCount_ << "\n";
}
else if(argument == "-octreeDepth" && argIndex < argc)
{
++argIndex;
if (argIndex >= argc)
{
throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
std::stringstream ss(argv[argIndex]);
ss >> treeDepth_;
if (ss.bad())
{
throw OdmMeshingException("Argument '" + argument + "' has a bad value (wrong type).");
}
log_ << "Octree depth was manually set to: " << treeDepth_ << "\n";
}
else if(argument == "-solverDivide" && argIndex < argc)
{
++argIndex;
if (argIndex >= argc)
{
throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
std::stringstream ss(argv[argIndex]);
ss >> solverDivide_;
if (ss.bad())
{
throw OdmMeshingException("Argument '" + argument + "' has a bad value (wrong type).");
}
log_ << "Numerical solver divisions was manually set to: " << treeDepth_ << "\n";
}
else if(argument == "-samplesPerNode" && argIndex < argc)
{
++argIndex;
if (argIndex >= argc)
{
throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
std::stringstream ss(argv[argIndex]);
ss >> samplesPerNode_;
if (ss.bad())
{
throw OdmMeshingException("Argument '" + argument + "' has a bad value (wrong type).");
}
log_ << "The number of samples per octree node was manually set to: " << samplesPerNode_ << "\n";
}
else if(argument == "-inputFile" && argIndex < argc)
{
++argIndex;
if (argIndex >= argc)
{
throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
inputFile_ = std::string(argv[argIndex]);
std::ifstream testFile(inputFile_.c_str(), std::ios::binary);
if (!testFile.is_open())
{
throw OdmMeshingException("Argument '" + argument + "' has a bad value. (file not accessible)");
}
testFile.close();
log_ << "Reading point cloud at: " << inputFile_ << "\n";
}
else if(argument == "-outputFile" && argIndex < argc)
{
++argIndex;
if (argIndex >= argc)
{
throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
outputFile_ = std::string(argv[argIndex]);
std::ofstream testFile(outputFile_.c_str());
if (!testFile.is_open())
{
throw OdmMeshingException("Argument '" + argument + "' has a bad value.");
}
testFile.close();
log_ << "Writing output to: " << outputFile_ << "\n";
}
else if(argument == "-logFile" && argIndex < argc)
{
++argIndex;
if (argIndex >= argc)
{
throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
}
logFilePath_ = std::string(argv[argIndex]);
std::ofstream testFile(outputFile_.c_str());
if (!testFile.is_open())
{
throw OdmMeshingException("Argument '" + argument + "' has a bad value.");
}
testFile.close();
log_ << "Writing log information to: " << logFilePath_ << "\n";
}
else
{
printHelp();
throw OdmMeshingException("Unrecognised argument '" + argument + "'");
}
}
}
void OdmMeshing::loadPoints()
{
if(pcl::io::loadPLYFile<pcl::PointNormal> (inputFile_.c_str(), *points_.get()) == -1) {
throw OdmMeshingException("Error when reading points and normals from:\n" + inputFile_ + "\n");
}
else
{
log_ << "Successfully loaded " << points_->size() << " points with corresponding normals from file.\n";
}
}
void OdmMeshing::printHelp()
{
bool printInCoutPop = log_.isPrintingInCout();
log_.setIsPrintingInCout(true);
log_ << "OpenDroneMapMeshing.exe\n\n";
log_ << "Purpose:" << "\n";
log_ << "Create a mesh from an oriented point cloud (points with normals) using the Poisson surface reconstruction method." << "\n";
log_ << "Usage:" << "\n";
log_ << "The program requires a path to an input PLY point cloud file, all other input parameters are optional." << "\n\n";
log_ << "The following flags are available\n";
log_ << "Call the program with flag \"-help\", or without parameters to print this message, or check any generated log file.\n";
log_ << "Call the program with flag \"-verbose\", to print log messages in the standard output stream as well as in the log file.\n\n";
log_ << "Parameters are specified as: \"-<argument name> <argument>\", (without <>), and the following parameters are configureable: " << "\n";
log_ << "\"-inputFile <path>\" (mandatory)" << "\n";
log_ << "\"Input ascii ply file that must contain a point cloud with normals.\n\n";
log_ << "\"-outputFile <path>\" (optional, default: odm_mesh.ply)" << "\n";
log_ << "\"Target file in which the mesh is saved.\n\n";
log_ << "\"-logFile <path>\" (optional, default: odm_meshing_log.txt)" << "\n";
log_ << "\"Target file in which the mesh is saved.\n\n";
log_ << "\"-maxVertexCount <integer>\" (optional, default: 100,000)" << "\n";
log_ << "Desired final vertex count (after decimation), set to 0 to disable decimation.\n\n";
log_ << "\"-treeDepth <integer>\" (optional, default: 0 (automatic))" << "\n";
log_ << "Controls octree depth used for poisson reconstruction. Recommended values (9-11).\n"
<< "Increasing the value on this parameter will raise initial vertex count."
<< "If omitted or zero, the depth is calculated automatically from the input point count.\n\n";
log_ << "\"-samplesPerNode <float>\" (optional, default: 1)" << "\n";
log_ << "Average number of samples (points) per octree node. Increasing this value might help if data is very noisy.\n\n";
log_ << "\"-solverDivide <integer>\" (optional, default: 9)" << "\n";
log_ << "Ocree depth at which the Laplacian equation is solved in the surface reconstruction step.\n";
log_ << "Increasing this value increases computation times slightly but helps reduce memory usage.\n\n";
log_.setIsPrintingInCout(printInCoutPop);
}
void OdmMeshing::createMesh()
{
// Attempt to calculate the depth of the tree if unspecified
if (treeDepth_ == 0)
{
treeDepth_ = calcTreeDepth(points_->size());
}
log_ << "Octree depth used for reconstruction is: " << treeDepth_ << "\n";
log_ << "Estimated initial vertex count: " << pow(4, treeDepth_) << "\n\n";
meshCreator_->setDepth(treeDepth_);
meshCreator_->setSamplesPerNode(samplesPerNode_);
meshCreator_->setInputCloud(points_);
// Guarantee manifold mesh.
meshCreator_->setManifold(true);
// Begin reconstruction
meshCreator_->reconstruct(*mesh_.get());
log_ << "Reconstruction complete:\n";
log_ << "Vertex count: " << mesh_->cloud.width << "\n";
log_ << "Triangle count: " << mesh_->polygons.size() << "\n\n";
}
void OdmMeshing::decimateMesh()
{
if (maxVertexCount_ <= 0)
{
log_ << "Vertex count not specified, decimation cancelled.\n";
return;
}
if (maxVertexCount_ > mesh_->cloud.height*mesh_->cloud.width)
{
log_ << "Vertex count in mesh lower than initially generated mesh, unable to decimate.\n";
return;
}
else
{
decimatedMesh_ = pcl::PolygonMeshPtr(new pcl::PolygonMesh);
double reductionFactor = 1.0 - double(maxVertexCount_)/double(mesh_->cloud.height*mesh_->cloud.width);
log_ << "Decimating mesh, removing " << reductionFactor*100 << " percent of vertices.\n";
pcl::MeshQuadricDecimationVTK decimator;
decimator.setInputMesh(mesh_);
decimator.setTargetReductionFactor(reductionFactor);
decimator.process(*decimatedMesh_.get());
log_ << "Decimation complete.\n";
log_ << "Decimated vertex count: " << decimatedMesh_->cloud.width << "\n";
log_ << "Decimated triangle count: " << decimatedMesh_->polygons.size() << "\n\n";
mesh_ = decimatedMesh_;
}
}
int OdmMeshing::calcTreeDepth(size_t nPoints)
{
// Assume points are located (roughly) in a plane.
double squareSide = std::sqrt(double(nPoints));
// Calculate octree depth such that if points were equally distributed in
// a quadratic plane, there would be at least 1 point per octree node.
int depth = 0;
while(std::pow<double>(2,depth) < squareSide/2)
{
depth++;
}
return depth;
}
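// Example: for 1,000,000 points squareSide is 1000, so the loop stops at
// depth 9 (2^9 = 512 >= 500), within the 9-11 range recommended in printHelp().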
void OdmMeshing::writePlyFile()
{
log_ << "Saving mesh to file.\n";
if (pcl::io::savePLYFile(outputFile_.c_str(), *mesh_.get()) == -1) {
throw OdmMeshingException("Error when saving mesh to file:\n" + outputFile_ + "\n");
}
else
{
log_ << "Successfully wrote mesh to:\n"
<< outputFile_ << "\n";
}
}

Wyświetl plik

@ -1,117 +0,0 @@
#pragma once
// STL
#include <string>
#include <iostream>
// PCL
#include <pcl/io/ply_io.h>
#include <pcl/surface/poisson.h>
#include <pcl/surface/vtk_smoothing/vtk_mesh_quadric_decimation.h>
// Logging
#include "Logger.hpp"
/*!
* \brief The OdmMeshing class is used to create a triangulated mesh using the Poisson method.
* The class reads an oriented point cloud (coordinates and normals) from a PLY ascii
* file and outputs the resulting welded manifold mesh on the form of an ASCII PLY-file.
* The class uses file read and write functions from pcl.
*/
class OdmMeshing
{
public:
OdmMeshing();
~OdmMeshing();
/*!
* \brief run Runs the meshing functionality using the provided input arguments.
* For a list of accepted arguments, please see the main page documentation or
* call the program with parameter "-help".
* \param argc Application argument count.
* \param argv Argument values.
* \return 0 If successful.
*/
int run(int argc, char **argv);
private:
/*!
* \brief parseArguments Parses command line arguments.
* \param argc Application argument count.
* \param argv Argument values.
*/
void parseArguments(int argc, char** argv);
/*!
* \brief createMesh Sets up the pcl::Poisson meshing class using provided arguments and calls
* it to start the meshing.
*/
void createMesh();
/*!
* \brief loadPoints Loads a PLY ascii file with points and normals from file.
*/
void loadPoints();
/*!
* \brief decimateMesh Performs post-processing on the form of quadric decimation to generate a mesh
* that has a higher density in areas with a lot of structure.
*/
void decimateMesh();
/*!
* \brief writePlyFile Writes the mesh to file on the Ply format.
*/
void writePlyFile();
/*!
* \brief printHelp Prints help, explaining usage. Can be shown by calling the program with argument: "-help".
*/
void printHelp();
/*!
* \brief calcTreeDepth Attempts to calculate the depth of the tree using the point cloud.
* The function makes the assumption points are located roughly in a plane
* (fairly reasonable for ortho-terrain photos) and tries to generate a mesh using
* an octree with an appropriate resolution.
* \param nPoints The total number of points in the input point cloud.
* \return The calculated octree depth.
*/
int calcTreeDepth(size_t nPoints);
Logger log_; /**< Logging object. */
pcl::Poisson<pcl::PointNormal>::Ptr meshCreator_; /**< PCL poisson meshing class. */
pcl::PointCloud<pcl::PointNormal>::Ptr points_; /**< Input point and normals. */
pcl::PolygonMeshPtr mesh_; /**< PCL polygon mesh. */
pcl::PolygonMeshPtr decimatedMesh_; /**< Decimated polygon mesh. */
std::string inputFile_; /**< Path to a file containing points and normals. */
std::string outputFile_; /**< Path to the destination file. */
std::string logFilePath_; /**< Path to the log file. */
unsigned int maxVertexCount_; /**< Desired output vertex count. */
unsigned int treeDepth_; /**< Depth of octree used for reconstruction. */
double samplesPerNode_; /**< Samples per octree node.*/
double solverDivide_; /**< Depth at which the Laplacian equation solver is run during surface estimation.*/
double decimationFactor_; /**< Percentage of points to remove when decimating the mesh. */
};
/*!
* \brief The OdmMeshingException class
*/
class OdmMeshingException : public std::exception
{
public:
OdmMeshingException() : message("Error in OdmMeshing") {}
OdmMeshingException(std::string msgInit) : message("Error in OdmMeshing:\n" + msgInit) {}
~OdmMeshingException() throw() {}
virtual const char* what() const throw() {return message.c_str(); }
private:
std::string message; /**< The error message **/
};

Wyświetl plik

@ -1,20 +0,0 @@
// Insert license here.
// Include meshing source code.
#include "OdmMeshing.hpp"
/*!
* \mainpage main OpenDroneMap Meshing Module
*
* The OpenDroneMap Meshing Module generates a welded, manifold mesh using the Poisson
* surface reconstruction algorithm from any oriented point cloud (points with corresponding normals).
*
*/
int main(int argc, char** argv)
{
OdmMeshing meshCreator;
return meshCreator.run(argc, argv);
}

Wyświetl plik

@ -7,7 +7,7 @@ from appsettings import SettingsParser
import sys
# parse arguments
processopts = ['dataset', 'opensfm', 'slam', 'cmvs', 'pmvs',
processopts = ['dataset', 'opensfm', 'slam', 'smvs',
'odm_meshing', 'odm_25dmeshing', 'mvs_texturing', 'odm_georeferencing',
'odm_dem', 'odm_orthophoto']
@ -140,18 +140,19 @@ def config():
default=False,
help='Turn off camera parameter optimization during bundler')
parser.add_argument('--opensfm-processes',
parser.add_argument('--max-concurrency',
metavar='<positive integer>',
default=context.num_cores,
type=int,
help=('The maximum number of processes to use in dense '
'reconstruction. Default: %(default)s'))
help=('The maximum number of processes to use in various '
'processes. Peak memory requirement is ~1GB per '
'thread and 2 megapixel image resolution. Default: %(default)s'))
parser.add_argument('--opensfm-depthmap-resolution',
parser.add_argument('--depthmap-resolution',
metavar='<positive float>',
type=float,
default=640,
help=('Resolution of the depthmaps. Higher values take longer to compute '
help=('Controls the density of the point cloud by setting the resolution of the depthmap images. Higher values take longer to compute '
'but produce denser point clouds. '
'Default: %(default)s'))
@ -187,78 +188,69 @@ def config():
help='Run local bundle adjustment for every image added to the reconstruction and a global '
'adjustment every 100 images. Speeds up reconstruction for very large datasets.')
parser.add_argument('--use-25dmesh',
parser.add_argument('--use-3dmesh',
action='store_true',
default=False,
help='Use a 2.5D mesh to compute the orthophoto. This option tends to provide better results for planar surfaces. Experimental.')
help='Use a full 3D mesh to compute the orthophoto instead of a 2.5D mesh. This option is a bit faster and provides similar results in planar areas.')
parser.add_argument('--use-pmvs',
parser.add_argument('--skip-3dmodel',
action='store_true',
default=False,
help='Skip generation of a full 3D model. This can save time if you only need 2D results such as orthophotos and DEMs.')
parser.add_argument('--use-opensfm-dense',
action='store_true',
default=False,
help='Use pmvs to compute point cloud alternatively')
help='Use opensfm to compute dense point cloud alternatively')
parser.add_argument('--cmvs-maxImages',
metavar='<integer>',
default=500,
type=int,
help='The maximum number of images per cluster. '
'Default: %(default)s')
parser.add_argument('--ignore-gsd',
action='store_true',
default=False,
help='Ignore Ground Sampling Distance (GSD). GSD '
'caps the maximum resolution of image outputs and '
'resizes images when necessary, resulting in faster processing and '
'lower memory usage. Since GSD is an estimate, sometimes ignoring it can result in slightly better image output quality.')
parser.add_argument('--pmvs-level',
metavar='<positive integer>',
default=1,
type=int,
help=('The level in the image pyramid that is used '
'for the computation. see '
'http://www.di.ens.fr/pmvs/documentation.html for '
'more pmvs documentation. Default: %(default)s'))
parser.add_argument('--smvs-alpha',
metavar='<float>',
default=1.0,
type=float,
help='Regularization parameter, a higher alpha leads to '
'smoother surfaces. Default: %(default)s')
parser.add_argument('--pmvs-csize',
parser.add_argument('--smvs-output-scale',
metavar='<positive integer>',
default=2,
type=int,
help='Cell size controls the density of reconstructions. '
'Default: %(default)s')
help='The scale of the optimization - the '
'finest resolution of the bicubic patches will have the'
' size of the respective power of 2 (e.g. 2 will '
'optimize patches covering down to 4x4 pixels). '
'Default: %(default)s')
parser.add_argument('--pmvs-threshold',
metavar='<float: -1.0 <= x <= 1.0>',
default=0.7,
type=float,
help=('A patch reconstruction is accepted as a success '
'and kept if its associated photometric consistency '
'measure is above this threshold. Default: %(default)s'))
parser.add_argument('--smvs-enable-shading',
action='store_true',
default=False,
help='Use shading-based optimization. This model cannot '
'handle complex scenes. Try to supply linear images to '
'the reconstruction pipeline that are not tone mapped '
'or altered as this can also have very negative effects '
'on the reconstruction. If you have simple JPGs with SRGB '
'gamma correction you can remove it with the --smvs-gamma-srgb '
'option. Default: %(default)s')
parser.add_argument('--pmvs-wsize',
metavar='<positive integer>',
default=7,
type=int,
help='pmvs samples wsize x wsize pixel colors from '
'each image to compute photometric consistency '
'score. For example, when wsize=7, 7x7=49 pixel '
'colors are sampled in each image. Increasing the '
'value leads to more stable reconstructions, but '
'the program becomes slower. Default: %(default)s')
parser.add_argument('--pmvs-min-images',
metavar='<positive integer>',
default=3,
type=int,
help=('Each 3D point must be visible in at least '
'minImageNum images for being reconstructed. 3 is '
'suggested in general. Default: %(default)s'))
parser.add_argument('--pmvs-num-cores',
metavar='<positive integer>',
default=context.num_cores,
type=int,
help=('The maximum number of cores to use in dense '
'reconstruction. Default: %(default)s'))
parser.add_argument('--smvs-gamma-srgb',
action='store_true',
default=False,
help='Apply inverse SRGB gamma correction. To be used '
'with --smvs-enable-shading when you have simple JPGs with '
'SRGB gamma correction. Default: %(default)s')
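For reference, "removing SRGB gamma correction" means applying the inverse sRGB transfer function to get back to linear values. The sketch below uses the standard sRGB formulation; whether smvsrecon applies exactly this curve internally is an assumption here.

def srgb_to_linear(c):
    # Standard inverse sRGB transfer function for a channel value c in [0, 1].
    if c <= 0.04045:
        return c / 12.92
    return ((c + 0.055) / 1.055) ** 2.4

print(round(srgb_to_linear(0.5), 4))  # ~0.214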
parser.add_argument('--mesh-size',
metavar='<positive integer>',
default=100000,
type=int,
help=('The maximum vertex count of the output mesh '
help=('The maximum vertex count of the output mesh. '
'Default: %(default)s'))
parser.add_argument('--mesh-octree-depth',
@ -276,35 +268,16 @@ def config():
help=('Number of points per octree node, recommended '
'and default value: %(default)s'))
parser.add_argument('--mesh-solver-divide',
metavar='<positive integer>',
default=9,
type=int,
help=('Oct-tree depth at which the Laplacian equation '
'is solved in the surface reconstruction step. '
'Increasing this value increases computation '
'times slightly but helps reduce memory usage. '
'Default: %(default)s'))
parser.add_argument('--mesh-neighbors',
metavar='<positive integer>',
default=24,
type=int,
help=('Number of neighbors to select when estimating the surface model used to compute the mesh and for statistical outlier removal. Higher values lead to smoother meshes but take longer to process. '
'Applies to 2.5D mesh only. '
'Default: %(default)s'))
parser.add_argument('--mesh-resolution',
metavar='<positive float>',
default=0,
parser.add_argument('--mesh-point-weight',
metavar='<interpolation weight>',
default=4,
type=float,
help=('Size of the interpolated surface model used for deriving the 2.5D mesh, expressed in pixels per meter. '
'Higher values work better for complex or urban terrains. '
'Lower values work better on flat areas. '
'Resolution has no effect on the number of vertices, but high values can severely impact runtime speed and memory usage. '
'When set to zero, the program automatically attempts to find a good value based on the point cloud extent and target vertex count. '
'Applies to 2.5D mesh only. '
'Default: %(default)s'))
help=('This floating point value specifies the importance'
' that interpolation of the point samples is given in the '
'formulation of the screened Poisson equation. The results '
'of the original (unscreened) Poisson Reconstruction can '
'be obtained by setting this value to 0. '
'Default: %(default)s'))
parser.add_argument('--fast-orthophoto',
action='store_true',
@ -346,6 +319,16 @@ def config():
help=('Data term: [area, gmi]. Default: '
'%(default)s'))
parser.add_argument('--texturing-nadir-weight',
metavar='<integer: 0 <= x <= 32>',
default=16,
type=int,
help=('Affects orthophotos only. '
'Higher values result in sharper corners, but can affect color distribution and blurriness. '
'Use lower values for planar areas and higher values for urban areas. '
'The default value works well for most scenarios. Default: '
'%(default)s'))
parser.add_argument('--texturing-outlier-removal-type',
metavar='<string>',
default='gauss_clamping',
@ -420,7 +403,7 @@ def config():
parser.add_argument('--dem-gapfill-steps',
metavar='<positive integer>',
default=4,
default=3,
type=int,
help='Number of steps used to fill areas with gaps. Set to 0 to disable gap filling. '
'Starting with a radius equal to the output resolution, N different DEMs are generated with '
@ -431,8 +414,8 @@ def config():
parser.add_argument('--dem-resolution',
metavar='<float>',
type=float,
default=0.1,
help='Length of raster cell edges in meters.'
default=5,
help='DSM/DTM resolution in cm / pixel.'
'\nDefault: %(default)s')
parser.add_argument('--dem-maxangle',
@ -489,9 +472,9 @@ def config():
parser.add_argument('--orthophoto-resolution',
metavar='<float > 0.0>',
default=20.0,
default=5,
type=float,
help=('Orthophoto ground resolution in pixels/meter'
help=('Orthophoto resolution in cm / pixel.\n'
'Default: %(default)s'))
parser.add_argument('--orthophoto-target-srs',
@ -565,16 +548,15 @@ def config():
sys.exit(1)
if args.fast_orthophoto:
log.ODM_INFO('Fast orthophoto is turned on, automatically setting --use-25dmesh')
args.use_25dmesh = True
# Cannot use pmvs
if args.use_pmvs:
log.ODM_INFO('Fast orthophoto is turned on, cannot use pmvs (removing --use-pmvs)')
args.use_pmvs = False
log.ODM_INFO('Fast orthophoto is turned on, automatically setting --skip-3dmodel')
args.skip_3dmodel = True
if args.dtm and args.pc_classify == 'none':
log.ODM_INFO("DTM is turned on, automatically turning on point cloud classification")
args.pc_classify = "smrf"
if args.skip_3dmodel and args.use_3dmesh:
log.ODM_WARNING('--skip-3dmodel is set, but so is --use-3dmesh. You can\'t have both!')
sys.exit(1)
return args

View file

@ -23,10 +23,11 @@ ccd_widths_path = os.path.join(opensfm_path, 'opensfm/data/sensor_data.json')
# define orb_slam2 path
orb_slam2_path = os.path.join(superbuild_path, "src/orb_slam2")
# define pmvs path
cmvs_path = os.path.join(superbuild_path, "install/bin/cmvs")
cmvs_opts_path = os.path.join(superbuild_path, "install/bin/genOption")
pmvs2_path = os.path.join(superbuild_path, "install/bin/pmvs2")
# define smvs paths
makescene_path = os.path.join(superbuild_path, 'src', 'elibs', 'mve', 'apps', 'makescene', 'makescene') #TODO: don't install in source
smvs_path = os.path.join(superbuild_path, 'src', 'elibs', 'smvs', 'app', 'smvsrecon')
poisson_recon_path = os.path.join(superbuild_path, 'src', 'PoissonRecon', 'Bin', 'Linux', 'PoissonRecon')
# define mvstex path
mvstex_path = os.path.join(superbuild_path, "install/bin/texrecon")
@ -44,5 +45,5 @@ settings_path = os.path.join(root_path, 'settings.yaml')
# Define supported image extensions
supported_extensions = {'.jpg','.jpeg','.png'}
# Define the number of cores
num_cores = multiprocessing.cpu_count()

View file

@ -63,7 +63,7 @@ class Cropper:
return geotiff_path
def create_bounds_geojson(self, pointcloud_path, buffer_distance = 0):
def create_bounds_geojson(self, pointcloud_path, buffer_distance = 0, decimation_step=40, outlier_radius=20):
"""
Compute a buffered polygon around the data extents (not just a bounding box)
of the given point cloud.
@ -80,11 +80,11 @@ class Cropper:
run("pdal translate -i \"{}\" "
"-o \"{}\" "
"decimation outlier range "
"--filters.decimation.step=40 "
"--filters.decimation.step={} "
"--filters.outlier.method=radius "
"--filters.outlier.radius=20 "
"--filters.outlier.radius={} "
"--filters.outlier.min_k=2 "
"--filters.range.limits='Classification![7:7]'".format(pointcloud_path, filtered_pointcloud_path))
"--filters.range.limits='Classification![7:7]'".format(pointcloud_path, filtered_pointcloud_path, decimation_step, outlier_radius))
if not os.path.exists(filtered_pointcloud_path):
log.ODM_WARNING('Could not filter point cloud, cannot generate shapefile bounds {}'.format(filtered_pointcloud_path))
@ -164,7 +164,7 @@ class Cropper:
return bounds_geojson_path
def create_bounds_shapefile(self, pointcloud_path, buffer_distance = 0):
def create_bounds_shapefile(self, pointcloud_path, buffer_distance = 0, decimation_step=40, outlier_radius=20):
"""
Compute a buffered polygon around the data extents (not just a bounding box)
of the given point cloud.
@ -175,7 +175,7 @@ class Cropper:
log.ODM_WARNING('Point cloud does not exist, cannot generate shapefile bounds {}'.format(pointcloud_path))
return ''
bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance)
bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance, decimation_step, outlier_radius)
summary_file_path = os.path.join(self.storage_dir, '{}.summary.json'.format(self.files_prefix))
run('pdal info --summary {0} > {1}'.format(pointcloud_path, summary_file_path))

View file

@ -1,8 +1,11 @@
import os, glob
import gippy
import numpy
from scipy import ndimage
from datetime import datetime
from opendm import log
from loky import get_reusable_executor
from functools import partial
from . import pdal
@ -23,13 +26,17 @@ def classify(lasFile, smrf=False, slope=1, cellsize=3, maxWindowSize=10, maxDist
def create_dems(filenames, demtype, radius=['0.56'], gapfill=False,
outdir='', suffix='', resolution=0.1, **kwargs):
outdir='', suffix='', resolution=0.1, max_workers=None, **kwargs):
""" Create DEMS for multiple radii, and optionally gapfill """
fouts = []
for rad in radius:
fouts.append(
create_dem(filenames, demtype,
radius=rad, outdir=outdir, suffix=suffix, resolution=resolution, **kwargs))
create_dem_for_radius = partial(create_dem,
filenames, demtype,
outdir=outdir, suffix=suffix, resolution=resolution, **kwargs)
with get_reusable_executor(max_workers=max_workers, timeout=None) as e:
fouts = list(e.map(create_dem_for_radius, radius))
fnames = {}
# convert from list of dicts, to dict of lists
for product in fouts[0].keys():
@ -52,7 +59,7 @@ def create_dems(filenames, demtype, radius=['0.56'], gapfill=False,
return _fouts
def create_dem(filenames, demtype, radius='0.56', decimation=None,
def create_dem(filenames, demtype, radius, decimation=None,
maxsd=None, maxz=None, maxangle=None, returnnum=None,
products=['idw'], outdir='', suffix='', verbose=False, resolution=0.1):
""" Create DEM from collection of LAS files """
@ -67,7 +74,10 @@ def create_dem(filenames, demtype, radius='0.56', decimation=None,
log.ODM_INFO('Creating %s from %s files' % (prettyname, len(filenames)))
# JSON pipeline
json = pdal.json_gdal_base(bname, products, radius, resolution)
json = pdal.json_add_filters(json, maxsd, maxz, maxangle, returnnum)
# A DSM for meshing does not use additional filters
if demtype != 'mesh_dsm':
json = pdal.json_add_filters(json, maxsd, maxz, maxangle, returnnum)
if demtype == 'dsm':
json = pdal.json_add_classification_filter(json, 2, equality='max')
@ -80,6 +90,7 @@ def create_dem(filenames, demtype, radius='0.56', decimation=None,
pdal.json_add_readers(json, filenames)
pdal.run_pipeline(json, verbose=verbose)
# verify existence of fout
exists = True
for f in fouts.values():
@ -92,13 +103,14 @@ def create_dem(filenames, demtype, radius='0.56', decimation=None,
return fouts
def gap_fill(filenames, fout, interpolation='nearest'):
def gap_fill(filenames, fout):
""" Gap fill from higher radius DTMs, then fill remainder with interpolation """
start = datetime.now()
from scipy.interpolate import griddata
if len(filenames) == 0:
raise Exception('No filenames provided!')
log.ODM_INFO('Starting gap-filling with nearest interpolation...')
filenames = sorted(filenames)
imgs = map(gippy.GeoImage, filenames)
@ -109,11 +121,16 @@ def gap_fill(filenames, fout, interpolation='nearest'):
locs = numpy.where(arr == nodata)
arr[locs] = imgs[i][0].read()[locs]
# interpolation at bad points
goodlocs = numpy.where(arr != nodata)
badlocs = numpy.where(arr == nodata)
arr[badlocs] = griddata(goodlocs, arr[goodlocs], badlocs, method=interpolation)
# Nearest neighbor interpolation at bad points
indices = ndimage.distance_transform_edt(arr == nodata,
return_distances=False,
return_indices=True)
arr = arr[tuple(indices)]
# Median filter
from scipy import signal
arr = signal.medfilt(arr, 5)
# write output
imgout = gippy.GeoImage.create_from(imgs[0], fout)
imgout.set_nodata(nodata)
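The nearest-neighbor fill above can be exercised in isolation. A minimal sketch (small synthetic array, hypothetical nodata value) of the distance_transform_edt trick, where return_indices yields, for every cell, the indices of the nearest valid cell:

import numpy as np
from scipy import ndimage

nodata = -9999.0
arr = np.array([[1.0, nodata, 3.0],
                [nodata, nodata, 6.0],
                [7.0, 8.0, 9.0]])

# For each nodata cell, compute the indices of the nearest valid cell...
indices = ndimage.distance_transform_edt(arr == nodata,
                                         return_distances=False,
                                         return_indices=True)
# ...and copy that cell's value over the gap.
filled = arr[tuple(indices)]
print(filled)  # no -9999 values remain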

View file

@ -63,7 +63,8 @@ def json_gdal_base(fout, output, radius, resolution=1):
'resolution': resolution,
'radius': radius,
'filename': '{0}.{1}.tif'.format(fout, output[0]),
'output_type': output[0]
'output_type': output[0],
'data_type': 'float'
})
return json
@ -170,10 +171,19 @@ def json_add_crop_filter(json, wkt):
return json
def is_ply_file(filename):
_, ext = os.path.splitext(filename)
return ext.lower() == '.ply'
def json_add_reader(json, filename):
""" Add LAS Reader Element and return """
""" Add Reader Element and return """
reader_type = 'readers.las' # default
if is_ply_file(filename):
reader_type = 'readers.ply'
json['pipeline'].insert(0, {
'type': 'readers.las',
'type': reader_type,
'filename': os.path.abspath(filename)
})
return json
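A small sketch of how the reader selection composes with the GDAL writer entry built by json_gdal_base (the 'writers.gdal' type and the example values are assumptions for illustration; the real pipeline may carry additional filters):

import os

def is_ply_file(filename):
    _, ext = os.path.splitext(filename)
    return ext.lower() == '.ply'

# Assumed writer entry, mirroring the fields shown above.
pipeline_json = {'pipeline': [{
    'type': 'writers.gdal',
    'resolution': 0.05,
    'radius': 0.07,
    'filename': 'mesh_dsm.idw.tif',
    'output_type': 'idw',
    'data_type': 'float'
}]}

filename = 'point_cloud.ply'  # hypothetical input
reader_type = 'readers.ply' if is_ply_file(filename) else 'readers.las'
pipeline_json['pipeline'].insert(0, {'type': reader_type,
                                     'filename': os.path.abspath(filename)})
print(pipeline_json['pipeline'][0]['type'])  # readers.ply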

124
opendm/gsd.py 100644
View file

@ -0,0 +1,124 @@
import os
import json
import numpy as np
from repoze.lru import lru_cache
from opendm import log
def image_scale_factor(target_resolution, reconstruction_json, gsd_error_estimate = 0.1):
"""
:param target_resolution resolution the user wants to have in cm / pixel
:param reconstruction_json path to OpenSfM's reconstruction.json
:param gsd_error_estimate percentage of estimated error in the GSD calculation to set an upper bound on resolution.
:return A down-scale (<= 1) value to apply to images to achieve the target resolution by comparing the current GSD of the reconstruction.
If a GSD cannot be computed, it just returns 1. Returned scale values are never higher than 1.
"""
gsd = opensfm_reconstruction_average_gsd(reconstruction_json)
if gsd is not None and target_resolution > 0:
gsd = gsd * (1 + gsd_error_estimate)
return min(1, gsd / target_resolution)
else:
return 1
def cap_resolution(resolution, reconstruction_json, gsd_error_estimate = 0.1, ignore_gsd=False):
"""
:param resolution resolution in cm / pixel
:param reconstruction_json path to OpenSfM's reconstruction.json
:param gsd_error_estimate percentage of estimated error in the GSD calculation to set an upper bound on resolution.
:param ignore_gsd when set to True, forces the function to just return resolution.
:return The max value between resolution and the GSD computed from the reconstruction.
If a GSD cannot be computed, or ignore_gsd is set to True, it just returns resolution. Units are in cm / pixel.
"""
if ignore_gsd:
return resolution
gsd = opensfm_reconstruction_average_gsd(reconstruction_json)
if gsd is not None:
gsd = gsd * (1 - gsd_error_estimate)
if gsd > resolution:
log.ODM_WARNING('Maximum resolution set to GSD - {}% ({} cm / pixel, requested resolution was {} cm / pixel)'.format(gsd_error_estimate * 100, round(gsd, 2), round(resolution, 2)))
return gsd
else:
return resolution
else:
log.ODM_WARNING('Cannot calculate GSD, using requested resolution of {}'.format(round(resolution, 2)))
return resolution
@lru_cache(maxsize=None)
def opensfm_reconstruction_average_gsd(reconstruction_json):
"""
Computes the average Ground Sampling Distance of an OpenSfM reconstruction.
:param reconstruction_json path to OpenSfM's reconstruction.json
:return Ground Sampling Distance value (cm / pixel) or None if
a GSD estimate cannot be computed
"""
if not os.path.isfile(reconstruction_json):
raise FileNotFoundError(reconstruction_json + " does not exist.")
with open(reconstruction_json) as f:
data = json.load(f)
# Calculate median height from sparse reconstruction
reconstruction = data[0]
point_heights = []
for pointId in reconstruction['points']:
point = reconstruction['points'][pointId]
point_heights.append(point['coordinates'][2])
ground_height = np.median(point_heights)
gsds = []
for shotImage in reconstruction['shots']:
shot = reconstruction['shots'][shotImage]
if shot['gps_dop'] < 999999:
camera = reconstruction['cameras'][shot['camera']]
shot_height = shot['translation'][2]
focal_ratio = camera['focal']
gsds.append(calculate_gsd_from_focal_ratio(focal_ratio,
shot_height - ground_height,
camera['width']))
if len(gsds) > 0:
mean = np.mean(gsds)
if mean > 0:
return mean
return None
def calculate_gsd(sensor_width, flight_height, focal_length, image_width):
"""
:param sensor_width in millimeters
:param flight_height in meters
:param focal_length in millimeters
:param image_width in pixels
:return Ground Sampling Distance
>>> round(calculate_gsd(13.2, 100, 8.8, 5472), 2)
2.74
>>> calculate_gsd(13.2, 100, 0, 2000)
>>> calculate_gsd(13.2, 100, 8.8, 0)
"""
if sensor_width != 0:
return calculate_gsd_from_focal_ratio(focal_length / sensor_width,
flight_height,
image_width)
else:
return None
def calculate_gsd_from_focal_ratio(focal_ratio, flight_height, image_width):
"""
:param focal_ratio focal length (mm) / sensor_width (mm)
:param flight_height in meters
:param image_width in pixels
:return Ground Sampling Distance
"""
if focal_ratio == 0 or image_width == 0:
return None
return ((flight_height * 100) / image_width) / focal_ratio
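The 2.74 doctest value above can be traced by hand; a short sketch of the same arithmetic (sensor width 13.2 mm, flight height 100 m, focal length 8.8 mm, image width 5472 px):

sensor_width = 13.2    # mm
flight_height = 100.0  # m
focal_length = 8.8     # mm
image_width = 5472     # px

focal_ratio = focal_length / sensor_width             # ~0.667
gsd = ((flight_height * 100) / image_width) / focal_ratio
print(round(gsd, 2))   # 2.74 cm / pixel, matching the doctest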

View file

@ -34,13 +34,23 @@ def dir_exists(dirname):
def copy(src, dst):
try:
shutil.copytree(src, dst)
except OSError as e:
if e.errno == errno.ENOTDIR:
shutil.copy(src, dst)
else: raise
def rename_file(src, dst):
try:
os.rename(src, dst)
return True
except OSError as e:
if e.errno == errno.ENOENT:
return False
else:
raise
# find a file in the root directory
def find(filename, folder):

183
opendm/mesh.py 100644
View file

@ -0,0 +1,183 @@
from __future__ import absolute_import
import os, shutil, sys, struct, random, math
from gippy import GeoImage
from opendm.dem import commands
from opendm import system
from opendm import log
from opendm import context
from scipy import signal, ndimage
import numpy as np
def create_25dmesh(inPointCloud, outMesh, dsm_resolution=0.05, depth=8, samples=1, maxVertexCount=100000, verbose=False, max_workers=None):
# Create DSM from point cloud
# Create temporary directory
mesh_directory = os.path.dirname(outMesh)
tmp_directory = os.path.join(mesh_directory, 'tmp')
if os.path.exists(tmp_directory):
shutil.rmtree(tmp_directory)
os.mkdir(tmp_directory)
log.ODM_INFO('Created temporary directory: %s' % tmp_directory)
radius_steps = [dsm_resolution * math.sqrt(2)]
log.ODM_INFO('Creating DSM for 2.5D mesh')
commands.create_dems(
[inPointCloud],
'mesh_dsm',
radius=map(str, radius_steps),
gapfill=True,
outdir=tmp_directory,
resolution=dsm_resolution,
products=['idw'],
verbose=verbose,
max_workers=max_workers
)
dsm_points = dem_to_points(os.path.join(tmp_directory, 'mesh_dsm.tif'), os.path.join(tmp_directory, 'dsm_points.ply'), verbose)
mesh = screened_poisson_reconstruction(dsm_points, outMesh, depth=depth,
samples=samples,
maxVertexCount=maxVertexCount,
threads=max_workers,
verbose=verbose)
# Cleanup tmp
if os.path.exists(tmp_directory):
shutil.rmtree(tmp_directory)
return mesh
def dem_to_points(inGeotiff, outPointCloud, verbose=False):
log.ODM_INFO('Sampling points from DSM: %s' % inGeotiff)
kwargs = {
'bin': context.odm_modules_path,
'outfile': outPointCloud,
'infile': inGeotiff,
'verbose': '-verbose' if verbose else ''
}
system.run('{bin}/odm_dem2points -inputFile {infile} '
'-outputFile {outfile} '
'-skirtHeightThreshold 1.5 '
'-skirtIncrements 0.2 '
'-skirtHeightCap 100 '
' {verbose} '.format(**kwargs))
return outPointCloud
# Old Python implementation of dem_to_points
# def dem_to_points(inGeotiff, outPointCloud):
# log.ODM_INFO('Sampling points from DSM: %s' % inGeotiff)
# image = GeoImage.open([inGeotiff], bandnames=['z'], nodata=-9999)
# arr = image['z'].read_raw()
# mem_file = BytesIO()
# xmin, xmax, ymin, ymax = image.extent().x0(), image.extent().x1(), image.extent().y0(), image.extent().y1()
# ext_width, ext_height = xmax - xmin, ymax - ymin
# arr_height, arr_width = arr.shape
# vertex_count = (arr_height - 4) * (arr_width - 4)
# skirt_points = 0
# skirt_height_threshold = 1 # meter
# skirt_increments = 0.1
# for x in range(2, arr_width - 2):
# for y in range(2, arr_height - 2):
# z = arr[y][x]
# tx = xmin + (float(x) / float(arr_width)) * ext_width
# ty = ymax - (float(y) / float(arr_height)) * ext_height
# mem_file.write(struct.pack('ffffff', tx, ty, z, 0, 0, 1))
# # Skirting
# for (nx, ny) in ((x, y + 1), (x, y - 1), (x + 1, y), (x - 1, y)):
# current_z = z
# neighbor_z = arr[ny][nx]
# if current_z - neighbor_z > skirt_height_threshold:
# while current_z > neighbor_z:
# current_z -= skirt_increments
# mem_file.write(struct.pack('ffffff', tx, ty, current_z, 0, 0, 1))
# skirt_points += 1
# mem_file.write(struct.pack('ffffff', tx, ty, neighbor_z, 0, 0, 1))
# skirt_points += 1
# log.ODM_INFO("Points count: %s (%s samples, %s skirts)", vertex_count + skirt_points, vertex_count, skirt_points)
# log.ODM_INFO('Writing points...')
# mem_file.seek(0)
# with open(outPointCloud, "wb") as f:
# f.write("ply\n")
# f.write("format binary_%s_endian 1.0\n" % sys.byteorder)
# f.write("element vertex %s\n" % (vertex_count + skirt_points))
# f.write("property float x\n")
# f.write("property float y\n")
# f.write("property float z\n")
# f.write("property float nx\n")
# f.write("property float ny\n")
# f.write("property float nz\n")
# f.write("end_header\n")
# shutil.copyfileobj(mem_file, f)
# mem_file.close()
# log.ODM_INFO('Wrote points to: %s' % outPointCloud)
# return outPointCloud
def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples = 1, maxVertexCount=100000, pointWeight=4, threads=context.num_cores, verbose=False):
mesh_path, mesh_filename = os.path.split(outMesh)
# mesh_path = path/to
# mesh_filename = odm_mesh.ply
basename, ext = os.path.splitext(mesh_filename)
# basename = odm_mesh
# ext = .ply
outMeshDirty = os.path.join(mesh_path, "{}.dirty{}".format(basename, ext))
poissonReconArgs = {
'bin': context.poisson_recon_path,
'outfile': outMeshDirty,
'infile': inPointCloud,
'depth': depth,
'samples': samples,
'pointWeight': pointWeight,
'threads': threads,
'verbose': '--verbose' if verbose else ''
}
# Run PoissonRecon
system.run('{bin} --in {infile} '
'--out {outfile} '
'--depth {depth} '
'--pointWeight {pointWeight} '
'--samplesPerNode {samples} '
'--threads {threads} '
'--linearFit '
'{verbose}'.format(**poissonReconArgs))
# Cleanup and reduce vertex count if necessary
cleanupArgs = {
'bin': context.odm_modules_path,
'outfile': outMesh,
'infile': outMeshDirty,
'max_vertex': maxVertexCount,
'verbose': '-verbose' if verbose else ''
}
system.run('{bin}/odm_cleanmesh -inputFile {infile} '
'-outputFile {outfile} '
'-removeIslands '
'-decimateMesh {max_vertex} {verbose} '.format(**cleanupArgs))
# Delete intermediate results
os.remove(outMeshDirty)
return outMesh
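A usage sketch of the two entry points (paths are hypothetical and an ODM SuperBuild environment is assumed, since both functions shell out to PoissonRecon and odm_cleanmesh). The parameter values mirror the defaults wired in by the meshing cell further below; threads=4 is an arbitrary example:

from opendm import mesh

# Full 3D mesh from a dense point cloud (screened Poisson + island removal/decimation).
mesh.screened_poisson_reconstruction('smvs_dense_point_cloud.ply',  # hypothetical input
                                     'odm_mesh.ply',
                                     depth=9, samples=1,
                                     maxVertexCount=100000,
                                     pointWeight=4, threads=4)

# 2.5D mesh: rasterize a DSM from the points first, then run the same reconstruction on it.
mesh.create_25dmesh('smvs_dense_point_cloud.ply', 'odm_25dmesh.ply',
                    dsm_resolution=0.05, depth=9, samples=1,
                    maxVertexCount=100000, max_workers=4)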

View file

@ -1,120 +0,0 @@
import log
import system
import dataset
import types
from scripts.opensfm import opensfm
# Define pipeline tasks
tasks_dict = {'1': 'opensfm',
'2': 'cmvs',
'3': 'pmvs',
'4': 'odm_meshing',
'5': 'mvs_texturing',
'6': 'odm_georeferencing',
'7': 'odm_dem',
'8': 'odm_orthophoto',
'9': 'zip_results'}
class ODMTaskManager(object):
"""docstring for ODMTaskManager"""
def __init__(self, odm_app):
self.odm_app = odm_app
self.initial_task_id = 0
self.current_task_id = 0
self.final_task_id = len(tasks_dict)
self.tasks = self.init_tasks(tasks_dict, self.odm_app)
def init_tasks(self, _tasks_dict, _odm_app):
# dict to store tasks objects
tasks = {}
# loop over tasks dict
for key in _tasks_dict:
# instantiate and append ODMTask
task_name = _tasks_dict[key]
tasks[key] = ODMTask(key, task_name)
# setup tasks
if task_name == 'resize':
# setup this task
command = resize
inputs = {'project_path': _odm_app.project_path,
'args': _odm_app.args,
'photos': _odm_app.photos}
elif task_name == 'opensfm':
# setup this task
command = opensfm
inputs = {'project_path': _odm_app.project_path,
'args': _odm_app.args,
'photos': _odm_app.photos}
elif task_name in ['cmvs', 'pmvs', 'odm_meshing', 'mvs_texturing', 'odm_georeferencing', 'odm_orthophoto', 'zip_results']:
# setup this task
command = None
inputs = {}
else:
log.ODM_ERROR('task_name %s is not valid' % task_name)
# setup task configuration
task = tasks[key]
task.command = command
task.inputs = inputs
return tasks
def run_tasks(self):
# curr_task = self.tasks['resize']
# self.tasks['resize']
for id in range(self.initial_task_id, self.final_task_id + 1):
# catch task with current id
task = self.tasks[str(id)]
# update task tracking
log.ODM_INFO('Running task %s: %s' % (task.id, task.name))
self.current_task_id = task.id
# run task
task.state = task.run()
if task.state == 2:
log.ODM_INFO('Succeeded task %s: %s - %s' % (task.id, task.name, system.now()))
else:
log.ODM_ERROR('Aborted task %s: %s' % (task.id, task.name))
class ODMTask(object):
"""docstring for ODMTask"""
def __init__(self, id, name):
# task definition
self.id = id
self.name = name
# task i/o
self.command = None
self.inputs = {}
# Current task state (0:waiting, 1:running, 2:succeeded, 3:failed)
# By default we set a task in waiting state
self.state = 0
# Launch task
def run(self):
# while doing something
self.state = 1
return self.launch_command()
def launch_command(self):
if self.command is None:
log.ODM_ERROR('Call method for task %s not defined' % self.name)
return 3 # failed
# run command
try:
succeed = self.command(**self.inputs)
return 2 if succeed else 3 # 2:succeed, 3:failed
except Exception, e:
log.ODM_ERROR(str(e))
return 3 # failed

View file

@ -61,7 +61,7 @@ class ODM_Photo:
metadata.read()
# loop over image tags
for key in metadata:
# try/catch tag value due to weird bug in pyexiv2
# ValueError: invalid literal for int() with base 10: ''
GPS = 'Exif.GPSInfo.GPS'
try:
@ -274,7 +274,7 @@ class ODM_GeoRef(object):
with open(json_file, 'w') as f:
f.write(pipeline)
# call pdal
system.run('{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in}'.format(**kwargs))
def utm_to_latlon(self, _file, _photo, idx):
@ -389,7 +389,7 @@ class ODM_GeoRef(object):
# Create a nested list for the transformation matrix
with open(_file) as f:
for line in f:
# Handle matrix formats that either
# have leading or trailing brackets or just plain numbers.
line = re.sub(r"[\[\],]", "", line).strip()
self.transform += [[float(i) for i in line.split()]]
@ -414,7 +414,7 @@ class ODM_Tree(object):
# whole reconstruction process.
self.dataset_raw = io.join_paths(self.root_path, 'images')
self.opensfm = io.join_paths(self.root_path, 'opensfm')
self.pmvs = io.join_paths(self.root_path, 'pmvs')
self.smvs = io.join_paths(self.root_path, 'smvs')
self.odm_meshing = io.join_paths(self.root_path, 'odm_meshing')
self.odm_texturing = io.join_paths(self.root_path, 'odm_texturing')
self.odm_25dtexturing = io.join_paths(self.root_path, 'odm_texturing_25d')
@ -439,12 +439,12 @@ class ODM_Tree(object):
self.opensfm_model = io.join_paths(self.opensfm, 'depthmaps/merged.ply')
self.opensfm_transformation = io.join_paths(self.opensfm, 'geocoords_transformation.txt')
# pmvs
self.pmvs_rec_path = io.join_paths(self.pmvs, 'recon0')
self.pmvs_bundle = io.join_paths(self.pmvs_rec_path, 'bundle.rd.out')
self.pmvs_visdat = io.join_paths(self.pmvs_rec_path, 'vis.dat')
self.pmvs_options = io.join_paths(self.pmvs_rec_path, 'pmvs_options.txt')
self.pmvs_model = io.join_paths(self.pmvs_rec_path, 'models/option-0000.ply')
# smvs
self.smvs_model = io.join_paths(self.smvs, 'smvs_dense_point_cloud.ply')
self.mve_path = io.join_paths(self.opensfm, 'mve')
self.mve_image_list = io.join_paths(self.mve_path, 'list.txt')
self.mve_bundle = io.join_paths(self.mve_path, 'bundle/bundle.out')
self.mve_views = io.join_paths(self.smvs, 'views')
# odm_meshing
self.odm_mesh = io.join_paths(self.odm_meshing, 'odm_mesh.ply')

2
run.py
View file

@ -33,7 +33,7 @@ if __name__ == '__main__':
+ args.project_path + "/odm_orthophoto "
+ args.project_path + "/odm_texturing "
+ args.project_path + "/opensfm "
+ args.project_path + "/pmvs")
+ args.project_path + "/smvs")
# create an instance of my App BlackBox
# internally configure all tasks

View file

@ -1,68 +0,0 @@
import ecto
from opendm import io
from opendm import log
from opendm import system
from opendm import context
class ODMCmvsCell(ecto.Cell):
def declare_params(self, params):
params.declare("max_images", 'The maximum number of images '
'per cluster', 500)
params.declare("cores", 'The maximum number of cores to use '
'in dense reconstruction.', context.num_cores)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
inputs.declare("args", "Struct with paths", [])
inputs.declare("reconstruction", "list of ODMReconstructions", [])
outputs.declare("reconstruction", "list of ODMReconstructions", [])
def process(self, inputs, outputs):
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running ODM CMVS Cell')
# get inputs
args = self.inputs.args
tree = self.inputs.tree
# check if we rerun cell or not
rerun_cell = (args.rerun is not None and
args.rerun == 'cmvs') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'cmvs' in args.rerun_from)
if not io.file_exists(tree.pmvs_bundle) or rerun_cell:
log.ODM_DEBUG('Writing CMVS vis in: %s' % tree.pmvs_bundle)
# copy bundle file to pmvs dir
from shutil import copyfile
copyfile(tree.opensfm_bundle,
tree.pmvs_bundle)
kwargs = {
'bin': context.cmvs_path,
'prefix': self.inputs.tree.pmvs_rec_path,
'max_images': self.params.max_images,
'cores': self.params.cores
}
# run cmvs
system.run('{bin} {prefix}/ {max_images} {cores}'.format(**kwargs))
else:
log.ODM_WARNING('Found a valid CMVS file in: %s' %
tree.pmvs_bundle)
outputs.reconstruction = inputs.reconstruction
if args.time:
system.benchmark(start_time, tree.benchmarking, 'CMVS')
log.ODM_INFO('Running ODM CMVS Cell - Finished')
return ecto.OK if args.end_with != 'cmvs' else ecto.QUIT

View file

@ -59,7 +59,7 @@ class ODMLoadDatasetCell(ecto.Cell):
# define paths and create working directories
system.mkdir_p(tree.odm_georeferencing)
if args.use_25dmesh: system.mkdir_p(tree.odm_25dgeoreferencing)
if not args.use_3dmesh: system.mkdir_p(tree.odm_25dgeoreferencing)
log.ODM_DEBUG('Loading dataset from: %s' % images_dir)

View file

@ -5,8 +5,6 @@ from opendm import io
from opendm import system
from opendm import context
import pmvs2nvmcams
class ODMMvsTexCell(ecto.Cell):
def declare_params(self, params):
params.declare("data_term", 'Data term: [area, gmi] default: gmi', "gmi")
@ -38,7 +36,7 @@ class ODMMvsTexCell(ecto.Cell):
# define paths and create working directories
system.mkdir_p(tree.odm_texturing)
if args.use_25dmesh: system.mkdir_p(tree.odm_25dtexturing)
if not args.use_3dmesh: system.mkdir_p(tree.odm_25dtexturing)
# check if we rerun cell or not
rerun_cell = (args.rerun is not None and
@ -50,23 +48,17 @@ class ODMMvsTexCell(ecto.Cell):
runs = [{
'out_dir': tree.odm_texturing,
'model': tree.odm_mesh,
'force_skip_vis_test': False
'nadir': False
}]
if args.fast_orthophoto:
if args.skip_3dmodel:
runs = []
if args.use_25dmesh:
if not args.use_3dmesh:
runs += [{
'out_dir': tree.odm_25dtexturing,
'model': tree.odm_25dmesh,
# We always skip the visibility test when using the 2.5D mesh
# because many faces end up being narrow, and almost perpendicular
# to the ground plane. The visibility test improperly classifies
# them as "not seen" since the test is done on a single triangle vertex,
# and while one vertex might be occluded, the other two might not.
'force_skip_vis_test': True
'nadir': True
}]
for r in runs:
@ -82,8 +74,9 @@ class ODMMvsTexCell(ecto.Cell):
skipLocalSeamLeveling = ""
skipHoleFilling = ""
keepUnseenFaces = ""
if (self.params.skip_vis_test or r['force_skip_vis_test']):
nadir = ""
if (self.params.skip_vis_test):
skipGeometricVisibilityTest = "--skip_geometric_visibility_test"
if (self.params.skip_glob_seam_leveling):
skipGlobalSeamLeveling = "--skip_global_seam_leveling"
@ -93,13 +86,13 @@ class ODMMvsTexCell(ecto.Cell):
skipHoleFilling = "--skip_hole_filling"
if (self.params.keep_unseen_faces):
keepUnseenFaces = "--keep_unseen_faces"
if (r['nadir']):
nadir = '--nadir_mode'
# mvstex definitions
kwargs = {
'bin': context.mvstex_path,
'out_dir': io.join_paths(r['out_dir'], "odm_textured_model"),
'pmvs_folder': tree.pmvs_rec_path,
'nvm_file': io.join_paths(tree.pmvs_rec_path, "nvmCams.nvm"),
'model': r['model'],
'dataTerm': self.params.data_term,
'outlierRemovalType': self.params.outlier_rem_type,
@ -108,20 +101,12 @@ class ODMMvsTexCell(ecto.Cell):
'skipLocalSeamLeveling': skipLocalSeamLeveling,
'skipHoleFilling': skipHoleFilling,
'keepUnseenFaces': keepUnseenFaces,
'toneMapping': self.params.tone_mapping
'toneMapping': self.params.tone_mapping,
'nadirMode': nadir,
'nadirWeight': 2 ** args.texturing_nadir_weight - 1,
'nvm_file': io.join_paths(tree.opensfm, "reconstruction.nvm")
}
if not args.use_pmvs:
kwargs['nvm_file'] = io.join_paths(tree.opensfm,
"reconstruction.nvm")
else:
log.ODM_DEBUG('Generating .nvm file from pmvs output: %s'
% '{nvm_file}'.format(**kwargs))
# Create .nvm camera file.
pmvs2nvmcams.run('{pmvs_folder}'.format(**kwargs),
'{nvm_file}'.format(**kwargs))
# Make sure tmp directory is empty
mvs_tmp_dir = os.path.join(r['out_dir'], 'tmp')
if io.dir_exists(mvs_tmp_dir):
@ -136,7 +121,9 @@ class ODMMvsTexCell(ecto.Cell):
'{skipGlobalSeamLeveling} '
'{skipLocalSeamLeveling} '
'{skipHoleFilling} '
'{keepUnseenFaces}'.format(**kwargs))
'{keepUnseenFaces} '
'{nadirMode} '
'-n {nadirWeight}'.format(**kwargs))
else:
log.ODM_WARNING('Found a valid ODM Texture file in: %s'
% odm_textured_model_obj)
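The -n value handed to the texturing binary grows exponentially with --texturing-nadir-weight; a quick sketch of the 2**n - 1 mapping used above (the default weight of 16 maps to 65535):

for n in (0, 8, 16, 24, 32):       # valid --texturing-nadir-weight range is 0..32
    print('%d -> %d' % (n, 2 ** n - 1))
# 0 -> 0, 8 -> 255, 16 -> 65535, 24 -> 16777215, 32 -> 4294967295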

View file

@ -8,9 +8,8 @@ from opendm import system
from dataset import ODMLoadDatasetCell
from run_opensfm import ODMOpenSfMCell
from smvs import ODMSmvsCell
from odm_slam import ODMSlamCell
from pmvs import ODMPmvsCell
from cmvs import ODMCmvsCell
from odm_meshing import ODMeshingCell
from mvstex import ODMMvsTexCell
from odm_georeferencing import ODMGeoreferencingCell
@ -44,23 +43,24 @@ class ODMApp(ecto.BlackBox):
'opensfm': ODMOpenSfMCell(use_exif_size=False,
feature_process_size=p.args.resize_to,
feature_min_frames=p.args.min_num_features,
processes=p.args.opensfm_processes,
processes=p.args.max_concurrency,
matching_gps_neighbors=p.args.matcher_neighbors,
matching_gps_distance=p.args.matcher_distance,
fixed_camera_params=p.args.use_fixed_camera_params,
hybrid_bundle_adjustment=p.args.use_hybrid_bundle_adjustment),
'slam': ODMSlamCell(),
'cmvs': ODMCmvsCell(max_images=p.args.cmvs_maxImages),
'pmvs': ODMPmvsCell(level=p.args.pmvs_level,
csize=p.args.pmvs_csize,
thresh=p.args.pmvs_threshold,
wsize=p.args.pmvs_wsize,
min_imgs=p.args.pmvs_min_images,
cores=p.args.pmvs_num_cores),
'smvs': ODMSmvsCell(alpha=p.args.smvs_alpha,
max_pixels=p.args.depthmap_resolution*p.args.depthmap_resolution,
threads=p.args.max_concurrency,
output_scale=p.args.smvs_output_scale,
shading=p.args.smvs_enable_shading,
gamma_srgb=p.args.smvs_gamma_srgb,
verbose=p.args.verbose),
'meshing': ODMeshingCell(max_vertex=p.args.mesh_size,
oct_tree=p.args.mesh_octree_depth,
samples=p.args.mesh_samples,
solver=p.args.mesh_solver_divide,
point_weight=p.args.mesh_point_weight,
max_concurrency=p.args.max_concurrency,
verbose=p.args.verbose),
'texturing': ODMMvsTexCell(data_term=p.args.texturing_data_term,
outlier_rem_type=p.args.texturing_outlier_removal_type,
@ -73,13 +73,15 @@ class ODMApp(ecto.BlackBox):
'georeferencing': ODMGeoreferencingCell(gcp_file=p.args.gcp,
use_exif=p.args.use_exif,
verbose=p.args.verbose),
'dem': ODMDEMCell(verbose=p.args.verbose),
'dem': ODMDEMCell(max_concurrency=p.args.max_concurrency,
verbose=p.args.verbose),
'orthophoto': ODMOrthoPhotoCell(resolution=p.args.orthophoto_resolution,
t_srs=p.args.orthophoto_target_srs,
no_tiled=p.args.orthophoto_no_tiled,
compress=p.args.orthophoto_compression,
bigtiff=p.args.orthophoto_bigtiff,
build_overviews=p.args.build_overviews,
max_concurrency=p.args.max_concurrency,
verbose=p.args.verbose)
}
@ -116,26 +118,22 @@ class ODMApp(ecto.BlackBox):
self.args[:] >> self.opensfm['args'],
self.dataset['reconstruction'] >> self.opensfm['reconstruction']]
if not p.args.use_pmvs:
if p.args.use_opensfm_dense or p.args.fast_orthophoto:
# create odm mesh from opensfm point cloud
connections += [self.tree[:] >> self.meshing['tree'],
self.args[:] >> self.meshing['args'],
self.opensfm['reconstruction'] >> self.meshing['reconstruction']]
else:
# run cmvs
connections += [self.tree[:] >> self.cmvs['tree'],
self.args[:] >> self.cmvs['args'],
self.opensfm['reconstruction'] >> self.cmvs['reconstruction']]
# run smvs
# run pmvs
connections += [self.tree[:] >> self.pmvs['tree'],
self.args[:] >> self.pmvs['args'],
self.cmvs['reconstruction'] >> self.pmvs['reconstruction']]
connections += [self.tree[:] >> self.smvs['tree'],
self.args[:] >> self.smvs['args'],
self.opensfm['reconstruction'] >> self.smvs['reconstruction']]
# create odm mesh from pmvs point cloud
# create odm mesh from smvs point cloud
connections += [self.tree[:] >> self.meshing['tree'],
self.args[:] >> self.meshing['args'],
self.pmvs['reconstruction'] >> self.meshing['reconstruction']]
self.smvs['reconstruction'] >> self.meshing['reconstruction']]
# create odm texture
connections += [self.tree[:] >> self.texturing['tree'],
@ -166,20 +164,14 @@ class ODMApp(ecto.BlackBox):
connections += [self.tree[:] >> self.slam['tree'],
self.args[:] >> self.slam['args']]
# run cmvs
connections += [self.tree[:] >> self.cmvs['tree'],
self.args[:] >> self.cmvs['args'],
self.slam['reconstruction'] >> self.cmvs['reconstruction']]
# run pmvs
connections += [self.tree[:] >> self.pmvs['tree'],
self.args[:] >> self.pmvs['args'],
self.cmvs['reconstruction'] >> self.pmvs['reconstruction']]
connections += [self.tree[:] >> self.smvs['tree'],
self.args[:] >> self.smvs['args'],
self.slam['reconstruction'] >> self.smvs['reconstruction']]
# create odm mesh
connections += [self.tree[:] >> self.meshing['tree'],
self.args[:] >> self.meshing['args'],
self.pmvs['reconstruction'] >> self.meshing['reconstruction']]
self.smvs['reconstruction'] >> self.meshing['reconstruction']]
# create odm texture
connections += [self.tree[:] >> self.texturing['tree'],

View file

@ -6,6 +6,7 @@ from opendm import log
from opendm import system
from opendm import context
from opendm import types
from opendm import gsd
from opendm.dem import commands
from opendm.cropper import Cropper
@ -13,6 +14,7 @@ from opendm.cropper import Cropper
class ODMDEMCell(ecto.Cell):
def declare_params(self, params):
params.declare("verbose", 'print additional messages to console', False)
params.declare("max_concurrency", "Number of threads", context.num_cores)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
@ -44,12 +46,12 @@ class ODMDEMCell(ecto.Cell):
# Setup terrain parameters
terrain_params_map = {
'flatnonforest': (1, 3),
'flatforest': (1, 2),
'complexnonforest': (5, 2),
'complexforest': (10, 2)
}
terrain_params = terrain_params_map[args.dem_terrain_type.lower()]
slope, cellsize = terrain_params
# define paths and create working directories
@ -62,7 +64,7 @@ class ODMDEMCell(ecto.Cell):
if not io.file_exists(pc_classify_marker) or rerun_cell:
log.ODM_INFO("Classifying {} using {}".format(tree.odm_georeferencing_model_laz, args.pc_classify))
commands.classify(tree.odm_georeferencing_model_laz,
args.pc_classify == "smrf",
slope,
cellsize,
@ -70,7 +72,7 @@ class ODMDEMCell(ecto.Cell):
initialDistance=args.dem_initial_distance,
verbose=args.verbose
)
with open(pc_classify_marker, 'w') as f:
f.write('Classify: {}\n'.format(args.pc_classify))
f.write('Slope: {}\n'.format(slope))
f.write('Cellsize: {}\n'.format(cellsize))
@ -87,25 +89,27 @@ class ODMDEMCell(ecto.Cell):
rerun_cell:
products = []
if args.dsm: products.append('dsm')
if args.dtm: products.append('dtm')
radius_steps = [args.dem_resolution]
resolution = gsd.cap_resolution(args.dem_resolution, tree.opensfm_reconstruction, ignore_gsd=args.ignore_gsd)
radius_steps = [(resolution / 100.0) / 2.0]
for _ in range(args.dem_gapfill_steps - 1):
radius_steps.append(radius_steps[-1] * 3) # 3 is arbitrary, maybe there's a better value?
radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary, maybe there's a better value?
for product in products:
commands.create_dems(
[tree.odm_georeferencing_model_laz],
product,
radius=map(str, radius_steps),
gapfill=True,
outdir=odm_dem_root,
resolution=args.dem_resolution,
resolution=resolution / 100.0,
maxsd=args.dem_maxsd,
maxangle=args.dem_maxangle,
decimation=args.dem_decimation,
verbose=args.verbose
verbose=args.verbose,
max_workers=args.max_concurrency
)
if args.crop > 0:
@ -116,7 +120,7 @@ class ODMDEMCell(ecto.Cell):
'COMPRESS': 'LZW',
'BLOCKXSIZE': 512,
'BLOCKYSIZE': 512,
'NUM_THREADS': 'ALL_CPUS'
'NUM_THREADS': self.params.max_concurrency
})
else:
log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
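With the new defaults (5 cm output resolution, 3 gap-fill steps) the search radii work out as below. This sketch omits the GSD capping step, which can only raise the resolution value:

dem_resolution = 5.0   # cm / pixel (config default)
gapfill_steps = 3      # config default

radius_steps = [(dem_resolution / 100.0) / 2.0]   # 0.025 m
for _ in range(gapfill_steps - 1):
    radius_steps.append(radius_steps[-1] * 2)     # double the radius per step
print(radius_steps)    # [0.025, 0.05, 0.1]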

View file

@ -39,6 +39,7 @@ class ODMGeoreferencingCell(ecto.Cell):
reconstruction = inputs.reconstruction
gcpfile = tree.odm_georeferencing_gcp
doPointCloudGeo = True
transformPointCloud = True
verbose = '-verbose' if self.params.verbose else ''
# check if we rerun cell or not
@ -54,10 +55,10 @@ class ODMGeoreferencingCell(ecto.Cell):
'model': os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)
}]
if args.fast_orthophoto:
if args.skip_3dmodel:
runs = []
if args.use_25dmesh:
if not args.use_3dmesh:
runs += [{
'georeferencing_dir': tree.odm_25dgeoreferencing,
'texturing_dir': tree.odm_25dtexturing,
@ -92,14 +93,19 @@ class ODMGeoreferencingCell(ecto.Cell):
'verbose': verbose
}
if not args.use_pmvs:
if args.fast_orthophoto:
kwargs['pc'] = os.path.join(tree.opensfm, 'reconstruction.ply')
else:
kwargs['pc'] = tree.opensfm_model
else:
kwargs['pc'] = tree.pmvs_model
if args.fast_orthophoto:
kwargs['pc'] = os.path.join(tree.opensfm, 'reconstruction.ply')
elif args.use_opensfm_dense:
kwargs['pc'] = tree.opensfm_model
else:
kwargs['pc'] = tree.smvs_model
if transformPointCloud:
kwargs['pc_params'] = '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo}'.format(**kwargs)
else:
kwargs['pc_params'] = ''
# Check to see if the GCP file exists
if not self.params.use_exif and (self.params.gcp_file or tree.odm_georeferencing_gcp):
@ -107,7 +113,7 @@ class ODMGeoreferencingCell(ecto.Cell):
try:
system.run('{bin}/odm_georef -bundleFile {bundle} -imagesPath {imgs} -imagesListPath {imgs_list} '
'-inputFile {model} -outputFile {model_geo} '
'-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} '
'{pc_params} {verbose} '
'-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys} -gcpFile {gcp} '
'-outputCoordFile {coords}'.format(**kwargs))
except Exception:
@ -117,13 +123,13 @@ class ODMGeoreferencingCell(ecto.Cell):
log.ODM_INFO('Running georeferencing with OpenSfM transformation matrix')
system.run('{bin}/odm_georef -bundleFile {bundle} -inputTransformFile {input_trans_file} -inputCoordFile {coords} '
'-inputFile {model} -outputFile {model_geo} '
'-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} '
'{pc_params} {verbose} '
'-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys}'.format(**kwargs))
elif io.file_exists(tree.odm_georeferencing_coords):
log.ODM_INFO('Running georeferencing with generated coords file.')
system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} '
'-inputFile {model} -outputFile {model_geo} '
'-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} '
'{pc_params} {verbose} '
'-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys}'.format(**kwargs))
else:
log.ODM_WARNING('Georeferencing failed. Make sure your '
@ -171,12 +177,15 @@ class ODMGeoreferencingCell(ecto.Cell):
if args.crop > 0:
log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')
cropper.create_bounds_shapefile(tree.odm_georeferencing_model_laz, args.crop)
cropper.create_bounds_shapefile(tree.odm_georeferencing_model_laz, args.crop,
decimation_step=40 if args.fast_orthophoto or args.use_opensfm_dense else 90,
outlier_radius=20 if args.fast_orthophoto else 2)
# Do not execute a second time, since
# We might be doing georeferencing for
# multiple models (3D, 2.5D, ...)
doPointCloudGeo = False
transformPointCloud = False
else:
log.ODM_WARNING('Found a valid georeferenced model in: %s'
% odm_georeferencing_model_ply_geo)

View file

@ -4,6 +4,8 @@ from opendm import log
from opendm import io
from opendm import system
from opendm import context
from opendm import mesh
from opendm import gsd
class ODMeshingCell(ecto.Cell):
@ -15,10 +17,8 @@ class ODMeshingCell(ecto.Cell):
'values are 8-12', 9)
params.declare("samples", 'Number of points per octree node, recommended '
'value: 1.0', 1)
params.declare("solver", 'Oct-tree depth at which the Laplacian equation '
'is solved in the surface reconstruction step. '
'Increasing this value increases computation '
'times slightly but helps reduce memory usage.', 9)
params.declare("point_weight", "specifies the importance that interpolation of the point samples is given in the formulation of the screened Poisson equation.", 4)
params.declare("max_concurrency", 'max threads', context.num_cores)
params.declare("verbose", 'print additional messages to console', False)
def declare_io(self, params, inputs, outputs):
@ -38,7 +38,6 @@ class ODMeshingCell(ecto.Cell):
args = inputs.args
tree = inputs.tree
reconstruction = inputs.reconstruction
verbose = '-verbose' if self.params.verbose else ''
# define paths and create working directories
system.mkdir_p(tree.odm_meshing)
@ -50,60 +49,53 @@ class ODMeshingCell(ecto.Cell):
(args.rerun_from is not None and
'odm_meshing' in args.rerun_from)
infile = tree.opensfm_model
if args.use_pmvs:
infile = tree.pmvs_model
elif args.fast_orthophoto:
infile = tree.smvs_model
if args.fast_orthophoto:
infile = os.path.join(tree.opensfm, 'reconstruction.ply')
elif args.use_opensfm_dense:
infile = tree.opensfm_model
# Do not create full 3D model with fast_orthophoto
if not args.fast_orthophoto:
# Create full 3D model unless --skip-3dmodel is set
if not args.skip_3dmodel:
if not io.file_exists(tree.odm_mesh) or rerun_cell:
log.ODM_DEBUG('Writing ODM Mesh file in: %s' % tree.odm_mesh)
kwargs = {
'bin': context.odm_modules_path,
'outfile': tree.odm_mesh,
'infile': infile,
'log': tree.odm_meshing_log,
'max_vertex': self.params.max_vertex,
'oct_tree': self.params.oct_tree,
'samples': self.params.samples,
'solver': self.params.solver,
'verbose': verbose
}
mesh.screened_poisson_reconstruction(infile,
tree.odm_mesh,
depth=self.params.oct_tree,
samples=self.params.samples,
maxVertexCount=self.params.max_vertex,
pointWeight=self.params.point_weight,
threads=self.params.max_concurrency,
verbose=self.params.verbose)
# run meshing binary
system.run('{bin}/odm_meshing -inputFile {infile} '
'-outputFile {outfile} -logFile {log} '
'-maxVertexCount {max_vertex} -octreeDepth {oct_tree} {verbose} '
'-samplesPerNode {samples} -solverDivide {solver}'.format(**kwargs))
else:
log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
tree.odm_mesh)
# Do we need to generate a 2.5D mesh also?
# This is always set if fast_orthophoto is set
if args.use_25dmesh:
# Always generate a 2.5D mesh
# unless --use-3dmesh is set.
if not args.use_3dmesh:
if not io.file_exists(tree.odm_25dmesh) or rerun_cell:
log.ODM_DEBUG('Writing ODM 2.5D Mesh file in: %s' % tree.odm_25dmesh)
dsm_resolution = gsd.cap_resolution(args.orthophoto_resolution, tree.opensfm_reconstruction, ignore_gsd=args.ignore_gsd) / 100.0
kwargs = {
'bin': context.odm_modules_path,
'outfile': tree.odm_25dmesh,
'infile': infile,
'log': tree.odm_25dmeshing_log,
'verbose': verbose,
'max_vertex': self.params.max_vertex,
'neighbors': args.mesh_neighbors,
'resolution': args.mesh_resolution
}
# Create reference DSM at half ortho resolution
dsm_resolution *= 2
# run 2.5D meshing binary
system.run('{bin}/odm_25dmeshing -inputFile {infile} '
'-outputFile {outfile} -logFile {log} '
'-maxVertexCount {max_vertex} -neighbors {neighbors} '
'-resolution {resolution} {verbose}'.format(**kwargs))
# Sparse point clouds benefit from using
# a larger resolution value (more radius interpolation, fewer holes)
if args.fast_orthophoto:
dsm_resolution *= 2
mesh.create_25dmesh(infile, tree.odm_25dmesh,
dsm_resolution=dsm_resolution,
depth=self.params.oct_tree,
maxVertexCount=self.params.max_vertex,
samples=self.params.samples,
verbose=self.params.verbose,
max_workers=args.max_concurrency)
else:
log.ODM_WARNING('Found a valid ODM 2.5D Mesh file in: %s' %
tree.odm_25dmesh)
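For the 2.5D mesh, the reference DSM resolution is derived from the orthophoto resolution; a worked sketch with the 5 cm default (GSD capping omitted):

orthophoto_resolution = 5.0                      # cm / pixel (config default)
fast_orthophoto = False

dsm_resolution = orthophoto_resolution / 100.0   # 0.05 m / pixel
dsm_resolution *= 2                              # reference DSM at half the ortho resolution
if fast_orthophoto:
    dsm_resolution *= 2                          # sparse clouds benefit from a larger radius
print(dsm_resolution)                            # 0.1 (0.2 with --fast-orthophoto)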

View file

@ -6,18 +6,20 @@ from opendm import log
from opendm import system
from opendm import context
from opendm import types
from opendm import gsd
from opendm.cropper import Cropper
class ODMOrthoPhotoCell(ecto.Cell):
def declare_params(self, params):
params.declare("resolution", 'Orthophoto ground resolution in pixels/meter', 20)
params.declare("resolution", 'Orthophoto resolution in cm / pixel', 5)
params.declare("t_srs", 'Target SRS', None)
params.declare("no_tiled", 'Do not tile tiff', False)
params.declare("compress", 'Compression type', 'DEFLATE')
params.declare("bigtiff", 'Make BigTIFF orthophoto', 'IF_SAFER')
params.declare("build_overviews", 'Build overviews', False)
params.declare("verbose", 'print additional messages to console', False)
params.declare("max_concurrency", "number of threads", context.num_cores)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
@ -55,7 +57,7 @@ class ODMOrthoPhotoCell(ecto.Cell):
'log': tree.odm_orthophoto_log,
'ortho': tree.odm_orthophoto_file,
'corners': tree.odm_orthophoto_corners,
'res': self.params.resolution,
'res': 1.0 / (gsd.cap_resolution(self.params.resolution, tree.opensfm_reconstruction, ignore_gsd=args.ignore_gsd) / 100.0),
'verbose': verbose
}
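The odm_orthophoto binary still expects its resolution in pixels per meter, so the cm / pixel setting is inverted; a quick sketch of the conversion behind 'res' above (GSD capping omitted):

orthophoto_resolution = 5.0                     # cm / pixel
res = 1.0 / (orthophoto_resolution / 100.0)
print(res)                                      # 20.0 pixels / meter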
@ -76,15 +78,15 @@ class ODMOrthoPhotoCell(ecto.Cell):
if georef:
if args.use_25dmesh:
kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_georeferencing_model_obj_geo)
else:
if args.use_3dmesh:
kwargs['model_geo'] = os.path.join(tree.odm_texturing, tree.odm_georeferencing_model_obj_geo)
else:
if args.use_25dmesh:
kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj)
else:
kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_georeferencing_model_obj_geo)
else:
if args.use_3dmesh:
kwargs['model_geo'] = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)
else:
kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj)
# run odm_orthophoto
system.run('{bin}/odm_orthophoto -inputFile {model_geo} '
@ -125,7 +127,8 @@ class ODMOrthoPhotoCell(ecto.Cell):
'png': tree.odm_orthophoto_file,
'tiff': tree.odm_orthophoto_tif,
'log': tree.odm_orthophoto_tif_log,
'max_memory': max(5, (100 - virtual_memory().percent) / 2)
'max_memory': max(5, (100 - virtual_memory().percent) / 2),
'threads': self.params.max_concurrency
}
system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
@ -135,7 +138,7 @@ class ODMOrthoPhotoCell(ecto.Cell):
'{predictor} '
'-co BLOCKXSIZE=512 '
'-co BLOCKYSIZE=512 '
'-co NUM_THREADS=ALL_CPUS '
'-co NUM_THREADS={threads} '
'-a_srs \"{proj}\" '
'--config GDAL_CACHEMAX {max_memory}% '
'{png} {tiff} > {log}'.format(**kwargs))
@ -149,7 +152,7 @@ class ODMOrthoPhotoCell(ecto.Cell):
'BIGTIFF': self.params.bigtiff,
'BLOCKXSIZE': 512,
'BLOCKYSIZE': 512,
'NUM_THREADS': 'ALL_CPUS'
'NUM_THREADS': self.params.max_concurrency
})
if self.params.build_overviews:

View file

@ -39,7 +39,6 @@ class ODMSlamCell(ecto.Cell):
# create working directories
system.mkdir_p(tree.opensfm)
system.mkdir_p(tree.pmvs)
vocabulary = os.path.join(context.orb_slam2_path,
'Vocabulary/ORBvoc.txt')
@ -96,16 +95,5 @@ class ODMSlamCell(ecto.Cell):
'Found a valid Bundler file in: {}'.format(
tree.opensfm_reconstruction))
# check if reconstruction was exported to pmvs before
if not io.file_exists(tree.pmvs_visdat) or rerun_cell:
# run PMVS converter
system.run(
'PYTHONPATH={} {}/bin/export_pmvs {} --output {}'.format(
context.pyopencv_path, context.opensfm_path, tree.opensfm,
tree.pmvs))
else:
log.ODM_WARNING('Found a valid CMVS file in: {}'.format(
tree.pmvs_visdat))
log.ODM_INFO('Running ODM Slam Cell - Finished')
return ecto.OK if args.end_with != 'odm_slam' else ecto.QUIT

View file

@ -1,84 +0,0 @@
import ecto
from opendm import io
from opendm import log
from opendm import system
from opendm import context
class ODMPmvsCell(ecto.Cell):
def declare_params(self, params):
params.declare("level", 'The level in the image pyramid that is used '
'for the computation', 1)
params.declare("csize", 'Cell size controls the density of reconstructions', 2)
params.declare("thresh", 'A patch reconstruction is accepted as a success '
'and kept, if its associated photometric consistency '
'measure is above this threshold.', 0.7)
params.declare("wsize", 'pmvs samples wsize x wsize pixel colors from '
'each image to compute photometric consistency '
'score. For example, when wsize=7, 7x7=49 pixel '
'colors are sampled in each image. Increasing the '
'value leads to more stable reconstructions, but '
'the program becomes slower.', 7)
params.declare("min_imgs", 'Each 3D point must be visible in at least '
'minImageNum images for being reconstructed. 3 is '
'suggested in general.', 3)
params.declare("cores", 'The maximum number of cores to use in dense '
' reconstruction.', context.num_cores)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
inputs.declare("args", "The application arguments.", {})
inputs.declare("reconstruction", "list of ODMReconstructions", [])
outputs.declare("reconstruction", "list of ODMReconstructions", [])
def process(self, inputs, outputs):
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running ODM PMVS Cell')
# get inputs
args = self.inputs.args
tree = self.inputs.tree
# check if we rerun cell or not
rerun_cell = (args.rerun is not None and
args.rerun == 'pmvs') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'pmvs' in args.rerun_from)
if not io.file_exists(tree.pmvs_model) or rerun_cell:
log.ODM_DEBUG('Creating dense pointcloud in: %s' % tree.pmvs_model)
kwargs = {
'bin': context.cmvs_opts_path,
'prefix': tree.pmvs_rec_path,
'level': self.params.level,
'csize': self.params.csize,
'thresh': self.params.thresh,
'wsize': self.params.wsize,
'min_imgs': self.params.min_imgs,
'cores': self.params.cores
}
# generate pmvs2 options
system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} '
'{min_imgs} {cores}'.format(**kwargs))
# run pmvs2
system.run('%s %s/ option-0000' %
(context.pmvs2_path, tree.pmvs_rec_path))
else:
log.ODM_WARNING('Found a valid PMVS file in %s' % tree.pmvs_model)
outputs.reconstruction = inputs.reconstruction
if args.time:
system.benchmark(start_time, tree.benchmarking, 'PMVS')
log.ODM_INFO('Running ODM PMVS Cell - Finished')
return ecto.OK if args.end_with != 'pmvs' else ecto.QUIT
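For reference, with the defaults declared above (level 1, cell size 2, threshold 0.7, window size 7, at least 3 images per point) the two commands assembled by this cell expand roughly as follows; the binary and project paths below are placeholders, not actual ODM locations:

    # Illustrative expansion of the option-generation call (paths are made up).
    kwargs = {
        'bin': '/path/to/cmvs/genOption',   # assumed option-generator binary
        'prefix': '/project/pmvs/recon0',
        'level': 1, 'csize': 2, 'thresh': 0.7,
        'wsize': 7, 'min_imgs': 3, 'cores': 8,
    }
    print('{bin} {prefix}/ {level} {csize} {thresh} {wsize} '
          '{min_imgs} {cores}'.format(**kwargs))
    # -> /path/to/cmvs/genOption /project/pmvs/recon0/ 1 2 0.7 7 3 8
    # followed by: pmvs2 /project/pmvs/recon0/ option-0000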

View file

@ -1,144 +0,0 @@
import os
import numpy as np
from opendm import log
# Go from QR-factorization to corresponding RQ-factorization.
def rq(A):
Q,R = np.linalg.qr(np.flipud(A).T)
R = np.flipud(R.T)
Q = Q.T
return R[:,::-1],Q[::-1,:]
# Create a unit quaternion from rotation matrix.
def rot2quat(R):
# Float epsilon (use square root to stay well within the stable region).
eps = np.sqrt(np.finfo(float).eps)
# If the determinant is not 1, it's not a rotation matrix
if np.abs(np.linalg.det(R) - 1.0) > eps:
log.ODM_ERROR('Matrix passed to rot2quat was not a rotation matrix, det != 1.0')
tr = np.trace(R)
quat = np.zeros((1,4))
# Is the trace big enough to be computationally stable?
if tr > eps:
S = 0.5 / np.sqrt(tr + 1.0)
quat[0,0] = 0.25 / S
quat[0,1] = (R[2,1] - R[1,2]) * S
quat[0,2] = (R[0,2] - R[2,0]) * S
quat[0,3] = (R[1,0] - R[0,1]) * S
else: # It's not, use the largest diagonal.
if R[0,0] > R[1,1] and R[0,0] > R[2,2]:
S = np.sqrt(1.0 + R[0,0] - R[1,1] - R[2,2]) * 2.0
quat[0,0] = (R[2,1] - R[1,2]) / S
quat[0,1] = 0.25 * S
quat[0,2] = (R[0,1] + R[1,0]) / S
quat[0,3] = (R[0,2] + R[2,0]) / S
elif R[1,1] > R[2,2]:
S = np.sqrt(1.0 - R[0,0] + R[1,1] - R[2,2]) * 2.0
quat[0,0] = (R[0,2] - R[2,0]) / S
quat[0,1] = (R[0,1] + R[1,0]) / S
quat[0,2] = 0.25 * S
quat[0,3] = (R[1,2] + R[2,1]) / S
else:
S = np.sqrt(1.0 - R[0,0] - R[1,1] + R[2,2]) * 2.0
quat[0,0] = (R[1,0] - R[0,1]) / S
quat[0,1] = (R[0,2] + R[2,0]) / S
quat[0,2] = (R[1,2] + R[2,1]) / S
quat[0,3] = 0.25 * S
return quat
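As a quick sanity check of the conversion above (a sketch that assumes rot2quat from this module is in scope), the identity rotation should map to the unit quaternion:

    import numpy as np

    R = np.eye(3)
    q = rot2quat(R)   # trace = 3, so the numerically stable branch is taken
    assert np.allclose(q, [[1.0, 0.0, 0.0, 0.0]])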
# Decompose a projection matrix into parts
# (Intrinsic projection, Rotation, Camera position)
def decomposeProjection(projectionMatrix):
# Check input:
if projectionMatrix.shape != (3,4):
log.ODM_ERROR('Unable to decompose projection matrix, shape != (3,4)')
RQ = rq(projectionMatrix[:,:3])
# Fix sign, since we know K is upper triangular and has a positive diagonal.
signMat = np.diag(np.diag(np.sign(RQ[0])))
K = signMat*RQ[0]
R = signMat*RQ[1]
# Calculate camera position from translation vector.
t = np.linalg.inv(-1.0*projectionMatrix[:,:3])*projectionMatrix[:,3]
return K, R, t
# Parses pmvs contour file.
def parseContourFile(filePath):
with open(filePath, 'r') as contourFile:
if (contourFile.readline().strip() != "CONTOUR"):
return np.array([])
else:
pMatData = np.loadtxt(contourFile, float, '#', None, None, 0)
if pMatData.shape == (3,4):
return pMatData
return np.array([])
# Creates a .nvm camera file in the pmvs folder.
def run(pmvsFolder, outputFile):
projectionFolder = pmvsFolder + "/txt"
imageFolder = pmvsFolder + "/visualize"
pMatrices = []
imageFileNames = []
# for all files in the visualize folder:
for imageFileName in os.listdir(imageFolder):
fileNameNoExt = os.path.splitext(imageFileName)[0]
# look for corresponding projection matrix txt file
projectionFilePath = os.path.join(projectionFolder, fileNameNoExt)
projectionFilePath += ".txt"
if os.path.isfile(projectionFilePath):
pMatData = parseContourFile(projectionFilePath)
if pMatData.size == 0:
log.ODM_WARNING('Unable to parse contour file, skipping: %s'
% projectionFilePath)
else:
pMatrices.append(np.matrix(pMatData))
imageFileNames.append(imageFileName)
# Decompose projection matrices
focals = []
rotations = []
translations = []
for projection in pMatrices:
KRt = decomposeProjection(projection)
focals.append(KRt[0][0,0])
rotations.append(rot2quat(KRt[1]))
translations.append(KRt[2])
# Create .nvm file
with open (outputFile, 'w') as nvmFile:
nvmFile.write("NVM_V3\n\n")
nvmFile.write('%d' % len(rotations) + "\n")
for idx, imageFileName in enumerate(imageFileNames):
nvmFile.write(os.path.join("visualize", imageFileName))
nvmFile.write(" " + '%f' % focals[idx])
nvmFile.write(" " + '%f' % rotations[idx][0,0] +
" " + '%f' % rotations[idx][0,1] +
" " + '%f' % rotations[idx][0,2] +
" " + '%f' % rotations[idx][0,3])
nvmFile.write(" " + '%f' % translations[idx][0] +
" " + '%f' % translations[idx][1] +
" " + '%f' % translations[idx][2])
nvmFile.write(" 0 0\n")
nvmFile.write("0\n\n")
nvmFile.write("0\n\n")
nvmFile.write("0")

View file

@ -4,7 +4,7 @@ from opendm import log
from opendm import io
from opendm import system
from opendm import context
from opendm import gsd
class ODMOpenSfMCell(ecto.Cell):
def declare_params(self, params):
@ -40,9 +40,8 @@ class ODMOpenSfMCell(ecto.Cell):
log.ODM_ERROR('Not enough photos in photos array to start OpenSfM')
return ecto.QUIT
# create working directories
# create working directories
system.mkdir_p(tree.opensfm)
system.mkdir_p(tree.pmvs)
# check if we rerun cell or not
rerun_cell = (args.rerun is not None and
@ -51,10 +50,10 @@ class ODMOpenSfMCell(ecto.Cell):
(args.rerun_from is not None and
'opensfm' in args.rerun_from)
if not args.use_pmvs:
if args.fast_orthophoto:
output_file = io.join_paths(tree.opensfm, 'reconstruction.ply')
elif args.use_opensfm_dense:
output_file = tree.opensfm_model
if args.fast_orthophoto:
output_file = io.join_paths(tree.opensfm, 'reconstruction.ply')
else:
output_file = tree.opensfm_reconstruction
@ -77,7 +76,7 @@ class ODMOpenSfMCell(ecto.Cell):
"processes: %s" % self.params.processes,
"matching_gps_neighbors: %s" % self.params.matching_gps_neighbors,
"depthmap_method: %s" % args.opensfm_depthmap_method,
"depthmap_resolution: %s" % args.opensfm_depthmap_resolution,
"depthmap_resolution: %s" % args.depthmap_resolution,
"depthmap_min_patch_sd: %s" % args.opensfm_depthmap_min_patch_sd,
"depthmap_min_consistent_views: %s" % args.opensfm_depthmap_min_consistent_views,
"optimize_camera_parameters: %s" % ('no' if self.params.fixed_camera_params else 'yes')
@ -136,25 +135,37 @@ class ODMOpenSfMCell(ecto.Cell):
log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
tree.opensfm_reconstruction)
if not args.use_pmvs:
if not io.file_exists(tree.opensfm_reconstruction_nvm) or rerun_cell:
system.run('PYTHONPATH=%s %s/bin/opensfm export_visualsfm %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
else:
log.ODM_WARNING('Found a valid OpenSfM NVM reconstruction file in: %s' %
tree.opensfm_reconstruction_nvm)
# Always export VisualSFM's reconstruction and undistort images
# as we'll use these for texturing (after GSD estimation and resizing)
if not args.ignore_gsd:
image_scale = gsd.image_scale_factor(args.orthophoto_resolution, tree.opensfm_reconstruction)
else:
image_scale = 1.0
if not io.file_exists(tree.opensfm_reconstruction_nvm) or rerun_cell:
system.run('PYTHONPATH=%s %s/bin/opensfm export_visualsfm --image_extension png --scale_focal %s %s' %
(context.pyopencv_path, context.opensfm_path, image_scale, tree.opensfm))
else:
log.ODM_WARNING('Found a valid OpenSfM NVM reconstruction file in: %s' %
tree.opensfm_reconstruction_nvm)
# These will be used for texturing
system.run('PYTHONPATH=%s %s/bin/opensfm undistort --image_format png --image_scale %s %s' %
(context.pyopencv_path, context.opensfm_path, image_scale, tree.opensfm))
# Skip dense reconstruction if necessary and export
# sparse reconstruction instead
if args.fast_orthophoto:
system.run('PYTHONPATH=%s %s/bin/opensfm export_ply --no-cameras %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
elif args.use_opensfm_dense:
# Undistort images at full scale in JPG
# (TODO: we could compare the size of the PNGs if they are < than depthmap_resolution
# and use those instead of re-exporting full resolution JPGs)
system.run('PYTHONPATH=%s %s/bin/opensfm undistort %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
# Skip dense reconstruction if necessary and export
# sparse reconstruction instead
if args.fast_orthophoto:
system.run('PYTHONPATH=%s %s/bin/opensfm export_ply --no-cameras %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
else:
system.run('PYTHONPATH=%s %s/bin/opensfm compute_depthmaps %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
system.run('PYTHONPATH=%s %s/bin/opensfm compute_depthmaps %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
else:
log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
@ -169,15 +180,6 @@ class ODMOpenSfMCell(ecto.Cell):
log.ODM_WARNING('Found a valid Bundler file in: %s' %
tree.opensfm_reconstruction)
if args.use_pmvs:
# check if reconstruction was exported to pmvs before
if not io.file_exists(tree.pmvs_visdat) or rerun_cell:
# run PMVS converter
system.run('PYTHONPATH=%s %s/bin/export_pmvs %s --output %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm, tree.pmvs))
else:
log.ODM_WARNING('Found a valid CMVS file in: %s' % tree.pmvs_visdat)
if reconstruction.georef:
system.run('PYTHONPATH=%s %s/bin/opensfm export_geocoords %s --transformation --proj \'%s\'' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm, reconstruction.georef.projection.srs))
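The image_scale factor passed to export_visualsfm and undistort above is meant to keep the undistorted images no finer than the requested orthophoto resolution requires. The sketch below is a hypothetical illustration of that idea only; the function name and exact formula are assumptions, not the actual opendm.gsd implementation:

    # Hypothetical: scale images down when the estimated GSD is finer than the
    # requested orthophoto GSD, and never scale them up (factor capped at 1.0).
    def image_scale_factor(target_gsd_cm, estimated_gsd_cm):
        if estimated_gsd_cm <= 0:
            return 1.0
        return min(1.0, estimated_gsd_cm / target_gsd_cm)

    # e.g. estimated 2.5 cm/pixel, requested 5 cm/pixel -> half-size images
    assert image_scale_factor(5.0, 2.5) == 0.5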

103
scripts/smvs.py 100644
View file

@ -0,0 +1,103 @@
import ecto, shutil, os, glob
from opendm import log
from opendm import io
from opendm import system
from opendm import context
class ODMSmvsCell(ecto.Cell):
def declare_params(self, params):
params.declare("threads", "max number of threads", context.num_cores)
params.declare("alpha", "Regularization parameter", 1)
params.declare("max_pixels", "max pixels for reconstruction", 1700000)
params.declare("output_scale", "scale of optimization", 2)
params.declare("shading", "Enable shading-aware model", False)
params.declare("gamma_srgb", "Apply inverse SRGB gamma correction", False)
params.declare("verbose", "Increase debug level", False)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
inputs.declare("args", "The application arguments.", {})
inputs.declare("reconstruction", "ODMReconstruction", [])
outputs.declare("reconstruction", "list of ODMReconstructions", [])
def process(self, inputs, outputs):
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running ODM SMVS Cell')
# get inputs
tree = inputs.tree
args = inputs.args
reconstruction = inputs.reconstruction
photos = reconstruction.photos
if not photos:
log.ODM_ERROR('Not enough photos in photos array to start SMVS')
return ecto.QUIT
# check if we rerun cell or not
rerun_cell = (args.rerun is not None and
args.rerun == 'smvs') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'smvs' in args.rerun_from)
# check if reconstruction was done before
if not io.file_exists(tree.smvs_model) or rerun_cell:
# make bundle directory
if not io.file_exists(tree.mve_bundle):
system.mkdir_p(tree.mve_path) #TODO: check permissions/what happens when rerun
system.mkdir_p(io.join_paths(tree.mve_path, 'bundle'))
io.copy(tree.opensfm_image_list, tree.mve_image_list)
io.copy(tree.opensfm_bundle, tree.mve_bundle)
# mve makescene wants the output directory
# not to exist before it runs (otherwise it
# will prompt the user for confirmation)
if io.dir_exists(tree.smvs):
shutil.rmtree(tree.smvs)
# run mve makescene
if not io.dir_exists(tree.mve_views):
system.run('%s %s %s' % (context.makescene_path, tree.mve_path, tree.smvs))
# config
config = [
"-t%s" % self.params.threads,
"-a%s" % self.params.alpha,
"--max-pixels=%s" % int(self.params.max_pixels),
"-o%s" % self.params.output_scale,
"--debug-lvl=%s" % ('1' if self.params.verbose else '0'),
"%s" % '-S' if self.params.shading else '',
"%s" % '-g' if self.params.gamma_srgb and self.params.shading else '',
"--force" if rerun_cell else ''
]
# run smvs
system.run('%s %s %s' % (context.smvs_path, ' '.join(config), tree.smvs))
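With the defaults declared at the top of this cell and, say, 8 threads, the command assembled above comes out to something like the following; the binary and scene paths are placeholders (the extra spaces left by the empty flag slots are harmless):

    # Illustrative expansion of the SMVS call; shading, sRGB and --force are
    # off here, so their slots collapse to empty strings.
    config = ["-t8", "-a1", "--max-pixels=1700000", "-o2", "--debug-lvl=0", "", "", ""]
    print('%s %s %s' % ('/path/to/smvsrecon', ' '.join(config), '/project/smvs'))
    # -> /path/to/smvsrecon -t8 -a1 --max-pixels=1700000 -o2 --debug-lvl=0    /project/smvs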
# find and rename the output file for simplicity
smvs_files = glob.glob(os.path.join(tree.smvs, 'smvs-*'))
smvs_files.sort(key=os.path.getmtime) # sort by last modified date
if len(smvs_files) > 0:
old_file = smvs_files[-1]
if not (io.rename_file(old_file, tree.smvs_model)):
log.ODM_WARNING("File %s does not exist, cannot be renamed. " % old_file)
else:
log.ODM_WARNING("Cannot find a valid point cloud (smvs-XX.ply) in %s. Check the console output for errors." % tree.smvs)
else:
log.ODM_WARNING('Found a valid SMVS reconstruction file in: %s' %
tree.smvs_model)
outputs.reconstruction = reconstruction
if args.time:
system.benchmark(start_time, tree.benchmarking, 'SMVS')
log.ODM_INFO('Running ODM SMVS Cell - Finished')
return ecto.OK if args.end_with != 'smvs' else ecto.QUIT