kopia lustrzana https://github.com/OpenDroneMap/ODM
Merge branch 'cmake' of https://github.com/edgarriba/OpenDroneMap into edgarriba-cmake
Conflicts: install.sh Changes to be committed: modified: .gitignore new file: CMakeLists.txt deleted: PoissonRecon.zip new file: SuperBuild/CMakeLists.txt new file: SuperBuild/cmake/External-CMVS.cmake new file: SuperBuild/cmake/External-Catkin.cmake new file: SuperBuild/cmake/External-Ceres.cmake new file: SuperBuild/cmake/External-Ecto.cmake new file: SuperBuild/cmake/External-GFlags.cmake new file: SuperBuild/cmake/External-LAStools.cmake new file: SuperBuild/cmake/External-OpenCV.cmake new file: SuperBuild/cmake/External-OpenGV.cmake new file: SuperBuild/cmake/External-OpenSfM.cmake new file: SuperBuild/cmake/External-PCL.cmake new file: SuperBuild/cmake/ExternalProject-Setup.cmake modified: ccd_defs_check.py deleted: clapack.tgz deleted: cmvs.tar.gz.part-aa deleted: cmvs.tar.gz.part-ab deleted: cmvs.tar.gz.part-ac deleted: cmvs.tar.gz.part-ad deleted: cmvs.tar.gz.part-ae new file: configure.sh deleted: convert_vlsift_to_lowesift.pl renamed: ccd_defs.json -> data/ccd_defs.json deleted: graclus.tar.gz modified: hooks/pre-commit deleted: install.sh deleted: knnMatch_exif.py new file: modules/CMakeLists.txt renamed: odm_extract_utm/CMakeLists.txt -> modules/odm_extract_utm/CMakeLists.txt renamed: odm_extract_utm/src/Logger.cpp -> modules/odm_extract_utm/src/Logger.cpp renamed: odm_extract_utm/src/Logger.hpp -> modules/odm_extract_utm/src/Logger.hpp renamed: odm_extract_utm/src/UtmExtractor.cpp -> modules/odm_extract_utm/src/UtmExtractor.cpp renamed: odm_extract_utm/src/UtmExtractor.hpp -> modules/odm_extract_utm/src/UtmExtractor.hpp renamed: odm_extract_utm/src/main.cpp -> modules/odm_extract_utm/src/main.cpp renamed: odm_georef/CMakeLists.txt -> modules/odm_georef/CMakeLists.txt renamed: odm_georef/CMakeLists.txt.user -> modules/odm_georef/CMakeLists.txt.user renamed: odm_georef/src/FindTransform.cpp -> modules/odm_georef/src/FindTransform.cpp renamed: odm_georef/src/FindTransform.hpp -> modules/odm_georef/src/FindTransform.hpp renamed: odm_georef/src/Georef.cpp 
-> modules/odm_georef/src/Georef.cpp renamed: odm_georef/src/Georef.hpp -> modules/odm_georef/src/Georef.hpp renamed: odm_georef/src/Logger.cpp -> modules/odm_georef/src/Logger.cpp renamed: odm_georef/src/Logger.hpp -> modules/odm_georef/src/Logger.hpp renamed: odm_georef/src/main.cpp -> modules/odm_georef/src/main.cpp renamed: odm_georef/src/modifiedPclFunctions.cpp -> modules/odm_georef/src/modifiedPclFunctions.cpp renamed: odm_georef/src/modifiedPclFunctions.hpp -> modules/odm_georef/src/modifiedPclFunctions.hpp renamed: odm_meshing/CMakeLists.txt -> modules/odm_meshing/CMakeLists.txt renamed: odm_meshing/src/Logger.cpp -> modules/odm_meshing/src/Logger.cpp renamed: odm_meshing/src/Logger.hpp -> modules/odm_meshing/src/Logger.hpp renamed: odm_meshing/src/OdmMeshing.cpp -> modules/odm_meshing/src/OdmMeshing.cpp renamed: odm_meshing/src/OdmMeshing.hpp -> modules/odm_meshing/src/OdmMeshing.hpp renamed: odm_meshing/src/main.cpp -> modules/odm_meshing/src/main.cpp renamed: odm_orthophoto/CMakeLists.txt -> modules/odm_orthophoto/CMakeLists.txt renamed: odm_orthophoto/CMakeLists.txt.user -> modules/odm_orthophoto/CMakeLists.txt.user renamed: odm_orthophoto/src/Logger.cpp -> modules/odm_orthophoto/src/Logger.cpp renamed: odm_orthophoto/src/Logger.hpp -> modules/odm_orthophoto/src/Logger.hpp renamed: odm_orthophoto/src/OdmOrthoPhoto.cpp -> modules/odm_orthophoto/src/OdmOrthoPhoto.cpp renamed: odm_orthophoto/src/OdmOrthoPhoto.hpp -> modules/odm_orthophoto/src/OdmOrthoPhoto.hpp renamed: odm_orthophoto/src/main.cpp -> modules/odm_orthophoto/src/main.cpp renamed: odm_texturing/CMakeLists.txt -> modules/odm_texturing/CMakeLists.txt renamed: odm_texturing/CMakeLists.txt.user -> modules/odm_texturing/CMakeLists.txt.user renamed: odm_texturing/src/Logger.cpp -> modules/odm_texturing/src/Logger.cpp renamed: odm_texturing/src/Logger.hpp -> modules/odm_texturing/src/Logger.hpp renamed: odm_texturing/src/OdmTexturing.cpp -> modules/odm_texturing/src/OdmTexturing.cpp renamed: 
odm_texturing/src/OdmTexturing.hpp -> modules/odm_texturing/src/OdmTexturing.hpp renamed: odm_texturing/src/main.cpp -> modules/odm_texturing/src/main.cpp renamed: odm_texturing/src/modifiedPclFunctions.cpp -> modules/odm_texturing/src/modifiedPclFunctions.cpp renamed: odm_texturing/src/modifiedPclFunctions.hpp -> modules/odm_texturing/src/modifiedPclFunctions.hpp new file: opendm/__init__.py new file: opendm/config.py new file: opendm/context.py new file: opendm/io.py new file: opendm/log.py new file: opendm/system.py new file: opendm/tasks.py new file: opendm/types.py modified: run.py new file: scripts/__init__.py new file: scripts/cmvs.py new file: scripts/dataset.py new file: scripts/example_ecto_python.py new file: scripts/odm_app.py new file: scripts/odm_georeferencing.py new file: scripts/odm_meshing.py new file: scripts/odm_orthophoto.py new file: scripts/odm_texturing.py new file: scripts/opensfm.py new file: scripts/pmvs.py new file: scripts/resize.py deleted: src/bundler deleted: vlfeat.tar.gzpull/254/head
commit
7b41a963d1
|
@ -5,6 +5,7 @@ lib/
|
|||
logs/
|
||||
share/
|
||||
src/
|
||||
download/
|
||||
|
||||
cmvs.tar.gz
|
||||
parallel.tar.bz2
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
cmake_minimum_required(VERSION 2.8)
|
||||
|
||||
project(OpenDroneMap C CXX)
|
||||
|
||||
# TODO(edgar): add option in order to point to CMAKE_PREFIX_PATH
|
||||
# if we want to build SuperBuild in an external directory.
|
||||
# It is assumed that SuperBuild have been compiled.
|
||||
|
||||
# Set third party libs location
|
||||
set(CMAKE_PREFIX_PATH "${CMAKE_CURRENT_SOURCE_DIR}/SuperBuild/install")
|
||||
|
||||
# move binaries to the same bin directory
|
||||
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
|
||||
|
||||
# Add ODM sub-modules
|
||||
add_subdirectory(modules)
|
BIN
PoissonRecon.zip
BIN
PoissonRecon.zip
Plik binarny nie jest wyświetlany.
|
@ -0,0 +1,110 @@
|
|||
cmake_minimum_required(VERSION 2.8)
|
||||
|
||||
project(ODM-SuperBuild)
|
||||
|
||||
# Setup SuperBuild root location
|
||||
set(SB_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
|
||||
# Path to additional CMake modules
|
||||
set(CMAKE_MODULE_PATH ${SB_ROOT_DIR}/cmake)
|
||||
|
||||
include(ExternalProject)
|
||||
include(ExternalProject-Setup)
|
||||
|
||||
|
||||
################################
|
||||
# Setup SuperBuild directories #
|
||||
################################
|
||||
|
||||
# Setup location where source tar-balls are downloaded
|
||||
set(SB_DOWNLOAD_DIR "${SB_ROOT_DIR}/download"
|
||||
CACHE PATH "Location where source tar-balls are (to be) downloaded.")
|
||||
mark_as_advanced(SB_DOWNLOAD_DIR)
|
||||
|
||||
message(STATUS "SuperBuild files will be downloaded to: ${SB_DOWNLOAD_DIR}")
|
||||
|
||||
|
||||
# Setup location where source tar-balls are located
|
||||
set(SB_SOURCE_DIR "${SB_ROOT_DIR}/src"
|
||||
CACHE PATH "Location where source tar-balls are (will be).")
|
||||
mark_as_advanced(SB_SOURCE_DIR)
|
||||
|
||||
message(STATUS "SuperBuild source files will be extracted to: ${SB_SOURCE_DIR}")
|
||||
|
||||
|
||||
# Setup location where source tar-balls are located
|
||||
set(SB_INSTALL_DIR "${SB_ROOT_DIR}/install"
|
||||
CACHE PATH "Location where source tar-balls are (will be) installed.")
|
||||
mark_as_advanced(SB_SOURCE_DIR)
|
||||
|
||||
message(STATUS "SuperBuild source files will be installed to: ${SB_INSTALL_DIR}")
|
||||
|
||||
|
||||
# Setup location where binary files are located
|
||||
set(SB_BINARY_DIR "${SB_ROOT_DIR}/build"
|
||||
CACHE PATH "Location where files are (will be) located.")
|
||||
mark_as_advanced(SB_BINARY_DIR)
|
||||
|
||||
message(STATUS "SuperBuild binary files will be located to: ${SB_BINARY_DIR}")
|
||||
|
||||
|
||||
#########################################
|
||||
# Download and install third party libs #
|
||||
#########################################
|
||||
|
||||
# ---------------------------------------------------------------------------------------------
|
||||
# Open Source Computer Vision (OpenCV)
|
||||
#
|
||||
set(ODM_OpenCV_Version 2.4.11)
|
||||
option(ODM_BUILD_OpenCV "Force to build OpenCV library" OFF)
|
||||
|
||||
SETUP_EXTERNAL_PROJECT(OpenCV ${ODM_OpenCV_Version} ${ODM_BUILD_OpenCV})
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------------------------
|
||||
# Point Cloud Library (PCL)
|
||||
#
|
||||
set(ODM_PCL_Version 1.7.2)
|
||||
option(ODM_BUILD_PCL "Force to build PCL library" OFF)
|
||||
|
||||
SETUP_EXTERNAL_PROJECT(PCL ${ODM_PCL_Version} ${ODM_BUILD_PCL})
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------------------------
|
||||
# Google Flags library (GFlags)
|
||||
#
|
||||
set(ODM_GFlags_Version 2.1.2)
|
||||
option(ODM_BUILD_GFlags "Force to build GFlags library" OFF)
|
||||
|
||||
SETUP_EXTERNAL_PROJECT(GFlags ${ODM_GFlags_Version} ${ODM_BUILD_GFlags})
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------------------------
|
||||
# Ceres Solver
|
||||
#
|
||||
set(ODM_Ceres_Version 1.10.0)
|
||||
option(ODM_BUILD_Ceres "Force to build Ceres library" OFF)
|
||||
|
||||
SETUP_EXTERNAL_PROJECT(Ceres ${ODM_Ceres_Version} ${ODM_BUILD_Ceres})
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------------------------
|
||||
# Open Geometric Vision (OpenGV)
|
||||
# Open Structure from Motion (OpenSfM)
|
||||
# Clustering Views for Multi-view Stereo (CMVS)
|
||||
# Catkin
|
||||
# Ecto
|
||||
# LAStools
|
||||
#
|
||||
|
||||
set(custom_libs OpenGV
|
||||
OpenSfM
|
||||
CMVS
|
||||
Catkin
|
||||
Ecto
|
||||
LAStools)
|
||||
|
||||
foreach(lib ${custom_libs})
|
||||
SETUP_EXTERNAL_PROJECT_CUSTOM(${lib})
|
||||
endforeach()
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
set(_proj_name cmvs)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
|
||||
URL https://github.com/edgarriba/CMVS-PMVS/archive/master.zip
|
||||
URL_MD5 dbb1493f49ca099b4208381bd20d1435
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CONFIGURE_COMMAND cmake <SOURCE_DIR>/program
|
||||
-DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${SB_INSTALL_DIR}/bin
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
set(_proj_name catkin)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/ros/catkin/archive/0.6.16.zip
|
||||
URL_MD5 F5D45AE68709CE6E3346FB8C019416F8
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,31 @@
|
|||
set(_proj_name ceres)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS gflags
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL http://ceres-solver.org/ceres-solver-1.10.0.tar.gz
|
||||
URL_MD5 dbf9f452bd46e052925b835efea9ab16
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_C_FLAGS=-fPIC
|
||||
-DCMAKE_CXX_FLAGS=-fPIC
|
||||
-DBUILD_EXAMPLES=OFF
|
||||
-DBUILD_TESTING=OFF
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,27 @@
|
|||
set(_proj_name ecto)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS catkin
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
|
||||
URL https://github.com/plasmodic/ecto/archive/master.zip
|
||||
URL_MD5 8cd70c525bcda99d9eba5ac2865e42dd
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,27 @@
|
|||
set(_proj_name gflags)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/gflags/gflags/archive/v2.1.2.zip
|
||||
URL_MD5 5cb0a1b38740ed596edb7f86cd5b3bd8
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_BUILD_TYPE:STRING=Release
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,26 @@
|
|||
set(_proj_name las-tools)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL http://lastools.org/download/LAStools.zip
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CONFIGURE_COMMAND ""
|
||||
WORKING_DIRECTORY "${SB_SOURCE_DIR}/${_proj_name}"
|
||||
#--Build step-----------------
|
||||
BUILD_IN_SOURCE 1
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
INSTALL_COMMAND ""
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,60 @@
|
|||
set(_proj_name opencv)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/Itseez/opencv/archive/2.4.11.zip
|
||||
URL_MD5 b517e83489c709eee1d8be76b16976a7
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DBUILD_opencv_core=ON
|
||||
-DBUILD_opencv_imgproc=ON
|
||||
-DBUILD_opencv_highgui=ON
|
||||
-DBUILD_opencv_video=ON
|
||||
-DBUILD_opencv_ml=ON
|
||||
-DBUILD_opencv_features2d=ON
|
||||
-DBUILD_opencv_calib3d=ON
|
||||
-DBUILD_opencv_contrib=ON
|
||||
-DBUILD_opencv_flann=ON
|
||||
-DBUILD_opencv_objdetect=ON
|
||||
-DBUILD_opencv_photo=ON
|
||||
-DBUILD_opencv_legacy=ON
|
||||
-DBUILD_opencv_python=ON
|
||||
-DWITH_FFMPEG=OFF
|
||||
-DWITH_CUDA=OFF
|
||||
-DWITH_GTK=OFF
|
||||
-DWITH_VTK=OFF
|
||||
-DWITH_EIGEN=OFF
|
||||
-DWITH_OPENNI=OFF
|
||||
-DBUILD_EXAMPLES=OFF
|
||||
-DBUILD_TESTS=OFF
|
||||
-DBUILD_PERF_TESTS=OFF
|
||||
-DBUILD_DOCS=OFF
|
||||
-DBUILD_opencv_apps=OFF
|
||||
-DBUILD_opencv_gpu=OFF
|
||||
-DBUILD_opencv_videostab=OFF
|
||||
-DBUILD_opencv_nonfree=OFF
|
||||
-DBUILD_opencv_stitching=OFF
|
||||
-DBUILD_opencv_world=OFF
|
||||
-DBUILD_opencv_superres=OFF
|
||||
-DBUILD_opencv_java=OFF
|
||||
-DBUILD_opencv_ocl=OFF
|
||||
-DBUILD_opencv_ts=OFF
|
||||
-DCMAKE_BUILD_TYPE:STRING=Release
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,29 @@
|
|||
set(_proj_name opengv)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/paulinus/opengv/archive/python-wrapper.zip
|
||||
URL_MD5 6afd5dfbec8f11b556e794d009bbbcc2
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DBUILD_TESTS=OFF
|
||||
-DBUILD_PYTHON=ON
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
set(_proj_name opensfm)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS ceres opencv opengv
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/mapillary/OpenSfM/archive/a4b07056aec1184692c1432fbdd1074710aec32b.zip
|
||||
URL_MD5 42B2B1994C3309BBF4525C8CC1F6F741
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CONFIGURE_COMMAND cmake <SOURCE_DIR>/${_proj_name}/src
|
||||
-DCERES_ROOT_DIR=${SB_INSTALL_DIR}
|
||||
-DOpenCV_DIR=${SB_INSTALL_DIR}/share/OpenCV
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_COMMAND ""
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
set(_proj_name pcl)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/PointCloudLibrary/pcl/archive/pcl-1.7.2.tar.gz
|
||||
URL_MD5 02c72eb6760fcb1f2e359ad8871b9968
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DBUILD_features=OFF
|
||||
-DBUILD_filters=OFF
|
||||
-DBUILD_geometry=OFF
|
||||
-DBUILD_keypoints=OFF
|
||||
-DBUILD_outofcore=OFF
|
||||
-DBUILD_people=OFF
|
||||
-DBUILD_recognition=OFF
|
||||
-DBUILD_registration=OFF
|
||||
-DBUILD_sample_consensus=OFF
|
||||
-DBUILD_segmentation=OFF
|
||||
-DBUILD_features=OFF
|
||||
-DBUILD_surface_on_nurbs=OFF
|
||||
-DBUILD_tools=OFF
|
||||
-DBUILD_tracking=OFF
|
||||
-DBUILD_visualization=OFF
|
||||
-DWITH_QT=OFF
|
||||
-DBUILD_OPENNI=OFF
|
||||
-DBUILD_OPENNI2=OFF
|
||||
-DWITH_OPENNI=OFF
|
||||
-DWITH_OPENNI2=OFF
|
||||
-DWITH_FZAPI=OFF
|
||||
-DWITH_LIBUSB=OFF
|
||||
-DWITH_PCAP=OFF
|
||||
-DWITH_PXCAPI=OFF
|
||||
-DCMAKE_BUILD_TYPE=Release
|
||||
-DPCL_VERBOSITY_LEVEL=Error
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,27 @@
|
|||
set(ADD_INTERNAL_LIB_MSG "--- Adding internal version")
|
||||
set(FORCE_BUILD_LIB_MSG "force build ${ADD_INTERNAL_LIB_MSG}")
|
||||
|
||||
macro(SETUP_EXTERNAL_PROJECT name version force_build)
|
||||
|
||||
if(NOT ${force_build})
|
||||
|
||||
find_package(${name} ${version} EXACT QUIET)
|
||||
|
||||
if(${${name}_FOUND})
|
||||
message(STATUS "${name} ${${name}_VERSION} found")
|
||||
set(${name}_DIR ${${name}_DIR})
|
||||
else()
|
||||
message(STATUS "${name} ${version} not found ${ADD_INTERNAL_LIB_MSG}")
|
||||
include(External-${name})
|
||||
endif()
|
||||
else()
|
||||
message(STATUS "${name} ${version} ${FORCE_BUILD_LIB_MSG}")
|
||||
include(External-${name})
|
||||
endif()
|
||||
|
||||
endmacro()
|
||||
|
||||
macro(SETUP_EXTERNAL_PROJECT_CUSTOM name)
|
||||
message(STATUS "${name} ${FORCE_BUILD_LIB_MSG}")
|
||||
include(External-${name})
|
||||
endmacro()
|
|
@ -7,7 +7,7 @@ BIN_PATH_ABS = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
|
|||
|
||||
def get_ccd_widths():
|
||||
"""Return the CCD Width of the camera listed in the JSON defs file."""
|
||||
with open(BIN_PATH_ABS + '/ccd_defs.json') as jsonFile:
|
||||
with open(BIN_PATH_ABS + '/data/ccd_defs.json') as jsonFile:
|
||||
return json.load(jsonFile)
|
||||
|
||||
try:
|
||||
|
|
BIN
clapack.tgz
BIN
clapack.tgz
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
|
@ -0,0 +1,122 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Check OS
|
||||
if [ ! $(command -v apt-get) ]; then
|
||||
echo -e "\e[1;31mERROR: Not a Debian-based linux system.
|
||||
Impossible to install OpenCV with this script\e[0;39m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
## Before installing
|
||||
echo -e "\e[1;34mUpdating the system\e[0;39m"
|
||||
sudo apt-get update
|
||||
END_CMD1=$?
|
||||
sudo apt-get upgrade -y
|
||||
END_CMD2=$?
|
||||
if [ $END_CMD1 -ne 0 -o $END_CMD2 -ne 0 ]
|
||||
then
|
||||
echo -e "\e[1;31mERROR: \e[39mWhen Updating the system\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
## Install Required Requisites
|
||||
echo -e "\e[1;34mInstalling Required Requisites\e[0;39m"
|
||||
sudo apt-get install build-essential \
|
||||
cmake \
|
||||
git \
|
||||
python-pip \
|
||||
pkg-config -y
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo -e "\e[1;31mERROR: \e[39mWhen Installing Required Requisites\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
## Installing Optional Requisites
|
||||
echo -e "\e[1;34mInstalling OpenCV Dependencies\e[0;39m"
|
||||
sudo apt-get install libgtk2.0-dev \
|
||||
libavcodec-dev \
|
||||
libavformat-dev \
|
||||
libswscale-dev \
|
||||
python-dev \
|
||||
python-numpy \
|
||||
libtbb2 \
|
||||
libtbb-dev \
|
||||
libjpeg-dev \
|
||||
libpng-dev \
|
||||
libtiff-dev \
|
||||
libjasper-dev \
|
||||
libflann-dev \
|
||||
libproj-dev \
|
||||
libxext-dev \
|
||||
liblapack-dev \
|
||||
libeigen3-dev \
|
||||
libvtk5-dev -y
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo -e "\e[1;31mERROR: \e[39mError when Installing Dependencies Requisites\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
## Remove libdc1394-22-dev due to python opencv issue
|
||||
echo -e "\e[1;34mRemoving libdc1394-22-dev\e[0;39m"
|
||||
sudo apt-get remove libdc1394-22-dev
|
||||
|
||||
## Installing OpenSfM Requisites
|
||||
echo -e "\e[1;34mInstalling OpenSfM Dependencies\e[0;39m"
|
||||
sudo apt-get install python-networkx \
|
||||
libgoogle-glog-dev \
|
||||
libsuitesparse-dev \
|
||||
libboost-filesystem1.55-dev \
|
||||
libboost-iostreams1.55-dev \
|
||||
libboost-regex1.55-dev \
|
||||
libboost-python1.55-dev \
|
||||
#libboost1.55-all-dev \
|
||||
libboost-python-dev -y
|
||||
|
||||
sudo pip install -U PyYAML \
|
||||
exifread \
|
||||
gpxpy \
|
||||
xmltodict
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo -e "\e[1;31mERROR: \e[39mError when Installing OpenSfM Dependencies\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
## Installing Ecto Requisites
|
||||
echo -e "\e[1;34mInstalling Ecto Dependencies\e[0;39m"
|
||||
sudo pip install -U catkin-pkg
|
||||
sudo apt-get install python-empy \
|
||||
python-nose \
|
||||
python-pyside -y
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo -e "\e[1;31mERROR: \e[39mError when Installing Ecto Dependencies\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
## Installing OpenDroneMap Requisites
|
||||
echo -e "\e[1;34mInstalling OpenDroneMap Dependencies\e[0;39m"
|
||||
sudo apt-get install python-pyexiv2 \
|
||||
python-scipy \
|
||||
jhead \
|
||||
liblas-bin -y
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo -e "\e[1;31mERROR: \e[39mError when Installing OpenDroneMap Dependencies\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
## Get sys vars
|
||||
NUM_CORES=`grep -c processor /proc/cpuinfo`
|
||||
|
||||
## Add SuperBuild path to the python path
|
||||
export PYTHONPATH=$PYTHONPATH:`pwd`/SuperBuild/install/lib/python2.7/dist-packages
|
||||
|
||||
## Compile SuperBuild
|
||||
cd SuperBuild
|
||||
mkdir -p build && cd build
|
||||
cmake .. && make -j ${NUM_CORES}
|
||||
|
||||
echo -e "\e[1;34mScript finished\e[0;39m"
|
|
@ -1,112 +0,0 @@
|
|||
#!/usr/local/bin/perl
|
||||
|
||||
$filename_base = $ARGV[0];
|
||||
|
||||
$write_binary = 1;
|
||||
|
||||
$filename_src = $filename_base.".key.sift";
|
||||
$filename_dest_bin = $filename_base.".key.bin";
|
||||
$filename_dest_key = $filename_base.".key";
|
||||
$filename_image = $filename_base.".pgm";
|
||||
|
||||
open (DEST_BIN, ">$filename_dest_bin");
|
||||
open (DEST_KEY, ">$filename_dest_key");
|
||||
|
||||
open (SRC, "$filename_src");
|
||||
|
||||
$linecount = 0;
|
||||
$linecount += tr/\n/\n/ while sysread(SRC, $_, 2 ** 16);
|
||||
|
||||
printf ("%d", $linecount);
|
||||
|
||||
if($write_binary){
|
||||
seek(SRC, 0, 0);
|
||||
|
||||
print DEST_BIN pack("L", $linecount);
|
||||
|
||||
while ($record = <SRC>) {
|
||||
@parts = split(/ /, $record);
|
||||
|
||||
if(@parts[3] > 3.141){
|
||||
@parts[3] -= 6.282;
|
||||
}
|
||||
|
||||
@parts[3] *= -1;
|
||||
|
||||
@tmp = @parts[0];
|
||||
@parts[0] = @parts[1];
|
||||
@parts[1] = @tmp;
|
||||
|
||||
for ($count = 4; $count < 132; $count += 8) {
|
||||
@tmp = @parts[$count+7];
|
||||
@parts[$count+7] = @parts[$count+1];
|
||||
@parts[$count+1] = @tmp;
|
||||
|
||||
@tmp = @parts[$count+6];
|
||||
@parts[$count+6] = @parts[$count+2];
|
||||
@parts[$count+2] = @tmp;
|
||||
|
||||
@tmp = @parts[$count+3];
|
||||
@parts[$count+3] = @parts[$count+5];
|
||||
@parts[$count+5] = @tmp;
|
||||
}
|
||||
|
||||
print DEST_BIN pack("f4 C128", @parts);
|
||||
}
|
||||
}
|
||||
|
||||
seek(SRC, 0, 0);
|
||||
|
||||
print DEST_KEY $linecount, " 128\n";
|
||||
|
||||
while ($record = <SRC>) {
|
||||
@parts = split(/ /, $record);
|
||||
|
||||
$counter = 0;
|
||||
|
||||
if(@parts[3] > 3.141){
|
||||
@parts[3] -= 6.282;
|
||||
}
|
||||
|
||||
@parts[3] *= -1;
|
||||
|
||||
printf (DEST_KEY "%.3f %.3f %.3f %.3f", @parts[1], @parts[0], @parts[2], @parts[3]);
|
||||
|
||||
shift(@parts);
|
||||
shift(@parts);
|
||||
shift(@parts);
|
||||
shift(@parts);
|
||||
|
||||
for ($count = 0; $count < 128; $count += 8) {
|
||||
@tmp = @parts[$count+7];
|
||||
@parts[$count+7] = @parts[$count+1];
|
||||
@parts[$count+1] = @tmp;
|
||||
|
||||
@tmp = @parts[$count+6];
|
||||
@parts[$count+6] = @parts[$count+2];
|
||||
@parts[$count+2] = @tmp;
|
||||
|
||||
@tmp = @parts[$count+3];
|
||||
@parts[$count+3] = @parts[$count+5];
|
||||
@parts[$count+5] = @tmp;
|
||||
}
|
||||
|
||||
foreach (@parts) {
|
||||
if((($counter) % 20) == 0) {
|
||||
print DEST_KEY "\n ";
|
||||
} else {
|
||||
if($counter != 0){
|
||||
print DEST_KEY " ";
|
||||
}
|
||||
}
|
||||
|
||||
print DEST_KEY $_;
|
||||
|
||||
$counter++;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
close(DEST_BIN);
|
||||
close(DEST_KEY);
|
||||
close(SRC);
|
BIN
graclus.tar.gz
BIN
graclus.tar.gz
Plik binarny nie jest wyświetlany.
|
@ -10,7 +10,6 @@
|
|||
exec 1>&2
|
||||
|
||||
echo "RUNNING PRE-COMMIT"
|
||||
|
||||
EXIT_CODE=0
|
||||
# Get list of files about to be committed
|
||||
if git diff --cached --name-only --diff-filter=ACM | grep 'ccd_defs.json'; then
|
||||
|
|
559
install.sh
559
install.sh
|
@ -1,559 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -o nounset
|
||||
set -o errexit
|
||||
|
||||
echo
|
||||
echo " created by Daniel Schwarz/daniel.schwarz@topoi.org"
|
||||
echo " released under Creative Commons/CC-BY"
|
||||
echo " Attribution"
|
||||
echo
|
||||
echo " if the script doesn't finish properly"
|
||||
echo " (i.e. it doesn't print \"script finished\" at the end)"
|
||||
echo " please email me the content of the logs folder"
|
||||
echo
|
||||
echo
|
||||
echo " - script started - `date`"
|
||||
|
||||
## dest base path
|
||||
TOOLS_PATH="$PWD"
|
||||
|
||||
## paths for the tools
|
||||
TOOLS_BIN_PATH="$TOOLS_PATH/bin"
|
||||
TOOLS_INC_PATH="$TOOLS_PATH/include"
|
||||
TOOLS_LIB_PATH="$TOOLS_PATH/lib"
|
||||
TOOLS_SRC_PATH="$TOOLS_PATH/src"
|
||||
TOOLS_LOG_PATH="$TOOLS_PATH/logs"
|
||||
TOOLS_PATCHED_PATH="$TOOLS_PATH/patched_files"
|
||||
|
||||
## loacal dest paths
|
||||
LIB_PATH="/usr/local/lib"
|
||||
INC_PATH="/usr/local/include"
|
||||
|
||||
## source paths
|
||||
BUNDLER_PATH="$TOOLS_SRC_PATH/bundler"
|
||||
CMVS_PATH="$TOOLS_SRC_PATH/cmvs"
|
||||
PMVS_PATH="$TOOLS_SRC_PATH/pmvs"
|
||||
CLAPACK_PATH="$TOOLS_SRC_PATH/clapack"
|
||||
VLFEAT_PATH="$TOOLS_SRC_PATH/vlfeat"
|
||||
PARALLEL_PATH="$TOOLS_SRC_PATH/parallel"
|
||||
PSR_PATH="$TOOLS_SRC_PATH/PoissonRecon"
|
||||
GRACLUS_PATH="$TOOLS_SRC_PATH/graclus"
|
||||
CERES_PATH="$TOOLS_SRC_PATH/ceres-solver"
|
||||
|
||||
PCL_PATH="$TOOLS_SRC_PATH/pcl"
|
||||
LASTOOLS_PATH="$TOOLS_SRC_PATH/lastools"
|
||||
ODM_MESHING_PATH="$TOOLS_SRC_PATH/odm_meshing"
|
||||
ODM_TEXTURING_PATH="$TOOLS_SRC_PATH/odm_texturing"
|
||||
ODM_ORTHOPHOTO_PATH="$TOOLS_SRC_PATH/odm_orthophoto"
|
||||
ODM_EXTRACT_UTM_PATH="$TOOLS_SRC_PATH/odm_extract_utm"
|
||||
ODM_GEOREF_PATH="$TOOLS_SRC_PATH/odm_georef"
|
||||
|
||||
OPENGV_PATH="$TOOLS_SRC_PATH/opengv"
|
||||
OPENSFM_PATH="$TOOLS_SRC_PATH/OpenSfM"
|
||||
|
||||
## executables
|
||||
EXTRACT_FOCAL="$TOOLS_BIN_PATH/extract_focal.pl"
|
||||
MATCHKEYS="$TOOLS_BIN_PATH/KeyMatch"
|
||||
MATCHKEYSFULL="$TOOLS_BIN_PATH/KeyMatchFull"
|
||||
BUNDLER="$TOOLS_BIN_PATH/bundler"
|
||||
BUNDLE2PVMS="$TOOLS_BIN_PATH/Bundle2PMVS"
|
||||
CMVS="$TOOLS_BIN_PATH/cmvs"
|
||||
PMVS="$TOOLS_BIN_PATH/pmvs2"
|
||||
GENOPTION="$TOOLS_BIN_PATH/genOption"
|
||||
VLSIFT="$TOOLS_BIN_PATH/vlsift"
|
||||
PARALLEL="$TOOLS_BIN_PATH/parallel"
|
||||
PSR="$TOOLS_BIN_PATH/PoissonRecon"
|
||||
VLSIFT_TO_LOWESIFT="$TOOLS_BIN_PATH/convert_vlsift_to_lowesift.pl"
|
||||
|
||||
ODM_MESHING="$TOOLS_BIN_PATH/odm_meshing"
|
||||
ODM_TEXTURING="$TOOLS_BIN_PATH/odm_texturing"
|
||||
ODM_ORTHOPHOTO="$TOOLS_BIN_PATH/odm_orthophoto"
|
||||
ODM_EXTRACT_UTM="$TOOLS_BIN_PATH/odm_extract_utm"
|
||||
ODM_GEOREF="$TOOLS_BIN_PATH/odm_georef"
|
||||
|
||||
## get sys vars
|
||||
ARCH=`uname -m`
|
||||
CORES=`grep -c processor /proc/cpuinfo`
|
||||
|
||||
## prevents different (localized) output
|
||||
LC_ALL=C
|
||||
|
||||
## removing old stuff
|
||||
sudo rm -Rf "$TOOLS_BIN_PATH"
|
||||
sudo rm -Rf "$TOOLS_INC_PATH"
|
||||
sudo rm -Rf "$TOOLS_LIB_PATH"
|
||||
sudo rm -Rf "$TOOLS_SRC_PATH"
|
||||
sudo rm -Rf "$TOOLS_LOG_PATH"
|
||||
|
||||
## create needed directories
|
||||
mkdir -p "$TOOLS_BIN_PATH"
|
||||
mkdir -p "$TOOLS_INC_PATH"
|
||||
mkdir -p "$TOOLS_LIB_PATH"
|
||||
mkdir -p "$TOOLS_SRC_PATH"
|
||||
mkdir -p "$TOOLS_LOG_PATH"
|
||||
|
||||
## Copy meshing and texturing to src folder
|
||||
cp -rf "odm_meshing" "$TOOLS_SRC_PATH/"
|
||||
cp -rf "odm_texturing" "$TOOLS_SRC_PATH/"
|
||||
cp -rf "odm_orthophoto" "$TOOLS_SRC_PATH/"
|
||||
cp -rf "odm_extract_utm" "$TOOLS_SRC_PATH/"
|
||||
cp -rf "odm_georef" "$TOOLS_SRC_PATH/"
|
||||
|
||||
## output sys info
|
||||
echo "System info:" > "$TOOLS_LOG_PATH/sysinfo.txt"
|
||||
uname -a > "$TOOLS_LOG_PATH/sysinfo.txt"
|
||||
|
||||
## install packages
|
||||
echo
|
||||
echo " > installing required packages"
|
||||
|
||||
echo " - updating"
|
||||
sudo apt-get update --assume-yes > "$TOOLS_LOG_PATH/apt-get_get.log" 2>&1
|
||||
|
||||
echo " - installing"
|
||||
if [[ `lsb_release -rs` == "12.04" ]];
|
||||
then
|
||||
sudo apt-get install --assume-yes --install-recommends \
|
||||
build-essential cmake g++ gcc gFortran perl git autoconf \
|
||||
curl wget \
|
||||
unzip \
|
||||
imagemagick jhead proj-bin libproj-dev\
|
||||
libjpeg-dev libboost1.48-all-dev libgsl0-dev libx11-dev libxext-dev liblapack-dev \
|
||||
libeigen3-dev libflann-dev libvtk5-dev libqhull-dev libusb-1.0-0-dev\
|
||||
libzip-dev \
|
||||
libswitch-perl libjson-perl \
|
||||
libcv-dev libcvaux-dev libopencv-dev \
|
||||
gdal-bin \
|
||||
exiv2 \
|
||||
libgoogle-glog-dev libatlas-base-dev libsuitesparse-dev \
|
||||
> "$TOOLS_LOG_PATH/apt-get_install.log" 2>&1
|
||||
else
|
||||
sudo apt-get install --assume-yes --install-recommends \
|
||||
build-essential cmake g++ gcc gFortran perl git autoconf \
|
||||
curl wget \
|
||||
unzip \
|
||||
imagemagick jhead proj-bin libproj-dev\
|
||||
libjpeg-dev libboost-all-dev libgsl0-dev libx11-dev libxext-dev liblapack-dev \
|
||||
libeigen3-dev libflann-dev libvtk5-dev libqhull-dev libusb-1.0-0-dev\
|
||||
libjson-perl \
|
||||
libzip-dev \
|
||||
libswitch-perl \
|
||||
libcv-dev libcvaux-dev libopencv-dev \
|
||||
libgoogle-glog-dev libatlas-base-dev libeigen3-dev libsuitesparse-dev \
|
||||
python-dev python-pip libboost-python-dev \
|
||||
python-numpy-dev python-scipy python-yaml \
|
||||
python-opencv python-pyexiv2 \
|
||||
gdal-bin \
|
||||
exiv2 \
|
||||
> "$TOOLS_LOG_PATH/apt-get_install.log" 2>&1
|
||||
fi
|
||||
|
||||
sudo pip install networkx exifread xmltodict
|
||||
|
||||
echo " - installing git submodules"
|
||||
git submodule init
|
||||
git submodule update
|
||||
|
||||
echo " < done - `date`"
|
||||
|
||||
## downloading sources
|
||||
echo
|
||||
echo " > getting the sources"
|
||||
|
||||
## Reconstruct CMVS tar.gz from pieces...
|
||||
cat cmvs.tar.gz.part-?? > cmvs.tar.gz
|
||||
|
||||
## getting all archives if not already present; save them to .tmp and rename them after download
|
||||
while read target source
|
||||
do
|
||||
if [ ! -f "$target" ] ; then
|
||||
echo " - getting $source"
|
||||
|
||||
curl --progress-bar --location -o "$target.tmp" "$source"
|
||||
mv "$target.tmp" "$target"
|
||||
echo " - finished $target"
|
||||
echo
|
||||
else
|
||||
echo " - already downloaded $source"
|
||||
fi
|
||||
done <<EOF
|
||||
parallel.tar.bz2 http://ftp.gnu.org/gnu/parallel/parallel-20141022.tar.bz2
|
||||
clapack.tgz http://www.netlib.org/clapack/clapack-3.2.1-CMAKE.tgz
|
||||
PoissonRecon.zip http://www.cs.jhu.edu/~misha/Code/PoissonRecon/Version2/PoissonRecon.zip
|
||||
vlfeat.tar.gz http://www.vlfeat.org/download/vlfeat-0.9.13-bin.tar.gz
|
||||
cmvs.tar.gz http://www.di.ens.fr/cmvs/cmvs-fix2.tar.gz
|
||||
graclus.tar.gz http://smathermather.github.io/BundlerTools/patched_files/src/graclus/graclus1.2.tar.gz
|
||||
pcl.tar.gz https://github.com/PointCloudLibrary/pcl/archive/pcl-1.7.2.tar.gz
|
||||
ceres-solver.tar.gz http://ceres-solver.org/ceres-solver-1.10.0.tar.gz
|
||||
LAStools.zip http://lastools.org/download/LAStools.zip
|
||||
EOF
|
||||
git clone https://github.com/paulinus/opengv.git $OPENGV_PATH
|
||||
git clone https://github.com/mapillary/OpenSfM.git $OPENSFM_PATH
|
||||
|
||||
echo " < done - `date`"
|
||||
|
||||
## unzipping sources
|
||||
echo
|
||||
echo " - unzipping sources"
|
||||
|
||||
for i in *.tar.bz2 ; do
|
||||
tar xjf "$i" > "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 &
|
||||
done
|
||||
for i in *.tgz *.tar.gz ; do
|
||||
tar xzf "$i" > "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 &
|
||||
done
|
||||
for i in *.zip ; do
|
||||
unzip "$i" > "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 &
|
||||
done
|
||||
|
||||
wait
|
||||
|
||||
mv -f graclus1.2 "$GRACLUS_PATH"
|
||||
mv -f clapack-3.2.1-CMAKE "$CLAPACK_PATH"
|
||||
mv -f vlfeat-0.9.13 "$VLFEAT_PATH"
|
||||
mv -f parallel-20141022 "$PARALLEL_PATH"
|
||||
mv -f PoissonRecon "$PSR_PATH"
|
||||
mv -f cmvs "$CMVS_PATH"
|
||||
mv -f pcl-pcl-1.7.2 "$PCL_PATH"
|
||||
mv -f ceres-solver-1.10.0 "$CERES_PATH"
|
||||
mv -f LAStools "$LASTOOLS_PATH"
|
||||
|
||||
|
||||
echo " < done - `date`"
|
||||
|
||||
|
||||
## copying patches
|
||||
echo
|
||||
echo " - copying patches"
|
||||
echo
|
||||
|
||||
for file in `find $TOOLS_PATCHED_PATH -type f -print` ; do
|
||||
cp $file $TOOLS_PATH/${file/$TOOLS_PATCHED_PATH/.}
|
||||
done
|
||||
|
||||
echo " < done - `date`"
|
||||
|
||||
|
||||
# building
|
||||
echo
|
||||
echo " - building"
|
||||
echo
|
||||
|
||||
sudo chown -R `id -u`:`id -g` *
|
||||
#sudo chmod -R 777 *
|
||||
|
||||
|
||||
echo " > graclus"
|
||||
cd "$GRACLUS_PATH"
|
||||
|
||||
if [ "$ARCH" = "i686" ]; then
|
||||
sed -i "$GRACLUS_PATH/Makefile.in" -e "11c\COPTIONS = -DNUMBITS=32"
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "x86_64" ]; then
|
||||
sed -i "$GRACLUS_PATH/Makefile.in" -e "11c\COPTIONS = -DNUMBITS=64"
|
||||
fi
|
||||
|
||||
echo " - cleaning graclus"
|
||||
make clean > "$TOOLS_LOG_PATH/graclus_1_clean.log" 2>&1
|
||||
|
||||
echo " - building graclus"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/graclus_2_build.log" 2>&1
|
||||
|
||||
mkdir "$TOOLS_INC_PATH/metisLib"
|
||||
cp -f "$GRACLUS_PATH/metisLib/"*.h "$TOOLS_INC_PATH/metisLib/"
|
||||
|
||||
cp -f lib* "$TOOLS_LIB_PATH/"
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > poisson surface reconstruction "
|
||||
cd "$PSR_PATH"
|
||||
|
||||
sed -i "$PSR_PATH/Makefile" -e "21c\BIN = ./"
|
||||
|
||||
echo " - building poisson surface reconstruction"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/poisson_1_build.log" 2>&1
|
||||
|
||||
cp -f "$PSR_PATH/PoissonRecon" "$TOOLS_BIN_PATH/PoissonRecon"
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
|
||||
echo " > parallel"
|
||||
cd "$PARALLEL_PATH"
|
||||
|
||||
echo " - configuring parallel"
|
||||
./configure > "$TOOLS_LOG_PATH/parallel_1_build.log" 2>&1
|
||||
|
||||
echo " - building paralel"
|
||||
make -j$CORES> "$TOOLS_LOG_PATH/parallel_2_build.log" 2>&1
|
||||
|
||||
cp -f src/parallel "$TOOLS_BIN_PATH/"
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
|
||||
echo " > clapack"
|
||||
cd "$CLAPACK_PATH"
|
||||
cp make.inc.example make.inc
|
||||
|
||||
set +e
|
||||
echo " - building clapack"
|
||||
make -j$CORES all > "$TOOLS_LOG_PATH/clapack_1_build.log" 2>&1
|
||||
set -e
|
||||
|
||||
echo " - installing clapack"
|
||||
make -j$CORES lapack_install > "$TOOLS_LOG_PATH/clapack_2_install.log" 2>&1
|
||||
|
||||
sudo cp -Rf INCLUDE "$INC_PATH/clapack"
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
|
||||
echo " > vlfeat"
|
||||
cd "$VLFEAT_PATH"
|
||||
|
||||
echo " - installing vlfeat"
|
||||
|
||||
if [ "$ARCH" = "i686" ]; then
|
||||
cp -f "$VLFEAT_PATH/bin/glnx86/sift" "$TOOLS_BIN_PATH/vlsift"
|
||||
cp -f "$VLFEAT_PATH/bin/glnx86/libvl.so" "$TOOLS_LIB_PATH/"
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "x86_64" ]; then
|
||||
cp -f "$VLFEAT_PATH/bin/glnxa64/sift" "$TOOLS_BIN_PATH/vlsift"
|
||||
cp -f "$VLFEAT_PATH/bin/glnxa64/libvl.so" "$TOOLS_LIB_PATH/"
|
||||
fi
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > LAStools"
|
||||
cd "$LASTOOLS_PATH"
|
||||
|
||||
echo " - installing LAStools"
|
||||
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/lastools_1_build.log" 2>&1
|
||||
|
||||
if [ "$ARCH" = "i686" ]; then
|
||||
cp -f "$LASTOOLS_PATH/bin/txt2las" "$TOOLS_BIN_PATH/txt2las"
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "x86_64" ]; then
|
||||
cp -f "$LASTOOLS_PATH/bin/txt2las" "$TOOLS_BIN_PATH/txt2las"
|
||||
fi
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
|
||||
echo " > cmvs/pmvs"
|
||||
cd "$CMVS_PATH/program/main"
|
||||
|
||||
sed -i "$CMVS_PATH/program/main/genOption.cc" -e "5c\#include <stdlib.h>\n"
|
||||
sed -i "$CMVS_PATH/program/base/cmvs/bundle.cc" -e "3c\#include <numeric>\n"
|
||||
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e "10c\#Your INCLUDE path (e.g., -I\/usr\/include)"
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e "11c\YOUR_INCLUDE_PATH =-I$INC_PATH -I$TOOLS_INC_PATH"
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e "13c\#Your metis directory (contains header files under graclus1.2/metisLib/)"
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e "14c\YOUR_INCLUDE_METIS_PATH = -I$TOOLS_INC_PATH/metisLib/"
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e "16c\#Your LDLIBRARY path (e.g., -L/usr/lib)"
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e "17c\YOUR_LDLIB_PATH = -L$LIB_PATH -L$TOOLS_LIB_PATH"
|
||||
|
||||
if [ "$ARCH" = "i686" ]; then
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e "22c\CXXFLAGS_CMVS = -O2 -Wall -Wno-deprecated -DNUMBITS=32 \\\\"
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e '24c\ -fopenmp -DNUMBITS=32 ${OPENMP_FLAG}'
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "x86_64" ]; then
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e "22c\CXXFLAGS_CMVS = -O2 -Wall -Wno-deprecated -DNUMBITS=64 \\\\"
|
||||
sed -i "$CMVS_PATH/program/main/Makefile" -e '24c\ -fopenmp -DNUMBITS=64 ${OPENMP_FLAG}'
|
||||
fi
|
||||
|
||||
echo " - cleaning cmvs"
|
||||
make clean > "$TOOLS_LOG_PATH/cmvs_1_clean.log" 2>&1
|
||||
|
||||
echo " - building cmvs"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/cmvs_2_build.log" 2>&1
|
||||
|
||||
echo " - make depend cmvs"
|
||||
sudo make depend > "$TOOLS_LOG_PATH/cmvs_3_depend.log" 2>&1
|
||||
|
||||
cp -f "$CMVS_PATH/program/main/cmvs" "$CMVS_PATH/program/main/pmvs2" "$CMVS_PATH/program/main/genOption" "$TOOLS_BIN_PATH/"
|
||||
cp -f "$CMVS_PATH/program/main/"*so* "$TOOLS_LIB_PATH/"
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > ceres"
|
||||
cd "$CERES_PATH"
|
||||
|
||||
echo " - configuring ceres"
|
||||
mkdir -p build
|
||||
cd build
|
||||
cmake .. -DCMAKE_INSTALL_PREFIX=$TOOLS_PATH \
|
||||
-DCMAKE_C_FLAGS=-fPIC -DCMAKE_CXX_FLAGS=-fPIC \
|
||||
-DBUILD_EXAMPLES=OFF -DBUILD_TESTING=OFF > "$TOOLS_LOG_PATH/ceres_1_config.log" 2>&1
|
||||
|
||||
echo " - building ceres"
|
||||
make -j$CORES install > "$TOOLS_LOG_PATH/ceres_1_build.log" 2>&1
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > bundler"
|
||||
cd "$BUNDLER_PATH"
|
||||
|
||||
echo " - cleaning bundler"
|
||||
make clean > "$TOOLS_LOG_PATH/bundler_1_clean.log" 2>&1
|
||||
|
||||
echo " - building bundler"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/bundler_2_build.log" 2>&1
|
||||
|
||||
ln -s "$BUNDLER_PATH/bin/Bundle2PMVS" "$BUNDLER_PATH/bin/Bundle2Vis" "$BUNDLER_PATH/bin/KeyMatchFull" "$BUNDLER_PATH/bin/KeyMatch" "$BUNDLER_PATH/bin/bundler" "$BUNDLER_PATH/bin/RadialUndistort" "$TOOLS_BIN_PATH/"
|
||||
|
||||
ln -s "$BUNDLER_PATH/lib/libANN_char.so" "$TOOLS_LIB_PATH/"
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > pcl "
|
||||
#cd "$PCL_PATH"
|
||||
|
||||
#Install pcl dependencies using the default package manager.
|
||||
#sudo apt-get install libeigen3-dev libflann-dev libvtk5-dev libqhull-dev
|
||||
|
||||
#install the required boost version.
|
||||
#sudo apt-get install libboost1.48-all-dev
|
||||
|
||||
mkdir -p "pcl"
|
||||
mkdir -p "$TOOLS_LIB_PATH/pcl"
|
||||
mkdir -p "$PCL_PATH/pcl_tmp"
|
||||
mkdir -p "$PCL_PATH/pcl_build"
|
||||
|
||||
#mv -f "pcl-pcl-1.7.2" "$PCL_PATH/pcl_tmp"
|
||||
|
||||
cd "$PCL_PATH/pcl_build"
|
||||
|
||||
echo " - configuring pcl"
|
||||
|
||||
cmake .. -DCMAKE_INSTALL_PREFIX="$TOOLS_LIB_PATH/pcl" -DCMAKE_BUILD_TYPE=Release -DPCL_VERBOSITY_LEVEL=Error -DBUILD_features=OFF -DBUILD_filters=OFF -DBUILD_geometry=OFF -DBUILD_keypoints=OFF -DBUILD_outofcore=OFF -DBUILD_people=OFF -DBUILD_recognition=OFF -DBUILD_registration=OFF -DBUILD_sample_consensus=OFF -DBUILD_segmentation=OFF -DBUILD_features=OFF -DBUILD_surface_on_nurbs=OFF -DBUILD_tools=OFF -DBUILD_tracking=OFF -DBUILD_visualization=OFF -DWITH_QT=OFF -DBUILD_OPENNI=OFF -DBUILD_OPENNI2=OFF -DWITH_OPENNI=OFF -DWITH_OPENNI2=OFF -DWITH_FZAPI=OFF -DWITH_LIBUSB=OFF -DWITH_PCAP=OFF -DWITH_PXCAPI=OFF > "$TOOLS_LOG_PATH/pcl_1_build.log" 2>&1
|
||||
|
||||
echo " - building and installing pcl"
|
||||
make install > "$TOOLS_LOG_PATH/pcl_2_build.log" 2>&1
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > meshing "
|
||||
cd "$ODM_MESHING_PATH"
|
||||
|
||||
echo " - configuring odm_meshing"
|
||||
cmake . -DPCL_DIR="$TOOLS_LIB_PATH/pcl" > "$TOOLS_LOG_PATH/odm_meshing_1_build.log" 2>&1
|
||||
|
||||
echo " - building odm_meshing"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/odm_meshing_2_build.log" 2>&1
|
||||
|
||||
# copy output program to the binaries folder.
|
||||
cp -f "odm_meshing" "$TOOLS_BIN_PATH/odm_meshing"
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > texturing "
|
||||
cd "$ODM_TEXTURING_PATH"
|
||||
|
||||
echo " - configuring odm_texturing"
|
||||
cmake . -DPCL_DIR="$TOOLS_LIB_PATH/pcl" > "$TOOLS_LOG_PATH/odm_texturing_1_build.log" 2>&1
|
||||
|
||||
echo " - building odm_texturing"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/odm_texturing_2_build.log" 2>&1
|
||||
|
||||
# copy output program to the binaries folder.
|
||||
cp -f "odm_texturing" "$TOOLS_BIN_PATH/odm_texturing"
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > extract_utm "
|
||||
cd "$ODM_EXTRACT_UTM_PATH"
|
||||
|
||||
echo " - configuring odm_extract_utm"
|
||||
cmake . > "$TOOLS_LOG_PATH/odm_extract_utm_1_build.log" 2>&1
|
||||
|
||||
echo " - building odm_extract_utm"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/odm_extract_utm_2_build.log" 2>&1
|
||||
|
||||
# copy output program to the binaries folder.
|
||||
cp -f "odm_extract_utm" "$TOOLS_BIN_PATH/odm_extract_utm"
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > georef "
|
||||
cd "$ODM_GEOREF_PATH"
|
||||
|
||||
echo " - configuring odm_georef"
|
||||
cmake . -DPCL_DIR="$TOOLS_LIB_PATH/pcl" > "$TOOLS_LOG_PATH/odm_georef_1_build.log" 2>&1
|
||||
|
||||
echo " - building odm_georef"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/odm_georef_2_build.log" 2>&1
|
||||
|
||||
# copy output program to the binaries folder.
|
||||
cp -f "odm_georef" "$TOOLS_BIN_PATH/odm_georef"
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > orthophoto "
|
||||
cd "$ODM_ORTHOPHOTO_PATH"
|
||||
|
||||
echo " - configuring odm_orthophoto"
|
||||
cmake . -DPCL_DIR="$TOOLS_LIB_PATH/pcl" > "$TOOLS_LOG_PATH/odm_orthophoto_1_build.log" 2>&1
|
||||
|
||||
echo " - building odm_orthophoto"
|
||||
make -j$CORES > "$TOOLS_LOG_PATH/odm_orthophoto_2_build.log" 2>&1
|
||||
|
||||
# copy output program to the binaries folder.
|
||||
cp -f "odm_orthophoto" "$TOOLS_BIN_PATH/odm_orthophoto"
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > OpenGV"
|
||||
cd "$OPENGV_PATH"
|
||||
|
||||
echo " - configuring opengv"
|
||||
git checkout python-wrapper
|
||||
mkdir -p build
|
||||
cd build
|
||||
cmake .. -DCMAKE_INSTALL_PREFIX=$TOOLS_PATH -DBUILD_TESTS=OFF -DBUILD_PYTHON=ON > "$TOOLS_LOG_PATH/opengv_1_build.log" 2>&1
|
||||
echo " - building opengv"
|
||||
make install > "$TOOLS_LOG_PATH/opengv_2_build.log" 2>&1
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > OpenSfM"
|
||||
cd "$OPENSFM_PATH"
|
||||
|
||||
echo " - configuring opensfm"
|
||||
git checkout odm-2
|
||||
echo " - building opensfm"
|
||||
CERES_ROOT_DIR=$TOOLS_PATH python setup.py build > "$TOOLS_LOG_PATH/opensfm_1_build.log" 2>&1
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
|
||||
cd "$TOOLS_PATH"
|
||||
|
||||
sudo install -o `id -u` -g `id -g` -m 644 -t "$LIB_PATH" lib/*.so
|
||||
sudo ldconfig -v > "$TOOLS_LOG_PATH/ldconfig.log" 2>&1
|
||||
|
||||
sudo chown -R `id -u`:`id -g` *
|
||||
#sudo chmod -R 777 *
|
||||
#sudo chmod 700 run.pl
|
||||
echo " - script finished - `date`"
|
||||
exit
|
119
knnMatch_exif.py
119
knnMatch_exif.py
|
@ -1,119 +0,0 @@
|
|||
import os
|
||||
import math
|
||||
import subprocess
|
||||
|
||||
# Purpose:
|
||||
# Preselect camera pairs for meature matching based by using GPS exif data.
|
||||
|
||||
def preselect_pairs(utm_extractor_path, image_list, k_distance, use_knn_mode = True, verbose = False):
|
||||
|
||||
# Parameterss:
|
||||
# utm_extractor_path: Path to the odm utm_extraction program that parses exif data.
|
||||
# images_path: Path to the folder containing the images.
|
||||
# k_distance: Nearest neighbor count in case of use_knn_mode = True, otherwise
|
||||
# the minimum distance between cameras to be matched.
|
||||
# use_knn_mode: True if knn mode is used to preselect matches, False for distance thresholding.
|
||||
|
||||
# Temporary files
|
||||
image_list_file = 'image_list_tmp.txt'
|
||||
utm_coord_file = 'utm_coord_tmp.txt'
|
||||
|
||||
# Parse the list of files with successfullt detectd points.
|
||||
image_folder = ""
|
||||
filenames = []
|
||||
with open(image_list, 'r') as keypoint_file:
|
||||
first_line = keypoint_file.readline()
|
||||
image_folder = first_line[:first_line.rfind("/")]
|
||||
keypoint_file.seek(0)
|
||||
for line in keypoint_file:
|
||||
filename = line[line.rfind("/")+1:line.rfind(".")]
|
||||
filename += ".jpg"
|
||||
filenames.append(filename)
|
||||
|
||||
data = open(image_list_file, "w")
|
||||
for filename in filenames:
|
||||
data.write("%s\n" % filename)
|
||||
data.close()
|
||||
|
||||
|
||||
# Define call parameters for calling the odm_extract_utm module
|
||||
utm_cmd = ''
|
||||
if verbose:
|
||||
utm_cmd = [utm_extractor_path, '-verbose', '-imagesPath', image_folder, \
|
||||
'-imageListFile', image_list_file, '-outputCoordFile', utm_coord_file]
|
||||
else:
|
||||
utm_cmd = [utm_extractor_path, '-imagesPath', image_folder, '-imageListFile', \
|
||||
image_list_file, '-outputCoordFile', utm_coord_file]
|
||||
|
||||
# Perform UTM extraction
|
||||
utm_process = subprocess.Popen(utm_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
output, err = utm_process.communicate()
|
||||
|
||||
# Check return code
|
||||
if utm_process.returncode != 0:
|
||||
print "Error when performing utm_extraction, dumping log file:"
|
||||
print output
|
||||
os.remove(image_list_file)
|
||||
return []
|
||||
|
||||
# Parse odm_extract_utm output
|
||||
coords = []
|
||||
with open('utm_coord_tmp.txt', 'r') as coord_file:
|
||||
# Skip header lines
|
||||
next(coord_file)
|
||||
next(coord_file)
|
||||
for line in coord_file:
|
||||
first_space = line.find(' ')
|
||||
second_space = first_space + line[first_space+1:].find(' ')
|
||||
xCoord = float(line[0: first_space])
|
||||
yCoord = float(line[first_space+1: second_space+1])
|
||||
coords.append([xCoord, yCoord])
|
||||
|
||||
# Calculate distances between camera pairs
|
||||
distances = []
|
||||
for xo,yo in coords:
|
||||
distances.append([])
|
||||
for xi, yi in coords:
|
||||
current_image = len(distances)
|
||||
xDist = xo-xi
|
||||
yDist = yo-yi
|
||||
distance = math.sqrt(xDist*xDist + yDist*yDist)
|
||||
current_image = len(distances[-1])
|
||||
distances[-1].append([current_image, distance, False])
|
||||
distances[-1].sort(key=lambda tup: tup[1])
|
||||
|
||||
# Select matched pairs and make there are no doubles
|
||||
matched_pairs = []
|
||||
if use_knn_mode:
|
||||
# Make sure that image count > k
|
||||
if len(coords) <= k_distance:
|
||||
print "Warning, k >= image count, the result will be equivalent to exhaustive matching"
|
||||
k_distance = len(coords)-1
|
||||
for outer_index, sorted_list in enumerate(distances):
|
||||
# Skip first element as distance will always be zero
|
||||
for sorted_index in xrange(1, k_distance+1):
|
||||
image_index, distance, dummy = distances[outer_index][sorted_index]
|
||||
is_added = distances[outer_index][image_index][2]
|
||||
if not is_added:
|
||||
matched_pairs.append([outer_index, image_index])
|
||||
distances[outer_index][image_index][2] = True
|
||||
distances[image_index][outer_index][2] = True
|
||||
else: # Distance mode
|
||||
for outer_index, sorted_list in enumerate(distances):
|
||||
# Skip first element as distance will always be zero
|
||||
for image_index, distance, dummy in sorted_list:
|
||||
if outer_index == image_index:
|
||||
continue
|
||||
if distance > k_distance:
|
||||
break
|
||||
is_added = distances[outer_index][image_index][2]
|
||||
if not is_added:
|
||||
matched_pairs.append([outer_index, image_index])
|
||||
distances[outer_index][image_index][2] = True
|
||||
distances[image_index][outer_index][2] = True
|
||||
|
||||
# Remove temporary files
|
||||
os.remove(image_list_file)
|
||||
os.remove(utm_coord_file)
|
||||
|
||||
return matched_pairs
|
|
@ -0,0 +1,7 @@
|
|||
# Add ODM sub-modules
|
||||
add_subdirectory(odm_extract_utm)
|
||||
add_subdirectory(odm_georef)
|
||||
add_subdirectory(odm_meshing)
|
||||
add_subdirectory(odm_orthophoto)
|
||||
add_subdirectory(odm_texturing)
|
||||
|
|
@ -17,7 +17,7 @@ find_package(OpenCV HINTS "${OPENCV_DIR}" REQUIRED)
|
|||
# Only link with required opencv modules.
|
||||
set(OpenCV_LIBS opencv_core opencv_imgproc opencv_highgui)
|
||||
|
||||
# Add the PCL, Eigen and OpenCV include dirs.
|
||||
# Add the PCL, Eigen and OpenCV include dirs.
|
||||
# Necessary since the PCL_INCLUDE_DIR variable set by find_package is broken.)
|
||||
include_directories(${PCL_ROOT}/include/pcl-${PCL_VERSION_MAJOR}.${PCL_VERSION_MINOR})
|
||||
include_directories(${EIGEN_ROOT})
|
||||
|
@ -30,5 +30,4 @@ aux_source_directory("./src" SRC_LIST)
|
|||
|
||||
# Add exectuteable
|
||||
add_executable(${PROJECT_NAME} ${SRC_LIST})
|
||||
target_link_libraries(odm_orthophoto ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES} ${OpenCV_LIBS})
|
||||
|
||||
target_link_libraries(odm_orthophoto ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES} ${OpenCV_LIBS})
|
|
@ -342,7 +342,7 @@ void OdmTexturing::loadCameras()
|
|||
cam.pose = transform;
|
||||
|
||||
std::getline(imageListFile, dummyLine);
|
||||
size_t firstWhitespace = dummyLine.find_first_of(" ");
|
||||
/*size_t firstWhitespace = dummyLine.find_first_of(" ");
|
||||
|
||||
if (firstWhitespace != std::string::npos)
|
||||
{
|
||||
|
@ -351,7 +351,12 @@ void OdmTexturing::loadCameras()
|
|||
else
|
||||
{
|
||||
cam.texture_file = imagesPath_ + "/" + dummyLine.substr(2);
|
||||
}
|
||||
}*/
|
||||
|
||||
if (imagesPath_.size() == 0)
|
||||
cam.texture_file = dummyLine;
|
||||
else
|
||||
cam.texture_file = imagesPath_ + "/" + dummyLine;
|
||||
|
||||
// Read image to get full resolution size
|
||||
cv::Mat image = cv::imread(cam.texture_file);
|
|
@ -0,0 +1,220 @@
|
|||
import argparse
|
||||
|
||||
# parse arguments
|
||||
processopts = ['resize', 'opensfm', 'cmvs', 'pmvs',
|
||||
'odm_meshing', 'odm_texturing', 'odm_georeferencing',
|
||||
'odm_orthophoto']
|
||||
|
||||
parser = argparse.ArgumentParser(description='OpenDroneMap')
|
||||
parser.add_argument('--project-path',
|
||||
metavar='<string>',
|
||||
help='Path to the project to process')
|
||||
|
||||
parser.add_argument('--resize-to', # currently doesn't support 'orig'
|
||||
metavar='<integer>',
|
||||
default=2400,
|
||||
type=int,
|
||||
help='resizes images by the largest side')
|
||||
|
||||
parser.add_argument('--start-with', '-s',
|
||||
metavar='<string>',
|
||||
default='resize',
|
||||
choices=processopts,
|
||||
help=('Can be one of: ' + ' | '.join(processopts)))
|
||||
|
||||
parser.add_argument('--end-with', '-e',
|
||||
metavar='<string>',
|
||||
default='odm_orthophoto',
|
||||
choices=processopts,
|
||||
help=('Can be one of:' + ' | '.join(processopts)))
|
||||
|
||||
parser.add_argument('--rerun', '-r',
|
||||
metavar='<string>',
|
||||
choices=processopts,
|
||||
help=('Can be one of:' + ' | '.join(processopts)))
|
||||
|
||||
parser.add_argument('--force-focal',
|
||||
metavar='<positive float>',
|
||||
type=float,
|
||||
help=('Override the focal length information for the '
|
||||
'images'))
|
||||
|
||||
parser.add_argument('--force-ccd',
|
||||
metavar='<positive float>',
|
||||
type=float,
|
||||
help='Override the ccd width information for the images')
|
||||
|
||||
parser.add_argument('--min-num-features',
|
||||
metavar='<integer>',
|
||||
default=4000,
|
||||
type=int,
|
||||
help=('Minimum number of features to extract per image. '
|
||||
'More features leads to better results but slower '
|
||||
'execution. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--matcher-threshold',
|
||||
metavar='<percent>',
|
||||
default=2.0,
|
||||
type=float,
|
||||
help=('Ignore matched keypoints if the two images share '
|
||||
'less than <float> percent of keypoints. Default:'
|
||||
' $(default)s'))
|
||||
|
||||
parser.add_argument('--matcher-ratio',
|
||||
metavar='<float>',
|
||||
default=0.6,
|
||||
type=float,
|
||||
help=('Ratio of the distance to the next best matched '
|
||||
'keypoint. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--matcher-neighbors',
|
||||
type=int,
|
||||
metavar='<integer>',
|
||||
default=8,
|
||||
help='Number of nearest images to pre-match based on GPS '
|
||||
'exif data. Set to 0 to skip pre-matching. '
|
||||
'Neighbors works together with Distance parameter, '
|
||||
'set both to 0 to not use pre-matching. OpenSFM '
|
||||
'uses both parameters at the same time, Bundler '
|
||||
'uses only one which has value, prefering the '
|
||||
'Neighbors parameter. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--matcher-distance',
|
||||
metavar='<integer>',
|
||||
default=0,
|
||||
type=int,
|
||||
help='Distance threshold in meters to find pre-matching '
|
||||
'images based on GPS exif data. Set to 0 to skip '
|
||||
'pre-matching. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--cmvs-maxImages',
|
||||
metavar='<integer>',
|
||||
default=500,
|
||||
type=int,
|
||||
help='The maximum number of images per cluster. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pmvs-level',
|
||||
metavar='<positive integer>',
|
||||
default=1,
|
||||
type=int,
|
||||
help=('The level in the image pyramid that is used '
|
||||
'for the computation. see '
|
||||
'http://www.di.ens.fr/pmvs/documentation.html for '
|
||||
'more pmvs documentation. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--pmvs-csize',
|
||||
metavar='< positive integer>',
|
||||
default=2,
|
||||
type=int,
|
||||
help='Cell size controls the density of reconstructions'
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pmvs-threshold',
|
||||
metavar='<float: -1.0 <= x <= 1.0>',
|
||||
default=0.7,
|
||||
type=float,
|
||||
help=('A patch reconstruction is accepted as a success '
|
||||
'and kept if its associated photometric consistency '
|
||||
'measure is above this threshold. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--pmvs-wsize',
|
||||
metavar='<positive integer>',
|
||||
default=7,
|
||||
type=int,
|
||||
help='pmvs samples wsize x wsize pixel colors from '
|
||||
'each image to compute photometric consistency '
|
||||
'score. For example, when wsize=7, 7x7=49 pixel '
|
||||
'colors are sampled in each image. Increasing the '
|
||||
'value leads to more stable reconstructions, but '
|
||||
'the program becomes slower. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pmvs-minImageNum',
|
||||
metavar='<positive integer>',
|
||||
default=3,
|
||||
type=int,
|
||||
help=('Each 3D point must be visible in at least '
|
||||
'minImageNum images for being reconstructed. 3 is '
|
||||
'suggested in general. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--pmvs-num-cores',
|
||||
metavar='<positive integer>',
|
||||
default=1,
|
||||
type=int,
|
||||
help=('The maximum number of cores to use in dense '
|
||||
'reconstruction. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--odm_meshing-maxVertexCount',
|
||||
metavar='<positive integer>',
|
||||
default=100000,
|
||||
type=int,
|
||||
help=('The maximum vertex count of the output mesh '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--odm_meshing-octreeDepth',
|
||||
metavar='<positive integer>',
|
||||
default=9,
|
||||
type=int,
|
||||
help=('Oct-tree depth used in the mesh reconstruction, '
|
||||
'increase to get more vertices, recommended '
|
||||
'values are 8-12. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--odm_meshing-samplesPerNode',
|
||||
metavar='<float >= 1.0>',
|
||||
default=1.0,
|
||||
type=float,
|
||||
help=('Number of points per octree node, recommended '
|
||||
'and default value: %(default)s'))
|
||||
|
||||
parser.add_argument('--odm_meshing-solverDivide',
|
||||
metavar='<positive integer>',
|
||||
default=9,
|
||||
type=int,
|
||||
help=('Oct-tree depth at which the Laplacian equation '
|
||||
'is solved in the surface reconstruction step. '
|
||||
'Increasing this value increases computation '
|
||||
'times slightly but helps reduce memory usage. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--odm_texturing-textureResolution',
|
||||
metavar='<positive integer>',
|
||||
default=4096,
|
||||
type=int,
|
||||
help=('The resolution of the output textures. Must be '
|
||||
'greater than textureWithSize. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--odm_texturing-textureWithSize',
|
||||
metavar='<positive integer>',
|
||||
default=3600,
|
||||
type=int,
|
||||
help=('The resolution to rescale the images performing '
|
||||
'the texturing. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--odm_georeferencing-gcpFile',
|
||||
metavar='<path string>',
|
||||
default='gcp_list.txt',
|
||||
help=('path to the file containing the ground control '
|
||||
'points used for georeferencing. Default: '
|
||||
'%(default)s. The file needs to '
|
||||
'be on the following line format: \neasting '
|
||||
'northing height pixelrow pixelcol imagename'))
|
||||
|
||||
parser.add_argument('--odm_georeferencing-useGcp',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'Enabling GCPs from the file above. The GCP file '
|
||||
'is not used by default.')
|
||||
|
||||
parser.add_argument('--odm_orthophoto-resolution',
|
||||
metavar='<float > 0.0>',
|
||||
default=20.0,
|
||||
type=float,
|
||||
help=('Orthophoto ground resolution in pixels/meter'
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--zip-results',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='compress the results using gunzip')
|
||||
|
||||
args = vars(parser.parse_args())
|
|
@ -0,0 +1,34 @@
|
|||
import os
import sys
import multiprocessing

# Locate this script and the repository root (one level above it).
scripts_path = os.path.abspath(os.path.dirname(__file__))
root_path, _ = os.path.split(scripts_path)

# SuperBuild tree holds all compiled third-party dependencies.
superbuild_path = os.path.join(root_path, 'SuperBuild')
# JSON database mapping camera models to CCD sensor widths (mm).
ccd_widths_path = os.path.join(root_path, 'data/ccd_defs.json')

# Make the SuperBuild-installed OpenCV python bindings importable.
# NOTE(review): hard-coded to python2.7 -- matches the py2 syntax used
# elsewhere in this codebase.
pyopencv_path = os.path.join(superbuild_path, 'install/lib/python2.7/dist-packages')
sys.path.append(pyopencv_path)

# OpenSfM checkout (used as a source tree, not an installed package).
opensfm_path = os.path.join(superbuild_path, "src/opensfm")

# CMVS/PMVS dense-reconstruction binaries installed by the SuperBuild.
cmvs_path = os.path.join(superbuild_path, "install/bin/cmvs")
cmvs_opts_path = os.path.join(superbuild_path, "install/bin/genOption")
pmvs2_path = os.path.join(superbuild_path, "install/bin/pmvs2")

# LAStools directory containing the txt2las converter.
txt2las_path = os.path.join(superbuild_path, 'src/las-tools/bin')

# Compiled ODM C++ modules (odm_meshing, odm_texturing, ...).
odm_modules_path = os.path.join(root_path, "build/bin")

# Image extensions accepted by the dataset loader (lower-case, with dot).
supported_extensions = {'.jpg','.jpeg','.png'}

# Number of logical CPUs, used as the default parallelism everywhere.
num_cores = multiprocessing.cpu_count()
|
|
@ -0,0 +1,25 @@
|
|||
import os
|
||||
|
||||
def get_files_list(path_dir):
    """Return the entry names found directly inside directory `path_dir`."""
    entries = os.listdir(path_dir)
    return entries
|
||||
|
||||
def absolute_path_file(path_file):
    """Return `path_file` as an absolute, normalized path."""
    absolute = os.path.abspath(path_file)
    return absolute
|
||||
|
||||
def extract_file_from_path_file(path_file):
    """Return only the final (file name) component of `path_file`."""
    _, filename = os.path.split(path_file)
    return filename
|
||||
|
||||
def extract_path_from_file(file):
    """Return the parent of the directory that contains `file`.

    Note: this intentionally strips TWO components -- first the file name
    (via dirname), then the innermost directory (via split).
    """
    containing_dir = os.path.abspath(os.path.dirname(file))
    parent, _ = os.path.split(containing_dir)
    return parent
|
||||
|
||||
def join_paths(path1, path2):
    """Join two path fragments with the platform separator."""
    joined = os.path.join(path1, path2)
    return joined
|
||||
|
||||
def file_exists(path_file):
    """Return True when `path_file` names an existing regular file."""
    exists = os.path.isfile(path_file)
    return exists
|
||||
|
||||
def dir_exists(dirname):
    """Return True when `dirname` names an existing directory."""
    exists = os.path.isdir(dirname)
    return exists
|
|
@ -0,0 +1,18 @@
|
|||
# ANSI terminal color escape codes used to colorize log output.
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'  # reset to default color

# Simple leveled loggers. Python 2 print statements; each message is
# wrapped in a color code and terminated with a reset.
# NOTE(review): the parameter shadows the builtin `str` -- kept for
# interface compatibility.

def ODM_INFO(str):
    print OKBLUE + '[INFO] ' + str + ENDC

def ODM_WARNING(str):
    print WARNING + '[WARNING] ' + str + ENDC

def ODM_ERROR(str):
    print FAIL + '[ERROR] ' + str + ENDC

def ODM_DEBUG(str):
    print OKGREEN + '[DEBUG] ' + str + ENDC
|
|
@ -0,0 +1,75 @@
|
|||
import os
|
||||
import errno
|
||||
import json
|
||||
import datetime
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
from opendm import context
|
||||
from opendm import log
|
||||
|
||||
def get_ccd_widths():
    """Return the CCD Width of the camera listed in the JSON defs file."""
    with open(context.ccd_widths_path) as jsonFile:
        widths = json.load(jsonFile)
    return widths
|
||||
|
||||
def run(cmd):
    """Run a system command"""
    log.ODM_DEBUG('running %s' % cmd)
    status = os.system(cmd)
    if status == 0:
        return
    # Non-zero exit code: abort the whole pipeline with the failing command.
    # TODO(edgar): add as log.ODM_ERROR
    sys.exit("\nquitting cause: \n\t" + cmd + "\nreturned with code " +
             str(status) + ".\n")
|
||||
|
||||
def now():
    """Return the current time"""
    current = datetime.datetime.now()
    return current.strftime('%a %b %d %H:%M:%S %Z %Y')
|
||||
|
||||
def run_and_return(cmdSrc, cmdDest=None):
    """Run a system command and return the output"""
    # NOTE: cmdDest is accepted for interface compatibility but unused.
    proc = subprocess.Popen(cmdSrc, stdout=subprocess.PIPE, shell=True)
    out, _ = proc.communicate()
    return out.decode('ascii')
|
||||
|
||||
|
||||
def mkdir_p(path):
    '''Make a directory including parent directories.
    Silently succeeds when the directory already exists.
    '''
    try:
        os.makedirs(path)
    except os.error as exc:
        # Swallow only "already exists as a directory"; re-raise the rest.
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            return
        raise
|
||||
|
||||
|
||||
def calculate_EPSG(utmZone, south):
    """Calculate and return the EPSG"""
    # WGS84/UTM codes: 326xx for the northern hemisphere, 327xx southern.
    base = 32700 if south else 32600
    return base + utmZone
|
||||
|
||||
|
||||
def parse_coordinate_system():
    """Write attributes to jobOptions from coord file"""
    # NOTE(review): relies on a module-global `jobOptions` dict that is
    # defined elsewhere -- confirm it is populated before this is called.
    if os.path.isfile(jobOptions['jobDir'] +
                      '/odm_georeferencing/coordFile.txt'):
        with open(jobOptions['jobDir'] + '/odm_georeferencing/coordFile.txt') as f:
            for lineNumber, line in enumerate(f):
                # Line 0 is expected like 'WGS84 UTM 17N': token 2 holds
                # the zone digits followed by the hemisphere letter.
                if lineNumber == 0:
                    tokens = line.split(' ')
                    if len(tokens) == 3:
                        # Strip the trailing hemisphere letter + newline.
                        # NOTE(review): this indexing assumes a 2-character
                        # zone plus newline -- verify for 1-digit zones.
                        utmZoneString = tokens[2][0:len(tokens[2])-2].strip()
                        utmSouthBool = (tokens[2][len(tokens[2])-2].strip() == 'S')
                        # proj4 string and numeric EPSG code for this zone.
                        jobOptions['csString'] = '+datum=WGS84 +proj=utm +zone=' \
                            + utmZoneString + (' +south' if utmSouthBool else '')
                        jobOptions['epsg'] = calculate_EPSG(int(utmZoneString), utmSouthBool)
                # Line 1 holds the east/north UTM offsets, e.g. '440143 4588391'.
                elif lineNumber == 1:
                    tokens = line.split(' ')
                    if len(tokens) == 2:
                        jobOptions['utmEastOffset'] = int(tokens[0].strip())
                        jobOptions['utmNorthOffset'] = int(tokens[1].strip())
                # Remaining lines are ignored.
                else:
                    break
|
|
@ -0,0 +1,150 @@
|
|||
import log
|
||||
import system
|
||||
|
||||
import dataset
|
||||
import types
|
||||
|
||||
from scripts.resize import resize
|
||||
from scripts.opensfm import opensfm
|
||||
|
||||
# Ordered pipeline stages, keyed by their 1-based position as a string.
tasks_dict = {str(position + 1): stage
              for position, stage in enumerate(['resize',
                                                'opensfm',
                                                'cmvs',
                                                'pmvs',
                                                'odm_meshing',
                                                'odm_texturing',
                                                'odm_georeferencing',
                                                'odm_orthophoto',
                                                'zip_results'])}
|
||||
|
||||
|
||||
class ODMTaskManager(object):
    """Builds the ordered ODM pipeline tasks and runs them in sequence."""

    def __init__(self, odm_app):
        self.odm_app = odm_app
        # Task positions: tasks_dict is keyed '1'..'9'; initial_task_id is
        # the 0-based index of the first stage to run.
        self.initial_task_id = 0
        self.current_task_id = 0
        self.final_task_id = len(tasks_dict)
        self.tasks = self.init_tasks(tasks_dict, self.odm_app)

    def init_tasks(self, _tasks_dict, _odm_app):
        """Instantiate one ODMTask per pipeline stage and wire its command
        and keyword inputs. Returns a dict keyed like `_tasks_dict`."""
        tasks = {}
        # BUGFIX: original iterated `for key, in _tasks_dict:` -- a tuple
        # unpacking that only worked because every key is a one-character
        # string. Iterate the keys directly.
        for key in _tasks_dict:
            task_name = _tasks_dict[key]
            tasks[key] = ODMTask(key, task_name)

            if task_name == 'resize':
                command = resize
                inputs = {'project_path': _odm_app.project_path,
                          'args': _odm_app.args,
                          'photos': _odm_app.photos}

            elif task_name == 'opensfm':
                command = opensfm
                inputs = {'project_path': _odm_app.project_path,
                          'args': _odm_app.args,
                          'photos': _odm_app.photos}

            elif task_name in ('cmvs', 'pmvs', 'odm_meshing',
                               'odm_texturing', 'odm_georeferencing',
                               'odm_orthophoto', 'zip_results'):
                # Stages not wired up yet: registered but with no command,
                # so launch_command() reports them as failed.
                command = None
                inputs = {}

            else:
                log.ODM_ERROR('task_name %s is not valid' % task_name)

            # attach the chosen command/inputs to the task object
            task = tasks[key]
            task.command = command
            task.inputs = inputs

        return tasks

    def run_tasks(self):
        """Run every task from the initial stage to the final one."""
        # BUGFIX: original iterated range(initial, final + 1) which produced
        # key '0' (and ran one task too many) -- tasks are keyed '1'..'9'.
        for id in range(self.initial_task_id + 1, self.final_task_id + 1):
            # catch task with current id
            task = self.tasks[str(id)]
            # update task tracking
            log.ODM_INFO('Running task %s: %s' % (task.id, task.name))
            self.current_task_id = task.id
            # run task
            task.state = task.run()
            if task.state == 2:
                log.ODM_INFO('Succeeded task %s: %s - %s' % (task.id, task.name, system.now()))
            else:
                log.ODM_ERROR('Aborted task %s: %s' % (task.id, task.name))
||||
class ODMTask(object):
|
||||
"""docstring for ODMTask"""
|
||||
def __init__(self, id, name):
|
||||
# task definition
|
||||
self.id = id
|
||||
self.name = name
|
||||
# task i/o
|
||||
self.command = None
|
||||
self.inputs = {}
|
||||
# Current task state (0:waiting, 1:running, 2:succeded: 3:failed)
|
||||
# By default we set a task in waiting state
|
||||
self.state = 0
|
||||
|
||||
# Launch task
|
||||
def run(self):
|
||||
# while doing something
|
||||
self.state = 1
|
||||
return self.launch_command()
|
||||
|
||||
def launch_command(self):
|
||||
if self.command is None:
|
||||
log.ODM_ERROR('Call method for task %s not defined' % self.name)
|
||||
return 3 # failed
|
||||
# run conmmand
|
||||
try:
|
||||
succeed = self.command(**self.inputs)
|
||||
return 2 if succeed else 3 # 2:succeed, 3:failed
|
||||
except Exception, e:
|
||||
log.ODM_ERROR(str(e))
|
||||
return 3 # failed
|
|
@ -0,0 +1,339 @@
|
|||
import os
|
||||
import cv2
|
||||
import pyexiv2
|
||||
import subprocess
|
||||
import re
|
||||
from fractions import Fraction
|
||||
|
||||
import log
|
||||
import io
|
||||
import system
|
||||
import context
|
||||
|
||||
class ODM_Photo:
    """ ODMPhoto - a class for ODMPhotos

    Loads one image, reads its EXIF metadata with pyexiv2, determines its
    pixel dimensions with OpenCV, and derives the focal length in pixels
    from the focal length (mm) and CCD sensor width (mm).
    """
    def __init__(self, path_file, force_focal, force_ccd):
        # general purpose
        self.path_file = path_file
        self.filename = io.extract_file_from_path_file(path_file)
        # useful attributes (filled in by parse_pyexiv2_values/update_focal)
        self.width = None
        self.height = None
        self.ccd_width = None          # sensor width in mm
        self.focal_length = None       # focal length in mm
        self.focal_length_px = None    # focal length converted to pixels
        # other attributes
        self.camera_make = None
        self.camera_model = None
        # parse values from metadata (may be overridden by force_* args)
        self.parse_pyexiv2_values(self.path_file, force_focal, force_ccd)
        # compute focal length into pixels
        self.update_focal()

        # print log message
        log.ODM_DEBUG('Loaded %s | dimensions: %s x %s | focal: %s | ccd: %s' % \
            (self.filename, self.width, self.height, self.focal_length, self.ccd_width))

    def update_focal(self):
        # compute focal length in pixels; requires both focal (mm) and ccd (mm)
        if self.focal_length and self.ccd_width:
            # take width or height as reference, whichever is larger
            if self.width > self.height:
                # f(px) = w(px) * f(mm) / ccd(mm)
                self.focal_length_px = \
                    self.width * (self.focal_length / self.ccd_width)
            else:
                # f(px) = h(px) * f(mm) / ccd(mm)
                self.focal_length_px = \
                    self.height * (self.focal_length / self.ccd_width)

    def parse_pyexiv2_values(self, _path_file, _force_focal, _force_ccd):
        # read image metadata
        metadata = pyexiv2.ImageMetadata(_path_file)
        metadata.read()
        # loop over image tags, picking out make/model/focal length
        for key in metadata:
            # try/catch tag value due to weird bug in pyexiv2
            # ValueError: invalid literal for int() with base 10: ''
            try:
                val = metadata[key].value
                # parse tag names
                if key == 'Exif.Image.Make': self.camera_make = val
                elif key == 'Exif.Image.Model': self.camera_model = val
                elif key == 'Exif.Photo.FocalLength': self.focal_length = float(val)
            except Exception, e:
                pass

        # needed to do that since sometimes metadata contains wrong data:
        # read the real pixel dimensions by decoding the image itself
        img = cv2.imread(_path_file)
        self.width = img.shape[1]
        self.height = img.shape[0]

        # force focal and ccd_width with user parameter
        if _force_focal: self.focal_length = _force_focal
        if _force_ccd: self.ccd_width = _force_ccd

        # find ccd_width from the JSON database if needed
        if self.ccd_width is None and self.camera_model is not None:
            # load ccd_widths from file
            ccd_widths = system.get_ccd_widths()
            # search ccd by camera model (substring match against DB keys)
            key = [x for x in ccd_widths.keys() if self.camera_model in x]
            # convert to float if found; otherwise ccd_width stays None
            if key: self.ccd_width = float(ccd_widths[key[0]])
            # else:
            #     log.ODM_ERROR('Could not find ccd_width in file')
|
||||
|
||||
|
||||
# TODO: finish this class
class ODM_Reconstruction(object):
    """Placeholder container for a reconstruction result."""
    def __init__(self, arg):
        # BUGFIX: original called super(ODMReconstruction, self) -- a
        # misspelling of the class name that raised NameError on every
        # instantiation.
        super(ODM_Reconstruction, self).__init__()
        self.arg = arg
|
||||
|
||||
|
||||
class ODM_GCPoint(object):
    """A single ground-control point in 3-D space."""

    def __init__(self, x, y, z):
        # cartesian coordinates of the point
        self.x, self.y, self.z = x, y, z
|
||||
|
||||
|
||||
class ODM_GeoRef(object):
    """Georeferencing state for a reconstruction: datum, UTM zone/pole,
    east/north offsets, EPSG code and the list of ground-control points."""

    def __init__(self):
        self.datum = 'WGS84'
        self.epsg = None
        self.utm_zone = 0
        self.utm_pole = 'N'
        self.utm_east_offset = 0
        self.utm_north_offset = 0
        self.gcps = []

    def calculate_EPSG(self, _utm_zone, _pole):
        """Calculate and return the EPSG"""
        # WGS84/UTM codes: 326xx north, 327xx south.
        if _pole == 'S':
            return 32700 + _utm_zone
        elif _pole == 'N':
            return 32600 + _utm_zone
        else:
            log.ODM_ERROR('Unknown pole format %s' % _pole)
            return

    def convert_to_las(self, _file):
        """Convert the given xyz text file to a compressed LAS (.laz) file
        using LAStools' txt2las, applying the stored UTM offsets and EPSG."""
        if not self.epsg:
            log.ODM_ERROR('Empty EPSG: Could not convert to LAS')
            return

        kwargs = { 'bin': context.txt2las_path,
                   'f_in': _file,
                   'f_out': _file + '.laz',
                   'east': self.utm_east_offset,
                   'north': self.utm_north_offset,
                   'epsg': self.epsg }

        # call txt2las
        system.run('{bin}/txt2las -i {f_in} -o {f_out} -skip 30 -parse xyzRGBssss ' \
                   '-set_scale 0.01 0.01 0.01 -set_offset {east} {north} 0 ' \
                   '-translate_xyz 0 -epsg {epsg}'.format(**kwargs))

    def utm_to_latlon(self, _file, _photo, idx):
        """Convert GCP `idx` from UTM to lat/lon via the proj `cs2cs` tool
        and write the resulting GPS tags into the photo's EXIF metadata."""
        gcp = self.gcps[idx]

        kwargs = { 'datum': self.datum,
                   'zone': self.utm_zone,
                   'file': _file,
                   'x': gcp.x + self.utm_east_offset,
                   'y': gcp.y + self.utm_north_offset,
                   'z': gcp.z }

        latlon = system.run_and_return('echo {x} {y} | cs2cs +proj=utm ' \
            '+datum={datum} +ellps={datum} +zone={zone} +units=m +to ' \
            '+proj=latlon +ellps={datum}'.format(**kwargs)).split()

        # cs2cs output examples:
        #   83d18'16.285"W   41d2'11.789"N   0.998
        if len(latlon) == 3:
            lon_str, lat_str, alt_str = latlon
        elif len(latlon) == 2:
            lon_str, lat_str = latlon
            alt_str = ''
        else:
            log.ODM_ERROR('Something went wrong %s' % latlon)
            # BUGFIX: original fell through here and raised NameError on
            # lon_str below; bail out instead.
            return

        # split 83d18'16.285"W into degrees / minutes / seconds / hemisphere
        tokens = re.split("[d '\"]+", lon_str)
        if len(tokens) >= 4:
            lon_deg, lon_min, lon_sec = tokens[:3]
            lon_sec_frac = Fraction(lon_sec)
            lon_sec_numerator = str(lon_sec_frac._numerator)
            lon_sec_denominator = str(lon_sec_frac._denominator)
            lon_ref = tokens[3]

        tokens = re.split("[d '\"]+", lat_str)
        if len(tokens) >= 4:
            lat_deg, lat_min, lat_sec = tokens[:3]
            lat_sec_frac = Fraction(lat_sec)
            lat_sec_numerator = str(lat_sec_frac._numerator)
            lat_sec_denominator = str(lat_sec_frac._denominator)
            lat_ref = tokens[3]

        # NOTE: dead alt/arc_denominator parsing removed -- the altitude tag
        # below is written from gcp.z, and those locals were never read.

        # read image metadata
        metadata = pyexiv2.ImageMetadata(_photo.path_file)
        metadata.read()

        ## set values

        # GPS latitude
        key = 'Exif.GPSInfo.GPSLatitude'
        value = [Fraction(int(lat_deg), 1), Fraction(int(lat_min), 1), \
                 Fraction(int(lat_sec_numerator), int(lat_sec_denominator))]
        metadata[key] = pyexiv2.ExifTag(key, value)

        key = 'Exif.GPSInfo.GPSLatitudeRef'
        value = '%s' % lat_ref
        metadata[key] = pyexiv2.ExifTag(key, value)

        # GPS longitude
        key = 'Exif.GPSInfo.GPSLongitude'
        value = [Fraction(int(lon_deg), 1), Fraction(int(lon_min), 1), \
                 Fraction(int(lon_sec_numerator), int(lon_sec_denominator))]
        metadata[key] = pyexiv2.ExifTag(key, value)

        key = 'Exif.GPSInfo.GPSLongitudeRef'
        value = '%s' % lon_ref
        metadata[key] = pyexiv2.ExifTag(key, value)

        # GPS altitude (taken from the GCP's z, not from cs2cs output)
        key = 'Exif.GPSInfo.GPSAltitude'
        value = Fraction(int(gcp.z), 1)
        metadata[key] = pyexiv2.ExifTag(key, value)

        key = 'Exif.GPSInfo.GPSAltitudeRef'
        metadata[key] = pyexiv2.ExifTag(key, '0')

        ## write values
        metadata.write()

    def parse_coordinate_system(self, _file):
        """Write attributes to jobOptions from coord file"""
        # check for coordinate file existence
        if not io.file_exists(_file):
            # BUGFIX: original referenced undefined `_coords_file` here,
            # turning a missing-file report into a NameError.
            log.ODM_ERROR('Could not find file %s' % _file)
            return

        with open(_file) as f:
            # extract reference system and utm zone from first line.
            # We will assume the following format:
            # 'WGS84 UTM 17N'
            line = f.readline().split(' ')
            self.datum = line[0]
            # BUGFIX: original sliced the zone token with len(line) -- the
            # token COUNT (always 3) -- instead of the token's own length,
            # which only worked for 2-digit zones. Parse the stripped token:
            # last character is the pole, the rest is the zone number.
            zone_token = line[2].strip()
            self.utm_pole = zone_token[len(zone_token) - 1]
            self.utm_zone = int(zone_token[:len(zone_token) - 1])
            # extract east and west offsets from second line.
            # We will assume the following format:
            # '440143 4588391'
            line = f.readline().split(' ')
            self.utm_east_offset = int(line[0])
            self.utm_north_offset = int(line[1])
            # parse coordinates: one 'x y z ...' line per GCP
            lines = f.readlines()
            for l in lines:
                x, y, z = l.split(' ')[:3]
                self.gcps.append(ODM_GCPoint(float(x), float(y), float(z)))

        # update EPSG
        self.epsg = self.calculate_EPSG(self.utm_zone, self.utm_pole)
|
||||
|
||||
class ODM_Tree(object):
    # Central registry of every directory and file path used by the
    # pipeline, so all cells agree on where artifacts live.
    def __init__(self, root_path):
        ### root path to the project
        self.root_path = io.absolute_path_file(root_path)

        ### modules paths

        # here are defined where all modules should be located in
        # order to keep track of all files and directories during the
        # whole reconstruction process.
        self.dataset_raw = io.join_paths(self.root_path, 'images')
        self.dataset_resize = io.join_paths(self.root_path, 'images_resize')
        self.opensfm = io.join_paths(self.root_path, 'opensfm')
        self.pmvs = io.join_paths(self.root_path, 'pmvs')
        self.odm_meshing = io.join_paths(self.root_path, 'odm_meshing')
        self.odm_texturing = io.join_paths(self.root_path, 'odm_texturing')
        self.odm_georeferencing = io.join_paths(self.root_path, 'odm_georeferencing')
        self.odm_orthophoto = io.join_paths(self.root_path, 'odm_orthophoto')

        ### important files paths

        # opensfm: bundler-format outputs plus the reconstruction json
        self.opensfm_bundle = io.join_paths(self.opensfm, 'bundle_r000.out')
        self.opensfm_bundle_list = io.join_paths(self.opensfm, 'list_r000.out')
        self.opensfm_image_list = io.join_paths(self.opensfm, 'image_list.txt')
        self.opensfm_reconstruction = io.join_paths(self.opensfm, 'reconstruction.json')

        # pmvs: dense reconstruction inputs/outputs under recon0
        self.pmvs_rec_path = io.join_paths(self.pmvs, 'recon0')
        self.pmvs_bundle = io.join_paths(self.pmvs_rec_path, 'bundle.rd.out')
        self.pmvs_visdat = io.join_paths(self.pmvs_rec_path, 'vis.dat')
        self.pmvs_options = io.join_paths(self.pmvs_rec_path, 'pmvs_options.txt')
        self.pmvs_model = io.join_paths(self.pmvs_rec_path, 'models/option-0000.ply')

        # odm_meshing
        self.odm_mesh = io.join_paths(self.odm_meshing, 'odm_mesh.ply')
        self.odm_meshing_log = io.join_paths(self.odm_meshing, 'odm_meshing_log.txt')

        # odm_texturing: plain and georeferenced (_geo) model variants
        self.odm_textured_model_obj = io.join_paths(
            self.odm_texturing, 'odm_textured_model.obj')
        self.odm_textured_model_mtl = io.join_paths(
            self.odm_texturing, 'odm_textured_model.mtl')
        self.odm_textured_model_txt_geo = io.join_paths(
            self.odm_texturing, 'odm_textured_model_geo.txt')
        self.odm_textured_model_ply_geo = io.join_paths(
            self.odm_texturing, 'odm_textured_model_geo.ply')
        self.odm_textured_model_obj_geo = io.join_paths(
            self.odm_texturing, 'odm_textured_model_geo.obj')
        self.odm_textured_model_mtl_geo = io.join_paths(
            self.odm_texturing, 'odm_textured_model_geo.mtl')
        self.odm_texuring_log = io.join_paths(
            self.odm_texturing, 'odm_texturing_log.txt')

        # odm_georeferencing
        self.odm_georeferencing_latlon = io.join_paths(
            self.odm_georeferencing, 'latlon.txt')
        self.odm_georeferencing_coords = io.join_paths(
            self.odm_georeferencing, 'coords.txt')
        self.odm_georeferencing_gcp = io.join_paths(
            self.odm_georeferencing, 'gcp_list.txt')
        self.odm_georeferencing_utm_log = io.join_paths(
            self.odm_georeferencing, 'odm_georeferencing_utm_log.txt')
        self.odm_georeferencing_log = io.join_paths(
            self.odm_georeferencing, 'odm_georeferencing_log.txt')

        # odm_orthophoto
        self.odm_orthophoto_file = io.join_paths(self.odm_orthophoto, 'odm_orthophoto.png')
        # NOTE(review): 'odm_orthphoto_corners.txt' is missing an 'o';
        # left as-is because consumers may expect this exact filename.
        self.odm_orthophoto_corners = io.join_paths(self.odm_orthophoto, 'odm_orthphoto_corners.txt')
        self.odm_orthophoto_log = io.join_paths(self.odm_orthophoto, 'odm_orthophoto_log.txt')
||||
|
Diff file is too large to display
Load Diff
|
@ -0,0 +1,56 @@
|
|||
import ecto
|
||||
|
||||
from opendm import io
|
||||
from opendm import log
|
||||
from opendm import system
|
||||
from opendm import context
|
||||
|
||||
class ODMCmvsCell(ecto.Cell):
    """Ecto cell that clusters the SfM bundle with CMVS so PMVS can
    densify it in bounded-size image clusters."""

    def declare_params(self, params):
        params.declare("max_images", 'The maximum number of images '
                       'per cluster', 500)
        params.declare("cores", 'The maximum number of cores to use '
                       'in dense reconstruction.', context.num_cores)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        inputs.declare("args", "Struct with paths", [])
        inputs.declare("reconstruction", "list of ODMReconstructions", [])
        outputs.declare("reconstruction", "list of ODMReconstructions", [])

    def process(self, inputs, outputs):
        # BUGFIX: log messages misspelled 'ODM' as 'OMD' and
        # 'Writing' as 'Writting'.
        log.ODM_INFO('Running ODM CMVS Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # check if we rerun cell or not
        rerun_cell = args['rerun'] is not None \
            and args['rerun'] == 'cmvs'

        if not io.file_exists(tree.pmvs_bundle) or rerun_cell:
            log.ODM_DEBUG('Writing CMVS vis in: %s' % tree.pmvs_bundle)

            # copy bundle file to pmvs dir
            from shutil import copyfile
            copyfile(tree.opensfm_bundle,
                     tree.pmvs_bundle)

            kwargs = {
                'bin': context.cmvs_path,
                'prefix': self.inputs.tree.pmvs_rec_path,
                'max_images': self.params.max_images,
                'cores': self.params.cores
            }

            # run cmvs
            system.run('{bin} {prefix}/ {max_images} {cores}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid CMVS file in: %s' %
                            (tree.pmvs_bundle))

        log.ODM_INFO('Running ODM CMVS Cell - Finished')
        # keep the plasm running unless this is the user's final stage
        return ecto.OK if args['end_with'] != 'cmvs' else ecto.QUIT
|
|
@ -0,0 +1,68 @@
|
|||
import os
|
||||
import ecto
|
||||
|
||||
from opendm import context
|
||||
from opendm import io
|
||||
from opendm import types
|
||||
from opendm import log
|
||||
|
||||
class ODMLoadDatasetCell(ecto.Cell):
    """Ecto cell that scans the project image directory and loads every
    supported picture as a types.ODM_Photo."""

    def declare_params(self, params):
        params.declare("force_focal", 'Override the focal length information for the '
                       'images', None)
        # BUGFIX: description misspelled 'widht'.
        params.declare("force_ccd", 'Override the ccd width information for the '
                       'images', None)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        outputs.declare("photos", "list of ODMPhoto's", [])

    def process(self, inputs, outputs):
        # check if the extension is supported
        def supported_extension(file_name):
            (pathfn, ext) = os.path.splitext(file_name)
            return ext.lower() in context.supported_extensions

        log.ODM_INFO('Running ODM Load Dataset Cell')

        # get inputs
        tree = self.inputs.tree

        # prefer the resized dataset; fall back to the raw images directory
        images_dir = tree.dataset_resize

        if not io.dir_exists(images_dir):
            images_dir = tree.dataset_raw
            if not io.dir_exists(images_dir):
                log.ODM_ERROR("You must put your pictures into an <images> directory")
                return ecto.QUIT

        log.ODM_DEBUG('Loading dataset from: %s' % images_dir)

        # find files in the given directory
        files = io.get_files_list(images_dir)

        # filter images for its extension type
        files = [f for f in files if supported_extension(f)]

        if files:
            # create ODMPhoto list
            photos = []
            for f in files:
                path_file = io.join_paths(images_dir, f)
                photo = types.ODM_Photo(path_file,
                                        self.params.force_focal,
                                        self.params.force_ccd)
                photos.append(photo)

            log.ODM_INFO('Found %s usable images' % len(photos))
        else:
            log.ODM_ERROR('Not enough supported images in %s' % images_dir)
            return ecto.QUIT

        # append photos to cell output
        outputs.photos = photos

        log.ODM_INFO('Running ODM Load Dataset Cell - Finished')
        return ecto.OK
|
|
@ -0,0 +1,35 @@
|
|||
import ecto
|
||||
import numpy as np
|
||||
class ClusterDetector(ecto.Cell):
    """Demo source cell: emits a random number of dummy cluster tuples."""

    def declare_params(self, params):
        params.declare("n", "Max number of clusters.", 10)

    def declare_io(self, params, inputs, outputs):
        outputs.declare("clusters", "Clusters output. list of tuples", [])

    def process(self, inputs, outputs):
        # pick a random cluster count in [0, n) and label each one 'c<i>'
        count = int(np.random.uniform(0, self.params.n))
        outputs.clusters = [(i, 'c%d' % i) for i in range(count)]
|
||||
|
||||
class ClusterPrinter(ecto.Cell):
    # Demo sink cell: prints every cluster tuple received on 'clusters'.
    def declare_io(self, params, inputs, outputs):
        inputs.declare("clusters", "Clusters input")

    def process(self, inputs, outputs):
        # Python 2 print statements; trailing commas keep output on one line
        print "Clusters: ",
        for c in inputs.clusters:
            print c,
        print "\n"
|
||||
|
||||
def app():
    """Wire a ClusterDetector to a ClusterPrinter and run three iterations."""
    detector = ClusterDetector(n=20)
    printer = ClusterPrinter()
    graph = ecto.Plasm()
    graph.connect(detector['clusters'] >> printer['clusters'])
    scheduler = ecto.Scheduler(graph)
    scheduler.execute(niter=3)

if __name__ == "__main__":
    app()
|
|
@ -0,0 +1,122 @@
|
|||
import ecto
|
||||
|
||||
from opendm import context
|
||||
from opendm import types
|
||||
from opendm import config
|
||||
|
||||
from dataset import ODMLoadDatasetCell
|
||||
from resize import ODMResizeCell
|
||||
from opensfm import ODMOpenSfMCell
|
||||
from pmvs import ODMPmvsCell
|
||||
from cmvs import ODMCmvsCell
|
||||
from odm_meshing import ODMeshingCell
|
||||
from odm_texturing import ODMTexturingCell
|
||||
from odm_georeferencing import ODMGeoreferencingCell
|
||||
from odm_orthophoto import ODMOrthoPhotoCell
|
||||
|
||||
class ODMApp(ecto.BlackBox):
    ''' ODMApp - a class for ODM Activities

    Ecto black box that instantiates every pipeline cell from the parsed
    CLI arguments and wires them into a single plasm.
    '''
    def __init__(self, *args, **kwargs):
        ecto.BlackBox.__init__(self, *args, **kwargs)

    @staticmethod
    def declare_direct_params(p):
        p.declare("args", "The application arguments.", {})

    @staticmethod
    def declare_cells(p):
        """
        Implement the virtual function from the base class
        Only cells from which something is forwarded have to be declared
        """
        cells = { 'args': ecto.Constant(value=p.args),
                  'dataset': ODMLoadDatasetCell(force_focal=p.args['force_focal'],
                                                force_ccd=p.args['force_ccd']),
                  'resize': ODMResizeCell(resize_to=p.args['resize_to']),
                  'opensfm': ODMOpenSfMCell(use_exif_size=False,
                                            feature_process_size=p.args['resize_to'],
                                            feature_min_frames=p.args['min_num_features'],
                                            processes=context.num_cores,
                                            matching_gps_neighbors=p.args['matcher_neighbors'],
                                            matching_gps_distance=p.args['matcher_distance']),
                  'cmvs': ODMCmvsCell(max_images=p.args['cmvs_maxImages']),
                  'pmvs': ODMPmvsCell(level=p.args['pmvs_level'],
                                      csize=p.args['pmvs_csize'],
                                      thresh=p.args['pmvs_threshold'],
                                      wsize=p.args['pmvs_wsize'],
                                      min_imgs=p.args['pmvs_minImageNum'],
                                      cores=p.args['pmvs_num_cores']),
                  'meshing': ODMeshingCell(max_vertex=p.args['odm_meshing_maxVertexCount'],
                                           oct_tree=p.args['odm_meshing_octreeDepth'],
                                           samples=p.args['odm_meshing_samplesPerNode'],
                                           solver=p.args['odm_meshing_solverDivide']),
                  'texturing': ODMTexturingCell(resize=p.args['resize_to'],
                                                resolution=p.args['odm_texturing_textureResolution'],
                                                size=p.args['odm_texturing_textureWithSize']),
                  'georeferencing': ODMGeoreferencingCell(img_size=p.args['resize_to'],
                                                          gcp_file=p.args['odm_georeferencing_gcpFile'],
                                                          use_gcp=p.args['odm_georeferencing_useGcp']),
                  'orthophoto': ODMOrthoPhotoCell(resolution=p.args['odm_orthophoto_resolution']) }

        return cells

    def configure(self, p, _i, _o):
        # the ODM_Tree carries every path used by the pipeline cells
        tree = types.ODM_Tree(p.args['project_path'])
        self.tree = ecto.Constant(value=tree)

    def connections(self, _p):
        # define initial task
        initial_task = _p.args['start_with']
        initial_task_id = config.processopts.index(initial_task)
        # NOTE(review): initial_task_id is computed but never used below;
        # the whole plasm is always wired -- confirm intended behavior.

        # define the connections like you would for the plasm
        # (BUGFIX: removed a dead `connections = []` that was immediately
        # overwritten by the assignment below)

        # load the dataset
        connections = [ self.tree[:] >> self.dataset['tree'] ]

        # run resize cell
        connections += [ self.tree[:] >> self.resize['tree'],
                         self.args[:] >> self.resize['args'],
                         self.dataset['photos'] >> self.resize['photos'] ]

        # run opensfm with images from load dataset
        connections += [ self.tree[:] >> self.opensfm['tree'],
                         self.args[:] >> self.opensfm['args'],
                         self.resize['photos'] >> self.opensfm['photos'] ]

        # run cmvs
        connections += [ self.tree[:] >> self.cmvs['tree'],
                         self.args[:] >> self.cmvs['args'],
                         self.opensfm['reconstruction'] >> self.cmvs['reconstruction'] ]

        # run pmvs
        connections += [ self.tree[:] >> self.pmvs['tree'],
                         self.args[:] >> self.pmvs['args'],
                         self.cmvs['reconstruction'] >> self.pmvs['reconstruction'] ]

        # create odm mesh
        connections += [ self.tree[:] >> self.meshing['tree'],
                         self.args[:] >> self.meshing['args'],
                         self.pmvs['reconstruction'] >> self.meshing['reconstruction'] ]

        # create odm texture
        connections += [ self.tree[:] >> self.texturing['tree'],
                         self.args[:] >> self.texturing['args'],
                         self.meshing['reconstruction'] >> self.texturing['reconstruction'] ]

        # create odm georeference
        connections += [ self.tree[:] >> self.georeferencing['tree'],
                         self.args[:] >> self.georeferencing['args'],
                         self.dataset['photos'] >> self.georeferencing['photos'],
                         self.texturing['reconstruction'] >> self.georeferencing['reconstruction'] ]

        # create odm orthophoto
        connections += [ self.tree[:] >> self.orthophoto['tree'],
                         self.args[:] >> self.orthophoto['args'],
                         self.georeferencing['reconstruction'] >> self.orthophoto['reconstruction'] ]

        return connections
|
|
@ -0,0 +1,266 @@
|
|||
import ecto
|
||||
|
||||
from opendm import io
|
||||
from opendm import log
|
||||
from opendm import types
|
||||
from opendm import system
|
||||
from opendm import context
|
||||
|
||||
class ODMGeoreferencingCell(ecto.Cell):
    """Ecto cell that georeferences the textured model and point cloud.

    Uses either a user-supplied GCP file or coordinates extracted from the
    images' EXIF metadata (via the odm_extract_utm binary), then runs the
    odm_georef binary and converts the result to LAS.
    """

    def declare_params(self, params):
        # Parameters exposed on the cell / command line.
        params.declare("gcp_file", 'path to the file containing the ground control '
                       'points used for georeferencing.The file needs to '
                       'be on the following line format: \neasting '
                       'northing height pixelrow pixelcol imagename', 'gcp_list.txt')
        params.declare("use_gcp", 'set to true for enabling GCPs from the file above', False)
        params.declare("img_size", 'image size used in calibration', 2400)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        inputs.declare("args", "The application arguments.", {})
        inputs.declare("photos", "list of ODMPhoto's", [])
        inputs.declare("reconstruction", "list of ODMReconstructions", [])
        outputs.declare("reconstruction", "list of ODMReconstructions", [])

    def process(self, inputs, outputs):
        # FIX: log message said 'OMD' instead of 'ODM'.
        log.ODM_INFO('Running ODM Georeferencing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_georeferencing)

        # in case a gcp file it's not provided, let's try to generate it using
        # images metadata. Internally calls jhead.
        if not self.params.use_gcp and \
           not io.file_exists(tree.odm_georeferencing_coords):

            log.ODM_WARNING('Warning: No coordinates file. ' \
                            'Generating coordinates file in: %s' % tree.odm_georeferencing_coords)
            try:
                # odm_extract_utm argument values
                kwargs = {
                    'bin': context.odm_modules_path,
                    'imgs': tree.dataset_resize,
                    'imgs_list': tree.opensfm_bundle_list,
                    'coords': tree.odm_georeferencing_coords,
                    'log': tree.odm_georeferencing_utm_log
                }

                # run UTM extraction binary
                system.run('{bin}/odm_extract_utm -imagesPath {imgs}/ ' \
                           '-imageListFile {imgs_list} -outputCoordFile {coords} ' \
                           '-logFile {log}'.format(**kwargs))

            # FIX: 'except Exception, e' is Python-2-only syntax; the 'as'
            # form works on Python 2.6+ and Python 3.
            except Exception as e:
                # FIX: added the missing space between the joined fragments
                # ('metadata.Consider' -> 'metadata. Consider').
                log.ODM_ERROR('Could not generate GCP file from images metadata. ' \
                              'Consider rerunning with argument --odm_georeferencing-useGcp' \
                              ' and provide a proper GCP file')
                log.ODM_ERROR(e)
                return ecto.QUIT

        # check if we rerun cell or not
        rerun_cell = args['rerun'] is not None \
            and args['rerun'] == 'odm_georeferencing'

        if not io.file_exists(tree.odm_textured_model_obj_geo) or \
           not io.file_exists(tree.odm_textured_model_ply_geo) or rerun_cell:

            # odm_georef argument values
            # FIX: removed the stray empty trailing entry in this dict literal.
            kwargs = {
                'bin': context.odm_modules_path,
                'bundle': tree.opensfm_bundle,
                'imgs': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_textured_model_obj,
                'pc': tree.pmvs_model,
                'log': tree.odm_georeferencing_log,
                'coords': tree.odm_georeferencing_coords,
                'pc_geo': tree.odm_textured_model_ply_geo,
                'geo_sys': tree.odm_textured_model_txt_geo,
                'model_geo': tree.odm_textured_model_obj_geo,
                'size': self.params.img_size,
                'gcp': io.join_paths(tree.root_path, self.params.gcp_file)
            }

            if self.params.use_gcp and \
               io.file_exists(tree.odm_georeferencing_coords):

                # run odm_georef with the user-provided GCP file
                system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} ' \
                           '-bundleResizedTo {size} -inputFile {model} -outputFile {model_geo} ' \
                           '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} ' \
                           '-logFile {log} -georefFileOutputPath {geo_sys} -gcpFile {gcp} ' \
                           '-outputCoordFile {coords}'.format(**kwargs))
            else:
                # run odm_georef using the (possibly auto-generated) coords file
                system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} ' \
                           '-inputFile {model} -outputFile {model_geo} ' \
                           '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} ' \
                           '-logFile {log} -georefFileOutputPath {geo_sys}'.format(**kwargs))

        else:
            log.ODM_WARNING('Found a valid georeferenced model in: %s' \
                            % tree.odm_textured_model_ply_geo)

        # update images metadata
        geo_ref = types.ODM_GeoRef()
        geo_ref.parse_coordinate_system(tree.odm_georeferencing_coords)

        for idx, photo in enumerate(self.inputs.photos):
            geo_ref.utm_to_latlon(tree.odm_georeferencing_latlon, photo, idx)

        # convert ply model to LAS reference system
        geo_ref.convert_to_las(tree.odm_textured_model_ply_geo)

        # FIX: 'OMD' -> 'ODM'.
        log.ODM_INFO('Running ODM Georeferencing Cell - Finished')
        return ecto.OK if args['end_with'] != 'odm_georeferencing' else ecto.QUIT
|
||||
|
||||
|
||||
def odm_georeferencing():
    """Run odm_georeferencing"""
    # Legacy (pre-ecto) pipeline step. Georeferences the textured model and
    # point cloud, writes GPS tags back into the source images via exiv2,
    # and emits a geo-referenced LAZ point cloud with txt2las.
    print "\n - running georeferencing - " + now()

    os.chdir(jobOptions["jobDir"])
    try:
        os.mkdir(jobOptions["jobDir"] + "/odm_georeferencing")
    except:
        # directory probably exists already; best-effort mkdir
        pass

    if not args.odm_georeferencing_useGcp:
        # No GCP file: extract UTM coordinates from the images' EXIF metadata,
        # then georeference against that coordinates file.
        run("\"" + BIN_PATH + "/odm_extract_utm\" -imagesPath " + jobOptions["srcDir"] + "/ -imageListFile " \
            + jobOptions["jobDir"] + "/pmvs/list.rd.txt -outputCoordFile " + jobOptions["jobDir"] \
            + "/odm_georeferencing/coordFile.txt")

        run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] \
            + "/pmvs/bundle.rd.out -inputCoordFile " + jobOptions["jobDir"] \
            + "/odm_georeferencing/coordFile.txt -inputFile " + jobOptions["jobDir"] \
            + "-results/odm_texturing/odm_textured_model.obj -outputFile " + jobOptions["jobDir"] \
            + "-results/odm_texturing/odm_textured_model_geo.obj -inputPointCloudFile " \
            + jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] \
            + "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] \
            + "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] \
            + "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")

    elif os.path.isfile(jobOptions["srcDir"] + "/" + args.odm_georeferencing_gcpFile):
        # User-provided GCP file exists: georeference with it (odm_georef also
        # writes back the coordFile in this mode).
        run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] \
            + "/pmvs/bundle.rd.out -gcpFile " + jobOptions["srcDir"] + "/" + args.odm_georeferencing_gcpFile \
            + " -imagesPath " + jobOptions["srcDir"] + "/ -imagesListPath " + jobOptions["jobDir"] \
            + "/pmvs/list.rd.txt -bundleResizedTo " + str(jobOptions["resizeTo"]) + " -inputFile " \
            + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model.obj -outputFile " \
            + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -outputCoordFile " \
            + jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt -inputPointCloudFile " \
            + jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] \
            + "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] \
            + "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] \
            + "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")
    else:
        # GCPs requested but the file is missing: skip orthophoto generation.
        print "Warning: No GCP file. Consider rerunning with argument --odm_georeferencing-useGcp false --start-with odm_georeferencing"
        print "Skipping orthophoto"
        args.end_with = "odm_georeferencing"

    if "csString" not in jobOptions:
        parse_coordinate_system()

    if "csString" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
        images = []
        with open(jobOptions["jobDir"] + "/pmvs/list.rd.txt") as f:
            images = f.readlines()

        if len(images) > 0:
            with open(jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt") as f:
                for lineNumber, line in enumerate(f):
                    # first two lines of coordFile.txt are header lines;
                    # data line i corresponds to images[i]
                    if lineNumber >= 2 and lineNumber - 2 < len(images):
                        tokens = line.split(' ')

                        if len(tokens) >= 3:
                            # offset-local UTM coordinates of the image
                            x = float(tokens[0])
                            y = float(tokens[1])
                            z = float(tokens[2])
                            filename = images[lineNumber - 2]

                            # convert the absolute UTM position to lat/long
                            # with cs2cs (proj) via a shell pipeline
                            run("echo " + str(x + jobOptions["utmEastOffset"]) + " " \
                                + str(y + jobOptions["utmNorthOffset"]) + " " + str(z) \
                                + " | cs2cs " + jobOptions["csString"] + " +to +datum=WGS84 +proj=latlong > " \
                                + jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt")

                            with open(jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt") as latlongFile:
                                latlongLine = latlongFile.readline()
                                tokens = latlongLine.split()
                                if len(tokens) >= 2:
                                    exifGpsInfoWritten = False

                                    lonString = tokens[0]  # Example: 83d18'16.285"W
                                    latString = tokens[1]  # Example: 41d2'11.789"N
                                    altString = ""
                                    if len(tokens) > 2:
                                        altString = tokens[2]  # Example: 0.998

                                    # split D-M-S-hemisphere from cs2cs output
                                    tokens = re.split("[d '\"]+", lonString)
                                    if len(tokens) >= 4:
                                        lonDeg = tokens[0]
                                        lonMin = tokens[1]
                                        lonSec = tokens[2]
                                        # EXIF rationals need numerator/denominator
                                        lonSecFrac = fractions.Fraction(lonSec)
                                        lonSecNumerator = str(lonSecFrac._numerator)
                                        lonSecDenominator = str(lonSecFrac._denominator)
                                        lonRef = tokens[3]

                                        tokens = re.split("[d '\"]+", latString)
                                        if len(tokens) >= 4:
                                            latDeg = tokens[0]
                                            latMin = tokens[1]
                                            latSec = tokens[2]
                                            latSecFrac = fractions.Fraction(latSec)
                                            latSecNumerator = str(latSecFrac._numerator)
                                            latSecDenominator = str(latSecFrac._denominator)
                                            latRef = tokens[3]

                                            # build one exiv2 invocation writing
                                            # all GPS tags into the image
                                            exivCmd = "exiv2 -q"
                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLatitude " + latDeg + "/1 " \
                                                + latMin + "/1 " + latSecNumerator + "/" + latSecDenominator + "\""

                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLatitudeRef " + latRef + "\""

                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLongitude " + lonDeg + "/1 " \
                                                + lonMin + "/1 " + lonSecNumerator + "/" + lonSecDenominator + "\""

                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLongitudeRef " + lonRef + "\""

                                            # NOTE(review): 'arcDenominator' looks like a typo for
                                            # 'altDenominator'; harmless here because it is never read
                                            # and altDenominator is rebound below when altString is set.
                                            altNumerator = arcDenominator = 0  # BUG: arcDenominator is never used

                                            if altString:
                                                altFrac = fractions.Fraction(altString)
                                                altNumerator = str(altFrac._numerator)
                                                altDenominator = str(altFrac._denominator)
                                                exivCmd += " -M\"set Exif.GPSInfo.GPSAltitude " + altNumerator + "/" + altDenominator + "\""
                                                exivCmd += " -M\"set Exif.GPSInfo.GPSAltitudeRef 0\""

                                            exivCmd += " " + filename
                                            run(exivCmd)
                                            exifGpsInfoWritten = True

                                    if not exifGpsInfoWritten:
                                        print("    Warning: Failed setting EXIF GPS info for " \
                                            + filename + " based on " + latlongLine)

    if "epsg" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
        # translate the georeferenced ply into a compressed LAZ, restoring the
        # UTM offsets and stamping the EPSG code
        lasCmd = "\"" + BIN_PATH + "/txt2las\" -i " + jobOptions["jobDir"] + \
            "-results/option-0000_georef.ply -o " + jobOptions["jobDir"] \
            + "-results/pointcloud_georef.laz -skip 30 -parse xyzRGBssss -set_scale 0.01 0.01 0.01 -set_offset " \
            + str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) + " 0 -translate_xyz " \
            + str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) \
            + " 0 -epsg " + str(jobOptions["epsg"])

        print("    Creating geo-referenced LAS file (expecting warning)...")

        run(lasCmd)

    if args['--end-with'] != "odm_georeferencing":
        odm_orthophoto()
|
|
@ -0,0 +1,68 @@
|
|||
import ecto
|
||||
|
||||
from opendm import log
|
||||
from opendm import io
|
||||
from opendm import system
|
||||
from opendm import context
|
||||
|
||||
class ODMeshingCell(ecto.Cell):
    """Ecto cell that builds a surface mesh from the dense PMVS point cloud
    via the odm_meshing binary (Poisson-style reconstruction parameters).
    """

    def declare_params(self, params):
        params.declare("max_vertex", 'The maximum vertex count of the output '
                       'mesh', 100000)
        params.declare("oct_tree", 'Oct-tree depth used in the mesh reconstruction, '
                       'increase to get more vertices, recommended '
                       'values are 8-12', 9)
        params.declare("samples", 'Number of points per octree node, recommended '
                       'value: 1.0', 1)
        params.declare("solver", 'Oct-tree depth at which the Laplacian equation '
                       'is solved in the surface reconstruction step. '
                       'Increasing this value increases computation '
                       'times slightly but helps reduce memory usage.', 9)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        inputs.declare("args", "The application arguments.", {})
        inputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", [])
        outputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", [])

    def process(self, inputs, outputs):
        # FIX: log messages said 'OMD' instead of 'ODM'.
        log.ODM_INFO('Running ODM Meshing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_meshing)

        # check if we rerun cell or not
        rerun_cell = args['rerun'] is not None \
            and args['rerun'] == 'odm_meshing'

        if not io.file_exists(tree.odm_mesh) or rerun_cell:
            # FIX: 'Writting' -> 'Writing'.
            log.ODM_DEBUG('Writing ODM Mesh file in: %s' % tree.odm_mesh)

            # odm_meshing argument values
            # FIX: normalized the missing space after 'solver':.
            kwargs = {
                'bin': context.odm_modules_path,
                'infile': tree.pmvs_model,
                'outfile': tree.odm_mesh,
                'log': tree.odm_meshing_log,
                'max_vertex': self.params.max_vertex,
                'oct_tree': self.params.oct_tree,
                'samples': self.params.samples,
                'solver': self.params.solver
            }

            # run meshing binary
            system.run('{bin}/odm_meshing -inputFile {infile} ' \
                       '-outputFile {outfile} -logFile {log} ' \
                       '-maxVertexCount {max_vertex} -octreeDepth {oct_tree} ' \
                       '-samplesPerNode {samples} -solverDivide {solver}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
                            (tree.odm_mesh))

        log.ODM_INFO('Running ODM Meshing Cell - Finished')
        return ecto.OK if args['end_with'] != 'odm_meshing' else ecto.QUIT
|
|
@ -0,0 +1,103 @@
|
|||
import ecto
|
||||
|
||||
from opendm import io
|
||||
from opendm import log
|
||||
from opendm import system
|
||||
from opendm import context
|
||||
|
||||
class ODMOrthoPhotoCell(ecto.Cell):
    """Ecto cell that renders an orthophoto from the georeferenced textured
    model using the odm_orthophoto binary.
    """

    def declare_params(self, params):
        params.declare("resolution", 'Orthophoto ground resolution in pixels/meter', 20)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        inputs.declare("args", "The application arguments.", {})
        inputs.declare("reconstruction", "list of ODMReconstructions", [])

    def process(self, inputs, outputs):
        # FIX: log messages said 'OMD' instead of 'ODM'.
        log.ODM_INFO('Running ODM OrthoPhoto Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_orthophoto)

        # check if we rerun cell or not
        rerun_cell = args['rerun'] is not None \
            and args['rerun'] == 'odm_orthophoto'

        if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell:

            # odm_orthophoto argument values
            # FIX: comments here previously said 'odm_georeference' (copy-paste).
            kwargs = {
                'bin': context.odm_modules_path,
                'model_geo': tree.odm_textured_model_obj_geo,
                'log': tree.odm_orthophoto_log,
                'ortho': tree.odm_orthophoto_file,
                'corners': tree.odm_orthophoto_corners,
                'res': self.params.resolution
            }

            # run odm_orthophoto
            system.run('{bin}/odm_orthophoto -inputFile {model_geo} ' \
                       '-logFile {log} -outputFile {ortho} -resolution {res} ' \
                       '-outputCornerFile {corners}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file)

        log.ODM_INFO('Running ODM OrthoPhoto Cell - Finished')
        return ecto.OK if args['end_with'] != 'odm_orthophoto' else ecto.QUIT
|
||||
|
||||
|
||||
def odm_orthophoto():
    """Run odm_orthophoto"""
    # Legacy (pre-ecto) pipeline step. Renders the orthophoto PNG, then — if a
    # coordinate system is known — wraps it into a GeoTIFF with gdal_translate.
    # NOTE(review): 'odm_orthphoto' (missing 'o') is a pre-existing filename
    # typo used consistently by producer and consumer; do not "fix" one side.
    print "\n - running orthophoto generation - " + system.now()

    os.chdir(jobOptions["jobDir"])
    try:
        os.mkdir(jobOptions["jobDir"] + "/odm_orthophoto")
    except:
        # directory probably exists already; best-effort mkdir
        pass

    run("\"" + BIN_PATH + "/odm_orthophoto\" -inputFile " + jobOptions["jobDir"] + \
        "-results/odm_texturing/odm_textured_model_geo.obj -logFile " + jobOptions["jobDir"] \
        + "/odm_orthophoto/odm_orthophoto_log.txt -outputFile " + jobOptions["jobDir"] \
        + "-results/odm_orthphoto.png -resolution 20.0 -outputCornerFile " + jobOptions["jobDir"] \
        + "/odm_orthphoto_corners.txt")

    if "csString" not in jobOptions:
        parse_coordinate_system()

    geoTiffCreated = False
    if ("csString" in jobOptions and
            "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions):
        ulx = uly = lrx = lry = 0.0
        # first line of the corner file holds the four corner ordinates,
        # offset-local; add the UTM offsets back to get absolute coordinates
        with open(jobOptions["jobDir"] +
                  "/odm_orthphoto_corners.txt") as f:
            for lineNumber, line in enumerate(f):
                if lineNumber == 0:
                    tokens = line.split(' ')
                    if len(tokens) == 4:
                        ulx = float(tokens[0]) + \
                            float(jobOptions["utmEastOffset"])
                        lry = float(tokens[1]) + \
                            float(jobOptions["utmNorthOffset"])
                        lrx = float(tokens[2]) + \
                            float(jobOptions["utmEastOffset"])
                        uly = float(tokens[3]) + \
                            float(jobOptions["utmNorthOffset"])

        print("    Creating GeoTIFF...")
        sys.stdout.write("    ")
        # georeference the PNG into a TIFF with the recovered corner extents
        run("gdal_translate -a_ullr " + str(ulx) + " " + str(uly) + " " +
            str(lrx) + " " + str(lry) + " -a_srs \"" + jobOptions["csString"] +
            "\" " + jobOptions["jobDir"] + "-results/odm_orthphoto.png " +
            jobOptions["jobDir"] + "-results/odm_orthphoto.tif")
        geoTiffCreated = True

    if not geoTiffCreated:
        print "    Warning: No geo-referenced orthophoto created due to missing geo-referencing or corner coordinates."
|
|
@ -0,0 +1,67 @@
|
|||
import ecto
|
||||
|
||||
from opendm import log
|
||||
from opendm import io
|
||||
from opendm import system
|
||||
from opendm import context
|
||||
|
||||
class ODMTexturingCell(ecto.Cell):
    """Ecto cell that textures the reconstructed mesh using the odm_texturing
    binary and the resized image set.
    """

    def declare_params(self, params):
        params.declare("resize", 'resizes images by the largest side', 2400)
        params.declare("resolution", 'The resolution of the output textures. Must be '
                       'greater than textureWithSize.', 4096)
        params.declare("size", 'The resolution to rescale the images performing '
                       'the texturing.', 3600)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        inputs.declare("args", "The application arguments.", {})
        inputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", [])
        outputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", [])

    def process(self, inputs, outputs):
        # FIX: log messages said 'OMD' instead of 'ODM'.
        log.ODM_INFO('Running ODM Texturing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)

        # check if we rerun cell or not
        rerun_cell = args['rerun'] is not None \
            and args['rerun'] == 'odm_texturing'

        if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
            # FIX: 'Writting' -> 'Writing'.
            log.ODM_DEBUG('Writing ODM Textured file in: %s' \
                          % tree.odm_textured_model_obj)

            # odm_texturing argument values
            kwargs = {
                'bin': context.odm_modules_path,
                'out_dir': tree.odm_texturing,
                'bundle': tree.opensfm_bundle,
                'imgs_path': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_mesh,
                # NOTE(review): 'odm_texuring_log' looks misspelled but the
                # attribute is declared elsewhere (tree struct) — confirm
                # before renaming on either side.
                'log': tree.odm_texuring_log,
                'resize': self.params.resize,
                'resolution': self.params.resolution,
                'size': self.params.size
            }

            # run texturing binary
            system.run('{bin}/odm_texturing -bundleFile {bundle} ' \
                       '-imagesPath {imgs_path} -imagesListPath {imgs_list} ' \
                       '-inputModelPath {model} -outputFolder {out_dir}/ ' \
                       '-textureResolution {resolution} -bundleResizedTo {resize} ' \
                       '-textureWithSize {size} -logFile {log}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Texture file in: %s' \
                            % tree.odm_textured_model_obj)

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args['end_with'] != 'odm_texturing' else ecto.QUIT
|
|
@ -0,0 +1,100 @@
|
|||
import ecto
|
||||
|
||||
from opendm import log
|
||||
from opendm import io
|
||||
from opendm import system
|
||||
from opendm import context
|
||||
|
||||
class ODMOpenSfMCell(ecto.Cell):
    """Ecto cell that runs the OpenSfM sparse reconstruction, then exports
    the result to bundler and PMVS formats for the downstream cells.
    """

    def declare_params(self, params):
        # NOTE(review): the declared descriptions below are copy-pasted
        # placeholders; they are runtime help text, so left unchanged here —
        # worth a follow-up fix with semantics confirmed against OpenSfM docs.
        params.declare("use_exif_size", "The application arguments.", False)
        params.declare("feature_process_size", "The application arguments.", 2400)
        params.declare("feature_min_frames", "The application arguments.", 4000)
        params.declare("processes", "The application arguments.", context.num_cores)
        params.declare("matching_gps_neighbors", "The application arguments.", 8)
        params.declare("matching_gps_distance", "The application arguments.", 0)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        inputs.declare("args", "The application arguments.", {})
        inputs.declare("photos", "list of ODMPhoto's", [])
        outputs.declare("reconstruction", "list of ODMReconstructions", [])

    def process(self, inputs, outputs):
        # FIX: log messages said 'OMD' instead of 'ODM'.
        log.ODM_INFO('Running ODM OpenSfm Cell')

        # get inputs
        tree = self.inputs.tree
        args = self.inputs.args
        photos = self.inputs.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to start OpenSfm')
            return ecto.QUIT

        # create working directories
        system.mkdir_p(tree.opensfm)
        system.mkdir_p(tree.pmvs)

        # check if we rerun cell or not
        rerun_cell = args['rerun'] is not None \
            and args['rerun'] == 'opensfm'

        # check if reconstruction was done before
        if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell:
            # create file list
            list_path = io.join_paths(tree.opensfm, 'image_list.txt')
            with open(list_path, 'w') as fout:
                for photo in photos:
                    fout.write('%s\n' % photo.path_file)

            # create config file for OpenSfM
            config = [
                "use_exif_size: %s" % ('no' if not self.params.use_exif_size else 'yes'),
                "feature_process_size: %s" % self.params.feature_process_size,
                "feature_min_frames: %s" % self.params.feature_min_frames,
                "processes: %s" % self.params.processes,
                "matching_gps_neighbors: %s" % self.params.matching_gps_neighbors
            ]

            # only emit the distance key when the CLI requested a positive
            # matcher distance (0 disables distance-based matching)
            if args['matcher_distance'] > 0:
                config.append("matching_gps_distance: %s" % self.params.matching_gps_distance)

            # write config file
            config_filename = io.join_paths(tree.opensfm, 'config.yaml')
            with open(config_filename, 'w') as fout:
                fout.write("\n".join(config))

            # run OpenSfM reconstruction
            system.run('PYTHONPATH=%s %s/bin/run_all %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid OpenSfm file in: %s' %
                            (tree.opensfm_reconstruction))

        # check if reconstruction was exported to bundler before
        if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell:
            # convert back to bundler's format
            system.run('PYTHONPATH=%s %s/bin/export_bundler %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid Bundler file in: %s' %
                            (tree.opensfm_reconstruction))

        # check if reconstruction was exported to pmvs before
        if not io.file_exists(tree.pmvs_visdat) or rerun_cell:
            # run PMVS converter
            system.run('PYTHONPATH=%s %s/bin/export_pmvs %s --output %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm, tree.pmvs))
        else:
            log.ODM_WARNING('Found a valid CMVS file in: %s' % tree.pmvs_visdat)

        log.ODM_INFO('Running ODM OpenSfm Cell - Finished')
        return ecto.OK if args['end_with'] != 'opensfm' else ecto.QUIT
|
|
@ -0,0 +1,73 @@
|
|||
import ecto
|
||||
|
||||
from opendm import io
|
||||
from opendm import log
|
||||
from opendm import system
|
||||
from opendm import context
|
||||
|
||||
class ODMPmvsCell(ecto.Cell):
    """Ecto cell that densifies the sparse reconstruction by generating PMVS
    options (cmvs genOption) and running pmvs2.
    """

    def declare_params(self, params):
        params.declare("level", 'The level in the image pyramid that is used '
                       'for the computation', 1)
        params.declare("csize", 'Cell size controls the density of reconstructions', 2)
        # FIX: 'associcated' -> 'associated' in the help text.
        params.declare("thresh", 'A patch reconstruction is accepted as a success '
                       'and kept, if its associated photometric consistency '
                       'measure is above this threshold.', 0.7)
        params.declare("wsize", 'pmvs samples wsize x wsize pixel colors from '
                       'each image to compute photometric consistency '
                       'score. For example, when wsize=7, 7x7=49 pixel '
                       'colors are sampled in each image. Increasing the '
                       'value leads to more stable reconstructions, but '
                       'the program becomes slower.', 7)
        params.declare("min_imgs", 'Each 3D point must be visible in at least '
                       'minImageNum images for being reconstructed. 3 is '
                       'suggested in general.', 3)
        params.declare("cores", 'The maximum number of cores to use in dense '
                       ' reconstruction.', context.num_cores)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        inputs.declare("args", "The application arguments.", {})
        inputs.declare("reconstruction", "list of ODMReconstructions", [])
        outputs.declare("reconstruction", "list of ODMReconstructions", [])

    def process(self, inputs, outputs):
        # FIX: log messages said 'OMD' instead of 'ODM'.
        log.ODM_INFO('Running ODM PMVS Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # check if we rerun cell or not
        rerun_cell = args['rerun'] is not None \
            and args['rerun'] == 'pmvs'

        if not io.file_exists(tree.pmvs_model) or rerun_cell:
            log.ODM_DEBUG('Creating dense pointcloud in: %s' % tree.pmvs_model)

            # cmvs genOption argument values (positional)
            kwargs = {
                'bin': context.cmvs_opts_path,
                'prefix': tree.pmvs_rec_path,
                'level': self.params.level,
                'csize': self.params.csize,
                'thresh': self.params.thresh,
                'wsize': self.params.wsize,
                'min_imgs': self.params.min_imgs,
                'cores': self.params.cores
            }

            # generate pmvs2 options
            system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} ' \
                       '{min_imgs} {cores}'.format(**kwargs))

            # run pmvs2
            system.run('%s %s/ option-0000' % \
                       (context.pmvs2_path, tree.pmvs_rec_path))

        else:
            log.ODM_WARNING('Found a valid PMVS file in %s' % tree.pmvs_model)

        log.ODM_INFO('Running ODM PMVS Cell - Finished')
        return ecto.OK if args['end_with'] != 'pmvs' else ecto.QUIT
|
|
@ -0,0 +1,91 @@
|
|||
import ecto
|
||||
import cv2
|
||||
import pyexiv2
|
||||
|
||||
from opendm import log
|
||||
from opendm import system
|
||||
from opendm import io
|
||||
from opendm import types
|
||||
|
||||
class ODMResizeCell(ecto.Cell):
    """Ecto cell that resizes the raw photos by their largest side and copies
    the EXIF metadata (with corrected pixel dimensions) onto the result.
    """

    def declare_params(self, params):
        params.declare("resize_to", "resizes images by the largest side", 2400)

    def declare_io(self, params, inputs, outputs):
        inputs.declare("tree", "Struct with paths", [])
        inputs.declare("args", "The application arguments.", [])
        inputs.declare("photos", "Clusters inputs. list of ODMPhoto's", [])
        outputs.declare("photos", "Clusters output. list of ODMPhoto's", [])

    def process(self, inputs, outputs):
        log.ODM_INFO('Running ODM Resize Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        photos = self.inputs.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos to resize')
            return ecto.QUIT

        # create working directory
        system.mkdir_p(tree.dataset_resize)

        log.ODM_DEBUG('Resizing dataset to: %s' % tree.dataset_resize)

        # check if we rerun cell or not
        rerun_cell = args['rerun'] is not None \
            and args['rerun'] == 'resize'

        # loop over photos
        for photo in photos:
            # define image paths
            path_file = photo.path_file
            new_path_file = io.join_paths(tree.dataset_resize, photo.filename)
            # set raw image path in case we want to rerun cell
            if io.file_exists(new_path_file) and rerun_cell:
                path_file = io.join_paths(tree.dataset_raw, photo.filename)

            if not io.file_exists(new_path_file) or rerun_cell:
                # open and resize image with opencv
                img = cv2.imread(path_file)
                # compute new size; cv2 images are numpy arrays shaped
                # (rows, cols, channels) == (height, width, channels)
                max_side = max(img.shape[0], img.shape[1])
                ratio = float(self.params.resize_to) / float(max_side)
                img_r = cv2.resize(img, None, fx=ratio, fy=ratio)
                # write image with opencv
                cv2.imwrite(new_path_file, img_r)
                # read metadata with pyexiv2
                old_meta = pyexiv2.ImageMetadata(path_file)
                new_meta = pyexiv2.ImageMetadata(new_path_file)
                old_meta.read()
                new_meta.read()
                # copy metadata
                old_meta.copy(new_meta)
                # update metadata size
                # FIX: PixelXDimension is the image WIDTH (shape[1]) and
                # PixelYDimension the HEIGHT (shape[0]); the axes were swapped.
                new_meta['Exif.Photo.PixelXDimension'].value = img_r.shape[1]
                new_meta['Exif.Photo.PixelYDimension'].value = img_r.shape[0]
                new_meta.write()
                # update photos array with new values
                # FIX: same row/column swap corrected for width/height.
                photo.path_file = new_path_file
                photo.width = img_r.shape[1]
                photo.height = img_r.shape[0]
                photo.update_focal()

                # log message
                log.ODM_DEBUG('Resized %s | dimensions: %s' % \
                    (photo.filename, img_r.shape))
            else:
                # log message
                log.ODM_WARNING('Already resized %s | dimensions: %s x %s' % \
                    (photo.filename, photo.width, photo.height))

        log.ODM_INFO('Resized %s images' % len(photos))

        # append photos to cell output
        self.outputs.photos = photos

        log.ODM_INFO('Running ODM Resize Cell - Finished')
        return ecto.OK if args['end_with'] != 'resize' else ecto.QUIT
|
|
@ -1 +0,0 @@
|
|||
Subproject commit b34cc5dcd54345272028b68f82fa4d0ae8a103c9
|
BIN
vlfeat.tar.gz
BIN
vlfeat.tar.gz
Plik binarny nie jest wyświetlany.
Ładowanie…
Reference in New Issue