From 3bd80dcab536b6240920e94c291ec34c4f44ebcb Mon Sep 17 00:00:00 2001 From: Dakota Benjamin Date: Thu, 20 Aug 2015 19:16:25 +0000 Subject: [PATCH] Add Python files and update installation script --- .gitignore | 2 + install.sh | 25 + knnMatch_exif.py | 119 +++ odm_extract_utm/src/main.cpp | 2 +- odm_georef/src/FindTransform.hpp | 2 +- odm_georef/src/Georef.cpp | 206 +++++- odm_georef/src/Georef.hpp | 42 +- odm_orthophoto/CMakeLists.txt | 0 odm_orthophoto/src/Logger.cpp | 0 odm_orthophoto/src/Logger.hpp | 0 odm_orthophoto/src/OdmOrthoPhoto.cpp | 70 +- odm_orthophoto/src/OdmOrthoPhoto.hpp | 1 + odm_orthophoto/src/main.cpp | 0 run.py | 1004 ++++++++++++++++++++++++++ 14 files changed, 1415 insertions(+), 58 deletions(-) create mode 100755 knnMatch_exif.py mode change 100755 => 100644 odm_orthophoto/CMakeLists.txt mode change 100755 => 100644 odm_orthophoto/src/Logger.cpp mode change 100755 => 100644 odm_orthophoto/src/Logger.hpp mode change 100755 => 100644 odm_orthophoto/src/OdmOrthoPhoto.cpp mode change 100755 => 100644 odm_orthophoto/src/OdmOrthoPhoto.hpp mode change 100755 => 100644 odm_orthophoto/src/main.cpp create mode 100755 run.py diff --git a/.gitignore b/.gitignore index 40869556..5adf4a14 100644 --- a/.gitignore +++ b/.gitignore @@ -8,4 +8,6 @@ odm_texturing-build/ *.user cmvs.tar.gz parallel.tar.bz2 +LAStools.zip pcl.tar.gz +*.pyc diff --git a/install.sh b/install.sh index 09d45459..5d4ceded 100755 --- a/install.sh +++ b/install.sh @@ -41,6 +41,7 @@ echo " - script started - `date`" GRACLUS_PATH="$TOOLS_SRC_PATH/graclus" PCL_PATH="$TOOLS_SRC_PATH/pcl" + LASTOOLS_PATH="$TOOLS_SRC_PATH/lastools" ODM_MESHING_PATH="$TOOLS_SRC_PATH/odm_meshing" ODM_TEXTURING_PATH="$TOOLS_SRC_PATH/odm_texturing" ODM_ORTHOPHOTO_PATH="$TOOLS_SRC_PATH/odm_orthophoto" @@ -120,6 +121,8 @@ sudo apt-get install --assume-yes --install-recommends \ libzip-dev \ libswitch-perl libjson-perl \ libcv-dev libcvaux-dev libopencv-dev \ + gdal-bin \ + exiv2 \ > "$TOOLS_LOG_PATH/apt-get_install.log" 2>&1 else sudo apt-get install --assume-yes --install-recommends \ @@ -133,6 +136,8 @@ sudo apt-get install --assume-yes --install-recommends \ libzip-dev \ libswitch-perl \ libcv-dev libcvaux-dev libopencv-dev \ + gdal-bin \ + exiv2 \ > "$TOOLS_LOG_PATH/apt-get_install.log" 2>&1 fi @@ -167,6 +172,7 @@ vlfeat.tar.gz http://www.vlfeat.org/download/vlfeat-0.9.13-bin.tar.gz cmvs.tar.gz http://www.di.ens.fr/cmvs/cmvs-fix2.tar.gz graclus.tar.gz http://smathermather.github.io/BundlerTools/patched_files/src/graclus/graclus1.2.tar.gz pcl.tar.gz https://github.com/PointCloudLibrary/pcl/archive/pcl-1.7.2.tar.gz +LAStools.zip http://lastools.org/download/LAStools.zip EOF echo " < done - `date`" @@ -195,6 +201,8 @@ mv -f parallel-20141022 "$PARALLEL_PATH" mv -f PoissonRecon "$PSR_PATH" mv -f cmvs "$CMVS_PATH" mv -f pcl-pcl-1.7.2 "$PCL_PATH" +mv -f LAStools "$LASTOOLS_PATH" + echo " < done - `date`" @@ -308,6 +316,23 @@ echo " > vlfeat" echo " < done - `date`" echo +echo " > LAStools" + cd "$LASTOOLS_PATH" + + echo " - installing LAStools" + + make > "$TOOLS_LOG_PATH/lastools_1_build.log" 2>&1 + + if [ "$ARCH" = "i686" ]; then + cp -f "$LASTOOLS_PATH/bin/txt2las" "$TOOLS_BIN_PATH/txt2las" + fi + + if [ "$ARCH" = "x86_64" ]; then + cp -f "$LASTOOLS_PATH/bin/txt2las" "$TOOLS_BIN_PATH/txt2las" + fi +echo " < done - `date`" +echo + echo " > cmvs/pmvs" cd "$CMVS_PATH/program/main" diff --git a/knnMatch_exif.py b/knnMatch_exif.py new file mode 100755 index 00000000..e36f7f2c --- /dev/null +++ b/knnMatch_exif.py @@ -0,0 +1,119 @@ +import os 
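For orientation, a minimal sketch of how the new module below is consumed; run.py, added later in this patch, calls it in essentially this way. The binary path and list file name here are hypothetical placeholders, not values taken from the patch.

    # Illustrative sketch only; assumes knnMatch_exif.py is importable and that the
    # odm_extract_utm binary built by install.sh exists at the given (hypothetical) path.
    import knnMatch_exif

    pairs = knnMatch_exif.preselect_pairs(
        "bin/odm_extract_utm",       # path to the odm_extract_utm binary
        "_filelist.templist.txt",    # text file with one keypoint (.key) file path per line
        20,                          # k neighbours, or metres if use_knn_mode is False
        use_knn_mode = True)

    for i, j in pairs:
        print i, j                   # image index pairs to hand to KeyMatch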
+import math
+import subprocess
+
+# Purpose:
+# Preselect camera pairs for feature matching using GPS exif data.
+
+def preselect_pairs(utm_extractor_path, image_list, k_distance, use_knn_mode = True, verbose = False):
+
+    # Parameters:
+    # utm_extractor_path: Path to the odm utm_extraction program that parses exif data.
+    # image_list: Path to the file listing the keypoint files of the images.
+    # k_distance: Nearest neighbor count in case of use_knn_mode = True, otherwise
+    #             the minimum distance between cameras to be matched.
+    # use_knn_mode: True if knn mode is used to preselect matches, False for distance thresholding.
+
+# Temporary files
+    image_list_file = 'image_list_tmp.txt'
+    utm_coord_file = 'utm_coord_tmp.txt'
+
+# Parse the list of files with successfully detected points.
+    image_folder = ""
+    filenames = []
+    with open(image_list, 'r') as keypoint_file:
+        first_line = keypoint_file.readline()
+        image_folder = first_line[:first_line.rfind("/")]
+        keypoint_file.seek(0)
+        for line in keypoint_file:
+            filename = line[line.rfind("/")+1:line.rfind(".")]
+            filename += ".jpg"
+            filenames.append(filename)
+
+    data = open(image_list_file, "w")
+    for filename in filenames:
+        data.write("%s\n" % filename)
+    data.close()
+
+
+# Define the call parameters for the odm_extract_utm module
+    utm_cmd = ''
+    if verbose:
+        utm_cmd = [utm_extractor_path, '-verbose', '-imagesPath', image_folder, \
+            '-imageListFile', image_list_file, '-outputCoordFile', utm_coord_file]
+    else:
+        utm_cmd = [utm_extractor_path, '-imagesPath', image_folder, '-imageListFile', \
+            image_list_file, '-outputCoordFile', utm_coord_file]
+
+# Perform UTM extraction
+    utm_process = subprocess.Popen(utm_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    output, err = utm_process.communicate()
+
+# Check return code
+    if utm_process.returncode != 0:
+        print "Error when performing utm_extraction, dumping log file:"
+        print output
+        os.remove(image_list_file)
+        return []
+
+# Parse odm_extract_utm output
+    coords = []
+    with open(utm_coord_file, 'r') as coord_file:
+        # Skip header lines
+        next(coord_file)
+        next(coord_file)
+        for line in coord_file:
+            first_space = line.find(' ')
+            second_space = first_space + line[first_space+1:].find(' ')
+            xCoord = float(line[0: first_space])
+            yCoord = float(line[first_space+1: second_space+1])
+            coords.append([xCoord, yCoord])
+
+# Calculate distances between camera pairs
+    distances = []
+    for xo,yo in coords:
+        distances.append([])
+        for xi, yi in coords:
+            current_image = len(distances)
+            xDist = xo-xi
+            yDist = yo-yi
+            distance = math.sqrt(xDist*xDist + yDist*yDist)
+            current_image = len(distances[-1])
+            distances[-1].append([current_image, distance, False])
+        distances[-1].sort(key=lambda tup: tup[1])
+
+# Select matched pairs and make sure there are no duplicates
+    matched_pairs = []
+    if use_knn_mode:
+        # Make sure that image count > k
+        if len(coords) <= k_distance:
+            print "Warning, k >= image count, the result will be equivalent to exhaustive matching"
+            k_distance = len(coords)-1
+        for outer_index, sorted_list in enumerate(distances):
+            # Skip first element as distance will always be zero
+            for sorted_index in xrange(1, k_distance+1):
+                image_index, distance, dummy = distances[outer_index][sorted_index]
+                is_added = distances[outer_index][image_index][2]
+                if not is_added:
+                    matched_pairs.append([outer_index, image_index])
+                    distances[outer_index][image_index][2] = True
+                    distances[image_index][outer_index][2] = True
+    else: # Distance mode
+        for outer_index, sorted_list in 
enumerate(distances): + # Skip first element as distance will always be zero + for image_index, distance, dummy in sorted_list: + if outer_index == image_index: + continue + if distance > k_distance: + break + is_added = distances[outer_index][image_index][2] + if not is_added: + matched_pairs.append([outer_index, image_index]) + distances[outer_index][image_index][2] = True + distances[image_index][outer_index][2] = True + + # Remove temporary files + os.remove(image_list_file) + os.remove(utm_coord_file) + + return matched_pairs diff --git a/odm_extract_utm/src/main.cpp b/odm_extract_utm/src/main.cpp index 7e1a55e1..1d1aa158 100644 --- a/odm_extract_utm/src/main.cpp +++ b/odm_extract_utm/src/main.cpp @@ -5,5 +5,5 @@ int main (int argc, char **argv) { UtmExtractor utmExtractor; - utmExtractor.run(argc, argv); + return utmExtractor.run(argc, argv); } diff --git a/odm_georef/src/FindTransform.hpp b/odm_georef/src/FindTransform.hpp index 908e58a6..19842e42 100644 --- a/odm_georef/src/FindTransform.hpp +++ b/odm_georef/src/FindTransform.hpp @@ -64,7 +64,7 @@ class OnMat3 public: OnMat3(Vec3 r1, Vec3 r2, Vec3 r3); OnMat3(const OnMat3 &o); - + Vec3 r1_; /**< The first row of the matrix. **/ Vec3 r2_; /**< The second row of the matrix. **/ Vec3 r3_; /**< The third row of the matrix. **/ diff --git a/odm_georef/src/Georef.cpp b/odm_georef/src/Georef.cpp index 3f9208ce..4b9ceca7 100644 --- a/odm_georef/src/Georef.cpp +++ b/odm_georef/src/Georef.cpp @@ -218,11 +218,15 @@ std::ostream& operator<<(std::ostream &os, const GeorefCamera &cam) Georef::Georef() : log_(false) { + georeferencePointCloud_ = false; useGCP_ = false; bundleFilename_ = ""; - coordFilename_ = ""; + inputCoordFilename_ = ""; + outputCoordFilename_ = ""; inputObjFilename_ = ""; outputObjFilename_ = ""; + exportCoordinateFile_ = false; + exportGeorefSystem_ = false; } Georef::~Georef() @@ -267,10 +271,13 @@ int Georef::run(int argc, char *argv[]) void Georef::parseArguments(int argc, char *argv[]) { bool outputSpecified = false; + bool outputPointCloudSpecified = false; bool imageListSpecified = false; bool gcpFileSpecified = false; bool imageLocation = false; bool bundleResized = false; + bool outputCoordSpecified = false; + bool inputCoordSpecified = false; logFile_ = std::string(argv[0]) + "_log.txt"; log_ << logFile_ << "\n"; @@ -326,15 +333,28 @@ void Georef::parseArguments(int argc, char *argv[]) bundleFilename_ = std::string(argv[argIndex]); log_ << "Reading cameras from: " << bundleFilename_ << "\n"; } - else if(argument == "-coordFile" && argIndex < argc) + else if(argument == "-inputCoordFile" && argIndex < argc) { argIndex++; if (argIndex >= argc) { throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); } - coordFilename_ = std::string(argv[argIndex]); - log_ << "Reading cameras georeferenced positions from: " << coordFilename_ << "\n"; + inputCoordFilename_ = std::string(argv[argIndex]); + log_ << "Reading cameras gps exif positions from: " << inputCoordFilename_ << "\n"; + inputCoordSpecified = true; + } + else if(argument == "-outputCoordFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputCoordFilename_ = std::string(argv[argIndex]); + log_ << "Exporting cameras georeferenced gps positions to: " << outputCoordFilename_ << "\n"; + exportCoordinateFile_ = true; + outputCoordSpecified = true; } else 
if(argument == "-inputFile" && argIndex < argc) { @@ -346,6 +366,17 @@ void Georef::parseArguments(int argc, char *argv[]) inputObjFilename_ = std::string(argv[argIndex]); log_ << "Reading textured mesh from: " << inputObjFilename_ << "\n"; } + else if(argument == "-inputPointCloudFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + inputPointCloudFilename_ = std::string(argv[argIndex]); + log_ << "Reading point cloud from: " << inputPointCloudFilename_ << "\n"; + georeferencePointCloud_ = true; + } else if(argument == "-gcpFile" && argIndex < argc) { argIndex++; @@ -379,6 +410,17 @@ void Georef::parseArguments(int argc, char *argv[]) log_ << "Images location is set to: " << imagesLocation_ << "\n"; imageLocation = true; } + else if(argument == "-georefFileOutputPath" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + georefFilename_ = std::string(argv[argIndex]); + log_ << "Georef file output path is set to: " << georefFilename_ << "\n"; + exportGeorefSystem_ = true; + } else if(argument == "-bundleResizedTo" && argIndex < argc) { argIndex++; @@ -406,6 +448,17 @@ void Georef::parseArguments(int argc, char *argv[]) log_ << "Writing output to: " << outputObjFilename_ << "\n"; outputSpecified = true; } + else if(argument == "-outputPointCloudFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputPointCloudFilename_ = std::string(argv[argIndex]); + log_ << "Writing output to: " << outputPointCloudFilename_ << "\n"; + outputPointCloudSpecified = true; + } else { printHelp(); @@ -413,6 +466,11 @@ void Georef::parseArguments(int argc, char *argv[]) } } + if (inputCoordSpecified && outputCoordSpecified) + { + throw GeorefException("Both output and input coordfile specified, only one of those are accepted."); + } + if (imageListSpecified && gcpFileSpecified && imageLocation && bundleResized) { useGCP_ = true; @@ -423,6 +481,11 @@ void Georef::parseArguments(int argc, char *argv[]) log_ << "Missing input in order to use GCP for georeferencing. 
Using EXIF data instead.\n"; } + if(georeferencePointCloud_ && !outputPointCloudSpecified) + { + setDefaultPointCloudOutput(); + } + if(!outputSpecified) { setDefaultOutput(); @@ -455,12 +518,18 @@ void Georef::printHelp() log_ << "The file needs to be on the following line format:\n"; log_ << "easting northing height pixelrow pixelcol imagename\n\n"; - log_ << "\"-coordFile \" (mandatory if using exif data)" << "\n"; + log_ << "\"-inputCoordFile \" (mandatory if using exif data)" << "\n"; log_ << "\"Input cameras geroreferenced coords file.\n\n"; + + log_ << "\"-outputCoordFile \" (optional)" << "\n"; + log_ << "\"Output cameras geroreferenced coords file.\n\n"; log_ << "\"-inputFile \" (mandatory)" << "\n"; log_ << "\"Input obj file that must contain a textured mesh.\n\n"; + log_ << "\"-inputPointCloudFile \" (optional)" << "\n"; + log_ << "\"Input ply file that must contain a point cloud.\n\n"; + log_ << "\"-imagesListPath \" (mandatory if using ground control points)\n"; log_ << "Path to the list containing the image names used in the bundle.out file.\n\n"; @@ -472,6 +541,9 @@ void Georef::printHelp() log_ << "\"-outputFile \" (optional, default _geo)" << "\n"; log_ << "\"Output obj file that will contain the georeferenced texture mesh.\n\n"; + + log_ << "\"-outputPointCloudFile \" (mandatory if georeferencing a point cloud)" << "\n"; + log_ << "\"Output ply file that will contain the georeferenced point cloud.\n\n"; log_.setIsPrintingInCout(printInCoutPop); } @@ -480,7 +552,7 @@ void Georef::setDefaultOutput() { if(inputObjFilename_.empty()) { - throw GeorefException("Tried to generate default ouptut file without having an input file."); + throw GeorefException("Tried to generate default output file without having an input file."); } std::string tmp = inputObjFilename_; @@ -497,6 +569,27 @@ void Georef::setDefaultOutput() log_ << "Writing output to: " << outputObjFilename_ << "\n"; } +void Georef::setDefaultPointCloudOutput() +{ + if(inputPointCloudFilename_.empty()) + { + throw GeorefException("Tried to generate default point cloud ouptut file without having an input file."); + } + + std::string tmp = inputPointCloudFilename_; + size_t findPos = tmp.find_last_of("."); + + if(std::string::npos == findPos) + { + throw GeorefException("Tried to generate default ouptut file, could not find .ply in the input file:\n\'"+inputPointCloudFilename_+"\'"); + } + + tmp = tmp.substr(0, findPos); + + outputPointCloudFilename_ = tmp + "_geo.ply"; + log_ << "Writing output to: " << outputPointCloudFilename_ << "\n"; +} + void Georef::createGeoreferencedModel() { if (useGCP_) @@ -535,7 +628,7 @@ void Georef::readGCPs() std::ifstream imageListStream(imagesListPath_.c_str()); if (!imageListStream.good()) { - throw GeorefException("Failed opening " + imagesListPath_ + " for reading.\n"); + throw GeorefException("Failed opening image path " + imagesListPath_ + " for reading.\n"); } for (size_t i=0; i::Ptr pointCloud; + pcl::PointCloud::Ptr pointCloud(new pcl::PointCloud()); + if(pcl::io::loadPLYFile (inputPointCloudFilename_.c_str(), *pointCloud.get()) == -1) { + throw GeorefException("Error when reading point cloud:\n" + inputPointCloudFilename_ + "\n"); + } + else + { + log_ << "Successfully loaded " << pointCloud->size() << " points with corresponding normals from file.\n"; + } + log_ << '\n'; + log_ << "Applying transform to point cloud...\n"; + pcl::transformPointCloud(*pointCloud, *pointCloud, transform); + log_ << ".. 
point cloud transformed.\n"; + + pcl::PLYWriter plyWriter; + + log_ << '\n'; + log_ << "Saving point cloud file to \'" << outputPointCloudFilename_ << "\'...\n"; + //pcl::io::savePLYFileASCII(outputPointCloudFilename_.c_str(), *pointCloud.get()); + plyWriter.write(outputPointCloudFilename_.c_str(), *pointCloud.get(), false, false); + log_ << ".. point cloud file saved.\n"; + } + + if(exportCoordinateFile_) + { + log_ << '\n'; + log_ << "Saving georeferenced camera positions to "; + log_ << outputCoordFilename_; + log_<< "\n"; + std::ofstream coordStream(outputCoordFilename_.c_str()); + coordStream << georefSystem_.system_ <(georefSystem_.eastingOffset_) << " " << static_cast(georefSystem_.northingOffset_) << std::endl; + for(size_t cameraIndex = 0; cameraIndex < cameras_.size(); ++cameraIndex) + { + Vec3 globalCameraPosition = (transFinal.transform_)*(cameras_[cameraIndex].getPos()); + coordStream << globalCameraPosition.x_ << " " << globalCameraPosition.y_ << " " << globalCameraPosition.z_ << std::endl; + } + coordStream.close(); + log_ << "...coordinate file saved.\n"; + } + + if(exportGeorefSystem_) + { + printGeorefSystem(); + } } void Georef::createGeoreferencedModelFromGCPData() @@ -864,10 +1002,10 @@ void Georef::createGeoreferencedModelFromExifData() readCameras(); // Read coords from coord file generated by extract_utm tool - std::ifstream coordStream(coordFilename_.c_str()); + std::ifstream coordStream(inputCoordFilename_.c_str()); if (!coordStream.good()) { - throw GeorefException("Failed opening " + coordFilename_ + " for reading." + '\n'); + throw GeorefException("Failed opening coordinate file " + inputCoordFilename_ + " for reading." + '\n'); } std::string coordString; @@ -891,7 +1029,7 @@ void Georef::createGeoreferencedModelFromExifData() { if(nGeorefCameras >= cameras_.size()) { - throw GeorefException("Error, to many cameras in \'" + coordFilename_ + "\' coord file.\n"); + throw GeorefException("Error, to many cameras in \'" + inputCoordFilename_ + "\' coord file.\n"); } std::istringstream istr(coordString); @@ -903,7 +1041,7 @@ void Georef::createGeoreferencedModelFromExifData() if(nGeorefCameras < cameras_.size()) { - throw GeorefException("Not enough cameras in \'" + coordFilename_ + "\' coord file.\n"); + throw GeorefException("Not enough cameras in \'" + inputCoordFilename_ + "\' coord file.\n"); } // The optimal camera triplet. @@ -938,7 +1076,6 @@ void Georef::createGeoreferencedModelFromExifData() log_ << '\n'; log_ << "Reading mesh file...\n"; - // The textureds mesh.e pcl::TextureMesh mesh; pcl::io::loadOBJFile(inputObjFilename_, mesh); log_ << ".. mesh file read.\n"; @@ -956,7 +1093,6 @@ void Georef::createGeoreferencedModelFromExifData() // Update the mesh. pcl::toPCLPointCloud2 (*meshCloud, mesh.cloud); - // Iterate over each part of the mesh (one per material), to make texture file paths relative the .mtl file. for(size_t t = 0; t < mesh.tex_materials.size(); ++t) { @@ -975,7 +1111,35 @@ void Georef::createGeoreferencedModelFromExifData() saveOBJFile(outputObjFilename_, mesh, 8); log_ << ".. 
mesh file saved.\n"; - printGeorefSystem(); + if(georeferencePointCloud_) + { + //pcl::PointCloud2::Ptr pointCloud; + pcl::PointCloud::Ptr pointCloud(new pcl::PointCloud()); + if(pcl::io::loadPLYFile (inputPointCloudFilename_.c_str(), *pointCloud.get()) == -1) { + throw GeorefException("Error when reading point cloud:\n" + inputPointCloudFilename_ + "\n"); + } + else + { + log_ << "Successfully loaded " << pointCloud->size() << " points with corresponding normals from file.\n"; + } + log_ << '\n'; + log_ << "Applying transform to point cloud...\n"; + pcl::transformPointCloud(*pointCloud, *pointCloud, transform); + log_ << ".. point cloud transformed.\n"; + + pcl::PLYWriter plyWriter; + + log_ << '\n'; + log_ << "Saving point cloud file to \'" << outputPointCloudFilename_ << "\'...\n"; + //pcl::io::savePLYFileASCII(outputPointCloudFilename_.c_str(), *pointCloud.get()); + plyWriter.write(outputPointCloudFilename_.c_str(), *pointCloud.get(), false, false); + log_ << ".. point cloud file saved.\n"; + } + + if(exportGeorefSystem_) + { + printGeorefSystem(); + } } void Georef::chooseBestGCPTriplet(size_t &gcp0, size_t &gcp1, size_t &gcp2) @@ -1073,12 +1237,12 @@ void Georef::printGeorefSystem() throw GeorefException("Tried to generate default ouptut file, could not find .obj in the output file:\n\'"+outputObjFilename_+"\'"); } - tmp = tmp.substr(0, findPos); + //tmp = tmp.substr(0, findPos); - tmp = tmp + "_georef_system.txt"; + //tmp = tmp + "_georef_system.txt"; log_ << '\n'; - log_ << "Saving georeference system file to \'" << tmp << "\'...\n"; - std::ofstream geoStream(tmp.c_str()); + log_ << "Saving georeference system file to \'" << georefFilename_ << "\'...\n"; + std::ofstream geoStream(georefFilename_.c_str()); geoStream << georefSystem_ << std::endl; geoStream.close(); log_ << "... georeference system saved.\n"; diff --git a/odm_georef/src/Georef.hpp b/odm_georef/src/Georef.hpp index cf31dfd6..ad89506c 100644 --- a/odm_georef/src/Georef.hpp +++ b/odm_georef/src/Georef.hpp @@ -144,6 +144,11 @@ private: */ void setDefaultOutput(); + /*! + * \brief setDefaultPointCloudOutput Setup the output file name given the input file name. + */ + void setDefaultPointCloudOutput(); + /*! * \brief createGeoreferencedModel Makes the input file georeferenced and saves it to the output file. */ @@ -199,25 +204,32 @@ private: **/ void printGeorefSystem(); - Logger log_; /**< Logging object. */ - std::string logFile_; /**< The path to the output log file. */ + Logger log_; /**< Logging object. */ + std::string logFile_; /**< The path to the output log file. */ - std::string bundleFilename_; /**< The path to the cameras bundle file. **/ - std::string coordFilename_; /**< The path to the cameras georeference file. **/ - std::string gcpFilename_; /**< The path to the GCP file **/ - std::string imagesListPath_; /**< Path to the image list. **/ - std::string imagesLocation_; /**< The folder containing the images in the image list. **/ - std::string inputObjFilename_; /**< The path to the input mesh obj file. **/ - std::string outputObjFilename_; /**< The path to the output mesh obj file. **/ + std::string bundleFilename_; /**< The path to the cameras bundle file. **/ + std::string inputCoordFilename_; /**< The path to the cameras exif gps positions file. **/ + std::string outputCoordFilename_; /**< The path to the cameras georeferenced gps positions file. **/ + std::string gcpFilename_; /**< The path to the GCP file **/ + std::string imagesListPath_; /**< Path to the image list. 
**/ + std::string imagesLocation_; /**< The folder containing the images in the image list. **/ + std::string inputObjFilename_; /**< The path to the input mesh obj file. **/ + std::string outputObjFilename_; /**< The path to the output mesh obj file. **/ + std::string inputPointCloudFilename_; /**< The path to the input point cloud file. **/ + std::string outputPointCloudFilename_; /**< The path to the output point cloud file. **/ + std::string georefFilename_; /**< The path to the output offset file. **/ - bool useGCP_; /**< Check if GCP-file is present and use this to georeference the model. **/ - double bundleResizedTo_; /**< The size used in the previous steps to calculate the camera focal_length. */ + bool georeferencePointCloud_; + bool exportCoordinateFile_; + bool exportGeorefSystem_; + bool useGCP_; /**< Check if GCP-file is present and use this to georeference the model. **/ + double bundleResizedTo_; /**< The size used in the previous steps to calculate the camera focal_length. */ - std::vector cameras_; /**< A vector of all cameras. **/ - std::vector gcps_; /**< A vector of all GCPs. **/ - std::vector imageList_; /**< A vector containing the names of the corresponding cameras. **/ + std::vector cameras_; /**< A vector of all cameras. **/ + std::vector gcps_; /**< A vector of all GCPs. **/ + std::vector imageList_; /**< A vector containing the names of the corresponding cameras. **/ - GeorefSystem georefSystem_; /**< Contains the georeference system. **/ + GeorefSystem georefSystem_; /**< Contains the georeference system. **/ }; /*! diff --git a/odm_orthophoto/CMakeLists.txt b/odm_orthophoto/CMakeLists.txt old mode 100755 new mode 100644 diff --git a/odm_orthophoto/src/Logger.cpp b/odm_orthophoto/src/Logger.cpp old mode 100755 new mode 100644 diff --git a/odm_orthophoto/src/Logger.hpp b/odm_orthophoto/src/Logger.hpp old mode 100755 new mode 100644 diff --git a/odm_orthophoto/src/OdmOrthoPhoto.cpp b/odm_orthophoto/src/OdmOrthoPhoto.cpp old mode 100755 new mode 100644 index e7a04b70..ce5056f4 --- a/odm_orthophoto/src/OdmOrthoPhoto.cpp +++ b/odm_orthophoto/src/OdmOrthoPhoto.cpp @@ -1,6 +1,7 @@ // C++ #include #include +#include // This #include "OdmOrthoPhoto.hpp" @@ -44,6 +45,7 @@ OdmOrthoPhoto::OdmOrthoPhoto() inputGeoRefFile_ = ""; outputFile_ = "ortho.jpg"; logFile_ = "log.txt"; + outputCornerFile_ = ""; resolution_ = 0.0f; @@ -229,6 +231,16 @@ void OdmOrthoPhoto::parseArguments(int argc, char *argv[]) outputFile_ = std::string(argv[argIndex]); log_ << "Writing output to: " << outputFile_ << "\n"; } + else if(argument == "-outputCornerFile") + { + argIndex++; + if (argIndex >= argc) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputCornerFile_ = std::string(argv[argIndex]); + log_ << "Writing corners to: " << outputCornerFile_ << "\n"; + } else { printHelp(); @@ -258,12 +270,15 @@ void OdmOrthoPhoto::printHelp() log_ << "\"-inputFile \" (mandatory)\n"; log_ << "\"Input obj file that must contain a textured mesh.\n\n"; - log_ << "\"-inputGeoRefFile \" (optional, if specified boundary points are assumed to be givan as world coordinates. If not specified, the boundary points are assumed to be local coordinates)\n"; + log_ << "\"-inputGeoRefFile \" (optional, if specified boundary points are assumed to be given as world coordinates. 
If not specified, the boundary points are assumed to be local coordinates)\n"; log_ << "\"Input geograpical reference system file that describes the world position of the model's origin.\n\n"; log_ << "\"-outputFile \" (optional, default: ortho.jpg)\n"; log_ << "\"Target file in which the orthophoto is saved.\n\n"; + log_ << "\"-outputCornerFile \" (optional)\n"; + log_ << "\"Target text file for boundary corner points, written as \"xmin ymin xmax ymax\".\n\n"; + log_ << "\"-resolution \" (mandatory)\n"; log_ << "\"The number of pixels used per meter.\n\n"; @@ -364,7 +379,7 @@ void OdmOrthoPhoto::createOrthoPhoto() } // Init ortho photo - photo_ = cv::Mat::zeros(rowRes, colRes, CV_8UC3) + cv::Scalar(255, 255, 255); + photo_ = cv::Mat::zeros(rowRes, colRes, CV_8UC4) + cv::Scalar(255, 255, 255, 0); depth_ = cv::Mat::zeros(rowRes, colRes, CV_32F) - std::numeric_limits::infinity(); // Contains the vertices of the mesh. @@ -431,6 +446,21 @@ void OdmOrthoPhoto::createOrthoPhoto() log_ << '\n'; log_ << "Writing ortho photo to " << outputFile_ << "\n"; cv::imwrite(outputFile_, photo_); + + if (!outputCornerFile_.empty()) + { + log_ << "Writing corner coordinates to " << outputCornerFile_ << "\n"; + std::ofstream cornerStream(outputCornerFile_.c_str()); + if (!cornerStream.is_open()) + { + throw OdmOrthoPhotoException("Failed opening output corner file " + outputCornerFile_ + "."); + } + cornerStream.setf(std::ios::scientific, std::ios::floatfield); + cornerStream.precision(17); + cornerStream << xMin << " " << yMin << " " << xMax << " " << yMax; + cornerStream.close(); + } + log_ << "Orthophoto generation done.\n"; } @@ -443,34 +473,34 @@ void OdmOrthoPhoto::adjustBoundsForGeoRef() // The system name std::string system; - // The false easting and northing - int falseEasting, falseNorthing; + // The east and north offsets + int eastOffset, northOffset; // Parse file std::getline(geoRefStream, system); - if(!(geoRefStream >> falseEasting)) + if(!(geoRefStream >> eastOffset)) { - throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract false easting."); + throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract east offset."); } - if(!(geoRefStream >> falseNorthing)) + if(!(geoRefStream >> northOffset)) { - throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract false northing."); + throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract north offset."); } - log_ << "Georeference system\n"; + log_ << "Georeference system:\n"; log_ << system << "\n"; - log_ << "False easting " << falseEasting << "\n"; - log_ << "False northing " << falseNorthing << "\n"; + log_ << "East offset: " << eastOffset << "\n"; + log_ << "North offset: " << northOffset << "\n"; // Adjust boundary points. 
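To make the small text formats in play here concrete: the georeference file parsed above carries the coordinate system description on its first line and the east/north offsets after it, while the new corner file holds "xmin ymin xmax ymax" in the local (offset-reduced) frame of the model. A sketch of reading both and recovering world coordinates; the file names are hypothetical, and the local = world - offset convention follows the boundary-point adjustment just below:

    # Hypothetical file names; the formats follow the parsing/writing code in this patch.
    with open("georef_system.txt") as f:
        system = f.readline().strip()                        # e.g. the UTM system description
        east_offset, north_offset = map(int, f.readline().split())

    with open("orthophoto_corners.txt") as f:
        xmin, ymin, xmax, ymax = map(float, f.read().split())

    # Local model coordinates are offset-reduced, so adding the offsets restores world values.
    print system
    print xmin + east_offset, ymin + north_offset, xmax + east_offset, ymax + north_offset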
- boundaryPoint1_[0] = static_cast(worldPoint1_.eastInteger_ - falseEasting) + worldPoint1_.eastFractional_; - boundaryPoint1_[1] = static_cast(worldPoint1_.northInteger_ - falseNorthing) + worldPoint1_.northFractional_; - boundaryPoint2_[0] = static_cast(worldPoint2_.eastInteger_ - falseEasting) + worldPoint2_.eastFractional_; - boundaryPoint2_[1] = static_cast(worldPoint2_.northInteger_ - falseNorthing) + worldPoint2_.northFractional_; - boundaryPoint3_[0] = static_cast(worldPoint3_.eastInteger_ - falseEasting) + worldPoint3_.eastFractional_; - boundaryPoint3_[1] = static_cast(worldPoint3_.northInteger_ - falseNorthing) + worldPoint3_.northFractional_; - boundaryPoint4_[0] = static_cast(worldPoint4_.eastInteger_ - falseEasting) + worldPoint4_.eastFractional_; - boundaryPoint4_[1] = static_cast(worldPoint4_.northInteger_ - falseNorthing) + worldPoint4_.northFractional_; + boundaryPoint1_[0] = static_cast(worldPoint1_.eastInteger_ - eastOffset) + worldPoint1_.eastFractional_; + boundaryPoint1_[1] = static_cast(worldPoint1_.northInteger_ - northOffset) + worldPoint1_.northFractional_; + boundaryPoint2_[0] = static_cast(worldPoint2_.eastInteger_ - eastOffset) + worldPoint2_.eastFractional_; + boundaryPoint2_[1] = static_cast(worldPoint2_.northInteger_ - northOffset) + worldPoint2_.northFractional_; + boundaryPoint3_[0] = static_cast(worldPoint3_.eastInteger_ - eastOffset) + worldPoint3_.eastFractional_; + boundaryPoint3_[1] = static_cast(worldPoint3_.northInteger_ - northOffset) + worldPoint3_.northFractional_; + boundaryPoint4_[0] = static_cast(worldPoint4_.eastInteger_ - eastOffset) + worldPoint4_.eastFractional_; + boundaryPoint4_[1] = static_cast(worldPoint4_.northInteger_ - northOffset) + worldPoint4_.northFractional_; log_ << "Local boundary points:\n"; log_ << "Point 1: " << boundaryPoint1_[0] << " " << boundaryPoint1_[1] << "\n"; @@ -908,7 +938,7 @@ void OdmOrthoPhoto::renderPixel(int row, int col, float s, float t, const cv::Ma b += static_cast(bl[0]) * dr * dt; b += static_cast(br[0]) * dl * dt; - photo_.at(row,col) = cv::Vec3b(static_cast(b), static_cast(g), static_cast(r)); + photo_.at(row,col) = cv::Vec4b(static_cast(b), static_cast(g), static_cast(r), 255); } void OdmOrthoPhoto::getBarycentricCoordiantes(pcl::PointXYZ v1, pcl::PointXYZ v2, pcl::PointXYZ v3, float x, float y, float &l1, float &l2, float &l3) const diff --git a/odm_orthophoto/src/OdmOrthoPhoto.hpp b/odm_orthophoto/src/OdmOrthoPhoto.hpp old mode 100755 new mode 100644 index 426084e0..de2793d0 --- a/odm_orthophoto/src/OdmOrthoPhoto.hpp +++ b/odm_orthophoto/src/OdmOrthoPhoto.hpp @@ -177,6 +177,7 @@ private: std::string inputFile_; /**< Path to the textured mesh as an obj-file. */ std::string inputGeoRefFile_; /**< Path to the georeference system file. */ std::string outputFile_; /**< Path to the destination file. */ + std::string outputCornerFile_; /**< Path to the output corner file. */ std::string logFile_; /**< Path to the log file. */ float resolution_; /**< The number of pixels per meter in the ortho photo. 
*/ diff --git a/odm_orthophoto/src/main.cpp b/odm_orthophoto/src/main.cpp old mode 100755 new mode 100644 diff --git a/run.py b/run.py new file mode 100755 index 00000000..c7ffe5c8 --- /dev/null +++ b/run.py @@ -0,0 +1,1004 @@ +#!/usr/bin/python + +import os, sys, multiprocessing, json, datetime, re, subprocess, shutil, shlex, collections, fractions +import knnMatch_exif +## the defs + + +CURRENT_DIR = os.getcwd() +BIN_PATH_ABS = os.path.abspath(os.path.dirname(os.path.abspath(__file__))) +CORES = multiprocessing.cpu_count() + + +def getCcdWidths(): + with open(BIN_PATH_ABS + "/ccd_defs.json") as jsonFile: + return json.load(jsonFile) + +objects = [] +ccdWidths = getCcdWidths() + +BIN_PATH = BIN_PATH_ABS + "/bin" + + +objectStats = {'count': 0, "good": 0, "bad": 0, "minWidth": 0, "minHeight": 0, "maxWidth": 0, "maxHeight": 0} +jobOptions = {'resizeTo': 0, 'srcDir': CURRENT_DIR, 'utmZone': -999, 'utmSouth': False, 'utmEastOffset': 0, 'utmNorthOffset': 0} +args = {} + +def run(cmd): + returnCode = os.system(cmd) + if (returnCode != 0): + sys.exit("\nquitting cause: \n\t" + cmd + "\nreturned with code " + str(returnCode) + ".\n") + +def now(): + return datetime.datetime.now().strftime('%a %b %d %H:%M:%S %Z %Y') + +def parseArgs(): + + ## defaults + #args['--match-size'] = 200 + + args['--resize-to'] = 2400 + + args['--start-with'] = "resize" + args['--end-with'] = "odm_orthophoto" + + args['--cmvs-maxImages'] = 500 + + args['--matcher-ratio'] = 0.6 + args['--matcher-threshold'] = 2.0 + args['--matcher-preselect'] = True + args['--matcher-useKnn'] = True + args['--matcher-kDistance'] = 20 + + args['--pmvs-level'] = 1 + args['--pmvs-csize'] = 2 + args['--pmvs-threshold'] = 0.7 + args['--pmvs-wsize'] = 7 + args['--pmvs-minImageNum'] = 3 + + args['--odm_meshing-maxVertexCount'] = 100000 + args['--odm_meshing-octreeDepth'] = 9 + args['--odm_meshing-samplesPerNode'] = 1 + args['--odm_meshing-solverDivide'] = 9 + + args['--odm_texturing-textureResolution'] = 4096 + args['--odm_texturing-textureWithSize'] = 3600 + + args['--odm_georeferencing-gcpFile'] = "gcp_list.txt" + args['--odm_georeferencing-useGcp'] = False + + args['--zip-results'] = True + + for i in range(1, len(sys.argv)): + if re.search("^--[^a-z\-]*", sys.argv[i]): + args[sys.argv[i]] = True + + if not re.search("^--[^a-z\-]*", sys.argv[i + 1]): + if sys.argv[i] == "--resize-to": + if sys.argv[i + 1] == "orig": + args[sys.argv[i]] = 0 + elif re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + print(str(args[sys.argv[i]])) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--start-with": + if (sys.argv[i + 1] == "resize" or sys.argv[i + 1] == "getKeypoints" or sys.argv[i + 1] == "match" or + sys.argv[i + 1] == "bundler" or sys.argv[i + 1] == "cmvs" or sys.argv[i + 1] == "pmvs" or sys.argv[i + 1] == "odm_meshing" or + sys.argv[i + 1] == "odm_texturing" or sys.argv[i + 1] == "odm_georeferencing" or sys.argv[i + 1] == "odm_orthophoto"): + args[sys.argv[i]] = sys.argv[i + 1] + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1] + "\n\t valid values are \"resize\", \"getKeypoints\", \"match\", \"bundler\", \"cmvs\", \"pmvs\",\"odm_meshing\", \"odm_texturing\", \"odm_georeferencing\", \"odm_orthophoto\"") + + if sys.argv[i] == "--end-with": + if (sys.argv[i + 1] == "resize" or sys.argv[i + 1] == "getKeypoints" or sys.argv[i + 1] == "match" or sys.argv[i + 1] == "bundler" or + sys.argv[i + 1] == "cmvs" or 
sys.argv[i + 1] == "pmvs" or sys.argv[i + 1] == "odm_meshing" or sys.argv[i + 1] == "odm_texturing" or + sys.argv[i + 1] == "odm_georeferencing" or sys.argv[i + 1] == "odm_orthophoto"): + args[sys.argv[i]] = sys.argv[i + 1] + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1] + "\n\t valid values are \"resize\", \"getKeypoints\", \"match\", \"bundler\", \"cmvs\", \"pmvs\", \"odm_meshing\", \"odm_texturing\", \"odm_georeferencing\", \"odm_orthophoto\"") + + if sys.argv[i] == "--run-only": + if (sys.argv[i + 1] == "resize" or sys.argv[i + 1] == "getKeypoints" or sys.argv[i + 1] == "match" or sys.argv[i + 1] == "bundler" or + sys.argv[i + 1] == "cmvs" or sys.argv[i + 1] == "pmvs" or sys.argv[i + 1] == "odm_meshing" or sys.argv[i + 1] == "odm_texturing" or + sys.argv[i + 1] == "odm_georeferencing" or sys.argv[i + 1] == "odm_orthophoto"): + args['--start-with'] = sys.argv[i + 1] + args['--end-with'] = sys.argv[i + 1] + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1] + "\n\t valid values are \"resize\", \"getKeypoints\", \"match\", \"bundler\", \"cmvs\", \"pmvs\", \"odm_meshing\", \"odm_texturing\", \"odm_georeferencing\", \"odm_orthophoto\"") + + if sys.argv[i] == "--matcher-threshold": + if re.search("^-?[0-9]*\.?[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = float(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--matcher-ratio": + if re.search("^-?[0-9]*\.?[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = float(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--matcher-preselect": + if sys.argv[i + 1] == "true" or sys.argv[i + 1] == "false": + args[sys.argv[i]] = (sys.argv[i + 1].strip() == "true") + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--matcher-useKnn": + if sys.argv[i + 1] == "true" or sys.argv[i + 1] == "false": + args[sys.argv[i]] = (sys.argv[i + 1].strip() == "true") + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--matcher-kDistance": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1]) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--cmvs-maxImages": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--pmvs-level": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--pmvs-csize": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--pmvs-threshold": + if re.search("^-?[0-9]*\.?[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = float(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--pmvs-wsize": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for 
\"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--pmvs-minImageNum": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + + if sys.argv[i] == "--force-focal": + if re.search("^[0-9]*\.?[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = float(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--force-ccd": + if re.search("^[0-9]*\.?[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = float(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--odm_meshing-maxVertexCount": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--odm_meshing-octreeDepth": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--odm_meshing-samplesPerNode": + if re.search("^[0-9]*\.?[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = float(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--odm_meshing-solverDivide": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--odm_texturing-textureResolution": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--odm_texturing-textureWithSize": + if re.search("^[0-9]*$", sys.argv[i + 1]): + args[sys.argv[i]] = int(sys.argv[i + 1].strip()) + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--odm_georeferencing-gcpFile": + args[sys.argv[i]] = sys.argv[i + 1] + + if sys.argv[i] == "--odm_georeferencing-useGcp": + if sys.argv[i + 1].strip() == "true" or sys.argv[i + 1].strip() == "false": + args[sys.argv[i]] = (sys.argv[i + 1].strip() == "true") + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if sys.argv[i] == "--zip-results": + if sys.argv[i + 1].strip() == "true" or sys.argv[i + 1].strip() == "false": + args[sys.argv[i]] = (sys.argv[i + 1].strip() == "true") + else: + sys.exit("\n invalid parameter for \"" + sys.argv[i] + "\": " + sys.argv[i + 1]) + + if '--help' in args: + print "Usage: run.py [options]" + print "it should be run from the folder contining the images to which should reconstructed" + print "" + print "options:" + print " --help: " + print " Prints this screen" + print " " + + print " --resize-to: " + print " default: 2400" + print " will resize the images so that the maximum width/height of the images are smaller or equal to the specified number" + print " If \"--resize-to orig\" is used it will use the images without resizing" + print " " + + print " --start-with: <\"resize\"|\"getKeypoints\"|\"match\"|\"bundler\"|\"cmvs\"|\"pmvs\"|\"odm_meshing\"|\"odm_texturing\"|\"odm_georeferencing\">" + print " default: resize" + 
print " Will start the sript at the specified step" + print " " + + print " --end-with: <\"resize\"|\"getKeypoints\"|\"match\"|\"bundler\"|\"cmvs\"|\"pmvs\"|\"odm_meshing\"|\"odm_texturing\"|\"odm_georeferencing\">" + print " default: odm_orthophoto" + print " Will stop the sript after the specified step" + print " " + + print " --run-only: <\"resize\"|\"getKeypoints\"|\"match\"|\"bundler\"|\"cmvs\"|\"pmvs\"|\"odm_meshing\"|\"odm_texturing\"|\"odm_georeferencing\">" + print " Will only execute the specified step" + print " equal to --start-with --end-with " + print " " + + print " --force-focal: " + print " Override the focal length information for the images" + print " " + + print " --force-ccd: " + print " Override the ccd width information for the images" + print " " + + print " --matcher-threshold: (percent)" + print " default: 2.0" + print " Ignore matched keypoints if the two images share less than percent of keypoints" + print " " + + print " --matcher-ratio: " + print " default: 0.6" + print " Ratio of the distance to the next best matched keypoint" + print " " + + print " --matcher-preselect: " + print " default: true" + print " use GPS exif data, if available, to match each image only with its k-nearest neighbors, " + print " or all images within a certain distance threshold" + print " " + + print " --matcher-useKnn: " + print " default: true" + print " true means knn is to be used for preselection of pairs. " + print " set to false to use distance thresholding instead." + print " " + + print " --matcher-kDistance: " + print " default: 20" + print " the k value if knnmode is used, denoting how many images each image should be matched against." + print " if distance matching is used, the parameter is the minimum distance between images in meters." + print " " + + print " --cmvs-maxImages: " + print " default: 100" + print " The maximum number of images per cluster" + print " " + + print " --pmvs-level: " + print " default: 1" + print " " + + print " --pmvs-csize: " + print " default: 2" + print " " + + print " --pmvs-threshold: " + print " default: 0.7" + print " " + + print " --pmvs-wsize: " + print " default: 7" + print " " + + print " --pmvs-minImageNum: " + print " default: 3" + print " See http://grail.cs.washington.edu/software/pmvs/documentation.html for an explanation of these parameters" + print " " + print " " + + print " --odm_meshing-maxVertexCount: " + print " default: 100000" + print " The maximum vertex count of the output mesh." + print " " + + print " --odm_meshing-octreeDepth: " + print " default: 9" + print " Oct-tree depth used in the mesh reconstruction, increase to get more vertices, recommended values are 8-12" + print " " + + print " --odm_meshing-samplesPerNode: " + print " default: 1" + print " Number of points per octree node, recommended value: 1.0" + print " " + + print " --odm_meshing-solverDivide: " + print " default: 9" + print " Oct-tree depth at which the Laplacian equation is solved in the surface reconstruction step." + print " Increasing this value increases computation times slightly but helps reduce memory usage." + print " " + + print " --odm_texturing-textureResolution: " + print " default: 4096" + print " The resolution of the output textures. Must be greater than textureWithSize." + print " " + + print " --odm_texturing-textureWithSize: " + print " default: 3600" + print " The resolution to rescale the images performing the texturing." 
+ print " " + + print " --odm_georeferencing-gcpFile: " + print " Path to the file containing the ground control points used for georeferencing." + print " The file needs to be on the following line format:" + print " easting northing height pixelrow pixelcol imagename" + print " " + + print " --odm_georeferencing-useGcp: " + print " Set to true for enabling GCPs from the file above." + print " " + + print " --zip-results: " + print " default: true" + print " Set to false if you do not want to have gunzipped tarball of the results." + print " " + + sys.exit() + + print "\n - configuration:" + + for key in collections.OrderedDict(sorted(args.items())): + if key != "": + print " " + key + ": " + str(args[key]) + + print "\n" + +def runAndReturn(cmdSrc, cmdDest): + srcProcess = subprocess.Popen(shlex.split(cmdSrc), stdout=subprocess.PIPE) + if cmdDest: + destProcess = subprocess.Popen(shlex.split(cmdDest), stdin=srcProcess.stdout, stdout=subprocess.PIPE) + stdout, stderr = destProcess.communicate() + else: + stdout, stderr = srcProcess.communicate() + + return stdout.decode('ascii') + +def calculateEpsg(utmZone, south): + if south: + return 32700 + utmZone + else: + return 32600 + utmZone + +def parseCoordinateSystem(): + if os.path.isfile(jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt"): + with open(jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt") as f: + for lineNumber, line in enumerate(f): + if lineNumber == 0: + tokens = line.split(' ') + if len(tokens) == 3: + utmZoneString = tokens[2][0:len(tokens[2])-2].strip() + utmSouthBool = (tokens[2][len(tokens[2])-2].strip() == 'S') + jobOptions["csString"] = "+datum=WGS84 +proj=utm +zone=" + utmZoneString + (" +south" if utmSouthBool else "") + jobOptions["epsg"] = calculateEpsg(int(utmZoneString), utmSouthBool) + elif lineNumber == 1: + tokens = line.split(' ') + if len(tokens) == 2: + jobOptions["utmEastOffset"] = int(tokens[0].strip()) + jobOptions["utmNorthOffset"] = int(tokens[1].strip()) + else: + break + +def prepareObjects(): + ## get the source list + source_files = runAndReturn('ls -1', 'egrep "\.[jJ]{1}[pP]{1}[eE]{0,1}[gG]{1}"') + + print "\n - source files - " + now() + + for filename in source_files.split('\n'): + filename = filename.rstrip('\n') + if not filename: + continue + file_make = runAndReturn('jhead "' + filename + '"', 'grep "Camera make"') + file_model = runAndReturn('jhead "' + filename + '"', 'grep "Camera model"') + file_focal = runAndReturn('jhead "' + filename + '"', 'grep "Focal length"') + file_ccd = runAndReturn('jhead "' + filename + '"', 'grep "CCD width"') + file_resolution = runAndReturn('jhead "' + filename + '"', 'grep "Resolution"') + + fileObject = {} + + fileObject["src"] = filename + fileObject["base"] = re.sub("\.[^\.]*$", "", filename) + + match = re.search(": ([^\n\r]*)", file_make) + if match: + fileObject["make"] = match.group(1).strip() + + match = re.search(": ([^\n\r]*)", file_model) + if match: + fileObject["model"] = match.group(1).strip() + + if "make" in fileObject: + fileObject["make"] = re.sub("^\s+", "", fileObject["make"]) + fileObject["make"] = re.sub("\s+$", "", fileObject["make"]) + + if "model" in fileObject: + fileObject["model"] = re.sub("^\s+", "", fileObject["model"]) + fileObject["model"] = re.sub("\s+$", "", fileObject["model"]) + + if "make" in fileObject: + fileObject["id"] = fileObject["make"] + if "model" in fileObject: + fileObject["id"] += " " + fileObject["model"] + + match = re.search(": ([0-9]*) x ([0-9]*)", file_resolution) + if match: + 
fileObject["width"] = int(match.group(1).strip()) + fileObject["height"] = int(match.group(2).strip()) + + if not '--force-focal' in args: + match = re.search(":[\ ]*([0-9\.]*)mm", file_focal) + if match: + fileObject["focal"] = float((match.group()[1:-2]).strip()) + else: + fileObject["focal"] = args['--force-focal'] + + if not '--force-ccd' in args: + match = re.search(":[\ ]*([0-9\.]*)mm", file_ccd) + if match: + fileObject["ccd"] = float(match.group()[1:-2].strip()) + + if (not "ccd" in fileObject) and ("id" in fileObject): + fileObject["ccd"] = float(ccdWidths[fileObject["id"]]) + else: + fileObject["ccd"] = args['--force-ccd'] + + if "ccd" in fileObject and "focal" in fileObject and "width" in fileObject and "height" in fileObject: + if fileObject["width"] > fileObject["height"]: + fileObject["focalpx"] = fileObject["width"] * fileObject["focal"] / fileObject["ccd"] + else: + fileObject["focalpx"] = fileObject["height"] * fileObject["focal"] / fileObject["ccd"] + + fileObject["isOk"] = True + objectStats["good"] += 1 + + print " using " + fileObject["src"] + " dimensions: " + str(fileObject["width"]) + "x" + str(fileObject["height"]) + " / focal: " + str(fileObject["focal"]) + "mm / ccd: " + str(fileObject["ccd"]) + "mm" + else: + fileObject["isOk"] = False + objectStats["bad"] += 1 + + if "id" in fileObject: + print "\n no CCD width or focal length found for " + fileObject["src"] + " - camera: \"" + fileObject["id"] + "\"" + else: + print "\n no CCD width or focal length found" + + objectStats["count"] += 1 + + if "width" in fileObject and "height" in fileObject: + if objectStats["minWidth"] == 0: + objectStats["minWidth"] = fileObject["width"] + if objectStats["minHeight"] == 0: + objectStats["minHeight"] = fileObject["height"] + + if objectStats["minWidth"] < fileObject["width"]: + objectStats["minWidth"] = objectStats["minWidth"] + else: + objectStats["minWidth"] = fileObject["width"] + + if objectStats["minHeight"] < fileObject["height"]: + objectStats["minHeight"] = objectStats["minHeight"] + else: + objectStats["minHeight"] = fileObject["height"] + + if objectStats["maxWidth"] > fileObject["width"]: + objectStats["maxWidth"] = objectStats["maxWidth"] + else: + objectStats["maxWidth"] = fileObject["width"] + + if objectStats["maxHeight"] > fileObject["height"]: + objectStats["maxHeight"] = objectStats["maxHeight"] + else: + objectStats["maxHeight"] = fileObject["height"] + + objects.append(fileObject) + + + if not "good" in objectStats: + print "\n found no usable images - quitting\n" + sys.exit() + else: + print "\n found " + str(objectStats["good"]) + " usable images" + + print "\n" + + jobOptions["resizeTo"] = args['--resize-to'] + + print " using max image size of " + str(jobOptions["resizeTo"]) + " x " + str(jobOptions["resizeTo"]) + + jobOptions["jobDir"] = jobOptions["srcDir"] + "/reconstruction-with-image-size-" + str(jobOptions["resizeTo"]) + + jobOptions["step_1_convert"] = jobOptions["jobDir"] + "/_convert.templist.txt" + jobOptions["step_1_vlsift"] = jobOptions["jobDir"] + "/_vlsift.templist.txt" + jobOptions["step_1_gzip"] = jobOptions["jobDir"] + "/_gzip.templist.txt" + + jobOptions["step_2_filelist"] = jobOptions["jobDir"] + "/_filelist.templist.txt" + jobOptions["step_2_macthes_jobs"] = jobOptions["jobDir"] + "/_matches_jobs.templist.txt" + jobOptions["step_2_matches_dir"] = jobOptions["jobDir"] + "/matches" + jobOptions["step_2_matches"] = jobOptions["jobDir"] + "/matches.init.txt" + + jobOptions["step_3_filelist"] = jobOptions["jobDir"] + "/list.txt" + 
jobOptions["step_3_bundlerOptions"] = jobOptions["jobDir"] + "/options.txt"
+
+    try:
+        os.mkdir(jobOptions["jobDir"])
+    except:
+        pass
+
+    for fileObject in objects:
+        if fileObject["isOk"]:
+            fileObject["step_0_resizedImage"] = jobOptions["jobDir"] + "/" + fileObject["base"] + ".jpg"
+            fileObject["step_1_pgmFile"] = jobOptions["jobDir"] + "/" + fileObject["base"] + ".pgm"
+            fileObject["step_1_keyFile"] = jobOptions["jobDir"] + "/" + fileObject["base"] + ".key"
+            fileObject["step_1_gzFile"] = jobOptions["jobDir"] + "/" + fileObject["base"] + ".key.gz"
+
+def resize():
+    print "\n - preparing images - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+
+    for fileObject in objects:
+        if fileObject["isOk"]:
+            if not os.path.isfile(fileObject["step_0_resizedImage"]):
+                if jobOptions["resizeTo"] != 0 and ((int(fileObject["width"]) > jobOptions["resizeTo"]) or (int(fileObject["height"]) > jobOptions["resizeTo"])):
+                    sys.stdout.write(" resizing " + fileObject["src"] + " \tto " + fileObject["step_0_resizedImage"])
+                    run("convert -resize " + str(jobOptions["resizeTo"]) + "x" + str(jobOptions["resizeTo"]) + " -quality 100 \"" + jobOptions["srcDir"] + "/" + fileObject["src"] + "\" \"" + fileObject["step_0_resizedImage"] + "\"")
+                else:
+                    sys.stdout.write(" copying " + fileObject["src"] + " \tto " + fileObject["step_0_resizedImage"])
+                    shutil.copyfile(CURRENT_DIR + "/" + fileObject["src"], fileObject["step_0_resizedImage"])
+            else:
+                print " using existing " + fileObject["src"] + " \tto " + fileObject["step_0_resizedImage"]
+
+            file_resolution = runAndReturn('jhead "' + fileObject["step_0_resizedImage"] + '"', 'grep "Resolution"')
+            match = re.search(": ([0-9]*) x ([0-9]*)", file_resolution)
+            if match:
+                fileObject["width"] = int(match.group(1).strip())
+                fileObject["height"] = int(match.group(2).strip())
+            print "\t (" + str(fileObject["width"]) + " x " + str(fileObject["height"]) + ")"
+
+    if args['--end-with'] != "resize":
+        getKeypoints()
+
+def getKeypoints():
+    print "\n - finding keypoints - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+
+    vlsiftJobs = ""
+    c = 0
+
+    for fileObject in objects:
+        c += 1
+
+        if fileObject["isOk"]:
+            if '--lowe-sift' in args:
+                vlsiftJobs += "echo -n \"" + str(c) + "/" + str(objectStats["good"]) + " - \" && convert -format pgm \"" + fileObject["step_0_resizedImage"] + "\" \"" + fileObject["step_1_pgmFile"] + "\""
+                vlsiftJobs += " && \"" + BIN_PATH + "/sift\" < \"" + fileObject["step_1_pgmFile"] + "\" > \"" + fileObject["step_1_keyFile"] + "\""
+                vlsiftJobs += " && gzip -f \"" + fileObject["step_1_keyFile"] + "\""
+                vlsiftJobs += " && rm -f \"" + fileObject["step_1_pgmFile"] + "\""
+                vlsiftJobs += " && rm -f \"" + fileObject["step_1_keyFile"] + ".sift\"\n"
+            else:
+                if not os.path.isfile(jobOptions["jobDir"] + "/" + fileObject["base"] + ".key.bin"):
+                    vlsiftJobs += "echo -n \"" + str(c) + "/" + str(objectStats["good"]) + " - \" && convert -format pgm \"" + fileObject["step_0_resizedImage"] + "\" \"" + fileObject["step_1_pgmFile"] + "\""
+                    vlsiftJobs += " && \"" + BIN_PATH + "/vlsift\" \"" + fileObject["step_1_pgmFile"] + "\" -o \"" + fileObject["step_1_keyFile"] + ".sift\" > /dev/null && perl \"" + BIN_PATH + "/../convert_vlsift_to_lowesift.pl\" \"" + jobOptions["jobDir"] + "/" + fileObject["base"] + "\""
+                    vlsiftJobs += " && gzip -f \"" + fileObject["step_1_keyFile"] + "\""
+                    vlsiftJobs += " && rm -f \"" + fileObject["step_1_pgmFile"] + "\""
+                    vlsiftJobs += " && rm -f \"" + fileObject["step_1_keyFile"] + ".sift\"\n"
+                else:
+                    print "using existing " + jobOptions["jobDir"] + "/" + fileObject["base"] + ".key.bin"
+
+    siftDest = open(jobOptions["step_1_vlsift"], 'w')
+    siftDest.write(vlsiftJobs)
+    siftDest.close()
+
+    run("\"" + BIN_PATH + "/parallel\" --no-notice --halt-on-error 1 -j+0 < \"" + jobOptions["step_1_vlsift"] + "\"")
+
+    if args['--end-with'] != "getKeypoints":
+        match()
+
+def match():
+    print "\n - matching keypoints - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+    try:
+        os.mkdir(jobOptions["step_2_matches_dir"])
+    except:
+        pass
+
+    matchesJobs = ""
+    c = 0
+    t = (objectStats["good"] - 1) * (objectStats["good"] / 2)
+
+    preselected_pairs = []
+
+    # Create a file list with all keypoint files
+    filesList = ""
+    for fileObject in objects:
+        if fileObject["isOk"]:
+            filesList += fileObject["step_1_keyFile"] + "\n"
+    matchDest = open(jobOptions["step_2_filelist"], 'w')
+    matchDest.write(filesList)
+    matchDest.close()
+
+    # Check if preselection is to be run
+    if args["--matcher-preselect"]:
+        useKnn = True
+        if args["--matcher-useKnn"]:
+            useKnn = False
+        preselected_pairs = knnMatch_exif.preselect_pairs(BIN_PATH + "/odm_extract_utm", jobOptions["step_2_filelist"], args["--matcher-kDistance"], args["--matcher-useKnn"])
+    if len(preselected_pairs) != 0:
+        for i, j in preselected_pairs:
+            c += 1
+            if i < 10:
+                print i, j
+            if not os.path.isfile(jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt"):
+                matchesJobs += "echo -n \".\" && touch \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" && \"" + BIN_PATH + "/KeyMatch\" \"" + objects[i]["step_1_keyFile"] + "\" \"" + objects[j]["step_1_keyFile"] + "\" \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" " + str(args['--matcher-ratio']) + " " + str(args['--matcher-threshold']) + "\n"
+
+
+    # Match all image pairs
+    else:
+        if args["--matcher-preselect"] == "true":
+            print "Failed to run pair preselection, proceeding with exhaustive matching."
+        for i in range(0, objectStats["good"]):
+            for j in range(i + 1, objectStats["good"]):
+                c += 1
+                if not os.path.isfile(jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt"):
+                    matchesJobs += "echo -n \".\" && touch \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" && \"" + BIN_PATH + "/KeyMatch\" \"" + objects[i]["step_1_keyFile"] + "\" \"" + objects[j]["step_1_keyFile"] + "\" \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" " + str(args['--matcher-ratio']) + " " + str(args['--matcher-threshold']) + "\n"
+
+    matchDest = open(jobOptions["step_2_macthes_jobs"], 'w')
+    matchDest.write(matchesJobs)
+    matchDest.close()
+
+    run("\"" + BIN_PATH + "/parallel\" --no-notice --halt-on-error 1 -j+0 < \"" + jobOptions["step_2_macthes_jobs"] + "\"")
+    run("rm -f \"" + jobOptions["step_2_matches"] + "\"")
+
+    for i in range(0, objectStats["good"]):
+        for j in range(i + 1, objectStats["good"]):
+            c += 1
+            if os.path.isfile(jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt") and os.path.getsize(jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt") > 0:
+                run("echo \"" + str(i) + " " + str(j) + "\" >> \"" + jobOptions["step_2_matches"] + "\" && cat \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" >> \"" + jobOptions["step_2_matches"] + "\"")
+
+    filesList = ""
+    for fileObject in objects:
+        if fileObject["isOk"]:
+            filesList += fileObject["step_1_keyFile"] + "\n"
+
+    matchDest = open(jobOptions["step_2_filelist"], 'w')
+    matchDest.write(filesList)
+    matchDest.close()
+
+    # run("\"" + BIN_PATH + "/KeyMatchFull\" \"" + jobOptions["step_2_filelist"] + "\" \"" + jobOptions["step_2_matches"] + "\" ")
+
+    if args['--end-with'] != "match":
+        bundler()
+
+def bundler():
+    print "\n - running bundler - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/bundle")
+    except:
+        pass
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/pmvs")
+    except:
+        pass
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/pmvs/txt")
+    except:
+        pass
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/pmvs/visualize")
+    except:
+        pass
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/pmvs/models")
+    except:
+        pass
+
+    filesList = ""
+
+    for fileObject in objects:
+        if fileObject["isOk"]:
+            filesList += "./" + fileObject["base"] + ".jpg 0 {:.5f}\n".format(fileObject["focalpx"])
+
+    filesList = filesList.rstrip('\n')
+
+    bundlerOptions = "--match_table matches.init.txt\n"
+    bundlerOptions += "--output bundle.out\n"
+    bundlerOptions += "--output_all bundle_\n"
+    bundlerOptions += "--output_dir bundle\n"
+    bundlerOptions += "--variable_focal_length\n"
+    bundlerOptions += "--use_focal_estimate\n"
+    bundlerOptions += "--constrain_focal\n"
+    bundlerOptions += "--constrain_focal_weight 0.0\n"
+    bundlerOptions += "--estimate_distortion\n"
+    bundlerOptions += "--run_bundle"
+
+    run("echo \"" + bundlerOptions + "\" > \"" + jobOptions["step_3_bundlerOptions"] + "\"")
+
+    bundlerDest = open(jobOptions["step_3_filelist"], 'w')
+    bundlerDest.write(filesList)
+    bundlerDest.close()
+
+    run("\"" + BIN_PATH + "/bundler\" \"" + jobOptions["step_3_filelist"] + "\" --options_file \"" + jobOptions["step_3_bundlerOptions"] + "\" > bundle/out")
+    run("\"" + BIN_PATH + "/Bundle2PMVS\" \"" + jobOptions["step_3_filelist"] + "\" bundle/bundle.out")
+    run("\"" + BIN_PATH + "/RadialUndistort\" \"" + jobOptions["step_3_filelist"] + "\" bundle/bundle.out pmvs")
+
+    i = 0
+    for fileObject in objects:
+        if fileObject["isOk"]:
+            if os.path.isfile("pmvs/" + fileObject["base"] + ".rd.jpg"):
+                nr = "{0:08d}".format(i)
+                i += 1
+
+                run("mv pmvs/" + fileObject["base"] + ".rd.jpg pmvs/visualize/" + str(nr) + ".jpg")
+                run("mv pmvs/" + str(nr) + ".txt pmvs/txt/" + str(nr) + ".txt")
+
+    run("\"" + BIN_PATH + "/Bundle2Vis\" pmvs/bundle.rd.out pmvs/vis.dat")
+
+    if args['--end-with'] != "bundler":
+        cmvs()
+
+def cmvs():
+    print "\n - running cmvs - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+
+    run("\"" + BIN_PATH + "/cmvs\" pmvs/ " + str(args['--cmvs-maxImages']) + " " + str(CORES))
+    run("\"" + BIN_PATH + "/genOption\" pmvs/ " + str(args['--pmvs-level']) + " " + str(args['--pmvs-csize']) + " " + str(args['--pmvs-threshold']) + " " + str(args['--pmvs-wsize']) + " " + str(args['--pmvs-minImageNum']) + " " + str(CORES))
+
+    if args['--end-with'] != "cmvs":
+        pmvs()
+
+def pmvs():
+    print "\n - running pmvs - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+
+    run("\"" + BIN_PATH + "/pmvs2\" pmvs/ option-0000")
+
+    run("cp -Rf \"" + jobOptions["jobDir"] + "/pmvs/models\" \"" + jobOptions["jobDir"] + "-results\"")
+
+    if args['--end-with'] != "pmvs":
+        odm_meshing()
+
+def odm_meshing():
+    print "\n - running meshing - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/odm_meshing")
+    except:
+        pass
+
+    run("\"" + BIN_PATH + "/odm_meshing\" -inputFile " + jobOptions["jobDir"] + "-results/option-0000.ply -outputFile " + jobOptions["jobDir"] + "-results/odm_mesh-0000.ply -logFile " + jobOptions["jobDir"] + "/odm_meshing/odm_meshing_log.txt -maxVertexCount " + str(args['--odm_meshing-maxVertexCount']) + " -octreeDepth " + str(args['--odm_meshing-octreeDepth']) + " -samplesPerNode " + str(args['--odm_meshing-samplesPerNode']) + " -solverDivide " + str(args['--odm_meshing-solverDivide']))
+
+    if args['--end-with'] != "odm_meshing":
+        odm_texturing()
+
+def odm_texturing():
+    print "\n - running texturing - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/odm_texturing")
+        os.mkdir(jobOptions["jobDir"] + "-results/odm_texturing")
+    except:
+        pass
+
+    run("\"" + BIN_PATH + "/odm_texturing\" -bundleFile " + jobOptions["jobDir"] + "/pmvs/bundle.rd.out -imagesPath " + jobOptions["srcDir"] + "/ -imagesListPath " + jobOptions["jobDir"] + "/pmvs/list.rd.txt -inputModelPath " + jobOptions["jobDir"] + "-results/odm_mesh-0000.ply -outputFolder " + jobOptions["jobDir"] + "-results/odm_texturing/ -textureResolution " + str(args['--odm_texturing-textureResolution']) + " -bundleResizedTo " + str(jobOptions["resizeTo"]) + " -textureWithSize " + str(args['--odm_texturing-textureWithSize']) + " -logFile " + jobOptions["jobDir"] + "/odm_texturing/odm_texturing_log.txt")
+
+    if args['--end-with'] != "odm_texturing":
+        odm_georeferencing()
+
+def odm_georeferencing():
+    print "\n - running georeferencing - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/odm_georeferencing")
+    except:
+        pass
+
+    if args['--odm_georeferencing-useGcp'] == False:
+        run("\"" + BIN_PATH + "/odm_extract_utm\" -imagesPath " + jobOptions["srcDir"] + "/ -imageListFile " + jobOptions["jobDir"] + "/pmvs/list.rd.txt -outputCoordFile " + jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt")
+        run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] + "/pmvs/bundle.rd.out -inputCoordFile " + jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt -inputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model.obj -outputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -inputPointCloudFile " + jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] + "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] + "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")
+    elif os.path.isfile(jobOptions["srcDir"] + "/" + args['--odm_georeferencing-gcpFile']):
+        run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] + "/pmvs/bundle.rd.out -gcpFile " + jobOptions["srcDir"] + "/" + args['--odm_georeferencing-gcpFile'] + " -imagesPath " + jobOptions["srcDir"] + "/ -imagesListPath " + jobOptions["jobDir"] + "/pmvs/list.rd.txt -bundleResizedTo " + str(jobOptions["resizeTo"]) + " -inputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model.obj -outputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -outputCoordFile " + jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt -inputPointCloudFile " + jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] + "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] + "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")
+    else:
+        print "Warning: No GCP file. Consider rerunning with argument --odm_georeferencing-useGcp false --start-with odm_georeferencing"
+        print "Skipping orthophoto"
+        args['--end-with'] = "odm_georeferencing"
+
+    if not "csString" in jobOptions:
+        parseCoordinateSystem()
+
+    if "csString" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
+        images = []
+        with open(jobOptions["jobDir"] + "/pmvs/list.rd.txt") as f:
+            images = f.readlines()
+
+        if len(images) > 0:
+            with open(jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt") as f:
+                for lineNumber, line in enumerate(f):
+                    if lineNumber >= 2 and lineNumber - 2 < len(images):
+                        tokens = line.split(' ')
+                        if len(tokens) >= 3:
+                            x = float(tokens[0])
+                            y = float(tokens[1])
+                            z = float(tokens[2])
+                            filename = images[lineNumber - 2]
+                            run("echo " + str(x + jobOptions["utmEastOffset"]) + " " + str(y + jobOptions["utmNorthOffset"]) + " " + str(z) + " | cs2cs " + jobOptions["csString"] + " +to +datum=WGS84 +proj=latlong > " + jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt")
+                            with open(jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt") as latlongFile:
+                                latlongLine = latlongFile.readline()
+                                tokens = latlongLine.split()
+                                if len(tokens) >= 2:
+                                    exifGpsInfoWritten = False
+
+                                    lonString = tokens[0]  # Example: 83d18'16.285"W
+                                    latString = tokens[1]  # Example: 41d2'11.789"N
+                                    altString = ""
+                                    if len(tokens) > 2:
+                                        altString = tokens[2]  # Example: 0.998
+
+                                    tokens = re.split("[d '\"]+", lonString)
+                                    if len(tokens) >= 4:
+                                        lonDeg = tokens[0]
+                                        lonMin = tokens[1]
+                                        lonSec = tokens[2]
+                                        lonSecFrac = fractions.Fraction(lonSec)
+                                        lonSecNumerator = str(lonSecFrac._numerator)
+                                        lonSecDenominator = str(lonSecFrac._denominator)
+                                        lonRef = tokens[3]
+
+                                        tokens = re.split("[d '\"]+", latString)
+                                        if len(tokens) >= 4:
+                                            latDeg = tokens[0]
+                                            latMin = tokens[1]
+                                            latSec = tokens[2]
+                                            latSecFrac = fractions.Fraction(latSec)
+                                            latSecNumerator = str(latSecFrac._numerator)
+                                            latSecDenominator = str(latSecFrac._denominator)
+                                            latRef = tokens[3]
+
+                                            exivCmd = "exiv2 -q"
+                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLatitude " + latDeg + "/1 " + latMin + "/1 " + latSecNumerator + "/" + latSecDenominator + "\""
+                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLatitudeRef " + latRef + "\""
+                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLongitude " + lonDeg + "/1 " + lonMin + "/1 " + lonSecNumerator + "/" + lonSecDenominator + "\""
+                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLongitudeRef " + lonRef + "\""
+
+                                            altNumerator = altDenominator = 0
+                                            if altString:
+                                                altFrac = fractions.Fraction(altString)
+                                                altNumerator = str(altFrac._numerator)
+                                                altDenominator = str(altFrac._denominator)
+                                                exivCmd += " -M\"set Exif.GPSInfo.GPSAltitude " + altNumerator + "/" + altDenominator + "\""
+                                                exivCmd += " -M\"set Exif.GPSInfo.GPSAltitudeRef 0\""
+
+                                            exivCmd += " " + filename
+                                            run(exivCmd)
+                                            exifGpsInfoWritten = True
+
+                                    if not exifGpsInfoWritten:
+                                        print(" Warning: Failed setting EXIF GPS info for " + filename + " based on " + latlongLine)
+
+    if "epsg" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
+        lasCmd = "\"" + BIN_PATH + "/txt2las\" -i " + jobOptions["jobDir"] + "-results/option-0000_georef.ply -o " + jobOptions["jobDir"] + "-results/pointcloud_georef.laz -skip 30 -parse xyzRGBssss -set_scale 0.01 0.01 0.01 -set_offset " + str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) + " 0 -translate_xyz " + str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) + " 0 -epsg " + str(jobOptions["epsg"])
+        print(" Creating geo-referenced LAS file (expecting warning)...")
+        run(lasCmd)
+
+    if args['--end-with'] != "odm_georeferencing":
+        odm_orthophoto()
+
+
+def odm_orthophoto():
+    print "\n - running orthophoto generation - " + now()
+
+    os.chdir(jobOptions["jobDir"])
+    try:
+        os.mkdir(jobOptions["jobDir"] + "/odm_orthophoto")
+    except:
+        pass
+
+    run("\"" + BIN_PATH + "/odm_orthophoto\" -inputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -logFile " + jobOptions["jobDir"] + "/odm_orthophoto/odm_orthophoto_log.txt -outputFile " + jobOptions["jobDir"] + "-results/odm_orthphoto.png -resolution 20.0 -outputCornerFile " + jobOptions["jobDir"] + "/odm_orthphoto_corners.txt")
+
+    if not "csString" in jobOptions:
+        parseCoordinateSystem()
+
+    geoTiffCreated = False
+    if "csString" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
+        ulx = uly = lrx = lry = 0.0
+        with open(jobOptions["jobDir"] + "/odm_orthphoto_corners.txt") as f:
+            for lineNumber, line in enumerate(f):
+                if lineNumber == 0:
+                    tokens = line.split(' ')
+                    if len(tokens) == 4:
+                        ulx = float(tokens[0]) + float(jobOptions["utmEastOffset"])
+                        lry = float(tokens[1]) + float(jobOptions["utmNorthOffset"])
+                        lrx = float(tokens[2]) + float(jobOptions["utmEastOffset"])
+                        uly = float(tokens[3]) + float(jobOptions["utmNorthOffset"])
+
+        print(" Creating GeoTIFF...")
+        sys.stdout.write(" ")
+        run("gdal_translate -a_ullr " + str(ulx) + " " + str(uly) + " " + str(lrx) + " " + str(lry) + " -a_srs \"" + jobOptions["csString"] + "\" " + jobOptions["jobDir"] + "-results/odm_orthphoto.png " + jobOptions["jobDir"] + "-results/odm_orthphoto.tif")
+        geoTiffCreated = True
+
+    if not geoTiffCreated:
+        print " Warning: No geo-referenced orthophoto created due to missing geo-referencing or corner coordinates."
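+
+# Top-level driver: parse the arguments, prepare the job options and image objects,
+# then enter the pipeline at the step named by --start-with; each step function
+# chains to the next one unless --end-with stops it earlier.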
+
+parseArgs()
+prepareObjects()
+
+os.chdir(jobOptions["jobDir"])
+
+if args['--start-with'] == "resize":
+    resize()
+elif args['--start-with'] == "getKeypoints":
+    getKeypoints()
+elif args['--start-with'] == "match":
+    match()
+elif args['--start-with'] == "bundler":
+    bundler()
+elif args['--start-with'] == "cmvs":
+    cmvs()
+elif args['--start-with'] == "pmvs":
+    pmvs()
+elif args['--start-with'] == "odm_meshing":
+    odm_meshing()
+elif args['--start-with'] == "odm_texturing":
+    odm_texturing()
+elif args['--start-with'] == "odm_georeferencing":
+    odm_georeferencing()
+elif args['--start-with'] == "odm_orthophoto":
+    odm_orthophoto()
+
+if args['--zip-results'] == True:
+    print "\nCompressing results - " + now()
+    run("cd " + jobOptions["jobDir"] + "-results/ && tar -czf " + jobOptions["jobDir"] + "-results.tar.gz *")
+
+print "\n - done - " + now()