kopia lustrzana https://github.com/OpenDroneMap/ODM
Merge remote-tracking branch 'upstream/python-port' into opensfm
Conflicts: .gitignore install.shpull/167/head
commit
cfa4259995
|
@ -9,5 +9,8 @@ odm_texturing-build/
|
|||
*.user
|
||||
cmvs.tar.gz
|
||||
parallel.tar.bz2
|
||||
LAStools.zip
|
||||
pcl.tar.gz
|
||||
ceres-solver.tar.gz
|
||||
*.pyc
|
||||
pcl.tar.gz
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
[submodule "src/bundler"]
|
||||
path = src/bundler
|
||||
url = https://github.com/chris-cooper/bundler_sfm
|
||||
[submodule "src/ceres-solver"]
|
||||
path = src/ceres-solver
|
||||
url = https://ceres-solver.googlesource.com/ceres-solver
|
BIN
bundler.zip
BIN
bundler.zip
Plik binarny nie jest wyświetlany.
|
@ -262,6 +262,7 @@
|
|||
"PENTAX Corporation PENTAX Optio S5z": 5.76,
|
||||
"PENTAX Corporation PENTAX Optio SV": 5.76,
|
||||
"PENTAX Corporation PENTAX Optio WP": 5.75,
|
||||
"PHANTOM VISION FC200": 6.17,
|
||||
"RICOH CaplioG3 modelM": 5.27,
|
||||
"RICOH Caplio GX": 7.176,
|
||||
"RICOH Caplio R30": 5.75,
|
||||
|
|
57
install.sh
57
install.sh
|
@ -38,10 +38,11 @@ echo " - script started - `date`"
|
|||
VLFEAT_PATH="$TOOLS_SRC_PATH/vlfeat"
|
||||
PARALLEL_PATH="$TOOLS_SRC_PATH/parallel"
|
||||
PSR_PATH="$TOOLS_SRC_PATH/PoissonRecon"
|
||||
|
||||
GRACLUS_PATH="$TOOLS_SRC_PATH/graclus"
|
||||
GRACLUS_PATH="$TOOLS_SRC_PATH/graclus"
|
||||
CERES_PATH="$TOOLS_SRC_PATH/ceres-solver"
|
||||
|
||||
PCL_PATH="$TOOLS_SRC_PATH/pcl"
|
||||
LASTOOLS_PATH="$TOOLS_SRC_PATH/lastools"
|
||||
ODM_MESHING_PATH="$TOOLS_SRC_PATH/odm_meshing"
|
||||
ODM_TEXTURING_PATH="$TOOLS_SRC_PATH/odm_texturing"
|
||||
ODM_ORTHOPHOTO_PATH="$TOOLS_SRC_PATH/odm_orthophoto"
|
||||
|
@ -107,6 +108,10 @@ uname -a > "$TOOLS_LOG_PATH/sysinfo.txt"
|
|||
echo
|
||||
echo " > installing required packages"
|
||||
|
||||
echo " - installing git submodules"
|
||||
git submodule init
|
||||
git submodule update
|
||||
|
||||
echo " - updating"
|
||||
sudo apt-get update --assume-yes > "$TOOLS_LOG_PATH/apt-get_get.log" 2>&1
|
||||
|
||||
|
@ -124,6 +129,9 @@ sudo apt-get install --assume-yes --install-recommends \
|
|||
libzip-dev \
|
||||
libswitch-perl libjson-perl \
|
||||
libcv-dev libcvaux-dev libopencv-dev \
|
||||
gdal-bin \
|
||||
exiv2 \
|
||||
libgoogle-glog-dev libatlas-base-dev libsuitesparse-dev \
|
||||
> "$TOOLS_LOG_PATH/apt-get_install.log" 2>&1
|
||||
else
|
||||
sudo apt-get install --assume-yes --install-recommends \
|
||||
|
@ -140,6 +148,8 @@ sudo apt-get install --assume-yes --install-recommends \
|
|||
libgoogle-glog-dev libatlas-base-dev libeigen3-dev libsuitesparse-dev \
|
||||
libboost-python-dev \
|
||||
python-numpy-dev python-pyexiv2 \
|
||||
gdal-bin \
|
||||
exiv2 \
|
||||
> "$TOOLS_LOG_PATH/apt-get_install.log" 2>&1
|
||||
fi
|
||||
|
||||
|
@ -168,13 +178,13 @@ do
|
|||
done <<EOF
|
||||
parallel.tar.bz2 http://ftp.gnu.org/gnu/parallel/parallel-20141022.tar.bz2
|
||||
clapack.tgz http://www.netlib.org/clapack/clapack-3.2.1-CMAKE.tgz
|
||||
bundler.zip http://phototour.cs.washington.edu/bundler/distr/bundler-v0.4-source.zip
|
||||
PoissonRecon.zip http://www.cs.jhu.edu/~misha/Code/PoissonRecon/Version2/PoissonRecon.zip
|
||||
vlfeat.tar.gz http://www.vlfeat.org/download/vlfeat-0.9.13-bin.tar.gz
|
||||
cmvs.tar.gz http://www.di.ens.fr/cmvs/cmvs-fix2.tar.gz
|
||||
graclus.tar.gz http://smathermather.github.io/BundlerTools/patched_files/src/graclus/graclus1.2.tar.gz
|
||||
pcl.tar.gz https://github.com/PointCloudLibrary/pcl/archive/pcl-1.7.2.tar.gz
|
||||
ceres-solver.tar.gz http://ceres-solver.org/ceres-solver-1.10.0.tar.gz
|
||||
LAStools.zip http://lastools.org/download/LAStools.zip
|
||||
EOF
|
||||
git clone https://github.com/mapillary/OpenSfM.git $OPENSFM_PATH
|
||||
|
||||
|
@ -199,12 +209,13 @@ wait
|
|||
mv -f graclus1.2 "$GRACLUS_PATH"
|
||||
mv -f clapack-3.2.1-CMAKE "$CLAPACK_PATH"
|
||||
mv -f vlfeat-0.9.13 "$VLFEAT_PATH"
|
||||
mv -f bundler-v0.4-source "$BUNDLER_PATH"
|
||||
mv -f parallel-20141022 "$PARALLEL_PATH"
|
||||
mv -f PoissonRecon "$PSR_PATH"
|
||||
mv -f cmvs "$CMVS_PATH"
|
||||
mv -f pcl-pcl-1.7.2 "$PCL_PATH"
|
||||
mv -f ceres-solver-1.10.0 "$CERES_PATH"
|
||||
mv -f LAStools "$LASTOOLS_PATH"
|
||||
|
||||
|
||||
echo " < done - `date`"
|
||||
|
||||
|
@ -318,6 +329,23 @@ echo " > vlfeat"
|
|||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > LAStools"
|
||||
cd "$LASTOOLS_PATH"
|
||||
|
||||
echo " - installing LAStools"
|
||||
|
||||
make > "$TOOLS_LOG_PATH/lastools_1_build.log" 2>&1
|
||||
|
||||
if [ "$ARCH" = "i686" ]; then
|
||||
cp -f "$LASTOOLS_PATH/bin/txt2las" "$TOOLS_BIN_PATH/txt2las"
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "x86_64" ]; then
|
||||
cp -f "$LASTOOLS_PATH/bin/txt2las" "$TOOLS_BIN_PATH/txt2las"
|
||||
fi
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
|
||||
echo " > cmvs/pmvs"
|
||||
cd "$CMVS_PATH/program/main"
|
||||
|
@ -356,21 +384,34 @@ echo " > cmvs/pmvs"
|
|||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > ceres"
|
||||
cd "$CERES_PATH"
|
||||
|
||||
echo " - configuring ceres"
|
||||
mkdir -p build
|
||||
cd build
|
||||
cmake .. -DCMAKE_INSTALL_PREFIX=$TOOLS_PATH \
|
||||
-DCMAKE_C_FLAGS=-fPIC -DCMAKE_CXX_FLAGS=-fPIC \
|
||||
-DBUILD_EXAMPLES=OFF -DBUILD_TESTING=OFF > "$TOOLS_LOG_PATH/ceres_1_config.log" 2>&1
|
||||
|
||||
echo " - building ceres"
|
||||
make install > "$TOOLS_LOG_PATH/ceres_1_build.log" 2>&1
|
||||
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
echo " > bundler"
|
||||
cd "$BUNDLER_PATH"
|
||||
|
||||
sed -i "$BUNDLER_PATH/src/BundlerApp.h" -e "620c\ BundlerApp();"
|
||||
|
||||
echo " - cleaning bundler"
|
||||
make clean > "$TOOLS_LOG_PATH/bundler_1_clean.log" 2>&1
|
||||
|
||||
echo " - building bundler"
|
||||
make -j > "$TOOLS_LOG_PATH/bundler_2_build.log" 2>&1
|
||||
|
||||
cp -f "$BUNDLER_PATH/bin/Bundle2PMVS" "$BUNDLER_PATH/bin/Bundle2Vis" "$BUNDLER_PATH/bin/KeyMatchFull" "$BUNDLER_PATH/bin/KeyMatch" "$BUNDLER_PATH/bin/bundler" "$BUNDLER_PATH/bin/RadialUndistort" "$TOOLS_BIN_PATH/"
|
||||
ln -s "$BUNDLER_PATH/bin/Bundle2PMVS" "$BUNDLER_PATH/bin/Bundle2Vis" "$BUNDLER_PATH/bin/KeyMatchFull" "$BUNDLER_PATH/bin/KeyMatch" "$BUNDLER_PATH/bin/bundler" "$BUNDLER_PATH/bin/RadialUndistort" "$TOOLS_BIN_PATH/"
|
||||
|
||||
cp -f "$BUNDLER_PATH/lib/libANN_char.so" "$TOOLS_LIB_PATH/"
|
||||
ln -s "$BUNDLER_PATH/lib/libANN_char.so" "$TOOLS_LIB_PATH/"
|
||||
echo " < done - `date`"
|
||||
echo
|
||||
|
||||
|
|
|
@ -0,0 +1,119 @@
|
|||
import os
|
||||
import math
|
||||
import subprocess
|
||||
|
||||
# Purpose:
|
||||
# Preselect camera pairs for meature matching based by using GPS exif data.
|
||||
|
||||
def preselect_pairs(utm_extractor_path, image_list, k_distance, use_knn_mode = True, verbose = False):
|
||||
|
||||
# Parameterss:
|
||||
# utm_extractor_path: Path to the odm utm_extraction program that parses exif data.
|
||||
# images_path: Path to the folder containing the images.
|
||||
# k_distance: Nearest neighbor count in case of use_knn_mode = True, otherwise
|
||||
# the minimum distance between cameras to be matched.
|
||||
# use_knn_mode: True if knn mode is used to preselect matches, False for distance thresholding.
|
||||
|
||||
# Temporary files
|
||||
image_list_file = 'image_list_tmp.txt'
|
||||
utm_coord_file = 'utm_coord_tmp.txt'
|
||||
|
||||
# Parse the list of files with successfullt detectd points.
|
||||
image_folder = ""
|
||||
filenames = []
|
||||
with open(image_list, 'r') as keypoint_file:
|
||||
first_line = keypoint_file.readline()
|
||||
image_folder = first_line[:first_line.rfind("/")]
|
||||
keypoint_file.seek(0)
|
||||
for line in keypoint_file:
|
||||
filename = line[line.rfind("/")+1:line.rfind(".")]
|
||||
filename += ".jpg"
|
||||
filenames.append(filename)
|
||||
|
||||
data = open(image_list_file, "w")
|
||||
for filename in filenames:
|
||||
data.write("%s\n" % filename)
|
||||
data.close()
|
||||
|
||||
|
||||
# Define call parameters for calling the odm_extract_utm module
|
||||
utm_cmd = ''
|
||||
if verbose:
|
||||
utm_cmd = [utm_extractor_path, '-verbose', '-imagesPath', image_folder, \
|
||||
'-imageListFile', image_list_file, '-outputCoordFile', utm_coord_file]
|
||||
else:
|
||||
utm_cmd = [utm_extractor_path, '-imagesPath', image_folder, '-imageListFile', \
|
||||
image_list_file, '-outputCoordFile', utm_coord_file]
|
||||
|
||||
# Perform UTM extraction
|
||||
utm_process = subprocess.Popen(utm_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
output, err = utm_process.communicate()
|
||||
|
||||
# Check return code
|
||||
if utm_process.returncode != 0:
|
||||
print "Error when performing utm_extraction, dumping log file:"
|
||||
print output
|
||||
os.remove(image_list_file)
|
||||
return []
|
||||
|
||||
# Parse odm_extract_utm output
|
||||
coords = []
|
||||
with open('utm_coord_tmp.txt', 'r') as coord_file:
|
||||
# Skip header lines
|
||||
next(coord_file)
|
||||
next(coord_file)
|
||||
for line in coord_file:
|
||||
first_space = line.find(' ')
|
||||
second_space = first_space + line[first_space+1:].find(' ')
|
||||
xCoord = float(line[0: first_space])
|
||||
yCoord = float(line[first_space+1: second_space+1])
|
||||
coords.append([xCoord, yCoord])
|
||||
|
||||
# Calculate distances between camera pairs
|
||||
distances = []
|
||||
for xo,yo in coords:
|
||||
distances.append([])
|
||||
for xi, yi in coords:
|
||||
current_image = len(distances)
|
||||
xDist = xo-xi
|
||||
yDist = yo-yi
|
||||
distance = math.sqrt(xDist*xDist + yDist*yDist)
|
||||
current_image = len(distances[-1])
|
||||
distances[-1].append([current_image, distance, False])
|
||||
distances[-1].sort(key=lambda tup: tup[1])
|
||||
|
||||
# Select matched pairs and make there are no doubles
|
||||
matched_pairs = []
|
||||
if use_knn_mode:
|
||||
# Make sure that image count > k
|
||||
if len(coords) <= k_distance:
|
||||
print "Warning, k >= image count, the result will be equivalent to exhaustive matching"
|
||||
k_distance = len(coords)-1
|
||||
for outer_index, sorted_list in enumerate(distances):
|
||||
# Skip first element as distance will always be zero
|
||||
for sorted_index in xrange(1, k_distance+1):
|
||||
image_index, distance, dummy = distances[outer_index][sorted_index]
|
||||
is_added = distances[outer_index][image_index][2]
|
||||
if not is_added:
|
||||
matched_pairs.append([outer_index, image_index])
|
||||
distances[outer_index][image_index][2] = True
|
||||
distances[image_index][outer_index][2] = True
|
||||
else: # Distance mode
|
||||
for outer_index, sorted_list in enumerate(distances):
|
||||
# Skip first element as distance will always be zero
|
||||
for image_index, distance, dummy in sorted_list:
|
||||
if outer_index == image_index:
|
||||
continue
|
||||
if distance > k_distance:
|
||||
break
|
||||
is_added = distances[outer_index][image_index][2]
|
||||
if not is_added:
|
||||
matched_pairs.append([outer_index, image_index])
|
||||
distances[outer_index][image_index][2] = True
|
||||
distances[image_index][outer_index][2] = True
|
||||
|
||||
# Remove temporary files
|
||||
os.remove(image_list_file)
|
||||
os.remove(utm_coord_file)
|
||||
|
||||
return matched_pairs
|
|
@ -5,5 +5,5 @@
|
|||
int main (int argc, char **argv)
|
||||
{
|
||||
UtmExtractor utmExtractor;
|
||||
utmExtractor.run(argc, argv);
|
||||
return utmExtractor.run(argc, argv);
|
||||
}
|
||||
|
|
|
@ -64,7 +64,7 @@ class OnMat3
|
|||
public:
|
||||
OnMat3(Vec3 r1, Vec3 r2, Vec3 r3);
|
||||
OnMat3(const OnMat3 &o);
|
||||
|
||||
|
||||
Vec3 r1_; /**< The first row of the matrix. **/
|
||||
Vec3 r2_; /**< The second row of the matrix. **/
|
||||
Vec3 r3_; /**< The third row of the matrix. **/
|
||||
|
|
|
@ -218,11 +218,15 @@ std::ostream& operator<<(std::ostream &os, const GeorefCamera &cam)
|
|||
|
||||
Georef::Georef() : log_(false)
|
||||
{
|
||||
georeferencePointCloud_ = false;
|
||||
useGCP_ = false;
|
||||
bundleFilename_ = "";
|
||||
coordFilename_ = "";
|
||||
inputCoordFilename_ = "";
|
||||
outputCoordFilename_ = "";
|
||||
inputObjFilename_ = "";
|
||||
outputObjFilename_ = "";
|
||||
exportCoordinateFile_ = false;
|
||||
exportGeorefSystem_ = false;
|
||||
}
|
||||
|
||||
Georef::~Georef()
|
||||
|
@ -267,10 +271,13 @@ int Georef::run(int argc, char *argv[])
|
|||
void Georef::parseArguments(int argc, char *argv[])
|
||||
{
|
||||
bool outputSpecified = false;
|
||||
bool outputPointCloudSpecified = false;
|
||||
bool imageListSpecified = false;
|
||||
bool gcpFileSpecified = false;
|
||||
bool imageLocation = false;
|
||||
bool bundleResized = false;
|
||||
bool outputCoordSpecified = false;
|
||||
bool inputCoordSpecified = false;
|
||||
|
||||
logFile_ = std::string(argv[0]) + "_log.txt";
|
||||
log_ << logFile_ << "\n";
|
||||
|
@ -326,15 +333,28 @@ void Georef::parseArguments(int argc, char *argv[])
|
|||
bundleFilename_ = std::string(argv[argIndex]);
|
||||
log_ << "Reading cameras from: " << bundleFilename_ << "\n";
|
||||
}
|
||||
else if(argument == "-coordFile" && argIndex < argc)
|
||||
else if(argument == "-inputCoordFile" && argIndex < argc)
|
||||
{
|
||||
argIndex++;
|
||||
if (argIndex >= argc)
|
||||
{
|
||||
throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
|
||||
}
|
||||
coordFilename_ = std::string(argv[argIndex]);
|
||||
log_ << "Reading cameras georeferenced positions from: " << coordFilename_ << "\n";
|
||||
inputCoordFilename_ = std::string(argv[argIndex]);
|
||||
log_ << "Reading cameras gps exif positions from: " << inputCoordFilename_ << "\n";
|
||||
inputCoordSpecified = true;
|
||||
}
|
||||
else if(argument == "-outputCoordFile" && argIndex < argc)
|
||||
{
|
||||
argIndex++;
|
||||
if (argIndex >= argc)
|
||||
{
|
||||
throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
|
||||
}
|
||||
outputCoordFilename_ = std::string(argv[argIndex]);
|
||||
log_ << "Exporting cameras georeferenced gps positions to: " << outputCoordFilename_ << "\n";
|
||||
exportCoordinateFile_ = true;
|
||||
outputCoordSpecified = true;
|
||||
}
|
||||
else if(argument == "-inputFile" && argIndex < argc)
|
||||
{
|
||||
|
@ -346,6 +366,17 @@ void Georef::parseArguments(int argc, char *argv[])
|
|||
inputObjFilename_ = std::string(argv[argIndex]);
|
||||
log_ << "Reading textured mesh from: " << inputObjFilename_ << "\n";
|
||||
}
|
||||
else if(argument == "-inputPointCloudFile" && argIndex < argc)
|
||||
{
|
||||
argIndex++;
|
||||
if (argIndex >= argc)
|
||||
{
|
||||
throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
|
||||
}
|
||||
inputPointCloudFilename_ = std::string(argv[argIndex]);
|
||||
log_ << "Reading point cloud from: " << inputPointCloudFilename_ << "\n";
|
||||
georeferencePointCloud_ = true;
|
||||
}
|
||||
else if(argument == "-gcpFile" && argIndex < argc)
|
||||
{
|
||||
argIndex++;
|
||||
|
@ -379,6 +410,17 @@ void Georef::parseArguments(int argc, char *argv[])
|
|||
log_ << "Images location is set to: " << imagesLocation_ << "\n";
|
||||
imageLocation = true;
|
||||
}
|
||||
else if(argument == "-georefFileOutputPath" && argIndex < argc)
|
||||
{
|
||||
argIndex++;
|
||||
if (argIndex >= argc)
|
||||
{
|
||||
throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
|
||||
}
|
||||
georefFilename_ = std::string(argv[argIndex]);
|
||||
log_ << "Georef file output path is set to: " << georefFilename_ << "\n";
|
||||
exportGeorefSystem_ = true;
|
||||
}
|
||||
else if(argument == "-bundleResizedTo" && argIndex < argc)
|
||||
{
|
||||
argIndex++;
|
||||
|
@ -406,6 +448,17 @@ void Georef::parseArguments(int argc, char *argv[])
|
|||
log_ << "Writing output to: " << outputObjFilename_ << "\n";
|
||||
outputSpecified = true;
|
||||
}
|
||||
else if(argument == "-outputPointCloudFile" && argIndex < argc)
|
||||
{
|
||||
argIndex++;
|
||||
if (argIndex >= argc)
|
||||
{
|
||||
throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
|
||||
}
|
||||
outputPointCloudFilename_ = std::string(argv[argIndex]);
|
||||
log_ << "Writing output to: " << outputPointCloudFilename_ << "\n";
|
||||
outputPointCloudSpecified = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
printHelp();
|
||||
|
@ -413,6 +466,11 @@ void Georef::parseArguments(int argc, char *argv[])
|
|||
}
|
||||
}
|
||||
|
||||
if (inputCoordSpecified && outputCoordSpecified)
|
||||
{
|
||||
throw GeorefException("Both output and input coordfile specified, only one of those are accepted.");
|
||||
}
|
||||
|
||||
if (imageListSpecified && gcpFileSpecified && imageLocation && bundleResized)
|
||||
{
|
||||
useGCP_ = true;
|
||||
|
@ -423,6 +481,11 @@ void Georef::parseArguments(int argc, char *argv[])
|
|||
log_ << "Missing input in order to use GCP for georeferencing. Using EXIF data instead.\n";
|
||||
}
|
||||
|
||||
if(georeferencePointCloud_ && !outputPointCloudSpecified)
|
||||
{
|
||||
setDefaultPointCloudOutput();
|
||||
}
|
||||
|
||||
if(!outputSpecified)
|
||||
{
|
||||
setDefaultOutput();
|
||||
|
@ -455,12 +518,18 @@ void Georef::printHelp()
|
|||
log_ << "The file needs to be on the following line format:\n";
|
||||
log_ << "easting northing height pixelrow pixelcol imagename\n\n";
|
||||
|
||||
log_ << "\"-coordFile <path>\" (mandatory if using exif data)" << "\n";
|
||||
log_ << "\"-inputCoordFile <path>\" (mandatory if using exif data)" << "\n";
|
||||
log_ << "\"Input cameras geroreferenced coords file.\n\n";
|
||||
|
||||
log_ << "\"-outputCoordFile <path>\" (optional)" << "\n";
|
||||
log_ << "\"Output cameras geroreferenced coords file.\n\n";
|
||||
|
||||
log_ << "\"-inputFile <path>\" (mandatory)" << "\n";
|
||||
log_ << "\"Input obj file that must contain a textured mesh.\n\n";
|
||||
|
||||
log_ << "\"-inputPointCloudFile <path>\" (optional)" << "\n";
|
||||
log_ << "\"Input ply file that must contain a point cloud.\n\n";
|
||||
|
||||
log_ << "\"-imagesListPath <path>\" (mandatory if using ground control points)\n";
|
||||
log_ << "Path to the list containing the image names used in the bundle.out file.\n\n";
|
||||
|
||||
|
@ -472,6 +541,9 @@ void Georef::printHelp()
|
|||
|
||||
log_ << "\"-outputFile <path>\" (optional, default <inputFile>_geo)" << "\n";
|
||||
log_ << "\"Output obj file that will contain the georeferenced texture mesh.\n\n";
|
||||
|
||||
log_ << "\"-outputPointCloudFile <path>\" (mandatory if georeferencing a point cloud)" << "\n";
|
||||
log_ << "\"Output ply file that will contain the georeferenced point cloud.\n\n";
|
||||
|
||||
log_.setIsPrintingInCout(printInCoutPop);
|
||||
}
|
||||
|
@ -480,7 +552,7 @@ void Georef::setDefaultOutput()
|
|||
{
|
||||
if(inputObjFilename_.empty())
|
||||
{
|
||||
throw GeorefException("Tried to generate default ouptut file without having an input file.");
|
||||
throw GeorefException("Tried to generate default output file without having an input file.");
|
||||
}
|
||||
|
||||
std::string tmp = inputObjFilename_;
|
||||
|
@ -497,6 +569,27 @@ void Georef::setDefaultOutput()
|
|||
log_ << "Writing output to: " << outputObjFilename_ << "\n";
|
||||
}
|
||||
|
||||
void Georef::setDefaultPointCloudOutput()
|
||||
{
|
||||
if(inputPointCloudFilename_.empty())
|
||||
{
|
||||
throw GeorefException("Tried to generate default point cloud ouptut file without having an input file.");
|
||||
}
|
||||
|
||||
std::string tmp = inputPointCloudFilename_;
|
||||
size_t findPos = tmp.find_last_of(".");
|
||||
|
||||
if(std::string::npos == findPos)
|
||||
{
|
||||
throw GeorefException("Tried to generate default ouptut file, could not find .ply in the input file:\n\'"+inputPointCloudFilename_+"\'");
|
||||
}
|
||||
|
||||
tmp = tmp.substr(0, findPos);
|
||||
|
||||
outputPointCloudFilename_ = tmp + "_geo.ply";
|
||||
log_ << "Writing output to: " << outputPointCloudFilename_ << "\n";
|
||||
}
|
||||
|
||||
void Georef::createGeoreferencedModel()
|
||||
{
|
||||
if (useGCP_)
|
||||
|
@ -535,7 +628,7 @@ void Georef::readGCPs()
|
|||
std::ifstream imageListStream(imagesListPath_.c_str());
|
||||
if (!imageListStream.good())
|
||||
{
|
||||
throw GeorefException("Failed opening " + imagesListPath_ + " for reading.\n");
|
||||
throw GeorefException("Failed opening image path " + imagesListPath_ + " for reading.\n");
|
||||
}
|
||||
|
||||
for (size_t i=0; i<cameras_.size(); ++i)
|
||||
|
@ -551,7 +644,7 @@ void Georef::readGCPs()
|
|||
std::ifstream gcpStream(gcpFilename_.c_str());
|
||||
if (!gcpStream.good())
|
||||
{
|
||||
throw GeorefException("Failed opening " + gcpFilename_ + " for reading.\n");
|
||||
throw GeorefException("Failed opening gcp file " + gcpFilename_ + " for reading.\n");
|
||||
}
|
||||
std::string gcpString;
|
||||
|
||||
|
@ -820,7 +913,6 @@ void Georef::performGeoreferencingWithGCP()
|
|||
// Update the mesh.
|
||||
pcl::toPCLPointCloud2 (*meshCloud, mesh.cloud);
|
||||
|
||||
|
||||
// Iterate over each part of the mesh (one per material), to make texture file paths relative the .mtl file.
|
||||
for(size_t t = 0; t < mesh.tex_materials.size(); ++t)
|
||||
{
|
||||
|
@ -844,7 +936,53 @@ void Georef::performGeoreferencingWithGCP()
|
|||
log_ << "Successfully saved model.\n";
|
||||
}
|
||||
|
||||
printGeorefSystem();
|
||||
if(georeferencePointCloud_)
|
||||
{
|
||||
//pcl::PointCloud2<pcl::PointNormal>::Ptr pointCloud;
|
||||
pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloud(new pcl::PointCloud<pcl::PointXYZRGBNormal>());
|
||||
if(pcl::io::loadPLYFile<pcl::PointXYZRGBNormal> (inputPointCloudFilename_.c_str(), *pointCloud.get()) == -1) {
|
||||
throw GeorefException("Error when reading point cloud:\n" + inputPointCloudFilename_ + "\n");
|
||||
}
|
||||
else
|
||||
{
|
||||
log_ << "Successfully loaded " << pointCloud->size() << " points with corresponding normals from file.\n";
|
||||
}
|
||||
log_ << '\n';
|
||||
log_ << "Applying transform to point cloud...\n";
|
||||
pcl::transformPointCloud(*pointCloud, *pointCloud, transform);
|
||||
log_ << ".. point cloud transformed.\n";
|
||||
|
||||
pcl::PLYWriter plyWriter;
|
||||
|
||||
log_ << '\n';
|
||||
log_ << "Saving point cloud file to \'" << outputPointCloudFilename_ << "\'...\n";
|
||||
//pcl::io::savePLYFileASCII(outputPointCloudFilename_.c_str(), *pointCloud.get());
|
||||
plyWriter.write(outputPointCloudFilename_.c_str(), *pointCloud.get(), false, false);
|
||||
log_ << ".. point cloud file saved.\n";
|
||||
}
|
||||
|
||||
if(exportCoordinateFile_)
|
||||
{
|
||||
log_ << '\n';
|
||||
log_ << "Saving georeferenced camera positions to ";
|
||||
log_ << outputCoordFilename_;
|
||||
log_<< "\n";
|
||||
std::ofstream coordStream(outputCoordFilename_.c_str());
|
||||
coordStream << georefSystem_.system_ <<std::endl;
|
||||
coordStream << static_cast<int>(georefSystem_.eastingOffset_) << " " << static_cast<int>(georefSystem_.northingOffset_) << std::endl;
|
||||
for(size_t cameraIndex = 0; cameraIndex < cameras_.size(); ++cameraIndex)
|
||||
{
|
||||
Vec3 globalCameraPosition = (transFinal.transform_)*(cameras_[cameraIndex].getPos());
|
||||
coordStream << globalCameraPosition.x_ << " " << globalCameraPosition.y_ << " " << globalCameraPosition.z_ << std::endl;
|
||||
}
|
||||
coordStream.close();
|
||||
log_ << "...coordinate file saved.\n";
|
||||
}
|
||||
|
||||
if(exportGeorefSystem_)
|
||||
{
|
||||
printGeorefSystem();
|
||||
}
|
||||
}
|
||||
|
||||
void Georef::createGeoreferencedModelFromGCPData()
|
||||
|
@ -864,10 +1002,10 @@ void Georef::createGeoreferencedModelFromExifData()
|
|||
readCameras();
|
||||
|
||||
// Read coords from coord file generated by extract_utm tool
|
||||
std::ifstream coordStream(coordFilename_.c_str());
|
||||
std::ifstream coordStream(inputCoordFilename_.c_str());
|
||||
if (!coordStream.good())
|
||||
{
|
||||
throw GeorefException("Failed opening " + coordFilename_ + " for reading." + '\n');
|
||||
throw GeorefException("Failed opening coordinate file " + inputCoordFilename_ + " for reading." + '\n');
|
||||
}
|
||||
|
||||
std::string coordString;
|
||||
|
@ -891,7 +1029,7 @@ void Georef::createGeoreferencedModelFromExifData()
|
|||
{
|
||||
if(nGeorefCameras >= cameras_.size())
|
||||
{
|
||||
throw GeorefException("Error, to many cameras in \'" + coordFilename_ + "\' coord file.\n");
|
||||
throw GeorefException("Error, to many cameras in \'" + inputCoordFilename_ + "\' coord file.\n");
|
||||
}
|
||||
|
||||
std::istringstream istr(coordString);
|
||||
|
@ -903,7 +1041,7 @@ void Georef::createGeoreferencedModelFromExifData()
|
|||
|
||||
if(nGeorefCameras < cameras_.size())
|
||||
{
|
||||
throw GeorefException("Not enough cameras in \'" + coordFilename_ + "\' coord file.\n");
|
||||
throw GeorefException("Not enough cameras in \'" + inputCoordFilename_ + "\' coord file.\n");
|
||||
}
|
||||
|
||||
// The optimal camera triplet.
|
||||
|
@ -938,7 +1076,6 @@ void Georef::createGeoreferencedModelFromExifData()
|
|||
|
||||
log_ << '\n';
|
||||
log_ << "Reading mesh file...\n";
|
||||
// The textureds mesh.e
|
||||
pcl::TextureMesh mesh;
|
||||
pcl::io::loadOBJFile(inputObjFilename_, mesh);
|
||||
log_ << ".. mesh file read.\n";
|
||||
|
@ -956,7 +1093,6 @@ void Georef::createGeoreferencedModelFromExifData()
|
|||
// Update the mesh.
|
||||
pcl::toPCLPointCloud2 (*meshCloud, mesh.cloud);
|
||||
|
||||
|
||||
// Iterate over each part of the mesh (one per material), to make texture file paths relative the .mtl file.
|
||||
for(size_t t = 0; t < mesh.tex_materials.size(); ++t)
|
||||
{
|
||||
|
@ -975,7 +1111,35 @@ void Georef::createGeoreferencedModelFromExifData()
|
|||
saveOBJFile(outputObjFilename_, mesh, 8);
|
||||
log_ << ".. mesh file saved.\n";
|
||||
|
||||
printGeorefSystem();
|
||||
if(georeferencePointCloud_)
|
||||
{
|
||||
//pcl::PointCloud2<pcl::PointNormal>::Ptr pointCloud;
|
||||
pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pointCloud(new pcl::PointCloud<pcl::PointXYZRGBNormal>());
|
||||
if(pcl::io::loadPLYFile<pcl::PointXYZRGBNormal> (inputPointCloudFilename_.c_str(), *pointCloud.get()) == -1) {
|
||||
throw GeorefException("Error when reading point cloud:\n" + inputPointCloudFilename_ + "\n");
|
||||
}
|
||||
else
|
||||
{
|
||||
log_ << "Successfully loaded " << pointCloud->size() << " points with corresponding normals from file.\n";
|
||||
}
|
||||
log_ << '\n';
|
||||
log_ << "Applying transform to point cloud...\n";
|
||||
pcl::transformPointCloud(*pointCloud, *pointCloud, transform);
|
||||
log_ << ".. point cloud transformed.\n";
|
||||
|
||||
pcl::PLYWriter plyWriter;
|
||||
|
||||
log_ << '\n';
|
||||
log_ << "Saving point cloud file to \'" << outputPointCloudFilename_ << "\'...\n";
|
||||
//pcl::io::savePLYFileASCII(outputPointCloudFilename_.c_str(), *pointCloud.get());
|
||||
plyWriter.write(outputPointCloudFilename_.c_str(), *pointCloud.get(), false, false);
|
||||
log_ << ".. point cloud file saved.\n";
|
||||
}
|
||||
|
||||
if(exportGeorefSystem_)
|
||||
{
|
||||
printGeorefSystem();
|
||||
}
|
||||
}
|
||||
|
||||
void Georef::chooseBestGCPTriplet(size_t &gcp0, size_t &gcp1, size_t &gcp2)
|
||||
|
@ -1073,12 +1237,12 @@ void Georef::printGeorefSystem()
|
|||
throw GeorefException("Tried to generate default ouptut file, could not find .obj in the output file:\n\'"+outputObjFilename_+"\'");
|
||||
}
|
||||
|
||||
tmp = tmp.substr(0, findPos);
|
||||
//tmp = tmp.substr(0, findPos);
|
||||
|
||||
tmp = tmp + "_georef_system.txt";
|
||||
//tmp = tmp + "_georef_system.txt";
|
||||
log_ << '\n';
|
||||
log_ << "Saving georeference system file to \'" << tmp << "\'...\n";
|
||||
std::ofstream geoStream(tmp.c_str());
|
||||
log_ << "Saving georeference system file to \'" << georefFilename_ << "\'...\n";
|
||||
std::ofstream geoStream(georefFilename_.c_str());
|
||||
geoStream << georefSystem_ << std::endl;
|
||||
geoStream.close();
|
||||
log_ << "... georeference system saved.\n";
|
||||
|
|
|
@ -144,6 +144,11 @@ private:
|
|||
*/
|
||||
void setDefaultOutput();
|
||||
|
||||
/*!
|
||||
* \brief setDefaultPointCloudOutput Setup the output file name given the input file name.
|
||||
*/
|
||||
void setDefaultPointCloudOutput();
|
||||
|
||||
/*!
|
||||
* \brief createGeoreferencedModel Makes the input file georeferenced and saves it to the output file.
|
||||
*/
|
||||
|
@ -199,25 +204,32 @@ private:
|
|||
**/
|
||||
void printGeorefSystem();
|
||||
|
||||
Logger log_; /**< Logging object. */
|
||||
std::string logFile_; /**< The path to the output log file. */
|
||||
Logger log_; /**< Logging object. */
|
||||
std::string logFile_; /**< The path to the output log file. */
|
||||
|
||||
std::string bundleFilename_; /**< The path to the cameras bundle file. **/
|
||||
std::string coordFilename_; /**< The path to the cameras georeference file. **/
|
||||
std::string gcpFilename_; /**< The path to the GCP file **/
|
||||
std::string imagesListPath_; /**< Path to the image list. **/
|
||||
std::string imagesLocation_; /**< The folder containing the images in the image list. **/
|
||||
std::string inputObjFilename_; /**< The path to the input mesh obj file. **/
|
||||
std::string outputObjFilename_; /**< The path to the output mesh obj file. **/
|
||||
std::string bundleFilename_; /**< The path to the cameras bundle file. **/
|
||||
std::string inputCoordFilename_; /**< The path to the cameras exif gps positions file. **/
|
||||
std::string outputCoordFilename_; /**< The path to the cameras georeferenced gps positions file. **/
|
||||
std::string gcpFilename_; /**< The path to the GCP file **/
|
||||
std::string imagesListPath_; /**< Path to the image list. **/
|
||||
std::string imagesLocation_; /**< The folder containing the images in the image list. **/
|
||||
std::string inputObjFilename_; /**< The path to the input mesh obj file. **/
|
||||
std::string outputObjFilename_; /**< The path to the output mesh obj file. **/
|
||||
std::string inputPointCloudFilename_; /**< The path to the input point cloud file. **/
|
||||
std::string outputPointCloudFilename_; /**< The path to the output point cloud file. **/
|
||||
std::string georefFilename_; /**< The path to the output offset file. **/
|
||||
|
||||
bool useGCP_; /**< Check if GCP-file is present and use this to georeference the model. **/
|
||||
double bundleResizedTo_; /**< The size used in the previous steps to calculate the camera focal_length. */
|
||||
bool georeferencePointCloud_;
|
||||
bool exportCoordinateFile_;
|
||||
bool exportGeorefSystem_;
|
||||
bool useGCP_; /**< Check if GCP-file is present and use this to georeference the model. **/
|
||||
double bundleResizedTo_; /**< The size used in the previous steps to calculate the camera focal_length. */
|
||||
|
||||
std::vector<GeorefCamera> cameras_; /**< A vector of all cameras. **/
|
||||
std::vector<GeorefGCP> gcps_; /**< A vector of all GCPs. **/
|
||||
std::vector<std::string> imageList_; /**< A vector containing the names of the corresponding cameras. **/
|
||||
std::vector<GeorefCamera> cameras_; /**< A vector of all cameras. **/
|
||||
std::vector<GeorefGCP> gcps_; /**< A vector of all GCPs. **/
|
||||
std::vector<std::string> imageList_; /**< A vector containing the names of the corresponding cameras. **/
|
||||
|
||||
GeorefSystem georefSystem_; /**< Contains the georeference system. **/
|
||||
GeorefSystem georefSystem_; /**< Contains the georeference system. **/
|
||||
};
|
||||
|
||||
/*!
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
// C++
|
||||
#include <math.h>
|
||||
#include <sstream>
|
||||
#include <fstream>
|
||||
|
||||
// This
|
||||
#include "OdmOrthoPhoto.hpp"
|
||||
|
@ -44,6 +45,7 @@ OdmOrthoPhoto::OdmOrthoPhoto()
|
|||
inputGeoRefFile_ = "";
|
||||
outputFile_ = "ortho.jpg";
|
||||
logFile_ = "log.txt";
|
||||
outputCornerFile_ = "";
|
||||
|
||||
resolution_ = 0.0f;
|
||||
|
||||
|
@ -229,6 +231,16 @@ void OdmOrthoPhoto::parseArguments(int argc, char *argv[])
|
|||
outputFile_ = std::string(argv[argIndex]);
|
||||
log_ << "Writing output to: " << outputFile_ << "\n";
|
||||
}
|
||||
else if(argument == "-outputCornerFile")
|
||||
{
|
||||
argIndex++;
|
||||
if (argIndex >= argc)
|
||||
{
|
||||
throw OdmOrthoPhotoException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided.");
|
||||
}
|
||||
outputCornerFile_ = std::string(argv[argIndex]);
|
||||
log_ << "Writing corners to: " << outputCornerFile_ << "\n";
|
||||
}
|
||||
else
|
||||
{
|
||||
printHelp();
|
||||
|
@ -258,12 +270,15 @@ void OdmOrthoPhoto::printHelp()
|
|||
log_ << "\"-inputFile <path>\" (mandatory)\n";
|
||||
log_ << "\"Input obj file that must contain a textured mesh.\n\n";
|
||||
|
||||
log_ << "\"-inputGeoRefFile <path>\" (optional, if specified boundary points are assumed to be givan as world coordinates. If not specified, the boundary points are assumed to be local coordinates)\n";
|
||||
log_ << "\"-inputGeoRefFile <path>\" (optional, if specified boundary points are assumed to be given as world coordinates. If not specified, the boundary points are assumed to be local coordinates)\n";
|
||||
log_ << "\"Input geograpical reference system file that describes the world position of the model's origin.\n\n";
|
||||
|
||||
log_ << "\"-outputFile <path>\" (optional, default: ortho.jpg)\n";
|
||||
log_ << "\"Target file in which the orthophoto is saved.\n\n";
|
||||
|
||||
log_ << "\"-outputCornerFile <path>\" (optional)\n";
|
||||
log_ << "\"Target text file for boundary corner points, written as \"xmin ymin xmax ymax\".\n\n";
|
||||
|
||||
log_ << "\"-resolution <pixels/m>\" (mandatory)\n";
|
||||
log_ << "\"The number of pixels used per meter.\n\n";
|
||||
|
||||
|
@ -364,7 +379,7 @@ void OdmOrthoPhoto::createOrthoPhoto()
|
|||
}
|
||||
|
||||
// Init ortho photo
|
||||
photo_ = cv::Mat::zeros(rowRes, colRes, CV_8UC3) + cv::Scalar(255, 255, 255);
|
||||
photo_ = cv::Mat::zeros(rowRes, colRes, CV_8UC4) + cv::Scalar(255, 255, 255, 0);
|
||||
depth_ = cv::Mat::zeros(rowRes, colRes, CV_32F) - std::numeric_limits<float>::infinity();
|
||||
|
||||
// Contains the vertices of the mesh.
|
||||
|
@ -431,6 +446,21 @@ void OdmOrthoPhoto::createOrthoPhoto()
|
|||
log_ << '\n';
|
||||
log_ << "Writing ortho photo to " << outputFile_ << "\n";
|
||||
cv::imwrite(outputFile_, photo_);
|
||||
|
||||
if (!outputCornerFile_.empty())
|
||||
{
|
||||
log_ << "Writing corner coordinates to " << outputCornerFile_ << "\n";
|
||||
std::ofstream cornerStream(outputCornerFile_.c_str());
|
||||
if (!cornerStream.is_open())
|
||||
{
|
||||
throw OdmOrthoPhotoException("Failed opening output corner file " + outputCornerFile_ + ".");
|
||||
}
|
||||
cornerStream.setf(std::ios::scientific, std::ios::floatfield);
|
||||
cornerStream.precision(17);
|
||||
cornerStream << xMin << " " << yMin << " " << xMax << " " << yMax;
|
||||
cornerStream.close();
|
||||
}
|
||||
|
||||
log_ << "Orthophoto generation done.\n";
|
||||
}
|
||||
|
||||
|
@ -443,34 +473,34 @@ void OdmOrthoPhoto::adjustBoundsForGeoRef()
|
|||
|
||||
// The system name
|
||||
std::string system;
|
||||
// The false easting and northing
|
||||
int falseEasting, falseNorthing;
|
||||
// The east and north offsets
|
||||
int eastOffset, northOffset;
|
||||
|
||||
// Parse file
|
||||
std::getline(geoRefStream, system);
|
||||
if(!(geoRefStream >> falseEasting))
|
||||
if(!(geoRefStream >> eastOffset))
|
||||
{
|
||||
throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract false easting.");
|
||||
throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract east offset.");
|
||||
}
|
||||
if(!(geoRefStream >> falseNorthing))
|
||||
if(!(geoRefStream >> northOffset))
|
||||
{
|
||||
throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract false northing.");
|
||||
throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract north offset.");
|
||||
}
|
||||
|
||||
log_ << "Georeference system\n";
|
||||
log_ << "Georeference system:\n";
|
||||
log_ << system << "\n";
|
||||
log_ << "False easting " << falseEasting << "\n";
|
||||
log_ << "False northing " << falseNorthing << "\n";
|
||||
log_ << "East offset: " << eastOffset << "\n";
|
||||
log_ << "North offset: " << northOffset << "\n";
|
||||
|
||||
// Adjust boundary points.
|
||||
boundaryPoint1_[0] = static_cast<float>(worldPoint1_.eastInteger_ - falseEasting) + worldPoint1_.eastFractional_;
|
||||
boundaryPoint1_[1] = static_cast<float>(worldPoint1_.northInteger_ - falseNorthing) + worldPoint1_.northFractional_;
|
||||
boundaryPoint2_[0] = static_cast<float>(worldPoint2_.eastInteger_ - falseEasting) + worldPoint2_.eastFractional_;
|
||||
boundaryPoint2_[1] = static_cast<float>(worldPoint2_.northInteger_ - falseNorthing) + worldPoint2_.northFractional_;
|
||||
boundaryPoint3_[0] = static_cast<float>(worldPoint3_.eastInteger_ - falseEasting) + worldPoint3_.eastFractional_;
|
||||
boundaryPoint3_[1] = static_cast<float>(worldPoint3_.northInteger_ - falseNorthing) + worldPoint3_.northFractional_;
|
||||
boundaryPoint4_[0] = static_cast<float>(worldPoint4_.eastInteger_ - falseEasting) + worldPoint4_.eastFractional_;
|
||||
boundaryPoint4_[1] = static_cast<float>(worldPoint4_.northInteger_ - falseNorthing) + worldPoint4_.northFractional_;
|
||||
boundaryPoint1_[0] = static_cast<float>(worldPoint1_.eastInteger_ - eastOffset) + worldPoint1_.eastFractional_;
|
||||
boundaryPoint1_[1] = static_cast<float>(worldPoint1_.northInteger_ - northOffset) + worldPoint1_.northFractional_;
|
||||
boundaryPoint2_[0] = static_cast<float>(worldPoint2_.eastInteger_ - eastOffset) + worldPoint2_.eastFractional_;
|
||||
boundaryPoint2_[1] = static_cast<float>(worldPoint2_.northInteger_ - northOffset) + worldPoint2_.northFractional_;
|
||||
boundaryPoint3_[0] = static_cast<float>(worldPoint3_.eastInteger_ - eastOffset) + worldPoint3_.eastFractional_;
|
||||
boundaryPoint3_[1] = static_cast<float>(worldPoint3_.northInteger_ - northOffset) + worldPoint3_.northFractional_;
|
||||
boundaryPoint4_[0] = static_cast<float>(worldPoint4_.eastInteger_ - eastOffset) + worldPoint4_.eastFractional_;
|
||||
boundaryPoint4_[1] = static_cast<float>(worldPoint4_.northInteger_ - northOffset) + worldPoint4_.northFractional_;
|
||||
|
||||
log_ << "Local boundary points:\n";
|
||||
log_ << "Point 1: " << boundaryPoint1_[0] << " " << boundaryPoint1_[1] << "\n";
|
||||
|
@ -908,7 +938,7 @@ void OdmOrthoPhoto::renderPixel(int row, int col, float s, float t, const cv::Ma
|
|||
b += static_cast<float>(bl[0]) * dr * dt;
|
||||
b += static_cast<float>(br[0]) * dl * dt;
|
||||
|
||||
photo_.at<cv::Vec3b>(row,col) = cv::Vec3b(static_cast<unsigned char>(b), static_cast<unsigned char>(g), static_cast<unsigned char>(r));
|
||||
photo_.at<cv::Vec4b>(row,col) = cv::Vec4b(static_cast<unsigned char>(b), static_cast<unsigned char>(g), static_cast<unsigned char>(r), 255);
|
||||
}
|
||||
|
||||
void OdmOrthoPhoto::getBarycentricCoordiantes(pcl::PointXYZ v1, pcl::PointXYZ v2, pcl::PointXYZ v3, float x, float y, float &l1, float &l2, float &l3) const
|
||||
|
|
|
@ -177,6 +177,7 @@ private:
|
|||
std::string inputFile_; /**< Path to the textured mesh as an obj-file. */
|
||||
std::string inputGeoRefFile_; /**< Path to the georeference system file. */
|
||||
std::string outputFile_; /**< Path to the destination file. */
|
||||
std::string outputCornerFile_; /**< Path to the output corner file. */
|
||||
std::string logFile_; /**< Path to the log file. */
|
||||
|
||||
float resolution_; /**< The number of pixels per meter in the ortho photo. */
|
||||
|
|
|
@ -1,124 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2008-2010 Noah Snavely (snavely (at) cs.cornell.edu)
|
||||
* and the University of Washington
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 2 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
*/
|
||||
|
||||
/* KeyMatch.cpp */
|
||||
/* Read in keys, match, write results to a file */
|
||||
|
||||
#include <time.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include "keys2a.h"
|
||||
|
||||
int main(int argc, char **argv) {
|
||||
char *keys1_in;
|
||||
char *keys2_in;
|
||||
char *file_out;
|
||||
double ratio, threshold;
|
||||
|
||||
if (argc != 6) {
|
||||
printf("Usage: %s <keys1.in> <keys2.in> <out.txt> <ratio> <threshold>\n", argv[0]);
|
||||
return -1;
|
||||
}
|
||||
|
||||
keys1_in = argv[1];
|
||||
keys2_in = argv[2];
|
||||
file_out = argv[3];
|
||||
|
||||
ratio = atof(argv[4]);
|
||||
threshold = atof(argv[5]);
|
||||
|
||||
clock_t start = clock();
|
||||
|
||||
unsigned char *keys1, *keys2;
|
||||
|
||||
int num1 = ReadKeyFile(keys1_in, &keys1);
|
||||
int num2 = ReadKeyFile(keys2_in, &keys2);
|
||||
|
||||
|
||||
/* Compute likely matches between two sets of keypoints */
|
||||
std::vector<KeypointMatch> matches =
|
||||
MatchKeys(num1, keys1, num2, keys2, ratio);
|
||||
|
||||
#if 0
|
||||
std::vector<KeypointMatch> matches_sym =
|
||||
MatchKeys(num2, keys2, num1, keys1);
|
||||
#endif
|
||||
|
||||
int num_matches = (int) matches.size();
|
||||
// int num_matches_sym = (int) matches_sym.size();
|
||||
|
||||
// printf("num_matches = %d\n", num_matches);
|
||||
// printf("num_matches_sym = %d\n", num_matches_sym);
|
||||
|
||||
#if 0
|
||||
/* Prune asymmetric matches */
|
||||
for (int i = 0; i < num_matches; i++) {
|
||||
int idx1 = matches[i].m_idx1;
|
||||
int idx2 = matches[i].m_idx2;
|
||||
|
||||
for (int j = 0; j < num_matches_sym; j++) {
|
||||
if (matches_sym[j].m_idx1 == idx2) {
|
||||
if (matches_sym[j].m_idx2 != idx1) {
|
||||
matches.erase(matches.begin() + i);
|
||||
i--;
|
||||
num_matches--;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
clock_t end = clock();
|
||||
|
||||
int m = (num1 < num2 ? num1 : num2);
|
||||
float r = ((float)num_matches * 100 / m);
|
||||
|
||||
bool used = false;
|
||||
|
||||
if (num_matches >= 16 && r > threshold) {
|
||||
used = true;
|
||||
|
||||
FILE *f = fopen(file_out, "w");
|
||||
|
||||
/* Write the number of matches */
|
||||
fprintf(f, "%d\n", (int) matches.size());
|
||||
|
||||
for (int i = 0; i < num_matches; i++) {
|
||||
fprintf(f, "%d %d\n", matches[i].m_idx1, matches[i].m_idx2);
|
||||
}
|
||||
|
||||
fclose(f);
|
||||
}
|
||||
|
||||
|
||||
if(used) printf("\n%8d matches (%4.1f%%) took %5.2fs for %s\t",
|
||||
num_matches,
|
||||
r,
|
||||
(end - start) / ((double) CLOCKS_PER_SEC),
|
||||
file_out);
|
||||
|
||||
|
||||
/* printf("%8d matches = %5.2f%% %d in %6.3fs in %s\n",
|
||||
num_matches,
|
||||
r,
|
||||
m,
|
||||
(end - start) / ((double) CLOCKS_PER_SEC),
|
||||
file_out);
|
||||
*/
|
||||
}
|
|
@ -1,82 +0,0 @@
|
|||
# Makefile for bundler
|
||||
|
||||
CC=gcc
|
||||
OPTFLAGS=-O3 -Wall
|
||||
|
||||
OS=$(shell uname -o)
|
||||
|
||||
ifeq ($(OS), Cygwin)
|
||||
BUNDLER=bundler.exe
|
||||
KEYMATCHFULL=KeyMatchFull.exe
|
||||
KEYMATCH=KeyMatch.exe
|
||||
BUNDLE2PMVS=Bundle2PMVS.exe
|
||||
BUNDLE2VIS=Bundle2Vis.exe
|
||||
RADIALUNDISTORT=RadialUndistort.exe
|
||||
else
|
||||
BUNDLER=bundler
|
||||
KEYMATCHFULL=KeyMatchFull
|
||||
KEYMATCH=KeyMatch
|
||||
BUNDLE2PMVS=Bundle2PMVS
|
||||
BUNDLE2VIS=Bundle2Vis
|
||||
RADIALUNDISTORT=RadialUndistort
|
||||
endif
|
||||
|
||||
INCLUDE_PATH=-I../lib/imagelib -I../lib/sfm-driver -I../lib/matrix \
|
||||
-I../lib/5point -I../lib/sba-1.5 -I../lib/ann_1.1_char/include
|
||||
|
||||
LIB_PATH=-L../lib -L../lib/ann_1.1_char/lib
|
||||
|
||||
CPPFLAGS=$(OPTFLAGS) $(OTHERFLAGS) $(INCLUDE_PATH) $(DEFINES)
|
||||
|
||||
BUNDLER_DEFINES=-D__NO_UI__ -D__BUNDLER__ -D__BUNDLER_DISTR__
|
||||
|
||||
BUNDLER_OBJS=BaseApp.o BundlerApp.o keys.o Register.o Epipolar.o \
|
||||
Bundle.o BundleFast.o MatchTracks.o Camera.o Geometry.o \
|
||||
ImageData.o SifterUtil.o BaseGeometry.o BundlerGeometry.o \
|
||||
BoundingBox.o BundleAdd.o ComputeTracks.o BruteForceSearch.o \
|
||||
BundleIO.o ProcessBundle.o BundleTwo.o Decompose.o \
|
||||
RelativePose.o Distortion.o TwoFrameModel.o LoadJPEG.o
|
||||
|
||||
BUNDLER_LIBS=-limage -lsfmdrv -lsba.v1.5 -lmatrix -lz -llapack -lblas \
|
||||
-lcblas -lminpack -lm -l5point -ljpeg -lANN_char -lgfortran
|
||||
|
||||
|
||||
all: $(BUNDLER) $(KEYMATCHFULL) $(KEYMATCH) $(BUNDLE2PMVS) $(BUNDLE2VIS) $(RADIALUNDISTORT)
|
||||
|
||||
%.o : %.cpp
|
||||
$(CXX) -c -o $@ $(CPPFLAGS) $(WXFLAGS) $(BUNDLER_DEFINES) $<
|
||||
|
||||
$(BUNDLER): $(BUNDLER_OBJS)
|
||||
$(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) \
|
||||
$(BUNDLER_DEFINES) $(BUNDLER_OBJS) $(BUNDLER_LIBS)
|
||||
cp $@ ../bin
|
||||
|
||||
$(KEYMATCHFULL): KeyMatchFull.o keys2a.o
|
||||
$(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) KeyMatchFull.o keys2a.o \
|
||||
-lANN_char -lz
|
||||
cp $@ ../bin
|
||||
|
||||
$(KEYMATCH): KeyMatch.o keys2a.o
|
||||
$(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) KeyMatch.o keys2a.o \
|
||||
-lANN_char -lz
|
||||
cp $@ ../bin
|
||||
|
||||
$(BUNDLE2PMVS): Bundle2PMVS.o LoadJPEG.o
|
||||
$(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) Bundle2PMVS.o LoadJPEG.o \
|
||||
-limage -lmatrix -llapack -lblas -lcblas -lgfortran \
|
||||
-lminpack -ljpeg
|
||||
cp $@ ../bin
|
||||
|
||||
$(BUNDLE2VIS): Bundle2Vis.o
|
||||
$(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) Bundle2Vis.o
|
||||
cp $@ ../bin
|
||||
|
||||
$(RADIALUNDISTORT): RadialUndistort.o LoadJPEG.o
|
||||
$(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) $^ \
|
||||
-limage -lmatrix -llapack -lblas -lcblas -lgfortran \
|
||||
-lminpack -ljpeg
|
||||
cp $@ ../bin
|
||||
|
||||
clean:
|
||||
rm -f *.o *~ $(BUNDLER) $(KEYMATCHFULL) $(KEYMATCH) $(BUNDLE2PMVS) \
|
||||
$(BUNDLE2VIS) $(RADIALUNDISTORT)
|
|
@ -1,465 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2008-2010 Noah Snavely (snavely (at) cs.cornell.edu)
|
||||
* and the University of Washington
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 2 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
*/
|
||||
|
||||
/* keys2.cpp */
|
||||
/* Class for SIFT keypoints */
|
||||
|
||||
#include <assert.h>
|
||||
#include <math.h>
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <time.h>
|
||||
|
||||
#include <zlib.h>
|
||||
|
||||
#include "keys2a.h"
|
||||
|
||||
int GetNumberOfKeysNormal(FILE *fp)
|
||||
{
|
||||
int num, len;
|
||||
|
||||
if (fscanf(fp, "%d %d", &num, &len) != 2) {
|
||||
printf("Invalid keypoint file.\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
return num;
|
||||
}
|
||||
|
||||
int GetNumberOfKeysGzip(gzFile fp)
|
||||
{
|
||||
int num, len;
|
||||
|
||||
char header[256];
|
||||
gzgets(fp, header, 256);
|
||||
|
||||
if (sscanf(header, "%d %d", &num, &len) != 2) {
|
||||
printf("Invalid keypoint file.\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
return num;
|
||||
}
|
||||
|
||||
/* Returns the number of keys in a file */
|
||||
int GetNumberOfKeys(const char *filename)
|
||||
{
|
||||
FILE *file;
|
||||
|
||||
file = fopen (filename, "r");
|
||||
if (! file) {
|
||||
/* Try to file a gzipped keyfile */
|
||||
char buf[1024];
|
||||
sprintf(buf, "%s.gz", filename);
|
||||
gzFile gzf = gzopen(buf, "rb");
|
||||
|
||||
if (gzf == NULL) {
|
||||
printf("Could not open file: %s\n", filename);
|
||||
return 0;
|
||||
} else {
|
||||
int n = GetNumberOfKeysGzip(gzf);
|
||||
gzclose(gzf);
|
||||
return n;
|
||||
}
|
||||
}
|
||||
|
||||
int n = GetNumberOfKeysNormal(file);
|
||||
fclose(file);
|
||||
return n;
|
||||
}
|
||||
|
||||
/* This reads a keypoint file from a given filename and returns the list
|
||||
* of keypoints. */
|
||||
int ReadKeyFile(const char *filename, unsigned char **keys, keypt_t **info)
|
||||
{
|
||||
FILE *file;
|
||||
|
||||
file = fopen (filename, "r");
|
||||
if (! file) {
|
||||
/* Try to file a gzipped keyfile */
|
||||
char buf[1024];
|
||||
sprintf(buf, "%s.bin", filename);
|
||||
FILE *binfile = fopen(buf, "rb");
|
||||
|
||||
if (binfile == NULL) {
|
||||
printf("Could not open file: %s\n", filename);
|
||||
return 0;
|
||||
} else {
|
||||
int n = ReadKeysBin(binfile, keys, info);
|
||||
fclose(binfile);
|
||||
return n;
|
||||
}
|
||||
}
|
||||
|
||||
int n = ReadKeys(file, keys, info);
|
||||
fclose(file);
|
||||
return n;
|
||||
|
||||
// return ReadKeysMMAP(file);
|
||||
}
|
||||
|
||||
#if 0
|
||||
/* Read keys using MMAP to speed things up */
|
||||
std::vector<Keypoint *> ReadKeysMMAP(FILE *fp)
|
||||
{
|
||||
int i, j, num, len, val, n;
|
||||
|
||||
std::vector<Keypoint *> kps;
|
||||
|
||||
struct stat sb;
|
||||
|
||||
/* Stat the file */
|
||||
if (fstat(fileno(fp), &sb) < 0) {
|
||||
printf("[ReadKeysMMAP] Error: could not stat file\n");
|
||||
return kps;
|
||||
}
|
||||
|
||||
char *file = (char *)mmap(NULL, sb.st_size, PROT_READ, MAP_SHARED,
|
||||
fileno(fp), 0);
|
||||
|
||||
char *file_start = file;
|
||||
|
||||
if (sscanf(file, "%d %d%n", &num, &len, &n) != 2) {
|
||||
printf("[ReadKeysMMAP] Invalid keypoint file beginning.");
|
||||
return kps;
|
||||
}
|
||||
|
||||
file += n;
|
||||
|
||||
if (len != 128) {
|
||||
printf("[ReadKeysMMAP] Keypoint descriptor length invalid "
|
||||
"(should be 128).");
|
||||
return kps;
|
||||
}
|
||||
|
||||
for (i = 0; i < num; i++) {
|
||||
/* Allocate memory for the keypoint. */
|
||||
unsigned char *d = new unsigned char[len];
|
||||
float x, y, scale, ori;
|
||||
|
||||
if (sscanf(file, "%f %f %f %f%n", &y, &x, &scale, &ori, &n) != 4) {
|
||||
printf("[ReadKeysMMAP] Invalid keypoint file format.");
|
||||
return kps;
|
||||
}
|
||||
|
||||
file += n;
|
||||
|
||||
for (j = 0; j < len; j++) {
|
||||
if (sscanf(file, "%d%n", &val, &n) != 1 || val < 0 || val > 255) {
|
||||
printf("[ReadKeysMMAP] Invalid keypoint file value.");
|
||||
return kps;
|
||||
}
|
||||
d[j] = (unsigned char) val;
|
||||
file += n;
|
||||
}
|
||||
|
||||
kps.push_back(new Keypoint(x, y, scale, ori, d));
|
||||
}
|
||||
|
||||
/* Unmap */
|
||||
if (munmap(file_start, sb.st_size) < 0) {
|
||||
printf("[ReadKeysMMAP] Error: could not unmap memory\n");
|
||||
return kps;
|
||||
}
|
||||
|
||||
return kps;
|
||||
}
|
||||
#endif
|
||||
|
||||
/* Read keypoints from the given file pointer and return the list of
|
||||
* keypoints. The file format starts with 2 integers giving the total
|
||||
* number of keypoints and the size of descriptor vector for each
|
||||
* keypoint (currently assumed to be 128). Then each keypoint is
|
||||
* specified by 4 floating point numbers giving subpixel row and
|
||||
* column location, scale, and orientation (in radians from -PI to
|
||||
* PI). Then the descriptor vector for each keypoint is given as a
|
||||
* list of integers in range [0,255]. */
|
||||
int ReadKeys(FILE *fp, unsigned char **keys, keypt_t **info)
|
||||
{
|
||||
int i, num, len;
|
||||
|
||||
std::vector<Keypoint *> kps;
|
||||
|
||||
if (fscanf(fp, "%d %d", &num, &len) != 2) {
|
||||
printf("Invalid keypoint file\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (len != 128) {
|
||||
printf("Keypoint descriptor length invalid (should be 128).");
|
||||
return 0;
|
||||
}
|
||||
|
||||
*keys = new unsigned char[128 * num + 8];
|
||||
|
||||
if (info != NULL)
|
||||
*info = new keypt_t[num];
|
||||
|
||||
unsigned char *p = *keys;
|
||||
for (i = 0; i < num; i++) {
|
||||
/* Allocate memory for the keypoint. */
|
||||
// short int *d = new short int[128];
|
||||
float x, y, scale, ori;
|
||||
|
||||
if (fscanf(fp, "%f %f %f %f\n", &y, &x, &scale, &ori) != 4) {
|
||||
printf("Invalid keypoint file format.");
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (info != NULL) {
|
||||
(*info)[i].x = x;
|
||||
(*info)[i].y = y;
|
||||
(*info)[i].scale = scale;
|
||||
(*info)[i].orient = ori;
|
||||
}
|
||||
|
||||
char buf[1024];
|
||||
for (int line = 0; line < 7; line++) {
|
||||
fgets(buf, 1024, fp);
|
||||
|
||||
if (line < 6) {
|
||||
sscanf(buf,
|
||||
"%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu "
|
||||
"%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu",
|
||||
p+0, p+1, p+2, p+3, p+4, p+5, p+6, p+7, p+8, p+9,
|
||||
p+10, p+11, p+12, p+13, p+14,
|
||||
p+15, p+16, p+17, p+18, p+19);
|
||||
|
||||
p += 20;
|
||||
} else {
|
||||
sscanf(buf,
|
||||
"%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu",
|
||||
p+0, p+1, p+2, p+3, p+4, p+5, p+6, p+7);
|
||||
p += 8;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return num; // kps;
|
||||
}
|
||||
|
||||
int ReadKeysBin(FILE* fp, unsigned char **keys, keypt_t **info)
|
||||
{
|
||||
|
||||
int32_t num_keys;
|
||||
fread(&num_keys, sizeof(int), 1, fp);
|
||||
// printf("num_keys: %d\n", num_keys);
|
||||
|
||||
|
||||
*keys = new unsigned char[128 * num_keys + 8];
|
||||
|
||||
if (info != NULL)
|
||||
*info = new keypt_t[num_keys];
|
||||
|
||||
unsigned char *p = *keys;
|
||||
for (int i = 0; i < num_keys; i++) {
|
||||
/* Allocate memory for the keypoint. */
|
||||
// short int *d = new short int[128];
|
||||
float x, y, scale, ori;
|
||||
|
||||
fread(&x, sizeof(float), 1, fp);
|
||||
fread(&y, sizeof(float), 1, fp);
|
||||
fread(&scale, sizeof(float), 1, fp);
|
||||
fread(&ori, sizeof(float), 1, fp);
|
||||
|
||||
if (info != NULL) {
|
||||
(*info)[i].x = x;
|
||||
(*info)[i].y = y;
|
||||
(*info)[i].scale = scale;
|
||||
(*info)[i].orient = ori;
|
||||
}
|
||||
|
||||
fread(p, sizeof(unsigned char), 128, fp);
|
||||
|
||||
// printf("key %d: %f, %f, %f, %f - %d\n", i, x, y, scale, ori, *p);
|
||||
|
||||
p += 128;
|
||||
}
|
||||
|
||||
|
||||
return num_keys;
|
||||
}
|
||||
int ReadKeysGzip(gzFile fp, unsigned char **keys, keypt_t **info)
|
||||
{
|
||||
int i, num, len;
|
||||
|
||||
std::vector<Keypoint *> kps;
|
||||
char header[256];
|
||||
gzgets(fp, header, 256);
|
||||
|
||||
if (sscanf(header, "%d %d", &num, &len) != 2) {
|
||||
printf("Invalid keypoint file.\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (len != 128) {
|
||||
printf("Keypoint descriptor length invalid (should be 128).");
|
||||
return 0;
|
||||
}
|
||||
|
||||
*keys = new unsigned char[128 * num + 8];
|
||||
|
||||
if (info != NULL)
|
||||
*info = new keypt_t[num];
|
||||
|
||||
unsigned char *p = *keys;
|
||||
for (i = 0; i < num; i++) {
|
||||
/* Allocate memory for the keypoint. */
|
||||
// short int *d = new short int[128];
|
||||
float x, y, scale, ori;
|
||||
char buf[1024];
|
||||
gzgets(fp, buf, 1024);
|
||||
|
||||
if (sscanf(buf, "%f %f %f %f\n", &y, &x, &scale, &ori) != 4) {
|
||||
printf("Invalid keypoint file format.");
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (info != NULL) {
|
||||
(*info)[i].x = x;
|
||||
(*info)[i].y = y;
|
||||
(*info)[i].scale = scale;
|
||||
(*info)[i].orient = ori;
|
||||
}
|
||||
|
||||
for (int line = 0; line < 7; line++) {
|
||||
char *str = gzgets(fp, buf, 1024);
|
||||
assert(str != Z_NULL);
|
||||
|
||||
if (line < 6) {
|
||||
sscanf(buf,
|
||||
"%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu "
|
||||
"%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu",
|
||||
p+0, p+1, p+2, p+3, p+4, p+5, p+6, p+7, p+8, p+9,
|
||||
p+10, p+11, p+12, p+13, p+14,
|
||||
p+15, p+16, p+17, p+18, p+19);
|
||||
|
||||
p += 20;
|
||||
} else {
|
||||
sscanf(buf,
|
||||
"%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu",
|
||||
p+0, p+1, p+2, p+3, p+4, p+5, p+6, p+7);
|
||||
p += 8;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert(p == *keys + 128 * num);
|
||||
|
||||
return num; // kps;
|
||||
}
|
||||
|
||||
/* Create a search tree for the given set of keypoints */
|
||||
ANNkd_tree *CreateSearchTree(int num_keys, unsigned char *keys)
|
||||
{
|
||||
// clock_t start = clock();
|
||||
|
||||
/* Create a new array of points */
|
||||
ANNpointArray pts = annAllocPts(num_keys, 128);
|
||||
|
||||
for (int i = 0; i < num_keys; i++) {
|
||||
memcpy(pts[i], keys + 128 * i, sizeof(unsigned char) * 128);
|
||||
}
|
||||
|
||||
/* Create a search tree for k2 */
|
||||
ANNkd_tree *tree = new ANNkd_tree(pts, num_keys, 128, 16);
|
||||
// clock_t end = clock();
|
||||
|
||||
// printf("Building tree took %0.3fs\n",
|
||||
// (end - start) / ((double) CLOCKS_PER_SEC));
|
||||
|
||||
return tree;
|
||||
}
|
||||
|
||||
std::vector<KeypointMatch> MatchKeys(int num_keys1, unsigned char *k1,
|
||||
ANNkd_tree *tree2,
|
||||
double ratio, int max_pts_visit)
|
||||
{
|
||||
annMaxPtsVisit(max_pts_visit);
|
||||
std::vector<KeypointMatch> matches;
|
||||
|
||||
/* Now do the search */
|
||||
// clock_t start = clock();
|
||||
for (int i = 0; i < num_keys1; i++) {
|
||||
ANNidx nn_idx[2];
|
||||
ANNdist dist[2];
|
||||
|
||||
tree2->annkPriSearch(k1 + 128 * i, 2, nn_idx, dist, 0.0);
|
||||
|
||||
if (((double) dist[0]) < ratio * ratio * ((double) dist[1])) {
|
||||
matches.push_back(KeypointMatch(i, nn_idx[0]));
|
||||
}
|
||||
}
|
||||
// clock_t end = clock();
|
||||
|
||||
// printf("Searching tree took %0.3fs\n",
|
||||
// (end - start) / ((double) CLOCKS_PER_SEC));
|
||||
|
||||
return matches;
|
||||
}
|
||||
|
||||
/* Compute likely matches between two sets of keypoints */
|
||||
std::vector<KeypointMatch> MatchKeys(int num_keys1, unsigned char *k1,
|
||||
int num_keys2, unsigned char *k2,
|
||||
double ratio, int max_pts_visit)
|
||||
{
|
||||
annMaxPtsVisit(max_pts_visit);
|
||||
|
||||
int num_pts = 0;
|
||||
std::vector<KeypointMatch> matches;
|
||||
|
||||
num_pts = num_keys2;
|
||||
clock_t start = clock();
|
||||
|
||||
/* Create a new array of points */
|
||||
ANNpointArray pts = annAllocPts(num_pts, 128);
|
||||
|
||||
for (int i = 0; i < num_pts; i++) {
|
||||
memcpy(pts[i], k2 + 128 * i, sizeof(unsigned char) * 128);
|
||||
}
|
||||
|
||||
/* Create a search tree for k2 */
|
||||
ANNkd_tree *tree = new ANNkd_tree(pts, num_pts, 128, 16);
|
||||
clock_t end = clock();
|
||||
|
||||
// printf("Building tree took %0.3fs\n",
|
||||
// (end - start) / ((double) CLOCKS_PER_SEC));
|
||||
|
||||
/* Now do the search */
|
||||
start = clock();
|
||||
for (int i = 0; i < num_keys1; i++) {
|
||||
ANNidx nn_idx[2];
|
||||
ANNdist dist[2];
|
||||
|
||||
tree->annkPriSearch(k1 + 128 * i, 2, nn_idx, dist, 0.0);
|
||||
|
||||
if (((double) dist[0]) < ratio * ratio * ((double) dist[1])) {
|
||||
matches.push_back(KeypointMatch(i, nn_idx[0]));
|
||||
}
|
||||
}
|
||||
end = clock();
|
||||
// printf("Searching tree took %0.3fs\n",
|
||||
// (end - start) / ((double) CLOCKS_PER_SEC));
|
||||
|
||||
/* Cleanup */
|
||||
annDeallocPts(pts);
|
||||
// annDeallocPt(axis_weights);
|
||||
|
||||
delete tree;
|
||||
|
||||
return matches;
|
||||
}
|
|
@ -1,110 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2008-2010 Noah Snavely (snavely (at) cs.cornell.edu)
|
||||
* and the University of Washington
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 2 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
*/
|
||||
|
||||
/* keys2a.h */
|
||||
/* Class for SIFT keypoints */
|
||||
|
||||
#ifndef __keys2a_h__
|
||||
#define __keys2a_h__
|
||||
|
||||
#include <vector>
|
||||
#include <stdio.h>
|
||||
|
||||
#include <zlib.h>
|
||||
|
||||
#include "ANN/ANN.h"
|
||||
|
||||
using namespace ann_1_1_char;
|
||||
|
||||
class Keypoint {
|
||||
public:
|
||||
Keypoint(float x, float y, float scale, float ori, short int *d) :
|
||||
m_x(x), m_y(y), m_scale(scale), m_ori(ori), m_d(d)
|
||||
{ }
|
||||
|
||||
float m_x, m_y; /* Subpixel location of keypoint. */
|
||||
float m_scale, m_ori; /* Scale and orientation (range [-PI,PI]) */
|
||||
short int *m_d; /* Vector of descriptor values */
|
||||
};
|
||||
|
||||
|
||||
/* Data struct for matches */
|
||||
class KeypointMatch {
|
||||
public:
|
||||
KeypointMatch()
|
||||
{ }
|
||||
|
||||
#if 0
|
||||
KeypointMatch(int idx1, int idx2, float x1, float y1, float x2, float y2) :
|
||||
m_idx1(idx1), m_idx2(idx2), m_x1(x1), m_y1(y1), m_x2(x2), m_y2(y2)
|
||||
{ }
|
||||
#endif
|
||||
|
||||
KeypointMatch(int idx1, int idx2) :
|
||||
m_idx1(idx1), m_idx2(idx2)
|
||||
{ }
|
||||
|
||||
int m_idx1, m_idx2;
|
||||
// float m_x1, m_y1;
|
||||
// float m_x2, m_y2;
|
||||
};
|
||||
|
||||
typedef struct {
|
||||
float x, y;
|
||||
float scale;
|
||||
float orient;
|
||||
} keypt_t;
|
||||
|
||||
/* Returns the number of keys in a file */
|
||||
int GetNumberOfKeys(const char *filename);
|
||||
|
||||
/* This reads a keypoint file from a given filename and returns the list
|
||||
* of keypoints. */
|
||||
int ReadKeyFile(const char *filename, unsigned char **keys,
|
||||
keypt_t **info = NULL);
|
||||
|
||||
int ReadKeyPositions(const char *filename, keypt_t **info);
|
||||
|
||||
/* Read keypoints from the given file pointer and return the list of
|
||||
* keypoints. The file format starts with 2 integers giving the total
|
||||
* number of keypoints and the size of descriptor vector for each
|
||||
* keypoint (currently assumed to be 128). Then each keypoint is
|
||||
* specified by 4 floating point numbers giving subpixel row and
|
||||
* column location, scale, and orientation (in radians from -PI to
|
||||
* PI). Then the descriptor vector for each keypoint is given as a
|
||||
* list of integers in range [0,255]. */
|
||||
int ReadKeys(FILE *fp, unsigned char **keys, keypt_t **info = NULL);
|
||||
int ReadKeysBin(FILE *fp, unsigned char **keys, keypt_t **info = NULL);
|
||||
int ReadKeysGzip(gzFile fp, unsigned char **keys, keypt_t **info = NULL);
|
||||
|
||||
/* Read keys using MMAP to speed things up */
|
||||
std::vector<Keypoint *> ReadKeysMMAP(FILE *fp);
|
||||
|
||||
/* Create a search tree for the given set of keypoints */
|
||||
ANNkd_tree *CreateSearchTree(int num_keys, unsigned char *keys);
|
||||
|
||||
/* Compute likely matches between two sets of keypoints */
|
||||
std::vector<KeypointMatch> MatchKeys(int num_keys1, unsigned char *k1,
|
||||
int num_keys2, unsigned char *k2,
|
||||
double ratio = 0.6,
|
||||
int max_pts_visit = 200);
|
||||
|
||||
std::vector<KeypointMatch> MatchKeys(int num_keys1, unsigned char *k1,
|
||||
ANNkd_tree *tree2,
|
||||
double ratio = 0.6,
|
||||
int max_pts_visit = 200);
|
||||
|
||||
#endif /* __keys2_h__ */
|
1
run.pl
1
run.pl
|
@ -697,6 +697,7 @@ sub bundler {
|
|||
$bundlerOptions .= "--constrain_focal\n";
|
||||
$bundlerOptions .= "--constrain_focal_weight 0.0\n";
|
||||
$bundlerOptions .= "--estimate_distortion\n";
|
||||
$bundlerOptions .= "--use_ceres\n";
|
||||
$bundlerOptions .= "--run_bundle";
|
||||
|
||||
system("echo \"$bundlerOptions\" > \"$jobOptions{step_3_bundlerOptions}\"");
|
||||
|
|
|
@ -0,0 +1,904 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import os
|
||||
import sys
|
||||
import multiprocessing
|
||||
import json
|
||||
import datetime
|
||||
import re
|
||||
import subprocess
|
||||
import shutil
|
||||
import shlex
|
||||
# import collections # Never used
|
||||
import fractions
|
||||
import argparse
|
||||
|
||||
import knnMatch_exif
|
||||
|
||||
# the defs
|
||||
# Directory the pipeline was launched from; the source images are expected
# to live here and the job directory is created alongside them.
CURRENT_DIR = os.getcwd()
# Absolute directory containing this script (also holds ccd_defs.json and bin/).
BIN_PATH_ABS = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
# Logical CPU count; used to size cmvs/genOption parallelism.
CORES = multiprocessing.cpu_count()
|
||||
|
||||
|
||||
def get_ccd_widths():
    """Load the camera CCD-width lookup table from ccd_defs.json.

    The table maps a "<make> <model>" camera id string to its CCD width
    in millimetres; the parsed JSON dict is returned as-is.
    """
    ccd_defs_path = BIN_PATH_ABS + '/ccd_defs.json'
    with open(ccd_defs_path) as defs_file:
        return json.load(defs_file)
|
||||
|
||||
# ---- Global mutable pipeline state ----

# Per-image metadata dicts, filled in by prepare_objects().
objects = []
# Camera-id -> CCD width (mm) lookup loaded from ccd_defs.json.
ccdWidths = get_ccd_widths()

# Directory holding the bundled helper executables (vlsift, bundler, pmvs2, ...).
BIN_PATH = BIN_PATH_ABS + '/bin'

# Aggregate counters over all scanned images; the min*/max* trackers are
# seeded with 0 and updated as images are scanned.
objectStats = {
    'count': 0, 'good': 0, 'bad': 0, 'minWidth': 0, 'minHeight': 0,
    'maxWidth': 0, 'maxHeight': 0
}

# Job-wide settings and derived paths; most entries are added later by
# prepare_objects() and parse_coordinate_system().
jobOptions = {'resizeTo': 0, 'srcDir': CURRENT_DIR, 'utmZone': -999,
              'utmSouth': False, 'utmEastOffset': 0, 'utmNorthOffset': 0}

# parse arguments
# Ordered list of pipeline steps; --start-with / --end-with / --run-only
# must name one of these.
processopts = ['resize', 'getKeypoints', 'match', 'bundler', 'cmvs', 'pmvs',
               'odm_meshing', 'odm_texturing', 'odm_georeferencing',
               'odm_orthophoto']

parser = argparse.ArgumentParser(description='OpenDroneMap')

parser.add_argument('--resize-to',  # currently doesn't support 'orig'
                    metavar='<integer>',
                    default=2400,
                    type=int,
                    help='resizes images by the largest side')

parser.add_argument('--start-with', '-s',
                    metavar='<string>',
                    default='resize',
                    choices=processopts,
                    help=('Can be one of: ' + ' | '.join(processopts)))

parser.add_argument('--end-with', '-e',
                    metavar='<string>',
                    default='odm_orthophoto',
                    choices=processopts,
                    help=('Can be one of:' + ' | '.join(processopts)))

parser.add_argument('--run-only',
                    metavar='<string>',
                    choices=processopts,
                    help=('Can be one of:' + ' | '.join(processopts)))

parser.add_argument('--force-focal',
                    metavar='<positive float>',
                    type=float,
                    help=('Override the focal length information for the '
                          'images'))

parser.add_argument('--force-ccd',
                    metavar='<positive float>',
                    type=float,
                    help='Override the ccd width information for the images')

parser.add_argument('--matcher-threshold',
                    metavar='<percent>',
                    default=2.0,
                    type=float,
                    help=('Ignore matched keypoints if the two images share '
                          'less than <float> percent of keypoints'))

parser.add_argument('--matcher-ratio',
                    metavar='<float>',
                    default=0.6,
                    type=float,
                    help=('Ratio of the distance to the next best matched '
                          'keypoint'))

# NOTE(review): type=bool does not parse CLI strings -- any non-empty value
# (including "False") yields True; consider action='store_true' instead.
parser.add_argument('--matcher-preselect',
                    type=bool,
                    metavar='',
                    default=True,
                    help=('use GPS exif data, if available, to match each '
                          'image only with its k-nearest neighbors, or all '
                          'images within a certain distance threshold'))

# NOTE(review): same type=bool caveat as --matcher-preselect.
parser.add_argument('--matcher-useKnn',
                    type=bool,
                    metavar='',
                    default=True,
                    help=('use GPS exif data, if available, to match each '
                          'image only with its k-nearest neighbors, or all '
                          'images within a certain distance threshold'))

parser.add_argument('--matcher-kDistance',
                    metavar='<integer>',
                    default=20,
                    type=int,
                    help='')

parser.add_argument('--cmvs-maxImages',
                    metavar='<integer>',
                    default=500,
                    type=int,
                    help='The maximum number of images per cluster')

parser.add_argument('--pmvs-level',
                    metavar='<positive integer>',
                    default=1,
                    type=int,
                    help=('The level in the image pyramid that is used '
                          'for the computation. see '
                          'http://www.di.ens.fr/pmvs/documentation.html for '
                          'more pmvs documentation'))

parser.add_argument('--pmvs-csize',
                    metavar='< positive integer>',
                    default=2,
                    type=int,
                    help='Cell size controls the density of reconstructions')

parser.add_argument('--pmvs-threshold',
                    metavar='<float: -1.0 <= x <= 1.0>',
                    default=0.7,
                    type=float,
                    help=('A patch reconstruction is accepted as a success '
                          'and kept, if its associcated photometric consistency '
                          'measure is above this threshold.'))

parser.add_argument('--pmvs-wsize',
                    metavar='<positive integer>',
                    default=7,
                    type=int,
                    help=('pmvs samples wsize x wsize pixel colors from '
                          'each image to compute photometric consistency '
                          'score. For example, when wsize=7, 7x7=49 pixel '
                          'colors are sampled in each image. Increasing the '
                          'value leads to more stable reconstructions, but '
                          'the program becomes slower.'))

parser.add_argument('--pmvs-minImageNum',
                    metavar='<positive integer>',
                    default=3,
                    type=int,
                    help=('Each 3D point must be visible in at least '
                          'minImageNum images for being reconstructed. 3 is '
                          'suggested in general.'))

parser.add_argument('--odm_meshing-maxVertexCount',
                    metavar='<positive integer>',
                    default=100000,
                    type=int,
                    help='The maximum vertex count of the output mesh')

parser.add_argument('--odm_meshing-octreeDepth',
                    metavar='<positive integer>',
                    default=9,
                    type=int,
                    help=('Oct-tree depth used in the mesh reconstruction, '
                          'increase to get more vertices, recommended '
                          'values are 8-12'))

parser.add_argument('--odm_meshing-samplesPerNode',
                    metavar='<float >= 1.0>',
                    default=1,
                    type=float,
                    help=('Number of points per octree node, recommended '
                          'value: 1.0'))

parser.add_argument('--odm_meshing-solverDivide',
                    metavar='<positive integer>',
                    default=9,
                    type=int,
                    help=('Oct-tree depth at which the Laplacian equation '
                          'is solved in the surface reconstruction step. '
                          'Increasing this value increases computation '
                          'times slightly but helps reduce memory usage.'))

parser.add_argument('--odm_texturing-textureResolution',
                    metavar='<positive integer>',
                    default=4096,
                    type=int,
                    help=('The resolution of the output textures. Must be '
                          'greater than textureWithSize.'))

parser.add_argument('--odm_texturing-textureWithSize',
                    metavar='<positive integer>',
                    default=3600,
                    type=int,
                    help=('The resolution to rescale the images performing '
                          'the texturing.'))

parser.add_argument('--odm_georeferencing-gcpFile',
                    metavar='<path string>',
                    default='gcp_list.txt',
                    help=('path to the file containing the ground control '
                          'points used for georeferencing.The file needs to '
                          'be on the following line format: \neasting '
                          'northing height pixelrow pixelcol imagename'))

# NOTE(review): same type=bool caveat as --matcher-preselect.
parser.add_argument('--odm_georeferencing-useGcp',
                    type = bool,
                    default = False,
                    help = 'set to true for enabling GCPs from the file above')

parser.add_argument('--zip-results',
                    action='store_true',
                    default=False,
                    help='compress the results using gunzip')

args = parser.parse_args()

print "\n  - configuration:"

print vars(args)
# for key in vars(args):
#     if key != "":
#         print "    " + key + ": " + str(args[key])
# print "\n"
|
||||
|
||||
|
||||
def run(cmd):
    """Execute a shell command, aborting the whole pipeline on failure.

    On success returns None; on a non-zero status the process exits with a
    diagnostic message naming the failed command.
    """
    status = os.system(cmd)
    if status != 0:
        sys.exit("\nquitting cause: \n\t" + cmd + "\nreturned with code " +
                 str(status) + ".\n")
|
||||
|
||||
|
||||
def now():
    """Return the current local time formatted like the Unix `date` command."""
    current = datetime.datetime.now()
    return current.strftime('%a %b %d %H:%M:%S %Z %Y')
|
||||
|
||||
|
||||
def run_and_return(cmdSrc, cmdDest):
    """Run a command, optionally piped into a second one, and capture stdout.

    cmdSrc always runs; when cmdDest is truthy its stdin is fed from
    cmdSrc's stdout, mimicking `cmdSrc | cmdDest`. Returns the final
    stdout decoded as ASCII text.
    """
    producer = subprocess.Popen(shlex.split(cmdSrc), stdout=subprocess.PIPE)
    if not cmdDest:
        stdout, _ = producer.communicate()
    else:
        consumer = subprocess.Popen(shlex.split(cmdDest),
                                    stdin=producer.stdout,
                                    stdout=subprocess.PIPE)
        stdout, _ = consumer.communicate()

    return stdout.decode('ascii')
|
||||
|
||||
|
||||
def calculate_EPSG(utmZone, south):
    """Map a UTM zone number to its WGS84 EPSG code.

    Southern-hemisphere zones live in the 327xx range, northern ones in
    326xx.
    """
    base = 32700 if south else 32600
    return base + utmZone
|
||||
|
||||
|
||||
def parse_coordinate_system():
    """Populate jobOptions' coordinate-system fields from coordFile.txt.

    Line 0 of the coord file carries the UTM zone token (e.g. "... 33N"),
    line 1 the easting/northing offsets; anything after that is ignored.
    Silently does nothing when the file is absent.
    """
    coord_path = jobOptions['jobDir'] + '/odm_georeferencing/coordFile.txt'
    if not os.path.isfile(coord_path):
        return
    with open(coord_path) as coord_file:
        for line_number, line in enumerate(coord_file):
            if line_number == 0:
                tokens = line.split(' ')
                if len(tokens) == 3:
                    # Zone token is e.g. "33N": digits then hemisphere letter.
                    zone = tokens[2][0:len(tokens[2]) - 2].strip()
                    south = (tokens[2][len(tokens[2]) - 2].strip() == 'S')
                    jobOptions['csString'] = ('+datum=WGS84 +proj=utm +zone=' +
                                              zone +
                                              (' +south' if south else ''))
                    jobOptions['epsg'] = calculate_EPSG(int(zone), south)
            elif line_number == 1:
                tokens = line.split(' ')
                if len(tokens) == 2:
                    jobOptions['utmEastOffset'] = int(tokens[0].strip())
                    jobOptions['utmNorthOffset'] = int(tokens[1].strip())
            else:
                break
|
||||
|
||||
|
||||
def prepare_objects():
|
||||
"""Prepare the jobOptions and fileObjects dicts"""
|
||||
source_files = run_and_return('ls -1', 'egrep "\.[jJ]{1}[pP]{1}[eE]{0,1}[gG]{1}"')
|
||||
|
||||
print "\n - source files - " + now()
|
||||
|
||||
for filename in source_files.split('\n'):
|
||||
filename = filename.rstrip('\n')
|
||||
if not filename:
|
||||
continue
|
||||
file_make = run_and_return('jhead "' + filename + '"', 'grep "Camera make"')
|
||||
file_model = run_and_return('jhead "' + filename + '"', 'grep "Camera model"')
|
||||
file_focal = run_and_return('jhead "' + filename + '"', 'grep "Focal length"')
|
||||
file_ccd = run_and_return('jhead "' + filename + '"', 'grep "CCD width"')
|
||||
file_resolution = run_and_return('jhead "' + filename + '"', 'grep "Resolution"')
|
||||
|
||||
fileObject = {}
|
||||
|
||||
fileObject["src"] = filename
|
||||
fileObject["base"] = re.sub("\.[^\.]*$", "", filename)
|
||||
|
||||
match = re.search(": ([^\n\r]*)", file_make)
|
||||
if match:
|
||||
fileObject["make"] = match.group(1).strip()
|
||||
|
||||
match = re.search(": ([^\n\r]*)", file_model)
|
||||
if match:
|
||||
fileObject["model"] = match.group(1).strip()
|
||||
|
||||
if "make" in fileObject:
|
||||
fileObject["make"] = re.sub("^\s+", "", fileObject["make"])
|
||||
fileObject["make"] = re.sub("\s+$", "", fileObject["make"])
|
||||
|
||||
if "model" in fileObject:
|
||||
fileObject["model"] = re.sub("^\s+", "", fileObject["model"])
|
||||
fileObject["model"] = re.sub("\s+$", "", fileObject["model"])
|
||||
|
||||
if "make" in fileObject:
|
||||
fileObject["id"] = fileObject["make"]
|
||||
if "model" in fileObject:
|
||||
fileObject["id"] += " " + fileObject["model"]
|
||||
|
||||
match = re.search(": ([0-9]*) x ([0-9]*)", file_resolution)
|
||||
if match:
|
||||
fileObject["width"] = int(match.group(1).strip())
|
||||
fileObject["height"] = int(match.group(2).strip())
|
||||
|
||||
if '--force-focal' not in args:
|
||||
match = re.search(":[\ ]*([0-9\.]*)mm", file_focal)
|
||||
if match:
|
||||
fileObject["focal"] = float((match.group()[1:-2]).strip())
|
||||
else:
|
||||
fileObject["focal"] = args.force_focal
|
||||
|
||||
if '--force-ccd' not in args:
|
||||
match = re.search(":[\ ]*([0-9\.]*)mm", file_ccd)
|
||||
if match:
|
||||
fileObject["ccd"] = float(match.group()[1:-2].strip())
|
||||
|
||||
if ("ccd" not in fileObject) and ("id" in fileObject):
|
||||
fileObject["ccd"] = float(ccdWidths[fileObject["id"]])
|
||||
else:
|
||||
fileObject["ccd"] = args.force_ccd
|
||||
|
||||
if "ccd" in fileObject and "focal" in fileObject and "width" in fileObject and "height" in fileObject:
|
||||
if fileObject["width"] > fileObject["height"]:
|
||||
fileObject["focalpx"] = fileObject["width"] * fileObject["focal"] / fileObject["ccd"]
|
||||
else:
|
||||
fileObject["focalpx"] = fileObject["height"] * fileObject["focal"] / fileObject["ccd"]
|
||||
|
||||
fileObject["isOk"] = True
|
||||
objectStats["good"] += 1
|
||||
|
||||
print " using " + fileObject["src"] + " dimensions: " + str(fileObject["width"]) + "x" + str(fileObject["height"]) + " / focal: " + str(fileObject["focal"]) + "mm / ccd: " + str(fileObject["ccd"]) + "mm"
|
||||
else:
|
||||
fileObject["isOk"] = False
|
||||
objectStats["bad"] += 1
|
||||
|
||||
if "id" in fileObject:
|
||||
print "\n no CCD width or focal length found for " + fileObject["src"] + " - camera: \"" + fileObject["id"] + "\""
|
||||
else:
|
||||
print "\n no CCD width or focal length found"
|
||||
|
||||
objectStats["count"] += 1
|
||||
|
||||
if "width" in fileObject and "height" in fileObject:
|
||||
if objectStats["minWidth"] == 0:
|
||||
objectStats["minWidth"] = fileObject["width"]
|
||||
if objectStats["minHeight"] == 0:
|
||||
objectStats["minHeight"] = fileObject["height"]
|
||||
|
||||
if objectStats["minWidth"] < fileObject["width"]:
|
||||
objectStats["minWidth"] = objectStats["minWidth"]
|
||||
else:
|
||||
objectStats["minWidth"] = fileObject["width"]
|
||||
|
||||
if objectStats["minHeight"] < fileObject["height"]:
|
||||
objectStats["minHeight"] = objectStats["minHeight"]
|
||||
else:
|
||||
objectStats["minHeight"] = fileObject["height"]
|
||||
|
||||
if objectStats["maxWidth"] > fileObject["width"]:
|
||||
objectStats["maxWidth"] = objectStats["maxWidth"]
|
||||
else:
|
||||
objectStats["maxWidth"] = fileObject["width"]
|
||||
|
||||
if objectStats["maxHeight"] > fileObject["height"]:
|
||||
objectStats["maxHeight"] = objectStats["maxHeight"]
|
||||
else:
|
||||
objectStats["maxHeight"] = fileObject["height"]
|
||||
|
||||
objects.append(fileObject)
|
||||
|
||||
if "good" not in objectStats:
|
||||
print "\n found no usable images - quitting\n"
|
||||
sys.exit()
|
||||
else:
|
||||
print "\n found " + str(objectStats["good"]) + " usable images"
|
||||
|
||||
print "\n"
|
||||
|
||||
jobOptions["resizeTo"] = args.resize_to
|
||||
|
||||
print " using max image size of " + str(jobOptions["resizeTo"]) + " x " + str(jobOptions["resizeTo"])
|
||||
|
||||
jobOptions["jobDir"] = jobOptions["srcDir"] + "/reconstruction-with-image-size-" + str(jobOptions["resizeTo"])
|
||||
|
||||
jobOptions["step_1_convert"] = jobOptions["jobDir"] + "/_convert.templist.txt"
|
||||
jobOptions["step_1_vlsift"] = jobOptions["jobDir"] + "/_vlsift.templist.txt"
|
||||
jobOptions["step_1_gzip"] = jobOptions["jobDir"] + "/_gzip.templist.txt"
|
||||
|
||||
jobOptions["step_2_filelist"] = jobOptions["jobDir"] + "/_filelist.templist.txt"
|
||||
jobOptions["step_2_macthes_jobs"] = jobOptions["jobDir"] + "/_matches_jobs.templist.txt"
|
||||
jobOptions["step_2_matches_dir"] = jobOptions["jobDir"] + "/matches"
|
||||
jobOptions["step_2_matches"] = jobOptions["jobDir"] + "/matches.init.txt"
|
||||
|
||||
jobOptions["step_3_filelist"] = jobOptions["jobDir"] + "/list.txt"
|
||||
jobOptions["step_3_bundlerOptions"] = jobOptions["jobDir"] + "/options.txt"
|
||||
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"])
|
||||
except:
|
||||
pass
|
||||
|
||||
for fileObject in objects:
|
||||
if fileObject["isOk"]:
|
||||
fileObject["step_0_resizedImage"] = jobOptions["jobDir"] + "/" + fileObject["base"] + ".jpg"
|
||||
fileObject["step_1_pgmFile"] = jobOptions["jobDir"] + "/" + fileObject["base"] + ".pgm"
|
||||
fileObject["step_1_keyFile"] = jobOptions["jobDir"] + "/" + fileObject["base"] + ".key"
|
||||
fileObject["step_1_gzFile"] = jobOptions["jobDir"] + "/" + fileObject["base"] + ".key.gz"
|
||||
|
||||
|
||||
def resize():
|
||||
"""Resize images"""
|
||||
print "\n - preparing images - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
for fileObject in objects:
|
||||
if fileObject["isOk"]:
|
||||
if not os.path.isfile(fileObject["step_0_resizedImage"]):
|
||||
if jobOptions["resizeTo"] != 0 and ((int(fileObject["width"]) > jobOptions["resizeTo"]) or (fileObject["height"] > jobOptions["resizeTo"])):
|
||||
sys.stdout.write(" resizing " + fileObject["src"] + " \tto " + fileObject["step_0_resizedImage"])
|
||||
run("convert -resize " + str(jobOptions["resizeTo"]) + "x" + str(jobOptions["resizeTo"]) + " -quality 100 \"" + jobOptions["srcDir"] + "/" + fileObject["src"] + "\" \"" + fileObject["step_0_resizedImage"] + "\"")
|
||||
else:
|
||||
sys.stdout.write(" copying " + fileObject["src"] + " \tto " + fileObject["step_0_resizedImage"])
|
||||
shutil.copyfile(CURRENT_DIR + "/" + fileObject["src"], fileObject["step_0_resizedImage"])
|
||||
else:
|
||||
print " using existing " + fileObject["src"] + " \tto " + fileObject["step_0_resizedImage"]
|
||||
|
||||
file_resolution = run_and_return('jhead "' + fileObject["step_0_resizedImage"] + '"', 'grep "Resolution"')
|
||||
match = re.search(": ([0-9]*) x ([0-9]*)", file_resolution)
|
||||
if match:
|
||||
fileObject["width"] = int(match.group(1).strip())
|
||||
fileObject["height"] = int(match.group(2).strip())
|
||||
print "\t (" + str(fileObject["width"]) + " x " + str(fileObject["height"]) + ")"
|
||||
|
||||
if args.end_with != "resize":
|
||||
getKeypoints()
|
||||
|
||||
|
||||
def getKeypoints():
|
||||
"""Run vlsift to create keypoint files for each image"""
|
||||
print "\n - finding keypoints - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
vlsiftJobs = ""
|
||||
c = 0
|
||||
|
||||
for fileObject in objects:
|
||||
c += 1
|
||||
|
||||
if fileObject["isOk"]:
|
||||
if '--lowe-sift' in args:
|
||||
vlsiftJobs += "echo -n \"" + str(c) + "/" + str(objectStats["good"]) + " - \" && convert -format pgm \"" + fileObject["step_0_resizedImage"] + "\" \"" + fileObject["step_1_pgmFile"] + "\""
|
||||
vlsiftJobs += " && \"" + BIN_PATH + "/sift\" < \"" + fileObject["step_1_pgmFile"] + "\" > \"" + fileObject["step_1_keyFile"] + "\""
|
||||
vlsiftJobs += " && gzip -f \"" + fileObject["step_1_keyFile"] + "\""
|
||||
vlsiftJobs += " && rm -f \"" + fileObject["step_1_pgmFile"] + "\""
|
||||
vlsiftJobs += " && rm -f \"" + fileObject["step_1_keyFile"] + ".sift\"\n"
|
||||
else:
|
||||
if not os.path.isfile(jobOptions["jobDir"] + "/" + fileObject["base"] + ".key.bin"):
|
||||
vlsiftJobs += "echo -n \"" + str(c) + "/" + str(objectStats["good"]) + " - \" && convert -format pgm \"" + fileObject["step_0_resizedImage"] + "\" \"" + fileObject["step_1_pgmFile"] + "\""
|
||||
vlsiftJobs += " && \"" + BIN_PATH + "/vlsift\" \"" + fileObject["step_1_pgmFile"] + "\" -o \"" + fileObject["step_1_keyFile"] + ".sift\" > /dev/null && perl \"" + BIN_PATH + "/../convert_vlsift_to_lowesift.pl\" \"" + jobOptions["jobDir"] + "/" + fileObject["base"] + "\""
|
||||
vlsiftJobs += " && gzip -f \"" + fileObject["step_1_keyFile"] + "\""
|
||||
vlsiftJobs += " && rm -f \"" + fileObject["step_1_pgmFile"] + "\""
|
||||
vlsiftJobs += " && rm -f \"" + fileObject["step_1_keyFile"] + ".sift\"\n"
|
||||
else:
|
||||
print "using existing " + jobOptions["jobDir"] + "/" + fileObject["base"] + ".key.bin"
|
||||
|
||||
siftDest = open(jobOptions["step_1_vlsift"], 'w')
|
||||
siftDest.write(vlsiftJobs)
|
||||
siftDest.close()
|
||||
|
||||
run("\"" + BIN_PATH + "/parallel\" --no-notice --halt-on-error 1 -j+0 < \"" + jobOptions["step_1_vlsift"] + "\"")
|
||||
|
||||
if args.end_with != "getKeypoints":
|
||||
match()
|
||||
|
||||
|
||||
def match():
|
||||
"""Run matches on images"""
|
||||
print "\n - matching keypoints - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
try:
|
||||
os.mkdir(jobOptions["step_2_matches_dir"])
|
||||
except:
|
||||
pass
|
||||
|
||||
matchesJobs = ""
|
||||
c = 0
|
||||
t = (objectStats["good"] - 1) * (objectStats["good"] / 2) # BUG:unused
|
||||
|
||||
preselected_pairs = []
|
||||
|
||||
# Create a file list with all keypoint files
|
||||
filesList = ""
|
||||
for fileObject in objects:
|
||||
if fileObject["isOk"]:
|
||||
filesList += fileObject["step_1_keyFile"] + "\n"
|
||||
matchDest = open(jobOptions["step_2_filelist"], 'w')
|
||||
matchDest.write(filesList)
|
||||
matchDest.close()
|
||||
|
||||
# Check if preselection is to be run
|
||||
if args.matcher_preselect:
|
||||
useKnn = True
|
||||
if args.matcher_useKnn:
|
||||
useKnn = False # BUG: never used
|
||||
preselected_pairs = knnMatch_exif.preselect_pairs(BIN_PATH + "/odm_extract_utm", jobOptions["step_2_filelist"], args.matcher_kDistance, args.matcher_useKnn)
|
||||
if len(preselected_pairs) != 0:
|
||||
for i, j, in preselected_pairs:
|
||||
c += 1
|
||||
if i < 10:
|
||||
print i, j
|
||||
if not os.path.isfile(jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt"):
|
||||
matchesJobs += "echo -n \".\" && touch \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" && \"" + BIN_PATH + "/KeyMatch\" \"" + objects[i]["step_1_keyFile"] + "\" \"" + objects[j]["step_1_keyFile"] + "\" \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" " + str(args.matcher_ratio) + " " + str(args.matcher_threshold) + "\n"
|
||||
|
||||
|
||||
# Match all image pairs
|
||||
else:
|
||||
if args.matcher_preselect == "true":
|
||||
print "Failed to run pair preselection, proceeding with exhaustive matching."
|
||||
for i in range(0, objectStats["good"]):
|
||||
for j in range(i + 1, objectStats["good"]):
|
||||
c += 1
|
||||
if not os.path.isfile(jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt"):
|
||||
matchesJobs += "echo -n \".\" && touch \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" && \"" + BIN_PATH + "/KeyMatch\" \"" + objects[i]["step_1_keyFile"] + "\" \"" + objects[j]["step_1_keyFile"] + "\" \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" " + str(args.matcher_ratio) + " " + str(args.matcher_threshold) + "\n"
|
||||
|
||||
matchDest = open(jobOptions["step_2_macthes_jobs"], 'w')
|
||||
matchDest.write(matchesJobs)
|
||||
matchDest.close()
|
||||
|
||||
run("\"" + BIN_PATH + "/parallel\" --no-notice --halt-on-error 1 -j+0 < \"" + jobOptions["step_2_macthes_jobs"] + "\"")
|
||||
run("rm -f \"" + jobOptions["step_2_matches"] + "\"")
|
||||
|
||||
for i in range(0, objectStats["good"]):
|
||||
for j in range(i + 1, objectStats["good"]):
|
||||
c += 1
|
||||
if os.path.isfile(jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt") and os.path.getsize(jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt") > 0:
|
||||
run("echo \"" + str(i) + " " + str(j) + "\" >> \"" + jobOptions["step_2_matches"] + "\" && cat \"" + jobOptions["step_2_matches_dir"] + "/" + str(i) + "-" + str(j) + ".txt\" >> \"" + jobOptions["step_2_matches"] + "\"")
|
||||
|
||||
filesList = ""
|
||||
for fileObject in objects:
|
||||
if fileObject["isOk"]:
|
||||
filesList += fileObject["step_1_keyFile"] + "\n"
|
||||
|
||||
matchDest = open(jobOptions["step_2_filelist"], 'w')
|
||||
matchDest.write(filesList)
|
||||
matchDest.close()
|
||||
|
||||
# run("\"" + BIN_PATH + "/KeyMatchFull\" \"" + jobOptions["step_2_filelist"] + "\" \"" + jobOptions["step_2_matches"] + "\" ")
|
||||
|
||||
if args.end_with != "match":
|
||||
bundler()
|
||||
|
||||
|
||||
def bundler():
|
||||
"""Run bundler and prepare bundle for PMVS"""
|
||||
print "\n - running bundler - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/bundle")
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/pmvs")
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/pmvs/txt")
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/pmvs/visualize")
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/pmvs/models")
|
||||
except:
|
||||
pass
|
||||
|
||||
filesList = ""
|
||||
|
||||
for fileObject in objects:
|
||||
if fileObject["isOk"]:
|
||||
filesList += "./" + fileObject["base"] + ".jpg 0 {:.5f}\n".format(fileObject["focalpx"])
|
||||
|
||||
filesList = filesList.rstrip('\n')
|
||||
|
||||
bundlerOptions = "--match_table matches.init.txt\n"
|
||||
bundlerOptions += "--output bundle.out\n"
|
||||
bundlerOptions += "--output_all bundle_\n"
|
||||
bundlerOptions += "--output_dir bundle\n"
|
||||
bundlerOptions += "--variable_focal_length\n"
|
||||
bundlerOptions += "--use_focal_estimate\n"
|
||||
bundlerOptions += "--constrain_focal\n"
|
||||
bundlerOptions += "--constrain_focal_weight 0.0\n"
|
||||
bundlerOptions += "--estimate_distortion\n"
|
||||
bundlerOptions += "--run_bundle"
|
||||
|
||||
run("echo \"" + bundlerOptions + "\" > \"" + jobOptions["step_3_bundlerOptions"] + "\"")
|
||||
|
||||
bundlerDest = open(jobOptions["step_3_filelist"], 'w')
|
||||
bundlerDest.write(filesList)
|
||||
bundlerDest.close()
|
||||
|
||||
run("\"" + BIN_PATH + "/bundler\" \"" + jobOptions["step_3_filelist"] + "\" --options_file \"" + jobOptions["step_3_bundlerOptions"] + "\" > bundle/out")
|
||||
run("\"" + BIN_PATH + "/Bundle2PMVS\" \"" + jobOptions["step_3_filelist"] + "\" bundle/bundle.out")
|
||||
run("\"" + BIN_PATH + "/RadialUndistort\" \"" + jobOptions["step_3_filelist"] + "\" bundle/bundle.out pmvs")
|
||||
|
||||
i = 0
|
||||
for fileObject in objects:
|
||||
if fileObject["isOk"]:
|
||||
if os.path.isfile("pmvs/" + fileObject["base"] + ".rd.jpg"):
|
||||
nr = "{0:08d}".format(i)
|
||||
i += 1
|
||||
|
||||
run("mv pmvs/" + fileObject["base"] + ".rd.jpg pmvs/visualize/" + str(nr) + ".jpg")
|
||||
run("mv pmvs/" + str(nr) + ".txt pmvs/txt/" + str(nr) + ".txt")
|
||||
|
||||
run("\"" + BIN_PATH + "/Bundle2Vis\" pmvs/bundle.rd.out pmvs/vis.dat")
|
||||
|
||||
if args.end_with != "bundler":
|
||||
cmvs()
|
||||
|
||||
|
||||
def cmvs():
|
||||
"""Run CMVS"""
|
||||
print "\n - running cmvs - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
run("\"" + BIN_PATH + "/cmvs\" pmvs/ " + str(args.cmvs_maxImages) + " " + str(CORES))
|
||||
run("\"" + BIN_PATH + "/genOption\" pmvs/ " + str(args.pmvs_level) + " " + str(args.pmvs_csize) + " " + str(args.pmvs_threshold) + " " + str(args.pmvs_wsize) + " " + str(args.pmvs_minImageNum) + " " + str(CORES))
|
||||
|
||||
if args.end_with != "cmvs":
|
||||
pmvs()
|
||||
|
||||
|
||||
def pmvs():
|
||||
"""Run PMVS"""
|
||||
print "\n - running pmvs - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
run("\"" + BIN_PATH + "/pmvs2\" pmvs/ option-0000")
|
||||
|
||||
run("cp -Rf \"" + jobOptions["jobDir"] + "/pmvs/models\" \"" + jobOptions["jobDir"] + "-results\"")
|
||||
|
||||
if args.end_with != "pmvs":
|
||||
odm_meshing()
|
||||
|
||||
|
||||
def odm_meshing():
|
||||
"""Run odm_meshing"""
|
||||
print "\n - running meshing - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/odm_meshing")
|
||||
except:
|
||||
pass
|
||||
|
||||
run("\"" + BIN_PATH + "/odm_meshing\" -inputFile " + jobOptions["jobDir"] + "-results/option-0000.ply -outputFile " + jobOptions["jobDir"] + "-results/odm_mesh-0000.ply -logFile " + jobOptions["jobDir"] + "/odm_meshing/odm_meshing_log.txt -maxVertexCount " + str(args.odm_meshing_maxVertexCount) + " -octreeDepth " + str(args.odm_meshing_octreeDepth) + " -samplesPerNode " + str(args.odm_meshing_samplesPerNode) + " -solverDivide " + str(args.odm_meshing_solverDivide))
|
||||
|
||||
if args.end_with != "odm_meshing":
|
||||
odm_texturing()
|
||||
|
||||
|
||||
def odm_texturing():
|
||||
"""Run odm_texturing"""
|
||||
print "\n - running texturing - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/odm_texturing")
|
||||
os.mkdir(jobOptions["jobDir"] + "-results/odm_texturing")
|
||||
except:
|
||||
pass
|
||||
|
||||
run("\"" + BIN_PATH + "/odm_texturing\" -bundleFile " + jobOptions["jobDir"] + "/pmvs/bundle.rd.out -imagesPath " + jobOptions["srcDir"] + "/ -imagesListPath " + jobOptions["jobDir"] + "/pmvs/list.rd.txt -inputModelPath " + jobOptions["jobDir"] + "-results/odm_mesh-0000.ply -outputFolder " + jobOptions["jobDir"] + "-results/odm_texturing/ -textureResolution " + str(args.odm_texturing_textureResolution) + " -bundleResizedTo " + str(jobOptions["resizeTo"]) + " -textureWithSize " + str(args.odm_texturing_textureWithSize) + " -logFile " + jobOptions["jobDir"] + "/odm_texturing/odm_texturing_log.txt")
|
||||
|
||||
if args.end_with != "odm_texturing":
|
||||
odm_georeferencing()
|
||||
|
||||
|
||||
def odm_georeferencing():
|
||||
"""Run odm_georeferencing"""
|
||||
print "\n - running georeferencing - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/odm_georeferencing")
|
||||
except:
|
||||
pass
|
||||
|
||||
if not args.odm_georeferencing_useGcp:
|
||||
run("\"" + BIN_PATH + "/odm_extract_utm\" -imagesPath " + jobOptions["srcDir"] + "/ -imageListFile " + jobOptions["jobDir"] + "/pmvs/list.rd.txt -outputCoordFile " + jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt")
|
||||
run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] + "/pmvs/bundle.rd.out -inputCoordFile " + jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt -inputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model.obj -outputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -inputPointCloudFile " + jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] + "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] + "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")
|
||||
elif os.path.isfile(jobOptions["srcDir"] + "/" + args.odm_georeferencing_gcpFile):
|
||||
run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] + "/pmvs/bundle.rd.out -gcpFile " + jobOptions["srcDir"] + "/" + args.odm_georeferencing_gcpFile + " -imagesPath " + jobOptions["srcDir"] + "/ -imagesListPath " + jobOptions["jobDir"] + "/pmvs/list.rd.txt -bundleResizedTo " + str(jobOptions["resizeTo"]) + " -inputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model.obj -outputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -outputCoordFile " + jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt -inputPointCloudFile " + jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] + "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] + "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")
|
||||
else:
|
||||
print "Warning: No GCP file. Consider rerunning with argument --odm_georeferencing-useGcp false --start-with odm_georeferencing"
|
||||
print "Skipping orthophoto"
|
||||
args.end_with = "odm_georeferencing"
|
||||
|
||||
if "csString" not in jobOptions:
|
||||
parse_coordinate_system()
|
||||
|
||||
if "csString" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
|
||||
images = []
|
||||
with open(jobOptions["jobDir"] + "/pmvs/list.rd.txt") as f:
|
||||
images = f.readlines()
|
||||
|
||||
if len(images) > 0:
|
||||
with open(jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt") as f:
|
||||
for lineNumber, line in enumerate(f):
|
||||
if lineNumber >= 2 and lineNumber - 2 < len(images):
|
||||
tokens = line.split(' ')
|
||||
if len(tokens) >= 3:
|
||||
x = float(tokens[0])
|
||||
y = float(tokens[1])
|
||||
z = float(tokens[2])
|
||||
filename = images[lineNumber - 2]
|
||||
run("echo " + str(x + jobOptions["utmEastOffset"]) + " " + str(y + jobOptions["utmNorthOffset"]) + " " + str(z) + " | cs2cs " + jobOptions["csString"] + " +to +datum=WGS84 +proj=latlong > " + jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt")
|
||||
with open(jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt") as latlongFile:
|
||||
latlongLine = latlongFile.readline()
|
||||
tokens = latlongLine.split()
|
||||
if len(tokens) >= 2:
|
||||
exifGpsInfoWritten = False
|
||||
|
||||
lonString = tokens[0] # Example: 83d18'16.285"W
|
||||
latString = tokens[1] # Example: 41d2'11.789"N
|
||||
altString = ""
|
||||
if len(tokens) > 2:
|
||||
altString = tokens[2] # Example: 0.998
|
||||
|
||||
tokens = re.split("[d '\"]+", lonString)
|
||||
if len(tokens) >= 4:
|
||||
lonDeg = tokens[0]
|
||||
lonMin = tokens[1]
|
||||
lonSec = tokens[2]
|
||||
lonSecFrac = fractions.Fraction(lonSec)
|
||||
lonSecNumerator = str(lonSecFrac._numerator)
|
||||
lonSecDenominator = str(lonSecFrac._denominator)
|
||||
lonRef = tokens[3]
|
||||
|
||||
tokens = re.split("[d '\"]+", latString)
|
||||
if len(tokens) >= 4:
|
||||
latDeg = tokens[0]
|
||||
latMin = tokens[1]
|
||||
latSec = tokens[2]
|
||||
latSecFrac = fractions.Fraction(latSec)
|
||||
latSecNumerator = str(latSecFrac._numerator)
|
||||
latSecDenominator = str(latSecFrac._denominator)
|
||||
latRef = tokens[3]
|
||||
|
||||
exivCmd = "exiv2 -q"
|
||||
exivCmd += " -M\"set Exif.GPSInfo.GPSLatitude " + latDeg + "/1 " + latMin + "/1 " + latSecNumerator + "/" + latSecDenominator + "\""
|
||||
exivCmd += " -M\"set Exif.GPSInfo.GPSLatitudeRef " + latRef + "\""
|
||||
exivCmd += " -M\"set Exif.GPSInfo.GPSLongitude " + lonDeg + "/1 " + lonMin + "/1 " + lonSecNumerator + "/" + lonSecDenominator + "\""
|
||||
exivCmd += " -M\"set Exif.GPSInfo.GPSLongitudeRef " + lonRef + "\""
|
||||
|
||||
altNumerator = arcDenominator = 0 # BUG: arcDenominator is never used
|
||||
if altString:
|
||||
altFrac = fractions.Fraction(altString)
|
||||
altNumerator = str(altFrac._numerator)
|
||||
altDenominator = str(altFrac._denominator)
|
||||
exivCmd += " -M\"set Exif.GPSInfo.GPSAltitude " + altNumerator + "/" + altDenominator + "\""
|
||||
exivCmd += " -M\"set Exif.GPSInfo.GPSAltitudeRef 0\""
|
||||
|
||||
exivCmd += " " + filename
|
||||
run(exivCmd)
|
||||
exifGpsInfoWritten = True
|
||||
|
||||
if not exifGpsInfoWritten:
|
||||
print(" Warning: Failed setting EXIF GPS info for " + filename + " based on " + latlongLine)
|
||||
|
||||
if "epsg" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
|
||||
lasCmd = "\"" + BIN_PATH + "/txt2las\" -i " + jobOptions["jobDir"] + "-results/option-0000_georef.ply -o " + jobOptions["jobDir"] + "-results/pointcloud_georef.laz -skip 30 -parse xyzRGBssss -set_scale 0.01 0.01 0.01 -set_offset " + str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) + " 0 -translate_xyz " + str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) + " 0 -epsg " + str(jobOptions["epsg"])
|
||||
print(" Creating geo-referenced LAS file (expecting warning)...")
|
||||
run(lasCmd)
|
||||
|
||||
if args.end_with != "odm_georeferencing":
|
||||
odm_orthophoto()
|
||||
|
||||
|
||||
def odm_orthophoto():
|
||||
"""Run odm_orthophoto"""
|
||||
print "\n - running orthophoto generation - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/odm_orthophoto")
|
||||
except:
|
||||
pass
|
||||
|
||||
run("\"" + BIN_PATH + "/odm_orthophoto\" -inputFile " + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -logFile " + jobOptions["jobDir"] + "/odm_orthophoto/odm_orthophoto_log.txt -outputFile " + jobOptions["jobDir"] + "-results/odm_orthphoto.png -resolution 20.0 -outputCornerFile " + jobOptions["jobDir"] + "/odm_orthphoto_corners.txt")
|
||||
|
||||
if "csString" not in jobOptions:
|
||||
parse_coordinate_system()
|
||||
|
||||
geoTiffCreated = False
|
||||
if ("csString" in jobOptions and
|
||||
"utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions):
|
||||
ulx = uly = lrx = lry = 0.0
|
||||
with open(jobOptions["jobDir"] +
|
||||
"/odm_orthphoto_corners.txt") as f:
|
||||
for lineNumber, line in enumerate(f):
|
||||
if lineNumber == 0:
|
||||
tokens = line.split(' ')
|
||||
if len(tokens) == 4:
|
||||
ulx = float(tokens[0]) + \
|
||||
float(jobOptions["utmEastOffset"])
|
||||
lry = float(tokens[1]) + \
|
||||
float(jobOptions["utmNorthOffset"])
|
||||
lrx = float(tokens[2]) + \
|
||||
float(jobOptions["utmEastOffset"])
|
||||
uly = float(tokens[3]) + \
|
||||
float(jobOptions["utmNorthOffset"])
|
||||
|
||||
print(" Creating GeoTIFF...")
|
||||
sys.stdout.write(" ")
|
||||
run("gdal_translate -a_ullr " + str(ulx) + " " + str(uly) + " " +
|
||||
str(lrx) + " " + str(lry) + " -a_srs \"" + jobOptions["csString"] +
|
||||
"\" " + jobOptions["jobDir"] + "-results/odm_orthphoto.png " +
|
||||
jobOptions["jobDir"] + "-results/odm_orthphoto.tif")
|
||||
geoTiffCreated = True
|
||||
|
||||
if not geoTiffCreated:
|
||||
print(" Warning: No geo-referenced orthophoto created due to "
|
||||
"missing geo-referencing or corner coordinates.")
|
||||
|
||||
if __name__ == '__main__':
|
||||
prepare_objects()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
if args.start_with == "resize":
|
||||
resize()
|
||||
elif args.start_with == "getKeypoints":
|
||||
getKeypoints()
|
||||
elif args.start_with == "match":
|
||||
match()
|
||||
elif args.start_with == "bundler":
|
||||
bundler()
|
||||
elif args.start_with == "cmvs":
|
||||
cmvs()
|
||||
elif args.start_with == "pmvs":
|
||||
pmvs()
|
||||
elif args.start_with == "odm_meshing":
|
||||
odm_meshing()
|
||||
elif args.start_with == "odm_texturing":
|
||||
odm_texturing()
|
||||
elif args.start_with == "odm_georeferencing":
|
||||
odm_georeferencing()
|
||||
elif args.start_with == "odm_orthophoto":
|
||||
odm_orthophoto()
|
||||
|
||||
if args.zip_results:
|
||||
print "\nCompressing results - " + now()
|
||||
run("cd " + jobOptions["jobDir"] + "-results/ && tar -czf " +
|
||||
jobOptions["jobDir"] + "-results.tar.gz *")
|
||||
|
||||
print "\n - done - " + now()
|
|
@ -0,0 +1 @@
|
|||
Subproject commit b34cc5dcd54345272028b68f82fa4d0ae8a103c9
|
|
@ -0,0 +1 @@
|
|||
Subproject commit b0696f6269d12abc6ea63083d565fe2ec8172da9
|
Ładowanie…
Reference in New Issue