kopia lustrzana https://github.com/OpenDroneMap/ODM
rodzic
94d8a2121c
commit
349838d599
|
@ -0,0 +1,85 @@
|
|||
class ODMJob:
    '''ODMJob - a class for ODM Activities.

    Scans CURRENT_DIR for JPEG images, wraps each in an ODMPhoto, and holds
    the aggregate job state (counters, size extremes, UTM options).

    NOTE(review): recovered from a formatting-mangled diff view; indentation
    was reconstructed - verify nesting against upstream before relying on it.
    Relies on module globals CURRENT_DIR, now(), run(), runAndReturn(),
    ODMPhoto, os, sys, re, shutil - none of which are defined in this chunk.
    '''

    def __init__(self, inputDir, args):
        # Parsed CLI arguments and the directory holding the source JPEGs.
        self.args = args
        self.pathDirJPGs = inputDir

        # Aggregate photo counters, updated by each ODMPhoto as it is built.
        self.count = 0
        self.good = 0
        self.bad = 0
        # Size extremes across all photos. NOTE(review): minWidth starts as a
        # float (0.) while the others are ints - presumably unintentional.
        self.minWidth = 0.
        self.minHeight = 0
        self.maxWidth = 0
        self.maxHeight = 0

        # Job-level processing options; duplicated in jobOptions below.
        self.resizeTo = 0.
        self.srcDir = CURRENT_DIR
        self.utmZone = -999          # -999 = "not yet determined"
        self.utmSouth = False
        self.utmEastOffset = 0.
        self.utmNorthOffset = 0.

        # NOTE(review): jobOptions mirrors the attributes above but is a
        # separate dict - the two are not kept in sync automatically.
        self.jobOptions = {'resizeTo': 0, 'srcDir': CURRENT_DIR, 'utmZone': -999,
                           'utmSouth': False, 'utmEastOffset': 0, 'utmNorthOffset': 0}

        self.dictJobLocations = {}  # hold our filepaths and dirs

        self.listFiles = os.listdir(CURRENT_DIR)
        self.listJPG = []           # bare filenames of the JPEGs found
        self.listObjPhotos = []     # ODMPhoto instances, one per JPEG

        # create obj.listJPG of all jpegs (matches *.jpg and *.jpeg,
        # case-insensitively, via substring test on the extension)
        for files in self.listFiles:
            (pathfn, ext) = os.path.splitext(files)
            if 'jpg' in ext.lower():
                self.listJPG.append(files)
            elif 'jpeg' in ext.lower():
                self.listJPG.append(files)

        print "\n - source files - " + now()

        # Build an ODMPhoto (EXIF parse) for every discovered JPEG.
        for filename in self.listJPG:
            filename = filename.rstrip('\n')
            if not filename:
                continue
            filename = CURRENT_DIR + os.sep + filename
            print filename
            self.listObjPhotos.append(ODMPhoto(filename, self))

    def resize(self):
        '''Resize (or copy) every usable photo into its per-step work file,
        then re-read the resulting image resolution via jhead.

        NOTE(review): self.jobDir and each photo's dictStepVals entries are
        populated elsewhere (presumably prepare_objects) - confirm.
        '''
        print "\n - preparing images - " + now()

        os.chdir(self.jobDir)

        for objPhotos in self.listObjPhotos:
            if objPhotos.isOk:
                # Skip work that a previous run already produced.
                if not os.path.isfile(objPhotos.dictStepVals["step_0_resizedImage"]):
                    # Only shrink when a target size is set and the photo
                    # exceeds it in either dimension.
                    if self.resizeTo != 0 and \
                            ((int(objPhotos.width) > self.resizeTo) or (objPhotos.height > self.resizeTo)):
                        sys.stdout.write(" resizing " + objPhotos.strFileName + " \tto " \
                            + objPhotos.dictStepVals["step_0_resizedImage"])
                        # ImageMagick convert does the actual resampling.
                        run("convert -resize " + str(self.resizeTo) + "x" + str(self.resizeTo) \
                            + " -quality 100 \"" + self.srcDir + "/" + objPhotos.strFileName + "\" \"" \
                            + objPhotos.dictStepVals["step_0_resizedImage"] + "\"")
                    else:
                        # Small enough already - just copy it into place.
                        sys.stdout.write(" copying " + objPhotos.strFileName + " \tto " \
                            + objPhotos.dictStepVals["step_0_resizedImage"])
                        shutil.copyfile(CURRENT_DIR + "/" + objPhotos.strFileName, objPhotos.dictStepVals["step_0_resizedImage"])
                else:
                    print " using existing " + objPhotos.strFileName + " \tto " \
                        + objPhotos.dictStepVals["step_0_resizedImage"]

                # Read back the actual dimensions of the work file so the
                # photo object reflects the resized image, not the source.
                file_resolution = runAndReturn('jhead "' + objPhotos.dictStepVals["step_0_resizedImage"] \
                    + '"', 'grep "Resolution"')
                match = re.search(": ([0-9]*) x ([0-9]*)", file_resolution)
                if match:
                    objPhotos.width = int(match.group(1).strip())
                    objPhotos.height = int(match.group(2).strip())
                print "\t (" + str(objPhotos.width) + " x " + str(objPhotos.height) + ")"
|
||||
|
|
@ -0,0 +1,172 @@
|
|||
class ODMPhoto:
    """ODMPhoto - a class for ODMPhotos.

    Runs `jhead` on one JPEG, parses its EXIF key/value output onto
    attributes of this object, derives the focal length in pixels, and
    updates the owning ODMJob's good/bad counters and size extremes.

    NOTE(review): recovered from a formatting-mangled diff view; the nesting
    of everything below the key/value dispatch is best-effort reconstruction
    (the `lines` and `flagDoneList` references pin it inside the loop) -
    verify against upstream. Relies on module globals BIN_PATH, CURRENT_DIR,
    args, ccdWidths, extractFloat, remove_values_from_list, os, re,
    subprocess - none defined in this chunk.
    """

    def __init__(self, inputJPG, objODMJob):
        # general purpose
        verbose = False

        # object attributes
        self.dictStepVals = {}   # per-step file paths, filled in elsewhere
        self.pathToContainingFolder = os.path.split(inputJPG)[0]
        self.strFileName = os.path.split(inputJPG)[1]
        self.strFileNameBase = os.path.splitext(self.strFileName)[0]
        self.strFileNameExt = os.path.splitext(self.strFileName)[1]

        # start pipe for jhead
        # NOTE(review): cmdSrc is a single string passed to Popen without
        # shell=True; on POSIX Popen expects an argv list here - confirm
        # this actually launches (compare run_and_return, which uses shlex).
        cmdSrc = BIN_PATH + os.sep + "jhead " + CURRENT_DIR + os.sep + self.strFileName
        srcProcess = subprocess.Popen(cmdSrc, stdout=subprocess.PIPE)

        stdout, stderr = srcProcess.communicate()
        stringOutput = stdout.decode('ascii')

        # listOutput is the list of params to be processed (blank lines removed)
        listOutput_ori = stringOutput.splitlines()
        listOutput = remove_values_from_list(listOutput_ori, u"")

        intListCount = 0
        intNumCameraAtts = len(listOutput)

        flagDoneList = False

        if verbose: print listOutput

        for lines in listOutput:
            # check if we've read all atts
            intListCount += 1
            if intListCount == intNumCameraAtts: flagDoneList = True

            # extract and proceed: split "Key : Value" at the first colon
            firstColon = lines.find(":")
            tempKey = lines[:firstColon].strip()
            tempVal = lines[firstColon+1:].strip()

            if verbose: print tempKey, tempVal

            # all them values
            if tempKey == 'File name': self.fileName = tempVal
            elif tempKey == 'File size': self.fileSize = tempVal
            elif tempKey == 'File date': self.fileDate = tempVal
            elif tempKey == 'Camera make': self.cameraMake = tempVal
            elif tempKey == 'Camera model': self.cameraModel = tempVal
            elif tempKey == 'Date/Time': self.dateTime = tempVal
            elif tempKey == 'Resolution': self.resolution = tempVal
            elif tempKey == 'Flash used': self.flashUsed = tempVal
            elif tempKey == 'Focal length': self.focalLength = tempVal
            elif tempKey == 'CCD width': self.ccdWidth = tempVal
            elif tempKey == 'Exposure time': self.exposureTime = tempVal
            elif tempKey == 'Aperture': self.aperture = tempVal
            elif tempKey == 'Focus dist.': self.focusDist = tempVal
            elif tempKey == 'ISO equiv.': self.isoEquiv = tempVal
            elif tempKey == 'Whitebalance': self.whitebalance = tempVal
            elif tempKey == 'Metering Mode': self.meteringMode = tempVal
            elif tempKey == 'GPS Latitude': self.gpsLatitude = tempVal
            elif tempKey == 'GPS Longitude': self.gpsLongitude = tempVal
            elif tempKey == 'GPS Altitude': self.gpsAltitude = tempVal
            elif tempKey == 'JPEG Quality': self.jpgQuality = tempVal

            # better object attribute names; keep old for compatability
            # shallow references point to same stack space
            self.fullPathAndName = self.fileName

            # attribute 'id' set to more specific of the maker or model
            # (bare except: attribute may not have been parsed yet)
            try:
                if self.cameraMake:
                    self.make = self.cameraMake
                    self.id = self.cameraMake
            except: pass

            try:
                if self.cameraModel:
                    self.model = self.cameraModel
                    self.id = self.cameraModel
            except: pass

            # parse resolution field, e.g. "4000 x 3000"
            try:
                match = re.search("([0-9]*) x ([0-9]*)", self.resolution)
                if match:
                    self.width = int(match.group(1).strip())
                    self.height = int(match.group(2).strip())
            except: pass

            # parse force-focal: use the EXIF focal length unless the user
            # forced one on the command line
            try:
                if not '--force-focal' in args:
                    match = re.search(":[\ ]*([0-9\.]*)mm", self.focalLength)
                    if match:
                        self.focal = float((match.group()[1:-2]).strip())
                else:
                    self.focal = args['--force-focal']
            except: pass

            # parse force-ccd: EXIF CCD width, else the known-camera lookup
            # table, unless the user forced a value
            if 'ccd' in lines.lower():
                if not '--force-ccd' in args:
                    try:
                        floats = extractFloat(self.ccdWidth)
                        self.ccd = floats[0]
                    except:
                        try:
                            self.ccd = float(ccdWidths[self.id])
                        except: pass
                else:
                    self.ccd = args['--force-ccd']

            if verbose: print intListCount

            # After the last attribute line, derive focal length in pixels
            # and record pass/fail on the job.
            if flagDoneList:
                try:
                    # Scale by the longer image side.
                    if self.width > self.height:
                        self.focalpx = self.width * (self.focal / self.ccd)
                    else:
                        self.focalpx = self.height * (self.focal / self.ccd)

                    self.isOk = True
                    objODMJob.good += 1

                    print " using " + self.fileName + " dimensions: " + \
                        str(self.width) + "x" + str(self.height)\
                        + " | focal: " + str(self.focal) \
                        + "mm | ccd: " + str(self.ccd) + "mm"

                except:
                    # Missing width/height/focal/ccd -> photo unusable.
                    self.isOk = False
                    objODMJob.bad += 1

                    try:
                        print "\n no CCD width or focal length found for "\
                            + self.fileName + " - camera: \"" + self.id + "\""
                    except:
                        print "\n no CCD width or focal length found"

                # either way increment total count
                objODMJob.count += 1

                # populate & update max/mins
                if objODMJob.minWidth == 0:
                    objODMJob.minWidth = self.width

                if objODMJob.minHeight == 0:
                    objODMJob.minHeight = self.height

                # NOTE(review): self.minWidth does not exist on ODMPhoto, so
                # this branch would raise AttributeError if taken; presumably
                # it meant to keep objODMJob.minWidth unchanged - verify.
                if objODMJob.minWidth < self.width:
                    objODMJob.minWidth = self.minWidth
                else:
                    objODMJob.minWidth = self.width

                # NOTE(review): self-assignment no-op, presumably "keep the
                # current minimum" - same pattern in the max branches below.
                if objODMJob.minHeight < self.height:
                    objODMJob.minHeight = objODMJob.minHeight
                else:
                    objODMJob.minHeight = self.height

                if objODMJob.maxWidth > self.width:
                    objODMJob.maxWidth = objODMJob.maxWidth
                else:
                    objODMJob.maxWidth = self.width

                if objODMJob.maxHeight > self.height:
                    objODMJob.maxHeight = objODMJob.maxHeight
                else:
                    objODMJob.maxHeight = self.height
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
import system
|
||||
|
||||
def cmvs():
|
||||
"""Run CMVS"""
|
||||
print "\n - running cmvs - " + system.now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
run("\"" + BIN_PATH + "/cmvs\" pmvs/ " + str(args['--cmvs-maxImages']) \
|
||||
+ " " + str(CORES))
|
||||
run("\"" + BIN_PATH + "/genOption\" pmvs/ " + str(args['--pmvs-level']) \
|
||||
+ " " + str(args['--pmvs-csize']) + " " + str(args['--pmvs-threshold']) \
|
||||
+ " " + str(args['--pmvs-wsize']) + " " + str(args['--pmvs-minImageNum']) \
|
||||
+ " " + str(CORES))
|
||||
|
||||
if args['--end-with'] != "cmvs":
|
||||
pmvs()
|
||||
|
||||
|
||||
def pmvs():
|
||||
"""Run PMVS"""
|
||||
print "\n - running pmvs - " + system.now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
run("\"" + BIN_PATH + "/pmvs2\" pmvs/ option-0000")
|
||||
|
||||
run("cp -Rf \"" + jobOptions["jobDir"] + "/pmvs/models\" \"" + jobOptions["jobDir"] + "-results\"")
|
||||
|
||||
|
||||
if args['--end-with'] != "pmvs":
|
||||
odm_meshing()
|
|
@ -0,0 +1,143 @@
|
|||
def odm_georeferencing():
    """Run odm_georeferencing.

    Georeferences the textured model and point cloud (with or without a GCP
    file), writes per-image GPS EXIF tags via exiv2, and emits a
    geo-referenced LAS/LAZ point cloud when an EPSG code is known.

    NOTE(review): recovered from a formatting-mangled diff view; nesting is
    best-effort reconstruction - verify against upstream. Relies on module
    globals jobOptions, args, BIN_PATH, run, now, parse_coordinate_system,
    odm_orthophoto, os, re, fractions - none defined in this chunk.
    NOTE(review): args is accessed both attribute-style
    (args.odm_georeferencing_useGcp) and dict-style (args['--end-with']) -
    confirm both are valid on the args object.
    """
    print "\n - running georeferencing - " + now()

    os.chdir(jobOptions["jobDir"])
    try:
        os.mkdir(jobOptions["jobDir"] + "/odm_georeferencing")
    except:
        pass  # directory may already exist from a previous run

    if not args.odm_georeferencing_useGcp:
        # No ground control points: derive UTM coordinates from image EXIF.
        run("\"" + BIN_PATH + "/odm_extract_utm\" -imagesPath " + jobOptions["srcDir"] + "/ -imageListFile " \
            + jobOptions["jobDir"] + "/pmvs/list.rd.txt -outputCoordFile " + jobOptions["jobDir"] \
            + "/odm_georeferencing/coordFile.txt")

        run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] \
            + "/pmvs/bundle.rd.out -inputCoordFile " + jobOptions["jobDir"] \
            + "/odm_georeferencing/coordFile.txt -inputFile " + jobOptions["jobDir"] \
            + "-results/odm_texturing/odm_textured_model.obj -outputFile " + jobOptions["jobDir"] \
            + "-results/odm_texturing/odm_textured_model_geo.obj -inputPointCloudFile " \
            + jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] \
            + "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] \
            + "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] \
            + "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")

    elif os.path.isfile(jobOptions["srcDir"] + "/" + args.odm_georeferencing_gcpFile):
        # GCP file present: let odm_georef use it directly.
        run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] \
            + "/pmvs/bundle.rd.out -gcpFile " + jobOptions["srcDir"] + "/" + args.odm_georeferencing_gcpFile \
            + " -imagesPath " + jobOptions["srcDir"] + "/ -imagesListPath " + jobOptions["jobDir"] \
            + "/pmvs/list.rd.txt -bundleResizedTo " + str(jobOptions["resizeTo"]) + " -inputFile " \
            + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model.obj -outputFile " \
            + jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -outputCoordFile " \
            + jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt -inputPointCloudFile " \
            + jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] \
            + "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] \
            + "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] \
            + "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")
    else:
        # GCP requested but missing: skip this and the orthophoto stage.
        print "Warning: No GCP file. Consider rerunning with argument --odm_georeferencing-useGcp false --start-with odm_georeferencing"
        print "Skipping orthophoto"
        args.end_with = "odm_georeferencing"

    if "csString" not in jobOptions:
        parse_coordinate_system()

    # Write GPS EXIF tags back into each source image from the coord file.
    if "csString" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
        images = []
        with open(jobOptions["jobDir"] + "/pmvs/list.rd.txt") as f:
            images = f.readlines()

        if len(images) > 0:
            with open(jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt") as f:
                # Lines 0-1 of the coord file are header (CS + offsets);
                # lines 2..N map one-to-one onto the image list.
                for lineNumber, line in enumerate(f):
                    if lineNumber >= 2 and lineNumber - 2 < len(images):
                        tokens = line.split(' ')

                        if len(tokens) >= 3:
                            x = float(tokens[0])
                            y = float(tokens[1])
                            z = float(tokens[2])
                            # NOTE(review): filename comes from readlines()
                            # and keeps its trailing newline - confirm the
                            # exiv2 invocation below tolerates that.
                            filename = images[lineNumber - 2]

                            # Convert local UTM (plus job offsets) to
                            # lat/long via cs2cs.
                            run("echo " + str(x + jobOptions["utmEastOffset"]) + " " \
                                + str(y + jobOptions["utmNorthOffset"]) + " " + str(z) \
                                + " | cs2cs " + jobOptions["csString"] + " +to +datum=WGS84 +proj=latlong > " \
                                + jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt")

                            with open(jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt") as latlongFile:
                                latlongLine = latlongFile.readline()
                                tokens = latlongLine.split()
                                if len(tokens) >= 2:
                                    exifGpsInfoWritten = False

                                    lonString = tokens[0]  # Example: 83d18'16.285"W
                                    latString = tokens[1]  # Example: 41d2'11.789"N
                                    altString = ""
                                    if len(tokens) > 2:
                                        altString = tokens[2]  # Example: 0.998

                                    # Split deg/min/sec/hemisphere on the
                                    # d, space, ' and " delimiters.
                                    tokens = re.split("[d '\"]+", lonString)
                                    if len(tokens) >= 4:
                                        lonDeg = tokens[0]
                                        lonMin = tokens[1]
                                        lonSec = tokens[2]
                                        # EXIF wants rationals, so express
                                        # the seconds as a fraction.
                                        lonSecFrac = fractions.Fraction(lonSec)
                                        lonSecNumerator = str(lonSecFrac._numerator)
                                        lonSecDenominator = str(lonSecFrac._denominator)
                                        lonRef = tokens[3]

                                        tokens = re.split("[d '\"]+", latString)
                                        if len(tokens) >= 4:
                                            latDeg = tokens[0]
                                            latMin = tokens[1]
                                            latSec = tokens[2]
                                            latSecFrac = fractions.Fraction(latSec)
                                            latSecNumerator = str(latSecFrac._numerator)
                                            latSecDenominator = str(latSecFrac._denominator)
                                            latRef = tokens[3]

                                            # Assemble one exiv2 call that
                                            # sets all GPS tags at once.
                                            exivCmd = "exiv2 -q"
                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLatitude " + latDeg + "/1 " \
                                                + latMin + "/1 " + latSecNumerator + "/" + latSecDenominator + "\""

                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLatitudeRef " + latRef + "\""

                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLongitude " + lonDeg + "/1 " \
                                                + lonMin + "/1 " + lonSecNumerator + "/" + lonSecDenominator + "\""

                                            exivCmd += " -M\"set Exif.GPSInfo.GPSLongitudeRef " + lonRef + "\""

                                            altNumerator = arcDenominator = 0  # BUG: arcDenominator is never used

                                            if altString:
                                                altFrac = fractions.Fraction(altString)
                                                altNumerator = str(altFrac._numerator)
                                                altDenominator = str(altFrac._denominator)
                                                exivCmd += " -M\"set Exif.GPSInfo.GPSAltitude " + altNumerator + "/" + altDenominator + "\""
                                                exivCmd += " -M\"set Exif.GPSInfo.GPSAltitudeRef 0\""

                                            exivCmd += " " + filename
                                            run(exivCmd)
                                            exifGpsInfoWritten = True

                                    if not exifGpsInfoWritten:
                                        print("    Warning: Failed setting EXIF GPS info for " \
                                            + filename + " based on " + latlongLine)

    # Emit a geo-referenced LAZ point cloud when we know the EPSG code.
    if "epsg" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
        lasCmd = "\"" + BIN_PATH + "/txt2las\" -i " + jobOptions["jobDir"] + \
            "-results/option-0000_georef.ply -o " + jobOptions["jobDir"] \
            + "-results/pointcloud_georef.laz -skip 30 -parse xyzRGBssss -set_scale 0.01 0.01 0.01 -set_offset " \
            + str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) + " 0 -translate_xyz " \
            + str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) \
            + " 0 -epsg " + str(jobOptions["epsg"])

        print("    Creating geo-referenced LAS file (expecting warning)...")

        run(lasCmd)

    if args['--end-with'] != "odm_georeferencing":
        odm_orthophoto()
|
|
@ -0,0 +1,24 @@
|
|||
import system
|
||||
|
||||
def odm_meshing():
|
||||
"""Run odm_meshing"""
|
||||
print "\n - running meshing - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/odm_meshing")
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
run("\"" + BIN_PATH + "/odm_meshing\" -inputFile " + jobOptions["jobDir"] \
|
||||
+ "-results/option-0000.ply -outputFile " + jobOptions["jobDir"] \
|
||||
+ "-results/odm_mesh-0000.ply -logFile " + jobOptions["jobDir"] \
|
||||
+ "/odm_meshing/odm_meshing_log.txt -maxVertexCount " \
|
||||
+ str(args['--odm_meshing-maxVertexCount']) + " -octreeDepth " \
|
||||
+ str(args['--odm_meshing-octreeDepth']) + " -samplesPerNode " \
|
||||
+ str(args['--odm_meshing-samplesPerNode']) + " -solverDivide " \
|
||||
+ str(args['--odm_meshing-solverDivide']))
|
||||
|
||||
if args['--end-with'] != "odm_meshing":
|
||||
odm_texturing()
|
|
@ -0,0 +1,51 @@
|
|||
import system
|
||||
|
||||
def odm_orthophoto():
    """Run odm_orthophoto.

    Renders an orthophoto PNG from the geo-referenced textured model, then
    (when a coordinate system and UTM offsets are known) georeferences it
    into a GeoTIFF via gdal_translate using the emitted corner file.

    NOTE(review): recovered from a formatting-mangled diff; nesting is
    best-effort reconstruction. Relies on module globals jobOptions, system,
    BIN_PATH, run, parse_coordinate_system, os, sys - none defined here.
    """
    print "\n - running orthophoto generation - " + system.now()

    os.chdir(jobOptions["jobDir"])
    try:
        os.mkdir(jobOptions["jobDir"] + "/odm_orthophoto")
    except:
        pass  # directory may already exist

    # Render the orthophoto and write its corner coordinates alongside.
    run("\"" + BIN_PATH + "/odm_orthophoto\" -inputFile " + jobOptions["jobDir"] + \
        "-results/odm_texturing/odm_textured_model_geo.obj -logFile " + jobOptions["jobDir"] \
        + "/odm_orthophoto/odm_orthophoto_log.txt -outputFile " + jobOptions["jobDir"] \
        + "-results/odm_orthphoto.png -resolution 20.0 -outputCornerFile " + jobOptions["jobDir"] \
        + "/odm_orthphoto_corners.txt")

    if "csString" not in jobOptions:
        parse_coordinate_system()

    geoTiffCreated = False
    if ("csString" in jobOptions and
            "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions):
        ulx = uly = lrx = lry = 0.0
        # First line of the corner file carries the four corner coordinates
        # in local (offset) UTM; shift them back by the job's UTM offsets.
        with open(jobOptions["jobDir"] +
                  "/odm_orthphoto_corners.txt") as f:
            for lineNumber, line in enumerate(f):
                if lineNumber == 0:
                    tokens = line.split(' ')
                    if len(tokens) == 4:
                        ulx = float(tokens[0]) + \
                            float(jobOptions["utmEastOffset"])
                        lry = float(tokens[1]) + \
                            float(jobOptions["utmNorthOffset"])
                        lrx = float(tokens[2]) + \
                            float(jobOptions["utmEastOffset"])
                        uly = float(tokens[3]) + \
                            float(jobOptions["utmNorthOffset"])

        print("    Creating GeoTIFF...")
        sys.stdout.write("    ")
        run("gdal_translate -a_ullr " + str(ulx) + " " + str(uly) + " " +
            str(lrx) + " " + str(lry) + " -a_srs \"" + jobOptions["csString"] +
            "\" " + jobOptions["jobDir"] + "-results/odm_orthphoto.png " +
            jobOptions["jobDir"] + "-results/odm_orthphoto.tif")
        geoTiffCreated = True

    if not geoTiffCreated:
        print "   Warning: No geo-referenced orthophoto created due to missing geo-referencing or corner coordinates."
|
|
@ -0,0 +1,22 @@
|
|||
def odm_texturing():
|
||||
"""Run odm_texturing"""
|
||||
print "\n - running texturing - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
try:
|
||||
os.mkdir(jobOptions["jobDir"] + "/odm_texturing")
|
||||
os.mkdir(jobOptions["jobDir"] + "-results/odm_texturing")
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
run("\"" + BIN_PATH + "/odm_texturing\" -bundleFile " + jobOptions["jobDir"] + "/pmvs/bundle.rd.out -imagesPath "\
|
||||
+ jobOptions["srcDir"] + "/ -imagesListPath " + jobOptions["jobDir"] + "/pmvs/list.rd.txt -inputModelPath " \
|
||||
+ jobOptions["jobDir"] + "-results/odm_mesh-0000.ply -outputFolder " + jobOptions["jobDir"] \
|
||||
+ "-results/odm_texturing/ -textureResolution " + str(args['--odm_texturing-textureResolution']) \
|
||||
+ " -bundleResizedTo " + str(jobOptions["resizeTo"]) + " -textureWithSize " + \
|
||||
str(args['--odm_texturing-textureWithSize']) + " -logFile " + jobOptions["jobDir"] \
|
||||
+ "/odm_texturing/odm_texturing_log.txt")
|
||||
|
||||
if args['--end-with'] != "odm_texturing":
|
||||
odm_georeferencing()
|
|
@ -0,0 +1,45 @@
|
|||
def opensfm():
|
||||
print "\n - running OpenSfM - " + now()
|
||||
|
||||
os.chdir(jobOptions["jobDir"])
|
||||
|
||||
# Create bundler's list.txt
|
||||
filesList = ""
|
||||
for fileObject in objects:
|
||||
if fileObject["isOk"]:
|
||||
filesList += "./" + fileObject["src"] + " 0 {:.5f}\n".format(fileObject["focalpx"])
|
||||
filesList = filesList.rstrip('\n')
|
||||
|
||||
with open(jobOptions["step_3_filelist"], 'w') as fout:
|
||||
fout.write(filesList)
|
||||
|
||||
# Create opensfm working folder
|
||||
mkdir_p("opensfm")
|
||||
|
||||
# Configure OpenSfM
|
||||
config = [
|
||||
"use_exif_size: no",
|
||||
"feature_process_size: {}".format(jobOptions["resizeTo"]),
|
||||
"feature_min_frames: {}".format(args.min_num_features),
|
||||
"processes: {}".format(CORES),
|
||||
]
|
||||
if args.matcher_preselect:
|
||||
config.append("matching_gps_neighbors: {}".format(args.matcher_k))
|
||||
|
||||
with open('opensfm/config.yaml', 'w') as fout:
|
||||
fout.write("\n".join(config))
|
||||
|
||||
print 'running import_bundler'
|
||||
# Convert bundler's input to opensfm
|
||||
run('PYTHONPATH={} "{}/bin/import_bundler" opensfm --list list.txt'.format(PYOPENCV_PATH, OPENSFM_PATH))
|
||||
|
||||
# Run OpenSfM reconstruction
|
||||
run('PYTHONPATH={} "{}/bin/run_all" opensfm'.format(PYOPENCV_PATH, OPENSFM_PATH))
|
||||
|
||||
# Convert back to bundler's format
|
||||
run('PYTHONPATH={} "{}/bin/export_bundler" opensfm'.format(PYOPENCV_PATH, OPENSFM_PATH))
|
||||
|
||||
bundler_to_pmvs("opensfm/bundle_r000.out")
|
||||
|
||||
if args.end_with != "bundler":
|
||||
cmvs()
|
|
@ -0,0 +1,44 @@
|
|||
if __name__ == '__main__':
|
||||
|
||||
objODMJob = prepare_objects()
|
||||
|
||||
os.chdir(objODMJob.jobDir)
|
||||
|
||||
dictSteps = {0:"resize", 1:"getKeypoints", 2:"match",
|
||||
3:"bundler", 4:"cmvs", 5:"pmvs", 6:"odm_meshing",
|
||||
7:"odm_texturing", 8:"odm_georeferencing",
|
||||
9:"odm_orthophoto", 10:"zip_results"}
|
||||
|
||||
listJobQueue = []
|
||||
intStart = -1
|
||||
intEnd = -2
|
||||
|
||||
# Create a dict for holding our steps
|
||||
# key is step number, val is call
|
||||
# Construct the calls below in eval by iterating through the composed dict of steps
|
||||
# --> Allows for running steps in any arbitray sequence, e.g. rebatching certain steps
|
||||
for keys in dictSteps.keys():
|
||||
if dictSteps[keys] == objODMJob.args.start_with:
|
||||
intStart = keys
|
||||
if dictSteps[keys] == objODMJob.args.end_with:
|
||||
intEnd = keys
|
||||
if intStart > intEnd:
|
||||
sys.stdout.writelines("No Valid Steps - Exitting.")
|
||||
exit(0)
|
||||
|
||||
for i in range(intStart,intEnd,1):
|
||||
listJobQueue.append(dictSteps[i])
|
||||
|
||||
for steps in listJobQueue:
|
||||
methodCall = steps+"()"
|
||||
strEval = "objODMJob."+methodCall
|
||||
# EVAL safety - *only* internally generated strings (no user strings)
|
||||
eval(strEval)
|
||||
|
||||
if args.zip_results:
|
||||
print "\nCompressing results - " + now()
|
||||
run("cd " + jobOptions["jobDir"] + "-results/ && tar -czf " +
|
||||
jobOptions["jobDir"] + "-results.tar.gz *")
|
||||
|
||||
|
||||
print "\n - done - " + now()
|
|
@ -0,0 +1,68 @@
|
|||
import datetime
|
||||
|
||||
def run(cmd):
    """Run a system command, aborting the whole pipeline on failure.

    cmd: a shell command line, executed via os.system (so redirections
    and pipes in cmd are honored by the shell).
    Exits the process with a diagnostic message when the command returns
    a non-zero exit code; returns None on success.
    """
    print('running ' + cmd)
    returnCode = os.system(cmd)
    # BUGFIX: removed stray debug statement (print 'b') that polluted the
    # log output of every command.
    if returnCode != 0:
        sys.exit("\nquitting cause: \n\t" + cmd + "\nreturned with code " +
                 str(returnCode) + ".\n")
|
||||
|
||||
def now():
    """Return the current local time as a human-readable string."""
    time_format = '%a %b %d %H:%M:%S %Z %Y'
    return datetime.datetime.now().strftime(time_format)
|
||||
|
||||
def run_and_return(cmdSrc, cmdDest):
    """Run a system command and return its decoded stdout.

    When cmdDest is non-empty, cmdSrc's output is piped into cmdDest
    (equivalent to "cmdSrc | cmdDest") and the consumer's output is
    returned instead.
    """
    producer = subprocess.Popen(shlex.split(cmdSrc), stdout=subprocess.PIPE)
    active = producer
    if cmdDest:
        # Chain a consumer process onto the producer's stdout.
        active = subprocess.Popen(shlex.split(cmdDest),
                                  stdin=producer.stdout,
                                  stdout=subprocess.PIPE)
    stdout, stderr = active.communicate()

    return stdout.decode('ascii')
|
||||
|
||||
|
||||
def mkdir_p(path):
    '''Make a directory including parent directories.

    Succeeds silently when the directory already exists; any other
    OS error is re-raised.
    '''
    try:
        os.makedirs(path)
    except os.error as exc:
        # Swallow the error only for "already exists and is a directory".
        already_there = (exc.errno == errno.EEXIST and os.path.isdir(path))
        if not already_there:
            raise
|
||||
|
||||
|
||||
def calculate_EPSG(utmZone, south):
    """Calculate and return the EPSG code for a WGS84 UTM zone.

    Southern-hemisphere zones map into the 327xx range, northern
    zones into 326xx.
    """
    base = 32700 if south else 32600
    return base + utmZone
|
||||
|
||||
|
||||
def parse_coordinate_system():
    """Write attributes to jobOptions from coord file.

    Reads the first two lines of odm_georeferencing/coordFile.txt:
    line 0 carries the UTM zone token (digits + hemisphere letter),
    line 1 carries the east/north offsets. Populates csString, epsg,
    utmEastOffset and utmNorthOffset in jobOptions. Does nothing when
    the coord file is absent.
    """
    coord_path = jobOptions['jobDir'] + '/odm_georeferencing/coordFile.txt'
    if not os.path.isfile(coord_path):
        return

    with open(coord_path) as f:
        for lineNumber, line in enumerate(f):
            tokens = line.split(' ')
            if lineNumber == 0:
                if len(tokens) == 3:
                    # Zone token is digits followed by a hemisphere letter
                    # (plus trailing newline), e.g. "17N\n".
                    utmZoneString = tokens[2][0:len(tokens[2]) - 2].strip()
                    utmSouthBool = (tokens[2][len(tokens[2]) - 2].strip() == 'S')
                    jobOptions['csString'] = '+datum=WGS84 +proj=utm +zone=' \
                        + utmZoneString + (' +south' if utmSouthBool else '')
                    jobOptions['epsg'] = calculate_EPSG(int(utmZoneString), utmSouthBool)
            elif lineNumber == 1:
                if len(tokens) == 2:
                    jobOptions['utmEastOffset'] = int(tokens[0].strip())
                    jobOptions['utmNorthOffset'] = int(tokens[1].strip())
            else:
                # Only the two header lines matter here.
                break
|
Ładowanie…
Reference in New Issue