Added grass engine, grass-core

pull/979/head
Piero Toffanin 2019-04-24 18:33:12 -04:00
parent 940efcded9
commit d1d790024c
7 changed files with 213 additions and 87 deletions

View file

@@ -7,7 +7,7 @@ ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update -y \
&& apt-get install -y \
software-properties-common \
&& add-apt-repository -y ppa:ubuntugis/ppa \
&& add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable \
&& add-apt-repository -y ppa:george-edison55/cmake-3.x \
&& apt-get update -y
@@ -55,7 +55,8 @@ RUN apt-get install --no-install-recommends -y \
python-pyproj \
python-software-properties \
python-wheel \
swig2.0
swig2.0 \
grass-core
RUN apt-get remove libdc1394-22-dev
RUN pip install --upgrade pip

View file

@@ -13,9 +13,7 @@ install() {
## Before installing
echo "Updating the system"
apt-get update
add-apt-repository -y ppa:ubuntugis/ppa
add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable
apt-get update
echo "Installing Required Requisites"
@@ -28,7 +26,8 @@ install() {
libgeotiff-dev \
pkg-config \
libjsoncpp-dev \
python-gdal
python-gdal \
grass-core
echo "Getting CMake 3.1 for MVS-Texturing"
apt-get install -y software-properties-common python-software-properties

View file

@@ -0,0 +1,127 @@
import shutil
import tempfile
import subprocess
import os

from opendm import log
from opendm import system

from string import Template

class GrassEngine:
    def __init__(self):
        self.grass_binary = system.which('grass7') or \
                            system.which('grass72') or \
                            system.which('grass74') or \
                            system.which('grass76') or \
                            system.which('grass78')

        if self.grass_binary is None:
            log.ODM_WARNING("Could not find a GRASS 7 executable. GRASS scripts will not work.")
        else:
            log.ODM_INFO("Initializing GRASS engine using {}".format(self.grass_binary))

    def create_context(self, serialized_context = {}):
        if self.grass_binary is None: raise GrassEngineException("GRASS engine is unavailable")
        return GrassContext(self.grass_binary, **serialized_context)


class GrassContext:
    def __init__(self, grass_binary, tmpdir = None, template_args = {}, location = None, auto_cleanup=True):
        self.grass_binary = grass_binary
        if tmpdir is None:
            tmpdir = tempfile.mkdtemp('_grass_engine')
        self.tmpdir = tmpdir
        self.template_args = template_args
        self.location = location
        self.auto_cleanup = auto_cleanup

    def get_cwd(self):
        return self.tmpdir

    def add_file(self, filename, source, use_as_location=False):
        param = os.path.splitext(filename)[0] # filename without extension

        dst_path = os.path.abspath(os.path.join(self.get_cwd(), filename))
        with open(dst_path, 'w') as f:
            f.write(source)
        self.template_args[param] = dst_path

        if use_as_location:
            self.set_location(self.template_args[param])

        return dst_path

    def add_param(self, param, value):
        self.template_args[param] = value

    def set_location(self, location):
        """
        :param location: either a "epsg:XXXXX" string or a path to a geospatial file defining the location
        """
        if not location.lower().startswith('epsg:'):
            location = os.path.abspath(location)
        self.location = location

    def execute(self, script):
        """
        :param script: path to .grass script
        :return: script output
        """
        if self.location is None: raise GrassEngineException("Location is not set")

        script = os.path.abspath(script)

        # Create grass script via template substitution
        try:
            with open(script) as f:
                script_content = f.read()
        except FileNotFoundError:
            raise GrassEngineException("Script does not exist: {}".format(script))

        tmpl = Template(script_content)

        # Write script to disk
        if not os.path.exists(self.get_cwd()):
            os.mkdir(self.get_cwd())

        with open(os.path.join(self.get_cwd(), 'script.sh'), 'w') as f:
            f.write(tmpl.substitute(self.template_args))

        # Execute it
        log.ODM_INFO("Executing grass script from {}: {} -c {} location --exec sh script.sh".format(self.get_cwd(), self.grass_binary, self.location))
        p = subprocess.Popen([self.grass_binary, '-c', self.location, 'location', '--exec', 'sh', 'script.sh'],
                             cwd=self.get_cwd(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()

        out = out.decode('utf-8').strip()
        err = err.decode('utf-8').strip()

        if p.returncode == 0:
            return out
        else:
            raise GrassEngineException("Could not execute GRASS script {} from {}: {}".format(script, self.get_cwd(), err))

    def serialize(self):
        return {
            'tmpdir': self.tmpdir,
            'template_args': self.template_args,
            'location': self.location,
            'auto_cleanup': self.auto_cleanup
        }

    def cleanup(self):
        if os.path.exists(self.get_cwd()):
            shutil.rmtree(self.get_cwd())

    def __del__(self):
        if self.auto_cleanup:
            self.cleanup()

class GrassEngineException(Exception):
    pass

def cleanup_grass_context(serialized_context):
    ctx = grass.create_context(serialized_context)
    ctx.cleanup()

grass = GrassEngine()
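
For orientation, a minimal sketch of how this engine is meant to be driven. The script name, the GeoJSON payload, the template parameter names and the EPSG code below are hypothetical illustrations, not part of this commit; the calls themselves (create_context, add_file, add_param, set_location, execute) are the ones defined above.

from opendm.grass_engine import grass, GrassEngineException

try:
    ctx = grass.create_context()

    # Written into the context's tmpdir; exposed to the script as ${area_file}
    ctx.add_file('area_file.geojson', '{"type": "FeatureCollection", "features": []}')

    # Exposed to the script as ${resolution}
    ctx.add_param('resolution', 0.05)

    # Either an "epsg:XXXXX" string or a path to a geospatial file
    ctx.set_location('epsg:32617')

    # Substitutes the ${...} placeholders into the .grass template, writes script.sh
    # and runs it via: <grass binary> -c <location> location --exec sh script.sh
    output = ctx.execute('compute_area.grass')
    print(output)
except GrassEngineException as e:
    print(str(e))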

View file

@@ -81,10 +81,10 @@ def mkdir_p(path):
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise

def calculate_EPSG(utmZone, south):
    """Calculate and return the EPSG"""
    if south:
        return 32700 + utmZone
    else:
        return 32600 + utmZone

# Python2 shutil.which
def which(program):
    path=os.getenv('PATH')
    for p in path.split(os.path.pathsep):
        p=os.path.join(p,program)
        if os.path.exists(p) and os.access(p,os.X_OK):
            return p
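
Two small illustrations of the helpers above; the zone number and the returned path are examples, not output from this commit.

# calculate_EPSG: UTM zone + hemisphere -> EPSG code
calculate_EPSG(17, south=False)   # 32617 (WGS 84 / UTM zone 17N)
calculate_EPSG(17, south=True)    # 32717 (WGS 84 / UTM zone 17S)

# which: Python 2 stand-in for shutil.which; returns the first executable
# named `program` found on PATH, or None when there is no match
which('grass74')   # e.g. '/usr/bin/grass74' once grass-core is installed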

View file

@@ -172,19 +172,6 @@ class ODM_GeoRef(object):
        self.transform = []
        self.gcps = []

    def calculate_EPSG(self, _utm_zone, _pole):
        """Calculate and return the EPSG"""
        if _pole == 'S':
            return 32700 + _utm_zone
        elif _pole == 'N':
            return 32600 + _utm_zone
        else:
            log.ODM_ERROR('Unknown pole format %s' % _pole)
            return

    def calculate_EPSG(self, proj):
        return proj

    def coord_to_fractions(self, coord, refs):
        deg_dec = abs(float(coord))
        deg = int(deg_dec)

View file

@@ -7,7 +7,7 @@ ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update -y \
&& apt-get install -y \
software-properties-common \
&& add-apt-repository -y ppa:ubuntugis/ppa \
&& add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable \
&& add-apt-repository -y ppa:george-edison55/cmake-3.x \
&& apt-get update -y
@@ -55,7 +55,8 @@ RUN apt-get install --no-install-recommends -y \
python-pyproj \
python-software-properties \
python-wheel \
swig2.0
swig2.0 \
grass-core
RUN apt-get remove libdc1394-22-dev
RUN pip install --upgrade pip

View file

@@ -17,78 +17,89 @@ class ODMSplitStage(types.ODM_Stage):
outputs['large'] = len(photos) > args.split
if outputs['large']:
log.ODM_INFO("Large dataset detected (%s photos) and split set at %s. Preparing split merge." % (len(photos), args.split))
config = [
"submodels_relpath: ../submodels/opensfm",
"submodel_relpath_template: ../submodels/submodel_%04d/opensfm",
"submodel_images_relpath_template: ../submodels/submodel_%04d/images",
"submodel_size: %s" % args.split,
"submodel_overlap: %s" % args.split_overlap,
]
osfm.setup(args, tree.dataset_raw, tree.opensfm, photos, gcp_path=tree.odm_georeferencing_gcp, append_config=config, rerun=self.rerun())
osfm.feature_matching(tree.opensfm, self.rerun())
split_done_file = os.path.join(tree.opensfm, "split_done.txt")
# Create submodels
if not io.dir_exists(tree.submodels_path) or self.rerun():
if io.dir_exists(tree.submodels_path):
log.ODM_WARNING("Removing existing submodels directory: %s" % tree.submodels_path)
shutil.rmtree(tree.submodels_path)
if not io.file_exists(split_done_file) or self.rerun():
osfm.run("create_submodels", tree.opensfm)
else:
log.ODM_WARNING("Submodels directory already exist at: %s" % tree.submodels_path)
# TODO: on a network workflow we probably stop here
# and let NodeODM take over
# exit(0)
# Find paths of all submodels
mds = metadataset.MetaDataSet(tree.opensfm)
submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]
# Reconstruct each submodel
log.ODM_INFO("Dataset has been split into %s submodels. Reconstructing each submodel..." % len(submodel_paths))
for sp in submodel_paths:
log.ODM_INFO("Reconstructing %s" % sp)
osfm.reconstruct(sp, self.rerun())
# Align
alignment_file = io.join_paths(tree.opensfm, 'alignment_done.txt')
if not io.file_exists(alignment_file) or self.rerun():
log.ODM_INFO("Aligning submodels...")
osfm.run('align_submodels', tree.opensfm)
with open(alignment_file, 'w') as fout:
fout.write("Alignment done!\n")
else:
log.ODM_WARNING('Found a alignment matching done progress file in: %s' % alignment_file)
# Dense reconstruction for each submodel
for sp in submodel_paths:
# TODO: network workflow
log.ODM_INFO("Large dataset detected (%s photos) and split set at %s. Preparing split merge." % (len(photos), args.split))
config = [
"submodels_relpath: ../submodels/opensfm",
"submodel_relpath_template: ../submodels/submodel_%04d/opensfm",
"submodel_images_relpath_template: ../submodels/submodel_%04d/images",
"submodel_size: %s" % args.split,
"submodel_overlap: %s" % args.split_overlap,
]
# We have already done matching
osfm.mark_feature_matching_done(sp)
osfm.setup(args, tree.dataset_raw, tree.opensfm, photos, gcp_path=tree.odm_georeferencing_gcp, append_config=config, rerun=self.rerun())
osfm.feature_matching(tree.opensfm, self.rerun())
submodel_name = os.path.basename(os.path.abspath(os.path.join(sp, "..")))
# Create submodels
if not io.dir_exists(tree.submodels_path) or self.rerun():
if io.dir_exists(tree.submodels_path):
log.ODM_WARNING("Removing existing submodels directory: %s" % tree.submodels_path)
shutil.rmtree(tree.submodels_path)
log.ODM_INFO("========================")
log.ODM_INFO("Processing %s" % submodel_name)
log.ODM_INFO("========================")
osfm.run("create_submodels", tree.opensfm)
else:
log.ODM_WARNING("Submodels directory already exist at: %s" % tree.submodels_path)
# TODO: on a network workflow we probably stop here
# and let NodeODM take over
# exit(0)
argv = osfm.get_submodel_argv(args, tree.submodels_path, submodel_name)
# Find paths of all submodels
mds = metadataset.MetaDataSet(tree.opensfm)
submodel_paths = [os.path.abspath(p) for p in mds.get_submodel_paths()]
# Re-run the ODM toolchain on the submodel
system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy())
# Reconstruct each submodel
log.ODM_INFO("Dataset has been split into %s submodels. Reconstructing each submodel..." % len(submodel_paths))
for sp in submodel_paths:
log.ODM_INFO("Reconstructing %s" % sp)
osfm.reconstruct(sp, self.rerun())
# Align
alignment_file = io.join_paths(tree.opensfm, 'alignment_done.txt')
if not io.file_exists(alignment_file) or self.rerun():
log.ODM_INFO("Aligning submodels...")
osfm.run('align_submodels', tree.opensfm)
with open(alignment_file, 'w') as fout:
fout.write("Alignment done!\n")
else:
log.ODM_WARNING('Found a alignment matching done progress file in: %s' % alignment_file)
# Dense reconstruction for each submodel
for sp in submodel_paths:
# TODO: network workflow
# We have already done matching
osfm.mark_feature_matching_done(sp)
submodel_name = os.path.basename(os.path.abspath(os.path.join(sp, "..")))
log.ODM_INFO("========================")
log.ODM_INFO("Processing %s" % submodel_name)
log.ODM_INFO("========================")
argv = osfm.get_submodel_argv(args, tree.submodels_path, submodel_name)
# Re-run the ODM toolchain on the submodel
system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy())
with open(split_done_file, 'w') as fout:
fout.write("Split done!\n")
else:
log.ODM_WARNING('Found a split done file in: %s' % split_done_file)
else:
log.ODM_INFO("Normal dataset, will process all at once.")
class ODMMergeStage(types.ODM_Stage):
def process(self, args, outputs):
from opendm import grass_engine
tree = outputs['tree']
reconstruction = outputs['reconstruction']