Mirror of https://github.com/OpenDroneMap/NodeODM

Merge pull request #99 from pierotofy/multispec
Changes for new split-merge point cloud merging logic (pull/108/head)
commit 60615c6f91
@@ -5,7 +5,7 @@ EXPOSE 3000
 USER root
 RUN curl --silent --location https://deb.nodesource.com/setup_10.x | bash -
-RUN apt-get install -y nodejs python-gdal && npm install -g nodemon && \
+RUN apt-get install -y nodejs python-gdal p7zip-full && npm install -g nodemon && \
     ln -s /code/SuperBuild/install/bin/entwine /usr/bin/entwine && \
     ln -s /code/SuperBuild/install/bin/pdal /usr/bin/pdal

@@ -54,11 +54,11 @@ If you are already running [ODM](https://github.com/OpenDroneMap/ODM) on Ubuntu
 1) Install Entwine: https://entwine.io/quickstart.html#installation

-2) Install node.js and npm dependencies:
+2) Install node.js, npm dependencies and 7zip:

 ```bash
 sudo curl --silent --location https://deb.nodesource.com/setup_6.x | sudo bash -
-sudo apt-get install -y nodejs python-gdal
+sudo apt-get install -y nodejs python-gdal p7zip-full
 git clone https://github.com/OpenDroneMap/NodeODM
 cd NodeODM
 npm install

@@ -11,7 +11,7 @@
     "port": 3000,
     "deamon": false,
-    "parallelQueueProcessing": 2,
+    "parallelQueueProcessing": 1,
     "cleanupTasksAfter": 2880,
     "test": false,
     "testSkipOrthophotos": false,

@@ -20,6 +20,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
 let fs = require('fs');
 let argv = require('minimist')(process.argv.slice(2));
 let utils = require('./libs/utils');
+const spawnSync = require('child_process').spawnSync;

 if (argv.help){
     console.log(`

@@ -93,7 +94,7 @@ config.logger.logDirectory = fromConfigFile("logger.logDirectory", ''); // Set t
 config.port = parseInt(argv.port || argv.p || fromConfigFile("port", process.env.PORT || 3000));
 config.deamon = argv.deamonize || argv.d || fromConfigFile("daemon", false);
-config.parallelQueueProcessing = parseInt(argv.parallel_queue_processing || argv.q || fromConfigFile("parallelQueueProcessing", 2));
+config.parallelQueueProcessing = parseInt(argv.parallel_queue_processing || argv.q || fromConfigFile("parallelQueueProcessing", 1));
 config.cleanupTasksAfter = parseInt(argv.cleanup_tasks_after || fromConfigFile("cleanupTasksAfter", 2880));
 config.cleanupUploadsAfter = parseInt(argv.cleanup_uploads_after || fromConfigFile("cleanupUploadsAfter", 2880));
 config.test = argv.test || fromConfigFile("test", false);

@@ -115,4 +116,8 @@ config.s3UploadEverything = argv.s3_upload_everything || fromConfigFile("s3Uploa
 config.maxConcurrency = parseInt(argv.max_concurrency || fromConfigFile("maxConcurrency", 0));
 config.maxRuntime = parseInt(argv.max_runtime || fromConfigFile("maxRuntime", -1));

+// Detect 7z availability
+const childProcess = spawnSync("7z", ['--help']);
+config.has7z = childProcess.status === 0;
+
 module.exports = config;
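
The probe above relies only on the process exit status: `spawnSync` runs `7z --help` and, if it exits with code 0, `config.has7z` enables the faster 7z-based zipping path. A rough shell equivalent of the same check (a sketch for illustration, not part of the diff):

```bash
# Infer 7z availability from the exit code of `7z --help`,
# mirroring the spawnSync probe above.
if 7z --help > /dev/null 2>&1; then
    echo "7z found: fast zipping enabled"
else
    echo "7z missing: will fall back to legacy zipping"
fi
```
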
@@ -8,7 +8,7 @@ REST API to access ODM
 === Version information
 [%hardbreaks]
-_Version_ : 1.5.3
+_Version_ : 1.6.0


 === Contact information

@@ -279,6 +279,48 @@ _required_|UUID of the task|string|
 |===


+[[_task_list_get]]
+=== GET /task/list
+
+==== Description
+Gets the list of tasks available on this node.
+
+
+==== Parameters
+
+[options="header", cols=".^2,.^3,.^9,.^4,.^2"]
+|===
+|Type|Name|Description|Schema|Default
+|*Query*|*token* +
+_optional_|Token required for authentication (when authentication is required).|string|
+|===
+
+
+==== Responses
+
+[options="header", cols=".^2,.^14,.^4"]
+|===
+|HTTP Code|Description|Schema
+|*200*|Task List|< <<_task_list_get_response_200,Response 200>> > array
+|*default*|Error|<<_error,Error>>
+|===
+
+[[_task_list_get_response_200]]
+*Response 200*
+
+[options="header", cols=".^3,.^11,.^4"]
+|===
+|Name|Description|Schema
+|*uuid* +
+_required_|UUID|string
+|===
+
+
+==== Tags
+
+* task
+
+
 [[_task_new_post]]
 === POST /task/new
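
Since the new endpoint takes only an optional `token` query parameter and returns an array of `{uuid}` objects, a minimal smoke test could look like this (a sketch assuming a node on the default port 3000 with authentication disabled):

```bash
# List the UUIDs of all tasks known to this node.
curl http://localhost:3000/task/list
# Expected response shape: [{"uuid":"..."}]
```
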
File diff suppressed because one or more lines are too long

index.js
@@ -292,6 +292,44 @@ let getTaskFromUuid = (req, res, next) => {
     } else res.json({ error: `${req.params.uuid} not found` });
 };

+/** @swagger
+ * /task/list:
+ *   get:
+ *     description: Gets the list of tasks available on this node.
+ *     tags: [task]
+ *     parameters:
+ *       -
+ *         name: token
+ *         in: query
+ *         description: 'Token required for authentication (when authentication is required).'
+ *         required: false
+ *         type: string
+ *     responses:
+ *       200:
+ *         description: Task List
+ *         schema:
+ *           title: TaskList
+ *           type: array
+ *           items:
+ *             type: object
+ *             required: [uuid]
+ *             properties:
+ *               uuid:
+ *                 type: string
+ *                 description: UUID
+ *       default:
+ *         description: Error
+ *         schema:
+ *           $ref: '#/definitions/Error'
+ */
+app.get('/task/list', authCheck, (req, res) => {
+    const tasks = [];
+    for (let uuid in taskManager.tasks){
+        tasks.push({uuid});
+    }
+    res.json(tasks);
+});
+
 /** @swagger
  * /task/{uuid}/info:
  *   get:

@@ -863,6 +901,10 @@ if (config.test) {
     if (config.testDropUploads) logger.info("Uploads will drop at random");
 }

+if (!config.has7z){
+    logger.warn("The 7z program is not installed, falling back to legacy (zipping will be slower)");
+}
+
 let commands = [
     cb => odmInfo.initialize(cb),
     cb => auth.initialize(cb),

libs/Task.js
@@ -22,12 +22,10 @@ const async = require('async');
 const assert = require('assert');
 const logger = require('./logger');
 const fs = require('fs');
 const glob = require("glob");
 const path = require('path');
 const rmdir = require('rimraf');
 const odmRunner = require('./odmRunner');
 const processRunner = require('./processRunner');
 const archiver = require('archiver');
 const Directories = require('./Directories');
 const kill = require('tree-kill');
 const S3 = require('./S3');

@@ -249,6 +247,40 @@ module.exports = class Task{

         const postProcess = () => {
+            const createZipArchive = (outputFilename, files) => {
+                return (done) => {
+                    this.output.push(`Compressing ${outputFilename}\n`);
+
+                    const zipFile = path.resolve(this.getAssetsArchivePath(outputFilename));
+                    const sourcePath = !config.test ?
+                                       this.getProjectFolderPath() :
+                                       path.join("tests", "processing_results");
+
+                    const pathsToArchive = [];
+                    files.forEach(f => {
+                        if (fs.existsSync(path.join(sourcePath, f))){
+                            pathsToArchive.push(f);
+                        }
+                    });
+
+                    processRunner.sevenZip({
+                        destination: zipFile,
+                        pathsToArchive,
+                        cwd: sourcePath
+                    }, (err, code, _) => {
+                        if (err){
+                            logger.error(`Could not archive .zip file: ${err.message}`);
+                            done(err);
+                        }else{
+                            if (code === 0){
+                                this.updateProgress(97);
+                                done();
+                            }else done(new Error(`Could not archive .zip file, 7z exited with code ${code}`));
+                        }
+                    });
+                };
+            };
+
-            const createZipArchive = (outputFilename, files) => {
+            const createZipArchiveLegacy = (outputFilename, files) => {
                 return (done) => {
                     this.output.push(`Compressing ${outputFilename}\n`);

@@ -327,7 +359,7 @@ module.exports = class Task{
             this.runningProcesses.push(
                 processRunner.runPostProcessingScript({
                     projectFolderPath: this.getProjectFolderPath()
-                }, (err, code, signal) => {
+                }, (err, code, _) => {
                     if (err) done(err);
                     else{
                         if (code === 0){

@@ -388,7 +420,9 @@ module.exports = class Task{
             }

             if (!this.skipPostProcessing) tasks.push(runPostProcessingScript());
-            tasks.push(createZipArchive('all.zip', allPaths));
+
+            const archiveFunc = config.has7z ? createZipArchive : createZipArchiveLegacy;
+            tasks.push(archiveFunc('all.zip', allPaths));

             // Upload to S3 all paths + all.zip file (if config says so)
             if (S3.enabled()){

@@ -25,7 +25,7 @@ let logger = require('./logger');
 let utils = require('./utils');


-function makeRunner(command, args, requiredOptions = [], outputTestFile = null){
+function makeRunner(command, args, requiredOptions = [], outputTestFile = null, skipOnTest = true){
     return function(options, done, outputReceived){
         for (let requiredOption of requiredOptions){
             assert(options[requiredOption] !== undefined, `${requiredOption} must be defined`);

@@ -36,14 +36,16 @@ function makeRunner(command, args, requiredOptions = [], outputTestFile = null){

         logger.info(`About to run: ${command} ${commandArgs.join(" ")}`);

-        if (config.test){
+        if (config.test && skipOnTest){
             logger.info("Test mode is on, command will not execute");

             if (outputTestFile){
                 fs.readFile(path.resolve(__dirname, outputTestFile), 'utf8', (err, text) => {
                     if (!err){
-                        let lines = text.split("\n");
-                        lines.forEach(line => outputReceived(line));
+                        if (outputReceived !== undefined){
+                            let lines = text.split("\n");
+                            lines.forEach(line => outputReceived(line));
+                        }

                         done(null, 0, null);
                     }else{

@@ -62,20 +64,21 @@ function makeRunner(command, args, requiredOptions = [], outputTestFile = null){
         const env = utils.clone(process.env);
         env.LD_LIBRARY_PATH = path.join(config.odm_path, "SuperBuild", "install", "lib");

         try{
-            let childProcess = spawn(command, commandArgs, { env });
-            childProcess
-                .on('exit', (code, signal) => done(null, code, signal))
-                .on('error', done);
+            let cwd = undefined;
+            if (options.cwd) cwd = options.cwd;
+
+            let childProcess = spawn(command, commandArgs, { env, cwd });
+
+            childProcess
+                .on('exit', (code, signal) => done(null, code, signal))
+                .on('error', done);

             if (outputReceived !== undefined){
                 childProcess.stdout.on('data', chunk => outputReceived(chunk.toString()));
                 childProcess.stderr.on('data', chunk => outputReceived(chunk.toString()));
             }
-
-            return childProcess;
         }catch(e){
             // Catch errors such as ENOMEM
             logger.warn(`Error: ${e.message}`);
             done(e);
         }
+
+        return childProcess;
     };
 }

@@ -84,5 +87,12 @@ module.exports = {
         function(options){
             return [options.projectFolderPath];
         },
-        ["projectFolderPath"])
+        ["projectFolderPath"]),
+
+    sevenZip: makeRunner("7z", function(options){
+            return ["a", "-r", "-bd", options.destination].concat(options.pathsToArchive);
+        },
+        ["destination", "pathsToArchive", "cwd"],
+        null,
+        false)
 };
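
Given the argument builder above, the runner ultimately executes a plain `7z` add command with `cwd` set to the task's project folder, so archive entries stay relative to it. Roughly equivalent to (folder and paths are illustrative):

```bash
# a = add to archive, -r = recurse subdirectories, -bd = disable the
# percentage indicator; only paths that exist get passed in by Task.js.
cd /path/to/project_folder    # hypothetical project folder (the cwd option)
7z a -r -bd all.zip odm_orthophoto odm_georeferencing
```
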
@@ -48,6 +48,24 @@ const removeDirectory = function(dir, cb = () => {}){
     });
 };

+const assureUniqueFilename = (dstPath, filename, cb) => {
+    const dstFile = path.join(dstPath, filename);
+    fs.exists(dstFile, exists => {
+        if (!exists) cb(null, filename);
+        else{
+            const parts = filename.split(".");
+            if (parts.length > 1){
+                assureUniqueFilename(dstPath,
+                    `${parts.slice(0, parts.length - 1).join(".")}_.${parts[parts.length - 1]}`,
+                    cb);
+            }else{
+                // Filename without extension? Strange..
+                assureUniqueFilename(dstPath, filename + "_", cb);
+            }
+        }
+    });
+};
+
 const upload = multer({
     storage: multer.diskStorage({
         destination: (req, file, cb) => {

@@ -65,7 +83,9 @@ const upload = multer({
         filename: (req, file, cb) => {
             let filename = utils.sanitize(file.originalname);
             if (filename === "body.json") filename = "_body.json";
-            cb(null, filename);
+
+            let dstPath = path.join("tmp", req.id);
+            assureUniqueFilename(dstPath, filename, cb);
         }
     })
 });
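
A consequence of routing multer's `filename` callback through `assureUniqueFilename`: when two uploads in the same task share a name, the later one is stored with an extra underscore before the extension (e.g. a second `photo.jpg` becomes `photo_.jpg`, a third `photo__.jpg`), and extensionless names simply gain trailing underscores.
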
@@ -1,6 +1,6 @@
 {
   "name": "NodeODM",
-  "version": "1.5.3",
+  "version": "1.6.0",
   "description": "REST API to access ODM",
   "main": "index.js",
   "scripts": {

@@ -141,7 +141,13 @@
                 </form>
             </div>
             <div class="col-md-7" id="taskList">
-                <p data-bind="visible: tasks().length === 0">No running tasks.</p>
+                <div data-bind="visible: error() != ''">
+                    <div class="alert alert-warning" role="alert" data-bind="text: error()"></div>
+                </div>
+                <div data-bind="visible: loading()">
+                    Loading task list... <span class="glyphicon glyphicon-refresh spinning"></span>
+                </div>
+                <p data-bind="visible: !loading() && tasks().length === 0">No running tasks.</p>
                 <div data-bind="foreach: tasks">
                     <div class="task" data-bind="css: {pulsePositive: info().status && info().status.code === 40, pulseNegative: info().status && info().status.code === 30}">
                         <p data-bind="visible: loading()">Retrieving <span data-bind="text: uuid"></span> ... <span class="glyphicon glyphicon-refresh spinning"></span></p>

@@ -208,7 +214,7 @@
     <script src="js/vendor/knockout-3.4.0.js"></script>
     <script src="js/vendor/ko.observableDictionary.js"></script>
     <script src="js/dropzone.js" type="text/javascript"></script>
-    <script src="js/main.js?t=1"></script>
+    <script src="js/main.js?t=2"></script>
 </body>

 </html>

@@ -205,27 +205,34 @@ $(function() {
     }

     function TaskList() {
-        var uuids = JSON.parse(localStorage.getItem("odmTaskList") || "[]");
-        if (Object.prototype.toString.call(uuids) !== "[object Array]") uuids = [];
+        var self = this;
+        var url = "/task/list?token=" + token;
+        this.error = ko.observable("");
+        this.loading = ko.observable(true);
+        this.tasks = ko.observableArray();

-        this.tasks = ko.observableArray($.map(uuids, function(uuid) {
-            return new Task(uuid);
-        }));
+        $.get(url)
+            .done(function(tasksJson) {
+                if (tasksJson.error){
+                    self.error(tasksJson.error);
+                }else{
+                    for (var i in tasksJson){
+                        self.tasks.push(new Task(tasksJson[i].uuid));
+                    }
+                }
+            })
+            .fail(function() {
+                self.error(url + " is unreachable.");
+            })
+            .always(function() { self.loading(false); });
     }
     TaskList.prototype.add = function(task) {
         this.tasks.push(task);
-        this.saveTaskListToLocalStorage();
     };
-    TaskList.prototype.saveTaskListToLocalStorage = function() {
-        localStorage.setItem("odmTaskList", JSON.stringify($.map(this.tasks(), function(task) {
-            return task.uuid;
-        })));
-    };
     TaskList.prototype.remove = function(task) {
         this.tasks.remove(function(t) {
             return t === task;
         });
-        this.saveTaskListToLocalStorage();
     };

     var codes = {

@@ -42,19 +42,20 @@ orthophoto_path="odm_orthophoto/odm_orthophoto.tif"

 if [ -e "$orthophoto_path" ]; then
     python "$script_path/gdal2tiles.py" $g2t_options $orthophoto_path orthophoto_tiles
-
-    # Check for DEM tiles also
-    for dem_product in ${dem_products[@]}; do
-        colored_dem_path="odm_dem/""$dem_product""_colored_hillshade.tif"
-        if [ -e "$colored_dem_path" ]; then
-            python "$script_path/gdal2tiles.py" $g2t_options $colored_dem_path "$dem_product""_tiles"
-        else
-            echo "No $dem_product found at $colored_dem_path: will skip tiling"
-        fi
-    done
 else
     echo "No orthophoto found at $orthophoto_path: will skip tiling"
 fi

+for dem_product in ${dem_products[@]}; do
+    colored_dem_path="odm_dem/""$dem_product""_colored_hillshade.tif"
+    if [ -e "$colored_dem_path" ]; then
+        python "$script_path/gdal2tiles.py" $g2t_options $colored_dem_path "$dem_product""_tiles"
+    else
+        echo "No $dem_product found at $colored_dem_path: will skip tiling"
+    fi
+done
+
 # Generate point cloud (if entwine or potreeconverter is available)
 pointcloud_input_path=""
 for path in "odm_georeferencing/odm_georeferenced_model.laz" \

@@ -71,12 +72,6 @@ for path in "odm_georeferencing/odm_georeferenced_model.laz" \
     fi
 done

-# Never generate point cloud tiles with split-merge workflows
-if [ -e "submodels" ] && [ -e "entwine_pointcloud" ]; then
-    pointcloud_input_path=""
-    echo "Split-merge dataset with point cloud detected. No need to regenerate point cloud tiles."
-fi
-
 if [ ! -z "$pointcloud_input_path" ]; then
     # Convert the failsafe PLY point cloud to laz in odm_georeferencing
     # if necessary, otherwise it will not get zipped

@@ -93,13 +88,12 @@ if [ ! -z "$pointcloud_input_path" ]; then
     fi

     if hash entwine 2>/dev/null; then
-        # Optionally cleanup previous results (from a restart)
-        if [ -e "entwine_pointcloud" ]; then
-            rm -fr "entwine_pointcloud"
+        if [ ! -e "entwine_pointcloud" ]; then
+            entwine build --threads $(nproc) --tmp "entwine_pointcloud-tmp" -i "$pointcloud_input_path" -o entwine_pointcloud
+        else
+            echo "Entwine point cloud is already built."
         fi

-        entwine build --threads $(nproc) --tmp "entwine_pointcloud-tmp" -i "$pointcloud_input_path" -o entwine_pointcloud
-
         # Cleanup
         if [ -e "entwine_pointcloud-tmp" ]; then
             rm -fr "entwine_pointcloud-tmp"
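
The net effect of this last change: instead of always deleting and rebuilding the Entwine point cloud on a restart, an existing `entwine_pointcloud` directory is now left in place, which (together with the removed split-merge special case above) lets split-merge datasets reuse the point cloud produced by the merge step rather than regenerating it.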