diff --git a/README.md b/README.md index 0a6eb67..9e2272a 100644 --- a/README.md +++ b/README.md @@ -156,9 +156,4 @@ Make a pull request for small contributions. For big contributions, please open ## Roadmap -- [X] Command line options for OpenDroneMap -- [X] GPC List support -- [ ] Video support when the [SLAM module](https://github.com/OpenDroneMap/OpenDroneMap/pull/317) becomes available -- [ ] Continuous Integration Setup -- [X] Documentation -- [ ] Unit Testing +See the [list of wanted features](https://github.com/OpenDroneMap/NodeODM/issues?q=is%3Aopen+is%3Aissue+label%3A%22new+feature%22). diff --git a/index.js b/index.js index f9aedcf..df6fdea 100644 --- a/index.js +++ b/index.js @@ -22,10 +22,8 @@ const config = require('./config.js'); const packageJson = JSON.parse(fs.readFileSync('./package.json')); const logger = require('./libs/logger'); -const path = require('path'); const async = require('async'); const mime = require('mime'); -const rmdir = require('rimraf'); const express = require('express'); const app = express(); @@ -33,12 +31,8 @@ const app = express(); const bodyParser = require('body-parser'); const TaskManager = require('./libs/TaskManager'); -const Task = require('./libs/Task'); const odmInfo = require('./libs/odmInfo'); -const Directories = require('./libs/Directories'); -const unzip = require('node-unzip-2'); const si = require('systeminformation'); -const mv = require('mv'); const S3 = require('./libs/S3'); const auth = require('./libs/auth/factory').fromConfig(config); @@ -89,6 +83,12 @@ let server; * required: false * type: boolean * - + * name: webhook + * in: formData + * description: Optional URL to call when processing has ended (either successfully or unsuccessfully). + * required: false + * type: string + * - * name: token * in: query * description: 'Token required for authentication (when authentication is required).' 
@@ -165,6 +165,12 @@ app.post('/task/new/commit/:uuid', authCheck, (req, res) => { * required: false * type: boolean * - + * name: webhook + * in: formData + * description: Optional URL to call when processing has ended (either successfully or unsuccessfully). + * required: false + * type: string + * - * name: token * in: query * description: 'Token required for authentication (when authentication is required).' @@ -191,141 +197,11 @@ app.post('/task/new/commit/:uuid', authCheck, (req, res) => { * schema: * $ref: '#/definitions/Error' */ -app.post('/task/new', authCheck, taskNew.assignUUID, taskNew.uploadImages, (req, res) => { - // TODO: consider doing the file moving in the background - // and return a response more quickly instead of a long timeout. - req.setTimeout(1000 * 60 * 20); - - let srcPath = path.join("tmp", req.id); - - // Print error message and cleanup - const die = (error) => { - res.json({error}); - - // Check if tmp/ directory needs to be cleaned - if (fs.stat(srcPath, (err, stats) => { - if (!err && stats.isDirectory()) rmdir(srcPath, () => {}); // ignore errors, don't wait - })); - }; - - if ((!req.files || req.files.length === 0) && !req.body.zipurl) die("Need at least 1 file or a zip file url."); - else if (config.maxImages && req.files && req.files.length > config.maxImages) die(`${req.files.length} images uploaded, but this node can only process up to ${config.maxImages}.`); - - else { - let destPath = path.join(Directories.data, req.id); - let destImagesPath = path.join(destPath, "images"); - let destGpcPath = path.join(destPath, "gpc"); - - async.series([ - cb => { - odmInfo.filterOptions(req.body.options, (err, options) => { - if (err) cb(err); - else { - req.body.options = options; - cb(null); - } - }); - }, - - // Move all uploads to data//images dir (if any) - cb => { - if (req.files && req.files.length > 0) { - fs.stat(destPath, (err, stat) => { - if (err && err.code === 'ENOENT') cb(); - else cb(new Error(`Directory exists (should 
not have happened: ${err.code})`)); - }); - } else { - cb(); - } - }, - - // Unzips zip URL to tmp// (if any) - cb => { - if (req.body.zipurl) { - let archive = "zipurl.zip"; - - upload.storage.getDestination(req, archive, (err, dstPath) => { - if (err) cb(err); - else{ - let archiveDestPath = path.join(dstPath, archive); - - download(req.body.zipurl, archiveDestPath, cb); - } - }); - } else { - cb(); - } - }, - - cb => fs.mkdir(destPath, undefined, cb), - cb => fs.mkdir(destGpcPath, undefined, cb), - cb => mv(srcPath, destImagesPath, cb), - - cb => { - // Find any *.zip file and extract - fs.readdir(destImagesPath, (err, entries) => { - if (err) cb(err); - else { - async.eachSeries(entries, (entry, cb) => { - if (/\.zip$/gi.test(entry)) { - let filesCount = 0; - fs.createReadStream(path.join(destImagesPath, entry)).pipe(unzip.Parse()) - .on('entry', function(entry) { - if (entry.type === 'File') { - filesCount++; - entry.pipe(fs.createWriteStream(path.join(destImagesPath, path.basename(entry.path)))); - } else { - entry.autodrain(); - } - }) - .on('close', () => { - // Verify max images limit - if (config.maxImages && filesCount > config.maxImages) cb(`${filesCount} images uploaded, but this node can only process up to ${config.maxImages}.`); - else cb(); - }) - .on('error', cb); - } else cb(); - }, cb); - } - }); - }, - - cb => { - // Find any *.txt (GPC) file and move it to the data//gpc directory - // also remove any lingering zipurl.zip - fs.readdir(destImagesPath, (err, entries) => { - if (err) cb(err); - else { - async.eachSeries(entries, (entry, cb) => { - if (/\.txt$/gi.test(entry)) { - mv(path.join(destImagesPath, entry), path.join(destGpcPath, entry), cb); - }else if (/\.zip$/gi.test(entry)){ - fs.unlink(path.join(destImagesPath, entry), cb); - } else cb(); - }, cb); - } - }); - }, - - // Create task - cb => { - new Task(req.id, req.body.name, (err, task) => { - if (err) cb(err); - else { - taskManager.addNew(task); - res.json({ uuid: req.id }); - cb(); 
- } - }, req.body.options, - req.body.webhook, - req.body.skipPostProcessing === 'true'); - } - ], err => { - if (err) die(err.message); - }); - } - -}); +app.post('/task/new', authCheck, taskNew.assignUUID, taskNew.uploadImages, (req, res, next) => { + if ((!req.files || req.files.length === 0) && !req.body.zipurl) req.error = "Need at least 1 file or a zip file url."; + else if (config.maxImages && req.files && req.files.length > config.maxImages) req.error = `${req.files.length} images uploaded, but this node can only process up to ${config.maxImages}.`; + next(); +}, taskNew.handleTaskNew); let getTaskFromUuid = (req, res, next) => { let task = taskManager.find(req.params.uuid); diff --git a/libs/Task.js b/libs/Task.js index d4c1266..ad5a6df 100644 --- a/libs/Task.js +++ b/libs/Task.js @@ -46,7 +46,7 @@ module.exports = class Task{ this.processingTime = -1; this.setStatus(statusCodes.QUEUED); this.options = options; - this.gpcFiles = []; + this.gcpFiles = []; this.output = []; this.runningProcesses = []; this.webhook = webhook; @@ -67,15 +67,15 @@ module.exports = class Task{ // Find GCP (if any) cb => { - fs.readdir(this.getGpcFolderPath(), (err, files) => { + fs.readdir(this.getGcpFolderPath(), (err, files) => { if (err) cb(err); else{ files.forEach(file => { if (/\.txt$/gi.test(file)){ - this.gpcFiles.push(file); + this.gcpFiles.push(file); } }); - logger.debug(`Found ${this.gpcFiles.length} GPC files (${this.gpcFiles.join(" ")}) for ${this.uuid}`); + logger.debug(`Found ${this.gcpFiles.length} GCP files (${this.gcpFiles.join(" ")}) for ${this.uuid}`); cb(null); } }); @@ -110,10 +110,10 @@ module.exports = class Task{ return path.join(this.getProjectFolderPath(), "images"); } - // Get path where GPC file(s) are stored + // Get path where GCP file(s) are stored // (relative to nodejs process CWD) - getGpcFolderPath(){ - return path.join(this.getProjectFolderPath(), "gpc"); + getGcpFolderPath(){ + return path.join(this.getProjectFolderPath(), "gcp"); } // Get 
path of project (where all images and assets folder are contained) @@ -385,8 +385,8 @@ module.exports = class Task{ runnerOptions["project-path"] = fs.realpathSync(Directories.data); - if (this.gpcFiles.length > 0){ - runnerOptions.gcp = fs.realpathSync(path.join(this.getGpcFolderPath(), this.gpcFiles[0])); + if (this.gcpFiles.length > 0){ + runnerOptions.gcp = fs.realpathSync(path.join(this.getGcpFolderPath(), this.gcpFiles[0])); } this.runningProcesses.push(odmRunner.run(runnerOptions, this.uuid, (err, code, signal) => { diff --git a/libs/taskNew.js b/libs/taskNew.js index ff8ad33..0d84735 100644 --- a/libs/taskNew.js +++ b/libs/taskNew.js @@ -21,6 +21,12 @@ const fs = require('fs'); const path = require('path'); const TaskManager = require('./TaskManager'); const uuidv4 = require('uuid/v4'); +const config = require('../config.js'); +const rmdir = require('rimraf'); +const Directories = require('./Directories'); +const unzip = require('node-unzip-2'); +const mv = require('mv'); +const Task = require('./Task'); const upload = multer({ storage: multer.diskStorage({ @@ -64,7 +70,14 @@ module.exports = { uploadImages: upload.array("images"), - handleTaskNew: (res, res) => { + setupFiles: (req, res, next) => { + // populate req.id (here or somewhere else) + // populate req.files from directory + // populate req.body from metadata file + + }, + + handleTaskNew: (req, res) => { // TODO: consider doing the file moving in the background // and return a response more quickly instead of a long timeout. 
req.setTimeout(1000 * 60 * 20); @@ -81,13 +94,12 @@ module.exports = { })); }; - if ((!req.files || req.files.length === 0) && !req.body.zipurl) die("Need at least 1 file or a zip file url."); - else if (config.maxImages && req.files && req.files.length > config.maxImages) die(`${req.files.length} images uploaded, but this node can only process up to ${config.maxImages}.`); - - else { + if (req.error !== undefined){ + die(req.error); + }else{ let destPath = path.join(Directories.data, req.id); let destImagesPath = path.join(destPath, "images"); - let destGpcPath = path.join(destPath, "gpc"); + let destGcpPath = path.join(destPath, "gcp"); async.series([ cb => { @@ -100,7 +112,7 @@ module.exports = { }); }, - // Move all uploads to data//images dir (if any) + // Check if dest directory already exists cb => { if (req.files && req.files.length > 0) { fs.stat(destPath, (err, stat) => { @@ -130,8 +142,9 @@ module.exports = { } }, + // Move all uploads to data//images dir (if any) cb => fs.mkdir(destPath, undefined, cb), - cb => fs.mkdir(destGpcPath, undefined, cb), + cb => fs.mkdir(destGcpPath, undefined, cb), cb => mv(srcPath, destImagesPath, cb), cb => { @@ -164,14 +177,14 @@ module.exports = { }, cb => { - // Find any *.txt (GPC) file and move it to the data//gpc directory + // Find any *.txt (GCP) file and move it to the data//gcp directory // also remove any lingering zipurl.zip fs.readdir(destImagesPath, (err, entries) => { if (err) cb(err); else { async.eachSeries(entries, (entry, cb) => { if (/\.txt$/gi.test(entry)) { - mv(path.join(destImagesPath, entry), path.join(destGpcPath, entry), cb); + mv(path.join(destImagesPath, entry), path.join(destGcpPath, entry), cb); }else if (/\.zip$/gi.test(entry)){ fs.unlink(path.join(destImagesPath, entry), cb); } else cb(); @@ -185,7 +198,7 @@ module.exports = { new Task(req.id, req.body.name, (err, task) => { if (err) cb(err); else { - taskManager.addNew(task); + TaskManager.singleton().addNew(task); res.json({ uuid: req.id 
}); cb(); }