Merge pull request #237 from lucadelu/new_outputs

added new files to export
Piero Toffanin 2025-06-29 09:56:05 +02:00 committed by GitHub
commit e2633b7259
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
1 changed file with 32 additions and 29 deletions


@@ -148,10 +148,10 @@ module.exports = class Task{
    }

    static CreateFromSerialized(taskJson, done){
        const task = new Task(taskJson.uuid,
                taskJson.name,
                taskJson.options,
                taskJson.webhook,
                taskJson.skipPostProcessing,
                taskJson.outputs,
                taskJson.dateCreated);
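
For reference (not part of this diff), a minimal example of the serialized object the method above consumes; only the field names come from the constructor call, the values are made up:

// Illustrative values only; field names match the CreateFromSerialized call above.
const taskJson = {
    uuid: "00000000-0000-0000-0000-000000000000",
    name: "Example task",
    options: [{name: "dsm", value: true}],           // option shape assumed
    webhook: "http://localhost:3001/hook",           // hypothetical URL
    skipPostProcessing: false,
    outputs: ["odm_orthophoto/odm_orthophoto.tif",
              "odm_orthophoto/odm_orthophoto.tfw"],  // paths taken from this diff
    dateCreated: Date.now()
};
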
@@ -200,7 +200,7 @@ module.exports = class Task{
        }else{
            return false; // Invalid
        }

        return path.join(this.getProjectFolderPath(), filename);
    }
@@ -223,7 +223,7 @@ module.exports = class Task{
    updateProgress(globalProgress){
        globalProgress = Math.min(100, Math.max(0, globalProgress));

        // Progress updates are asynchronous (via UDP)
        // so things could be out of order. We ignore all progress
        // updates that are lower than what we might have previously received.
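
The guard the comment describes is not visible in this hunk; a minimal sketch of the idea, assuming the last accepted value lives on task.progress:

// Sketch only: drop out-of-order UDP updates so progress never moves backwards.
function updateProgress(task, globalProgress){
    globalProgress = Math.min(100, Math.max(0, globalProgress));
    if (globalProgress < task.progress) return;  // stale packet, ignore it
    task.progress = globalProgress;
}
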
@@ -279,7 +279,7 @@ module.exports = class Task{
            // the process will immediately terminate.
            // For example, in the case of the ODM process, the process will continue running for a while.
            // This might need to be fixed on ODM's end.

            // During testing, proc is undefined
            if (proc) kill(proc.pid);
        });
@@ -301,15 +301,15 @@ module.exports = class Task{
            this.stopTrackingProcessingTime();
            done(err);
        };

        const postProcess = () => {
            const createZipArchive = (outputFilename, files) => {
                return (done) => {
                    this.output.push(`Compressing ${outputFilename}\n`);

                    const zipFile = path.resolve(this.getAssetsArchivePath(outputFilename));
                    const sourcePath = !config.test ?
                            this.getProjectFolderPath() :
                            path.join("tests", "processing_results");

                    const pathsToArchive = [];
@@ -359,15 +359,15 @@ module.exports = class Task{
                    archive.pipe(output);
                    let globs = [];

                    const sourcePath = !config.test ?
                            this.getProjectFolderPath() :
                            path.join("tests", "processing_results");

                    // Process files and directories first
                    files.forEach(file => {
                        let filePath = path.join(sourcePath, file);

                        // Skip non-existing items
                        if (!fs.existsSync(filePath)) return;
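
The legacy archive path appears to stream entries through the archiver package (archive.pipe(output) above); a rough sketch of how an existing file or directory could be appended, using archiver's documented file/directory helpers and the variable names from this hunk:

// Sketch only: append entries that are known to exist at filePath.
const stat = fs.statSync(filePath);
if (stat.isDirectory()){
    archive.directory(filePath, file);        // add the directory recursively
}else{
    archive.file(filePath, { name: file });   // add a single file under its name
}
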
@@ -415,7 +415,7 @@ module.exports = class Task{
        return (done) => {
            this.runningProcesses.push(
                processRunner.runPostProcessingScript({
                    projectFolderPath: this.getProjectFolderPath()
                }, (err, code, _) => {
                    if (err) done(err);
                    else{
@@ -441,25 +441,28 @@ module.exports = class Task{
            }

            // All paths are relative to the project directory (./data/<uuid>/)
            let allPaths = ['odm_orthophoto/odm_orthophoto.tif',
+                           'odm_orthophoto/odm_orthophoto.tfw',
                            'odm_orthophoto/odm_orthophoto.png',
+                           'odm_orthophoto/odm_orthophoto.wld',
                            'odm_orthophoto/odm_orthophoto.mbtiles',
                            'odm_orthophoto/odm_orthophoto.kmz',
+                           'odm_orthophoto/odm_orthophoto_extent.dxf',
                            'odm_orthophoto/cutline.gpkg',
                            'odm_georeferencing', 'odm_texturing',
                            'odm_dem/dsm.tif', 'odm_dem/dtm.tif', 'dsm_tiles', 'dtm_tiles',
                            'odm_dem/dsm.euclideand.tif', 'odm_dem/dtm.euclideand.tif',
                            'orthophoto_tiles', 'potree_pointcloud', 'entwine_pointcloud',
                            '3d_tiles',
                            'images.json', 'cameras.json',
                            'task_output.txt', 'log.json',
                            'odm_report'];

            // Did the user request different outputs than the default?
            if (this.outputs.length > 0) allPaths = this.outputs;

            let tasks = [];

            if (config.test){
                if (config.testSkipOrthophotos){
                    logger.info("Test mode will skip orthophoto generation");
@@ -469,7 +472,7 @@ module.exports = class Task{
                        allPaths.splice(allPaths.indexOf(dir), 1);
                    });
                }

                if (config.testSkipDems){
                    logger.info("Test mode will skip DEMs generation");
@@ -490,7 +493,7 @@ module.exports = class Task{
                }
            }

            // postprocess.sh is still here for legacy/backward compatibility
            // purposes, but we might remove it in the future. The new logic
            // instructs the processing engine to do the necessary processing
@@ -500,13 +503,13 @@ module.exports = class Task{
            if (os.platform() !== "win32" && !this.skipPostProcessing){
                tasks.push(runPostProcessingScript());
            }

            const taskOutputFile = path.join(this.getProjectFolderPath(), 'task_output.txt');
            tasks.push(saveTaskOutput(taskOutputFile));

            const archiveFunc = config.has7z ? createZipArchive : createZipArchiveLegacy;
            tasks.push(archiveFunc('all.zip', allPaths));

            // Upload to S3 all paths + all.zip file (if config says so)
            if (S3.enabled()){
                tasks.push((done) => {
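
Every entry pushed onto tasks is a function taking a done callback, so the steps are presumably run one after another; a sketch assuming the async package's series helper (the actual runner is outside this diff):

// Sketch only: execute the collected post-processing steps sequentially.
const async = require("async");
async.series(tasks, err => {
    // err is the first failing step's error, or null if every step succeeded
    if (err) logger.warn(`Post-processing failed: ${err.message}`);
});
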
@@ -516,8 +519,8 @@ module.exports = class Task{
                    }else{
                        s3Paths = ['all.zip'];
                    }

                    S3.uploadPaths(this.getProjectFolderPath(), config.s3Bucket, this.uuid, s3Paths,
                        err => {
                            if (!err) this.output.push("Done uploading to S3!");
                            done(err);
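
Only the else branch of the s3Paths selection survives in this hunk; the likely shape of the full conditional, reconstructed as a sketch (the flag name is an assumption, not taken from this diff):

// Sketch only: either upload every output plus the archive, or just all.zip.
let s3Paths;
if (config.s3UploadEverything){               // flag name assumed for illustration
    s3Paths = allPaths.concat(['all.zip']);
}else{
    s3Paths = ['all.zip'];
}
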
@@ -664,14 +667,14 @@ module.exports = class Task{
    getOutput(startFromLine = 0){
        return this.output.slice(startFromLine, this.output.length);
    }

    // Reads the contents of the task's
    // images.json and returns its JSON representation
    readImagesDatabase(callback){
        const imagesDbPath = !config.test ?
                path.join(this.getProjectFolderPath(), 'images.json') :
                path.join('tests', 'processing_results', 'images.json');

        fs.readFile(imagesDbPath, 'utf8', (err, data) => {
            if (err) callback(err);
            else{
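
The else branch is cut off by the hunk boundary; a plausible completion (not taken from this diff) that parses the file and guards against malformed JSON:

// Sketch only: how the read callback could finish.
fs.readFile(imagesDbPath, 'utf8', (err, data) => {
    if (err) callback(err);
    else{
        try{
            callback(null, JSON.parse(data));
        }catch(e){
            callback(new Error(`Cannot parse images database: ${e.message}`));
        }
    }
});
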
@@ -686,7 +689,7 @@ module.exports = class Task{
    }

    callWebhooks(){
        // Hooks can be passed via command line
        // or for each individual task
        const hooks = [this.webhook, config.webhook];
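
The rest of callWebhooks falls outside this diff; a minimal sketch of what the hooks array is presumably used for, with the HTTP client and payload left as assumptions:

// Sketch only: notify every hook that is actually configured.
hooks.filter(hook => !!hook).forEach(hook => {
    fetch(hook, {                                       // global fetch (Node 18+) assumed
        method: "POST",
        headers: {"Content-Type": "application/json"},
        body: JSON.stringify(this.getInfo())            // task info getter assumed
    }).catch(e => logger.warn(`Cannot call hook ${hook}: ${e.message}`));
});
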