2018-11-22 03:22:37 +00:00
|
|
|
/*
|
|
|
|
Node-OpenDroneMap Node.js App and REST API to access OpenDroneMap.
|
|
|
|
Copyright (C) 2016 Node-OpenDroneMap Contributors
|
|
|
|
|
|
|
|
This program is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
This program is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
|
|
|
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
*/
|
|
|
|
"use strict";
|
|
|
|
const async = require('async');
|
|
|
|
const AWS = require('aws-sdk');
|
|
|
|
const fs = require('fs');
|
2018-11-23 18:19:52 +00:00
|
|
|
const glob = require('glob');
|
|
|
|
const path = require('path');
|
|
|
|
const logger = require('./logger');
|
|
|
|
const config = require('../config');
|
|
|
|
|
|
|
|
let s3 = null;
|
2018-11-22 03:22:37 +00:00
|
|
|
|
|
|
|
module.exports = {
|
2018-11-23 18:19:52 +00:00
|
|
|
enabled: function(){
|
|
|
|
return s3 !== null;
|
|
|
|
},
|
|
|
|
|
|
|
|
initialize: function(cb){
|
|
|
|
if (config.s3Endpoint && config.s3Bucket && config.s3AccessKey && config.s3SecretKey){
|
|
|
|
const spacesEndpoint = new AWS.Endpoint(config.s3Endpoint);
|
|
|
|
s3 = new AWS.S3({
|
|
|
|
endpoint: spacesEndpoint,
|
|
|
|
signatureVersion: ('v' + config.s3SignatureVersion) || 'v4',
|
|
|
|
accessKeyId: config.s3AccessKey,
|
2020-06-03 10:42:49 +00:00
|
|
|
secretAccessKey: config.s3SecretKey,
|
|
|
|
s3ForcePathStyle: config.s3ForcePathStyle,
|
2018-11-23 18:19:52 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
// Test connection
|
|
|
|
s3.putObject({
|
|
|
|
Bucket: config.s3Bucket,
|
|
|
|
Key: 'test.txt',
|
|
|
|
Body: ''
|
|
|
|
}, err => {
|
|
|
|
if (!err){
|
|
|
|
logger.info("Connected to S3");
|
|
|
|
cb();
|
|
|
|
}else{
|
|
|
|
cb(new Error("Cannot connect to S3. Check your S3 configuration: " + err.code));
|
|
|
|
}
|
|
|
|
});
|
|
|
|
}else cb();
|
|
|
|
},
|
|
|
|
|
|
|
|
// @param srcFolder {String} folder where to find paths (on local machine)
|
|
|
|
// @param bucket {String} S3 destination bucket
|
|
|
|
// @param dstFolder {String} prefix where to upload files on S3
|
|
|
|
// @param paths [{String}] list of paths relative to srcFolder
|
|
|
|
// @param cb {Function} callback
|
|
|
|
// @param onOutput {Function} (optional) callback when output lines are available
|
|
|
|
    // Uploads a set of local files/directories to an S3 bucket, in parallel,
    // with per-file retry and exponential backoff. Directories are expanded
    // recursively via glob; the relative layout under srcFolder is mirrored
    // under dstFolder on S3. cb is invoked exactly once: with an Error on the
    // first fatal (retries-exhausted) failure, or with no arguments when the
    // queue drains successfully.
    // @param srcFolder {String} folder where to find paths (on local machine)
    // @param bucket {String} S3 destination bucket
    // @param dstFolder {String} prefix where to upload files on S3
    // @param paths [{String}] list of paths relative to srcFolder
    // @param cb {Function} callback
    // @param onOutput {Function} (optional) callback when output lines are available
    uploadPaths: function(srcFolder, bucket, dstFolder, paths, cb, onOutput){
        if (!s3) throw new Error("S3 is not initialized");

        const PARALLEL_UPLOADS = 5; // max concurrent uploads
        const MAX_RETRIES = 6;      // per-file retry budget

        // Worker: streams one local file to S3. On error the file is re-queued
        // after a (2^retries)-second backoff, up to MAX_RETRIES attempts.
        const q = async.queue((file, done) => {
            logger.debug(`Uploading ${file.src} --> ${file.dest}`);
            s3.upload({
                Bucket: bucket,
                Key: file.dest,
                Body: fs.createReadStream(file.src),
                ACL: 'public-read'
            }, {partSize: 5 * 1024 * 1024, queueSize: 1}, err => {
                if (err){
                    logger.debug(err);
                    const msg = `Cannot upload file to S3: ${err.code}, retrying... ${file.retries}`;
                    if (onOutput) onOutput(msg);
                    if (file.retries < MAX_RETRIES){
                        file.retries++;
                        // NOTE(review): done() runs only after the backoff delay,
                        // but the re-push happens in the same tick as done(); if
                        // this was the last in-flight task the queue can drain —
                        // and cb() fire — before the retried upload completes.
                        // Confirm this race is acceptable for callers.
                        setTimeout(() => {
                            q.push(file, errHandler);
                            done();
                        }, (2 ** file.retries) * 1000);
                    }else{
                        done(new Error(msg));
                    }
                }else done();
            });
        }, PARALLEL_UPLOADS);

        // First fatal upload error kills the queue and reports once via cb.
        // (Safe to reference cbCalled here: errHandler is only invoked after
        // the `let cbCalled = false;` declaration below has executed.)
        const errHandler = err => {
            if (err){
                q.kill();
                if (!cbCalled){
                    cbCalled = true;
                    cb(err);
                }
            }
        };

        // Expand requested paths into flat {src, dest, retries} descriptors.
        let uploadList = [];

        paths.forEach(p => {
            const fullPath = path.join(srcFolder, p);

            // Skip non-existing items
            if (!fs.existsSync(fullPath)) return;

            if (fs.lstatSync(fullPath).isDirectory()){
                // Walk the directory; nodir:true yields files only.
                let globPaths = glob.sync(`${p}/**`, { cwd: srcFolder, nodir: true, nosort: true });

                globPaths.forEach(gp => {
                    uploadList.push({
                        src: path.join(srcFolder, gp),
                        dest: path.join(dstFolder, gp),
                        retries: 0
                    });
                });
            }else{
                uploadList.push({
                    src: fullPath,
                    dest: path.join(dstFolder, p),
                    retries: 0
                });
            }
        });

        // Guard so cb fires at most once across drain/error paths.
        let cbCalled = false;
        // All queued uploads finished without a fatal error.
        // NOTE(review): assigning q.drain as a property is the async v2 API;
        // async v3 expects q.drain(callback) — verify the pinned version.
        q.drain = () => {
            if (!cbCalled){
                cbCalled = true;
                cb();
            }
        };

        if (onOutput) onOutput(`Uploading ${uploadList.length} files to S3...`);
        q.push(uploadList, errHandler);
    }
|
|
|
|
};
|