Mirror of https://github.com/manuelkasper/sotlas-api
Initial import
parent 62b7d809bb
commit 2dd4d9c7f5

@@ -1,2 +1,3 @@
# sotlas-api
SOTLAS backend

Backend server for SOTLAS (https://sotl.as), serving summit and activator data to the frontend, handling photo uploads and distributing SOTAwatch spots asynchronously via WebSocket.
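
For a quick check of a running instance, here is a minimal sketch using axios; it assumes the defaults from config.js below (HTTP on 127.0.0.1:8081), and the summit code is only an example — the route and fields come from server.js in this commit:

const axios = require('axios')

// GET /summits/:association/:code is defined in server.js
axios.get('http://127.0.0.1:8081/summits/HB/ZH-015')  // summit code is illustrative
  .then(res => console.log(res.data.name, res.data.altitude))
  .catch(err => console.error(err.response ? err.response.status : err.message))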

@@ -0,0 +1,78 @@
const axios = require('axios');
const moment = require('moment');
const express = require('express');
const config = require('./config');
const db = require('./db');
const summits = require('./summits');

let router = express.Router();
module.exports = router;

router.get('/:callsign', (req, res) => {
  res.cacheControl = {
    noCache: true
  };

  db.getDb().collection('activators').findOne({callsign: req.params.callsign}, (err, activator) => {
    if (err) {
      res.status(500).end();
      return;
    }
    if (!activator) {
      res.status(404).end();
      return;
    }

    axios.get('https://api-db.sota.org.uk/admin/activator_log_by_id', { params: { id: activator.userId, year: 'all' } })
      .then(response => {
        let activations = response.data.map(activation => {
          return {
            id: activation[0],
            date: moment.utc(activation.ActivationDate).toDate(),
            callsignUsed: activation.OwnCallsign,
            qsos: activation.QSOs,
            modeQsos: extractModeQsos(activation),
            bandQsos: extractBandQsos(activation),
            points: activation.Points,
            bonus: activation.BonusPoints,
            summit: {
              code: activation.Summit.substring(0, activation.Summit.indexOf(' '))
            }
          }
        });
        summits.lookupSummits(activations)
          .then(activationsWithSummits => {
            res.json(activationsWithSummits);
          })
      })
      .catch(error => {
        console.error(error);
        if (error.response && error.response.status === 401) {
          res.status(401);
        } else {
          res.status(500);
        }
        res.end();
        return;
      })
  });
});

function extractModeQsos(activation) {
  return {
    'cw': activation.QSOcw,
    'ssb': activation.QSOssb,
    'fm': activation.QSOfm
  }
}

function extractBandQsos(activation) {
  let bands = ['160','80','60','40','30','20','17','15','12','10','6','4','2','70c','23c'];
  let bandQsos = {};
  bands.forEach(band => {
    if (activation['QSO' + band] > 0) {
      bandQsos[band + 'm'] = activation['QSO' + band];
    }
  });
  return bandQsos;
}

@@ -0,0 +1,82 @@
const axios = require('axios');
const express = require('express');
const config = require('./config');
const summits = require('./summits');

let router = express.Router();
module.exports = router;

let alertCache = [];
let lastLoadDate;
let pendingLoad;

router.get('/', (req, res) => {
  res.cacheControl = {
    noCache: true
  };

  loadAlerts(req.query.noCache)
    .then(alerts => {
      res.json(alerts);
    })
    .catch(err => {
      console.error(err);
      res.status(500).end();
    })
});

function loadAlerts(noCache) {
  if (noCache) {
    console.log('load alerts (no cache)');
    return loadAlertsDirect();
  }

  if (lastLoadDate && (new Date() - lastLoadDate) < config.alerts.minUpdateInterval) {
    return Promise.resolve(alertCache);
  }

  if (!pendingLoad) {
    console.log('load alerts (cache)');
    pendingLoad = loadAlertsDirect()
      .then(response => {
        pendingLoad = null;
        return response;
      })
      .catch(err => {
        pendingLoad = null;
        return Promise.reject(err);
      })
  }

  return pendingLoad;
}

function loadAlertsDirect() {
  return axios.get(config.alerts.url)
    .then(response => {
      if (response.status !== 200) {
        console.error(`Got status ${response.status} when loading alerts`);
        return Promise.reject('Cannot load alerts from SOTAwatch');
      }

      let newAlerts = response.data.map(alert => {
        return {
          id: alert.id,
          timeStamp: new Date(alert.timeStamp),
          dateActivated: new Date(alert.dateActivated),
          summit: {code: alert.associationCode + '/' + alert.summitCode},
          activatorCallsign: alert.activatingCallsign.toUpperCase().replace(/[^A-Z0-9\/-]/g, ''),
          posterCallsign: alert.posterCallsign,
          frequency: alert.frequency,
          comments: alert.comments !== '(null)' ? alert.comments : ''
        };
      });

      return summits.lookupSummits(newAlerts)
        .then(alerts => {
          alertCache = alerts;
          lastLoadDate = new Date();
          return alerts;
        });
    })
}

@@ -0,0 +1,81 @@
var config = {};
module.exports = config;

config.http = {
  host: '127.0.0.1',
  port: 8081
};

config.mongodb = {
  url: 'mongodb://sotlas:XXXXXXXX@localhost:27017/sotlas',
  dbName: 'sotlas',
  batchSize: 1000
};

config.sotaspots = {
  initialLoadSpots: -24,
  periodicLoadSpots: 100,
  maxSpotAge: 86400000,
  updateInterval: 30000,
  url: 'https://api2.sota.org.uk/api/spots'
};

config.alerts = {
  minUpdateInterval: 60000,
  url: 'https://api2.sota.org.uk/api/alerts/12'
};

config.rbn = {
  server: {
    host: 'telnet.reversebeacon.net',
    port: 7000
  },
  login: "HB9DQM-3",
  timeout: 180000,
  maxSpotHistory: 1000
};

config.geoip = {
  path: 'GeoLite2-City.mmdb'
};

config.summitListUrl = 'https://www.sotadata.org.uk/summitslist.csv';

config.sotatrailsUrl = 'https://sotatrails.ch/api.php';

config.photos = {
  paths: {
    original: '/data/images/photos/original',
    thumb: '/data/images/photos/thumb',
    large: '/data/images/photos/large'
  },
  sizes: {
    large: {
      width: 1600,
      height: 1600
    },
    thumb: {
      width: 512,
      height: 256
    }
  },
  uploadPath: '/data/upload/photos'
};

config.tracks = {
  paths: {
    original: '/data/tracks/original',
    simple: '/data/tracks/simple'
  },
  tolerance: 0.00001,
  uploadPath: '/data/upload/tracks'
};

config.sso = {
  jwksUri: 'https://sso.sota.org.uk/auth/realms/SOTA/protocol/openid-connect/certs'
};

config.mail = {
  host: "neon1.net",
  port: 587
};

@@ -0,0 +1,28 @@
const MongoClient = require('mongodb').MongoClient;
const config = require('./config');
const assert = require('assert');

let db = null
let client
const connectPromise = new Promise((resolve, reject) => {
  client = new MongoClient(config.mongodb.url, {useUnifiedTopology: true})
  client.connect(function (err) {
    assert.equal(null, err)
    db = client.db(config.mongodb.dbName)
    resolve()
  });
});

exports.getDb = function() {
  return db
}

exports.waitDb = function(callback) {
  connectPromise.then(callback)
}

exports.closeDb = function() {
  if (client) {
    client.close()
  }
}
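
The rest of the code base goes through this small wrapper for database access: wait for the connection, then use the shared handle. A minimal usage sketch, mirroring the import scripts further down in this commit (the summit code is illustrative):

const db = require('./db')

db.waitDb(() => {
  // getDb() is only safe to call once waitDb()'s callback has fired
  db.getDb().collection('summits').findOne({ code: 'HB/ZH-015' }, (err, summit) => {
    if (err) throw err
    console.log(summit)
    db.closeDb()
  })
})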

@@ -0,0 +1,289 @@
const axios = require('axios');
const moment = require('moment');
const express = require('express');
const config = require('./config');
const db = require('./db');

let router = express.Router();
module.exports = router;

router.get('/associations/:association.gpx', (req, res) => {
  res.cacheControl = {
    noCache: true
  };

  res.set('Content-Type', 'application/gpx+xml');
  res.set('Content-Disposition', 'attachment; filename="' + req.params.association + '.gpx"');
  gpxForQuery('^' + req.params.association + '/', `SOTA Association ${req.params.association}`, req.query, (err, gpx) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    res.send(gpx).end();
  });
});

router.get('/associations/:association.kml', (req, res) => {
  res.cacheControl = {
    noCache: true
  };

  res.set('Content-Type', 'application/vnd.google-earth.kml+xml');
  res.set('Content-Disposition', 'attachment; filename="' + req.params.association + '.kml"');
  kmlForAssociation(req.params.association, req.query, (err, kml) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    if (!kml) {
      res.status(404).end();
      return;
    }

    res.send(kml).end();
  });
});

router.get('/regions/:association/:region.gpx', (req, res) => {
  res.cacheControl = {
    noCache: true
  };

  res.set('Content-Type', 'application/gpx+xml');
  res.set('Content-Disposition', 'attachment; filename="' + req.params.association + '_' + req.params.region + '.gpx"');
  gpxForQuery('^' + req.params.association + '/' + req.params.region + '-', `SOTA Region ${req.params.association + '/' + req.params.region}`, req.query, (err, gpx) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    res.send(gpx).end();
  });
});

router.get('/regions/:association/:region.kml', (req, res) => {
  res.cacheControl = {
    noCache: true
  };

  res.set('Content-Type', 'application/vnd.google-earth.kml+xml');
  res.set('Content-Disposition', 'attachment; filename="' + req.params.association + '_' + req.params.region + '.kml"');
  kmlForRegion(req.params.association, req.params.region, req.query, (err, kml) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    if (!kml) {
      res.status(404).end();
      return;
    }

    res.send(kml).end();
  });
});


function gpxForQuery(query, name, options, callback) {
  let filter = {code: {$regex: query}};
  if (!options.inactive) {
    filter.validFrom = {$lte: new Date()};
    filter.validTo = {$gte: new Date()};
  }
  db.getDb().collection('summits').find(filter).sort({code: 1}).toArray((err, summits) => {
    if (err) {
      callback(err);
      return;
    }

    let minlat, minlon, maxlat, maxlon;
    summits.forEach(summit => {
      if (!minlat || summit.coordinates.latitude < minlat) {
        minlat = summit.coordinates.latitude;
      }
      if (!minlon || summit.coordinates.longitude < minlon) {
        minlon = summit.coordinates.longitude;
      }
      if (!maxlat || summit.coordinates.latitude > maxlat) {
        maxlat = summit.coordinates.latitude;
      }
      if (!maxlon || summit.coordinates.longitude > maxlon) {
        maxlon = summit.coordinates.longitude;
      }
    });

    let now = moment.utc();
    let gpx = `<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<gpx version="1.1"
  creator="SOTLAS"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xmlns="http://www.topografix.com/GPX/1/1"
  xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd">
  <metadata>
    <name>${name}</name>
    <author>
      <name>SOTLAS</name>
    </author>
    <link href="https://sotl.as">
      <text>SOTLAS</text>
    </link>
    <time>${now.toISOString()}</time>
    <bounds minlat="${minlat}" minlon="${minlon}" maxlat="${maxlat}" maxlon="${maxlon}"/>
  </metadata>
`;

    summits.forEach(summit => {
      gpx += `  <wpt lat="${summit.coordinates.latitude}" lon="${summit.coordinates.longitude}">
    <ele>${summit.altitude}</ele>
    <name><![CDATA[${summitName(summit, options)}]]></name>
    <cmt><![CDATA[${summit.name}]]></cmt>
    <sym>SOTA${('0' + summit.points).substr(-2)}</sym>
    <type>Summit</type>
  </wpt>
`;
    });

    gpx += "</gpx>";
    callback(null, gpx);
  });
}

function kmlForAssociation(associationCode, options, callback) {
  db.getDb().collection('associations').findOne({code: associationCode}, (err, association) => {
    if (err) {
      callback(err);
      return;
    }

    if (!association) {
      callback(null, null);
      return;
    }

    let filter = {code: {$regex: "^" + association.code + "/"}};
    if (!options.inactive) {
      filter.validFrom = {$lte: new Date()};
      filter.validTo = {$gte: new Date()};
    }
    db.getDb().collection('summits').find(filter).sort({code: 1}).toArray((err, summits) => {
      let now = moment.utc();
      let kmlName = 'SOTA Association ' + association.code + ' - ' + association.name;
      let kml = `<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2" xmlns:atom="http://www.w3.org/2005/Atom">
  <Document>
    <atom:author><![CDATA[SOTLAS]]></atom:author>
    <atom:link href="https://sotl.as/summits/${association.code}"/>
    <name><![CDATA[${kmlName}]]></name>
    <TimeStamp>
      <when>${now.toISOString()}</when>
    </TimeStamp>
`;

      association.regions.forEach(region => {
        kml += `    <Folder>
      <name><![CDATA[${association.code}/${region.code} - ${region.name}]]></name>
      <atom:link href="https://sotl.as/summits/${association.code}/${region.code}"/>
`;

        summits.filter(summit => {return summit.code.startsWith(association.code + '/' + region.code)}).forEach(summit => {
          kml += kmlForSummit(summit, options);
        });

        kml += `    </Folder>
`;
      });

      kml += `  </Document>
</kml>
`;

      callback(null, kml);
    });
  });
}

function kmlForRegion(associationCode, regionCode, options, callback) {
  db.getDb().collection('associations').findOne({code: associationCode}, (err, association) => {
    if (err) {
      callback(err);
      return;
    }

    if (!association) {
      callback(null, null);
      return;
    }

    let filter = {code: {$regex: "^" + association.code + "/" + regionCode + '-'}};
    if (!options.inactive) {
      filter.validFrom = {$lte: new Date()};
      filter.validTo = {$gte: new Date()};
    }
    db.getDb().collection('summits').find(filter).sort({code: 1}).toArray((err, summits) => {
      let now = moment.utc();
      let kmlName = 'SOTA Region ' + associationCode + '/' + regionCode;
      let kml = `<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2" xmlns:atom="http://www.w3.org/2005/Atom">
  <Document>
    <atom:author><![CDATA[SOTLAS]]></atom:author>
    <TimeStamp>
      <when>${now.toISOString()}</when>
    </TimeStamp>
`;

      association.regions.forEach(region => {
        if (regionCode && region.code !== regionCode) {
          return;
        }
        kml += `    <name>SOTA Region <![CDATA[${association.code}/${region.code} - ${region.name}]]></name>
    <atom:link href="https://sotl.as/summits/${association.code}/${region.code}"/>
`;

        summits.filter(summit => {return summit.code.startsWith(association.code + '/' + region.code)}).forEach(summit => {
          kml += kmlForSummit(summit, options);
        });
      });

      kml += `  </Document>
</kml>
`;

      callback(null, kml);
    });
  });
}

function summitName(summit, options) {
  let name = summit.code;
  let nameopts = [];
  if (options.nameopts) {
    nameopts = options.nameopts.split(',')
  }
  if (nameopts.includes('name')) {
    name += ' - ' + summit.name;
  }
  if (nameopts.includes('altitude')) {
    name += ', ' + summit.altitude + 'm';
  }
  if (nameopts.includes('points')) {
    name += ', ' + summit.points + 'pt';
  }
  return name;
}

function kmlForSummit(summit, options) {
  return `      <Placemark id="${summit.code}">
        <name><![CDATA[${summitName(summit, options)}]]></name>
        <description><![CDATA[${summit.name}, ${summit.altitude}m, ${summit.points}pt]]></description>
        <Point>
          <coordinates>${summit.coordinates.longitude},${summit.coordinates.latitude},${summit.altitude}</coordinates>
        </Point>
      </Placemark>
`;
}

@@ -0,0 +1,66 @@
const KEY_DECOMPRESSION_MAP = {
  a: 'altitude',
  ac: 'activatorCallsign',
  ao: 'activationCount',
  c: 'comments',
  d: 'code',
  e: 'speed',
  f: 'frequency',
  hc: 'homeCallsign',
  i: 'isActivator',
  ic: 'isoCode',
  l: 'callsign',
  m: 'mode',
  n: 'name',
  o: 'continent',
  p: 'points',
  s: 'summit',
  t: 'spotter',
  ts: 'timeStamp'
}

let KEY_COMPRESSION_MAP = null

function compressKeys (obj) {
  // Lazy init
  if (KEY_COMPRESSION_MAP === null) {
    KEY_COMPRESSION_MAP = {}
    Object.keys(KEY_DECOMPRESSION_MAP).forEach(key => {
      KEY_COMPRESSION_MAP[KEY_DECOMPRESSION_MAP[key]] = key
    })
  }

  return mapKeys(obj, KEY_COMPRESSION_MAP)
}

function decompressKeys (obj) {
  return mapKeys(obj, KEY_DECOMPRESSION_MAP)
}

function mapKeys (obj, map) {
  if (obj === null) {
    return null
  } else if (Array.isArray(obj)) {
    return obj.map(el => {
      return mapKeys(el, map)
    })
  } else if (typeof obj === 'object' && !(obj instanceof Date)) {
    let ret = {}
    Object.keys(obj).forEach(key => {
      let val = mapKeys(obj[key], map)

      if (map[key]) {
        ret[map[key]] = val
      } else {
        ret[key] = val
      }
    })
    return ret
  } else {
    return obj
  }
}

module.exports = {
  compressKeys, decompressKeys
}
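
This module shortens the JSON keys of objects sent over the WebSocket. A minimal round-trip sketch; the require path is an assumption, since the file name is not shown in this diff:

const { compressKeys, decompressKeys } = require('./spot-compression')  // path assumed

const spot = {
  callsign: 'HB9DQM/P',
  frequency: 14.062,
  summit: { code: 'HB/ZH-015', points: 1 }
}

const wire = compressKeys(spot)
// => { l: 'HB9DQM/P', f: 14.062, s: { d: 'HB/ZH-015', p: 1 } }

const restored = decompressKeys(wire)
// restored carries the original long key names again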

(One file's diff is too large to display.)

@@ -0,0 +1,39 @@
{
  "name": "sotlas-api",
  "version": "1.0.0",
  "description": "",
  "main": "server.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "@turf/simplify": "^5.1.5",
    "axios": "^0.19.0",
    "carrier": "^0.3.0",
    "csv-parse": "^4.6.3",
    "diacritics": "^1.3.0",
    "exif-parser": "^0.1.12",
    "exif-reader": "^1.0.3",
    "express": "^4.17.1",
    "express-bearer-token": "^2.4.0",
    "express-cache-controller": "^1.1.0",
    "express-jwt": "^5.3.1",
    "express-ws": "^4.0.0",
    "hasha": "^5.1.0",
    "htmlparser2": "^3.10.1",
    "jwks-rsa": "^1.6.0",
    "maxmind": "^3.1.2",
    "moment": "^2.24.0",
    "mongodb": "^3.5.6",
    "multer": "^1.4.2",
    "nodemailer": "^6.4.6",
    "reconnect-net": "^1.1.1",
    "request": "^2.88.0",
    "sharp": "^0.23.4",
    "togeojson": "^0.16.0",
    "togpx": "^0.5.4",
    "treemap-js": "^1.2.1"
  }
}

@@ -0,0 +1,108 @@
const sharp = require('sharp')
const crypto = require('crypto')
const fs = require('fs')
const fsPromises = require('fs').promises
const exif = require('exif-reader')
const path = require('path')
const hasha = require('hasha')
const config = require('./config')
const db = require('./db')

module.exports = {
  importPhoto: async function(filename, author) {
    // Hash input file to determine filename
    let hash = await hasha.fromFile(filename, {algorithm: 'sha256'})
    let hashFilename = hash.substr(0, 32) + '.jpg'
    let originalPath = config.photos.paths.original + '/' + hashFilename.substr(0, 2) + '/' + hashFilename
    await fsPromises.mkdir(path.dirname(originalPath), {recursive: true})

    let metadata = await getMetadata(filename)
    if (metadata.format !== 'jpeg' && metadata.format != 'png' && metadata.format != 'heif') {
      throw new Error('Bad input format, must be JPEG, PNG or HEIF')
    }

    await fsPromises.copyFile(filename, originalPath)

    let photo = {
      filename: hashFilename,
      width: Math.round(metadata.width),
      height: Math.round(metadata.height),
      author,
      uploadDate: new Date()
    }

    if (metadata.orientation && metadata.orientation >= 5) {
      // Swap width/height
      let tmp = photo.width
      photo.width = photo.height
      photo.height = tmp
    }

    if (metadata.exif) {
      let exifParsed = exif(metadata.exif)
      if (exifParsed) {
        if (exifParsed.gps && exifParsed.gps.GPSLatitude && exifParsed.gps.GPSLongitude &&
            (!exifParsed.gps.GPSStatus || exifParsed.gps.GPSStatus === 'A') &&
            !isNaN(exifParsed.gps.GPSLatitude[0]) && !isNaN(exifParsed.gps.GPSLongitude[0])) {
          photo.coordinates = {}
          photo.coordinates.latitude = exifParsed.gps.GPSLatitude[0] + exifParsed.gps.GPSLatitude[1]/60 + exifParsed.gps.GPSLatitude[2]/3600
          if (exifParsed.gps.GPSLatitudeRef === 'S') {
            photo.coordinates.latitude = -photo.coordinates.latitude
          }
          photo.coordinates.longitude = exifParsed.gps.GPSLongitude[0] + exifParsed.gps.GPSLongitude[1]/60 + exifParsed.gps.GPSLongitude[2]/3600
          if (exifParsed.gps.GPSLongitudeRef === 'W') {
            photo.coordinates.longitude = -photo.coordinates.longitude
          }

          if (exifParsed.gps.GPSImgDirection && exifParsed.gps.GPSImgDirection >= 0 && exifParsed.gps.GPSImgDirection < 360) {
            photo.direction = Math.round(exifParsed.gps.GPSImgDirection)
          }

          if (exifParsed.gps.GPSHPositioningError) {
            photo.positioningError = Math.round(exifParsed.gps.GPSHPositioningError)
          }
        }

        if (exifParsed.image && exifParsed.image.Make && exifParsed.image.Model) {
          photo.camera = exifParsed.image.Make + ' ' + exifParsed.image.Model
        }

        if (exifParsed.exif) {
          if (exifParsed.exif.DateTimeDigitized) {
            photo.date = exifParsed.exif.DateTimeDigitized
          } else if (exifParsed.exif.DateTimeOriginal) {
            photo.date = exifParsed.exif.DateTimeOriginal
          }
        }
      }
    }

    let mkdirTasks = []
    let resizeTasks = []
    Object.keys(config.photos.sizes).forEach(sizeDescr => {
      let outPath = config.photos.paths[sizeDescr] + '/' + hashFilename.substr(0, 2) + '/' + hashFilename
      mkdirTasks.push(fsPromises.mkdir(path.dirname(outPath), {recursive: true}))
      resizeTasks.push(makeResized(originalPath, outPath, config.photos.sizes[sizeDescr].width, config.photos.sizes[sizeDescr].height))
    })

    await Promise.all(mkdirTasks)
    await Promise.all(resizeTasks)

    db.getDb().collection('uploads').insertOne({
      uploadDate: new Date(),
      type: 'photo',
      filename: hashFilename,
      author
    })

    return photo
  }
}

function getMetadata(src) {
  return sharp(src).metadata()
}

function makeResized(src, dst, maxWidth, maxHeight) {
  return sharp(src).rotate().resize({ height: maxHeight, width: maxWidth, fit: 'inside' }).toFile(dst)
}

@@ -0,0 +1,216 @@
const express = require('express')
const multer = require('multer')
const config = require('./config')
const photos = require('./photos')
const jwt = require('express-jwt')
const jwksRsa = require('jwks-rsa')
const nodemailer = require('nodemailer')
const db = require('./db')

let upload = multer({dest: config.photos.uploadPath})

let router = express.Router()
module.exports = router

let jwtCallback = jwt({
  secret: jwksRsa.expressJwtSecret({
    cache: true,
    rateLimit: true,
    jwksRequestsPerMinute: 5,
    jwksUri: config.sso.jwksUri
  })
})

router.post('/summits/:association/:code/upload', jwtCallback, upload.array('photo'), async (req, res) => {
  try {
    res.cacheControl = {
      noCache: true
    }

    if (!req.user.callsign) {
      res.status(401).send('Missing callsign in SSO token').end()
      return
    }

    let summitCode = req.params.association + '/' + req.params.code
    let summit = await db.getDb().collection('summits').findOne({code: summitCode})
    if (!summit) {
      res.status(404).end()
      return
    }

    if (req.files) {
      let dbPhotos = []
      for (let file of req.files) {
        let photo = await photos.importPhoto(file.path, req.user.callsign)
        dbPhotos.push(photo)
      }

      // Check for duplicates
      if (summit.photos) {
        dbPhotos = dbPhotos.filter(photo => !summit.photos.some(summitPhoto => summitPhoto.filename === photo.filename ))
      }

      if (dbPhotos.length > 0) {
        await db.getDb().collection('summits').updateOne({code: summitCode}, { $push: { photos: { $each: dbPhotos } } })

        let transporter = nodemailer.createTransport(config.mail)
        transporter.sendMail({
          from: 'api@sotl.as',
          to: 'mk@neon1.net',
          subject: 'New photos added to summit ' + summitCode + ' by ' + req.user.callsign,
          text: `${dbPhotos.length} new photos have been added. https://sotl.as/summits/${summitCode}\n`,
          attachments: dbPhotos.map(photo => {
            return {
              filename: photo.filename,
              path: config.photos.paths.thumb + '/' + photo.filename.substr(0, 2) + '/' + photo.filename
            }
          })
        })
      }

      res.json(dbPhotos)
    } else {
      res.status(400).end()
    }
  } catch (err) {
    console.error(err)
    res.status(500).end()
  }
})

router.delete('/summits/:association/:code/:filename', jwtCallback, async (req, res) => {
  res.cacheControl = {
    noCache: true
  }

  if (!req.user.callsign) {
    res.status(401).send('Missing callsign in SSO token').end()
    return
  }

  let summitCode = req.params.association + '/' + req.params.code
  let summit = await db.getDb().collection('summits').findOne({code: summitCode})
  let photo = summit.photos.find(photo => photo.filename === req.params.filename)
  if (!photo) {
    res.status(404).end()
    return
  }

  // Check that uploader is currently logged in user
  if (photo.author !== req.user.callsign) {
    res.status(401).send('Cannot delete another user\'s photos').end()
    return
  }

  await db.getDb().collection('summits').updateOne({code: summitCode}, { $pull: { photos: { filename: req.params.filename } } })

  res.status(204).end()
})

router.post('/summits/:association/:code/reorder', jwtCallback, async (req, res) => {
  res.cacheControl = {
    noCache: true
  }

  if (!req.user.callsign) {
    res.status(401).send('Missing callsign in SSO token').end()
    return
  }

  let summitCode = req.params.association + '/' + req.params.code

  // Assign new sortOrder index to photos of this user, in the order given by req.body.filenames
  let updates = req.body.filenames.map((filename, index) => {
    return db.getDb().collection('summits').updateOne(
      { code: summitCode, 'photos.author': req.user.callsign, 'photos.filename': filename },
      { $set: { 'photos.$.sortOrder': index + 1 } }
    )
  })

  await Promise.all(updates)

  res.status(204).end()
})

router.post('/summits/:association/:code/:filename', jwtCallback, async (req, res) => {
  res.cacheControl = {
    noCache: true
  }

  if (!req.user.callsign) {
    res.status(401).send('Missing callsign in SSO token').end()
    return
  }

  let summitCode = req.params.association + '/' + req.params.code
  let summit = await db.getDb().collection('summits').findOne({code: summitCode})
  let photo = summit.photos.find(photo => photo.filename === req.params.filename)
  if (!photo) {
    res.status(404).end()
    return
  }

  // Check that editor is the currently logged in user
  if (photo.author !== req.user.callsign) {
    res.status(401).send('Cannot delete another user\'s photos').end()
    return
  }

  let update = {
    $set: {},
    $unset: {}
  }

  if (req.body.title) {
    update.$set['photos.$.title'] = req.body.title
  } else {
    update.$unset['photos.$.title'] = ''
  }

  if (req.body.date) {
    update.$set['photos.$.date'] = new Date(req.body.date)
  } else {
    update.$unset['photos.$.date'] = ''
  }

  if (req.body.coordinates) {
    update.$set['photos.$.coordinates'] = req.body.coordinates
    update.$set['photos.$.positioningError'] = req.body.positioningError
  } else {
    update.$unset['photos.$.coordinates'] = ''
    update.$unset['photos.$.positioningError'] = ''
  }

  if (req.body.direction !== null && req.body.direction !== undefined && req.body.direction !== '') {
    update.$set['photos.$.direction'] = req.body.direction
  } else {
    update.$unset['photos.$.direction'] = ''
  }

  if (req.body.isCover) {
    update.$set['photos.$.isCover'] = true

    // Only one photo can be the cover photo, so unmark all others first
    await db.getDb().collection('summits').updateOne(
      { code: summitCode },
      { $unset: { 'photos.$[].isCover': '' } }
    )
  } else {
    update.$unset['photos.$.isCover'] = ''
  }

  if (Object.keys(update.$set).length === 0) {
    delete update.$set
  }
  if (Object.keys(update.$unset).length === 0) {
    delete update.$unset
  }

  await db.getDb().collection('summits').updateOne(
    { code: summitCode, 'photos.filename': req.params.filename },
    update
  )

  res.status(204).end()
})

@@ -0,0 +1,158 @@
const reconnect = require('reconnect-net');
const wsManager = require('./ws-manager');
const carrier = require('carrier');
const config = require('./config');
const db = require('./db');
const utils = require('./utils');

const rbnSpotRegex = /^DX de (\S+):\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+dB\s+(\S+)\s+\S+\s+(CQ|DX)\s+(\d+)Z$/;

class RbnReceiver {
  start() {
    this.restartConnection();

    wsManager.on('message', (ws, message) => {
      if (message.rbnFilter !== undefined) {
        console.log("Set RBN filter to " + JSON.stringify(message.rbnFilter));
        ws.rbnFilter = message.rbnFilter;

        this.sendSpotHistory(ws)
      }
    });
  }

  restartConnection() {
    if (this.re)
      this.re.disconnect();

    this.resetTimer();
    this.re = reconnect((stream) => {
      console.log("Connected to RBN");
      stream.write(config.rbn.login + "\r\n");
      if (config.rbn.server.commands) {
        config.rbn.server.commands.forEach(command => {
          stream.write(command + "\r\n");
        });
      }

      carrier.carry(stream, (line) => {
        this.handleLine(line);
      });
    });

    this.re.on('error', (err) => {
      console.error(`RBN connection error: ${err}`);
    });

    this.re.connect(config.rbn.server);
  }

  resetTimer() {
    if (this.timeout) {
      clearTimeout(this.timeout);
    }

    this.timeout = setTimeout(() => {
      console.error("RBN: timeout, reconnecting");
      this.restartConnection();
    }, config.rbn.timeout);
  }

  handleLine(line) {
    this.resetTimer();
    let matches = rbnSpotRegex.exec(line);
    if (matches) {
      let spot = {
        timeStamp: new Date(),
        callsign: matches[3],
        homeCallsign: this.homeCallsign(matches[3]),
        spotter: matches[1].replace("-#", ""),
        frequency: parseFloat((matches[2]/1000).toFixed(4)),
        mode: matches[4],
        snr: parseInt(matches[5]),
        speed: parseInt(matches[6])
      };

      // Check if this is a known SOTA activator
      db.getDb().collection('activators').countDocuments({callsign: { $in: utils.makeCallsignVariations(spot.homeCallsign) }}, (error, result) => {
        if (result > 0) {
          spot.isActivator = true;
        }
        db.getDb().collection('rbnspots').insertOne(spot, (error, result) => {
          // _id has now been added, but not in our preferred format
          spot._id = spot._id.toHexString()
          wsManager.broadcast({'rbnSpot': spot}, (ws) => {
            if (!ws.rbnFilter) {
              return false;
            }

            if (ws.rbnFilter.homeCallsign && ws.rbnFilter.homeCallsign.includes(spot.homeCallsign)) {
              return true;
            }

            if (ws.rbnFilter.isActivator && spot.isActivator) {
              return true;
            }

            return false;
          });
        });
      });
    }
  }

  sendSpotHistory(ws) {
    // Send the spot history for the currently defined RBN filter
    if (!ws.rbnFilter.homeCallsign && !ws.rbnFilter.isActivator) {
      return;
    }

    let query = {};

    if (ws.rbnFilter.homeCallsign) {
      query.homeCallsign = ws.rbnFilter.homeCallsign;
    }
    if (ws.rbnFilter.isActivator) {
      query.isActivator = true;
    }

    let maxAge = parseInt(ws.rbnFilter.maxAge) || 3600000;
    query.timeStamp = {$gte: new Date(new Date().getTime() - maxAge)};

    db.getDb().collection('rbnspots').find(query).sort({timeStamp: -1}).limit(config.rbn.maxSpotHistory).toArray((err, rbnSpots) => {
      if (err) {
        console.error(err);
        return;
      }

      rbnSpots.forEach(spot => {
        spot._id = spot._id.toHexString();
      });

      let response = {rbnSpotHistory: rbnSpots};
      if (ws.rbnFilter.viewId) {
        response.viewId = ws.rbnFilter.viewId;
      }
      wsManager.unicast(response, ws);
    });
  }

  homeCallsign(callsign) {
    let parts = callsign.split('/');
    let longestPart = '';
    parts.forEach(part => {
      if (part.length > longestPart.length) {
        longestPart = part;
      }
    })

    // For UK callsigns, normalize them all to 2E/G/M for the sake of comparison
    let matches = longestPart.match(/^(2[DEIJMUW]|G[DIJMUW]?|M[DIJMUW]?)(\d[A-Z]{2,3})$/)
    if (matches) {
      longestPart = matches[1].replace(/^2./, '2E').replace(/^G[DIJMUW]/, 'G').replace(/^M[DIJMUW]/, 'M') + matches[2]
    }
    return longestPart;
  }
}

module.exports = RbnReceiver;
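
Clients only receive RBN spots after sending an rbnFilter message over the WebSocket (mounted at /ws in server.js below). A minimal client-side sketch using the ws package; the host is a placeholder and plain-JSON framing is an assumption, since ws-manager.js is not part of this diff:

const WebSocket = require('ws')

const socket = new WebSocket('wss://example.invalid/ws')  // replace with the real host
socket.on('open', () => {
  // Fields mirror what handleLine()/sendSpotHistory() read from ws.rbnFilter
  socket.send(JSON.stringify({
    rbnFilter: {
      homeCallsign: ['HB9DQM'],  // checked with .includes() against spot.homeCallsign
      isActivator: true,         // also receive spots from known SOTA activators
      maxAge: 1800000,           // history window in ms (defaults to 3600000)
      viewId: 'rbn-view-1'       // echoed back with the rbnSpotHistory response
    }
  }))
})
socket.on('message', data => console.log(data.toString()))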

@@ -0,0 +1,266 @@
const express = require('express');
const config = require('./config');
const assert = require('assert');
const app = express();
const expressWs = require('express-ws')(app);
const cacheControl = require('express-cache-controller');
const bearerToken = require('express-bearer-token');
const wsManager = require('./ws-manager');
const SotaSpotReceiver = require('./sotaspots');
const RbnReceiver = require('./rbn');
const db = require('./db');
const alerts = require('./alerts');
const geoexport = require('./geoexport');
const activations = require('./activations');
const utils = require('./utils');
const photos_router = require('./photos_router')
const tracks_router = require('./tracks_router')
const maxmind = require('maxmind');

let dbChecker = (req, res, next) => {
  if (db.getDb() == null) {
    console.error('DB error');
    res.status(500).end();
    return;
  }
  next();
};
app.enable('trust proxy');
app.use(express.json());
app.use(dbChecker);
app.use(cacheControl({
  maxAge: 3600
}));
app.use(bearerToken());
app.use('/ws', wsManager.router);
app.use('/alerts', alerts);
app.use('/geoexport', geoexport);
app.use('/activations', activations);
app.use('/photos', photos_router);
app.use('/tracks', tracks_router);

let sotaSpotReceiver = new SotaSpotReceiver();
let rbnReceiver = new RbnReceiver();
rbnReceiver.start();

app.get('/summits/search', (req, res) => {
  let limit = 100;
  if (req.query.limit) {
    let limitOverride = parseInt(req.query.limit);
    if (limitOverride > 0 && limitOverride < limit) {
      limit = limitOverride;
    }
  }
  db.getDb().collection('summits').find({$or: [{code: {'$regex': req.query.q, '$options': 'i'}}, {name: {'$regex': req.query.q, '$options': 'i'}}, {nameNd: {'$regex': req.query.q, '$options': 'i'}}]}, {projection: {'_id': false, 'photos': false, 'routes': false, 'links': false, 'resources': false}}).limit(limit).toArray((err, summits) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    res.json(summits);
  });
});

app.get('/summits/near', (req, res) => {
  let limit = 100;
  if (req.query.limit) {
    let limitOverride = parseInt(req.query.limit);
    if (limitOverride > 0 && limitOverride < limit) {
      limit = limitOverride;
    }
  }
  let query = {
    coordinates: {$near: {$geometry: {type: "Point", coordinates: [parseFloat(req.query.lon), parseFloat(req.query.lat)]}}}
  };
  if (req.query.maxDistance) {
    query.coordinates.$near.$maxDistance = parseFloat(req.query.maxDistance);
  }
  if (!req.query.inactive) {
    query.validFrom = {$lte: new Date()};
    query.validTo = {$gte: new Date()};
  }
  db.getDb().collection('summits').find(query, {projection: {'_id': false, 'photos': false, 'routes': false, 'links': false, 'resources': false}}).limit(limit).toArray((err, summits) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    res.json(summits);
  });
});

app.get('/summits/:association/:code', (req, res) => {
  db.getDb().collection('summits').findOne({code: req.params.association + '/' + req.params.code}, {projection: {'_id': false}}, (err, summit) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    if (!summit) {
      res.status(404).end();
      return;
    }

    let associationCode = summit.code.substring(0, summit.code.indexOf('/'));
    db.getDb().collection('associations').findOne({code: associationCode}, (err, association) => {
      if (association) {
        summit.isoCode = association.isoCode;
        summit.continent = association.continent;
      }
      res.json(summit);
    });
  });
});

// Dummy POST endpoint to help browser invalidate cache after uploading photos
app.post('/summits/:association/:code', (req, res) => {
  res.cacheControl = {
    noCache: true
  };

  res.status(204).end();
});

app.get('/associations/all', (req, res) => {
  db.getDb().collection('associations').find({}, {projection: {'_id': false}}).toArray((err, associations) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    res.json(associations);
  });
});

app.get('/associations/:association', (req, res) => {
  db.getDb().collection('associations').findOne({code: req.params.association}, {projection: {'_id': false}}, (err, association) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    if (!association) {
      res.status(404).end();
      return;
    }

    res.json(association);
  });
});

app.get('/regions/:association/:region', (req, res) => {
  let region = req.params.association + '/' + req.params.region;
  if (!region.match(/^[A-Z0-9]+\/[A-Z0-9]+$/)) {
    res.status(400).end();
    return;
  }
  db.getDb().collection('summits').find({code: {'$regex': '^' + region}}, {projection: {'_id': false, 'routes': false, 'links': false, 'resources': false}}).toArray((err, summits) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    summits.forEach(summit => {
      if (summit.photos && summit.photos.length > 0) {
        summit.hasPhotos = true;
      }
      delete summit.photos;
    });

    res.json(summits);
  });
});

app.get('/activators/search', (req, res) => {
  let skip = 0;
  if (req.query.skip) {
    skip = parseInt(req.query.skip);
  }
  let limit = 100;
  if (req.query.limit) {
    if (parseInt(req.query.limit) <= limit) {
      limit = parseInt(req.query.limit);
    }
  }
  let sortField = 'score';
  let sortDirection = -1;
  if (req.query.sort === 'callsign' || req.query.sort === 'points' || req.query.sort === 'bonusPoints' || req.query.sort === 'score' || req.query.sort === 'summits' || req.query.sort === 'avgPoints') {
    sortField = req.query.sort;
  }
  if (req.query.sortDirection == 'desc') {
    sortDirection = -1;
  } else {
    sortDirection = 1;
  }
  let sort = {};
  sort[sortField] = sortDirection;
  let query = {};
  if (req.query.q !== undefined && req.query.q !== '') {
    query = {callsign: {'$regex': req.query.q, '$options': 'i'}};
  }
  let cursor = db.getDb().collection('activators').find(query, {projection: {'_id': false}}).sort(sort);
  cursor.count((err, count) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }

    cursor.skip(skip).limit(limit).toArray((err, activators) => {
      res.json({activators, total: count});
    });
  });
});

app.get('/activators/:callsign', (req, res) => {
  db.getDb().collection('activators').findOne({callsign: req.params.callsign}, {projection: {'_id': false}}, (err, activator) => {
    if (err) {
      console.error(err);
      res.status(500).end();
      return;
    }
    if (!activator) {
      // Try alternative variations
      db.getDb().collection('activators').findOne({callsign: { $in: utils.makeCallsignVariations(req.params.callsign) }}, {projection: {'_id': false}}, (err, activator) => {
        if (activator) {
          res.json(activator);
        } else {
          res.status(404).end();
        }
      });
      return;
    }

    res.json(activator);
  });
});

let geoLookup;
maxmind.open(config.geoip.path).then((lookup) => {
  geoLookup = lookup;
});
app.get('/map_server', (req, res) => {
  let mapServer = 'us';
  let geo = geoLookup.get(req.ip);
  if (geo.continent.code === 'AF' || geo.continent.code === 'EU') {
    mapServer = 'eu';
  }
  res.json({mapServer});
});

app.get('/my_coordinates', (req, res) => {
  let geo = geoLookup.get(req.ip);
  if (!geo) {
    res.json({});
  } else {
    res.json({latitude: geo.location.latitude, longitude: geo.location.longitude});
  }
});

app.listen(config.http.port, config.http.host);

@@ -0,0 +1,135 @@
const axios = require('axios');
const wsManager = require('./ws-manager');
const config = require('./config');
const db = require('./db');
const TreeMap = require("treemap-js");

class SotaSpotReceiver {
  constructor() {
    this.latestSpots = new TreeMap();
    this.lastUpdate = null;

    wsManager.on('connect', (ws) => {
      let spots = []
      this.latestSpots.each(spot => {
        spots.push(spot)
      });
      wsManager.unicast({spots}, ws);
    })

    this.loadSpots();
    setInterval(() => {
      this.loadSpots();
    }, config.sotaspots.updateInterval);
  }

  loadSpots() {
    let numSpotsToLoad = config.sotaspots.periodicLoadSpots;
    if (this.latestSpots.getLength() == 0) {
      numSpotsToLoad = config.sotaspots.initialLoadSpots;
    }
    console.log(`load ${numSpotsToLoad} spots`);
    axios.get(config.sotaspots.url + '/' + numSpotsToLoad + '/all')
      .then(response => {
        let minSpotId = undefined;
        let currentSpotIds = new Set();
        response.data.forEach(spot => {
          spot.summit = {code: spot.associationCode.toUpperCase().trim() + '/' + spot.summitCode.toUpperCase().trim()};
          spot.timeStamp = new Date(spot.timeStamp);
          spot.activatorCallsign = spot.activatorCallsign.toUpperCase().replace(/[^A-Z0-9\/-]/g, '')
          delete spot.associationCode;
          delete spot.summitCode;
          delete spot.summitDetails;
          delete spot.highlightColor;
          delete spot.activatorName;
          if (spot.comments === '(null)') {
            spot.comments = '';
          }
          this.updateSpot(spot);

          currentSpotIds.add(spot.id);
          if (minSpotId === undefined || spot.id < minSpotId) {
            minSpotId = spot.id;
          }
        });
        this.removeDeletedSpots(minSpotId, currentSpotIds);
        this.removeExpiredSpots();
      });
  }

  updateSpot(spot) {
    // Check if we already have this spot in the list, and if it has changed
    if (this.spotsAreEqual(this.latestSpots.get(spot.id), spot)) {
      return;
    }

    // Spot is new or modified
    console.log("New/modified spot id " + spot.id);
    this.lookupSummit(spot.summit.code)
      .then(summit => {
        if (summit) {
          spot.summit = summit;
        }

        this.lookupAssociation(spot.summit.code.substring(0, spot.summit.code.indexOf('/')))
          .then(association => {
            if (association) {
              spot.summit.isoCode = association.isoCode;
              spot.summit.continent = association.continent;
            }

            this.latestSpots.set(spot.id, spot);
            wsManager.broadcast({spot});
          })
      })
  }

  deleteSpotById(spotId) {
    console.log("Deleted spot id " + spotId);
    if (this.latestSpots.get(spotId) !== undefined) {
      this.latestSpots.remove(spotId);
      wsManager.broadcast({deleteSpot: {id: spotId}});
    }
  }

  removeDeletedSpots(minSpotId, currentSpotIds) {
    // Consider all spots with ID >= minSpotId and not in currentSpotIds as deleted
    this.latestSpots.each((spot, curId) => {
      if (curId >= minSpotId && !currentSpotIds.has(curId)) {
        this.deleteSpotById(curId);
      }
    });
  }

  removeExpiredSpots() {
    let now = new Date();
    while (this.latestSpots.getLength() > 0) {
      let minKey = this.latestSpots.getMinKey();
      if ((now - this.latestSpots.get(minKey).timeStamp) > config.sotaspots.maxSpotAge) {
        console.log('Remove spot ID ' + minKey);
        this.latestSpots.remove(minKey);
      } else {
        break;
      }
    }
  }

  lookupSummit(summitCode, callback) {
    return db.getDb().collection('summits').findOne({code: summitCode}, {projection: {'_id': false, code: true, name: true, altitude: true, points: true, activationCount: true}});
  }

  lookupAssociation(associationCode, callback) {
    return db.getDb().collection('associations').findOne({code: associationCode});
  }

  spotsAreEqual(spot1, spot2) {
    if (spot1 === undefined || spot2 === undefined) {
      return false;
    }
    return (spot1.id === spot2.id && spot1.comments === spot2.comments && spot1.callsign === spot2.callsign &&
      spot1.summit.code === spot2.summit.code && spot1.activatorCallsign === spot2.activatorCallsign &&
      spot1.frequency === spot2.frequency && spot1.mode === spot2.mode);
  }
}

module.exports = SotaSpotReceiver;

@@ -0,0 +1,74 @@
const axios = require('axios');
const wsManager = require('./ws-manager');
const config = require('./config');
const db = require('./db');
const TreeMap = require("treemap-js");

const latestSpots = new TreeMap();
const maxSpots = 100;
const updateInterval = 30000;
let lastUpdate = null;

wsManager.on('connect', (ws) => {
  let spots = []
  latestSpots.each(spot => {
    spots.push(spot)
  });
  wsManager.unicast({spots}, ws);
})

loadSpots();
setInterval(loadSpots, updateInterval);

function loadSpots() {
  console.log('load spots');
  axios.get('https://sota-api2.azurewebsites.net/api/spots/' + maxSpots + '/all')
    .then(response => {
      response.data.forEach(spot => {
        spot.summit = {code: spot.associationCode + '/' + spot.summitCode};
        delete spot.associationCode;
        delete spot.summitCode;
        delete spot.summitDetails;
        delete spot.highlightColor;
        if (spot.comments === '(null)') {
          spot.comments = '';
        }
        updateSpot(spot);
      });
    });
}

function updateSpot(spot) {
  // Check if we already have this spot in the list, and if it has changed
  if (spotsAreEqual(latestSpots.get(spot.id), spot)) {
    return;
  }

  // Spot is new or modified
  console.log("New/modified spot id " + spot.id);
  lookupSummit(spot.summit.code)
    .then(summit => {
      if (summit) {
        spot.summit = summit;
      }

      latestSpots.set(spot.id, spot);
      while (latestSpots.getLength() > maxSpots) {
        latestSpots.remove(latestSpots.getMinKey());
      }
      wsManager.broadcast({spot});
    })
}

function lookupSummit(summitCode, callback) {
  return db.getDb().collection('summits').findOne({code: summitCode}, {projection: {'_id': false, code: true, name: true, altitude: true, points: true, activationCount: true}});
}

function spotsAreEqual(spot1, spot2) {
  if (spot1 === undefined || spot2 === undefined) {
    return false;
  }
  return (spot1.id === spot2.id && spot1.comments === spot2.comments && spot1.callsign === spot2.callsign &&
    spot1.summit.code === spot2.summit.code && spot1.activatorCallsign === spot2.activatorCallsign &&
    spot1.frequency === spot2.frequency && spot1.mode === spot2.mode);
}

@@ -0,0 +1,68 @@
const moment = require('moment');
const db = require('./db');

module.exports = {
  lookupSummits: function(objects) {
    // Get all summit refs so we can look them all up in one go
    let summitCodes = new Set();
    let associationCodes = new Set();
    objects.forEach(obj => {
      summitCodes.add(obj.summit.code);
      associationCodes.add(obj.summit.code.substring(0, obj.summit.code.indexOf('/')));
    });

    return new Promise((resolve, reject) => {
      db.getDb().collection('summits').find({code: {$in: [...summitCodes]}}, {projection: {'_id': false, code: true, name: true, altitude: true, points: true, coordinates: true, activationCount: true, validFrom: true, validTo: true, 'photos.author': true}})
        .toArray((err, summits) => {
          if (err) {
            reject(err);
            return;
          }

          let summitMap = {};
          let now = moment();
          summits.forEach(summit => {
            if (now.isBefore(summit.validFrom) || now.isAfter(summit.validTo)) {
              summit.invalid = true;
            }
            delete summit.validFrom;
            delete summit.validTo;
            if (summit.photos) {
              let photoAuthors = new Set();
              summit.photos.forEach(photo => {
                photoAuthors.add(photo.author);
              });
              summit.photoAuthors = [...photoAuthors];
              delete summit.photos;
            }
            summitMap[summit.code] = summit;
          });

          db.getDb().collection('associations').find({code: {$in: [...associationCodes]}}).toArray((err, associations) => {
            if (err) {
              reject(err);
              return;
            }

            let associationMap = {};
            associations.forEach(association => {
              associationMap[association.code] = association;
            });

            objects.forEach(object => {
              let association = object.summit.code.substring(0, object.summit.code.indexOf('/'));
              if (summitMap[object.summit.code]) {
                object.summit = summitMap[object.summit.code];
                if (object.summit) {
                  object.summit.isoCode = associationMap[association].isoCode;
                  object.summit.continent = associationMap[association].continent;
                }
              }
            });

            resolve(objects);
          });
        });
    });
  }
}

@@ -0,0 +1,61 @@
const axios = require('axios');
const MongoClient = require('mongodb').MongoClient;
const config = require('../config');
const assert = require('assert');
const htmlparser = require('htmlparser2');

const client = new MongoClient(config.mongodb.url, {useUnifiedTopology: true});
client.connect(err => {
  assert.equal(null, err);

  importActivators(client.db(config.mongodb.dbName));
});

async function importActivators(db) {
  let response = await axios.get('https://api-db.sota.org.uk/admin/activator_roll?associationID=-1')

  // Weed out duplicate callsigns, keeping only the record with the higher number of points
  let activators = new Map();
  response.data.forEach(record => {
    let callsign = record.Callsign.toUpperCase().trim().replace('/P', '');
    let existingActivator = activators.get(callsign);
    if (existingActivator === undefined || existingActivator.Points < record.Points) {
      activators.set(callsign, record);
    }
  });

  let lastUpdate = new Date();
  let bulkWrites = [];
  for (let record of activators.values()) {
    let activator = {
      callsign: record.Callsign.toUpperCase().trim().replace('/P', ''),
      username: record.Username,
      userId: record.UserID,
      summits: record.Summits,
      points: record.Points,
      bonusPoints: record.BonusPoints,
      score: record.totalPoints,
      avgPoints: parseFloat(record.Average),
      lastUpdate
    };

    bulkWrites.push({updateOne: {
      filter: {callsign: activator.callsign},
      update: { $set: activator},
      upsert: true
    }});

    if (bulkWrites.length >= config.mongodb.batchSize) {
      await db.collection('activators').bulkWrite(bulkWrites);
      bulkWrites = [];
    }
  }

  if (bulkWrites.length > 0) {
    await db.collection('activators').bulkWrite(bulkWrites);
  }

  await db.collection('activators').deleteMany({lastUpdate: {$lt: lastUpdate}});

  await client.close();
}

@@ -0,0 +1,33 @@
const MongoClient = require('mongodb').MongoClient
const config = require('../config')
const assert = require('assert')
const photos = require('../photos')
const db = require('../db')

let author = process.argv[2]
if (!author) {
    console.error("usage: author file [file ...]")
    process.exit(1)
}

db.waitDb(() => {
    let imports = []
    process.argv.slice(3).forEach(filename => {
        // Defer each import so they really run one at a time below
        imports.push(() => photos.importPhoto(filename, author))
    })

    // Run imports in series
    return imports.reduce((promiseChain, currentImport) => {
        return promiseChain.then(chainResults =>
            currentImport().then(currentResult =>
                [ ...chainResults, currentResult ]
            )
        )
    }, Promise.resolve([])).then(photos => {
        console.log(JSON.stringify(photos))

        db.closeDb()
    }).catch(err => {
        console.error(err)
        // Close the connection so the process can exit even if an import failed
        db.closeDb()
    })
})
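
// Editor's note: a usage sketch for the import script above (the script's
// filename is not shown in this diff, so "import_photos.js" is assumed here):
//
//   node import_photos.js HB9XYZ photo1.jpg photo2.jpg
//
// The author callsign comes first, followed by one or more image files; the
// resulting photo metadata is printed as JSON once all imports have finished.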

@@ -0,0 +1,97 @@
3Y,no,AF
4O,me,EU
4X,il,AS
5B,cy,AS
9A,hr,EU
9H,mt,EU
9M,my,AS
9V,sg,AS
A6,ae,AS
BV,tw,AS
CT,pt,EU
CU,pt,EU
CX,uy,SA
D,de,EU
E5,ck,OC
E7,ba,EU
EA,es,EU
EI,ie,EU
ES,ee,EU
FG,gp,NA
FH,yt,AF
FJ,fr,NA
FK,nc,OC
FM,mq,NA
FO,pf,OC
FP,fr,NA
FR,fr,AF
FT,fr,AF
F,fr,EU
GD,im,EU
GI,gb-nir,EU
GJ,je,EU
GM,gb-sct,EU
GU,gg,EU
GW,gb-wls,EU
G,gb-eng,EU
HA,hu,EU
HB0,li,EU
HB,ch,EU
HI,do,NA
HL,kr,AS
HR,hn,NA
I,it,EU
JA,jp,AS
JX,sj,EU
KP4,pr,NA
KH2,gu,OC
KH0,mp,OC
K,us,NA
LA,no,EU
LU,ar,SA
LX,lu,EU
LY,lt,EU
LZ,bg,EU
OD,lb,AS
OE,at,EU
OH,fi,EU
OK,cz,EU
OM,sk,EU
ON,be,EU
OY,fo,EU
OZ,dk,EU
PA,nl,EU
PP,br,SA
PQ,br,SA
PR,br,SA
PS,br,SA
PT,br,SA
PY,br,SA
R,ru,EU
S5,si,EU
S7,sc,AF
SM,se,EU
SP,pl,EU
SV,gr,EU
TF,is,EU
TI,cr,NA
TK,fr,EU
UT,ua,EU
V5,na,AF
VE,ca,NA
VK,au,OC
VO,ca,NA
VP8,fk,SA
VY,ca,NA
W,us,NA
XE,mx,NA
XF,mx,NA
YB,id,AS
YL,lv,EU
YO,ro,EU
YU,rs,EU
Z3,mk,EU
ZB2,gi,EU
ZD,gb,AF
ZL,nz,OC
ZS,za,AF
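
// Editor's note on the isocodes list above: each row maps a callsign prefix to
// an ISO country/subdivision code and a continent (EU/NA/SA/AF/AS/OC). The
// lookup in the summit import script further below picks the first row whose
// prefix matches via startsWith(), so more specific prefixes must come before
// shorter ones (e.g. GD/GI/GJ/GM/GU/GW before G, HB0 before HB, KP4/KH2/KH0
// before K). A minimal sketch of that first-match behaviour, assuming `rows`
// holds the parsed CSV as arrays of [prefix, isoCode, continent]:
//
//   const entry = rows.find(row => 'HB0'.startsWith(row[0]));
//   // entry is ['HB0', 'li', 'EU'], because HB0 is listed before HB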

@@ -0,0 +1,32 @@
const config = require('../config')
const db = require('../db')
const sharp = require('sharp')

function regenerateThumbnails() {
    // Fetch all summits with photos
    db.getDb().collection('summits').find({'photos': {$exists: true, $ne: []}})
        .each((err, summit) => {
            if (summit) {
                summit.photos.forEach(photo => {
                    regenerateThumbnailForPhoto(photo)
                })
            } else {
                db.closeDb()
            }
        })
}

function regenerateThumbnailForPhoto(photo) {
    console.dir(photo)

    let sizeDescr = 'thumb'
    let originalPath = config.photos.paths.original + '/' + photo.filename.substr(0, 2) + '/' + photo.filename
    let outPath = config.photos.paths[sizeDescr] + '/' + photo.filename.substr(0, 2) + '/' + photo.filename
    makeResized(originalPath, outPath, config.photos.sizes[sizeDescr].width, config.photos.sizes[sizeDescr].height)
}

function makeResized(src, dst, maxWidth, maxHeight) {
    return sharp(src).rotate().resize({ height: maxHeight, width: maxWidth, fit: 'inside' }).toFile(dst)
}

db.waitDb(regenerateThumbnails)

@@ -0,0 +1,194 @@
const request = require('request');
const MongoClient = require('mongodb').MongoClient;
const config = require('../config');
const assert = require('assert');
const parse = require('csv-parse');
const fs = require('fs');
const removeDiacritics = require('diacritics').remove;

const client = new MongoClient(config.mongodb.url, {useUnifiedTopology: true});
client.connect(function (err) {
    assert.equal(null, err);

    processSummitList(client.db(config.mongodb.dbName));
});

function processSummitList(db) {
    let associations = new Map();
    let now = new Date();

    let prefixToIsoCode = [];
    parse(fs.readFileSync(__dirname + '/isocodes.txt'), function(err, isocodes) {
        assert.equal(err, null);
        prefixToIsoCode = isocodes;
    });

    request(config.summitListUrl, (error, response, body) => {
        assert.equal(error, null);

        body = body.substring(body.indexOf("\n")+1, body.length);

        parse(body, {columns: true, relax_column_count: true}, function(err, summits) {
            assert.equal(err, null);

            if (summits.length < 100000) {
                console.error("Bad number of summits, expecting more than 100000");
                client.close();
                return;
            }

            let bulkWrites = [];
            for (let summit of summits) {
                summit.SummitCode = summit.SummitCode.trim(); // anomaly GW/NW-003
                summit.ValidFrom = dateToMongo(summit.ValidFrom);
                summit.ValidTo = dateToMongo(summit.ValidTo, true);
                if (summit.ActivationDate) {
                    summit.ActivationDate = dateToMongo(summit.ActivationDate);
                } else {
                    summit.ActivationDate = null;
                    summit.ActivationCall = null;
                }

                bulkWrites.push({updateOne: {
                    filter: {code: summit.SummitCode},
                    update: { $set: {
                        code: summit.SummitCode,
                        name: summit.SummitName,
                        nameNd: removeDiacritics(summit.SummitName),
                        altitude: parseInt(summit.AltM),
                        points: parseInt(summit.Points),
                        bonusPoints: parseInt(summit.BonusPoints),
                        coordinates: {
                            longitude: Number(parseFloat(summit.Longitude).toFixed(5)),
                            latitude: Number(parseFloat(summit.Latitude).toFixed(5))
                        },
                        validFrom: summit.ValidFrom,
                        validTo: summit.ValidTo,
                        activationCount: parseInt(summit.ActivationCount),
                        activationCall: summit.ActivationCall,
                        activationDate: summit.ActivationDate
                    }},
                    upsert: true
                }});

                if (bulkWrites.length >= config.mongodb.batchSize) {
                    db.collection('summits').bulkWrite(bulkWrites);
                    bulkWrites = [];
                }

                let SummitAssociation = getAssociation(summit.SummitCode);
                let SummitRegion = getRegion(summit.SummitCode);

                let isValid = (summit.ValidFrom <= now && summit.ValidTo >= now);
                let association = associations.get(SummitAssociation);
                if (!association) {
                    // Fall back to null ISO code/continent if the prefix is not in isocodes.txt
                    let info = isoCodeForPrefix(SummitAssociation, prefixToIsoCode) || {isoCode: null, continent: null};
                    association = {code: SummitAssociation, name: summit.AssociationName, isoCode: info.isoCode, continent: info.continent, regions: new Map(), summitCount: 0};
                    associations.set(SummitAssociation, association);
                }
                let region = association.regions.get(SummitRegion);
                if (!region) {
                    region = {code: SummitRegion, name: summit.RegionName, summitCount: 0};
                    association.regions.set(SummitRegion, region);
                }
                if (isValid) {
                    association.summitCount++;
                    region.summitCount++;
                }

                let lat = parseFloat(summit.Latitude);
                let lon = parseFloat(summit.Longitude);

                if (!region.bounds) {
                    region.bounds = [[lon, lat], [lon, lat]];
                } else {
                    region.bounds[0][0] = Math.min(region.bounds[0][0], lon);
                    region.bounds[0][1] = Math.min(region.bounds[0][1], lat);
                    region.bounds[1][0] = Math.max(region.bounds[1][0], lon);
                    region.bounds[1][1] = Math.max(region.bounds[1][1], lat);
                }

                if (!association.bounds) {
                    association.bounds = [[lon, lat], [lon, lat]];
                } else {
                    association.bounds[0][0] = Math.min(association.bounds[0][0], lon);
                    association.bounds[0][1] = Math.min(association.bounds[0][1], lat);
                    association.bounds[1][0] = Math.max(association.bounds[1][0], lon);
                    association.bounds[1][1] = Math.max(association.bounds[1][1], lat);
                }
            }

            if (bulkWrites.length > 0) {
                db.collection('summits').bulkWrite(bulkWrites);
            }

            for (let association of associations.values()) {
                association.regions = [...association.regions.values()];
            }

            let associationCollection = db.collection('associations');
            associationCollection.deleteMany({}, () => {
                associationCollection.insertMany([...associations.values()], (err, r) => {
                    if (err)
                        console.error(err);
                    client.close();
                });
            });
        });
    });
}

function dateToMongo(date, endOfDay = false) {
    let dateRegex = /^(\d\d)\/(\d\d)\/(\d\d\d\d)$/;
    let dateRegex2 = /^(\d\d\d\d)-(\d\d)-(\d\d)/;
    let matches = dateRegex.exec(date);
    let matches2 = dateRegex2.exec(date);
    if (matches) {
        if (endOfDay) {
            return new Date(Date.UTC(matches[3], matches[2]-1, matches[1], 23, 59, 59, 999));
        } else {
            return new Date(Date.UTC(matches[3], matches[2]-1, matches[1]));
        }
    } else if (matches2) {
        if (endOfDay) {
            return new Date(Date.UTC(matches2[1], matches2[2]-1, matches2[3], 23, 59, 59, 999));
        } else {
            return new Date(Date.UTC(matches2[1], matches2[2]-1, matches2[3]));
        }
    } else {
        throw Error("Bad date " + date);
    }
}

let summitRegex = /^(.+)\/(.+)-(\d+)$/;
function getAssociation(summitRef) {
    let matches = summitRegex.exec(summitRef);
    if (matches) {
        return matches[1];
    } else {
        throw Error("Bad summit ref '" + summitRef + "'");
    }
}

function getRegion(summitRef) {
    let matches = summitRegex.exec(summitRef);
    if (matches) {
        return matches[2];
    } else {
        throw Error("Bad summit ref '" + summitRef + "'");
    }
}

function isoCodeForPrefix(prefix, prefixToIsoCode) {
    let isoCodeEnt = prefixToIsoCode.find(el => prefix.startsWith(el[0]));
    if (isoCodeEnt) {
        return {isoCode: isoCodeEnt[1], continent: isoCodeEnt[2]};
    } else {
        console.error(`ISO code not found for prefix ${prefix}`);
        return null;
    }
}
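
// Editor's sketch of dateToMongo() above, showing the two accepted formats
// (dd/mm/yyyy and yyyy-mm-dd) and the optional end-of-day flag:
//
//   dateToMongo('01/03/2019')        // => 2019-03-01T00:00:00.000Z
//   dateToMongo('2019-03-01', true)  // => 2019-03-01T23:59:59.999Z
//   dateToMongo('2019/03/01')        // => throws Error("Bad date 2019/03/01")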

@@ -0,0 +1,41 @@
const axios = require('axios');
const MongoClient = require('mongodb').MongoClient;
const config = require('../config');
const assert = require('assert');

const client = new MongoClient(config.mongodb.url, {useUnifiedTopology: true});
client.connect(function (err) {
    assert.equal(null, err);

    updateSotatrails(client.db(config.mongodb.dbName));
});

function updateSotatrails(db) {
    axios.get(config.sotatrailsUrl)
        .then(response => {
            let bulkWrites = [];
            response.data.forEach(report => {
                let summitCode = report.association + '/' + report.region + '-' + report.code;
                bulkWrites.push({updateOne: {
                    filter: {code: summitCode},
                    update: { $set: {
                        'resources.sotatrails': {
                            url: report.url,
                            details: report.details === 'true'
                        }
                    }},
                    upsert: false
                }});
            });

            db.collection('summits').bulkWrite(bulkWrites, (err, r) => {
                if (err)
                    console.error(err);
                client.close();
            });
        })
        .catch(error => {
            console.error(error);
            client.close();
        })
}

@@ -0,0 +1,73 @@
const togeojson = require('togeojson')
const fsPromises = require('fs').promises
const DOMParser = require('xmldom').DOMParser
const simplify = require('@turf/simplify')
const togpx = require('togpx')
const hasha = require('hasha')
const path = require('path')
const config = require('./config')
const db = require('./db')

module.exports = {
    importTrack: async function(filename, author) {
        // Hash input file to determine filename
        let hash = await hasha.fromFile(filename, {algorithm: 'sha256'})
        let hashFilename = hash.substr(0, 32)
        let originalPath = config.tracks.paths.original + '/' + hashFilename.substr(0, 2) + '/' + hashFilename
        await fsPromises.mkdir(path.dirname(originalPath), {recursive: true})

        // Parse first to check if it's valid GPX/KML
        let gpxData = await fsPromises.readFile(filename, 'utf-8')
        let dom = new DOMParser().parseFromString(gpxData, 'text/xml')
        if (!dom) {
            throw new Error('Bad XML document')
        }
        let geojson
        if (dom.documentElement.tagName === 'kml') {
            geojson = togeojson.kml(dom)
            originalPath += '.kml'
        } else {
            geojson = togeojson.gpx(dom)
            originalPath += '.gpx'
        }

        if (geojson.type !== 'FeatureCollection') {
            throw new Error('Expected feature collection')
        }
        if (geojson.features.length === 0) {
            throw new Error('No features found')
        }

        await fsPromises.copyFile(filename, originalPath)

        // Remove times, if present
        geojson.features.forEach(feature => {
            if (feature.type !== 'Feature') {
                throw new Error('Expected feature')
            }

            if (feature.properties.coordTimes) {
                delete feature.properties.coordTimes
            }
        })

        let simplified = simplify(geojson, {tolerance: config.tracks.tolerance, highQuality: true})
        let simpleGpx = togpx(simplified)

        let outPath = config.tracks.paths.simple + '/' + hashFilename.substr(0, 2) + '/' + hashFilename + '.gpx'
        await fsPromises.mkdir(path.dirname(outPath), {recursive: true})
        await fsPromises.writeFile(outPath, simpleGpx)

        db.getDb().collection('uploads').insertOne({
            uploadDate: new Date(),
            type: 'track',
            filename: hashFilename + '.gpx',
            author
        })

        return {
            filename: hashFilename + '.gpx',
            author
        }
    }
}

@@ -0,0 +1,42 @@
const express = require('express')
const multer = require('multer')
const config = require('./config')
const tracks = require('./tracks')
const jwt = require('express-jwt')
const jwksRsa = require('jwks-rsa')

let upload = multer({dest: config.tracks.uploadPath})

let router = express.Router()
module.exports = router

let jwtCallback = jwt({
    secret: jwksRsa.expressJwtSecret({
        cache: true,
        rateLimit: true,
        jwksRequestsPerMinute: 5,
        jwksUri: config.sso.jwksUri
    })
})

router.post('/upload', jwtCallback, upload.single('track'), (req, res) => {
    res.cacheControl = {
        noCache: true
    }

    if (!req.user.callsign) {
        res.status(401).send('Missing callsign in SSO token').end()
        return
    }

    if (req.file) {
        tracks.importTrack(req.file.path, req.user.callsign)
            .then(track => {
                res.json(track)
            })
            .catch(err => {
                console.error(err)
                res.status(500).end()
            })
    } else {
        // Respond explicitly so the request does not hang when no file was uploaded
        res.status(400).send('Missing track file').end()
    }
})
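
// Editor's sketch of a matching client-side upload (assumptions: the router
// above is mounted under /tracks on https://example.com, and a valid SSO JWT
// containing a callsign claim is available as `token`; the axios and form-data
// packages used here are not part of the route above):
const fs = require('fs');
const axios = require('axios');
const FormData = require('form-data');

async function uploadTrack(gpxPath, token) {
    const form = new FormData();
    // The field name must be 'track' to match upload.single('track') above
    form.append('track', fs.createReadStream(gpxPath));
    const response = await axios.post('https://example.com/tracks/upload', form, {
        headers: { ...form.getHeaders(), Authorization: 'Bearer ' + token }
    });
    return response.data; // { filename: '<hash>.gpx', author: '<callsign>' }
}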

@@ -0,0 +1,17 @@
module.exports = {

    // Expand a UK callsign into all of its regional secondary-locator variants
    // (D, E/none, I, J, M, U, W); non-UK callsigns are returned unchanged
    makeCallsignVariations(callsign) {
        let matches = callsign.match(/^(2[DEIJMUW]|G[DIJMUW]?|M[DIJMUW]?)(\d[A-Z]{2,3})$/)
        if (matches) {
            if (matches[1].substring(0, 1) === '2') {
                return ['2D' + matches[2], '2E' + matches[2], '2I' + matches[2], '2J' + matches[2], '2M' + matches[2], '2U' + matches[2], '2W' + matches[2]];
            } else if (matches[1].substring(0, 1) === 'G') {
                return ['GD' + matches[2], 'G' + matches[2], 'GI' + matches[2], 'GJ' + matches[2], 'GM' + matches[2], 'GU' + matches[2], 'GW' + matches[2]];
            } else if (matches[1].substring(0, 1) === 'M') {
                return ['MD' + matches[2], 'M' + matches[2], 'MI' + matches[2], 'MJ' + matches[2], 'MM' + matches[2], 'MU' + matches[2], 'MW' + matches[2]];
            }
        } else {
            return [callsign];
        }
    }
};
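
// Editor's note: two results of makeCallsignVariations() above, derived
// directly from the code:
//
//   makeCallsignVariations('MW0ABC')
//   // => ['MD0ABC', 'M0ABC', 'MI0ABC', 'MJ0ABC', 'MM0ABC', 'MU0ABC', 'MW0ABC']
//
//   makeCallsignVariations('HB9XYZ')
//   // => ['HB9XYZ']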

@@ -0,0 +1,82 @@
const express = require('express');
const EventEmitter = require('events');
const keyzipper = require('./keyzipper')

const PING_INTERVAL = 30000;

class WebSocketManager extends EventEmitter {
    constructor() {
        super();
        this.webSocketClients = new Set();
        this.router = express.Router();

        this.router.ws('/', (ws, req) => {
            console.log('WebSocket client connected');
            ws.isAlive = true;
            this.webSocketClients.add(ws);
            console.log("Number of clients: " + this.webSocketClients.size);

            this.emit('connect', ws);

            ws.on('message', (data) => {
                try {
                    let message = JSON.parse(data);
                    this.emit('message', ws, message);
                } catch (e) {}
            });
            ws.on('pong', () => {
                ws.isAlive = true;
            });
            ws.on('close', () => {
                console.log("WebSocket closed");
                clearInterval(ws.pingInterval);
                this.webSocketClients.delete(ws);
                console.log("Number of clients: " + this.webSocketClients.size);
            });
            ws.on('error', (error) => {
                console.log("WebSocket error: " + error);
                clearInterval(ws.pingInterval);
                this.webSocketClients.delete(ws);
                console.log("Number of clients: " + this.webSocketClients.size);
            });

            ws.pingInterval = setInterval(() => {
                if (!ws.isAlive) {
                    console.log("WebSocket ping timeout");
                    ws.terminate();
                    return;
                }
                ws.isAlive = false;
                ws.ping();
            }, PING_INTERVAL);
        });
    }

    broadcast(message, filter) {
        let str = JSON.stringify(keyzipper.compressKeys(message));
        for (const ws of this.webSocketClients) {
            if (filter && !filter(ws)) {
                continue;
            }

            try {
                ws.send(str);
            } catch (e) {
                console.error(e);
            }
        }
    }

    unicast(message, ws) {
        ws.send(JSON.stringify(keyzipper.compressKeys(message)));
    }

    numberOfClients() {
        return this.webSocketClients.size;
    }
}

let wsManager = new WebSocketManager();

// This is a singleton for ease of use
module.exports = wsManager;
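
// Editor's sketch of a minimal client for the WebSocket endpoint above
// (assumptions: the router is mounted at /ws and reachable at wss://example.com;
// the 'ws' npm package is used; message keys arrive compressed by
// keyzipper.compressKeys(), whose counterpart is not shown in this diff):
const WebSocket = require('ws');

const ws = new WebSocket('wss://example.com/ws');
ws.on('open', () => {
    // Anything sent as JSON is parsed server-side and re-emitted as a
    // 'message' event on the WebSocketManager singleton
    ws.send(JSON.stringify({ hello: 'sotlas' }));
});
ws.on('message', data => {
    console.log('broadcast received:', data.toString());
});
// The 'ws' package answers server pings with pongs automatically, which keeps
// the 30-second keepalive in the manager above satisfied.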