kopia lustrzana https://github.com/c9/core
Merge pull request #15453 from c9/all-improve-rate-limiter
improve rate limiter
commit
28253e7f34
|
@ -1,4 +1,5 @@
|
|||
var error = require("http-error");
|
||||
var RateLimiter = require('limiter').RateLimiter;
|
||||
|
||||
// Longest interval (ms) between sweeps of expired request timestamps;
// short rate-limit windows sweep more often (see the setInterval below).
var MAX_EXPIRE_INTERVAL = 5000;

// Factory producing an Express-style rate-limiting middleware.
module.exports = ratelimit;
|
||||
|
||||
function ratelimit(key, duration, max) {
|
||||
// Per-handle sliding-window timestamp lists and per-handle token buckets.
// Object.create(null) guards against handles like 'constructor' colliding
// with Object.prototype properties.
var requests = Object.create(null);
var buckets = Object.create(null);

// The handle is resolved from req.params by default; a key of the form
// "req.<path>" resolves against the request object itself instead.
var rootKey = "params";
if (/^req\./.test(key)) {
    rootKey = null;
    key = key.replace(/^req\./, "");
}

// Sweep expired timestamps at ~3/4 of the window length, capped at
// MAX_EXPIRE_INTERVAL ms so very long windows still get timely cleanup.
setInterval(function() {
    Object.keys(requests).forEach(expireRequests);
}, Math.min(duration * 0.75, MAX_EXPIRE_INTERVAL));
|
||||
|
||||
// Removes timestamps older than `duration` ms from the request list for
// `handle`, deleting the list entirely once it becomes empty. Called from
// the sweep interval via Object.keys(requests).forEach(expireRequests).
function expireRequests(handle) {
    var requestsForHandle = requests[handle];
    var expireTime = Date.now() - duration;
    /* Requests are already sorted by date as they are appended, so we just
       scan until we find one that shouldn't have expired and splice the
       expired prefix from the list. */
    var totalToSplice = 0;
    while (totalToSplice < requestsForHandle.length &&
           requestsForHandle[totalToSplice] < expireTime) {
        totalToSplice++;
    }
    requestsForHandle.splice(0, totalToSplice);
    if (requestsForHandle.length === 0) {
        delete requests[handle];
    }
}
|
||||
|
||||
// Returns a deep value from an object. E.g. resolveValue({user: {id: 5}}, "user.id") === 5
// A path of "*" returns the object itself.
function resolveValue(obj, path) {
    if (path === "*")
        return obj;
    // Walk the dotted path; short-circuits to a falsy value as soon as an
    // intermediate segment is missing.
    return path.split(".").reduce(function(obj, key) {
        return obj && obj[key];
    }, obj);
}
|
||||
|
||||
// cleanup empty buckets
// NOTE(review): tokenBucket.content === 0 means the bucket is fully
// DRAINED (client currently at its limit), not idle; deleting it hands
// that client a brand-new full bucket on its next request. Confirm this
// is intended rather than e.g. deleting full/idle buckets.
setInterval(function() {
    Object.keys(buckets).forEach(function(handle) {
        var bucket = buckets[handle];
        if (bucket.tokenBucket.content === 0) {
            delete buckets[handle];
        }
    });
}, 5 * 1000);
|
||||
|
||||
return function(req, res, next) {
|
||||
var root = rootKey ? req[rootKey] : req;
|
||||
var handle = resolveValue(root, key);
|
||||
|
||||
requests[handle] = requests[handle] || [];
|
||||
if (requests[handle].length >= max) {
|
||||
buckets[handle] = buckets[handle] || new RateLimiter(max, duration, true);
|
||||
var removed = buckets[handle].tryRemoveTokens(1);
|
||||
if (!removed) {
|
||||
var err = new error.TooManyRequests("Rate limit exceeded");
|
||||
err.retryIn = Math.min(duration, 5000);
|
||||
return next(err);
|
||||
}
|
||||
|
||||
requests[handle].push(Date.now());
|
||||
return next();
|
||||
};
|
||||
}
|
||||
|
|
|
@ -111,25 +111,24 @@ describe("ratelimit", function() {
|
|||
// Verifies, under sinon fake timers, that a 2-requests-per-50ms limiter
// admits requests as old timestamps expire and rejects with 429 when the
// window is full. (The visible span duplicated each callback-opening line
// in old/new diff style; this is the deduplicated version.)
it("Should expire keys at the correct times", function (done) {
    var clock = sinon.useFakeTimers();
    var limiter = ratelimit("username", 50, 2);
    limiter({params: {username: "mario"}}, null, function(err) {
        assert(!err, err);
    });
    clock.tick(40);
    limiter({params: {username: "mario"}}, null, function(err) {
        assert(!err, err);
    });
    clock.tick(45);
    limiter({params: {username: "mario"}}, null, function(err) {
        assert(!err, err);
    });
    limiter({params: {username: "mario"}}, null, function(err) {
        assert(!err, err);
    });
    limiter({params: {username: "mario"}}, null, function(err) {
        assert(err);
        assert.equal(err.code, 429);
    });
    clock.tick(40);
    limiter({params: {username: "mario"}}, null, function(err) {
        assert(!err, err);
    });
    done();
});
|
||||
});
|
Ładowanie…
Reference in New Issue