-
-
Notifications
You must be signed in to change notification settings - Fork 159
Overall example
All examples are written for ExpressJS and Redis store, but the same idea can be applied for all limiters with any Koa, Hapi, Nest, pure NodeJS application, etc.
- Create rate limiter
- Login endpoint protection
- Websocket connection prevent flooding
- Dynamic block duration
- Different limits for authorized users
- Different limits for different parts of application
- Apply in-memory Block Strategy against massive DDoS
- Setup Insurance Strategy
Any store limiter like Mongo, MySQL, etc can be used for distributed environment as well.
const express = require('express');
const Redis = require('ioredis');
// BUG FIX: RateLimiterRedis was used below but never imported.
const { RateLimiterRedis } = require('rate-limiter-flexible');

// Disable the offline queue so commands fail fast when Redis is unreachable
// instead of piling up in process memory.
const redisClient = new Redis({ enableOfflineQueue: false });
const app = express();

const rateLimiterRedis = new RateLimiterRedis({
  storeClient: redisClient,
  points: 10, // Number of points
  duration: 1, // Per second
});

// Express middleware: consume 1 point per request, keyed by client IP.
// Any rejection (out of points, or store error) answers with HTTP 429.
const rateLimiterMiddleware = (req, res, next) => {
  rateLimiterRedis.consume(req.ip)
    .then(() => {
      next();
    })
    .catch(_ => {
      res.status(429).send('Too Many Requests');
    });
};

app.use(rateLimiterMiddleware);
Rate limiter consumes 1 point by IP for every request to an application. This limits a user to make only 10 requests per second. It works in distributed environments as it stores all limits on Redis.
Memory limiter can be used if application is launched as single process.
Cluster limiter is available for application launched on single server.
A login endpoint should be protected against brute-force attacks. Additionally, it should be rate limited if rate limits are not set on a reverse proxy or load balancer. This example describes one possible way to protect against brute force and does not include global rate limiting.
Create 2 limiters. The first counts number of consecutive failed attempts and allows maximum 10 by username and IP pair. The second blocks IP for 1 day on 100 failed attempts per day.
// Login brute-force protection wiring: two Redis-backed limiters.
const http = require('http');
const express = require('express');
const redis = require('redis');
const { RateLimiterRedis } = require('rate-limiter-flexible');
// Fail fast when Redis is unreachable instead of queueing commands in memory.
const redisClient = redis.createClient({
enable_offline_queue: false,
});
// Hard caps used by the two limiters below.
const maxWrongAttemptsByIPperDay = 100;
const maxConsecutiveFailsByUsernameAndIP = 10;
// Limiter 1: slow down brute force from a single IP across all usernames.
const limiterSlowBruteByIP = new RateLimiterRedis({
storeClient: redisClient,
keyPrefix: 'login_fail_ip_per_day',
points: maxWrongAttemptsByIPperDay,
duration: 60 * 60 * 24,
blockDuration: 60 * 60 * 24, // Block for 1 day, if 100 wrong attempts per day
});
// Limiter 2: count consecutive failures for a specific username + IP pair.
const limiterConsecutiveFailsByUsernameAndIP = new RateLimiterRedis({
storeClient: redisClient,
keyPrefix: 'login_fail_consecutive_username_and_ip',
points: maxConsecutiveFailsByUsernameAndIP,
duration: 60 * 60 * 24 * 90, // Store number for 90 days since first fail
blockDuration: 60 * 60, // Block for 1 hour
});
const getUsernameIPkey = (username, ip) => `${username}_${ip}`;
/**
 * Login handler with two-level brute-force protection:
 * - limiterSlowBruteByIP: max failed attempts per IP per day;
 * - limiterConsecutiveFailsByUsernameAndIP: max consecutive fails per username+IP.
 * Answers 429 with Retry-After when blocked, 400 on bad credentials,
 * and rethrows store errors for the outer handler to turn into a 500.
 */
async function loginRoute(req, res) {
  const ipAddr = req.ip;
  const usernameIPkey = getUsernameIPkey(req.body.email, ipAddr);

  // Read both counters in parallel; .get() resolves null when nothing consumed yet.
  const [resUsernameAndIP, resSlowByIP] = await Promise.all([
    limiterConsecutiveFailsByUsernameAndIP.get(usernameIPkey),
    limiterSlowBruteByIP.get(ipAddr),
  ]);

  let retrySecs = 0;
  // Check if IP or Username + IP is already blocked
  if (resSlowByIP !== null && resSlowByIP.remainingPoints <= 0) {
    retrySecs = Math.round(resSlowByIP.msBeforeNext / 1000) || 1;
  } else if (resUsernameAndIP !== null && resUsernameAndIP.remainingPoints <= 0) {
    retrySecs = Math.round(resUsernameAndIP.msBeforeNext / 1000) || 1;
  }

  if (retrySecs > 0) {
    res.set('Retry-After', String(retrySecs));
    res.status(429).send('Too Many Requests');
  } else {
    const user = authorise(req.body.email, req.body.password);
    if (!user.isLoggedIn) {
      // Consume 1 point from limiters on wrong attempt and block if limits reached
      try {
        const promises = [limiterSlowBruteByIP.consume(ipAddr)];
        if (user.exists) {
          // Count failed attempts by Username + IP only for registered users
          promises.push(limiterConsecutiveFailsByUsernameAndIP.consume(usernameIPkey));
        }
        // BUG FIX: the original `await promises` awaited the array itself (a no-op);
        // Promise.all is required to actually wait for — and surface — rejections.
        await Promise.all(promises);
        res.status(400).end('email or password is wrong');
      } catch (rlRejected) {
        if (rlRejected instanceof Error) {
          // Real store error (e.g. Redis down) — let the outer handler answer 500.
          throw rlRejected;
        } else {
          // BUG FIX: was `String(Math.round(...)) || 1` — String() always yields a
          // truthy string ("NaN" included), so the `|| 1` fallback could never apply.
          res.set('Retry-After', String(Math.round(rlRejected.msBeforeNext / 1000) || 1));
          res.status(429).send('Too Many Requests');
        }
      }
    }
    if (user.isLoggedIn) {
      if (resUsernameAndIP !== null && resUsernameAndIP.consumedPoints > 0) {
        // Reset consecutive-fail counter on successful authorisation
        await limiterConsecutiveFailsByUsernameAndIP.delete(usernameIPkey);
      }
      res.end('authorized');
    }
  }
}
const app = express();

app.post('/login', async (req, res) => {
  try {
    await loginRoute(req, res);
  } catch (err) {
    // Don't swallow unexpected failures silently — log before answering 500.
    console.error(err);
    res.status(500).end();
  }
});
See more examples of login endpoint protection in "Brute-force protection Node.js examples" article
The simplest approach is rate limiting by IP.
const app = require('http').createServer();
const io = require('socket.io')(app);
const { RateLimiterMemory } = require('rate-limiter-flexible');

app.listen(3000);

// Allow at most 5 broadcast events per second per client IP.
const rateLimiter = new RateLimiterMemory({
  points: 5, // 5 points
  duration: 1, // per second
});

io.on('connection', (socket) => {
  socket.on('bcast', (data) => {
    rateLimiter.consume(socket.handshake.address) // consume 1 point per event from IP
      .then(() => {
        socket.emit('news', { 'data': data });
        socket.broadcast.emit('news', { 'data': data });
      })
      .catch((rejection) => {
        // No points left — tell the client when it may retry.
        socket.emit('blocked', { 'retry-ms': rejection.msBeforeNext });
      });
  });
});
It may be an issue if there are many users behind one IP address. If there is a login procedure or a unique user ID, use it to limit on a per-user basis. Otherwise, you may try to limit by socket.id and limit the number of allowed re-connections from the same IP.
If websocket server is launched as cluster
or PM2
, you should use RateLimiterCluster or RateLimiterCluster with PM2.
Cluster and PM2 limiter is also enough if you use sticky load balancing. However, if cluster master process is restarted, all counters are reset.
Consider RateLimiterRedis or any other store limiter for multiple websocket server nodes.
Well known authorisation protection technique is increasing block duration on consecutive failed attempts.
Here is the logic:
- maximum 5 fails per 15 minutes. Consume one point on failed login attempt.
- if there are no remaining points, increment a counter N for the user who failed.
- block authorisation for the user depending on N.
- clear counter N on successful login.
/**
 * Block duration in minutes growing like the Fibonacci sequence:
 * 1, 1, 2, 3, 5, 8, ... for counts 0/1, 2, 3, 4, 5, 6, ...
 *
 * PERF FIX: the original double recursion was O(phi^n); this iterative
 * version returns identical values in O(n) time and O(1) space.
 *
 * @param {number} countConsecutiveOutOfLimits - how many times in a row the user ran out of points
 * @returns {number} block duration in minutes
 */
function getFibonacciBlockDurationMinutes(countConsecutiveOutOfLimits) {
  if (countConsecutiveOutOfLimits <= 1) {
    return 1;
  }
  let prev = 1; // f(n-2), seeded with f(0) = 1
  let curr = 1; // f(n-1), seeded with f(1) = 1
  for (let i = 2; i <= countConsecutiveOutOfLimits; i += 1) {
    [prev, curr] = [curr, prev + curr];
  }
  return curr;
}
/**
 * Login handler with dynamically growing (Fibonacci) block duration.
 * On each "out of points" event it increments a penalty counter and
 * blocks the user for 60 * fib(penalty) seconds.
 */
async function loginRoute(req, res) {
  const userId = req.body.email;
  const resById = await loginLimiter.get(userId);

  let retrySecs = 0;
  if (resById !== null && resById.remainingPoints <= 0) {
    retrySecs = Math.round(resById.msBeforeNext / 1000) || 1;
  }

  if (retrySecs > 0) {
    res.set('Retry-After', String(retrySecs));
    res.status(429).send('Too Many Requests');
  } else {
    const user = authorise(req.body.email, req.body.password);
    if (!user.isLoggedIn) {
      if (user.exists) {
        try {
          const resConsume = await loginLimiter.consume(userId);
          if (resConsume.remainingPoints <= 0) {
            // Out of points: bump the consecutive-out-of-limits counter and
            // block with a duration that grows with each occurrence.
            const resPenalty = await limiterConsecutiveOutOfLimits.penalty(userId);
            await loginLimiter.block(userId, 60 * getFibonacciBlockDurationMinutes(resPenalty.consumedPoints));
          }
        } catch (rlRejected) {
          if (rlRejected instanceof Error) {
            // Real store error — let the outer handler answer 500.
            throw rlRejected;
          } else {
            // BUG FIX: was `String(Math.round(...)) || 1` — String() is always
            // truthy, so the `|| 1` fallback could never apply.
            res.set('Retry-After', String(Math.round(rlRejected.msBeforeNext / 1000) || 1));
            res.status(429).send('Too Many Requests');
            // BUG FIX: must stop here; the original fell through and also
            // attempted to send a 400 after the 429 had been sent.
            return;
          }
        }
      }
      res.status(400).end('email or password is wrong');
    }
    if (user.isLoggedIn) {
      // Clear the penalty counter on successful login.
      await limiterConsecutiveOutOfLimits.delete(userId);
      res.end('authorized');
    }
  }
}
Note, this example may be not a good fit. If a hacker makes attack on user's account by email, real user should have a way to prove, that he is real. Also, see more flexible example of login protection here.
Sometimes it is reasonable to make the difference between authorized and not authorized requests. For example, an application must provide public access as well as serve for registered and authorized users with different limits.
const express = require('express');
const Redis = require('ioredis');
// BUG FIX: RateLimiterRedis was used below but never imported.
const { RateLimiterRedis } = require('rate-limiter-flexible');

const redisClient = new Redis({ enableOfflineQueue: false });
const app = express();

const rateLimiterRedis = new RateLimiterRedis({
  storeClient: redisClient,
  points: 300, // Number of points
  duration: 60, // Per 60 seconds
});

// req.userId should be set by someAuthMiddleware. It is up to you, how to do that
app.use(someAuthMiddleware);

// Authorized users (keyed by userId) spend 1 point per request;
// anonymous users (keyed by IP) spend 30, so they get 10 requests per minute.
const rateLimiterMiddleware = (req, res, next) => {
  // req.userId should be set
  const key = req.userId ? req.userId : req.ip;
  const pointsToConsume = req.userId ? 1 : 30;
  rateLimiterRedis.consume(key, pointsToConsume)
    .then(() => {
      next();
    })
    .catch(_ => {
      res.status(429).send('Too Many Requests');
    });
};

app.use(rateLimiterMiddleware);
This example is not ideally clean, because in some weird cases userId may be equal to remoteAddress. Make sure this never happens.
It consumes 30 points for every not authorized request or 1 point, if application recognises a user by ID.
This can be achieved by creating of independent limiters.
const express = require('express');
const Redis = require('ioredis');
// BUG FIX: RateLimiterRedis was used below but never imported.
const { RateLimiterRedis } = require('rate-limiter-flexible');

const redisClient = new Redis({ enableOfflineQueue: false });
const app = express();

// General limiter for the whole application.
const rateLimiterRedis = new RateLimiterRedis({
  storeClient: redisClient,
  points: 300, // Number of points
  duration: 60, // Per 60 seconds
});

// Stricter, independent limiter for the /report part of the application.
const rateLimiterRedisReports = new RateLimiterRedis({
  keyPrefix: 'rlreports',
  storeClient: redisClient,
  points: 10, // Only 10 points for reports per user
  duration: 60, // Per 60 seconds
});

// req.userId should be set by someAuthMiddleware. It is up to you, how to do that
app.use(someAuthMiddleware);

const rateLimiterMiddleware = (req, res, next) => {
  const key = req.userId ? req.userId : req.ip;
  if (req.path.indexOf('/report') === 0) {
    const pointsToConsume = req.userId ? 1 : 5;
    rateLimiterRedisReports.consume(key, pointsToConsume)
      .then(() => {
        next();
      })
      .catch(_ => {
        res.status(429).send('Too Many Requests');
      });
  } else {
    const pointsToConsume = req.userId ? 1 : 30;
    rateLimiterRedis.consume(key, pointsToConsume)
      .then(() => {
        next();
      })
      .catch(_ => {
        res.status(429).send('Too Many Requests');
      });
  } // SYNTAX FIX: the original had a stray `;` here and no terminator below
};

app.use(rateLimiterMiddleware);
Different limiters can be set on per endpoint level as well. It is all up to requirements.
Even if an application is protected with a really fast store like Redis to store limits, there is still a way to break it with a massive amount of requests per second.
You should setup Block Strategy to protect against that.
const express = require('express');
const Redis = require('ioredis');
// BUG FIX: RateLimiterRedis was used below but never imported.
const { RateLimiterRedis } = require('rate-limiter-flexible');

const redisClient = new Redis({ enableOfflineQueue: false });
const app = express();

const rateLimiterRedis = new RateLimiterRedis({
  storeClient: redisClient,
  points: 300, // Number of points
  duration: 60, // Per 60 seconds,
  blockDuration: 120, // Block duration in store
  inmemoryBlockOnConsumed: 301, // If userId or IP consume >300 points per minute
  inmemoryBlockDuration: 120, // Block it for two minutes in memory, so no requests go to Redis
});

// req.userId should be set by someAuthMiddleware. It is up to you, how to do that
app.use(someAuthMiddleware);

const rateLimiterMiddleware = (req, res, next) => {
  // req.userId should be set
  const key = req.userId ? req.userId : req.ip;
  const pointsToConsume = req.userId ? 1 : 30;
  rateLimiterRedis.consume(key, pointsToConsume)
    .then(() => {
      next();
    })
    .catch(_ => {
      res.status(429).send('Too Many Requests');
    });
};

app.use(rateLimiterMiddleware);
A userId is blocked in the store for 2 minutes via blockDuration once its points are exhausted. Still, we want to keep the massive flood of requests away from the store itself, so we also set up inmemoryBlockOnConsumed and inmemoryBlockDuration.
More details on in-memory Block Strategy here
There may be many reasons to take care of cases when limits store like Redis is down:
- you have just started your project and do not want to spend time on setting up Redis Cluster or any other stable infrastructure just to handle limits more stable.
- you do not want to spend more money on setting up 2 or more instances of database.
- you need to limit access to an application and you want just sleep well over weekend.
This example demonstrates the memory limiter as insurance. Note that counting changes when Redis is down: the Redis limiter shares 300 points across all NodeJS processes, while a memory limiter counts per process, not overall. We can level that out by dividing the points among the processes.
const express = require('express');
const Redis = require('ioredis');
// BUG FIX: RateLimiterRedis and RateLimiterMemory were used below but never imported.
const { RateLimiterRedis, RateLimiterMemory } = require('rate-limiter-flexible');

const redisClient = new Redis({ enableOfflineQueue: false });
const app = express();

// Per-process fallback: total points divided by the expected process count,
// so the overall limit stays roughly the same while Redis is down.
const rateLimiterMemory = new RateLimiterMemory({
  points: 60, // 300 / 5 if there are 5 processes at all
  duration: 60,
});

const rateLimiterRedis = new RateLimiterRedis({
  storeClient: redisClient,
  points: 300, // Number of points
  duration: 60, // Per 60 seconds,
  inmemoryBlockOnConsumed: 301, // If userId or IP consume >=301 points per minute
  inmemoryBlockDuration: 60, // Block it for a minute in memory, so no requests go to Redis
  insuranceLimiter: rateLimiterMemory,
});

// req.userId should be set by someAuthMiddleware. It is up to you, how to do that
app.use(someAuthMiddleware);

const rateLimiterMiddleware = (req, res, next) => {
  // req.userId should be set
  const key = req.userId ? req.userId : req.ip;
  const pointsToConsume = req.userId ? 1 : 30;
  rateLimiterRedis.consume(key, pointsToConsume)
    .then(() => {
      next();
    })
    .catch(_ => {
      res.status(429).send('Too Many Requests');
    });
};

app.use(rateLimiterMiddleware);
Added insurance rateLimiterMemory
is used only when Redis can not process request by some reason. Any limiter from this package can be used as insurance limiter. You can have another Redis up and running for a case if the first is down as well.
Get started
Middlewares and plugins
Migration from other packages
Limiters:
- Redis
- Memory
- DynamoDB
- Prisma
- MongoDB (with sharding support)
- PostgreSQL
- MySQL
- BurstyRateLimiter
- Cluster
- PM2 Cluster
- Memcached
- RateLimiterUnion
- RateLimiterQueue
Wrappers:
- RLWrapperBlackAndWhite Black and White lists
Knowledge base:
- Block Strategy in memory
- Insurance Strategy
- Comparative benchmarks
- Smooth out traffic peaks
-
Usage example
- Minimal protection against password brute-force
- Login endpoint protection
- Websocket connection prevent flooding
- Dynamic block duration
- Different limits for authorized users
- Different limits for different parts of application
- Block Strategy in memory
- Insurance Strategy
- Third-party API, crawler, bot rate limiting