Why Redis?
Redis is an in-memory data store that can dramatically improve API performance by serving frequently accessed data from memory instead of querying the database on every request.
Setting Up Redis
# Install Redis client
npm install redis
# Start Redis server (Docker)
docker run -d -p 6379:6379 redis:alpine
Basic Redis Connection
const redis = require('redis');

// node-redis v4+ takes connection details under `socket` (or a single `url` option)
const client = redis.createClient({
  socket: {
    host: 'localhost',
    port: 6379
  }
});

client.on('error', (err) => {
  console.error('Redis Client Error', err);
});

// connect() is asynchronous; call it from an async context
(async () => {
  await client.connect();
})();
Cache Middleware
const cacheMiddleware = async (req, res, next) => {
  const key = req.originalUrl;

  try {
    // Try to serve the response from cache
    const cached = await client.get(key);
    if (cached) {
      return res.json(JSON.parse(cached));
    }

    // Keep a reference to the original res.json
    const originalJson = res.json.bind(res);

    // Override res.json so the outgoing response gets cached
    res.json = (data) => {
      // Cache for 5 minutes; ignore cache write failures
      client.setEx(key, 300, JSON.stringify(data)).catch(() => {});
      return originalJson(data);
    };

    next();
  } catch (error) {
    // On cache errors, skip caching and fall through to the route handler
    next();
  }
};
// Use middleware
app.get('/api/users', cacheMiddleware, async (req, res) => {
  const users = await User.find();
  res.json(users);
});
Cache Helper Functions
// Get from cache or execute function
async function getOrSetCache(key, callback, expiration = 300) {
  try {
    const cached = await client.get(key);
    if (cached) {
      return JSON.parse(cached);
    }

    const data = await callback();
    await client.setEx(key, expiration, JSON.stringify(data));
    return data;
  } catch (error) {
    console.error('Cache error:', error);
    return await callback();
  }
}
// Usage
app.get('/api/users/:id', async (req, res) => {
  const user = await getOrSetCache(
    `user:${req.params.id}`,
    async () => await User.findById(req.params.id),
    600 // 10 minutes
  );
  res.json(user);
});
// Invalidate cache
async function invalidateCache(pattern) {
  // KEYS scans the whole keyspace and blocks Redis; fine for small datasets,
  // but prefer SCAN in production (see the sketch after the update route below)
  const keys = await client.keys(pattern);
  if (keys.length > 0) {
    await client.del(keys);
  }
}
// Invalidate on update
app.put('/api/users/:id', async (req, res) => {
  // { new: true } makes Mongoose return the updated document rather than the old one
  const user = await User.findByIdAndUpdate(req.params.id, req.body, { new: true });
  await invalidateCache(`user:${req.params.id}*`);
  res.json(user);
});
Rate Limiting with Redis
const rateLimitMiddleware = async (req, res, next) => {
  const key = `rate_limit:${req.ip}`;
  const limit = 100;
  const window = 60; // 1 minute

  try {
    const current = await client.incr(key);

    // First request in this window: start the expiry countdown
    if (current === 1) {
      await client.expire(key, window);
    }

    if (current > limit) {
      return res.status(429).json({
        error: 'Too many requests',
        retryAfter: window
      });
    }

    next();
  } catch (error) {
    // If Redis is unavailable, let the request through rather than failing it
    next();
  }
};
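To put the limiter in front of your routes, register it like any other Express middleware. A minimal sketch, assuming the same app instance used in the earlier examples:
// Apply the rate limiter to every /api route
app.use('/api', rateLimitMiddleware);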
Best Practices
- Set TTL values that match how often each type of data changes
- Invalidate or refresh cached entries whenever the underlying data is updated
- Namespace cache keys with prefixes (for example user: or rate_limit:) so related entries are easy to find and invalidate
- Handle cache failures gracefully by falling back to the data source instead of failing the request
- Monitor cache hit rates to confirm the cache is actually doing useful work (see the sketch below)
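As a rough illustration of the last point, here is one way to track hits and misses with Redis counters. This is a minimal sketch: the cache:hits and cache:misses keys and the recordCacheResult and getCacheHitRate helpers are illustrative names, not part of the code above.
// Count each cache lookup as a hit or a miss
async function recordCacheResult(hit) {
  await client.incr(hit ? 'cache:hits' : 'cache:misses');
}

// Compute the hit rate from the counters (0 when nothing has been recorded yet)
async function getCacheHitRate() {
  const hits = Number(await client.get('cache:hits')) || 0;
  const misses = Number(await client.get('cache:misses')) || 0;
  const total = hits + misses;
  return total === 0 ? 0 : hits / total;
}
Calling recordCacheResult(true) in the cached branch of the middleware and recordCacheResult(false) on a miss is enough to get a rough hit rate you can log or expose on a health endpoint.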