Episode 3 — NodeJS MongoDB Backend Architecture / 3.16 — Caching with Redis
3.16.c — Redis with Node.js
The node-redis (v4+) package provides a modern, Promise-based client for connecting Node.js applications to Redis, enabling you to cache database results, implement Express middleware caching, manage TTL-based expiration, and invalidate stale data on writes.
<< Previous: 3.16.b — Introduction to Redis | Next: 3.16.d — Advanced Redis Features >>
1. Setting Up node-redis (v4+)
Installation
# Install node-redis v4+ (the npm package is published simply as "redis")
npm install redis
# Verify the installed version (should be 4.x or newer; v3 has a callback API)
npm list redis
Basic Connection
const { createClient } = require('redis');

/**
 * Create a Redis client with default settings (localhost:6379),
 * wire up lifecycle logging, and connect.
 * @returns {Promise<object>} a connected node-redis client
 */
async function connectRedis() {
  const redisClient = createClient();

  // An 'error' listener is mandatory: without one, node-redis emits an
  // unhandled 'error' event that crashes the process.
  redisClient
    .on('error', (err) => console.error('Redis Client Error:', err))
    .on('connect', () => console.log('Redis connecting...'))
    .on('ready', () => console.log('Redis connected and ready'))
    .on('reconnecting', () => console.log('Redis reconnecting...'));

  // Unlike v3, v4 does not auto-connect — connect() must be awaited.
  await redisClient.connect();
  return redisClient;
}
Connection with Custom Configuration
// URL form: redis[s]://[[username][:password]@][host][:port][/db-number]
const client = createClient({
  url: 'redis://localhost:6379', // Full URL format
});

// Or with individual options
const client2 = createClient({
  socket: {
    host: 'localhost',
    port: 6379,
    // Called before each reconnect attempt; return a delay in ms,
    // or an Error to stop retrying for good.
    reconnectStrategy: (retries) => {
      if (retries > 10) {
        return new Error('Max retries reached');
      }
      return Math.min(retries * 100, 3000); // Increasing delay, max 3s
    },
  },
  password: process.env.REDIS_PASSWORD, // If authentication is required
  database: 0, // Database number (0-15)
});

// Connection with authentication (Redis Cloud, production).
// Note the empty username before the ':' — it means the default user.
const prodClient = createClient({
  url: `redis://:${process.env.REDIS_PASSWORD}@${process.env.REDIS_HOST}:${process.env.REDIS_PORT}`,
});
Reusable Redis Module
// src/config/redis.js — singleton Redis connection shared by the whole app.
const { createClient } = require('redis');

let client = null;
let connecting = null; // in-flight connect() so concurrent callers share it

/**
 * Return a connected singleton Redis client, creating it on first use.
 *
 * Fix over the original: two concurrent calls during startup each saw
 * `client === null` and created/connected two separate clients (and the
 * first one leaked). The in-flight promise is now cached so every caller
 * awaits the same connection attempt; it is cleared afterwards so a
 * failed connect can be retried.
 *
 * @returns {Promise<object>} connected node-redis client
 */
async function getRedisClient() {
  if (client && client.isOpen) {
    return client;
  }
  if (connecting) {
    return connecting;
  }
  connecting = (async () => {
    const c = createClient({
      url: process.env.REDIS_URL || 'redis://localhost:6379',
    });
    c.on('error', (err) => console.error('Redis Error:', err));
    await c.connect();
    console.log('Redis connected');
    client = c;
    return c;
  })();
  try {
    return await connecting;
  } finally {
    connecting = null; // success or failure, the attempt is over
  }
}

/**
 * Gracefully close the singleton client (quit() waits for pending replies).
 * Resets the singleton so a later getRedisClient() can reconnect.
 */
async function closeRedis() {
  if (client && client.isOpen) {
    await client.quit();
    client = null;
    console.log('Redis disconnected');
  }
}

module.exports = { getRedisClient, closeRedis };
2. Basic Operations: SET, GET, DEL
const { getRedisClient } = require('./config/redis');
/**
 * Tour of the core node-redis string commands: SET/GET/DEL, TTL options,
 * NX-based locks, EXISTS, atomic counters, and multi-key MSET/MGET.
 * Every command returns a Promise in node-redis v4+.
 */
async function basicOperations() {
  const client = await getRedisClient();

  // SET — store a value
  await client.set('greeting', 'Hello, Redis!');

  // GET — retrieve a value (resolves to null if the key does not exist)
  const value = await client.get('greeting');
  console.log(value); // "Hello, Redis!"

  // SET with TTL (expires in 60 seconds)
  await client.set('session:abc123', 'user-data', { EX: 60 });

  // SET with TTL in milliseconds
  await client.set('temp', 'data', { PX: 5000 }); // 5 seconds

  // SET only if NOT exists (NX) — the basis of a simple distributed lock
  const wasSet = await client.set('lock:order:42', 'processing', {
    NX: true,
    EX: 30, // Auto-expire the lock in 30s so a crashed worker can't hold it forever
  });
  console.log(wasSet); // "OK" if set, null if key already existed

  // DEL — delete a key
  const deleted = await client.del('greeting');
  console.log(deleted); // 1 (number of keys deleted)

  // Delete multiple keys in one round trip
  await client.del(['key1', 'key2', 'key3']);

  // Check if key EXISTS
  const exists = await client.exists('session:abc123');
  console.log(exists); // 1 (exists) or 0 (not found)

  // Get TTL remaining
  const ttl = await client.ttl('session:abc123');
  console.log(ttl); // Seconds remaining, -1 = no expiry, -2 = not found

  // INCR / DECR — atomic counters (safe under concurrent clients)
  await client.set('visitors', '0');
  await client.incr('visitors'); // 1
  await client.incrBy('visitors', 10); // 11
  await client.decr('visitors'); // 10

  // MSET / MGET — read/write multiple keys at once
  await client.mSet({
    'user:1:name': 'Alice',
    'user:1:email': 'alice@example.com',
    'user:1:role': 'admin',
  });
  const values = await client.mGet(['user:1:name', 'user:1:email', 'user:1:role']);
  console.log(values); // ["Alice", "alice@example.com", "admin"]
}
3. TTL (Time To Live) Management
/**
 * Demonstrates every TTL-related command: setting expirations (relative
 * seconds, milliseconds, or an absolute timestamp), inspecting them, and
 * removing them again.
 * @param {object} client - connected node-redis client
 */
async function ttlManagement(client) {
  // Expiration set atomically together with the value (1 hour)
  await client.set('cache:products', 'data', { EX: 3600 });

  // Attach a TTL to a key that already exists (10 minutes)
  await client.expire('existing-key', 600);

  // Millisecond-precision variant (60 seconds)
  await client.pExpire('existing-key', 60000);

  // Expire at an absolute Unix timestamp (one hour from now)
  const expiryTimestamp = Math.floor(Date.now() / 1000) + 3600;
  await client.expireAt('existing-key', expiryTimestamp);

  // Inspect the remaining lifetime in seconds...
  const secondsLeft = await client.ttl('cache:products');
  console.log(`Expires in ${secondsLeft} seconds`);

  // ...or in milliseconds when more precision is needed
  const msLeft = await client.pTTL('cache:products');
  console.log(`Expires in ${msLeft} milliseconds`);

  // Drop the TTL entirely — the key becomes persistent again
  await client.persist('cache:products');

  // TTL / PTTL return values:
  //   -1 → key exists but has no expiration
  //   -2 → key does not exist
  //   >0 → time remaining until expiration
}
4. Storing JSON Data
Redis stores strings. To cache JavaScript objects, you must serialize them.
Manual JSON Serialization
/**
 * Cache-aside read of a user profile: serve from Redis when present,
 * otherwise load from MongoDB and cache the JSON for 30 minutes.
 * @param {object} client - connected node-redis client
 * @param {string} userId - MongoDB document id
 * @returns {Promise<object|null>} the user document, or null when not found
 */
async function cacheUserProfile(client, userId) {
  const key = `user:${userId}`;

  // 1. Cache lookup
  const hit = await client.get(key);
  if (hit) {
    console.log('Cache HIT');
    return JSON.parse(hit);
  }

  // 2. Miss — load from MongoDB (.lean() returns a plain object,
  //    which serializes cleanly with JSON.stringify)
  console.log('Cache MISS — querying database');
  const profile = await User.findById(userId).lean();

  // 3. Write back with a 30-minute TTL; never cache a "not found"
  if (profile) {
    await client.set(key, JSON.stringify(profile), { EX: 1800 });
  }
  return profile;
}
// Usage — redisClient is a connected client from getRedisClient()
const user = await cacheUserProfile(redisClient, '64abc123def456');
Using Hashes for Objects (Alternative)
/**
 * Cache a user as a Redis Hash instead of a single JSON string.
 * Hashes allow reading/updating individual fields without
 * deserializing the whole object; note all hash values are strings.
 * @param {object} client - connected node-redis client
 * @param {string} userId
 * @param {object} userData - { name, email, role, loginCount }
 */
async function cacheUserAsHash(client, userId, userData) {
  const hashKey = `user:${userId}`;

  // Write each property as its own hash field (numbers must be stringified)
  const fields = {
    name: userData.name,
    email: userData.email,
    role: userData.role,
    loginCount: String(userData.loginCount),
  };
  await client.hSet(hashKey, fields);

  // EXPIRE applies to the whole hash, not individual fields
  await client.expire(hashKey, 1800);

  // Read a single field...
  const name = await client.hGet(hashKey, 'name');

  // ...or all of them at once
  const allFields = await client.hGetAll(hashKey);
  // { name: "Alice", email: "alice@example.com", role: "admin", loginCount: "42" }

  // Atomically increment a numeric field
  await client.hIncrBy(hashKey, 'loginCount', 1);
}
5. Express Middleware Caching Pattern
The most powerful pattern: a reusable middleware that caches route responses.
Basic Cache Middleware
// src/middleware/cache.js — response caching for Express routes.
const { getRedisClient } = require('../config/redis');

/**
 * Express middleware that caches JSON responses in Redis.
 *
 * On a cache hit the stored body is returned immediately; on a miss,
 * res.json is wrapped so the response body is cached on its way out.
 *
 * Fix over the original: the res.json override was async and awaited
 * client.set, so a Redis failure at response time escaped as an
 * unhandled promise rejection (the surrounding try/catch has already
 * returned by then). The override is now synchronous and the cache
 * write is fire-and-forget with its own .catch.
 *
 * @param {string} keyPrefix - namespace prepended to req.originalUrl
 * @param {number} [ttlSeconds=300] - lifetime of each cache entry
 * @returns {Function} Express middleware (req, res, next)
 */
function cacheMiddleware(keyPrefix, ttlSeconds = 300) {
  return async (req, res, next) => {
    try {
      const client = await getRedisClient();

      // originalUrl includes the query string, so each page/filter
      // combination gets its own entry.
      const cacheKey = `${keyPrefix}:${req.originalUrl}`;

      const cached = await client.get(cacheKey);
      if (cached) {
        console.log(`Cache HIT: ${cacheKey}`);
        return res.json(JSON.parse(cached));
      }
      console.log(`Cache MISS: ${cacheKey}`);

      const originalJson = res.json.bind(res);
      res.json = (body) => {
        // Cache the response without blocking it; log write failures.
        client
          .set(cacheKey, JSON.stringify(body), { EX: ttlSeconds })
          .catch((err) => console.error('Cache write error:', err.message));
        // Send the original response
        return originalJson(body);
      };
      next();
    } catch (error) {
      // If Redis fails, continue without caching
      console.error('Cache middleware error:', error.message);
      next();
    }
  };
}

module.exports = { cacheMiddleware };
Using the Middleware
const express = require('express');
const { cacheMiddleware } = require('./middleware/cache');

const app = express();

// Cache the product list for 5 minutes
// (key: "products:/api/products?<query>", so each filter/page is separate)
app.get('/api/products', cacheMiddleware('products', 300), async (req, res) => {
  const products = await Product.find().lean();
  res.json({ success: true, data: products });
});

// Cache an individual product for 10 minutes
app.get('/api/products/:id', cacheMiddleware('product', 600), async (req, res) => {
  const product = await Product.findById(req.params.id).lean();
  if (!product) {
    return res.status(404).json({ success: false, message: 'Not found' });
  }
  res.json({ success: true, data: product });
});

// ANTI-PATTERN: do NOT cache user-specific data without the user ID in the key
app.get('/api/me/dashboard', isAuthenticated, cacheMiddleware('dashboard', 120), async (req, res) => {
  // The cache key will be: dashboard:/api/me/dashboard
  // Problem: same cache for all users! (see improved version below)
  const stats = await getUserDashboard(req.user.id);
  res.json({ success: true, data: stats });
});
Improved Middleware with User-Aware Keys
/**
 * Response-caching middleware with optional per-user keys and a
 * success-only write policy.
 *
 * Fix over the original: the res.json override was async, so a Redis
 * failure during the cache write became an unhandled promise rejection
 * (the surrounding try/catch has already exited when the route finally
 * responds). The override is now synchronous and the write is
 * fire-and-forget with its own .catch.
 *
 * @param {string} keyPrefix - namespace for cache keys
 * @param {number} [ttlSeconds=300] - entry lifetime
 * @param {object} [options]
 * @param {boolean} [options.perUser] - include req.user.id in the key so
 *   authenticated users never share a cached response
 */
function cacheMiddleware(keyPrefix, ttlSeconds = 300, options = {}) {
  return async (req, res, next) => {
    try {
      const client = await getRedisClient();

      // Build cache key; per-user routes get the user ID baked in
      let cacheKey = `${keyPrefix}:${req.originalUrl}`;
      if (options.perUser && req.user) {
        cacheKey = `${keyPrefix}:user:${req.user.id}:${req.originalUrl}`;
      }

      const cached = await client.get(cacheKey);
      if (cached) {
        return res.json(JSON.parse(cached));
      }

      const originalJson = res.json.bind(res);
      res.json = (body) => {
        // Only cache successful responses (2xx)
        if (res.statusCode >= 200 && res.statusCode < 300) {
          client
            .set(cacheKey, JSON.stringify(body), { EX: ttlSeconds })
            .catch((err) => console.error('Cache error:', err.message));
        }
        return originalJson(body);
      };
      next();
    } catch (error) {
      // A Redis outage must never break the request
      console.error('Cache error:', error.message);
      next();
    }
  };
}

// Usage with per-user caching
app.get('/api/me/dashboard',
  isAuthenticated,
  cacheMiddleware('dashboard', 120, { perUser: true }),
  dashboardHandler
);
6. Cache Invalidation on Writes
When data is created, updated, or deleted, the cache must be invalidated to avoid stale data.
Pattern: Invalidate on Write
// src/controllers/productController.js
const { getRedisClient } = require('../config/redis');
// CREATE — a new product makes every cached list stale
async function createProduct(req, res) {
  const product = await Product.create(req.body);

  // Drop all product-list cache entries (every page/filter combination)
  const client = await getRedisClient();
  const staleKeys = await client.keys('products:*');
  if (staleKeys.length > 0) {
    await client.del(staleKeys);
  }

  res.status(201).json({ success: true, data: product });
}
// UPDATE — invalidate the specific product cache and all list caches
async function updateProduct(req, res) {
  const { id } = req.params;
  const product = await Product.findByIdAndUpdate(id, req.body, { new: true });

  // Fix: findByIdAndUpdate returns null for an unknown id; the original
  // responded 200 with data: null instead of a 404.
  if (!product) {
    return res.status(404).json({ success: false, message: 'Not found' });
  }

  const client = await getRedisClient();
  // Invalidate the specific product cache (key matches the cache middleware)
  await client.del(`product:/api/products/${id}`);
  // Invalidate list caches that might include this product
  const listKeys = await client.keys('products:*');
  if (listKeys.length > 0) {
    await client.del(listKeys);
  }

  res.json({ success: true, data: product });
}
// DELETE — invalidate both the specific and the list caches
async function deleteProduct(req, res) {
  const { id } = req.params;
  const deleted = await Product.findByIdAndDelete(id);

  // Fix: findByIdAndDelete returns null for an unknown id; the original
  // reported "Deleted" (200) even when nothing was removed.
  if (!deleted) {
    return res.status(404).json({ success: false, message: 'Not found' });
  }

  const client = await getRedisClient();
  await client.del(`product:/api/products/${id}`);
  const listKeys = await client.keys('products:*');
  if (listKeys.length > 0) {
    await client.del(listKeys);
  }

  res.json({ success: true, message: 'Deleted' });
}
Using SCAN Instead of KEYS (Production-Safe)
// KEYS is O(n) and blocks Redis. Use SCAN in production.
/**
 * Delete every key matching `pattern` using cursor-based SCAN.
 *
 * Fix over the original: keys are now deleted batch-by-batch as SCAN
 * yields them, instead of buffering the entire matching key set in
 * process memory before a single huge DEL.
 *
 * @param {object} client - connected node-redis client
 * @param {string} pattern - glob-style pattern, e.g. 'products:*'
 */
async function invalidateByPattern(client, pattern) {
  let cursor = 0;
  let deletedCount = 0;
  do {
    const result = await client.scan(cursor, { MATCH: pattern, COUNT: 100 });
    cursor = result.cursor;
    if (result.keys.length > 0) {
      deletedCount += await client.del(result.keys);
    }
  } while (cursor !== 0); // cursor 0 means the scan is complete
  if (deletedCount > 0) {
    console.log(`Invalidated ${deletedCount} cache entries`);
  }
}
// Usage — remove every cached product-list entry
await invalidateByPattern(client, 'products:*');
7. Complete Caching Example: Express + MongoDB + Redis
// src/app.js — application entry point: wires Express, MongoDB and Redis.
const express = require('express');
const mongoose = require('mongoose');
const { getRedisClient, closeRedis } = require('./config/redis');
const productRoutes = require('./routes/products');

const app = express();
app.use(express.json());

// Initialize connections before listening, so the first requests
// never race the MongoDB/Redis handshakes.
async function start() {
  await mongoose.connect(process.env.MONGO_URI);
  console.log('MongoDB connected');
  await getRedisClient();
  console.log('Redis connected');
  app.use('/api/products', productRoutes);
  app.listen(3000, () => console.log('Server running on port 3000'));
}

// Graceful shutdown: close Redis and MongoDB cleanly on SIGTERM
// (sent by Docker/Kubernetes/process managers before a hard kill).
process.on('SIGTERM', async () => {
  await closeRedis();
  await mongoose.disconnect();
  process.exit(0);
});

start().catch(console.error);
// src/models/Product.js — Mongoose schema and model for products.
const mongoose = require('mongoose');

const productSchema = new mongoose.Schema({
  name: { type: String, required: true },
  price: { type: Number, required: true },
  category: { type: String, required: true },
  description: String, // optional free-text description
  inStock: { type: Boolean, default: true },
}, { timestamps: true }); // adds createdAt/updatedAt automatically

module.exports = mongoose.model('Product', productSchema);
// src/services/productService.js
const Product = require('../models/Product');
const { getRedisClient } = require('../config/redis');
const CACHE_TTL = 600; // 10 minutes
/**
 * List products using the cache-aside pattern.
 *
 * Fix over the original: the cache key is now derived from the query
 * with its keys sorted, so logically identical filters (e.g. {a:1,b:2}
 * vs {b:2,a:1}) share one cache entry — raw JSON.stringify output
 * depends on property insertion order and fragmented the cache.
 *
 * @param {object} [query] - Mongoose filter built from the request
 * @returns {Promise<{data: object[], source: 'cache'|'database'}>}
 */
async function getAllProducts(query = {}) {
  const client = await getRedisClient();
  const stableQuery = Object.fromEntries(
    Object.entries(query).sort(([a], [b]) => a.localeCompare(b))
  );
  const cacheKey = `products:all:${JSON.stringify(stableQuery)}`;

  // 1. Check cache
  const cached = await client.get(cacheKey);
  if (cached) {
    return { data: JSON.parse(cached), source: 'cache' };
  }

  // 2. Query database
  const products = await Product.find(query).lean();

  // 3. Cache the result (key still matches the 'products:all:*' invalidation pattern)
  await client.set(cacheKey, JSON.stringify(products), { EX: CACHE_TTL });
  return { data: products, source: 'database' };
}
/**
 * Fetch a single product by id, cache-aside with a 10-minute TTL.
 * @param {string} id - MongoDB document id
 * @returns {Promise<{data: object|null, source: 'cache'|'database'}>}
 */
async function getProductById(id) {
  const client = await getRedisClient();
  const key = `product:${id}`;

  const hit = await client.get(key);
  if (hit) {
    return { data: JSON.parse(hit), source: 'cache' };
  }

  const doc = await Product.findById(id).lean();
  if (!doc) {
    // Never cache a miss — a product created later must be visible
    return { data: null, source: 'database' };
  }
  await client.set(key, JSON.stringify(doc), { EX: CACHE_TTL });
  return { data: doc, source: 'database' };
}
/**
 * Create a product and drop every cached list entry (any list could
 * now be stale).
 * @param {object} data - product fields
 * @returns {Promise<object>} the created Mongoose document
 */
async function createProduct(data) {
  const created = await Product.create(data);
  const redis = await getRedisClient();
  await invalidatePattern(redis, 'products:all:*');
  return created;
}
/**
 * Update a product; both its item cache entry and every list entry may
 * now be stale, so both are evicted.
 * @param {string} id - MongoDB document id
 * @param {object} data - fields to update
 * @returns {Promise<object|null>} the updated document, or null if not found
 */
async function updateProduct(id, data) {
  const updated = await Product.findByIdAndUpdate(id, data, { new: true }).lean();
  const redis = await getRedisClient();
  await redis.del(`product:${id}`);
  await invalidatePattern(redis, 'products:all:*');
  return updated;
}
/**
 * Delete a product and evict its item cache entry plus all list entries.
 * @param {string} id - MongoDB document id
 */
async function deleteProduct(id) {
  await Product.findByIdAndDelete(id);
  const redis = await getRedisClient();
  await redis.del(`product:${id}`);
  await invalidatePattern(redis, 'products:all:*');
}
/**
 * Delete every key matching `pattern` via cursor-based SCAN, deleting
 * each batch as it arrives (KEYS would block the server on large keyspaces).
 * @param {object} client - connected node-redis client
 * @param {string} pattern - glob-style pattern, e.g. 'products:all:*'
 */
async function invalidatePattern(client, pattern) {
  let cursor = 0;
  do {
    const { cursor: nextCursor, keys } = await client.scan(cursor, {
      MATCH: pattern,
      COUNT: 100,
    });
    cursor = nextCursor;
    if (keys.length > 0) {
      await client.del(keys);
    }
  } while (cursor !== 0); // cursor 0 signals the scan is complete
}
module.exports = { getAllProducts, getProductById, createProduct, updateProduct, deleteProduct };
// src/routes/products.js — REST endpoints for products, delegating to
// the cache-aware service layer.
const express = require('express');
const router = express.Router();
const productService = require('../services/productService');

// Shared error responder so every handler reports failures in one shape.
const sendError = (res, status, message) =>
  res.status(status).json({ success: false, message });

// GET /api/products — list, optionally filtered via the query string
router.get('/', async (req, res) => {
  try {
    const { data, source } = await productService.getAllProducts(req.query);
    res.json({ success: true, source, count: data.length, data });
  } catch (error) {
    sendError(res, 500, error.message);
  }
});

// GET /api/products/:id — single product
router.get('/:id', async (req, res) => {
  try {
    const { data, source } = await productService.getProductById(req.params.id);
    if (!data) {
      return sendError(res, 404, 'Product not found');
    }
    res.json({ success: true, source, data });
  } catch (error) {
    sendError(res, 500, error.message);
  }
});

// POST /api/products — create (validation failures surface as 400)
router.post('/', async (req, res) => {
  try {
    const product = await productService.createProduct(req.body);
    res.status(201).json({ success: true, data: product });
  } catch (error) {
    sendError(res, 400, error.message);
  }
});

// PUT /api/products/:id — update
router.put('/:id', async (req, res) => {
  try {
    const product = await productService.updateProduct(req.params.id, req.body);
    if (!product) {
      return sendError(res, 404, 'Product not found');
    }
    res.json({ success: true, data: product });
  } catch (error) {
    sendError(res, 400, error.message);
  }
});

// DELETE /api/products/:id
router.delete('/:id', async (req, res) => {
  try {
    await productService.deleteProduct(req.params.id);
    res.json({ success: true, message: 'Product deleted' });
  } catch (error) {
    sendError(res, 500, error.message);
  }
});

module.exports = router;
8. Error Handling and Graceful Degradation
A good caching layer should never break the application. If Redis is down, the app should fall back to the database.
/**
 * Cache-aside read with graceful degradation: a Redis outage never
 * breaks the request — the caller simply hits the database instead.
 * @param {string} cacheKey - Redis key for this entry
 * @param {Function} dbFetchFn - async function that loads the data on a miss
 * @param {number} [ttl=600] - cache lifetime in seconds
 * @returns {Promise<*>} cached or freshly fetched data
 */
async function getWithFallback(cacheKey, dbFetchFn, ttl = 600) {
  // 1. Try the cache; swallow Redis errors and fall through to the DB.
  try {
    const redis = await getRedisClient();
    const hit = await redis.get(cacheKey);
    if (hit) return JSON.parse(hit);
  } catch (error) {
    console.warn('Redis unavailable, falling back to database:', error.message);
  }

  // 2. Cache miss (or Redis down) — read from the database.
  const data = await dbFetchFn();

  // 3. Best-effort write-back; a failure here only costs the next hit.
  try {
    const redis = await getRedisClient();
    await redis.set(cacheKey, JSON.stringify(data), { EX: ttl });
  } catch (error) {
    console.warn('Failed to cache result:', error.message);
  }

  return data;
}
// Usage — featured products, cached for 10 minutes
const products = await getWithFallback(
  'products:featured',
  () => Product.find({ featured: true }).lean(),
  600
);
Key Takeaways
- node-redis v4+ uses Promises (async/await) -- always call `await client.connect()` before using it
- Create a reusable Redis module with a singleton pattern and event handlers
- JSON.stringify/parse is required for caching JavaScript objects as Redis stores strings
- The Express middleware pattern intercepts `res.json` to cache responses transparently
- Invalidate caches on writes -- delete affected keys when data is created, updated, or deleted
- Use SCAN instead of KEYS in production to avoid blocking the Redis server
- Always implement graceful degradation -- if Redis fails, fall back to the database
- Include TTL on every cached entry to prevent stale data from living forever
Explain-It Challenge
Scenario: You have an Express API for a blog. The GET /api/posts endpoint queries MongoDB for posts with pagination, filtering by category, and sorting by date. It receives 2000 requests per minute. Posts are created or updated about 5 times per hour.
Design a complete caching solution: create the Redis module, the cache middleware, the service layer with cache-aside pattern, and the invalidation logic for when a post is created or updated. How would you handle the cache key for different pagination and filter combinations? What TTL would you choose and why?
<< Previous: 3.16.b — Introduction to Redis | Next: 3.16.d — Advanced Redis Features >>