Episode 3 — NodeJS MongoDB Backend Architecture / 3.16 — Caching with Redis

3.16.d — Advanced Redis Features

Beyond basic caching, Redis supports advanced operations on Hashes, Lists, Sets, and Sorted Sets, along with Pub/Sub messaging, transactions (MULTI/EXEC), session management via connect-redis, and rate limiting -- making it a Swiss Army knife for backend systems.


<< Previous: 3.16.c — Redis with Node.js | Next: Exercise Questions >>


1. Hash Operations in Node.js

Hashes are ideal for storing objects without JSON serialization overhead. Each field can be read or updated individually.

const { getRedisClient } = require('./config/redis');

// Demonstrates the core node-redis v4 Hash commands against one key.
// Redis stores every hash value as a string, so numbers are written as
// strings and parsed on read (HINCRBY handles the numeric math natively).
async function hashOperations() {
  const client = await getRedisClient();

  // HSET — set multiple fields at once
  await client.hSet('user:1001', {
    name: 'Alice',
    email: 'alice@example.com',
    age: '30',
    role: 'admin',
    loginCount: '0',
  });

  // Set TTL on the hash key (TTLs apply to the whole key, not per field)
  await client.expire('user:1001', 3600);

  // HGET — get a single field
  const name = await client.hGet('user:1001', 'name');
  console.log(name); // "Alice"

  // HGETALL — get all fields as an object
  const user = await client.hGetAll('user:1001');
  console.log(user);
  // { name: "Alice", email: "alice@example.com", age: "30", role: "admin", loginCount: "0" }

  // HMGET — get specific fields (returns array)
  // In node-redis v4: client.hmGet('user:1001', ['name', 'email']) — returns
  // an array of values in field order, with null for fields that do not exist.

  // HEXISTS — check if a field exists
  const hasEmail = await client.hExists('user:1001', 'email');
  console.log(hasEmail); // true

  // HINCRBY — atomically increment a numeric field
  const newCount = await client.hIncrBy('user:1001', 'loginCount', 1);
  console.log(newCount); // 1

  // HDEL — delete a specific field
  await client.hDel('user:1001', 'role');

  // HKEYS — get all field names ('role' is gone after the HDEL above)
  const fields = await client.hKeys('user:1001');
  console.log(fields); // ["name", "email", "age", "loginCount"]

  // HVALS — get all values
  const values = await client.hVals('user:1001');

  // HLEN — count fields
  const fieldCount = await client.hLen('user:1001');
  console.log(fieldCount); // 4
}

Practical Example: User Profile Cache

/**
 * Returns a user's profile, served from a Redis hash when cached.
 *
 * Fix: the previous version returned the flattened all-string hash on a
 * cache hit but the raw Mongoose document on a miss — two different shapes
 * depending on cache state. Both paths now return the same flattened
 * profile object.
 *
 * @param {string} userId - Mongo document id of the user.
 * @returns {Promise<object|null>} Flattened profile (string values), or
 *   null when the user does not exist.
 */
async function getUserProfile(userId) {
  const client = await getRedisClient();
  const key = `profile:${userId}`;

  // hGetAll returns an empty object (not null) for a missing key.
  const cached = await client.hGetAll(key);
  if (cached && Object.keys(cached).length > 0) {
    console.log('Profile cache HIT');
    return cached;
  }

  // Cache miss — fetch from MongoDB.
  const user = await User.findById(userId).lean();
  if (!user) return null;

  // Flatten to string fields: Redis hashes only store strings.
  const profile = {
    name: user.name,
    email: user.email,
    avatar: user.avatar || '',
    bio: user.bio || '',
    followers: String(user.followers),
    following: String(user.following),
  };

  await client.hSet(key, profile);
  await client.expire(key, 1800); // 30 minutes

  // Return the same shape a cache hit would return.
  return profile;
}

/**
 * Bumps the cached follower count without refetching the whole profile.
 *
 * Fix: HINCRBY creates the key when it is absent, so an uncached profile
 * would leave behind an orphan `profile:<id>` hash holding only a
 * `followers` field and carrying no TTL — which getUserProfile's
 * "non-empty hash" check would then mistake for a fully cached profile.
 * We only increment when the profile hash actually exists.
 *
 * @param {string} userId
 */
async function incrementFollowerCount(userId) {
  const client = await getRedisClient();
  const key = `profile:${userId}`;

  // EXISTS returns 1 (truthy) when the hash is cached, 0 otherwise.
  if (await client.exists(key)) {
    await client.hIncrBy(key, 'followers', 1);
  }
}

2. List Operations in Node.js

Lists are perfect for queues, recent activity feeds, and ordered data.

// Demonstrates the core node-redis v4 List commands.
// Lists are ordered, allow duplicates, and push/pop at either end in O(1) —
// ideal for queues (RPUSH + LPOP) and capped feeds (LPUSH + LTRIM).
async function listOperations() {
  const client = await getRedisClient();

  // LPUSH — push to the beginning (left)
  await client.lPush('notifications:user:1001', JSON.stringify({
    type: 'like',
    message: 'Alice liked your post',
    timestamp: Date.now(),
  }));

  // RPUSH — push to the end (right)
  await client.rPush('queue:emails', JSON.stringify({
    to: 'user@example.com',
    subject: 'Welcome!',
  }));

  // LRANGE — get a range of elements (0 to -1 = all)
  // Indices are inclusive; negative indices count back from the tail.
  const allNotifications = await client.lRange('notifications:user:1001', 0, -1);
  const recent5 = await client.lRange('notifications:user:1001', 0, 4);

  // Parse JSON from each element
  const parsed = allNotifications.map(JSON.parse);

  // LPOP / RPOP — remove and return from beginning/end
  const next = await client.lPop('queue:emails');

  // LLEN — get list length
  const queueLength = await client.lLen('queue:emails');

  // LTRIM — keep only a range (trim the rest)
  // Keep only the 100 most recent notifications
  await client.lTrim('notifications:user:1001', 0, 99);

  // LINDEX — get element by index
  const first = await client.lIndex('notifications:user:1001', 0);

  // BLPOP — blocking pop (waits for data, great for job queues)
  // Waits up to 30 seconds for an item. NOTE: this blocks the connection
  // while waiting — use a dedicated client for blocking commands.
  const item = await client.blPop('queue:jobs', 30);
  // Returns { key: 'queue:jobs', element: '...' } or null if timeout
}

Practical Example: Recent Activity Feed

/**
 * Prepends an activity entry to a user's feed list, capping the list
 * length and refreshing its expiry.
 *
 * @param {string} userId
 * @param {object} activity - Arbitrary payload; a timestamp is attached.
 */
async function addToActivityFeed(userId, activity) {
  const redis = await getRedisClient();
  const feedKey = `feed:${userId}`;

  const entry = JSON.stringify({ ...activity, timestamp: Date.now() });

  // Newest entries live at the head of the list.
  await redis.lPush(feedKey, entry);

  // Cap the feed at the 200 most recent items.
  await redis.lTrim(feedKey, 0, 199);

  // Feed evaporates after 7 days without activity.
  await redis.expire(feedKey, 604800);
}

/**
 * Reads one page of a user's activity feed from Redis.
 *
 * Improvements: the page slice (LRANGE) and total count (LLEN) are
 * independent reads, so they are issued in parallel instead of serially;
 * JSON.parse is wrapped in an arrow instead of being passed directly to
 * map (map's extra index/array arguments would otherwise be forwarded).
 *
 * @param {string} userId
 * @param {number} [page=1] - 1-based page number.
 * @param {number} [limit=20] - Items per page.
 * @returns {Promise<{items: object[], total: number, page: number, pages: number}>}
 */
async function getActivityFeed(userId, page = 1, limit = 20) {
  const client = await getRedisClient();
  const key = `feed:${userId}`;

  // LRANGE bounds are inclusive.
  const start = (page - 1) * limit;
  const end = start + limit - 1;

  const [items, total] = await Promise.all([
    client.lRange(key, start, end),
    client.lLen(key),
  ]);

  return {
    items: items.map((item) => JSON.parse(item)),
    total,
    page,
    pages: Math.ceil(total / limit),
  };
}

3. Set Operations in Node.js

Sets store unique values and support mathematical set operations.

// Demonstrates the core node-redis v4 Set commands.
// Sets hold unique string members (SADD ignores duplicates) and support
// server-side intersection, union, and difference.
async function setOperations() {
  const client = await getRedisClient();

  // SADD — add members
  await client.sAdd('tags:post:42', ['javascript', 'nodejs', 'redis', 'caching']);

  // SMEMBERS — get all members (order is not guaranteed for Set members)
  const tags = await client.sMembers('tags:post:42');
  console.log(tags); // ["javascript", "nodejs", "redis", "caching"]

  // SISMEMBER — check membership
  const hasTag = await client.sIsMember('tags:post:42', 'redis');
  console.log(hasTag); // true

  // SCARD — count members
  const tagCount = await client.sCard('tags:post:42');
  console.log(tagCount); // 4

  // SREM — remove a member
  await client.sRem('tags:post:42', 'caching');

  // Set operations
  await client.sAdd('user:1:skills', ['javascript', 'react', 'nodejs']);
  await client.sAdd('user:2:skills', ['javascript', 'python', 'django']);

  // SINTER — intersection (common skills)
  const commonSkills = await client.sInter(['user:1:skills', 'user:2:skills']);
  console.log(commonSkills); // ["javascript"]

  // SUNION — union (all unique skills)
  const allSkills = await client.sUnion(['user:1:skills', 'user:2:skills']);

  // SDIFF — difference (skills user:1 has that user:2 does not)
  // Note: SDIFF is directional — the first key is the base set.
  const uniqueSkills = await client.sDiff(['user:1:skills', 'user:2:skills']);
  console.log(uniqueSkills); // ["react", "nodejs"]

  // SRANDMEMBER — random member(s) without removing
  const random = await client.sRandMember('tags:post:42');
}

Practical Example: Online Users Tracker

/**
 * Marks a user as online by adding them to the shared presence Set.
 * SADD is idempotent, so repeated calls for the same user are harmless.
 */
async function userGoesOnline(userId) {
  const redis = await getRedisClient();
  await redis.sAdd('online-users', userId);
}

/**
 * Marks a user as offline by removing them from the presence Set.
 * Removing an absent member is a no-op.
 */
async function userGoesOffline(userId) {
  const redis = await getRedisClient();
  await redis.sRem('online-users', userId);
}

/**
 * Counts currently-online users via SCARD on the presence Set.
 * @returns {Promise<number>} Number of online users.
 */
async function getOnlineUserCount() {
  const redis = await getRedisClient();
  const count = await redis.sCard('online-users');
  return count;
}

/**
 * Checks whether a specific user is present in the online Set.
 * @returns {Promise<boolean>} true when the user is online.
 */
async function isUserOnline(userId) {
  const redis = await getRedisClient();
  const online = await redis.sIsMember('online-users', userId);
  return online;
}

/**
 * Returns the subset of a user's friends that are currently online,
 * computed server-side as a Set intersection (SINTER).
 */
async function getOnlineFriends(userId) {
  const redis = await getRedisClient();
  const keys = [`friends:${userId}`, 'online-users'];
  return redis.sInter(keys);
}

4. Sorted Set Operations in Node.js

Sorted Sets associate a score with each member, maintaining automatic ordering.

// Demonstrates the core node-redis v4 Sorted Set commands.
// Each member carries a numeric score; Redis keeps members ordered by
// score, which makes rank and score-range queries cheap.
async function sortedSetOperations() {
  const client = await getRedisClient();

  // ZADD — add members with scores
  await client.zAdd('leaderboard', [
    { score: 1500, value: 'Alice' },
    { score: 1350, value: 'Bob' },
    { score: 1420, value: 'Charlie' },
    { score: 1280, value: 'Diana' },
  ]);

  // ZRANGE — get by rank ascending (with scores)
  const ascending = await client.zRangeWithScores('leaderboard', 0, -1);
  console.log(ascending);
  // [{ value: "Diana", score: 1280 }, { value: "Bob", score: 1350 }, ...]

  // ZREVRANGE — get by rank descending (top players)
  // In v4, use zRange with REV option
  const top3 = await client.zRange('leaderboard', 0, 2, { REV: true });
  console.log(top3); // ["Alice", "Charlie", "Bob"]

  // ZSCORE — get score of a member
  const aliceScore = await client.zScore('leaderboard', 'Alice');
  console.log(aliceScore); // 1500

  // ZRANK / ZREVRANK — get rank (0-based)
  const rank = await client.zRevRank('leaderboard', 'Alice');
  console.log(rank); // 0 (top position)

  // ZINCRBY — increment score (atomic, like INCR for plain keys)
  await client.zIncrBy('leaderboard', 50, 'Bob');
  // Bob's score is now 1400

  // ZRANGEBYSCORE — get members within a score range
  // '+inf' / '-inf' are valid open-ended bounds.
  const eliteUsers = await client.zRangeByScore('leaderboard', 1400, '+inf');

  // ZCARD — count members
  const playerCount = await client.zCard('leaderboard');

  // ZCOUNT — count members in a score range
  const above1300 = await client.zCount('leaderboard', 1300, '+inf');

  // ZREM — remove a member
  await client.zRem('leaderboard', 'Diana');
}

Practical Example: Real-Time Leaderboard

/**
 * Adds points to a player's weekly leaderboard score.
 * ZINCRBY creates the member (starting from 0) when it does not yet exist.
 *
 * @returns {Promise<number>} The player's updated score.
 */
async function addScore(userId, points) {
  const redis = await getRedisClient();
  return redis.zIncrBy('leaderboard:weekly', points, userId);
}

/**
 * Returns one page of the weekly leaderboard, highest scores first.
 *
 * Improvement: the page slice (ZRANGE ... REV) and the member count
 * (ZCARD) are independent reads, so they run in parallel instead of
 * being awaited one after the other.
 *
 * @param {number} [page=1] - 1-based page number.
 * @param {number} [limit=10] - Entries per page.
 * @returns {Promise<{rankings: {rank: number, userId: string, score: number}[], total: number, page: number, pages: number}>}
 */
async function getLeaderboard(page = 1, limit = 10) {
  const client = await getRedisClient();
  const start = (page - 1) * limit;
  const end = start + limit - 1; // inclusive bound

  const [results, total] = await Promise.all([
    client.zRangeWithScores('leaderboard:weekly', start, end, { REV: true }),
    client.zCard('leaderboard:weekly'),
  ]);

  return {
    rankings: results.map((entry, index) => ({
      rank: start + index + 1, // global 1-based rank
      userId: entry.value,
      score: entry.score,
    })),
    total,
    page,
    pages: Math.ceil(total / limit),
  };
}

/**
 * Looks up a player's 1-based rank (highest score = rank 1) and score.
 *
 * Improvement: ZREVRANK and ZSCORE are independent lookups, so they are
 * issued in parallel rather than serially.
 *
 * @param {string} userId
 * @returns {Promise<{rank: number|null, score: number|null}>} rank and
 *   score are null when the player is not on the leaderboard.
 */
async function getUserRank(userId) {
  const client = await getRedisClient();
  const [rank, score] = await Promise.all([
    client.zRevRank('leaderboard:weekly', userId),
    client.zScore('leaderboard:weekly', userId),
  ]);
  return { rank: rank !== null ? rank + 1 : null, score };
}

5. Pub/Sub Messaging

Redis Pub/Sub enables real-time messaging between different parts of your application.

Publisher(s)                          Subscriber(s)
┌──────────┐    ┌──────────────┐    ┌──────────┐
│ Service A │──>│  Channel:     │──>│ Service B │
│  PUBLISH  │   │  "orders"     │   │ SUBSCRIBE │
└──────────┘    └──────────────┘    └──────────┘
                       │
                       └──────────>┌──────────┐
                                   │ Service C │
                                   │ SUBSCRIBE │
                                   └──────────┘

Publisher

const { createClient } = require('redis');

/**
 * Creates a dedicated publisher client and publishes sample events.
 *
 * Fix: an 'error' listener is attached BEFORE connect(). node-redis
 * clients are EventEmitters, and an unhandled 'error' event crashes the
 * Node.js process on any connection failure.
 *
 * @returns {Promise<object>} The connected publisher client (reuse it —
 *   do not create a new client per publish).
 */
async function setupPublisher() {
  const publisher = createClient();
  publisher.on('error', (err) => console.error('Redis publisher error:', err));
  await publisher.connect();

  // Publish a message to a channel
  await publisher.publish('orders', JSON.stringify({
    orderId: 'ORD-001',
    status: 'created',
    userId: 'user:1001',
    total: 59.99,
    timestamp: Date.now(),
  }));

  console.log('Message published to "orders" channel');

  // Publish to different channels
  await publisher.publish('notifications', JSON.stringify({
    type: 'order-placed',
    userId: 'user:1001',
    message: 'Your order has been placed!',
  }));

  return publisher;
}

Subscriber

/**
 * Creates a dedicated subscriber client and registers channel handlers.
 *
 * Fix: an 'error' listener is attached BEFORE connect() so connection
 * failures don't crash the process. Once subscribed, this client cannot
 * run regular commands — keep it dedicated to Pub/Sub.
 *
 * @returns {Promise<object>} The connected subscriber client.
 */
async function setupSubscriber() {
  const subscriber = createClient();
  subscriber.on('error', (err) => console.error('Redis subscriber error:', err));
  await subscriber.connect();

  // Subscribe to a channel — the v4 callback receives (message, channel)
  await subscriber.subscribe('orders', (message, channel) => {
    const data = JSON.parse(message);
    console.log(`Received on ${channel}:`, data);

    // Process the order event
    if (data.status === 'created') {
      processNewOrder(data);
    }
  });

  // Subscribe to multiple channels
  await subscriber.subscribe('notifications', (message) => {
    const data = JSON.parse(message);
    sendPushNotification(data);
  });

  // Pattern-based subscription (all channels matching a pattern)
  await subscriber.pSubscribe('events:*', (message, channel) => {
    console.log(`Pattern match on ${channel}:`, message);
  });

  console.log('Subscriber listening...');
  return subscriber;
}

Practical Example: Cache Invalidation via Pub/Sub

// In a multi-server setup, use Pub/Sub to notify all servers to invalidate cache

// Server that performs the write
/**
 * Persists a product update and broadcasts a cache-invalidation event so
 * every subscribed server can drop its stale copies.
 *
 * @returns {Promise<object|null>} The updated product document.
 */
async function updateProductAndNotify(productId, data) {
  const product = await Product.findByIdAndUpdate(productId, data, { new: true });

  const event = JSON.stringify({
    type: 'product-updated',
    key: `product:${productId}`,
    patterns: ['products:all:*'],
  });

  // Every server listening on this channel clears the affected keys.
  const publisher = await getRedisClient();
  await publisher.publish('cache-invalidation', event);

  return product;
}

// Each server subscribes to invalidation events
/**
 * Subscribes this server to cross-server cache-invalidation events.
 *
 * Fixes: (1) an 'error' listener is attached BEFORE connect() so transport
 * errors don't crash the process; (2) the async message handler is wrapped
 * in try/catch — a malformed payload or a Redis error would otherwise
 * surface as an unhandled promise rejection.
 */
async function setupCacheInvalidationListener() {
  const subscriber = createClient({ url: process.env.REDIS_URL });
  subscriber.on('error', (err) => console.error('Redis subscriber error:', err));
  await subscriber.connect();

  await subscriber.subscribe('cache-invalidation', async (message) => {
    try {
      const { type, key, patterns } = JSON.parse(message);
      console.log(`Cache invalidation event: ${type}`);

      const client = await getRedisClient();

      // Delete the specific key, if one was named.
      if (key) await client.del(key);

      // Delete by patterns using incremental SCAN (never KEYS in production).
      if (patterns) {
        for (const pattern of patterns) {
          let cursor = 0;
          do {
            const result = await client.scan(cursor, { MATCH: pattern, COUNT: 100 });
            cursor = result.cursor;
            if (result.keys.length > 0) await client.del(result.keys);
          } while (cursor !== 0);
        }
      }
    } catch (err) {
      // Never let a bad event kill the subscriber loop.
      console.error('Cache invalidation handler failed:', err);
    }
  });
}

Important: A subscribed client cannot run other commands. Use separate clients for subscribing and regular operations.


6. Transactions (MULTI/EXEC)

Transactions group multiple commands into one uninterrupted sequence: Redis queues them on MULTI and runs the whole batch on EXEC without interleaving commands from other clients. Note that Redis transactions have no rollback — if a queued command fails during EXEC, the commands before and after it still execute.

// Demonstrates a MULTI/EXEC transaction with node-redis v4.
// Commands are queued client-side and sent together on exec(); Redis runs
// the batch without interleaving commands from other clients. NOTE: Redis
// has no rollback — if one queued command errors during EXEC, the others
// still execute.
async function transactionExample() {
  const client = await getRedisClient();

  // MULTI/EXEC — atomic transaction
  const results = await client.multi()
    .set('account:A:balance', '500')
    .set('account:B:balance', '300')
    .decrBy('account:A:balance', 100)
    .incrBy('account:B:balance', 100)
    .exec();

  console.log(results);
  // ["OK", "OK", 400, 400] — one reply per queued command, in order
  // All commands executed atomically
}

// Practical: Transfer money atomically
// NOTE(review): the HGET balance check runs OUTSIDE the MULTI block, so two
// concurrent transfers can both pass the check and overdraw the wallet
// (check-then-act race). Also, a missing wallet makes parseInt return NaN
// and `NaN < amount` is false, so the transfer would proceed — confirm
// callers guarantee the wallet exists. The WATCH variant below closes the
// race with optimistic locking.
async function transferFunds(fromUser, toUser, amount) {
  const client = await getRedisClient();

  // Check if sender has sufficient balance
  const balance = await client.hGet(`wallet:${fromUser}`, 'balance');
  if (parseInt(balance) < amount) {
    throw new Error('Insufficient funds');
  }

  // Execute transfer atomically: debit + credit + audit entry in one EXEC
  const results = await client.multi()
    .hIncrBy(`wallet:${fromUser}`, 'balance', -amount)
    .hIncrBy(`wallet:${toUser}`, 'balance', amount)
    .rPush('transactions', JSON.stringify({
      from: fromUser,
      to: toUser,
      amount,
      timestamp: Date.now(),
    }))
    .exec();

  return results;
}

Transaction with WATCH (Optimistic Locking)

/**
 * Transfers funds with optimistic locking (WATCH/MULTI/EXEC).
 *
 * Fixes:
 * - In node-redis v4, exec() REJECTS with a WatchError when a watched key
 *   was modified by another client; it does not resolve to null like the
 *   legacy callback API. Both signals are handled below.
 * - A missing wallet made parseInt yield NaN, and `NaN < amount` is false,
 *   so the transfer proceeded; NaN is now treated as insufficient funds.
 *
 * NOTE(review): WATCH state is per-connection — if getRedisClient returns
 * a shared client, concurrent requests share the watch; confirm isolated
 * connections (e.g. executeIsolated) are used in production.
 *
 * @throws {Error} 'Insufficient funds' or 'Transaction conflict, please retry'.
 */
async function transferWithWatch(fromUser, toUser, amount) {
  const client = await getRedisClient();
  const fromKey = `wallet:${fromUser}`;

  // Abort the transaction automatically if this key changes before EXEC.
  await client.watch(fromKey);

  const available = Number.parseInt(await client.hGet(fromKey, 'balance'), 10);
  // `!(available >= amount)` is deliberately NaN-safe: a missing wallet or
  // non-numeric balance is rejected instead of silently going negative.
  if (!(available >= amount)) {
    await client.unwatch(); // release the watch — we will not EXEC
    throw new Error('Insufficient funds');
  }

  try {
    const results = await client.multi()
      .hIncrBy(fromKey, 'balance', -amount)
      .hIncrBy(`wallet:${toUser}`, 'balance', amount)
      .exec();

    if (results === null) {
      // Defensive: older client versions signal an aborted transaction this way.
      throw new Error('Transaction conflict, please retry');
    }

    return results;
  } catch (error) {
    // node-redis v4 rejects with WatchError when the watched key changed.
    if (error.name === 'WatchError') {
      throw new Error('Transaction conflict, please retry');
    }
    throw error;
  }
}

7. Redis as Session Store (connect-redis)

Storing Express sessions in Redis provides persistence across restarts and sharing across multiple servers.

Setup

npm install express-session connect-redis redis

Configuration

const express = require('express');
const session = require('express-session');
const { createClient } = require('redis');
const RedisStore = require('connect-redis').default;

/**
 * Builds an Express app whose sessions are stored in Redis via
 * connect-redis, so session data survives restarts and is shared across
 * server instances.
 */
async function setupApp() {
  const app = express();

  // Create Redis client for sessions
  const redisClient = createClient({
    url: process.env.REDIS_URL || 'redis://localhost:6379',
  });
  await redisClient.connect();

  // Configure session middleware with Redis store
  app.use(session({
    store: new RedisStore({
      client: redisClient,
      prefix: 'sess:',          // Key prefix in Redis
      ttl: 86400,               // Session TTL in seconds (24 hours)
      disableTouch: false,       // false = TTL is refreshed on each request
    }),
    secret: process.env.SESSION_SECRET, // signs the session-id cookie
    resave: false,               // Don't rewrite unmodified sessions
    saveUninitialized: false,    // Don't create session until something is stored
    cookie: {
      secure: process.env.NODE_ENV === 'production', // HTTPS only in production
      httpOnly: true,            // Cookie not readable from client-side JS
      maxAge: 86400000,          // Cookie max age in ms (24 hours)
      sameSite: 'lax',           // omit cookie on most cross-site requests (CSRF mitigation)
    },
  }));

  // Login: authenticate, then persist identity in the server-side session
  app.post('/api/login', async (req, res) => {
    const { email, password } = req.body;
    const user = await authenticateUser(email, password);

    if (!user) {
      return res.status(401).json({ message: 'Invalid credentials' });
    }

    // Store user data in session (connect-redis writes it to Redis)
    req.session.userId = user.id;
    req.session.role = user.role;
    req.session.loginTime = Date.now();

    res.json({ message: 'Logged in', user: { id: user.id, name: user.name } });
  });

  // Who am I: session data was loaded from Redis by the middleware
  app.get('/api/me', (req, res) => {
    if (!req.session.userId) {
      return res.status(401).json({ message: 'Not authenticated' });
    }
    res.json({
      userId: req.session.userId,
      role: req.session.role,
      loginTime: req.session.loginTime,
    });
  });

  // Logout: destroy() removes the session entry from Redis
  app.post('/api/logout', (req, res) => {
    req.session.destroy((err) => {
      if (err) {
        return res.status(500).json({ message: 'Logout failed' });
      }
      res.clearCookie('connect.sid'); // express-session's default cookie name
      res.json({ message: 'Logged out' });
    });
  });

  return app;
}

How Sessions Look in Redis

# In redis-cli:
KEYS sess:*
# "sess:abc123def456..."

GET sess:abc123def456
# {"cookie":{"originalMaxAge":86400000,"expires":"...","httpOnly":true,"secure":false,"sameSite":"lax"},"userId":"64abc123","role":"admin","loginTime":1700000000}

8. Rate Limiting with Redis

Redis is perfect for rate limiting because of its atomic INCR operation and TTL support.

Fixed Window Rate Limiter

/**
 * Fixed-window rate limiter keyed by caller identifier.
 *
 * Fixes over the previous version:
 * - INCR and EXPIRE are sent in one MULTI block. Previously EXPIRE was a
 *   second round-trip issued only when `current === 1`, so a crash between
 *   the two commands left a counter key with no TTL, lingering forever.
 *   Because the window id is baked into the key, re-arming the TTL on
 *   every hit is harmless — the key only needs to outlive its own window.
 * - `resetIn` is derived from the window arithmetic instead of a third
 *   round-trip to TTL (and is now exact: seconds until the window rolls).
 *
 * @param {string} identifier - Caller identity (e.g. IP address or user id).
 * @param {number} [limit=100] - Max requests per window.
 * @param {number} [windowSeconds=60] - Window length in seconds.
 * @returns {Promise<{allowed: boolean, current: number, limit: number, remaining: number, resetIn: number}>}
 */
async function rateLimiter(identifier, limit = 100, windowSeconds = 60) {
  const client = await getRedisClient();
  const nowSeconds = Math.floor(Date.now() / 1000);
  const windowId = Math.floor(nowSeconds / windowSeconds);
  const key = `ratelimit:${identifier}:${windowId}`;

  // Atomically bump the counter and (re)arm the TTL.
  const [current] = await client.multi()
    .incr(key)
    .expire(key, windowSeconds)
    .exec();

  return {
    allowed: current <= limit,
    current,
    limit,
    remaining: Math.max(0, limit - current),
    resetIn: (windowId + 1) * windowSeconds - nowSeconds, // seconds to next window
  };
}

Express Rate Limiting Middleware

/**
 * Express middleware factory wrapping rateLimiter().
 *
 * Improvements: the standard `Retry-After` header is set on 429 responses,
 * and when falling back to X-Forwarded-For (which may carry a chain of
 * addresses: "client, proxy1, proxy2") only the first — client — address
 * is used.
 *
 * Fails open: if Redis is unreachable the request is allowed through.
 *
 * @param {number} [limit=100] - Max requests per window.
 * @param {number} [windowSeconds=60] - Window length in seconds.
 * @returns {Function} Express middleware.
 */
function createRateLimiter(limit = 100, windowSeconds = 60) {
  return async (req, res, next) => {
    try {
      // Take only the first (client) address from any forwarded chain.
      const forwarded = String(req.headers['x-forwarded-for'] || '').split(',')[0].trim();
      const identifier = req.ip || forwarded || 'unknown';
      const result = await rateLimiter(identifier, limit, windowSeconds);

      // Advertise the limit state on every response.
      res.set({
        'X-RateLimit-Limit': result.limit,
        'X-RateLimit-Remaining': result.remaining,
        'X-RateLimit-Reset': result.resetIn,
      });

      if (!result.allowed) {
        res.set('Retry-After', String(result.resetIn)); // standard HTTP header
        return res.status(429).json({
          success: false,
          message: 'Too many requests, please try again later',
          retryAfter: result.resetIn,
        });
      }

      next();
    } catch (error) {
      // If Redis is down, allow the request (fail open) — rate limiting is
      // protective, not critical-path.
      console.error('Rate limiter error:', error.message);
      next();
    }
  };
}

// Apply to routes — more specific paths get stricter limits. Note that
// '/api/' is registered first, so specific routes pass through (and count
// against) the general limiter as well as their own.
app.use('/api/', createRateLimiter(100, 60));             // 100 req/min for all API routes
app.use('/api/auth/login', createRateLimiter(5, 300));     // 5 attempts per 5 min for login
app.use('/api/upload', createRateLimiter(10, 3600));       // 10 uploads per hour

Sliding Window Rate Limiter (More Accurate)

// Sliding-window limiter backed by a Sorted Set: each request is a member
// whose score is its millisecond timestamp, so the window slides smoothly
// instead of resetting at fixed boundaries.
// NOTE(review): the request is added to the set BEFORE the limit check, so
// rejected requests still occupy window slots — confirm that is intended.
async function slidingWindowRateLimiter(identifier, limit = 100, windowSeconds = 60) {
  const client = await getRedisClient();
  const key = `ratelimit:sliding:${identifier}`;
  const now = Date.now();
  const windowStart = now - (windowSeconds * 1000);

  // Use a Sorted Set with timestamps as scores
  const results = await client.multi()
    // Remove entries outside the window
    .zRemRangeByScore(key, '-inf', windowStart)
    // Add current request (random suffix keeps same-millisecond hits distinct)
    .zAdd(key, { score: now, value: `${now}:${Math.random()}` })
    // Count requests in window
    .zCard(key)
    // Set TTL to auto-clean idle keys
    .expire(key, windowSeconds)
    .exec();

  // exec() returns one reply per queued command; index 2 is the ZCARD count.
  const requestCount = results[2];

  return {
    allowed: requestCount <= limit,
    current: requestCount,
    limit,
    remaining: Math.max(0, limit - requestCount),
  };
}

9. Combining Features: Production-Ready Example

// src/config/redis.js
const { createClient } = require('redis');

const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';

// One client for regular commands, one dedicated to Pub/Sub — a subscribed
// connection cannot issue other commands.
const client = createClient({ url: REDIS_URL });
const subscriber = createClient({ url: REDIS_URL });

/**
 * Connects both clients and wires the cross-server cache invalidation
 * listener.
 *
 * Fix: 'error' listeners are attached to BOTH clients BEFORE connect().
 * Previously a handler was added only to `client`, and only after the
 * connection attempt — an unhandled 'error' event from either client
 * would crash the process.
 */
async function initRedis() {
  client.on('error', (err) => console.error('Redis error:', err));
  subscriber.on('error', (err) => console.error('Redis subscriber error:', err));

  await client.connect();
  await subscriber.connect();

  // Listen for cache invalidation events published by any server
  await subscriber.subscribe('cache:invalidate', async (message) => {
    const { keys, patterns } = JSON.parse(message);
    if (keys) await client.del(keys);
    // Handle patterns with SCAN (omitted for brevity)
  });

  console.log('Redis initialized with Pub/Sub listener');
}

module.exports = { client, subscriber, initRedis };

Key Takeaways

  1. Hashes are more efficient than JSON strings for objects -- update individual fields atomically
  2. Lists power queues, feeds, and recent-item tracking with LPUSH/LTRIM patterns
  3. Sets enable unique tracking and powerful intersection/union/difference operations
  4. Sorted Sets are perfect for leaderboards, rankings, and time-based data
  5. Pub/Sub enables real-time messaging between services and cross-server cache invalidation
  6. MULTI/EXEC transactions execute commands atomically; WATCH provides optimistic locking
  7. connect-redis stores Express sessions in Redis for persistence and multi-server sharing
  8. Rate limiting with Redis INCR and TTL is simple, fast, and production-ready

Explain-It Challenge

Scenario: You are building a multiplayer quiz game backend. Requirements: (1) A real-time leaderboard showing top 100 players updated after every answer, (2) A system where each player can only answer 1 question per 10 seconds (rate limiting), (3) Chat messages in game rooms using Pub/Sub, and (4) Player sessions that persist across server restarts.

Design the Redis data structures and operations for each requirement. Which data type would you use for each? Write the Node.js code for the leaderboard update, the per-player rate limiter, the Pub/Sub chat, and the session configuration. How would you handle a player disconnecting and reconnecting?


<< Previous: 3.16.c — Redis with Node.js | Next: Exercise Questions >>