
Caching Strategy Implementation - Complete Guide

Published: September 25, 2024 | Reading time: 20 minutes

Caching Strategy Overview

Effective caching strategies improve application performance and reduce server load:

Caching Benefits
# Caching Benefits
- Faster response times
- Reduced server load
- Lower bandwidth usage
- Improved scalability
- Better user experience
- Cost reduction
- Database load reduction

Redis Caching

Redis Setup and Configuration

Redis Implementation
# Redis Installation and Setup
# Install Redis
sudo apt update
sudo apt install redis-server

# Configure Redis
sudo nano /etc/redis/redis.conf

# Key Redis Configuration
bind 127.0.0.1
port 6379
timeout 300
tcp-keepalive 300
maxmemory 256mb
maxmemory-policy allkeys-lru
save 900 1
save 300 10
save 60 10000

# Start Redis
sudo systemctl start redis-server
sudo systemctl enable redis-server

# Redis CLI Commands
redis-cli
SET user:123 '{"name":"John","email":"john@example.com"}'
GET user:123
EXPIRE user:123 3600
TTL user:123
DEL user:123

# Node.js Redis Implementation
const redis = require('redis');
// Client for the local Redis instance.
// NOTE(review): `retry_strategy` is a node_redis v2/v3 option; it was
// removed in redis v4 (replaced by socket.reconnectStrategy). Confirm
// which client version the project pins.
const client = redis.createClient({
    host: 'localhost',
    port: 6379,
    // Decides whether/when to reconnect after a connection failure.
    retry_strategy: (options) => {
        // Returning an Error aborts retrying and surfaces the error.
        if (options.error && options.error.code === 'ECONNREFUSED') {
            return new Error('Redis server connection refused');
        }
        // Give up after one hour of cumulative retry time.
        if (options.total_retry_time > 1000 * 60 * 60) {
            return new Error('Retry time exhausted');
        }
        // Returning undefined stops retrying without raising an error.
        if (options.attempt > 10) {
            return undefined;
        }
        // Otherwise wait attempt*100 ms (capped at 3 s) before reconnecting.
        return Math.min(options.attempt * 100, 3000);
    }
});

# Redis Caching Functions
class RedisCache {
    /**
     * JSON-serializing wrapper around a node_redis client.
     *
     * Fix: the v3 node_redis client is callback-based, so the original
     * `await this.client.setex(...)` / `get(...)` calls resolved
     * immediately without waiting for Redis (and `get` never yielded the
     * value). The commands are promisified here so `await` actually works.
     * NOTE(review): with redis v4+ the client is natively promise-based
     * and this wrapping is unnecessary — confirm the pinned version.
     */
    constructor() {
        const { promisify } = require('util');
        this.client = redis.createClient();
        this.client.on('error', (err) => console.log('Redis Client Error', err));
        // Promise-returning command wrappers bound to the client instance.
        this.setexAsync = promisify(this.client.setex).bind(this.client);
        this.getAsync = promisify(this.client.get).bind(this.client);
        this.delAsync = promisify(this.client.del).bind(this.client);
        this.existsAsync = promisify(this.client.exists).bind(this.client);
    }

    /**
     * Store a JSON-serializable value under `key` with a TTL in seconds.
     * @returns {Promise<boolean>} true on success, false on any error.
     */
    async set(key, value, ttl = 3600) {
        try {
            const serialized = JSON.stringify(value);
            await this.setexAsync(key, ttl, serialized);
            return true;
        } catch (error) {
            console.error('Redis set error:', error);
            return false;
        }
    }

    /**
     * Fetch and JSON-parse a value; null on miss or error.
     */
    async get(key) {
        try {
            const value = await this.getAsync(key);
            return value ? JSON.parse(value) : null;
        } catch (error) {
            console.error('Redis get error:', error);
            return null;
        }
    }

    /** Delete a key; true on success, false on error. */
    async del(key) {
        try {
            await this.delAsync(key);
            return true;
        } catch (error) {
            console.error('Redis delete error:', error);
            return false;
        }
    }

    /** Check key existence (Redis EXISTS returns 1 when present). */
    async exists(key) {
        try {
            const result = await this.existsAsync(key);
            return result === 1;
        } catch (error) {
            console.error('Redis exists error:', error);
            return false;
        }
    }
}

# Application Integration
const cache = new RedisCache();

/**
 * Fetch a user by id, consulting the Redis cache before the database
 * (cache-aside). A database hit is written back with a 1-hour TTL.
 */
async function getUserById(id) {
    const cacheKey = `user:${id}`;

    const cached = await cache.get(cacheKey);
    if (cached) {
        return cached;
    }

    // Cache miss: load from the database and populate the cache.
    const user = await database.getUserById(id);
    if (user) {
        await cache.set(cacheKey, user, 3600);
    }
    return user;
}

Memcached Implementation

Memcached Setup and Usage

Memcached Implementation
# Memcached Installation
sudo apt update
sudo apt install memcached

# Configure Memcached
sudo nano /etc/memcached.conf

# Key Memcached Configuration
-m 64
-p 11211
-u memcache
-l 127.0.0.1
-c 1024
-t 4

# Start Memcached
sudo systemctl start memcached
sudo systemctl enable memcached

# Memcached CLI Commands
telnet localhost 11211
set user:123 0 3600 42
{"name":"John","email":"john@example.com"}
STORED
get user:123
VALUE user:123 0 42
{"name":"John","email":"john@example.com"}
END
delete user:123
DELETED

# Node.js Memcached Implementation
const Memcached = require('memcached');
const memcached = new Memcached('localhost:11211');

/**
 * Promise wrapper around the callback-based `memcached` client,
 * configured with retries and a failover server.
 */
class MemcachedCache {
    constructor() {
        this.client = new Memcached('localhost:11211', {
            retries: 10,
            retry: 10000,
            remove: true,
            failOverServers: ['192.168.0.100:11211']
        });
    }

    /** Store `value` under `key` for `ttl` seconds (default 1 hour). */
    set(key, value, ttl = 3600) {
        return new Promise((resolve, reject) => {
            this.client.set(key, value, ttl, (err) =>
                err ? reject(err) : resolve(true)
            );
        });
    }

    /** Resolve with the stored value, or undefined on a cache miss. */
    get(key) {
        return new Promise((resolve, reject) => {
            this.client.get(key, (err, data) =>
                err ? reject(err) : resolve(data)
            );
        });
    }

    /** Remove `key`; resolves true when the command completes. */
    delete(key) {
        return new Promise((resolve, reject) => {
            this.client.del(key, (err) =>
                err ? reject(err) : resolve(true)
            );
        });
    }
}

# Application Usage
const memcache = new MemcachedCache();

/**
 * Product lookup with a Memcached layer in front of the database.
 * Any cache failure degrades to a direct database read.
 */
async function getProductById(id) {
    const cacheKey = `product:${id}`;

    try {
        const cached = await memcache.get(cacheKey);
        if (cached) {
            return cached;
        }

        const product = await database.getProductById(id);
        if (product) {
            // 30-minute TTL for product records.
            await memcache.set(cacheKey, product, 1800);
        }
        return product;
    } catch (error) {
        console.error('Cache error:', error);
        // Fallback to database
        return await database.getProductById(id);
    }
}

Application-Level Caching

In-Memory Caching

Application Caching Strategies
# Application-Level Caching

# 1. Simple In-Memory Cache
class InMemoryCache {
    /**
     * Simple TTL cache over a Map with FIFO eviction.
     *
     * @param {number} maxSize - maximum number of entries kept.
     * @param {number} ttl - entry lifetime in milliseconds (default 1 h).
     */
    constructor(maxSize = 1000, ttl = 3600000) {
        this.cache = new Map();
        this.maxSize = maxSize;
        this.ttl = ttl;
    }

    /**
     * Insert or overwrite an entry.
     * Fix: only evict when the key is NEW — the original evicted the
     * oldest entry even when overwriting an existing key at capacity,
     * needlessly dropping a live entry.
     */
    set(key, value) {
        if (!this.cache.has(key) && this.cache.size >= this.maxSize) {
            // Map iterates in insertion order, so the first key is oldest.
            const oldestKey = this.cache.keys().next().value;
            this.cache.delete(oldestKey);
        }

        this.cache.set(key, {
            value,
            timestamp: Date.now()
        });
    }

    /**
     * Return the stored value, or null when absent or expired.
     * Expired entries are removed lazily on read.
     */
    get(key) {
        const item = this.cache.get(key);

        if (!item) {
            return null;
        }

        if (Date.now() - item.timestamp > this.ttl) {
            this.cache.delete(key);
            return null;
        }

        return item.value;
    }

    /** Remove one entry; returns true if it existed. */
    delete(key) {
        return this.cache.delete(key);
    }

    /** Drop all entries. */
    clear() {
        this.cache.clear();
    }

    /** Current entry count (may include not-yet-purged expired items). */
    size() {
        return this.cache.size;
    }
}

# 2. LRU Cache Implementation
/**
 * Least-recently-used cache backed by a Map, which preserves insertion
 * order: the first key is always the coldest entry.
 */
class LRUCache {
    constructor(capacity = 100) {
        this.capacity = capacity;
        this.cache = new Map();
    }

    /** Return the cached value (refreshing its recency) or null on miss. */
    get(key) {
        if (!this.cache.has(key)) {
            return null;
        }
        // Re-insert so the key becomes the most recently used.
        const hit = this.cache.get(key);
        this.cache.delete(key);
        this.cache.set(key, hit);
        return hit;
    }

    /**
     * Insert or refresh an entry, evicting the coldest one when the
     * capacity would otherwise be exceeded.
     */
    set(key, value) {
        if (this.cache.has(key)) {
            this.cache.delete(key);
        } else if (this.cache.size >= this.capacity) {
            const coldest = this.cache.keys().next().value;
            this.cache.delete(coldest);
        }
        this.cache.set(key, value);
    }
}

# 3. Cache-Aside Pattern
/**
 * Cache-aside (lazy loading): reads check the cache first and populate
 * it on a miss; writes and deletes go to the database, then the cache.
 */
class CacheAsidePattern {
    constructor(cache, database) {
        this.cache = cache;
        this.database = database;
    }

    /** Read through the cache, falling back to the database on a miss. */
    async get(key) {
        const cached = await this.cache.get(key);
        if (cached) {
            return cached;
        }

        const fromDb = await this.database.get(key);
        if (fromDb) {
            await this.cache.set(key, fromDb);
        }
        return fromDb;
    }

    /** Persist to the database, then refresh the cache entry. */
    async set(key, value) {
        await this.database.set(key, value);
        await this.cache.set(key, value);
    }

    /** Remove from the database, then from the cache. */
    async delete(key) {
        await this.database.delete(key);
        await this.cache.delete(key);
    }
}

# 4. Write-Through Cache Pattern
/**
 * Write-through: every write lands in the cache first and is then
 * synchronously persisted, so reads can be served from cache alone.
 */
class WriteThroughCache {
    constructor(cache, database) {
        this.cache = cache;
        this.database = database;
    }

    /** Reads come straight from the cache. */
    async get(key) {
        return await this.cache.get(key);
    }

    /** Write the cache first, then persist to the database. */
    async set(key, value) {
        await this.cache.set(key, value);
        await this.database.set(key, value);
    }

    /** Remove from the cache, then from the database. */
    async delete(key) {
        await this.cache.delete(key);
        await this.database.delete(key);
    }
}

# 5. Write-Behind Cache Pattern
/**
 * Write-behind (write-back): writes hit the cache immediately and are
 * queued for a batched, asynchronous database flush.
 *
 * Fixes over the original:
 *  - the background setInterval timer is unref()'d so it no longer keeps
 *    the Node process alive on its own (it was never cleared anywhere);
 *  - when a batch write fails, re-queued entries no longer overwrite
 *    values written while the flush was in flight (newer value wins).
 *
 * @param {object} cache         async get/set cache.
 * @param {object} database      must expose batchWrite([[key, value], ...]).
 * @param {number} batchSize     flush as soon as this many writes are queued.
 * @param {number} flushInterval background flush period in ms.
 */
class WriteBehindCache {
    constructor(cache, database, batchSize = 100, flushInterval = 5000) {
        this.cache = cache;
        this.database = database;
        this.batchSize = batchSize;
        this.flushInterval = flushInterval;
        this.pendingWrites = new Map();

        // Periodic background flush; unref (when available, i.e. Node)
        // so the timer does not block process exit.
        this.flushTimer = setInterval(() => this.flush(), this.flushInterval);
        if (typeof this.flushTimer.unref === 'function') {
            this.flushTimer.unref();
        }
    }

    /** Reads are served from the cache only. */
    async get(key) {
        return await this.cache.get(key);
    }

    /**
     * Write to the cache immediately and queue the database write,
     * flushing eagerly once the batch size is reached.
     */
    async set(key, value) {
        await this.cache.set(key, value);
        this.pendingWrites.set(key, value);

        if (this.pendingWrites.size >= this.batchSize) {
            await this.flush();
        }
    }

    /**
     * Drain the pending queue into one batchWrite call. On failure the
     * batch is re-queued, but never over a newer pending value.
     */
    async flush() {
        if (this.pendingWrites.size === 0) {
            return;
        }

        const writes = Array.from(this.pendingWrites.entries());
        this.pendingWrites.clear();

        try {
            await this.database.batchWrite(writes);
        } catch (error) {
            console.error('Batch write failed:', error);
            writes.forEach(([key, value]) => {
                if (!this.pendingWrites.has(key)) {
                    this.pendingWrites.set(key, value);
                }
            });
        }
    }
}

CDN Caching

CDN Configuration

CDN Caching Setup
# CDN Caching Configuration

# 1. Cloudflare CDN Setup
# Cloudflare Page Rules
# URL Pattern: example.com/static/*
# Settings:
# - Cache Level: Cache Everything
# - Edge Cache TTL: 1 month
# - Browser Cache TTL: 1 month

# 2. AWS CloudFront Configuration
# CloudFront Distribution Settings
{
    "Origins": [
        {
            "DomainName": "example.com",
            "OriginPath": "",
            "CustomOriginConfig": {
                "HTTPPort": 80,
                "HTTPSPort": 443,
                "OriginProtocolPolicy": "https-only"
            }
        }
    ],
    "DefaultCacheBehavior": {
        "TargetOriginId": "example.com",
        "ViewerProtocolPolicy": "redirect-to-https",
        "CachePolicyId": "4135ea2d-6df8-44a3-9df3-4b5a84be39ad",
        "Compress": true
    },
    "CacheBehaviors": [
        {
            "PathPattern": "/static/*",
            "TargetOriginId": "example.com",
            "ViewerProtocolPolicy": "redirect-to-https",
            "CachePolicyId": "4135ea2d-6df8-44a3-9df3-4b5a84be39ad",
            "TTL": {
                "DefaultTTL": 31536000,
                "MaxTTL": 31536000
            }
        }
    ]
}

# 3. Cache Headers Configuration
# Nginx Cache Headers
location ~* \.(css|js|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
    expires 1y;
    add_header Cache-Control "public, immutable";
    add_header Vary Accept-Encoding;
    
    # Enable gzip compression
    gzip_static on;
}

location ~* \.(html|htm)$ {
    expires 1h;
    add_header Cache-Control "public, must-revalidate";
}

# 4. Cache Invalidation
# CloudFront Invalidation
aws cloudfront create-invalidation \
    --distribution-id E1234567890ABC \
    --paths "/static/css/*" "/static/js/*"

# 5. Dynamic Content Caching
# Cache API responses
// Cache API responses: serve /api/users/:id via cache-aside and attach
// HTTP cache headers for downstream/CDN caching.
app.get('/api/users/:id', async (req, res) => {
    const userId = req.params.id;
    const cacheKey = `user:${userId}`;

    // Try cache first
    let user = await cache.get(cacheKey);

    if (!user) {
        user = await database.getUserById(userId);

        if (user) {
            // Cache for 1 hour
            await cache.set(cacheKey, user, 3600);
        }
    }

    // Fix: the original dereferenced user.updated_at even when the user
    // was not found, throwing a TypeError instead of returning 404.
    if (!user) {
        return res.status(404).json({ error: 'User not found' });
    }

    // Set cache headers
    res.set({
        'Cache-Control': 'public, max-age=3600',
        'ETag': `"${userId}-${user.updated_at}"`
    });

    res.json(user);
});

# 6. Cache Busting Strategies
# Version-based cache busting (bump the query parameter on each release)
<link rel="stylesheet" href="/css/style.css?v=1.2.3">
<script src="/js/app.js?v=1.2.3"></script>

# Hash-based cache busting (content hash embedded in the filename)
<link rel="stylesheet" href="/css/style.a1b2c3d4.css">
<script src="/js/app.e5f6a7b8.js"></script>

# 7. Service Worker Caching
# Service Worker Implementation
// Name of the versioned cache bucket; bump the suffix on deploy to
// invalidate previously cached assets.
const CACHE_NAME = 'app-cache-v1';
// App-shell assets precached at install time.
const urlsToCache = [
    '/',
    '/css/style.css',
    '/js/app.js',
    '/images/logo.png'
];

// Precache the app shell; install succeeds only if every URL fetches
// (cache.addAll is all-or-nothing).
self.addEventListener('install', (event) => {
    event.waitUntil(
        caches.open(CACHE_NAME)
            .then((cache) => cache.addAll(urlsToCache))
    );
});

// Cache-first fetch strategy: serve a cached response when available,
// otherwise fall through to the network.
// NOTE(review): network responses are not written back to the cache, so
// only precached URLs are ever served from cache — confirm intended.
self.addEventListener('fetch', (event) => {
    event.respondWith(
        caches.match(event.request)
            .then((response) => {
                if (response) {
                    return response;
                }
                return fetch(event.request);
            })
    );
});

Database Query Caching

Query Result Caching

Database Query Caching
# Database Query Caching

# 1. MySQL Query Cache (MySQL 5.7 and earlier)
# Enable query cache
SET GLOBAL query_cache_type = 1;
SET GLOBAL query_cache_size = 268435456; # 256MB
SET GLOBAL query_cache_limit = 2097152;  # 2MB

# Check query cache status
SHOW STATUS LIKE 'Qcache%';

# 2. Application-Level Query Caching
// Caches SQL query results in Redis, keyed by a base64 digest of the
// normalized query text plus its parameters.
class QueryCache {
    constructor(redisClient) {
        this.redis = redisClient;
        this.defaultTTL = 3600; // 1 hour
    }
    
    // Build a deterministic cache key: whitespace-normalized SQL plus the
    // JSON-encoded params, base64-encoded under the "query:" namespace.
    generateKey(query, params = []) {
        const queryString = query.replace(/\s+/g, ' ').trim();
        const paramsString = JSON.stringify(params);
        return `query:${Buffer.from(queryString + paramsString).toString('base64')}`;
    }
    
    // Return the cached (JSON-parsed) result set, or null on a miss.
    // NOTE(review): assumes `redis.get`/`setex` return Promises (redis v4+
    // or a promisified client) — with the callback-based v3 client these
    // awaits would not actually wait; confirm the client version.
    async get(query, params = []) {
        const key = this.generateKey(query, params);
        const cached = await this.redis.get(key);
        
        if (cached) {
            return JSON.parse(cached);
        }
        
        return null;
    }
    
    // Store a result set with SETEX; ttl is in seconds (default 1 hour).
    async set(query, params = [], result, ttl = null) {
        const key = this.generateKey(query, params);
        const ttlToUse = ttl || this.defaultTTL;
        
        await this.redis.setex(key, ttlToUse, JSON.stringify(result));
    }
    
    // Delete every cached entry whose key starts with the given pattern.
    // NOTE(review): KEYS is O(N) and blocks Redis — prefer SCAN in
    // production. Also, stored keys are base64 digests, so a plain-text
    // SQL "pattern" cannot match them; as written this invalidation
    // likely never removes anything — verify before relying on it.
    async invalidate(pattern) {
        const keys = await this.redis.keys(`query:${pattern}*`);
        if (keys.length > 0) {
            await this.redis.del(...keys);
        }
    }
}

# 3. Cached Database Queries
/**
 * Database facade that transparently caches query results through a
 * QueryCache-style instance.
 */
class CachedDatabase {
    constructor(database, queryCache) {
        this.db = database;
        this.cache = queryCache;
    }

    /**
     * Execute `sql` with `params`, serving from cache when possible and
     * caching fresh results for `ttl` seconds.
     */
    async query(sql, params = [], ttl = 3600) {
        const cached = await this.cache.get(sql, params);
        if (cached) {
            return cached;
        }

        const rows = await this.db.query(sql, params);
        await this.cache.set(sql, params, rows, ttl);
        return rows;
    }

    /** Single-user lookup, cached for 30 minutes. */
    async getUserById(id) {
        const sql = 'SELECT * FROM users WHERE id = ?';
        const rows = await this.query(sql, [id], 1800); // 30 minutes
        return rows[0];
    }

    /** Users filtered by status, newest first, cached for 15 minutes. */
    async getUsersByStatus(status) {
        const sql = 'SELECT * FROM users WHERE status = ? ORDER BY created_at DESC';
        return await this.query(sql, [status], 900); // 15 minutes
    }
}

# 4. Cache Invalidation Strategies
// Invalidates cached query results after data mutations.
// NOTE(review): QueryCache.generateKey base64-encodes the query text, but
// these patterns are plain SQL fragments — `redis.keys('query:SELECT ...*')`
// can therefore never match a stored key, so none of these invalidations
// actually remove entries as written. The pattern would need to be derived
// via generateKey (or keys would need a plain-text prefix) for this to
// work; confirm against QueryCache before relying on it.
class CacheInvalidator {
    constructor(queryCache) {
        this.cache = queryCache;
    }
    
    // Drop the single-user query and any status-list queries that might
    // contain this user.
    async invalidateUser(userId) {
        // Invalidate user-related queries
        await this.cache.invalidate(`SELECT * FROM users WHERE id = ${userId}`);
        await this.cache.invalidate(`SELECT * FROM users WHERE status =`);
    }
    
    // Drop the cached list for one status value.
    async invalidateUsersByStatus(status) {
        await this.cache.invalidate(`SELECT * FROM users WHERE status = ${status}`);
    }
    
    // Drop every cached users-table query.
    async invalidateAllUsers() {
        await this.cache.invalidate('SELECT * FROM users');
    }
}

# 5. Cache Warming
/**
 * Pre-populates the cache with data expected to be hot, so the first
 * real requests after a deploy hit warm entries.
 */
class CacheWarmer {
    constructor(cachedDatabase) {
        this.db = cachedDatabase;
    }

    /** Warm the active-user list and the first 100 individual profiles. */
    async warmUserCache() {
        console.log('Warming user cache...');

        const users = await this.db.getUsersByStatus('active');
        console.log(`Loaded ${users.length} active users into cache`);

        // Warm individual profile lookups for the hottest slice.
        const hotUsers = users.slice(0, 100);
        for (const u of hotUsers) {
            await this.db.getUserById(u.id);
        }

        console.log('User cache warming completed');
    }

    /** Warm the 50 most-viewed active products (30-minute TTL). */
    async warmProductCache() {
        console.log('Warming product cache...');

        const products = await this.db.query(
            'SELECT * FROM products WHERE status = "active" ORDER BY views DESC LIMIT 50',
            [],
            1800
        );

        console.log(`Loaded ${products.length} popular products into cache`);
    }
}

# 6. Cache Statistics
/**
 * Reports Redis memory/keyspace info plus the server-wide cache hit rate.
 *
 * NOTE(review): assumes `redisClient.info()` returns a Promise (redis v4+
 * or a promisified client) — confirm the pinned client version.
 */
class CacheStats {
    constructor(redisClient) {
        this.redis = redisClient;
    }

    /** Gather memory info, keyspace info and the hit rate in one call. */
    async getStats() {
        const info = await this.redis.info('memory');
        const keyspace = await this.redis.info('keyspace');

        return {
            memory: this.parseMemoryInfo(info),
            keyspace: this.parseKeyspaceInfo(keyspace),
            hitRate: await this.calculateHitRate()
        };
    }

    /**
     * Hit rate as a percentage, from Redis' own keyspace_hits /
     * keyspace_misses counters (INFO stats section).
     *
     * Fix: the original walked every `query:*` key with KEYS (which blocks
     * Redis) and counted keys with a positive TTL as "hits" — that does
     * not measure a hit rate at all, and it returned NaN when no keys
     * existed. Redis already tracks real hit/miss counters; use them.
     */
    async calculateHitRate() {
        const statsInfo = await this.redis.info('stats');
        const stats = this.parseMemoryInfo(statsInfo); // same key:value format
        const hits = Number(stats.keyspace_hits) || 0;
        const misses = Number(stats.keyspace_misses) || 0;
        const total = hits + misses;

        return total > 0 ? (hits / total) * 100 : 0;
    }

    /** Parse "key:value" lines from an INFO section into a plain object. */
    parseMemoryInfo(info) {
        const lines = info.split('\n');
        const memory = {};

        lines.forEach(line => {
            if (line.includes(':')) {
                const [key, value] = line.split(':');
                memory[key.trim()] = value.trim();
            }
        });

        return memory;
    }

    /** Parse the "dbN:keys=...,expires=..." lines from INFO keyspace. */
    parseKeyspaceInfo(info) {
        const lines = info.split('\n');
        const keyspace = {};

        lines.forEach(line => {
            if (line.startsWith('db')) {
                const [db, stats] = line.split(':');
                keyspace[db] = stats.trim();
            }
        });

        return keyspace;
    }
}

Cache Monitoring

Cache Performance Monitoring

Cache Monitoring Setup
# Cache Performance Monitoring

# 1. Redis Monitoring
# Redis CLI monitoring
redis-cli --stat
redis-cli --latency
redis-cli --latency-history

# Redis info command
redis-cli info memory
redis-cli info stats
redis-cli info replication

# 2. Custom Cache Metrics
/**
 * In-process counters for cache operations, used to derive a hit rate.
 */
class CacheMetrics {
    constructor() {
        this.metrics = {
            hits: 0,
            misses: 0,
            sets: 0,
            deletes: 0,
            errors: 0
        };
    }

    /** Count one cache hit. */
    recordHit() {
        this.metrics.hits += 1;
    }

    /** Count one cache miss. */
    recordMiss() {
        this.metrics.misses += 1;
    }

    /** Count one write. */
    recordSet() {
        this.metrics.sets += 1;
    }

    /** Count one deletion. */
    recordDelete() {
        this.metrics.deletes += 1;
    }

    /** Count one failed operation. */
    recordError() {
        this.metrics.errors += 1;
    }

    /** Hit percentage over all reads; 0 when nothing was read yet. */
    getHitRate() {
        const reads = this.metrics.hits + this.metrics.misses;
        if (reads === 0) {
            return 0;
        }
        return (this.metrics.hits / reads) * 100;
    }

    /** Snapshot of all counters plus the derived hit rate. */
    getStats() {
        return {
            ...this.metrics,
            hitRate: this.getHitRate()
        };
    }
}

# 3. Cache Health Check
/**
 * Verifies a cache is operational by round-tripping a probe key through
 * set -> get -> delete and checking each step.
 */
class CacheHealthCheck {
    constructor(cache) {
        this.cache = cache;
    }

    /**
     * Run the probe. Resolves with { status: 'healthy' } on success or
     * { status: 'unhealthy', error } when any step misbehaves.
     */
    async checkHealth() {
        const testKey = 'health-check';
        const testValue = { timestamp: Date.now() };

        try {
            await this.cache.set(testKey, testValue, 60);

            const roundTripped = await this.cache.get(testKey);
            if (!roundTripped || roundTripped.timestamp !== testValue.timestamp) {
                throw new Error('Cache get/set mismatch');
            }

            await this.cache.delete(testKey);

            const afterDelete = await this.cache.get(testKey);
            if (afterDelete !== null) {
                throw new Error('Cache delete failed');
            }

            return { status: 'healthy', timestamp: Date.now() };
        } catch (error) {
            return { status: 'unhealthy', error: error.message, timestamp: Date.now() };
        }
    }
}

# 4. Cache Alerting
/**
 * Evaluates cache metrics against fixed thresholds and emits alerts.
 */
class CacheAlerts {
    constructor(cache, metrics) {
        this.cache = cache;
        this.metrics = metrics;
        this.thresholds = {
            hitRate: 80,        // Minimum hit rate percentage
            errorRate: 5,       // Maximum error rate percentage
            responseTime: 100   // Maximum response time in ms
        };
    }

    /**
     * Compare current stats to the thresholds and return the list of
     * triggered alerts (possibly empty).
     */
    async checkAlerts() {
        const stats = this.metrics.getStats();
        const alerts = [];

        // Hit rate below the floor is a warning.
        if (stats.hitRate < this.thresholds.hitRate) {
            alerts.push({
                type: 'low_hit_rate',
                message: `Cache hit rate is ${stats.hitRate.toFixed(2)}%, below threshold of ${this.thresholds.hitRate}%`,
                severity: 'warning'
            });
        }

        // Error rate above the ceiling is critical.
        const totalOperations = stats.hits + stats.misses + stats.sets + stats.deletes;
        const errorRate = totalOperations > 0 ? (stats.errors / totalOperations) * 100 : 0;
        if (errorRate > this.thresholds.errorRate) {
            alerts.push({
                type: 'high_error_rate',
                message: `Cache error rate is ${errorRate.toFixed(2)}%, above threshold of ${this.thresholds.errorRate}%`,
                severity: 'critical'
            });
        }

        return alerts;
    }

    /**
     * Log an alert and, when SLACK_WEBHOOK_URL is configured, forward it
     * to Slack.
     */
    async sendAlert(alert) {
        // Send alert via email, Slack, etc.
        console.log(`ALERT [${alert.severity.toUpperCase()}]: ${alert.message}`);

        // Example: Send to Slack
        if (process.env.SLACK_WEBHOOK_URL) {
            const payload = {
                text: `Cache Alert: ${alert.message}`,
                channel: '#alerts',
                username: 'Cache Monitor'
            };

            await fetch(process.env.SLACK_WEBHOOK_URL, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify(payload)
            });
        }
    }
}

# 5. Cache Dashboard
/**
 * Aggregates cache metrics and health into a single snapshot and can
 * print it to the console.
 */
class CacheDashboard {
    constructor(cache, metrics) {
        this.cache = cache;
        this.metrics = metrics;
    }

    /** Collect metrics, run a health probe and timestamp the snapshot. */
    async getDashboardData() {
        const metrics = this.metrics.getStats();
        const health = await new CacheHealthCheck(this.cache).checkHealth();

        return {
            metrics,
            health,
            timestamp: new Date().toISOString()
        };
    }

    /** Dump the current snapshot to the console. */
    async renderDashboard() {
        const data = await this.getDashboardData();

        console.log('=== Cache Dashboard ===');
        console.log(`Hit Rate: ${data.metrics.hitRate.toFixed(2)}%`);
        console.log(`Total Hits: ${data.metrics.hits}`);
        console.log(`Total Misses: ${data.metrics.misses}`);
        console.log(`Total Sets: ${data.metrics.sets}`);
        console.log(`Total Deletes: ${data.metrics.deletes}`);
        console.log(`Errors: ${data.metrics.errors}`);
        console.log(`Health Status: ${data.health.status}`);
        console.log(`Last Updated: ${data.timestamp}`);
    }
}

Best Practices

Caching Strategy Guidelines

Caching Best Practices

  • Cache frequently accessed data
  • Use appropriate TTL values
  • Implement cache invalidation
  • Monitor cache performance
  • Use cache warming strategies
  • Handle cache failures gracefully
  • Implement cache hierarchies

Common Mistakes

  • Over-caching or under-caching
  • Incorrect TTL values
  • No cache invalidation
  • Ignoring cache failures
  • Not monitoring performance
  • Cache key collisions
  • Memory leaks in cache

Summary

Caching strategy implementation involves several key components:

  • Redis Caching: In-memory data store, high performance
  • Memcached: Distributed memory caching system
  • Application Caching: In-memory, LRU, cache patterns
  • CDN Caching: Edge caching, static asset optimization
  • Database Caching: Query result caching, cache warming
  • Monitoring: Performance metrics, health checks, alerting
  • Best Practices: TTL management, invalidation, failure handling

Need More Help?

Struggling with caching strategy implementation or need help optimizing your application performance? Our performance experts can help you implement effective caching solutions.

Get Caching Help