# Caching Architectures

## Multi-Level Caching Strategy

**Multi-level caching** implements cache layers at different levels for optimal performance.

```yaml
# Caching Architecture Levels

## Level 1: Browser Cache
location: "Client browser"
storage: "Browser memory/disk"
ttl: "Configurable via HTTP headers"
technologies:
  - HTTP Cache-Control headers
  - ETags for validation
  - Service Workers
  - Local Storage

advantages:
  - Fastest access time
  - Reduces server load
  - Offline capability
  - Improved user experience

challenges:
  - Limited storage capacity
  - Cache invalidation complexity
  - Browser compatibility
  - Privacy concerns

## Level 2: CDN Cache
location: "Edge servers globally"
storage: "Distributed CDN nodes"
ttl: "Hours to days"
technologies:
  - Cloudflare
  - AWS CloudFront
  - Akamai
  - Fastly

advantages:
  - Geographic distribution
  - High availability
  - DDoS protection
  - Reduced bandwidth costs

challenges:
  - Cache propagation delay
  - Configuration complexity
  - Cost considerations
  - Limited dynamic content

## Level 3: Application Cache
location: "Application server memory"
storage: "In-memory data structures"
ttl: "Minutes to hours"
technologies:
  - Redis
  - Memcached
  - In-memory HashMap
  - Application-level caching

advantages:
  - Fast access speed
  - Centralized management
  - Flexible cache policies
  - Rich data structures

challenges:
  - Memory limitations
  - Cache coherence
  - Single point of failure
  - Complexity in distributed systems

## Level 4: Database Cache
location: "Database layer"
storage: "Database buffer pool"
ttl: "Automatic management"
technologies:
  - PostgreSQL buffer pool
  - MySQL query cache (deprecated in 5.7; removed in MySQL 8.0)
  - Oracle buffer cache
  - MongoDB internal cache

advantages:
  - Transparent to application
  - Automatic management
  - Reduces disk I/O
  - Query optimization

challenges:
  - Limited control
  - Database-specific
  - Memory contention
  - Configuration complexity
```

## Cache Implementation Patterns

```javascript
// Multi-Level Cache Manager
//
// Coordinates an ordered list of cache backends (index 0 = closest/fastest).
// Reads probe the levels in order and promote hits to faster levels; writes
// and deletes fan out to every level. Each backend must expose async
// get/set/delete methods.
class MultiLevelCacheManager {
  /**
   * @param {object} [options]
   * @param {Array<{get: Function, set: Function, delete: Function}>} [options.levels]
   *   Cache backends ordered fastest-first.
   */
  constructor(options = {}) {
    this.levels = options.levels || [];
    this.stats = {
      hits: 0,
      misses: 0,
      levelHits: new Array(this.levels.length).fill(0)
    };
  }

  /**
   * Look up `key`, trying each level in order. On a hit the value is
   * promoted to all faster levels before returning; on a full miss
   * returns null. A backend error is logged and treated as a miss for
   * that level only.
   */
  async get(key) {
    for (let i = 0; i < this.levels.length; i++) {
      const cache = this.levels[i];

      try {
        const value = await cache.get(key);
        if (value !== null && value !== undefined) {
          this.stats.hits++;
          this.stats.levelHits[i]++;

          // Warm the faster levels so subsequent reads hit earlier.
          await this.promoteToHigherLevels(key, value, i);

          return value;
        }
      } catch (error) {
        console.error(`Cache level ${i} error:`, error);
      }
    }

    this.stats.misses++;
    return null;
  }

  /**
   * Write `value` to every level in parallel, deriving a per-level TTL
   * from options.ttl. Individual level failures are logged, not thrown.
   */
  async set(key, value, options = {}) {
    const promises = this.levels.map((cache, index) => {
      const levelOptions = {
        ...options,
        ttl: this.getTTLForLevel(index, options.ttl)
      };

      return cache.set(key, value, levelOptions).catch(error => {
        console.error(`Cache level ${index} set error:`, error);
      });
    });

    await Promise.allSettled(promises);
  }

  /** Remove `key` from every level; failures are logged, not thrown. */
  async delete(key) {
    const promises = this.levels.map(cache =>
      cache.delete(key).catch(error => {
        console.error('Cache delete error:', error);
      })
    );

    await Promise.allSettled(promises);
  }

  /**
   * Copy a value hit at `currentLevel` into all faster levels (lower
   * indices). Writes run in parallel — previously each level was awaited
   * sequentially, which added every level's latency to the read path.
   * Per-level failures are logged and do not block the other promotions.
   */
  async promoteToHigherLevels(key, value, currentLevel) {
    const writes = [];
    for (let i = 0; i < currentLevel; i++) {
      writes.push(
        this.levels[i]
          .set(key, value, { ttl: this.getTTLForLevel(i) })
          .catch(error => {
            console.error(`Promotion to level ${i} failed:`, error);
          })
      );
    }
    await Promise.allSettled(writes);
  }

  /**
   * Per-level TTL policy: faster levels (lower index) receive a larger
   * multiple of baseTTL; levels beyond the table fall back to 1x.
   */
  getTTLForLevel(level, baseTTL = 3600) {
    const ttlMultipliers = [4, 2, 1, 0.5]; // Example multipliers
    const multiplier = ttlMultipliers[level] || 1;
    return baseTTL * multiplier;
  }

  /**
   * Snapshot of hit/miss counters plus overall and per-level hit rates.
   * Rates are explicitly 0 when no lookups (or no hits) have occurred,
   * rather than relying on `NaN || 0` coercion.
   */
  getStats() {
    const total = this.stats.hits + this.stats.misses;
    const hitRate = total === 0 ? 0 : this.stats.hits / total;

    return {
      ...this.stats,
      hitRate,
      levelHitRates: this.stats.levelHits.map((hits, index) => ({
        level: index,
        hits,
        hitRate: this.stats.hits === 0 ? 0 : hits / this.stats.hits
      }))
    };
  }
}

// Cache-Aside Pattern Implementation
//
// Lazy-loading pattern: reads check the cache first and fall back to the
// data source on a miss, caching the result. Writes persist to the data
// source FIRST and only then update the cache, so a failed save never
// leaves the cache advertising a value the source rejected.
class CacheAsideManager {
  /**
   * @param {object} cache - backend with async get/set/delete
   * @param {object} dataLoader - adapter with async load(key) and save(key, value)
   * @param {object} [options]
   * @param {number} [options.ttl=3600] - TTL passed to cache.set (units defined by the backend)
   * @param {string} [options.keyPrefix=''] - prefix applied to every cache key
   */
  constructor(cache, dataLoader, options = {}) {
    this.cache = cache;
    this.dataLoader = dataLoader;
    this.options = {
      ttl: options.ttl || 3600,
      keyPrefix: options.keyPrefix || '',
      ...options
    };
  }

  /**
   * Return the value for `key`, reading through the cache.
   * On a miss the value is loaded from the data source and, if non-null,
   * cached for subsequent reads.
   * @throws whatever dataLoader.load throws (after logging).
   */
  async get(key) {
    const cacheKey = this.getCacheKey(key);

    let value = await this.cache.get(cacheKey);
    if (value !== null && value !== undefined) {
      return value;
    }

    try {
      value = await this.dataLoader.load(key);

      // Only cache real values; caching null would mask later inserts.
      if (value !== null && value !== undefined) {
        await this.cache.set(cacheKey, value, { ttl: this.options.ttl });
      }

      return value;
    } catch (error) {
      console.error('Data loader error:', error);
      throw error;
    }
  }

  /**
   * Persist `value` and refresh the cache.
   *
   * The data source is written FIRST. (The previous implementation
   * updated the cache before saving, briefly exposing an unsaved value
   * to readers when the save failed.) On save failure the cache entry is
   * dropped defensively and the error is rethrown.
   */
  async set(key, value, options = {}) {
    const cacheKey = this.getCacheKey(key);
    // ?? rather than || so an explicit ttl of 0 is honoured.
    const ttl = options.ttl ?? this.options.ttl;

    try {
      await this.dataLoader.save(key, value);
    } catch (error) {
      console.error('Data save error:', error);
      // Drop any stale copy so readers fall through to the source of truth.
      await this.cache.delete(cacheKey);
      throw error;
    }

    await this.cache.set(cacheKey, value, { ttl });
  }

  /** Remove the cached copy for `key`; the data source is untouched. */
  async invalidate(key) {
    const cacheKey = this.getCacheKey(key);
    await this.cache.delete(cacheKey);
  }

  /** Drop the cached copy and reload `key` from the data source. */
  async refresh(key) {
    const cacheKey = this.getCacheKey(key);
    await this.cache.delete(cacheKey);
    return await this.get(key);
  }

  /** Build the namespaced cache key. */
  getCacheKey(key) {
    return this.options.keyPrefix + key;
  }
}

// Write-Through Cache Implementation
//
// Every write goes to both the backing data store and the cache. With the
// `writeBehind` option enabled the cache is updated immediately and the
// data store write runs asynchronously as a best-effort operation.
class WriteThroughCacheManager {
  /**
   * @param {object} cache - backend with async get/set/delete
   * @param {object} dataStore - persistent store with async get/set/delete
   * @param {object} [options]
   * @param {number} [options.ttl=3600] - TTL forwarded to cache.set
   * @param {string} [options.keyPrefix=''] - namespace prefix for cache keys
   * @param {boolean} [options.writeBehind=false] - defer store writes
   */
  constructor(cache, dataStore, options = {}) {
    this.cache = cache;
    this.dataStore = dataStore;
    this.options = {
      ttl: options.ttl || 3600,
      keyPrefix: options.keyPrefix || '',
      writeBehind: options.writeBehind || false,
      ...options
    };
  }

  /**
   * Read through the cache: return the cached value when present,
   * otherwise fetch from the data store and cache any non-null result.
   */
  async get(key) {
    const cacheKey = this.getCacheKey(key);

    const cached = await this.cache.get(cacheKey);
    if (cached != null) {
      return cached;
    }

    const loaded = await this.dataStore.get(key);
    if (loaded != null) {
      await this.cache.set(cacheKey, loaded, { ttl: this.options.ttl });
    }

    return loaded;
  }

  /**
   * Store `value` under `key`. In write-through mode (default) the data
   * store is written first, then the cache. In write-behind mode the
   * cache is written immediately and the store write runs in the
   * background; a failed background write logs the error and drops the
   * cached entry.
   */
  async set(key, value, options = {}) {
    const cacheKey = this.getCacheKey(key);
    const ttl = options.ttl || this.options.ttl;

    if (!this.options.writeBehind) {
      // Write-through: persist first, then cache.
      await this.dataStore.set(key, value);
      await this.cache.set(cacheKey, value, { ttl });
      return;
    }

    // Write-behind: cache now, persist in the background.
    await this.cache.set(cacheKey, value, { ttl });
    // Deliberately not awaited; failure invalidates the cached entry.
    this.dataStore.set(key, value).catch(error => {
      console.error('Write-behind failed:', error);
      this.cache.delete(cacheKey);
    });
  }

  /** Remove `key` from the data store, then from the cache. */
  async delete(key) {
    const cacheKey = this.getCacheKey(key);

    await this.dataStore.delete(key);
    await this.cache.delete(cacheKey);
  }

  /** Build the namespaced cache key. */
  getCacheKey(key) {
    return this.options.keyPrefix + key;
  }
}

// Cache Warmer for Preloading Popular Content
class CacheWarmer {
  /**
   * Best-effort preloader: pulls values for known-popular keys from the
   * data source and writes them into the cache ahead of real traffic.
   *
   * @param {object} cache - backend with async set(key, value)
   * @param {object} dataLoader - source of truth with async load(key)
   * @param {object} metricsCollector - counter sink with increment(name)
   */
  constructor(cache, dataLoader, metricsCollector) {
    this.cache = cache;
    this.dataLoader = dataLoader;
    this.metricsCollector = metricsCollector;
    // key -> setInterval handle, so scheduled jobs can be cancelled later
    this.warmingJobs = new Map();
  }

  /**
   * Load each key from the data source and store it in the cache.
   *
   * Keys are processed in batches of `concurrency` (batches run serially,
   * keys within a batch in parallel). Each key is retried up to
   * `retryAttempts` times with a fixed `retryDelay` (ms) pause between
   * attempts; a key that still fails is logged and skipped — nothing is
   * thrown to the caller.
   *
   * NOTE(review): values are cached under the raw key with no TTL option;
   * confirm this matches the key prefix / TTL scheme of the read path.
   */
  async warmCache(popularKeys, options = {}) {
    const {
      concurrency = 10,
      retryAttempts = 3,
      retryDelay = 1000
    } = options;

    console.log(`Starting cache warming for ${popularKeys.length} keys`);

    // Process keys in batches
    for (let i = 0; i < popularKeys.length; i += concurrency) {
      const batch = popularKeys.slice(i, i + concurrency);

      const promises = batch.map(async (key) => {
        for (let attempt = 0; attempt < retryAttempts; attempt++) {
          try {
            const value = await this.dataLoader.load(key);
            if (value !== null && value !== undefined) {
              await this.cache.set(key, value);
              this.metricsCollector.increment('cache_warmed');
            }
            // Success (or a null value not worth caching): stop retrying.
            return;
          } catch (error) {
            console.error(`Failed to warm cache for key ${key}:`, error);
            if (attempt < retryAttempts - 1) {
              await new Promise(resolve => setTimeout(resolve, retryDelay));
            }
          }
        }
      });

      await Promise.allSettled(promises);
    }

    console.log('Cache warming completed');
  }

  /**
   * Re-warm `key` every `schedule` milliseconds until cancelled.
   *
   * NOTE(review): scheduling the same key twice overwrites the map entry
   * and leaks the earlier interval — cancel before re-scheduling.
   */
  async scheduleWarmingJob(key, schedule) {
    // Schedule periodic warming for specific key
    const job = setInterval(async () => {
      try {
        await this.warmCache([key]);
      } catch (error) {
        console.error(`Scheduled warming failed for key ${key}:`, error);
      }
    }, schedule);

    this.warmingJobs.set(key, job);
  }

  /** Stop and forget the periodic warming job for `key`, if any. */
  cancelWarmingJob(key) {
    const job = this.warmingJobs.get(key);
    if (job) {
      clearInterval(job);
      this.warmingJobs.delete(key);
    }
  }

  async warmByAccessPattern(options = {}) {
    const {
      timeWindow = 3600000, // 1 hour
      minAccessCount = 5,
```
