# Database Scaling

## Horizontal Database Scaling

**Horizontal scaling** membagi database menjadi multiple partitions untuk handle growth.

````markdown
# Database Scaling Strategies

## Sharding (Horizontal Partitioning)
definition: "Split database into smaller partitions called shards"
strategies:
  range_based:
    - Partition data by value ranges
    - Example: Users 1-1000 in shard 1, 1001-2000 in shard 2
    - Easy to implement
    - Uneven data distribution possible

  hash_based:
    - Partition data using hash function
    - Example: Hash(user_id) % number_of_shards
    - Even distribution
    - Complex re-sharding

  directory_based:
    - Central lookup service for shard location
    - Flexible shard assignment
    - Additional lookup overhead
    - Single point of failure concern

advantages:
  - Linear scalability
  - Better performance for large datasets
  - Parallel query processing
  - Geographic distribution

challenges:
  - Cross-shard queries
  - Data consistency
  - Rebalancing complexity
  - Increased operational overhead

## Replication
definition: "Create copies of database for read scaling and availability"
types:
  master_slave:
    - One master for writes
    - Multiple slaves for reads
    - Asynchronous replication
    - Eventual consistency

  master_master:
    - Multiple masters for writes
    - Conflict resolution required
    - Higher availability
    - Complex consistency management

  read_replicas:
    - Dedicated read-only replicas
    - Reporting and analytics
    - Reduced load on primary
    - Slight replication lag

## Implementation Example
```javascript
// Sharded Database Manager
// Sharded Database Manager
//
// Routes CRUD and query operations across N shards using hash-based
// partitioning on a configurable record field (default: 'id').
class ShardedDatabaseManager {
  /**
   * @param {Array<Object>} shards - Shard backends, each exposing
   *   create/read/update/delete/query methods.
   * @param {string} [shardKey='id'] - Record field used to route data.
   */
  constructor(shards, shardKey = 'id') {
    this.shards = shards;
    this.shardKey = shardKey;
    this.shardCount = shards.length;
  }

  /**
   * Resolve the shard responsible for a key.
   * @param {string|number} key - Shard-key value; hashed as a string so
   *   numeric ids do not throw.
   * @returns {Object} The owning shard.
   */
  getShard(key) {
    const hash = this.hashCode(key);
    const shardIndex = Math.abs(hash) % this.shardCount;
    return this.shards[shardIndex];
  }

  /** Insert a record on the shard owning its shard-key value. */
  async create(data) {
    const shard = this.getShard(data[this.shardKey]);
    return await shard.create(data);
  }

  /** Read a record by shard-key value. */
  async read(key) {
    const shard = this.getShard(key);
    return await shard.read(key);
  }

  /** Update a record by shard-key value. */
  async update(key, data) {
    const shard = this.getShard(key);
    return await shard.update(key, data);
  }

  /** Delete a record by shard-key value. */
  async delete(key) {
    const shard = this.getShard(key);
    return await shard.delete(key);
  }

  /**
   * Run a query, routing to a single shard when the conditions pin the
   * shard key, otherwise fanning out to every shard.
   */
  async query(conditions, options = {}) {
    if (this.isSingleShardQuery(conditions)) {
      const shardKey = this.extractShardKey(conditions);
      const shard = this.getShard(shardKey);
      return await shard.query(conditions, options);
    }
    // Multi-shard query
    return await this.multiShardQuery(conditions, options);
  }

  /**
   * Fan a query out to all shards in parallel and merge the results.
   * Pagination is stripped from the per-shard calls: applying
   * offset/limit inside each shard would drop rows that belong in the
   * global result window; it is applied once over the merged set.
   */
  async multiShardQuery(conditions, options) {
    const { limit, offset, ...shardOptions } = options;

    // Query shards concurrently instead of one at a time.
    const perShard = await Promise.all(
      this.shards.map(shard => shard.query(conditions, shardOptions))
    );

    // Sort, paginate, and filter over the merged result set.
    return this.processMultiShardResults(perShard.flat(), options);
  }

  /**
   * True when the conditions pin the shard key. Uses null checks (not
   * truthiness) so falsy-but-valid keys like 0 still route to one shard.
   */
  isSingleShardQuery(conditions) {
    return conditions != null && conditions[this.shardKey] != null;
  }

  /** Extract the shard-key value from query conditions. */
  extractShardKey(conditions) {
    return conditions[this.shardKey];
  }

  /**
   * Sort and paginate a merged multi-shard result set. The comparator
   * uses relational operators (not subtraction) so string fields are
   * ordered correctly as well as numeric ones.
   */
  processMultiShardResults(results, options) {
    if (options.sort) {
      results.sort((a, b) => {
        for (const [field, direction] of Object.entries(options.sort)) {
          if (a[field] !== b[field]) {
            const cmp = a[field] < b[field] ? -1 : 1;
            return direction === 'asc' ? cmp : -cmp;
          }
        }
        return 0;
      });
    }

    // Apply pagination globally
    if (options.limit) {
      const offset = options.offset || 0;
      return results.slice(offset, offset + options.limit);
    }

    return results;
  }

  /**
   * 32-bit string hash (djb2-style). The input is coerced to string so
   * numeric shard keys do not throw on charCodeAt.
   */
  hashCode(str) {
    const s = String(str);
    let hash = 0;
    for (let i = 0; i < s.length; i++) {
      hash = ((hash << 5) - hash) + s.charCodeAt(i);
      hash = hash & hash; // clamp to 32-bit integer range
    }
    return hash;
  }
}

// Database Replication Manager
// Database Replication Manager
//
// Writes go to a single master; reads are spread over read replicas
// ("slaves") via a pluggable strategy, falling back to the master when
// a replica read fails or no replicas are registered.
class ReplicationManager {
  /**
   * @param {Object} master - Primary node (write/read/getReplicationPosition/getAllData).
   * @param {Array<Object>} slaves - Read replicas (read/replicate/getReplicationPosition/syncData).
   */
  constructor(master, slaves) {
    this.master = master;
    this.slaves = slaves;
    this.readStrategy = 'round_robin'; // round_robin, least_loaded, random
    this.currentSlaveIndex = 0;
  }

  /**
   * Write to the master, then kick off fire-and-forget replication to
   * the replicas. Resolves with the master's result without waiting for
   * replication — eventual consistency on the read path.
   */
  async write(data) {
    const result = await this.master.write(data);

    // Deliberately not awaited; replication failures are logged only.
    this.replicateToSlaves(data).catch(error => {
      console.error('Replication failed:', error);
    });

    return result;
  }

  /** Read using options.readFrom or the configured default strategy. */
  async read(query, options = {}) {
    switch (options.readFrom || this.readStrategy) {
      case 'master':
        return await this.master.read(query, options);
      case 'random':
        return await this.readFromRandomSlave(query, options);
      case 'least_loaded':
        return await this.readFromLeastLoadedSlave(query, options);
      default:
        return await this.readFromRoundRobinSlave(query, options);
    }
  }

  /**
   * Read from a chosen replica, falling back to the master if the
   * replica read throws. Shared by all read strategies.
   */
  async readFromSlaveWithFallback(slave, query, options) {
    try {
      return await slave.read(query, options);
    } catch (error) {
      console.error(`Slave read failed, falling back to master:`, error);
      return await this.master.read(query, options);
    }
  }

  /** Round-robin over replicas; master when none are registered. */
  async readFromRoundRobinSlave(query, options) {
    if (this.slaves.length === 0) {
      return await this.master.read(query, options);
    }

    // Re-apply the modulo on use so a removeSlave() between reads
    // cannot leave the stored index out of bounds.
    const index = this.currentSlaveIndex % this.slaves.length;
    this.currentSlaveIndex = (index + 1) % this.slaves.length;

    return await this.readFromSlaveWithFallback(this.slaves[index], query, options);
  }

  /** Uniform-random replica; master when none are registered. */
  async readFromRandomSlave(query, options) {
    if (this.slaves.length === 0) {
      return await this.master.read(query, options);
    }

    const randomIndex = Math.floor(Math.random() * this.slaves.length);
    return await this.readFromSlaveWithFallback(this.slaves[randomIndex], query, options);
  }

  /** Replica reporting the lowest load; master when none exist. */
  async readFromLeastLoadedSlave(query, options) {
    if (this.slaves.length === 0) {
      return await this.master.read(query, options);
    }

    // Gather load metrics from all replicas concurrently.
    const slaveLoads = await Promise.all(
      this.slaves.map(async slave => ({
        slave,
        load: await slave.getLoadMetrics()
      }))
    );

    const leastLoadedSlave = slaveLoads.reduce((min, current) =>
      current.load < min.load ? current : min
    ).slave;

    return await this.readFromSlaveWithFallback(leastLoadedSlave, query, options);
  }

  /**
   * Push a change to every replica in parallel; individual failures are
   * logged and never reject the overall operation.
   */
  async replicateToSlaves(data) {
    const replicationPromises = this.slaves.map(slave =>
      slave.replicate(data).catch(error => {
        console.error(`Replication to slave failed:`, error);
      })
    );

    await Promise.allSettled(replicationPromises);
  }

  /**
   * Report replication lag per replica. The master position is fetched
   * once — not once per replica — so all lags share the same baseline
   * and the master is not queried N times.
   */
  async checkReplicationLag() {
    let masterPosition;
    try {
      masterPosition = await this.master.getReplicationPosition();
    } catch (error) {
      // Without a master baseline every lag is unknown.
      return this.slaves.map(slave => ({
        slave: slave.id,
        lag: Infinity,
        error: error.message
      }));
    }

    return await Promise.all(
      this.slaves.map(async slave => {
        try {
          const slavePosition = await slave.getReplicationPosition();
          return { slave: slave.id, lag: masterPosition - slavePosition };
        } catch (error) {
          return { slave: slave.id, lag: Infinity, error: error.message };
        }
      })
    );
  }

  /**
   * Register a new replica. The initial data sync completes BEFORE the
   * replica joins the read rotation, so reads never hit an empty node.
   */
  async addSlave(slave) {
    const masterData = await this.master.getAllData();
    await slave.syncData(masterData);
    this.slaves.push(slave);
  }

  /** Deregister a replica by id. */
  async removeSlave(slaveId) {
    this.slaves = this.slaves.filter(slave => slave.id !== slaveId);
  }
}
````

## Database Connection Pooling

**Connection pooling** optimizes database performance dengan reusing connections.

```javascript
// Database Connection Pool
class DatabaseConnectionPool {
  constructor(options = {}) {
    this.options = {
      min: options.min || 2,
      max: options.max || 10,
      idleTimeoutMillis: options.idleTimeoutMillis || 30000,
      acquireTimeoutMillis: options.acquireTimeoutMillis || 60000,
      createTimeoutMillis: options.createTimeoutMillis || 30000,
      destroyTimeoutMillis: options.destroyTimeoutMillis || 5000,
      ...options
    };

    this.pool = [];
    this.waitingQueue = [];
    this.activeConnections = 0;
    this.totalConnections = 0;
  }

  async acquire() {
    return new Promise((resolve, reject) => {
      // Check for available connection in pool
      if (this.pool.length > 0) {
        const connection = this.pool.pop();
        if (this.isValidConnection(connection)) {
          this.activeConnections++;
          resolve(connection);
        } else {
          // Invalid connection, try again
          this.totalConnections--;
          this.acquire().then(resolve).catch(reject);
        }
        return;
      }

      // Check if we can create new connection
      if (this.totalConnections < this.options.max) {
        this.createConnection()
          .then(connection => {
            this.activeConnections++;
            this.totalConnections++;
            resolve(connection);
          })
          .catch(reject);
        return;
      }

      // Add to waiting queue
      const timeout = setTimeout(() => {
        const index = this.waitingQueue.findIndex(item => item.resolve === resolve);
        if (index !== -1) {
          this.waitingQueue.splice(index, 1);
          reject(new Error('Connection acquire timeout'));
        }
      }, this.options.acquireTimeoutMillis);

      this.waitingQueue.push({ resolve, reject, timeout });
    });
  }

  async release(connection) {
    if (!this.isValidConnection(connection)) {
      this.activeConnections--;
      this.totalConnections--;
      this.destroyConnection(connection);
      return;
    }

    // Check if there are waiting requests
    if (this.waitingQueue.length > 0) {
      const waiting = this.waitingQueue.shift();
      clearTimeout(waiting.timeout);
      waiting.resolve(connection);
      return;
    }

    // Add connection back to pool
    this.activeConnections--;

    // Check if we should destroy connection (too many idle connections)
    if (this.totalConnections > this.options.min) {
      this.totalConnections--;
      this.destroyConnection(connection);
      return;
    }

    // Mark connection as idle
    connection.lastUsed = Date.now();
    this.pool.push(connection);
  }

  async createConnection() {
    return new Promise((resolve, reject) => {
      const timeout = setTimeout(() => {
        reject(new Error('Connection creation timeout'));
      }, this.options.createTimeoutMillis);

      this.options.factory()
        .then(connection => {
          clearTimeout(timeout);
          connection.created = Date.now();
          connection.lastUsed = Date.now();
          resolve(connection);
        })
        .catch(error => {
          clearTimeout(timeout);
          reject(error);
        });
    });
  }

  isValidConnection(connection) {
    if (!connection) return false;

    // Check connection age
    const age = Date.now() - connection.created;
    if (age > this.options.maxConnectionAge) {
      return false;
    }

    // Check idle timeout
    const idleTime = Date.now() - connection.lastUsed;
    if (idleTime > this.options.idleTimeoutMillis) {
      return false;
    }

    // Check if connection is still alive
    return this.options.validate ? this.options.validate(connection) : true;
  }

  async destroyConnection(connection) {
    try {
      if (this.options.destroy) {
        await this.options.destroy(connection);
      }
    } catch (error) {
      console.error('Error destroying connection:', error);
    }
  }

  async destroy() {
    // Clear waiting queue
    for (const waiting of this.waitingQueue) {
      clearTimeout(waiting.timeout);
      waiting.reject(new Error('Connection pool destroyed'));
    }
    this.waitingQueue = [];

    // Destroy all connections
    const destroyPromises = [
      ...this.pool,
      ...Array(this.activeConnections).fill(null)
    ].map(() => {
      if (this.pool.length > 0) {
        return this.destroyConnection(this.pool.pop());
```
