Chapter 10: Performance Optimization and Scalability

Haiyue
37min

Chapter 10: Performance Optimization and Scalability

Learning Objectives

  1. Analyze and optimize server performance bottlenecks
  2. Implement caching and resource pool management
  3. Master concurrent processing and asynchronous programming
  4. Learn memory management and garbage collection optimization
  5. Design scalable architecture patterns

1. Performance Analysis and Monitoring

1.1 Performance Metrics System

// Aggregated server health metrics contract for monitoring/reporting.
// NOTE(review): not referenced by the code below; units are not enforced here
// (latencies are presumably milliseconds — confirm against the producer).
interface PerformanceMetrics {
  // Response time metrics
  responseTime: {
    mean: number;
    median: number;
    p95: number; // 95th percentile
    p99: number; // 99th percentile
  };

  // Throughput metrics
  throughput: {
    requestsPerSecond: number;
    bytesPerSecond: number;
    operationsPerSecond: number;
  };

  // Resource usage metrics
  resources: {
    cpuUsage: number;
    memoryUsage: number;
    diskIO: number;
    networkIO: number;
  };

  // Error rate metrics
  errorRate: {
    total: number;
    byType: Record<string, number>; // error counts keyed by error type
  };
}

// Collects timing samples, counters and percentile statistics in-process.
class PerformanceProfiler {
  // Rolling samples per metric name; only the most recent 1000 are kept.
  private metrics: Map<string, number[]> = new Map();
  // In-flight timer start points in nanoseconds from process.hrtime.bigint().
  // (Was mis-declared Map<string, number>, which fails strict type checking
  // since hrtime.bigint() returns a bigint.)
  private timers: Map<string, bigint> = new Map();
  // Monotonically increasing named counters.
  private counters: Map<string, number> = new Map();

  // Start timing under the given label; restarts the timer if already running.
  start(label: string): void {
    this.timers.set(label, process.hrtime.bigint());
  }

  // End timing, record the elapsed milliseconds as a metric, and return it.
  // Throws if start() was never called (or was already ended) for this label.
  end(label: string): number {
    const start = this.timers.get(label);
    if (start === undefined) {
      throw new Error(`Timer ${label} was not started`);
    }

    const duration = Number(process.hrtime.bigint() - start) / 1_000_000; // ns -> ms
    this.recordMetric(label, duration);
    this.timers.delete(label);

    return duration;
  }

  // Append a sample for the metric, keeping a bounded rolling window.
  recordMetric(name: string, value: number): void {
    const values = this.metrics.get(name) || [];
    values.push(value);

    // Keep the most recent 1000 data points
    if (values.length > 1000) {
      values.shift();
    }

    this.metrics.set(name, values);
  }

  // Increment a named counter (default step 1).
  increment(name: string, value: number = 1): void {
    const current = this.counters.get(name) || 0;
    this.counters.set(name, current + value);
  }

  // Summary statistics for one metric, or null when no samples exist.
  // p95/p99 are index-based on the sorted window (nearest-rank style).
  getStats(name: string): { count: number; sum: number; mean: number; min: number; max: number; p95: number; p99: number } | null {
    const values = this.metrics.get(name);
    if (!values || values.length === 0) {
      return null;
    }

    const sorted = [...values].sort((a, b) => a - b);
    const sum = values.reduce((a, b) => a + b, 0);

    return {
      count: values.length,
      sum,
      mean: sum / values.length,
      min: sorted[0],
      max: sorted[sorted.length - 1],
      p95: sorted[Math.floor(sorted.length * 0.95)],
      p99: sorted[Math.floor(sorted.length * 0.99)]
    };
  }

  // Snapshot of every metric, all counters (prefixed "counter_" to avoid
  // name clashes), plus process-level resource usage.
  getAllStats(): Record<string, any> {
    const stats: Record<string, any> = {};

    for (const [name] of this.metrics) {
      stats[name] = this.getStats(name);
    }

    // Add counters
    for (const [name, value] of this.counters) {
      stats[`counter_${name}`] = value;
    }

    // Add system metrics
    stats.system = {
      memory: process.memoryUsage(),
      cpu: process.cpuUsage(),
      uptime: process.uptime()
    };

    return stats;
  }

  // Discard all samples, running timers and counters.
  reset(): void {
    this.metrics.clear();
    this.timers.clear();
    this.counters.clear();
  }
}

1.2 Performance Monitoring Decorator

// Method decorator that reports per-call metrics to the global profiler:
// `<label>_calls`, `<label>_duration` (ms), `<label>_success`, `<label>_errors`,
// where label is `metricName` or `<ClassName>.<methodName>`.
function performanceMonitor(metricName?: string) {
  return function(target: any, propertyName: string, descriptor: PropertyDescriptor) {
    const wrapped = descriptor.value;
    const label = metricName || `${target.constructor.name}.${propertyName}`;

    descriptor.value = async function(...callArgs: any[]) {
      const profiler = getGlobalProfiler();
      const begin = Date.now();

      try {
        profiler.increment(`${label}_calls`);
        const outcome = await wrapped.apply(this, callArgs);

        profiler.recordMetric(`${label}_duration`, Date.now() - begin);
        profiler.increment(`${label}_success`);

        return outcome;
      } catch (err) {
        // Failures are timed too, then counted separately and rethrown.
        profiler.recordMetric(`${label}_duration`, Date.now() - begin);
        profiler.increment(`${label}_errors`);

        throw err;
      }
    };
  };
}

// Process-wide profiler singleton, created lazily on first use.
let globalProfiler: PerformanceProfiler;

function getGlobalProfiler(): PerformanceProfiler {
  globalProfiler ??= new PerformanceProfiler();
  return globalProfiler;
}

// Usage example
class OptimizedToolHandler {
  // Every call is timed and counted by the decorator under the
  // 'tool_execution' metric family (_calls/_duration/_success/_errors).
  @performanceMonitor('tool_execution')
  async executeTool(name: string, args: any): Promise<any> {
    // Tool execution logic
    return { result: 'success' };
  }
}

2. Cache System Design

2.1 Multi-level Cache Architecture

// Tuning knobs shared by all CacheStore implementations.
interface CacheConfig {
  maxSize: number; // Entry count that triggers evict() when inserting a new key
  ttl: number; // Time to live (seconds)
  maxAge?: number; // Maximum age — NOTE(review): not read by CacheStore; confirm intent
  strategy: 'LRU' | 'LFU' | 'FIFO'; // NOTE(review): informational only; eviction policy comes from the subclass used
}

// Internal record stored per cache key.
interface CacheEntry<T> {
  value: T;
  createdAt: Date;
  accessedAt: Date; // refreshed on every get(); drives LRU eviction
  accessCount: number; // incremented on every get(); drives LFU eviction
  expiresAt: Date; // entries past this point are dropped lazily
}

// TTL cache base class; subclasses supply the eviction policy via evict().
abstract class CacheStore<T> {
  protected config: CacheConfig;
  protected entries: Map<string, CacheEntry<T>> = new Map();
  // Handle of the periodic expiry sweep so destroy() can stop it.
  private cleanupTimer: NodeJS.Timeout;

  constructor(config: CacheConfig) {
    this.config = config;

    // Sweep expired entries once a minute. unref() keeps this background
    // timer from holding the Node process open (the original anonymous
    // setInterval leaked its handle and pinned the event loop forever).
    this.cleanupTimer = setInterval(() => this.cleanup(), 60000);
    this.cleanupTimer.unref?.();
  }

  // Subclasses decide which entry to drop when the cache is full.
  abstract evict(): void;

  // Insert or overwrite an entry. customTTL (seconds) overrides config.ttl.
  set(key: string, value: T, customTTL?: number): void {
    // Evict only when adding a brand-new key to a full cache.
    if (this.entries.size >= this.config.maxSize && !this.entries.has(key)) {
      this.evict();
    }

    const ttl = (customTTL || this.config.ttl) * 1000;
    const now = new Date();

    const entry: CacheEntry<T> = {
      value,
      createdAt: now,
      accessedAt: now,
      accessCount: 1,
      expiresAt: new Date(now.getTime() + ttl)
    };

    this.entries.set(key, entry);
  }

  // Look up a live entry; expired entries are deleted lazily here.
  // A hit refreshes accessedAt/accessCount (feeds LRU/LFU eviction).
  get(key: string): T | undefined {
    const entry = this.entries.get(key);
    if (!entry) {
      return undefined;
    }

    // Check if expired
    if (entry.expiresAt < new Date()) {
      this.entries.delete(key);
      return undefined;
    }

    // Update access information
    entry.accessedAt = new Date();
    entry.accessCount++;

    return entry.value;
  }

  // Non-mutating membership probe. Unlike the previous get()-based version,
  // this does not bump accessedAt/accessCount, so LRU/LFU ordering is not
  // skewed by mere existence checks. Expired entries are still swept.
  has(key: string): boolean {
    const entry = this.entries.get(key);
    if (!entry) {
      return false;
    }
    if (entry.expiresAt < new Date()) {
      this.entries.delete(key);
      return false;
    }
    return true;
  }

  delete(key: string): boolean {
    return this.entries.delete(key);
  }

  clear(): void {
    this.entries.clear();
  }

  size(): number {
    return this.entries.size;
  }

  // Stop the background sweep and drop all entries.
  // Call when the cache instance is no longer needed.
  destroy(): void {
    clearInterval(this.cleanupTimer);
    this.entries.clear();
  }

  // Periodic sweep removing every expired entry.
  private cleanup(): void {
    const now = new Date();
    for (const [key, entry] of this.entries) {
      if (entry.expiresAt < now) {
        this.entries.delete(key);
      }
    }
  }
}

// Least-recently-used eviction: drops the entry whose last access is the
// furthest in the past.
class LRUCache<T> extends CacheStore<T> {
  evict(): void {
    if (this.entries.size === 0) return;

    let victimKey: string | null = null;
    let earliestAccess = Date.now();

    for (const [key, entry] of this.entries) {
      const accessedMs = entry.accessedAt.getTime();
      if (accessedMs < earliestAccess) {
        earliestAccess = accessedMs;
        victimKey = key;
      }
    }

    if (victimKey) {
      this.entries.delete(victimKey);
    }
  }
}

// Least-frequently-used eviction: drops the entry with the smallest
// accessCount (ties go to the earliest-iterated, i.e. oldest-inserted, entry).
class LFUCache<T> extends CacheStore<T> {
  evict(): void {
    if (this.entries.size === 0) return;

    let coldestKey: string | null = null;
    let coldestHits = Infinity;

    for (const [key, entry] of this.entries) {
      if (entry.accessCount < coldestHits) {
        coldestHits = entry.accessCount;
        coldestKey = key;
      }
    }

    if (coldestKey) {
      this.entries.delete(coldestKey);
    }
  }
}

2.2 Layered Cache Manager

// One tier inside a LayeredCacheManager.
interface CacheLayer<T> {
  name: string;
  cache: CacheStore<T>;
  priority: number; // higher-priority layers are consulted first on reads
}

// Read-through / write-through cache composed of prioritized layers
// (e.g. fast in-memory L1 over a slower, larger L2).
class LayeredCacheManager<T> {
  // Kept sorted by descending priority (fastest layer first).
  private layers: CacheLayer<T>[] = [];
  private stats = {
    hits: 0,
    misses: 0,
    writes: 0
  };

  // Register a layer; higher priority means consulted earlier on reads.
  addLayer(name: string, cache: CacheStore<T>, priority: number): void {
    this.layers.push({ name, cache, priority });
    this.layers.sort((first, second) => second.priority - first.priority);
  }

  // Walk layers from fastest to slowest; on a hit, promote the value into
  // every faster layer before returning it.
  async get(key: string): Promise<T | undefined> {
    for (const layer of this.layers) {
      const hit = layer.cache.get(key);
      if (hit === undefined) continue;

      this.stats.hits++;
      await this.backfill(key, hit, layer.priority);
      return hit;
    }

    this.stats.misses++;
    return undefined;
  }

  // Write-through: every layer receives the value.
  async set(key: string, value: T, ttl?: number): Promise<void> {
    this.stats.writes++;
    this.layers.forEach(layer => layer.cache.set(key, value, ttl));
  }

  // Remove the key from every layer.
  async delete(key: string): Promise<void> {
    this.layers.forEach(layer => layer.cache.delete(key));
  }

  // Copy a value found in a slower layer into all higher-priority layers.
  private async backfill(key: string, value: T, fromPriority: number): Promise<void> {
    for (const layer of this.layers) {
      if (layer.priority > fromPriority) {
        layer.cache.set(key, value);
      }
    }
  }

  // Counters plus derived hit rate (0 when no reads have happened yet).
  getStats(): { hits: number; misses: number; writes: number; hitRate: number } {
    const reads = this.stats.hits + this.stats.misses;
    return {
      ...this.stats,
      hitRate: reads > 0 ? this.stats.hits / reads : 0
    };
  }

  // Empty every layer and zero the statistics.
  clear(): void {
    this.layers.forEach(layer => layer.cache.clear());
    this.stats = { hits: 0, misses: 0, writes: 0 };
  }
}

// Usage example: Tool result cache
// Caches tool invocation results keyed by tool name plus hashed arguments.
class ToolResultCache {
  private cache: LayeredCacheManager<any>;

  constructor() {
    this.cache = new LayeredCacheManager<any>();

    // L1: in-memory LRU — small but fastest, highest priority.
    this.cache.addLayer(
      'memory',
      new LRUCache<any>({ maxSize: 100, ttl: 300, strategy: 'LRU' }),
      3
    );

    // L2: a persistent layer (e.g. Redis) could be registered here with a
    // lower priority for larger capacity.
  }

  // Returns the cached result, or undefined on a miss.
  async getToolResult(toolName: string, args: any): Promise<any> {
    return this.cache.get(this.generateCacheKey(toolName, args));
  }

  // Store a result; ttl is in seconds (default 300).
  async cacheToolResult(toolName: string, args: any, result: any, ttl: number = 300): Promise<void> {
    await this.cache.set(this.generateCacheKey(toolName, args), result, ttl);
  }

  // Key = `tool:<name>:<first 16 hex chars of sha256(JSON.stringify(args))>`.
  // NOTE(review): JSON.stringify is key-order sensitive, so logically equal
  // argument objects serialized in a different key order miss the cache.
  private generateCacheKey(toolName: string, args: any): string {
    const crypto = require('crypto');
    const digest = crypto
      .createHash('sha256')
      .update(JSON.stringify(args))
      .digest('hex');

    return `tool:${toolName}:${digest.substring(0, 16)}`;
  }
}

3. Concurrency and Asynchronous Processing

3.1 Request Queue Management

// Tuning for ConcurrencyController.
interface QueueConfig {
  maxConcurrency: number; // max tasks executing simultaneously
  maxQueueSize: number; // max waiting tasks; further submissions are rejected
  timeout: number; // per-task timeout in ms; 0 disables the timer
  retryAttempts: number; // total failures tolerated before the task is rejected
  retryDelay: number; // ms to wait before re-queuing a failed task
}

// Internal bookkeeping wrapper around a submitted task.
interface QueuedTask<T> {
  id: string;
  task: () => Promise<T>;
  resolve: (value: T) => void; // settles the promise returned by execute()
  reject: (error: Error) => void;
  attempts: number; // failures so far
  createdAt: Date;
  timeout?: NodeJS.Timeout; // armed only when QueueConfig.timeout > 0
}

// Bounded-concurrency task queue with retry and per-task queue timeout.
// (The type parameter T is kept for interface compatibility with existing
// callers; execute() is independently generic in its result type R.)
class ConcurrencyController<T> {
  private config: QueueConfig;
  // Pending tasks. Typed QueuedTask<any> so heterogeneous execute<R>() calls
  // can share one queue without the previous `as any` casts.
  private queue: Array<QueuedTask<any>> = [];
  // IDs of tasks currently executing.
  private running: Set<string> = new Set();
  private stats = {
    processed: 0,
    failed: 0,
    retried: 0,
    timeout: 0
  };

  constructor(config: QueueConfig) {
    this.config = config;
  }

  // Schedule a task. Rejects immediately when the queue is full, and rejects
  // with a timeout error if the task is still queued when config.timeout
  // elapses; otherwise settles with the task's own outcome (after retries).
  async execute<R>(task: () => Promise<R>): Promise<R> {
    return new Promise<R>((resolve, reject) => {
      if (this.queue.length >= this.config.maxQueueSize) {
        reject(new Error('Queue is full'));
        return;
      }

      const taskId = this.generateTaskId();
      const queuedTask: QueuedTask<R> = {
        id: taskId,
        task,
        resolve,
        reject,
        attempts: 0,
        createdAt: new Date()
      };

      // Arm the per-task timeout (0 disables it).
      if (this.config.timeout > 0) {
        queuedTask.timeout = setTimeout(() => {
          this.handleTimeout(taskId);
        }, this.config.timeout);
      }

      this.queue.push(queuedTask);
      this.processQueue();
    });
  }

  // Pull one task when a concurrency slot is free, run it, and on failure
  // re-queue it at the front up to config.retryAttempts total attempts.
  private async processQueue(): Promise<void> {
    if (this.running.size >= this.config.maxConcurrency || this.queue.length === 0) {
      return;
    }

    const queuedTask = this.queue.shift();
    if (!queuedTask) return;

    this.running.add(queuedTask.id);

    try {
      const result = await queuedTask.task();

      if (queuedTask.timeout) {
        clearTimeout(queuedTask.timeout);
      }

      queuedTask.resolve(result);
      this.stats.processed++;

    } catch (error) {
      queuedTask.attempts++;

      if (queuedTask.attempts < this.config.retryAttempts) {
        // Re-queue at the front after the backoff delay.
        setTimeout(() => {
          this.queue.unshift(queuedTask);
          this.processQueue();
        }, this.config.retryDelay);
        this.stats.retried++;
      } else {
        if (queuedTask.timeout) {
          clearTimeout(queuedTask.timeout);
        }
        queuedTask.reject(error as Error);
        this.stats.failed++;
      }
    } finally {
      this.running.delete(queuedTask.id);

      // Keep draining: a queued task may now have a free slot.
      setImmediate(() => this.processQueue());
    }
  }

  // Fires when a task's timeout elapses. A still-queued task is removed and
  // its promise rejected (the previous implementation only bumped a counter,
  // leaving the caller's promise pending forever, and compared a task object
  // against a boolean). A task already running cannot be cancelled here, so
  // only the statistic is recorded.
  private handleTimeout(taskId: string): void {
    const index = this.queue.findIndex(t => t.id === taskId);
    if (index !== -1) {
      const [timedOut] = this.queue.splice(index, 1);
      this.stats.timeout++;
      timedOut.reject(new Error(`Task ${taskId} timed out after ${this.config.timeout}ms`));
      return;
    }

    if (this.running.has(taskId)) {
      this.stats.timeout++;
    }
  }

  private generateTaskId(): string {
    return `task_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  getStats(): typeof this.stats & { queueSize: number; runningCount: number } {
    return {
      ...this.stats,
      queueSize: this.queue.length,
      runningCount: this.running.size
    };
  }
}

3.2 Resource Pool Management

// Sizing and lifecycle settings for ResourcePool.
interface PoolConfig {
  min: number; // resources created eagerly at startup and kept through idle cleanup
  max: number; // hard cap on total resources
  acquireTimeout: number; // ms a caller may wait for a resource before rejecting
  idleTimeout: number; // ms of inactivity before an idle resource may be destroyed
  maxUses?: number; // optional: destroy a resource after this many uses
}

// Pool-internal wrapper tracking one resource's lifecycle.
interface PoolResource<T> {
  resource: T; // the raw resource handed to callers
  id: string;
  createdAt: Date;
  lastUsed: Date; // refreshed on every acquire and release
  useCount: number;
  inUse: boolean;
}

// Generic resource pool with lazy creation up to `max`, a waiter queue with
// acquire timeout, idle cleanup, and optional use-count recycling.
abstract class ResourcePool<T> {
  protected config: PoolConfig;
  protected resources: Map<string, PoolResource<T>> = new Map();
  // Callers blocked in acquire() waiting for a resource to be released.
  protected waitingQueue: Array<{
    resolve: (resource: T) => void;
    reject: (error: Error) => void;
    timeout: NodeJS.Timeout;
  }> = [];
  // Handle of the idle-resource sweep so shutdown() can cancel it.
  private cleanupTimer: NodeJS.Timeout;

  constructor(config: PoolConfig) {
    this.config = config;
    this.initializePool();

    // Sweep idle resources every 30s. unref() keeps this background timer
    // from pinning the event loop (the original interval handle leaked with
    // no way to stop it).
    this.cleanupTimer = setInterval(() => this.cleanupIdle(), 30000);
    this.cleanupTimer.unref?.();
  }

  // Subclass hooks implementing the concrete resource lifecycle.
  abstract createResource(): Promise<T>;
  // NOTE(review): validateResource is declared but never invoked by the pool
  // itself — consider calling it in acquire() before handing resources out.
  abstract validateResource(resource: T): Promise<boolean>;
  abstract destroyResource(resource: T): Promise<void>;

  // Obtain a resource: reuse an idle one, create one if below max, otherwise
  // wait until release() frees one (rejects after config.acquireTimeout ms).
  async acquire(): Promise<T> {
    const availableResource = this.findAvailableResource();
    if (availableResource) {
      return this.useResource(availableResource);
    }

    if (this.resources.size < this.config.max) {
      try {
        const resource = await this.createResource();
        const poolResource = this.wrapResource(resource);
        this.resources.set(poolResource.id, poolResource);
        return this.useResource(poolResource);
      } catch {
        // Creation failed — fall through and wait for a release instead.
      }
    }

    return new Promise<T>((resolve, reject) => {
      const timeout = setTimeout(() => {
        const index = this.waitingQueue.findIndex(w => w.resolve === resolve);
        if (index !== -1) {
          this.waitingQueue.splice(index, 1);
        }
        reject(new Error('Acquire timeout'));
      }, this.config.acquireTimeout);

      this.waitingQueue.push({ resolve, reject, timeout });
    });
  }

  // Return a resource to the pool. Worn-out resources (maxUses) are
  // destroyed — and replaced when that would drop the pool below `min` —
  // otherwise the resource is handed to the longest-waiting acquirer.
  release(resource: T): void {
    const poolResource = this.findPoolResource(resource);
    if (!poolResource) {
      return; // Resource doesn't belong to this pool
    }

    poolResource.inUse = false;
    poolResource.lastUsed = new Date();

    if (this.shouldDestroyResource(poolResource)) {
      this.destroyPoolResource(poolResource.id);
      // Replenish so maxUses-recycling cannot starve the pool below min
      // (previously the pool shrank permanently in this case).
      if (this.resources.size < this.config.min) {
        void this.createAndAddResource();
      }
      return;
    }

    if (this.waitingQueue.length > 0) {
      const waiter = this.waitingQueue.shift();
      if (waiter) {
        clearTimeout(waiter.timeout);
        waiter.resolve(this.useResource(poolResource));
      }
    }
  }

  // Stop the idle sweep, reject all waiters and destroy every resource.
  // The pool must not be used after this resolves.
  async shutdown(): Promise<void> {
    clearInterval(this.cleanupTimer);

    for (const waiter of this.waitingQueue.splice(0)) {
      clearTimeout(waiter.timeout);
      waiter.reject(new Error('Pool is shutting down'));
    }

    const ids = Array.from(this.resources.keys());
    await Promise.allSettled(ids.map(id => this.destroyPoolResource(id)));
  }

  // Pre-create `min` resources; individual failures are logged, not fatal.
  private async initializePool(): Promise<void> {
    const createPromises = [];
    for (let i = 0; i < this.config.min; i++) {
      createPromises.push(this.createAndAddResource());
    }

    await Promise.allSettled(createPromises);
  }

  private async createAndAddResource(): Promise<void> {
    try {
      const resource = await this.createResource();
      const poolResource = this.wrapResource(resource);
      this.resources.set(poolResource.id, poolResource);
    } catch (error) {
      // Resource creation failed, log but don't throw
      console.error('Failed to create resource:', error);
    }
  }

  private wrapResource(resource: T): PoolResource<T> {
    return {
      resource,
      id: this.generateResourceId(),
      createdAt: new Date(),
      lastUsed: new Date(),
      useCount: 0,
      inUse: false
    };
  }

  private findAvailableResource(): PoolResource<T> | undefined {
    for (const [, poolResource] of this.resources) {
      if (!poolResource.inUse) {
        return poolResource;
      }
    }
    return undefined;
  }

  // Reverse lookup from the raw resource to its pool wrapper (identity match).
  private findPoolResource(resource: T): PoolResource<T> | undefined {
    for (const [, poolResource] of this.resources) {
      if (poolResource.resource === resource) {
        return poolResource;
      }
    }
    return undefined;
  }

  // Mark a resource busy and hand the raw resource to the caller.
  private useResource(poolResource: PoolResource<T>): T {
    poolResource.inUse = true;
    poolResource.useCount++;
    poolResource.lastUsed = new Date();
    return poolResource.resource;
  }

  // A released resource is destroyed only when it has exceeded maxUses.
  private shouldDestroyResource(poolResource: PoolResource<T>): boolean {
    if (this.config.maxUses && poolResource.useCount >= this.config.maxUses) {
      return true;
    }

    return false;
  }

  private async destroyPoolResource(resourceId: string): Promise<void> {
    const poolResource = this.resources.get(resourceId);
    if (!poolResource) return;

    this.resources.delete(resourceId);

    try {
      await this.destroyResource(poolResource.resource);
    } catch (error) {
      console.error('Failed to destroy resource:', error);
    }
  }

  // Destroy resources idle longer than idleTimeout while staying >= min.
  private async cleanupIdle(): Promise<void> {
    const now = new Date();
    const idleThreshold = new Date(now.getTime() - this.config.idleTimeout);

    for (const [id, poolResource] of this.resources) {
      if (!poolResource.inUse &&
          poolResource.lastUsed < idleThreshold &&
          this.resources.size > this.config.min) {
        await this.destroyPoolResource(id);
      }
    }
  }

  private generateResourceId(): string {
    return `res_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  getStats(): { total: number; available: number; inUse: number; waiting: number } {
    const inUse = Array.from(this.resources.values()).filter(r => r.inUse).length;
    return {
      total: this.resources.size,
      available: this.resources.size - inUse,
      inUse,
      waiting: this.waitingQueue.length
    };
  }
}

// Database connection pool example
// Example ResourcePool specialization for database connections.
class DatabaseConnectionPool extends ResourcePool<any> {
  private connectionConfig: any;

  constructor(config: PoolConfig, connectionConfig: any) {
    super(config);
    this.connectionConfig = connectionConfig;
  }

  // Open a new connection. Stubbed: a real driver call would go here,
  // e.g. mysql.createConnection(this.connectionConfig).
  async createResource(): Promise<any> {
    return { connected: true, id: Math.random().toString(36) };
  }

  // Report whether a pooled connection is still alive (e.g. a driver ping()).
  async validateResource(resource: any): Promise<boolean> {
    try {
      // For example: await resource.ping();
      return true;
    } catch {
      return false;
    }
  }

  // Close the connection; close-time errors are logged, never rethrown.
  async destroyResource(resource: any): Promise<void> {
    try {
      // For example: await resource.close();
      console.log('Connection closed');
    } catch (error) {
      console.error('Error closing connection:', error);
    }
  }
}

4. Memory Management Optimization

4.1 Memory Usage Monitoring

// One point-in-time memory reading collected by MemoryMonitor.
interface MemorySnapshot {
  timestamp: Date;
  usage: NodeJS.MemoryUsage; // process.memoryUsage() at capture time
  gcCount: number; // manual GC invocations observed so far (see enableGCMonitoring)
  heapSizeLimit: number; // V8 heap_size_limit in bytes
}

// Periodically samples process memory, raises threshold alerts, and offers a
// crude linear-trend leak heuristic over the recent snapshots.
class MemoryMonitor {
  private snapshots: MemorySnapshot[] = [];
  private gcCount = 0;
  // Alert thresholds. NOTE(review): heapUsed is a ratio of the V8 heap limit,
  // the other two are absolute byte counts — confirm defaults fit deployment.
  private alertThresholds = {
    heapUsed: 0.8, // 80% of heap_size_limit
    rss: 1024 * 1024 * 1024, // 1GB
    external: 100 * 1024 * 1024 // 100MB
  };
  private alertCallback?: (snapshot: MemorySnapshot) => void;
  // Handle of the periodic snapshot timer so stop() can cancel it.
  private snapshotTimer: NodeJS.Timeout;

  constructor() {
    // Monitor GC events (development only; requires --expose-gc)
    if (process.env.NODE_ENV === 'development') {
      this.enableGCMonitoring();
    }

    // Collect a snapshot every 10s. unref() keeps this background timer from
    // holding the process open (the original interval handle leaked with no
    // way to stop it).
    this.snapshotTimer = setInterval(() => this.takeSnapshot(), 10000);
    this.snapshotTimer.unref?.();
  }

  // Cancel the periodic snapshot collection (shutdown or tests).
  stop(): void {
    clearInterval(this.snapshotTimer);
  }

  // Wrap global.gc so manual collections are counted. Only explicit
  // global.gc() calls are observed — automatic GC cycles are not.
  private enableGCMonitoring(): void {
    // This requires starting with --expose-gc or using gc-stats library
    if (global.gc) {
      const originalGC = global.gc;
      global.gc = () => {
        this.gcCount++;
        return originalGC();
      };
    }
  }

  // Capture current memory usage, keep a bounded history, and fire alerts.
  takeSnapshot(): MemorySnapshot {
    const usage = process.memoryUsage();
    const snapshot: MemorySnapshot = {
      timestamp: new Date(),
      usage,
      gcCount: this.gcCount,
      heapSizeLimit: require('v8').getHeapStatistics().heap_size_limit
    };

    this.snapshots.push(snapshot);

    // Keep the most recent 100 snapshots
    if (this.snapshots.length > 100) {
      this.snapshots.shift();
    }

    // Check alert conditions
    this.checkMemoryAlerts(snapshot);

    return snapshot;
  }

  // Warn (and invoke the alert callback) when any threshold is exceeded.
  private checkMemoryAlerts(snapshot: MemorySnapshot): void {
    const { usage, heapSizeLimit } = snapshot;
    const heapUsedRatio = usage.heapUsed / heapSizeLimit;

    let alertTriggered = false;
    let alertMessage = 'Memory usage alert: ';

    if (heapUsedRatio > this.alertThresholds.heapUsed) {
      alertMessage += `Heap usage ${(heapUsedRatio * 100).toFixed(1)}% `;
      alertTriggered = true;
    }

    if (usage.rss > this.alertThresholds.rss) {
      alertMessage += `RSS ${Math.round(usage.rss / 1024 / 1024)}MB `;
      alertTriggered = true;
    }

    if (usage.external > this.alertThresholds.external) {
      alertMessage += `External ${Math.round(usage.external / 1024 / 1024)}MB `;
      alertTriggered = true;
    }

    if (alertTriggered) {
      console.warn(alertMessage);
      if (this.alertCallback) {
        this.alertCallback(snapshot);
      }
    }
  }

  // Snapshots captured within the last `minutes` minutes.
  getMemoryTrend(minutes: number = 10): MemorySnapshot[] {
    const cutoff = new Date(Date.now() - minutes * 60 * 1000);
    return this.snapshots.filter(s => s.timestamp >= cutoff);
  }

  // Heuristic: fit a line to heapUsed over the last 10 snapshots; a slope
  // above 1MB per sample is flagged as a potential leak.
  analyzeMemoryLeak(): { isLeaking: boolean; trend: number; recommendation: string } {
    if (this.snapshots.length < 10) {
      return {
        isLeaking: false,
        trend: 0,
        recommendation: 'Not enough data to analyze'
      };
    }

    const recentSnapshots = this.snapshots.slice(-10);
    const heapUsages = recentSnapshots.map(s => s.usage.heapUsed);

    // Calculate trend (simple linear regression)
    const trend = this.calculateTrend(heapUsages);
    const isLeaking = trend > 1024 * 1024; // 1MB growth trend

    let recommendation = '';
    if (isLeaking) {
      recommendation = 'Potential memory leak detected. Consider: 1) Check for event listener leaks 2) Review cache configurations 3) Analyze large object retention';
    } else {
      recommendation = 'Memory usage appears stable';
    }

    return { isLeaking, trend, recommendation };
  }

  // Least-squares slope of `values` against their indices (bytes per sample).
  private calculateTrend(values: number[]): number {
    const n = values.length;
    const x = Array.from({ length: n }, (_, i) => i);
    const y = values;

    const xMean = x.reduce((a, b) => a + b, 0) / n;
    const yMean = y.reduce((a, b) => a + b, 0) / n;

    const numerator = x.reduce((sum, xi, i) => sum + (xi - xMean) * (y[i] - yMean), 0);
    const denominator = x.reduce((sum, xi) => sum + Math.pow(xi - xMean, 2), 0);

    return denominator === 0 ? 0 : numerator / denominator;
  }

  setAlertCallback(callback: (snapshot: MemorySnapshot) => void): void {
    this.alertCallback = callback;
  }

  // Trigger a manual GC when the process was started with --expose-gc.
  forceGC(): void {
    if (global.gc) {
      global.gc();
      console.log('Garbage collection triggered manually');
    } else {
      console.warn('Garbage collection not available. Start with --expose-gc');
    }
  }
}

4.2 Object Pooling and Reuse Strategy

// Contract for objects managed by ObjectPool.
interface PoolableObject {
  reset(): void; // restore the object to a clean state before reuse
  isReusable(): boolean; // false -> the pool drops the object instead of recycling it
}

// Recycles expensive-to-build objects to reduce allocation/GC pressure.
class ObjectPool<T extends PoolableObject> {
  // Idle, reset objects ready for reuse (LIFO).
  private pool: T[] = [];
  private factory: () => T;
  private maxSize: number;
  private stats = {
    created: 0,
    reused: 0,
    destroyed: 0
  };

  constructor(factory: () => T, maxSize: number = 100) {
    this.factory = factory;
    this.maxSize = maxSize;
  }

  // Hand out an idle object when available, otherwise build a fresh one.
  acquire(): T {
    const recycled = this.pool.pop();
    if (recycled) {
      this.stats.reused++;
      return recycled;
    }

    this.stats.created++;
    return this.factory();
  }

  // Return an object to the pool. Unreusable objects, overflow beyond
  // maxSize, and objects whose reset() throws are dropped (counted as
  // destroyed) — JS has no explicit free, so "destroy" just means discard.
  release(obj: T): void {
    if (!obj.isReusable() || this.pool.length >= this.maxSize) {
      this.stats.destroyed++;
      return;
    }

    try {
      obj.reset();
      this.pool.push(obj);
    } catch (error) {
      this.stats.destroyed++;
      console.error('Failed to reset object for pool:', error);
    }
  }

  // Drop every idle object; counters are left untouched.
  clear(): void {
    this.pool.length = 0;
  }

  getStats(): typeof this.stats & { poolSize: number } {
    return {
      ...this.stats,
      poolSize: this.pool.length
    };
  }
}

// Buffer pool example
// Fixed-size scratch buffer suitable for an ObjectPool.
class PooledBuffer implements PoolableObject {
  private readonly capacity: number;
  private backing: Buffer;

  constructor(size: number) {
    this.capacity = size;
    // allocUnsafe skips zero-filling for speed; reset() zeroes before reuse.
    this.backing = Buffer.allocUnsafe(size);
  }

  // Zero the buffer so no stale data leaks to the next user.
  reset(): void {
    this.backing.fill(0);
  }

  // Reusable as long as the underlying buffer keeps its original size.
  isReusable(): boolean {
    return this.backing.length === this.capacity;
  }

  getBuffer(): Buffer {
    return this.backing;
  }
}

// Usage example
// Shared pool of 1 KiB scratch buffers; at most 50 idle buffers are retained.
const bufferPool = new ObjectPool(() => new PooledBuffer(1024), 50);

// Process `data` using a pooled scratch buffer and return a private copy of
// the first data.length bytes.
function processData(data: any): Buffer {
  const pooledBuffer = bufferPool.acquire();
  try {
    const buffer = pooledBuffer.getBuffer();
    // Process data...
    // Copy the payload OUT of the pooled buffer before returning. The
    // original returned buffer.slice(0, data.length), which is a view
    // sharing memory with the pooled buffer — release() zeroes and reuses
    // that memory, silently corrupting the caller's "result".
    return Buffer.from(buffer.subarray(0, data.length));
  } finally {
    bufferPool.release(pooledBuffer);
  }
}

5. Scalable Architecture Patterns

5.1 Microservice Architecture Adaptation

// Static description of a downstream microservice dependency.
interface ServiceConfig {
  name: string;
  version: string;
  endpoints: string[]; // base URLs, load-balanced round-robin
  healthCheck: string; // NOTE(review): not read by the client code below; confirm intent
  retryPolicy: {
    maxRetries: number; // effectively "max attempts" in makeRequest's loop
    backoffMs: number; // linear backoff unit: wait backoffMs * attempt between tries
  };
  circuitBreaker: {
    failureThreshold: number; // consecutive failures needed to open the circuit
    resetTimeoutMs: number; // OPEN -> HALF_OPEN probe after this long
  };
}

// Breaker states: CLOSED = normal operation, OPEN = failing fast,
// HALF_OPEN = allowing a single trial call after the reset timeout.
enum CircuitState {
  CLOSED = 'closed',
  OPEN = 'open',
  HALF_OPEN = 'half_open'
}

// Classic circuit breaker: trips OPEN after `failureThreshold` consecutive
// failures, fails fast while OPEN, then probes via HALF_OPEN after
// `resetTimeoutMs`.
class CircuitBreaker {
  private state = CircuitState.CLOSED;
  // Count of consecutive failures since the last success.
  private failures = 0;
  private lastFailureTime = 0;
  private config: ServiceConfig['circuitBreaker'];

  constructor(config: ServiceConfig['circuitBreaker']) {
    this.config = config;
  }

  // Run the operation through the breaker. While OPEN, calls are rejected
  // immediately until resetTimeoutMs elapses, then one trial is allowed.
  async execute<T>(operation: () => Promise<T>): Promise<T> {
    if (this.state === CircuitState.OPEN) {
      if (Date.now() - this.lastFailureTime > this.config.resetTimeoutMs) {
        this.state = CircuitState.HALF_OPEN;
      } else {
        throw new Error('Circuit breaker is OPEN');
      }
    }

    try {
      const result = await operation();

      // Any success closes the breaker and clears the failure streak.
      // (Previously failures were only reset from HALF_OPEN, so sporadic,
      // non-consecutive failures accumulated forever and eventually tripped
      // the breaker even on a mostly-healthy service.)
      this.state = CircuitState.CLOSED;
      this.failures = 0;

      return result;
    } catch (error) {
      this.failures++;
      this.lastFailureTime = Date.now();

      if (this.failures >= this.config.failureThreshold) {
        this.state = CircuitState.OPEN;
      }

      throw error;
    }
  }

  getState(): CircuitState {
    return this.state;
  }
}

// HTTP client for one downstream service: load-balanced endpoint selection,
// per-call retries with linear backoff, all guarded by a circuit breaker.
class MicroserviceClient {
  private config: ServiceConfig;
  private circuitBreaker: CircuitBreaker;
  private loadBalancer: LoadBalancer;

  constructor(config: ServiceConfig) {
    this.config = config;
    this.circuitBreaker = new CircuitBreaker(config.circuitBreaker);
    this.loadBalancer = new LoadBalancer(config.endpoints);
  }

  // Call a service path; the load balancer picks the concrete host and the
  // breaker short-circuits calls while the service is failing.
  async call<T>(endpoint: string, data?: any): Promise<T> {
    return this.circuitBreaker.execute(async () => {
      const url = await this.loadBalancer.getEndpoint();
      return this.makeRequest<T>(`${url}${endpoint}`, data);
    });
  }

  // POST when a body is given, otherwise GET. Retries with linear backoff
  // (backoffMs * attempt). NOTE(review): maxRetries actually bounds total
  // attempts, not retries — confirm the intended semantics with callers.
  private async makeRequest<T>(url: string, data?: any): Promise<T> {
    const fetch = require('node-fetch');

    let attempt = 0;
    let lastError: Error | undefined;

    while (attempt < this.config.retryPolicy.maxRetries) {
      try {
        const response = await fetch(url, {
          method: data ? 'POST' : 'GET',
          headers: { 'Content-Type': 'application/json' },
          body: data ? JSON.stringify(data) : undefined
        });

        if (!response.ok) {
          throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }

        return await response.json();
      } catch (error) {
        lastError = error as Error;
        attempt++;

        if (attempt < this.config.retryPolicy.maxRetries) {
          await this.delay(this.config.retryPolicy.backoffMs * attempt);
        }
      }
    }

    // With maxRetries <= 0 the loop never runs; the original `throw lastError!`
    // then threw `undefined`. Surface a real Error instead.
    throw lastError ?? new Error(`No request attempts made for ${url} (maxRetries=${this.config.retryPolicy.maxRetries})`);
  }

  private delay(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms));
  }
}

// Round-robin balancer over a fixed endpoint list with manual health marking.
class LoadBalancer {
  private endpoints: string[];
  private currentIndex = 0;
  private healthStatus: Map<string, boolean> = new Map();

  constructor(endpoints: string[]) {
    this.endpoints = endpoints;

    // Every endpoint starts out healthy until explicitly marked otherwise.
    for (const ep of endpoints) {
      this.healthStatus.set(ep, true);
    }
  }

  // Pick the next healthy endpoint in round-robin order; throws when the
  // healthy set is empty.
  async getEndpoint(): Promise<string> {
    const healthy = this.endpoints.filter(ep => this.healthStatus.get(ep) === true);

    if (healthy.length === 0) {
      throw new Error('No healthy endpoints available');
    }

    const chosen = healthy[this.currentIndex % healthy.length];
    this.currentIndex += 1;

    return chosen;
  }

  markUnhealthy(endpoint: string): void {
    this.healthStatus.set(endpoint, false);
  }

  markHealthy(endpoint: string): void {
    this.healthStatus.set(endpoint, true);
  }
}

5.2 Horizontal Scaling Support

// Identity and topology settings for one ClusterManager node.
interface ClusterConfig {
  nodeId: string; // this node's identity within `nodes`
  nodes: string[];
  shardingStrategy: 'consistent_hash' | 'range' | 'random';
  replicationFactor: number; // replica nodes returned per key (hash/random strategies)
}

/**
 * Routes keys to cluster nodes according to the configured sharding
 * strategy, and tracks per-node health (feeding the consistent-hash
 * ring when nodes come and go).
 */
class ClusterManager {
  private config: ClusterConfig;
  private hashRing: ConsistentHashRing;
  private nodeHealth: Map<string, boolean> = new Map();

  constructor(config: ClusterConfig) {
    this.config = config;
    this.hashRing = new ConsistentHashRing(config.nodes);

    // Initialize node health status: every node starts healthy.
    config.nodes.forEach(node => {
      this.nodeHealth.set(node, true);
    });
  }

  /**
   * Resolve the node(s) responsible for `key`.
   * @returns up to `replicationFactor` node ids (strategy-dependent).
   * @throws Error on an unrecognized sharding strategy.
   */
  getNodeForKey(key: string): string[] {
    switch (this.config.shardingStrategy) {
      case 'consistent_hash':
        return this.hashRing.getNodes(key, this.config.replicationFactor);
      case 'range':
        return this.getRangeNodes(key);
      case 'random':
        return this.getRandomNodes();
      default:
        throw new Error(`Unknown sharding strategy: ${this.config.shardingStrategy}`);
    }
  }

  /**
   * Simplified range sharding: hash the key onto a fixed node slot.
   * NOTE(review): ignores node health and replicationFactor — always
   * returns exactly one node, healthy or not.
   */
  private getRangeNodes(key: string): string[] {
    const hash = this.simpleHash(key);
    const nodeIndex = hash % this.config.nodes.length;
    return [this.config.nodes[nodeIndex]];
  }

  /**
   * Pick up to `replicationFactor` DISTINCT healthy nodes uniformly at
   * random. Fixed: the previous implementation sampled with
   * replacement, so the same node could be chosen repeatedly and the
   * effective replication factor silently dropped below the configured
   * value. Uses a partial Fisher-Yates shuffle to sample without
   * replacement.
   */
  private getRandomNodes(): string[] {
    const pool = this.config.nodes.filter(node =>
      this.nodeHealth.get(node) === true
    );

    const take = Math.min(this.config.replicationFactor, pool.length);
    const selectedNodes: string[] = [];

    for (let i = 0; i < take; i++) {
      // Swap a random remaining element into position i, then take it.
      const j = i + Math.floor(Math.random() * (pool.length - i));
      [pool[i], pool[j]] = [pool[j], pool[i]];
      selectedNodes.push(pool[i]);
    }

    return selectedNodes;
  }

  /** Cheap 32-bit string hash (djb2 variant); non-cryptographic. */
  private simpleHash(str: string): number {
    let hash = 0;
    for (let i = 0; i < str.length; i++) {
      const char = str.charCodeAt(i);
      hash = ((hash << 5) - hash) + char;
      hash = hash & hash; // Convert to 32-bit integer
    }
    return Math.abs(hash);
  }

  /** True when `nodeId` refers to this process's own node. */
  isLocalNode(nodeId: string): boolean {
    return nodeId === this.config.nodeId;
  }

  /**
   * Record a health change and keep the hash ring in sync: unhealthy
   * nodes are removed from the ring, recovered nodes are re-added.
   */
  updateNodeHealth(nodeId: string, healthy: boolean): void {
    this.nodeHealth.set(nodeId, healthy);

    if (!healthy) {
      this.hashRing.removeNode(nodeId);
    } else {
      this.hashRing.addNode(nodeId);
    }
  }
}

/**
 * Consistent-hash ring with virtual nodes. Each physical node is
 * mapped to 150 positions on the ring; a key is served by the first
 * node(s) found walking clockwise from the key's own position.
 *
 * Fixed vs. the original:
 *  - hash() previously called require('crypto') on every invocation —
 *    a hot-path cost (150 hashes per add/remove, one per lookup) and a
 *    runtime failure under ES modules where `require` is undefined.
 *    Replaced with a dependency-free FNV-1a 32-bit hash.
 *  - getNodes() re-sorted every ring position on every lookup; the
 *    sorted array is now cached and lazily rebuilt after membership
 *    changes.
 */
class ConsistentHashRing {
  private ring: Map<number, string> = new Map();
  // Sorted ring positions; null means "stale, rebuild on next lookup".
  private sortedHashes: number[] | null = null;
  private virtualNodes = 150; // Number of virtual nodes per physical node

  constructor(nodes: string[]) {
    nodes.forEach(node => this.addNode(node));
  }

  /** Place a node's virtual positions on the ring. Idempotent. */
  addNode(node: string): void {
    for (let i = 0; i < this.virtualNodes; i++) {
      const virtualNodeKey = `${node}:${i}`;
      this.ring.set(this.hash(virtualNodeKey), node);
    }
    this.sortedHashes = null; // invalidate lookup cache
  }

  /** Remove a node's virtual positions from the ring. */
  removeNode(node: string): void {
    for (let i = 0; i < this.virtualNodes; i++) {
      const virtualNodeKey = `${node}:${i}`;
      this.ring.delete(this.hash(virtualNodeKey));
    }
    this.sortedHashes = null; // invalidate lookup cache
  }

  /**
   * Return up to `count` DISTINCT nodes responsible for `key`, walking
   * clockwise from the key's ring position (wrapping at the end).
   * Returns [] on an empty ring; returns fewer than `count` nodes when
   * the ring holds fewer distinct physical nodes.
   */
  getNodes(key: string, count: number = 1): string[] {
    if (this.ring.size === 0) {
      return [];
    }

    const keyHash = this.hash(key);
    const sortedHashes = this.getSortedHashes();

    // Find first position greater than or equal to keyHash.
    let index = sortedHashes.findIndex(hash => hash >= keyHash);
    if (index === -1) {
      index = 0; // Ring structure, wrap to beginning
    }

    const selectedNodes = new Set<string>();
    let currentIndex = index;

    while (selectedNodes.size < count && selectedNodes.size < this.getUniqueNodeCount()) {
      const hash = sortedHashes[currentIndex];
      const node = this.ring.get(hash)!;
      selectedNodes.add(node);

      currentIndex = (currentIndex + 1) % sortedHashes.length;
    }

    return Array.from(selectedNodes);
  }

  /** Lazily (re)build the ascending array of ring positions. */
  private getSortedHashes(): number[] {
    if (this.sortedHashes === null) {
      this.sortedHashes = Array.from(this.ring.keys()).sort((a, b) => a - b);
    }
    return this.sortedHashes;
  }

  private getUniqueNodeCount(): number {
    return new Set(this.ring.values()).size;
  }

  /**
   * FNV-1a 32-bit hash — fast, dependency-free, and deterministic.
   * Not cryptographic, which is fine: ring placement only needs an
   * even spread, not collision resistance.
   */
  private hash(str: string): number {
    let h = 0x811c9dc5; // FNV offset basis
    for (let i = 0; i < str.length; i++) {
      h ^= str.charCodeAt(i);
      h = Math.imul(h, 0x01000193); // FNV prime, 32-bit multiply
    }
    return h >>> 0; // force unsigned 32-bit
  }
}

6. Best Practices

6.1 Performance Optimization Principles

  1. Measure First - Measure before optimizing, make data-driven decisions
  2. Targeted Optimization - Concentrate effort on hot code paths rather than optimizing everywhere
  3. Caching Strategy - Reasonable use of multi-level caching
  4. Asynchronous Processing - Fully leverage Node.js async features
  5. Resource Pooling - Reuse expensive resources

6.2 Scalability Design

  1. Stateless Design - No state sharing between service instances
  2. Horizontal Sharding - Support data and request sharding
  3. Load Balancing - Properly distribute request load
  4. Fault Isolation - Prevent single point failures from affecting the whole system
  5. Monitoring and Alerting - Comprehensive monitoring and alerting system

6.3 Memory Optimization Tips

  1. Timely Release - Promptly clean up unused objects
  2. Avoid Leaks - Pay attention to event listener and timer cleanup
  3. Stream Processing - Use streaming for large data
  4. Object Pooling - Reuse frequently created objects
  5. Garbage Collection - Understand and optimize GC behavior

Summary

Through this chapter, we’ve learned:

  • Complete system for performance analysis and monitoring
  • Design and implementation of multi-level cache systems
  • Concurrency control and resource pool management
  • Memory optimization and leak detection
  • Design patterns for scalable architectures

Performance optimization is an ongoing process that requires targeted optimization based on actual business scenarios and load characteristics, while balancing performance, complexity, and maintainability.