Practical Patterns and Best Practices

Let's consolidate everything with real-world patterns for production Node.js applications.

Error Handling Patterns

Never Let Async Errors Go Unhandled

// BAD: Unhandled rejection
async function fetchData() {
  const res = await fetch('https://api.example.com');
  // If no caller awaits this inside a try/catch, the throw below
  // surfaces as an unhandled promise rejection
  if (!res.ok) {
    throw new Error('Failed to fetch');
  }
  return res.json();
}

// GOOD: Handle at call site
async function handler(req, res) {
  try {
    res.json(await fetchData());
  } catch (err) {
    // Log the real cause, but never leak internals to the client
    console.error('Fetch error:', err);
    res.status(500).json({ error: 'Internal error' });
  }
}

Global Error Handlers

// Catch unhandled promise rejections
const onUnhandledRejection = (reason) => {
  console.error('Unhandled Rejection:', reason);
  // Log to monitoring service
  // Optionally: process.exit(1);
};
process.on('unhandledRejection', onUnhandledRejection);

// Catch uncaught exceptions
const onUncaughtException = (err) => {
  console.error('Uncaught Exception:', err);
  // MUST exit - state is corrupt
  process.exit(1);
};
process.on('uncaughtException', onUncaughtException);

Error-First Callbacks

// The Node.js convention: the callback's first argument is the error
fs.readFile('data.txt', (err, contents) => {
  if (err) {
    // Guard clause: bail out on failure before touching the data
    console.error('Read failed:', err);
    return;
  }
  console.log(contents);
});

// Promisify for modern code
const { promisify } = require('util');
const readFile = promisify(fs.readFile);

// Same read as above, expressed with async/await
async function read() {
  return await readFile('data.txt');
}

Stream Pipeline Patterns

Safe Piping with pipeline()

const { pipeline } = require('stream/promises');

// Compresses `input` into gzip at `output`; removes any partial file on failure.
async function compressFile(input, output) {
  const source = fs.createReadStream(input);
  const gzip = zlib.createGzip();
  const sink = fs.createWriteStream(output);

  try {
    // pipeline() wires up backpressure and tears all streams down on error
    await pipeline(source, gzip, sink);
    console.log('Compression complete');
  } catch (err) {
    console.error('Compression failed:', err);
    // Cleanup: remove partial output (best-effort; ignore a missing file)
    await fs.promises.unlink(output).catch(() => {});
    throw err;
  }
}

Stream with Timeout

/**
 * Resolves when `stream` emits 'end', rejects on 'error', and destroys the
 * stream with a timeout error if neither happens within `ms` milliseconds.
 * NOTE(review): resolves on 'end', i.e. readable streams; writables emit
 * 'finish' instead — confirm intended stream kind with callers.
 */
function streamWithTimeout(stream, ms) {
  return new Promise((resolve, reject) => {
    const timeout = setTimeout(() => {
      const err = new Error('Stream timeout');
      stream.destroy(err);
      // BUG FIX: reject directly — destroy() is not guaranteed to emit
      // 'error' (e.g. on an already-destroyed stream), which previously
      // could leave this promise pending forever after a timeout.
      reject(err);
    }, ms);

    // once() instead of on(): the promise settles exactly once, so there
    // is no reason to keep listeners attached afterwards.
    stream.once('end', () => {
      clearTimeout(timeout);
      resolve();
    });

    stream.once('error', (err) => {
      clearTimeout(timeout);
      reject(err);
    });
  });
}

Event Loop Optimization

Avoid Blocking the Loop

// BAD: Blocks for large arrays — the entire loop runs to completion
// before the event loop can service anything else
function processAll(items) {
  const results = [];
  for (const item of items) {
    results.push(expensiveOperation(item));
  }
  return results;
}

// GOOD: Yield to event loop periodically
// Processes every item like processAll, but hands control back to the
// event loop after each run of 100 items so timers and I/O stay responsive.
async function processAllAsync(items) {
  const results = [];

  for (let i = 0; i < items.length; i++) {
    results.push(expensiveOperation(items[i]));

    // Yield every 100 items. BUG FIX: skip i === 0 — `0 % 100 === 0`
    // forced a pointless yield right after the very first item.
    if (i > 0 && i % 100 === 0) {
      await new Promise(resolve => setImmediate(resolve));
    }
  }

  return results;
}

Monitoring Event Loop Lag

// Timestamp of the previous tick, used to measure drift between ticks.
let lastCheck = Date.now();

// Sample every 100ms; any delay beyond the interval is event-loop lag.
const lagMonitor = setInterval(() => {
  const now = Date.now();
  const lag = now - lastCheck - 100; // Expected 100ms

  if (lag > 50) {
    console.warn(`Event loop lag: ${lag}ms`);
  }

  lastCheck = now;
}, 100);

// BUG FIX: unref() the handle so the monitor itself never keeps the
// process alive — previously this interval blocked graceful shutdown.
lagMonitor.unref();

Connection Pool Pattern

class ConnectionPool {
  /**
   * Lazily creates up to `size` connections via the async `factory`.
   * acquire() reuses an idle connection, creates a new one while capacity
   * remains, or queues the caller until release() returns one.
   */
  constructor(factory, size = 10) {
    this.factory = factory;
    this.size = size;        // remaining creation capacity
    this.available = [];     // idle connections ready for reuse
    this.pending = [];       // resolvers of callers waiting for a release
  }

  async acquire() {
    // Reuse an idle connection first
    if (this.available.length > 0) {
      return this.available.pop();
    }

    // Still under capacity: create a fresh connection
    if (this.size > 0) {
      this.size--;
      try {
        return await this.factory();
      } catch (err) {
        // BUG FIX: a failed factory() permanently leaked capacity —
        // `size` was decremented but never restored, so after enough
        // failures every future acquire() queued forever. Restore the slot.
        this.size++;
        throw err;
      }
    }

    // Pool exhausted: wait for a connection to be released
    return new Promise(resolve => {
      this.pending.push(resolve);
    });
  }

  /** Returns a connection to the pool, waking the oldest waiter if any. */
  release(connection) {
    if (this.pending.length > 0) {
      const resolve = this.pending.shift();
      resolve(connection);
    } else {
      this.available.push(connection);
    }
  }
}

Request Batching

class RequestBatcher {
  /**
   * Coalesces individual add() calls into batched processBatch() calls.
   * A batch flushes when it reaches `maxSize` items or `maxWait` ms after
   * the first queued item, whichever comes first.
   */
  constructor(processBatch, { maxSize = 100, maxWait = 50 } = {}) {
    this.processBatch = processBatch; // async (items[]) => results[], same order & length
    this.maxSize = maxSize;
    this.maxWait = maxWait;
    this.pending = [];                // { item, resolve, reject } entries
    this.timer = null;
  }

  /** Queues one item; resolves with its positionally-matching result. */
  add(item) {
    return new Promise((resolve, reject) => {
      this.pending.push({ item, resolve, reject });

      if (this.pending.length >= this.maxSize) {
        this.flush();
      } else if (!this.timer) {
        this.timer = setTimeout(() => this.flush(), this.maxWait);
      }
    });
  }

  async flush() {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }

    const batch = this.pending;
    this.pending = [];

    // Guard against redundant flushes (e.g. an external caller racing the
    // timer) so processBatch is never invoked with an empty array.
    if (batch.length === 0) {
      return;
    }

    try {
      const items = batch.map(b => b.item);
      const results = await this.processBatch(items);

      // BUG FIX: a length mismatch used to silently resolve callers with
      // undefined; surface it as an explicit failure instead.
      if (!Array.isArray(results) || results.length !== batch.length) {
        throw new Error('processBatch must return one result per item');
      }

      batch.forEach((b, i) => b.resolve(results[i]));
    } catch (err) {
      // One failure rejects every caller in the batch
      batch.forEach(b => b.reject(err));
    }
  }
}

// Usage
// processBatch receives every id queued within the same window; results
// are matched back to callers by position, so it must return one entry
// per id, in the same order. NOTE(review): SQL `IN (?)` expansion depends
// on the db driver — confirm the placeholder syntax for yours.
const batcher = new RequestBatcher(async (ids) => {
  return db.query('SELECT * FROM users WHERE id IN (?)', [ids]);
});

// These batch together
// Both add() calls land inside one maxWait window, so a single query runs.
const user1 = await batcher.add(1);
const user2 = await batcher.add(2);

Graceful Degradation

class CircuitBreaker {
  /**
   * Wraps async `fn` so repeated failures stop hammering a struggling
   * dependency. After `threshold` consecutive failures the breaker opens
   * and calls fail fast; once `timeout` ms have elapsed a probe call is
   * allowed through (half-open). A success closes the breaker again.
   */
  constructor(fn, { threshold = 5, timeout = 30000 } = {}) {
    this.fn = fn;
    this.threshold = threshold;
    this.timeout = timeout;
    this.failures = 0;
    this.lastFailure = null;
    this.state = 'closed';
  }

  async call(...args) {
    if (this.state === 'open') {
      const coolDownOver = Date.now() - this.lastFailure > this.timeout;
      if (!coolDownOver) {
        throw new Error('Circuit breaker is open');
      }
      // Cool-down elapsed: let one probe call through
      this.state = 'half-open';
    }

    let result;
    try {
      result = await this.fn(...args);
    } catch (err) {
      this.recordFailure();
      throw err;
    }
    this.reset();
    return result;
  }

  /** Counts a failure; trips the breaker open at the threshold. */
  recordFailure() {
    this.failures += 1;
    this.lastFailure = Date.now();
    if (this.failures >= this.threshold) {
      this.state = 'open';
    }
  }

  /** A success clears the failure count and closes the breaker. */
  reset() {
    this.failures = 0;
    this.state = 'closed';
  }
}

Memory Management

Avoid Memory Leaks

// BAD: Accumulates data forever — entries are never evicted, so memory
// grows without bound
const cache = {};
function addToCache(key, entry) {
  cache[key] = entry;
}

// GOOD: Use LRU cache with size limit — least-recently-used entries are
// evicted once `max` is reached, bounding memory.
// NOTE(review): recent major versions of lru-cache export a named
// `LRUCache` class instead of a default export — confirm against the
// installed version before copying this verbatim.
const LRU = require('lru-cache');
const cache = new LRU({ max: 1000 });

Clean Up Event Listeners

// BAD: Listeners accumulate
// Every request registers another SIGTERM listener that is never removed,
// so long-lived processes leak listeners (and eventually trigger Node's
// MaxListenersExceededWarning).
// NOTE(review): `cleanup` must come from an enclosing scope — not shown here.
function handleRequest(req, res) {
  process.on('SIGTERM', cleanup);
}

// GOOD: Remove listeners when done
function handleRequest(req, res) {
  const cleanup = () => { /* ... */ };
  process.on('SIGTERM', cleanup);

  // Unregister once the response is flushed so listeners don't accumulate
  res.on('finish', () => process.removeListener('SIGTERM', cleanup));
}

Testing Async Code

// Test event loop behavior
// NOTE: the exact statement order below IS what is being tested — the
// nextTick queue drains before the promise microtask queue, and both
// drain before timer callbacks run.
describe('Event Loop', () => {
  it('should process microtasks before timers', (done) => {
    const order = [];

    // Macrotask: runs in the timers phase, after all microtasks
    setTimeout(() => order.push('timer'), 0);
    // Microtask: promise reaction queue
    Promise.resolve().then(() => order.push('promise'));
    // Microtask: nextTick queue — drained before promise reactions in Node
    process.nextTick(() => order.push('nextTick'));

    // By the check phase (setImmediate) all three callbacks should have run.
    // NOTE(review): setTimeout(0) vs setImmediate ordering from the main
    // module is not guaranteed by Node — this assertion can flake; consider
    // scheduling inside an I/O callback to make the order deterministic.
    setImmediate(() => {
      expect(order).toEqual(['nextTick', 'promise', 'timer']);
      done();
    });
  });
});

// Test stream processing
describe('Streams', () => {
  it('should process all chunks', async () => {
    const chunks = [];
    // Readable.from turns the array into an object-mode readable stream
    const readable = Readable.from(['a', 'b', 'c']);

    // for-await consumes the stream chunk by chunk until it ends,
    // propagating any 'error' event as a thrown exception
    for await (const chunk of readable) {
      chunks.push(chunk);
    }

    expect(chunks).toEqual(['a', 'b', 'c']);
  });
});

Key Takeaways

  1. Always handle errors in async code
  2. Use pipeline() for safe stream processing
  3. Yield to event loop during heavy computation
  4. Monitor event loop lag in production
  5. Implement circuit breakers for external dependencies
  6. Clean up resources (listeners, connections, timers)
  7. Use connection pools for databases
  8. Batch requests to reduce overhead