Asynchronous Control Flow
Managing asynchronous code is one of the biggest challenges in Node.js. This chapter covers Promises, async/await, and patterns for handling complex async workflows—knowledge every senior Node.js developer must master.
Promises: The Foundation
A Promise represents a value that may not be available yet but will be resolved at some point.
Promise States
┌─────────────────────────────────────────┐
│ │
│ PENDING │
│ (initial state) │
│ │
└────────────────┬────────────────────────┘
│
┌───────────────┴───────────────┐
│ │
▼ ▼
┌───────────────────┐ ┌───────────────────┐
│ FULFILLED │ │ REJECTED │
│ (has value) │ │ (has reason) │
└───────────────────┘ └───────────────────┘
Creating Promises
// The Promise constructor
const promise = new Promise((resolve, reject) => {
// Async operation
setTimeout(() => {
if (success) {
resolve(value);
} else {
reject(new Error("Something failed"));
}
}, 1000);
});
// Static methods for immediate values
Promise.resolve(42); // Immediately fulfilled
Promise.reject(new Error()); // Immediately rejected
Consuming Promises
promise
.then((value) => {
console.log("Success:", value);
return transformedValue; // Returned value becomes next promise's value
})
.then((newValue) => {
console.log("Transformed:", newValue);
})
.catch((error) => {
console.error("Error:", error);
})
.finally(() => {
console.log("Cleanup"); // Always runs
});
Promisifying Callbacks
Convert callback-based functions to Promises:
// Manual promisification
/**
 * Manual promisification of fs.readFile.
 * @param {string} path - File path to read.
 * @returns {Promise<string>} Resolves with the file contents (UTF-8),
 *   rejects with the underlying fs error.
 */
function readFilePromise(path) {
  return new Promise((resolve, reject) => {
    // Adapt the error-first callback to the Promise contract.
    fs.readFile(path, "utf8", (err, data) => (err ? reject(err) : resolve(data)));
  });
}
// Using util.promisify (preferred)
const { promisify } = require("util");
const readFile = promisify(fs.readFile);
// Modern: fs/promises module
const { readFile } = require("fs/promises");
Async/Await: Syntactic Sugar
Async/await makes asynchronous code look synchronous:
// Promise chain
getUser(id)
.then((user) => getOrders(user.id))
.then((orders) => getOrderDetails(orders[0].id))
.then((details) => console.log(details))
.catch((err) => console.error(err));
// Async/await
async function getOrderDetailsForUser(id) {
try {
const user = await getUser(id);
const orders = await getOrders(user.id);
const details = await getOrderDetails(orders[0].id);
console.log(details);
} catch (err) {
console.error(err);
}
}
Key Rules
- await only works inside async functions (or at top-level in ES modules)
- async functions always return a Promise
- await pauses execution until the Promise resolves
async function example() {
return 42;
}
// Equivalent to:
function example() {
return Promise.resolve(42);
}
async function throws() {
throw new Error("Oops");
}
// Equivalent to:
function throws() {
return Promise.reject(new Error("Oops"));
}
Error Handling Patterns
Try-Catch Blocks:
async function fetchData() {
try {
const response = await fetch(url);
const data = await response.json();
return data;
} catch (error) {
console.error("Fetch failed:", error);
throw error; // Re-throw if needed
}
}
Error-First Destructuring (Go-style):
/**
 * Go-style error-first wrapper around a promise (or plain value).
 * Never throws: the outcome is encoded in the returned tuple.
 * @param {Promise|*} promise - The promise (or value) to settle.
 * @returns {Promise<[Error|null, *]>} `[null, result]` on success,
 *   `[error, null]` on rejection.
 */
async function safeAwait(promise) {
  let outcome;
  try {
    outcome = [null, await promise];
  } catch (caught) {
    outcome = [caught, null];
  }
  return outcome;
}
// Usage
const [error, data] = await safeAwait(fetchData());
if (error) {
console.error(error);
return;
}
console.log(data);
Catch Wrapper:
/**
 * Wrap an (async) Express handler so any rejection is forwarded to next().
 * @param {Function} fn - Handler `(req, res, next) => ...`; may return a promise.
 * @returns {Function} A handler with identical signature that never leaks
 *   an unhandled rejection.
 */
function catchAsync(fn) {
  return function (req, res, next) {
    // Promise.resolve also covers handlers that return a plain value.
    const maybePromise = fn(req, res, next);
    Promise.resolve(maybePromise).catch(next);
  };
}
// Usage in Express
app.get(
"/users",
catchAsync(async (req, res) => {
const users = await User.findAll();
res.json(users);
}),
);
Parallel Execution
Promise.all - All Must Succeed
const results = await Promise.all([fetchUser(1), fetchUser(2), fetchUser(3)]);
// results = [user1, user2, user3]
// If ANY promise rejects, Promise.all rejects immediately
Promise.allSettled - Get All Results
const results = await Promise.allSettled([
fetchUser(1),
fetchUser(2), // This might fail
fetchUser(3),
]);
// results = [
// { status: 'fulfilled', value: user1 },
// { status: 'rejected', reason: Error },
// { status: 'fulfilled', value: user3 }
// ]
// Process results
const successful = results
.filter((r) => r.status === "fulfilled")
.map((r) => r.value);
Promise.race - First to Complete
// Timeout pattern
/**
 * Fetch a URL, rejecting with Error("Timeout") if it takes longer than
 * `timeout` milliseconds.
 *
 * Fixes over the naive race: the timer is cleared once the race settles
 * (otherwise it keeps the event loop alive for the full timeout), and a
 * late fetch failure after the timeout won is absorbed so it cannot
 * become an unhandled rejection.
 *
 * @param {string} url - URL to fetch.
 * @param {number} timeout - Milliseconds before giving up.
 * @returns {Promise<Response>} The fetch response, or rejects on timeout.
 */
async function fetchWithTimeout(url, timeout) {
  let timer;
  const timeoutPromise = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error("Timeout")), timeout);
  });
  const request = fetch(url);
  try {
    return await Promise.race([request, timeoutPromise]);
  } finally {
    clearTimeout(timer); // don't keep the process alive after settling
    // If the timeout won, a later fetch rejection would otherwise be unhandled.
    request.catch(() => {});
  }
}
// First successful response
const fastest = await Promise.race([fetchFromServer1(), fetchFromServer2()]);
Promise.any - First to Succeed
// Returns first fulfilled promise (ignores rejections)
const first = await Promise.any([
fetchFromServer1(), // Might fail
fetchFromServer2(), // Might fail
fetchFromServer3(), // If this succeeds, returns its value
]);
// Only rejects if ALL promises reject (AggregateError)
Choosing the Right Method
// Scenario: Fetch user data from multiple sources
// All must succeed (dependencies)
const [user, orders, preferences] = await Promise.all([
getUser(id),
getOrders(id),
getPreferences(id),
]);
// Get whatever succeeds (best effort)
const results = await Promise.allSettled([
getUser(id),
getOrders(id),
getPreferences(id),
]);
// Fastest response wins (redundant services)
const user = await Promise.race([getUserFromCache(id), getUserFromDB(id)]);
// First successful response (fallback servers)
const data = await Promise.any([
fetchFromPrimary(),
fetchFromSecondary(),
fetchFromTertiary(),
]);
Sequential Processing
Sometimes you need to process items one at a time:
For...of with Await
async function processSequentially(items) {
const results = [];
for (const item of items) {
const result = await processItem(item);
results.push(result);
}
return results;
}
Reduce Pattern
async function processSequentially(items) {
return items.reduce(async (previousPromise, item) => {
const results = await previousPromise;
const result = await processItem(item);
return [...results, result];
}, Promise.resolve([]));
}Warning
Don't use forEach with async/await—it doesn't wait for promises:
// WRONG: All requests fire simultaneously, no waiting
items.forEach(async (item) => {
await processItem(item);
});
console.log("Done"); // Logs before processing completes!
// CORRECT: Use for...of
for (const item of items) {
await processItem(item);
}
console.log("Done"); // Logs after all processing completes
Concurrency Control
Running everything in parallel can overwhelm resources. Limit concurrency.
What is Concurrency Control?
Concurrency control limits how many async operations run simultaneously. Without it, launching 10,000 parallel requests could:
- Exhaust memory: Each pending promise consumes memory
- Hit rate limits: APIs often limit requests per second
- Overload databases: Too many connections crash the pool
- Cause timeouts: System can't handle the load
The solution: Process items in batches, limiting how many run at once.
Simple Concurrency Limiter
/**
 * Map `fn` over `items` with at most `concurrency` tasks in flight.
 * Results are returned in input order.
 *
 * Fixes over the original: a FAILED task is also removed from the
 * `executing` pool (the original only cleaned up on success, so a failure
 * left the pool stalled), and the `Promise.race` rejection is swallowed
 * here so the error surfaces exactly once — through `Promise.all` at the
 * end — instead of leaving the other in-flight promises as unhandled
 * rejections.
 *
 * @param {Iterable} items - Inputs to process.
 * @param {(item: *) => Promise<*>} fn - Async mapper.
 * @param {number} concurrency - Max simultaneous tasks.
 * @returns {Promise<Array>} Results in input order; rejects with the first error.
 */
async function mapWithConcurrency(items, fn, concurrency) {
  const results = [];
  const executing = new Set();
  for (const item of items) {
    const promise = fn(item).then(
      (result) => {
        executing.delete(promise);
        return result;
      },
      (error) => {
        // Release the slot on failure too, so the pool never stalls.
        executing.delete(promise);
        throw error;
      },
    );
    results.push(promise);
    executing.add(promise);
    if (executing.size >= concurrency) {
      // Ignore a race rejection here; Promise.all below reports it.
      await Promise.race(executing).catch(() => {});
    }
  }
  return Promise.all(results);
}
// Usage: Process 100 items, max 5 concurrent
const results = await mapWithConcurrency(items, processItem, 5);
Using p-limit Library
import pLimit from "p-limit";
const limit = pLimit(5); // Max 5 concurrent
const results = await Promise.all(urls.map((url) => limit(() => fetch(url))));
Async Queue Pattern
An async queue processes tasks one at a time (or up to N at a time), ensuring controlled execution order. Use it when:
- Tasks must not overwhelm a shared resource
- You need to add tasks dynamically and have them processed in order
- Work should continue even if individual tasks fail
/**
 * A queue that runs pushed async tasks with bounded concurrency.
 * Tasks are started in push order; each caller gets a promise that
 * settles with its own task's outcome.
 */
class AsyncQueue {
  /**
   * @param {number} concurrency - Max tasks running at once (default 1).
   */
  constructor(concurrency = 1) {
    this.concurrency = concurrency;
    this.running = 0;
    this.queue = [];
  }

  /**
   * Enqueue a task (a zero-arg function returning a promise).
   * @param {() => Promise<*>} task
   * @returns {Promise<*>} Settles with the task's result or error.
   */
  async push(task) {
    return new Promise((resolve, reject) => {
      this.queue.push({ task, resolve, reject });
      this.process();
    });
  }

  async process() {
    // Drain jobs while capacity remains. Because this loop awaits each
    // task, concurrency > 1 comes from the additional process() calls
    // made by push() and by the finally block below.
    while (this.running < this.concurrency && this.queue.length > 0) {
      const job = this.queue.shift();
      this.running += 1;
      try {
        job.resolve(await job.task());
      } catch (taskError) {
        job.reject(taskError);
      } finally {
        this.running -= 1;
        // A slot just freed up — see if another queued job can start.
        this.process();
      }
    }
  }
}
// Usage
const queue = new AsyncQueue(3);
urls.forEach((url) => {
queue.push(() => fetch(url)).then(handleResponse);
});
Advanced Patterns
Retry with Exponential Backoff
What is it? A pattern that automatically retries failed operations, waiting progressively longer between each attempt.
Why do we need it? Network requests fail. Servers go down temporarily. APIs rate-limit you. Instead of failing immediately, retry gives transient failures a chance to resolve.
How it works:
Attempt 1: Fail → Wait 1 second
Attempt 2: Fail → Wait 2 seconds (1s × 2)
Attempt 3: Fail → Wait 4 seconds (2s × 2)
Attempt 4: Fail → Wait 8 seconds (4s × 2)
Attempt 5: Success! (or give up)
Exponential backoff prevents overwhelming a struggling server. Jitter (a random extra delay) prevents all clients from retrying at the exact same moment (the thundering-herd problem).
/**
 * Run `fn`, retrying on failure with exponential backoff and jitter.
 *
 * @param {() => Promise<*>} fn - The operation to attempt.
 * @param {Object} [options]
 * @param {number} [options.retries=3] - Extra attempts after the first.
 * @param {number} [options.baseDelay=1000] - Initial backoff in ms.
 * @param {number} [options.maxDelay=30000] - Backoff cap in ms.
 * @param {number} [options.factor=2] - Backoff multiplier per attempt.
 * @param {(err: Error) => boolean} [options.shouldRetry] - Gate for retrying.
 * @returns {Promise<*>} The first successful result.
 * @throws The last error when attempts are exhausted or shouldRetry vetoes.
 */
async function retry(fn, options = {}) {
  const {
    retries = 3,
    baseDelay = 1000,
    maxDelay = 30000,
    factor = 2,
    shouldRetry = () => true,
  } = options;

  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  let lastError;
  for (let attempt = 0; attempt <= retries; attempt++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error;
      const exhausted = attempt === retries;
      if (exhausted || !shouldRetry(error)) {
        throw error;
      }
      // Exponential backoff, capped, plus up to 20% jitter so clients
      // don't all retry in lockstep (thundering herd).
      const delay = Math.min(baseDelay * factor ** attempt, maxDelay);
      const jitter = delay * 0.2 * Math.random();
      console.log(
        `Attempt ${attempt + 1} failed, retrying in ${delay + jitter}ms`,
      );
      await sleep(delay + jitter);
    }
  }
  throw lastError;
}
// Usage
const data = await retry(() => fetch("https://api.example.com/data"), {
retries: 5,
shouldRetry: (err) => err.status >= 500, // Only retry server errors
});
Circuit Breaker
What is it? A pattern that prevents an application from repeatedly trying to execute an operation that's likely to fail.
Why do we need it? When an external service is down, continuously retrying:
- Wastes resources on doomed requests
- Adds load to an already struggling service
- Blocks your application waiting for timeouts
- Creates cascading failures across your system
How it works:
The circuit breaker has three states:
┌─────────┐ failures >= threshold ┌─────────┐
│ CLOSED │ ─────────────────────────► │ OPEN │
│(normal) │ │ (fail │
└─────────┘ │ fast) │
▲ └────┬────┘
│ │
│ success timeout expires │
│ ┌─────────┐ ◄────────────────────── │
└──│HALF_OPEN│ │
│ (test) │ ──────────────────────────┘
└─────────┘ failure
- CLOSED: Normal operation. Failures are counted.
- OPEN: Fail immediately without calling the service. Wait for timeout.
- HALF_OPEN: Allow one test request. Success closes the circuit; failure reopens it.
/**
 * Circuit breaker: fails fast once a service has failed repeatedly,
 * then probes it again after a cool-down.
 *
 * States: CLOSED (normal) -> OPEN (fail fast) -> HALF_OPEN (one probe).
 */
class CircuitBreaker {
  /**
   * @param {Object} [options]
   * @param {number} [options.failureThreshold=5] - Failures before opening.
   * @param {number} [options.resetTimeout=30000] - Cool-down in ms before probing.
   */
  constructor(options = {}) {
    this.failureThreshold = options.failureThreshold || 5;
    this.resetTimeout = options.resetTimeout || 30000;
    this.state = "CLOSED";
    this.failures = 0;
    this.lastFailure = null;
  }

  /**
   * Run `fn` through the breaker.
   * @param {() => Promise<*>} fn - The guarded operation.
   * @returns {Promise<*>} fn's result.
   * @throws Error("Circuit breaker is OPEN") while failing fast, or fn's error.
   */
  async execute(fn) {
    if (this.state === "OPEN") {
      const cooledDown = Date.now() - this.lastFailure > this.resetTimeout;
      if (!cooledDown) {
        throw new Error("Circuit breaker is OPEN");
      }
      // Cool-down elapsed: let one probe request through.
      this.state = "HALF_OPEN";
    }
    try {
      const result = await fn();
      this.onSuccess();
      return result;
    } catch (error) {
      this.onFailure();
      throw error;
    }
  }

  onSuccess() {
    // Any success fully closes the circuit and clears the failure count.
    this.failures = 0;
    this.state = "CLOSED";
  }

  onFailure() {
    this.failures += 1;
    this.lastFailure = Date.now();
    // Opens the circuit; also re-opens it after a failed HALF_OPEN probe,
    // since the count is already at/above the threshold then.
    if (this.failures >= this.failureThreshold) {
      this.state = "OPEN";
    }
  }
}
// Usage
const breaker = new CircuitBreaker({ failureThreshold: 3 });
async function callExternalService() {
return breaker.execute(async () => {
return await fetch("https://api.example.com/data");
});
}
Debounce and Throttle
What are they? Two patterns to control how often a function executes.
Why do we need them? Some events fire rapidly (keystrokes, scroll, resize). Without control:
- API gets hammered with requests on every keystroke
- Performance degrades from excessive function calls
- Resources are wasted on intermediate states
Debounce waits until activity stops before executing:
User typing: h-e-l-l-o (100ms gaps)
│
wait 300ms after last keystroke
│
▼
Execute once with "hello"
Throttle executes at most once per time interval:
Scroll events: ││││││││││ (many per second)
│
200ms throttle
│
▼ ▼ ▼ (execute every 200ms)
| Pattern | Behavior | Use Case |
|---|---|---|
| Debounce | Wait for silence | Search input, form validation |
| Throttle | Limit frequency | Scroll handlers, resize, API calls |
// Debounce: Wait until calls stop
/**
 * Debounce: delays invoking `fn` until `delay` ms have passed since the
 * last call; all callers from the same burst receive the final result.
 *
 * Fixes over the original: superseded calls no longer return promises
 * that NEVER settle (every pending caller is resolved with the single
 * invocation's result), and a rejecting `fn` now rejects those promises
 * instead of producing an unhandled rejection inside setTimeout.
 *
 * @param {(...args: *) => Promise<*>|*} fn - Function to debounce.
 * @param {number} delay - Quiet period in ms.
 * @returns {Function} Debounced wrapper returning a promise of fn's result.
 */
function debounce(fn, delay) {
  let timeoutId;
  let waiters = []; // resolvers/rejectors of every call in the current burst
  return function (...args) {
    clearTimeout(timeoutId);
    return new Promise((resolve, reject) => {
      waiters.push({ resolve, reject });
      timeoutId = setTimeout(async () => {
        const settled = waiters;
        waiters = [];
        try {
          const result = await fn.apply(this, args);
          for (const w of settled) w.resolve(result);
        } catch (error) {
          for (const w of settled) w.reject(error);
        }
      }, delay);
    });
  };
}
// Throttle: Limit call frequency
function throttle(fn, interval) {
let lastCall = 0;
let pending = null;
return async function (...args) {
const now = Date.now();
if (now - lastCall >= interval) {
lastCall = now;
return fn.apply(this, args);
}
// Queue the most recent call
if (!pending) {
pending = new Promise((resolve) => {
setTimeout(
async () => {
pending = null;
lastCall = Date.now();
resolve(await fn.apply(this, args));
},
interval - (now - lastCall),
);
});
}
return pending;
};
}Async Iterators
What are they? Iterators that yield values asynchronously, allowing you to use for await...of loops.
Why do we need them? When processing:
- Paginated APIs: Fetch pages on-demand, not all at once
- Streaming data: Process records as they arrive
- Large datasets: Avoid loading everything into memory
Async iterators let you write clean loops over async data sources:
// Instead of callback hell or promise chains:
for await (const record of fetchAllRecords()) {
await process(record);
}
Paginated API Example
/**
 * Async generator that walks a paginated API, yielding items one by one.
 *
 * Fix over the original: the stop condition was `nextPage !== null`, so a
 * page that omitted `nextPage` (undefined) looped forever fetching
 * `page = undefined`. `!= null` treats both null and undefined as "no
 * more pages".
 *
 * @param {(page: *) => Promise<{data: Array, nextPage: *}>} fetchPage
 *   Fetches one page; `nextPage` of null/undefined ends iteration.
 * @yields {*} Each item from every page, in order.
 */
async function* paginate(fetchPage) {
  let page = 1;
  let hasMore = true;
  while (hasMore) {
    const { data, nextPage } = await fetchPage(page);
    yield* data;
    hasMore = nextPage != null; // null OR undefined both mean "done"
    page = nextPage;
  }
}
// Usage
for await (const item of paginate(fetchPage)) {
console.log(item);
}
Async Generator with Concurrency
async function* mapConcurrent(iterable, fn, concurrency) {
const executing = new Map();
let index = 0;
for await (const item of iterable) {
const currentIndex = index++;
const promise = fn(item).then((result) => ({
index: currentIndex,
result,
}));
executing.set(currentIndex, promise);
if (executing.size >= concurrency) {
const { index: doneIndex, result } = await Promise.race(
executing.values(),
);
executing.delete(doneIndex);
yield result;
}
}
while (executing.size > 0) {
const { index: doneIndex, result } = await Promise.race(executing.values());
executing.delete(doneIndex);
yield result;
}
}Summary
Mastering async control flow is essential:
| Pattern | Use Case |
|---|---|
| Promise.all | Parallel, all must succeed |
| Promise.allSettled | Parallel, get all results |
| Promise.race | First to complete |
| Promise.any | First to succeed |
| for...of + await | Sequential processing |
| Concurrency limiter | Parallel with resource limits |
Advanced patterns:
| Pattern | Purpose |
|---|---|
| Retry with backoff | Handle transient failures |
| Circuit breaker | Fail fast when service is down |
| Debounce | Batch rapid calls |
| Throttle | Limit call frequency |
| Async iterators | Stream/paginate large datasets |
Note
The key to clean async code is choosing the right pattern for the situation. Sequential when order matters, parallel for independent operations, and concurrency control when resources are limited.