Concurrency Patterns in JavaScript
Concurrency patterns help manage multiple operations efficiently. This article covers parallel execution, sequential execution, and advanced concurrency techniques.
Introduction
Concurrency patterns enable:
- Parallel execution of independent tasks
- Sequential execution of dependent tasks
- Efficient resource utilization
- Responsive applications
- Complex workflow management
Understanding concurrency helps you:
- Build efficient applications
- Manage complex workflows
- Avoid race conditions
- Optimize performance
Parallel Execution
Promise.all() - All or Nothing
// Execute all promises in parallel
// Fire all three independent requests at once; `await` resolves only
// when every request has completed (fail-fast if any one rejects).
async function parallelExecution() {
  const endpoints = ['/api/users', '/api/posts', '/api/comments'];
  const [result1, result2, result3] = await Promise.all(
    endpoints.map((url) => fetch(url).then((r) => r.json()))
  );
  return { result1, result2, result3 };
}
// If any promise rejects, all fail
Promise.allSettled() - All Results
// Execute all promises, get all results
// Run all requests in parallel and collect every outcome — unlike
// Promise.all, a single rejection does not discard the other results.
async function allResults() {
  const endpoints = ['/api/users', '/api/posts', '/api/comments'];
  const outcomes = await Promise.allSettled(
    endpoints.map((url) => fetch(url).then((r) => r.json()))
  );
  const successful = [];
  const failed = [];
  for (const outcome of outcomes) {
    if (outcome.status === 'fulfilled') {
      successful.push(outcome.value);
    } else {
      failed.push(outcome.reason);
    }
  }
  return { successful, failed };
}
Promise.any() - First Success
// Execute all promises, return first success
/**
 * Race several equivalent endpoints and return the first one to succeed.
 * Note: resolves with the raw Response object — callers still parse it.
 *
 * @returns {Promise<Response>} the first fulfilled fetch
 * @throws {Error} when every endpoint fails (the AggregateError from
 *   Promise.any is preserved as `cause`)
 */
async function firstSuccess() {
  try {
    // Promise.any ignores rejections until one promise fulfills;
    // it only rejects (with AggregateError) if ALL of them fail.
    const result = await Promise.any([
      fetch('https://api1.example.com/data'),
      fetch('https://api2.example.com/data'),
      fetch('https://api3.example.com/data')
    ]);
    return result;
  } catch (error) {
    console.error('All promises failed');
    // Don't swallow the failure: the original fell through and
    // resolved with `undefined`, silently handing callers a bogus
    // "result". Rethrow with the aggregate failure attached.
    throw new Error('All endpoints failed', { cause: error });
  }
}
Sequential Execution
Sequential with for…of
// Execute promises sequentially
// Process the endpoints strictly one after another: each request
// starts only after the previous response has been fully parsed.
async function sequential() {
  const endpoints = ['/api/users', '/api/posts', '/api/comments'];
  const results = [];
  for (const endpoint of endpoints) {
    const data = await fetch(endpoint).then((r) => r.json());
    results.push(data);
  }
  return results;
}
Sequential with reduce()
// ✅ Good: Sequential execution with reduce
// Sequential execution expressed as a reduce: each step awaits the
// accumulated promise chain before issuing its own request, so the
// URLs are fetched strictly in order.
async function sequentialReduce(urls) {
  const step = async (chainSoFar, url) => {
    const collected = await chainSoFar;
    const response = await fetch(url);
    const data = await response.json();
    return [...collected, data];
  };
  return urls.reduce(step, Promise.resolve([]));
}
// Usage
const urls = ['/api/users', '/api/posts', '/api/comments'];
const results = await sequentialReduce(urls);
Mixed Patterns
Sequential with Parallel Groups
// ✅ Good: Sequential groups of parallel operations
// Fetch the user first (everything else depends on user.id), then
// fan out: per-user data in parallel, then one comments request per
// post, also in parallel.
async function mixedExecution() {
  // Step 1: sequential — the user record gates the rest.
  const user = await fetch('/api/user').then((r) => r.json());
  // Step 2: posts and followers are independent — run them together.
  const [posts, followers] = await Promise.all([
    fetch(`/api/users/${user.id}/posts`).then((r) => r.json()),
    fetch(`/api/users/${user.id}/followers`).then((r) => r.json())
  ]);
  // Step 3: one comments request per post, all in flight at once.
  const commentRequests = posts.map((post) =>
    fetch(`/api/posts/${post.id}/comments`).then((r) => r.json())
  );
  const comments = await Promise.all(commentRequests);
  return { user, posts, followers, comments };
}
Advanced Concurrency Patterns
Semaphore - Limit Concurrent Operations
// ✅ Good: Limit concurrent operations
/**
 * Counting semaphore: allows at most `max` concurrent holders.
 * acquire() resolves immediately while permits remain, otherwise the
 * caller waits; release() hands its permit directly to the next waiter.
 *
 * The original implementation decremented `current` in release() and
 * re-incremented it when the woken waiter resumed (a microtask later).
 * A synchronous acquire() in that window saw `current < max` and was
 * admitted alongside the waiter, exceeding the limit. Direct hand-off
 * closes that window.
 */
class Semaphore {
  constructor(max) {
    this.max = max;      // maximum concurrent holders
    this.current = 0;    // permits currently held
    this.queue = [];     // resolvers of parked acquire() calls
  }
  async acquire() {
    if (this.current < this.max) {
      this.current++;
      return;
    }
    // Park until release() transfers a permit to us. `current` is NOT
    // touched here: the releaser's permit is handed over as-is.
    await new Promise(resolve => this.queue.push(resolve));
  }
  release() {
    const next = this.queue.shift();
    if (next) {
      next(); // hand the permit straight to the next waiter
    } else {
      this.current--;
    }
  }
}
// Usage
const semaphore = new Semaphore(3); // Max 3 concurrent
// Acquire a permit from the module-level `semaphore` before fetching,
// so at most `semaphore.max` requests are in flight at once.
async function limitedFetch(url) {
  await semaphore.acquire(); // waits (without blocking) until a slot frees
  try {
    const response = await fetch(url);
    return response.json();
  } finally {
    semaphore.release(); // return the permit even if fetch throws
  }
}
// Fetch 10 URLs with max 3 concurrent
const urls = Array.from({ length: 10 }, (_, i) => `/api/item/${i}`);
const results = await Promise.all(urls.map(limitedFetch));
Queue - Process Items Sequentially
// ✅ Good: Queue for sequential processing
/**
 * FIFO task queue: tasks added via add() run strictly one at a time,
 * in insertion order. A failing task rejects its add() promise but
 * must not wedge the queue; remaining tasks resume on the next add().
 */
class Queue {
  constructor() {
    this.items = [];         // pending task functions
    this.processing = false; // true while the drain loop is running
  }
  async add(fn) {
    this.items.push(fn);
    await this.process();
  }
  async process() {
    // Only one drain loop at a time; nothing to do on an empty queue.
    if (this.processing || this.items.length === 0) return;
    this.processing = true;
    try {
      while (this.items.length > 0) {
        const fn = this.items.shift();
        await fn();
      }
    } finally {
      // Reset even when a task throws — the original left
      // `processing === true` on error, permanently disabling
      // the queue for all subsequent add() calls.
      this.processing = false;
    }
  }
}
// Usage
const queue = new Queue();
queue.add(async () => {
console.log('Task 1');
await new Promise(resolve => setTimeout(resolve, 1000));
});
queue.add(async () => {
console.log('Task 2');
await new Promise(resolve => setTimeout(resolve, 1000));
});
queue.add(async () => {
console.log('Task 3');
});
Pool - Reusable Worker Pool
// ✅ Good: Worker pool for concurrent tasks
/**
 * Fixed-size async worker pool. run(fn) enqueues the task and resolves
 * with its result; at most `size` tasks execute at any given moment.
 */
class Pool {
  constructor(size) {
    this.size = size;
    this.workers = new Array(size).fill(null); // null = slot free
    this.queue = [];      // pending { fn, resolve, reject } entries
    this.activeCount = 0; // tasks currently executing
  }
  async run(fn) {
    return new Promise((resolve, reject) => {
      this.queue.push({ fn, resolve, reject });
      this.process();
    });
  }
  async process() {
    // Need both a free slot and a queued task; otherwise do nothing —
    // a later run() or a finishing task will call process() again.
    const slot = this.workers.indexOf(null);
    if (slot === -1 || this.queue.length === 0) return;
    const { fn, resolve, reject } = this.queue.shift();
    this.workers[slot] = true; // mark the slot busy
    this.activeCount++;
    try {
      resolve(await fn());
    } catch (error) {
      reject(error);
    } finally {
      this.workers[slot] = null;
      this.activeCount--;
      this.process(); // pull the next queued task, if any
    }
  }
}
// Usage
const pool = new Pool(3);
const tasks = Array.from({ length: 10 }, (_, i) => () =>
new Promise(resolve => {
console.log(`Task ${i} started`);
setTimeout(() => {
console.log(`Task ${i} completed`);
resolve(i);
}, 1000);
})
);
const results = await Promise.all(tasks.map(task => pool.run(task)));
Race Conditions and Mutual Exclusion
Detecting Race Conditions
// ❌ Bad: Race condition
let counter = 0;
// Intentionally broken, to demonstrate a race: the read of the
// module-level `counter` and the write-back are separated by an await,
// so every concurrent call snapshots the same initial value and all
// but one of the updates are lost.
async function increment() {
  const current = counter; // stale snapshot under concurrency
  await new Promise(resolve => setTimeout(resolve, 10));
  counter = current + 1; // clobbers any increments made meanwhile
}
// Run 10 increments concurrently
Promise.all(Array(10).fill().map(() => increment()));
// Result: counter = 1 (should be 10)
Mutex - Mutual Exclusion
// ✅ Good: Mutex to prevent race conditions
/**
 * Async mutual-exclusion lock. Only one caller holds the lock at a
 * time; unlock() passes ownership directly to the oldest waiter.
 */
class Mutex {
  constructor() {
    this.locked = false; // is the lock currently held?
    this.queue = [];     // resolvers of parked lock() calls
  }
  async lock() {
    if (this.locked) {
      // Held by someone else: park until unlock() wakes us.
      // Ownership transfers directly, so `locked` stays true.
      await new Promise(resolve => this.queue.push(resolve));
      return;
    }
    this.locked = true;
  }
  unlock() {
    const next = this.queue.shift();
    if (next) {
      next(); // hand the lock straight to the next waiter
    } else {
      this.locked = false;
    }
  }
}
// Usage
const mutex = new Mutex();
let counter = 0;
// Serialize the read-modify-write through the module-level mutex so
// concurrent calls cannot interleave between snapshot and write-back.
async function increment() {
  await mutex.lock();
  try {
    const snapshot = counter;
    await new Promise(resolve => setTimeout(resolve, 10));
    counter = snapshot + 1;
  } finally {
    // Always release, even if the critical section throws.
    mutex.unlock();
  }
}
// Run 10 increments concurrently
await Promise.all(Array(10).fill().map(() => increment()));
console.log(counter); // 10 (correct!)
Practical Patterns
Retry with Exponential Backoff
// ✅ Good: Retry with backoff
/**
 * Retry an async operation with exponential backoff.
 *
 * @param {() => Promise<*>} fn - Operation to attempt.
 * @param {number} [maxRetries=3] - Maximum number of attempts (>= 1).
 * @param {number} [baseDelay=1000] - Delay before the 2nd attempt, in
 *   milliseconds; doubles after each failed attempt.
 * @returns {Promise<*>} Result of the first successful attempt.
 * @throws {RangeError} If maxRetries < 1 (the original silently
 *   resolved with undefined without ever calling fn).
 * @throws The last error from fn if every attempt fails.
 */
async function retryWithBackoff(fn, maxRetries = 3, baseDelay = 1000) {
  if (maxRetries < 1) {
    throw new RangeError('maxRetries must be at least 1');
  }
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      return await fn();
    } catch (error) {
      // Out of attempts: surface the last failure to the caller.
      if (attempt === maxRetries - 1) throw error;
      const delay = baseDelay * Math.pow(2, attempt);
      console.log(`Attempt ${attempt + 1} failed, retrying in ${delay}ms`);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
}
// Usage
const data = await retryWithBackoff(
() => fetch('/api/data').then(r => r.json()),
3,
1000
);
Timeout Wrapper
// ✅ Good: Add timeout to any promise
/**
 * Race a promise against a timeout.
 *
 * @param {Promise<*>} promise - The operation to guard.
 * @param {number} timeoutMs - Milliseconds before rejecting.
 * @returns {Promise<*>} Settles like `promise`, or rejects with
 *   Error('Timeout') if the deadline passes first.
 */
function withTimeout(promise, timeoutMs) {
  let timer;
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error('Timeout')), timeoutMs);
  });
  // Clear the timer however the race settles — the original leaked it,
  // leaving a pending timeout that keeps the Node event loop alive
  // even after the promise won the race.
  return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
}
// Usage
try {
const data = await withTimeout(
fetch('/api/data').then(r => r.json()),
5000
);
} catch (error) {
console.error('Request failed or timed out');
}
Debounce and Throttle
// ✅ Good: Debounce async operations
/**
 * Debounce an async function: rapid calls collapse into one execution
 * of `fn` with the LAST call's arguments, `delay` ms after the last
 * call. Every caller's promise settles with that final result — the
 * original left superseded callers' promises pending forever, so any
 * code awaiting an earlier call would hang.
 *
 * @param {Function} fn - Function to debounce (may return a promise).
 * @param {number} delay - Quiet period in milliseconds.
 * @returns {Function} Debounced wrapper returning a promise.
 */
function debounce(fn, delay) {
  let timeoutId;
  let pending = []; // settlers of every call in the current burst
  return function (...args) {
    clearTimeout(timeoutId);
    return new Promise((resolve, reject) => {
      pending.push({ resolve, reject });
      timeoutId = setTimeout(() => {
        const waiters = pending;
        pending = [];
        // Settle every caller from the burst with the one real result.
        Promise.resolve(fn(...args)).then(
          (value) => waiters.forEach((w) => w.resolve(value)),
          (error) => waiters.forEach((w) => w.reject(error))
        );
      }, delay);
    });
  };
}
// Usage
const debouncedSearch = debounce(async (query) => {
const response = await fetch(`/api/search?q=${query}`);
return response.json();
}, 300);
// Call multiple times, only last call executes
await debouncedSearch('javascript');
Best Practices
-
Use Promise.all() for independent operations:
// ✅ Good const [a, b, c] = await Promise.all([op1(), op2(), op3()]); -
Use for…of for sequential operations:
// ✅ Good for (const item of items) { await process(item); } -
Limit concurrent operations:
// ✅ Good const semaphore = new Semaphore(3); -
Handle race conditions:
// ✅ Good const mutex = new Mutex();
Common Mistakes
-
Using for…of for independent operations:
// ❌ Bad - sequential when could be parallel for (const url of urls) { await fetch(url); } // ✅ Good - parallel await Promise.all(urls.map(url => fetch(url))); -
Not handling race conditions:
// ❌ Bad - race condition counter++; // ✅ Good - use mutex await mutex.lock(); counter++; mutex.unlock(); -
Ignoring errors in Promise.all():
// ❌ Bad - fails on first error await Promise.all(promises); // ✅ Good - handle all results await Promise.allSettled(promises);
Summary
Concurrency patterns are essential for efficient applications. Key takeaways:
- Use Promise.all() for parallel independent operations
- Use for…of for sequential dependent operations
- Implement semaphores to limit concurrency
- Use mutexes to prevent race conditions
- Combine patterns for complex workflows
- Always handle errors properly
Related Resources
Next Steps
- Learn about Rate Limiting and Throttling
- Explore Debouncing and Memoization
- Study Generators and Iterators
- Practice with concurrent patterns
- Build efficient async applications
Comments