JavaScript Performance Optimization: From Basics to Advanced
JavaScript performance can make or break the user experience. Let's dive into practical techniques for making JavaScript applications measurably faster, from memory management to DOM updates, asynchronous work, and monitoring.
Memory Management and Garbage Collection
Avoiding Memory Leaks
// Bad: Creates memory leaks
function createLeakyHandler() {
  const largeData = new Array(1000000).fill('data');

  document.addEventListener('click', function() {
    // largeData is captured in closure and never released
    console.log('Clicked');
  });
}

// Good: Clean up references
function createCleanHandler() {
  const handler = function() {
    console.log('Clicked');
  };

  document.addEventListener('click', handler);

  // Clean up when needed
  return () => {
    document.removeEventListener('click', handler);
  };
}
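A quick usage sketch: hold on to the returned cleanup function and call it when the handler is no longer needed (for example, when the owning component is torn down).

// Usage sketch: keep a reference to the cleanup function
const removeClickHandler = createCleanHandler();

// ...later, when the listener should go away (e.g. on component unmount)
removeClickHandler();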
WeakMap for Memory-Efficient Caching
const cache = new WeakMap();

class DataProcessor {
  process(data) {
    if (cache.has(data)) {
      return cache.get(data);
    }

    const result = expensiveOperation(data);
    cache.set(data, result);
    return result;
  }
}

// When data objects are garbage collected,
// cache entries are automatically removed
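A short sketch of how the cache behaves in practice. Here expensiveOperation is a made-up stand-in for any costly computation, and the object shape is hypothetical:

// Hypothetical stand-in for a costly computation
function expensiveOperation(data) {
  return data.values.reduce((sum, n) => sum + n, 0);
}

const processor = new DataProcessor();
let report = { values: [1, 2, 3] };

processor.process(report); // computed and stored in the WeakMap
processor.process(report); // served from the cache

// Once no other references to `report` exist, both the object and its
// cache entry become eligible for garbage collection
report = null;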
DOM Optimization Techniques
Batch DOM Updates
// Bad: Multiple reflows
function updateListBad(items) {
  const list = document.getElementById('list');

  items.forEach(item => {
    const li = document.createElement('li');
    li.textContent = item.name;
    list.appendChild(li); // Each insertion touches the live DOM and can trigger a reflow
  });
}

// Good: Single reflow
function updateListGood(items) {
  const fragment = document.createDocumentFragment();

  items.forEach(item => {
    const li = document.createElement('li');
    li.textContent = item.name;
    fragment.appendChild(li);
  });

  // One insertion into the live DOM, so at most one reflow
  document.getElementById('list').appendChild(fragment);
}
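If the list is rebuilt on every update rather than appended to, the same fragment pattern pairs well with replaceChildren, which swaps all children in a single operation. A minimal sketch, reusing the item shape from above:

function replaceListGood(items) {
  const fragment = document.createDocumentFragment();

  items.forEach(item => {
    const li = document.createElement('li');
    li.textContent = item.name;
    fragment.appendChild(li);
  });

  // Clears the old rows and inserts the new ones in one pass
  document.getElementById('list').replaceChildren(fragment);
}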
Virtual Scrolling for Large Lists
class VirtualList {
  constructor(container, items, itemHeight = 50) {
    this.container = container;
    this.items = items;
    this.itemHeight = itemHeight;
    this.visibleCount = Math.ceil(container.clientHeight / itemHeight);
    this.startIndex = 0;

    // An inner element provides the full scroll height; the container itself
    // keeps its fixed height and does the scrolling (overflow-y: auto in CSS)
    this.content = document.createElement('div');
    this.content.style.position = 'relative';
    this.content.style.height = `${this.items.length * this.itemHeight}px`;
    this.container.appendChild(this.content);

    this.render();
    this.container.addEventListener('scroll', this.handleScroll.bind(this));
  }

  handleScroll() {
    const scrollTop = this.container.scrollTop;
    const newStartIndex = Math.floor(scrollTop / this.itemHeight);

    if (newStartIndex !== this.startIndex) {
      this.startIndex = newStartIndex;
      this.render();
    }
  }

  render() {
    const endIndex = Math.min(
      this.startIndex + this.visibleCount + 1,
      this.items.length
    );
    const visibleItems = this.items.slice(this.startIndex, endIndex);

    // Only the visible slice exists in the DOM; absolute positioning keeps
    // each row at its correct offset within the full-height inner element
    this.content.innerHTML = visibleItems
      .map((item, index) => `
        <div style="
          position: absolute;
          top: ${(this.startIndex + index) * this.itemHeight}px;
          height: ${this.itemHeight}px;
        ">
          ${item.name}
        </div>
      `).join('');
  }
}
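A minimal usage sketch, assuming the container is styled with a fixed height and overflow-y: auto (the element id and item data here are made up):

// Assumes #feed has a fixed height and overflow-y: auto in CSS
const feed = document.getElementById('feed');
const rows = Array.from({ length: 10000 }, (_, i) => ({ name: `Row ${i}` }));
new VirtualList(feed, rows, 40);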
Asynchronous Programming Optimization
Debouncing and Throttling
// Debounce: Execute after delay, reset timer on new calls
function debounce(func, delay) {
  let timeoutId;

  return function(...args) {
    clearTimeout(timeoutId);
    timeoutId = setTimeout(() => func.apply(this, args), delay);
  };
}

// Throttle: Execute at most once per interval
function throttle(func, interval) {
  let lastCall = 0;

  return function(...args) {
    const now = Date.now();
    if (now - lastCall >= interval) {
      lastCall = now;
      func.apply(this, args);
    }
  };
}

// Usage
const debouncedSearch = debounce(searchAPI, 300);
const throttledScroll = throttle(handleScroll, 16); // ~60fps
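Wiring these up usually looks something like the following; searchAPI and handleScroll stand in for your own functions, and the input id is made up:

// Debounce user input, throttle high-frequency scroll events
document.getElementById('search-input')
  .addEventListener('input', e => debouncedSearch(e.target.value));

window.addEventListener('scroll', throttledScroll);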
Efficient Promise Handling
// Bad: Sequential execution
async function fetchUserDataBad(userIds) {
  const users = [];

  for (const id of userIds) {
    const user = await fetchUser(id); // Waits for each request
    users.push(user);
  }

  return users;
}

// Good: Parallel execution
async function fetchUserDataGood(userIds) {
  const promises = userIds.map(id => fetchUser(id));
  return Promise.all(promises);
}

// Better: Controlled concurrency
async function fetchUserDataBetter(userIds, concurrency = 5) {
  const results = [];

  for (let i = 0; i < userIds.length; i += concurrency) {
    const batch = userIds.slice(i, i + concurrency);
    const batchResults = await Promise.all(
      batch.map(id => fetchUser(id))
    );
    results.push(...batchResults);
  }

  return results;
}
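One caveat: Promise.all rejects as soon as any single request fails. When partial results are acceptable, Promise.allSettled keeps whatever succeeded. A sketch, using the same fetchUser:

// Tolerates individual failures instead of rejecting the whole batch
async function fetchUserDataSettled(userIds) {
  const settled = await Promise.allSettled(userIds.map(id => fetchUser(id)));
  return settled
    .filter(result => result.status === 'fulfilled')
    .map(result => result.value);
}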
Modern JavaScript Optimizations
Using Web Workers for Heavy Computations
// main.js
const worker = new Worker('worker.js');

// Note: this simple wrapper assumes only one request is in flight at a time,
// since the handlers are reassigned on every call
function processLargeDataset(data) {
  return new Promise((resolve, reject) => {
    worker.postMessage(data);

    worker.onmessage = (e) => {
      resolve(e.data);
    };

    worker.onerror = (error) => {
      reject(error);
    };
  });
}

// worker.js
self.onmessage = function(e) {
  const data = e.data;

  // Perform heavy computation off the main thread
  const result = data.map(item => {
    // Complex calculations that would otherwise block the main thread
    return heavyComputation(item);
  });

  self.postMessage(result);
};
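On the calling side it might look like this, where largeDataset stands in for whatever array needs crunching:

// Usage sketch: the main thread stays responsive while the worker computes
processLargeDataset(largeDataset)
  .then(result => console.log('Processed', result.length, 'items'))
  .catch(err => console.error('Worker failed:', err));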
Intersection Observer for Lazy Loading
class LazyLoader {
  constructor() {
    this.observer = new IntersectionObserver(
      this.handleIntersection.bind(this),
      { threshold: 0.1 }
    );
  }

  observe(elements) {
    elements.forEach(el => this.observer.observe(el));
  }

  handleIntersection(entries) {
    entries.forEach(entry => {
      if (entry.isIntersecting) {
        const img = entry.target;
        img.src = img.dataset.src;
        img.classList.add('loaded');
        this.observer.unobserve(img);
      }
    });
  }
}

// Usage
const lazyLoader = new LazyLoader();
const images = document.querySelectorAll('img[data-src]');
lazyLoader.observe(images);
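This assumes each image is rendered with its real URL in a data-src attribute (and, optionally, a lightweight placeholder in src), so the browser fetches nothing until the element approaches the viewport.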
Performance Monitoring
Custom Performance Metrics
class PerformanceMonitor {
  constructor() {
    this.metrics = new Map();
  }

  startTimer(name) {
    this.metrics.set(name, performance.now());
  }

  endTimer(name) {
    const startTime = this.metrics.get(name);
    if (startTime !== undefined) {
      const duration = performance.now() - startTime;
      console.log(`${name}: ${duration.toFixed(2)}ms`);
      this.metrics.delete(name);
      return duration;
    }
  }

  measureFunction(fn, name) {
    const monitor = this;
    // Use a regular function so the wrapper passes the caller's `this` through to fn
    return function(...args) {
      monitor.startTimer(name);
      const result = fn.apply(this, args);
      monitor.endTimer(name);
      return result;
    };
  }
}

// Usage
const monitor = new PerformanceMonitor();
const optimizedFunction = monitor.measureFunction(expensiveFunction, 'expensive-op');
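Note that measureFunction as written only times synchronous work. For asynchronous operations, the timer can wrap the await directly. A minimal sketch using the same monitor (the URL parameter is hypothetical):

// Times an async operation end to end, including the awaited network round trip
async function measuredFetch(url) {
  monitor.startTimer('fetch');
  try {
    return await fetch(url);
  } finally {
    monitor.endTimer('fetch');
  }
}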
Performance optimization is an ongoing process. Profile your code, measure the impact of changes, and always optimize based on real user data rather than assumptions.