But the path to truly high-performance web apps is littered with pitfalls: mistakes that can drag down your JavaScript without you even realizing it, and tiny oversights that bloat your code and surreptitiously sap speed bit by bit.
// globals.js
var color = 'blue';
function printColor() {
console.log(color);
}
printColor(); // Prints 'blue'
// script2.js
var color = 'red';
printColor(); // Prints 'red'!
Because color is global, script2.js overwrote it! To fix this, declare variables locally inside functions whenever possible:
function printColor() {
var color = 'blue'; // local variable
console.log(color);
}
printColor(); // Prints 'blue'
Now, changes in other scripts won't affect printColor.
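Modern code can sidestep accidental globals entirely: let and const are block-scoped, and every ES module gets its own scope. A minimal sketch, with illustrative file names:
// colors.mjs
const color = 'blue'; // module-scoped, not global
export function printColor() {
  console.log(color);
}
// main.mjs
import { printColor } from './colors.mjs';
const color = 'red'; // local to this module; no clash
printColor(); // Still prints 'blue'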
Bad:
const ul = document.getElementById('list');
for (let i = 0; i < 10; i++) {
const li = document.createElement('li');
li.textContent = i;
ul.appendChild(li);
}
This appends list items to the DOM one by one. It is better to build a string first, then set .innerHTML once:
Good:
const ul = document.getElementById('list');
let html = '';
for (let i = 0; i < 10; i++) {
html += `<li>${i}</li>`;
}
ul.innerHTML = html;
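A DocumentFragment achieves the same batching without building HTML strings, and avoids the injection risks that come with .innerHTML; a sketch:
const ul = document.getElementById('list');
const fragment = document.createDocumentFragment();
for (let i = 0; i < 10; i++) {
  const li = document.createElement('li');
  li.textContent = i;
  fragment.appendChild(li);
}
// One DOM insertion instead of ten
ul.appendChild(fragment);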
Bad:
// New message received
const msg = `<div>${messageText}</div>`;
chatLog.insertAdjacentHTML('beforeend', msg);
This naively inserts on each message. Better to throttle updates:
Good:
let pendingHTML = '';
let flushScheduled = false;
const throttleTime = 100; // ms
// New message received
pendingHTML += `<div>${messageText}</div>`;
// Schedule at most one DOM update per throttle window
if (!flushScheduled) {
  flushScheduled = true;
  setTimeout(() => {
    chatLog.insertAdjacentHTML('beforeend', pendingHTML);
    pendingHTML = '';
    flushScheduled = false;
  }, throttleTime);
}
Bad:
const rows = document.querySelectorAll('table tr');
rows.forEach(row => {
const deleteBtn = row.querySelector('.delete');
deleteBtn.addEventListener('click', handleDelete);
});
This adds a listener to each delete button. Better to use event delegation:
Good:
const table = document.querySelector('table');
table.addEventListener('click', e => {
if (e.target.classList.contains('delete')) {
handleDelete(e);
}
});
Now, there's a single listener on the <table>. Less memory overhead.
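Note that e.target is the innermost element that was clicked; if the delete button contains an icon or other child nodes, matching with closest() is more robust. A sketch:
table.addEventListener('click', e => {
  // Matches the button itself or any element inside it
  const btn = e.target.closest('.delete');
  if (btn) {
    handleDelete(e);
  }
});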
String concatenation inside loops is another common sink.
Bad:
let html = '';
for (let i = 0; i < 10; i++) {
html += '<div>' + i + '</div>';
}
Good:
const parts = [];
for (let i = 0; i < 10; i++) {
parts.push('<div>', i, '</div>');
}
const html = parts.join('');
Building an array minimizes intermediate strings; .join() concatenates once at the end.
Bad:
const items = [/*...*/];
for (let i = 0; i < items.length; i++) {
// ...
}
Redundantly reading .length on every iteration inhibits optimizations. Better:
Good:
const items = [/*...*/];
const len = items.length;
for (let i = 0; i < len; i++) {
// ...
}
Caching length improves speed. Other optimizations include hoisting invariants out of loops, simplifying termination conditions, and avoiding expensive operations inside iterations.
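For example, hoisting an invariant computation out of the loop (the data and the getTaxRate helper here are hypothetical):
const orders = [{ amount: 10 }, { amount: 20 }];
const getTaxRate = region => (region === 'CA' ? 0.0725 : 0.05);
const totals = [];
// Bad: calls getTaxRate on every iteration
for (let i = 0; i < orders.length; i++) {
  totals.push(orders[i].amount * getTaxRate('CA'));
}
// Good: hoist the invariant out of the loop
const rate = getTaxRate('CA');
for (let i = 0, len = orders.length; i < len; i++) {
  totals.push(orders[i].amount * rate);
}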
Bad:
const fs = require('fs');
const data = fs.readFileSync('file.json'); // blocks!
This stalls execution while reading from the disk. Instead, use callbacks or promises:
Good:
fs.readFile('file.json', (err, data) => {
// ...
});
Now, the event loop continues while the file is read. For complex flows, async/await simplifies asynchronous logic. Avoid synchronous operations to prevent blocking.
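For instance, the same read with async/await via fs.promises; a minimal sketch:
const fs = require('fs');
async function loadConfig() {
  try {
    const data = await fs.promises.readFile('file.json', 'utf8');
    return JSON.parse(data);
  } catch (err) {
    console.error('Failed to read file.json:', err);
    return null;
  }
}
loadConfig().then(config => {
  // ...use config...
});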
Long-running loops on the main thread freeze everything else.
Bad:
function countPrimes(max) {
// Unoptimized loop
for (let i = 0; i <= max; i++) {
// ...check if prime...
}
}
countPrimes(1000000); // Long running!
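One common remedy, sketched here with an illustrative file name, is to move CPU-heavy work off the main thread with a Web Worker so the page stays responsive:
// main.js
const worker = new Worker('primes-worker.js');
worker.postMessage(1000000);
worker.onmessage = e => {
  console.log(`Found ${e.data} primes`); // UI never froze
};
// primes-worker.js
onmessage = e => {
  const max = e.data;
  let count = 0;
  for (let n = 2; n <= max; n++) {
    let isPrime = true;
    for (let d = 2; d * d <= n; d++) {
      if (n % d === 0) { isPrime = false; break; }
    }
    if (isPrime) count++;
  }
  postMessage(count);
};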
Bad:
try {
// ...
} catch (err) {
console.error(err); // just logging
}
This captures errors but takes no corrective action. Errors that are merely logged and swallowed often lead to memory leaks or data corruption.
Better:
try {
// ...
} catch (err) {
console.error(err);
// Emit error event
emitError(err);
// Nullify variables
obj = null;
// Inform user
showErrorNotice();
}
Logging isn't enough! Clean up artifacts, notify users, and consider recovery options. Use tools like Sentry to monitor errors in production. Handle all errors explicitly.
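As a last line of defense, register global handlers for anything that slips through (browser APIs shown; the reportToMonitoring helper is hypothetical):
// Catch uncaught exceptions
window.addEventListener('error', e => {
  reportToMonitoring(e.error);
});
// Catch unhandled promise rejections
window.addEventListener('unhandledrejection', e => {
  reportToMonitoring(e.reason);
});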
Closures are another subtle source of leaks.
Bad:
function processData() {
const data = [];
// Use closure to accumulate data
return function() {
data.push(getData());
}
}
const processor = processData();
// Long running...keeps holding reference to growing data array!
Because the returned function closes over data, the array can never be garbage collected while processor is alive, so it grows without bound.
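One fix, as a minimal sketch, is to let callers flush the accumulated state so it can be collected:
function processData() {
  let data = [];
  return {
    add(item) {
      data.push(item);
    },
    flush() {
      const batch = data;
      data = []; // release the old array for garbage collection
      return batch;
    }
  };
}
const processor = processData();
processor.add('order-1');
const batch = processor.flush(); // previous array is now collectible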
Bad:
import _ from 'lodash';
import moment from 'moment';
import validator from 'validator';
// etc...
Importing entire libraries for minor utilities. Better to cherry-pick helpers as needed:
Good:
import cloneDeep from 'lodash/cloneDeep';
import { format } from 'date-fns';
import { isEmail } from 'validator';
Only import what you need. Review dependencies regularly to prune unused ones. Keep bundles lean and minimize dependencies.
Bad:
function generateReport() {
// Perform expensive processing
// to generate report data...
}
generateReport(); // Computes
generateReport(); // Computes again!
Since inputs haven't changed, the report could be cached:
Good:
let cachedReport;
function generateReport() {
if (cachedReport) {
return cachedReport;
}
cachedReport = computeReport(); // expensive processing (computeReport is hypothetical)
return cachedReport;
}
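If the report depends on its inputs, a small memoization helper keyed by the arguments generalizes the pattern; a sketch, assuming the inputs are simple and serializable:
function memoize(fn) {
  const cache = new Map();
  return function (...args) {
    const key = JSON.stringify(args);
    if (!cache.has(key)) {
      cache.set(key, fn.apply(this, args));
    }
    return cache.get(key);
  };
}
const generateReportFor = memoize(year => {
  // ...expensive processing for one year...
  return `report for ${year}`;
});
generateReportFor(2023); // computes
generateReportFor(2023); // served from cache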
Bad:
// No indexing
db.find({name: 'John', age: 35});
// Unnecessary fields
db.find({first: 'John', last:'Doe', email:'[email protected]'}, {first: 1, last: 1});
// Too many separate queries
for (let id of ids) {
const user = db.find({id});
}
This fails to utilize indexes, retrieves unused fields, and executes excessive queries.
Good:
// Use index on 'name'
db.find({name: 'John'}).hint({name: 1});
// Only get 'email' field
db.find({first: 'John'}, {email: 1});
// Get users in one query
const users = db.find({
id: {$in: ids}
});
Analyze query plans with explain(). Create indexes strategically. Avoid multiple piecemeal queries. Optimize datastore interactions.
Bad:
function getUser() {
return fetch('/user')
.then(r => r.json());
}
getUser();
If fetch rejects, the exception goes unnoticed. Instead:
Good:
function getUser() {
return fetch('/user')
.then(r => r.json())
.catch(err => console.error(err));
}
getUser();
Chaining .catch() handles rejections properly.
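Note also that fetch only rejects on network failures; HTTP errors like 404 resolve normally, so checking response.ok is worth adding. The same logic with async/await:
async function getUser() {
  try {
    const res = await fetch('/user');
    if (!res.ok) {
      throw new Error(`HTTP ${res.status}`);
    }
    return await res.json();
  } catch (err) {
    console.error(err);
    return null; // or rethrow, depending on the caller
  }
}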
Bad:
const data = http.getSync('//example.com/data'); // blocks! (hypothetical synchronous API)
This stalls the event loop during the request. Instead, use callbacks:
Good:
http.get('http://example.com/data', res => {
// ...
});
// or with fetch
fetch('//example.com/data')
.then(res => res.json())
.then(data => {
// ...
});
Async network requests allow other processing while waiting for responses. Avoid synchronous network calls.
Bad:
const contents = fs.readFileSync('file.txt'); // blocks!
This stalls execution during disk I/O. Instead:
Good:
fs.readFile('file.txt', (err, contents) => {
// ...
});
// or promises
fs.promises.readFile('file.txt')
.then(contents => {
// ...
});
This allows the event loop to continue during the file read.
function processFiles(files) {
for (let file of files) {
fs.createReadStream(file)
.pipe(/*...*/);
}
}
Avoid synchronous file operations. Use callbacks, promises, and streams.
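For example, streaming a large file through a gzip transform with stream.pipeline, which also wires up error handling (the file paths are illustrative):
const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream');
// Compress without loading the whole file into memory
pipeline(
  fs.createReadStream('file.txt'),
  zlib.createGzip(),
  fs.createWriteStream('file.txt.gz'),
  err => {
    if (err) console.error('Pipeline failed:', err);
  }
);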
// profile.js
function processOrders(orders) {
orders.forEach(o => {
// ...
});
}
processOrders(allOrders);
The profiler shows processOrders taking 200ms. We investigate and find:
Bad:
// Compute expensive report
function generateReport() {
// ...heavy processing...
}
generateReport(); // Computes
generateReport(); // Computes again!
The same inputs always produce the same output. We should cache:
Good:
// Cache report contents
const cache = {};
function generateReport() {
if (cache.report) {
return cache.report;
}
const report = computeReport(); // ...compute... (computeReport is hypothetical)
cache.report = report;
return report;
}
function userStats(user) {
const name = user.name;
const email = user.email;
// ...logic...
}
function orderStats(order) {
const name = order.customerName;
const email = order.customerEmail;
// ...logic...
}
Both functions duplicate the same extraction logic. Extract a shared helper that normalizes the two shapes:
function getCustomerInfo(data) {
  return {
    name: data.name ?? data.customerName,
    email: data.email ?? data.customerEmail
  };
}
function userStats(user) {
const { name, email } = getCustomerInfo(user);
// ...logic...
}
function orderStats(order) {
const { name, email } = getCustomerInfo(order);
// ...logic...
}
If you like my article, feel free to follow me on HackerNoon.