Code optimization techniques
In Node.js development, code optimization is key to improving performance, maintainability, and scalability. From reducing redundant operations to leveraging asynchronous features effectively, there are various optimization techniques. Below are some practical tips and concrete examples.
Minimize Synchronous Operations
Node.js's core advantage lies in non-blocking I/O, but synchronous operations undermine it. For example, `fs.readFileSync` blocks the event loop:
// Avoid: readFileSync blocks the event loop until the entire file is read.
const data = fs.readFileSync('file.txt');
// Recommended: the async form frees the event loop while the OS does the I/O.
fs.readFile('file.txt', (err, data) => {
// Process data (remember to check `err` first in real code)
});
For scenarios that genuinely require synchronous operations, isolate them in a `worker_threads` worker so the main event loop stays responsive:
const { Worker } = require('worker_threads');
// Run the blocking read on a worker thread so the main event loop stays free.
// Fix: inside an eval'd worker script, `parentPort` is NOT a global — it must
// be pulled from worker_threads explicitly, otherwise the worker throws
// "parentPort is not defined".
new Worker(`
const { parentPort } = require('worker_threads');
const fs = require('fs');
const data = fs.readFileSync('large-file.json');
parentPort.postMessage(data.length);
`, { eval: true });
Use Stream Processing Wisely
For large file operations, streaming significantly reduces memory usage:
// Traditional approach (may cause memory overflow): the whole file is
// buffered in memory before it is split into lines.
fs.readFile('huge.log', (err, data) => {
const lines = data.toString().split('\n');
});
// Stream processing: readline consumes the file incrementally, keeping
// memory usage bounded regardless of file size.
const readline = require('readline');
const rl = readline.createInterface({
input: fs.createReadStream('huge.log')
});
rl.on('line', (line) => {
// Process line by line without ever holding the whole file in memory
});
Prefer streams for HTTP responses as well:
// Inefficient: reads the whole video into memory (and blocks the event loop
// with the synchronous read) before sending a single byte.
app.get('/video', (req, res) => {
const video = fs.readFileSync('movie.mp4');
res.send(video);
});
// Efficient: pipe the file to the response so data flows chunk by chunk.
// NOTE(review): attach an 'error' handler to the read stream in real code,
// or the request hangs when the file is missing.
app.get('/video', (req, res) => {
fs.createReadStream('movie.mp4').pipe(res);
});
Optimize Caching Strategies
Use in-memory caching to avoid repeated computations:
// Unbounded memo table — for long-lived processes prefer an LRU (see below).
const cache = new Map();
/**
 * Memoized wrapper around an expensive computation.
 * Fix: the original had `const result = /* Complex computation *​/;` — a bare
 * comment in expression position, which is a SyntaxError. The computation is
 * now an injectable parameter, defaulting to identity so callers keep working.
 * @param {*} key - cache key identifying the computation input
 * @param {(key: *) => *} [compute] - the expensive computation to memoize
 * @returns {*} the cached or freshly computed result
 */
function heavyCompute(key, compute = (k) => k) {
  // Serve repeated requests straight from memory.
  if (cache.has(key)) return cache.get(key);
  const result = compute(key);
  cache.set(key, result);
  return result;
}
For long-term caching, combine with LRU algorithms:
const LRU = require('lru-cache');
const cache = new LRU({ max: 1000 }); // evicts least-recently-used beyond 1000 entries
// Cache-aside read path: serve from the LRU when possible, otherwise query
// the database and populate the cache.
app.get('/data/:id', (req, res) => {
const cached = cache.get(req.params.id);
if (cached) return res.json(cached);
// Fix: Node-style callbacks are error-first; the original treated the first
// argument as the result and ignored failures entirely.
db.query('SELECT * FROM data WHERE id=?', [req.params.id], (err, result) => {
if (err) return res.status(500).json({ error: 'query failed' });
cache.set(req.params.id, result);
res.json(result);
});
});
Avoid Memory Leaks
Improper use of closures can cause memory leaks:
// Problematic example
// Problematic example: the returned closure keeps the whole enclosing scope
// reachable, so the large allocation can never be garbage-collected while
// the closure is alive.
function createLeak() {
  const retained = new Array(1e6).fill('*');
  const leaky = () => {
    console.log('Leak!');
    // `retained` stays pinned in memory for this closure's lifetime.
  };
  return leaky;
}
Clean up timers promptly:
// Bad practice: an anonymous interval can never be cleared, so it keeps the
// event loop (and anything it closes over) alive for the process lifetime.
setInterval(() => {
// Long-running task
}, 1000);
// Good practice: keep the handle and clear it during graceful shutdown.
// Fix: the original called setInterval(/* ... */) with no callback at all,
// which throws ERR_INVALID_CALLBACK on modern Node.
const timer = setInterval(() => { /* ... */ }, 1000);
process.on('SIGTERM', () => clearInterval(timer));
Optimize Asynchronous Control
Avoid callback hell; prefer Promises/async:
// Callback hell example: each step nests one level deeper, every callback
// shadows `err`, and no error is ever checked.
fs.readFile('a.txt', (err, a) => {
fs.readFile('b.txt', (err, b) => {
// NOTE(review): `a + b` coerces the Buffers to strings — binary data
// would be corrupted here.
fs.writeFile('c.txt', a + b, () => {
// More nesting...
});
});
});
// Improved approach: flat async/await with binary-safe Buffer concatenation.
(async () => {
  try {
    // The two reads are independent, so issue them in parallel instead of
    // serially (the Promise.all technique shown later in this article).
    const [a, b] = await Promise.all([
      fs.promises.readFile('a.txt'),
      fs.promises.readFile('b.txt'),
    ]);
    await fs.promises.writeFile('c.txt', Buffer.concat([a, b]));
  } catch (err) {
    // Fix: the original IIFE had no catch, so any failure surfaced as an
    // unhandled promise rejection.
    console.error('file combine failed:', err);
  }
})();
Use `Promise.all` for batches of independent operations:
// Sequential execution (slow): each DELETE waits for the previous round trip.
// NOTE: the bare `await` assumes an enclosing async function / top-level await.
for (const id of ids) {
await db.query('DELETE FROM items WHERE id=?', [id]);
}
// Parallel execution (fast): issue all DELETEs at once and wait for all.
// Promise.all is fail-fast — a single rejection rejects the whole batch.
await Promise.all(ids.map(id =>
db.query('DELETE FROM items WHERE id=?', [id])
));
Optimize Performance-Critical Paths
Avoid unnecessary operations in hot code paths:
// Inefficient string concatenation: each += allocates a new string.
function renderListSlow(items) {
  let html = '';
  for (const item of items) {
    html += `<li>${item.name}</li>`;
  }
  return html;
}
// Efficient approach: build all fragments and join once.
// Fix: the original declared `html` twice in one scope (`let` then `const`),
// which is a SyntaxError when the two snippets share a file — wrapping each
// approach in a function keeps both runnable and comparable.
function renderListFast(items) {
  return items.map((item) => `<li>${item.name}</li>`).join('');
}
Precompile regular expressions:
// Regex literal written inside the hot function at every call site.
function testString(str) {
return /^[a-z0-9]+$/.test(str);
}
// Precompiled optimization: create the regex once at module load.
// Safe because there is no /g flag — a /g regex would carry stateful
// lastIndex between .test() calls.
const ALPHA_NUM = /^[a-z0-9]+$/;
function testString(str) {
return ALPHA_NUM.test(str);
}
Improve Error Handling
Handle asynchronous errors properly:
// Uncaught Promise rejection: Express 4 does not forward errors from async
// handlers automatically, so a rejection here hangs the request.
app.get('/api', async (req, res) => {
const data = await fetchData(); // May reject
res.json(data);
});
// Correct approach: catch and forward to Express's error middleware.
app.get('/api', async (req, res, next) => {
try {
const data = await fetchData();
res.json(data);
} catch (err) {
next(err);
}
});
For complex scenarios the legacy `domain` module can catch asynchronous errors — though note it is deprecated, and `process.on('unhandledRejection')` or `AsyncLocalStorage` should be preferred in new code:
// NOTE(review): the `domain` module is deprecated in Node.js; shown here for
// legacy codebases only.
const domain = require('domain');
const d = domain.create();
// Errors thrown inside d.run() — even asynchronously — arrive here instead
// of crashing the process.
d.on('error', (err) => {
console.error('Domain caught:', err);
});
d.run(() => {
// Execute error-prone code here
process.nextTick(() => {
throw new Error('Async error');
});
});
Optimize Module Loading
Load non-essential modules dynamically:
// Load immediately at startup: the require cost is paid even if no PDF is
// ever generated.
const PDF = require('pdfkit');
// Lazy load: dynamic import() defers loading until first use.
async function generatePDF() {
const { default: PDF } = await import('pdfkit');
// Use module (this PDF shadows the outer binding within the function)
}
Leverage module caching:
// Repeated loading retrieves from cache: require() resolves and evaluates a
// module once, then returns the same exports object on every later call.
const mod1 = require('./utils');
const mod2 = require('./utils');
console.log(mod1 === mod2); // true — same object reference, not a copy
Optimize Event Emitters
Throttle high-frequency events:
const EventEmitter = require('events');
const emitter = new EventEmitter();
// Original high-frequency event: one 'chunk' emit per network read.
socket.on('data', (chunk) => {
emitter.emit('chunk', chunk);
});
// Optimized approach: accumulate chunks and emit one 'batch' every 100 ms.
// NOTE(review): this interval is never cleared, so it keeps the event loop
// alive — clear it on shutdown, as the timer section above recommends.
let buffer = [];
setInterval(() => {
if (buffer.length) {
emitter.emit('batch', buffer);
buffer = [];
}
}, 100);
socket.on('data', (chunk) => {
buffer.push(chunk);
});
Optimize Process Management
Use the cluster module effectively:
const cluster = require('cluster');
const os = require('os');
// Fix: `cluster.isMaster` is a deprecated alias; `isPrimary` is the current
// name (Node 16+). Fall back for older runtimes.
if (cluster.isPrimary ?? cluster.isMaster) {
// Fork one worker per CPU core.
for (let i = 0; i < os.cpus().length; i++) {
cluster.fork();
}
} else {
// Worker process code
require('./server');
}
Optimize inter-process communication:
// Parent process
const { fork } = require('child_process');
const child = fork('compute.js');
// Use messages instead of stdin/stdout: fork() sets up an IPC channel and
// serializes structured values automatically.
child.on('message', (result) => {
console.log('Result:', result);
});
// Child process compute.js
// NOTE(review): assumes heavyCompute is defined or imported in compute.js.
process.on('message', (data) => {
const result = heavyCompute(data);
process.send(result);
});
Optimize Database Operations
Use batch inserts instead of loops:
// Inefficient: one database round trip per row.
for (const user of users) {
await db.query('INSERT INTO users VALUES (?, ?)', [user.id, user.name]);
}
// Efficient batch insert: a single multi-row INSERT.
// NOTE(review): the `VALUES ?` nested-array expansion is mysql/mysql2
// specific — verify against the driver in use.
const values = users.map(u => [u.id, u.name]);
await db.query(
'INSERT INTO users VALUES ?',
[values]
);
Configure connection pools properly:
const pool = mysql.createPool({
connectionLimit: 10, // Adjust based on load testing
acquireTimeout: 10000, // ms to wait for a free connection before failing
waitForConnections: true // queue requests instead of erroring when saturated
});
// Release connections promptly so the pool does not starve under load.
// NOTE(review): `await pool.getConnection()` implies a promise-based API
// (e.g. mysql2/promise) — the classic mysql driver is callback-based.
app.get('/data', async (req, res) => {
const conn = await pool.getConnection();
try {
const [rows] = await conn.query('SELECT * FROM large_table');
res.json(rows);
} finally {
// Runs on success and on error, returning the connection to the pool.
conn.release();
}
});
Optimize Logging
Avoid synchronous log writes:
// Problematic code: appendFileSync blocks the event loop on every log line.
// (`message` here stands in for whatever value is being logged.)
fs.appendFileSync('app.log', `${new Date()} - ${message}\n`);
// Optimized approach: one long-lived append stream; writes are queued and
// flushed asynchronously.
const stream = fs.createWriteStream('app.log', { flags: 'a' });
function log(message) {
stream.write(`${new Date().toISOString()} - ${message}\n`);
}
// Error handling: without this listener, a failed write raises an unhandled
// 'error' event and crashes the process.
stream.on('error', (err) => {
console.error('Log write failed:', err);
});
Structured logs aid analysis:
// JSON-structured logs are machine-parseable, which simplifies downstream
// search and aggregation.
const { createLogger, transports, format } = require('winston');
const logger = createLogger({
format: format.combine(
format.timestamp(), // adds a timestamp field to every record
format.json() // emits one JSON object per log line
),
transports: [new transports.File({ filename: 'combined.log' })]
});
// The metadata object is merged into the structured record.
logger.info('User login', {
userId: 123,
ip: '192.168.1.1'
});
Optimize Testing Environments
Separate environment configurations:
// config.js — choose the connection string from NODE_ENV so the same code
// runs against both production and local test databases.
module.exports = {
db: process.env.NODE_ENV === 'production' ?
'mongodb://prod-db' :
'mongodb://localhost/test'
};
// Specify environment at startup:
// NODE_ENV=production node app.js
Optimize mock testing:
// Original test: hits the real network, so it is slow, flaky, and fails
// when offline.
test('fetch data', async () => {
const realData = await fetchFromAPI(); // Actual network request
expect(realData).toHaveProperty('id');
});
// Use nock for mocking: intercept the HTTP call and reply with a fixture.
// NOTE(review): the intercepted host/path must match what fetchFromAPI
// actually requests — verify against its implementation.
const nock = require('nock');
test('fetch data', async () => {
nock('https://api.example.com')
.get('/data')
.reply(200, { id: 123 });
const data = await fetchFromAPI();
expect(data).toEqual({ id: 123 });
});
本站部分内容来自互联网,一切版权均归源网站或源作者所有。
如果侵犯了你的权益请来信告知我们删除。邮箱:cc@cccx.cn