Log management
The Importance of Log Management
Logs are records generated during the operation of an application, used to track system behavior, troubleshoot issues, and analyze performance. Effective log management helps developers quickly locate problems and improves system maintainability. In Node.js, log management is particularly important: because application code runs on a single thread, one uncaught error can bring down the entire process, so thorough logging is essential for diagnosing failures after the fact.
Common Log Levels
Logs are typically divided into several levels, each corresponding to different levels of importance:
// Numeric log levels ordered by severity: a lower number means higher priority.
// (These appear to be the default npm/winston levels -- verify against the
// winston documentation if you rely on that.)
const levels = {
error: 0, // Errors requiring immediate attention
warn: 1, // Warnings about potential issues
info: 2, // Important information
verbose: 3, // Detailed information
debug: 4, // Debugging information
silly: 5 // Most detailed logs
};
Logging Modules in Node.js
The `console` Module
Node.js's built-in `console` module is the most basic logging tool:
// One call per severity level.
// NOTE(review): in Node, error/warn go to stderr while info/log/debug go to
// stdout -- confirm against the Node.js console docs if stream routing matters.
console.error('Error message');
console.warn('Warning message');
console.info('General information');
console.log('Equivalent to info');
console.debug('Debug information');
The Winston Logging Library
Winston is one of the most popular logging libraries for Node.js:
const winston = require('winston');

// JSON-formatted logger: errors get their own file, everything at info
// and above also lands in combined.log.
const logger = winston.createLogger({
  level: 'info',
  format: winston.format.json(),
  transports: [
    new winston.transports.File({ filename: 'error.log', level: 'error' }),
    new winston.transports.File({ filename: 'combined.log' })
  ]
});

// Outside production, mirror everything to the console in a readable format.
if (process.env.NODE_ENV !== 'production') {
  const consoleTransport = new winston.transports.Console({
    format: winston.format.simple()
  });
  logger.add(consoleTransport);
}
The Bunyan Logging Library
Another popular choice is Bunyan, which is particularly suited for structured logging:
const bunyan = require('bunyan');

// Structured JSON logger: errors are persisted to a file while info-and-above
// is mirrored to stdout.
const errorFileStream = {
  level: 'error',
  path: '/var/tmp/myapp-error.log'
};
const stdoutStream = {
  level: 'info',
  stream: process.stdout
};
const log = bunyan.createLogger({
  name: 'myapp',
  streams: [errorFileStream, stdoutStream]
});

log.info('Server started');
log.error({err: new Error('Example error')}, 'An error occurred');
Log Formatting Best Practices
Structured Logging
Structured logs are easier to analyze and process than plain text logs:
// Bad practice: a free-form string can only be searched with regexes later
logger.info('User login: John Doe');
// Good practice: discrete fields can be indexed, filtered, and aggregated
logger.info({
event: 'user_login',
username: 'John Doe',
ip: '192.168.1.1',
timestamp: new Date().toISOString()
});
Including Contextual Information
Logs should include sufficient contextual information:
/**
 * Process one order, logging start, success, and failure with a shared
 * context object (order id, user id, amount). Any processing error is
 * logged and then rethrown to the caller.
 */
function processOrder(order) {
  const ctx = {
    orderId: order.id,
    userId: order.userId,
    amount: order.amount
  };

  logger.info(ctx, 'Starting order processing');

  try {
    // Order processing logic
    logger.info(ctx, 'Order processed successfully');
  } catch (err) {
    logger.error({ ...ctx, error: err.message }, 'Order processing failed');
    throw err;
  }
}
Log Storage Strategies
File Storage
The most basic log storage method is writing to files:
const fs = require('fs');
const path = require('path');

/**
 * Append a single timestamped entry to app.log next to this module.
 * The write is asynchronous; failures are reported to stderr only.
 */
function writeLog(level, message) {
  const entry = `[${new Date().toISOString()}] [${level}] ${message}\n`;
  const target = path.join(__dirname, 'app.log');
  fs.appendFile(target, entry, (err) => {
    if (err) console.error('Failed to write log:', err);
  });
}
Log Rotation
To prevent log files from becoming too large, implement log rotation:
const { createGzip } = require('zlib');
const { pipeline } = require('stream');

/**
 * Compress the current log file into a date-stamped .gz archive, then clear it.
 * NOTE(review): entries appended between the end of the read stream and the
 * truncate call are lost. Acceptable for low-volume logs; otherwise rotate by
 * renaming the file first and compressing the renamed copy.
 */
function rotateLogs() {
  const datestamp = new Date().toISOString().split('T')[0];
  const oldFile = 'app.log';
  const newFile = `app.${datestamp}.log.gz`;
  pipeline(
    fs.createReadStream(oldFile),
    createGzip(),
    fs.createWriteStream(newFile),
    (err) => {
      if (err) {
        console.error('Log rotation failed:', err);
      } else {
        fs.truncate(oldFile, 0, (err) => {
          if (err) console.error('Failed to clear log file:', err);
        });
      }
    }
  );
}

// Rotate logs every midnight. A bare setInterval of 24h would fire at
// whatever wall-clock time the process happened to start, so compute the
// delay to the next midnight first, then repeat every 24 hours.
function scheduleMidnightRotation() {
  const now = new Date();
  const nextMidnight = new Date(now.getFullYear(), now.getMonth(), now.getDate() + 1);
  setTimeout(() => {
    rotateLogs();
    setInterval(rotateLogs, 24 * 60 * 60 * 1000);
  }, nextMidnight - now);
}
scheduleMidnightRotation();
Log Analysis and Monitoring
ELK Stack
ELK (Elasticsearch, Logstash, Kibana) is a popular log analysis solution:
- Example Logstash configuration:
input {
file {
path => "/var/log/node-app/*.log"
start_position => "beginning"
}
}
filter {
grok {
match => { "message" => "\[%{TIMESTAMP_ISO8601:timestamp}\] \[%{LOGLEVEL:level}\] %{GREEDYDATA:message}" }
}
}
output {
elasticsearch {
hosts => ["localhost:9200"]
}
}
Real-Time Log Monitoring
Implement real-time log monitoring using Socket.IO:
const io = require('socket.io')(3001);
const Tail = require('tail').Tail;
const logFile = new Tail('app.log');

// Attach ONE 'line' listener and broadcast each line to every connected
// client. The previous version registered a new listener inside every
// 'connection' handler and never removed it, so listeners accumulated
// forever (a leak) and kept firing for sockets that had disconnected.
logFile.on('line', (data) => {
  io.emit('log', data);
});
Performance Considerations
Asynchronous Logging
Synchronous logging blocks the event loop; prefer asynchronous logging:
// Synchronous - not recommended: blocks the event loop until the disk write finishes
fs.writeFileSync('sync.log', logEntry);
// Asynchronous - recommended: the write completes in the background
fs.writeFile('async.log', logEntry, (err) => {
if (err) console.error('Failed to write log asynchronously:', err);
});
Batch Writing
For high-frequency logging scenarios, consider batch writing:
let logBuffer = [];
const BATCH_SIZE = 100;
const BATCH_INTERVAL = 5000; // 5 seconds

/**
 * Queue one log entry; flushes automatically once BATCH_SIZE entries accumulate.
 * @param {string} logEntry - a pre-formatted log line (no trailing newline)
 */
function addToBuffer(logEntry) {
  logBuffer.push(logEntry);
  if (logBuffer.length >= BATCH_SIZE) {
    flushLogs();
  }
}

/**
 * Write all buffered entries to batch.log in a single append.
 * The buffer is swapped out *before* the async write starts, so entries
 * queued while the write is in flight are neither lost nor duplicated.
 */
function flushLogs() {
  if (logBuffer.length === 0) return;
  const logsToWrite = logBuffer.join('\n');
  logBuffer = [];
  fs.appendFile('batch.log', logsToWrite + '\n', (err) => {
    if (err) console.error('Failed to write logs in batch:', err);
  });
}

// Periodically flush the buffer. unref() so this timer alone never keeps the
// process alive, and flush once more on shutdown so queued entries that
// haven't reached BATCH_SIZE are not silently dropped.
setInterval(flushLogs, BATCH_INTERVAL).unref();
process.on('beforeExit', flushLogs);
Security Considerations
Sensitive Information Filtering
Logs should not contain sensitive information:
// Keys whose values must never appear in logs.
const SENSITIVE_FIELDS = new Set(['password', 'creditCard', 'ssn']);

/**
 * Return a deep copy of `data` with sensitive fields replaced by '[REDACTED]'.
 *
 * The previous JSON.parse(JSON.stringify(...)) round-trip threw on circular
 * references, silently dropped `undefined` values, converted Dates to strings,
 * and serialized the whole payload twice. This recursive walk avoids all of
 * that: cycles become '[Circular]', Dates are copied as Dates.
 *
 * @param {*} data - value to sanitize; objects and arrays are copied recursively
 * @param {WeakSet} [seen] - internal cycle guard; callers omit it
 * @returns {*} sanitized deep copy
 */
function sanitizeLog(data, seen = new WeakSet()) {
  if (data === null || typeof data !== 'object') return data;
  if (data instanceof Date) return new Date(data.getTime());
  if (seen.has(data)) return '[Circular]';
  seen.add(data);
  if (Array.isArray(data)) {
    return data.map((item) => sanitizeLog(item, seen));
  }
  const clean = {};
  for (const [key, value] of Object.entries(data)) {
    clean[key] = SENSITIVE_FIELDS.has(key) ? '[REDACTED]' : sanitizeLog(value, seen);
  }
  return clean;
}
// Example: the password field is replaced with '[REDACTED]' before the
// entry reaches any transport.
logger.info(sanitizeLog({
username: 'user1',
password: 'secret123',
action: 'login'
}));
Log Access Control
Ensure log files have appropriate permissions:
// Set log file permissions to 640 (rw-r-----): owner read/write, group
// read-only, no access for others.
// NOTE(review): on Windows only the write bit is honored -- confirm against
// the Node.js fs.chmod documentation if this must run cross-platform.
fs.chmod('app.log', 0o640, (err) => {
if (err) console.error('Failed to set file permissions:', err);
});
Multi-Environment Log Configuration
Different environments should have different log configurations:
/**
 * Build a winston logger tuned for the given environment.
 * Errors always go to errors.log; production additionally persists
 * info-and-above to combined.log, while every other environment logs
 * at debug level to the console instead.
 */
function createLogger(env) {
  const isProduction = env === 'production';
  const transports = [
    new winston.transports.File({ filename: 'errors.log', level: 'error' })
  ];
  if (isProduction) {
    transports.push(new winston.transports.File({ filename: 'combined.log' }));
  } else {
    transports.push(new winston.transports.Console());
  }
  return winston.createLogger({
    level: isProduction ? 'info' : 'debug',
    transports
  });
}
Request Tracing
In web applications, assign a unique ID to each request for tracing:
const uuid = require('uuid');

// Request-tracing middleware: tags every request with a UUID and logs its
// arrival and completion (status code + latency) under that id.
app.use((req, res, next) => {
  req.requestId = uuid.v4();
  // Record the start time up front so responseTime covers the whole request;
  // the original set it last, after the logging and wrapping below.
  req.startTime = Date.now();
  logger.info({
    requestId: req.requestId,
    method: req.method,
    url: req.url,
    ip: req.ip
  }, 'Request received');
  const originalEnd = res.end;
  res.end = function(...args) {
    logger.info({
      requestId: req.requestId,
      statusCode: res.statusCode,
      responseTime: Date.now() - req.startTime
    }, 'Request completed');
    // Preserve res.end's return value (the original wrapper discarded it).
    return originalEnd.apply(res, args);
  };
  next();
});
Error Handling and Logging
Properly handle and log errors:
// Last-resort handler: log any exception that escaped every try/catch.
process.on('uncaughtException', (err) => {
logger.error({
error: err.message,
stack: err.stack
}, 'Uncaught exception');
// Decide whether to exit based on severity
// NOTE(review): `isFatal` is not a standard Error property; it only exists if
// application code that throws sets it. Node's docs recommend always exiting
// after an uncaught exception -- confirm this conditional exit is intentional.
if (err.isFatal) {
process.exit(1);
}
});
// Log promise rejections that were never handled with .catch()/try-await.
process.on('unhandledRejection', (reason, promise) => {
logger.error({
reason: reason instanceof Error ? reason.stack : reason,
promise
}, 'Unhandled promise rejection');
});
Log Testing
Ensure the logging system works correctly:
// Mocha/Chai-style test that captures stdout to verify what the logger emits.
describe('Logging System', () => {
let logOutput;
const originalWrite = process.stdout.write;
// Redirect stdout into a string buffer before each test...
beforeEach(() => {
logOutput = '';
process.stdout.write = (chunk) => {
logOutput += chunk;
};
});
// ...and restore the real stdout afterwards, even if the test failed.
afterEach(() => {
process.stdout.write = originalWrite;
});
it('should correctly log errors', () => {
logger.error('Test error');
// Assumes `logger` has a Console transport writing to stdout -- TODO confirm.
expect(logOutput).to.contain('Test error');
expect(logOutput).to.contain('error');
});
});
Log and Performance Monitoring Integration
Integrate logs with APM tools:
const apm = require('elastic-apm-node').start({
  serviceName: 'my-node-app'
});

/**
 * Report an error to Elastic APM and mirror it into the application log,
 * tagging the log entry with the active transaction id (if any) so log
 * lines can be correlated with APM traces.
 */
function trackError(err) {
  apm.captureError(err);
  const transactionId = apm.currentTransaction?.ids['transaction.id'];
  logger.error(
    {
      error: err.message,
      stack: err.stack,
      transactionId
    },
    'Application error'
  );
}

try {
  // Code that might fail
} catch (err) {
  trackError(err);
}
Custom Log Formats
Create custom log formats:
// Require the whole module: the snippet below uses winston.createLogger and
// winston.transports, which the original left undefined by destructuring
// only `format` out of require('winston').
const winston = require('winston');
const { format } = winston;
const util = require('util');

// printf-style layout: "TIMESTAMP [LEVEL] message { ...metadata }".
// Any extra fields passed to the logger are pretty-printed after the message.
const customFormat = format.printf(({ level, message, timestamp, ...metadata }) => {
  let msg = `${timestamp} [${level}] ${message}`;
  if (Object.keys(metadata).length > 0) {
    msg += ' ' + util.inspect(metadata, { colors: true, depth: null });
  }
  return msg;
});
const logger = winston.createLogger({
  format: format.combine(
    format.timestamp(), // adds the `timestamp` field consumed above
    format.colorize(),  // colors the level label on TTYs
    customFormat
  ),
  transports: [new winston.transports.Console()]
});
Log Sampling
Consider log sampling in high-traffic environments:
// winston transports have no `sampleRate` option (the original passed one,
// which winston silently ignores -- every entry was logged). Sampling is
// implemented as a filtering format: returning false drops the entry.
const SAMPLE_RATE = 0.1; // keep ~10% of console entries
const sampleFilter = winston.format((info) => {
  return Math.random() < SAMPLE_RATE ? info : false;
});
const sampledLogger = winston.createLogger({
  transports: [
    new winston.transports.Console({
      level: 'info',
      format: sampleFilter() // only ~10% of entries reach the console
    }),
    new winston.transports.File({
      filename: 'important.log',
      level: 'error' // errors bypass sampling and are always persisted
    })
  ]
});
Distributed System Logging
In microservices architectures, centralized log management is needed:
const { createLogger } = require('winston');
const { ElasticsearchTransport } = require('winston-elasticsearch');

// Ship info-and-above to a central Elasticsearch cluster.
const esTransport = new ElasticsearchTransport({
  level: 'info',
  clientOpts: { node: 'http://localhost:9200' }
});
const logger = createLogger({
  // Service identifier attached to every entry from creation onward.
  // (Assigning logger.defaultMeta *after* createLogger, as the original did,
  // leaves any entries logged in between without the service tag.)
  defaultMeta: { service: 'order-service' },
  transports: [esTransport]
});
本站部分内容来自互联网,一切版权均归源网站或源作者所有。
如果侵犯了你的权益请来信告知我们删除。邮箱:cc@cccx.cn