diff --git a/benchmark/logger/basic-json.js b/benchmark/logger/basic-json.js
new file mode 100644
index 00000000000000..ee29699bdf7837
--- /dev/null
+++ b/benchmark/logger/basic-json.js
@@ -0,0 +1,55 @@
+'use strict';
+
+const common = require('../common');
+const { createLogger, JSONHandler } = require('node:logger');
+const fs = require('node:fs');
+
+const bench = common.createBenchmark(main, {
+  n: [1e5],
+  level: ['info', 'debug'],
+  fields: [0, 5],
+  type: ['simple', 'child', 'disabled'],
+});
+
+function main({ n, level, fields, type }) {
+  // Use /dev/null to avoid I/O overhead in benchmarks
+  const nullFd = fs.openSync('/dev/null', 'w');
+  const handler = new JSONHandler({ stream: nullFd, level: 'info' });
+  const logger = createLogger({ level });
+
+  // Create test data based on fields count
+  const logData = { msg: 'benchmark test message' };
+  for (let i = 0; i < fields; i++) {
+    logData[`field${i}`] = `value${i}`;
+  }
+
+  let testLogger;
+  switch (type) {
+    case 'simple':
+      handler.attach();
+      testLogger = logger;
+      break;
+    case 'child':
+      handler.attach();
+      testLogger = logger.child({ requestId: 'bench-123', userId: 456 });
+      break;
+    case 'disabled': {
+      // The logger is enabled at 'debug', but the only attached handler
+      // consumes 'warn' and above, so these info logs are dropped before
+      // any serialization work happens.
+      const nullFd2 = fs.openSync('/dev/null', 'w');
+      const warnHandler = new JSONHandler({ stream: nullFd2, level: 'warn' });
+      warnHandler.attach();
+      testLogger = createLogger({ level: 'debug' });
+      break;
+    }
+  }
+
+  bench.start();
+  for (let i = 0; i < n; i++) {
+    testLogger.info(logData);
+  }
+  bench.end(n);
+
+  handler.end();
+}
diff --git a/benchmark/logger/vs-pino.js b/benchmark/logger/vs-pino.js
new file mode 100644
index 00000000000000..30a0d82c86ec5e
--- /dev/null
+++ b/benchmark/logger/vs-pino.js
@@ -0,0 +1,146 @@
+'use strict';
+
+const common = require('../common');
+const fs = require('node:fs');
+
+const bench = common.createBenchmark(main, {
+  n: [1e5],
+  logger: ['node-logger', 'pino'],
+  scenario: ['simple', 'child', 'disabled', 'fields'],
+});
+
+function main({ n, logger, scenario }) {
+  const nullFd = fs.openSync('/dev/null', 'w');
+  let testLogger;
+  let consumer;
+
+  if (logger === 'node-logger') {
+    const { createLogger, JSONConsumer } = require('node:logger');
+
+    switch (scenario) {
+      case 'simple': {
+        consumer = new JSONConsumer({ stream: nullFd, level: 'info' });
+        consumer.attach();
+        testLogger = createLogger({ level: 'info' });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'child': {
+        consumer = new JSONConsumer({ stream: nullFd, level: 'info' });
+        consumer.attach();
+        const baseLogger = createLogger({ level: 'info' });
+        testLogger = baseLogger.child({ requestId: 'req-123', userId: 456 });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'disabled': {
+        consumer = new JSONConsumer({ stream: nullFd, level: 'warn' });
+        consumer.attach();
+        testLogger = createLogger({ level: 'warn' });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.debug('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'fields': {
+        consumer = new JSONConsumer({ stream: nullFd, level: 'info' });
+        consumer.attach();
+        testLogger = createLogger({ level: 'info' });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message', {
+            field1: 'value1',
+            field2: 'value2',
+            field3: 'value3',
+            field4: 'value4',
+            field5: 'value5',
+          });
+        }
+        bench.end(n);
+        break;
+      }
+    }
+
+    if (consumer) {
+      consumer.flushSync();
+    }
+    fs.closeSync(nullFd);
+
+  } else if (logger === 'pino') {
+    const pino = require('pino');
+    const destination = pino.destination({ dest: nullFd, sync: false });
+
+    switch (scenario) {
+      case 'simple': {
+        testLogger = pino({ level: 'info' }, destination);
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'child': {
+        const baseLogger = pino({ level: 'info' }, destination);
+        testLogger = baseLogger.child({ requestId: 'req-123', userId: 456 });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'disabled': {
+        testLogger = pino({ level: 'warn' }, destination);
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.debug('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'fields': {
+        testLogger = pino({ level: 'info' }, destination);
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info({
+            msg: 'benchmark test message',
+            field1: 'value1',
+            field2: 'value2',
+            field3: 'value3',
+            field4: 'value4',
+            field5: 'value5',
+          });
+        }
+        bench.end(n);
+        break;
+      }
+    }
+
+    destination.flushSync();
+  }
+}
diff --git a/doc/api/index.md b/doc/api/index.md
index 8db8b8c8806274..c18e083a241e4d 100644
--- a/doc/api/index.md
+++ b/doc/api/index.md
@@ -65,6 +65,7 @@
 * [UDP/datagram](dgram.md)
 * [URL](url.md)
 * [Utilities](util.md)
+* [Logger](logger.md)
 * [V8](v8.md)
 * [VM](vm.md)
 * [WASI](wasi.md)
diff --git a/doc/api/logger.md b/doc/api/logger.md
new file mode 100644
index 00000000000000..b72cbe5159a9a4
--- /dev/null
+++ b/doc/api/logger.md
@@ -0,0 +1,10 @@
+# Logger
+
+<!--introduced_in=REPLACEME-->
+
+> Stability: 1 - Experimental
+
+<!-- source_link=lib/logger.js -->
+
+The `node:logger` module provides structured logging capabilities for Node.js
+applications.
diff --git a/lib/internal/logger/serializers.js b/lib/internal/logger/serializers.js
new file mode 100644
index 00000000000000..78e680d468bda1
--- /dev/null
+++ b/lib/internal/logger/serializers.js
@@ -0,0 +1,72 @@
+'use strict';
+
+/**
+ * Serializes an Error object
+ * @param {Error} error
+ * @returns {object}
+ */
+function serializeErr(error) {
+  if (!error || typeof error !== 'object') {
+    return error;
+  }
+
+  const obj = {
+    type: error.constructor.name,
+    message: error.message,
+    stack: error.stack,
+  };
+
+  // Include additional enumerable error properties
+  for (const key in error) {
+    if (obj[key] === undefined) {
+      obj[key] = error[key];
+    }
+  }
+
+  // Handle error code if present
+  if (error.code !== undefined) {
+    obj.code = error.code;
+  }
+
+  // Handle error cause recursively
+  if (error.cause !== undefined) {
+    obj.cause = typeof error.cause === 'object' && error.cause !== null ?
+      serializeErr(error.cause) :
+      error.cause;
+  }
+
+  return obj;
+}
+
+/**
+ * Serializes an HTTP request object
+ * @param {object} req - HTTP request
+ * @returns {object}
+ */
+function serializeReq(req) {
+  return {
+    method: req.method,
+    url: req.url,
+    headers: req.headers,
+    remoteAddress: req.socket?.remoteAddress,
+    remotePort: req.socket?.remotePort,
+  };
+}
+
+/**
+ * Serializes an HTTP response object
+ * @param {object} res - HTTP response
+ * @returns {object}
+ */
+function serializeRes(res) {
+  return {
+    statusCode: res.statusCode,
+    headers: res.getHeaders ? res.getHeaders() : res.headers,
+  };
+}
+
+module.exports = {
+  err: serializeErr,
+  req: serializeReq,
+  res: serializeRes,
+};
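The standard serializers above flatten rich objects into JSON-safe shapes. As a rough sketch of the intended behavior (illustrative only, not part of the patch; `stdSerializers` is re-exported from `node:logger` by lib/logger.js below):

    const { stdSerializers } = require('node:logger');

    const e = new Error('disk full', { cause: new Error('quota exceeded') });
    e.code = 'ENOSPC';
    stdSerializers.err(e);
    // => {
    //      type: 'Error',
    //      message: 'disk full',
    //      stack: 'Error: disk full\n    at ...',
    //      code: 'ENOSPC',
    //      cause: { type: 'Error', message: 'quota exceeded', stack: '...' },
    //    }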
diff --git a/lib/logger.js b/lib/logger.js
new file mode 100644
index 00000000000000..35cc80de32cb04
--- /dev/null
+++ b/lib/logger.js
@@ -0,0 +1,584 @@
+'use strict';
+
+const {
+  DateNow,
+  Error,
+  JSONStringify,
+  ObjectAssign,
+  ObjectKeys,
+  SafeSet,
+} = primordials;
+
+const {
+  codes: {
+    ERR_INVALID_ARG_TYPE,
+    ERR_INVALID_ARG_VALUE,
+    ERR_METHOD_NOT_IMPLEMENTED,
+  },
+} = require('internal/errors');
+
+const {
+  validateObject,
+  validateString,
+  validateOneOf,
+  validateFunction,
+} = require('internal/validators');
+
+const Utf8Stream = require('internal/streams/fast-utf8-stream');
+const diagnosticsChannel = require('diagnostics_channel');
+const { kEmptyObject } = require('internal/util');
+const stdSerializers = require('internal/logger/serializers');
+
+// Create channels for each log level
+const channels = {
+  trace: diagnosticsChannel.channel('log:trace'),
+  debug: diagnosticsChannel.channel('log:debug'),
+  info: diagnosticsChannel.channel('log:info'),
+  warn: diagnosticsChannel.channel('log:warn'),
+  error: diagnosticsChannel.channel('log:error'),
+  fatal: diagnosticsChannel.channel('log:fatal'),
+};
+
+/*
+ * RFC 5424 numerical ordering + log4j interface
+ * @link https://www.rfc-editor.org/rfc/rfc5424.html
+ */
+const LEVELS = {
+  trace: 10,
+  debug: 20,
+  info: 30,
+  warn: 40,
+  error: 50,
+  fatal: 60,
+};
+
+const LEVEL_NAMES = ObjectKeys(LEVELS);
+
+// Noop function for disabled log levels
+function noop() {}
+
+/**
+ * Base consumer class for handling log records.
+ * Consumers subscribe to diagnostics_channel events.
+ */
+class LogConsumer {
+  #levelValue;
+
+  constructor(options = kEmptyObject) {
+    validateObject(options, 'options');
+    const { level = 'info' } = options;
+    validateString(level, 'options.level');
+
+    validateOneOf(level, 'options.level', LEVEL_NAMES);
+
+    this.#levelValue = LEVELS[level];
+  }
+
+  /**
+   * Check if a level would be consumed
+   * @param {string} level
+   * @returns {boolean}
+   */
+  enabled(level) {
+    return LEVELS[level] >= this.#levelValue;
+  }
+
+  /**
+   * Attach this consumer to log channels
+   */
+  attach() {
+    // Subscribe only to the levels this consumer will actually handle
+    for (const level of LEVEL_NAMES) {
+      if (this.enabled(level)) {
+        channels[level].subscribe(this.#handleLog.bind(this, level));
+      }
+    }
+  }
+
+  /**
+   * Internal handler called by diagnostics_channel
+   * @param {string} level
+   * @param {object} record
+   * @private
+   */
+  #handleLog(level, record) {
+    this.handle(record);
+  }
+
+  /**
+   * Handle a log record (must be implemented by subclass)
+   * @param {object} record
+   */
+  handle(record) {
+    throw new ERR_METHOD_NOT_IMPLEMENTED('LogConsumer.handle()');
+  }
+}
+
+/**
+ * JSON consumer - outputs structured JSON logs
+ */
+class JSONConsumer extends LogConsumer {
+  #stream;
+  #fields;
+
+  constructor(options = kEmptyObject) {
+    super(options);
+    const {
+      stream,
+      fields = kEmptyObject,
+    } = options;
+
+    validateObject(fields, 'options.fields');
+
+    this.#stream = stream ?
+      this.#createStream(stream) :
+      new Utf8Stream({ fd: 1 });
+    this.#fields = fields;
+  }
+
+  #createStream(stream) {
+    // Fast path: already a Utf8Stream
+    if (stream instanceof Utf8Stream) {
+      return stream;
+    }
+
+    // Number: file descriptor
+    if (typeof stream === 'number') {
+      return new Utf8Stream({ fd: stream });
+    }
+
+    // String: file path
+    if (typeof stream === 'string') {
+      return new Utf8Stream({ dest: stream });
+    }
+
+    // Object: custom stream with write method
+    if (typeof stream === 'object' && stream !== null &&
+        typeof stream.write === 'function') {
+      return stream;
+    }
+
+    // Anything else is unsupported
+    throw new ERR_INVALID_ARG_TYPE(
+      'options.stream',
+      ['number', 'string', 'Utf8Stream', 'object with write method'],
+      stream,
+    );
+  }
+
+  handle(record) {
+    // Build the JSON record manually. Keys and values go through
+    // JSONStringify so quotes and control characters cannot break
+    // the framing; level and time are trusted values.
+    let json = '{"level":"' + record.level + '",' +
+      '"time":' + record.time + ',' +
+      '"msg":' + JSONStringify(record.msg);
+
+    // Add consumer fields
+    const consumerFields = this.#fields;
+    for (const key in consumerFields) {
+      const value = consumerFields[key];
+      json += ',' + JSONStringify(key) + ':' + JSONStringify(value);
+    }
+
+    // Add pre-serialized bindings
+    json += record.bindingsStr;
+
+    // Add log fields
+    const fields = record.fields;
+    for (const key in fields) {
+      const value = fields[key];
+      json += ',' + JSONStringify(key) + ':' + JSONStringify(value);
+    }
+
+    json += '}\n';
+    this.#stream.write(json);
+  }
+
+  /**
+   * Flush pending writes
+   * @param {Function} callback
+   */
+  flush(callback) {
+    this.#stream.flush(callback);
+  }
+
+  /**
+   * Flush pending writes synchronously
+   */
+  flushSync() {
+    this.#stream.flushSync();
+  }
+
+  /**
+   * Close the consumer
+   */
+  end() {
+    this.#stream.end();
+  }
+}
+
+/**
+ * Logger class
+ */
+class Logger {
+  #level;
+  #levelValue;
+  #bindings;
+  #bindingsStr; // Pre-serialized bindings JSON string
+  #serializers;
+  #hasCustomSerializers;
+
+  constructor(options = kEmptyObject) {
+    validateObject(options, 'options');
+    const {
+      level = 'info',
+      bindings = kEmptyObject,
+      serializers = kEmptyObject,
+    } = options;
+
+    validateString(level, 'options.level');
+    if (!LEVELS[level]) {
+      throw new ERR_INVALID_ARG_VALUE(
+        'options.level',
+        level,
+        `must be one of: ${LEVEL_NAMES.join(', ')}`,
+      );
+    }
+
+    validateObject(bindings, 'options.bindings');
+    validateObject(serializers, 'options.serializers');
+
+    // Validate serializers are functions
+    for (const key in serializers) {
+      validateFunction(serializers[key], `options.serializers.${key}`);
+    }
+
+    this.#level = level;
+    this.#levelValue = LEVELS[level];
+    this.#bindings = bindings;
+
+    // Create serializers object with default err serializer
+    this.#serializers = { __proto__: null };
+    // Add default err serializer (can be overridden)
+    this.#serializers.err = stdSerializers.err;
+
+    // Track if we have custom serializers beyond 'err'
+    let hasCustom = false;
+    // Add custom serializers
+    for (const key in serializers) {
+      this.#serializers[key] = serializers[key];
+      if (key !== 'err') {
+        hasCustom = true;
+      }
+    }
+    this.#hasCustomSerializers = hasCustom;
+
+    // Pre-serialize bindings
+    this.#bindingsStr = this.#serializeBindings(bindings);
+
+    this.#setLogMethods();
+  }
+
+  /**
+   * Pre-serialize bindings
+   * @param {object} bindings
+   * @returns {string}
+   * @private
+   */
+  #serializeBindings(bindings) {
+    if (!bindings || typeof bindings !== 'object') {
+      return '';
+    }
+
+    let result = '';
+    for (const key in bindings) {
+      const value = bindings[key];
+      // Apply serializer if exists
+      const serialized = this.#serializers[key] ?
+        this.#serializers[key](value) :
+        value;
+      result += ',' + JSONStringify(key) + ':' + JSONStringify(serialized);
+    }
+    return result;
+  }
+
+  /**
+   * Replace disabled log methods with noop for performance
+   * @private
+   */
+  #setLogMethods() {
+    const levelValue = this.#levelValue;
+
+    if (levelValue > LEVELS.trace) this.trace = noop;
+    if (levelValue > LEVELS.debug) this.debug = noop;
+    if (levelValue > LEVELS.info) this.info = noop;
+    if (levelValue > LEVELS.warn) this.warn = noop;
+    if (levelValue > LEVELS.error) this.error = noop;
+    if (levelValue > LEVELS.fatal) this.fatal = noop;
+  }
+
+  /**
+   * Check if a level would be logged
+   * @param {string} level
+   * @returns {boolean}
+   */
+  enabled(level) {
+    return LEVELS[level] >= this.#levelValue;
+  }
+
+  /**
+   * Create a child logger with additional context
+   * @param {object} bindings - Context to add to all child logs
+   * @param {object} [options] - Optional overrides
+   * @returns {Logger}
+   */
+  child(bindings, options = kEmptyObject) {
+    validateObject(bindings, 'bindings');
+    validateObject(options, 'options');
+
+    const mergedBindings = ObjectAssign(
+      { __proto__: null },
+      this.#bindings,
+      bindings,
+    );
+
+    // Handle serializers inheritance
+    let childSerializers;
+    if (options.serializers) {
+      validateObject(options.serializers, 'options.serializers');
+
+      // Create new serializers object
+      childSerializers = { __proto__: null };
+
+      // Copy parent serializers
+      for (const key in this.#serializers) {
+        childSerializers[key] = this.#serializers[key];
+      }
+
+      // Override with child serializers
+      for (const key in options.serializers) {
+        validateFunction(options.serializers[key], `options.serializers.${key}`);
+        childSerializers[key] = options.serializers[key];
+      }
+    }
+
+    const childLogger = new Logger({
+      level: options.level || this.#level,
+      bindings: mergedBindings,
+      serializers: childSerializers || this.#serializers,
+    });
+
+    return childLogger;
+  }
+
+  /**
+   * Check if value is an Error object
+   * @param {*} value
+   * @returns {boolean}
+   * @private
+   */
+  #isError(value) {
+    return value instanceof Error;
+  }
+
+  /**
+   * Apply serializers to an object's properties
+   * @param {object} obj - Object to serialize
+   * @returns {object} Serialized object
+   * @private
+   */
+  #applySerializers(obj) {
+    if (!obj || typeof obj !== 'object') {
+      return obj;
+    }
+
+    // Fast path: no custom serializers, return as-is
+    if (!this.#hasCustomSerializers) {
+      return obj;
+    }
+
+    const serialized = { __proto__: null };
+    const serializers = this.#serializers;
+
+    for (const key in obj) {
+      const value = obj[key];
+      // Apply serializer if exists for this key
+      serialized[key] = serializers[key] ?
+        serializers[key](value) :
+        value;
+    }
+
+    return serialized;
+  }
+
+  /**
+   * Internal log method
+   * @param {string} level
+   * @param {number} levelValue
+   * @param {string|object} msgOrObj - Message string or object with msg property
+   * @param {object} [fields] - Optional fields to merge
+   */
+  #log(level, levelValue, msgOrObj, fields) {
+    if (levelValue < this.#levelValue) {
+      return;
+    }
+
+    if (typeof msgOrObj === 'string') {
+      if (fields !== undefined) {
+        validateObject(fields, 'fields');
+      }
+    } else if (!this.#isError(msgOrObj)) {
+      validateObject(msgOrObj, 'obj');
+      validateString(msgOrObj.msg, 'obj.msg');
+    }
+
+    const channel = channels[level];
+    if (!channel.hasSubscribers) {
+      return;
+    }
+
+    let msg;
+    let logFields;
+
+    if (this.#isError(msgOrObj)) {
+      msg = msgOrObj.message;
+      // Use err serializer for Error objects
+      const serializedErr = this.#serializers.err ?
+        this.#serializers.err(msgOrObj) :
+        this.#serializeError(msgOrObj);
+
+      logFields = {
+        err: serializedErr,
+      };
+
+      // Apply serializers to additional fields
+      if (fields) {
+        const serializedFields = this.#applySerializers(fields);
+        for (const key in serializedFields) {
+          logFields[key] = serializedFields[key];
+        }
+      }
+    } else if (typeof msgOrObj === 'string') {
+      msg = msgOrObj;
+      // Apply serializers to fields
+      logFields = fields ? this.#applySerializers(fields) : kEmptyObject;
+    } else {
+      const { msg: extractedMsg, ...restFields } = msgOrObj;
+      msg = extractedMsg;
+
+      // Apply serializers to object fields
+      logFields = this.#applySerializers(restFields);
+
+      // Special handling for err/error fields
+      if (logFields.err && this.#isError(restFields.err)) {
+        logFields.err = this.#serializers.err ?
+          this.#serializers.err(restFields.err) :
+          this.#serializeError(restFields.err);
+      }
+
+      if (logFields.error && this.#isError(restFields.error)) {
+        logFields.error = this.#serializers.err ?
+          this.#serializers.err(restFields.error) :
+          this.#serializeError(restFields.error);
+      }
+    }
+
+    const record = {
+      level,
+      msg,
+      time: DateNow(),
+      bindingsStr: this.#bindingsStr, // Pre-serialized bindings string
+      fields: logFields,
+    };
+
+    channel.publish(record);
+  }
+
+  /**
+   * Serialize Error object for logging (with recursive cause handling)
+   * @param {object} err - Error object to serialize
+   * @param {Set} [seen] - Set to track circular references
+   * @returns {object} Serialized error object
+   * @private
+   */
+  #serializeError(err, seen = new SafeSet()) {
+    if (seen.has(err)) {
+      return '[Circular]';
+    }
+    seen.add(err);
+
+    const serialized = {
+      message: err.message,
+      name: err.name,
+      stack: err.stack,
+    };
+
+    if (err.code !== undefined) {
+      serialized.code = err.code;
+    }
+
+    if (err.cause !== undefined) {
+      serialized.cause = this.#isError(err.cause) ?
+        this.#serializeError(err.cause, seen) :
+        err.cause;
+    }
+
+    for (const key in err) {
+      if (serialized[key] === undefined) {
+        serialized[key] = err[key];
+      }
+    }
+
+    return serialized;
+  }
+
+  trace(msgOrObj, fields) {
+    this.#log('trace', LEVELS.trace, msgOrObj, fields);
+  }
+
+  debug(msgOrObj, fields) {
+    this.#log('debug', LEVELS.debug, msgOrObj, fields);
+  }
+
+  info(msgOrObj, fields) {
+    this.#log('info', LEVELS.info, msgOrObj, fields);
+  }
+
+  warn(msgOrObj, fields) {
+    this.#log('warn', LEVELS.warn, msgOrObj, fields);
+  }
+
+  error(msgOrObj, fields) {
+    this.#log('error', LEVELS.error, msgOrObj, fields);
+  }
+
+  fatal(msgOrObj, fields) {
+    this.#log('fatal', LEVELS.fatal, msgOrObj, fields);
+  }
+}
+
+/**
+ * Create a logger instance (convenience method)
+ * @param {object} [options]
+ * @param {string} [options.level] - Minimum log level (default: 'info')
+ * @param {object} [options.bindings] - Context fields (default: {})
+ * @param {object} [options.serializers] - Per-field serializers (default: {})
+ * @returns {Logger}
+ */
+function createLogger(options) {
+  return new Logger(options);
+}
+
+module.exports = {
+  __proto__: null,
+  Logger,
+  LogConsumer,
+  JSONConsumer,
+  LEVELS,
+  channels,
+  createLogger,
+  stdSerializers,
+  Handler: LogConsumer,
+  JSONHandler: JSONConsumer,
+};
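Taken together, a minimal end-to-end sketch of the API exported above (all names are real exports of lib/logger.js; the record shape follows JSONConsumer's handle()):

    const { createLogger, JSONConsumer } = require('node:logger');

    // Nothing is written until a consumer attaches to the log channels;
    // with no `stream` option, output goes to fd 1 (stdout).
    const consumer = new JSONConsumer({
      level: 'info',
      fields: { service: 'api' },
    });
    consumer.attach();

    const logger = createLogger({ level: 'info', bindings: { region: 'local' } });
    const child = logger.child({ requestId: 'req-1' });

    child.info('request processed', { duration: 150 });
    // => {"level":"info","time":1700000000000,"msg":"request processed",
    //     "service":"api","region":"local","requestId":"req-1","duration":150}

    child.error(new Error('boom')); // serialized by the default `err` serializer

    consumer.flushSync();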
diff --git a/test/parallel/test-log-basic.js b/test/parallel/test-log-basic.js
new file mode 100644
index 00000000000000..6360f08ed0d830
--- /dev/null
+++ b/test/parallel/test-log-basic.js
@@ -0,0 +1,500 @@
+'use strict';
+
+require('../common');
+
+const assert = require('node:assert');
+const { createLogger, Logger, LogConsumer, JSONConsumer, LEVELS, channels } = require('node:logger');
+const fs = require('node:fs');
+const os = require('node:os');
+const path = require('node:path');
+
+// Test LEVELS constant
+{
+  assert.strictEqual(typeof LEVELS, 'object');
+  assert.strictEqual(LEVELS.trace, 10);
+  assert.strictEqual(LEVELS.debug, 20);
+  assert.strictEqual(LEVELS.info, 30);
+  assert.strictEqual(LEVELS.warn, 40);
+  assert.strictEqual(LEVELS.error, 50);
+  assert.strictEqual(LEVELS.fatal, 60);
+}
+
+// Test createLogger returns a Logger instance
+{
+  const logger = createLogger();
+  assert(logger instanceof Logger);
+}
+
+// Test Logger has all log methods
+{
+  const logger = createLogger();
+  assert.strictEqual(typeof logger.trace, 'function');
+  assert.strictEqual(typeof logger.debug, 'function');
+  assert.strictEqual(typeof logger.info, 'function');
+  assert.strictEqual(typeof logger.warn, 'function');
+  assert.strictEqual(typeof logger.error, 'function');
+  assert.strictEqual(typeof logger.fatal, 'function');
+  assert.strictEqual(typeof logger.enabled, 'function');
+  assert.strictEqual(typeof logger.child, 'function');
+}
+
+// Test level filtering
+{
+  const logger = createLogger({ level: 'warn' });
+  assert.strictEqual(logger.enabled('trace'), false);
+  assert.strictEqual(logger.enabled('debug'), false);
+  assert.strictEqual(logger.enabled('info'), false);
+  assert.strictEqual(logger.enabled('warn'), true);
+  assert.strictEqual(logger.enabled('error'), true);
+  assert.strictEqual(logger.enabled('fatal'), true);
+}
+
+// Test msg field is required
+{
+  const logger = createLogger();
+
+  // Object without msg should throw
+  assert.throws(() => {
+    logger.info({ userId: 123 }); // Missing msg
+  }, {
+    code: 'ERR_INVALID_ARG_TYPE',
+  });
+
+  assert.throws(() => {
+    logger.info({ msg: 123 }); // msg is not a string
+  }, {
+    code: 'ERR_INVALID_ARG_TYPE',
+  });
+
+  // String without second arg should work (no assertion needed, just shouldn't throw)
+  logger.info('just a message');
+}
+
+// Test child logger context inheritance
+{
+  const logger = createLogger({ level: 'info' });
+  const childLogger = logger.child({ requestId: 'abc-123' });
+
+  assert(childLogger instanceof Logger);
+  assert.notStrictEqual(childLogger, logger);
+
+  // Child should have same level as parent
+  assert.strictEqual(childLogger.enabled('info'), true);
+  assert.strictEqual(childLogger.enabled('debug'), false);
+
+  // Test nested child
+  const grandchildLogger = childLogger.child({ operation: 'query' });
+  assert(grandchildLogger instanceof Logger);
+  assert.notStrictEqual(grandchildLogger, childLogger);
+}
+
+// Test child logger level override
+{
+  const logger = createLogger({ level: 'info' });
+  const childLogger = logger.child({ requestId: 'abc' }, { level: 'debug' });
+
+  assert.strictEqual(logger.enabled('debug'), false);
+  assert.strictEqual(childLogger.enabled('debug'), true);
+}
+
+// Test JSONConsumer output format
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-1.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+
+  logger.info({ msg: 'test message', userId: 123 });
+
+  // Flush synchronously and read the output
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+
+  // Parse the JSON output
+  const parsed = JSON.parse(output.trim());
+  assert.strictEqual(parsed.level, 'info');
+  assert.strictEqual(parsed.msg, 'test message');
+  assert.strictEqual(parsed.userId, 123);
+  assert.strictEqual(typeof parsed.time, 'number');
+
+  // Cleanup
+  fs.unlinkSync(tmpfile);
+}
+
+// Test JSONConsumer with additional fields
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-2.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+    fields: { hostname: 'test-host', pid: 12345 },
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+
+  logger.info({ msg: 'with fields' });
+
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+  const parsed = JSON.parse(output.trim());
+  assert.strictEqual(parsed.hostname, 'test-host');
+  assert.strictEqual(parsed.pid, 12345);
+  assert.strictEqual(parsed.msg, 'with fields');
+
+  // Cleanup
+  fs.unlinkSync(tmpfile);
+}
+
+// Test child logger bindings in output
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-3.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+  const childLogger = logger.child({ requestId: 'xyz-789' });
+
+  childLogger.info({ msg: 'child log', action: 'create' });
+
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+  const parsed = JSON.parse(output.trim());
+  assert.strictEqual(parsed.requestId, 'xyz-789');
+  assert.strictEqual(parsed.action, 'create');
+  assert.strictEqual(parsed.msg, 'child log');
+
+  // Cleanup
+  fs.unlinkSync(tmpfile);
+}
+
+// Test invalid log level
+{
+  assert.throws(() => {
+    createLogger({ level: 'invalid' });
+  }, {
+    code: 'ERR_INVALID_ARG_VALUE',
+  });
+}
+
+// Test LogConsumer is abstract
+{
+  const consumer = new LogConsumer({ level: 'info' });
+  assert.throws(() => {
+    consumer.handle({});
+  }, {
+    code: 'ERR_METHOD_NOT_IMPLEMENTED',
+    message: /LogConsumer\.handle\(\) method is not implemented/,
+  });
+}
+
+// Test disabled level skips processing with diagnostics_channel
+{
+  let handleCalled = false;
+
+  class TestConsumer extends LogConsumer {
+    handle() {
+      handleCalled = true;
+    }
+  }
+
+  const consumer = new TestConsumer({ level: 'warn' });
+  consumer.attach();
+  const logger = createLogger({ level: 'warn' });
+
+  // This should be skipped (info < warn)
+  logger.info({ msg: 'skipped' });
+  assert.strictEqual(handleCalled, false);
+
+  // This should be processed (error > warn)
+  logger.error({ msg: 'processed' });
+  assert.strictEqual(handleCalled, true);
+}
+
+// Test invalid fields argument
+{
+  const logger = createLogger();
+  assert.throws(() => {
+    logger.info('message', 'not an object'); // Second arg must be object
+  }, {
+    code: 'ERR_INVALID_ARG_TYPE',
+  });
+}
+
+// Test string message signature
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-string.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+
+  logger.info('simple message');
+
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+  const parsed = JSON.parse(output.trim());
+
+  assert.strictEqual(parsed.msg, 'simple message');
+  assert.strictEqual(parsed.level, 'info');
+
+  fs.unlinkSync(tmpfile);
+}
+
+// Test string message with fields
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-string-fields.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+
+  logger.info('user login', { userId: 123, ip: '127.0.0.1' });
+
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+  const parsed = JSON.parse(output.trim());
+
+  assert.strictEqual(parsed.msg, 'user login');
+  assert.strictEqual(parsed.userId, 123);
+  assert.strictEqual(parsed.ip, '127.0.0.1');
+
+  fs.unlinkSync(tmpfile);
+}
+
+// Test Error object serialization
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-error.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+
+  const err = new Error('test error');
+  err.code = 'TEST_ERROR';
+  logger.error({ msg: 'operation failed', err });
+
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+  const parsed = JSON.parse(output.trim());
+
+  // Error should be serialized with stack trace
+  assert.strictEqual(parsed.msg, 'operation failed');
+  assert.strictEqual(typeof parsed.err, 'object');
+  assert.strictEqual(parsed.err.message, 'test error');
+  assert.strictEqual(parsed.err.code, 'TEST_ERROR');
+  assert(parsed.err.stack);
+
+  fs.unlinkSync(tmpfile);
+}
+
+// Test Error as first argument
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-error-first.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+
+  const err = new Error('boom');
+  logger.error(err); // Error as first arg
+
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+  const parsed = JSON.parse(output.trim());
+
+  assert.strictEqual(parsed.msg, 'boom'); // message from error
+  assert.strictEqual(typeof parsed.err, 'object');
+  assert(parsed.err.stack);
+
+  fs.unlinkSync(tmpfile);
+}
+
+// Test child logger with parent fields merge
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-child-merge.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+    fields: { service: 'api' }, // consumer fields
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+  const childLogger = logger.child({ requestId: '123' }); // child bindings
+
+  childLogger.info('request processed', { duration: 150 }); // log fields
+
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+  const parsed = JSON.parse(output.trim());
+
+  // Merge order: consumer fields < bindings < log fields
+  assert.strictEqual(parsed.service, 'api');
+  assert.strictEqual(parsed.requestId, '123');
+  assert.strictEqual(parsed.duration, 150);
+  assert.strictEqual(parsed.msg, 'request processed');
+
+  fs.unlinkSync(tmpfile);
+}
+
+// Test field override priority
+{
+  const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-override.json`);
+  const consumer = new JSONConsumer({
+    stream: fs.openSync(tmpfile, 'w'),
+    level: 'info',
+    fields: { env: 'dev', version: '1.0' },
+  });
+  consumer.attach();
+  const logger = createLogger({ level: 'info' });
+  const childLogger = logger.child({ env: 'staging' });
+
+  childLogger.info('test', { env: 'production' });
+
+  consumer.flushSync();
+  consumer.end();
+  const output = fs.readFileSync(tmpfile, 'utf8');
+  const parsed = JSON.parse(output.trim());
+
+  // Log fields should override everything
+  assert.strictEqual(parsed.env, 'production');
+  assert.strictEqual(parsed.version, '1.0');
+
+  fs.unlinkSync(tmpfile);
+}
+
+// Test multiple consumers (Qard's use case)
+{
+  const tmpfile1 = path.join(os.tmpdir(), `test-log-${process.pid}-multi1.json`);
+  const tmpfile2 = path.join(os.tmpdir(), `test-log-${process.pid}-multi2.json`);
+
+  // Console consumer logs everything (debug+)
+  const consumer1 = new JSONConsumer({
+    stream: fs.openSync(tmpfile1, 'w'),
+    level: 'debug',
+  });
+  consumer1.attach();
+
+  // Service consumer logs only warnings+ (warn+)
+  const consumer2 = new JSONConsumer({
+    stream: fs.openSync(tmpfile2, 'w'),
+    level: 'warn',
+  });
+  consumer2.attach();
+
+  const logger = createLogger({ level: 'debug' });
+
+  logger.debug({ msg: 'debug message' });
+  logger.info({ msg: 'info message' });
+  logger.warn({ msg: 'warn message' });
+  logger.error({ msg: 'error message' });
+
+  consumer1.flushSync();
+  consumer1.end();
+  consumer2.flushSync();
+  consumer2.end();
+
+  const output1 = fs.readFileSync(tmpfile1, 'utf8');
+  const lines1 = output1.trim().split('\n');
+
+  const output2 = fs.readFileSync(tmpfile2, 'utf8');
+  const lines2 = output2.trim().split('\n');
+
+  // Consumer1 should have all 4 logs (debug+)
+  assert.strictEqual(lines1.length, 4);
+  assert.strictEqual(JSON.parse(lines1[0]).level, 'debug');
+  assert.strictEqual(JSON.parse(lines1[1]).level, 'info');
+  assert.strictEqual(JSON.parse(lines1[2]).level, 'warn');
+  assert.strictEqual(JSON.parse(lines1[3]).level, 'error');
+
+  // Consumer2 should have only 2 logs (warn+)
+  assert.strictEqual(lines2.length, 2);
+  assert.strictEqual(JSON.parse(lines2[0]).level, 'warn');
+  assert.strictEqual(JSON.parse(lines2[1]).level, 'error');
+
+  fs.unlinkSync(tmpfile1);
+  fs.unlinkSync(tmpfile2);
+}
+
+// Test channels export
+{
+  assert.strictEqual(typeof channels, 'object');
+  assert.strictEqual(typeof channels.trace, 'object');
+  assert.strictEqual(typeof channels.debug, 'object');
+  assert.strictEqual(typeof channels.info, 'object');
+  assert.strictEqual(typeof channels.warn, 'object');
+  assert.strictEqual(typeof channels.error, 'object');
+  assert.strictEqual(typeof channels.fatal, 'object');
+}
+
+// Test: Support both 'err' and 'error' fields
+{
+  const logs = [];
+  const consumer = new JSONConsumer({
+    stream: {
+      write(data) { logs.push(JSON.parse(data)); },
+      flush() {},
+      flushSync() {},
+      end() {},
+    },
+    level: 'info',
+  });
+  consumer.attach();
+
+  const logger = new Logger({ level: 'info' });
+
+  const err = new Error('Error 1');
+  const error = new Error('Error 2');
+
+  logger.error({ msg: 'Multiple errors', err, error });
+
+  assert.strictEqual(logs.length, 1);
+  assert.strictEqual(logs[0].err.message, 'Error 1');
+  assert.strictEqual(logs[0].error.message, 'Error 2');
+  assert.ok(logs[0].err.stack);
+  assert.ok(logs[0].error.stack);
+}
+
+// Test: 'error' field serialization
+{
+  const logs = [];
+  const consumer = new JSONConsumer({
+    stream: {
+      write(data) { logs.push(JSON.parse(data)); },
+      flush() {},
+      flushSync() {},
+      end() {},
+    },
+    level: 'info',
+  });
+  consumer.attach();
+
+  const logger = new Logger({ level: 'info' });
+  const error = new Error('Test error');
+  error.code = 'TEST_CODE';
+
+  logger.error({ msg: 'Operation failed', error });
+
+  assert.strictEqual(logs.length, 1);
+  assert.strictEqual(logs[0].error.message, 'Test error');
+  assert.strictEqual(logs[0].error.code, 'TEST_CODE');
+  assert.ok(logs[0].error.stack);
+}
diff --git a/test/parallel/test-logger-serializers.js b/test/parallel/test-logger-serializers.js
new file mode 100644
index 00000000000000..0ede49d230f30a
--- /dev/null
+++ b/test/parallel/test-logger-serializers.js
@@ -0,0 +1,194 @@
+'use strict';
+
+require('../common');
+const assert = require('node:assert');
+const { Logger, JSONConsumer, stdSerializers } = require('node:logger');
+const { Writable } = require('node:stream');
+
+// Test helper to capture log output
+class TestStream extends Writable {
+  constructor() {
+    super();
+    this.logs = [];
+  }
+
+  _write(chunk, encoding, callback) {
+    this.logs.push(JSON.parse(chunk.toString()));
+    callback();
+  }
+
+  clear() {
+    this.logs = [];
+  }
+
+  // Add flush methods for compatibility
+  flush(callback) {
+    if (callback) callback();
+  }
+
+  flushSync() {
+    // No-op for test stream
+  }
+}
+
+// Default err serializer
+{
+  const stream = new TestStream();
+  const consumer = new JSONConsumer({ stream, level: 'info' });
+  consumer.attach();
+
+  const logger = new Logger({ level: 'info' });
+  const error = new Error('Test error');
+  error.code = 'TEST_CODE';
+
+  logger.error(error);
+  consumer.flushSync();
+
+  assert.strictEqual(stream.logs.length, 1);
+  const log = stream.logs[0];
+  assert.strictEqual(log.err.type, 'Error');
+  assert.strictEqual(log.err.message, 'Test error');
+  assert.strictEqual(log.err.code, 'TEST_CODE');
+  assert.ok(log.err.stack);
+}
+
+// Custom serializer
+{
+  const stream = new TestStream();
+  const consumer = new JSONConsumer({ stream, level: 'info' });
+  consumer.attach();
+
+  const logger = new Logger({
+    level: 'info',
+    serializers: {
+      user: (user) => ({
+        id: user.id,
+        name: user.name,
+        // Password is filtered out
+      }),
+    },
+  });
+
+  logger.info({
+    msg: 'User action',
+    user: { id: 1, name: 'Alice', password: 'secret' },
+  });
+  consumer.flushSync();
+
+  assert.strictEqual(stream.logs.length, 1);
+  const log = stream.logs[0];
+  assert.strictEqual(log.user.id, 1);
+  assert.strictEqual(log.user.name, 'Alice');
+  assert.strictEqual(log.user.password, undefined);
+}
+
+// Child logger inherits serializers
+{
+  const stream = new TestStream();
+  const consumer = new JSONConsumer({ stream, level: 'info' });
+  consumer.attach();
+
+  const parent = new Logger({
+    level: 'info',
+    serializers: {
+      a: () => 'parent-a',
+      b: () => 'parent-b',
+    },
+  });
+
+  const child = parent.child(
+    {},
+    {
+      serializers: {
+        b: () => 'child-b', // Override
+        c: () => 'child-c', // New
+      },
+    }
+  );
+
+  child.info({ msg: 'Test message', a: 'test', b: 'test', c: 'test' });
+  consumer.flushSync();
+
+  const log = stream.logs[0];
+  assert.strictEqual(log.a, 'parent-a'); // Inherited
+  assert.strictEqual(log.b, 'child-b'); // Overridden
+  assert.strictEqual(log.c, 'child-c'); // New
+}
+
+// Standard req/res serializers
+{
+  const stream = new TestStream();
+  const consumer = new JSONConsumer({ stream, level: 'info' });
+  consumer.attach();
+
+  const logger = new Logger({
+    level: 'info',
+    serializers: {
+      req: stdSerializers.req,
+      res: stdSerializers.res,
+    },
+  });
+
+  const mockReq = {
+    method: 'GET',
+    url: '/test',
+    headers: { 'user-agent': 'test' },
+    socket: { remoteAddress: '127.0.0.1', remotePort: 12345 },
+  };
+
+  const mockRes = {
+    statusCode: 200,
+    getHeaders: () => ({ 'content-type': 'application/json' }),
+  };
+
+  logger.info({ msg: 'HTTP request', req: mockReq, res: mockRes });
+  consumer.flushSync();
+
+  const log = stream.logs[0];
+  assert.strictEqual(log.req.method, 'GET');
+  assert.strictEqual(log.req.url, '/test');
+  assert.strictEqual(log.res.statusCode, 200);
+}
+
+// Error cause serialization
+{
+  const stream = new TestStream();
+  const consumer = new JSONConsumer({ stream, level: 'info' });
+  consumer.attach();
+
+  const logger = new Logger({ level: 'info' });
+
+  const cause = new Error('Root cause');
+  const error = new Error('Main error');
+  error.cause = cause;
+
+  logger.error(error);
+  consumer.flushSync();
+
+  const log = stream.logs[0];
+  assert.strictEqual(log.err.message, 'Main error');
+  assert.strictEqual(log.err.cause.message, 'Root cause');
+  assert.ok(log.err.cause.stack);
+}
+
+// Serializer with fields parameter
+{
+  const stream = new TestStream();
+  const consumer = new JSONConsumer({ stream, level: 'info' });
+  consumer.attach();
+
+  const logger = new Logger({
+    level: 'info',
+    serializers: {
+      data: (data) => ({ sanitized: true, value: data.value }),
+    },
+  });
+
+  logger.info('Message', { data: { value: 42, secret: 'hidden' } });
+  consumer.flushSync();
+
+  const log = stream.logs[0];
+  assert.strictEqual(log.data.sanitized, true);
+  assert.strictEqual(log.data.value, 42);
+  assert.strictEqual(log.data.secret, undefined);
+}
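To exercise the new benchmarks locally (assuming a build of this branch; vs-pino.js additionally needs `pino` installed somewhere the benchmark can require it):

    ./node benchmark/logger/basic-json.js
    ./node benchmark/logger/vs-pino.js

    # Compare two builds across the whole logger suite; the binary names
    # here are placeholders for your baseline and PR builds.
    node benchmark/compare.js --old ./node-baseline --new ./node-pr logger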