Skip to content

Commit f2d9074

Browse files
committed
logger: address review feedback
1 parent 95d2f3c commit f2d9074

3 files changed

Lines changed: 247 additions & 17 deletions

File tree

doc/api/logger.md

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -518,8 +518,19 @@ added: REPLACEME
518518
519519
* `options` {Object}
520520
* `level` {string} Minimum log level to consume. **Default:** `'info'`.
521-
* `stream` {number|string|Object} Output destination. Can be a file
522-
descriptor (number), file path (string), or a writable stream object.
521+
* `stream` {number|string|Object} Output destination. One of:
522+
523+
* A file descriptor (number).
524+
* A file path (string).
525+
* A stream-like object implementing all of the following methods:
526+
* `write(chunk)`
527+
* `flush(callback)`
528+
* `flushSync()`
529+
* `end()`
530+
531+
A plain `stream.Writable` (e.g. from `fs.createWriteStream()`) does not
532+
satisfy this contract because it lacks `flush()`/`flushSync()`; wrap it
533+
or use a stream that implements the required methods.
523534
**Default:** `stdout` (fd 1).
524535
* `fields` {Object} Additional fields to include in every log record.
525536
**Default:** `{}`.

lib/logger.js

Lines changed: 44 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,10 @@ const LEVELS = {
6565

6666
const LEVEL_NAMES = ObjectKeys(LEVELS);
6767

68+
function isReservedKey(key) {
69+
return key === 'level' || key === 'time' || key === 'msg';
70+
}
71+
6872
// Noop function for disabled log levels
6973
function noop() {}
7074

@@ -76,13 +80,15 @@ class LogConsumer {
7680
/** @type {number} */
7781
#levelValue;
7882
#handlers;
83+
#attached;
7984

8085
constructor(options = kEmptyObject) {
8186
validateObject(options, 'options');
8287
const { level = 'info' } = options;
8388
validateOneOf(level, 'options.level', LEVEL_NAMES);
8489
this.#levelValue = LEVELS[level];
8590
this.#handlers = { __proto__: null };
91+
this.#attached = false;
8692

8793
// Setup level-specific enabled properties for typo safety
8894
// Allows consumer.info.enabled instead of consumer.enabled('info')
@@ -97,6 +103,10 @@ class LogConsumer {
97103
* Attach this consumer to log channels
98104
*/
99105
attach() {
106+
if (this.#attached) {
107+
return;
108+
}
109+
this.#attached = true;
100110
for (const level of LEVEL_NAMES) {
101111
if (this[level].enabled) {
102112
const handler = this.handle.bind(this);
@@ -110,6 +120,10 @@ class LogConsumer {
110120
* Detach this consumer from log channels
111121
*/
112122
detach() {
123+
if (!this.#attached) {
124+
return;
125+
}
126+
this.#attached = false;
113127
for (const level of LEVEL_NAMES) {
114128
if (this.#handlers[level]) {
115129
channels[level].unsubscribe(this.#handlers[level]);
@@ -164,14 +178,18 @@ class JSONConsumer extends LogConsumer {
164178
return new Utf8Stream({ dest: stream });
165179
}
166180

167-
// Object: custom stream with write method
168-
if (typeof stream?.write === 'function') {
181+
if (stream !== null && typeof stream === 'object' &&
182+
typeof stream.write === 'function' &&
183+
typeof stream.flush === 'function' &&
184+
typeof stream.flushSync === 'function' &&
185+
typeof stream.end === 'function') {
169186
return stream;
170187
}
171188

172189
throw new ERR_INVALID_ARG_TYPE(
173190
'options.stream',
174-
['number', 'string', 'Utf8Stream', 'object with write method'],
191+
['number', 'string', 'Utf8Stream',
192+
'Object with write, flush, flushSync, and end methods'],
175193
stream,
176194
);
177195
}
@@ -181,19 +199,28 @@ class JSONConsumer extends LogConsumer {
181199
// record.level is trusted internal value, record.msg is user input and must be escaped
182200
let json = `{"level":"${record.level}","time":${record.time},"msg":${JSONStringify(record.msg)}`;
183201

184-
// Add consumer fields
185202
const consumerFields = this.#fields;
186-
for (const key of ObjectKeys(consumerFields)) {
187-
json += `,${JSONStringify(key)}:${JSONStringify(consumerFields[key])}`;
203+
const consumerKeys = ObjectKeys(consumerFields);
204+
for (let i = 0; i < consumerKeys.length; i++) {
205+
const key = consumerKeys[i];
206+
if (isReservedKey(key)) continue;
207+
const value = consumerFields[key];
208+
if (value === undefined) continue;
209+
json += `,${JSONStringify(key)}:${JSONStringify(value)}`;
188210
}
189211

190212
// Add pre-serialized bindings
191213
json += record.bindingsStr;
192214

193215
// Add log fields
194216
const fields = record.fields;
195-
for (const key in fields) {
196-
json += `,${JSONStringify(key)}:${JSONStringify(fields[key])}`;
217+
const fieldKeys = ObjectKeys(fields);
218+
for (let i = 0; i < fieldKeys.length; i++) {
219+
const key = fieldKeys[i];
220+
if (isReservedKey(key)) continue;
221+
const value = fields[key];
222+
if (value === undefined) continue;
223+
json += `,${JSONStringify(key)}:${JSONStringify(value)}`;
197224
}
198225

199226
json += '}\n';
@@ -294,8 +321,11 @@ class Logger {
294321

295322
let result = '';
296323
const keys = ObjectKeys(bindings);
297-
for (const key of keys) {
324+
for (let i = 0; i < keys.length; i++) {
325+
const key = keys[i];
326+
if (isReservedKey(key)) continue;
298327
const serialized = this.#serializeValue(bindings[key], key);
328+
if (serialized === undefined) continue;
299329
result += `,${JSONStringify(key)}:${JSONStringify(serialized)}`;
300330
}
301331
return result;
@@ -477,20 +507,19 @@ class Logger {
477507
return;
478508
}
479509

480-
if (typeof msgOrObj === 'string') {
481-
if (fields !== undefined) {
482-
validateObject(fields, 'fields');
483-
}
484-
} else if (!isNativeError(msgOrObj)) {
510+
if (typeof msgOrObj !== 'string' && !isNativeError(msgOrObj)) {
485511
validateObject(msgOrObj, 'obj');
486512
validateString(msgOrObj.msg, 'obj.msg');
487513
}
514+
if (fields !== undefined) {
515+
validateObject(fields, 'fields');
516+
}
488517

489518
let msg;
490519
let logFields;
491520

492521
if (isNativeError(msgOrObj)) {
493-
msg = msgOrObj.message;
522+
msg = msgOrObj.message ?? '';
494523
logFields = {
495524
__proto__: null,
496525
err: this.#serializers.err(msgOrObj),

test/parallel/test-log-basic.js

Lines changed: 190 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -562,3 +562,193 @@ describe('JSONConsumer flush and end', () => {
562562
consumer.end();
563563
});
564564
});
565+
566+
describe('LogConsumer attach/detach', () => {
567+
it('should be idempotent when attach() is called twice', () => {
568+
const stream = new TestStream();
569+
const consumer = new JSONConsumer({ stream, level: 'info' });
570+
const logger = new Logger({ level: 'info' });
571+
572+
consumer.attach();
573+
consumer.attach();
574+
try {
575+
logger.info('once');
576+
consumer.flushSync();
577+
assert.strictEqual(stream.logs.length, 1);
578+
} finally {
579+
consumer.detach();
580+
}
581+
});
582+
583+
it('should fully detach after a double-attach', () => {
584+
const stream = new TestStream();
585+
const consumer = new JSONConsumer({ stream, level: 'info' });
586+
const logger = new Logger({ level: 'info' });
587+
588+
consumer.attach();
589+
consumer.attach();
590+
consumer.detach();
591+
592+
logger.info('should not be captured');
593+
consumer.flushSync();
594+
assert.strictEqual(stream.logs.length, 0);
595+
});
596+
597+
it('should no-op on detach() without prior attach()', () => {
598+
const stream = new TestStream();
599+
const consumer = new JSONConsumer({ stream, level: 'info' });
600+
consumer.detach();
601+
});
602+
});
603+
604+
describe('JSONConsumer undefined handling', () => {
605+
it('should skip undefined consumer fields to keep JSON valid', () => {
606+
const stream = new TestStream();
607+
const consumer = new JSONConsumer({
608+
stream,
609+
level: 'info',
610+
fields: { service: 'api', region: undefined },
611+
});
612+
consumer.attach();
613+
const logger = new Logger({ level: 'info' });
614+
try {
615+
logger.info('ok');
616+
consumer.flushSync();
617+
assert.strictEqual(stream.logs.length, 1);
618+
assert.strictEqual(stream.logs[0].service, 'api');
619+
assert.ok(!('region' in stream.logs[0]));
620+
} finally {
621+
consumer.detach();
622+
}
623+
});
624+
625+
it('should skip undefined bindings to keep JSON valid', () => {
626+
const stream = new TestStream();
627+
const consumer = new JSONConsumer({ stream, level: 'info' });
628+
consumer.attach();
629+
const logger = new Logger({
630+
level: 'info',
631+
bindings: { requestId: 'abc', traceId: undefined },
632+
});
633+
try {
634+
logger.info('ok');
635+
consumer.flushSync();
636+
assert.strictEqual(stream.logs.length, 1);
637+
assert.strictEqual(stream.logs[0].requestId, 'abc');
638+
assert.ok(!('traceId' in stream.logs[0]));
639+
} finally {
640+
consumer.detach();
641+
}
642+
});
643+
644+
it('should skip undefined log fields to keep JSON valid', () => {
645+
const stream = new TestStream();
646+
const consumer = new JSONConsumer({ stream, level: 'info' });
647+
consumer.attach();
648+
const logger = new Logger({ level: 'info' });
649+
try {
650+
logger.info('ok', { userId: 1, nickname: undefined });
651+
consumer.flushSync();
652+
assert.strictEqual(stream.logs.length, 1);
653+
assert.strictEqual(stream.logs[0].userId, 1);
654+
assert.ok(!('nickname' in stream.logs[0]));
655+
} finally {
656+
consumer.detach();
657+
}
658+
});
659+
});
660+
661+
describe('JSONConsumer reserved keys', () => {
662+
it('should not let consumer fields override level/time/msg', () => {
663+
const stream = new TestStream();
664+
const consumer = new JSONConsumer({
665+
stream,
666+
level: 'info',
667+
fields: { level: 'fake', time: 0, msg: 'fake', service: 'api' },
668+
});
669+
consumer.attach();
670+
const logger = new Logger({ level: 'info' });
671+
try {
672+
logger.info('real');
673+
consumer.flushSync();
674+
const log = stream.logs[0];
675+
assert.strictEqual(log.level, 'info');
676+
assert.strictEqual(log.msg, 'real');
677+
assert.strictEqual(typeof log.time, 'number');
678+
assert.notStrictEqual(log.time, 0);
679+
assert.strictEqual(log.service, 'api');
680+
} finally {
681+
consumer.detach();
682+
}
683+
});
684+
685+
it('should not let bindings override level/time/msg', () => {
686+
const stream = new TestStream();
687+
const consumer = new JSONConsumer({ stream, level: 'info' });
688+
consumer.attach();
689+
const logger = new Logger({
690+
level: 'info',
691+
bindings: { level: 'fake', time: 0, msg: 'fake', requestId: 'r1' },
692+
});
693+
try {
694+
logger.info('real');
695+
consumer.flushSync();
696+
const log = stream.logs[0];
697+
assert.strictEqual(log.level, 'info');
698+
assert.strictEqual(log.msg, 'real');
699+
assert.notStrictEqual(log.time, 0);
700+
assert.strictEqual(log.requestId, 'r1');
701+
} finally {
702+
consumer.detach();
703+
}
704+
});
705+
706+
it('should not let log fields override level/time/msg', () => {
707+
const stream = new TestStream();
708+
const consumer = new JSONConsumer({ stream, level: 'info' });
709+
consumer.attach();
710+
const logger = new Logger({ level: 'info' });
711+
try {
712+
logger.info('real', { level: 'fake', time: 0, msg: 'fake', k: 1 });
713+
consumer.flushSync();
714+
const log = stream.logs[0];
715+
assert.strictEqual(log.level, 'info');
716+
assert.strictEqual(log.msg, 'real');
717+
assert.notStrictEqual(log.time, 0);
718+
assert.strictEqual(log.k, 1);
719+
} finally {
720+
consumer.detach();
721+
}
722+
});
723+
});
724+
725+
describe('Logger Error-branch fields validation', () => {
726+
it('should throw when fields is not an object for Error input', () => {
727+
const stream = new TestStream();
728+
const consumer = new JSONConsumer({ stream, level: 'info' });
729+
consumer.attach();
730+
const logger = new Logger({ level: 'info' });
731+
try {
732+
assert.throws(() => {
733+
logger.error(new Error('boom'), 'not-an-object');
734+
}, { code: 'ERR_INVALID_ARG_TYPE' });
735+
} finally {
736+
consumer.detach();
737+
}
738+
});
739+
});
740+
741+
describe('JSONConsumer stream interface', () => {
742+
it('should throw when object stream is missing flush/flushSync/end', () => {
743+
const badStream = { write() {}, end() {} };
744+
assert.throws(() => {
745+
new JSONConsumer({ stream: badStream, level: 'info' });
746+
}, { code: 'ERR_INVALID_ARG_TYPE' });
747+
});
748+
749+
it('should accept object stream implementing full interface', () => {
750+
const stream = new TestStream();
751+
const consumer = new JSONConsumer({ stream, level: 'info' });
752+
assert.ok(consumer);
753+
});
754+
});

0 commit comments

Comments (0)