diff --git a/lib/internal/test_runner/runner.js b/lib/internal/test_runner/runner.js index bb3a8868309a94..f88dbbe1c6921d 100644 --- a/lib/internal/test_runner/runner.js +++ b/lib/internal/test_runner/runner.js @@ -25,6 +25,7 @@ const { SafePromiseAllSettledReturnVoid, SafeSet, String, + StringFromCharCode, StringPrototypeIndexOf, StringPrototypeSlice, StringPrototypeStartsWith, @@ -248,6 +249,7 @@ class FileTest extends Test { #rawBuffer = []; // Raw data waiting to be parsed #rawBufferSize = 0; #reportedChildren = 0; + #pendingPartialV8Header = false; failedSubtests = false; constructor(options) { @@ -339,6 +341,12 @@ class FileTest extends Test { } parseMessage(readData) { let dataLength = TypedArrayPrototypeGetLength(readData); + if (this.#pendingPartialV8Header) { + readData = Buffer.concat([TypedArrayPrototypeSubarray(v8Header, 0, 1), readData]); + dataLength = TypedArrayPrototypeGetLength(readData); + this.#pendingPartialV8Header = false; + } + if (dataLength === 0) return; const partialV8Header = readData[dataLength - 1] === v8Header[0]; @@ -349,22 +357,52 @@ class FileTest extends Test { dataLength--; } - if (this.#rawBuffer[0] && TypedArrayPrototypeGetLength(this.#rawBuffer[0]) < kSerializedSizeHeader) { - this.#rawBuffer[0] = Buffer.concat([this.#rawBuffer[0], readData]); - } else { - ArrayPrototypePush(this.#rawBuffer, readData); + if (dataLength > 0) { + if (this.#rawBuffer[0] && TypedArrayPrototypeGetLength(this.#rawBuffer[0]) < kSerializedSizeHeader) { + this.#rawBuffer[0] = Buffer.concat([this.#rawBuffer[0], readData]); + } else { + ArrayPrototypePush(this.#rawBuffer, readData); + } + this.#rawBufferSize += dataLength; + this.#processRawBuffer(); } - this.#rawBufferSize += dataLength; - this.#processRawBuffer(); if (partialV8Header) { - ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1)); - this.#rawBufferSize++; + this.#pendingPartialV8Header = true; } } #drainRawBuffer() { + if (this.#pendingPartialV8Header) { +
ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1)); + this.#rawBufferSize++; + this.#pendingPartialV8Header = false; + } + while (this.#rawBuffer.length > 0) { + const prevBufferLength = this.#rawBuffer.length; + const prevBufferSize = this.#rawBufferSize; this.#processRawBuffer(); + + if (this.#rawBuffer.length === prevBufferLength && + this.#rawBufferSize === prevBufferSize) { + const bufferHead = this.#rawBuffer[0]; + this.addToReport({ + __proto__: null, + type: 'test:stdout', + data: { + __proto__: null, + file: this.name, + message: StringFromCharCode(bufferHead[0]), + }, + }); + + if (TypedArrayPrototypeGetLength(bufferHead) === 1) { + ArrayPrototypeShift(this.#rawBuffer); + } else { + this.#rawBuffer[0] = TypedArrayPrototypeSubarray(bufferHead, 1); + } + this.#rawBufferSize--; + } } } #processRawBuffer() { diff --git a/test/parallel/test-runner-v8-deserializer.mjs b/test/parallel/test-runner-v8-deserializer.mjs index 0f6fea1e64b58d..5e50df441da59e 100644 --- a/test/parallel/test-runner-v8-deserializer.mjs +++ b/test/parallel/test-runner-v8-deserializer.mjs @@ -14,12 +14,29 @@ async function toArray(chunks) { return arr; } -const chunks = await toArray(serializer([ - { type: 'test:diagnostic', data: { nesting: 0, details: {}, message: 'diagnostic' } }, -])); +const diagnosticEvent = { + type: 'test:diagnostic', + data: { nesting: 0, details: {}, message: 'diagnostic' }, +}; +const chunks = await toArray(serializer([diagnosticEvent])); const defaultSerializer = new DefaultSerializer(); defaultSerializer.writeHeader(); const headerLength = defaultSerializer.releaseBuffer().length; +const headerOnly = Buffer.from([0xff, 0x0f]); +const oversizedLengthHeader = Buffer.from([0xff, 0x0f, 0x7f, 0xff, 0xff, 0xff]); +const truncatedLengthHeader = Buffer.from([0xff, 0x0f, 0x00, 0x01, 0x00, 0x00]); +// Expected stdout for oversizedLengthHeader: first byte is emitted via +// String.fromCharCode (byte-by-byte fallback in #drainRawBuffer),
remaining +// bytes go through the nonSerialized UTF-8 decode path in #processRawBuffer. +const oversizedLengthStdout = String.fromCharCode(oversizedLengthHeader[0]) + + Buffer.from(oversizedLengthHeader.subarray(1)).toString('utf-8'); + +function collectStdout(reported) { + return reported + .filter((event) => event.type === 'test:stdout') + .map((event) => event.data.message) + .join(''); +} describe('v8 deserializer', common.mustCall(() => { let fileTest; @@ -56,27 +73,78 @@ describe('v8 deserializer', common.mustCall(() => { it('should deserialize a serialized chunk', async () => { const reported = await collectReported(chunks); - assert.deepStrictEqual(reported, [ - { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, - ]); + assert.deepStrictEqual(reported, [diagnosticEvent]); }); it('should deserialize a serialized chunk after non-serialized chunk', async () => { const reported = await collectReported([Buffer.concat([Buffer.from('unknown'), ...chunks])]); assert.deepStrictEqual(reported, [ { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, - { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + diagnosticEvent, ]); }); it('should deserialize a serialized chunk before non-serialized output', async () => { const reported = await collectReported([Buffer.concat([ ...chunks, Buffer.from('unknown')])]); assert.deepStrictEqual(reported, [ - { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + diagnosticEvent, { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, ]); }); + it('should not hang when buffer starts with v8Header followed by oversized length', async () => { + // Regression test for https://github.com/nodejs/node/issues/62693 + // FF 0F is the v8 serializer header; the next 4 bytes are read as a + // big-endian message size.
0x7FFFFFFF far exceeds any actual buffer + // size, causing #processRawBuffer to make no progress and + // #drainRawBuffer to loop forever without the no-progress guard. + const reported = await collectReported([oversizedLengthHeader]); + assert.partialDeepStrictEqual( + reported, + Array.from({ length: reported.length }, () => ({ type: 'test:stdout' })), + ); + assert.strictEqual(collectStdout(reported), oversizedLengthStdout); + }); + + it('should flush incomplete v8 frame as stdout and keep prior valid data', async () => { + // A valid non-serialized message followed by bytes that look like + // a v8 header with a truncated/oversized length. + const reported = await collectReported([ + Buffer.from('hello'), + truncatedLengthHeader, + ]); + assert.strictEqual(collectStdout(reported), `hello${truncatedLengthHeader.toString('latin1')}`); + }); + + it('should flush v8Header-only bytes as stdout when stream ends', async () => { + // Just the two-byte v8 header with no size field at all. + const reported = await collectReported([headerOnly]); + assert(reported.every((event) => event.type === 'test:stdout')); + assert.strictEqual(collectStdout(reported), headerOnly.toString('latin1')); + }); + + it('should resync and parse valid messages after false v8 header', async () => { + // A false v8 header (FF 0F + oversized length) followed by a + // legitimate serialized message. The parser must skip the corrupt + // bytes and still deserialize the real message.
+ const reported = await collectReported([ + oversizedLengthHeader, + ...chunks, + ]); + assert.deepStrictEqual(reported.at(-1), diagnosticEvent); + assert.strictEqual(reported.filter((event) => event.type === 'test:diagnostic').length, 1); + assert.strictEqual(collectStdout(reported), oversizedLengthStdout); + }); + + it('should preserve a false v8 header split across chunks', async () => { + const reported = await collectReported([ + oversizedLengthHeader.subarray(0, 1), + oversizedLengthHeader.subarray(1), + ]); + assert(reported.every((event) => event.type === 'test:stdout')); + assert.strictEqual(collectStdout(reported), oversizedLengthStdout); + }); + const headerPosition = headerLength * 2 + 4; for (let i = 0; i < headerPosition + 5; i++) { const message = `should deserialize a serialized message split into two chunks {...${i},${i + 1}...}`; @@ -84,7 +152,7 @@ describe('v8 deserializer', common.mustCall(() => { const data = chunks[0]; const reported = await collectReported([data.subarray(0, i), data.subarray(i)]); assert.deepStrictEqual(reported, [ - { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + diagnosticEvent, ]); }); @@ -96,7 +164,7 @@ describe('v8 deserializer', common.mustCall(() => { ]); assert.deepStrictEqual(reported, [ { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, - { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + diagnosticEvent, { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, ]); }