Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 45 additions & 8 deletions lib/internal/test_runner/runner.js
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,7 @@ class FileTest extends Test {
#rawBuffer = []; // Raw data waiting to be parsed
#rawBufferSize = 0;
#reportedChildren = 0;
#pendingPartialV8Header = false;
failedSubtests = false;

constructor(options) {
Expand Down Expand Up @@ -339,6 +340,12 @@ class FileTest extends Test {
}
parseMessage(readData) {
let dataLength = TypedArrayPrototypeGetLength(readData);
if (this.#pendingPartialV8Header) {
readData = Buffer.concat([TypedArrayPrototypeSubarray(v8Header, 0, 1), readData]);
dataLength = TypedArrayPrototypeGetLength(readData);
this.#pendingPartialV8Header = false;
}

if (dataLength === 0) return;
const partialV8Header = readData[dataLength - 1] === v8Header[0];

Expand All @@ -349,22 +356,52 @@ class FileTest extends Test {
dataLength--;
}

if (this.#rawBuffer[0] && TypedArrayPrototypeGetLength(this.#rawBuffer[0]) < kSerializedSizeHeader) {
this.#rawBuffer[0] = Buffer.concat([this.#rawBuffer[0], readData]);
} else {
ArrayPrototypePush(this.#rawBuffer, readData);
if (dataLength > 0) {
if (this.#rawBuffer[0] && TypedArrayPrototypeGetLength(this.#rawBuffer[0]) < kSerializedSizeHeader) {
this.#rawBuffer[0] = Buffer.concat([this.#rawBuffer[0], readData]);
} else {
ArrayPrototypePush(this.#rawBuffer, readData);
}
this.#rawBufferSize += dataLength;
this.#processRawBuffer();
}
this.#rawBufferSize += dataLength;
this.#processRawBuffer();

if (partialV8Header) {
ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1));
this.#rawBufferSize++;
this.#pendingPartialV8Header = true;
}
}
#drainRawBuffer() {
if (this.#pendingPartialV8Header) {
ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1));
this.#rawBufferSize++;
this.#pendingPartialV8Header = false;
}

while (this.#rawBuffer.length > 0) {
const prevBufferLength = this.#rawBuffer.length;
const prevBufferSize = this.#rawBufferSize;
this.#processRawBuffer();

if (this.#rawBuffer.length === prevBufferLength &&
this.#rawBufferSize === prevBufferSize) {
const bufferHead = this.#rawBuffer[0];
this.addToReport({
__proto__: null,
type: 'test:stdout',
data: {
__proto__: null,
file: this.name,
message: TypedArrayPrototypeSubarray(bufferHead, 0, 1).toString('utf-8'),
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If we're dealing with a single-byte UTF-8 char, we don't need the intermediate view I think

Suggested change
message: TypedArrayPrototypeSubarray(bufferHead, 0, 1).toString('utf-8'),
message: StringFromCharCode(bufferHead[0]),

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

it changes the output semantics here String.fromCharCode(0xff) gives "ÿ" but Buffer.from([0xff]).toString('utf8') gives "�" which matches the current stdout path so I’m keeping the UTF-8 decode

Copy link
Copy Markdown
Contributor

@aduh95 aduh95 Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

String.fromCodePoint then – but I'm surprised we would get non-ASCII chars here

},
});

if (TypedArrayPrototypeGetLength(bufferHead) === 1) {
ArrayPrototypeShift(this.#rawBuffer);
} else {
this.#rawBuffer[0] = TypedArrayPrototypeSubarray(bufferHead, 1);
}
this.#rawBufferSize--;
}
}
}
#processRawBuffer() {
Expand Down
83 changes: 73 additions & 10 deletions test/parallel/test-runner-v8-deserializer.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,24 @@ async function toArray(chunks) {
return arr;
}

const chunks = await toArray(serializer([
{ type: 'test:diagnostic', data: { nesting: 0, details: {}, message: 'diagnostic' } },
]));
// Canonical event used across the tests; serialized once into `chunks`.
const diagnosticEvent = {
type: 'test:diagnostic',
data: { nesting: 0, details: {}, message: 'diagnostic' },
};
const chunks = await toArray(serializer([diagnosticEvent]));
// Measure the byte length of the v8 serialization version header (FF 0F).
const defaultSerializer = new DefaultSerializer();
defaultSerializer.writeHeader();
const headerLength = defaultSerializer.releaseBuffer().length;
// A bare v8 version header with no size field after it.
const headerOnly = Buffer.from([0xff, 0x0f]);
// v8 header followed by a big-endian payload size (0x7FFFFFFF) far larger
// than anything the stream will ever deliver.
const oversizedLengthHeader = Buffer.from([0xff, 0x0f, 0x7f, 0xff, 0xff, 0xff]);
// v8 header declaring a 0x00010000-byte payload that never arrives.
const truncatedLengthHeader = Buffer.from([0xff, 0x0f, 0x00, 0x01, 0x00, 0x00]);

// Concatenate, in report order, the message payloads of every
// 'test:stdout' event in `reported`; non-stdout events are ignored.
function collectStdout(reported) {
  let combined = '';
  for (const event of reported) {
    if (event.type === 'test:stdout') {
      combined += event.data.message;
    }
  }
  return combined;
}

describe('v8 deserializer', common.mustCall(() => {
let fileTest;
Expand Down Expand Up @@ -56,35 +68,86 @@ describe('v8 deserializer', common.mustCall(() => {

it('should deserialize a serialized chunk', async () => {
const reported = await collectReported(chunks);
assert.deepStrictEqual(reported, [
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
]);
assert.deepStrictEqual(reported, [diagnosticEvent]);
});

it('should deserialize a serialized chunk after non-serialized chunk', async () => {
const reported = await collectReported([Buffer.concat([Buffer.from('unknown'), ...chunks])]);
assert.deepStrictEqual(reported, [
{ data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' },
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
diagnosticEvent,
]);
});

it('should deserialize a serialized chunk before non-serialized output', async () => {
const reported = await collectReported([Buffer.concat([ ...chunks, Buffer.from('unknown')])]);
assert.deepStrictEqual(reported, [
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
diagnosticEvent,
{ data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' },
]);
});

it('should not hang when buffer starts with v8Header followed by oversized length', async () => {
// Regression test for https://github.com/nodejs/node/issues/62693
// FF 0F is the v8 serializer header; the next 4 bytes are read as a
// big-endian message size. 0x7FFFFFFF far exceeds any actual buffer
// size, causing #processRawBuffer to make no progress and
// #drainRawBuffer to loop forever without the no-progress guard.
const reported = await collectReported([oversizedLengthHeader]);
// Nothing can be deserialized, so every event must be stdout, and the
// concatenated stdout must round-trip the original bytes unchanged.
assert.partialDeepStrictEqual(
reported,
Array.from({ length: reported.length }, () => ({ type: 'test:stdout' })),
);
assert.strictEqual(collectStdout(reported), oversizedLengthHeader.toString('utf8'));
});

it('should flush incomplete v8 frame as stdout and keep prior valid data', async () => {
// A valid non-serialized message followed by bytes that look like
// a v8 header with a truncated/oversized length.
// The plain-text prefix and the unparsable frame bytes must both be
// surfaced as stdout, in order, with no bytes dropped.
const reported = await collectReported([
Buffer.from('hello'),
truncatedLengthHeader,
]);
assert.strictEqual(collectStdout(reported), `hello${truncatedLengthHeader.toString('utf8')}`);
});

it('should flush v8Header-only bytes as stdout when stream ends', async () => {
// Just the two-byte v8 header with no size field at all.
// On drain, the held-back header byte must be restored and the whole
// two bytes reported as stdout rather than silently discarded.
const reported = await collectReported([headerOnly]);
assert(reported.every((event) => event.type === 'test:stdout'));
assert.strictEqual(collectStdout(reported), headerOnly.toString('utf8'));
});

it('should resync and parse valid messages after false v8 header', async () => {
// A false v8 header (FF 0F + oversized length) followed by a
// legitimate serialized message. The parser must skip the corrupt
// bytes and still deserialize the real message.
const reported = await collectReported([
oversizedLengthHeader,
...chunks,
]);
// Exactly one diagnostic event comes through, as the final event, and
// the corrupt prefix is preserved byte-for-byte as stdout.
assert.deepStrictEqual(reported.at(-1), diagnosticEvent);
assert.strictEqual(reported.filter((event) => event.type === 'test:diagnostic').length, 1);
assert.strictEqual(collectStdout(reported), oversizedLengthHeader.toString('utf8'));
});

it('should preserve a false v8 header split across chunks', async () => {
// The first chunk ends on the header's first byte (FF), which
// parseMessage holds back as a pending partial header; the second
// chunk must be re-joined with it so no byte is lost or duplicated.
const reported = await collectReported([
oversizedLengthHeader.subarray(0, 1),
oversizedLengthHeader.subarray(1),
]);
assert(reported.every((event) => event.type === 'test:stdout'));
assert.strictEqual(collectStdout(reported), oversizedLengthHeader.toString('utf8'));
});

const headerPosition = headerLength * 2 + 4;
for (let i = 0; i < headerPosition + 5; i++) {
const message = `should deserialize a serialized message split into two chunks {...${i},${i + 1}...}`;
it(message, async () => {
const data = chunks[0];
const reported = await collectReported([data.subarray(0, i), data.subarray(i)]);
assert.deepStrictEqual(reported, [
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
diagnosticEvent,
]);
});

Expand All @@ -96,7 +159,7 @@ describe('v8 deserializer', common.mustCall(() => {
]);
assert.deepStrictEqual(reported, [
{ data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' },
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
diagnosticEvent,
{ data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' },
]);
}
Expand Down
Loading