From 84b2e49f05d6d07397b870fee85896dd463b75a5 Mon Sep 17 00:00:00 2001 From: Moshe Atlow Date: Wed, 24 May 2023 00:03:19 +0300 Subject: [PATCH] test_runner: fix test deserialize edge cases PR-URL: https://github.com/nodejs/node/pull/48106 Fixes: https://github.com/nodejs/node/issues/48103 Reviewed-By: Colin Ihrig Reviewed-By: Benjamin Gruenbaum --- .../test_runner/reporter/v8-serializer.js | 10 +- lib/internal/test_runner/runner.js | 48 ++++++-- test/parallel/test-runner-v8-deserializer.mjs | 103 ++++++++++++++++++ 3 files changed, 147 insertions(+), 14 deletions(-) create mode 100644 test/parallel/test-runner-v8-deserializer.mjs diff --git a/lib/internal/test_runner/reporter/v8-serializer.js b/lib/internal/test_runner/reporter/v8-serializer.js index 3044b48d26fe7e..cfeedfdff378c2 100644 --- a/lib/internal/test_runner/reporter/v8-serializer.js +++ b/lib/internal/test_runner/reporter/v8-serializer.js @@ -1,5 +1,8 @@ 'use strict'; +const { + TypedArrayPrototypeGetLength, +} = primordials; const { DefaultSerializer } = require('v8'); const { Buffer } = require('buffer'); const { serializeError } = require('internal/error_serdes'); @@ -7,6 +10,8 @@ const { serializeError } = require('internal/error_serdes'); module.exports = async function* v8Reporter(source) { const serializer = new DefaultSerializer(); + serializer.writeHeader(); + const headerLength = TypedArrayPrototypeGetLength(serializer.releaseBuffer()); for await (const item of source) { const originalError = item.data.details?.error; @@ -16,6 +21,7 @@ module.exports = async function* v8Reporter(source) { // Error is restored after serialization. item.data.details.error = serializeError(originalError); } + serializer.writeHeader(); // Add 4 bytes, to later populate with message length serializer.writeRawBytes(Buffer.allocUnsafe(4)); serializer.writeHeader(); @@ -26,14 +32,14 @@ module.exports = async function* v8Reporter(source) { } const serializedMessage = serializer.releaseBuffer(); - const serializedMessageLength = serializedMessage.length - 4; + const serializedMessageLength = serializedMessage.length - (4 + headerLength); serializedMessage.set([ serializedMessageLength >> 24 & 0xFF, serializedMessageLength >> 16 & 0xFF, serializedMessageLength >> 8 & 0xFF, serializedMessageLength & 0xFF, - ], 0); + ], headerLength); yield serializedMessage; } }; diff --git a/lib/internal/test_runner/runner.js b/lib/internal/test_runner/runner.js index f0b6d315b7c402..fcaf4ded1e42a8 100644 --- a/lib/internal/test_runner/runner.js +++ b/lib/internal/test_runner/runner.js @@ -163,7 +163,8 @@ function getRunArgs({ path, inspectPort, testNamePatterns }) { const serializer = new DefaultSerializer(); serializer.writeHeader(); const v8Header = serializer.releaseBuffer(); -const kSerializedSizeHeader = 4; +const kV8HeaderLength = TypedArrayPrototypeGetLength(v8Header); +const kSerializedSizeHeader = 4 + kV8HeaderLength; class FileTest extends Test { // This class maintains two buffers: @@ -236,9 +237,12 @@ class FileTest extends Test { this.#handleReportItem(item); } reportStarted() {} - report() { + drain() { this.#drainRawBuffer(); this.#drainReportBuffer(); + } + report() { + this.drain(); const skipReporting = this.#skipReporting(); if (!skipReporting) { super.reportStarted(); @@ -246,12 +250,29 @@ class FileTest extends Test { } } parseMessage(readData) { - const dataLength = TypedArrayPrototypeGetLength(readData); + let dataLength = TypedArrayPrototypeGetLength(readData); if (dataLength === 0) return; + const partialV8Header = readData[dataLength - 
1] === v8Header[0]; + + if (partialV8Header) { + // This will break if v8Header length (2 bytes) is changed. + // However it is covered by tests. + readData = TypedArrayPrototypeSubarray(readData, 0, dataLength - 1); + dataLength--; + } - ArrayPrototypePush(this.#rawBuffer, readData); + if (this.#rawBuffer[0] && TypedArrayPrototypeGetLength(this.#rawBuffer[0]) < kSerializedSizeHeader) { + this.#rawBuffer[0] = Buffer.concat([this.#rawBuffer[0], readData]); + } else { + ArrayPrototypePush(this.#rawBuffer, readData); + } this.#rawBufferSize += dataLength; this.#proccessRawBuffer(); + + if (partialV8Header) { + ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1)); + this.#rawBufferSize++; + } } #drainRawBuffer() { while (this.#rawBuffer.length > 0) { @@ -264,16 +285,16 @@ class FileTest extends Test { let headerIndex = bufferHead.indexOf(v8Header); let nonSerialized = Buffer.alloc(0); - while (bufferHead && headerIndex !== kSerializedSizeHeader) { + while (bufferHead && headerIndex !== 0) { const nonSerializedData = headerIndex === -1 ? bufferHead : - bufferHead.slice(0, headerIndex - kSerializedSizeHeader); + bufferHead.slice(0, headerIndex); nonSerialized = Buffer.concat([nonSerialized, nonSerializedData]); this.#rawBufferSize -= TypedArrayPrototypeGetLength(nonSerializedData); if (headerIndex === -1) { ArrayPrototypeShift(this.#rawBuffer); } else { - this.#rawBuffer[0] = bufferHead.subarray(headerIndex - kSerializedSizeHeader); + this.#rawBuffer[0] = TypedArrayPrototypeSubarray(bufferHead, headerIndex); } bufferHead = this.#rawBuffer[0]; headerIndex = bufferHead?.indexOf(v8Header); @@ -295,10 +316,10 @@ class FileTest extends Test { // We call `readUInt32BE` manually here, because this is faster than first converting // it to a buffer and using `readUInt32BE` on that. 
const fullMessageSize = ( - bufferHead[0] << 24 | - bufferHead[1] << 16 | - bufferHead[2] << 8 | - bufferHead[3] + bufferHead[kV8HeaderLength] << 24 | + bufferHead[kV8HeaderLength + 1] << 16 | + bufferHead[kV8HeaderLength + 2] << 8 | + bufferHead[kV8HeaderLength + 3] ) + kSerializedSizeHeader; if (this.#rawBufferSize < fullMessageSize) break; @@ -474,4 +495,7 @@ function run(options) { return root.reporter; } -module.exports = { run }; +module.exports = { + FileTest, // Exported for tests only + run, +}; diff --git a/test/parallel/test-runner-v8-deserializer.mjs b/test/parallel/test-runner-v8-deserializer.mjs new file mode 100644 index 00000000000000..9b4447d5a24291 --- /dev/null +++ b/test/parallel/test-runner-v8-deserializer.mjs @@ -0,0 +1,103 @@ +// Flags: --expose-internals --no-warnings + +import '../common/index.mjs'; +import { describe, it, beforeEach } from 'node:test'; +import assert from 'node:assert'; +import { finished } from 'node:stream/promises'; +import { DefaultSerializer } from 'node:v8'; +import serializer from 'internal/test_runner/reporter/v8-serializer'; +import runner from 'internal/test_runner/runner'; + +async function toArray(chunks) { + const arr = []; + for await (const i of chunks) arr.push(i); + return arr; +} + +const chunks = await toArray(serializer([ + { type: 'test:diagnostic', data: { nesting: 0, details: {}, message: 'diagnostic' } }, +])); +const defaultSerializer = new DefaultSerializer(); +defaultSerializer.writeHeader(); +const headerLength = defaultSerializer.releaseBuffer().length; + +describe('v8 deserializer', () => { + let fileTest; + let reported; + beforeEach(() => { + reported = []; + fileTest = new runner.FileTest({ name: 'filetest' }); + fileTest.reporter.on('data', (data) => reported.push(data)); + assert(fileTest.isClearToSend()); + }); + + async function collectReported(chunks) { + chunks.forEach((chunk) => fileTest.parseMessage(chunk)); + fileTest.drain(); + fileTest.reporter.end(); + await finished(fileTest.reporter); + return reported; + } + + it('should do nothing when no chunks', async () => { + const reported = await collectReported([]); + assert.deepStrictEqual(reported, []); + }); + + it('should deserialize a chunk with no serialization', async () => { + const reported = await collectReported([Buffer.from('unknown')]); + assert.deepStrictEqual(reported, [ + { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, + ]); + }); + + it('should deserialize a serialized chunk', async () => { + const reported = await collectReported(chunks); + assert.deepStrictEqual(reported, [ + { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + ]); + }); + + it('should deserialize a serialized chunk after non-serialized chunk', async () => { + const reported = await collectReported([Buffer.concat([Buffer.from('unknown'), ...chunks])]); + assert.deepStrictEqual(reported, [ + { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, + { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + ]); + }); + + it('should deserialize a serialized chunk before non-serialized output', async () => { + const reported = await collectReported([Buffer.concat([ ...chunks, Buffer.from('unknown')])]); + assert.deepStrictEqual(reported, [ + { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, + ]); + }); + + const 
headerPosition = headerLength * 2 + 4; + for (let i = 0; i < headerPosition + 5; i++) { + const message = `should deserialize a serialized message split into two chunks {...${i},${i + 1}...}`; + it(message, async () => { + const data = chunks[0]; + const reported = await collectReported([data.subarray(0, i), data.subarray(i)]); + assert.deepStrictEqual(reported, [ + { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + ]); + }); + + it(`${message} wrapped by non-serialized data`, async () => { + const data = chunks[0]; + const reported = await collectReported([ + Buffer.concat([Buffer.from('unknown'), data.subarray(0, i)]), + Buffer.concat([data.subarray(i), Buffer.from('unknown')]), + ]); + assert.deepStrictEqual(reported, [ + { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, + { data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' }, + { data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' }, + ]); + } + ); + } + +});
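
For readers tracing the new wire format, the following is a minimal, self-contained sketch (plain Node.js, not the internal reporter module; `frame` and `unframe` are hypothetical helper names used only for illustration) of the framing that v8-serializer.js emits after this patch: a leading v8 serialization header used as a message marker, a 4-byte big-endian payload length, and then the v8-serialized payload, which carries its own header. Only `DefaultSerializer`, `DefaultDeserializer`, `writeHeader`, `writeRawBytes`, `writeValue`, and `releaseBuffer` are real `node:v8` APIs.

'use strict';
const { DefaultSerializer, DefaultDeserializer } = require('node:v8');
const { Buffer } = require('node:buffer');

// The v8 serialization header doubles as the message marker.
const headerProbe = new DefaultSerializer();
headerProbe.writeHeader();
const v8Header = headerProbe.releaseBuffer();
const headerLength = v8Header.length; // currently 2 bytes

function frame(value) {
  const s = new DefaultSerializer();
  s.writeHeader();                        // marker header at offset 0
  s.writeRawBytes(Buffer.allocUnsafe(4)); // placeholder for the payload length
  s.writeHeader();                        // header of the serialized payload itself
  s.writeValue(value);
  const message = s.releaseBuffer();
  // Length excludes the marker header and the 4-byte size field, matching the patch.
  message.writeUInt32BE(message.length - (4 + headerLength), headerLength);
  return message;
}

function unframe(message) {
  const payloadLength = message.readUInt32BE(headerLength);
  const payload = message.subarray(headerLength + 4, headerLength + 4 + payloadLength);
  const d = new DefaultDeserializer(payload);
  d.readHeader();
  return d.readValue();
}

// Round trip one message.
console.log(unframe(frame({ type: 'test:diagnostic', message: 'diagnostic' })));

Prepending the marker header means a complete message now always starts with the v8 header, which is why the reader in runner.js can check `headerIndex !== 0` instead of expecting the header to sit 4 bytes into the message as the old `headerIndex !== kSerializedSizeHeader` check did.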
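
On the runner side, these messages have to be reassembled from a stream that may split them at any byte, which is exactly what the new test exercises by cutting the serialized chunk at every offset. A simplified reader that only has to cope with arbitrary splits can keep a single pending buffer, as in the sketch below; `createMessageParser` is a hypothetical helper, and it reuses `v8Header`, `headerLength`, `frame`, and `unframe` from the sketch above. The extra `partialV8Header` bookkeeping in runner.js exists because the real code also passes non-serialized output straight through as `test:stdout`, so a chunk that ends with only the first byte of the two-byte v8 header (the case the patch's in-line comment warns about) has to be held back and prepended to the next chunk instead of being misreported as stdout.

// Continues the sketch above: emit every complete framed message as chunks arrive.
function createMessageParser(onMessage) {
  let pending = Buffer.alloc(0);
  return function push(chunk) {
    pending = Buffer.concat([pending, chunk]);
    // Drain every complete message currently buffered.
    while (pending.length >= headerLength + 4 &&
           pending.subarray(0, headerLength).equals(v8Header)) {
      const total = headerLength + 4 + pending.readUInt32BE(headerLength);
      if (pending.length < total) break; // message still incomplete, wait for more data
      onMessage(unframe(pending.subarray(0, total)));
      pending = pending.subarray(total);
    }
  };
}

// Usage: feed one framed message split at an arbitrary offset.
const push = createMessageParser((msg) => console.log(msg));
const data = frame({ nesting: 0, details: {}, message: 'diagnostic' });
push(data.subarray(0, 5));
push(data.subarray(5));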