From 78842cf213aea95a64119b18b036d0255507438c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Mon, 14 Aug 2023 15:23:37 +0000 Subject: [PATCH 01/15] crypto: remove getDefaultEncoding() Refs: https://github.com/nodejs/node/pull/47182 Refs: https://github.com/nodejs/node/pull/47869 Refs: https://github.com/nodejs/node/pull/47943 Refs: https://github.com/nodejs/node/pull/47998 Refs: https://github.com/nodejs/node/pull/49140 Refs: https://github.com/nodejs/node/pull/49145 Refs: https://github.com/nodejs/node/pull/49167 Refs: https://github.com/nodejs/node/pull/49169 PR-URL: https://github.com/nodejs/node/pull/49170 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Filip Skokan Reviewed-By: Matteo Collina Reviewed-By: Luigi Pinca --- lib/internal/crypto/util.js | 6 ------ lib/internal/streams/lazy_transform.js | 6 +----- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/lib/internal/crypto/util.js b/lib/internal/crypto/util.js index cf044e804ad05a..51ca3f4c056fb9 100644 --- a/lib/internal/crypto/util.js +++ b/lib/internal/crypto/util.js @@ -75,11 +75,6 @@ const { const kHandle = Symbol('kHandle'); const kKeyObject = Symbol('kKeyObject'); -// TODO(tniessen): remove all call sites and this function -function getDefaultEncoding() { - return 'buffer'; -} - // This is here because many functions accepted binary strings without // any explicit encoding in older versions of node, and we don't want // to break them unnecessarily. @@ -555,7 +550,6 @@ module.exports = { getCiphers, getCurves, getDataViewOrTypedArrayBuffer, - getDefaultEncoding, getHashes, kHandle, kKeyObject, diff --git a/lib/internal/streams/lazy_transform.js b/lib/internal/streams/lazy_transform.js index d9d1407a819594..204ad456cd64b3 100644 --- a/lib/internal/streams/lazy_transform.js +++ b/lib/internal/streams/lazy_transform.js @@ -11,10 +11,6 @@ const { const stream = require('stream'); -const { - getDefaultEncoding, -} = require('internal/crypto/util'); - module.exports = LazyTransform; function LazyTransform(options) { @@ -29,7 +25,7 @@ function makeGetter(name) { this._writableState.decodeStrings = false; if (!this._options || !this._options.defaultEncoding) { - this._writableState.defaultEncoding = getDefaultEncoding(); + this._writableState.defaultEncoding = 'buffer'; // TODO(tniessen): remove } return this[name]; From cb4a6fafe656bd497f51d698ec8870fcb8eb90fb Mon Sep 17 00:00:00 2001 From: Hyunjin Kim Date: Sun, 20 Aug 2023 01:15:22 +0900 Subject: [PATCH 02/15] doc: use same name in the doc as in the code Refs: https://streams.spec.whatwg.org/#bytelengthqueuingstrategy PR-URL: https://github.com/nodejs/node/pull/49216 Reviewed-By: Antoine du Hamel Reviewed-By: Deokjin Kim Reviewed-By: Luigi Pinca --- doc/api/webstreams.md | 8 ++++---- lib/internal/webstreams/queuingstrategies.js | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/api/webstreams.md b/doc/api/webstreams.md index 0c7ac530d21efa..9aadd9ebedb31d 100644 --- a/doc/api/webstreams.md +++ b/doc/api/webstreams.md @@ -1219,13 +1219,13 @@ changes: description: This class is now exposed on the global object. --> -#### `new ByteLengthQueuingStrategy(options)` +#### `new ByteLengthQueuingStrategy(init)` -* `options` {Object} +* `init` {Object} * `highWaterMark` {number} #### `byteLengthQueuingStrategy.highWaterMark` @@ -1256,13 +1256,13 @@ changes: description: This class is now exposed on the global object. 
--> -#### `new CountQueuingStrategy(options)` +#### `new CountQueuingStrategy(init)` -* `options` {Object} +* `init` {Object} * `highWaterMark` {number} #### `countQueuingStrategy.highWaterMark` diff --git a/lib/internal/webstreams/queuingstrategies.js b/lib/internal/webstreams/queuingstrategies.js index 8fbc87642ebc0c..cb2adefdacaee9 100644 --- a/lib/internal/webstreams/queuingstrategies.js +++ b/lib/internal/webstreams/queuingstrategies.js @@ -69,7 +69,7 @@ class ByteLengthQueuingStrategy { constructor(init) { validateObject(init, 'init'); if (init.highWaterMark === undefined) - throw new ERR_MISSING_OPTION('options.highWaterMark'); + throw new ERR_MISSING_OPTION('init.highWaterMark'); // The highWaterMark value is not checked until the strategy // is actually used, per the spec. @@ -121,7 +121,7 @@ class CountQueuingStrategy { constructor(init) { validateObject(init, 'init'); if (init.highWaterMark === undefined) - throw new ERR_MISSING_OPTION('options.highWaterMark'); + throw new ERR_MISSING_OPTION('init.highWaterMark'); // The highWaterMark value is not checked until the strategy // is actually used, per the spec. From 41a3a1daa28ba1431fe3e7d2c0d15f6e9a816b95 Mon Sep 17 00:00:00 2001 From: Jacob Smith <3012099+JakobJingleheimer@users.noreply.github.com> Date: Sat, 19 Aug 2023 18:45:02 +0200 Subject: [PATCH 03/15] doc: caveat unavailability of `import.meta.resolve` in custom loaders PR-URL: https://github.com/nodejs/node/pull/49242 Reviewed-By: Antoine du Hamel Reviewed-By: Geoffrey Booth --- doc/api/esm.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/api/esm.md b/doc/api/esm.md index 34791f89e0c845..476d076deb3239 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -376,6 +376,9 @@ behind the `--experimental-import-meta-resolve` flag: * `parent` {string|URL} An optional absolute parent module URL to resolve from. +> **Caveat** This feature is not available within custom loaders (it would +> create a deadlock). + ## Interoperability with CommonJS ### `import` statements From 502629376cbc8b8203eba3f62b24416603118de8 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Sat, 19 Aug 2023 17:44:21 -0400 Subject: [PATCH 04/15] build: expand when we run internet tests Refs: https://github.com/nodejs/node/issues/49203 Changes slipped into v18.x regressed test/internet/test-dns-ipv6 as I assume the action did not run because no test under test/internet was changed. Add some of the common paths that include code that might introduce failures in the internet tests. 
Signed-off-by: Michael Dawson PR-URL: https://github.com/nodejs/node/pull/49218 Reviewed-By: Ruy Adorno Reviewed-By: Yagiz Nizipli Reviewed-By: Antoine du Hamel Reviewed-By: Moshe Atlow Reviewed-By: Luigi Pinca --- .github/workflows/test-internet.yml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 1c3113ab6acdd0..dbed086da7056c 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -7,14 +7,22 @@ on: pull_request: types: [opened, synchronize, reopened, ready_for_review] - paths: [test/internet/**] + paths: + - test/internet/** + - internal/dns/** + - lib/dns.js + - lib/net.js push: branches: - main - canary - v[0-9]+.x-staging - v[0-9]+.x - paths: [test/internet/**] + paths: + - test/internet/** + - internal/dns/** + - lib/dns.js + - lib/net.js concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} From 0daea967ea517f6674e49e5745433f5acd72489f Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 20 Aug 2023 06:50:06 +0200 Subject: [PATCH 05/15] test: reduce flakiness of `test-esm-loader-hooks` PR-URL: https://github.com/nodejs/node/pull/49248 Reviewed-By: Moshe Atlow Reviewed-By: Jacob Smith Reviewed-By: Geoffrey Booth Reviewed-By: Yagiz Nizipli --- test/es-module/test-esm-loader-hooks.mjs | 16 ++++++++++------ .../es-module-loaders/hooks-initialize.mjs | 5 +++-- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/test/es-module/test-esm-loader-hooks.mjs b/test/es-module/test-esm-loader-hooks.mjs index 445ceedd968aa2..2ea0128596e25b 100644 --- a/test/es-module/test-esm-loader-hooks.mjs +++ b/test/es-module/test-esm-loader-hooks.mjs @@ -599,7 +599,7 @@ describe('Loader hooks', { concurrency: true }, () => { ` import {MessageChannel} from 'node:worker_threads'; import {register} from 'node:module'; - import {setTimeout} from 'node:timers/promises'; + import {once} from 'node:events'; const {port1, port2} = new MessageChannel(); port1.on('message', (msg) => { console.log('message', msg); @@ -610,8 +610,12 @@ describe('Loader hooks', { concurrency: true }, () => { ); console.log('register', result); - await import('node:os'); - await setTimeout(99); // delay to limit flakiness + const timeout = setTimeout(() => {}, 2**31 - 1); // to keep the process alive. 
+ await Promise.all([ + once(port1, 'message').then(() => once(port1, 'message')), + import('node:os'), + ]); + clearTimeout(timeout); port1.close(); `, ]); @@ -707,10 +711,10 @@ describe('Loader hooks', { concurrency: true }, () => { ]); assert.strictEqual(stderr, ''); - assert.deepStrictEqual(stdout.split('\n'), [ 'result 1', - 'result 2', - 'hooks initialize 1', + assert.deepStrictEqual(stdout.split('\n'), [ 'hooks initialize 1', + 'result 1', 'hooks initialize 2', + 'result 2', '' ]); assert.strictEqual(code, 0); assert.strictEqual(signal, null); diff --git a/test/fixtures/es-module-loaders/hooks-initialize.mjs b/test/fixtures/es-module-loaders/hooks-initialize.mjs index 646be145503134..ab6f2c50d146e3 100644 --- a/test/fixtures/es-module-loaders/hooks-initialize.mjs +++ b/test/fixtures/es-module-loaders/hooks-initialize.mjs @@ -1,7 +1,8 @@ +import { writeFileSync } from 'node:fs'; + let counter = 0; export async function initialize() { - counter += 1; - console.log('hooks initialize', counter); + writeFileSync(1, `hooks initialize ${++counter}\n`); return counter; } From 2557932db2392e05d684905155123fac224c9cc9 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 20 Aug 2023 07:08:22 +0200 Subject: [PATCH 06/15] esm: align sync and async load implementations Refs: https://github.com/nodejs/node/pull/48272 PR-URL: https://github.com/nodejs/node/pull/49152 Refs: https://github.com/nodejs/node/pull/47999 Reviewed-By: Geoffrey Booth Reviewed-By: Jacob Smith --- lib/internal/modules/esm/load.js | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/lib/internal/modules/esm/load.js b/lib/internal/modules/esm/load.js index 1998ed1dab67fb..d064296d11c463 100644 --- a/lib/internal/modules/esm/load.js +++ b/lib/internal/modules/esm/load.js @@ -70,25 +70,30 @@ async function getSource(url, context) { return { __proto__: null, responseURL, source }; } +/** + * @param {URL} url URL to the module + * @param {ESModuleContext} context used to decorate error messages + * @returns {{ responseURL: string, source: string | BufferView }} + */ function getSourceSync(url, context) { - const parsed = new URL(url); - const responseURL = url; + const { protocol, href } = url; + const responseURL = href; let source; - if (parsed.protocol === 'file:') { - source = readFileSync(parsed); - } else if (parsed.protocol === 'data:') { - const match = RegExpPrototypeExec(DATA_URL_PATTERN, parsed.pathname); + if (protocol === 'file:') { + source = readFileSync(url); + } else if (protocol === 'data:') { + const match = RegExpPrototypeExec(DATA_URL_PATTERN, url.pathname); if (!match) { - throw new ERR_INVALID_URL(url); + throw new ERR_INVALID_URL(responseURL); } const { 1: base64, 2: body } = match; source = BufferFrom(decodeURIComponent(body), base64 ? 
'base64' : 'utf8'); } else { const supportedSchemes = ['file', 'data']; - throw new ERR_UNSUPPORTED_ESM_URL_SCHEME(parsed, supportedSchemes); + throw new ERR_UNSUPPORTED_ESM_URL_SCHEME(url, supportedSchemes); } if (policy?.manifest) { - policy.manifest.assertIntegrity(parsed, source); + policy.manifest.assertIntegrity(url, source); } return { __proto__: null, responseURL, source }; } @@ -159,14 +164,18 @@ function defaultLoadSync(url, context = kEmptyObject) { source, } = context; - format ??= defaultGetFormat(new URL(url), context); + const urlInstance = new URL(url); + + throwIfUnsupportedURLScheme(urlInstance, false); + + format ??= defaultGetFormat(urlInstance, context); validateAssertions(url, format, importAssertions); if (format === 'builtin') { source = null; } else if (source == null) { - ({ responseURL, source } = getSourceSync(url, context)); + ({ responseURL, source } = getSourceSync(urlInstance, context)); } return { From 45e5ec89a66eded97401b45343fe385208678ec6 Mon Sep 17 00:00:00 2001 From: Jungku Lee Date: Sun, 20 Aug 2023 18:55:44 +0900 Subject: [PATCH 07/15] src: add a condition if the argument of `DomainToUnicode` is empty PR-URL: https://github.com/nodejs/node/pull/49097 Refs: https://github.com/nodejs/node/pull/46410 Reviewed-By: Yagiz Nizipli Reviewed-By: Deokjin Kim --- src/node_url.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/node_url.cc b/src/node_url.cc index 85147ccd1c0d59..60300d08730128 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -100,6 +100,11 @@ void BindingData::DomainToUnicode(const FunctionCallbackInfo& args) { CHECK(args[0]->IsString()); std::string input = Utf8Value(env->isolate(), args[0]).ToString(); + if (input.empty()) { + return args.GetReturnValue().Set( + String::NewFromUtf8(env->isolate(), "").ToLocalChecked()); + } + // It is important to have an initial value that contains a special scheme. // Since it will change the implementation of `set_hostname` according to URL // spec. 
From 982e7a65cc68b45a0cfe45e2d7c895276501eba1 Mon Sep 17 00:00:00 2001 From: Jungku Lee Date: Sun, 20 Aug 2023 19:04:34 +0900 Subject: [PATCH 08/15] src: use ARES_SUCCESS instead of 0 PR-URL: https://github.com/nodejs/node/pull/49048 Refs: https://github.com/nodejs/node/pull/48834 Reviewed-By: Paolo Insogna Reviewed-By: Luigi Pinca Reviewed-By: Matteo Collina Reviewed-By: Daeyeon Jeong Reviewed-By: Deokjin Kim --- src/cares_wrap.cc | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/cares_wrap.cc b/src/cares_wrap.cc index 433c5822953071..8b037356360729 100644 --- a/src/cares_wrap.cc +++ b/src/cares_wrap.cc @@ -830,62 +830,62 @@ void ChannelWrap::EnsureServers() { int AnyTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_any); - return 0; + return ARES_SUCCESS; } int ATraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_a); - return 0; + return ARES_SUCCESS; } int AaaaTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_aaaa); - return 0; + return ARES_SUCCESS; } int CaaTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, T_CAA); - return 0; + return ARES_SUCCESS; } int CnameTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_cname); - return 0; + return ARES_SUCCESS; } int MxTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_mx); - return 0; + return ARES_SUCCESS; } int NsTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_ns); - return 0; + return ARES_SUCCESS; } int TxtTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_txt); - return 0; + return ARES_SUCCESS; } int SrvTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_srv); - return 0; + return ARES_SUCCESS; } int PtrTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_ptr); - return 0; + return ARES_SUCCESS; } int NaptrTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_naptr); - return 0; + return ARES_SUCCESS; } int SoaTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_soa); - return 0; + return ARES_SUCCESS; } int AnyTraits::Parse( @@ -1381,7 +1381,7 @@ int ReverseTraits::Send(GetHostByAddrWrap* wrap, const char* name) { family, GetHostByAddrWrap::Callback, wrap->MakeCallbackPointer()); - return 0; + return ARES_SUCCESS; } int ReverseTraits::Parse( From 484ad833580c978d2530d2257af95a34970db454 Mon Sep 17 00:00:00 2001 From: Geoffrey Booth Date: Sun, 20 Aug 2023 13:52:41 -0700 Subject: [PATCH 09/15] doc: add signature for `module.register` PR-URL: https://github.com/nodejs/node/pull/49251 Reviewed-By: Yagiz Nizipli Reviewed-By: Jacob Smith Reviewed-By: Antoine du Hamel --- doc/api/esm.md | 4 ++-- doc/api/module.md | 23 +++++++++++++++++++---- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/doc/api/esm.md b/doc/api/esm.md index 476d076deb3239..55c5fe9be436b0 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -1715,14 +1715,14 @@ for ESM specifiers is [commonjs-extension-resolution-loader][]. 
[`import`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import [`initialize`]: #initialize [`module.createRequire()`]: module.md#modulecreaterequirefilename -[`module.register()`]: module.md#moduleregister +[`module.register()`]: module.md#moduleregisterspecifier-parenturl-options [`module.syncBuiltinESMExports()`]: module.md#modulesyncbuiltinesmexports [`package.json`]: packages.md#nodejs-packagejson-field-definitions [`port.postMessage`]: worker_threads.md#portpostmessagevalue-transferlist [`port.ref()`]: https://nodejs.org/dist/latest-v17.x/docs/api/worker_threads.html#portref [`port.unref()`]: https://nodejs.org/dist/latest-v17.x/docs/api/worker_threads.html#portunref [`process.dlopen`]: process.md#processdlopenmodule-filename-flags -[`register`]: module.md#moduleregister +[`register`]: module.md#moduleregisterspecifier-parenturl-options [`string`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String [`util.TextDecoder`]: util.md#class-utiltextdecoder [cjs-module-lexer]: https://github.com/nodejs/cjs-module-lexer/tree/1.2.2 diff --git a/doc/api/module.md b/doc/api/module.md index 04b4d00c04b372..3b9278e5cd96ab 100644 --- a/doc/api/module.md +++ b/doc/api/module.md @@ -80,15 +80,29 @@ isBuiltin('fs'); // true isBuiltin('wss'); // false ``` -### `module.register()` +### `module.register(specifier[, parentURL][, options])` -In addition to using the `--experimental-loader` option in the CLI, -loaders can be registered programmatically using the -`module.register()` method. +> Stability: 1.1 - Active development + +* `specifier` {string} Customization hooks to be registered; this should be the + same string that would be passed to `import()`, except that if it is relative, + it is resolved relative to `parentURL`. +* `parentURL` {string} If you want to resolve `specifier` relative to a base + URL, such as `import.meta.url`, you can pass that URL here. **Default:** + `'data:'` +* `options` {Object} + * `data` {any} Any arbitrary, cloneable JavaScript value to pass into the + [`initialize`][] hook. + * `transferList` {Object\[]} [transferrable objects][] to be passed into the + `initialize` hook. +* Returns: {any} returns whatever was returned by the `initialize` hook. + +Register a module that exports hooks that customize Node.js module resolution +and loading behavior. 
```mjs import { register } from 'node:module'; @@ -390,3 +404,4 @@ returned object contains the following keys: [`module`]: modules.md#the-module-object [module wrapper]: modules.md#the-module-wrapper [source map include directives]: https://sourcemaps.info/spec.html#h.lmz475t4mvbx +[transferrable objects]: worker_threads.md#portpostmessagevalue-transferlist From 052434a0c1f8b95ed5df488cb6484528135c8c4d Mon Sep 17 00:00:00 2001 From: Livia Medeiros Date: Tue, 22 Aug 2023 01:41:53 +0900 Subject: [PATCH 10/15] test: use `tmpdir.resolve()` PR-URL: https://github.com/nodejs/node/pull/49136 Reviewed-By: Luigi Pinca --- test/addons/symlinked-module/test.js | 2 +- test/async-hooks/test-statwatcher.js | 5 ++--- test/common/snapshot.js | 7 +++---- test/doctool/test-apilinks.mjs | 2 +- test/doctool/test-doctool-versions.mjs | 3 +-- test/embedding/test-embedding.js | 4 ++-- .../test-esm-extension-lookup-deprecation.mjs | 12 ++++++------ test/es-module/test-esm-resolve-type.mjs | 2 +- test/es-module/test-esm-symlink-main.js | 2 +- test/es-module/test-esm-windows.js | 2 +- test/fixtures/test-runner/concurrency/a.mjs | 4 ++-- test/fixtures/test-runner/concurrency/b.mjs | 4 ++-- test/fixtures/watch-mode/ipc.js | 2 +- test/internet/test-corepack-yarn-install.js | 6 +++--- test/internet/test-trace-events-dns.js | 3 +-- test/node-api/test_general/test.js | 2 +- test/node-api/test_policy/test_policy.js | 3 +-- test/pummel/test-fs-largefile.js | 3 +-- test/pummel/test-fs-readfile-tostring-fail.js | 3 +-- test/pummel/test-fs-watch-file-slow.js | 3 +-- test/pummel/test-policy-integrity-dep.js | 2 +- .../test-policy-integrity-parent-commonjs.js | 2 +- test/pummel/test-policy-integrity-parent-module.js | 2 +- ...test-policy-integrity-parent-no-package-json.js | 2 +- .../test-policy-integrity-worker-commonjs.js | 2 +- test/pummel/test-policy-integrity-worker-module.js | 2 +- ...test-policy-integrity-worker-no-package-json.js | 2 +- test/pummel/test-watch-file.js | 3 +-- test/report/test-report-writereport.js | 6 +++--- test/sequential/test-cpu-prof-dir-absolute.js | 3 +-- test/sequential/test-cpu-prof-dir-and-name.js | 2 +- test/sequential/test-cpu-prof-dir-relative.js | 3 +-- test/sequential/test-cpu-prof-dir-worker.js | 3 +-- test/sequential/test-cpu-prof-name.js | 3 +-- test/sequential/test-diagnostic-dir-cpu-prof.js | 7 +++---- test/sequential/test-diagnostic-dir-heap-prof.js | 6 +++--- .../test-http2-timeout-large-write-file.js | 3 +-- ...application-disable-experimental-sea-warning.js | 10 +++++----- .../test-single-executable-application-empty.js | 9 ++++----- .../test-single-executable-application-snapshot.js | 11 +++++------ ...single-executable-application-use-code-cache.js | 10 +++++----- .../test-single-executable-application.js | 10 +++++----- test/sequential/test-tls-session-timeout.js | 3 +-- test/sequential/test-watch-mode.mjs | 14 +++++++------- test/sequential/test-worker-prof.js | 3 +-- .../test-tick-processor-polyfill-brokenfile.js | 3 +-- test/tick-processor/tick-processor-base.js | 3 +-- test/wasi/test-wasi-stdio.js | 6 +++--- test/wasi/test-wasi-symlinks.js | 6 +++--- 49 files changed, 97 insertions(+), 118 deletions(-) diff --git a/test/addons/symlinked-module/test.js b/test/addons/symlinked-module/test.js index d47a84b98d1ed8..5a98db77771b5a 100644 --- a/test/addons/symlinked-module/test.js +++ b/test/addons/symlinked-module/test.js @@ -16,7 +16,7 @@ const tmpdir = require('../../common/tmpdir'); tmpdir.refresh(); const addonPath = path.join(__dirname, 'build', common.buildType); -const 
addonLink = path.join(tmpdir.path, 'addon'); +const addonLink = tmpdir.resolve('addon'); try { fs.symlinkSync(addonPath, addonLink, 'dir'); diff --git a/test/async-hooks/test-statwatcher.js b/test/async-hooks/test-statwatcher.js index b8651ab8e0431e..f3c0e74355eeba 100644 --- a/test/async-hooks/test-statwatcher.js +++ b/test/async-hooks/test-statwatcher.js @@ -6,15 +6,14 @@ const assert = require('assert'); const initHooks = require('./init-hooks'); const { checkInvocations } = require('./hook-checks'); const fs = require('fs'); -const path = require('path'); if (!common.isMainThread) common.skip('Worker bootstrapping works differently -> different async IDs'); tmpdir.refresh(); -const file1 = path.join(tmpdir.path, 'file1'); -const file2 = path.join(tmpdir.path, 'file2'); +const file1 = tmpdir.resolve('file1'); +const file2 = tmpdir.resolve('file2'); const onchangex = (x) => (curr, prev) => { console.log(`Watcher: ${x}`); diff --git a/test/common/snapshot.js b/test/common/snapshot.js index 3037ce45639eb9..4a46533facb6fa 100644 --- a/test/common/snapshot.js +++ b/test/common/snapshot.js @@ -2,14 +2,13 @@ const tmpdir = require('../common/tmpdir'); const { spawnSync } = require('child_process'); -const path = require('path'); const fs = require('fs'); const assert = require('assert'); function buildSnapshot(entry, env) { const child = spawnSync(process.execPath, [ '--snapshot-blob', - path.join(tmpdir.path, 'snapshot.blob'), + tmpdir.resolve('snapshot.blob'), '--build-snapshot', entry, ], { @@ -29,14 +28,14 @@ function buildSnapshot(entry, env) { assert.strictEqual(child.status, 0); - const stats = fs.statSync(path.join(tmpdir.path, 'snapshot.blob')); + const stats = fs.statSync(tmpdir.resolve('snapshot.blob')); assert(stats.isFile()); return { child, stderr, stdout }; } function runWithSnapshot(entry, env) { - const args = ['--snapshot-blob', path.join(tmpdir.path, 'snapshot.blob')]; + const args = ['--snapshot-blob', tmpdir.resolve('snapshot.blob')]; if (entry !== undefined) { args.push(entry); } diff --git a/test/doctool/test-apilinks.mjs b/test/doctool/test-apilinks.mjs index fbbfafc139b711..70b7b4ef8e21c4 100644 --- a/test/doctool/test-apilinks.mjs +++ b/test/doctool/test-apilinks.mjs @@ -19,7 +19,7 @@ fs.readdirSync(apilinks).forEach((fixture) => { const input = path.join(apilinks, fixture); const expectedContent = fs.readFileSync(`${input}on`, 'utf8'); - const outputPath = path.join(tmpdir.path, `${fixture}on`); + const outputPath = tmpdir.resolve(`${fixture}on`); execFileSync( process.execPath, [script, outputPath, input], diff --git a/test/doctool/test-doctool-versions.mjs b/test/doctool/test-doctool-versions.mjs index 10eb8467bb110c..ba5d7291064685 100644 --- a/test/doctool/test-doctool-versions.mjs +++ b/test/doctool/test-doctool-versions.mjs @@ -4,7 +4,6 @@ import tmpdir from '../common/tmpdir.js'; import assert from 'assert'; import { spawnSync } from 'child_process'; import fs from 'fs'; -import path from 'path'; import { fileURLToPath } from 'url'; import util from 'util'; @@ -29,7 +28,7 @@ const expected = [ ]; tmpdir.refresh(); -const versionsFile = path.join(tmpdir.path, 'versions.json'); +const versionsFile = tmpdir.resolve('versions.json'); debuglog(`${process.execPath} ${versionsTool} ${versionsFile}`); const opts = { cwd: tmpdir.path, encoding: 'utf8' }; const cp = spawnSync(process.execPath, [ versionsTool, versionsFile ], opts); diff --git a/test/embedding/test-embedding.js b/test/embedding/test-embedding.js index a0ac4834b566eb..5d448b78a433e8 100644 --- 
a/test/embedding/test-embedding.js +++ b/test/embedding/test-embedding.js @@ -63,7 +63,7 @@ function getReadFileCodeForPath(path) { for (const extraSnapshotArgs of [[], ['--embedder-snapshot-as-file']]) { // readSync + eval since snapshots don't support userland require() (yet) const snapshotFixture = fixtures.path('snapshot', 'echo-args.js'); - const blobPath = path.join(tmpdir.path, 'embedder-snapshot.blob'); + const blobPath = tmpdir.resolve('embedder-snapshot.blob'); const buildSnapshotArgs = [ `eval(${getReadFileCodeForPath(snapshotFixture)})`, 'arg1', 'arg2', '--embedder-snapshot-blob', blobPath, '--embedder-snapshot-create', @@ -94,7 +94,7 @@ for (const extraSnapshotArgs of [[], ['--embedder-snapshot-as-file']]) { // Create workers and vm contexts after deserialization { const snapshotFixture = fixtures.path('snapshot', 'create-worker-and-vm.js'); - const blobPath = path.join(tmpdir.path, 'embedder-snapshot.blob'); + const blobPath = tmpdir.resolve('embedder-snapshot.blob'); const buildSnapshotArgs = [ `eval(${getReadFileCodeForPath(snapshotFixture)})`, '--embedder-snapshot-blob', blobPath, '--embedder-snapshot-create', diff --git a/test/es-module/test-esm-extension-lookup-deprecation.mjs b/test/es-module/test-esm-extension-lookup-deprecation.mjs index dc391486f7edc2..393b554b3e47b2 100644 --- a/test/es-module/test-esm-extension-lookup-deprecation.mjs +++ b/test/es-module/test-esm-extension-lookup-deprecation.mjs @@ -11,7 +11,7 @@ describe('ESM in main field', { concurrency: true }, () => { before(() => tmpdir.refresh()); it('should handle fully-specified relative path without any warning', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -29,7 +29,7 @@ describe('ESM in main field', { concurrency: true }, () => { assert.strictEqual(code, 0); }); it('should handle fully-specified absolute path without any warning', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -48,7 +48,7 @@ describe('ESM in main field', { concurrency: true }, () => { }); it('should emit warning when "main" and "exports" are missing', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -65,7 +65,7 @@ describe('ESM in main field', { concurrency: true }, () => { assert.strictEqual(code, 0); }); it('should emit warning when "main" is falsy', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -83,7 +83,7 @@ describe('ESM in main field', { concurrency: true }, () => { assert.strictEqual(code, 0); }); it('should emit warning when "main" is a relative path without 
extension', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -101,7 +101,7 @@ describe('ESM in main field', { concurrency: true }, () => { assert.strictEqual(code, 0); }); it('should emit warning when "main" is an absolute path without extension', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); diff --git a/test/es-module/test-esm-resolve-type.mjs b/test/es-module/test-esm-resolve-type.mjs index 7a0527ff59e554..0f442ed569f848 100644 --- a/test/es-module/test-esm-resolve-type.mjs +++ b/test/es-module/test-esm-resolve-type.mjs @@ -26,7 +26,7 @@ const { defaultResolve: resolve } = internalResolve; -const rel = (file) => path.join(tmpdir.path, file); +const rel = (file) => tmpdir.resolve(file); const previousCwd = process.cwd(); const nmDir = rel('node_modules'); diff --git a/test/es-module/test-esm-symlink-main.js b/test/es-module/test-esm-symlink-main.js index 48b4d8bbe65daf..2be495ad7dcfb5 100644 --- a/test/es-module/test-esm-symlink-main.js +++ b/test/es-module/test-esm-symlink-main.js @@ -9,7 +9,7 @@ const fs = require('fs'); tmpdir.refresh(); const realPath = path.resolve(__dirname, '../fixtures/es-modules/symlink.mjs'); -const symlinkPath = path.resolve(tmpdir.path, 'symlink.mjs'); +const symlinkPath = tmpdir.resolve('symlink.mjs'); try { fs.symlinkSync(realPath, symlinkPath); diff --git a/test/es-module/test-esm-windows.js b/test/es-module/test-esm-windows.js index 76e016217b3ef8..e5c52226ab001d 100644 --- a/test/es-module/test-esm-windows.js +++ b/test/es-module/test-esm-windows.js @@ -15,7 +15,7 @@ const imp = (file) => { (async () => { tmpdir.refresh(); - const rel = (file) => path.join(tmpdir.path, file); + const rel = (file) => tmpdir.resolve(file); { // Load a single script const file = rel('con.mjs'); diff --git a/test/fixtures/test-runner/concurrency/a.mjs b/test/fixtures/test-runner/concurrency/a.mjs index 69954461bfbae0..a34b87e82055ad 100644 --- a/test/fixtures/test-runner/concurrency/a.mjs +++ b/test/fixtures/test-runner/concurrency/a.mjs @@ -3,9 +3,9 @@ import { setTimeout } from 'node:timers/promises'; import fs from 'node:fs/promises'; import path from 'node:path'; -await fs.writeFile(path.resolve(tmpdir.path, 'test-runner-concurrency'), 'a.mjs'); +await fs.writeFile(tmpdir.resolve('test-runner-concurrency'), 'a.mjs'); while (true) { - const file = await fs.readFile(path.resolve(tmpdir.path, 'test-runner-concurrency'), 'utf8'); + const file = await fs.readFile(tmpdir.resolve('test-runner-concurrency'), 'utf8'); if (file === 'b.mjs') { break; } diff --git a/test/fixtures/test-runner/concurrency/b.mjs b/test/fixtures/test-runner/concurrency/b.mjs index 09af543a2551eb..395cea1df47b68 100644 --- a/test/fixtures/test-runner/concurrency/b.mjs +++ b/test/fixtures/test-runner/concurrency/b.mjs @@ -4,9 +4,9 @@ import fs from 'node:fs/promises'; import path from 'node:path'; while (true) { - const file = await fs.readFile(path.resolve(tmpdir.path, 'test-runner-concurrency'), 'utf8'); + const file = await 
fs.readFile(tmpdir.resolve('test-runner-concurrency'), 'utf8'); if (file === 'a.mjs') { - await fs.writeFile(path.resolve(tmpdir.path, 'test-runner-concurrency'), 'b.mjs'); + await fs.writeFile(tmpdir.resolve('test-runner-concurrency'), 'b.mjs'); break; } await setTimeout(10); diff --git a/test/fixtures/watch-mode/ipc.js b/test/fixtures/watch-mode/ipc.js index 5881299387e5b4..d2a5a63854f8f9 100644 --- a/test/fixtures/watch-mode/ipc.js +++ b/test/fixtures/watch-mode/ipc.js @@ -3,7 +3,7 @@ const url = require('node:url'); const fs = require('node:fs'); const tmpdir = require('../../common/tmpdir'); -const tmpfile = path.join(tmpdir.path, 'file'); +const tmpfile = tmpdir.resolve('file'); fs.writeFileSync(tmpfile, ''); process.send({ 'watch:require': [path.resolve(__filename)] }); diff --git a/test/internet/test-corepack-yarn-install.js b/test/internet/test-corepack-yarn-install.js index 48a9bdb44cd75f..80c2285cc23c6c 100644 --- a/test/internet/test-corepack-yarn-install.js +++ b/test/internet/test-corepack-yarn-install.js @@ -11,11 +11,11 @@ const fixtures = require('../common/fixtures'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -const npmSandbox = path.join(tmpdir.path, 'npm-sandbox'); +const npmSandbox = tmpdir.resolve('npm-sandbox'); fs.mkdirSync(npmSandbox); -const homeDir = path.join(tmpdir.path, 'home'); +const homeDir = tmpdir.resolve('home'); fs.mkdirSync(homeDir); -const installDir = path.join(tmpdir.path, 'install-dir'); +const installDir = tmpdir.resolve('install-dir'); fs.mkdirSync(installDir); const corepackYarnPath = path.join( diff --git a/test/internet/test-trace-events-dns.js b/test/internet/test-trace-events-dns.js index 64efd541fd9d2c..c18a49bc9496c8 100644 --- a/test/internet/test-trace-events-dns.js +++ b/test/internet/test-trace-events-dns.js @@ -2,7 +2,6 @@ const common = require('../common'); const assert = require('assert'); const cp = require('child_process'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); const fs = require('fs'); const util = require('util'); @@ -57,7 +56,7 @@ for (const tr in tests) { throw new Error(`${tr}:\n${util.inspect(proc)}`); } - const file = path.join(tmpdir.path, traceFile); + const file = tmpdir.resolve(traceFile); const data = fs.readFileSync(file); const traces = JSON.parse(data.toString()).traceEvents diff --git a/test/node-api/test_general/test.js b/test/node-api/test_general/test.js index 397bb3c91f629b..c7dd70f2da5f17 100644 --- a/test/node-api/test_general/test.js +++ b/test/node-api/test_general/test.js @@ -19,7 +19,7 @@ tmpdir.refresh(); } { - const urlTestDir = path.join(tmpdir.path, 'foo%#bar'); + const urlTestDir = tmpdir.resolve('foo%#bar'); const urlTestFile = path.join(urlTestDir, path.basename(filename)); fs.mkdirSync(urlTestDir, { recursive: true }); fs.copyFileSync(filename, urlTestFile); diff --git a/test/node-api/test_policy/test_policy.js b/test/node-api/test_policy/test_policy.js index d6cb12b56cb683..428dd905c2e365 100644 --- a/test/node-api/test_policy/test_policy.js +++ b/test/node-api/test_policy/test_policy.js @@ -8,7 +8,6 @@ const tmpdir = require('../../common/tmpdir'); const { spawnSync } = require('child_process'); const crypto = require('crypto'); const fs = require('fs'); -const path = require('path'); const { pathToFileURL } = require('url'); tmpdir.refresh(); @@ -19,7 +18,7 @@ function hash(algo, body) { return h.digest('base64'); } -const policyFilepath = path.join(tmpdir.path, 'policy'); +const policyFilepath = tmpdir.resolve('policy'); const 
depFilepath = require.resolve(`./build/${common.buildType}/binding.node`); const depURL = pathToFileURL(depFilepath); diff --git a/test/pummel/test-fs-largefile.js b/test/pummel/test-fs-largefile.js index 7f2630f497b817..486f23106f21df 100644 --- a/test/pummel/test-fs-largefile.js +++ b/test/pummel/test-fs-largefile.js @@ -24,14 +24,13 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); try { - const filepath = path.join(tmpdir.path, 'large.txt'); + const filepath = tmpdir.resolve('large.txt'); const fd = fs.openSync(filepath, 'w+'); const offset = 5 * 1024 * 1024 * 1024; // 5GB const message = 'Large File'; diff --git a/test/pummel/test-fs-readfile-tostring-fail.js b/test/pummel/test-fs-readfile-tostring-fail.js index 8428f1f15a0c22..8ffe630076a52d 100644 --- a/test/pummel/test-fs-readfile-tostring-fail.js +++ b/test/pummel/test-fs-readfile-tostring-fail.js @@ -7,7 +7,6 @@ if (!common.enoughTestMem) const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const cp = require('child_process'); const kStringMaxLength = require('buffer').constants.MAX_STRING_LENGTH; if (common.isAIX && (Number(cp.execSync('ulimit -f')) * 512) < kStringMaxLength) @@ -20,7 +19,7 @@ if (!tmpdir.hasEnoughSpace(kStringMaxLength)) { common.skip(`Not enough space in ${tmpdir.path}`); } -const file = path.join(tmpdir.path, 'toobig.txt'); +const file = tmpdir.resolve('toobig.txt'); const stream = fs.createWriteStream(file, { flags: 'a', }); diff --git a/test/pummel/test-fs-watch-file-slow.js b/test/pummel/test-fs-watch-file-slow.js index c7513a18e6fa3e..c6d148df05db47 100644 --- a/test/pummel/test-fs-watch-file-slow.js +++ b/test/pummel/test-fs-watch-file-slow.js @@ -22,13 +22,12 @@ 'use strict'; require('../common'); const assert = require('assert'); -const path = require('path'); const fs = require('fs'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -const FILENAME = path.join(tmpdir.path, 'watch-me'); +const FILENAME = tmpdir.resolve('watch-me'); const TIMEOUT = 1300; let nevents = 0; diff --git a/test/pummel/test-policy-integrity-dep.js b/test/pummel/test-policy-integrity-dep.js index 4611dec65007ee..d5a23d96bc2593 100644 --- a/test/pummel/test-policy-integrity-dep.js +++ b/test/pummel/test-policy-integrity-dep.js @@ -194,7 +194,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-parent-commonjs.js b/test/pummel/test-policy-integrity-parent-commonjs.js index d19a28bea5b5ad..07eee598117ba1 100644 --- a/test/pummel/test-policy-integrity-parent-commonjs.js +++ b/test/pummel/test-policy-integrity-parent-commonjs.js @@ -194,7 +194,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-parent-module.js b/test/pummel/test-policy-integrity-parent-module.js index 42f06d83ef0326..a09243ea10f529 100644 --- a/test/pummel/test-policy-integrity-parent-module.js +++ b/test/pummel/test-policy-integrity-parent-module.js @@ -194,7 +194,7 @@ 
function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-parent-no-package-json.js b/test/pummel/test-policy-integrity-parent-no-package-json.js index dd447c9fa843e4..a6461a9a5835c3 100644 --- a/test/pummel/test-policy-integrity-parent-no-package-json.js +++ b/test/pummel/test-policy-integrity-parent-no-package-json.js @@ -194,7 +194,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-worker-commonjs.js b/test/pummel/test-policy-integrity-worker-commonjs.js index 415e33664413cc..acc4298eb7b23b 100644 --- a/test/pummel/test-policy-integrity-worker-commonjs.js +++ b/test/pummel/test-policy-integrity-worker-commonjs.js @@ -211,7 +211,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-worker-module.js b/test/pummel/test-policy-integrity-worker-module.js index 813d167844104e..65a04841415da9 100644 --- a/test/pummel/test-policy-integrity-worker-module.js +++ b/test/pummel/test-policy-integrity-worker-module.js @@ -211,7 +211,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-worker-no-package-json.js b/test/pummel/test-policy-integrity-worker-no-package-json.js index 108af1b78e0697..fc90f73a03cf31 100644 --- a/test/pummel/test-policy-integrity-worker-no-package-json.js +++ b/test/pummel/test-policy-integrity-worker-no-package-json.js @@ -211,7 +211,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-watch-file.js b/test/pummel/test-watch-file.js index bbbbf396d72227..6d55f08160a23b 100644 --- a/test/pummel/test-watch-file.js +++ b/test/pummel/test-watch-file.js @@ -24,11 +24,10 @@ require('../common'); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -const f = path.join(tmpdir.path, 'x.txt'); +const f = tmpdir.resolve('x.txt'); fs.closeSync(fs.openSync(f, 'w')); let changes = 0; diff --git a/test/report/test-report-writereport.js b/test/report/test-report-writereport.js index 971afd84c22281..fd5430a14008e0 100644 --- a/test/report/test-report-writereport.js +++ b/test/report/test-report-writereport.js @@ -50,7 +50,7 @@ function validate() { { // Test with a file argument. 
const file = process.report.writeReport('custom-name-1.json'); - const absolutePath = path.join(tmpdir.path, file); + const absolutePath = tmpdir.resolve(file); assert.strictEqual(helper.findReports(process.pid, tmpdir.path).length, 0); assert.strictEqual(file, 'custom-name-1.json'); helper.validate(absolutePath); @@ -61,7 +61,7 @@ function validate() { // Test with file and error arguments. const file = process.report.writeReport('custom-name-2.json', new Error('test error')); - const absolutePath = path.join(tmpdir.path, file); + const absolutePath = tmpdir.resolve(file); assert.strictEqual(helper.findReports(process.pid, tmpdir.path).length, 0); assert.strictEqual(file, 'custom-name-2.json'); helper.validate(absolutePath); @@ -117,7 +117,7 @@ function validate() { { // Test the case where the report file cannot be opened. - const reportDir = path.join(tmpdir.path, 'does', 'not', 'exist'); + const reportDir = tmpdir.resolve('does', 'not', 'exist'); const args = [`--report-directory=${reportDir}`, '-e', 'process.report.writeReport()']; diff --git a/test/sequential/test-cpu-prof-dir-absolute.js b/test/sequential/test-cpu-prof-dir-absolute.js index ad0842dbc4c4fc..03d7f50865b650 100644 --- a/test/sequential/test-cpu-prof-dir-absolute.js +++ b/test/sequential/test-cpu-prof-dir-absolute.js @@ -8,7 +8,6 @@ common.skipIfInspectorDisabled(); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -22,7 +21,7 @@ const { // relative --cpu-prof-dir { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--cpu-prof', '--cpu-prof-interval', diff --git a/test/sequential/test-cpu-prof-dir-and-name.js b/test/sequential/test-cpu-prof-dir-and-name.js index 7ce775ebc16973..84af5d8212065d 100644 --- a/test/sequential/test-cpu-prof-dir-and-name.js +++ b/test/sequential/test-cpu-prof-dir-and-name.js @@ -21,7 +21,7 @@ const { { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); const file = path.join(dir, 'test.cpuprofile'); const output = spawnSync(process.execPath, [ '--cpu-prof', diff --git a/test/sequential/test-cpu-prof-dir-relative.js b/test/sequential/test-cpu-prof-dir-relative.js index 2d679959efdebd..ac8c46486feae5 100644 --- a/test/sequential/test-cpu-prof-dir-relative.js +++ b/test/sequential/test-cpu-prof-dir-relative.js @@ -8,7 +8,6 @@ common.skipIfInspectorDisabled(); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -37,7 +36,7 @@ const { console.log(output.stderr.toString()); } assert.strictEqual(output.status, 0); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); assert(fs.existsSync(dir)); const profiles = getCpuProfiles(dir); assert.strictEqual(profiles.length, 1); diff --git a/test/sequential/test-cpu-prof-dir-worker.js b/test/sequential/test-cpu-prof-dir-worker.js index fe72af7416d813..22c7f79deb2fca 100644 --- a/test/sequential/test-cpu-prof-dir-worker.js +++ b/test/sequential/test-cpu-prof-dir-worker.js @@ -8,7 +8,6 @@ common.skipIfInspectorDisabled(); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -37,7 +36,7 @@ 
const { console.log(output.stderr.toString()); } assert.strictEqual(output.status, 0); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); assert(fs.existsSync(dir)); const profiles = getCpuProfiles(dir); assert.strictEqual(profiles.length, 2); diff --git a/test/sequential/test-cpu-prof-name.js b/test/sequential/test-cpu-prof-name.js index 58d9a0ec15862f..3f1c6945c5436f 100644 --- a/test/sequential/test-cpu-prof-name.js +++ b/test/sequential/test-cpu-prof-name.js @@ -8,7 +8,6 @@ const fixtures = require('../common/fixtures'); common.skipIfInspectorDisabled(); const assert = require('assert'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -22,7 +21,7 @@ const { // --cpu-prof-name { tmpdir.refresh(); - const file = path.join(tmpdir.path, 'test.cpuprofile'); + const file = tmpdir.resolve('test.cpuprofile'); const output = spawnSync(process.execPath, [ '--cpu-prof', '--cpu-prof-interval', diff --git a/test/sequential/test-diagnostic-dir-cpu-prof.js b/test/sequential/test-diagnostic-dir-cpu-prof.js index 396a6ca7de0595..75f1d86ef4b2b5 100644 --- a/test/sequential/test-diagnostic-dir-cpu-prof.js +++ b/test/sequential/test-diagnostic-dir-cpu-prof.js @@ -9,7 +9,6 @@ common.skipIfInspectorDisabled(); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -24,7 +23,7 @@ const { { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--cpu-prof', '--cpu-prof-interval', @@ -50,8 +49,8 @@ const { { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'diag'); - const dir2 = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('diag'); + const dir2 = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--cpu-prof', '--cpu-prof-interval', diff --git a/test/sequential/test-diagnostic-dir-heap-prof.js b/test/sequential/test-diagnostic-dir-heap-prof.js index 0ec68ab49efdf7..c74c075724185d 100644 --- a/test/sequential/test-diagnostic-dir-heap-prof.js +++ b/test/sequential/test-diagnostic-dir-heap-prof.js @@ -66,7 +66,7 @@ function getHeapProfiles(dir) { // Test --diagnostic-dir changes the default for --cpu-prof { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--heap-prof', '--diagnostic-dir', @@ -91,8 +91,8 @@ function getHeapProfiles(dir) { // Test --heap-prof-dir overwrites --diagnostic-dir { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'diag'); - const dir2 = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('diag'); + const dir2 = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--heap-prof', '--heap-prof-interval', diff --git a/test/sequential/test-http2-timeout-large-write-file.js b/test/sequential/test-http2-timeout-large-write-file.js index 520958bd57f6d4..a35268b6127bae 100644 --- a/test/sequential/test-http2-timeout-large-write-file.js +++ b/test/sequential/test-http2-timeout-large-write-file.js @@ -6,7 +6,6 @@ const assert = require('assert'); const fixtures = require('../common/fixtures'); const fs = require('fs'); const http2 = require('http2'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); @@ -30,7 +29,7 @@ let offsetTimeout = 
common.platformTimeout(100); let didReceiveData = false; const content = Buffer.alloc(writeSize, 0x44); -const filepath = path.join(tmpdir.path, 'http2-large-write.tmp'); +const filepath = tmpdir.resolve('http2-large-write.tmp'); fs.writeFileSync(filepath, content, 'binary'); const fd = fs.openSync(filepath, 'r'); process.on('beforeExit', () => fs.closeSync(fd)); diff --git a/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js b/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js index a20dce83988228..0b4701b07e1c54 100644 --- a/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js +++ b/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js @@ -21,10 +21,10 @@ const { strictEqual } = require('assert'); const assert = require('assert'); const inputFile = fixtures.path('sea.js'); -const requirableFile = join(tmpdir.path, 'requirable.js'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const requirableFile = tmpdir.resolve('requirable.js'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 'sea.exe' : 'sea'); tmpdir.refresh(); @@ -43,7 +43,7 @@ writeFileSync(configFile, ` `); // Copy input to working directory -copyFileSync(inputFile, join(tmpdir.path, 'sea.js')); +copyFileSync(inputFile, tmpdir.resolve('sea.js')); execFileSync(process.execPath, ['--experimental-sea-config', 'sea-config.json'], { cwd: tmpdir.path }); diff --git a/test/sequential/test-single-executable-application-empty.js b/test/sequential/test-single-executable-application-empty.js index 961ae0018368cf..13dc2e834b7caa 100644 --- a/test/sequential/test-single-executable-application-empty.js +++ b/test/sequential/test-single-executable-application-empty.js @@ -15,16 +15,15 @@ skipIfSingleExecutableIsNotSupported(); const tmpdir = require('../common/tmpdir'); const { copyFileSync, writeFileSync, existsSync } = require('fs'); const { execFileSync } = require('child_process'); -const { join } = require('path'); const assert = require('assert'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 
'sea.exe' : 'sea'); tmpdir.refresh(); -writeFileSync(join(tmpdir.path, 'empty.js'), '', 'utf-8'); +writeFileSync(tmpdir.resolve('empty.js'), '', 'utf-8'); writeFileSync(configFile, ` { "main": "empty.js", diff --git a/test/sequential/test-single-executable-application-snapshot.js b/test/sequential/test-single-executable-application-snapshot.js index d1c44b6dbab3b7..51b09cea662adf 100644 --- a/test/sequential/test-single-executable-application-snapshot.js +++ b/test/sequential/test-single-executable-application-snapshot.js @@ -14,17 +14,16 @@ skipIfSingleExecutableIsNotSupported(); const tmpdir = require('../common/tmpdir'); const { copyFileSync, writeFileSync, existsSync } = require('fs'); const { spawnSync } = require('child_process'); -const { join } = require('path'); const assert = require('assert'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 'sea.exe' : 'sea'); { tmpdir.refresh(); - writeFileSync(join(tmpdir.path, 'snapshot.js'), '', 'utf-8'); + writeFileSync(tmpdir.resolve('snapshot.js'), '', 'utf-8'); writeFileSync(configFile, ` { "main": "snapshot.js", @@ -57,7 +56,7 @@ const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : }); `; - writeFileSync(join(tmpdir.path, 'snapshot.js'), code, 'utf-8'); + writeFileSync(tmpdir.resolve('snapshot.js'), code, 'utf-8'); writeFileSync(configFile, ` { "main": "snapshot.js", diff --git a/test/sequential/test-single-executable-application-use-code-cache.js b/test/sequential/test-single-executable-application-use-code-cache.js index 6d45fcf289a772..96de5769b1fe6b 100644 --- a/test/sequential/test-single-executable-application-use-code-cache.js +++ b/test/sequential/test-single-executable-application-use-code-cache.js @@ -21,10 +21,10 @@ const { strictEqual } = require('assert'); const assert = require('assert'); const inputFile = fixtures.path('sea.js'); -const requirableFile = join(tmpdir.path, 'requirable.js'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const requirableFile = tmpdir.resolve('requirable.js'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 
'sea.exe' : 'sea'); tmpdir.refresh(); @@ -43,7 +43,7 @@ writeFileSync(configFile, ` `); // Copy input to working directory -copyFileSync(inputFile, join(tmpdir.path, 'sea.js')); +copyFileSync(inputFile, tmpdir.resolve('sea.js')); execFileSync(process.execPath, ['--experimental-sea-config', 'sea-config.json'], { cwd: tmpdir.path }); diff --git a/test/sequential/test-single-executable-application.js b/test/sequential/test-single-executable-application.js index 99d0c0d6e352dc..e930254cb0a7ae 100644 --- a/test/sequential/test-single-executable-application.js +++ b/test/sequential/test-single-executable-application.js @@ -20,10 +20,10 @@ const { strictEqual } = require('assert'); const assert = require('assert'); const inputFile = fixtures.path('sea.js'); -const requirableFile = join(tmpdir.path, 'requirable.js'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const requirableFile = tmpdir.resolve('requirable.js'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 'sea.exe' : 'sea'); tmpdir.refresh(); @@ -42,7 +42,7 @@ writeFileSync(configFile, ` `); // Copy input to working directory -copyFileSync(inputFile, join(tmpdir.path, 'sea.js')); +copyFileSync(inputFile, tmpdir.resolve('sea.js')); execFileSync(process.execPath, ['--experimental-sea-config', 'sea-config.json'], { cwd: tmpdir.path }); diff --git a/test/sequential/test-tls-session-timeout.js b/test/sequential/test-tls-session-timeout.js index 86a29eed46fe73..f0ec612b449867 100644 --- a/test/sequential/test-tls-session-timeout.js +++ b/test/sequential/test-tls-session-timeout.js @@ -45,7 +45,6 @@ function doTest() { const assert = require('assert'); const tls = require('tls'); const fs = require('fs'); - const join = require('path').join; const fixtures = require('../common/fixtures'); const spawn = require('child_process').spawn; @@ -69,7 +68,7 @@ function doTest() { const sessionFileName = (function() { const ticketFileName = 'tls-session-ticket.txt'; - const tmpPath = join(tmpdir.path, ticketFileName); + const tmpPath = tmpdir.resolve(ticketFileName); fs.writeFileSync(tmpPath, fixtures.readSync(ticketFileName)); return tmpPath; }()); diff --git a/test/sequential/test-watch-mode.mjs b/test/sequential/test-watch-mode.mjs index 38654a78a1dc7f..dbe486f5bb2991 100644 --- a/test/sequential/test-watch-mode.mjs +++ b/test/sequential/test-watch-mode.mjs @@ -117,7 +117,7 @@ describe('watch mode', { concurrency: true, timeout: 60_000 }, () => { it('should watch changes to a file with watch-path', { skip: !supportsRecursive, }, async () => { - const dir = path.join(tmpdir.path, 'subdir1'); + const dir = tmpdir.resolve('subdir1'); mkdirSync(dir); const file = createTmpFile(); const watchedFile = createTmpFile('', '.js', dir); @@ -138,7 +138,7 @@ describe('watch mode', { concurrency: true, timeout: 60_000 }, () => { it('should watch when running an non-existing file - when specified under --watch-path', { skip: !supportsRecursive }, async () => { - const dir = path.join(tmpdir.path, 'subdir2'); + const dir = tmpdir.resolve('subdir2'); mkdirSync(dir); const file = path.join(dir, 'non-existing.js'); const watchedFile = createTmpFile('', '.js', dir); @@ -156,7 +156,7 @@ describe('watch mode', { concurrency: true, timeout: 60_000 }, () => { it('should watch when running an 
non-existing file - when specified under --watch-path with equals', { skip: !supportsRecursive }, async () => { - const dir = path.join(tmpdir.path, 'subdir3'); + const dir = tmpdir.resolve('subdir3'); mkdirSync(dir); const file = path.join(dir, 'non-existing.js'); const watchedFile = createTmpFile('', '.js', dir); @@ -295,21 +295,21 @@ console.log(values.random); it('should not watch when running an missing file', { skip: !supportsRecursive }, async () => { - const nonExistingfile = path.join(tmpdir.path, `${tmpFiles++}.js`); + const nonExistingfile = tmpdir.resolve(`${tmpFiles++}.js`); await failWriteSucceed({ file: nonExistingfile, watchedFile: nonExistingfile }); }); it('should not watch when running an missing mjs file', { skip: !supportsRecursive }, async () => { - const nonExistingfile = path.join(tmpdir.path, `${tmpFiles++}.mjs`); + const nonExistingfile = tmpdir.resolve(`${tmpFiles++}.mjs`); await failWriteSucceed({ file: nonExistingfile, watchedFile: nonExistingfile }); }); it('should watch changes to previously missing dependency', { skip: !supportsRecursive }, async () => { - const dependency = path.join(tmpdir.path, `${tmpFiles++}.js`); + const dependency = tmpdir.resolve(`${tmpFiles++}.js`); const relativeDependencyPath = `./${path.basename(dependency)}`; const dependant = createTmpFile(`console.log(require('${relativeDependencyPath}'))`); @@ -320,7 +320,7 @@ console.log(values.random); skip: !supportsRecursive }, async () => { const relativeDependencyPath = `./${tmpFiles++}.mjs`; - const dependency = path.join(tmpdir.path, relativeDependencyPath); + const dependency = tmpdir.resolve(relativeDependencyPath); const dependant = createTmpFile(`import ${JSON.stringify(relativeDependencyPath)}`, '.mjs'); await failWriteSucceed({ file: dependant, watchedFile: dependency }); diff --git a/test/sequential/test-worker-prof.js b/test/sequential/test-worker-prof.js index c2df47a8e8a121..bcb5a477497d73 100644 --- a/test/sequential/test-worker-prof.js +++ b/test/sequential/test-worker-prof.js @@ -4,7 +4,6 @@ const tmpdir = require('../common/tmpdir'); const fs = require('fs'); const assert = require('assert'); const util = require('util'); -const { join } = require('path'); const { spawnSync } = require('child_process'); // Test that --prof also tracks Worker threads. 
@@ -67,7 +66,7 @@ if (process.argv[2] === 'child') { for (const logfile of logfiles) { const lines = fs.readFileSync( - join(tmpdir.path, logfile), 'utf8').split('\n'); + tmpdir.resolve(logfile), 'utf8').split('\n'); const ticks = lines.filter((line) => /^tick,/.test(line)).length; // Test that at least 15 ticks have been recorded for both parent and child diff --git a/test/tick-processor/test-tick-processor-polyfill-brokenfile.js b/test/tick-processor/test-tick-processor-polyfill-brokenfile.js index 2089325dff3e99..ac3cb8692b0215 100644 --- a/test/tick-processor/test-tick-processor-polyfill-brokenfile.js +++ b/test/tick-processor/test-tick-processor-polyfill-brokenfile.js @@ -15,10 +15,9 @@ if (isCPPSymbolsNotMapped) { const assert = require('assert'); const { spawn, spawnSync } = require('child_process'); -const path = require('path'); const { writeFileSync } = require('fs'); -const LOG_FILE = path.join(tmpdir.path, 'tick-processor.log'); +const LOG_FILE = tmpdir.resolve('tick-processor.log'); const RETRY_TIMEOUT = 150; const BROKEN_PART = 'tick,'; const WARN_REG_EXP = /\(node:\d+\) \[BROKEN_PROFILE_FILE] Warning: Profile file .* is broken/; diff --git a/test/tick-processor/tick-processor-base.js b/test/tick-processor/tick-processor-base.js index 91307d16928889..a9fd939495091b 100644 --- a/test/tick-processor/tick-processor-base.js +++ b/test/tick-processor/tick-processor-base.js @@ -2,12 +2,11 @@ require('../common'); const fs = require('fs'); const cp = require('child_process'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -const LOG_FILE = path.join(tmpdir.path, 'tick-processor.log'); +const LOG_FILE = tmpdir.resolve('tick-processor.log'); const RETRY_TIMEOUT = 150; function runTest(test) { diff --git a/test/wasi/test-wasi-stdio.js b/test/wasi/test-wasi-stdio.js index 29e91281553817..d4c65f238df890 100644 --- a/test/wasi/test-wasi-stdio.js +++ b/test/wasi/test-wasi-stdio.js @@ -7,9 +7,9 @@ const { join } = require('path'); const { WASI } = require('wasi'); const modulePath = join(__dirname, 'wasm', 'stdin.wasm'); const buffer = readFileSync(modulePath); -const stdinFile = join(tmpdir.path, 'stdin.txt'); -const stdoutFile = join(tmpdir.path, 'stdout.txt'); -const stderrFile = join(tmpdir.path, 'stderr.txt'); +const stdinFile = tmpdir.resolve('stdin.txt'); +const stdoutFile = tmpdir.resolve('stdout.txt'); +const stderrFile = tmpdir.resolve('stderr.txt'); tmpdir.refresh(); // Write 33 x's. The test's buffer only holds 31 x's + a terminator. diff --git a/test/wasi/test-wasi-symlinks.js b/test/wasi/test-wasi-symlinks.js index 79369fd4c18247..9c95a0e55757d0 100644 --- a/test/wasi/test-wasi-symlinks.js +++ b/test/wasi/test-wasi-symlinks.js @@ -38,15 +38,15 @@ if (process.argv[2] === 'wasi-child') { // Setup the sandbox environment. 
tmpdir.refresh(); - const sandbox = path.join(tmpdir.path, 'sandbox'); + const sandbox = tmpdir.resolve('sandbox'); const sandboxedFile = path.join(sandbox, 'input.txt'); - const externalFile = path.join(tmpdir.path, 'outside.txt'); + const externalFile = tmpdir.resolve('outside.txt'); const sandboxedDir = path.join(sandbox, 'subdir'); const sandboxedSymlink = path.join(sandboxedDir, 'input_link.txt'); const escapingSymlink = path.join(sandboxedDir, 'outside.txt'); const loopSymlink1 = path.join(sandboxedDir, 'loop1'); const loopSymlink2 = path.join(sandboxedDir, 'loop2'); - const sandboxedTmp = path.join(tmpdir.path, 'tmp'); + const sandboxedTmp = tmpdir.resolve('tmp'); fs.mkdirSync(sandbox); fs.mkdirSync(sandboxedDir); From eeddbfae6ccc1ec6aabd2659e96f76400998e658 Mon Sep 17 00:00:00 2001 From: Livia Medeiros Date: Tue, 22 Aug 2023 01:42:03 +0900 Subject: [PATCH 11/15] test,benchmark: use `tmpdir.fileURL()` PR-URL: https://github.com/nodejs/node/pull/49138 Refs: https://github.com/nodejs/node/pull/49040 Reviewed-By: Luigi Pinca --- benchmark/esm/esm-loader-import.js | 4 +--- test/es-module/test-esm-dynamic-import-mutating-fs.js | 5 +---- test/es-module/test-esm-dynamic-import-mutating-fs.mjs | 5 +---- test/node-api/test_policy/test_policy.js | 5 ----- 4 files changed, 3 insertions(+), 16 deletions(-) diff --git a/benchmark/esm/esm-loader-import.js b/benchmark/esm/esm-loader-import.js index 9967cd95275469..025afbf616b570 100644 --- a/benchmark/esm/esm-loader-import.js +++ b/benchmark/esm/esm-loader-import.js @@ -2,13 +2,11 @@ // general startup, does not test lazy operations 'use strict'; const fs = require('node:fs'); -const path = require('node:path'); const common = require('../common.js'); const tmpdir = require('../../test/common/tmpdir.js'); -const { pathToFileURL } = require('node:url'); -const benchmarkDirectory = pathToFileURL(path.resolve(tmpdir.path, 'benchmark-import')); +const benchmarkDirectory = tmpdir.fileURL('benchmark-import'); const configs = { n: [1e3], diff --git a/test/es-module/test-esm-dynamic-import-mutating-fs.js b/test/es-module/test-esm-dynamic-import-mutating-fs.js index 09cbffe487959e..b3e3bd899a93e8 100644 --- a/test/es-module/test-esm-dynamic-import-mutating-fs.js +++ b/test/es-module/test-esm-dynamic-import-mutating-fs.js @@ -4,12 +4,9 @@ const tmpdir = require('../common/tmpdir'); const assert = require('node:assert'); const fs = require('node:fs/promises'); -const { pathToFileURL } = require('node:url'); tmpdir.refresh(); -const tmpDir = pathToFileURL(tmpdir.path); - -const target = new URL(`./${Math.random()}.mjs`, tmpDir); +const target = tmpdir.fileURL(`${Math.random()}.mjs`); (async () => { diff --git a/test/es-module/test-esm-dynamic-import-mutating-fs.mjs b/test/es-module/test-esm-dynamic-import-mutating-fs.mjs index 7eb79337065765..74a75ddd1c4824 100644 --- a/test/es-module/test-esm-dynamic-import-mutating-fs.mjs +++ b/test/es-module/test-esm-dynamic-import-mutating-fs.mjs @@ -4,12 +4,9 @@ import tmpdir from '../common/tmpdir.js'; import assert from 'node:assert'; import fs from 'node:fs/promises'; import { execPath } from 'node:process'; -import { pathToFileURL } from 'node:url'; tmpdir.refresh(); -const tmpDir = pathToFileURL(tmpdir.path); - -const target = new URL(`./${Math.random()}.mjs`, tmpDir); +const target = tmpdir.fileURL(`${Math.random()}.mjs`); await assert.rejects(import(target), { code: 'ERR_MODULE_NOT_FOUND' }); diff --git a/test/node-api/test_policy/test_policy.js b/test/node-api/test_policy/test_policy.js index 
428dd905c2e365..f14ceff3c4537b 100644
--- a/test/node-api/test_policy/test_policy.js
+++ b/test/node-api/test_policy/test_policy.js
@@ -23,11 +23,6 @@ const policyFilepath = tmpdir.resolve('policy');
 const depFilepath = require.resolve(`./build/${common.buildType}/binding.node`);
 const depURL = pathToFileURL(depFilepath);
 
-const tmpdirURL = pathToFileURL(tmpdir.path);
-if (!tmpdirURL.pathname.endsWith('/')) {
-  tmpdirURL.pathname += '/';
-}
-
 const depBody = fs.readFileSync(depURL);
 function writePolicy(...resources) {
   const manifest = { resources: {} };

From a16b610e86041a900b40f55c935f75f145571603 Mon Sep 17 00:00:00 2001
From: Jungku Lee
Date: Tue, 22 Aug 2023 01:42:12 +0900
Subject: [PATCH 12/15] src: remove unused function `GetName()` in node_perf

PR-URL: https://github.com/nodejs/node/pull/49244
Reviewed-By: Antoine du Hamel
Reviewed-By: Deokjin Kim
---
 src/node_perf.cc | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/src/node_perf.cc b/src/node_perf.cc
index 1acaa9dfe47145..360cc8bf673073 100644
--- a/src/node_perf.cc
+++ b/src/node_perf.cc
@@ -236,18 +236,6 @@ static void RemoveGarbageCollectionTracking(
   GarbageCollectionCleanupHook(env);
 }
 
-// Gets the name of a function
-inline Local<Value> GetName(Local<Function> fn) {
-  Local<Value> val = fn->GetDebugName();
-  if (val.IsEmpty() || val->IsUndefined()) {
-    Local<Value> boundFunction = fn->GetBoundFunction();
-    if (!boundFunction.IsEmpty() && !boundFunction->IsUndefined()) {
-      val = GetName(boundFunction.As<Function>());
-    }
-  }
-  return val;
-}
-
 // Notify a custom PerformanceEntry to observers
 void Notify(const FunctionCallbackInfo<Value>& args) {
   Environment* env = Environment::GetCurrent(args);

From 62b2cf30f2d1326dde9d4bc047f5611f17c4a20f Mon Sep 17 00:00:00 2001
From: Fedor Indutny <238531+indutny@users.noreply.github.com>
Date: Mon, 21 Aug 2023 11:12:09 -0700
Subject: [PATCH 13/15] doc: clarify use of Uint8Array for n-api

`napi_get_buffer_info` always supported receiving `Uint8Array` as a
`value` argument because `node::Buffer` is a subclass of `Uint8Array`
and the underlying V8 APIs don't distinguish between the two.

With this change we mark both types as supported by the API so that
the user code doesn't have to unknowingly use an officially
unsupported type of the `value` argument.

PR-URL: https://github.com/nodejs/node/pull/48742
Reviewed-By: Luigi Pinca
Reviewed-By: Gabriel Schulhof
Reviewed-By: Chengzhong Wu
---
 doc/api/n-api.md | 21 +++++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/doc/api/n-api.md b/doc/api/n-api.md
index 26ca5dc83fb34f..8b8f77f9d6a1f8 100644
--- a/doc/api/n-api.md
+++ b/doc/api/n-api.md
@@ -3070,13 +3070,18 @@ napi_status napi_get_buffer_info(napi_env env,
 ```
 
 * `[in] env`: The environment that the API is invoked under.
-* `[in] value`: `napi_value` representing the `node::Buffer` being queried.
-* `[out] data`: The underlying data buffer of the `node::Buffer`.
-  If length is `0`, this may be `NULL` or any other pointer value.
+* `[in] value`: `napi_value` representing the `node::Buffer` or `Uint8Array`
+  being queried.
+* `[out] data`: The underlying data buffer of the `node::Buffer` or
+  `Uint8Array`. If length is `0`, this may be `NULL` or any other pointer value.
 * `[out] length`: Length in bytes of the underlying data buffer.
 
 Returns `napi_ok` if the API succeeded.
 
+This method returns the identical `data` and `byte_length` as
+[`napi_get_typedarray_info`][]. And `napi_get_typedarray_info` accepts a
+`node::Buffer` (a Uint8Array) as the value too.
+
 This API is used to retrieve the underlying data buffer of a `node::Buffer`
 and its length.
 
@@ -3827,12 +3832,14 @@ napi_status napi_is_buffer(napi_env env, napi_value value, bool* result)
 
 * `[in] env`: The environment that the API is invoked under.
 * `[in] value`: The JavaScript value to check.
-* `[out] result`: Whether the given `napi_value` represents a `node::Buffer`
-  object.
+* `[out] result`: Whether the given `napi_value` represents a `node::Buffer` or
+  `Uint8Array` object.
 
 Returns `napi_ok` if the API succeeded.
 
-This API checks if the `Object` passed in is a buffer.
+This API checks if the `Object` passed in is a buffer or Uint8Array.
+[`napi_is_typedarray`][] should be preferred if the caller needs to check if the
+value is a Uint8Array.
 
 ### `napi_is_date`
 
@@ -6502,11 +6509,13 @@ the add-on's file name during loading.
 [`napi_get_last_error_info`]: #napi_get_last_error_info
 [`napi_get_property`]: #napi_get_property
 [`napi_get_reference_value`]: #napi_get_reference_value
+[`napi_get_typedarray_info`]: #napi_get_typedarray_info
 [`napi_get_value_external`]: #napi_get_value_external
 [`napi_has_property`]: #napi_has_property
 [`napi_instanceof`]: #napi_instanceof
 [`napi_is_error`]: #napi_is_error
 [`napi_is_exception_pending`]: #napi_is_exception_pending
+[`napi_is_typedarray`]: #napi_is_typedarray
 [`napi_make_callback`]: #napi_make_callback
 [`napi_open_callback_scope`]: #napi_open_callback_scope
 [`napi_open_escapable_handle_scope`]: #napi_open_escapable_handle_scope

From f6f1131096dab6a79948ef81f9f813d76238de0d Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Fri, 18 Aug 2023 14:33:54 +0200
Subject: [PATCH 14/15] src: support snapshot deserialization in RAIIIsolate

PR-URL: https://github.com/nodejs/node/pull/49226
Refs: https://github.com/nodejs/node-v8/issues/252
Reviewed-By: Darshan Sen
Reviewed-By: Yagiz Nizipli
---
 src/util.cc | 6 +++++-
 src/util.h  | 2 +-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/util.cc b/src/util.cc
index 76a61aef592641..19fb91c959a205 100644
--- a/src/util.cc
+++ b/src/util.cc
@@ -27,6 +27,7 @@
 #include "node_buffer.h"
 #include "node_errors.h"
 #include "node_internals.h"
+#include "node_snapshot_builder.h"
 #include "node_v8_platform-inl.h"
 #include "string_bytes.h"
 #include "uv.h"
@@ -677,13 +678,16 @@ Local<String> UnionBytes::ToStringChecked(Isolate* isolate) const {
   }
 }
 
-RAIIIsolate::RAIIIsolate()
+RAIIIsolate::RAIIIsolate(const SnapshotData* data)
     : allocator_{ArrayBuffer::Allocator::NewDefaultAllocator()} {
   isolate_ = Isolate::Allocate();
   CHECK_NOT_NULL(isolate_);
   per_process::v8_platform.Platform()->RegisterIsolate(isolate_,
                                                        uv_default_loop());
   Isolate::CreateParams params;
+  if (data != nullptr) {
+    SnapshotBuilder::InitializeIsolateParams(data, &params);
+  }
   params.array_buffer_allocator = allocator_.get();
   Isolate::Initialize(isolate_, params);
 }
diff --git a/src/util.h b/src/util.h
index b9369867eed316..344f7753dab2b1 100644
--- a/src/util.h
+++ b/src/util.h
@@ -971,7 +971,7 @@ void SetConstructorFunction(v8::Isolate* isolate,
 // Simple RAII class to spin up a v8::Isolate instance.
 class RAIIIsolate {
  public:
-  RAIIIsolate();
+  explicit RAIIIsolate(const SnapshotData* data = nullptr);
   ~RAIIIsolate();
 
   v8::Isolate* get() const { return isolate_; }

From 5c9daf458327aacd9db463807f991752ca4e4f3f Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Fri, 18 Aug 2023 14:16:55 +0200
Subject: [PATCH 15/15] sea: generate code cache with deserialized isolate

V8 now requires the code cache to be compiled from an isolate with the
same RO space layout as the one that's going to deserialize the cache,
so for a binary built with a snapshot, we need to compile the code
cache using a deserialized isolate.

Drive-by: ignore "useCodeCache" when "useSnapshot" is true because the
compilation would've been done during build time anyway in that case,
and print a warning for it.

PR-URL: https://github.com/nodejs/node/pull/49226
Refs: https://github.com/nodejs/node-v8/issues/252
Reviewed-By: Darshan Sen
Reviewed-By: Yagiz Nizipli
---
 src/node_sea.cc                               | 21 ++++---
 ...ble-application-snapshot-and-code-cache.js | 63 +++++++++++++++++++
 2 files changed, 76 insertions(+), 8 deletions(-)
 create mode 100644 test/sequential/test-single-executable-application-snapshot-and-code-cache.js

diff --git a/src/node_sea.cc b/src/node_sea.cc
index a8dbfeaa424943..521f2f670b28c8 100644
--- a/src/node_sea.cc
+++ b/src/node_sea.cc
@@ -411,7 +411,7 @@ ExitCode GenerateSnapshotForSEA(const SeaConfig& config,
 
 std::optional<std::string> GenerateCodeCache(std::string_view main_path,
                                              std::string_view main_script) {
-  RAIIIsolate raii_isolate;
+  RAIIIsolate raii_isolate(SnapshotBuilder::GetEmbeddedSnapshotData());
   Isolate* isolate = raii_isolate.get();
   HandleScope handle_scope(isolate);
 
@@ -489,14 +489,19 @@ ExitCode GenerateSingleExecutableBlob(
   std::optional<std::string_view> optional_sv_code_cache;
   std::string code_cache;
   if (static_cast<bool>(config.flags & SeaFlags::kUseCodeCache)) {
-    std::optional<std::string> optional_code_cache =
-        GenerateCodeCache(config.main_path, main_script);
-    if (!optional_code_cache.has_value()) {
-      FPrintF(stderr, "Cannot generate V8 code cache\n");
-      return ExitCode::kGenericUserError;
+    if (builds_snapshot_from_main) {
+      FPrintF(stderr,
+              "\"useCodeCache\" is redundant when \"useSnapshot\" is true\n");
+    } else {
+      std::optional<std::string> optional_code_cache =
+          GenerateCodeCache(config.main_path, main_script);
+      if (!optional_code_cache.has_value()) {
+        FPrintF(stderr, "Cannot generate V8 code cache\n");
+        return ExitCode::kGenericUserError;
+      }
+      code_cache = optional_code_cache.value();
+      optional_sv_code_cache = code_cache;
     }
-    code_cache = optional_code_cache.value();
-    optional_sv_code_cache = code_cache;
   }
 
   SeaResource sea{
diff --git a/test/sequential/test-single-executable-application-snapshot-and-code-cache.js b/test/sequential/test-single-executable-application-snapshot-and-code-cache.js
new file mode 100644
index 00000000000000..66012e38a4faa6
--- /dev/null
+++ b/test/sequential/test-single-executable-application-snapshot-and-code-cache.js
@@ -0,0 +1,63 @@
+'use strict';
+
+require('../common');
+
+const {
+  injectAndCodeSign,
+  skipIfSingleExecutableIsNotSupported,
+} = require('../common/sea');
+
+skipIfSingleExecutableIsNotSupported();
+
+// This tests "useCodeCache" is ignored when "useSnapshot" is true.
+ +const tmpdir = require('../common/tmpdir'); +const { copyFileSync, writeFileSync, existsSync } = require('fs'); +const { spawnSync } = require('child_process'); +const { join } = require('path'); +const assert = require('assert'); + +const configFile = join(tmpdir.path, 'sea-config.json'); +const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); +const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); + +{ + tmpdir.refresh(); + const code = ` + const { + setDeserializeMainFunction, + } = require('v8').startupSnapshot; + + setDeserializeMainFunction(() => { + console.log('Hello from snapshot'); + }); + `; + + writeFileSync(join(tmpdir.path, 'snapshot.js'), code, 'utf-8'); + writeFileSync(configFile, ` + { + "main": "snapshot.js", + "output": "sea-prep.blob", + "useSnapshot": true, + "useCodeCache": true + } + `); + + let child = spawnSync( + process.execPath, + ['--experimental-sea-config', 'sea-config.json'], + { + cwd: tmpdir.path + }); + assert.match( + child.stderr.toString(), + /"useCodeCache" is redundant when "useSnapshot" is true/); + + assert(existsSync(seaPrepBlob)); + + copyFileSync(process.execPath, outputFile); + injectAndCodeSign(outputFile, seaPrepBlob); + + child = spawnSync(outputFile); + assert.strictEqual(child.stdout.toString().trim(), 'Hello from snapshot'); +}
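
Put differently, the behavior exercised by the test above comes down to a `sea-config.json` of roughly this shape. This is only an illustration of the flag combination; the file and field names mirror the test fixture above, not any additional documentation:

    {
      "main": "snapshot.js",
      "output": "sea-prep.blob",
      "useSnapshot": true,
      "useCodeCache": true
    }

Running `node --experimental-sea-config sea-config.json` against such a config is expected to print `"useCodeCache" is redundant when "useSnapshot" is true` to stderr and still write `sea-prep.blob`, since the code cache step is skipped rather than treated as an error.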