Merge branch 'main' into process/get-sourcemaps-enabled
sapphi-red committed Aug 15, 2023
2 parents 47842aa + 9cc7327 commit db5a879
Showing 806 changed files with 8,538 additions and 2,307 deletions.
1 change: 1 addition & 0 deletions .eslintignore
@@ -5,6 +5,7 @@ test/fixtures
test/message/esm_display_syntax_error.mjs
tools/icu
tools/lint-md/lint-md.mjs
+tools/github_reporter
benchmark/tmp
benchmark/fixtures
doc/**/*.js
2 changes: 1 addition & 1 deletion .github/workflows/build-tarball.yml
@@ -94,4 +94,4 @@ jobs:
- name: Test
run: |
cd $TAR_DIR
-make run-ci -j2 V=1 TEST_CI_ARGS="-p dots --measure-flakiness 9"
+make run-ci -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9"
2 changes: 1 addition & 1 deletion .github/workflows/coverage-linux-without-intl.yml
@@ -53,7 +53,7 @@ jobs:
# TODO(bcoe): fix the couple tests that fail with the inspector enabled.
# The cause is most likely coverage's use of the inspector.
- name: Test
-run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --measure-flakiness 9" || exit 0
+run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0
- name: Report JS
run: npx c8 report --check-coverage
env:
2 changes: 1 addition & 1 deletion .github/workflows/coverage-linux.yml
@@ -53,7 +53,7 @@ jobs:
# TODO(bcoe): fix the couple tests that fail with the inspector enabled.
# The cause is most likely coverage's use of the inspector.
- name: Test
-run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --measure-flakiness 9" || exit 0
+run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0
- name: Report JS
run: npx c8 report --check-coverage
env:
2 changes: 1 addition & 1 deletion .github/workflows/doc.yml
@@ -40,4 +40,4 @@ jobs:
name: docs
path: out/doc
- name: Test
-run: NODE=$(command -v node) make test-doc-ci TEST_CI_ARGS="-p actions --measure-flakiness 9"
+run: NODE=$(command -v node) make test-doc-ci TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
2 changes: 1 addition & 1 deletion .github/workflows/test-asan.yml
@@ -59,4 +59,4 @@ jobs:
- name: Build
run: make build-ci -j2 V=1
- name: Test
-run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions -t 300 --measure-flakiness 9"
+run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9"
2 changes: 1 addition & 1 deletion .github/workflows/test-linux.yml
@@ -46,4 +46,4 @@ jobs:
- name: Build
run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn"
- name: Test
-run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --measure-flakiness 9"
+run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
2 changes: 1 addition & 1 deletion .github/workflows/test-macos.yml
@@ -60,4 +60,4 @@ jobs:
- name: Build
run: make build-ci -j$(getconf _NPROCESSORS_ONLN) V=1 CONFIG_FLAGS="--error-on-warn"
- name: Test
-run: make run-ci -j$(getconf _NPROCESSORS_ONLN) V=1 TEST_CI_ARGS="-p actions --measure-flakiness 9"
+run: make run-ci -j$(getconf _NPROCESSORS_ONLN) V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
8 changes: 8 additions & 0 deletions .github/workflows/tools.yml
@@ -140,6 +140,14 @@ jobs:
cat temp-output
tail -n1 temp-output | grep "NEW_VERSION=" >> "$GITHUB_ENV" || true
rm temp-output
+- id: github_reporter
+subsystem: tools
+label: tools
+run: |
+./tools/dep_updaters/update-github-reporter.sh > temp-output
+cat temp-output
+tail -n1 temp-output | grep "NEW_VERSION=" >> "$GITHUB_ENV" || true
+rm temp-output
- id: googletest
subsystem: deps
label: dependencies, test
36 changes: 34 additions & 2 deletions deps/googletest/include/gtest/gtest-message.h
@@ -56,6 +56,13 @@

#include "gtest/internal/gtest-port.h"

+#ifdef GTEST_HAS_ABSL
+#include <type_traits>
+
+#include "absl/strings/internal/has_absl_stringify.h"
+#include "absl/strings/str_cat.h"
+#endif // GTEST_HAS_ABSL

GTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \
/* class A needs to have dll-interface to be used by clients of class B */)

@@ -111,8 +118,17 @@ class GTEST_API_ Message {
*ss_ << str;
}

-// Streams a non-pointer value to this object.
-template <typename T>
+// Streams a non-pointer value to this object. If building a version of
+// GoogleTest with ABSL, this overload is only enabled if the value does not
+// have an AbslStringify definition.
+template <typename T
+#ifdef GTEST_HAS_ABSL
+,
+typename std::enable_if<
+!absl::strings_internal::HasAbslStringify<T>::value, // NOLINT
+int>::type = 0
+#endif // GTEST_HAS_ABSL
+>
inline Message& operator<<(const T& val) {
// Some libraries overload << for STL containers. These
// overloads are defined in the global namespace instead of ::std.
@@ -133,6 +149,22 @@ class GTEST_API_ Message {
return *this;
}

+#ifdef GTEST_HAS_ABSL
+// Streams a non-pointer value with an AbslStringify definition to this
+// object.
+template <typename T,
+typename std::enable_if<
+absl::strings_internal::HasAbslStringify<T>::value, // NOLINT
+int>::type = 0>
+inline Message& operator<<(const T& val) {
+// ::operator<< is needed here for a similar reason as with the non-Abseil
+// version above
+using ::operator<<;
+*ss_ << absl::StrCat(val);
+return *this;
+}
+#endif // GTEST_HAS_ABSL

// Streams a pointer value to this object.
//
// This function is an overload of the previous one. When you
9 changes: 4 additions & 5 deletions deps/googletest/include/gtest/gtest.h
@@ -51,7 +51,6 @@

#include <cstddef>
#include <cstdint>
-#include <iomanip>
#include <limits>
#include <memory>
#include <ostream>
@@ -1574,12 +1573,12 @@ AssertionResult CmpHelperFloatingPointEQ(const char* lhs_expression,
}

::std::stringstream lhs_ss;
-lhs_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)
-<< lhs_value;
+lhs_ss.precision(std::numeric_limits<RawType>::digits10 + 2);
+lhs_ss << lhs_value;

::std::stringstream rhs_ss;
-rhs_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)
-<< rhs_value;
+rhs_ss.precision(std::numeric_limits<RawType>::digits10 + 2);
+rhs_ss << rhs_value;

return EqFailure(lhs_expression, rhs_expression,
StringStreamToString(&lhs_ss), StringStreamToString(&rhs_ss),
1 change: 0 additions & 1 deletion deps/googletest/include/gtest/internal/gtest-internal.h
@@ -58,7 +58,6 @@

#include <cstdint>
#include <functional>
-#include <iomanip>
#include <limits>
#include <map>
#include <set>
18 changes: 4 additions & 14 deletions deps/googletest/include/gtest/internal/gtest-port.h
@@ -220,7 +220,6 @@
// GTEST_HAS_ALT_PATH_SEP_ - Always defined to 0 or 1.
// GTEST_WIDE_STRING_USES_UTF16_ - Always defined to 0 or 1.
// GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ - Always defined to 0 or 1.
-// GTEST_HAS_DOWNCAST_ - Always defined to 0 or 1.
// GTEST_HAS_NOTIFICATION_- Always defined to 0 or 1.
//
// Synchronization:
@@ -313,10 +312,6 @@
#include "gtest/internal/custom/gtest-port.h"
#include "gtest/internal/gtest-port-arch.h"

-#ifndef GTEST_HAS_DOWNCAST_
-#define GTEST_HAS_DOWNCAST_ 0
-#endif

#ifndef GTEST_HAS_MUTEX_AND_THREAD_LOCAL_
#define GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ 0
#endif
@@ -1153,17 +1148,12 @@ inline To ImplicitCast_(To x) {
// check to enforce this.
template <class Derived, class Base>
Derived* CheckedDowncastToActualType(Base* base) {
+static_assert(std::is_base_of<Base, Derived>::value,
+"target type not derived from source type");
#if GTEST_HAS_RTTI
-GTEST_CHECK_(typeid(*base) == typeid(Derived));
-#endif
-
-#if GTEST_HAS_DOWNCAST_
-return ::down_cast<Derived*>(base);
-#elif GTEST_HAS_RTTI
-return dynamic_cast<Derived*>(base); // NOLINT
-#else
-return static_cast<Derived*>(base); // Poor man's downcast.
+GTEST_CHECK_(base == nullptr || dynamic_cast<Derived*>(base) != nullptr);
#endif
+return static_cast<Derived*>(base);
}

#if GTEST_HAS_STREAM_REDIRECTION
2 changes: 1 addition & 1 deletion deps/googletest/src/gtest-internal-inl.h
@@ -672,7 +672,7 @@ class GTEST_API_ UnitTestImpl {
void AddTestInfo(internal::SetUpTestSuiteFunc set_up_tc,
internal::TearDownTestSuiteFunc tear_down_tc,
TestInfo* test_info) {
-#ifdef GTEST_HAS_FILE_SYSTEM
+#if GTEST_HAS_FILE_SYSTEM
// In order to support thread-safe death tests, we need to
// remember the original working directory when the test program
// was first invoked. We cannot do this in RUN_ALL_TESTS(), as
4 changes: 3 additions & 1 deletion deps/undici/src/docs/api/ProxyAgent.md
@@ -19,7 +19,9 @@ Extends: [`AgentOptions`](Agent.md#parameter-agentoptions)
* **uri** `string` (required) - It can be passed either by a string or a object containing `uri` as string.
* **token** `string` (optional) - It can be passed by a string of token for authentication.
* **auth** `string` (**deprecated**) - Use token.
-* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)`
+* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` (optional) - Default: `(origin, opts) => new Pool(origin, opts)`
+* **requestTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the request. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback).
+* **proxyTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the proxy server. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback).

Examples:

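Editor's note, not part of the diff: a minimal usage sketch of the two newly documented options, `requestTls` and `proxyTls`. The proxy URI, server names, and target URL are placeholders.

```js
import { ProxyAgent, fetch } from 'undici'

// Placeholder addresses; substitute a real proxy and origin.
const proxyAgent = new ProxyAgent({
  uri: 'http://proxy.example.com:8000',
  // TLS options used for the socket to the origin server (the request).
  requestTls: { rejectUnauthorized: false },
  // TLS options used for the socket to the proxy server itself.
  proxyTls: { servername: 'proxy.example.com' }
})

const response = await fetch('https://origin.example.com/', { dispatcher: proxyAgent })
console.log(response.status)
```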
7 changes: 2 additions & 5 deletions deps/undici/src/lib/api/abort-signal.js
@@ -1,3 +1,4 @@
+const { addAbortListener } = require('../core/util')
const { RequestAbortedError } = require('../core/errors')

const kListener = Symbol('kListener')
@@ -29,11 +30,7 @@ function addSignal (self, signal) {
abort(self)
}

-if ('addEventListener' in self[kSignal]) {
-self[kSignal].addEventListener('abort', self[kListener])
-} else {
-self[kSignal].addListener('abort', self[kListener])
-}
+addAbortListener(self[kSignal], self[kListener])
}

function removeSignal (self) {
9 changes: 6 additions & 3 deletions deps/undici/src/lib/api/readable.js
@@ -155,12 +155,13 @@ module.exports = class BodyReadable extends Readable {
const abortFn = () => {
this.destroy()
}
+let signalListenerCleanup
if (signal) {
if (typeof signal !== 'object' || !('aborted' in signal)) {
throw new InvalidArgumentError('signal must be an AbortSignal')
}
util.throwIfAborted(signal)
-signal.addEventListener('abort', abortFn, { once: true })
+signalListenerCleanup = util.addAbortListener(signal, abortFn)
}
try {
for await (const chunk of this) {
@@ -173,8 +174,10 @@
} catch {
util.throwIfAborted(signal)
} finally {
-if (signal) {
-signal.removeEventListener('abort', abortFn)
+if (typeof signalListenerCleanup === 'function') {
+signalListenerCleanup()
+} else if (signalListenerCleanup) {
+signalListenerCleanup[Symbol.dispose]()
}
}
}
19 changes: 19 additions & 0 deletions deps/undici/src/lib/core/util.js
@@ -422,6 +422,24 @@ function throwIfAborted (signal) {
}
}

+let events
+function addAbortListener (signal, listener) {
+if (typeof Symbol.dispose === 'symbol') {
+if (!events) {
+events = require('events')
+}
+if (typeof events.addAbortListener === 'function' && 'aborted' in signal) {
+return events.addAbortListener(signal, listener)
+}
+}
+if ('addEventListener' in signal) {
+signal.addEventListener('abort', listener, { once: true })
+return () => signal.removeEventListener('abort', listener)
+}
+signal.addListener('abort', listener)
+return () => signal.removeListener('abort', listener)
+}

const hasToWellFormed = !!String.prototype.toWellFormed

/**
@@ -469,6 +487,7 @@ module.exports = {
isFormDataLike,
buildURL,
throwIfAborted,
+addAbortListener,
nodeMajor,
nodeMinor,
nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13)
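Editor's note, not part of the diff: a rough sketch of how a caller handles the two cleanup shapes `addAbortListener` can return — a disposable from `events.addAbortListener` when `Symbol.dispose` exists, otherwise a plain removal function. This mirrors the `finally` block in `readable.js` above; the wrapper name and require path are made up for illustration.

```js
const util = require('./deps/undici/src/lib/core/util') // path assumed for illustration

function onAbort (signal, listener) {
  const cleanup = util.addAbortListener(signal, listener)
  // Normalize both shapes into a single function the caller can invoke.
  return () => {
    if (typeof cleanup === 'function') {
      cleanup()
    } else {
      cleanup[Symbol.dispose]()
    }
  }
}

// Usage sketch:
const ac = new AbortController()
const stop = onAbort(ac.signal, () => console.log('aborted'))
ac.abort() // logs 'aborted'
stop()     // safe to call afterwards; the listener was registered once
```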
2 changes: 1 addition & 1 deletion deps/undici/src/lib/fetch/body.js
@@ -105,7 +105,7 @@ function extractBody (object, keepalive = false) {
// Set source to a copy of the bytes held by object.
source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
} else if (util.isFormDataLike(object)) {
-const boundary = `----formdata-undici-${Math.random()}`.replace('.', '').slice(0, 32)
+const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
const prefix = `--${boundary}\r\nContent-Disposition: form-data`

/*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
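An aside, not from the diff: the effect of the new expression is a fixed-length multipart boundary — the 21-character prefix `----formdata-undici-0` plus 11 zero-padded random digits — whereas the old `Math.random()`-based string could vary in length.

```js
// Sketch of the new boundary shape (digits are random each run).
const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
console.log(boundary)        // e.g. ----formdata-undici-004383522863
console.log(boundary.length) // always 32
```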
35 changes: 21 additions & 14 deletions deps/undici/src/lib/fetch/index.js
@@ -56,7 +56,7 @@ const {
const { kHeadersList } = require('../core/symbols')
const EE = require('events')
const { Readable, pipeline } = require('stream')
-const { isErrored, isReadable, nodeMajor, nodeMinor } = require('../core/util')
+const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = require('../core/util')
const { dataURLProcessor, serializeAMimeType } = require('./dataURL')
const { TransformStream } = require('stream/web')
const { getGlobalDispatcher } = require('../global')
@@ -174,22 +174,22 @@ async function fetch (input, init = {}) {
let controller = null

// 11. Add the following abort steps to requestObject’s signal:
-requestObject.signal.addEventListener(
-'abort',
+addAbortListener(
+requestObject.signal,
() => {
// 1. Set locallyAborted to true.
locallyAborted = true

-// 2. Abort the fetch() call with p, request, responseObject,
+// 2. Assert: controller is non-null.
+assert(controller != null)
+
+// 3. Abort controller with requestObject’s signal’s abort reason.
+controller.abort(requestObject.signal.reason)
+
+// 4. Abort the fetch() call with p, request, responseObject,
// and requestObject’s signal’s abort reason.
abortFetch(p, request, responseObject, requestObject.signal.reason)

-// 3. If controller is not null, then abort controller.
-if (controller != null) {
-controller.abort()
-}
-},
-{ once: true }
+}
)

// 12. Let handleFetchDone given response response be to finalize and
@@ -319,7 +319,7 @@ function finalizeAndReportTiming (response, initiatorType = 'other') {
// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing
function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {
if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) {
-performance.markResourceTiming(timingInfo, originalURL, initiatorType, globalThis, cacheState)
+performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState)
}
}

@@ -1986,7 +1986,7 @@ async function httpNetworkFetch (
if (key.toLowerCase() === 'content-encoding') {
// https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
// "All content-coding values are case-insensitive..."
-codings = val.toLowerCase().split(',').map((x) => x.trim())
+codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()
} else if (key.toLowerCase() === 'location') {
location = val
}
@@ -2007,7 +2007,14 @@
for (const coding of codings) {
// https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
if (coding === 'x-gzip' || coding === 'gzip') {
-decoders.push(zlib.createGunzip())
+decoders.push(zlib.createGunzip({
+// Be less strict when decoding compressed responses, since sometimes
+// servers send slightly invalid responses that are still accepted
+// by common browsers.
+// Always using Z_SYNC_FLUSH is what cURL does.
+flush: zlib.constants.Z_SYNC_FLUSH,
+finishFlush: zlib.constants.Z_SYNC_FLUSH
+}))
} else if (coding === 'deflate') {
decoders.push(zlib.createInflate())
} else if (coding === 'br') {
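Editor's note, not part of the diff: a sketch tying the two changes above together. `Content-Encoding` lists codings in the order the server applied them, so decoding has to run in reverse order (hence the `.reverse()`), and the gzip decoder is created with `Z_SYNC_FLUSH` so slightly malformed or truncated streams still decode, as the inline comment explains. The helper name is illustrative.

```js
'use strict'
const zlib = require('node:zlib')
const { pipeline } = require('node:stream')

// Illustrative helper: build decompression streams for a Content-Encoding header.
function decodersFor (contentEncoding) {
  const codings = contentEncoding.toLowerCase().split(',').map((x) => x.trim()).reverse()
  return codings.map((coding) => {
    if (coding === 'gzip' || coding === 'x-gzip') {
      return zlib.createGunzip({
        // Tolerate slightly invalid or truncated gzip data, as the diff does.
        flush: zlib.constants.Z_SYNC_FLUSH,
        finishFlush: zlib.constants.Z_SYNC_FLUSH
      })
    }
    if (coding === 'deflate') return zlib.createInflate()
    if (coding === 'br') return zlib.createBrotliDecompress()
    throw new Error(`unsupported content-coding: ${coding}`)
  })
}

// Usage sketch: a body compressed with gzip first and br second is decoded br-first.
// pipeline(rawBody, ...decodersFor('gzip, br'), process.stdout, (err) => { if (err) throw err })
```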