deps: V8: make V8 9.5 ABI-compatible with 9.6 #40422

Closed · wants to merge 1 commit
2 changes: 1 addition & 1 deletion common.gypi
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@

# Reset this number to 0 on major V8 upgrades.
# Increment by one for each non-official patch applied to deps/v8.
'v8_embedder_string': '-node.10',
'v8_embedder_string': '-node.11',

##### V8 defaults for Node.js #####

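The only effect of this file's change is the embedder suffix that ends up in V8's reported version. A minimal sketch of observing it (assuming a build that passes `v8_embedder_string` through to the `V8_EMBEDDER_STRING` define, as the gyp files normally do; the numeric digits in the comment are illustrative):

```cpp
// Prints the full V8 version string; with this patch applied the suffix
// changes from "-node.10" to "-node.11". Requires V8's public headers on the
// include path and the V8_EMBEDDER_STRING define from the build.
#include <cstdio>

#include "v8-version-string.h"

int main() {
  std::printf("%s\n", V8_VERSION_STRING);  // e.g. "9.5.172.25-node.11"
  return 0;
}
```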
25 changes: 0 additions & 25 deletions deps/v8/include/v8-callbacks.h
Original file line number Diff line number Diff line change
Expand Up @@ -210,31 +210,6 @@ using CreateHistogramCallback = void* (*)(const char* name, int min, int max,

using AddHistogramSampleCallback = void (*)(void* histogram, int sample);

/**
* HostImportModuleDynamicallyCallback is called when we require the
* embedder to load a module. This is used as part of the dynamic
* import syntax.
*
* The referrer contains metadata about the script/module that calls
* import.
*
* The specifier is the name of the module that should be imported.
*
* The embedder must compile, instantiate, evaluate the Module, and
* obtain its namespace object.
*
* The Promise returned from this function is forwarded to userland
* JavaScript. The embedder must resolve this promise with the module
* namespace object. In case of an exception, the embedder must reject
* this promise with the exception. If the promise creation itself
* fails (e.g. due to stack overflow), the embedder must propagate
* that exception by returning an empty MaybeLocal.
*/
using HostImportModuleDynamicallyCallback =
MaybeLocal<Promise> (*)(Local<Context> context,
Local<ScriptOrModule> referrer,
Local<String> specifier);

// --- Exceptions ---

using FatalErrorCallback = void (*)(const char* location, const char* message);
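For embedders still on the removed callback, the variant that survives takes an extra import-assertions argument (see the `v8-isolate.h` and `api.cc` hunks below). A rough sketch of such a callback, with hypothetical names and the actual module loading elided:

```cpp
#include "v8.h"  // umbrella header; the split headers work as well

// Hypothetical embedder callback. Only the promise plumbing described in the
// removed comment is shown; compiling, instantiating and evaluating the
// module is elided.
v8::MaybeLocal<v8::Promise> ImportModuleDynamically(
    v8::Local<v8::Context> context, v8::Local<v8::ScriptOrModule> referrer,
    v8::Local<v8::String> specifier,
    v8::Local<v8::FixedArray> import_assertions) {
  v8::Local<v8::Promise::Resolver> resolver;
  if (!v8::Promise::Resolver::New(context).ToLocal(&resolver)) {
    // Promise creation itself failed (e.g. stack overflow): propagate by
    // returning an empty MaybeLocal, as the deleted comment requires.
    return {};
  }
  // ... resolve the module named by `specifier`, then either
  //     resolver->Resolve(context, namespace_object) or
  //     resolver->Reject(context, exception) ...
  return resolver->GetPromise();
}

// Registration sketch:
//   isolate->SetHostImportModuleDynamicallyCallback(ImportModuleDynamically);
```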
23 changes: 15 additions & 8 deletions deps/v8/include/v8-internal.h
Original file line number Diff line number Diff line change
Expand Up @@ -224,23 +224,30 @@ class Internals {
static const int kExternalOneByteRepresentationTag = 0x0a;

static const uint32_t kNumIsolateDataSlots = 4;
static const int kStackGuardSize = 7 * kApiSystemPointerSize;
static const int kBuiltinTier0EntryTableSize = 13 * kApiSystemPointerSize;
static const int kBuiltinTier0TableSize = 13 * kApiSystemPointerSize;

// IsolateData layout guarantees.
static const int kIsolateEmbedderDataOffset = 0;
static const int kIsolateCageBaseOffset = 0;
static const int kIsolateStackGuardOffset =
kIsolateCageBaseOffset + kApiSystemPointerSize;
static const int kBuiltinTier0EntryTableOffset =
kIsolateStackGuardOffset + kStackGuardSize;
static const int kBuiltinTier0TableOffset =
kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
static const int kIsolateEmbedderDataOffset =
kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
static const int kIsolateFastCCallCallerFpOffset =
kNumIsolateDataSlots * kApiSystemPointerSize;
kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
static const int kIsolateFastCCallCallerPcOffset =
kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
static const int kIsolateFastApiCallTargetOffset =
kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
static const int kIsolateCageBaseOffset =
kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
static const int kIsolateLongTaskStatsCounterOffset =
kIsolateCageBaseOffset + kApiSystemPointerSize;
static const int kIsolateStackGuardOffset =
kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
static const int kIsolateRootsOffset =
kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;
kIsolateLongTaskStatsCounterOffset + kApiSizetSize;

static const int kExternalPointerTableBufferOffset = 0;
static const int kExternalPointerTableLengthOffset =
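Worked numbers make the relayout easier to review. The sketch below independently recomputes the new offsets under the assumption of a 64-bit target (`kApiSystemPointerSize == kApiSizetSize == 8`); it is not part of the patch.

```cpp
// Recomputes the new IsolateData layout guarantees from the constants above,
// assuming 8-byte pointers and size_t.
#include <cstdio>

int main() {
  constexpr int kPtr = 8, kSizet = 8, kNumIsolateDataSlots = 4;
  constexpr int kStackGuardSize = 7 * kPtr;        // 56
  constexpr int kTier0EntryTableSize = 13 * kPtr;  // 104
  constexpr int kTier0TableSize = 13 * kPtr;       // 104

  constexpr int kCageBase = 0;
  constexpr int kStackGuard = kCageBase + kPtr;                              // 8
  constexpr int kTier0EntryTable = kStackGuard + kStackGuardSize;            // 64
  constexpr int kTier0Table = kTier0EntryTable + kTier0EntryTableSize;       // 168
  constexpr int kEmbedderData = kTier0Table + kTier0TableSize;               // 272
  constexpr int kFastCCallFp = kEmbedderData + kNumIsolateDataSlots * kPtr;  // 304
  constexpr int kFastCCallPc = kFastCCallFp + kPtr;                          // 312
  constexpr int kFastApiCallTarget = kFastCCallPc + kPtr;                    // 320
  constexpr int kLongTaskStatsCounter = kFastApiCallTarget + kPtr;           // 328
  constexpr int kRoots = kLongTaskStatsCounter + kSizet;                     // 336

  std::printf("kIsolateRootsOffset = %d\n", kRoots);
  return 0;
}
```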
10 changes: 0 additions & 10 deletions deps/v8/include/v8-isolate.h
Original file line number Diff line number Diff line change
Expand Up @@ -613,16 +613,6 @@ class V8_EXPORT Isolate {
void SetAbortOnUncaughtExceptionCallback(
AbortOnUncaughtExceptionCallback callback);

/**
* This specifies the callback called by the upcoming dynamic
* import() language feature to load modules.
*/
V8_DEPRECATED(
"Use the version of SetHostImportModuleDynamicallyCallback that takes a "
"HostImportModuleDynamicallyWithImportAssertionsCallback instead")
void SetHostImportModuleDynamicallyCallback(
HostImportModuleDynamicallyCallback callback);

/**
* This specifies the callback called by the upcoming dynamic
* import() language feature to load modules.
20 changes: 20 additions & 0 deletions deps/v8/include/v8-platform.h
Original file line number Diff line number Diff line change
Expand Up @@ -516,6 +516,18 @@ class PageAllocator {
virtual bool CanAllocateSharedPages() { return false; }
};

/**
* V8 Allocator used for allocating zone backings.
*/
class ZoneBackingAllocator {
public:
using MallocFn = void* (*)(size_t);
using FreeFn = void (*)(void*);

virtual MallocFn GetMallocFn() const { return ::malloc; }
virtual FreeFn GetFreeFn() const { return ::free; }
};

/**
* V8 Platform abstraction layer.
*
@@ -534,6 +546,14 @@ class Platform {
return nullptr;
}

/**
* Allows the embedder to specify a custom allocator used for zones.
*/
virtual ZoneBackingAllocator* GetZoneBackingAllocator() {
static ZoneBackingAllocator default_allocator;
return &default_allocator;
}

/**
* Enables the embedder to respond in cases where V8 can't allocate large
* blocks of memory. V8 retries the failed allocation once after calling this
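The new `GetZoneBackingAllocator()` hook gives embedders a way to route zone backing allocations through their own allocator. A minimal sketch with an illustrative byte-counting allocator (the names and the tracking behaviour are not from this PR):

```cpp
#include <atomic>
#include <cstdlib>

#include "v8-platform.h"

namespace {

std::atomic<size_t> g_zone_bytes_requested{0};

void* TrackedMalloc(size_t size) {
  g_zone_bytes_requested.fetch_add(size, std::memory_order_relaxed);
  return std::malloc(size);
}

void TrackedFree(void* ptr) { std::free(ptr); }

}  // namespace

// Counts the bytes V8 requests for zone backings, then falls through to the
// system allocator.
class TrackingZoneBackingAllocator final : public v8::ZoneBackingAllocator {
 public:
  MallocFn GetMallocFn() const override { return &TrackedMalloc; }
  FreeFn GetFreeFn() const override { return &TrackedFree; }
};

// An embedder's Platform subclass would then hand back a long-lived instance,
// roughly:
//
//   ZoneBackingAllocator* GetZoneBackingAllocator() override {
//     static TrackingZoneBackingAllocator tracking_allocator;
//     return &tracking_allocator;
//   }
```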
6 changes: 0 additions & 6 deletions deps/v8/src/api/api.cc
Original file line number Diff line number Diff line change
Expand Up @@ -8544,12 +8544,6 @@ void Isolate::SetAbortOnUncaughtExceptionCallback(
isolate->SetAbortOnUncaughtExceptionCallback(callback);
}

void Isolate::SetHostImportModuleDynamicallyCallback(
i::Isolate::DeprecatedHostImportModuleDynamicallyCallback callback) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
isolate->SetHostImportModuleDynamicallyCallback(callback);
}

void Isolate::SetHostImportModuleDynamicallyCallback(
HostImportModuleDynamicallyWithImportAssertionsCallback callback) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
71 changes: 50 additions & 21 deletions deps/v8/src/builtins/builtins-definitions.h
Original file line number Diff line number Diff line change
Expand Up @@ -31,16 +31,40 @@ namespace internal {
// TODO(jgruber): Remove DummyDescriptor once all ASM builtins have been
// properly associated with their descriptor.

#define BUILTIN_LIST_BASE(CPP, TFJ, TFC, TFS, TFH, ASM) \
/* GC write barrirer */ \
TFC(RecordWriteEmitRememberedSetSaveFP, WriteBarrier) \
TFC(RecordWriteOmitRememberedSetSaveFP, WriteBarrier) \
TFC(RecordWriteEmitRememberedSetIgnoreFP, WriteBarrier) \
TFC(RecordWriteOmitRememberedSetIgnoreFP, WriteBarrier) \
TFC(EphemeronKeyBarrierSaveFP, WriteBarrier) \
TFC(EphemeronKeyBarrierIgnoreFP, WriteBarrier) \
\
/* TSAN support for stores in generated code.*/ \
// Builtins are additionally split into tiers, where the tier determines the
// distance of the builtins table from the root register within IsolateData.
//
// - Tier 0 (T0) are guaranteed to be close to the root register and can thus
// be accessed efficiently root-relative calls (so not, e.g., calls from
// generated code when short-builtin-calls is on).
// - T1 builtins have no distance guarantees.
//
// Note, this mechanism works only if the set of T0 builtins is kept as small
// as possible. Please, resist the temptation to add your builtin here unless
// there's a very good reason.
#define BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM) \
/* Deoptimization entries. */ \
ASM(DeoptimizationEntry_Eager, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Soft, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Bailout, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Lazy, DeoptimizationEntry) \
ASM(DynamicCheckMapsTrampoline, DynamicCheckMaps) \
ASM(DynamicCheckMapsWithFeedbackVectorTrampoline, \
DynamicCheckMapsWithFeedbackVector) \
\
/* GC write barrier. */ \
TFC(RecordWriteEmitRememberedSetSaveFP, WriteBarrier) \
TFC(RecordWriteOmitRememberedSetSaveFP, WriteBarrier) \
TFC(RecordWriteEmitRememberedSetIgnoreFP, WriteBarrier) \
TFC(RecordWriteOmitRememberedSetIgnoreFP, WriteBarrier) \
TFC(EphemeronKeyBarrierSaveFP, WriteBarrier) \
TFC(EphemeronKeyBarrierIgnoreFP, WriteBarrier) \
\
/* Adaptor for CPP builtins. */ \
TFC(AdaptorWithBuiltinExitFrame, CppBuiltinAdaptor)

#define BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM) \
/* TSAN support for stores in generated code. */ \
IF_TSAN(TFC, TSANRelaxedStore8IgnoreFP, TSANStore) \
IF_TSAN(TFC, TSANRelaxedStore8SaveFP, TSANStore) \
IF_TSAN(TFC, TSANRelaxedStore16IgnoreFP, TSANStore) \
@@ -58,15 +82,12 @@ namespace internal {
IF_TSAN(TFC, TSANSeqCstStore64IgnoreFP, TSANStore) \
IF_TSAN(TFC, TSANSeqCstStore64SaveFP, TSANStore) \
\
/* TSAN support for loads in generated code.*/ \
/* TSAN support for loads in generated code. */ \
IF_TSAN(TFC, TSANRelaxedLoad32IgnoreFP, TSANLoad) \
IF_TSAN(TFC, TSANRelaxedLoad32SaveFP, TSANLoad) \
IF_TSAN(TFC, TSANRelaxedLoad64IgnoreFP, TSANLoad) \
IF_TSAN(TFC, TSANRelaxedLoad64SaveFP, TSANLoad) \
\
/* Adaptor for CPP builtin */ \
TFC(AdaptorWithBuiltinExitFrame, CppBuiltinAdaptor) \
\
/* Calls */ \
/* ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) */ \
ASM(CallFunction_ReceiverIsNullOrUndefined, CallTrampoline) \
@@ -187,10 +208,6 @@ namespace internal {
TFC(CompileLazyDeoptimizedCode, JSTrampoline) \
TFC(InstantiateAsmJs, JSTrampoline) \
ASM(NotifyDeoptimized, Dummy) \
ASM(DeoptimizationEntry_Eager, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Soft, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Bailout, DeoptimizationEntry) \
ASM(DeoptimizationEntry_Lazy, DeoptimizationEntry) \
\
/* Trampolines called when returning from a deoptimization that expects */ \
/* to continue in a JavaScript builtin to finish the functionality of a */ \
@@ -282,10 +299,7 @@ namespace internal {
TFH(HasIndexedInterceptorIC, LoadWithVector) \
\
/* Dynamic check maps */ \
ASM(DynamicCheckMapsTrampoline, DynamicCheckMaps) \
TFC(DynamicCheckMaps, DynamicCheckMaps) \
ASM(DynamicCheckMapsWithFeedbackVectorTrampoline, \
DynamicCheckMapsWithFeedbackVector) \
TFC(DynamicCheckMapsWithFeedbackVector, DynamicCheckMapsWithFeedbackVector) \
\
/* Microtask helpers */ \
@@ -1032,6 +1046,10 @@ namespace internal {
CPP(CallAsyncModuleFulfilled) \
CPP(CallAsyncModuleRejected)

#define BUILTIN_LIST_BASE(CPP, TFJ, TFC, TFS, TFH, ASM) \
BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM) \
BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM)

#ifdef V8_INTL_SUPPORT
#define BUILTIN_LIST_INTL(CPP, TFJ, TFS) \
/* ecma402 #sec-intl.collator */ \
@@ -1218,6 +1236,17 @@ namespace internal {
BUILTIN_LIST_INTL(CPP, TFJ, TFS) \
BUILTIN_LIST_BYTECODE_HANDLERS(BCH)

// See the comment on top of BUILTIN_LIST_BASE_TIER0 for an explanation of
// tiers.
#define BUILTIN_LIST_TIER0(CPP, TFJ, TFC, TFS, TFH, BCH, ASM) \
BUILTIN_LIST_BASE_TIER0(CPP, TFJ, TFC, TFS, TFH, ASM)

#define BUILTIN_LIST_TIER1(CPP, TFJ, TFC, TFS, TFH, BCH, ASM) \
BUILTIN_LIST_BASE_TIER1(CPP, TFJ, TFC, TFS, TFH, ASM) \
BUILTIN_LIST_FROM_TORQUE(CPP, TFJ, TFC, TFS, TFH, ASM) \
BUILTIN_LIST_INTL(CPP, TFJ, TFS) \
BUILTIN_LIST_BYTECODE_HANDLERS(BCH)

// The exception thrown in the following builtins are caught
// internally and result in a promise rejection.
#define BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(V) \
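The new tier lists follow the same X-macro convention as the existing `BUILTIN_LIST_*` macros, so a consumer enumerates tier-0 builtins by passing an expansion macro in every position. An illustrative sketch (usable only inside V8's own sources, since the list header is internal); the count should come out to 13, matching the 13-slot tier-0 tables introduced in the `v8-internal.h` hunk above:

```cpp
#include "src/builtins/builtins-definitions.h"

// Expand every tier-0 entry to "+1" to count them; this mirrors how V8
// consumes these X-macro lists. PLUS_ONE is an illustrative helper name.
#define PLUS_ONE(...) +1
constexpr int kTier0BuiltinCount =
    0 BUILTIN_LIST_TIER0(PLUS_ONE, PLUS_ONE, PLUS_ONE, PLUS_ONE, PLUS_ONE,
                         PLUS_ONE, PLUS_ONE);
#undef PLUS_ONE

static_assert(kTier0BuiltinCount == 13,
              "tier-0 builtins fill the 13-slot tier-0 tables");
```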
44 changes: 25 additions & 19 deletions deps/v8/src/builtins/builtins.cc
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ BytecodeOffset Builtins::GetContinuationBytecodeOffset(Builtin builtin) {
DCHECK(Builtins::KindOf(builtin) == TFJ || Builtins::KindOf(builtin) == TFC ||
Builtins::KindOf(builtin) == TFS);
return BytecodeOffset(BytecodeOffset::kFirstBuiltinContinuationId +
static_cast<int>(builtin));
ToInt(builtin));
}

Builtin Builtins::GetBuiltinFromBytecodeOffset(BytecodeOffset id) {
@@ -182,7 +182,7 @@ Handle<Code> Builtins::code_handle(Builtin builtin) {
// static
int Builtins::GetStackParameterCount(Builtin builtin) {
DCHECK(Builtins::KindOf(builtin) == TFJ);
return builtin_metadata[static_cast<int>(builtin)].data.parameter_count;
return builtin_metadata[ToInt(builtin)].data.parameter_count;
}

// static
@@ -224,7 +224,7 @@ bool Builtins::HasJSLinkage(Builtin builtin) {

// static
const char* Builtins::name(Builtin builtin) {
int index = static_cast<int>(builtin);
int index = ToInt(builtin);
DCHECK(IsBuiltinId(index));
return builtin_metadata[index].name;
}
@@ -262,7 +262,7 @@ void Builtins::PrintBuiltinSize() {
// static
Address Builtins::CppEntryOf(Builtin builtin) {
DCHECK(Builtins::IsCpp(builtin));
return builtin_metadata[static_cast<int>(builtin)].data.cpp_entry;
return builtin_metadata[ToInt(builtin)].data.cpp_entry;
}

// static
@@ -292,18 +292,24 @@ bool Builtins::IsIsolateIndependentBuiltin(const Code code) {
}

// static
void Builtins::InitializeBuiltinEntryTable(Isolate* isolate) {
EmbeddedData d = EmbeddedData::FromBlob(isolate);
Address* builtin_entry_table = isolate->builtin_entry_table();
for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
++builtin) {
// TODO(jgruber,chromium:1020986): Remove the CHECK once the linked issue is
// resolved.
CHECK(
Builtins::IsBuiltinId(isolate->heap()->builtin(builtin).builtin_id()));
DCHECK(isolate->heap()->builtin(builtin).is_off_heap_trampoline());
builtin_entry_table[static_cast<int>(builtin)] =
d.InstructionStartOfBuiltin(builtin);
void Builtins::InitializeIsolateDataTables(Isolate* isolate) {
EmbeddedData embedded_data = EmbeddedData::FromBlob(isolate);
IsolateData* isolate_data = isolate->isolate_data();

// The entry table.
for (Builtin i = Builtins::kFirst; i <= Builtins::kLast; ++i) {
DCHECK(Builtins::IsBuiltinId(isolate->heap()->builtin(i).builtin_id()));
DCHECK(isolate->heap()->builtin(i).is_off_heap_trampoline());
isolate_data->builtin_entry_table()[ToInt(i)] =
embedded_data.InstructionStartOfBuiltin(i);
}

// T0 tables.
for (Builtin i = Builtins::kFirst; i <= Builtins::kLastTier0; ++i) {
const int ii = ToInt(i);
isolate_data->builtin_tier0_entry_table()[ii] =
isolate_data->builtin_entry_table()[ii];
isolate_data->builtin_tier0_table()[ii] = isolate_data->builtin_table()[ii];
}
}

@@ -314,10 +320,10 @@ void Builtins::EmitCodeCreateEvents(Isolate* isolate) {
return; // No need to iterate the entire table in this case.
}

Address* builtins = isolate->builtins_table();
Address* builtins = isolate->builtin_table();
int i = 0;
HandleScope scope(isolate);
for (; i < static_cast<int>(Builtin::kFirstBytecodeHandler); i++) {
for (; i < ToInt(Builtin::kFirstBytecodeHandler); i++) {
Handle<AbstractCode> code(AbstractCode::cast(Object(builtins[i])), isolate);
PROFILE(isolate, CodeCreateEvent(CodeEventListener::BUILTIN_TAG, code,
Builtins::name(FromInt(i))));
Expand Down Expand Up @@ -420,7 +426,7 @@ Handle<ByteArray> Builtins::GenerateOffHeapTrampolineRelocInfo(

Builtins::Kind Builtins::KindOf(Builtin builtin) {
DCHECK(IsBuiltinId(builtin));
return builtin_metadata[static_cast<int>(builtin)].kind;
return builtin_metadata[ToInt(builtin)].kind;
}

// static
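Throughout this file the repeated `static_cast<int>(builtin)` expressions are replaced with `ToInt(builtin)`. The helper itself is not part of this diff; presumably it is a thin constexpr wrapper along these lines:

```cpp
// Assumption, not shown in this diff: sketch of Builtins::ToInt as implied by
// its call sites above.
static constexpr int ToInt(Builtin builtin) {
  return static_cast<int>(builtin);
}
```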