[maglev] Make CheckNumber/InternalizedString arch independent
Bug: v8:7700
Change-Id: I7a646b80b4fef3de42f71023ffdc1c3eb95d5585
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4650731
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Auto-Submit: Victor Gomes <victorgomes@chromium.org>
Commit-Queue: Victor Gomes <victorgomes@chromium.org>
Cr-Commit-Position: refs/heads/main@{#88526}
victorgomes authored and V8 LUCI CQ committed Jun 28, 2023
1 parent 9b14eae commit bbdbe26
Showing 8 changed files with 94 additions and 172 deletions.
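
The diff follows one pattern per backend: the architecture-specific CheckNumber and CheckedInternalizedString bodies are deleted from the arm, arm64 and x64 maglev-ir files, each backend gains a small MaglevAssembler::LoadInstanceType helper, and a single shared implementation written against that helper is added to maglev-ir.cc. As a rough, hedged sketch of that structure (all names below are invented for illustration and are not V8 code):

#include <cstdint>
#include <cstdio>

// Stand-in for a heap object whose map carries a 16-bit instance type.
struct FakeObject {
  uint16_t instance_type;
};

// The only per-architecture piece: a zero-extending 16-bit load. In the real
// assemblers this is ldrh (arm), Ldrh (arm64) or movzxwl (x64).
uint32_t LoadInstanceType16(const FakeObject& o) { return o.instance_type; }

constexpr uint32_t kFakeBigIntType = 0x41;  // invented constant

// Architecture-independent check, written once in terms of the primitive.
bool IsBigIntLike(const FakeObject& o) {
  return LoadInstanceType16(o) == kFakeBigIntType;
}

int main() {
  FakeObject a{0x41}, b{0x07};
  std::printf("%d %d\n", IsBigIntLike(a), IsBigIntLike(b));  // prints: 1 0
}
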
6 changes: 6 additions & 0 deletions src/maglev/arm/maglev-assembler-arm-inl.h
@@ -608,6 +608,12 @@ inline Condition MaglevAssembler::IsNotCallableNorUndetactable(
return kEqual;
}

inline void MaglevAssembler::LoadInstanceType(Register instance_type,
Register heap_object) {
LoadMap(instance_type, heap_object);
ldrh(instance_type, FieldMemOperand(instance_type, Map::kInstanceTypeOffset));
}

inline void MaglevAssembler::IsObjectType(Register heap_object,
InstanceType type) {
ScratchRegisterScope temps(this);
18 changes: 0 additions & 18 deletions src/maglev/arm/maglev-ir-arm.cc
@@ -152,14 +152,6 @@ void FoldedAllocation::GenerateCode(MaglevAssembler* masm,
__ add(ToRegister(result()), ToRegister(raw_allocation()), Operand(offset()));
}

void CheckNumber::SetValueLocationConstraints() {
UseRegister(receiver_input());
}
void CheckNumber::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
MAGLEV_NODE_NOT_IMPLEMENTED(CheckNumber);
}

int CheckedObjectToIndex::MaxCallStackArgs() const { return 0; }

void Int32AddWithOverflow::SetValueLocationConstraints() {
@@ -696,16 +688,6 @@ void CheckJSDataViewBounds::GenerateCode(MaglevAssembler* masm,
MAGLEV_NODE_NOT_IMPLEMENTED(CheckJSDataViewBounds);
}

void CheckedInternalizedString::SetValueLocationConstraints() {
UseRegister(object_input());
DefineSameAsFirst(this);
set_temporaries_needed(1);
}
void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
MAGLEV_NODE_NOT_IMPLEMENTED(CheckedInternalizedString);
}

void HoleyFloat64ToMaybeNanFloat64::SetValueLocationConstraints() {
UseRegister(input());
DefineAsRegister(this);
7 changes: 7 additions & 0 deletions src/maglev/arm64/maglev-assembler-arm64-inl.h
@@ -707,6 +707,13 @@ inline Condition MaglevAssembler::IsNotCallableNorUndetactable(
return kEqual;
}

inline void MaglevAssembler::LoadInstanceType(Register instance_type,
Register heap_object) {
LoadMap(instance_type, heap_object);
Ldrh(instance_type.W(),
FieldMemOperand(instance_type, Map::kInstanceTypeOffset));
}

inline void MaglevAssembler::IsObjectType(Register heap_object,
InstanceType type) {
ScratchRegisterScope temps(this);
80 changes: 0 additions & 80 deletions src/maglev/arm64/maglev-ir-arm64.cc
@@ -151,33 +151,6 @@ void FoldedAllocation::GenerateCode(MaglevAssembler* masm,
__ Add(ToRegister(result()), ToRegister(raw_allocation()), offset());
}

void CheckNumber::SetValueLocationConstraints() {
UseRegister(receiver_input());
}
void CheckNumber::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
Label done;
MaglevAssembler::ScratchRegisterScope temps(masm);
Register scratch = temps.Acquire();
Register value = ToRegister(receiver_input());
// If {value} is a Smi or a HeapNumber, we're done.
__ JumpIfSmi(value, &done);
if (mode() == Object::Conversion::kToNumeric) {
__ LoadMap(scratch, value);
__ CompareRoot(scratch.W(), RootIndex::kHeapNumberMap);
// Jump to done if it is a HeapNumber.
__ B(&done, eq);
// Check if it is a BigInt.
__ Ldrh(scratch.W(), FieldMemOperand(scratch, Map::kInstanceTypeOffset));
__ Cmp(scratch, Immediate(BIGINT_TYPE));
} else {
__ Ldr(scratch.W(), FieldMemOperand(value, HeapObject::kMapOffset));
__ CompareRoot(scratch, RootIndex::kHeapNumberMap);
}
__ EmitEagerDeoptIf(ne, DeoptimizeReason::kNotANumber, this);
__ Bind(&done);
}

int CheckedObjectToIndex::MaxCallStackArgs() const { return 0; }

void Int32AddWithOverflow::SetValueLocationConstraints() {
@@ -666,59 +639,6 @@ void CheckJSDataViewBounds::GenerateCode(MaglevAssembler* masm,
__ EmitEagerDeoptIf(hs, DeoptimizeReason::kOutOfBounds, this);
}

void CheckedInternalizedString::SetValueLocationConstraints() {
UseRegister(object_input());
DefineSameAsFirst(this);
set_temporaries_needed(1);
}
void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
MaglevAssembler::ScratchRegisterScope temps(masm);
Register scratch = temps.Acquire();
Register object = ToRegister(object_input());

if (check_type() == CheckType::kOmitHeapObjectCheck) {
__ AssertNotSmi(object);
} else {
Condition is_smi = __ CheckSmi(object);
__ EmitEagerDeoptIf(is_smi, DeoptimizeReason::kWrongMap, this);
}

__ LoadMap(scratch, object);
__ RecordComment("Test IsInternalizedString");
// Go to the slow path if this is a non-string, or a non-internalised string.
__ Ldrh(scratch.W(), FieldMemOperand(scratch, Map::kInstanceTypeOffset));
__ Tst(scratch.W(), Immediate(kIsNotStringMask | kIsNotInternalizedMask));
static_assert((kStringTag | kInternalizedTag) == 0);
ZoneLabelRef done(masm);
__ JumpToDeferredIf(
ne,
[](MaglevAssembler* masm, ZoneLabelRef done, Register object,
CheckedInternalizedString* node, EagerDeoptInfo* deopt_info,
Register instance_type) {
__ RecordComment("Deferred Test IsThinString");
// Deopt if this isn't a thin string.
__ Cmp(instance_type.W(), Immediate(THIN_STRING_TYPE));
__ EmitEagerDeoptIf(ne, DeoptimizeReason::kWrongMap, node);
__ LoadTaggedField(object,
FieldMemOperand(object, ThinString::kActualOffset));
if (v8_flags.debug_code) {
__ RecordComment("DCHECK IsInternalizedString");
Register scratch = instance_type;
__ LoadMap(scratch, object);
__ Ldrh(scratch.W(),
FieldMemOperand(scratch, Map::kInstanceTypeOffset));
__ Tst(scratch.W(),
Immediate(kIsNotStringMask | kIsNotInternalizedMask));
static_assert((kStringTag | kInternalizedTag) == 0);
__ Check(eq, AbortReason::kUnexpectedValue);
}
__ jmp(*done);
},
done, object, this, eager_deopt_info(), scratch);
__ Bind(*done);
}

void HoleyFloat64ToMaybeNanFloat64::SetValueLocationConstraints() {
UseRegister(input());
DefineAsRegister(this);
1 change: 1 addition & 0 deletions src/maglev/maglev-assembler.h
@@ -348,6 +348,7 @@ class MaglevAssembler : public MacroAssembler {
inline Condition IsCallableAndNotUndetectable(Register map, Register scratch);
inline Condition IsNotCallableNorUndetactable(Register map, Register scratch);

inline void LoadInstanceType(Register instance_type, Register heap_object);
inline void IsObjectType(Register heap_object, InstanceType type);
inline void CompareObjectType(Register heap_object, InstanceType type);
inline void JumpIfJSAnyIsNotPrimitive(Register heap_object, Label* target,
74 changes: 74 additions & 0 deletions src/maglev/maglev-ir.cc
@@ -4158,6 +4158,80 @@ void Float64ToUint8Clamped::GenerateCode(MaglevAssembler* masm,
__ bind(&done);
}

void CheckNumber::SetValueLocationConstraints() {
UseRegister(receiver_input());
}
void CheckNumber::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
Label done;
MaglevAssembler::ScratchRegisterScope temps(masm);
Register scratch = temps.GetDefaultScratchRegister();
Register value = ToRegister(receiver_input());
// If {value} is a Smi or a HeapNumber, we're done.
__ JumpIfSmi(value, &done, Label::Distance::kNear);
if (mode() == Object::Conversion::kToNumeric) {
__ LoadMap(scratch, value);
__ CompareRoot(scratch, RootIndex::kHeapNumberMap);
// Jump to done if it is a HeapNumber.
__ JumpIf(kEqual, &done, Label::Distance::kNear);
// Check if it is a BigInt.
__ CompareRoot(scratch, RootIndex::kBigIntMap);
} else {
__ CompareMapWithRoot(value, RootIndex::kHeapNumberMap, scratch);
}
__ EmitEagerDeoptIf(kNotEqual, DeoptimizeReason::kNotANumber, this);
__ bind(&done);
}

void CheckedInternalizedString::SetValueLocationConstraints() {
UseRegister(object_input());
DefineSameAsFirst(this);
}
void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
Register object = ToRegister(object_input());
MaglevAssembler::ScratchRegisterScope temps(masm);
Register instance_type = temps.GetDefaultScratchRegister();
if (check_type() == CheckType::kOmitHeapObjectCheck) {
__ AssertNotSmi(object);
} else {
Condition is_smi = __ CheckSmi(object);
__ EmitEagerDeoptIf(is_smi, DeoptimizeReason::kWrongMap, this);
}
__ LoadInstanceType(instance_type, object);
__ RecordComment("Test IsInternalizedString");
// Go to the slow path if this is a non-string, or a non-internalised string.
static_assert((kStringTag | kInternalizedTag) == 0);
ZoneLabelRef done(masm);
__ TestInt32AndJumpIfAnySet(
instance_type, kIsNotStringMask | kIsNotInternalizedMask,
__ MakeDeferredCode(
[](MaglevAssembler* masm, ZoneLabelRef done,
CheckedInternalizedString* node, Register object,
Register instance_type) {
__ RecordComment("Deferred Test IsThinString");
// Deopt if this isn't a thin string.
__ CompareInt32AndJumpIf(
instance_type, THIN_STRING_TYPE, kNotEqual,
__ GetDeoptLabel(node, DeoptimizeReason::kWrongMap));
// Load internalized string from thin string.
__ LoadTaggedField(object, object, ThinString::kActualOffset);
if (v8_flags.debug_code) {
__ RecordComment("DCHECK IsInternalizedString");
Label checked;
__ LoadInstanceType(instance_type, object);
__ TestInt32AndJumpIfAllClear(
instance_type, kIsNotStringMask | kIsNotInternalizedMask,
&checked);
__ Abort(AbortReason::kUnexpectedValue);
__ bind(&checked);
}
__ Jump(*done);
},
done, this, object, instance_type));
__ bind(*done);
}

void CheckedNumberToUint8Clamped::SetValueLocationConstraints() {
UseRegister(input());
DefineSameAsFirst(this);
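
The new shared CheckedInternalizedString above relies on both kStringTag and kInternalizedTag being zero (see the static_assert), so a single masked test of the instance type distinguishes internalized strings from everything else, and only the failing case needs the deferred thin-string path. A standalone sketch of that bit test, with illustrative stand-in values rather than V8's real constants:

#include <cstdint>
#include <cstdio>

// Hypothetical layout: one bit means "not a string", another "not internalized".
constexpr uint16_t kIsNotInternalizedMask = 1 << 6;
constexpr uint16_t kIsNotStringMask = 1 << 7;

constexpr uint16_t kInternalizedStringType = 0x03;                     // both bits clear
constexpr uint16_t kRegularStringType = 0x03 | kIsNotInternalizedMask;  // string, not internalized
constexpr uint16_t kNonStringType = 0x12 | kIsNotStringMask;            // not a string at all

// Mirrors TestInt32AndJumpIfAnySet: the fast path is taken exactly when both
// "not a string" and "not internalized" bits are clear.
bool IsInternalizedString(uint16_t instance_type) {
  return (instance_type & (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
}

int main() {
  std::printf("%d %d %d\n",
              IsInternalizedString(kInternalizedStringType),  // 1
              IsInternalizedString(kRegularStringType),       // 0
              IsInternalizedString(kNonStringType));          // 0
}
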
6 changes: 6 additions & 0 deletions src/maglev/x64/maglev-assembler-x64-inl.h
@@ -601,6 +601,12 @@ inline Condition MaglevAssembler::IsNotCallableNorUndetactable(
return kEqual;
}

inline void MaglevAssembler::LoadInstanceType(Register instance_type,
Register heap_object) {
LoadMap(instance_type, heap_object);
movzxwl(instance_type, FieldOperand(instance_type, Map::kInstanceTypeOffset));
}

inline void MaglevAssembler::IsObjectType(Register heap_object,
InstanceType type) {
MacroAssembler::IsObjectType(heap_object, type, kScratchRegister);
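
Each backend's LoadInstanceType uses its native zero-extending 16-bit load (ldrh on arm, Ldrh on arm64, movzxwl here on x64), because the shared code in maglev-ir.cc then masks and compares the value with 32-bit operations that assume the upper bits are zero. A small illustration of why zero-extension, not sign-extension, is the right choice for an unsigned 16-bit field; the value is made up:

#include <cstdint>
#include <cstdio>

int main() {
  const uint16_t field = 0xFF86;  // illustrative 16-bit value with the top bit set
  const uint32_t zero_extended = field;                       // what movzxwl / ldrh produce
  const int32_t sign_extended = static_cast<int16_t>(field);  // what a signed load would produce

  std::printf("%u %d\n", zero_extended, sign_extended);  // 65414 -122
  // Only the zero-extended form compares equal to the 32-bit constant.
  std::printf("%d\n", zero_extended == 0xFF86u);         // 1
}
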
74 changes: 0 additions & 74 deletions src/maglev/x64/maglev-ir-x64.cc
@@ -37,29 +37,6 @@ void FoldedAllocation::GenerateCode(MaglevAssembler* masm,
Operand(ToRegister(raw_allocation()), offset()));
}

void CheckNumber::SetValueLocationConstraints() {
UseRegister(receiver_input());
}
void CheckNumber::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
Label done;
Register value = ToRegister(receiver_input());
// If {value} is a Smi or a HeapNumber, we're done.
__ JumpIfSmi(value, &done);
__ CompareRoot(FieldOperand(value, HeapObject::kMapOffset),
RootIndex::kHeapNumberMap);
if (mode() == Object::Conversion::kToNumeric) {
// Jump to done if it is a HeapNumber.
__ j(equal, &done);
// Check if it is a BigInt.
__ LoadMap(kScratchRegister, value);
__ cmpw(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
Immediate(BIGINT_TYPE));
}
__ EmitEagerDeoptIf(not_equal, DeoptimizeReason::kNotANumber, this);
__ bind(&done);
}

void CheckJSTypedArrayBounds::SetValueLocationConstraints() {
UseRegister(receiver_input());
if (ElementsKindSize(elements_kind_) == 1) {
@@ -119,57 +96,6 @@ void CheckJSDataViewBounds::GenerateCode(MaglevAssembler* masm,
__ EmitEagerDeoptIf(above_equal, DeoptimizeReason::kOutOfBounds, this);
}

void CheckedInternalizedString::SetValueLocationConstraints() {
UseRegister(object_input());
set_temporaries_needed(1);
DefineSameAsFirst(this);
}
void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
MaglevAssembler::ScratchRegisterScope temps(masm);
Register map_tmp = temps.Acquire();
Register object = ToRegister(object_input());

if (check_type() == CheckType::kOmitHeapObjectCheck) {
__ AssertNotSmi(object);
} else {
Condition is_smi = __ CheckSmi(object);
__ EmitEagerDeoptIf(is_smi, DeoptimizeReason::kWrongMap, this);
}

__ LoadMap(map_tmp, object);
__ RecordComment("Test IsInternalizedString");
// Go to the slow path if this is a non-string, or a non-internalised string.
__ testw(FieldOperand(map_tmp, Map::kInstanceTypeOffset),
Immediate(kIsNotStringMask | kIsNotInternalizedMask));
static_assert((kStringTag | kInternalizedTag) == 0);
ZoneLabelRef done(masm);
__ JumpToDeferredIf(
not_zero,
[](MaglevAssembler* masm, ZoneLabelRef done, Register object,
CheckedInternalizedString* node, EagerDeoptInfo* deopt_info,
Register map_tmp) {
__ RecordComment("Deferred Test IsThinString");
__ movw(map_tmp, FieldOperand(map_tmp, Map::kInstanceTypeOffset));
__ cmpw(map_tmp, Immediate(THIN_STRING_TYPE));
// Deopt if this isn't a thin string.
__ EmitEagerDeoptIf(not_equal, DeoptimizeReason::kWrongMap, node);
__ LoadTaggedField(object,
FieldOperand(object, ThinString::kActualOffset));
if (v8_flags.debug_code) {
__ RecordComment("DCHECK IsInternalizedString");
__ LoadMap(map_tmp, object);
__ testw(FieldOperand(map_tmp, Map::kInstanceTypeOffset),
Immediate(kIsNotStringMask | kIsNotInternalizedMask));
static_assert((kStringTag | kInternalizedTag) == 0);
__ Check(zero, AbortReason::kUnexpectedValue);
}
__ jmp(*done);
},
done, object, this, eager_deopt_info(), map_tmp);
__ bind(*done);
}

int CheckedObjectToIndex::MaxCallStackArgs() const {
return MaglevAssembler::ArgumentStackSlotsForCFunctionCall(1);
}
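
The per-architecture CheckNumber bodies removed here (and in the arm64 file above) are now expressed once in maglev-ir.cc: a Smi or HeapNumber always passes, in Object::Conversion::kToNumeric mode a BigInt passes as well, and anything else takes the kNotANumber eager deopt. A plain-C++ sketch of that decision, with invented enum types standing in for the real tag and map checks:

#include <cstdio>

enum class FakeKind { kSmi, kHeapNumber, kBigInt, kString };
enum class Conversion { kToNumber, kToNumeric };

// Returns true when the value would pass CheckNumber; false models the
// kNotANumber eager deopt.
bool CheckNumberPasses(FakeKind kind, Conversion mode) {
  if (kind == FakeKind::kSmi || kind == FakeKind::kHeapNumber) return true;
  if (mode == Conversion::kToNumeric && kind == FakeKind::kBigInt) return true;
  return false;
}

int main() {
  std::printf("%d %d %d\n",
              CheckNumberPasses(FakeKind::kBigInt, Conversion::kToNumeric),   // 1
              CheckNumberPasses(FakeKind::kBigInt, Conversion::kToNumber),    // 0
              CheckNumberPasses(FakeKind::kString, Conversion::kToNumeric));  // 0
}
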
