Skip to content

Commit

Permalink
Bug 1240583 - Odin: refactor x86/x64 loads/stores (r=sunfish)
Browse files Browse the repository at this point in the history
  • Loading branch information
Luke Wagner committed Feb 9, 2016
1 parent 7a0c82a commit 267f302
Show file tree
Hide file tree
Showing 4 changed files with 105 additions and 76 deletions.
59 changes: 20 additions & 39 deletions js/src/jit/x64/CodeGenerator-x64.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -300,10 +300,7 @@ CodeGeneratorX64::emitSimdLoad(LAsmJSLoadHeap* ins)
? Operand(HeapReg, mir->offset())
: Operand(HeapReg, ToRegister(ptr), TimesOne, mir->offset());

uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (gen->needsAsmJSBoundsCheckBranch(mir))
maybeCmpOffset = emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ptr),
masm.asmOnOutOfBoundsLabel());
uint32_t maybeCmpOffset = maybeEmitThrowingAsmJSBoundsCheck(mir, mir, ptr);

unsigned numElems = mir->numSimdElems();
if (numElems == 3) {
Expand Down Expand Up @@ -363,19 +360,9 @@ CodeGeneratorX64::visitAsmJSLoadHeap(LAsmJSLoadHeap* ins)
: Operand(HeapReg, ToRegister(ptr), TimesOne, mir->offset());

memoryBarrier(mir->barrierBefore());
OutOfLineLoadTypedArrayOutOfBounds* ool = nullptr;
uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (gen->needsAsmJSBoundsCheckBranch(mir)) {
Label* jumpTo = nullptr;
if (mir->isAtomicAccess()) {
jumpTo = masm.asmOnOutOfBoundsLabel();
} else {
ool = new(alloc()) OutOfLineLoadTypedArrayOutOfBounds(ToAnyRegister(out), accessType);
addOutOfLineCode(ool, mir);
jumpTo = ool->entry();
}
maybeCmpOffset = emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ptr), jumpTo);
}

OutOfLineLoadTypedArrayOutOfBounds* ool;
uint32_t maybeCmpOffset = maybeEmitAsmJSLoadBoundsCheck(mir, ins, &ool);

uint32_t before = masm.size();
switch (accessType) {
Expand All @@ -394,12 +381,16 @@ CodeGeneratorX64::visitAsmJSLoadHeap(LAsmJSLoadHeap* ins)
MOZ_CRASH("unexpected array type");
}
uint32_t after = masm.size();

verifyHeapAccessDisassembly(before, after, /*isLoad=*/true, accessType, 0, srcAddr, *out->output());

if (ool) {
cleanupAfterAsmJSBoundsCheckBranch(mir, ToRegister(ptr));
masm.bind(ool->rejoin());
}

memoryBarrier(mir->barrierAfter());

masm.append(wasm::HeapAccess(before, wasm::HeapAccess::CarryOn, maybeCmpOffset));
}

Expand Down Expand Up @@ -455,10 +446,7 @@ CodeGeneratorX64::emitSimdStore(LAsmJSStoreHeap* ins)
? Operand(HeapReg, mir->offset())
: Operand(HeapReg, ToRegister(ptr), TimesOne, mir->offset());

uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (gen->needsAsmJSBoundsCheckBranch(mir))
maybeCmpOffset = emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ptr),
masm.asmOnOutOfBoundsLabel());
uint32_t maybeCmpOffset = maybeEmitThrowingAsmJSBoundsCheck(mir, mir, ptr);

unsigned numElems = mir->numSimdElems();
if (numElems == 3) {
Expand Down Expand Up @@ -516,16 +504,9 @@ CodeGeneratorX64::visitAsmJSStoreHeap(LAsmJSStoreHeap* ins)
: Operand(HeapReg, ToRegister(ptr), TimesOne, mir->offset());

memoryBarrier(mir->barrierBefore());
Label* rejoin = nullptr;
uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (gen->needsAsmJSBoundsCheckBranch(mir)) {
Label* jumpTo = nullptr;
if (mir->isAtomicAccess())
jumpTo = masm.asmOnOutOfBoundsLabel();
else
rejoin = jumpTo = alloc().lifoAlloc()->newInfallible<Label>();
maybeCmpOffset = emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ptr), jumpTo);
}

Label* rejoin;
uint32_t maybeCmpOffset = maybeEmitAsmJSStoreBoundsCheck(mir, ins, &rejoin);

uint32_t before = masm.size();
if (value->isConstant()) {
Expand Down Expand Up @@ -562,12 +543,16 @@ CodeGeneratorX64::visitAsmJSStoreHeap(LAsmJSStoreHeap* ins)
}
}
uint32_t after = masm.size();

verifyHeapAccessDisassembly(before, after, /*isLoad=*/false, accessType, 0, dstAddr, *value);

if (rejoin) {
cleanupAfterAsmJSBoundsCheckBranch(mir, ToRegister(ptr));
masm.bind(rejoin);
}

memoryBarrier(mir->barrierAfter());

masm.append(wasm::HeapAccess(before, wasm::HeapAccess::CarryOn, maybeCmpOffset));
}

Expand All @@ -585,8 +570,7 @@ CodeGeneratorX64::visitAsmJSCompareExchangeHeap(LAsmJSCompareExchangeHeap* ins)
Register oldval = ToRegister(ins->oldValue());
Register newval = ToRegister(ins->newValue());

// Note that we can't use
// needsAsmJSBoundsCheckBranch/emitAsmJSBoundsCheckBranch/cleanupAfterAsmJSBoundsCheckBranch
// Note that we can't use the same machinery as normal asm.js loads/stores
// since signal-handler bounds checking is not yet implemented for atomic accesses.
uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (mir->needsBoundsCheck()) {
Expand Down Expand Up @@ -620,8 +604,7 @@ CodeGeneratorX64::visitAsmJSAtomicExchangeHeap(LAsmJSAtomicExchangeHeap* ins)
BaseIndex srcAddr(HeapReg, ToRegister(ptr), TimesOne, mir->offset());
Register value = ToRegister(ins->value());

// Note that we can't use
// needsAsmJSBoundsCheckBranch/emitAsmJSBoundsCheckBranch/cleanupAfterAsmJSBoundsCheckBranch
// Note that we can't use the same machinery as normal asm.js loads/stores
// since signal-handler bounds checking is not yet implemented for atomic accesses.
uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (mir->needsBoundsCheck()) {
Expand Down Expand Up @@ -655,8 +638,7 @@ CodeGeneratorX64::visitAsmJSAtomicBinopHeap(LAsmJSAtomicBinopHeap* ins)

BaseIndex srcAddr(HeapReg, ptrReg, TimesOne, mir->offset());

// Note that we can't use
// needsAsmJSBoundsCheckBranch/emitAsmJSBoundsCheckBranch/cleanupAfterAsmJSBoundsCheckBranch
// Note that we can't use the same machinery as normal asm.js loads/stores
// since signal-handler bounds checking is not yet implemented for atomic accesses.
uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (mir->needsBoundsCheck()) {
Expand Down Expand Up @@ -699,8 +681,7 @@ CodeGeneratorX64::visitAsmJSAtomicBinopHeapForEffect(LAsmJSAtomicBinopHeapForEff

BaseIndex srcAddr(HeapReg, ptrReg, TimesOne, mir->offset());

// Note that we can't use
// needsAsmJSBoundsCheckBranch/emitAsmJSBoundsCheckBranch/cleanupAfterAsmJSBoundsCheckBranch
// Note that we can't use the same machinery as normal asm.js loads/stores
// since signal-handler bounds checking is not yet implemented for atomic accesses.
uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (mir->needsBoundsCheck()) {
Expand Down
52 changes: 50 additions & 2 deletions js/src/jit/x86-shared/CodeGenerator-x86-shared.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -377,13 +377,61 @@ CodeGeneratorX86Shared::emitAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* acces
// field, so -access->endOffset() will turn into
// (heapLength - access->endOffset()), allowing us to test whether the end
// of the access is beyond the end of the heap.
uint32_t maybeCmpOffset = masm.cmp32WithPatch(ptr, Imm32(-access->endOffset())).offset();
uint32_t cmpOffset = masm.cmp32WithPatch(ptr, Imm32(-access->endOffset())).offset();
masm.j(Assembler::Above, fail);

if (pass)
masm.bind(pass);

return maybeCmpOffset;
return cmpOffset;
}

uint32_t
CodeGeneratorX86Shared::maybeEmitThrowingAsmJSBoundsCheck(const MAsmJSHeapAccess* access,
                                                          const MInstruction* mir,
                                                          const LAllocation* ptr)
{
    // Used by SIMD and atomic accesses, which jump straight to the shared
    // out-of-bounds label on failure rather than rejoining with a default
    // value. Returns the patchable cmp offset, or NoLengthCheck when no
    // branching bounds check is needed.
    if (gen->needsAsmJSBoundsCheckBranch(access)) {
        return emitAsmJSBoundsCheckBranch(access, mir, ToRegister(ptr),
                                          masm.asmOnOutOfBoundsLabel());
    }

    return wasm::HeapAccess::NoLengthCheck;
}

uint32_t
CodeGeneratorX86Shared::maybeEmitAsmJSLoadBoundsCheck(const MAsmJSLoadHeap* mir, LAsmJSLoadHeap* ins,
                                                      OutOfLineLoadTypedArrayOutOfBounds** ool)
{
    // Emit the bounds check (if any) for a plain or atomic asm.js load.
    // On the non-atomic out-of-bounds path, *ool is set to an out-of-line
    // stub that produces the default value and rejoins; otherwise *ool stays
    // null. SIMD loads take a different path entirely.
    MOZ_ASSERT(!Scalar::isSimdType(mir->accessType()));

    *ool = nullptr;
    if (!gen->needsAsmJSBoundsCheckBranch(mir))
        return wasm::HeapAccess::NoLengthCheck;

    // Atomic accesses throw on out-of-bounds instead of loading a default.
    if (mir->isAtomicAccess())
        return maybeEmitThrowingAsmJSBoundsCheck(mir, mir, ins->ptr());

    auto* outOfBounds = new(alloc()) OutOfLineLoadTypedArrayOutOfBounds(ToAnyRegister(ins->output()),
                                                                        mir->accessType());
    addOutOfLineCode(outOfBounds, mir);
    *ool = outOfBounds;

    return emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ins->ptr()), outOfBounds->entry());
}

uint32_t
CodeGeneratorX86Shared::maybeEmitAsmJSStoreBoundsCheck(const MAsmJSStoreHeap* mir, LAsmJSStoreHeap* ins,
                                                       Label** rejoin)
{
    // Emit the bounds check (if any) for a plain or atomic asm.js store.
    // A non-atomic out-of-bounds store is simply skipped: *rejoin is set to
    // a label the check branches to past the store; otherwise *rejoin stays
    // null. SIMD stores take a different path entirely.
    MOZ_ASSERT(!Scalar::isSimdType(mir->accessType()));

    *rejoin = nullptr;
    if (!gen->needsAsmJSBoundsCheckBranch(mir))
        return wasm::HeapAccess::NoLengthCheck;

    // Atomic accesses throw on out-of-bounds instead of skipping the store.
    if (mir->isAtomicAccess())
        return maybeEmitThrowingAsmJSBoundsCheck(mir, mir, ins->ptr());

    Label* skipStore = alloc().lifoAlloc()->newInfallible<Label>();
    *rejoin = skipStore;

    return emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ins->ptr()), skipStore);
}

void
Expand Down
25 changes: 21 additions & 4 deletions js/src/jit/x86-shared/CodeGenerator-x86-shared.h
Original file line number Diff line number Diff line change
Expand Up @@ -91,10 +91,27 @@ class CodeGeneratorX86Shared : public CodeGeneratorShared
}
};

// Functions for emitting bounds-checking code with branches.
MOZ_WARN_UNUSED_RESULT
uint32_t emitAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* mir, const MInstruction* ins,
Register ptr, Label* fail);
private:
MOZ_WARN_UNUSED_RESULT uint32_t
emitAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* mir, const MInstruction* ins,
Register ptr, Label* fail);

public:
// For SIMD and atomic loads and stores (which throw on out-of-bounds):
MOZ_WARN_UNUSED_RESULT uint32_t
maybeEmitThrowingAsmJSBoundsCheck(const MAsmJSHeapAccess* mir, const MInstruction* ins,
const LAllocation* ptr);

// For asm.js plain and atomic loads that possibly require a bounds check:
MOZ_WARN_UNUSED_RESULT uint32_t
maybeEmitAsmJSLoadBoundsCheck(const MAsmJSLoadHeap* mir, LAsmJSLoadHeap* ins,
OutOfLineLoadTypedArrayOutOfBounds** ool);

// For asm.js plain and atomic stores that possibly require a bounds check:
MOZ_WARN_UNUSED_RESULT uint32_t
maybeEmitAsmJSStoreBoundsCheck(const MAsmJSStoreHeap* mir, LAsmJSStoreHeap* ins,
Label** rejoin);

void cleanupAfterAsmJSBoundsCheckBranch(const MAsmJSHeapAccess* mir, Register ptr);

NonAssertingLabel deoptLabel_;
Expand Down
45 changes: 14 additions & 31 deletions js/src/jit/x86/CodeGenerator-x86.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -395,10 +395,7 @@ CodeGeneratorX86::emitSimdLoad(LAsmJSLoadHeap* ins)
? Operand(PatchedAbsoluteAddress(mir->offset()))
: Operand(ToRegister(ptr), mir->offset());

uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (gen->needsAsmJSBoundsCheckBranch(mir))
maybeCmpOffset = emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ptr),
masm.asmOnOutOfBoundsLabel());
uint32_t maybeCmpOffset = maybeEmitThrowingAsmJSBoundsCheck(mir, mir, ptr);

unsigned numElems = mir->numSimdElems();
if (numElems == 3) {
Expand Down Expand Up @@ -452,28 +449,21 @@ CodeGeneratorX86::visitAsmJSLoadHeap(LAsmJSLoadHeap* ins)
: Operand(ToRegister(ptr), mir->offset());

memoryBarrier(mir->barrierBefore());
OutOfLineLoadTypedArrayOutOfBounds* ool = nullptr;
uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (gen->needsAsmJSBoundsCheckBranch(mir)) {
Label* jumpTo = nullptr;
if (mir->isAtomicAccess()) {
jumpTo = masm.asmOnOutOfBoundsLabel();
} else {
ool = new(alloc()) OutOfLineLoadTypedArrayOutOfBounds(ToAnyRegister(out), accessType);
addOutOfLineCode(ool, mir);
jumpTo = ool->entry();
}
maybeCmpOffset = emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ptr), jumpTo);
}

OutOfLineLoadTypedArrayOutOfBounds* ool;
uint32_t maybeCmpOffset = maybeEmitAsmJSLoadBoundsCheck(mir, ins, &ool);

uint32_t before = masm.size();
load(accessType, srcAddr, out);
uint32_t after = masm.size();

if (ool) {
cleanupAfterAsmJSBoundsCheckBranch(mir, ToRegister(ptr));
masm.bind(ool->rejoin());
}

memoryBarrier(mir->barrierAfter());

masm.append(wasm::HeapAccess(before, after, maybeCmpOffset));
}

Expand Down Expand Up @@ -573,10 +563,7 @@ CodeGeneratorX86::emitSimdStore(LAsmJSStoreHeap* ins)
? Operand(PatchedAbsoluteAddress(mir->offset()))
: Operand(ToRegister(ptr), mir->offset());

uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (gen->needsAsmJSBoundsCheckBranch(mir))
maybeCmpOffset = emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ptr),
masm.asmOnOutOfBoundsLabel());
uint32_t maybeCmpOffset = maybeEmitThrowingAsmJSBoundsCheck(mir, mir, ptr);

unsigned numElems = mir->numSimdElems();
if (numElems == 3) {
Expand Down Expand Up @@ -629,25 +616,21 @@ CodeGeneratorX86::visitAsmJSStoreHeap(LAsmJSStoreHeap* ins)
: Operand(ToRegister(ptr), mir->offset());

memoryBarrier(mir->barrierBefore());
Label* rejoin = nullptr;
uint32_t maybeCmpOffset = wasm::HeapAccess::NoLengthCheck;
if (gen->needsAsmJSBoundsCheckBranch(mir)) {
Label* jumpTo = nullptr;
if (mir->isAtomicAccess())
jumpTo = masm.asmOnOutOfBoundsLabel();
else
rejoin = jumpTo = alloc().lifoAlloc()->newInfallible<Label>();
maybeCmpOffset = emitAsmJSBoundsCheckBranch(mir, mir, ToRegister(ptr), jumpTo);
}

Label* rejoin;
uint32_t maybeCmpOffset = maybeEmitAsmJSStoreBoundsCheck(mir, ins, &rejoin);

uint32_t before = masm.size();
store(accessType, value, dstAddr);
uint32_t after = masm.size();

if (rejoin) {
cleanupAfterAsmJSBoundsCheckBranch(mir, ToRegister(ptr));
masm.bind(rejoin);
}

memoryBarrier(mir->barrierAfter());

masm.append(wasm::HeapAccess(before, after, maybeCmpOffset));
}

Expand Down

0 comments on commit 267f302

Please sign in to comment.