deps: backport 8d6a228 from V8 upstream
Original commit message:

    [heap] fix crash during the scavenge of ArrayBuffer
    Scavenger should not attempt to visit ArrayBuffer's storage; it is a
    user-supplied pointer that may have any alignment. Visiting it may
    result in a crash.

    BUG=
    R=jochen

    Review URL: https://codereview.chromium.org/1406133003

    Cr-Commit-Position: refs/heads/master@{#31611}

PR-URL: #3549
Reviewed-By: Trevor Norris <trev.norris@gmail.com>
indutny committed Oct 28, 2015
1 parent aaf9b48 commit 3223704
Showing 3 changed files with 93 additions and 36 deletions.
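
Background for the reader: V8 stores small integers and heap pointers in the same machine word and tells them apart by the low tag bit (kHeapObjectTag). The scavenger walks an object's slots and chases every word that carries the heap-object tag. A JSArrayBuffer's backing store, however, is a raw embedder-supplied pointer; if its value happens to carry the tag bit, a visitor that walks that slot will dereference memory the GC does not own. A standalone sketch of the misclassification, assuming only the tag convention (plain C++, not V8 code):

    #include <cstdint>
    #include <cstdio>

    // Low bit of a tagged word: 1 = heap object, 0 = small integer (Smi).
    constexpr uintptr_t kHeapObjectTag = 1;

    static bool LooksLikeHeapObject(uintptr_t word) {
      return (word & kHeapObjectTag) != 0;
    }

    int main() {
      // An embedder-allocated buffer; the GC has no say over its address or
      // alignment. The tag bit is forced here for the demonstration -- a
      // real embedder pointer may carry it purely by chance.
      static uint8_t backing_store[16];
      uintptr_t word =
          reinterpret_cast<uintptr_t>(backing_store) | kHeapObjectTag;

      if (LooksLikeHeapObject(word)) {
        // A visitor that trusted the tag would untag this word and try to
        // read a map pointer through it -- the crash this commit fixes.
        printf("misclassified as a heap object: %p\n",
               reinterpret_cast<void*>(word));
      }
      return 0;
    }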
100 changes: 64 additions & 36 deletions deps/v8/src/heap/heap.cc
@@ -2016,42 +2016,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
         // for pointers to from semispace instead of looking for pointers
         // to new space.
         DCHECK(!target->IsMap());
-        Address obj_address = target->address();
-
-        // We are not collecting slots on new space objects during mutation
-        // thus we have to scan for pointers to evacuation candidates when we
-        // promote objects. But we should not record any slots in non-black
-        // objects. Grey object's slots would be rescanned.
-        // White object might not survive until the end of collection
-        // it would be a violation of the invariant to record it's slots.
-        bool record_slots = false;
-        if (incremental_marking()->IsCompacting()) {
-          MarkBit mark_bit = Marking::MarkBitFrom(target);
-          record_slots = Marking::IsBlack(mark_bit);
-        }
-#if V8_DOUBLE_FIELDS_UNBOXING
-        LayoutDescriptorHelper helper(target->map());
-        bool has_only_tagged_fields = helper.all_fields_tagged();
-
-        if (!has_only_tagged_fields) {
-          for (int offset = 0; offset < size;) {
-            int end_of_region_offset;
-            if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-              IterateAndMarkPointersToFromSpace(
-                  target, obj_address + offset,
-                  obj_address + end_of_region_offset, record_slots,
-                  &ScavengeObject);
-            }
-            offset = end_of_region_offset;
-          }
-        } else {
-#endif
-          IterateAndMarkPointersToFromSpace(target, obj_address,
-                                            obj_address + size, record_slots,
-                                            &ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-        }
-#endif
+
+        IteratePointersToFromSpace(target, size, &ScavengeObject);
       }
     }

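The record_slots logic being removed here (and reappearing in the new helper below) encodes an incremental-marking invariant worth spelling out: slots are recorded only for black objects, because grey objects will be rescanned anyway and white objects may not survive the collection. A toy restatement of that rule (illustrative, not V8 code):

    #include <cassert>

    // Marking colors: white = unvisited, grey = queued for rescan,
    // black = fully scanned.
    enum class Color { kWhite, kGrey, kBlack };

    // Slots may be recorded only while compacting, and only for black
    // objects: grey objects will be rescanned anyway, and a white object
    // may die before the collection ends, leaving a dangling slot entry.
    bool ShouldRecordSlots(Color color, bool compacting) {
      return compacting && color == Color::kBlack;
    }

    int main() {
      assert(ShouldRecordSlots(Color::kBlack, true));
      assert(!ShouldRecordSlots(Color::kGrey, true));    // rescanned later
      assert(!ShouldRecordSlots(Color::kWhite, true));   // may not survive
      assert(!ShouldRecordSlots(Color::kBlack, false));  // not compacting
      return 0;
    }
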
@@ -5184,6 +5150,68 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
 }


+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey object's slots would be rescanned.
+  // White object might not survive until the end of collection
+  // it would be a violation of the invariant to record it's slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
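
The IsJSArrayBuffer branch above is the heart of the fix: it visits the tagged header fields up to and including byte_length, skips the window that holds the raw backing-store pointer, and resumes visiting at the end of the fixed JSArrayBuffer header. A sketch of the resulting ranges, with made-up offsets standing in for the real field constants:

    #include <cstdio>

    int main() {
      // Made-up stand-ins for kPointerSize, JSArrayBuffer::kByteLengthOffset,
      // JSArrayBuffer::kSize and the object's allocated size; the real values
      // depend on the build and are irrelevant to the shape of the fix.
      const int kPointerSize = 8;
      const int kByteLengthOffset = 24;  // last tagged field before the store
      const int kSize = 48;              // end of the fixed JSArrayBuffer header
      const int object_size = 64;        // total size handed to the iterator

      // Range 1: map word through byte_length -- tagged, safe to visit.
      printf("visit [%2d, %2d)\n", 0, kByteLengthOffset + kPointerSize);
      // Skipped: the raw backing-store pointer (and flag words) live here.
      printf("skip  [%2d, %2d)\n", kByteLengthOffset + kPointerSize, kSize);
      // Range 2: anything past the fixed header (e.g. in-object properties)
      // is tagged again.
      printf("visit [%2d, %2d)\n", kSize, object_size);
      return 0;
    }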
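The FLAG_unbox_double_fields branch works differently: with double fields unboxed, an object is a sequence of tagged and raw byte regions described by its LayoutDescriptor, and only the tagged regions are scanned. A minimal sketch of that region walk, with an invented layout and none of V8's types:

    #include <cstdio>
    #include <vector>

    // One contiguous byte region of the object: tagged slots or raw data.
    struct Region {
      int end_offset;
      bool tagged;
    };

    int main() {
      // Invented layout: [0,16) tagged, [16,24) raw double, [24,40) tagged.
      std::vector<Region> layout = {{16, true}, {24, false}, {40, true}};

      // Mirrors the offset loop in the diff: find where the current region
      // ends, visit it only if it is tagged, then jump past it.
      int offset = 0;
      for (const Region& region : layout) {
        if (region.tagged) {
          printf("visit [%2d, %2d)\n", offset, region.end_offset);
        }
        offset = region.end_offset;
      }
      return 0;
    }
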
3 changes: 3 additions & 0 deletions deps/v8/src/heap/heap.h
@@ -961,6 +961,9 @@ class Heap {

   // Iterate pointers to from semispace of new space found in memory interval
   // from start to end within |object|.
+  void IteratePointersToFromSpace(HeapObject* target, int size,
+                                  ObjectSlotCallback callback);
+
   void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                          Address end, bool record_slots,
                                          ObjectSlotCallback callback);
26 changes: 26 additions & 0 deletions deps/v8/test/cctest/test-api.cc
@@ -14242,6 +14242,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
 }


+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  // Make sure the pointer looks like a heap object
+  Local<v8::Object> tmp = v8::Object::New(isolate);
+  uint8_t* store_ptr =
+      reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+  // Make `store_ptr` point to from space
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+  // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+  // Should not crash,
+  // i.e. backing store pointer should not be treated as a heap object pointer
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+
+  // Use `ab` to silence compiler warning
+  CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
 THREADED_TEST(SharedUint8Array) {
   i::FLAG_harmony_sharedarraybuffer = true;
   TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::FixedUint8Array,
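
The double cast in the new test is worth a note: a v8::Local is roughly a pointer to a handle slot, and the slot holds the tagged heap-object word, so *reinterpret_cast<uintptr_t*>(*tmp) extracts a value that carries the heap-object tag bit. The test then hands that value to v8::ArrayBuffer::New as a backing store. A standalone imitation of the extraction, with fabricated values and no V8:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // A fabricated "heap object" word with the low tag bit set, standing
      // in for the contents of a handle slot.
      uintptr_t tagged_word = 0x2000 | 1;
      uintptr_t* handle_slot = &tagged_word;  // what a Local dereferences to

      // Mirrors `*reinterpret_cast<uintptr_t*>(*tmp)` in the test: read the
      // slot, then reinterpret the tagged word as a raw byte pointer.
      uint8_t* store_ptr = reinterpret_cast<uint8_t*>(*handle_slot);

      // This is the value the test passes as a backing store: to a naive
      // visitor it is indistinguishable from a tagged heap pointer.
      printf("store_ptr = %p\n", static_cast<void*>(store_ptr));
      return 0;
    }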

4 comments on commit 3223704

@zcbenz (Contributor) commented on 3223704, Dec 12, 2015

@indutny Hi, I think you forgot to backport this after the upgrade to V8 4.7.

@indutny (Member, Author) commented

cc @nodejs/v8 Looks like this is the case, indeed.

@indutny (Member, Author) commented

@zcbenz thank you, filed a PR: #4259

@ofrobots (Contributor) commented

It seems that upstream never backported this fix. As the one who upgraded to V8 4.7, I think it is my fault this got missed. Thanks for the report, @zcbenz!
