Skip to content
This repository has been archived by the owner on Oct 16, 2021. It is now read-only.

Commit

Permalink
deps: backport a715957 from V8 upstream
Browse files Browse the repository at this point in the history
This commit does not include the changes to `src/heap/scavenger.cc`.

These changes would revert the changes that should have come in
086bd5aede, meaning that there is no issue with that change being
missing from the previous commit.

Original commit message:
  Iterate handles with special left-trim visitor

  BUG=chromium:620553
  LOG=N
  R=hpayer@chromium.org

  Review-Url: https://codereview.chromium.org/2102243002
  Cr-Commit-Position: refs/heads/master@{#37366}

PR-URL: nodejs/node#10668
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Fedor Indutny <fedor.indutny@gmail.com>
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
  • Loading branch information
MylesBorins authored and gibfahn committed Feb 22, 2017
1 parent 10398cc commit 2386ddc
Show file tree
Hide file tree
Showing 4 changed files with 45 additions and 33 deletions.
25 changes: 0 additions & 25 deletions deps/v8z/src/heap/heap-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -398,31 +398,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) {
static_cast<size_t>(byte_size / kPointerSize));
}

// Filters out a stale pointer to a left-trimmed object and clears the slot.
// We cannot avoid stale handles to left-trimmed objects, but we can make
// sure all handles still needed are updated; clearing the slot allows post
// processing of handles (needed because the sweeper might actually free the
// underlying page).
//
// Returns true when |*object| pointed at a left-trim filler and the slot was
// cleared to nullptr; returns false (slot untouched) otherwise.
bool Heap::PurgeLeftTrimmedObject(Object** object) {
  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
  // Smis are immediate values, not heap objects; reading a map word from one
  // would be invalid. (Mirrors the guard in FixStaleLeftTrimmedHandlesVisitor
  // in heap.cc.)
  if (!current->IsHeapObject()) return false;
  const MapWord map_word = current->map_word();
  // A filler without a forwarding address is what a left-trim leaves behind:
  // the handle is stale.
  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
#ifdef DEBUG
    // We need to find a FixedArrayBase map after walking the fillers.
    while (current->IsFiller()) {
      Address next = reinterpret_cast<Address>(current);
      if (current->map() == one_pointer_filler_map()) {
        next += kPointerSize;
      } else if (current->map() == two_pointer_filler_map()) {
        next += 2 * kPointerSize;
      } else {
        next += current->Size();
      }
      current = reinterpret_cast<HeapObject*>(next);
    }
    DCHECK(current->IsFixedArrayBase());
#endif  // DEBUG
    *object = nullptr;
    return true;
  }
  return false;
}

void Heap::MoveBlock(Address dst, Address src, int byte_size) {
DCHECK(IsAligned(byte_size, kPointerSize));

Expand Down
45 changes: 45 additions & 0 deletions deps/v8z/src/heap/heap.cc
Original file line number Diff line number Diff line change
Expand Up @@ -5320,6 +5320,49 @@ void Heap::IterateSmiRoots(ObjectVisitor* v) {
v->Synchronize(VisitorSynchronization::kSmiRootList);
}

// We cannot avoid stale handles to left-trimmed objects, but can only make
// sure all handles still needed are updated. Filter out a stale pointer
// and clear the slot to allow post processing of handles (needed because
// the sweeper might actually free the underlying page).
//
// Run over the handle-scope roots (see IterateStrongRoots) before the
// regular root visitor so that no visitor ever sees a stale left-trim
// pointer.
class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor {
public:
explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) {
// heap_ is only dereferenced inside the #ifdef DEBUG walk below; USE()
// silences the unused-member warning in release builds.
USE(heap_);
}

void VisitPointer(Object** p) override { FixHandle(p); }

void VisitPointers(Object** start, Object** end) override {
for (Object** p = start; p < end; p++) FixHandle(p);
}

private:
// Clears *p when it is a stale pointer into a left-trimmed object.
inline void FixHandle(Object** p) {
HeapObject* current = reinterpret_cast<HeapObject*>(*p);
// Smis are immediate values; there is no map word to inspect.
if (!current->IsHeapObject()) return;
const MapWord map_word = current->map_word();
// A filler that has no forwarding address is the debris a left-trim
// leaves behind — the handle is stale and its slot must be cleared.
if (!map_word.IsForwardingAddress() && current->IsFiller()) {
#ifdef DEBUG
// We need to find a FixedArrayBase map after walking the fillers.
while (current->IsFiller()) {
Address next = reinterpret_cast<Address>(current);
if (current->map() == heap_->one_pointer_filler_map()) {
next += kPointerSize;
} else if (current->map() == heap_->two_pointer_filler_map()) {
next += 2 * kPointerSize;
} else {
// Free-space filler: skip its full recorded size.
next += current->Size();
}
current = reinterpret_cast<HeapObject*>(next);
}
DCHECK(current->IsFixedArrayBase());
#endif  // DEBUG
// Clear the slot so later handle post-processing never touches the
// (possibly freed) page.
*p = nullptr;
}
}

Heap* heap_;
};

void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
Expand All @@ -5343,6 +5386,8 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->Synchronize(VisitorSynchronization::kCompilationCache);

// Iterate over local handles in handle scopes.
FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this);
isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
isolate_->handle_scope_implementer()->Iterate(v);
isolate_->IterateDeferredHandles(v);
v->Synchronize(VisitorSynchronization::kHandleScope);
Expand Down
6 changes: 0 additions & 6 deletions deps/v8z/src/heap/heap.h
Original file line number Diff line number Diff line change
Expand Up @@ -590,12 +590,6 @@ class Heap {
// jslimit_/real_jslimit_ variable in the StackGuard.
void SetStackLimits();

// We cannot avoid stale handles to left-trimmed objects, but can only make
// sure all handles still needed are updated. Filter out a stale pointer
// and clear the slot to allow post processing of handles (needed because
// the sweeper might actually free the underlying page).
inline bool PurgeLeftTrimmedObject(Object** object);

// Notifies the heap that is ok to start marking or other activities that
// should not happen during deserialization.
void NotifyDeserializationComplete();
Expand Down
2 changes: 0 additions & 2 deletions deps/v8z/src/heap/mark-compact.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1650,8 +1650,6 @@ class RootMarkingVisitor : public ObjectVisitor {

HeapObject* object = ShortCircuitConsString(p);

if (collector_->heap()->PurgeLeftTrimmedObject(p)) return;

MarkBit mark_bit = Marking::MarkBitFrom(object);
if (Marking::IsBlackOrGrey(mark_bit)) return;

Expand Down

0 comments on commit 2386ddc

Please sign in to comment.