From d731770da2c6f289b6fd1464c152309ad86757fe Mon Sep 17 00:00:00 2001
From: Myles Borins
Date: Wed, 29 Jun 2016 01:16:07 -0700
Subject: [PATCH] deps: cherry-pick a715957 from V8 upstream

Original commit message:

    Iterate handles with special left-trim visitor

    BUG=chromium:620553
    LOG=N
    R=hpayer@chromium.org

    Review-Url: https://codereview.chromium.org/2102243002
    Cr-Commit-Position: refs/heads/master@{#37366}

PR-URL: https://github.com/nodejs/node/pull/10666
Reviewed-By: James M Snell
Reviewed-By: Fedor Indutny
Reviewed-By: Ali Ijaz Sheikh
---
 deps/v8/src/heap/heap-inl.h      | 25 ------------------
 deps/v8/src/heap/heap.cc         | 45 ++++++++++++++++++++++++++++++++
 deps/v8/src/heap/heap.h          |  6 -----
 deps/v8/src/heap/mark-compact.cc |  2 --
 deps/v8/src/heap/scavenger.cc    |  2 --
 5 files changed, 45 insertions(+), 35 deletions(-)
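Background for the diffs that follow: when V8 left-trims an array in place (Array.prototype.shift on a large backing store, for instance), the words at the object's old start are overwritten with filler objects, so a handle created before the trim can be left pointing at filler. Rather than making every GC root visitor test every slot, as the removed Heap::PurgeLeftTrimmedObject did, the patch clears such stale handle slots once, up front, with a dedicated visitor. Below is a minimal, self-contained sketch of that slot-clearing idea; ToyHeapObject, ToySlotVisitor, and ToyLeftTrimFixer are invented stand-ins, not V8's real types.

#include <iostream>
#include <vector>

// Toy stand-ins for V8's HeapObject and ObjectVisitor; names are invented.
struct ToyHeapObject {
  bool is_filler;  // True where left-trimming wrote a filler "tombstone".
};

class ToySlotVisitor {
 public:
  virtual ~ToySlotVisitor() = default;
  virtual void VisitPointer(ToyHeapObject** slot) = 0;
};

// Mirrors the patch's FixHandle(): a handle slot that points at filler is
// stale, so clear it rather than let a later GC phase treat it as live.
class ToyLeftTrimFixer : public ToySlotVisitor {
 public:
  void VisitPointer(ToyHeapObject** slot) override {
    if (*slot != nullptr && (*slot)->is_filler) *slot = nullptr;
  }
};

int main() {
  ToyHeapObject live{false};
  ToyHeapObject trimmed{true};  // Filler where an array start used to be.
  std::vector<ToyHeapObject*> handles = {&live, &trimmed, &live};

  ToyLeftTrimFixer fixer;
  for (ToyHeapObject*& slot : handles) fixer.VisitPointer(&slot);

  for (ToyHeapObject* h : handles)  // Prints: live cleared live
    std::cout << (h ? "live " : "cleared ");
  std::cout << '\n';
}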
diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h
index 3d2e059fae8939..e31d3d6859b463 100644
--- a/deps/v8/src/heap/heap-inl.h
+++ b/deps/v8/src/heap/heap-inl.h
@@ -449,31 +449,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) {
             static_cast<size_t>(byte_size / kPointerSize));
 }
 
-bool Heap::PurgeLeftTrimmedObject(Object** object) {
-  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
-  const MapWord map_word = current->map_word();
-  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
-#ifdef DEBUG
-    // We need to find a FixedArrayBase map after walking the fillers.
-    while (current->IsFiller()) {
-      Address next = reinterpret_cast<Address>(current);
-      if (current->map() == one_pointer_filler_map()) {
-        next += kPointerSize;
-      } else if (current->map() == two_pointer_filler_map()) {
-        next += 2 * kPointerSize;
-      } else {
-        next += current->Size();
-      }
-      current = reinterpret_cast<HeapObject*>(next);
-    }
-    DCHECK(current->IsFixedArrayBase());
-#endif  // DEBUG
-    *object = nullptr;
-    return true;
-  }
-  return false;
-}
-
 template <Heap::FindMementoMode mode>
 AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
   // Check if there is potentially a memento behind the object. If
diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index 3e8e14493955b6..eae9695caf09af 100644
--- a/deps/v8/src/heap/heap.cc
+++ b/deps/v8/src/heap/heap.cc
@@ -4800,6 +4800,49 @@ void Heap::IterateSmiRoots(ObjectVisitor* v) {
   v->Synchronize(VisitorSynchronization::kSmiRootList);
 }
 
+// We cannot avoid stale handles to left-trimmed objects, but can only make
+// sure all handles still needed are updated. Filter out a stale pointer
+// and clear the slot to allow post processing of handles (needed because
+// the sweeper might actually free the underlying page).
+class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor {
+ public:
+  explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) {
+    USE(heap_);
+  }
+
+  void VisitPointer(Object** p) override { FixHandle(p); }
+
+  void VisitPointers(Object** start, Object** end) override {
+    for (Object** p = start; p < end; p++) FixHandle(p);
+  }
+
+ private:
+  inline void FixHandle(Object** p) {
+    HeapObject* current = reinterpret_cast<HeapObject*>(*p);
+    if (!current->IsHeapObject()) return;
+    const MapWord map_word = current->map_word();
+    if (!map_word.IsForwardingAddress() && current->IsFiller()) {
+#ifdef DEBUG
+      // We need to find a FixedArrayBase map after walking the fillers.
+      while (current->IsFiller()) {
+        Address next = reinterpret_cast<Address>(current);
+        if (current->map() == heap_->one_pointer_filler_map()) {
+          next += kPointerSize;
+        } else if (current->map() == heap_->two_pointer_filler_map()) {
+          next += 2 * kPointerSize;
+        } else {
+          next += current->Size();
+        }
+        current = reinterpret_cast<HeapObject*>(next);
+      }
+      DCHECK(current->IsFixedArrayBase());
+#endif  // DEBUG
+      *p = nullptr;
+    }
+  }
+
+  Heap* heap_;
+};
 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
   v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
@@ -4820,6 +4863,8 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
   v->Synchronize(VisitorSynchronization::kCompilationCache);
 
   // Iterate over local handles in handle scopes.
+  FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this);
+  isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
   isolate_->handle_scope_implementer()->Iterate(v);
   isolate_->IterateDeferredHandles(v);
   v->Synchronize(VisitorSynchronization::kHandleScope);
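The DEBUG-only loop in FixHandle above double-checks that walking forward from the stale slot, filler by filler, really ends at a FixedArrayBase: a one-pointer filler advances the cursor by kPointerSize, a two-pointer filler by 2 * kPointerSize, and anything else by its own Size(). The following self-contained sketch models only that step arithmetic; FillerKind, FakeObject, and FillerStep are assumptions made for illustration (real V8 distinguishes fillers by their map and walks raw addresses, not an array index).

#include <cassert>
#include <cstddef>

constexpr std::size_t kPointerSize = sizeof(void*);

// Invented stand-in: V8 identifies filler flavors by map, not by an enum.
enum class FillerKind { kOnePointer, kTwoPointer, kVariableSize, kNotFiller };

struct FakeObject {
  FillerKind kind;
  std::size_t size;  // Only meaningful for kVariableSize fillers.
};

// Step width past one filler, mirroring the one_pointer_filler_map /
// two_pointer_filler_map / Size() branches in the DEBUG loop above.
std::size_t FillerStep(const FakeObject& obj) {
  switch (obj.kind) {
    case FillerKind::kOnePointer:
      return kPointerSize;
    case FillerKind::kTwoPointer:
      return 2 * kPointerSize;
    case FillerKind::kVariableSize:
      return obj.size;
    default:
      return 0;  // Not a filler: the walk stops here.
  }
}

int main() {
  // A trimmed region: two fixed-size fillers, one large filler, the array.
  const FakeObject region[] = {{FillerKind::kOnePointer, 0},
                               {FillerKind::kTwoPointer, 0},
                               {FillerKind::kVariableSize, 4 * kPointerSize},
                               {FillerKind::kNotFiller, 0}};
  std::size_t offset = 0;
  std::size_t i = 0;
  while (region[i].kind != FillerKind::kNotFiller) {
    offset += FillerStep(region[i]);
    ++i;
  }
  assert(offset == 7 * kPointerSize);  // 1 + 2 + 4 pointer-sized words.
  return 0;
}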
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index 2ce326a64e8416..2d2029912cd717 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -602,12 +602,6 @@ class Heap {
   // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
   static int GetStaticVisitorIdForMap(Map* map);
 
-  // We cannot avoid stale handles to left-trimmed objects, but can only make
-  // sure all handles still needed are updated. Filter out a stale pointer
-  // and clear the slot to allow post processing of handles (needed because
-  // the sweeper might actually free the underlying page).
-  inline bool PurgeLeftTrimmedObject(Object** object);
-
   // Notifies the heap that is ok to start marking or other activities that
   // should not happen during deserialization.
   void NotifyDeserializationComplete();
diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc
index 31b12f1d1b59b2..16f93f13316bad 100644
--- a/deps/v8/src/heap/mark-compact.cc
+++ b/deps/v8/src/heap/mark-compact.cc
@@ -1376,8 +1376,6 @@ class RootMarkingVisitor : public ObjectVisitor {
 
     HeapObject* object = HeapObject::cast(*p);
 
-    if (collector_->heap()->PurgeLeftTrimmedObject(p)) return;
-
     MarkBit mark_bit = Marking::MarkBitFrom(object);
     if (Marking::IsBlackOrGrey(mark_bit)) return;
diff --git a/deps/v8/src/heap/scavenger.cc b/deps/v8/src/heap/scavenger.cc
index 18735bd20179cc..dce53554474a57 100644
--- a/deps/v8/src/heap/scavenger.cc
+++ b/deps/v8/src/heap/scavenger.cc
@@ -463,8 +463,6 @@ void ScavengeVisitor::ScavengePointer(Object** p) {
   Object* object = *p;
   if (!heap_->InNewSpace(object)) return;
 
-  if (heap_->PurgeLeftTrimmedObject(p)) return;
-
   Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                             reinterpret_cast<HeapObject*>(object));
 }
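Why the mark-compact and scavenger hunks above can drop their per-slot checks: IterateStrongRoots now passes the handle-scope slots to the left-trim fix-up visitor immediately before passing the same slots to the real root visitor, so the marker and the scavenger can no longer observe a stale slot. A minimal, self-contained sketch of that two-pass ordering, with all types invented for illustration:

#include <iostream>
#include <vector>

// Invented toy types; Obj models a heap object, Slot a handle slot.
struct Obj {
  bool is_filler;
  bool marked = false;
};
using Slot = Obj*;

// First pass, mirroring FixStaleLeftTrimmedHandlesVisitor: clear stale slots.
void FixerPass(std::vector<Slot>& slots) {
  for (Slot& s : slots) {
    if (s != nullptr && s->is_filler) s = nullptr;
  }
}

// Second pass, standing in for the real root visitor (e.g. the marker): by
// the time it runs, no slot can still point at left-trimmed filler.
void MarkerPass(std::vector<Slot>& slots) {
  for (Slot& s : slots) {
    if (s != nullptr) s->marked = true;
  }
}

int main() {
  Obj live{false};
  Obj stale{true};
  std::vector<Slot> roots = {&live, &stale};
  FixerPass(roots);   // Like: handle_scope_implementer()->Iterate(&left_trim_visitor);
  MarkerPass(roots);  // Like: handle_scope_implementer()->Iterate(v);
  std::cout << live.marked << ' ' << stale.marked << '\n';  // Prints: 1 0
}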