summaryrefslogtreecommitdiff
path: root/deps/v8/src/heap/mark-compact.cc
diff options
context:
space:
mode:
author Michaël Zasso <targos@protonmail.com> 2018-09-21 09:14:51 +0200
committer Michaël Zasso <targos@protonmail.com> 2018-09-22 18:29:25 +0200
commit 0e7ddbd3d7e9439c67573b854c49cf82c398ae82 (patch)
tree 2afe372acde921cb57ddb3444ff00c5adef8848c /deps/v8/src/heap/mark-compact.cc
parent 13245dc50da4cb7443c39ef6c68d419d5e6336d4 (diff)
download android-node-v8-0e7ddbd3d7e9439c67573b854c49cf82c398ae82.tar.gz
android-node-v8-0e7ddbd3d7e9439c67573b854c49cf82c398ae82.tar.bz2
android-node-v8-0e7ddbd3d7e9439c67573b854c49cf82c398ae82.zip
deps: update V8 to 7.0.276.20
PR-URL: https://github.com/nodejs/node/pull/22754 Reviewed-By: Matteo Collina <matteo.collina@gmail.com> Reviewed-By: James M Snell <jasnell@gmail.com> Reviewed-By: Refael Ackermann <refack@gmail.com> Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com> Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Diffstat (limited to 'deps/v8/src/heap/mark-compact.cc')
-rw-r--r-- deps/v8/src/heap/mark-compact.cc 94
1 file changed, 36 insertions, 58 deletions
diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc
index af56c72418..dea105943a 100644
--- a/deps/v8/src/heap/mark-compact.cc
+++ b/deps/v8/src/heap/mark-compact.cc
@@ -20,7 +20,7 @@
#include "src/heap/incremental-marking.h"
#include "src/heap/invalidated-slots-inl.h"
#include "src/heap/item-parallel-job.h"
-#include "src/heap/local-allocator.h"
+#include "src/heap/local-allocator-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
@@ -1099,7 +1099,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
DCHECK_EQ(host, rinfo->host());
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
HeapObject* object = HeapObject::cast(rinfo->target_object());
- collector_->heap()->RecordWriteIntoCode(host, rinfo, object);
+ GenerationalBarrierForCode(host, rinfo, object);
collector_->RecordRelocSlot(host, rinfo, object);
}
@@ -1249,8 +1249,8 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
bool AbortCompactionForTesting(HeapObject* object) {
if (FLAG_stress_compaction) {
const uintptr_t mask = static_cast<uintptr_t>(FLAG_random_seed) &
- Page::kPageAlignmentMask & ~kPointerAlignmentMask;
- if ((object->address() & Page::kPageAlignmentMask) == mask) {
+ kPageAlignmentMask & ~kPointerAlignmentMask;
+ if ((object->address() & kPageAlignmentMask) == mask) {
Page* page = Page::FromAddress(object->address());
if (page->IsFlagSet(Page::COMPACTION_WAS_ABORTED_FOR_TESTING)) {
page->ClearFlag(Page::COMPACTION_WAS_ABORTED_FOR_TESTING);
@@ -1635,8 +1635,7 @@ void MarkCompactCollector::PerformWrapperTracing() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING);
heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
heap_->local_embedder_heap_tracer()->Trace(
- 0, EmbedderHeapTracer::AdvanceTracingActions(
- EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION));
+ std::numeric_limits<double>::infinity());
}
}
@@ -1784,6 +1783,18 @@ void MarkCompactCollector::MarkLiveObjects() {
DCHECK(marking_worklist()->IsEmpty());
+ // Mark objects reachable through the embedder heap. This phase is
+ // opportunistic as it may not discover graphs that are only reachable
+ // through ephemerons.
+ {
+ TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPERS);
+ while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone()) {
+ PerformWrapperTracing();
+ ProcessMarkingWorklist();
+ }
+ DCHECK(marking_worklist()->IsEmpty());
+ }
+
// The objects reachable from the roots are marked, yet unreachable objects
// are unmarked. Mark objects reachable due to embedder heap tracing or
// harmony weak maps.
@@ -1874,13 +1885,11 @@ void MarkCompactCollector::ClearNonLiveReferences() {
// cleared.
ClearFullMapTransitions();
}
- ClearWeakCells();
ClearWeakReferences();
MarkDependentCodeForDeoptimization();
ClearWeakCollections();
- DCHECK(weak_objects_.weak_cells.IsEmpty());
DCHECK(weak_objects_.transition_arrays.IsEmpty());
DCHECK(weak_objects_.weak_references.IsEmpty());
DCHECK(weak_objects_.weak_objects_in_code.IsEmpty());
@@ -1939,7 +1948,7 @@ void MarkCompactCollector::ClearFullMapTransitions() {
// The array might contain "undefined" elements because it's not yet
// filled. Allow it.
if (array->GetTargetIfExists(0, isolate(), &map)) {
- DCHECK_NOT_NULL(map); // WeakCells aren't cleared yet.
+ DCHECK_NOT_NULL(map); // Weak pointers aren't cleared yet.
Map* parent = Map::cast(map->constructor_or_backpointer());
bool parent_is_alive =
non_atomic_marking_state()->IsBlackOrGrey(parent);
@@ -2077,46 +2086,6 @@ void MarkCompactCollector::ClearWeakCollections() {
}
}
-void MarkCompactCollector::ClearWeakCells() {
- Heap* heap = this->heap();
- TRACE_GC(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_CELLS);
- WeakCell* weak_cell;
- while (weak_objects_.weak_cells.Pop(kMainThread, &weak_cell)) {
- // We do not insert cleared weak cells into the list, so the value
- // cannot be a Smi here.
- HeapObject* value = HeapObject::cast(weak_cell->value());
- if (!non_atomic_marking_state()->IsBlackOrGrey(value)) {
- // Cells for new-space objects embedded in optimized code are wrapped in
- // WeakCell and put into Heap::weak_object_to_code_table.
- // Such cells do not have any strong references but we want to keep them
- // alive as long as the cell value is alive.
- // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table.
- if (value->IsCell()) {
- Object* cell_value = Cell::cast(value)->value();
- if (cell_value->IsHeapObject() &&
- non_atomic_marking_state()->IsBlackOrGrey(
- HeapObject::cast(cell_value))) {
- // Resurrect the cell.
- non_atomic_marking_state()->WhiteToBlack(value);
- Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
- RecordSlot(value, slot, HeapObject::cast(*slot));
- slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
- RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
- } else {
- weak_cell->clear();
- }
- } else {
- // All other objects.
- weak_cell->clear();
- }
- } else {
- // The value of the weak cell is alive.
- Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
- RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
- }
- }
-}
-
void MarkCompactCollector::ClearWeakReferences() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_REFERENCES);
std::pair<HeapObject*, HeapObjectReference**> slot;
@@ -2140,7 +2109,6 @@ void MarkCompactCollector::ClearWeakReferences() {
}
void MarkCompactCollector::AbortWeakObjects() {
- weak_objects_.weak_cells.Clear();
weak_objects_.transition_arrays.Clear();
weak_objects_.ephemeron_hash_tables.Clear();
weak_objects_.current_ephemerons.Clear();
@@ -2195,6 +2163,8 @@ static inline SlotCallbackResult UpdateSlot(
}
DCHECK(!Heap::InFromSpace(target));
DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target));
+ } else {
+ DCHECK(heap_obj->map()->IsMap());
}
// OLD_TO_OLD slots are always removed after updating.
return REMOVE_SLOT;
@@ -2301,7 +2271,14 @@ static String* UpdateReferenceInExternalStringTableEntry(Heap* heap,
MapWord map_word = HeapObject::cast(*p)->map_word();
if (map_word.IsForwardingAddress()) {
- return String::cast(map_word.ToForwardingAddress());
+ String* new_string = String::cast(map_word.ToForwardingAddress());
+
+ if (new_string->IsExternalString()) {
+ heap->ProcessMovedExternalString(
+ Page::FromAddress(reinterpret_cast<Address>(*p)),
+ Page::FromHeapObject(new_string), ExternalString::cast(new_string));
+ }
+ return new_string;
}
return String::cast(*p);
@@ -3026,13 +3003,14 @@ class RememberedSetUpdatingItem : public UpdatingItem {
if (chunk_->typed_slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() !=
nullptr) {
CHECK_NE(chunk_->owner(), heap_->map_space());
+ const auto check_and_update_old_to_new_slot_fn =
+ [this](MaybeObject** slot) {
+ return CheckAndUpdateOldToNewSlot(reinterpret_cast<Address>(slot));
+ };
RememberedSet<OLD_TO_NEW>::IterateTyped(
- chunk_, [this](SlotType slot_type, Address host_addr, Address slot) {
+ chunk_, [=](SlotType slot_type, Address host_addr, Address slot) {
return UpdateTypedSlotHelper::UpdateTypedSlot(
- heap_, slot_type, slot, [this](MaybeObject** slot) {
- return CheckAndUpdateOldToNewSlot(
- reinterpret_cast<Address>(slot));
- });
+ heap_, slot_type, slot, check_and_update_old_to_new_slot_fn);
});
}
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
@@ -3896,7 +3874,7 @@ void MinorMarkCompactCollector::MakeIterable(
p->AddressToMarkbitIndex(free_start),
p->AddressToMarkbitIndex(free_end));
if (free_space_mode == ZAP_FREE_SPACE) {
- memset(reinterpret_cast<void*>(free_start), 0xCC, size);
+ ZapCode(free_start, size);
}
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
ClearRecordedSlots::kNo);
@@ -3913,7 +3891,7 @@ void MinorMarkCompactCollector::MakeIterable(
p->AddressToMarkbitIndex(free_start),
p->AddressToMarkbitIndex(p->area_end()));
if (free_space_mode == ZAP_FREE_SPACE) {
- memset(reinterpret_cast<void*>(free_start), 0xCC, size);
+ ZapCode(free_start, size);
}
p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
ClearRecordedSlots::kNo);