Diffstat (limited to 'deps/v8/src/heap/incremental-marking.cc')
-rw-r--r--  deps/v8/src/heap/incremental-marking.cc | 66 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++--------
 1 file changed, 58 insertions(+), 8 deletions(-)
diff --git a/deps/v8/src/heap/incremental-marking.cc b/deps/v8/src/heap/incremental-marking.cc
index a7b56e4315..2b693ed44e 100644
--- a/deps/v8/src/heap/incremental-marking.cc
+++ b/deps/v8/src/heap/incremental-marking.cc
@@ -53,9 +53,11 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
}
IncrementalMarking::IncrementalMarking(
- Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist)
+ Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist,
+ WeakObjects* weak_objects)
: heap_(heap),
marking_worklist_(marking_worklist),
+ weak_objects_(weak_objects),
initial_old_generation_size_(0),
bytes_marked_ahead_of_schedule_(0),
bytes_marked_concurrently_(0),
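
The constructor now also receives the collector's WeakObjects so that the
weak-reference worklists can be updated after a scavenge (see
UpdateWeakReferencesAfterScavenge further down). A minimal sketch of the
updated wiring, assuming the heap forwards both worklists from
MarkCompactCollector; the accessor names and call site here are illustrative,
not necessarily the real setup code:

    // Hypothetical call site, for illustration only.
    incremental_marking_ = new IncrementalMarking(
        this, mark_compact_collector()->marking_worklist(),
        mark_compact_collector()->weak_objects());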
@@ -91,8 +93,8 @@ bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
return is_compacting_ && need_recording;
}
-
-void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
+void IncrementalMarking::RecordWriteSlow(HeapObject* obj,
+ HeapObjectReference** slot,
Object* value) {
if (BaseRecordWrite(obj, value) && slot != nullptr) {
// Object is not going to be rescanned, so we need to record the slot.
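
Widening the slot type from Object** to HeapObjectReference** lets the slow
write-barrier path record slots that may hold weak references, not only
strong ones. As a toy model of the idea (explicitly not V8's real encoding),
think of a reference that carries a weakness tag in its low pointer bit:

    #include <cstdint>

    // Toy model only; V8's actual HeapObjectReference encoding differs.
    struct TaggedRef {
      uintptr_t value;
      static TaggedRef Strong(void* obj) {
        return {reinterpret_cast<uintptr_t>(obj)};
      }
      static TaggedRef Weak(void* obj) {
        return {reinterpret_cast<uintptr_t>(obj) | 1u};  // low bit = weak
      }
      bool IsWeak() const { return (value & 1u) != 0; }
      void* Object() const {
        return reinterpret_cast<void*>(value & ~uintptr_t{1});
      }
    };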
@@ -422,7 +424,7 @@ void IncrementalMarking::StartMarking() {
IncrementalMarkingRootMarkingVisitor visitor(this);
heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
- if (FLAG_concurrent_marking && heap_->use_tasks()) {
+ if (FLAG_concurrent_marking && !heap_->IsTearingDown()) {
heap_->concurrent_marking()->ScheduleTasks();
}
@@ -558,8 +560,6 @@ void IncrementalMarking::FinalizeIncrementally() {
// objects to reduce the marking load in the final pause.
// 1) We scan and mark the roots again to find all changes to the root set.
// 2) Age and retain maps embedded in optimized code.
- // 3) Remove weak cell with live values from the list of weak cells, they
- // do not need processing during GC.
MarkRoots();
// Map retaining is needed for performance, not correctness,
@@ -588,8 +588,12 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
Map* filler_map = heap_->one_pointer_filler_map();
+#ifdef ENABLE_MINOR_MC
MinorMarkCompactCollector::MarkingState* minor_marking_state =
heap()->minor_mark_compact_collector()->marking_state();
+#else
+ void* minor_marking_state = nullptr;
+#endif // ENABLE_MINOR_MC
marking_worklist()->Update([this, filler_map, minor_marking_state](
HeapObject* obj, HeapObject** out) -> bool {
@@ -613,19 +617,24 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
// The object may be on a page that was moved in new space.
DCHECK(
Page::FromAddress(obj->address())->IsFlagSet(Page::SWEEP_TO_ITERATE));
+#ifdef ENABLE_MINOR_MC
if (minor_marking_state->IsGrey(obj)) {
*out = obj;
return true;
}
+#endif // ENABLE_MINOR_MC
return false;
} else {
- // The object may be on a page that was moved from new to old space.
+ // The object may be on a page that was moved from new to old space. Only
+ // applicable during minor MC garbage collections.
if (Page::FromAddress(obj->address())
->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
+#ifdef ENABLE_MINOR_MC
if (minor_marking_state->IsGrey(obj)) {
*out = obj;
return true;
}
+#endif // ENABLE_MINOR_MC
return false;
}
DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
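
A remark on the #ifdef pattern above: when ENABLE_MINOR_MC is compiled out,
minor_marking_state degrades to a void* placeholder so the lambda's capture
list is identical in both configurations, while every dereference stays
fenced by the same guard. A self-contained illustration of the pattern (not
V8 code; the types are stand-ins):

    #ifdef ENABLE_MINOR_MC
    struct MinorMarkingState {
      bool IsGrey(int obj) const { return obj % 2 == 0; }
    };
    #endif

    bool UpdateEntry(int obj) {
    #ifdef ENABLE_MINOR_MC
      MinorMarkingState state;
      MinorMarkingState* minor_marking_state = &state;
    #else
      void* minor_marking_state = nullptr;  // captured, never dereferenced
    #endif
      auto filter = [minor_marking_state](int o) -> bool {
    #ifdef ENABLE_MINOR_MC
        if (minor_marking_state->IsGrey(o)) return true;
    #else
        (void)minor_marking_state;  // silence unused-capture warnings
    #endif
        return false;
      };
      return filter(obj);
    }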
@@ -638,6 +647,47 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
return false;
}
});
+
+ UpdateWeakReferencesAfterScavenge();
+}
+
+void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
+ weak_objects_->weak_references.Update(
+ [](std::pair<HeapObject*, HeapObjectReference**> slot_in,
+ std::pair<HeapObject*, HeapObjectReference**>* slot_out) -> bool {
+ HeapObject* heap_obj = slot_in.first;
+ MapWord map_word = heap_obj->map_word();
+ if (map_word.IsForwardingAddress()) {
+ ptrdiff_t distance_to_slot =
+ reinterpret_cast<Address>(slot_in.second) -
+ reinterpret_cast<Address>(slot_in.first);
+ Address new_slot =
+ reinterpret_cast<Address>(map_word.ToForwardingAddress()) +
+ distance_to_slot;
+ slot_out->first = map_word.ToForwardingAddress();
+ slot_out->second = reinterpret_cast<HeapObjectReference**>(new_slot);
+ return true;
+ }
+ if (heap_obj->GetHeap()->InNewSpace(heap_obj)) {
+ // The new space object containing the weak reference died.
+ return false;
+ }
+ *slot_out = slot_in;
+ return true;
+ });
+ weak_objects_->weak_objects_in_code.Update(
+ [](std::pair<HeapObject*, Code*> slot_in,
+ std::pair<HeapObject*, Code*>* slot_out) -> bool {
+ HeapObject* heap_obj = slot_in.first;
+ MapWord map_word = heap_obj->map_word();
+ if (map_word.IsForwardingAddress()) {
+ slot_out->first = map_word.ToForwardingAddress();
+ slot_out->second = slot_in.second;
+ } else {
+ *slot_out = slot_in;
+ }
+ return true;
+ });
}
void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
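
The slot-relocation arithmetic in UpdateWeakReferencesAfterScavenge works
because a weak slot sits at a fixed offset inside its holder: if the holder
was evacuated, the new slot address is the forwarding address plus that
offset. A standalone sketch of the computation under simplified types
(Address as a byte pointer; the names mirror the lambda above):

    #include <cstddef>
    #include <cstdint>
    #include <utility>

    using Address = uint8_t*;

    // Relocate an interior slot after its holder moved. The slot's offset
    // within the object is preserved verbatim by evacuation.
    std::pair<Address, Address> RelocateSlot(Address old_object,
                                             Address old_slot,
                                             Address forwarded_object) {
      ptrdiff_t distance_to_slot = old_slot - old_object;
      return {forwarded_object, forwarded_object + distance_to_slot};
    }

For example, a slot at offset 0x10 in an object that moved from 0x1000 to
0x2000 relocates to 0x2010. A holder still in new space with no forwarding
address died in the scavenge, so its entry is dropped from the worklist.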
@@ -908,7 +958,7 @@ size_t IncrementalMarking::StepSizeToMakeProgress() {
const size_t kTargetStepCountAtOOM = 32;
size_t oom_slack = heap()->new_space()->Capacity() + 64 * MB;
- if (heap()->IsCloseToOutOfMemory(oom_slack)) {
+ if (!heap()->CanExpandOldGeneration(oom_slack)) {
return heap()->PromotedSpaceSizeOfObjects() / kTargetStepCountAtOOM;
}
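
Near out-of-memory, the step size is tied directly to the promoted space size
so that marking completes within roughly kTargetStepCountAtOOM steps instead
of falling behind allocation. A quick sketch of the arithmetic (the promoted
size is illustrative):

    #include <cstddef>
    #include <cstdio>

    int main() {
      const size_t kTargetStepCountAtOOM = 32;
      const size_t kMB = size_t{1} << 20;
      size_t promoted = 256 * kMB;  // illustrative promoted space size
      // Each step marks 1/32 of the promoted space: 8 MB here, so marking
      // finishes in about 32 steps even when the old generation cannot grow.
      size_t step = promoted / kTargetStepCountAtOOM;
      std::printf("step size: %zu MB\n", step / kMB);
    }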