path: root/deps/v8/src/incremental-marking.cc
Diffstat (limited to 'deps/v8/src/incremental-marking.cc')
-rw-r--r--  deps/v8/src/incremental-marking.cc  189
1 file changed, 79 insertions(+), 110 deletions(-)
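
The patch below replaces the instance-based IncrementalMarkingMarkingVisitor (an ObjectVisitor using virtual dispatch) with a StaticMarkingVisitor that dispatches through a callback table populated in Initialize(), renames the global-context machinery to native contexts, and switches the marking-speed trace output from PrintF to PrintPID. A minimal sketch of the table-driven static-visitor pattern follows; the types and names are hypothetical stand-ins for illustration, not V8's actual API.

// Sketch of a table-driven static visitor (hypothetical names, not V8's API).
#include <cstdio>

struct HeapObject { int kind; };  // stand-in for a heap object with a type tag

enum VisitorId {
  kVisitSharedFunctionInfo,
  kVisitJSFunction,
  kVisitGeneric,
  kVisitorIdCount
};

class StaticVisitor {
 public:
  using Callback = void (*)(HeapObject* object);

  // Register one callback per visitor id, mirroring table_.Register() in the diff.
  static void Initialize() {
    for (int i = 0; i < kVisitorIdCount; ++i) table_[i] = &VisitGeneric;
    table_[kVisitSharedFunctionInfo] = &VisitSharedFunctionInfo;
    table_[kVisitJSFunction] = &VisitJSFunction;
  }

  // Dispatch through the table instead of a virtual call on a visitor instance.
  static void IterateBody(HeapObject* object) { table_[object->kind](object); }

 private:
  static void VisitSharedFunctionInfo(HeapObject*) { std::puts("shared function info"); }
  static void VisitJSFunction(HeapObject*) { std::puts("js function body"); }
  static void VisitGeneric(HeapObject*) { std::puts("generic body"); }

  static Callback table_[kVisitorIdCount];
};

StaticVisitor::Callback StaticVisitor::table_[kVisitorIdCount];

int main() {
  StaticVisitor::Initialize();
  HeapObject fn{kVisitJSFunction};
  StaticVisitor::IterateBody(&fn);  // prints "js function body"
}

Because every visitor function is static and looked up by the object's type id, callers no longer need to construct a visitor carrying heap_ and incremental_marking_ pointers, which is why the diff drops those fields and threads Heap* through the static calls instead.
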
diff --git a/deps/v8/src/incremental-marking.cc b/deps/v8/src/incremental-marking.cc
index 94afffa733..57f18b2e75 100644
--- a/deps/v8/src/incremental-marking.cc
+++ b/deps/v8/src/incremental-marking.cc
@@ -31,6 +31,8 @@
#include "code-stubs.h"
#include "compilation-cache.h"
+#include "objects-visiting.h"
+#include "objects-visiting-inl.h"
#include "v8conversions.h"
namespace v8 {
@@ -160,93 +162,82 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj,
}
-class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
+class IncrementalMarkingMarkingVisitor
+ : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
public:
- IncrementalMarkingMarkingVisitor(Heap* heap,
- IncrementalMarking* incremental_marking)
- : heap_(heap),
- incremental_marking_(incremental_marking) {
- }
+ static void Initialize() {
+ StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
- void VisitEmbeddedPointer(RelocInfo* rinfo) {
- ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
- Object* target = rinfo->target_object();
- if (target->NonFailureIsHeapObject()) {
- heap_->mark_compact_collector()->RecordRelocSlot(rinfo, target);
- MarkObject(target);
- }
- }
+ table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
- void VisitCodeTarget(RelocInfo* rinfo) {
- ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
- Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
- && (target->ic_age() != heap_->global_ic_age())) {
- IC::Clear(rinfo->pc());
- target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- }
- heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
- MarkObject(target);
- }
+ table_.Register(kVisitJSFunction, &VisitJSFunction);
- void VisitDebugTarget(RelocInfo* rinfo) {
- ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
- rinfo->IsPatchedReturnSequence()) ||
- (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
- rinfo->IsPatchedDebugBreakSlotSequence()));
- Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
- heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
- MarkObject(target);
+ table_.Register(kVisitJSRegExp, &VisitJSRegExp);
}
- void VisitCodeEntry(Address entry_address) {
- Object* target = Code::GetObjectFromEntryAddress(entry_address);
- heap_->mark_compact_collector()->
- RecordCodeEntrySlot(entry_address, Code::cast(target));
- MarkObject(target);
+ static void VisitJSWeakMap(Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ VisitPointers(heap,
+ HeapObject::RawField(object, JSWeakMap::kPropertiesOffset),
+ HeapObject::RawField(object, JSWeakMap::kSize));
}
- void VisitSharedFunctionInfo(SharedFunctionInfo* shared) {
- if (shared->ic_age() != heap_->global_ic_age()) {
- shared->ResetForNewContext(heap_->global_ic_age());
+ static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+ if (shared->ic_age() != heap->global_ic_age()) {
+ shared->ResetForNewContext(heap->global_ic_age());
}
- }
-
- void VisitPointer(Object** p) {
+ FixedBodyVisitor<IncrementalMarkingMarkingVisitor,
+ SharedFunctionInfo::BodyDescriptor,
+ void>::Visit(map, object);
+ }
+
+ static inline void VisitJSFunction(Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ // Iterate over all fields in the body but take care in dealing with
+ // the code entry and skip weak fields.
+ VisitPointers(heap,
+ HeapObject::RawField(object, JSFunction::kPropertiesOffset),
+ HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
+ VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+ VisitPointers(heap,
+ HeapObject::RawField(object,
+ JSFunction::kCodeEntryOffset + kPointerSize),
+ HeapObject::RawField(object,
+ JSFunction::kNonWeakFieldsEndOffset));
+ }
+
+ INLINE(static void VisitPointer(Heap* heap, Object** p)) {
Object* obj = *p;
if (obj->NonFailureIsHeapObject()) {
- heap_->mark_compact_collector()->RecordSlot(p, p, obj);
- MarkObject(obj);
+ heap->mark_compact_collector()->RecordSlot(p, p, obj);
+ MarkObject(heap, obj);
}
}
- void VisitPointers(Object** start, Object** end) {
+ INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
for (Object** p = start; p < end; p++) {
Object* obj = *p;
if (obj->NonFailureIsHeapObject()) {
- heap_->mark_compact_collector()->RecordSlot(start, p, obj);
- MarkObject(obj);
+ heap->mark_compact_collector()->RecordSlot(start, p, obj);
+ MarkObject(heap, obj);
}
}
}
- private:
- // Mark object pointed to by p.
- INLINE(void MarkObject(Object* obj)) {
+ INLINE(static void MarkObject(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
if (mark_bit.data_only()) {
- if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) {
+ if (heap->incremental_marking()->MarkBlackOrKeepGrey(mark_bit)) {
MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
heap_object->Size());
}
} else if (Marking::IsWhite(mark_bit)) {
- incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
+ heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
}
}
-
- Heap* heap_;
- IncrementalMarking* incremental_marking_;
};
@@ -290,6 +281,11 @@ class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
};
+void IncrementalMarking::Initialize() {
+ IncrementalMarkingMarkingVisitor::Initialize();
+}
+
+
void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
bool is_marking,
bool is_compacting) {
@@ -623,24 +619,6 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
}
-void IncrementalMarking::VisitGlobalContext(Context* ctx, ObjectVisitor* v) {
- v->VisitPointers(
- HeapObject::RawField(
- ctx, Context::MarkCompactBodyDescriptor::kStartOffset),
- HeapObject::RawField(
- ctx, Context::MarkCompactBodyDescriptor::kEndOffset));
-
- MarkCompactCollector* collector = heap_->mark_compact_collector();
- for (int idx = Context::FIRST_WEAK_SLOT;
- idx < Context::GLOBAL_CONTEXT_SLOTS;
- ++idx) {
- Object** slot =
- HeapObject::RawField(ctx, FixedArray::OffsetOfElementAt(idx));
- collector->RecordSlot(slot, slot, *slot);
- }
-}
-
-
void IncrementalMarking::Hurry() {
if (state() == MARKING) {
double start = 0.0;
@@ -651,8 +629,7 @@ void IncrementalMarking::Hurry() {
// TODO(gc) hurry can mark objects it encounters black as mutator
// was stopped.
Map* filler_map = heap_->one_pointer_filler_map();
- Map* global_context_map = heap_->global_context_map();
- IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
+ Map* native_context_map = heap_->native_context_map();
while (!marking_deque_.IsEmpty()) {
HeapObject* obj = marking_deque_.Pop();
@@ -661,9 +638,9 @@ void IncrementalMarking::Hurry() {
Map* map = obj->map();
if (map == filler_map) {
continue;
- } else if (map == global_context_map) {
- // Global contexts have weak fields.
- VisitGlobalContext(Context::cast(obj), &marking_visitor);
+ } else if (map == native_context_map) {
+ // Native contexts have weak fields.
+ IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
} else if (map->instance_type() == MAP_TYPE) {
Map* map = Map::cast(obj);
heap_->ClearCacheOnMap(map);
@@ -676,12 +653,17 @@ void IncrementalMarking::Hurry() {
map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
marker_.MarkMapContents(map);
} else {
- marking_visitor.VisitPointers(
+ IncrementalMarkingMarkingVisitor::VisitPointers(
+ heap_,
HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
} else {
- obj->Iterate(&marking_visitor);
+ MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+ if (Marking::IsWhite(map_mark_bit)) {
+ WhiteToGreyAndPush(map, map_mark_bit);
+ }
+ IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
}
MarkBit mark_bit = Marking::MarkBitFrom(obj);
@@ -704,7 +686,7 @@ void IncrementalMarking::Hurry() {
PolymorphicCodeCache::kSize);
}
- Object* context = heap_->global_contexts_list();
+ Object* context = heap_->native_contexts_list();
while (!context->IsUndefined()) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
@@ -814,8 +796,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
}
} else if (state_ == MARKING) {
Map* filler_map = heap_->one_pointer_filler_map();
- Map* global_context_map = heap_->global_context_map();
- IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
+ Map* native_context_map = heap_->native_context_map();
while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
HeapObject* obj = marking_deque_.Pop();
@@ -832,15 +813,15 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
}
// TODO(gc) switch to static visitor instead of normal visitor.
- if (map == global_context_map) {
- // Global contexts have weak fields.
+ if (map == native_context_map) {
+ // Native contexts have weak fields.
Context* ctx = Context::cast(obj);
// We will mark cache black with a separate pass
// when we finish marking.
MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
- VisitGlobalContext(ctx, &marking_visitor);
+ IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
} else if (map->instance_type() == MAP_TYPE) {
Map* map = Map::cast(obj);
heap_->ClearCacheOnMap(map);
@@ -853,25 +834,13 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
marker_.MarkMapContents(map);
} else {
- marking_visitor.VisitPointers(
+ IncrementalMarkingMarkingVisitor::VisitPointers(
+ heap_,
HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
- } else if (map->instance_type() == JS_FUNCTION_TYPE) {
- marking_visitor.VisitPointers(
- HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
- HeapObject::RawField(obj, JSFunction::kCodeEntryOffset));
-
- marking_visitor.VisitCodeEntry(
- obj->address() + JSFunction::kCodeEntryOffset);
-
- marking_visitor.VisitPointers(
- HeapObject::RawField(obj,
- JSFunction::kCodeEntryOffset + kPointerSize),
- HeapObject::RawField(obj,
- JSFunction::kNonWeakFieldsEndOffset));
} else {
- obj->IterateBody(map->instance_type(), size, &marking_visitor);
+ IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
}
MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
@@ -892,8 +861,8 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
if (FLAG_trace_gc) {
- PrintF("Speed up marking after %d steps\n",
- static_cast<int>(kAllocationMarkingFactorSpeedupInterval));
+ PrintPID("Speed up marking after %d steps\n",
+ static_cast<int>(kAllocationMarkingFactorSpeedupInterval));
}
speed_up = true;
}
@@ -907,7 +876,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
if (space_left_is_very_small ||
only_1_nth_of_space_that_was_available_still_left) {
- if (FLAG_trace_gc) PrintF("Speed up marking because of low space left\n");
+ if (FLAG_trace_gc) PrintPID("Speed up marking because of low space left\n");
speed_up = true;
}
@@ -918,7 +887,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
if (size_of_old_space_multiplied_by_n_during_marking) {
speed_up = true;
if (FLAG_trace_gc) {
- PrintF("Speed up marking because of heap size increase\n");
+ PrintPID("Speed up marking because of heap size increase\n");
}
}
@@ -930,7 +899,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
// We try to scan at least twice the speed that we are allocating.
if (promoted_during_marking > bytes_scanned_ / 2 + scavenge_slack + delay) {
if (FLAG_trace_gc) {
- PrintF("Speed up marking because marker was not keeping up\n");
+ PrintPID("Speed up marking because marker was not keeping up\n");
}
speed_up = true;
}
@@ -938,7 +907,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
if (speed_up) {
if (state_ != MARKING) {
if (FLAG_trace_gc) {
- PrintF("Postponing speeding up marking until marking starts\n");
+ PrintPID("Postponing speeding up marking until marking starts\n");
}
} else {
allocation_marking_factor_ += kAllocationMarkingFactorSpeedup;
@@ -946,7 +915,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
Min(kMaxAllocationMarkingFactor,
static_cast<intptr_t>(allocation_marking_factor_ * 1.3)));
if (FLAG_trace_gc) {
- PrintF("Marking speed increased to %d\n", allocation_marking_factor_);
+ PrintPID("Marking speed increased to %d\n", allocation_marking_factor_);
}
}
}