Diffstat (limited to 'deps/v8/src/heap/mark-compact-inl.h')
-rw-r--r--  deps/v8/src/heap/mark-compact-inl.h  |  70
1 file changed, 41 insertions(+), 29 deletions(-)
diff --git a/deps/v8/src/heap/mark-compact-inl.h b/deps/v8/src/heap/mark-compact-inl.h
index 249144835a..d200671955 100644
--- a/deps/v8/src/heap/mark-compact-inl.h
+++ b/deps/v8/src/heap/mark-compact-inl.h
@@ -9,6 +9,7 @@
#include "src/heap/mark-compact.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/remembered-set.h"
+#include "src/objects/js-collection-inl.h"
namespace v8 {
namespace internal {
@@ -88,29 +89,38 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
- VisitJSWeakCollection(Map* map, JSWeakCollection* weak_collection) {
- // Enqueue weak collection in linked list of encountered weak collections.
- if (weak_collection->next() == heap_->undefined_value()) {
- weak_collection->set_next(heap_->encountered_weak_collections());
- heap_->set_encountered_weak_collections(weak_collection);
+ VisitEphemeronHashTable(Map* map, EphemeronHashTable* table) {
+ collector_->AddEphemeronHashTable(table);
+
+ for (int i = 0; i < table->Capacity(); i++) {
+ Object** key_slot =
+ table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
+ HeapObject* key = HeapObject::cast(table->KeyAt(i));
+ collector_->RecordSlot(table, key_slot, key);
+
+ Object** value_slot =
+ table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
+
+ if (marking_state()->IsBlackOrGrey(key)) {
+ VisitPointer(table, value_slot);
+
+ } else {
+ Object* value_obj = *value_slot;
+
+ if (value_obj->IsHeapObject()) {
+ HeapObject* value = HeapObject::cast(value_obj);
+ collector_->RecordSlot(table, value_slot, value);
+
+ // Revisit ephemerons with both key and value unreachable at end
+ // of concurrent marking cycle.
+ if (marking_state()->IsWhite(value)) {
+ collector_->AddEphemeron(key, value);
+ }
+ }
+ }
}
- // Skip visiting the backing hash table containing the mappings and the
- // pointer to the other enqueued weak collections, both are post-processed.
- int size = JSWeakCollection::BodyDescriptorWeak::SizeOf(map, weak_collection);
- JSWeakCollection::BodyDescriptorWeak::IterateBody(map, weak_collection, size,
- this);
-
- // Partially initialized weak collection is enqueued, but table is ignored.
- if (!weak_collection->table()->IsHashTable()) return size;
-
- // Mark the backing hash table without pushing it on the marking stack.
- Object** slot =
- HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
- HeapObject* obj = HeapObject::cast(*slot);
- collector_->RecordSlot(weak_collection, slot, obj);
- MarkObjectWithoutPush(weak_collection, obj);
- return size;
+ return table->SizeFromMap(map);
}
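
The rewritten visitor applies the ephemeron rule: an entry's value is only treated as live once its key is marked, and entries whose key is still white are deferred (via AddEphemeron) so they can be revisited as marking progresses. Below is a minimal, self-contained sketch of that fixpoint; it is a simplified model with hypothetical Obj/MarkSet/Ephemeron types, not V8's worklist machinery.

// Hypothetical, simplified model of the ephemeron rule above -- not V8 code.
#include <cstddef>
#include <unordered_set>
#include <utility>
#include <vector>

struct Obj {};  // stand-in for a heap object

using MarkSet = std::unordered_set<const Obj*>;
using Ephemeron = std::pair<const Obj*, const Obj*>;  // {key, value}

// One pass: marks values whose keys are already marked and returns the
// entries that must be revisited once more of the heap has been marked.
std::vector<Ephemeron> ProcessEphemerons(const std::vector<Ephemeron>& input,
                                         MarkSet& marked) {
  std::vector<Ephemeron> deferred;
  for (const auto& [key, value] : input) {
    if (marked.count(key)) {
      marked.insert(value);  // key is live, so the value is live too
    } else {
      deferred.push_back({key, value});  // key still white: retry later
    }
  }
  return deferred;
}

// Iterate to a fixpoint: stop when a pass resolves nothing. Entries that
// survive to the end have an unreachable key; key and value both die.
void MarkEphemeronsToFixpoint(std::vector<Ephemeron> worklist, MarkSet& marked) {
  std::size_t before;
  do {
    before = worklist.size();
    worklist = ProcessEphemerons(worklist, marked);
  } while (worklist.size() < before);
}

A single pass is not enough because marking one entry's value can make another entry's key reachable, which is why unresolved entries are kept and revisited.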
template <FixedArrayVisitationMode fixed_array_mode,
@@ -162,7 +172,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
// Weak cells with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
- collector_->RecordSlot(weak_cell, slot, *slot);
+ collector_->RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
} else {
// If we do not know about liveness of values of weak cells, we have to
// process them when we know the liveness of the whole transitive
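
The comment above describes two regimes: when a weak cell's value is already known to be live, its slot is recorded immediately during visiting, and otherwise the cell must wait until the transitive closure of marking is complete. A hedged sketch of that split, using hypothetical WeakCellModel/MarkedSet types rather than V8's WeakCell API:

// Hypothetical illustration of the two weak-cell cases -- not V8's API.
#include <unordered_set>
#include <vector>

struct HeapObj {};
struct WeakCellModel { HeapObj* value; };

using MarkedSet = std::unordered_set<const HeapObj*>;

// During marking: live values take the fast path, the rest are deferred.
void VisitWeakCellModel(WeakCellModel* cell, const MarkedSet& marked,
                        std::vector<WeakCellModel*>& deferred) {
  if (marked.count(cell->value)) {
    // Fast path: value already proven live; handle the cell now so the
    // main GC pause does not have to reconsider it.
  } else {
    // Liveness unknown until marking reaches its transitive closure.
    deferred.push_back(cell);
  }
}

// After marking finishes: clear cells whose values never became live.
void ClearDeadWeakCells(std::vector<WeakCellModel*>& deferred,
                        const MarkedSet& marked) {
  for (WeakCellModel* cell : deferred) {
    if (!marked.count(cell->value)) cell->value = nullptr;
  }
  deferred.clear();
}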
@@ -250,7 +260,7 @@ template <FixedArrayVisitationMode fixed_array_mode,
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitCodeTarget(Code* host,
RelocInfo* rinfo) {
- DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
+ DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
collector_->RecordRelocSlot(host, rinfo, target);
MarkObject(host, target);
@@ -383,7 +393,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
#ifdef ENABLE_MINOR_MC
void MinorMarkCompactCollector::MarkRootObject(HeapObject* obj) {
- if (heap_->InNewSpace(obj) && non_atomic_marking_state_.WhiteToGrey(obj)) {
+ if (Heap::InNewSpace(obj) && non_atomic_marking_state_.WhiteToGrey(obj)) {
worklist_->Push(kMainThread, obj);
}
}
@@ -400,13 +410,13 @@ void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
}
void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
- Object* target) {
+ HeapObject* target) {
RecordSlot(object, reinterpret_cast<HeapObjectReference**>(slot), target);
}
void MarkCompactCollector::RecordSlot(HeapObject* object,
HeapObjectReference** slot,
- Object* target) {
+ HeapObject* target) {
Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
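
Both RecordSlot overloads now take HeapObject* rather than Object*, so the invariant that a recorded target is never a Smi is enforced by the signature instead of being an implicit assumption; this is why the weak-cell call site above gains an explicit HeapObject::cast. A simplified model of the pointer-tagging invariant behind this, with hypothetical types rather than V8's actual class hierarchy:

// Hypothetical, simplified tagging model -- not V8's Object hierarchy.
#include <cassert>
#include <cstdint>

// A tagged word is either a small integer (Smi, low bit 0) or a pointer to
// a heap object (low bit 1). Only heap pointers belong in a remembered set.
struct Object {
  uintptr_t bits;
  bool IsSmi() const { return (bits & 1) == 0; }
  bool IsHeapObject() const { return (bits & 1) == 1; }
};

struct HeapObject : Object {
  // Checked downcast: a caller must prove the value is not a Smi before it
  // can obtain the HeapObject* that the narrowed RecordSlot demands.
  static HeapObject* cast(Object* o) {
    assert(o->IsHeapObject());
    return static_cast<HeapObject*>(o);
  }
};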
@@ -420,9 +430,11 @@ template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
Address start)
: chunk_(chunk),
- one_word_filler_map_(chunk->heap()->one_pointer_filler_map()),
- two_word_filler_map_(chunk->heap()->two_pointer_filler_map()),
- free_space_map_(chunk->heap()->free_space_map()),
+ one_word_filler_map_(
+ ReadOnlyRoots(chunk->heap()).one_pointer_filler_map()),
+ two_word_filler_map_(
+ ReadOnlyRoots(chunk->heap()).two_pointer_filler_map()),
+ free_space_map_(ReadOnlyRoots(chunk->heap()).free_space_map()),
it_(chunk, bitmap) {
it_.Advance(Bitmap::IndexToCell(
Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
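
The iterator switches from Heap accessors to ReadOnlyRoots for the filler and free-space maps, which live in the immutable read-only portion of the heap. It caches them because, while walking live objects on a page, each gap is identified by the map word at its start. A sketch of that lookup under the same assumptions, with hypothetical RootsModel/GapSize names simplified from V8:

// Hypothetical sketch of the gap-size lookup -- simplified, not V8 code.
#include <cstddef>

struct Map {};

// Stand-in for ReadOnlyRoots: immortal maps reachable without the full Heap.
struct RootsModel {
  Map one_word_filler, two_word_filler, free_space;
};

// A gap is one word, two words, or a FreeSpace object carrying its own size;
// any other map means a live object starts at this address.
std::size_t GapSize(const Map* map, const RootsModel& roots,
                    std::size_t free_space_size,
                    std::size_t word_size = sizeof(void*)) {
  if (map == &roots.one_word_filler) return word_size;
  if (map == &roots.two_word_filler) return 2 * word_size;
  if (map == &roots.free_space) return free_space_size;
  return 0;  // not filler: a live object
}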