author     Michaël Zasso <targos@protonmail.com>   2018-12-04 08:20:37 +0100
committer  Michaël Zasso <targos@protonmail.com>   2018-12-06 15:23:33 +0100
commit     9b4bf7de6c9a7c25f116c7a502384c20b5cfaea3
tree       2b0c843168dafb939d8df8a15b2aa72b76dee51d
parent     b8fbe69db1292307adb2c2b2e0d5ef48c4ab2faf
deps: update V8 to 7.1.302.28
PR-URL: https://github.com/nodejs/node/pull/23423
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Gus Caplan <me@gus.host>
Reviewed-By: Myles Borins <myles.borins@gmail.com>
Diffstat (limited to 'deps/v8/src/heap/concurrent-marking.cc')
-rw-r--r--  deps/v8/src/heap/concurrent-marking.cc | 121
1 file changed, 56 insertions, 65 deletions
diff --git a/deps/v8/src/heap/concurrent-marking.cc b/deps/v8/src/heap/concurrent-marking.cc
index f6eabbb021..5e147ca9a5 100644
--- a/deps/v8/src/heap/concurrent-marking.cc
+++ b/deps/v8/src/heap/concurrent-marking.cc
@@ -74,15 +74,19 @@ class ConcurrentMarkingVisitor final
public:
using BaseClass = HeapVisitor<int, ConcurrentMarkingVisitor>;
- explicit ConcurrentMarkingVisitor(ConcurrentMarking::MarkingWorklist* shared,
- ConcurrentMarking::MarkingWorklist* bailout,
- LiveBytesMap* live_bytes,
- WeakObjects* weak_objects, int task_id)
+ explicit ConcurrentMarkingVisitor(
+ ConcurrentMarking::MarkingWorklist* shared,
+ ConcurrentMarking::MarkingWorklist* bailout, LiveBytesMap* live_bytes,
+ WeakObjects* weak_objects,
+ ConcurrentMarking::EmbedderTracingWorklist* embedder_objects, int task_id,
+ bool embedder_tracing_enabled)
: shared_(shared, task_id),
bailout_(bailout, task_id),
weak_objects_(weak_objects),
+ embedder_objects_(embedder_objects, task_id),
marking_state_(live_bytes),
- task_id_(task_id) {}
+ task_id_(task_id),
+ embedder_tracing_enabled_(embedder_tracing_enabled) {}
template <typename T>
static V8_INLINE T* Cast(HeapObject* object) {
@@ -138,19 +142,24 @@ class ConcurrentMarkingVisitor final
for (MaybeObject** slot = start; slot < end; slot++) {
MaybeObject* object = base::AsAtomicPointer::Relaxed_Load(slot);
HeapObject* heap_object;
- if (object->ToStrongHeapObject(&heap_object)) {
+ if (object->GetHeapObjectIfStrong(&heap_object)) {
// If the reference changes concurrently from strong to weak, the write
// barrier will treat the weak reference as strong, so we won't miss the
// weak reference.
ProcessStrongHeapObject(host, reinterpret_cast<Object**>(slot),
heap_object);
- } else if (object->ToWeakHeapObject(&heap_object)) {
+ } else if (object->GetHeapObjectIfWeak(&heap_object)) {
ProcessWeakHeapObject(
host, reinterpret_cast<HeapObjectReference**>(slot), heap_object);
}
}
}
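// ---------------------------------------------------------------------------
// A minimal sketch, not V8 code, of the strong/weak dispatch performed by
// VisitPointers above. It assumes, as V8's MaybeObject does, that the low bit
// of a slot value tags a weak reference and that slots are loaded with relaxed
// atomics; every name below is hypothetical.
#include <atomic>
#include <cstdint>

namespace sketch_slots {

struct HeapObject {};

constexpr std::uintptr_t kWeakTag = 0x1;

// A slot is read with relaxed ordering, mirroring
// base::AsAtomicPointer::Relaxed_Load(slot) in the visitor.
using Slot = std::atomic<std::uintptr_t>;

void VisitSlot(Slot* slot) {
  const std::uintptr_t raw = slot->load(std::memory_order_relaxed);
  if (raw == 0) return;  // cleared reference, nothing to mark
  HeapObject* target = reinterpret_cast<HeapObject*>(raw & ~kWeakTag);
  if ((raw & kWeakTag) == 0) {
    // Strong reference: mark the target and keep visiting it
    // (ProcessStrongHeapObject in the real visitor).
  } else {
    // Weak reference: record the slot so it can be cleared later if the
    // target dies (ProcessWeakHeapObject in the real visitor).
  }
  (void)target;
}

}  // namespace sketch_slots
// ---------------------------------------------------------------------------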
+ // Weak list pointers should be ignored during marking. The lists are
+ // reconstructed after GC.
+ void VisitCustomWeakPointers(HeapObject* host, Object** start,
+ Object** end) override {}
+
void VisitPointersInSnapshot(HeapObject* host, const SlotSnapshot& snapshot) {
for (int i = 0; i < snapshot.number_of_slots(); i++) {
Object** slot = snapshot.slot(i);
@@ -175,31 +184,27 @@ class ConcurrentMarkingVisitor final
return VisitJSObjectSubclass(map, object);
}
- int VisitJSArrayBuffer(Map* map, JSArrayBuffer* object) {
- return VisitJSObjectSubclass(map, object);
- }
-
int VisitWasmInstanceObject(Map* map, WasmInstanceObject* object) {
return VisitJSObjectSubclass(map, object);
}
+ // Some JS objects can carry back links to embedders that contain information
+ // relevant to the garbage collectors.
+
int VisitJSApiObject(Map* map, JSObject* object) {
- if (marking_state_.IsGrey(object)) {
- // The main thread will do wrapper tracing in Blink.
- bailout_.Push(object);
- }
- return 0;
+ return VisitEmbedderTracingSubclass(map, object);
}
- int VisitJSFunction(Map* map, JSFunction* object) {
- int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
- int used_size = map->UsedInstanceSize();
- DCHECK_LE(used_size, size);
- DCHECK_GE(used_size, JSObject::kHeaderSize);
- const SlotSnapshot& snapshot = MakeSlotSnapshotWeak(map, object, used_size);
- if (!ShouldVisit(object)) return 0;
- VisitPointersInSnapshot(object, snapshot);
- return size;
+ int VisitJSArrayBuffer(Map* map, JSArrayBuffer* object) {
+ return VisitEmbedderTracingSubclass(map, object);
+ }
+
+ int VisitJSDataView(Map* map, JSDataView* object) {
+ return VisitEmbedderTracingSubclass(map, object);
+ }
+
+ int VisitJSTypedArray(Map* map, JSTypedArray* object) {
+ return VisitEmbedderTracingSubclass(map, object);
}
// ===========================================================================
@@ -270,34 +275,18 @@ class ConcurrentMarkingVisitor final
}
// ===========================================================================
- // Objects with weak fields and/or side-effectiful visitation.
+ // Side-effectful visitation.
// ===========================================================================
int VisitBytecodeArray(Map* map, BytecodeArray* object) {
if (!ShouldVisit(object)) return 0;
- int size = BytecodeArray::BodyDescriptorWeak::SizeOf(map, object);
+ int size = BytecodeArray::BodyDescriptor::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
- BytecodeArray::BodyDescriptorWeak::IterateBody(map, object, size, this);
+ BytecodeArray::BodyDescriptor::IterateBody(map, object, size, this);
object->MakeOlder();
return size;
}
- int VisitAllocationSite(Map* map, AllocationSite* object) {
- if (!ShouldVisit(object)) return 0;
- int size = AllocationSite::BodyDescriptorWeak::SizeOf(map, object);
- VisitMapPointer(object, object->map_slot());
- AllocationSite::BodyDescriptorWeak::IterateBody(map, object, size, this);
- return size;
- }
-
- int VisitCodeDataContainer(Map* map, CodeDataContainer* object) {
- if (!ShouldVisit(object)) return 0;
- int size = CodeDataContainer::BodyDescriptorWeak::SizeOf(map, object);
- VisitMapPointer(object, object->map_slot());
- CodeDataContainer::BodyDescriptorWeak::IterateBody(map, object, size, this);
- return size;
- }
-
int VisitMap(Map* meta_map, Map* map) {
if (marking_state_.IsGrey(map)) {
// Maps have ad-hoc weakness for descriptor arrays. They also clear the
@@ -315,14 +304,6 @@ class ConcurrentMarkingVisitor final
return 0;
}
- int VisitNativeContext(Map* map, Context* object) {
- if (!ShouldVisit(object)) return 0;
- int size = Context::BodyDescriptorWeak::SizeOf(map, object);
- VisitMapPointer(object, object->map_slot());
- Context::BodyDescriptorWeak::IterateBody(map, object, size, this);
- return size;
- }
-
int VisitTransitionArray(Map* map, TransitionArray* array) {
if (!ShouldVisit(array)) return 0;
VisitMapPointer(array, array->map_slot());
@@ -443,6 +424,18 @@ class ConcurrentMarkingVisitor final
}
template <typename T>
+ int VisitEmbedderTracingSubclass(Map* map, T* object) {
+ DCHECK(object->IsApiWrapper());
+ int size = VisitJSObjectSubclass(map, object);
+ if (size && embedder_tracing_enabled_) {
+ // Success: The object needs to be processed for embedder references on
+ // the main thread.
+ embedder_objects_.Push(object);
+ }
+ return size;
+ }
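// ---------------------------------------------------------------------------
// A minimal sketch, not V8 code, of the defer-to-main-thread pattern used by
// VisitEmbedderTracingSubclass above: the concurrent thread visits the
// V8-visible fields and only queues the wrapper, so that the main thread can
// later extract the embedder fields it alone may touch. All names below are
// hypothetical.
#include <vector>

namespace sketch_embedder {

struct WrapperObject {};

class ConcurrentVisitor {
 public:
  explicit ConcurrentVisitor(bool embedder_tracing_enabled)
      : embedder_tracing_enabled_(embedder_tracing_enabled) {}

  int VisitWrapper(WrapperObject* object) {
    // Concurrent-safe part: mark the object and visit its regular fields.
    const int size = VisitRegularFields(object);
    if (size && embedder_tracing_enabled_) {
      // Success: queue the wrapper for main-thread embedder processing.
      deferred_for_main_thread_.push_back(object);
    }
    return size;
  }

 private:
  int VisitRegularFields(WrapperObject*) { return 1; /* stand-in size */ }

  const bool embedder_tracing_enabled_;
  std::vector<WrapperObject*> deferred_for_main_thread_;
};

}  // namespace sketch_embedder
// ---------------------------------------------------------------------------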
+
+ template <typename T>
int VisitLeftTrimmableArray(Map* map, T* object) {
// The synchronized_length() function checks that the length is a Smi.
// This is not necessarily the case if the array is being left-trimmed.
@@ -466,20 +459,14 @@ class ConcurrentMarkingVisitor final
return slot_snapshot_;
}
- template <typename T>
- const SlotSnapshot& MakeSlotSnapshotWeak(Map* map, T* object, int size) {
- SlotSnapshottingVisitor visitor(&slot_snapshot_);
- visitor.VisitPointer(object,
- reinterpret_cast<Object**>(object->map_slot()));
- T::BodyDescriptorWeak::IterateBody(map, object, size, &visitor);
- return slot_snapshot_;
- }
ConcurrentMarking::MarkingWorklist::View shared_;
ConcurrentMarking::MarkingWorklist::View bailout_;
WeakObjects* weak_objects_;
+ ConcurrentMarking::EmbedderTracingWorklist::View embedder_objects_;
ConcurrentMarkingState marking_state_;
int task_id_;
SlotSnapshot slot_snapshot_;
+ bool embedder_tracing_enabled_;
};
// Strings can change maps due to conversion to thin string or external strings.
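// ---------------------------------------------------------------------------
// A minimal sketch, not V8 code, of the SlotSnapshot idea referenced by the
// visitor above. Because the main thread can mutate objects concurrently
// (e.g. strings changing maps, as the comment above notes), the visitor first
// copies an object's pointer slots into a local buffer and then marks from
// that stable copy. Types below are hypothetical; a real implementation would
// also load each slot atomically while taking the snapshot.
#include <cstddef>
#include <vector>

namespace sketch_snapshot {

struct Object {
  void* slots[8];
  std::size_t slot_count;
};

class SlotSnapshot {
 public:
  void Take(const Object& object) {
    copied_.assign(object.slots, object.slots + object.slot_count);
  }
  const std::vector<void*>& slots() const { return copied_; }

 private:
  std::vector<void*> copied_;
};

void MarkFromSnapshot(const Object& object) {
  SlotSnapshot snapshot;
  snapshot.Take(object);              // single pass copying the slots
  for (void* slot : snapshot.slots()) {
    (void)slot;                       // mark the referenced object here
  }
}

}  // namespace sketch_snapshot
// ---------------------------------------------------------------------------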
@@ -524,7 +511,7 @@ class ConcurrentMarking::Task : public CancelableTask {
task_state_(task_state),
task_id_(task_id) {}
- virtual ~Task() {}
+ ~Task() override = default;
private:
// v8::internal::CancelableTask overrides.
@@ -541,12 +528,14 @@ class ConcurrentMarking::Task : public CancelableTask {
ConcurrentMarking::ConcurrentMarking(Heap* heap, MarkingWorklist* shared,
MarkingWorklist* bailout,
MarkingWorklist* on_hold,
- WeakObjects* weak_objects)
+ WeakObjects* weak_objects,
+ EmbedderTracingWorklist* embedder_objects)
: heap_(heap),
shared_(shared),
bailout_(bailout),
on_hold_(on_hold),
- weak_objects_(weak_objects) {
+ weak_objects_(weak_objects),
+ embedder_objects_(embedder_objects) {
// The runtime flag should be set only if the compile time flag was set.
#ifndef V8_CONCURRENT_MARKING
CHECK(!FLAG_concurrent_marking);
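// ---------------------------------------------------------------------------
// A minimal sketch, not V8 code, of the compile-time vs. runtime flag
// invariant asserted above: if the feature was compiled out, the runtime flag
// must never be observed as enabled. The macro and names are hypothetical.
#include <cassert>

namespace sketch_flags {

// #define SKETCH_CONCURRENT_MARKING   // hypothetical compile-time switch

inline void CheckFlagInvariant(bool runtime_flag_enabled) {
  (void)runtime_flag_enabled;
#ifndef SKETCH_CONCURRENT_MARKING
  // Compiled out: the runtime flag must stay off.
  assert(!runtime_flag_enabled);
#endif
}

}  // namespace sketch_flags
// ---------------------------------------------------------------------------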
@@ -558,8 +547,9 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
GCTracer::BackgroundScope::MC_BACKGROUND_MARKING);
size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000;
- ConcurrentMarkingVisitor visitor(shared_, bailout_, &task_state->live_bytes,
- weak_objects_, task_id);
+ ConcurrentMarkingVisitor visitor(
+ shared_, bailout_, &task_state->live_bytes, weak_objects_,
+ embedder_objects_, task_id, heap_->local_embedder_heap_tracer()->InUse());
double time_ms;
size_t marked_bytes = 0;
if (FLAG_trace_concurrent_marking) {
@@ -626,6 +616,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
shared_->FlushToGlobal(task_id);
bailout_->FlushToGlobal(task_id);
on_hold_->FlushToGlobal(task_id);
+ embedder_objects_->FlushToGlobal(task_id);
weak_objects_->transition_arrays.FlushToGlobal(task_id);
weak_objects_->ephemeron_hash_tables.FlushToGlobal(task_id);
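// ---------------------------------------------------------------------------
// A minimal sketch, not V8 code, of the per-task worklist pattern behind the
// FlushToGlobal() calls above: each task pushes into a private segment and
// publishes it to a shared pool when it finishes. All names are hypothetical;
// the real V8 worklist is segment-based and lock-free on the fast path.
#include <mutex>
#include <vector>

namespace sketch_worklist {

template <typename T, int kMaxTasks>
class Worklist {
 public:
  // Hot path: each marking task owns local_[task_id], so no lock is taken.
  void Push(int task_id, T value) { local_[task_id].push_back(value); }

  // When a task finishes (or bails out), its private entries are appended to
  // the global pool so the main thread and other tasks can see them.
  void FlushToGlobal(int task_id) {
    std::lock_guard<std::mutex> guard(global_mutex_);
    std::vector<T>& local = local_[task_id];
    global_.insert(global_.end(), local.begin(), local.end());
    local.clear();
  }

  // Drain on the main thread after all tasks have flushed.
  bool Pop(T* out) {
    std::lock_guard<std::mutex> guard(global_mutex_);
    if (global_.empty()) return false;
    *out = global_.back();
    global_.pop_back();
    return true;
  }

 private:
  std::vector<T> local_[kMaxTasks];
  std::vector<T> global_;
  std::mutex global_mutex_;
};

}  // namespace sketch_worklist
// ---------------------------------------------------------------------------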