Diffstat (limited to 'deps/v8/src/heap/heap.h'):
 deps/v8/src/heap/heap.h | 256 +-
 1 file changed, 132 insertions(+), 124 deletions(-)
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index fe7b9341c0..80bc68c172 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -6,7 +6,7 @@
#define V8_HEAP_HEAP_H_
#include <cmath>
-#include <map>
+#include <vector>
// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!
@@ -21,6 +21,7 @@
#include "src/objects.h"
#include "src/objects/hash-table.h"
#include "src/objects/string-table.h"
+#include "src/visitors.h"
namespace v8 {
namespace internal {
@@ -136,6 +137,17 @@ using v8::MemoryPressureLevel;
V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
+ /* Oddball maps */ \
+ V(Map, undefined_map, UndefinedMap) \
+ V(Map, the_hole_map, TheHoleMap) \
+ V(Map, null_map, NullMap) \
+ V(Map, boolean_map, BooleanMap) \
+ V(Map, uninitialized_map, UninitializedMap) \
+ V(Map, arguments_marker_map, ArgumentsMarkerMap) \
+ V(Map, exception_map, ExceptionMap) \
+ V(Map, termination_exception_map, TerminationExceptionMap) \
+ V(Map, optimized_out_map, OptimizedOutMap) \
+ V(Map, stale_register_map, StaleRegisterMap) \
/* Canonical empty values */ \
V(ByteArray, empty_byte_array, EmptyByteArray) \
V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
@@ -155,6 +167,7 @@ using v8::MemoryPressureLevel;
EmptySlowElementDictionary) \
V(PropertyCell, empty_property_cell, EmptyPropertyCell) \
V(WeakCell, empty_weak_cell, EmptyWeakCell) \
+ V(InterceptorInfo, noop_interceptor_info, NoOpInterceptorInfo) \
/* Protectors */ \
V(PropertyCell, array_protector, ArrayProtector) \
V(Cell, is_concat_spreadable_protector, IsConcatSpreadableProtector) \
@@ -201,25 +214,13 @@ using v8::MemoryPressureLevel;
V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos) \
V(FixedArray, serialized_templates, SerializedTemplates) \
V(FixedArray, serialized_global_proxy_sizes, SerializedGlobalProxySizes) \
- /* Configured values */ \
V(TemplateList, message_listeners, MessageListeners) \
- V(InterceptorInfo, noop_interceptor_info, NoOpInterceptorInfo) \
- V(Code, js_entry_code, JsEntryCode) \
- V(Code, js_construct_entry_code, JsConstructEntryCode) \
- /* Oddball maps */ \
- V(Map, undefined_map, UndefinedMap) \
- V(Map, the_hole_map, TheHoleMap) \
- V(Map, null_map, NullMap) \
- V(Map, boolean_map, BooleanMap) \
- V(Map, uninitialized_map, UninitializedMap) \
- V(Map, arguments_marker_map, ArgumentsMarkerMap) \
- V(Map, exception_map, ExceptionMap) \
- V(Map, termination_exception_map, TerminationExceptionMap) \
- V(Map, optimized_out_map, OptimizedOutMap) \
- V(Map, stale_register_map, StaleRegisterMap) \
/* per-Isolate map for JSPromiseCapability. */ \
/* TODO(caitp): Make this a Struct */ \
- V(Map, js_promise_capability_map, JSPromiseCapabilityMap)
+ V(Map, js_promise_capability_map, JSPromiseCapabilityMap) \
+ /* JS Entries */ \
+ V(Code, js_entry_code, JsEntryCode) \
+ V(Code, js_construct_entry_code, JsConstructEntryCode)
// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V) \
@@ -248,64 +249,93 @@ using v8::MemoryPressureLevel;
// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers. This list is not complete and has omissions.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
+ V(ArgumentsMarker) \
+ V(ArgumentsMarkerMap) \
+ V(ArrayBufferNeuteringProtector) \
+ V(ArrayIteratorProtector) \
+ V(ArrayProtector) \
+ V(BlockContextMap) \
+ V(BooleanMap) \
V(ByteArrayMap) \
V(BytecodeArrayMap) \
- V(FreeSpaceMap) \
- V(OnePointerFillerMap) \
- V(TwoPointerFillerMap) \
- V(UndefinedValue) \
- V(TheHoleValue) \
- V(NullValue) \
- V(TrueValue) \
- V(FalseValue) \
- V(UninitializedValue) \
+ V(CatchContextMap) \
V(CellMap) \
- V(GlobalPropertyCellMap) \
- V(SharedFunctionInfoMap) \
- V(MetaMap) \
- V(HeapNumberMap) \
- V(MutableHeapNumberMap) \
- V(NativeContextMap) \
- V(FixedArrayMap) \
V(CodeMap) \
- V(ScopeInfoMap) \
- V(ModuleInfoMap) \
- V(FixedCOWArrayMap) \
- V(FixedDoubleArrayMap) \
- V(WeakCellMap) \
- V(TransitionArrayMap) \
- V(HashTableMap) \
- V(OrderedHashTableMap) \
- V(EmptyFixedArray) \
V(EmptyByteArray) \
V(EmptyDescriptorArray) \
- V(ArgumentsMarker) \
- V(SymbolMap) \
- V(SloppyArgumentsElementsMap) \
+ V(EmptyFixedArray) \
+ V(EmptyFixedFloat32Array) \
+ V(EmptyFixedFloat64Array) \
+ V(EmptyFixedInt16Array) \
+ V(EmptyFixedInt32Array) \
+ V(EmptyFixedInt8Array) \
+ V(EmptyFixedUint16Array) \
+ V(EmptyFixedUint32Array) \
+ V(EmptyFixedUint8Array) \
+ V(EmptyFixedUint8ClampedArray) \
+ V(EmptyPropertyCell) \
+ V(EmptyScopeInfo) \
+ V(EmptyScript) \
+ V(EmptySloppyArgumentsElements) \
+ V(EmptySlowElementDictionary) \
+ V(empty_string) \
+ V(EmptyWeakCell) \
+ V(EvalContextMap) \
+ V(Exception) \
+ V(FalseValue) \
+ V(FastArrayIterationProtector) \
+ V(FixedArrayMap) \
+ V(FixedCOWArrayMap) \
+ V(FixedDoubleArrayMap) \
+ V(ForeignMap) \
+ V(FreeSpaceMap) \
V(FunctionContextMap) \
- V(CatchContextMap) \
- V(WithContextMap) \
- V(BlockContextMap) \
+ V(GlobalPropertyCellMap) \
+ V(HashTableMap) \
+ V(HeapNumberMap) \
+ V(HoleNanValue) \
+ V(InfinityValue) \
+ V(IsConcatSpreadableProtector) \
+ V(JsConstructEntryCode) \
+ V(JsEntryCode) \
+ V(JSMessageObjectMap) \
+ V(ManyClosuresCellMap) \
+ V(MetaMap) \
+ V(MinusInfinityValue) \
+ V(MinusZeroValue) \
V(ModuleContextMap) \
- V(EvalContextMap) \
+ V(ModuleInfoMap) \
+ V(MutableHeapNumberMap) \
+ V(NanValue) \
+ V(NativeContextMap) \
+ V(NoClosuresCellMap) \
+ V(NullMap) \
+ V(NullValue) \
+ V(OneClosureCellMap) \
+ V(OnePointerFillerMap) \
+ V(OptimizedOut) \
+ V(OrderedHashTableMap) \
+ V(ScopeInfoMap) \
V(ScriptContextMap) \
- V(UndefinedMap) \
+ V(SharedFunctionInfoMap) \
+ V(SloppyArgumentsElementsMap) \
+ V(SpeciesProtector) \
+ V(StaleRegister) \
+ V(StringLengthProtector) \
+ V(SymbolMap) \
+ V(TerminationException) \
V(TheHoleMap) \
- V(NullMap) \
- V(BooleanMap) \
+ V(TheHoleValue) \
+ V(TransitionArrayMap) \
+ V(TrueValue) \
+ V(TwoPointerFillerMap) \
+ V(UndefinedCell) \
+ V(UndefinedMap) \
+ V(UndefinedValue) \
V(UninitializedMap) \
- V(ArgumentsMarkerMap) \
- V(JSMessageObjectMap) \
- V(ForeignMap) \
- V(NoClosuresCellMap) \
- V(OneClosureCellMap) \
- V(ManyClosuresCellMap) \
- V(NanValue) \
- V(InfinityValue) \
- V(MinusZeroValue) \
- V(MinusInfinityValue) \
- V(EmptyWeakCell) \
- V(empty_string) \
+ V(UninitializedValue) \
+ V(WeakCellMap) \
+ V(WithContextMap) \
PRIVATE_SYMBOL_LIST(V)
// Forward declarations.
@@ -328,6 +358,7 @@ class ObjectIterator;
class ObjectStats;
class Page;
class PagedSpace;
+class RootVisitor;
class Scavenger;
class ScavengeJob;
class Space;
@@ -403,9 +434,8 @@ class PromotionQueue {
inline void SetNewLimit(Address limit);
inline bool IsBelowPromotionQueue(Address to_space_top);
- inline void insert(HeapObject* target, int32_t size, bool was_marked_black);
- inline void remove(HeapObject** target, int32_t* size,
- bool* was_marked_black);
+ inline void insert(HeapObject* target, int32_t size);
+ inline void remove(HeapObject** target, int32_t* size);
bool is_empty() {
return (front_ == rear_) &&
@@ -414,12 +444,10 @@ class PromotionQueue {
private:
struct Entry {
- Entry(HeapObject* obj, int32_t size, bool was_marked_black)
- : obj_(obj), size_(size), was_marked_black_(was_marked_black) {}
+ Entry(HeapObject* obj, int32_t size) : obj_(obj), size_(size) {}
HeapObject* obj_;
- int32_t size_ : 31;
- bool was_marked_black_ : 1;
+ int32_t size_;
};
inline Page* GetHeadPage();
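The insert/remove pair now carries only the object and its size: the was_marked_black flag is gone, and with it the 31-bit packing of size_. A minimal sketch of the consumer side after this change, written as it might appear inside a Heap member such as DoScavenge (the promotion_queue() accessor is assumed; exact control flow is elided):

    // Drain promoted objects; the black-marking flag no longer travels with them.
    while (!promotion_queue()->is_empty()) {
      HeapObject* target = nullptr;
      int32_t size = 0;
      promotion_queue()->remove(&target, &size);
      // Matches the updated two-argument signature declared later in this header.
      IterateAndScavengePromotedObject(target, size);
    }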
@@ -562,19 +590,9 @@ class Heap {
enum UpdateAllocationSiteMode { kGlobal, kCached };
- // Taking this lock prevents the GC from entering a phase that relocates
+ // Taking this mutex prevents the GC from entering a phase that relocates
// object references.
- class RelocationLock {
- public:
- explicit RelocationLock(Heap* heap) : heap_(heap) {
- heap_->relocation_mutex_.Lock();
- }
-
- ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }
-
- private:
- Heap* heap_;
- };
+ base::Mutex* relocation_mutex() { return &relocation_mutex_; }
// Support for partial snapshots. After calling this we have a linear
// space to write objects in each space.
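With the RAII RelocationLock class removed, callers lock the exposed mutex directly. A minimal sketch of the replacement pattern, assuming V8's base::LockGuard helper from src/base/platform/mutex.h:

    // Hold the relocation mutex while reading references the GC could relocate.
    base::LockGuard<base::Mutex> relocation_lock(heap->relocation_mutex());
    // ... access heap object references safely here ...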
@@ -583,7 +601,7 @@ class Heap {
Address start;
Address end;
};
- typedef List<Chunk> Reservation;
+ typedef std::vector<Chunk> Reservation;
static const int kInitalOldGenerationLimitFactor = 2;
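Reservation moving from List<Chunk> to std::vector<Chunk> (which the new <vector> include at the top of the file supports) changes how consumers walk it. A hedged before/after sketch; Process() is a placeholder, not a real V8 function:

    // Before: V8's List<T> interface.
    //   for (int i = 0; i < reservation.length(); i++) Process(reservation[i]);
    // After: standard library iteration.
    for (const Heap::Chunk& chunk : reservation) {
      Process(chunk);
    }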
@@ -609,16 +627,6 @@ class Heap {
static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
static const int kMaxOldSpaceSizeHugeMemoryDevice = 1024 * kPointerMultiplier;
- // The executable size has to be a multiple of Page::kPageSize.
- // Sizes are in MB.
- static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
- static const int kMaxExecutableSizeMediumMemoryDevice =
- 192 * kPointerMultiplier;
- static const int kMaxExecutableSizeHighMemoryDevice =
- 256 * kPointerMultiplier;
- static const int kMaxExecutableSizeHugeMemoryDevice =
- 256 * kPointerMultiplier;
-
static const int kTraceRingBufferSize = 512;
static const int kStacktraceBufferSize = 512;
@@ -666,7 +674,7 @@ class Heap {
static void FatalProcessOutOfMemory(const char* location,
bool is_heap_oom = false);
- static bool RootIsImmortalImmovable(int root_index);
+ V8_EXPORT_PRIVATE static bool RootIsImmortalImmovable(int root_index);
// Checks whether the space is valid.
static bool IsValidAllocationSpace(AllocationSpace space);
@@ -786,9 +794,7 @@ class Heap {
Object* encountered_weak_collections() const {
return encountered_weak_collections_;
}
- void VisitEncounteredWeakCollections(ObjectVisitor* visitor) {
- visitor->VisitPointer(&encountered_weak_collections_);
- }
+ void IterateEncounteredWeakCollections(RootVisitor* visitor);
void set_encountered_weak_cells(Object* weak_cell) {
encountered_weak_cells_ = weak_cell;
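The inline ObjectVisitor helper becomes an out-of-line RootVisitor entry point. One plausible shape for the definition in heap.cc, assuming RootVisitor offers a single-pointer VisitRootPointer overload and that the Root enum in the newly included src/visitors.h has an id for weak collections (both are assumptions, not confirmed by this diff):

    void Heap::IterateEncounteredWeakCollections(RootVisitor* visitor) {
      // Root::kWeakCollections is assumed; use whatever id visitors.h defines.
      visitor->VisitRootPointer(Root::kWeakCollections,
                                &encountered_weak_collections_);
    }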
@@ -987,16 +993,13 @@ class Heap {
// Configure heap size in MB before setup. Return false if the heap has been
// set up already.
bool ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
- size_t max_executable_size, size_t code_range_size);
+ size_t code_range_size);
bool ConfigureHeapDefault();
// Prepares the heap, setting up memory areas that are needed in the isolate
// without actually creating any objects.
bool SetUp();
- // (Re-)Initialize hash seed from flag or RNG.
- void InitializeHashSeed();
-
// Bootstraps the object heap with the core set of objects required to run.
// Returns whether it succeeded.
bool CreateHeapObjects();
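With the executable-size limit gone (along with the kMaxExecutableSize* constants removed above), callers drop that argument. An illustrative before/after with placeholder size variables, all in MB as the comment states:

    // Before:
    //   heap->ConfigureHeap(semi_space_mb, old_space_mb, executable_mb, code_range_mb);
    // After:
    heap->ConfigureHeap(semi_space_mb, old_space_mb, code_range_mb);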
@@ -1179,18 +1182,17 @@ class Heap {
// ===========================================================================
// Iterates over all roots in the heap.
- void IterateRoots(ObjectVisitor* v, VisitMode mode);
+ void IterateRoots(RootVisitor* v, VisitMode mode);
// Iterates over all strong roots in the heap.
- void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
+ void IterateStrongRoots(RootVisitor* v, VisitMode mode);
// Iterates over entries in the smi roots list. Only interesting to the
// serializer/deserializer, since GC does not care about smis.
- void IterateSmiRoots(ObjectVisitor* v);
+ void IterateSmiRoots(RootVisitor* v);
// Iterates over all the other roots in the heap.
- void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
+ void IterateWeakRoots(RootVisitor* v, VisitMode mode);
// Iterate pointers of promoted objects.
- void IterateAndScavengePromotedObject(HeapObject* target, int size,
- bool was_marked_black);
+ void IterateAndScavengePromotedObject(HeapObject* target, int size);
// ===========================================================================
// Store buffer API. =========================================================
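Root iteration now goes through the dedicated RootVisitor interface instead of ObjectVisitor. A minimal sketch of a root-counting visitor; the class name is made up, and VISIT_ALL is the pre-existing VisitMode value:

    class CountingRootVisitor : public RootVisitor {
     public:
      void VisitRootPointers(Root root, Object** start, Object** end) override {
        count_ += static_cast<int>(end - start);
      }
      int count() const { return count_; }

     private:
      int count_ = 0;
    };

    CountingRootVisitor counting_visitor;
    heap->IterateRoots(&counting_visitor, VISIT_ALL);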
@@ -1217,7 +1219,9 @@ class Heap {
// Start incremental marking and ensure that idle time handler can perform
// incremental steps.
- void StartIdleIncrementalMarking(GarbageCollectionReason gc_reason);
+ void StartIdleIncrementalMarking(
+ GarbageCollectionReason gc_reason,
+ GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
// Starts incremental marking assuming incremental marking is currently
// stopped.
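The new defaulted parameter lets idle-time callers forward GC callback flags without a second overload. For example (GarbageCollectionReason::kIdleTask is assumed to be the reason used on this path; kGCCallbackFlagCollectAllAvailableGarbage is the public flag from include/v8.h):

    // Old behaviour, relying on the default flags:
    heap->StartIdleIncrementalMarking(GarbageCollectionReason::kIdleTask);
    // Passing explicit flags through to the GC callbacks:
    heap->StartIdleIncrementalMarking(GarbageCollectionReason::kIdleTask,
                                      kGCCallbackFlagCollectAllAvailableGarbage);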
@@ -1231,9 +1235,6 @@ class Heap {
void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);
- bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms,
- GarbageCollectionReason gc_reason);
-
void RegisterDeserializedObjectsForBlackAllocation(
Reservation* reservations, List<HeapObject*>* large_objects);
@@ -1247,11 +1248,9 @@ class Heap {
// The runtime uses this function to notify potentially unsafe object layout
// changes that require special synchronization with the concurrent marker.
- // A layout change is unsafe if
- // - it removes a tagged in-object field.
- // - it replaces a tagged in-objects field with an untagged in-object field.
void NotifyObjectLayoutChange(HeapObject* object,
const DisallowHeapAllocation&);
+
#ifdef VERIFY_HEAP
// This function checks that either
// - the map transition is safe,
@@ -1345,7 +1344,6 @@ class Heap {
size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
size_t MaxOldGenerationSize() { return max_old_generation_size_; }
- size_t MaxExecutableSize() { return max_executable_size_; }
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
// more spaces are needed until it reaches the limit.
@@ -1516,6 +1514,7 @@ class Heap {
#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
void Verify();
+ void VerifyRememberedSetFor(HeapObject* object);
#endif
#ifdef DEBUG
@@ -1544,8 +1543,8 @@ class Heap {
// Registers an external string.
inline void AddString(String* string);
- inline void IterateAll(ObjectVisitor* v);
- inline void IterateNewSpaceStrings(ObjectVisitor* v);
+ inline void IterateAll(RootVisitor* v);
+ inline void IterateNewSpaceStrings(RootVisitor* v);
inline void PromoteAllNewSpaceStrings();
// Restores internal invariant and gets rid of collected strings. Must be
@@ -1828,7 +1827,7 @@ class Heap {
void Scavenge();
void EvacuateYoungGeneration();
- Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
+ Address DoScavenge(Address new_space_front);
void UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
@@ -2164,7 +2163,6 @@ class Heap {
size_t initial_max_old_generation_size_;
size_t initial_old_generation_size_;
bool old_generation_size_configured_;
- size_t max_executable_size_;
size_t maximum_committed_;
// For keeping track of how much data has survived
@@ -2391,6 +2389,7 @@ class Heap {
friend class IncrementalMarkingJob;
friend class LargeObjectSpace;
friend class MarkCompactCollector;
+ friend class MarkCompactCollectorBase;
friend class MinorMarkCompactCollector;
friend class MarkCompactMarkingVisitor;
friend class NewSpace;
@@ -2462,16 +2461,23 @@ class AlwaysAllocateScope {
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
-class VerifyPointersVisitor : public ObjectVisitor {
+class VerifyPointersVisitor : public ObjectVisitor, public RootVisitor {
public:
- inline void VisitPointers(Object** start, Object** end) override;
+ inline void VisitPointers(HeapObject* host, Object** start,
+ Object** end) override;
+ inline void VisitRootPointers(Root root, Object** start,
+ Object** end) override;
+
+ private:
+ inline void VerifyPointers(Object** start, Object** end);
};
// Verify that all objects are Smis.
-class VerifySmisVisitor : public ObjectVisitor {
+class VerifySmisVisitor : public RootVisitor {
public:
- inline void VisitPointers(Object** start, Object** end) override;
+ inline void VisitRootPointers(Root root, Object** start,
+ Object** end) override;
};
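Since VerifyPointersVisitor now derives from both ObjectVisitor and RootVisitor (sharing the private VerifyPointers helper), one instance can verify roots and object bodies alike. A hedged usage sketch; some_object is a placeholder, and HeapObject::Iterate taking an ObjectVisitor* is the pre-existing API assumed here:

    VerifyPointersVisitor visitor;
    heap->IterateRoots(&visitor, VISIT_ONLY_STRONG);  // RootVisitor side
    some_object->Iterate(&visitor);                   // ObjectVisitor side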
@@ -2630,6 +2636,8 @@ class AllocationObserver {
DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
};
+V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space);
+
} // namespace internal
} // namespace v8