aboutsummaryrefslogtreecommitdiff
path: root/deps/v8/src/heap/heap.h
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/heap/heap.h')
-rw-r--r--deps/v8/src/heap/heap.h214
1 file changed, 98 insertions, 116 deletions
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index a242bd80d1..81f2b0dd8c 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -44,29 +44,20 @@ class HeapTester;
class TestMemoryAllocatorScope;
} // namespace heap
-class ObjectBoilerplateDescription;
-class BytecodeArray;
-class CodeDataContainer;
-class DeoptimizationData;
-class HandlerTable;
class IncrementalMarking;
class JSArrayBuffer;
-class ExternalString;
using v8::MemoryPressureLevel;
class AllocationObserver;
class ArrayBufferCollector;
-class ArrayBufferTracker;
class CodeLargeObjectSpace;
class ConcurrentMarking;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
-class HeapController;
class HeapObjectAllocationTracker;
class HeapObjectsFilter;
class HeapStats;
-class HistogramTimer;
class Isolate;
class JSFinalizationGroup;
class LocalEmbedderHeapTracer;
@@ -86,7 +77,6 @@ class Space;
class StoreBuffer;
class StressScavengeObserver;
class TimedHistogram;
-class TracePossibleWrapperReporter;
class WeakObjectRetainer;
enum ArrayStorageAllocationMode {
@@ -243,19 +233,24 @@ class Heap {
// should instead adapt it's heap size based on available physical memory.
static const int kPointerMultiplier = 1;
#else
- // TODO(ishell): kSystePointerMultiplier?
- static const int kPointerMultiplier = i::kSystemPointerSize / 4;
+ static const int kPointerMultiplier = i::kTaggedSize / 4;
#endif
static const size_t kMaxInitialOldGenerationSize =
256 * MB * kPointerMultiplier;
- // Semi-space size needs to be a multiple of page size.
- static const size_t kMinSemiSpaceSizeInKB = 512 * kPointerMultiplier;
- static const size_t kMaxSemiSpaceSizeInKB = 8192 * kPointerMultiplier;
+ // These constants control heap configuration based on the physical memory.
+ static constexpr size_t kPhysicalMemoryToOldGenerationRatio = 4;
+ static constexpr size_t kOldGenerationToSemiSpaceRatio = 128;
+ static constexpr size_t kOldGenerationToSemiSpaceRatioLowMemory = 256;
+ static constexpr size_t kOldGenerationLowMemory =
+ 128 * MB * kPointerMultiplier;
+ static constexpr size_t kNewLargeObjectSpaceToSemiSpaceRatio = 1;
+ static constexpr size_t kMinSemiSpaceSize = 512 * KB * kPointerMultiplier;
+ static constexpr size_t kMaxSemiSpaceSize = 8192 * KB * kPointerMultiplier;
- STATIC_ASSERT(kMinSemiSpaceSizeInKB* KB % (1 << kPageSizeBits) == 0);
- STATIC_ASSERT(kMaxSemiSpaceSizeInKB* KB % (1 << kPageSizeBits) == 0);
+ STATIC_ASSERT(kMinSemiSpaceSize % (1 << kPageSizeBits) == 0);
+ STATIC_ASSERT(kMaxSemiSpaceSize % (1 << kPageSizeBits) == 0);
static const int kTraceRingBufferSize = 512;
static const int kStacktraceBufferSize = 512;
@@ -365,8 +360,8 @@ class Heap {
V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject object,
Address slot,
HeapObject value);
- V8_EXPORT_PRIVATE void RecordEphemeronKeyWrite(EphemeronHashTable table,
- Address key_slot);
+ V8_EXPORT_PRIVATE inline void RecordEphemeronKeyWrite(
+ EphemeronHashTable table, Address key_slot);
V8_EXPORT_PRIVATE static void EphemeronKeyWriteBarrierFromCode(
Address raw_object, Address address, Isolate* isolate);
V8_EXPORT_PRIVATE static void GenerationalBarrierForCodeSlow(
@@ -477,6 +472,12 @@ class Heap {
// Print short heap statistics.
void PrintShortHeapStatistics();
+ // Print statistics of freelists of old_space:
+ // with FLAG_trace_gc_freelists: summary of each FreeListCategory.
+ // with FLAG_trace_gc_freelists_verbose: also prints the statistics of each
+ // FreeListCategory of each page.
+ void PrintFreeListsStats();
+
// Dump heap statistics in JSON format.
void DumpJSONHeapStatistics(std::stringstream& stream);
@@ -571,7 +572,7 @@ class Heap {
// For post mortem debugging.
void RememberUnmappedPage(Address page, bool compacted);
- int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }
+ int64_t external_memory_hard_limit() { return max_old_generation_size_ / 2; }
V8_INLINE int64_t external_memory();
V8_INLINE void update_external_memory(int64_t delta);
@@ -619,13 +620,7 @@ class Heap {
// Initialization. ===========================================================
// ===========================================================================
- // Configure heap sizes
- // max_semi_space_size_in_kb: maximum semi-space size in KB
- // max_old_generation_size_in_mb: maximum old generation size in MB
- // code_range_size_in_mb: code range size in MB
- void ConfigureHeap(size_t max_semi_space_size_in_kb,
- size_t max_old_generation_size_in_mb,
- size_t code_range_size_in_mb);
+ void ConfigureHeap(const v8::ResourceConstraints& constraints);
void ConfigureHeapDefault();
// Prepares the heap, setting up for deserialization.
@@ -681,8 +676,6 @@ class Heap {
// Getters to other components. ==============================================
// ===========================================================================
- ReadOnlyHeap* read_only_heap() const { return read_only_heap_; }
-
GCTracer* tracer() { return tracer_.get(); }
MemoryAllocator* memory_allocator() { return memory_allocator_.get(); }
@@ -748,8 +741,8 @@ class Heap {
std::function<void(HeapObject object, ObjectSlot slot, Object target)>
gc_notify_updated_slot);
- V8_EXPORT_PRIVATE void AddKeepDuringJobTarget(Handle<JSReceiver> target);
- void ClearKeepDuringJobSet();
+ V8_EXPORT_PRIVATE void KeepDuringJob(Handle<JSReceiver> target);
+ void ClearKeptObjects();
// ===========================================================================
// Inline allocation. ========================================================
@@ -986,8 +979,9 @@ class Heap {
// Returns whether the object resides in old space.
inline bool InOldSpace(Object object);
- // Checks whether an address/object in the heap (including auxiliary
- // area and unused area).
+ // Checks whether an address/object is in the non-read-only heap (including
+ // auxiliary area and unused area). Use IsValidHeapObject if checking both
+ // heaps is required.
V8_EXPORT_PRIVATE bool Contains(HeapObject value);
// Checks whether an address/object in a space.
@@ -998,7 +992,7 @@ class Heap {
// with off-heap Addresses.
bool InSpaceSlow(Address addr, AllocationSpace space);
- static inline Heap* FromWritableHeapObject(const HeapObject obj);
+ static inline Heap* FromWritableHeapObject(HeapObject obj);
// ===========================================================================
// Object statistics tracking. ===============================================
@@ -1042,23 +1036,21 @@ class Heap {
size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
size_t MaxOldGenerationSize() { return max_old_generation_size_; }
- V8_EXPORT_PRIVATE static size_t ComputeMaxOldGenerationSize(
+ V8_EXPORT_PRIVATE static size_t HeapSizeFromPhysicalMemory(
+ uint64_t physical_memory);
+ V8_EXPORT_PRIVATE static void GenerationSizesFromHeapSize(
+ size_t heap_size, size_t* young_generation_size,
+ size_t* old_generation_size);
+ V8_EXPORT_PRIVATE static size_t YoungGenerationSizeFromOldGenerationSize(
+ size_t old_generation_size);
+ V8_EXPORT_PRIVATE static size_t YoungGenerationSizeFromSemiSpaceSize(
+ size_t semi_space_size);
+ V8_EXPORT_PRIVATE static size_t SemiSpaceSizeFromYoungGenerationSize(
+ size_t young_generation_size);
+ V8_EXPORT_PRIVATE static size_t MinYoungGenerationSize();
+ V8_EXPORT_PRIVATE static size_t MinOldGenerationSize();
+ V8_EXPORT_PRIVATE static size_t MaxOldGenerationSize(
uint64_t physical_memory);
-
- static size_t ComputeMaxSemiSpaceSize(uint64_t physical_memory) {
- const uint64_t min_physical_memory = 512 * MB;
- const uint64_t max_physical_memory = 3 * static_cast<uint64_t>(GB);
-
- uint64_t capped_physical_memory =
- Max(Min(physical_memory, max_physical_memory), min_physical_memory);
- // linearly scale max semi-space size: (X-A)/(B-A)*(D-C)+C
- size_t semi_space_size_in_kb =
- static_cast<size_t>(((capped_physical_memory - min_physical_memory) *
- (kMaxSemiSpaceSizeInKB - kMinSemiSpaceSizeInKB)) /
- (max_physical_memory - min_physical_memory) +
- kMinSemiSpaceSizeInKB);
- return RoundUp(semi_space_size_in_kb, (1 << kPageSizeBits) / KB);
- }
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
// more spaces are needed until it reaches the limit.
@@ -1185,6 +1177,11 @@ class Heap {
V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects();
+ // We allow incremental marking to overshoot the V8 and global allocation
+ // limit for performace reasons. If the overshoot is too large then we are
+ // more eager to finalize incremental marking.
+ bool AllocationLimitOvershotByLargeMargin();
+
// ===========================================================================
// Prologue/epilogue callback methods.========================================
// ===========================================================================
@@ -1655,26 +1652,6 @@ class Heap {
OldGenerationObjectsAndPromotedExternalMemorySize());
}
- // We allow incremental marking to overshoot the allocation limit for
- // performace reasons. If the overshoot is too large then we are more
- // eager to finalize incremental marking.
- inline bool AllocationLimitOvershotByLargeMargin() {
- // This guards against too eager finalization in small heaps.
- // The number is chosen based on v8.browsing_mobile on Nexus 7v2.
- size_t kMarginForSmallHeaps = 32u * MB;
- if (old_generation_allocation_limit_ >=
- OldGenerationObjectsAndPromotedExternalMemorySize())
- return false;
- uint64_t overshoot = OldGenerationObjectsAndPromotedExternalMemorySize() -
- old_generation_allocation_limit_;
- // Overshoot margin is 50% of allocation limit or half-way to the max heap
- // with special handling of small heaps.
- uint64_t margin =
- Min(Max(old_generation_allocation_limit_ / 2, kMarginForSmallHeaps),
- (max_old_generation_size_ - old_generation_allocation_limit_) / 2);
- return overshoot >= margin;
- }
-
void UpdateTotalGCTime(double duration);
bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
@@ -1708,6 +1685,8 @@ class Heap {
return old_generation_allocation_limit_;
}
+ size_t global_allocation_limit() const { return global_allocation_limit_; }
+
bool always_allocate() { return always_allocate_scope_count_ != 0; }
V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size);
@@ -1816,18 +1795,25 @@ class Heap {
// more expedient to get at the isolate directly from within Heap methods.
Isolate* isolate_ = nullptr;
+ // These limits are initialized in Heap::ConfigureHeap based on the resource
+ // constraints and flags.
size_t code_range_size_ = 0;
- size_t max_semi_space_size_ = 8 * (kSystemPointerSize / 4) * MB;
- size_t initial_semispace_size_ = kMinSemiSpaceSizeInKB * KB;
- size_t max_old_generation_size_ = 700ul * (kSystemPointerSize / 4) * MB;
- // TODO(mlippautz): Clarify whether this should be take some embedder
+ size_t max_semi_space_size_ = 0;
+ size_t initial_semispace_size_ = 0;
+ // Full garbage collections can be skipped if the old generation size
+ // is below this threshold.
+ size_t min_old_generation_size_ = 0;
+ // If the old generation size exceeds this limit, then V8 will
+ // crash with out-of-memory error.
+ size_t max_old_generation_size_ = 0;
+ // TODO(mlippautz): Clarify whether this should take some embedder
// configurable limit into account.
- size_t max_global_memory_size_ =
- Min(static_cast<uint64_t>(std::numeric_limits<size_t>::max()),
- static_cast<uint64_t>(max_old_generation_size_) * 2);
- size_t initial_max_old_generation_size_;
- size_t initial_max_old_generation_size_threshold_;
- size_t initial_old_generation_size_;
+ size_t min_global_memory_size_ = 0;
+ size_t max_global_memory_size_ = 0;
+
+ size_t initial_max_old_generation_size_ = 0;
+ size_t initial_max_old_generation_size_threshold_ = 0;
+ size_t initial_old_generation_size_ = 0;
bool old_generation_size_configured_ = false;
size_t maximum_committed_ = 0;
size_t old_generation_capacity_after_bootstrap_ = 0;
@@ -1861,8 +1847,6 @@ class Heap {
// and after context disposal.
int number_of_disposed_maps_ = 0;
- ReadOnlyHeap* read_only_heap_ = nullptr;
-
NewSpace* new_space_ = nullptr;
OldSpace* old_space_ = nullptr;
CodeSpace* code_space_ = nullptr;
@@ -1932,8 +1916,8 @@ class Heap {
// is checked when we have already decided to do a GC to help determine
// which collector to invoke, before expanding a paged space in the old
// generation and on every allocation in large object space.
- size_t old_generation_allocation_limit_;
- size_t global_allocation_limit_;
+ size_t old_generation_allocation_limit_ = 0;
+ size_t global_allocation_limit_ = 0;
// Indicates that inline bump-pointer allocation has been globally disabled
// for all spaces. This is used to disable allocations in generated code.
@@ -2034,9 +2018,10 @@ class Heap {
// Currently set GC callback flags that are used to pass information between
// the embedder and V8's GC.
- GCCallbackFlags current_gc_callback_flags_;
+ GCCallbackFlags current_gc_callback_flags_ =
+ GCCallbackFlags::kNoGCCallbackFlags;
- bool is_current_gc_forced_;
+ bool is_current_gc_forced_ = false;
ExternalStringTable external_string_table_;
@@ -2082,7 +2067,7 @@ class Heap {
friend class ConcurrentMarking;
friend class GCCallbacksScope;
friend class GCTracer;
- friend class HeapIterator;
+ friend class HeapObjectIterator;
friend class IdleScavengeObserver;
friend class IncrementalMarking;
friend class IncrementalMarkingJob;
@@ -2115,9 +2100,6 @@ class Heap {
// Used in cctest.
friend class heap::HeapTester;
- FRIEND_TEST(HeapControllerTest, OldGenerationAllocationLimit);
- FRIEND_TEST(HeapTest, ExternalLimitDefault);
- FRIEND_TEST(HeapTest, ExternalLimitStaysAboveDefaultForExplicitHandling);
DISALLOW_COPY_AND_ASSIGN(Heap);
};
@@ -2245,56 +2227,56 @@ class VerifySmisVisitor : public RootVisitor {
};
// Space iterator for iterating over all the paged spaces of the heap: Map
-// space, old space, code space and optionally read only space. Returns each
-// space in turn, and null when it is done.
-class V8_EXPORT_PRIVATE PagedSpaces {
+// space, old space and code space. Returns each space in turn, and null when it
+// is done.
+class V8_EXPORT_PRIVATE PagedSpaceIterator {
public:
- explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
- PagedSpace* next();
+ explicit PagedSpaceIterator(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
+ PagedSpace* Next();
private:
Heap* heap_;
int counter_;
};
-
-class SpaceIterator : public Malloced {
+class V8_EXPORT_PRIVATE SpaceIterator : public Malloced {
public:
explicit SpaceIterator(Heap* heap);
virtual ~SpaceIterator();
- bool has_next();
- Space* next();
+ bool HasNext();
+ Space* Next();
private:
Heap* heap_;
int current_space_; // from enum AllocationSpace.
};
-// A HeapIterator provides iteration over the entire non-read-only heap. It
-// aggregates the specific iterators for the different spaces as these can only
-// iterate over one space only.
+// A HeapObjectIterator provides iteration over the entire non-read-only heap.
+// It aggregates the specific iterators for the different spaces as these can
+// only iterate over one space only.
//
-// HeapIterator ensures there is no allocation during its lifetime (using an
-// embedded DisallowHeapAllocation instance).
+// HeapObjectIterator ensures there is no allocation during its lifetime (using
+// an embedded DisallowHeapAllocation instance).
//
-// HeapIterator can skip free list nodes (that is, de-allocated heap objects
-// that still remain in the heap). As implementation of free nodes filtering
-// uses GC marks, it can't be used during MS/MC GC phases. Also, it is forbidden
-// to interrupt iteration in this mode, as this will leave heap objects marked
-// (and thus, unusable).
+// HeapObjectIterator can skip free list nodes (that is, de-allocated heap
+// objects that still remain in the heap). As implementation of free nodes
+// filtering uses GC marks, it can't be used during MS/MC GC phases. Also, it is
+// forbidden to interrupt iteration in this mode, as this will leave heap
+// objects marked (and thus, unusable).
//
-// See ReadOnlyHeapIterator if you need to iterate over read-only space objects,
-// or CombinedHeapIterator if you need to iterate over both heaps.
-class V8_EXPORT_PRIVATE HeapIterator {
+// See ReadOnlyHeapObjectIterator if you need to iterate over read-only space
+// objects, or CombinedHeapObjectIterator if you need to iterate over both
+// heaps.
+class V8_EXPORT_PRIVATE HeapObjectIterator {
public:
enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
- explicit HeapIterator(Heap* heap,
- HeapObjectsFiltering filtering = kNoFiltering);
- ~HeapIterator();
+ explicit HeapObjectIterator(Heap* heap,
+ HeapObjectsFiltering filtering = kNoFiltering);
+ ~HeapObjectIterator();
- HeapObject next();
+ HeapObject Next();
private:
HeapObject NextObject();