Diffstat (limited to 'deps/v8/src/heap/heap-inl.h')
-rw-r--r--  deps/v8/src/heap/heap-inl.h  153
1 file changed, 62 insertions(+), 91 deletions(-)
diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h
index d617d9f9ac..b143a33af5 100644
--- a/deps/v8/src/heap/heap-inl.h
+++ b/deps/v8/src/heap/heap-inl.h
@@ -34,17 +34,12 @@
#include "src/objects/property-cell.h"
#include "src/objects/scope-info.h"
#include "src/objects/script-inl.h"
+#include "src/objects/slots-inl.h"
#include "src/objects/struct-inl.h"
#include "src/profiler/heap-profiler.h"
#include "src/string-hasher.h"
#include "src/zone/zone-list-inl.h"
-// The following header includes the write barrier essentials that can also be
-// used stand-alone without including heap-inl.h.
-// TODO(mlippautz): Remove once users of object-macros.h include this file on
-// their own.
-#include "src/heap/heap-write-barrier-inl.h"
-
namespace v8 {
namespace internal {
@@ -124,6 +119,11 @@ void Heap::SetMessageListeners(TemplateList value) {
roots_table()[RootIndex::kMessageListeners] = value->ptr();
}
+void Heap::SetPendingOptimizeForTestBytecode(Object bytecode) {
+ DCHECK(bytecode->IsBytecodeArray() || bytecode->IsUndefined(isolate()));
+ roots_table()[RootIndex::kPendingOptimizeForTestBytecode] = bytecode->ptr();
+}
+
PagedSpace* Heap::paged_space(int idx) {
DCHECK_NE(idx, LO_SPACE);
DCHECK_NE(idx, NEW_SPACE);
@@ -158,7 +158,7 @@ size_t Heap::NewSpaceAllocationCounter() {
return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
}
-AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
+AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
AllocationAlignment alignment) {
DCHECK(AllowHandleAllocation::IsAllowed());
DCHECK(AllowHeapAllocation::IsAllowed());
@@ -166,7 +166,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
if (FLAG_random_gc_interval > 0 || FLAG_gc_interval >= 0) {
if (!always_allocate() && Heap::allocation_timeout_-- <= 0) {
- return AllocationResult::Retry(space);
+ return AllocationResult::Retry();
}
}
#endif
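As I read this block, it is the allocation-failure injection hook compiled in under V8_ENABLE_ALLOCATION_TIMEOUT: each allocation decrements a counter driven by the gc-interval flags, and once it runs out the allocation reports a retry so the caller takes its collect-and-retry path. The only change here is that Retry() no longer names a space. A standalone sketch (not V8 code, helper names hypothetical) of that pattern:

```cpp
#include <cstddef>
#include <cstdlib>

// Hypothetical stand-in for AllocationResult: a null object means "retry".
struct AllocationResultSketch {
  void* object = nullptr;
  static AllocationResultSketch Retry() { return {}; }  // no space argument
  bool IsRetry() const { return object == nullptr; }
};

// Fail the allocation once the injected timeout counter reaches zero, forcing
// the caller through its GC-and-retry path.
AllocationResultSketch AllocateWithTimeout(int& allocation_timeout, size_t size) {
  if (allocation_timeout-- <= 0) return AllocationResultSketch::Retry();
  AllocationResultSketch result;
  result.object = std::malloc(size);
  return result;
}
```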
@@ -178,44 +178,36 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
HeapObject object;
AllocationResult allocation;
- if (NEW_SPACE == space) {
+
+ if (AllocationType::kYoung == type) {
if (large_object) {
- // TODO(hpayer): Implement a LO tenuring strategy.
- space = FLAG_young_generation_large_objects ? NEW_LO_SPACE : LO_SPACE;
+ if (FLAG_young_generation_large_objects) {
+ allocation = new_lo_space_->AllocateRaw(size_in_bytes);
+ } else {
+        // If young generation large objects are disabled, we have to tenure the
+ // allocation and violate the given allocation type. This could be
+ // dangerous. We may want to remove FLAG_young_generation_large_objects
+ // and avoid patching.
+ allocation = lo_space_->AllocateRaw(size_in_bytes);
+ }
} else {
allocation = new_space_->AllocateRaw(size_in_bytes, alignment);
- if (allocation.To(&object)) {
- OnAllocationEvent(object, size_in_bytes);
- }
- return allocation;
}
- }
-
- // Here we only allocate in the old generation.
- if (OLD_SPACE == space) {
+ } else if (AllocationType::kOld == type) {
if (large_object) {
allocation = lo_space_->AllocateRaw(size_in_bytes);
} else {
allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
}
- } else if (CODE_SPACE == space) {
+ } else if (AllocationType::kCode == type) {
if (size_in_bytes <= code_space()->AreaSize() && !large_object) {
allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
} else {
allocation = code_lo_space_->AllocateRaw(size_in_bytes);
}
- } else if (LO_SPACE == space) {
- DCHECK(large_object);
- allocation = lo_space_->AllocateRaw(size_in_bytes);
- } else if (NEW_LO_SPACE == space) {
- DCHECK(FLAG_young_generation_large_objects);
- allocation = new_lo_space_->AllocateRaw(size_in_bytes);
- } else if (CODE_LO_SPACE == space) {
- DCHECK(large_object);
- allocation = code_lo_space_->AllocateRaw(size_in_bytes);
- } else if (MAP_SPACE == space) {
+ } else if (AllocationType::kMap == type) {
allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
- } else if (RO_SPACE == space) {
+ } else if (AllocationType::kReadOnly == type) {
#ifdef V8_USE_SNAPSHOT
DCHECK(isolate_->serializer_enabled());
#endif
@@ -223,12 +215,11 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
DCHECK(CanAllocateInReadOnlySpace());
allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
} else {
- // NEW_SPACE is not allowed here.
UNREACHABLE();
}
if (allocation.To(&object)) {
- if (space == CODE_SPACE) {
+ if (AllocationType::kCode == type) {
// Unprotect the memory chunk of the object if it was not unprotected
// already.
UnprotectAndRegisterMemoryChunk(object);
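To make the rewritten control flow easier to scan: callers now pass an AllocationType (intent) rather than an AllocationSpace, and AllocateRaw maps that intent onto a concrete space, including the large-object fallbacks. A deliberately simplified standalone sketch (not V8 code; the threshold and string labels are placeholders):

```cpp
#include <cstddef>
#include <cstdio>

enum class AllocationType { kYoung, kOld, kCode, kMap, kReadOnly };

// Placeholder: which space a request of this type/size would land in.
const char* ChooseSpace(AllocationType type, size_t size, size_t large_threshold) {
  const bool large_object = size > large_threshold;
  switch (type) {
    case AllocationType::kYoung:
      return large_object ? "new_lo_space (or lo_space as fallback)" : "new_space";
    case AllocationType::kOld:
      return large_object ? "lo_space" : "old_space";
    case AllocationType::kCode:
      return large_object ? "code_lo_space" : "code_space";
    case AllocationType::kMap:
      return "map_space";
    case AllocationType::kReadOnly:
      return "read_only_space";
  }
  return "unreachable";
}

int main() {
  std::printf("%s\n", ChooseSpace(AllocationType::kYoung, 64, 128 * 1024));
  std::printf("%s\n", ChooseSpace(AllocationType::kCode, 512 * 1024, 128 * 1024));
}
```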
@@ -300,19 +291,6 @@ void Heap::RegisterExternalString(String string) {
external_string_table_.AddString(string);
}
-void Heap::UpdateExternalString(String string, size_t old_payload,
- size_t new_payload) {
- DCHECK(string->IsExternalString());
- Page* page = Page::FromHeapObject(string);
-
- if (old_payload > new_payload)
- page->DecrementExternalBackingStoreBytes(
- ExternalBackingStoreType::kExternalString, old_payload - new_payload);
- else
- page->IncrementExternalBackingStoreBytes(
- ExternalBackingStoreType::kExternalString, new_payload - old_payload);
-}
-
void Heap::FinalizeExternalString(String string) {
DCHECK(string->IsExternalString());
Page* page = Page::FromHeapObject(string);
@@ -335,67 +313,65 @@ void Heap::FinalizeExternalString(String string) {
Address Heap::NewSpaceTop() { return new_space_->top(); }
-// static
-bool Heap::InNewSpace(Object object) {
+bool Heap::InYoungGeneration(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
- return object->IsHeapObject() && InNewSpace(HeapObject::cast(object));
+ return object->IsHeapObject() && InYoungGeneration(HeapObject::cast(object));
}
// static
-bool Heap::InNewSpace(MaybeObject object) {
+bool Heap::InYoungGeneration(MaybeObject object) {
HeapObject heap_object;
- return object->GetHeapObject(&heap_object) && InNewSpace(heap_object);
+ return object->GetHeapObject(&heap_object) && InYoungGeneration(heap_object);
}
// static
-bool Heap::InNewSpace(HeapObject heap_object) {
- // Inlined check from NewSpace::Contains.
- bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace();
+bool Heap::InYoungGeneration(HeapObject heap_object) {
+ bool result = MemoryChunk::FromHeapObject(heap_object)->InYoungGeneration();
#ifdef DEBUG
- // If in NEW_SPACE, then check we're either not in the middle of GC or the
- // object is in to-space.
+ // If in the young generation, then check we're either not in the middle of
+ // GC or the object is in to-space.
if (result) {
- // If the object is in NEW_SPACE, then it's not in RO_SPACE so this is safe.
+ // If the object is in the young generation, then it's not in RO_SPACE so
+ // this is safe.
Heap* heap = Heap::FromWritableHeapObject(heap_object);
- DCHECK(heap->gc_state_ != NOT_IN_GC || InToSpace(heap_object));
+ DCHECK_IMPLIES(heap->gc_state_ == NOT_IN_GC, InToPage(heap_object));
}
#endif
return result;
}
// static
-bool Heap::InFromSpace(Object object) {
+bool Heap::InFromPage(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
- return object->IsHeapObject() && InFromSpace(HeapObject::cast(object));
+ return object->IsHeapObject() && InFromPage(HeapObject::cast(object));
}
// static
-bool Heap::InFromSpace(MaybeObject object) {
+bool Heap::InFromPage(MaybeObject object) {
HeapObject heap_object;
- return object->GetHeapObject(&heap_object) && InFromSpace(heap_object);
+ return object->GetHeapObject(&heap_object) && InFromPage(heap_object);
}
// static
-bool Heap::InFromSpace(HeapObject heap_object) {
- return MemoryChunk::FromHeapObject(heap_object)
- ->IsFlagSet(Page::IN_FROM_SPACE);
+bool Heap::InFromPage(HeapObject heap_object) {
+ return MemoryChunk::FromHeapObject(heap_object)->IsFromPage();
}
// static
-bool Heap::InToSpace(Object object) {
+bool Heap::InToPage(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
- return object->IsHeapObject() && InToSpace(HeapObject::cast(object));
+ return object->IsHeapObject() && InToPage(HeapObject::cast(object));
}
// static
-bool Heap::InToSpace(MaybeObject object) {
+bool Heap::InToPage(MaybeObject object) {
HeapObject heap_object;
- return object->GetHeapObject(&heap_object) && InToSpace(heap_object);
+ return object->GetHeapObject(&heap_object) && InToPage(heap_object);
}
// static
-bool Heap::InToSpace(HeapObject heap_object) {
- return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
+bool Heap::InToPage(HeapObject heap_object) {
+ return MemoryChunk::FromHeapObject(heap_object)->IsToPage();
}
bool Heap::InOldSpace(Object object) { return old_space_->Contains(object); }
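The renames above move from space-named predicates (InNewSpace/InFromSpace/InToSpace) to page-oriented ones that simply ask the owning MemoryChunk. A standalone sketch (not V8 code) of the relationship, with the simplification that a chunk is in the young generation exactly when it is a from-page or a to-page (the real heap also counts young large-object pages):

```cpp
#include <cassert>
#include <cstdint>

// Simplified stand-in for the MemoryChunk flags backing the renamed predicates.
struct ChunkSketch {
  static constexpr uint32_t kFromPage = 1u << 0;
  static constexpr uint32_t kToPage = 1u << 1;
  uint32_t flags = 0;

  bool IsFromPage() const { return (flags & kFromPage) != 0; }
  bool IsToPage() const { return (flags & kToPage) != 0; }
  bool InYoungGeneration() const { return IsFromPage() || IsToPage(); }
};

int main() {
  ChunkSketch to_page;
  to_page.flags = ChunkSketch::kToPage;
  assert(to_page.InYoungGeneration() && !to_page.IsFromPage());

  ChunkSketch old_page;  // neither flag set: treated as old generation here
  assert(!old_page.InYoungGeneration());
  return 0;
}
```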
@@ -426,8 +402,7 @@ bool Heap::ShouldBePromoted(Address old_address) {
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
DCHECK(IsAligned(byte_size, kTaggedSize));
- STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
- CopyWords(dst, src, static_cast<size_t>(byte_size / kTaggedSize));
+ CopyTagged(dst, src, static_cast<size_t>(byte_size / kTaggedSize));
}
template <Heap::FindMementoMode mode>
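A standalone sketch (not V8 code) in the spirit of the CopyTagged call that replaces CopyWords here; the slot width below is an assumption. The point is that the copy unit is kTaggedSize rather than the system pointer size, which is presumably why the STATIC_ASSERT tying the two together could be dropped:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>

using Tagged_t = uint32_t;                        // assumed tagged slot width
constexpr std::size_t kTaggedSizeSketch = sizeof(Tagged_t);

// Copy num_tagged slots of kTaggedSizeSketch bytes each from src to dst.
// memcpy suffices for non-overlapping blocks; a per-slot loop would also do.
void CopyTaggedSketch(void* dst, const void* src, std::size_t num_tagged) {
  std::memcpy(dst, src, num_tagged * kTaggedSizeSketch);
}
```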
@@ -495,11 +470,13 @@ AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject object) {
void Heap::UpdateAllocationSite(Map map, HeapObject object,
PretenuringFeedbackMap* pretenuring_feedback) {
DCHECK_NE(pretenuring_feedback, &global_pretenuring_feedback_);
- DCHECK(InFromSpace(object) ||
- (InToSpace(object) && Page::FromHeapObject(object)->IsFlagSet(
- Page::PAGE_NEW_NEW_PROMOTION)) ||
- (!InNewSpace(object) && Page::FromHeapObject(object)->IsFlagSet(
- Page::PAGE_NEW_OLD_PROMOTION)));
+#ifdef DEBUG
+ MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
+ DCHECK_IMPLIES(chunk->IsToPage(),
+ chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION));
+ DCHECK_IMPLIES(!chunk->InYoungGeneration(),
+ chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION));
+#endif
if (!FLAG_allocation_site_pretenuring ||
!AllocationSite::CanTrack(map->instance_type())) {
return;
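One note on the assertion style used above: DCHECK_IMPLIES(p, q) reads as "if p then q" and fires only when p holds and q does not. A tiny standalone sketch (not V8 code) of that reading, mirroring the two implications the patch checks on the object's chunk:

```cpp
#include <cassert>

// Stand-in for DCHECK_IMPLIES: assert "p implies q", i.e. !p || q.
#define DCHECK_IMPLIES_SKETCH(p, q) assert(!(p) || (q))

int main() {
  bool is_to_page = true;
  bool has_new_new_promotion_flag = true;
  DCHECK_IMPLIES_SKETCH(is_to_page, has_new_new_promotion_flag);            // passes

  bool in_young_generation = false;
  bool has_new_old_promotion_flag = true;
  DCHECK_IMPLIES_SKETCH(!in_young_generation, has_new_old_promotion_flag);  // passes
}
```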
@@ -519,10 +496,10 @@ void Heap::ExternalStringTable::AddString(String string) {
DCHECK(string->IsExternalString());
DCHECK(!Contains(string));
- if (InNewSpace(string)) {
- new_space_strings_.push_back(string);
+ if (InYoungGeneration(string)) {
+ young_strings_.push_back(string);
} else {
- old_space_strings_.push_back(string);
+ old_strings_.push_back(string);
}
}
@@ -531,14 +508,6 @@ Oddball Heap::ToBoolean(bool condition) {
return condition ? roots.true_value() : roots.false_value();
}
-uint64_t Heap::HashSeed() {
- uint64_t seed;
- ReadOnlyRoots(this).hash_seed()->copy_out(0, reinterpret_cast<byte*>(&seed),
- kInt64Size);
- DCHECK(FLAG_randomize_hashes || seed == 0);
- return seed;
-}
-
int Heap::NextScriptId() {
int last_id = last_script_id()->value();
if (last_id == Smi::kMaxValue) last_id = v8::UnboundScript::kNoScriptId;
@@ -588,11 +557,13 @@ void Heap::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
base::CheckedDecrement(&backing_store_bytes_, amount);
}
-AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
- : heap_(isolate->heap()) {
+AlwaysAllocateScope::AlwaysAllocateScope(Heap* heap) : heap_(heap) {
heap_->always_allocate_scope_count_++;
}
+AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
+ : AlwaysAllocateScope(isolate->heap()) {}
+
AlwaysAllocateScope::~AlwaysAllocateScope() {
heap_->always_allocate_scope_count_--;
}
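The scope change is a plain delegating-constructor refactor: the Isolate* overload now forwards to the new Heap* overload so the counter increment/decrement lives in one place. A standalone sketch (not V8 code) of the same pattern:

```cpp
struct HeapSketch { int always_allocate_scope_count_ = 0; };
struct IsolateSketch {
  HeapSketch heap_;
  HeapSketch* heap() { return &heap_; }
};

class AlwaysAllocateScopeSketch {
 public:
  explicit AlwaysAllocateScopeSketch(HeapSketch* heap) : heap_(heap) {
    heap_->always_allocate_scope_count_++;
  }
  // Delegating constructor: the Isolate* form reuses the Heap* form.
  explicit AlwaysAllocateScopeSketch(IsolateSketch* isolate)
      : AlwaysAllocateScopeSketch(isolate->heap()) {}
  ~AlwaysAllocateScopeSketch() { heap_->always_allocate_scope_count_--; }

 private:
  HeapSketch* heap_;
};

int main() {
  IsolateSketch isolate;
  {
    AlwaysAllocateScopeSketch scope(&isolate);
    // count is 1 inside the scope
  }
  return isolate.heap()->always_allocate_scope_count_;  // back to 0
}
```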