Diffstat (limited to 'deps/v8/src/heap/heap-inl.h')
-rw-r--r--  deps/v8/src/heap/heap-inl.h  172
1 file changed, 42 insertions, 130 deletions
diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h
index aecdd40988..cff69b1e17 100644
--- a/deps/v8/src/heap/heap-inl.h
+++ b/deps/v8/src/heap/heap-inl.h
@@ -14,12 +14,11 @@
#include "src/heap/spaces-inl.h"
#include "src/heap/store-buffer.h"
#include "src/heap/store-buffer-inl.h"
-#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/log.h"
#include "src/msan.h"
-#include "src/objects.h"
+#include "src/objects-inl.h"
namespace v8 {
namespace internal {
@@ -66,7 +65,7 @@ INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
-#define SYMBOL_ACCESSOR(name, varname, description) \
+#define SYMBOL_ACCESSOR(name, description) \
Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
@@ -123,12 +122,11 @@ AllocationResult Heap::AllocateOneByteInternalizedString(
// Compute map and object size.
Map* map = one_byte_internalized_string_map();
int size = SeqOneByteString::SizeFor(str.length());
- AllocationSpace space = SelectSpace(size, TENURED);
// Allocate string.
- HeapObject* result;
+ HeapObject* result = nullptr;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+ AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
@@ -155,12 +153,11 @@ AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
// Compute map and object size.
Map* map = internalized_string_map();
int size = SeqTwoByteString::SizeFor(str.length());
- AllocationSpace space = SelectSpace(size, TENURED);
// Allocate string.
- HeapObject* result;
+ HeapObject* result = nullptr;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+ AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
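
Both internalized-string allocators above lose their SelectSpace(size, TENURED) computation and pass OLD_SPACE straight to AllocateRaw, since the rewritten AllocateRaw below now decides by itself when a request is too large for a regular page. The consumption pattern is unchanged: unpack the result or bubble the retry back to the caller. A standalone sketch of that pattern (Obj and AllocResult are illustrative names, not V8's types):

    // Standalone sketch (not V8 code) of the unpack-or-propagate pattern
    // used by the string allocators above.
    struct Obj { int length = 0; };

    struct AllocResult {
      Obj* obj = nullptr;                    // non-null on success
      bool To(Obj** out) const {             // mirrors AllocationResult::To
        if (obj == nullptr) return false;    // retry: leave *out untouched
        *out = obj;
        return true;
      }
    };

    AllocResult AllocateRawSketch(int /*size*/) { return AllocResult{new Obj}; }

    AllocResult AllocateStringSketch(int size) {
      Obj* result = nullptr;
      AllocResult allocation = AllocateRawSketch(size);
      if (!allocation.To(&result)) return allocation;  // propagate the retry
      result->length = size;                           // ... then initialize
      return allocation;
    }
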
@@ -192,13 +189,12 @@ AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
- AllocationSpace retry_space,
AllocationAlignment alignment) {
DCHECK(AllowHandleAllocation::IsAllowed());
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
- if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
+ if (FLAG_gc_interval >= 0 && !always_allocate() &&
Heap::allocation_timeout_-- <= 0) {
return AllocationResult::Retry(space);
}
@@ -206,13 +202,14 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
isolate_->counters()->objs_since_last_young()->Increment();
#endif
- HeapObject* object;
+ bool large_object = size_in_bytes > Page::kMaxRegularHeapObjectSize;
+ HeapObject* object = nullptr;
AllocationResult allocation;
if (NEW_SPACE == space) {
- allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
- if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
- space = retry_space;
+ if (large_object) {
+ space = LO_SPACE;
} else {
+ allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
if (allocation.To(&object)) {
OnAllocationEvent(object, size_in_bytes);
}
@@ -220,20 +217,27 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
}
}
+ // Here we only allocate in the old generation.
if (OLD_SPACE == space) {
- allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
+ if (large_object) {
+ allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
+ } else {
+ allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
+ }
} else if (CODE_SPACE == space) {
if (size_in_bytes <= code_space()->AreaSize()) {
allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
} else {
- // Large code objects are allocated in large object space.
allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
}
} else if (LO_SPACE == space) {
+ DCHECK(large_object);
allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
- } else {
- DCHECK(MAP_SPACE == space);
+ } else if (MAP_SPACE == space) {
allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
+ } else {
+ // NEW_SPACE is not allowed here.
+ UNREACHABLE();
}
if (allocation.To(&object)) {
OnAllocationEvent(object, size_in_bytes);
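
This hunk is the core of the change: AllocateRaw drops its retry_space parameter, computes once whether the request exceeds the regular-page object limit, retargets over-sized NEW_SPACE requests to LO_SPACE, and then dispatches on the space, with an explicit UNREACHABLE() for anything unexpected. A compact standalone sketch of that dispatch shape (the size limit and the FromSpace helper are placeholders, not V8's values):

    // Standalone sketch (not V8 code) of the size-based dispatch above.
    #include <cstdlib>

    enum Space { NEW_SPACE, OLD_SPACE, CODE_SPACE, MAP_SPACE, LO_SPACE };
    constexpr int kMaxRegularObjectSize = 500 * 1024;  // illustrative limit

    static void* FromSpace(Space, int size) { return std::malloc(size); }  // stub

    void* AllocateRawSketch(int size, Space space) {
      const bool large_object = size > kMaxRegularObjectSize;
      // Over-sized young-generation requests are retargeted up front.
      if (space == NEW_SPACE && large_object) space = LO_SPACE;
      switch (space) {
        case NEW_SPACE:  return FromSpace(NEW_SPACE, size);
        case OLD_SPACE:  return FromSpace(large_object ? LO_SPACE : OLD_SPACE, size);
        case CODE_SPACE: return FromSpace(CODE_SPACE, size);
        case MAP_SPACE:  return FromSpace(MAP_SPACE, size);
        case LO_SPACE:   return FromSpace(LO_SPACE, size);
      }
      return nullptr;  // unreachable for the spaces above
    }
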
@@ -323,9 +327,8 @@ void Heap::UpdateAllocationsHash(uint32_t value) {
}
-void Heap::PrintAlloctionsHash() {
- uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
- PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
+void Heap::RegisterExternalString(String* string) {
+ external_string_table_.AddString(string);
}
@@ -461,9 +464,6 @@ void Heap::MoveBlock(Address dst, Address src, int byte_size) {
}
-void Heap::ScavengePointer(HeapObject** p) { ScavengeObject(p, *p); }
-
-
AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
// Check if there is potentially a memento behind the object. If
// the last word of the memento is on another page we return
@@ -521,33 +521,6 @@ void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
}
-void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
- DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
-
- // We use the first word (where the map pointer usually is) of a heap
- // object to record the forwarding pointer. A forwarding pointer can
- // point to an old space, the code space, or the to space of the new
- // generation.
- MapWord first_word = object->map_word();
-
- // If the first word is a forwarding address, the object has already been
- // copied.
- if (first_word.IsForwardingAddress()) {
- HeapObject* dest = first_word.ToForwardingAddress();
- DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
- *p = dest;
- return;
- }
-
- UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);
-
- // AllocationMementos are unrooted and shouldn't survive a scavenge
- DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
- // Call the slow part of scavenge object.
- return ScavengeObjectSlow(p, object);
-}
-
-
bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
const v8::GCCallbackFlags callbackFlags) {
const char* collector_reason = NULL;
@@ -563,58 +536,7 @@ Isolate* Heap::isolate() {
}
-// Calls the FUNCTION_CALL function and retries it up to three times
-// to guarantee that any allocations performed during the call will
-// succeed if there's enough memory.
-
-// Warning: Do not use the identifiers __object__, __maybe_object__ or
-// __scope__ in a call to this macro.
-
-#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
- if (__allocation__.To(&__object__)) { \
- DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
- RETURN_VALUE; \
- }
-
-#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
- do { \
- AllocationResult __allocation__ = FUNCTION_CALL; \
- Object* __object__ = NULL; \
- RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
- /* Two GCs before panicking. In newspace will almost always succeed. */ \
- for (int __i__ = 0; __i__ < 2; __i__++) { \
- (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \
- "allocation failure"); \
- __allocation__ = FUNCTION_CALL; \
- RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
- } \
- (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \
- (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \
- { \
- AlwaysAllocateScope __scope__(ISOLATE); \
- __allocation__ = FUNCTION_CALL; \
- } \
- RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
- /* TODO(1181417): Fix this. */ \
- v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
- RETURN_EMPTY; \
- } while (false)
-
-#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
- RETURN_EMPTY) \
- CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)
-
-#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \
- CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, \
- return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
- return Handle<TYPE>())
-
-
-#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
- CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)
-
-
-void ExternalStringTable::AddString(String* string) {
+void Heap::ExternalStringTable::AddString(String* string) {
DCHECK(string->IsExternalString());
if (heap_->InNewSpace(string)) {
new_space_strings_.Add(string);
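
The deleted CALL_AND_RETRY macro family encoded the header's retry policy: attempt the allocation, collect garbage in the failing space up to twice, then perform a last-resort full GC and one final attempt inside an AlwaysAllocateScope before declaring the process out of memory. De-macro-ized, that control flow looks roughly like the following sketch (the GC and allocation helpers are stand-ins, not V8's signatures):

    // Standalone sketch (not V8 code) of the retry policy the removed
    // CALL_AND_RETRY macros implemented; all helpers below are stubs.
    #include <cstdio>
    #include <cstdlib>

    static void* TryAllocate(int size) { return std::malloc(size); }
    static void CollectGarbage(const char* reason) { std::puts(reason); }
    static void CollectAllAvailableGarbage(const char* reason) { std::puts(reason); }
    static void FatalProcessOutOfMemory(const char* where) { std::puts(where); std::abort(); }

    void* RetryingAllocate(int size) {
      if (void* obj = TryAllocate(size)) return obj;
      // Two GCs before panicking; in new space this almost always succeeds.
      for (int i = 0; i < 2; i++) {
        CollectGarbage("allocation failure");
        if (void* obj = TryAllocate(size)) return obj;
      }
      // Last resort: reclaim everything possible, then try once more
      // (the macros did this final attempt inside an AlwaysAllocateScope).
      CollectAllAvailableGarbage("last resort gc");
      if (void* obj = TryAllocate(size)) return obj;
      FatalProcessOutOfMemory("RetryingAllocate");
      return nullptr;  // not reached
    }
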
@@ -624,7 +546,7 @@ void ExternalStringTable::AddString(String* string) {
}
-void ExternalStringTable::Iterate(ObjectVisitor* v) {
+void Heap::ExternalStringTable::Iterate(ObjectVisitor* v) {
if (!new_space_strings_.is_empty()) {
Object** start = &new_space_strings_[0];
v->VisitPointers(start, start + new_space_strings_.length());
@@ -638,7 +560,7 @@ void ExternalStringTable::Iterate(ObjectVisitor* v) {
// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
-void ExternalStringTable::Verify() {
+void Heap::ExternalStringTable::Verify() {
#ifdef DEBUG
for (int i = 0; i < new_space_strings_.length(); ++i) {
Object* obj = Object::cast(new_space_strings_[i]);
@@ -654,14 +576,14 @@ void ExternalStringTable::Verify() {
}
-void ExternalStringTable::AddOldString(String* string) {
+void Heap::ExternalStringTable::AddOldString(String* string) {
DCHECK(string->IsExternalString());
DCHECK(!heap_->InNewSpace(string));
old_space_strings_.Add(string);
}
-void ExternalStringTable::ShrinkNewStrings(int position) {
+void Heap::ExternalStringTable::ShrinkNewStrings(int position) {
new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
@@ -715,12 +637,15 @@ uint32_t Heap::HashSeed() {
}
-Smi* Heap::NextScriptId() {
- int next_id = last_script_id()->value() + 1;
- if (!Smi::IsValid(next_id) || next_id < 0) next_id = 1;
- Smi* next_id_smi = Smi::FromInt(next_id);
- set_last_script_id(next_id_smi);
- return next_id_smi;
+int Heap::NextScriptId() {
+ int last_id = last_script_id()->value();
+ if (last_id == Smi::kMaxValue) {
+ last_id = 1;
+ } else {
+ last_id++;
+ }
+ set_last_script_id(Smi::FromInt(last_id));
+ return last_id;
}
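
NextScriptId now returns a plain int and wraps explicitly: once last_script_id reaches Smi::kMaxValue the counter restarts at 1, so ids always stay in the positive Smi range without round-tripping through Smi::IsValid. A standalone sketch of the wraparound (kMaxScriptId stands in for Smi::kMaxValue):

    // Standalone sketch (not V8 code) of the id wraparound above.
    constexpr int kMaxScriptId = (1 << 30) - 1;  // stand-in for Smi::kMaxValue

    int NextScriptIdSketch(int& last_script_id) {
      // Restart at 1 once the largest representable id has been handed out.
      last_script_id = (last_script_id == kMaxScriptId) ? 1 : last_script_id + 1;
      return last_script_id;
    }
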
@@ -749,26 +674,13 @@ void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
- : heap_(isolate->heap()), daf_(isolate) {
- heap_->always_allocate_scope_depth_++;
+ : heap_(isolate->heap()) {
+ heap_->always_allocate_scope_count_.Increment(1);
}
AlwaysAllocateScope::~AlwaysAllocateScope() {
- heap_->always_allocate_scope_depth_--;
-}
-
-
-GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
- heap_->gc_callbacks_depth_++;
-}
-
-
-GCCallbacksScope::~GCCallbacksScope() { heap_->gc_callbacks_depth_--; }
-
-
-bool GCCallbacksScope::CheckReenter() {
- return heap_->gc_callbacks_depth_ == 1;
+ heap_->always_allocate_scope_count_.Increment(-1);
}
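
Finally, AlwaysAllocateScope no longer owns a DisallowAllocationFailure member and bumps a shared counter (always_allocate_scope_count_) through an atomic Increment instead of a plain depth field, which is why the debug check in AllocateRaw above can simply test !always_allocate(). A minimal RAII sketch of the same pattern, using std::atomic in place of V8's counter type:

    // Standalone sketch (not V8 code) of an RAII scope backed by an
    // atomic counter, analogous to AlwaysAllocateScope above.
    #include <atomic>

    struct HeapSketch {
      std::atomic<int> always_allocate_scope_count{0};
      bool always_allocate() const { return always_allocate_scope_count.load() != 0; }
    };

    class AlwaysAllocateScopeSketch {
     public:
      explicit AlwaysAllocateScopeSketch(HeapSketch* heap) : heap_(heap) {
        heap_->always_allocate_scope_count.fetch_add(1);
      }
      ~AlwaysAllocateScopeSketch() {
        heap_->always_allocate_scope_count.fetch_sub(1);
      }
     private:
      HeapSketch* heap_;
    };
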