Diffstat (limited to 'deps/v8/test/cctest/heap')
-rw-r--r--  deps/v8/test/cctest/heap/heap-utils.cc                   |  17
-rw-r--r--  deps/v8/test/cctest/heap/heap-utils.h                    |  12
-rw-r--r--  deps/v8/test/cctest/heap/test-alloc.cc                   |  18
-rw-r--r--  deps/v8/test/cctest/heap/test-array-buffer-tracker.cc    |   3
-rw-r--r--  deps/v8/test/cctest/heap/test-compaction.cc              |  20
-rw-r--r--  deps/v8/test/cctest/heap/test-embedder-tracing.cc        | 348
-rw-r--r--  deps/v8/test/cctest/heap/test-external-string-tracker.cc |   5
-rw-r--r--  deps/v8/test/cctest/heap/test-heap.cc                    | 405
-rw-r--r--  deps/v8/test/cctest/heap/test-incremental-marking.cc     |   6
-rw-r--r--  deps/v8/test/cctest/heap/test-invalidated-slots.cc       |  21
-rw-r--r--  deps/v8/test/cctest/heap/test-mark-compact.cc            |  15
-rw-r--r--  deps/v8/test/cctest/heap/test-page-promotion.cc          |   5
-rw-r--r--  deps/v8/test/cctest/heap/test-spaces.cc                  | 172
-rw-r--r--  deps/v8/test/cctest/heap/test-weak-references.cc         |  35
14 files changed, 787 insertions(+), 295 deletions(-)
diff --git a/deps/v8/test/cctest/heap/heap-utils.cc b/deps/v8/test/cctest/heap/heap-utils.cc
index 084bf6ef1b..0e437ed9d8 100644
--- a/deps/v8/test/cctest/heap/heap-utils.cc
+++ b/deps/v8/test/cctest/heap/heap-utils.cc
@@ -15,6 +15,10 @@ namespace v8 {
namespace internal {
namespace heap {
+void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }
+
+void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }
+
void SealCurrentObjects(Heap* heap) {
CcTest::CollectAllGarbage();
CcTest::CollectAllGarbage();
@@ -26,7 +30,8 @@ void SealCurrentObjects(Heap* heap) {
}
int FixedArrayLenFromSize(int size) {
- return (size - FixedArray::kHeaderSize) / kTaggedSize;
+ return Min((size - FixedArray::kHeaderSize) / kTaggedSize,
+ FixedArray::kMaxRegularLength);
}
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
@@ -101,9 +106,10 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
}
}
handles.push_back(isolate->factory()->NewFixedArray(length, tenure));
- CHECK((tenure == NOT_TENURED && Heap::InNewSpace(*handles.back())) ||
+ CHECK((tenure == NOT_TENURED &&
+ heap->new_space()->Contains(*handles.back())) ||
(tenure == TENURED && heap->InOldSpace(*handles.back())));
- free_memory -= allocate_memory;
+ free_memory -= handles.back()->Size();
}
return handles;
}
@@ -148,6 +154,7 @@ void SimulateFullSpace(v8::internal::NewSpace* space,
}
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
+ const double kStepSizeInMs = 100;
CHECK(FLAG_incremental_marking);
i::IncrementalMarking* marking = heap->incremental_marking();
i::MarkCompactCollector* collector = heap->mark_compact_collector();
@@ -166,8 +173,8 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
if (!force_completion) return;
while (!marking->IsComplete()) {
- marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- i::StepOrigin::kV8);
+ marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+ i::StepOrigin::kV8);
if (marking->IsReadyToOverApproximateWeakClosure()) {
marking->FinalizeIncrementally();
}
diff --git a/deps/v8/test/cctest/heap/heap-utils.h b/deps/v8/test/cctest/heap/heap-utils.h
index a494f54210..30a18b5895 100644
--- a/deps/v8/test/cctest/heap/heap-utils.h
+++ b/deps/v8/test/cctest/heap/heap-utils.h
@@ -5,6 +5,7 @@
#ifndef HEAP_HEAP_UTILS_H_
#define HEAP_HEAP_UTILS_H_
+#include "src/api-inl.h"
#include "src/heap/heap.h"
namespace v8 {
@@ -52,6 +53,17 @@ void GcAndSweep(Heap* heap, AllocationSpace space);
void ForceEvacuationCandidate(Page* page);
+void InvokeScavenge();
+
+void InvokeMarkSweep();
+
+template <typename GlobalOrPersistent>
+bool InYoungGeneration(v8::Isolate* isolate, const GlobalOrPersistent& global) {
+ v8::HandleScope scope(isolate);
+ auto tmp = global.Get(isolate);
+ return i::Heap::InYoungGeneration(*v8::Utils::OpenHandle(*tmp));
+}
+
} // namespace heap
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/cctest/heap/test-alloc.cc b/deps/v8/test/cctest/heap/test-alloc.cc
index 89bb25b56c..cf01d9fe9b 100644
--- a/deps/v8/test/cctest/heap/test-alloc.cc
+++ b/deps/v8/test/cctest/heap/test-alloc.cc
@@ -30,6 +30,7 @@
#include "src/accessors.h"
#include "src/api-inl.h"
+#include "src/heap/heap-inl.h"
#include "src/objects-inl.h"
#include "src/objects/api-callbacks.h"
#include "src/property.h"
@@ -48,15 +49,16 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
AlwaysAllocateScope scope(CcTest::i_isolate());
Heap* heap = CcTest::heap();
int size = FixedArray::SizeFor(100);
- // New space.
- HeapObject obj = heap->AllocateRaw(size, NEW_SPACE).ToObjectChecked();
+ // Young generation.
+ HeapObject obj =
+ heap->AllocateRaw(size, AllocationType::kYoung).ToObjectChecked();
// In order to pass heap verification on Isolate teardown, mark the
// allocated area as a filler.
heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
- // Old space.
+ // Old generation.
heap::SimulateFullSpace(heap->old_space());
- obj = heap->AllocateRaw(size, OLD_SPACE).ToObjectChecked();
+ obj = heap->AllocateRaw(size, AllocationType::kOld).ToObjectChecked();
heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
// Large object space.
@@ -67,24 +69,24 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
CHECK_GT(kLargeObjectSpaceFillerSize,
static_cast<size_t>(heap->old_space()->AreaSize()));
while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
- obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, OLD_SPACE)
+ obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
.ToObjectChecked();
heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
}
- obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, OLD_SPACE)
+ obj = heap->AllocateRaw(kLargeObjectSpaceFillerSize, AllocationType::kOld)
.ToObjectChecked();
heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
// Map space.
heap::SimulateFullSpace(heap->map_space());
- obj = heap->AllocateRaw(Map::kSize, MAP_SPACE).ToObjectChecked();
+ obj = heap->AllocateRaw(Map::kSize, AllocationType::kMap).ToObjectChecked();
heap->CreateFillerObjectAt(obj->address(), Map::kSize,
ClearRecordedSlots::kNo);
// Code space.
heap::SimulateFullSpace(heap->code_space());
size = CcTest::i_isolate()->builtins()->builtin(Builtins::kIllegal)->Size();
- obj = heap->AllocateRaw(size, CODE_SPACE).ToObjectChecked();
+ obj = heap->AllocateRaw(size, AllocationType::kCode).ToObjectChecked();
heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
return CcTest::i_isolate()->factory()->true_value();
}
diff --git a/deps/v8/test/cctest/heap/test-array-buffer-tracker.cc b/deps/v8/test/cctest/heap/test-array-buffer-tracker.cc
index daeccca777..9cc2d84ec9 100644
--- a/deps/v8/test/cctest/heap/test-array-buffer-tracker.cc
+++ b/deps/v8/test/cctest/heap/test-array-buffer-tracker.cc
@@ -308,10 +308,11 @@ TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
if (FLAG_optimize_for_size) return;
+ ManualGCScope manual_gc_scope;
// Test allocates JSArrayBuffer on different pages before triggering a
// full GC that performs the semispace copy. If parallelized, this test
// ensures proper synchronization in TSAN configurations.
- FLAG_min_semi_space_size = 2 * Page::kPageSize / MB;
+ FLAG_min_semi_space_size = Max(2 * Page::kPageSize / MB, 1);
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params);
diff --git a/deps/v8/test/cctest/heap/test-compaction.cc b/deps/v8/test/cctest/heap/test-compaction.cc
index 9fb989482c..eea4a6eb43 100644
--- a/deps/v8/test/cctest/heap/test-compaction.cc
+++ b/deps/v8/test/cctest/heap/test-compaction.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "src/heap/factory.h"
+#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
@@ -96,8 +97,9 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {
const int objects_per_page = 10;
const int object_size =
- static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) /
- objects_per_page;
+ Min(kMaxRegularHeapObjectSize,
+ static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) /
+ objects_per_page);
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
@@ -175,8 +177,9 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
const int objects_per_page = 10;
const int object_size =
- static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) /
- objects_per_page;
+ Min(kMaxRegularHeapObjectSize,
+ static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) /
+ objects_per_page);
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
@@ -267,8 +270,9 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
const int objects_per_page = 10;
const int object_size =
- static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) /
- objects_per_page;
+ Min(kMaxRegularHeapObjectSize,
+ static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) /
+ objects_per_page);
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
@@ -302,7 +306,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
root_array->set(0, *compaction_page_handles.back());
Handle<FixedArray> new_space_array =
isolate->factory()->NewFixedArray(1, NOT_TENURED);
- CHECK(Heap::InNewSpace(*new_space_array));
+ CHECK(Heap::InYoungGeneration(*new_space_array));
compaction_page_handles.front()->set(1, *new_space_array);
CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);
}
@@ -329,7 +333,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
while (current->get(0) != ReadOnlyRoots(heap).undefined_value()) {
current =
Handle<FixedArray>(FixedArray::cast(current->get(0)), isolate);
- CHECK(!Heap::InNewSpace(*current));
+ CHECK(!Heap::InYoungGeneration(*current));
CHECK(current->IsFixedArray());
if (Page::FromHeapObject(*current) != to_be_aborted_page) {
in_place = false;
diff --git a/deps/v8/test/cctest/heap/test-embedder-tracing.cc b/deps/v8/test/cctest/heap/test-embedder-tracing.cc
index 5134392886..ace016dbd0 100644
--- a/deps/v8/test/cctest/heap/test-embedder-tracing.cc
+++ b/deps/v8/test/cctest/heap/test-embedder-tracing.cc
@@ -2,13 +2,18 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <unordered_map>
+#include <vector>
+
#include "include/v8.h"
#include "src/api-inl.h"
+#include "src/heap/heap-inl.h"
#include "src/objects-inl.h"
#include "src/objects/module.h"
#include "src/objects/script.h"
#include "src/objects/shared-function-info.h"
#include "test/cctest/cctest.h"
+#include "test/cctest/heap/heap-utils.h"
namespace v8 {
namespace internal {
@@ -35,9 +40,14 @@ v8::Local<v8::Object> ConstructTraceableJSApiObject(
return scope.Escape(instance);
}
+enum class TracePrologueBehavior { kNoop, kCallV8WriteBarrier };
+
class TestEmbedderHeapTracer final : public v8::EmbedderHeapTracer {
public:
- explicit TestEmbedderHeapTracer(v8::Isolate* isolate) : isolate_(isolate) {}
+ TestEmbedderHeapTracer() = default;
+ TestEmbedderHeapTracer(TracePrologueBehavior prologue_behavior,
+ v8::Global<v8::Array> array)
+ : prologue_behavior_(prologue_behavior), array_(std::move(array)) {}
void RegisterV8References(
const std::vector<std::pair<void*, void*>>& embedder_fields) final {
@@ -45,13 +55,13 @@ class TestEmbedderHeapTracer final : public v8::EmbedderHeapTracer {
embedder_fields.begin(), embedder_fields.end());
}
- void AddReferenceForTracing(v8::Persistent<v8::Object>* persistent) {
- to_register_with_v8_.push_back(persistent);
+ void AddReferenceForTracing(v8::TracedGlobal<v8::Object>* global) {
+ to_register_with_v8_.push_back(global);
}
bool AdvanceTracing(double deadline_in_ms) final {
- for (auto persistent : to_register_with_v8_) {
- persistent->RegisterExternalReference(isolate_);
+ for (auto global : to_register_with_v8_) {
+ RegisterEmbedderReference(global->As<v8::Value>());
}
to_register_with_v8_.clear();
return true;
@@ -59,9 +69,15 @@ class TestEmbedderHeapTracer final : public v8::EmbedderHeapTracer {
bool IsTracingDone() final { return to_register_with_v8_.empty(); }
- void TracePrologue() final {}
+ void TracePrologue() final {
+ if (prologue_behavior_ == TracePrologueBehavior::kCallV8WriteBarrier) {
+ auto local = array_.Get(isolate());
+ local->Set(local->CreationContext(), 0, v8::Object::New(isolate()))
+ .Check();
+ }
+ }
+
void TraceEpilogue() final {}
- void AbortTracing() final {}
void EnterFinalPause(EmbedderStackState) final {}
bool IsRegisteredFromV8(void* first_field) const {
@@ -71,10 +87,20 @@ class TestEmbedderHeapTracer final : public v8::EmbedderHeapTracer {
return false;
}
+ void ConsiderTracedGlobalAsRoot(bool value) {
+ consider_traced_global_as_root_ = value;
+ }
+
+ bool IsRootForNonTracingGC(const v8::TracedGlobal<v8::Value>& handle) final {
+ return consider_traced_global_as_root_;
+ }
+
private:
- v8::Isolate* const isolate_;
std::vector<std::pair<void*, void*>> registered_from_v8_;
- std::vector<v8::Persistent<v8::Object>*> to_register_with_v8_;
+ std::vector<v8::TracedGlobal<v8::Object>*> to_register_with_v8_;
+ bool consider_traced_global_as_root_ = true;
+ TracePrologueBehavior prologue_behavior_ = TracePrologueBehavior::kNoop;
+ v8::Global<v8::Array> array_;
};
class TemporaryEmbedderHeapTracerScope {
@@ -101,7 +127,7 @@ TEST(V8RegisteringEmbedderReference) {
ManualGCScope manual_gc;
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
- TestEmbedderHeapTracer tracer(isolate);
+ TestEmbedderHeapTracer tracer;
TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
v8::HandleScope scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(isolate);
@@ -121,19 +147,18 @@ TEST(EmbedderRegisteringV8Reference) {
ManualGCScope manual_gc;
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
- TestEmbedderHeapTracer tracer(isolate);
+ TestEmbedderHeapTracer tracer;
TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
v8::HandleScope scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(isolate);
v8::Context::Scope context_scope(context);
- v8::Persistent<v8::Object> g;
+ v8::TracedGlobal<v8::Object> g;
{
v8::HandleScope inner_scope(isolate);
v8::Local<v8::Object> o =
v8::Local<v8::Object>::New(isolate, v8::Object::New(isolate));
g.Reset(isolate, o);
- g.SetWeak();
}
tracer.AddReferenceForTracing(&g);
CcTest::CollectGarbage(i::OLD_SPACE);
@@ -155,7 +180,7 @@ TEST(TracingInRevivedSubgraph) {
ManualGCScope manual_gc;
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
- TestEmbedderHeapTracer tracer(isolate);
+ TestEmbedderHeapTracer tracer;
TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
v8::HandleScope scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(isolate);
@@ -183,7 +208,7 @@ TEST(TracingInEphemerons) {
ManualGCScope manual_gc;
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
- TestEmbedderHeapTracer tracer(isolate);
+ TestEmbedderHeapTracer tracer;
TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
v8::HandleScope scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(isolate);
@@ -214,7 +239,7 @@ TEST(FinalizeTracingIsNoopWhenNotMarking) {
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
Isolate* i_isolate = CcTest::i_isolate();
- TestEmbedderHeapTracer tracer(isolate);
+ TestEmbedderHeapTracer tracer;
TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
// Finalize a potentially running garbage collection.
@@ -233,7 +258,7 @@ TEST(FinalizeTracingWhenMarking) {
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
Isolate* i_isolate = CcTest::i_isolate();
- TestEmbedderHeapTracer tracer(isolate);
+ TestEmbedderHeapTracer tracer;
TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
// Finalize a potentially running garbage collection.
@@ -258,7 +283,7 @@ TEST(GarbageCollectionForTesting) {
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
Isolate* i_isolate = CcTest::i_isolate();
- TestEmbedderHeapTracer tracer(isolate);
+ TestEmbedderHeapTracer tracer;
TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
int saved_gc_counter = i_isolate->heap()->gc_count();
@@ -266,6 +291,293 @@ TEST(GarbageCollectionForTesting) {
CHECK_GT(i_isolate->heap()->gc_count(), saved_gc_counter);
}
+namespace {
+
+void ConstructJSObject(v8::Isolate* isolate, v8::Local<v8::Context> context,
+ v8::TracedGlobal<v8::Object>* global) {
+ v8::HandleScope scope(isolate);
+ v8::Local<v8::Object> object(v8::Object::New(isolate));
+ CHECK(!object.IsEmpty());
+ *global = v8::TracedGlobal<v8::Object>(isolate, object);
+ CHECK(!global->IsEmpty());
+}
+
+void ConstructJSApiObject(v8::Isolate* isolate, v8::Local<v8::Context> context,
+ v8::TracedGlobal<v8::Object>* global) {
+ v8::HandleScope scope(isolate);
+ v8::Local<v8::Object> object(
+ ConstructTraceableJSApiObject(context, nullptr, nullptr));
+ CHECK(!object.IsEmpty());
+ *global = v8::TracedGlobal<v8::Object>(isolate, object);
+ CHECK(!global->IsEmpty());
+}
+
+enum class SurvivalMode { kSurvives, kDies };
+
+template <typename ModifierFunction, typename ConstructTracedGlobalFunction>
+void TracedGlobalTest(v8::Isolate* isolate,
+ ConstructTracedGlobalFunction construct_function,
+ ModifierFunction modifier_function, void (*gc_function)(),
+ SurvivalMode survives) {
+ v8::HandleScope scope(isolate);
+ v8::Local<v8::Context> context = v8::Context::New(isolate);
+ v8::Context::Scope context_scope(context);
+
+ v8::TracedGlobal<v8::Object> global;
+ construct_function(isolate, context, &global);
+ CHECK(InYoungGeneration(isolate, global));
+ modifier_function(global);
+ gc_function();
+ CHECK_IMPLIES(survives == SurvivalMode::kSurvives, !global.IsEmpty());
+ CHECK_IMPLIES(survives == SurvivalMode::kDies, global.IsEmpty());
+}
+
+} // namespace
+
+TEST(TracedGlobalReset) {
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+
+ v8::TracedGlobal<v8::Object> traced;
+ ConstructJSObject(isolate, isolate->GetCurrentContext(), &traced);
+ CHECK(!traced.IsEmpty());
+ traced.Reset();
+ CHECK(traced.IsEmpty());
+}
+
+TEST(TracedGlobalInStdVector) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+
+ std::vector<v8::TracedGlobal<v8::Object>> vec;
+ {
+ v8::HandleScope scope(isolate);
+ vec.emplace_back(isolate, v8::Object::New(isolate));
+ }
+ CHECK(!vec[0].IsEmpty());
+ InvokeMarkSweep();
+ CHECK(vec[0].IsEmpty());
+}
+
+TEST(TracedGlobalInStdUnorderedMap) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+
+ std::unordered_map<int, v8::TracedGlobal<v8::Object>> map;
+ {
+ v8::HandleScope scope(isolate);
+ map.emplace(std::piecewise_construct, std::forward_as_tuple(1),
+ std::forward_as_tuple(isolate, v8::Object::New(isolate)));
+ }
+ CHECK(!map[1].IsEmpty());
+ InvokeMarkSweep();
+ CHECK(map[1].IsEmpty());
+}
+
+TEST(TracedGlobalToUnmodifiedJSObjectDiesOnMarkSweep) {
+ CcTest::InitializeVM();
+ TracedGlobalTest(
+ CcTest::isolate(), ConstructJSObject,
+ [](const TracedGlobal<v8::Object>& global) {}, InvokeMarkSweep,
+ SurvivalMode::kDies);
+}
+
+TEST(TracedGlobalToUnmodifiedJSObjectSurvivesMarkSweepWhenHeldAliveOtherwise) {
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::Global<v8::Object> strong_global;
+ TracedGlobalTest(
+ CcTest::isolate(), ConstructJSObject,
+ [isolate, &strong_global](const TracedGlobal<v8::Object>& global) {
+ v8::HandleScope scope(isolate);
+ strong_global = v8::Global<v8::Object>(isolate, global.Get(isolate));
+ },
+ InvokeMarkSweep, SurvivalMode::kSurvives);
+}
+
+TEST(TracedGlobalToUnmodifiedJSObjectSurvivesScavenge) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ TracedGlobalTest(
+ CcTest::isolate(), ConstructJSObject,
+ [](const TracedGlobal<v8::Object>& global) {}, InvokeScavenge,
+ SurvivalMode::kSurvives);
+}
+
+TEST(TracedGlobalToUnmodifiedJSObjectSurvivesScavengeWhenExcludedFromRoots) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ TestEmbedderHeapTracer tracer;
+ TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
+ tracer.ConsiderTracedGlobalAsRoot(false);
+ TracedGlobalTest(
+ CcTest::isolate(), ConstructJSObject,
+ [](const TracedGlobal<v8::Object>& global) {}, InvokeScavenge,
+ SurvivalMode::kSurvives);
+}
+
+TEST(TracedGlobalToUnmodifiedJSApiObjectSurvivesScavengePerDefault) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ TestEmbedderHeapTracer tracer;
+ TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
+ tracer.ConsiderTracedGlobalAsRoot(true);
+ TracedGlobalTest(
+ CcTest::isolate(), ConstructJSApiObject,
+ [](const TracedGlobal<v8::Object>& global) {}, InvokeScavenge,
+ SurvivalMode::kSurvives);
+}
+
+TEST(TracedGlobalToUnmodifiedJSApiObjectDiesOnScavengeWhenExcludedFromRoots) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ TestEmbedderHeapTracer tracer;
+ TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
+ tracer.ConsiderTracedGlobalAsRoot(false);
+ TracedGlobalTest(
+ CcTest::isolate(), ConstructJSApiObject,
+ [](const TracedGlobal<v8::Object>& global) {}, InvokeScavenge,
+ SurvivalMode::kDies);
+}
+
+TEST(TracedGlobalWrapperClassId) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+ TestEmbedderHeapTracer tracer;
+ TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
+
+ v8::TracedGlobal<v8::Object> traced;
+ ConstructJSObject(isolate, isolate->GetCurrentContext(), &traced);
+ CHECK_EQ(0, traced.WrapperClassId());
+ traced.SetWrapperClassId(17);
+ CHECK_EQ(17, traced.WrapperClassId());
+}
+
+namespace {
+
+class TracedGlobalVisitor final
+ : public v8::EmbedderHeapTracer::TracedGlobalHandleVisitor {
+ public:
+ ~TracedGlobalVisitor() override = default;
+ void VisitTracedGlobalHandle(const TracedGlobal<Value>& value) final {
+ if (value.WrapperClassId() == 57) {
+ count_++;
+ }
+ }
+
+ size_t count() const { return count_; }
+
+ private:
+ size_t count_ = 0;
+};
+
+} // namespace
+
+TEST(TracedGlobalIteration) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+ TestEmbedderHeapTracer tracer;
+ TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
+
+ v8::TracedGlobal<v8::Object> traced;
+ ConstructJSObject(isolate, isolate->GetCurrentContext(), &traced);
+ CHECK(!traced.IsEmpty());
+ traced.SetWrapperClassId(57);
+ TracedGlobalVisitor visitor;
+ {
+ v8::HandleScope scope(isolate);
+ tracer.IterateTracedGlobalHandles(&visitor);
+ }
+ CHECK_EQ(1, visitor.count());
+}
+
+namespace {
+
+void FinalizationCallback(const WeakCallbackInfo<void>& data) {
+ v8::TracedGlobal<v8::Object>* traced =
+ reinterpret_cast<v8::TracedGlobal<v8::Object>*>(data.GetParameter());
+ CHECK_EQ(reinterpret_cast<void*>(0x4), data.GetInternalField(0));
+ CHECK_EQ(reinterpret_cast<void*>(0x8), data.GetInternalField(1));
+ traced->Reset();
+}
+
+} // namespace
+
+TEST(TracedGlobalSetFinalizationCallbackScavenge) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+ TestEmbedderHeapTracer tracer;
+ tracer.ConsiderTracedGlobalAsRoot(false);
+ TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
+
+ v8::TracedGlobal<v8::Object> traced;
+ ConstructJSApiObject(isolate, isolate->GetCurrentContext(), &traced);
+ CHECK(!traced.IsEmpty());
+ {
+ v8::HandleScope scope(isolate);
+ auto local = traced.Get(isolate);
+ local->SetAlignedPointerInInternalField(0, reinterpret_cast<void*>(0x4));
+ local->SetAlignedPointerInInternalField(1, reinterpret_cast<void*>(0x8));
+ }
+ traced.SetFinalizationCallback(&traced, FinalizationCallback);
+ heap::InvokeScavenge();
+ CHECK(traced.IsEmpty());
+}
+
+TEST(TracedGlobalSetFinalizationCallbackMarkSweep) {
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+ TestEmbedderHeapTracer tracer;
+ TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
+
+ v8::TracedGlobal<v8::Object> traced;
+ ConstructJSApiObject(isolate, isolate->GetCurrentContext(), &traced);
+ CHECK(!traced.IsEmpty());
+ {
+ v8::HandleScope scope(isolate);
+ auto local = traced.Get(isolate);
+ local->SetAlignedPointerInInternalField(0, reinterpret_cast<void*>(0x4));
+ local->SetAlignedPointerInInternalField(1, reinterpret_cast<void*>(0x8));
+ }
+ traced.SetFinalizationCallback(&traced, FinalizationCallback);
+ heap::InvokeMarkSweep();
+ CHECK(traced.IsEmpty());
+}
+
+TEST(TracePrologueCallingIntoV8WriteBarrier) {
+ // Regression test: https://crbug.com/940003
+ ManualGCScope manual_gc;
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope scope(isolate);
+ v8::Global<v8::Array> global;
+ {
+ v8::HandleScope scope(isolate);
+ auto local = v8::Array::New(isolate, 10);
+ global.Reset(isolate, local);
+ }
+ TestEmbedderHeapTracer tracer(TracePrologueBehavior::kCallV8WriteBarrier,
+ std::move(global));
+ TemporaryEmbedderHeapTracerScope tracer_scope(isolate, &tracer);
+ SimulateIncrementalMarking(CcTest::i_isolate()->heap());
+}
+
} // namespace heap
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/cctest/heap/test-external-string-tracker.cc b/deps/v8/test/cctest/heap/test-external-string-tracker.cc
index 36a9391307..bcc00764de 100644
--- a/deps/v8/test/cctest/heap/test-external-string-tracker.cc
+++ b/deps/v8/test/cctest/heap/test-external-string-tracker.cc
@@ -4,6 +4,7 @@
#include "src/api-inl.h"
#include "src/api.h"
+#include "src/heap/heap-inl.h"
#include "src/heap/spaces.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
@@ -193,7 +194,7 @@ TEST(ExternalString_PromotedThinString) {
i::Handle<i::String> isymbol1 = factory->InternalizeString(string1);
CHECK(isymbol1->IsInternalizedString());
CHECK(string1->IsExternalString());
- CHECK(!heap->InNewSpace(*isymbol1));
+ CHECK(!heap->InYoungGeneration(*isymbol1));
// New external string in the young space. This string has the same content
// as the previous one (that was already internalized).
@@ -209,7 +210,7 @@ TEST(ExternalString_PromotedThinString) {
i::Handle<i::String> isymbol2 = factory->InternalizeString(istring);
CHECK(isymbol2->IsInternalizedString());
CHECK(istring->IsThinString());
- CHECK(heap->InNewSpace(*istring));
+ CHECK(heap->InYoungGeneration(*istring));
// Collect thin string. References to the thin string will be updated to
// point to the actual external string in the old space.
diff --git a/deps/v8/test/cctest/heap/test-heap.cc b/deps/v8/test/cctest/heap/test-heap.cc
index c7c1d93f87..f12ba10979 100644
--- a/deps/v8/test/cctest/heap/test-heap.cc
+++ b/deps/v8/test/cctest/heap/test-heap.cc
@@ -37,8 +37,10 @@
#include "src/execution.h"
#include "src/field-type.h"
#include "src/global-handles.h"
+#include "src/hash-seed-inl.h"
#include "src/heap/factory.h"
#include "src/heap/gc-tracer.h"
+#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/heap/memory-reducer.h"
@@ -46,6 +48,7 @@
#include "src/ic/ic.h"
#include "src/macro-assembler-inl.h"
#include "src/objects-inl.h"
+#include "src/objects/frame-array-inl.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-collection-inl.h"
@@ -188,7 +191,7 @@ HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
HandleScope sc(isolate);
Handle<HeapNumber> value = factory->NewHeapNumber(1.000123);
- CHECK(Heap::InNewSpace(*value));
+ CHECK(Heap::InYoungGeneration(*value));
i::byte buffer[i::Assembler::kMinimalBufferSize];
MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
@@ -368,16 +371,11 @@ TEST(GarbageCollection) {
HandleScope inner_scope(isolate);
// Allocate a function and keep it in global object's property.
Handle<JSFunction> function = factory->NewFunctionForTest(name);
- Object::SetProperty(isolate, global, name, function, LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, global, name, function).Check();
// Allocate an object. Unrooted after leaving the scope.
Handle<JSObject> obj = factory->NewJSObject(function);
- Object::SetProperty(isolate, obj, prop_name, twenty_three,
- LanguageMode::kSloppy)
- .Check();
- Object::SetProperty(isolate, obj, prop_namex, twenty_four,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
+ Object::SetProperty(isolate, obj, prop_namex, twenty_four).Check();
CHECK_EQ(Smi::FromInt(23),
*Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
@@ -399,11 +397,8 @@ TEST(GarbageCollection) {
HandleScope inner_scope(isolate);
// Allocate another object, make it reachable from global.
Handle<JSObject> obj = factory->NewJSObject(function);
- Object::SetProperty(isolate, global, obj_name, obj, LanguageMode::kSloppy)
- .Check();
- Object::SetProperty(isolate, obj, prop_name, twenty_three,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, global, obj_name, obj).Check();
+ Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
}
// After gc, it should survive.
@@ -698,7 +693,7 @@ TEST(WeakGlobalHandlesMark) {
// Make sure the objects are promoted.
CcTest::CollectGarbage(OLD_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
- CHECK(!Heap::InNewSpace(*h1) && !Heap::InNewSpace(*h2));
+ CHECK(!Heap::InYoungGeneration(*h1) && !Heap::InYoungGeneration(*h2));
std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
GlobalHandles::MakeWeak(
@@ -944,15 +939,11 @@ TEST(FunctionAllocation) {
Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
Handle<JSObject> obj = factory->NewJSObject(function);
- Object::SetProperty(isolate, obj, prop_name, twenty_three,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
CHECK_EQ(Smi::FromInt(23),
*Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
// Check that we can add properties to function objects.
- Object::SetProperty(isolate, function, prop_name, twenty_four,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, function, prop_name, twenty_four).Check();
CHECK_EQ(
Smi::FromInt(24),
*Object::GetProperty(isolate, function, prop_name).ToHandleChecked());
@@ -983,7 +974,7 @@ TEST(ObjectProperties) {
CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
// add first
- Object::SetProperty(isolate, obj, first, one, LanguageMode::kSloppy).Check();
+ Object::SetProperty(isolate, obj, first, one).Check();
CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
// delete first
@@ -992,8 +983,8 @@ TEST(ObjectProperties) {
CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
// add first and then second
- Object::SetProperty(isolate, obj, first, one, LanguageMode::kSloppy).Check();
- Object::SetProperty(isolate, obj, second, two, LanguageMode::kSloppy).Check();
+ Object::SetProperty(isolate, obj, first, one).Check();
+ Object::SetProperty(isolate, obj, second, two).Check();
CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
@@ -1007,8 +998,8 @@ TEST(ObjectProperties) {
CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
// add first and then second
- Object::SetProperty(isolate, obj, first, one, LanguageMode::kSloppy).Check();
- Object::SetProperty(isolate, obj, second, two, LanguageMode::kSloppy).Check();
+ Object::SetProperty(isolate, obj, first, one).Check();
+ Object::SetProperty(isolate, obj, second, two).Check();
CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
@@ -1024,15 +1015,14 @@ TEST(ObjectProperties) {
// check string and internalized string match
const char* string1 = "fisk";
Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
- Object::SetProperty(isolate, obj, s1, one, LanguageMode::kSloppy).Check();
+ Object::SetProperty(isolate, obj, s1, one).Check();
Handle<String> s1_string = factory->InternalizeUtf8String(string1);
CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
// check internalized string and string match
const char* string2 = "fugl";
Handle<String> s2_string = factory->InternalizeUtf8String(string2);
- Object::SetProperty(isolate, obj, s2_string, one, LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, obj, s2_string, one).Check();
Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}
@@ -1053,9 +1043,7 @@ TEST(JSObjectMaps) {
// Set a property
Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
- Object::SetProperty(isolate, obj, prop_name, twenty_three,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
CHECK_EQ(Smi::FromInt(23),
*Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
@@ -1090,7 +1078,7 @@ TEST(JSArray) {
CHECK(array->HasSmiOrObjectElements());
// array[length] = name.
- Object::SetElement(isolate, array, 0, name, LanguageMode::kSloppy).Check();
+ Object::SetElement(isolate, array, 0, name, ShouldThrow::kDontThrow).Check();
CHECK_EQ(Smi::FromInt(1), array->length());
element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
CHECK_EQ(*element, *name);
@@ -1104,7 +1092,7 @@ TEST(JSArray) {
CHECK(array->HasDictionaryElements()); // Must be in slow mode.
// array[length] = name.
- Object::SetElement(isolate, array, int_length, name, LanguageMode::kSloppy)
+ Object::SetElement(isolate, array, int_length, name, ShouldThrow::kDontThrow)
.Check();
uint32_t new_int_length = 0;
CHECK(array->length()->ToArrayIndex(&new_int_length));
@@ -1136,11 +1124,11 @@ TEST(JSObjectCopy) {
Handle<Smi> one(Smi::FromInt(1), isolate);
Handle<Smi> two(Smi::FromInt(2), isolate);
- Object::SetProperty(isolate, obj, first, one, LanguageMode::kSloppy).Check();
- Object::SetProperty(isolate, obj, second, two, LanguageMode::kSloppy).Check();
+ Object::SetProperty(isolate, obj, first, one).Check();
+ Object::SetProperty(isolate, obj, second, two).Check();
- Object::SetElement(isolate, obj, 0, first, LanguageMode::kSloppy).Check();
- Object::SetElement(isolate, obj, 1, second, LanguageMode::kSloppy).Check();
+ Object::SetElement(isolate, obj, 0, first, ShouldThrow::kDontThrow).Check();
+ Object::SetElement(isolate, obj, 1, second, ShouldThrow::kDontThrow).Check();
// Make the clone.
Handle<Object> value1, value2;
@@ -1162,13 +1150,12 @@ TEST(JSObjectCopy) {
CHECK_EQ(*value1, *value2);
// Flip the values.
- Object::SetProperty(isolate, clone, first, two, LanguageMode::kSloppy)
- .Check();
- Object::SetProperty(isolate, clone, second, one, LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, clone, first, two).Check();
+ Object::SetProperty(isolate, clone, second, one).Check();
- Object::SetElement(isolate, clone, 0, second, LanguageMode::kSloppy).Check();
- Object::SetElement(isolate, clone, 1, first, LanguageMode::kSloppy).Check();
+ Object::SetElement(isolate, clone, 0, second, ShouldThrow::kDontThrow)
+ .Check();
+ Object::SetElement(isolate, clone, 1, first, ShouldThrow::kDontThrow).Check();
value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
@@ -1800,8 +1787,9 @@ static HeapObject NewSpaceAllocateAligned(int size,
static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
int fill = Heap::GetFillToAlign(*top_addr, alignment);
- if (fill) {
- NewSpaceAllocateAligned(fill + offset, kWordAligned);
+ int allocation = fill + offset;
+ if (allocation) {
+ NewSpaceAllocateAligned(allocation, kWordAligned);
}
return *top_addr;
}
@@ -1923,6 +1911,64 @@ TEST(TestAlignedOverAllocation) {
}
}
+TEST(HeapNumberAlignment) {
+ CcTest::InitializeVM();
+ Isolate* isolate = CcTest::i_isolate();
+ Factory* factory = isolate->factory();
+ Heap* heap = isolate->heap();
+ HandleScope sc(isolate);
+
+ const auto required_alignment =
+ HeapObject::RequiredAlignment(*factory->heap_number_map());
+ const int maximum_misalignment =
+ Heap::GetMaximumFillToAlign(required_alignment);
+
+ for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
+ AlignNewSpace(required_alignment, offset);
+ Handle<Object> number_new = factory->NewNumber(1.000123);
+ CHECK(number_new->IsHeapNumber());
+ CHECK(Heap::InYoungGeneration(*number_new));
+ CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
+ required_alignment));
+
+ AlignOldSpace(required_alignment, offset);
+ Handle<Object> number_old = factory->NewNumber(1.000321, TENURED);
+ CHECK(number_old->IsHeapNumber());
+ CHECK(heap->InOldSpace(*number_old));
+ CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
+ required_alignment));
+ }
+}
+
+TEST(MutableHeapNumberAlignment) {
+ CcTest::InitializeVM();
+ Isolate* isolate = CcTest::i_isolate();
+ Factory* factory = isolate->factory();
+ Heap* heap = isolate->heap();
+ HandleScope sc(isolate);
+
+ const auto required_alignment =
+ HeapObject::RequiredAlignment(*factory->mutable_heap_number_map());
+ const int maximum_misalignment =
+ Heap::GetMaximumFillToAlign(required_alignment);
+
+ for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
+ AlignNewSpace(required_alignment, offset);
+ Handle<Object> number_new = factory->NewMutableHeapNumber(1.000123);
+ CHECK(number_new->IsMutableHeapNumber());
+ CHECK(Heap::InYoungGeneration(*number_new));
+ CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
+ required_alignment));
+
+ AlignOldSpace(required_alignment, offset);
+ Handle<Object> number_old =
+ factory->NewMutableHeapNumber(1.000321, TENURED);
+ CHECK(number_old->IsMutableHeapNumber());
+ CHECK(heap->InOldSpace(*number_old));
+ CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
+ required_alignment));
+ }
+}
TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
CcTest::InitializeVM();
@@ -1975,6 +2021,8 @@ TEST(GrowAndShrinkNewSpace) {
// Make sure we're in a consistent state to start out.
CcTest::CollectAllGarbage();
+ CcTest::CollectAllGarbage();
+ new_space->Shrink();
// Explicitly growing should double the space capacity.
size_t old_capacity, new_capacity;
@@ -2285,11 +2333,12 @@ TEST(InstanceOfStubWriteBarrier) {
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
+ const double kStepSizeInMs = 100;
while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
// Discard any pending GC requests otherwise we will get GC when we enter
// code below.
- marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- StepOrigin::kV8);
+ marking->V8Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+ StepOrigin::kV8);
}
CHECK(marking->IsMarking());
@@ -2352,7 +2401,7 @@ HEAP_TEST(Regress845060) {
// Preparation: create a string in new space.
Local<Value> str = CompileRun("var str = (new Array(10000)).join('x'); str");
- CHECK(Heap::InNewSpace(*v8::Utils::OpenHandle(*str)));
+ CHECK(Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
// Idle incremental marking sets the "kReduceMemoryFootprint" flag, which
// causes from_space to be unmapped after scavenging.
@@ -2363,7 +2412,7 @@ HEAP_TEST(Regress845060) {
// promoted to old space. Unmapping of from_space causes accesses to any
// stale raw pointers to crash.
CompileRun("while (%InNewSpace(str)) { str.split(''); }");
- CHECK(!Heap::InNewSpace(*v8::Utils::OpenHandle(*str)));
+ CHECK(!Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
}
TEST(IdleNotificationFinishMarking) {
@@ -2380,9 +2429,10 @@ TEST(IdleNotificationFinishMarking) {
CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
+ const double kStepSizeInMs = 100;
do {
- marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- StepOrigin::kV8);
+ marking->V8Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+ StepOrigin::kV8);
} while (
!CcTest::heap()->mark_compact_collector()->marking_worklist()->IsEmpty());
@@ -2431,7 +2481,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
i::Handle<JSReceiver> o =
v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
- CHECK(Heap::InNewSpace(*o));
+ CHECK(Heap::InYoungGeneration(*o));
}
@@ -2569,7 +2619,7 @@ TEST(OptimizedPretenuringNestedInObjectProperties) {
// Nested literal sites are only pretenured if the top level
// literal is pretenured
- CHECK(Heap::InNewSpace(*o));
+ CHECK(Heap::InYoungGeneration(*o));
}
TEST(OptimizedPretenuringMixedInObjectProperties) {
@@ -2897,7 +2947,7 @@ TEST(OptimizedAllocationArrayLiterals) {
i::Handle<JSObject> o = Handle<JSObject>::cast(
v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
- CHECK(Heap::InNewSpace(o->elements()));
+ CHECK(Heap::InYoungGeneration(o->elements()));
}
static int CountMapTransitions(i::Isolate* isolate, Map map) {
@@ -2952,6 +3002,13 @@ TEST(Regress1465) {
CHECK_EQ(1, transitions_after);
}
+static i::Handle<JSObject> GetByName(const char* name) {
+ return i::Handle<JSObject>::cast(
+ v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
+ CcTest::global()
+ ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
+ .ToLocalChecked())));
+}
#ifdef DEBUG
static void AddTransitions(int transitions_count) {
@@ -2964,15 +3021,6 @@ static void AddTransitions(int transitions_count) {
}
-static i::Handle<JSObject> GetByName(const char* name) {
- return i::Handle<JSObject>::cast(
- v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
- CcTest::global()
- ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
- .ToLocalChecked())));
-}
-
-
static void AddPropertyTo(
int gc_count, Handle<JSObject> object, const char* property_name) {
Isolate* isolate = CcTest::i_isolate();
@@ -2983,9 +3031,7 @@ static void AddPropertyTo(
FLAG_gc_global = true;
FLAG_retain_maps_for_n_gc = 0;
CcTest::heap()->set_allocation_timeout(gc_count);
- Object::SetProperty(isolate, object, prop_name, twenty_three,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, object, prop_name, twenty_three).Check();
}
@@ -3106,6 +3152,9 @@ TEST(ReleaseOverReservedPages) {
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
v8::HandleScope scope(CcTest::isolate());
+ // Ensure that the young generation is empty.
+ CcTest::CollectGarbage(NEW_SPACE);
+ CcTest::CollectGarbage(NEW_SPACE);
static const int number_of_test_pages = 20;
// Prepare many pages with low live-bytes count.
@@ -3139,7 +3188,7 @@ TEST(ReleaseOverReservedPages) {
// boots, but if the 20 small arrays don't fit on the first page then that's
// an indication that it is too small.
CcTest::CollectAllAvailableGarbage();
- CHECK_EQ(initial_page_count, old_space->CountTotalPages());
+ CHECK_GE(initial_page_count, old_space->CountTotalPages());
}
static int forced_gc_counter = 0;
@@ -3239,7 +3288,7 @@ static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
FeedbackVectorHelper helper(vector);
FeedbackSlot slot = helper.slot(slot_index);
FeedbackNexus nexus(vector, slot);
- CHECK(nexus.StateFromFeedback() == desired_state);
+ CHECK(nexus.ic_state() == desired_state);
}
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
@@ -3488,6 +3537,119 @@ UNINITIALIZED_TEST(ReleaseStackTraceData) {
isolate->Dispose();
}
+// TODO(mmarchini) also write tests for async/await and Promise.all
+void DetailedErrorStackTraceTest(const char* src,
+ std::function<void(Handle<FrameArray>)> test) {
+ FLAG_detailed_error_stack_trace = true;
+ CcTest::InitializeVM();
+ v8::HandleScope scope(CcTest::isolate());
+
+ v8::TryCatch try_catch(CcTest::isolate());
+ CompileRun(src);
+
+ CHECK(try_catch.HasCaught());
+ Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
+
+ Isolate* isolate = CcTest::i_isolate();
+ Handle<Name> key = isolate->factory()->stack_trace_symbol();
+
+ Handle<FrameArray> stack_trace(
+ FrameArray::cast(
+ Handle<JSArray>::cast(
+ Object::GetProperty(isolate, exception, key).ToHandleChecked())
+ ->elements()),
+ isolate);
+
+ test(stack_trace);
+}
+
+// * Test interpreted function error
+TEST(DetailedErrorStackTrace) {
+ static const char* source =
+ "function func1(arg1) { "
+ " let err = new Error(); "
+ " throw err; "
+ "} "
+ "function func2(arg1, arg2) { "
+ " func1(42); "
+ "} "
+ "class Foo {}; "
+ "function main(arg1, arg2) { "
+ " func2(arg1, false); "
+ "} "
+ "var foo = new Foo(); "
+ "main(foo); ";
+
+ DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
+ FixedArray foo_parameters = stack_trace->Parameters(0);
+ CHECK_EQ(foo_parameters->length(), 1);
+ CHECK(foo_parameters->get(0)->IsSmi());
+ CHECK_EQ(Smi::ToInt(foo_parameters->get(0)), 42);
+
+ FixedArray bar_parameters = stack_trace->Parameters(1);
+ CHECK_EQ(bar_parameters->length(), 2);
+ CHECK(bar_parameters->get(0)->IsJSObject());
+ CHECK(bar_parameters->get(1)->IsBoolean());
+ Handle<Object> foo = Handle<Object>::cast(GetByName("foo"));
+ CHECK_EQ(bar_parameters->get(0), *foo);
+ CHECK(!bar_parameters->get(1)->BooleanValue(CcTest::i_isolate()));
+
+ FixedArray main_parameters = stack_trace->Parameters(2);
+ CHECK_EQ(main_parameters->length(), 2);
+ CHECK(main_parameters->get(0)->IsJSObject());
+ CHECK(main_parameters->get(1)->IsUndefined());
+ CHECK_EQ(main_parameters->get(0), *foo);
+ });
+}
+
+// * Test optimized function with inline frame error
+TEST(DetailedErrorStackTraceInline) {
+ FLAG_allow_natives_syntax = true;
+ static const char* source =
+ "function add(x) { "
+ " if (x == 42) "
+ " throw new Error(); "
+ " return x + x; "
+ "} "
+ "add(0); "
+ "add(1); "
+ "function foo(x) { "
+ " return add(x + 1) "
+ "} "
+ "foo(40); "
+ "%OptimizeFunctionOnNextCall(foo); "
+ "foo(41); ";
+
+ DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
+ FixedArray parameters_add = stack_trace->Parameters(0);
+ CHECK_EQ(parameters_add->length(), 1);
+ CHECK(parameters_add->get(0)->IsSmi());
+ CHECK_EQ(Smi::ToInt(parameters_add->get(0)), 42);
+
+ FixedArray parameters_foo = stack_trace->Parameters(1);
+ CHECK_EQ(parameters_foo->length(), 1);
+ CHECK(parameters_foo->get(0)->IsSmi());
+ CHECK_EQ(Smi::ToInt(parameters_foo->get(0)), 41);
+ });
+}
+
+// * Test builtin exit error
+TEST(DetailedErrorStackTraceBuiltinExit) {
+ static const char* source =
+ "function test(arg1) { "
+ " (new Number()).toFixed(arg1); "
+ "} "
+ "test(9999); ";
+
+ DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
+ FixedArray parameters = stack_trace->Parameters(0);
+
+ CHECK_EQ(parameters->length(), 2);
+ CHECK(parameters->get(0)->IsSmi());
+ CHECK_EQ(Smi::ToInt(parameters->get(0)), 9999);
+ });
+}
+
TEST(Regress169928) {
FLAG_allow_natives_syntax = true;
#ifndef V8_LITE_MODE
@@ -3595,8 +3757,6 @@ TEST(LargeObjectSlotRecording) {
// Start incremental marking to activate the write barrier.
heap::SimulateIncrementalMarking(heap, false);
- heap->incremental_marking()->AdvanceIncrementalMarking(
- 10000000, IncrementalMarking::NO_GC_VIA_STACK_GUARD, StepOrigin::kV8);
// Create references from the large object to the object on the evacuation
// candidate.
@@ -3606,6 +3766,8 @@ TEST(LargeObjectSlotRecording) {
CHECK(lo->get(i) == old_location);
}
+ heap::SimulateIncrementalMarking(heap, true);
+
// Move the evacuation candidate object.
CcTest::CollectAllGarbage();
@@ -3659,9 +3821,7 @@ TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
CcTest::heap()->StartIncrementalMarking(
i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
}
- // This big step should be sufficient to mark the whole array.
- marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- StepOrigin::kV8);
+ heap::SimulateIncrementalMarking(CcTest::heap());
CHECK(marking->IsComplete() ||
marking->IsReadyToOverApproximateWeakClosure());
}
@@ -4069,10 +4229,10 @@ TEST(NewSpaceObjectsInOptimizedCode) {
->Get(context.local(), v8_str("foo"))
.ToLocalChecked())));
- CHECK(Heap::InNewSpace(*foo));
+ CHECK(Heap::InYoungGeneration(*foo));
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
- CHECK(!Heap::InNewSpace(*foo));
+ CHECK(!Heap::InYoungGeneration(*foo));
#ifdef VERIFY_HEAP
CcTest::heap()->Verify();
#endif
@@ -4519,7 +4679,7 @@ void CheckIC(Handle<JSFunction> function, int slot_index,
FeedbackVector vector = function->feedback_vector();
FeedbackSlot slot(slot_index);
FeedbackNexus nexus(vector, slot);
- CHECK_EQ(nexus.StateFromFeedback(), state);
+ CHECK_EQ(nexus.ic_state(), state);
}
TEST(MonomorphicStaysMonomorphicAfterGC) {
@@ -4735,8 +4895,8 @@ TEST(Regress507979) {
Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
- CHECK(Heap::InNewSpace(*o1));
- CHECK(Heap::InNewSpace(*o2));
+ CHECK(Heap::InYoungGeneration(*o1));
+ CHECK(Heap::InYoungGeneration(*o2));
HeapIterator it(isolate->heap(), i::HeapIterator::kFilterUnreachable);
@@ -4826,12 +4986,7 @@ TEST(Regress3631) {
Handle<JSReceiver> obj =
v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
Handle<JSWeakCollection> weak_map(JSWeakCollection::cast(*obj), isolate);
- HeapObject weak_map_table = HeapObject::cast(weak_map->table());
- IncrementalMarking::MarkingState* marking_state = marking->marking_state();
- while (!marking_state->IsBlack(weak_map_table) && !marking->IsStopped()) {
- marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- StepOrigin::kV8);
- }
+ SimulateIncrementalMarking(heap);
// Stash the backing store in a handle.
Handle<Object> save(weak_map->table(), isolate);
// The following line will update the backing store.
@@ -4855,8 +5010,7 @@ TEST(Regress442710) {
Handle<JSArray> array = factory->NewJSArray(2);
Handle<String> name = factory->InternalizeUtf8String("testArray");
- Object::SetProperty(isolate, global, name, array, LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, global, name, array).Check();
CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
CcTest::CollectGarbage(OLD_SPACE);
}
@@ -4991,7 +5145,7 @@ void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
Handle<FixedArray> array = factory->NewFixedArray(
elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
- CHECK((space == NEW_SPACE) == Heap::InNewSpace(*array));
+ CHECK((space == NEW_SPACE) == Heap::InYoungGeneration(*array));
CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
}
@@ -5213,7 +5367,8 @@ AllocationResult HeapTester::AllocateByteArrayForTest(Heap* heap, int length,
AllocationSpace space = heap->SelectSpace(pretenure);
HeapObject result;
{
- AllocationResult allocation = heap->AllocateRaw(size, space);
+ AllocationResult allocation =
+ heap->AllocateRaw(size, Heap::SelectType(space));
if (!allocation.To(&result)) return allocation;
}
@@ -5239,7 +5394,7 @@ HEAP_TEST(Regress587004) {
Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
CHECK(heap->old_space()->Contains(*array));
Handle<Object> number = factory->NewHeapNumber(1.0);
- CHECK(Heap::InNewSpace(*number));
+ CHECK(Heap::InYoungGeneration(*number));
for (int i = 0; i < N; i++) {
array->set(i, *number);
}
@@ -5349,7 +5504,8 @@ TEST(Regress598319) {
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
- const int kNumberOfObjects = kMaxRegularHeapObjectSize / kTaggedSize;
+ // The size of the array should be larger than kProgressBarScanningChunk.
+ const int kNumberOfObjects = Max(FixedArray::kMaxRegularLength + 1, 128 * KB);
struct Arr {
Arr(Isolate* isolate, int number_of_objects) {
@@ -5374,7 +5530,7 @@ TEST(Regress598319) {
CHECK_EQ(arr.get()->length(), kNumberOfObjects);
CHECK(heap->lo_space()->Contains(arr.get()));
- LargePage* page = heap->lo_space()->FindPage(arr.get()->address());
+ LargePage* page = LargePage::FromHeapObject(arr.get());
CHECK_NOT_NULL(page);
// GC to cleanup state
@@ -5409,11 +5565,13 @@ TEST(Regress598319) {
// Now we search for a state where we are in incremental marking and have
// only partially marked the large object.
+ const double kSmallStepSizeInMs = 0.1;
while (!marking->IsComplete()) {
- marking->Step(i::KB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- StepOrigin::kV8);
- if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->progress_bar() > 0) {
- CHECK_NE(page->progress_bar(), arr.get()->Size());
+ marking->V8Step(kSmallStepSizeInMs,
+ i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+ StepOrigin::kV8);
+ if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->ProgressBar() > 0) {
+ CHECK_NE(page->ProgressBar(), arr.get()->Size());
{
// Shift by 1, effectively moving one white object across the progress
// bar, meaning that we will miss marking it.
@@ -5427,9 +5585,11 @@ TEST(Regress598319) {
}
// Finish marking with bigger steps to speed up test.
+ const double kLargeStepSizeInMs = 1000;
while (!marking->IsComplete()) {
- marking->Step(10 * i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- StepOrigin::kV8);
+ marking->V8Step(kLargeStepSizeInMs,
+ i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+ StepOrigin::kV8);
if (marking->IsReadyToOverApproximateWeakClosure()) {
marking->FinalizeIncrementally();
}
@@ -5509,9 +5669,10 @@ TEST(Regress615489) {
v8::HandleScope inner(CcTest::isolate());
isolate->factory()->NewFixedArray(500, TENURED)->Size();
}
+ const double kStepSizeInMs = 100;
while (!marking->IsComplete()) {
- marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- StepOrigin::kV8);
+ marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+ StepOrigin::kV8);
if (marking->IsReadyToOverApproximateWeakClosure()) {
marking->FinalizeIncrementally();
}
@@ -5568,10 +5729,11 @@ TEST(Regress631969) {
CcTest::CollectGarbage(NEW_SPACE);
// Finish incremental marking.
+ const double kStepSizeInMs = 100;
IncrementalMarking* marking = heap->incremental_marking();
while (!marking->IsComplete()) {
- marking->Step(MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
- StepOrigin::kV8);
+ marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+ StepOrigin::kV8);
if (marking->IsReadyToOverApproximateWeakClosure()) {
marking->FinalizeIncrementally();
}
@@ -5657,7 +5819,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
Address start_address = array->address();
Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address);
- IncrementalMarking::MarkingState* marking_state = marking->marking_state();
+ IncrementalMarking::NonAtomicMarkingState* marking_state =
+ marking->non_atomic_marking_state();
CHECK(marking_state->IsBlack(*array));
CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address),
@@ -5724,7 +5887,8 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
Address start_address = array->address();
Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address);
- IncrementalMarking::MarkingState* marking_state = marking->marking_state();
+ IncrementalMarking::NonAtomicMarkingState* marking_state =
+ marking->non_atomic_marking_state();
CHECK(marking_state->IsBlack(*array));
CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
@@ -5786,7 +5950,8 @@ TEST(YoungGenerationLargeObjectAllocationScavenge) {
Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
- CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
+ CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
+ CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
array_small->set(0, *number);
@@ -5797,7 +5962,7 @@ TEST(YoungGenerationLargeObjectAllocationScavenge) {
// generation large object space.
chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(LO_SPACE, chunk->owner()->identity());
- CHECK(!chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
+ CHECK(!chunk->InYoungGeneration());
CcTest::CollectAllAvailableGarbage();
}
@@ -5815,7 +5980,8 @@ TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
- CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
+ CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
+ CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
array_small->set(0, *number);
@@ -5826,7 +5992,7 @@ TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
// large object space.
chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(LO_SPACE, chunk->owner()->identity());
- CHECK(!chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
+ CHECK(!chunk->InYoungGeneration());
CcTest::CollectAllAvailableGarbage();
}
@@ -5846,7 +6012,7 @@ TEST(YoungGenerationLargeObjectAllocationReleaseScavenger) {
Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(20000);
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
- CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
+ CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
}
}
@@ -5975,7 +6141,7 @@ HEAP_TEST(Regress670675) {
if (marking->IsStopped()) {
marking->Start(i::GarbageCollectionReason::kTesting);
}
- size_t array_length = Page::kPageSize / kTaggedSize + 100;
+ size_t array_length = 128 * KB;
size_t n = heap->OldGenerationSpaceAvailable() / array_length;
for (size_t i = 0; i < n + 40; i++) {
{
@@ -5985,7 +6151,7 @@ HEAP_TEST(Regress670675) {
}
if (marking->IsStopped()) break;
double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
- marking->AdvanceIncrementalMarking(
+ marking->AdvanceWithDeadline(
deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
}
DCHECK(marking->IsStopped());
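
AdvanceIncrementalMarking is renamed AdvanceWithDeadline; the deadline-driven contract is unchanged. A minimal sketch of the new call, simplified from the test's allocation loop:

    // Advance marking in ~1ms slices until the collector stops on its own.
    while (!marking->IsStopped()) {
      double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
      marking->AdvanceWithDeadline(
          deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
    }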
@@ -6159,7 +6325,7 @@ UNINITIALIZED_TEST(ReinitializeStringHashSeed) {
{
v8::Isolate::Scope isolate_scope(isolate);
CHECK_EQ(static_cast<uint64_t>(1337 * i),
- reinterpret_cast<i::Isolate*>(isolate)->heap()->HashSeed());
+ HashSeed(reinterpret_cast<i::Isolate*>(isolate)));
v8::HandleScope handle_scope(isolate);
v8::Local<v8::Context> context = v8::Context::New(isolate);
CHECK(!context.IsEmpty());
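
The hash seed accessor moves off Heap: instead of isolate->heap()->HashSeed(), callers use a free HashSeed(isolate) function. A one-line sketch of the migrated call site (expected_seed is illustrative, standing in for the test's 1337 * i):

    uint64_t seed = HashSeed(reinterpret_cast<i::Isolate*>(isolate));
    CHECK_EQ(expected_seed, seed);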
@@ -6258,7 +6424,7 @@ HEAP_TEST(Regress779503) {
// The byte array filled with kHeapObjectTag ensures that we cannot read
// from the slot again and interpret it as a heap value. Doing so will crash.
Handle<ByteArray> byte_array = isolate->factory()->NewByteArray(kArraySize);
- CHECK(Heap::InNewSpace(*byte_array));
+ CHECK(Heap::InYoungGeneration(*byte_array));
for (int i = 0; i < kArraySize; i++) {
byte_array->set(i, kHeapObjectTag);
}
@@ -6268,7 +6434,7 @@ HEAP_TEST(Regress779503) {
// The FixedArray in old space serves as space for slots.
Handle<FixedArray> fixed_array =
isolate->factory()->NewFixedArray(kArraySize, TENURED);
- CHECK(!Heap::InNewSpace(*fixed_array));
+ CHECK(!Heap::InYoungGeneration(*fixed_array));
for (int i = 0; i < kArraySize; i++) {
fixed_array->set(i, *byte_array);
}
@@ -6277,7 +6443,7 @@ HEAP_TEST(Regress779503) {
// currently scavenging.
heap->delay_sweeper_tasks_for_testing_ = true;
CcTest::CollectGarbage(OLD_SPACE);
- CHECK(Heap::InNewSpace(*byte_array));
+ CHECK(Heap::InYoungGeneration(*byte_array));
}
// Scavenging and sweeping the same page will crash as slots will be
// overwritten.
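
Here and throughout the rest of the patch, Heap::InNewSpace gives way to Heap::InYoungGeneration, a predicate that also covers young large objects rather than only the semispace-backed new space. A hedged sketch (the NEW_LO_SPACE claim assumes young large-object allocation is enabled, as in the tests above):

    Handle<FixedArray> small = isolate->factory()->NewFixedArray(16);
    CHECK(Heap::InYoungGeneration(*small));  // regular new-space object
    Handle<FixedArray> large = isolate->factory()->NewFixedArray(200000);
    CHECK(Heap::InYoungGeneration(*large));  // NEW_LO_SPACE object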
@@ -6291,6 +6457,7 @@ struct OutOfMemoryState {
size_t old_generation_capacity_at_oom;
size_t memory_allocator_size_at_oom;
size_t new_space_capacity_at_oom;
+ size_t new_lo_space_size_at_oom;
size_t current_heap_limit;
size_t initial_heap_limit;
};
@@ -6303,6 +6470,7 @@ size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
state->new_space_capacity_at_oom = heap->new_space()->Capacity();
+ state->new_lo_space_size_at_oom = heap->new_lo_space()->Size();
state->current_heap_limit = current_heap_limit;
state->initial_heap_limit = initial_heap_limit;
return initial_heap_limit + 100 * MB;
@@ -6378,11 +6546,14 @@ UNINITIALIZED_TEST(OutOfMemoryLargeObjects) {
}
CHECK_LE(state.old_generation_capacity_at_oom, kOldGenerationLimit);
CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
+ state.new_space_capacity_at_oom +
+ state.new_lo_space_size_at_oom +
FixedArray::SizeFor(kFixedArrayLength));
CHECK_LE(
state.memory_allocator_size_at_oom,
MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
- 2 * state.new_space_capacity_at_oom));
+ 2 * state.new_space_capacity_at_oom +
+ state.new_lo_space_size_at_oom));
reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
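
The out-of-memory bookkeeping now samples the young large-object space as well, and both bounds fold it in. The strengthened invariant, restated as a standalone check with the field names from OutOfMemoryState:

    // The configured limit must be covered by old-gen capacity plus the
    // young-generation spaces that can still absorb the failing allocation.
    size_t covered = state.old_generation_capacity_at_oom +
                     state.new_space_capacity_at_oom +
                     state.new_lo_space_size_at_oom +
                     FixedArray::SizeFor(kFixedArrayLength);
    CHECK_LE(kOldGenerationLimit, covered);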
@@ -6466,7 +6637,7 @@ TEST(Regress8617) {
Handle<Object> foo =
v8::Utils::OpenHandle(*CompileRun("function foo() { return 42; };"
"foo;"));
- if (heap->InNewSpace(*foo)) {
+ if (heap->InYoungGeneration(*foo)) {
CcTest::CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
}
diff --git a/deps/v8/test/cctest/heap/test-incremental-marking.cc b/deps/v8/test/cctest/heap/test-incremental-marking.cc
index 8213ea6080..d90c2c2139 100644
--- a/deps/v8/test/cctest/heap/test-incremental-marking.cc
+++ b/deps/v8/test/cctest/heap/test-incremental-marking.cc
@@ -71,14 +71,14 @@ class MockPlatform : public TestPlatform {
void PostDelayedTask(std::unique_ptr<Task> task,
double delay_in_seconds) override {
- UNREACHABLE();
- };
+ task_ = std::move(task);
+ }
void PostIdleTask(std::unique_ptr<IdleTask> task) override {
UNREACHABLE();
}
- bool IdleTasksEnabled() override { return false; };
+ bool IdleTasksEnabled() override { return false; }
bool PendingTask() { return task_ != nullptr; }
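
MockPlatform stops rejecting delayed tasks and instead captures the most recent one, which lets tests drain it manually. A sketch of the relevant surface after the change; RunPendingTask is an assumed helper, not shown in the hunk:

    class MockPlatform : public TestPlatform {
     public:
      void PostDelayedTask(std::unique_ptr<v8::Task> task,
                           double /* delay_in_seconds */) override {
        task_ = std::move(task);  // keep only the latest task
      }
      bool IdleTasksEnabled() override { return false; }
      bool PendingTask() { return task_ != nullptr; }
      void RunPendingTask() {  // assumed helper for draining the task
        std::unique_ptr<v8::Task> task = std::move(task_);
        task->Run();
      }
     private:
      std::unique_ptr<v8::Task> task_;
    };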
diff --git a/deps/v8/test/cctest/heap/test-invalidated-slots.cc b/deps/v8/test/cctest/heap/test-invalidated-slots.cc
index 897f4d0242..9eeda75cc5 100644
--- a/deps/v8/test/cctest/heap/test-invalidated-slots.cc
+++ b/deps/v8/test/cctest/heap/test-invalidated-slots.cc
@@ -330,25 +330,16 @@ HEAP_TEST(InvalidatedSlotsFastToSlow) {
// Start incremental marking.
heap::SimulateIncrementalMarking(heap);
// Set properties to point to the evacuation candidate.
- Object::SetProperty(isolate, obj, prop_name1, evacuated,
- LanguageMode::kSloppy)
- .Check();
- Object::SetProperty(isolate, obj, prop_name2, evacuated,
- LanguageMode::kSloppy)
- .Check();
- Object::SetProperty(isolate, obj, prop_name3, evacuated,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, obj, prop_name1, evacuated).Check();
+ Object::SetProperty(isolate, obj, prop_name2, evacuated).Check();
+ Object::SetProperty(isolate, obj, prop_name3, evacuated).Check();
{
HandleScope scope(isolate);
Handle<HeapObject> dead = factory->NewFixedArray(1);
- Object::SetProperty(isolate, obj, prop_name1, dead, LanguageMode::kSloppy)
- .Check();
- Object::SetProperty(isolate, obj, prop_name2, dead, LanguageMode::kSloppy)
- .Check();
- Object::SetProperty(isolate, obj, prop_name3, dead, LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, obj, prop_name1, dead).Check();
+ Object::SetProperty(isolate, obj, prop_name2, dead).Check();
+ Object::SetProperty(isolate, obj, prop_name3, dead).Check();
Handle<Map> map(obj->map(), isolate);
Handle<Map> normalized_map =
Map::Normalize(isolate, map, CLEAR_INOBJECT_PROPERTIES, "testing");
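
Object::SetProperty drops its LanguageMode parameter, so the formerly five-argument sloppy-mode stores shrink to four arguments and fit on one line. The migration in miniature:

    // Before: Object::SetProperty(isolate, obj, name, value,
    //                             LanguageMode::kSloppy).Check();
    Object::SetProperty(isolate, obj, name, value).Check();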
diff --git a/deps/v8/test/cctest/heap/test-mark-compact.cc b/deps/v8/test/cctest/heap/test-mark-compact.cc
index 4f141af7a5..e85c73405f 100644
--- a/deps/v8/test/cctest/heap/test-mark-compact.cc
+++ b/deps/v8/test/cctest/heap/test-mark-compact.cc
@@ -101,7 +101,7 @@ HEAP_TEST(NoPromotion) {
AllocationResult HeapTester::AllocateMapForTest(Isolate* isolate) {
Heap* heap = isolate->heap();
HeapObject obj;
- AllocationResult alloc = heap->AllocateRaw(Map::kSize, MAP_SPACE);
+ AllocationResult alloc = heap->AllocateRaw(Map::kSize, AllocationType::kMap);
if (!alloc.To(&obj)) return alloc;
obj->set_map_after_allocation(ReadOnlyRoots(heap).meta_map(),
SKIP_WRITE_BARRIER);
@@ -119,7 +119,7 @@ AllocationResult HeapTester::AllocateFixedArrayForTest(
AllocationSpace space = heap->SelectSpace(pretenure);
HeapObject obj;
{
- AllocationResult result = heap->AllocateRaw(size, space);
+ AllocationResult result = heap->AllocateRaw(size, Heap::SelectType(space));
if (!result.To(&obj)) return result;
}
obj->set_map_after_allocation(ReadOnlyRoots(heap).fixed_array_map(),
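
Raw allocation is re-keyed from target space to AllocationType: call sites either name the type directly or translate a previously selected space with Heap::SelectType. Both patterns from these hunks, side by side:

    // Map allocation names its type directly.
    AllocationResult map_alloc =
        heap->AllocateRaw(Map::kSize, AllocationType::kMap);
    // A space chosen from a pretenure hint is converted to a type.
    AllocationSpace space = heap->SelectSpace(pretenure);
    AllocationResult array_alloc =
        heap->AllocateRaw(size, Heap::SelectType(space));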
@@ -165,9 +165,7 @@ HEAP_TEST(MarkCompactCollector) {
// Allocate garbage.
Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
Handle<JSFunction> function = factory->NewFunctionForTest(func_name);
- Object::SetProperty(isolate, global, func_name, function,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, global, func_name, function).Check();
factory->NewJSObject(function);
}
@@ -184,13 +182,10 @@ HEAP_TEST(MarkCompactCollector) {
Handle<JSObject> obj = factory->NewJSObject(function);
Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
- Object::SetProperty(isolate, global, obj_name, obj, LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, global, obj_name, obj).Check();
Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
- Object::SetProperty(isolate, obj, prop_name, twenty_three,
- LanguageMode::kSloppy)
- .Check();
+ Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
}
CcTest::CollectGarbage(OLD_SPACE);
diff --git a/deps/v8/test/cctest/heap/test-page-promotion.cc b/deps/v8/test/cctest/heap/test-page-promotion.cc
index 2db538d484..b68484e3c0 100644
--- a/deps/v8/test/cctest/heap/test-page-promotion.cc
+++ b/deps/v8/test/cctest/heap/test-page-promotion.cc
@@ -67,6 +67,11 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) {
v8::Context::New(isolate)->Enter();
Heap* heap = i_isolate->heap();
+ // Ensure that the new space is empty so that the page to be promoted
+ // does not contain the age mark.
+ heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
+ heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
+
std::vector<Handle<FixedArray>> handles;
heap::SimulateFullSpace(heap->new_space(), &handles);
heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
diff --git a/deps/v8/test/cctest/heap/test-spaces.cc b/deps/v8/test/cctest/heap/test-spaces.cc
index 8219c1487d..337447dcea 100644
--- a/deps/v8/test/cctest/heap/test-spaces.cc
+++ b/deps/v8/test/cctest/heap/test-spaces.cc
@@ -45,18 +45,24 @@ namespace heap {
// Temporarily sets a given allocator in an isolate.
class TestMemoryAllocatorScope {
public:
- TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
- : isolate_(isolate), old_allocator_(isolate->heap()->memory_allocator()) {
- isolate->heap()->memory_allocator_ = allocator;
+ TestMemoryAllocatorScope(Isolate* isolate, size_t max_capacity,
+ size_t code_range_size)
+ : isolate_(isolate),
+ old_allocator_(std::move(isolate->heap()->memory_allocator_)) {
+ isolate->heap()->memory_allocator_.reset(
+ new MemoryAllocator(isolate, max_capacity, code_range_size));
}
+ MemoryAllocator* allocator() { return isolate_->heap()->memory_allocator(); }
+
~TestMemoryAllocatorScope() {
- isolate_->heap()->memory_allocator_ = old_allocator_;
+ isolate_->heap()->memory_allocator()->TearDown();
+ isolate_->heap()->memory_allocator_.swap(old_allocator_);
}
private:
Isolate* isolate_;
- MemoryAllocator* old_allocator_;
+ std::unique_ptr<MemoryAllocator> old_allocator_;
DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
};
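
TestMemoryAllocatorScope now builds and owns the temporary MemoryAllocator through a unique_ptr, calling TearDown and swapping the old allocator back in its destructor. That removes the new/TearDown/delete boilerplate from every test below; a sketch of the new usage:

    {
      TestMemoryAllocatorScope test_allocator_scope(
          isolate, heap->MaxReserved(), 0 /* code_range_size */);
      MemoryAllocator* memory_allocator = test_allocator_scope.allocator();
      // ... exercise memory_allocator ...
    }  // destructor: TearDown() the temporary, restore the old allocator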
@@ -89,41 +95,37 @@ static void VerifyMemoryChunk(Isolate* isolate, Heap* heap,
v8::PageAllocator* code_page_allocator,
size_t reserve_area_size, size_t commit_area_size,
Executability executable, Space* space) {
- MemoryAllocator* memory_allocator =
- new MemoryAllocator(isolate, heap->MaxReserved(), 0);
- {
- TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
- TestCodePageAllocatorScope test_code_page_allocator_scope(
- isolate, code_page_allocator);
-
- v8::PageAllocator* page_allocator =
- memory_allocator->page_allocator(executable);
-
- size_t allocatable_memory_area_offset =
- MemoryChunkLayout::ObjectStartOffsetInMemoryChunk(space->identity());
- size_t guard_size =
- (executable == EXECUTABLE) ? MemoryChunkLayout::CodePageGuardSize() : 0;
-
- MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(
- reserve_area_size, commit_area_size, executable, space);
- size_t reserved_size =
- ((executable == EXECUTABLE))
- ? allocatable_memory_area_offset +
- RoundUp(reserve_area_size, page_allocator->CommitPageSize()) +
- guard_size
- : RoundUp(allocatable_memory_area_offset + reserve_area_size,
- page_allocator->CommitPageSize());
- CHECK(memory_chunk->size() == reserved_size);
- CHECK(memory_chunk->area_start() <
- memory_chunk->address() + memory_chunk->size());
- CHECK(memory_chunk->area_end() <=
- memory_chunk->address() + memory_chunk->size());
- CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);
-
- memory_allocator->Free<MemoryAllocator::kFull>(memory_chunk);
- }
- memory_allocator->TearDown();
- delete memory_allocator;
+ TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
+ 0);
+ MemoryAllocator* memory_allocator = test_allocator_scope.allocator();
+ TestCodePageAllocatorScope test_code_page_allocator_scope(
+ isolate, code_page_allocator);
+
+ v8::PageAllocator* page_allocator =
+ memory_allocator->page_allocator(executable);
+
+ size_t allocatable_memory_area_offset =
+ MemoryChunkLayout::ObjectStartOffsetInMemoryChunk(space->identity());
+ size_t guard_size =
+ (executable == EXECUTABLE) ? MemoryChunkLayout::CodePageGuardSize() : 0;
+
+ MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(
+ reserve_area_size, commit_area_size, executable, space);
+ size_t reserved_size =
+ (executable == EXECUTABLE)
+ ? allocatable_memory_area_offset +
+ RoundUp(reserve_area_size, page_allocator->CommitPageSize()) +
+ guard_size
+ : RoundUp(allocatable_memory_area_offset + reserve_area_size,
+ page_allocator->CommitPageSize());
+ CHECK(memory_chunk->size() == reserved_size);
+ CHECK(memory_chunk->area_start() <
+ memory_chunk->address() + memory_chunk->size());
+ CHECK(memory_chunk->area_end() <=
+ memory_chunk->address() + memory_chunk->size());
+ CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);
+
+ memory_allocator->Free<MemoryAllocator::kFull>(memory_chunk);
}
static unsigned int PseudorandomAreaSize() {
@@ -170,48 +172,43 @@ TEST(MemoryAllocator) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
- MemoryAllocator* memory_allocator =
- new MemoryAllocator(isolate, heap->MaxReserved(), 0);
- CHECK_NOT_NULL(memory_allocator);
- TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
+ TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
+ 0);
+ MemoryAllocator* memory_allocator = test_allocator_scope.allocator();
- {
- int total_pages = 0;
- OldSpace faked_space(heap);
- CHECK(!faked_space.first_page());
- CHECK(!faked_space.last_page());
- Page* first_page = memory_allocator->AllocatePage(
- faked_space.AreaSize(), static_cast<PagedSpace*>(&faked_space),
- NOT_EXECUTABLE);
-
- faked_space.memory_chunk_list().PushBack(first_page);
- CHECK(first_page->next_page() == nullptr);
- total_pages++;
-
- for (Page* p = first_page; p != nullptr; p = p->next_page()) {
- CHECK(p->owner() == &faked_space);
- }
+ int total_pages = 0;
+ OldSpace faked_space(heap);
+ CHECK(!faked_space.first_page());
+ CHECK(!faked_space.last_page());
+ Page* first_page = memory_allocator->AllocatePage(
+ faked_space.AreaSize(), static_cast<PagedSpace*>(&faked_space),
+ NOT_EXECUTABLE);
- // Again, we should get n or n - 1 pages.
- Page* other = memory_allocator->AllocatePage(
- faked_space.AreaSize(), static_cast<PagedSpace*>(&faked_space),
- NOT_EXECUTABLE);
- total_pages++;
- faked_space.memory_chunk_list().PushBack(other);
- int page_count = 0;
- for (Page* p = first_page; p != nullptr; p = p->next_page()) {
- CHECK(p->owner() == &faked_space);
- page_count++;
- }
- CHECK(total_pages == page_count);
+ faked_space.memory_chunk_list().PushBack(first_page);
+ CHECK(first_page->next_page() == nullptr);
+ total_pages++;
- Page* second_page = first_page->next_page();
- CHECK_NOT_NULL(second_page);
+ for (Page* p = first_page; p != nullptr; p = p->next_page()) {
+ CHECK(p->owner() == &faked_space);
+ }
- // OldSpace's destructor will tear down the space and free up all pages.
+ // Again, we should get n or n - 1 pages.
+ Page* other = memory_allocator->AllocatePage(
+ faked_space.AreaSize(), static_cast<PagedSpace*>(&faked_space),
+ NOT_EXECUTABLE);
+ total_pages++;
+ faked_space.memory_chunk_list().PushBack(other);
+ int page_count = 0;
+ for (Page* p = first_page; p != nullptr; p = p->next_page()) {
+ CHECK(p->owner() == &faked_space);
+ page_count++;
}
- memory_allocator->TearDown();
- delete memory_allocator;
+ CHECK(total_pages == page_count);
+
+ Page* second_page = first_page->next_page();
+ CHECK_NOT_NULL(second_page);
+
+ // OldSpace's destructor will tear down the space and free up all pages.
}
TEST(ComputeDiscardMemoryAreas) {
@@ -256,9 +253,9 @@ TEST(ComputeDiscardMemoryAreas) {
TEST(NewSpace) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
- MemoryAllocator* memory_allocator =
- new MemoryAllocator(isolate, heap->MaxReserved(), 0);
- TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
+ TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
+ 0);
+ MemoryAllocator* memory_allocator = test_allocator_scope.allocator();
NewSpace new_space(heap, memory_allocator->data_page_allocator(),
CcTest::heap()->InitialSemiSpaceSize(),
@@ -273,17 +270,14 @@ TEST(NewSpace) {
new_space.TearDown();
memory_allocator->unmapper()->EnsureUnmappingCompleted();
- memory_allocator->TearDown();
- delete memory_allocator;
}
TEST(OldSpace) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
- MemoryAllocator* memory_allocator =
- new MemoryAllocator(isolate, heap->MaxReserved(), 0);
- TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
+ TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
+ 0);
OldSpace* s = new OldSpace(heap);
CHECK_NOT_NULL(s);
@@ -293,8 +287,6 @@ TEST(OldSpace) {
}
delete s;
- memory_allocator->TearDown();
- delete memory_allocator;
}
TEST(LargeObjectSpace) {
@@ -315,8 +307,6 @@ TEST(LargeObjectSpace) {
CHECK(lo->Contains(HeapObject::cast(obj)));
- CHECK(lo->FindObject(ho->address()) == obj);
-
CHECK(lo->Contains(ho));
while (true) {
@@ -396,7 +386,7 @@ TEST(SizeOfInitialHeap) {
#endif // DEBUG
static HeapObject AllocateUnaligned(NewSpace* space, int size) {
- AllocationResult allocation = space->AllocateRawUnaligned(size);
+ AllocationResult allocation = space->AllocateRaw(size, kWordAligned);
CHECK(!allocation.IsRetry());
HeapObject filler;
CHECK(allocation.To(&filler));
@@ -406,7 +396,7 @@ static HeapObject AllocateUnaligned(NewSpace* space, int size) {
}
static HeapObject AllocateUnaligned(PagedSpace* space, int size) {
- AllocationResult allocation = space->AllocateRaw(size, kDoubleUnaligned);
+ AllocationResult allocation = space->AllocateRaw(size, kWordAligned);
CHECK(!allocation.IsRetry());
HeapObject filler;
CHECK(allocation.To(&filler));
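
Both AllocateUnaligned helpers converge on the generic AllocateRaw(size, kWordAligned) entry point, replacing the space-specific AllocateRawUnaligned and the misleading kDoubleUnaligned hint. A sketch of the now-shared shape; the CreateFillerObjectAt tail is assumed from the helper's usual body, not shown in the hunks:

    template <typename SpaceT>
    HeapObject AllocateFillerIn(SpaceT* space, int size) {
      AllocationResult allocation = space->AllocateRaw(size, kWordAligned);
      CHECK(!allocation.IsRetry());
      HeapObject filler;
      CHECK(allocation.To(&filler));
      // Assumed: turn the raw bytes into a valid filler object.
      space->heap()->CreateFillerObjectAt(filler->address(), size,
                                          ClearRecordedSlots::kNo);
      return filler;
    }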
diff --git a/deps/v8/test/cctest/heap/test-weak-references.cc b/deps/v8/test/cctest/heap/test-weak-references.cc
index 8a2ad3c184..bcf8622d31 100644
--- a/deps/v8/test/cctest/heap/test-weak-references.cc
+++ b/deps/v8/test/cctest/heap/test-weak-references.cc
@@ -5,6 +5,7 @@
#include "src/api-inl.h"
#include "src/assembler-inl.h"
#include "src/heap/factory.h"
+#include "src/heap/heap-inl.h"
#include "src/isolate.h"
#include "src/objects/smi.h"
#include "test/cctest/cctest.h"
@@ -41,12 +42,12 @@ TEST(WeakReferencesBasic) {
Handle<FeedbackVector> fv =
CreateFeedbackVectorForTest(CcTest::isolate(), factory);
- CHECK(Heap::InNewSpace(*fv));
+ CHECK(Heap::InYoungGeneration(*fv));
MaybeObject code_object = fv->optimized_code_weak_or_smi();
CHECK(code_object->IsSmi());
CcTest::CollectAllGarbage();
- CHECK(Heap::InNewSpace(*fv));
+ CHECK(Heap::InYoungGeneration(*fv));
CHECK_EQ(code_object, fv->optimized_code_weak_or_smi());
{
@@ -123,7 +124,7 @@ TEST(WeakReferencesOldToNew) {
// Create a new FixedArray which the FeedbackVector will point to.
Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
- CHECK(Heap::InNewSpace(*fixed_array));
+ CHECK(Heap::InYoungGeneration(*fixed_array));
fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));
CcTest::CollectAllGarbage();
@@ -148,7 +149,7 @@ TEST(WeakReferencesOldToNewScavenged) {
// Create a new FixedArray which the FeedbackVector will point to.
Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
- CHECK(Heap::InNewSpace(*fixed_array));
+ CHECK(Heap::InYoungGeneration(*fixed_array));
fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));
CcTest::CollectGarbage(NEW_SPACE);
@@ -192,13 +193,13 @@ TEST(ObjectMovesBeforeClearingWeakField) {
HandleScope outer_scope(isolate);
Handle<FeedbackVector> fv =
CreateFeedbackVectorForTest(CcTest::isolate(), factory);
- CHECK(Heap::InNewSpace(*fv));
+ CHECK(Heap::InYoungGeneration(*fv));
FeedbackVector fv_location = *fv;
{
HandleScope inner_scope(isolate);
// Create a new FixedArray which the FeedbackVector will point to.
Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
- CHECK(Heap::InNewSpace(*fixed_array));
+ CHECK(Heap::InYoungGeneration(*fixed_array));
fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));
// inner_scope will go out of scope, so when marking the next time,
// *fixed_array will stay white.
@@ -233,12 +234,12 @@ TEST(ObjectWithWeakFieldDies) {
HandleScope outer_scope(isolate);
Handle<FeedbackVector> fv =
CreateFeedbackVectorForTest(CcTest::isolate(), factory);
- CHECK(Heap::InNewSpace(*fv));
+ CHECK(Heap::InYoungGeneration(*fv));
{
HandleScope inner_scope(isolate);
// Create a new FixedArray which the FeedbackVector will point to.
Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
- CHECK(Heap::InNewSpace(*fixed_array));
+ CHECK(Heap::InYoungGeneration(*fixed_array));
fv->set_optimized_code_weak_or_smi(
HeapObjectReference::Weak(*fixed_array));
// inner_scope will go out of scope, so when marking the next time,
@@ -266,11 +267,11 @@ TEST(ObjectWithWeakReferencePromoted) {
HandleScope outer_scope(isolate);
Handle<FeedbackVector> fv =
CreateFeedbackVectorForTest(CcTest::isolate(), factory);
- CHECK(Heap::InNewSpace(*fv));
+ CHECK(Heap::InYoungGeneration(*fv));
// Create a new FixedArray which the FeedbackVector will point to.
Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
- CHECK(Heap::InNewSpace(*fixed_array));
+ CHECK(Heap::InYoungGeneration(*fixed_array));
fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));
CcTest::CollectGarbage(NEW_SPACE);
@@ -292,13 +293,13 @@ TEST(ObjectWithClearedWeakReferencePromoted) {
HandleScope outer_scope(isolate);
Handle<FeedbackVector> fv =
CreateFeedbackVectorForTest(CcTest::isolate(), factory);
- CHECK(Heap::InNewSpace(*fv));
+ CHECK(Heap::InYoungGeneration(*fv));
fv->set_optimized_code_weak_or_smi(
HeapObjectReference::ClearedValue(isolate));
CcTest::CollectGarbage(NEW_SPACE);
- CHECK(Heap::InNewSpace(*fv));
+ CHECK(Heap::InYoungGeneration(*fv));
CHECK(fv->optimized_code_weak_or_smi()->IsCleared());
CcTest::CollectGarbage(NEW_SPACE);
@@ -323,21 +324,21 @@ TEST(WeakReferenceWriteBarrier) {
HandleScope outer_scope(isolate);
Handle<FeedbackVector> fv =
CreateFeedbackVectorForTest(CcTest::isolate(), factory);
- CHECK(Heap::InNewSpace(*fv));
+ CHECK(Heap::InYoungGeneration(*fv));
{
HandleScope inner_scope(isolate);
// Create a new FixedArray which the FeedbackVector will point to.
Handle<FixedArray> fixed_array1 = factory->NewFixedArray(1);
- CHECK(Heap::InNewSpace(*fixed_array1));
+ CHECK(Heap::InYoungGeneration(*fixed_array1));
fv->set_optimized_code_weak_or_smi(
HeapObjectReference::Weak(*fixed_array1));
SimulateIncrementalMarking(heap, true);
Handle<FixedArray> fixed_array2 = factory->NewFixedArray(1);
- CHECK(Heap::InNewSpace(*fixed_array2));
+ CHECK(Heap::InYoungGeneration(*fixed_array2));
// This write will trigger the write barrier.
fv->set_optimized_code_weak_or_smi(
HeapObjectReference::Weak(*fixed_array2));
@@ -374,7 +375,7 @@ TEST(WeakArraysBasic) {
CHECK(array->IsWeakFixedArray());
CHECK(!array->IsFixedArray());
CHECK_EQ(array->length(), length);
- CHECK(Heap::InNewSpace(*array));
+ CHECK(Heap::InYoungGeneration(*array));
for (int i = 0; i < length; ++i) {
HeapObject heap_object;
@@ -481,7 +482,7 @@ TEST(WeakArrayListBasic) {
isolate, array, MaybeObjectHandle(Smi::FromInt(7), isolate));
CHECK_EQ(array->length(), 8);
- CHECK(Heap::InNewSpace(*array));
+ CHECK(Heap::InYoungGeneration(*array));
CHECK_EQ(array->Get(0), HeapObjectReference::Weak(*index0));
CHECK_EQ(array->Get(1).ToSmi().value(), 1);