diff options
Diffstat (limited to 'deps/v8/test/cctest/heap/test-compaction.cc')
-rw-r--r-- | deps/v8/test/cctest/heap/test-compaction.cc | 79 |
1 file changed, 45 insertions, 34 deletions
diff --git a/deps/v8/test/cctest/heap/test-compaction.cc b/deps/v8/test/cctest/heap/test-compaction.cc index fec41c738a..9fb989482c 100644 --- a/deps/v8/test/cctest/heap/test-compaction.cc +++ b/deps/v8/test/cctest/heap/test-compaction.cc @@ -32,8 +32,8 @@ void CheckInvariantsOfAbortedPage(Page* page) { void CheckAllObjectsOnPage(std::vector<Handle<FixedArray>>& handles, Page* page) { - for (auto& fixed_array : handles) { - CHECK(Page::FromAddress(fixed_array->address()) == page); + for (Handle<FixedArray> fixed_array : handles) { + CHECK(Page::FromHeapObject(*fixed_array) == page); } } @@ -59,10 +59,12 @@ HEAP_TEST(CompactionFullAbortedPage) { { HandleScope scope2(isolate); CHECK(heap->old_space()->Expand()); - auto compaction_page_handles = - heap::CreatePadding(heap, Page::kAllocatableMemory, TENURED); + auto compaction_page_handles = heap::CreatePadding( + heap, + static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()), + TENURED); Page* to_be_aborted_page = - Page::FromAddress(compaction_page_handles.front()->address()); + Page::FromHeapObject(*compaction_page_handles.front()); to_be_aborted_page->SetFlag( MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page); @@ -74,7 +76,7 @@ HEAP_TEST(CompactionFullAbortedPage) { // Check that all handles still point to the same page, i.e., compaction // has been aborted on the page. 
for (Handle<FixedArray> object : compaction_page_handles) { - CHECK_EQ(to_be_aborted_page, Page::FromAddress(object->address())); + CHECK_EQ(to_be_aborted_page, Page::FromHeapObject(*object)); } CheckInvariantsOfAbortedPage(to_be_aborted_page); } @@ -93,7 +95,9 @@ HEAP_TEST(CompactionPartiallyAbortedPage) { FLAG_manual_evacuation_candidates_selection = true; const int objects_per_page = 10; - const int object_size = Page::kAllocatableMemory / objects_per_page; + const int object_size = + static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) / + objects_per_page; CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); @@ -109,9 +113,11 @@ HEAP_TEST(CompactionPartiallyAbortedPage) { // properly adjusted). CHECK(heap->old_space()->Expand()); auto compaction_page_handles = heap::CreatePadding( - heap, Page::kAllocatableMemory, TENURED, object_size); + heap, + static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()), + TENURED, object_size); Page* to_be_aborted_page = - Page::FromAddress(compaction_page_handles.front()->address()); + Page::FromHeapObject(*compaction_page_handles.front()); to_be_aborted_page->SetFlag( MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page); @@ -137,12 +143,12 @@ HEAP_TEST(CompactionPartiallyAbortedPage) { // Once compaction has been aborted, all following objects still have // to be on the initial page. CHECK(!migration_aborted || - (Page::FromAddress(object->address()) == to_be_aborted_page)); - if (Page::FromAddress(object->address()) == to_be_aborted_page) { + (Page::FromHeapObject(*object) == to_be_aborted_page)); + if (Page::FromHeapObject(*object) == to_be_aborted_page) { // This object has not been migrated. 
migration_aborted = true; } else { - CHECK_EQ(Page::FromAddress(object->address()), page_to_fill); + CHECK_EQ(Page::FromHeapObject(*object), page_to_fill); } } // Check that we actually created a scenario with a partially aborted @@ -168,7 +174,9 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) { FLAG_manual_evacuation_candidates_selection = true; const int objects_per_page = 10; - const int object_size = Page::kAllocatableMemory / objects_per_page; + const int object_size = + static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) / + objects_per_page; CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); @@ -187,10 +195,13 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) { // properly adjusted). CHECK(heap->old_space()->Expand()); std::vector<Handle<FixedArray>> compaction_page_handles = - heap::CreatePadding(heap, Page::kAllocatableMemory, TENURED, - object_size); + heap::CreatePadding( + heap, + static_cast<int>( + MemoryChunkLayout::AllocatableMemoryInDataPage()), + TENURED, object_size); to_be_aborted_page = - Page::FromAddress(compaction_page_handles.front()->address()); + Page::FromHeapObject(*compaction_page_handles.front()); to_be_aborted_page->SetFlag( MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); for (size_t i = compaction_page_handles.size() - 1; i > 0; i--) { @@ -208,8 +219,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) { int used_memory = object_size * num_objects; std::vector<Handle<FixedArray>> page_to_fill_handles = heap::CreatePadding(heap, used_memory, TENURED, object_size); - Page* page_to_fill = - Page::FromAddress(page_to_fill_handles.front()->address()); + Page* page_to_fill = Page::FromHeapObject(*page_to_fill_handles.front()); heap->set_force_oom(true); CcTest::CollectAllGarbage(); @@ -223,13 +233,12 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) { current = Handle<FixedArray>(FixedArray::cast(current->get(0)), isolate); 
CHECK(current->IsFixedArray()); - if (Page::FromAddress(current->address()) != to_be_aborted_page) { + if (Page::FromHeapObject(*current) != to_be_aborted_page) { in_place = false; } bool on_aborted_page = - Page::FromAddress(current->address()) == to_be_aborted_page; - bool on_fill_page = - Page::FromAddress(current->address()) == page_to_fill; + Page::FromHeapObject(*current) == to_be_aborted_page; + bool on_fill_page = Page::FromHeapObject(*current) == page_to_fill; CHECK((in_place && on_aborted_page) || (!in_place && on_fill_page)); } // Check that we at least migrated one object, as otherwise the test would @@ -257,7 +266,9 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) { FLAG_manual_evacuation_candidates_selection = true; const int objects_per_page = 10; - const int object_size = Page::kAllocatableMemory / objects_per_page; + const int object_size = + static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()) / + objects_per_page; CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); @@ -275,11 +286,13 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) { // properly adjusted). CHECK(heap->old_space()->Expand()); auto compaction_page_handles = heap::CreatePadding( - heap, Page::kAllocatableMemory, TENURED, object_size); + heap, + static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()), + TENURED, object_size); // Sanity check that we have enough space for linking up arrays. 
CHECK_GE(compaction_page_handles.front()->length(), 2); to_be_aborted_page = - Page::FromAddress(compaction_page_handles.front()->address()); + Page::FromHeapObject(*compaction_page_handles.front()); to_be_aborted_page->SetFlag( MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); @@ -303,8 +316,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) { int used_memory = object_size * num_objects; std::vector<Handle<FixedArray>> page_to_fill_handles = heap::CreatePadding(heap, used_memory, TENURED, object_size); - Page* page_to_fill = - Page::FromAddress(page_to_fill_handles.front()->address()); + Page* page_to_fill = Page::FromHeapObject(*page_to_fill_handles.front()); heap->set_force_oom(true); CcTest::CollectAllGarbage(); @@ -319,13 +331,12 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) { Handle<FixedArray>(FixedArray::cast(current->get(0)), isolate); CHECK(!Heap::InNewSpace(*current)); CHECK(current->IsFixedArray()); - if (Page::FromAddress(current->address()) != to_be_aborted_page) { + if (Page::FromHeapObject(*current) != to_be_aborted_page) { in_place = false; } bool on_aborted_page = - Page::FromAddress(current->address()) == to_be_aborted_page; - bool on_fill_page = - Page::FromAddress(current->address()) == page_to_fill; + Page::FromHeapObject(*current) == to_be_aborted_page; + bool on_fill_page = Page::FromHeapObject(*current) == page_to_fill; CHECK((in_place && on_aborted_page) || (!in_place && on_fill_page)); } // Check that we at least migrated one object, as otherwise the test would @@ -338,10 +349,10 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) { isolate->factory()->NewFixedArray(10, NOT_TENURED); // Create a broken address that looks like a tagged pointer to a new space // object. - Address broken_address = holder->address() + 2 * kPointerSize + 1; + Address broken_address = holder->address() + 2 * kTaggedSize + 1; // Convert it to a vector to create a string from it. 
Vector<const uint8_t> string_to_broken_addresss( - reinterpret_cast<const uint8_t*>(&broken_address), kPointerSize); + reinterpret_cast<const uint8_t*>(&broken_address), kTaggedSize); Handle<String> string; do { @@ -353,7 +364,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) { string = isolate->factory() ->NewStringFromOneByte(string_to_broken_addresss, TENURED) .ToHandleChecked(); - } while (Page::FromAddress(string->address()) != to_be_aborted_page); + } while (Page::FromHeapObject(*string) != to_be_aborted_page); // If store buffer entries are not properly filtered/reset for aborted // pages we have now a broken address at an object slot in old space and |