Diffstat (limited to 'deps/v8/test/cctest/test-heap-profiler.cc')
-rw-r--r--  deps/v8/test/cctest/test-heap-profiler.cc  57
1 file changed, 37 insertions(+), 20 deletions(-)
diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc
index b0a2e00202..17893b3b48 100644
--- a/deps/v8/test/cctest/test-heap-profiler.cc
+++ b/deps/v8/test/cctest/test-heap-profiler.cc
@@ -62,12 +62,8 @@ class NamedEntriesDetector {
if (strcmp(entry->name(), "C2") == 0) has_C2 = true;
}
- static bool AddressesMatch(void* key1, void* key2) {
- return key1 == key2;
- }
-
void CheckAllReachables(i::HeapEntry* root) {
- v8::base::HashMap visited(AddressesMatch);
+ v8::base::HashMap visited;
i::List<i::HeapEntry*> list(10);
list.Add(root);
CheckEntry(root);
@@ -135,17 +131,12 @@ static bool HasString(const v8::HeapGraphNode* node, const char* contents) {
}
-static bool AddressesMatch(void* key1, void* key2) {
- return key1 == key2;
-}
-
-
// Check that snapshot has no unretained entries except root.
static bool ValidateSnapshot(const v8::HeapSnapshot* snapshot, int depth = 3) {
i::HeapSnapshot* heap_snapshot = const_cast<i::HeapSnapshot*>(
reinterpret_cast<const i::HeapSnapshot*>(snapshot));
- v8::base::HashMap visited(AddressesMatch);
+ v8::base::HashMap visited;
i::List<i::HeapGraphEdge>& edges = heap_snapshot->edges();
for (int i = 0; i < edges.length(); ++i) {
v8::base::HashMap::Entry* entry = visited.LookupOrInsert(
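Both hunks above drop the locally defined AddressesMatch comparator. The assumption behind the change (not stated in the patch itself) is that a default-constructed v8::base::HashMap now compares keys by pointer identity, making the explicit match function redundant. A minimal sketch of the equivalence:

  // The removed helper only forwarded to pointer equality:
  static bool AddressesMatch(void* key1, void* key2) { return key1 == key2; }
  // ...which is what a default-constructed v8::base::HashMap is assumed to do,
  // so `v8::base::HashMap visited;` behaves the same for these tests.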
@@ -501,7 +492,7 @@ void CheckSimdSnapshot(const char* program, const char* var_name) {
// 28 @ 13523 entry with no retainer: /hidden/ system / AllocationSite
// 44 @ 767 $map: /hidden/ system / Map
// 44 @ 59 $map: /hidden/ system / Map
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
const v8::HeapSnapshot* snapshot = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot));
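This hunk, like several below, replaces the direct CcTest::heap()->CollectAllGarbage() call with the CcTest::CollectAllGarbage(flags) helper, passing i::Heap::kFinalizeIncrementalMarkingMask explicitly. A hypothetical sketch of such a forwarding helper (the real one lives in test/cctest/cctest.h; the GC-reason argument shown here is an assumption, not taken from the patch):

  // Hypothetical wrapper: forwards the flags to the heap with a test GC reason.
  static void CollectAllGarbage(int flags) {
    CcTest::heap()->CollectAllGarbage(flags,
                                      i::GarbageCollectionReason::kTesting);
  }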
@@ -738,7 +729,7 @@ TEST(HeapSnapshotAddressReuse) {
CompileRun(
"for (var i = 0; i < 10000; ++i)\n"
" a[i] = new A();\n");
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@@ -780,7 +771,7 @@ TEST(HeapEntryIdsAndArrayShift) {
"for (var i = 0; i < 1; ++i)\n"
" a.shift();\n");
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@@ -821,7 +812,7 @@ TEST(HeapEntryIdsAndGC) {
const v8::HeapSnapshot* snapshot1 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot1));
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@@ -1150,7 +1141,7 @@ TEST(HeapSnapshotObjectsStats) {
// We have to call GC 6 times. In other case the garbage will be
// the reason of flakiness.
for (int i = 0; i < 6; ++i) {
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
v8::SnapshotObjectId initial_id;
@@ -1305,7 +1296,7 @@ TEST(HeapObjectIds) {
}
heap_profiler->StopTrackingHeapObjects();
- CcTest::heap()->CollectAllAvailableGarbage();
+ CcTest::CollectAllAvailableGarbage();
for (int i = 0; i < kLength; i++) {
v8::SnapshotObjectId id = heap_profiler->GetObjectId(objects[i]);
@@ -2557,8 +2548,34 @@ TEST(ArrayGrowLeftTrim) {
heap_profiler->StopTrackingHeapObjects();
}
+TEST(TrackHeapAllocationsWithInlining) {
+ v8::HandleScope scope(v8::Isolate::GetCurrent());
+ LocalContext env;
+
+ v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
+ heap_profiler->StartTrackingHeapObjects(true);
+
+ CompileRun(record_trace_tree_source);
+
+ AllocationTracker* tracker =
+ reinterpret_cast<i::HeapProfiler*>(heap_profiler)->allocation_tracker();
+ CHECK(tracker);
+ // Resolve all function locations.
+ tracker->PrepareForSerialization();
+ // Print for better diagnostics in case of failure.
+ tracker->trace_tree()->Print(tracker);
+
+ const char* names[] = {"", "start", "f_0_0"};
+ AllocationTraceNode* node = FindNode(tracker, ArrayVector(names));
+ CHECK(node);
+ CHECK_GE(node->allocation_count(), 12u);
+ CHECK_GE(node->allocation_size(), 4 * node->allocation_count());
+ heap_profiler->StopTrackingHeapObjects();
+}
-TEST(TrackHeapAllocations) {
+TEST(TrackHeapAllocationsWithoutInlining) {
+ i::FLAG_turbo_inlining = false;
+ i::FLAG_max_inlined_source_size = 0; // Disable inlining
v8::HandleScope scope(v8::Isolate::GetCurrent());
LocalContext env;
@@ -3033,7 +3050,7 @@ TEST(SamplingHeapProfiler) {
" eval(\"new Array(100)\");\n"
"}\n");
- CcTest::heap()->CollectAllGarbage();
+ CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
std::unique_ptr<v8::AllocationProfile> profile(
heap_profiler->GetAllocationProfile());
@@ -3087,7 +3104,7 @@ TEST(SamplingHeapProfilerLeftTrimming) {
" a.shift();\n"
"}\n");
- CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
+ CcTest::CollectGarbage(v8::internal::NEW_SPACE);
// Should not crash.
heap_profiler->StopSamplingHeapProfiler();