path: root/deps/v8/src/profiler
author     Myles Borins <mylesborins@google.com>  2018-04-10 21:39:51 -0400
committer  Myles Borins <mylesborins@google.com>  2018-04-11 13:22:42 -0400
commit     12a1b9b8049462e47181a298120243dc83e81c55 (patch)
tree       8605276308c8b4e3597516961266bae1af57557a /deps/v8/src/profiler
parent     78cd8263354705b767ef8c6a651740efe4931ba0 (diff)
deps: update V8 to 6.6.346.23
PR-URL: https://github.com/nodejs/node/pull/19201
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
Reviewed-By: Myles Borins <myles.borins@gmail.com>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Diffstat (limited to 'deps/v8/src/profiler')
-rw-r--r--  deps/v8/src/profiler/allocation-tracker.cc      |   1
-rw-r--r--  deps/v8/src/profiler/allocation-tracker.h       |   2
-rw-r--r--  deps/v8/src/profiler/cpu-profiler.cc            |   3
-rw-r--r--  deps/v8/src/profiler/heap-profiler.cc           |  36
-rw-r--r--  deps/v8/src/profiler/heap-profiler.h            |  21
-rw-r--r--  deps/v8/src/profiler/heap-snapshot-generator.cc | 443
-rw-r--r--  deps/v8/src/profiler/heap-snapshot-generator.h  |  23
-rw-r--r--  deps/v8/src/profiler/profile-generator-inl.h    |   5
-rw-r--r--  deps/v8/src/profiler/profile-generator.cc       |  74
-rw-r--r--  deps/v8/src/profiler/profile-generator.h        |  25
-rw-r--r--  deps/v8/src/profiler/profiler-listener.cc       |  51
-rw-r--r--  deps/v8/src/profiler/profiler-listener.h        |   8
-rw-r--r--  deps/v8/src/profiler/sampling-heap-profiler.cc  |  18
-rw-r--r--  deps/v8/src/profiler/sampling-heap-profiler.h   |   2
-rw-r--r--  deps/v8/src/profiler/strings-storage.cc         |   2
-rw-r--r--  deps/v8/src/profiler/strings-storage.h          |   2
-rw-r--r--  deps/v8/src/profiler/tick-sample.cc             |   7
-rw-r--r--  deps/v8/src/profiler/tracing-cpu-profiler.h     |   6
-rw-r--r--  deps/v8/src/profiler/unbound-queue.h            |   6
19 files changed, 453 insertions, 282 deletions
diff --git a/deps/v8/src/profiler/allocation-tracker.cc b/deps/v8/src/profiler/allocation-tracker.cc
index 8ee4527234..b2b9392319 100644
--- a/deps/v8/src/profiler/allocation-tracker.cc
+++ b/deps/v8/src/profiler/allocation-tracker.cc
@@ -5,6 +5,7 @@
#include "src/profiler/allocation-tracker.h"
#include "src/frames-inl.h"
+#include "src/global-handles.h"
#include "src/objects-inl.h"
#include "src/profiler/heap-snapshot-generator-inl.h"
diff --git a/deps/v8/src/profiler/allocation-tracker.h b/deps/v8/src/profiler/allocation-tracker.h
index a84fd4a8fd..cd9e120db2 100644
--- a/deps/v8/src/profiler/allocation-tracker.h
+++ b/deps/v8/src/profiler/allocation-tracker.h
@@ -154,4 +154,4 @@ class AllocationTracker {
} // namespace internal
} // namespace v8
-#endif // V8_ALLOCATION_TRACKER_H_
+#endif // V8_PROFILER_ALLOCATION_TRACKER_H_
diff --git a/deps/v8/src/profiler/cpu-profiler.cc b/deps/v8/src/profiler/cpu-profiler.cc
index a915ebd511..841ce6000f 100644
--- a/deps/v8/src/profiler/cpu-profiler.cc
+++ b/deps/v8/src/profiler/cpu-profiler.cc
@@ -208,7 +208,7 @@ int CpuProfiler::GetProfilesCount() {
CpuProfile* CpuProfiler::GetProfile(int index) {
- return profiles_->profiles()->at(index);
+ return profiles_->profiles()->at(index).get();
}
@@ -220,7 +220,6 @@ void CpuProfiler::DeleteAllProfiles() {
void CpuProfiler::DeleteProfile(CpuProfile* profile) {
profiles_->RemoveProfile(profile);
- delete profile;
if (profiles_->profiles()->empty() && !is_profiling_) {
// If this was the last profile, clean up all accessory data as well.
ResetProfiles();
diff --git a/deps/v8/src/profiler/heap-profiler.cc b/deps/v8/src/profiler/heap-profiler.cc
index 8f0afdc771..9dbe3ff5bd 100644
--- a/deps/v8/src/profiler/heap-profiler.cc
+++ b/deps/v8/src/profiler/heap-profiler.cc
@@ -17,28 +17,22 @@ namespace internal {
HeapProfiler::HeapProfiler(Heap* heap)
: ids_(new HeapObjectsMap(heap)),
names_(new StringsStorage(heap)),
- is_tracking_object_moves_(false),
- get_retainer_infos_callback_(nullptr) {}
-
-static void DeleteHeapSnapshot(HeapSnapshot* snapshot_ptr) {
- delete snapshot_ptr;
-}
-
-
-HeapProfiler::~HeapProfiler() {
- std::for_each(snapshots_.begin(), snapshots_.end(), &DeleteHeapSnapshot);
-}
+ is_tracking_object_moves_(false) {}
+HeapProfiler::~HeapProfiler() = default;
void HeapProfiler::DeleteAllSnapshots() {
- std::for_each(snapshots_.begin(), snapshots_.end(), &DeleteHeapSnapshot);
snapshots_.clear();
names_.reset(new StringsStorage(heap()));
}
void HeapProfiler::RemoveSnapshot(HeapSnapshot* snapshot) {
- snapshots_.erase(std::find(snapshots_.begin(), snapshots_.end(), snapshot));
+ snapshots_.erase(
+ std::find_if(snapshots_.begin(), snapshots_.end(),
+ [&](const std::unique_ptr<HeapSnapshot>& entry) {
+ return entry.get() == snapshot;
+ }));
}
@@ -75,6 +69,18 @@ v8::HeapProfiler::RetainerInfos HeapProfiler::GetRetainerInfos(
return infos;
}
+void HeapProfiler::SetBuildEmbedderGraphCallback(
+ v8::HeapProfiler::BuildEmbedderGraphCallback callback) {
+ build_embedder_graph_callback_ = callback;
+}
+
+void HeapProfiler::BuildEmbedderGraph(Isolate* isolate,
+ v8::EmbedderGraph* graph) {
+ if (build_embedder_graph_callback_ != nullptr)
+ build_embedder_graph_callback_(reinterpret_cast<v8::Isolate*>(isolate),
+ graph);
+}
+
HeapSnapshot* HeapProfiler::TakeSnapshot(
v8::ActivityControl* control,
v8::HeapProfiler::ObjectNameResolver* resolver) {
@@ -85,7 +91,7 @@ HeapSnapshot* HeapProfiler::TakeSnapshot(
delete result;
result = nullptr;
} else {
- snapshots_.push_back(result);
+ snapshots_.emplace_back(result);
}
}
ids_->RemoveDeadEntries();
@@ -153,7 +159,7 @@ int HeapProfiler::GetSnapshotsCount() {
}
HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
- return snapshots_.at(index);
+ return snapshots_.at(index).get();
}
SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) {
diff --git a/deps/v8/src/profiler/heap-profiler.h b/deps/v8/src/profiler/heap-profiler.h
index da6814ddcb..d37a882805 100644
--- a/deps/v8/src/profiler/heap-profiler.h
+++ b/deps/v8/src/profiler/heap-profiler.h
@@ -8,7 +8,11 @@
#include <memory>
#include <vector>
-#include "src/isolate.h"
+#include "include/v8-profiler.h"
+#include "src/base/platform/mutex.h"
+#include "src/debug/debug-interface.h"
+#include "src/globals.h"
+#include "src/heap/heap.h"
namespace v8 {
namespace internal {
@@ -65,9 +69,15 @@ class HeapProfiler {
void SetGetRetainerInfosCallback(
v8::HeapProfiler::GetRetainerInfosCallback callback);
-
v8::HeapProfiler::RetainerInfos GetRetainerInfos(Isolate* isolate);
+ void SetBuildEmbedderGraphCallback(
+ v8::HeapProfiler::BuildEmbedderGraphCallback callback);
+ void BuildEmbedderGraph(Isolate* isolate, v8::EmbedderGraph* graph);
+ bool HasBuildEmbedderGraphCallback() {
+ return build_embedder_graph_callback_ != nullptr;
+ }
+
bool is_tracking_object_moves() const { return is_tracking_object_moves_; }
bool is_tracking_allocations() const { return !!allocation_tracker_; }
@@ -85,14 +95,17 @@ class HeapProfiler {
// Mapping from HeapObject addresses to objects' uids.
std::unique_ptr<HeapObjectsMap> ids_;
- std::vector<HeapSnapshot*> snapshots_;
+ std::vector<std::unique_ptr<HeapSnapshot>> snapshots_;
std::unique_ptr<StringsStorage> names_;
std::vector<v8::HeapProfiler::WrapperInfoCallback> wrapper_callbacks_;
std::unique_ptr<AllocationTracker> allocation_tracker_;
bool is_tracking_object_moves_;
base::Mutex profiler_mutex_;
std::unique_ptr<SamplingHeapProfiler> sampling_heap_profiler_;
- v8::HeapProfiler::GetRetainerInfosCallback get_retainer_infos_callback_;
+ v8::HeapProfiler::GetRetainerInfosCallback get_retainer_infos_callback_ =
+ nullptr;
+ v8::HeapProfiler::BuildEmbedderGraphCallback build_embedder_graph_callback_ =
+ nullptr;
DISALLOW_COPY_AND_ASSIGN(HeapProfiler);
};
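
The SetBuildEmbedderGraphCallback/BuildEmbedderGraph pair added above is the internal side of the v8::HeapProfiler::BuildEmbedderGraphCallback API introduced with this V8 version. A minimal sketch of how an embedder might use it, assuming the public EmbedderGraph interface mirrored in the heap-snapshot-generator.cc changes further down; the NativeNode class, the g_wrapper handle, and the sizes are illustrative and not part of this patch:

    #include <memory>

    #include "v8-profiler.h"
    #include "v8.h"

    // Hypothetical persistent handle to a JS wrapper object the embedder owns.
    extern v8::Persistent<v8::Object> g_wrapper;

    // Embedder-side node describing a native resource retained by that wrapper.
    class NativeNode final : public v8::EmbedderGraph::Node {
     public:
      explicit NativeNode(size_t size) : size_(size) {}
      const char* Name() override { return "NativeResource"; }  // illustrative name
      size_t SizeInBytes() override { return size_; }

     private:
      size_t size_;
    };

    // Called by HeapProfiler::BuildEmbedderGraph() while a snapshot is built.
    void BuildEmbedderGraph(v8::Isolate* isolate, v8::EmbedderGraph* graph) {
      v8::EmbedderGraph::Node* wrapper = graph->V8Node(g_wrapper.Get(isolate));
      v8::EmbedderGraph::Node* native = graph->AddNode(
          std::unique_ptr<v8::EmbedderGraph::Node>(new NativeNode(4096)));
      graph->AddEdge(wrapper, native);  // the wrapper retains the native resource
    }

    // Registration, typically once per isolate:
    //   isolate->GetHeapProfiler()->SetBuildEmbedderGraphCallback(BuildEmbedderGraph);

HasBuildEmbedderGraphCallback() is what lets NativeObjectsExplorer (in the heap-snapshot-generator.cc diff below) take the embedder-graph path instead of the legacy RetainedObjectInfo groups.
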
diff --git a/deps/v8/src/profiler/heap-snapshot-generator.cc b/deps/v8/src/profiler/heap-snapshot-generator.cc
index 40779d9e5f..b1e033c5f5 100644
--- a/deps/v8/src/profiler/heap-snapshot-generator.cc
+++ b/deps/v8/src/profiler/heap-snapshot-generator.cc
@@ -10,6 +10,7 @@
#include "src/code-stubs.h"
#include "src/conversions.h"
#include "src/debug/debug.h"
+#include "src/global-handles.h"
#include "src/layout-descriptor.h"
#include "src/objects-body-descriptors.h"
#include "src/objects-inl.h"
@@ -176,7 +177,7 @@ HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
((kPointerSize == 8) && (sizeof(HeapGraphEdge) == 24)));
STATIC_ASSERT(((kPointerSize == 4) && (sizeof(HeapEntry) == 28)) ||
((kPointerSize == 8) && (sizeof(HeapEntry) == 40)));
- for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
+ for (int i = 0; i < static_cast<int>(Root::kNumberOfRoots); ++i) {
gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
}
}
@@ -184,7 +185,6 @@ HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
void HeapSnapshot::Delete() {
profiler_->RemoveSnapshot(this);
- delete this;
}
@@ -197,8 +197,8 @@ void HeapSnapshot::AddSyntheticRootEntries() {
AddRootEntry();
AddGcRootsEntry();
SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
- for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
- AddGcSubrootEntry(tag, id);
+ for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
+ AddGcSubrootEntry(static_cast<Root>(root), id);
id += HeapObjectsMap::kObjectIdStep;
}
DCHECK_EQ(HeapObjectsMap::kFirstAvailableObjectId, id);
@@ -230,13 +230,11 @@ HeapEntry* HeapSnapshot::AddGcRootsEntry() {
return entry;
}
-
-HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
- DCHECK_EQ(gc_subroot_indexes_[tag], HeapEntry::kNoEntry);
- DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
- HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
- VisitorSynchronization::kTagNames[tag], id, 0, 0);
- gc_subroot_indexes_[tag] = entry->index();
+HeapEntry* HeapSnapshot::AddGcSubrootEntry(Root root, SnapshotObjectId id) {
+ DCHECK_EQ(gc_subroot_indexes_[static_cast<int>(root)], HeapEntry::kNoEntry);
+ HeapEntry* entry =
+ AddEntry(HeapEntry::kSynthetic, RootVisitor::RootName(root), id, 0, 0);
+ gc_subroot_indexes_[static_cast<int>(root)] = entry->index();
return entry;
}
@@ -307,7 +305,7 @@ const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
HeapObjectsMap::kGcRootsFirstSubrootId +
- VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
+ static_cast<int>(Root::kNumberOfRoots) * HeapObjectsMap::kObjectIdStep;
HeapObjectsMap::HeapObjectsMap(Heap* heap)
: next_id_(kFirstAvailableObjectId), heap_(heap) {
@@ -733,15 +731,15 @@ class SnapshotFiller {
HeapEntry* parent_entry = &snapshot_->entries()[parent];
parent_entry->SetNamedReference(type, reference_name, child_entry);
}
- void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
- int parent,
+ void SetNamedAutoIndexReference(HeapGraphEdge::Type type, int parent,
+ const char* description,
HeapEntry* child_entry) {
HeapEntry* parent_entry = &snapshot_->entries()[parent];
int index = parent_entry->children_count() + 1;
- parent_entry->SetNamedReference(
- type,
- names_->GetName(index),
- child_entry);
+ const char* name = description
+ ? names_->GetFormatted("%d / %s", index, description)
+ : names_->GetName(index);
+ parent_entry->SetNamedReference(type, name, child_entry);
}
private:
@@ -857,6 +855,8 @@ bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
ExtractCodeReferences(entry, Code::cast(obj));
} else if (obj->IsCell()) {
ExtractCellReferences(entry, Cell::cast(obj));
+ } else if (obj->IsFeedbackCell()) {
+ ExtractFeedbackCellReferences(entry, FeedbackCell::cast(obj));
} else if (obj->IsWeakCell()) {
ExtractWeakCellReferences(entry, WeakCell::cast(obj));
} else if (obj->IsPropertyCell()) {
@@ -929,11 +929,10 @@ void V8HeapExplorer::ExtractJSObjectReferences(
}
}
SharedFunctionInfo* shared_info = js_fun->shared();
- TagObject(js_fun->feedback_vector_cell(),
- "(function feedback vector cell)");
- SetInternalReference(js_fun, entry, "feedback_vector_cell",
- js_fun->feedback_vector_cell(),
- JSFunction::kFeedbackVectorOffset);
+ TagObject(js_fun->feedback_cell(), "(function feedback cell)");
+ SetInternalReference(js_fun, entry, "feedback_cell",
+ js_fun->feedback_cell(),
+ JSFunction::kFeedbackCellOffset);
TagObject(shared_info, "(shared function info)");
SetInternalReference(js_fun, entry,
"shared", shared_info,
@@ -1148,9 +1147,6 @@ void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
"scope_info", shared->scope_info(),
SharedFunctionInfo::kScopeInfoOffset);
SetInternalReference(obj, entry,
- "instance_class_name", shared->instance_class_name(),
- SharedFunctionInfo::kInstanceClassNameOffset);
- SetInternalReference(obj, entry,
"script", shared->script(),
SharedFunctionInfo::kScriptOffset);
const char* construct_stub_name = name ?
@@ -1234,9 +1230,6 @@ void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
SetInternalReference(code, entry,
"relocation_info", code->relocation_info(),
Code::kRelocationInfoOffset);
- SetInternalReference(code, entry,
- "handler_table", code->handler_table(),
- Code::kHandlerTableOffset);
TagObject(code->deoptimization_data(), "(code deopt data)");
SetInternalReference(code, entry,
"deoptimization_data", code->deoptimization_data(),
@@ -1251,6 +1244,13 @@ void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
}
+void V8HeapExplorer::ExtractFeedbackCellReferences(
+ int entry, FeedbackCell* feedback_cell) {
+ TagObject(feedback_cell, "(feedback cell)");
+ SetInternalReference(feedback_cell, entry, "value", feedback_cell->value(),
+ FeedbackCell::kValueOffset);
+}
+
void V8HeapExplorer::ExtractWeakCellReferences(int entry, WeakCell* weak_cell) {
TagObject(weak_cell, "(weak cell)");
SetWeakReference(weak_cell, entry, "value", weak_cell->value(),
@@ -1312,23 +1312,9 @@ void V8HeapExplorer::ExtractJSArrayBufferReferences(
}
void V8HeapExplorer::ExtractJSPromiseReferences(int entry, JSPromise* promise) {
- SetInternalReference(promise, entry, "result", promise->result(),
- JSPromise::kResultOffset);
- SetInternalReference(promise, entry, "deferred_promise",
- promise->deferred_promise(),
- JSPromise::kDeferredPromiseOffset);
- SetInternalReference(promise, entry, "deferred_on_resolve",
- promise->deferred_on_resolve(),
- JSPromise::kDeferredOnResolveOffset);
- SetInternalReference(promise, entry, "deferred_on_reject",
- promise->deferred_on_reject(),
- JSPromise::kDeferredOnRejectOffset);
- SetInternalReference(promise, entry, "fulfill_reactions",
- promise->fulfill_reactions(),
- JSPromise::kFulfillReactionsOffset);
- SetInternalReference(promise, entry, "reject_reactions",
- promise->reject_reactions(),
- JSPromise::kRejectReactionsOffset);
+ SetInternalReference(promise, entry, "reactions_or_result",
+ promise->reactions_or_result(),
+ JSPromise::kReactionsOrResultOffset);
}
void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
@@ -1347,12 +1333,20 @@ void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
int key_index =
ObjectHashTable::EntryToIndex(i) + ObjectHashTable::kEntryKeyIndex;
int value_index = ObjectHashTable::EntryToValueIndex(i);
- SetWeakReference(table, entry, key_index, table->get(key_index),
+ Object* key = table->get(key_index);
+ Object* value = table->get(value_index);
+ SetWeakReference(table, entry, key_index, key,
table->OffsetOfElementAt(key_index));
- SetInternalReference(table, entry, value_index, table->get(value_index),
+ SetInternalReference(table, entry, value_index, value,
table->OffsetOfElementAt(value_index));
- // TODO(alph): Add a strong link (shortcut?) from key to value per
- // WeakMap the key was added to. See crbug.com/778739
+ HeapEntry* key_entry = GetEntry(key);
+ int key_entry_index = key_entry->index();
+ HeapEntry* value_entry = GetEntry(value);
+ if (key_entry && value_entry) {
+ filler_->SetNamedAutoIndexReference(HeapGraphEdge::kInternal,
+ key_entry_index, "WeakMap",
+ value_entry);
+ }
}
break;
}
@@ -1498,73 +1492,30 @@ HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
}
class RootsReferencesExtractor : public RootVisitor {
- private:
- struct IndexTag {
- IndexTag(size_t index, VisitorSynchronization::SyncTag tag)
- : index(index), tag(tag) {}
- size_t index;
- VisitorSynchronization::SyncTag tag;
- };
-
public:
- explicit RootsReferencesExtractor(Heap* heap)
- : collecting_all_references_(false),
- previous_reference_count_(0),
- heap_(heap) {
- }
+ explicit RootsReferencesExtractor(V8HeapExplorer* explorer)
+ : explorer_(explorer), visiting_weak_roots_(false) {}
- void VisitRootPointers(Root root, Object** start, Object** end) override {
- if (collecting_all_references_) {
- for (Object** p = start; p < end; p++) all_references_.push_back(*p);
- } else {
- for (Object** p = start; p < end; p++) strong_references_.push_back(*p);
- }
- }
+ void SetVisitingWeakRoots() { visiting_weak_roots_ = true; }
- void SetCollectingAllReferences() { collecting_all_references_ = true; }
-
- void FillReferences(V8HeapExplorer* explorer) {
- DCHECK_LE(strong_references_.size(), all_references_.size());
- Builtins* builtins = heap_->isolate()->builtins();
- USE(builtins);
- size_t strong_index = 0, all_index = 0, tags_index = 0;
- int builtin_index = 0;
- while (all_index < all_references_.size()) {
- bool is_strong =
- strong_index < strong_references_.size() &&
- strong_references_[strong_index] == all_references_[all_index];
- explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
- !is_strong,
- all_references_[all_index]);
- if (reference_tags_[tags_index].tag ==
- VisitorSynchronization::kBuiltins) {
- DCHECK(all_references_[all_index]->IsCode());
- explorer->TagBuiltinCodeObject(
- Code::cast(all_references_[all_index]),
- builtins->name(builtin_index++));
- }
- ++all_index;
- if (is_strong) ++strong_index;
- if (reference_tags_[tags_index].index == all_index) ++tags_index;
+ void VisitRootPointer(Root root, const char* description,
+ Object** object) override {
+ if (root == Root::kBuiltins) {
+ explorer_->TagBuiltinCodeObject(Code::cast(*object), description);
}
- CHECK_EQ(strong_index, strong_references_.size());
+ explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
+ *object);
}
- void Synchronize(VisitorSynchronization::SyncTag tag) override {
- if (collecting_all_references_ &&
- previous_reference_count_ != all_references_.size()) {
- previous_reference_count_ = all_references_.size();
- reference_tags_.emplace_back(previous_reference_count_, tag);
- }
+ void VisitRootPointers(Root root, const char* description, Object** start,
+ Object** end) override {
+ for (Object** p = start; p < end; p++)
+ VisitRootPointer(root, description, p);
}
private:
- bool collecting_all_references_;
- std::vector<Object*> strong_references_;
- std::vector<Object*> all_references_;
- size_t previous_reference_count_;
- std::vector<IndexTag> reference_tags_;
- Heap* heap_;
+ V8HeapExplorer* explorer_;
+ bool visiting_weak_roots_;
};
@@ -1574,18 +1525,17 @@ bool V8HeapExplorer::IterateAndExtractReferences(
// Create references to the synthetic roots.
SetRootGcRootsReference();
- for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
- SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
+ for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
+ SetGcRootsReference(static_cast<Root>(root));
}
// Make sure builtin code objects get their builtin tags
// first. Otherwise a particular JSFunction object could set
// its custom name to a generic builtin.
- RootsReferencesExtractor extractor(heap_);
+ RootsReferencesExtractor extractor(this);
heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
- extractor.SetCollectingAllReferences();
- heap_->IterateRoots(&extractor, VISIT_ALL);
- extractor.FillReferences(this);
+ extractor.SetVisitingWeakRoots();
+ heap_->IterateWeakGlobalHandles(&extractor);
// We have to do two passes as sometimes FixedArrays are used
// to weakly hold their items, and it's impossible to distinguish
@@ -1846,39 +1796,31 @@ void V8HeapExplorer::SetRootGcRootsReference() {
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
DCHECK_NOT_NULL(child_entry);
- filler_->SetNamedAutoIndexReference(
- HeapGraphEdge::kShortcut,
- snapshot_->root()->index(),
- child_entry);
+ filler_->SetNamedAutoIndexReference(HeapGraphEdge::kShortcut,
+ snapshot_->root()->index(), nullptr,
+ child_entry);
}
-void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
- filler_->SetIndexedAutoIndexReference(
- HeapGraphEdge::kElement,
- snapshot_->gc_roots()->index(),
- snapshot_->gc_subroot(tag));
+void V8HeapExplorer::SetGcRootsReference(Root root) {
+ filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
+ snapshot_->gc_roots()->index(),
+ snapshot_->gc_subroot(root));
}
-void V8HeapExplorer::SetGcSubrootReference(
- VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
+void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
+ bool is_weak, Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry == nullptr) return;
const char* name = GetStrongGcSubrootName(child_obj);
+ HeapGraphEdge::Type edge_type =
+ is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kInternal;
if (name != nullptr) {
- DCHECK(!is_weak);
- filler_->SetNamedReference(HeapGraphEdge::kInternal,
- snapshot_->gc_subroot(tag)->index(), name,
- child_entry);
+ filler_->SetNamedReference(edge_type, snapshot_->gc_subroot(root)->index(),
+ name, child_entry);
} else {
- if (is_weak) {
- filler_->SetNamedAutoIndexReference(HeapGraphEdge::kWeak,
- snapshot_->gc_subroot(tag)->index(),
- child_entry);
- } else {
- filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
- snapshot_->gc_subroot(tag)->index(),
- child_entry);
- }
+ filler_->SetNamedAutoIndexReference(edge_type,
+ snapshot_->gc_subroot(root)->index(),
+ description, child_entry);
}
// Add a shortcut to JS global object reference at snapshot root.
@@ -1945,7 +1887,8 @@ void V8HeapExplorer::TagFixedArraySubType(const FixedArray* array,
class GlobalObjectsEnumerator : public RootVisitor {
public:
- void VisitRootPointers(Root root, Object** start, Object** end) override {
+ void VisitRootPointers(Root root, const char* description, Object** start,
+ Object** end) override {
for (Object** p = start; p < end; p++) {
if (!(*p)->IsNativeContext()) continue;
JSObject* proxy = Context::cast(*p)->global_proxy();
@@ -1983,6 +1926,57 @@ void V8HeapExplorer::TagGlobalObjects() {
}
}
+class EmbedderGraphImpl : public EmbedderGraph {
+ public:
+ struct Edge {
+ Node* from;
+ Node* to;
+ };
+
+ class V8NodeImpl : public Node {
+ public:
+ explicit V8NodeImpl(Object* object) : object_(object) {}
+ Object* GetObject() { return object_; }
+
+ // Node overrides.
+ bool IsEmbedderNode() override { return false; }
+ const char* Name() override {
+ // The name should be retrieved via GetObject().
+ UNREACHABLE();
+ return "";
+ }
+ size_t SizeInBytes() override {
+ // The size should be retrieved via GetObject().
+ UNREACHABLE();
+ return 0;
+ }
+
+ private:
+ Object* object_;
+ };
+
+ Node* V8Node(const v8::Local<v8::Value>& value) final {
+ Handle<Object> object = v8::Utils::OpenHandle(*value);
+ DCHECK(!object.is_null());
+ return AddNode(std::unique_ptr<Node>(new V8NodeImpl(*object)));
+ }
+
+ Node* AddNode(std::unique_ptr<Node> node) final {
+ Node* result = node.get();
+ nodes_.push_back(std::move(node));
+ return result;
+ }
+
+ void AddEdge(Node* from, Node* to) final { edges_.push_back({from, to}); }
+
+ const std::vector<std::unique_ptr<Node>>& nodes() { return nodes_; }
+ const std::vector<Edge>& edges() { return edges_; }
+
+ private:
+ std::vector<std::unique_ptr<Node>> nodes_;
+ std::vector<Edge> edges_;
+};
+
class GlobalHandlesExtractor : public PersistentHandleVisitor {
public:
explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
@@ -2034,6 +2028,60 @@ HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
0);
}
+class EmbedderGraphEntriesAllocator : public HeapEntriesAllocator {
+ public:
+ explicit EmbedderGraphEntriesAllocator(HeapSnapshot* snapshot)
+ : snapshot_(snapshot),
+ names_(snapshot_->profiler()->names()),
+ heap_object_map_(snapshot_->profiler()->heap_object_map()) {}
+ virtual HeapEntry* AllocateEntry(HeapThing ptr);
+
+ private:
+ HeapSnapshot* snapshot_;
+ StringsStorage* names_;
+ HeapObjectsMap* heap_object_map_;
+};
+
+namespace {
+
+const char* EmbedderGraphNodeName(StringsStorage* names,
+ EmbedderGraphImpl::Node* node) {
+ const char* prefix = node->NamePrefix();
+ return prefix ? names->GetFormatted("%s %s", prefix, node->Name())
+ : names->GetCopy(node->Name());
+}
+
+HeapEntry::Type EmbedderGraphNodeType(EmbedderGraphImpl::Node* node) {
+ return HeapEntry::kNative;
+}
+
+// Merges the names of an embedder node and its wrapper node.
+// If the wrapper node name contains a tag suffix (part after '/') then the
+// result is the embedder node name concatenated with the tag suffix.
+// Otherwise, the result is the embedder node name.
+const char* MergeNames(StringsStorage* names, const char* embedder_name,
+ const char* wrapper_name) {
+ for (const char* suffix = wrapper_name; *suffix; suffix++) {
+ if (*suffix == '/') {
+ return names->GetFormatted("%s %s", embedder_name, suffix);
+ }
+ }
+ return embedder_name;
+}
+
+} // anonymous namespace
+
+HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(HeapThing ptr) {
+ EmbedderGraphImpl::Node* node =
+ reinterpret_cast<EmbedderGraphImpl::Node*>(ptr);
+ DCHECK(node->IsEmbedderNode());
+ size_t size = node->SizeInBytes();
+ return snapshot_->AddEntry(
+ EmbedderGraphNodeType(node), EmbedderGraphNodeName(names_, node),
+ static_cast<SnapshotObjectId>(reinterpret_cast<uintptr_t>(node) << 1),
+ static_cast<int>(size), 0);
+}
+
NativeObjectsExplorer::NativeObjectsExplorer(
HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
: isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
@@ -2042,13 +2090,13 @@ NativeObjectsExplorer::NativeObjectsExplorer(
embedder_queried_(false),
objects_by_info_(RetainedInfosMatch),
native_groups_(StringsMatch),
- filler_(nullptr) {
- synthetic_entries_allocator_ =
- new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
- native_entries_allocator_ =
- new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
-}
-
+ synthetic_entries_allocator_(
+ new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic)),
+ native_entries_allocator_(
+ new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative)),
+ embedder_graph_entries_allocator_(
+ new EmbedderGraphEntriesAllocator(snapshot)),
+ filler_(nullptr) {}
NativeObjectsExplorer::~NativeObjectsExplorer() {
for (base::HashMap::Entry* p = objects_by_info_.Start(); p != nullptr;
@@ -2066,8 +2114,6 @@ NativeObjectsExplorer::~NativeObjectsExplorer() {
reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
info->Dispose();
}
- delete synthetic_entries_allocator_;
- delete native_entries_allocator_;
}
@@ -2114,13 +2160,14 @@ void NativeObjectsExplorer::FillEdges() {
*pair.first->Get(reinterpret_cast<v8::Isolate*>(isolate_)));
HeapObject* parent = HeapObject::cast(*parent_object);
int parent_entry =
- filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
+ filler_->FindOrAddEntry(parent, native_entries_allocator_.get())
+ ->index();
DCHECK_NE(parent_entry, HeapEntry::kNoEntry);
Handle<Object> child_object = v8::Utils::OpenHandle(
*pair.second->Get(reinterpret_cast<v8::Isolate*>(isolate_)));
HeapObject* child = HeapObject::cast(*child_object);
HeapEntry* child_entry =
- filler_->FindOrAddEntry(child, native_entries_allocator_);
+ filler_->FindOrAddEntry(child, native_entries_allocator_.get());
filler_->SetNamedReference(HeapGraphEdge::kInternal, parent_entry, "native",
child_entry);
}
@@ -2139,25 +2186,83 @@ std::vector<HeapObject*>* NativeObjectsExplorer::GetVectorMaybeDisposeInfo(
return reinterpret_cast<std::vector<HeapObject*>*>(entry->value);
}
+HeapEntry* NativeObjectsExplorer::EntryForEmbedderGraphNode(
+ EmbedderGraphImpl::Node* node) {
+ EmbedderGraphImpl::Node* wrapper = node->WrapperNode();
+ if (wrapper) {
+ node = wrapper;
+ }
+ if (node->IsEmbedderNode()) {
+ return filler_->FindOrAddEntry(node,
+ embedder_graph_entries_allocator_.get());
+ } else {
+ EmbedderGraphImpl::V8NodeImpl* v8_node =
+ static_cast<EmbedderGraphImpl::V8NodeImpl*>(node);
+ Object* object = v8_node->GetObject();
+ if (object->IsSmi()) return nullptr;
+ HeapEntry* entry = filler_->FindEntry(HeapObject::cast(object));
+ return entry;
+ }
+}
bool NativeObjectsExplorer::IterateAndExtractReferences(
SnapshotFiller* filler) {
filler_ = filler;
- FillRetainedObjects();
- FillEdges();
- if (EstimateObjectsCount() > 0) {
- for (base::HashMap::Entry* p = objects_by_info_.Start(); p != nullptr;
- p = objects_by_info_.Next(p)) {
- v8::RetainedObjectInfo* info =
- reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
- SetNativeRootReference(info);
- std::vector<HeapObject*>* objects =
- reinterpret_cast<std::vector<HeapObject*>*>(p->value);
- for (HeapObject* object : *objects) {
- SetWrapperNativeReferences(object, info);
+
+ if (FLAG_heap_profiler_use_embedder_graph &&
+ snapshot_->profiler()->HasBuildEmbedderGraphCallback()) {
+ v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
+ DisallowHeapAllocation no_allocation;
+ EmbedderGraphImpl graph;
+ snapshot_->profiler()->BuildEmbedderGraph(isolate_, &graph);
+ for (const auto& node : graph.nodes()) {
+ if (node->IsRootNode()) {
+ filler_->SetIndexedAutoIndexReference(
+ HeapGraphEdge::kElement, snapshot_->root()->index(),
+ EntryForEmbedderGraphNode(node.get()));
+ }
+ // Adjust the name and the type of the V8 wrapper node.
+ auto wrapper = node->WrapperNode();
+ if (wrapper) {
+ HeapEntry* wrapper_entry = EntryForEmbedderGraphNode(wrapper);
+ wrapper_entry->set_name(
+ MergeNames(names_, EmbedderGraphNodeName(names_, node.get()),
+ wrapper_entry->name()));
+ wrapper_entry->set_type(EmbedderGraphNodeType(node.get()));
+ }
+ }
+ // Fill edges of the graph.
+ for (const auto& edge : graph.edges()) {
+ HeapEntry* from = EntryForEmbedderGraphNode(edge.from);
+      // The |from| and |to| can be nullptr if the corresponding node is a V8 node
+ // pointing to a Smi.
+ if (!from) continue;
+ // Adding an entry for |edge.to| can invalidate the |from| entry because
+ // it is an address in std::vector. Use index instead of pointer.
+ int from_index = from->index();
+ HeapEntry* to = EntryForEmbedderGraphNode(edge.to);
+ if (to) {
+ filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
+ from_index, to);
+ }
+ }
+ } else {
+ FillRetainedObjects();
+ FillEdges();
+ if (EstimateObjectsCount() > 0) {
+ for (base::HashMap::Entry* p = objects_by_info_.Start(); p != nullptr;
+ p = objects_by_info_.Next(p)) {
+ v8::RetainedObjectInfo* info =
+ reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
+ SetNativeRootReference(info);
+ std::vector<HeapObject*>* objects =
+ reinterpret_cast<std::vector<HeapObject*>*>(p->value);
+ for (HeapObject* object : *objects) {
+ SetWrapperNativeReferences(object, info);
+ }
}
+ SetRootNativeRootsReference();
}
- SetRootNativeRootsReference();
}
filler_ = nullptr;
return true;
@@ -2210,19 +2315,17 @@ NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
void NativeObjectsExplorer::SetNativeRootReference(
v8::RetainedObjectInfo* info) {
HeapEntry* child_entry =
- filler_->FindOrAddEntry(info, native_entries_allocator_);
+ filler_->FindOrAddEntry(info, native_entries_allocator_.get());
DCHECK_NOT_NULL(child_entry);
NativeGroupRetainedObjectInfo* group_info =
FindOrAddGroupInfo(info->GetGroupLabel());
HeapEntry* group_entry =
- filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
+ filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_.get());
// |FindOrAddEntry| can move and resize the entries backing store. Reload
// potentially-stale pointer.
child_entry = filler_->FindEntry(info);
filler_->SetNamedAutoIndexReference(
- HeapGraphEdge::kInternal,
- group_entry->index(),
- child_entry);
+ HeapGraphEdge::kInternal, group_entry->index(), nullptr, child_entry);
}
@@ -2231,7 +2334,7 @@ void NativeObjectsExplorer::SetWrapperNativeReferences(
HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
DCHECK_NOT_NULL(wrapper_entry);
HeapEntry* info_entry =
- filler_->FindOrAddEntry(info, native_entries_allocator_);
+ filler_->FindOrAddEntry(info, native_entries_allocator_.get());
DCHECK_NOT_NULL(info_entry);
filler_->SetNamedReference(HeapGraphEdge::kInternal,
wrapper_entry->index(),
@@ -2249,7 +2352,7 @@ void NativeObjectsExplorer::SetRootNativeRootsReference() {
NativeGroupRetainedObjectInfo* group_info =
static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
HeapEntry* group_entry =
- filler_->FindOrAddEntry(group_info, native_entries_allocator_);
+ filler_->FindOrAddEntry(group_info, native_entries_allocator_.get());
DCHECK_NOT_NULL(group_entry);
filler_->SetIndexedAutoIndexReference(
HeapGraphEdge::kElement,
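
The MergeNames() helper introduced in this file encodes a small naming convention: the merged entry keeps the embedder node's name and, if the V8 wrapper's name carries a tag suffix after a '/', appends that suffix. A standalone sketch of the same rule, using std::string instead of StringsStorage purely for illustration:

    #include <iostream>
    #include <string>

    // Mirrors MergeNames(): keep the embedder name, append the wrapper's
    // "/ tag" suffix when one is present.
    std::string MergeNames(const std::string& embedder_name,
                           const std::string& wrapper_name) {
      std::size_t slash = wrapper_name.find('/');
      if (slash == std::string::npos) return embedder_name;
      return embedder_name + " " + wrapper_name.substr(slash);
    }

    int main() {
      std::cout << MergeNames("NativeResource", "Object / div") << "\n";  // NativeResource / div
      std::cout << MergeNames("NativeResource", "Object") << "\n";        // NativeResource
    }
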
diff --git a/deps/v8/src/profiler/heap-snapshot-generator.h b/deps/v8/src/profiler/heap-snapshot-generator.h
index 2dacd5a9fe..5c7d88e0ca 100644
--- a/deps/v8/src/profiler/heap-snapshot-generator.h
+++ b/deps/v8/src/profiler/heap-snapshot-generator.h
@@ -113,6 +113,7 @@ class HeapEntry BASE_EMBEDDED {
HeapSnapshot* snapshot() { return snapshot_; }
Type type() const { return static_cast<Type>(type_); }
+ void set_type(Type type) { type_ = type; }
const char* name() const { return name_; }
void set_name(const char* name) { name_ = name; }
SnapshotObjectId id() const { return id_; }
@@ -165,8 +166,8 @@ class HeapSnapshot {
HeapProfiler* profiler() { return profiler_; }
HeapEntry* root() { return &entries_[root_index_]; }
HeapEntry* gc_roots() { return &entries_[gc_roots_index_]; }
- HeapEntry* gc_subroot(int index) {
- return &entries_[gc_subroot_indexes_[index]];
+ HeapEntry* gc_subroot(Root root) {
+ return &entries_[gc_subroot_indexes_[static_cast<int>(root)]];
}
std::vector<HeapEntry>& entries() { return entries_; }
std::deque<HeapGraphEdge>& edges() { return edges_; }
@@ -191,12 +192,12 @@ class HeapSnapshot {
private:
HeapEntry* AddRootEntry();
HeapEntry* AddGcRootsEntry();
- HeapEntry* AddGcSubrootEntry(int tag, SnapshotObjectId id);
+ HeapEntry* AddGcSubrootEntry(Root root, SnapshotObjectId id);
HeapProfiler* profiler_;
int root_index_;
int gc_roots_index_;
- int gc_subroot_indexes_[VisitorSynchronization::kNumberOfSyncTags];
+ int gc_subroot_indexes_[static_cast<int>(Root::kNumberOfRoots)];
std::vector<HeapEntry> entries_;
std::deque<HeapGraphEdge> edges_;
std::deque<HeapGraphEdge*> children_;
@@ -384,6 +385,7 @@ class V8HeapExplorer : public HeapEntriesAllocator {
void ExtractAccessorPairReferences(int entry, AccessorPair* accessors);
void ExtractCodeReferences(int entry, Code* code);
void ExtractCellReferences(int entry, Cell* cell);
+ void ExtractFeedbackCellReferences(int entry, FeedbackCell* feedback_cell);
void ExtractWeakCellReferences(int entry, WeakCell* weak_cell);
void ExtractPropertyCellReferences(int entry, PropertyCell* cell);
void ExtractAllocationSiteReferences(int entry, AllocationSite* site);
@@ -445,9 +447,9 @@ class V8HeapExplorer : public HeapEntriesAllocator {
void SetUserGlobalReference(Object* user_global);
void SetRootGcRootsReference();
- void SetGcRootsReference(VisitorSynchronization::SyncTag tag);
- void SetGcSubrootReference(
- VisitorSynchronization::SyncTag tag, bool is_weak, Object* child);
+ void SetGcRootsReference(Root root);
+ void SetGcSubrootReference(Root root, const char* description, bool is_weak,
+ Object* child);
const char* GetStrongGcSubrootName(Object* object);
void TagObject(Object* obj, const char* tag);
void TagFixedArraySubType(const FixedArray* array,
@@ -514,6 +516,8 @@ class NativeObjectsExplorer {
NativeGroupRetainedObjectInfo* FindOrAddGroupInfo(const char* label);
+ HeapEntry* EntryForEmbedderGraphNode(EmbedderGraph::Node* node);
+
Isolate* isolate_;
HeapSnapshot* snapshot_;
StringsStorage* names_;
@@ -522,8 +526,9 @@ class NativeObjectsExplorer {
// RetainedObjectInfo* -> std::vector<HeapObject*>*
base::CustomMatcherHashMap objects_by_info_;
base::CustomMatcherHashMap native_groups_;
- HeapEntriesAllocator* synthetic_entries_allocator_;
- HeapEntriesAllocator* native_entries_allocator_;
+ std::unique_ptr<HeapEntriesAllocator> synthetic_entries_allocator_;
+ std::unique_ptr<HeapEntriesAllocator> native_entries_allocator_;
+ std::unique_ptr<HeapEntriesAllocator> embedder_graph_entries_allocator_;
// Used during references extraction.
SnapshotFiller* filler_;
v8::HeapProfiler::RetainerEdges edges_;
diff --git a/deps/v8/src/profiler/profile-generator-inl.h b/deps/v8/src/profiler/profile-generator-inl.h
index 5a7017ad49..970d462937 100644
--- a/deps/v8/src/profiler/profile-generator-inl.h
+++ b/deps/v8/src/profiler/profile-generator-inl.h
@@ -13,7 +13,8 @@ namespace internal {
CodeEntry::CodeEntry(CodeEventListener::LogEventsAndTags tag, const char* name,
const char* name_prefix, const char* resource_name,
int line_number, int column_number,
- JITLineInfoTable* line_info, Address instruction_start)
+ std::unique_ptr<JITLineInfoTable> line_info,
+ Address instruction_start)
: bit_field_(TagField::encode(tag) |
BuiltinIdField::encode(Builtins::builtin_count)),
name_prefix_(name_prefix),
@@ -26,7 +27,7 @@ CodeEntry::CodeEntry(CodeEventListener::LogEventsAndTags tag, const char* name,
bailout_reason_(kEmptyBailoutReason),
deopt_reason_(kNoDeoptReason),
deopt_id_(kNoDeoptimizationId),
- line_info_(line_info),
+ line_info_(std::move(line_info)),
instruction_start_(instruction_start) {}
ProfileNode::ProfileNode(ProfileTree* tree, CodeEntry* entry,
diff --git a/deps/v8/src/profiler/profile-generator.cc b/deps/v8/src/profiler/profile-generator.cc
index bb6ede6d95..9786741b94 100644
--- a/deps/v8/src/profiler/profile-generator.cc
+++ b/deps/v8/src/profiler/profile-generator.cc
@@ -85,16 +85,6 @@ CodeEntry* CodeEntry::UnresolvedEntryCreateTrait::Create() {
CodeEntry::kUnresolvedFunctionName);
}
-CodeEntry::~CodeEntry() {
- delete line_info_;
- for (auto location : inline_locations_) {
- for (auto entry : location.second) {
- delete entry;
- }
- }
-}
-
-
uint32_t CodeEntry::GetHash() const {
uint32_t hash = ComputeIntegerHash(tag());
if (script_id_ != v8::UnboundScript::kNoScriptId) {
@@ -137,12 +127,13 @@ int CodeEntry::GetSourceLine(int pc_offset) const {
return v8::CpuProfileNode::kNoLineNumberInfo;
}
-void CodeEntry::AddInlineStack(int pc_offset,
- std::vector<CodeEntry*> inline_stack) {
+void CodeEntry::AddInlineStack(
+ int pc_offset, std::vector<std::unique_ptr<CodeEntry>> inline_stack) {
inline_locations_.insert(std::make_pair(pc_offset, std::move(inline_stack)));
}
-const std::vector<CodeEntry*>* CodeEntry::GetInlineStack(int pc_offset) const {
+const std::vector<std::unique_ptr<CodeEntry>>* CodeEntry::GetInlineStack(
+ int pc_offset) const {
auto it = inline_locations_.find(pc_offset);
return it != inline_locations_.end() ? &it->second : nullptr;
}
@@ -528,9 +519,9 @@ void CodeMap::MoveCode(Address from, Address to) {
}
void CodeMap::Print() {
- for (auto it = code_map_.begin(); it != code_map_.end(); ++it) {
- base::OS::Print("%p %5d %s\n", static_cast<void*>(it->first),
- it->second.size, it->second.entry->name());
+ for (const auto& pair : code_map_) {
+ base::OS::Print("%p %5d %s\n", static_cast<void*>(pair.first),
+ pair.second.size, pair.second.entry->name());
}
}
@@ -539,12 +530,6 @@ CpuProfilesCollection::CpuProfilesCollection(Isolate* isolate)
profiler_(nullptr),
current_profiles_semaphore_(1) {}
-CpuProfilesCollection::~CpuProfilesCollection() {
- for (CpuProfile* profile : finished_profiles_) delete profile;
- for (CpuProfile* profile : current_profiles_) delete profile;
-}
-
-
bool CpuProfilesCollection::StartProfiling(const char* title,
bool record_samples) {
current_profiles_semaphore_.Wait();
@@ -552,7 +537,7 @@ bool CpuProfilesCollection::StartProfiling(const char* title,
current_profiles_semaphore_.Signal();
return false;
}
- for (CpuProfile* profile : current_profiles_) {
+ for (const std::unique_ptr<CpuProfile>& profile : current_profiles_) {
if (strcmp(profile->title(), title) == 0) {
// Ignore attempts to start profile with the same title...
current_profiles_semaphore_.Signal();
@@ -560,7 +545,8 @@ bool CpuProfilesCollection::StartProfiling(const char* title,
return true;
}
}
- current_profiles_.push_back(new CpuProfile(profiler_, title, record_samples));
+ current_profiles_.emplace_back(
+ new CpuProfile(profiler_, title, record_samples));
current_profiles_semaphore_.Signal();
return true;
}
@@ -570,19 +556,22 @@ CpuProfile* CpuProfilesCollection::StopProfiling(const char* title) {
const int title_len = StrLength(title);
CpuProfile* profile = nullptr;
current_profiles_semaphore_.Wait();
- for (size_t i = current_profiles_.size(); i != 0; --i) {
- CpuProfile* current_profile = current_profiles_[i - 1];
- if (title_len == 0 || strcmp(current_profile->title(), title) == 0) {
- profile = current_profile;
- current_profiles_.erase(current_profiles_.begin() + i - 1);
- break;
- }
+
+ auto it =
+ std::find_if(current_profiles_.rbegin(), current_profiles_.rend(),
+ [&](const std::unique_ptr<CpuProfile>& p) {
+ return title_len == 0 || strcmp(p->title(), title) == 0;
+ });
+
+ if (it != current_profiles_.rend()) {
+ (*it)->FinishProfile();
+ profile = it->get();
+ finished_profiles_.push_back(std::move(*it));
+ // Convert reverse iterator to matching forward iterator.
+ current_profiles_.erase(--(it.base()));
}
- current_profiles_semaphore_.Signal();
- if (!profile) return nullptr;
- profile->FinishProfile();
- finished_profiles_.push_back(profile);
+ current_profiles_semaphore_.Signal();
return profile;
}
@@ -599,7 +588,10 @@ bool CpuProfilesCollection::IsLastProfile(const char* title) {
void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
// Called from VM thread for a completed profile.
auto pos =
- std::find(finished_profiles_.begin(), finished_profiles_.end(), profile);
+ std::find_if(finished_profiles_.begin(), finished_profiles_.end(),
+ [&](const std::unique_ptr<CpuProfile>& finished_profile) {
+ return finished_profile.get() == profile;
+ });
DCHECK(pos != finished_profiles_.end());
finished_profiles_.erase(pos);
}
@@ -611,7 +603,7 @@ void CpuProfilesCollection::AddPathToCurrentProfiles(
// method, we don't bother minimizing the duration of lock holding,
// e.g. copying contents of the list to a local vector.
current_profiles_semaphore_.Wait();
- for (CpuProfile* profile : current_profiles_) {
+ for (const std::unique_ptr<CpuProfile>& profile : current_profiles_) {
profile->AddPath(timestamp, path, src_line, update_stats);
}
current_profiles_semaphore_.Signal();
@@ -684,11 +676,13 @@ void ProfileGenerator::RecordTickSample(const TickSample& sample) {
// Find out if the entry has an inlining stack associated.
int pc_offset =
static_cast<int>(stack_pos - entry->instruction_start());
- const std::vector<CodeEntry*>* inline_stack =
+ const std::vector<std::unique_ptr<CodeEntry>>* inline_stack =
entry->GetInlineStack(pc_offset);
if (inline_stack) {
- entries.insert(entries.end(), inline_stack->rbegin(),
- inline_stack->rend());
+ std::transform(
+ inline_stack->rbegin(), inline_stack->rend(),
+ std::back_inserter(entries),
+ [](const std::unique_ptr<CodeEntry>& ptr) { return ptr.get(); });
}
// Skip unresolved frames (e.g. internal frame) and get source line of
// the first JS caller.
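
The rewritten CpuProfilesCollection::StopProfiling above finds the newest matching profile with a reverse iterator and then erases it through --(it.base()). A small standalone sketch of that reverse-to-forward iterator conversion, on a plain vector of titles rather than the profiler types:

    #include <algorithm>
    #include <iostream>
    #include <iterator>
    #include <string>
    #include <vector>

    int main() {
      std::vector<std::string> titles = {"a", "b", "a"};
      // Search back to front, mirroring StopProfiling's reverse find.
      auto rit = std::find_if(titles.rbegin(), titles.rend(),
                              [](const std::string& t) { return t == "a"; });
      if (rit != titles.rend()) {
        // rit.base() points one element past the match; step back one slot to
        // get the forward iterator for erase (same effect as --(it.base())).
        titles.erase(std::prev(rit.base()));
      }
      for (const auto& t : titles) std::cout << t << " ";  // prints: a b
    }
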
diff --git a/deps/v8/src/profiler/profile-generator.h b/deps/v8/src/profiler/profile-generator.h
index 819800ae6b..5abb955a46 100644
--- a/deps/v8/src/profiler/profile-generator.h
+++ b/deps/v8/src/profiler/profile-generator.h
@@ -8,6 +8,7 @@
#include <map>
#include <vector>
+#include "include/v8-profiler.h"
#include "src/allocation.h"
#include "src/base/hashmap.h"
#include "src/log.h"
@@ -47,9 +48,8 @@ class CodeEntry {
const char* resource_name = CodeEntry::kEmptyResourceName,
int line_number = v8::CpuProfileNode::kNoLineNumberInfo,
int column_number = v8::CpuProfileNode::kNoColumnNumberInfo,
- JITLineInfoTable* line_info = nullptr,
+ std::unique_ptr<JITLineInfoTable> line_info = nullptr,
Address instruction_start = nullptr);
- ~CodeEntry();
const char* name_prefix() const { return name_prefix_; }
bool has_name_prefix() const { return name_prefix_[0] != '\0'; }
@@ -57,7 +57,7 @@ class CodeEntry {
const char* resource_name() const { return resource_name_; }
int line_number() const { return line_number_; }
int column_number() const { return column_number_; }
- const JITLineInfoTable* line_info() const { return line_info_; }
+ const JITLineInfoTable* line_info() const { return line_info_.get(); }
int script_id() const { return script_id_; }
void set_script_id(int script_id) { script_id_ = script_id; }
int position() const { return position_; }
@@ -91,8 +91,10 @@ class CodeEntry {
int GetSourceLine(int pc_offset) const;
- void AddInlineStack(int pc_offset, std::vector<CodeEntry*> inline_stack);
- const std::vector<CodeEntry*>* GetInlineStack(int pc_offset) const;
+ void AddInlineStack(int pc_offset,
+ std::vector<std::unique_ptr<CodeEntry>> inline_stack);
+ const std::vector<std::unique_ptr<CodeEntry>>* GetInlineStack(
+ int pc_offset) const;
void AddDeoptInlinedFrames(int deopt_id, std::vector<CpuProfileDeoptFrame>);
bool HasDeoptInlinedFramesFor(int deopt_id) const;
@@ -160,10 +162,10 @@ class CodeEntry {
const char* bailout_reason_;
const char* deopt_reason_;
int deopt_id_;
- JITLineInfoTable* line_info_;
+ std::unique_ptr<JITLineInfoTable> line_info_;
Address instruction_start_;
// Should be an unordered_map, but it doesn't currently work on Win & MacOS.
- std::map<int, std::vector<CodeEntry*>> inline_locations_;
+ std::map<int, std::vector<std::unique_ptr<CodeEntry>>> inline_locations_;
std::map<int, std::vector<CpuProfileDeoptFrame>> deopt_inlined_frames_;
DISALLOW_COPY_AND_ASSIGN(CodeEntry);
@@ -337,12 +339,13 @@ class CodeMap {
class CpuProfilesCollection {
public:
explicit CpuProfilesCollection(Isolate* isolate);
- ~CpuProfilesCollection();
void set_cpu_profiler(CpuProfiler* profiler) { profiler_ = profiler; }
bool StartProfiling(const char* title, bool record_samples);
CpuProfile* StopProfiling(const char* title);
- std::vector<CpuProfile*>* profiles() { return &finished_profiles_; }
+ std::vector<std::unique_ptr<CpuProfile>>* profiles() {
+ return &finished_profiles_;
+ }
const char* GetName(Name* name) { return resource_names_.GetName(name); }
bool IsLastProfile(const char* title);
void RemoveProfile(CpuProfile* profile);
@@ -357,11 +360,11 @@ class CpuProfilesCollection {
private:
StringsStorage resource_names_;
- std::vector<CpuProfile*> finished_profiles_;
+ std::vector<std::unique_ptr<CpuProfile>> finished_profiles_;
CpuProfiler* profiler_;
// Accessed by VM thread and profile generator thread.
- std::vector<CpuProfile*> current_profiles_;
+ std::vector<std::unique_ptr<CpuProfile>> current_profiles_;
base::Semaphore current_profiles_semaphore_;
DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
diff --git a/deps/v8/src/profiler/profiler-listener.cc b/deps/v8/src/profiler/profiler-listener.cc
index bd2f158e60..cec71d70e0 100644
--- a/deps/v8/src/profiler/profiler-listener.cc
+++ b/deps/v8/src/profiler/profiler-listener.cc
@@ -5,10 +5,12 @@
#include "src/profiler/profiler-listener.h"
#include "src/deoptimizer.h"
+#include "src/instruction-stream.h"
#include "src/objects-inl.h"
#include "src/profiler/cpu-profiler.h"
#include "src/profiler/profile-generator-inl.h"
#include "src/source-position-table.h"
+#include "src/wasm/wasm-code-manager.h"
namespace v8 {
namespace internal {
@@ -81,10 +83,10 @@ void ProfilerListener::CodeCreateEvent(CodeEventListener::LogEventsAndTags tag,
CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
rec->start = abstract_code->address();
- JITLineInfoTable* line_table = nullptr;
+ std::unique_ptr<JITLineInfoTable> line_table;
if (shared->script()->IsScript()) {
Script* script = Script::cast(shared->script());
- line_table = new JITLineInfoTable();
+ line_table.reset(new JITLineInfoTable());
int offset = abstract_code->IsCode() ? Code::kHeaderSize
: BytecodeArray::kHeaderSize;
for (SourcePositionTableIterator it(abstract_code->source_position_table());
@@ -101,8 +103,8 @@ void ProfilerListener::CodeCreateEvent(CodeEventListener::LogEventsAndTags tag,
}
rec->entry = NewCodeEntry(
tag, GetFunctionName(shared->DebugName()), CodeEntry::kEmptyNamePrefix,
- GetName(InferScriptName(script_name, shared)), line, column, line_table,
- abstract_code->instruction_start());
+ GetName(InferScriptName(script_name, shared)), line, column,
+ std::move(line_table), abstract_code->instruction_start());
RecordInliningInfo(rec->entry, abstract_code);
RecordDeoptInlinedFrames(rec->entry, abstract_code);
rec->entry->FillFunctionInfo(shared);
@@ -110,6 +112,24 @@ void ProfilerListener::CodeCreateEvent(CodeEventListener::LogEventsAndTags tag,
DispatchCodeEvent(evt_rec);
}
+void ProfilerListener::CodeCreateEvent(CodeEventListener::LogEventsAndTags tag,
+ wasm::WasmCode* code,
+ wasm::WasmName name) {
+ CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
+ CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
+ rec->start = code->instructions().start();
+ // TODO(herhut): Instead of sanitizing here, make sure all wasm functions
+ // have names.
+ const char* name_ptr =
+ name.start() == nullptr ? "<anonymous>" : GetFunctionName(name.start());
+ rec->entry = NewCodeEntry(
+ tag, name_ptr, CodeEntry::kEmptyNamePrefix, CodeEntry::kEmptyResourceName,
+ CpuProfileNode::kNoLineNumberInfo, CpuProfileNode::kNoColumnNumberInfo,
+ nullptr, code->instructions().start());
+ rec->size = code->instructions().length();
+ DispatchCodeEvent(evt_rec);
+}
+
void ProfilerListener::CodeMoveEvent(AbstractCode* from, Address to) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_MOVE);
CodeMoveEventRecord* rec = &evt_rec.CodeMoveEventRecord_;
@@ -164,6 +184,20 @@ void ProfilerListener::RegExpCodeCreateEvent(AbstractCode* code,
DispatchCodeEvent(evt_rec);
}
+void ProfilerListener::InstructionStreamCreateEvent(
+ CodeEventListener::LogEventsAndTags tag, const InstructionStream* stream,
+ const char* description) {
+ CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
+ CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
+ rec->start = stream->bytes();
+ rec->entry = NewCodeEntry(
+ tag, description, CodeEntry::kEmptyNamePrefix,
+ CodeEntry::kEmptyResourceName, CpuProfileNode::kNoLineNumberInfo,
+ CpuProfileNode::kNoColumnNumberInfo, nullptr, stream->bytes());
+ rec->size = static_cast<unsigned>(stream->byte_length());
+ DispatchCodeEvent(evt_rec);
+}
+
void ProfilerListener::SetterCallbackEvent(Name* name, Address entry_point) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_CREATION);
CodeCreateEventRecord* rec = &evt_rec.CodeCreateEventRecord_;
@@ -199,7 +233,7 @@ void ProfilerListener::RecordInliningInfo(CodeEntry* entry,
DCHECK_EQ(Translation::BEGIN, opcode);
it.Skip(Translation::NumberOfOperandsFor(opcode));
int depth = 0;
- std::vector<CodeEntry*> inline_stack;
+ std::vector<std::unique_ptr<CodeEntry>> inline_stack;
while (it.HasNext() &&
Translation::BEGIN !=
(opcode = static_cast<Translation::Opcode>(it.Next()))) {
@@ -227,7 +261,7 @@ void ProfilerListener::RecordInliningInfo(CodeEntry* entry,
CpuProfileNode::kNoColumnNumberInfo, nullptr,
code->instruction_start());
inline_entry->FillFunctionInfo(shared_info);
- inline_stack.push_back(inline_entry);
+ inline_stack.emplace_back(inline_entry);
}
if (!inline_stack.empty()) {
entry->AddInlineStack(pc_offset, std::move(inline_stack));
@@ -276,10 +310,11 @@ void ProfilerListener::RecordDeoptInlinedFrames(CodeEntry* entry,
CodeEntry* ProfilerListener::NewCodeEntry(
CodeEventListener::LogEventsAndTags tag, const char* name,
const char* name_prefix, const char* resource_name, int line_number,
- int column_number, JITLineInfoTable* line_info, Address instruction_start) {
+ int column_number, std::unique_ptr<JITLineInfoTable> line_info,
+ Address instruction_start) {
std::unique_ptr<CodeEntry> code_entry = base::make_unique<CodeEntry>(
tag, name, name_prefix, resource_name, line_number, column_number,
- line_info, instruction_start);
+ std::move(line_info), instruction_start);
CodeEntry* raw_code_entry = code_entry.get();
code_entries_.push_back(std::move(code_entry));
return raw_code_entry;
diff --git a/deps/v8/src/profiler/profiler-listener.h b/deps/v8/src/profiler/profiler-listener.h
index c111bf81c4..ca2c213a93 100644
--- a/deps/v8/src/profiler/profiler-listener.h
+++ b/deps/v8/src/profiler/profiler-listener.h
@@ -37,6 +37,9 @@ class ProfilerListener : public CodeEventListener {
void CodeCreateEvent(CodeEventListener::LogEventsAndTags tag,
AbstractCode* code, SharedFunctionInfo* shared,
Name* script_name, int line, int column) override;
+ void CodeCreateEvent(CodeEventListener::LogEventsAndTags tag,
+ wasm::WasmCode* code, wasm::WasmName name) override;
+
void CodeMovingGCEvent() override {}
void CodeMoveEvent(AbstractCode* from, Address to) override;
void CodeDisableOptEvent(AbstractCode* code,
@@ -45,6 +48,9 @@ class ProfilerListener : public CodeEventListener {
int fp_to_sp_delta) override;
void GetterCallbackEvent(Name* name, Address entry_point) override;
void RegExpCodeCreateEvent(AbstractCode* code, String* source) override;
+ void InstructionStreamCreateEvent(CodeEventListener::LogEventsAndTags tag,
+ const InstructionStream* stream,
+ const char* description) override;
void SetterCallbackEvent(Name* name, Address entry_point) override;
void SharedFunctionInfoMoveEvent(Address from, Address to) override {}
@@ -54,7 +60,7 @@ class ProfilerListener : public CodeEventListener {
const char* resource_name = CodeEntry::kEmptyResourceName,
int line_number = v8::CpuProfileNode::kNoLineNumberInfo,
int column_number = v8::CpuProfileNode::kNoColumnNumberInfo,
- JITLineInfoTable* line_info = nullptr,
+ std::unique_ptr<JITLineInfoTable> line_info = nullptr,
Address instruction_start = nullptr);
void AddObserver(CodeEventObserver* observer);
diff --git a/deps/v8/src/profiler/sampling-heap-profiler.cc b/deps/v8/src/profiler/sampling-heap-profiler.cc
index fef21550ec..31c885fef0 100644
--- a/deps/v8/src/profiler/sampling-heap-profiler.cc
+++ b/deps/v8/src/profiler/sampling-heap-profiler.cc
@@ -76,11 +76,7 @@ SamplingHeapProfiler::~SamplingHeapProfiler() {
heap_->RemoveAllocationObserversFromAllSpaces(other_spaces_observer_.get(),
new_space_observer_.get());
- for (auto sample : samples_) {
- delete sample;
- }
- std::set<Sample*> empty;
- samples_.swap(empty);
+ samples_.clear();
}
@@ -101,7 +97,7 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
AllocationNode* node = AddStack();
node->allocations_[size]++;
Sample* sample = new Sample(size, node, loc, this);
- samples_.insert(sample);
+ samples_.emplace(sample);
sample->global.SetWeak(sample, OnWeakCallback, WeakCallbackType::kParameter);
}
@@ -123,8 +119,14 @@ void SamplingHeapProfiler::OnWeakCallback(
node = parent;
}
}
- sample->profiler->samples_.erase(sample);
- delete sample;
+ auto it = std::find_if(sample->profiler->samples_.begin(),
+ sample->profiler->samples_.end(),
+ [&sample](const std::unique_ptr<Sample>& s) {
+ return s.get() == sample;
+ });
+
+ sample->profiler->samples_.erase(it);
+ // sample is deleted because its unique ptr was erased from samples_.
}
SamplingHeapProfiler::AllocationNode*
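
With samples_ now a std::set<std::unique_ptr<Sample>>, an element can no longer be erased directly from its raw pointer, so OnWeakCallback locates it with std::find_if before erasing. A standalone sketch of the same erase-by-raw-pointer pattern, with a stand-in Sample type rather than the profiler's:

    #include <algorithm>
    #include <cassert>
    #include <memory>
    #include <set>

    struct Sample { int size; };  // stand-in for the profiler's Sample

    int main() {
      std::set<std::unique_ptr<Sample>> samples;
      Sample* raw = samples.emplace(new Sample{64}).first->get();

      // set<unique_ptr>::find() expects a unique_ptr key, so locate the entry
      // by comparing the owned pointer, as OnWeakCallback does.
      auto it = std::find_if(samples.begin(), samples.end(),
                             [raw](const std::unique_ptr<Sample>& s) {
                               return s.get() == raw;
                             });
      assert(it != samples.end());
      samples.erase(it);  // the unique_ptr destroys the Sample here
      assert(samples.empty());
    }
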
diff --git a/deps/v8/src/profiler/sampling-heap-profiler.h b/deps/v8/src/profiler/sampling-heap-profiler.h
index 3a347dd54e..46fa405279 100644
--- a/deps/v8/src/profiler/sampling-heap-profiler.h
+++ b/deps/v8/src/profiler/sampling-heap-profiler.h
@@ -146,7 +146,7 @@ class SamplingHeapProfiler {
std::unique_ptr<SamplingAllocationObserver> other_spaces_observer_;
StringsStorage* const names_;
AllocationNode profile_root_;
- std::set<Sample*> samples_;
+ std::set<std::unique_ptr<Sample>> samples_;
const int stack_depth_;
const uint64_t rate_;
v8::HeapProfiler::SamplingFlags flags_;
diff --git a/deps/v8/src/profiler/strings-storage.cc b/deps/v8/src/profiler/strings-storage.cc
index 2e8ad779fd..9ea7770b4b 100644
--- a/deps/v8/src/profiler/strings-storage.cc
+++ b/deps/v8/src/profiler/strings-storage.cc
@@ -80,7 +80,7 @@ const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
const char* StringsStorage::GetName(Name* name) {
if (name->IsString()) {
String* str = String::cast(name);
- int length = Min(kMaxNameSize, str->length());
+ int length = Min(FLAG_heap_snapshot_string_limit, str->length());
int actual_length = 0;
std::unique_ptr<char[]> data = str->ToCString(
DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length);
diff --git a/deps/v8/src/profiler/strings-storage.h b/deps/v8/src/profiler/strings-storage.h
index d73a9dd208..834b5a3335 100644
--- a/deps/v8/src/profiler/strings-storage.h
+++ b/deps/v8/src/profiler/strings-storage.h
@@ -31,8 +31,6 @@ class StringsStorage {
const char* GetFunctionName(const char* name);
private:
- static const int kMaxNameSize = 1024;
-
static bool StringsMatch(void* key1, void* key2);
const char* AddOrDisposeString(char* str, int len);
base::CustomMatcherHashMap::Entry* GetEntry(const char* str, int len);
diff --git a/deps/v8/src/profiler/tick-sample.cc b/deps/v8/src/profiler/tick-sample.cc
index 44bf9af3d1..a6b8b26d00 100644
--- a/deps/v8/src/profiler/tick-sample.cc
+++ b/deps/v8/src/profiler/tick-sample.cc
@@ -204,7 +204,12 @@ bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs,
#endif
DCHECK(regs->sp);
- if (regs->pc && IsNoFrameRegion(static_cast<i::Address>(regs->pc))) {
+ // Check whether we interrupted setup/teardown of a stack frame in JS code.
+ // Avoid this check for C++ code, as that would trigger false positives.
+ if (regs->pc &&
+ isolate->heap()->memory_allocator()->code_range()->contains(
+ static_cast<i::Address>(regs->pc)) &&
+ IsNoFrameRegion(static_cast<i::Address>(regs->pc))) {
// The frame is not setup, so it'd be hard to iterate the stack. Bailout.
return false;
}
diff --git a/deps/v8/src/profiler/tracing-cpu-profiler.h b/deps/v8/src/profiler/tracing-cpu-profiler.h
index e654f2be9d..ccd1fa42a2 100644
--- a/deps/v8/src/profiler/tracing-cpu-profiler.h
+++ b/deps/v8/src/profiler/tracing-cpu-profiler.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef V8_PROFILER_TRACING_CPU_PROFILER_H
-#define V8_PROFILER_TRACING_CPU_PROFILER_H
+#ifndef V8_PROFILER_TRACING_CPU_PROFILER_H_
+#define V8_PROFILER_TRACING_CPU_PROFILER_H_
#include "include/v8-platform.h"
#include "include/v8-profiler.h"
@@ -43,4 +43,4 @@ class TracingCpuProfilerImpl final
} // namespace internal
} // namespace v8
-#endif // V8_PROFILER_TRACING_CPU_PROFILER_H
+#endif // V8_PROFILER_TRACING_CPU_PROFILER_H_
diff --git a/deps/v8/src/profiler/unbound-queue.h b/deps/v8/src/profiler/unbound-queue.h
index c53b35a8ed..062f1ce609 100644
--- a/deps/v8/src/profiler/unbound-queue.h
+++ b/deps/v8/src/profiler/unbound-queue.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef V8_PROFILER_UNBOUND_QUEUE_
-#define V8_PROFILER_UNBOUND_QUEUE_
+#ifndef V8_PROFILER_UNBOUND_QUEUE_H_
+#define V8_PROFILER_UNBOUND_QUEUE_H_
#include "src/allocation.h"
#include "src/base/atomicops.h"
@@ -45,4 +45,4 @@ class UnboundQueue BASE_EMBEDDED {
} // namespace internal
} // namespace v8
-#endif // V8_PROFILER_UNBOUND_QUEUE_
+#endif // V8_PROFILER_UNBOUND_QUEUE_H_