author     Michaël Zasso <targos@protonmail.com>  2017-06-06 10:28:14 +0200
committer  Michaël Zasso <targos@protonmail.com>  2017-06-07 10:33:31 +0200
commit     3dc8c3bed4cf3a77607edbb0b015e33f8b60fc09 (patch)
tree       9dee56e142638b34f1eccbd0ad88c3bce5377c29 /deps/v8/src/runtime-profiler.cc
parent     91a1bbe3055a660194ca4d403795aa0c03e9d056 (diff)
deps: update V8 to 5.9.211.32
PR-URL: https://github.com/nodejs/node/pull/13263
Reviewed-By: Gibson Fahnestock <gibfahn@gmail.com>
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
Reviewed-By: Myles Borins <myles.borins@gmail.com>
Diffstat (limited to 'deps/v8/src/runtime-profiler.cc')
-rw-r--r--  deps/v8/src/runtime-profiler.cc  98
1 file changed, 41 insertions(+), 57 deletions(-)
diff --git a/deps/v8/src/runtime-profiler.cc b/deps/v8/src/runtime-profiler.cc
index 6f9f44ee2a..d09e69ae61 100644
--- a/deps/v8/src/runtime-profiler.cc
+++ b/deps/v8/src/runtime-profiler.cc
@@ -19,10 +19,6 @@
namespace v8 {
namespace internal {
-
-// Number of times a function has to be seen on the stack before it is
-// compiled for baseline.
-static const int kProfilerTicksBeforeBaseline = 0;
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
@@ -66,7 +62,6 @@ static const int kMaxSizeOptIgnition = 250 * 1024;
#define OPTIMIZATION_REASON_LIST(V) \
V(DoNotOptimize, "do not optimize") \
V(HotAndStable, "hot and stable") \
- V(HotEnoughForBaseline, "hot enough for baseline") \
V(HotWithoutMuchTypeInfo, "not much type info but very hot") \
V(SmallFunction, "small function")
@@ -158,14 +153,6 @@ void RuntimeProfiler::Optimize(JSFunction* function,
function->AttemptConcurrentOptimization();
}
-void RuntimeProfiler::Baseline(JSFunction* function,
- OptimizationReason reason) {
- DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
- TraceRecompile(function, OptimizationReasonToString(reason), "baseline");
- DCHECK(function->shared()->IsInterpreted());
- function->MarkForBaseline();
-}
-
void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
int loop_nesting_levels) {
JSFunction* function = frame->function();
@@ -216,7 +203,14 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
SharedFunctionInfo* shared = function->shared();
Code* shared_code = shared->code();
if (shared_code->kind() != Code::FUNCTION) return;
- if (function->IsInOptimizationQueue()) return;
+ if (function->IsInOptimizationQueue()) {
+ if (FLAG_trace_opt_verbose) {
+ PrintF("[function ");
+ function->PrintName();
+ PrintF(" is already in optimization queue]\n");
+ }
+ return;
+ }
if (FLAG_always_osr) {
AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
@@ -251,7 +245,7 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
// Do not record non-optimizable functions.
if (shared->optimization_disabled()) {
- if (shared->deopt_count() >= FLAG_max_opt_count) {
+ if (shared->deopt_count() >= FLAG_max_deopt_count) {
// If optimization was disabled due to many deoptimizations,
// then check if the function is hot and try to reenable optimization.
int ticks = shared_code->profiler_ticks();
@@ -306,36 +300,17 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
}
}
-void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
+void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
JavaScriptFrame* frame) {
- if (function->IsInOptimizationQueue()) return;
-
- if (FLAG_always_osr) {
- AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
- // Fall through and do a normal baseline compile as well.
- } else if (MaybeOSRIgnition(function, frame)) {
- return;
- }
-
- SharedFunctionInfo* shared = function->shared();
- int ticks = shared->profiler_ticks();
-
- if (shared->optimization_disabled() &&
- shared->disable_optimization_reason() == kOptimizationDisabledForTest) {
- // Don't baseline functions which have been marked by NeverOptimizeFunction
- // in a test.
+ if (function->IsInOptimizationQueue()) {
+ if (FLAG_trace_opt_verbose) {
+ PrintF("[function ");
+ function->PrintName();
+ PrintF(" is already in optimization queue]\n");
+ }
return;
}
- if (ticks >= kProfilerTicksBeforeBaseline) {
- Baseline(function, OptimizationReason::kHotEnoughForBaseline);
- }
-}
-
-void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
- JavaScriptFrame* frame) {
- if (function->IsInOptimizationQueue()) return;
-
if (FLAG_always_osr) {
AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
// Fall through and do a normal optimized compile as well.
@@ -347,7 +322,7 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
int ticks = shared->profiler_ticks();
if (shared->optimization_disabled()) {
- if (shared->deopt_count() >= FLAG_max_opt_count) {
+ if (shared->deopt_count() >= FLAG_max_deopt_count) {
// If optimization was disabled due to many deoptimizations,
// then check if the function is hot and try to reenable optimization.
if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
@@ -375,11 +350,8 @@ bool RuntimeProfiler::MaybeOSRIgnition(JSFunction* function,
// TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
// than kMaxToplevelSourceSize.
- bool osr_before_baselined = function->IsMarkedForBaseline() &&
- ShouldOptimizeIgnition(function, frame) !=
- OptimizationReason::kDoNotOptimize;
if (!frame->is_optimized() &&
- (osr_before_baselined || function->IsMarkedForOptimization() ||
+ (function->IsMarkedForOptimization() ||
function->IsMarkedForConcurrentOptimization() ||
function->IsOptimized())) {
// Attempt OSR if we are still running interpreted code even though the
@@ -430,8 +402,28 @@ OptimizationReason RuntimeProfiler::ShouldOptimizeIgnition(
int typeinfo, generic, total, type_percentage, generic_percentage;
GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
&generic_percentage);
- if (type_percentage >= FLAG_type_info_threshold) {
- return OptimizationReason::kSmallFunction;
+ if (type_percentage < FLAG_type_info_threshold) {
+ if (FLAG_trace_opt_verbose) {
+ PrintF("[not yet optimizing ");
+ function->PrintName();
+ PrintF(
+ ", not enough type info for small function optimization: %d/%d "
+ "(%d%%)]\n",
+ typeinfo, total, type_percentage);
+ }
+ return OptimizationReason::kDoNotOptimize;
+ }
+ return OptimizationReason::kSmallFunction;
+ } else if (FLAG_trace_opt_verbose) {
+ PrintF("[not yet optimizing ");
+ function->PrintName();
+ PrintF(", not enough ticks: %d/%d and ", ticks,
+ kProfilerTicksBeforeOptimization);
+ if (any_ic_changed_) {
+ PrintF("ICs changed]\n");
+ } else {
+ PrintF(" too large for small function optimization: %d/%d]\n",
+ shared->bytecode_array()->Size(), kMaxSizeEarlyOptIgnition);
}
}
return OptimizationReason::kDoNotOptimize;
@@ -455,17 +447,9 @@ void RuntimeProfiler::MarkCandidatesForOptimization() {
JavaScriptFrame* frame = it.frame();
JSFunction* function = frame->function();
- Compiler::CompilationTier next_tier =
- Compiler::NextCompilationTier(function);
if (function->shared()->IsInterpreted()) {
- if (next_tier == Compiler::BASELINE) {
- MaybeBaselineIgnition(function, frame);
- } else {
- DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
- MaybeOptimizeIgnition(function, frame);
- }
+ MaybeOptimizeIgnition(function, frame);
} else {
- DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
MaybeOptimizeFullCodegen(function, frame, frame_count);
}