author    Michaël Zasso <targos@protonmail.com>    2017-09-12 11:34:59 +0200
committer Anna Henningsen <anna@addaleax.net>    2017-09-13 16:15:18 +0200
commit    d82e1075dbc2cec2d6598ade10c1f43805f690fd (patch)
tree      ccd242b9b491dfc341d1099fe11b0ef528839877 /deps/v8/tools
parent    b4b7ac6ae811b2b5a3082468115dfb5a5246fe3f (diff)
deps: update V8 to 6.1.534.36
PR-URL: https://github.com/nodejs/node/pull/14730
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Diffstat (limited to 'deps/v8/tools')
-rwxr-xr-x  deps/v8/tools/dev/gen-tags.py  2
-rwxr-xr-x  deps/v8/tools/dev/gm.py  71
-rwxr-xr-x  deps/v8/tools/eval_gc_time.sh  3
-rw-r--r--  deps/v8/tools/foozzie/testdata/failure_output.txt  4
-rwxr-xr-x  deps/v8/tools/foozzie/v8_foozzie.py  19
-rw-r--r--  deps/v8/tools/foozzie/v8_mock.js  10
-rw-r--r--  deps/v8/tools/foozzie/v8_mock_archs.js  21
-rwxr-xr-x  deps/v8/tools/gcov.sh  67
-rw-r--r--  deps/v8/tools/gdbinit  9
-rw-r--r--  deps/v8/tools/gen-inlining-tests.py  2
-rw-r--r--  deps/v8/tools/gen-postmortem-metadata.py  11
-rw-r--r--  deps/v8/tools/ic-explorer.html  14
-rw-r--r--  deps/v8/tools/ic-processor.js  27
-rwxr-xr-x  deps/v8/tools/ignition/linux_perf_bytecode_annotate.py  4
-rwxr-xr-x  deps/v8/tools/ignition/linux_perf_report.py  6
-rwxr-xr-x  deps/v8/tools/js2c.py  21
-rwxr-xr-x  deps/v8/tools/lldbinit  2
-rw-r--r--  deps/v8/tools/memory/lsan/suppressions.txt  16
-rwxr-xr-x  deps/v8/tools/presubmit.py  9
-rwxr-xr-x  deps/v8/tools/release/backport_node.py  119
-rw-r--r--  deps/v8/tools/release/common_includes.py  11
-rwxr-xr-x  deps/v8/tools/release/create_release.py  34
-rwxr-xr-x  deps/v8/tools/release/filter_build_files.py  101
-rw-r--r--  deps/v8/tools/release/git_recipes.py  7
-rwxr-xr-x  deps/v8/tools/release/merge_to_branch.py  8
-rwxr-xr-x  deps/v8/tools/release/push_to_candidates.py  20
-rwxr-xr-x  deps/v8/tools/release/test_backport_node.py  71
-rwxr-xr-x  deps/v8/tools/release/test_scripts.py  24
-rwxr-xr-x  deps/v8/tools/run-deopt-fuzzer.py  7
-rwxr-xr-x  deps/v8/tools/run-tests.py  88
-rwxr-xr-x  deps/v8/tools/run_perf.py  18
-rwxr-xr-x  deps/v8/tools/sanitizers/sancov_formatter.py  55
-rw-r--r--  deps/v8/tools/sanitizers/sancov_formatter_test.py  3
-rw-r--r--  deps/v8/tools/testrunner/local/statusfile.py  10
-rw-r--r--  deps/v8/tools/testrunner/local/variants.py  16
-rw-r--r--  deps/v8/tools/testrunner/testrunner.isolate  7
-rw-r--r--  deps/v8/tools/tickprocessor.js  6
-rwxr-xr-x  deps/v8/tools/try_perf.py  5
-rw-r--r--  deps/v8/tools/turbolizer/README.md  4
-rw-r--r--  deps/v8/tools/v8heapconst.py  318
-rwxr-xr-x  deps/v8/tools/verify_source_deps.py  1
-rwxr-xr-x  deps/v8/tools/wasm/update-wasm-fuzzers.sh  16
-rwxr-xr-x  deps/v8/tools/wasm/update-wasm-spec-tests.sh  2
-rw-r--r--  deps/v8/tools/whitespace.txt  2
44 files changed, 816 insertions, 455 deletions
diff --git a/deps/v8/tools/dev/gen-tags.py b/deps/v8/tools/dev/gen-tags.py
index 4e0e98c8d4..256f65a401 100755
--- a/deps/v8/tools/dev/gen-tags.py
+++ b/deps/v8/tools/dev/gen-tags.py
@@ -20,7 +20,7 @@ import subprocess
import sys
# All arches that this script understands.
-ARCHES = ["ia32", "x64", "arm", "arm64", "mips", "mips64", "ppc", "s390", "x87"]
+ARCHES = ["ia32", "x64", "arm", "arm64", "mips", "mips64", "ppc", "s390"]
def PrintHelpAndExit():
print(__doc__)
diff --git a/deps/v8/tools/dev/gm.py b/deps/v8/tools/dev/gm.py
index 96b9c3816b..21af4ff31c 100755
--- a/deps/v8/tools/dev/gm.py
+++ b/deps/v8/tools/dev/gm.py
@@ -18,18 +18,22 @@ All arguments are optional. Most combinations should work, e.g.:
"""
# See HELP below for additional documentation.
+from __future__ import print_function
+import errno
+import multiprocessing
import os
+import pty
import subprocess
import sys
BUILD_OPTS_DEFAULT = ""
-BUILD_OPTS_GOMA = "-j1000 -l50"
+BUILD_OPTS_GOMA = "-j1000 -l%d" % (multiprocessing.cpu_count() + 2)
BUILD_TARGETS_TEST = ["d8", "cctest", "unittests"]
BUILD_TARGETS_ALL = ["all"]
# All arches that this script understands.
ARCHES = ["ia32", "x64", "arm", "arm64", "mipsel", "mips64el", "ppc", "ppc64",
- "s390", "s390x", "x87"]
+ "s390", "s390x"]
# Arches that get built/run when you don't specify any.
DEFAULT_ARCHES = ["ia32", "x64", "arm", "arm64"]
# Modes that this script understands.
@@ -84,6 +88,8 @@ def DetectGoma():
home_goma = os.path.expanduser("~/goma")
if os.path.exists(home_goma):
return home_goma
+ if os.environ.get("GOMA_DIR"):
+ return os.environ.get("GOMA_DIR")
if os.environ.get("GOMADIR"):
return os.environ.get("GOMADIR")
return None
@@ -92,18 +98,18 @@ GOMADIR = DetectGoma()
IS_GOMA_MACHINE = GOMADIR is not None
USE_GOMA = "true" if IS_GOMA_MACHINE else "false"
-BUILD_OPTS = BUILD_OPTS_GOMA if IS_GOMA_MACHINE else BUILD_OPTS_DEFAULT
RELEASE_ARGS_TEMPLATE = """\
is_component_build = false
is_debug = false
%s
use_goma = {GOMA}
+goma_dir = \"{GOMA_DIR}\"
v8_enable_backtrace = true
v8_enable_disassembler = true
v8_enable_object_print = true
v8_enable_verify_heap = true
-""".replace("{GOMA}", USE_GOMA)
+""".replace("{GOMA}", USE_GOMA).replace("{GOMA_DIR}", str(GOMADIR))
DEBUG_ARGS_TEMPLATE = """\
is_component_build = true
@@ -111,10 +117,11 @@ is_debug = true
symbol_level = 2
%s
use_goma = {GOMA}
+goma_dir = \"{GOMA_DIR}\"
v8_enable_backtrace = true
v8_enable_slow_dchecks = true
v8_optimized_debug = false
-""".replace("{GOMA}", USE_GOMA)
+""".replace("{GOMA}", USE_GOMA).replace("{GOMA_DIR}", str(GOMADIR))
OPTDEBUG_ARGS_TEMPLATE = """\
is_component_build = true
@@ -122,10 +129,11 @@ is_debug = true
symbol_level = 1
%s
use_goma = {GOMA}
+goma_dir = \"{GOMA_DIR}\"
v8_enable_backtrace = true
v8_enable_verify_heap = true
v8_optimized_debug = true
-""".replace("{GOMA}", USE_GOMA)
+""".replace("{GOMA}", USE_GOMA).replace("{GOMA_DIR}", str(GOMADIR))
ARGS_TEMPLATES = {
"release": RELEASE_ARGS_TEMPLATE,
@@ -142,6 +150,35 @@ def _Call(cmd, silent=False):
if not silent: print("# %s" % cmd)
return subprocess.call(cmd, shell=True)
+def _CallWithOutputNoTerminal(cmd):
+ return subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
+
+def _CallWithOutput(cmd):
+ print("# %s" % cmd)
+ # The following trickery is required so that the 'cmd' thinks it's running
+ # in a real terminal, while this script gets to intercept its output.
+ master, slave = pty.openpty()
+ p = subprocess.Popen(cmd, shell=True, stdin=slave, stdout=slave, stderr=slave)
+ os.close(slave)
+ output = []
+ try:
+ while True:
+ try:
+ data = os.read(master, 512)
+ except OSError as e:
+ if e.errno != errno.EIO: raise
+ break # EIO means EOF on some systems
+ else:
+ if not data: # EOF
+ break
+ print(data, end="")
+ sys.stdout.flush()
+ output.append(data)
+ finally:
+ os.close(master)
+ p.wait()
+ return p.returncode, "".join(output)
+
def _Which(cmd):
for path in os.environ["PATH"].split(os.pathsep):
if os.path.exists(os.path.join(path, cmd)):
@@ -191,6 +228,11 @@ class Config(object):
arch_specific = self.GetTargetCpu() + self.GetV8TargetCpu()
return template % arch_specific
+ def WantsGoma(self):
+ output = _CallWithOutputNoTerminal(
+ "gn args --short --list=use_goma %s" % (GetPath(self.arch, self.mode)))
+ return "true" in output
+
def Build(self):
path = GetPath(self.arch, self.mode)
args_gn = os.path.join(path, "args.gn")
@@ -202,7 +244,22 @@ class Config(object):
code = _Call("gn gen %s" % path)
if code != 0: return code
targets = " ".join(self.targets)
- return _Call("ninja -C %s %s %s" % (path, BUILD_OPTS, targets))
+ build_opts = BUILD_OPTS_GOMA if self.WantsGoma() else BUILD_OPTS_DEFAULT
+ # The implementation of mksnapshot failure detection relies on
+ # the "pty" module and GDB presence, so skip it on non-Linux.
+ if "linux" not in sys.platform:
+ return _Call("ninja -C %s %s %s" % (path, build_opts, targets))
+
+ return_code, output = _CallWithOutput("ninja -C %s %s %s" %
+ (path, build_opts, targets))
+ if return_code != 0 and "FAILED: gen/snapshot.cc" in output:
+ _Notify("V8 build requires your attention",
+ "Detected mksnapshot failure, re-running in GDB...")
+ _Call("gdb -args %(path)s/mksnapshot "
+ "--startup_src %(path)s/gen/snapshot.cc "
+ "--random-seed 314159265 "
+ "--startup-blob %(path)s/snapshot_blob.bin" % {"path": path})
+ return return_code
def RunTests(self):
if not self.tests: return 0
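For reference, the output-capture trick that gm.py gains above can be sketched in isolation. This is a minimal sketch following the same Python 2 conventions as the script; the example command at the bottom is hypothetical:

    import errno, os, pty, subprocess, sys

    def run_in_fake_terminal(cmd):
        # Give the child a pseudo-terminal so it behaves as if attached to a
        # real TTY, while the parent reads everything from the master end.
        master, slave = pty.openpty()
        p = subprocess.Popen(cmd, shell=True, stdin=slave, stdout=slave, stderr=slave)
        os.close(slave)  # only the child keeps the slave end open
        chunks = []
        try:
            while True:
                try:
                    data = os.read(master, 512)
                except OSError as e:
                    if e.errno != errno.EIO: raise
                    break  # EIO signals EOF once the child closes the pty
                if not data:  # EOF
                    break
                sys.stdout.write(data)
                sys.stdout.flush()
                chunks.append(data)
        finally:
            os.close(master)
        p.wait()
        return p.returncode, "".join(chunks)

    # Hypothetical usage:
    # code, out = run_in_fake_terminal("ninja -C out.gn/x64.release d8")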
diff --git a/deps/v8/tools/eval_gc_time.sh b/deps/v8/tools/eval_gc_time.sh
index 70786041d7..9abc93a95f 100755
--- a/deps/v8/tools/eval_gc_time.sh
+++ b/deps/v8/tools/eval_gc_time.sh
@@ -84,12 +84,10 @@ INTERESTING_NEW_GEN_KEYS="\
weak \
roots \
old_new \
- code \
semispace \
"
INTERESTING_OLD_GEN_KEYS="\
- clear.code_flush \
clear.dependent_code \
clear.global_handles \
clear.maps \
@@ -112,7 +110,6 @@ INTERESTING_OLD_GEN_KEYS="\
external.mc_incremental_epilogue \
external.weak_global_handles \
mark.finish_incremental \
- mark.prepare_code_flush \
mark.roots \
mark.weak_closure \
mark.weak_closure.ephemeral \
diff --git a/deps/v8/tools/foozzie/testdata/failure_output.txt b/deps/v8/tools/foozzie/testdata/failure_output.txt
index 33a6161565..9ac2c522e8 100644
--- a/deps/v8/tools/foozzie/testdata/failure_output.txt
+++ b/deps/v8/tools/foozzie/testdata/failure_output.txt
@@ -9,9 +9,9 @@
# Compared x64,ignition with x64,ignition_turbo
#
# Flags of x64,ignition:
---abort_on_stack_overflow --expose-gc --allow-natives-syntax --invoke-weak-callbacks --omit-quit --es-staging --random-seed 12345 --ignition --turbo-filter=~ --hydrogen-filter=~ --noopt
+--abort_on_stack_overflow --expose-gc --allow-natives-syntax --invoke-weak-callbacks --omit-quit --es-staging --random-seed 12345 --turbo-filter=~ --noopt
# Flags of x64,ignition_turbo:
---abort_on_stack_overflow --expose-gc --allow-natives-syntax --invoke-weak-callbacks --omit-quit --es-staging --random-seed 12345 --ignition --turbo
+--abort_on_stack_overflow --expose-gc --allow-natives-syntax --invoke-weak-callbacks --omit-quit --es-staging --random-seed 12345
#
# Difference:
- unknown
diff --git a/deps/v8/tools/foozzie/v8_foozzie.py b/deps/v8/tools/foozzie/v8_foozzie.py
index 6f585fdf8a..14aff4294f 100755
--- a/deps/v8/tools/foozzie/v8_foozzie.py
+++ b/deps/v8/tools/foozzie/v8_foozzie.py
@@ -21,45 +21,28 @@ import v8_suppressions
CONFIGS = dict(
default=[],
- fullcode=[
- '--noopt',
- '--turbo-filter=~',
- ],
ignition=[
- '--ignition',
'--turbo-filter=~',
- '--hydrogen-filter=~',
'--noopt',
],
ignition_asm=[
- '--ignition',
'--turbo-filter=~',
- '--hydrogen-filter=~',
'--noopt',
'--validate-asm',
'--stress-validate-asm',
'--suppress-asm-messages',
],
ignition_eager=[
- '--ignition',
'--turbo-filter=~',
- '--hydrogen-filter=~',
'--noopt',
'--no-lazy',
'--no-lazy-inner-functions',
],
- ignition_turbo=[
- '--ignition',
- '--turbo',
- ],
+ ignition_turbo=[],
ignition_turbo_opt=[
- '--ignition',
- '--turbo',
'--always-opt',
],
ignition_turbo_opt_eager=[
- '--ignition',
- '--turbo',
'--always-opt',
'--no-lazy',
'--no-lazy-inner-functions',
diff --git a/deps/v8/tools/foozzie/v8_mock.js b/deps/v8/tools/foozzie/v8_mock.js
index 836a1c3adb..5d15304cd7 100644
--- a/deps/v8/tools/foozzie/v8_mock.js
+++ b/deps/v8/tools/foozzie/v8_mock.js
@@ -94,8 +94,14 @@ Object.defineProperty(
var mock = function(arrayType) {
var handler = {
construct: function(target, args) {
- return new Proxy(
- Function.prototype.bind.apply(arrayType, [null].concat(args)), {});
+ var obj = new (Function.prototype.bind.apply(arrayType, [null].concat(args)));
+ return new Proxy(obj, {
+ get: function(x, prop) {
+ if (typeof x[prop] == "function")
+ return x[prop].bind(obj)
+ return x[prop];
+ },
+ });
},
};
return new Proxy(arrayType, handler);
diff --git a/deps/v8/tools/foozzie/v8_mock_archs.js b/deps/v8/tools/foozzie/v8_mock_archs.js
index 227d76724c..507f31a3a2 100644
--- a/deps/v8/tools/foozzie/v8_mock_archs.js
+++ b/deps/v8/tools/foozzie/v8_mock_archs.js
@@ -43,3 +43,24 @@
Float32Array = mock(Float32Array);
Float64Array = mock(Float64Array);
})();
+
+// Mock typed array set function and limit maximum offset to 1MiB.
+(function () {
+ var typedArrayTypes = [
+ Int8Array,
+ Uint8Array,
+ Uint8ClampedArray,
+ Int16Array,
+ Uint16Array,
+ Int32Array,
+ Uint32Array,
+ Float32Array,
+ Float64Array,
+ ];
+ for (let typedArrayType of typedArrayTypes) {
+ let set = typedArrayType.prototype.set
+ typedArrayType.prototype.set = function(array, offset) {
+ set.apply(this, [array, offset > 1048576 ? 1048576 : offset])
+ };
+ }
+})();
diff --git a/deps/v8/tools/gcov.sh b/deps/v8/tools/gcov.sh
new file mode 100755
index 0000000000..90f3974c85
--- /dev/null
+++ b/deps/v8/tools/gcov.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+#
+# Copyright 2017 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Build and collect code coverage data, cumulatively, on specified architectures.
+
+BUILD_TYPE=${BUILD_TYPE:-Release}
+
+declare -A modes=( [Release]=release [Debug]=debug )
+declare -A pairs=( [arm]=ia32 [arm64]=x64 [ia32]=ia32 [x64]=x64 )
+
+if [ -z ${modes[$BUILD_TYPE]} ]
+then
+ echo "BUILD_TYPE must be {<unspecified>|Release|Debug}"
+ echo "Release is default"
+ exit
+fi
+
+mode=${modes[$BUILD_TYPE]}
+
+echo "Using build:" $BUILD_TYPE
+v8_root=$(readlink -f $(dirname $BASH_SOURCE)/../)
+work_dir=$v8_root/cov
+build_dir=$work_dir/$BUILD_TYPE
+
+if [ -z $@ ]
+then
+ echo "Pass at least one target architecture"
+ echo "Supported architectures: x64 ia32 arm arm64"
+ echo ""
+ echo "Example: ./tools/gcov.sh x64 arm"
+ echo ""
+ echo "Optionally, set BUILD_TYPE env variable to"
+ echo "either Debug or Release, to use the corresponding build."
+ echo "By default, BUILD_TYPE is Release."
+ echo ""
+ echo "Example: BUILD_TYPE=Debug ./tools/gcov.sh x64 arm"
+ echo ""
+ exit
+fi
+
+lcov --directory=$build_dir --zerocounters
+
+# Mapping v8 build terminology to gnu compiler terminology:
+# target_arch is the host, and
+# v8_target_arch is the target
+
+for v8_target_arch in "$@"
+do
+ target_arch=${pairs[$v8_target_arch]}
+ if [ -z $target_arch ]
+ then
+ echo "Skipping unknown architecture: " $v8_target_arch
+ else
+ echo "Building" $v8_target_arch
+ GYP_DEFINES="component=static_library use_goma=1 target_arch=$target_arch v8_target_arch=$v8_target_arch coverage=1 clang=0" python $v8_root/gypfiles/gyp_v8.py -G output_dir=$work_dir
+ ninja -C $build_dir -j2000
+ $v8_root/tools/run-tests.py --gcov-coverage --arch=$v8_target_arch --mode=$mode --shell-dir=$build_dir --exhaustive-variants
+ fi
+done
+
+lcov --directory=$build_dir --capture --output-file $work_dir/app.info
+genhtml --output-directory $work_dir/html $work_dir/app.info
+echo "Done"
+echo "Output available at: " $work_dir/html/index.html
diff --git a/deps/v8/tools/gdbinit b/deps/v8/tools/gdbinit
index 2d7d254ca4..03ecfdda30 100644
--- a/deps/v8/tools/gdbinit
+++ b/deps/v8/tools/gdbinit
@@ -75,6 +75,15 @@ Print a v8 TransitionArray object
Usage: jta tagged_ptr
end
+# Print TransitionTree.
+define jtt
+call _v8_internal_Print_TransitionTree((void*)($arg0))
+end
+document jtt
+Print the complete transition tree of the given v8 Map.
+Usage: jtt tagged_ptr
+end
+
# Print JavaScript stack trace.
define jst
call _v8_internal_Print_StackTrace()
diff --git a/deps/v8/tools/gen-inlining-tests.py b/deps/v8/tools/gen-inlining-tests.py
index 1a377e61ed..a79023642e 100644
--- a/deps/v8/tools/gen-inlining-tests.py
+++ b/deps/v8/tools/gen-inlining-tests.py
@@ -19,7 +19,7 @@ PREAMBLE = """
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Flags: --allow-natives-syntax --turbo --no-always-opt
+// Flags: --allow-natives-syntax --no-always-opt
// This test file was generated by tools/gen-inlining-tests.py .
diff --git a/deps/v8/tools/gen-postmortem-metadata.py b/deps/v8/tools/gen-postmortem-metadata.py
index 9e0b7c8a0c..188de931c8 100644
--- a/deps/v8/tools/gen-postmortem-metadata.py
+++ b/deps/v8/tools/gen-postmortem-metadata.py
@@ -72,7 +72,6 @@ consts_misc = [
{ 'name': 'ConsStringTag', 'value': 'kConsStringTag' },
{ 'name': 'ExternalStringTag', 'value': 'kExternalStringTag' },
{ 'name': 'SlicedStringTag', 'value': 'kSlicedStringTag' },
- { 'name': 'ThinStringTag', 'value': 'kThinStringTag' },
{ 'name': 'HeapObjectTag', 'value': 'kHeapObjectTag' },
{ 'name': 'HeapObjectTagMask', 'value': 'kHeapObjectTagMask' },
@@ -155,9 +154,9 @@ consts_misc = [
'value': 'DescriptorArray::kEntrySize' },
{ 'name': 'elements_fast_holey_elements',
- 'value': 'FAST_HOLEY_ELEMENTS' },
+ 'value': 'HOLEY_ELEMENTS' },
{ 'name': 'elements_fast_elements',
- 'value': 'FAST_ELEMENTS' },
+ 'value': 'PACKED_ELEMENTS' },
{ 'name': 'elements_dictionary_elements',
'value': 'DICTIONARY_ELEMENTS' },
@@ -193,9 +192,9 @@ consts_misc = [
'value': 'ScopeInfo::kVariablePartIndex' },
{ 'name': 'sharedfunctioninfo_start_position_mask',
- 'value': 'SharedFunctionInfo::kStartPositionMask' },
+ 'value': 'SharedFunctionInfo::StartPositionBits::kMask' },
{ 'name': 'sharedfunctioninfo_start_position_shift',
- 'value': 'SharedFunctionInfo::kStartPositionShift' },
+ 'value': 'SharedFunctionInfo::StartPositionBits::kShift' },
{ 'name': 'jsarray_buffer_was_neutered_mask',
'value': 'JSArrayBuffer::WasNeutered::kMask' },
@@ -371,7 +370,7 @@ def load_objects_from_file(objfilename, checktypes):
# do so without the embedded newlines.
#
for line in objfile:
- if (line.startswith('enum InstanceType {')):
+ if (line.startswith('enum InstanceType : uint8_t {')):
in_insttype = True;
continue;
diff --git a/deps/v8/tools/ic-explorer.html b/deps/v8/tools/ic-explorer.html
index fe3746c88a..c557de8c0f 100644
--- a/deps/v8/tools/ic-explorer.html
+++ b/deps/v8/tools/ic-explorer.html
@@ -93,20 +93,6 @@ code is governed by a BSD-style license that can be found in the LICENSE file.
old_state, new_state));
}
- processBinaryOpIC(pc, line, column, stub, old_state, new_state,
- allocation_site) {
- let fnName = this.functionName(pc);
- this.entries.push(new Entry("BinaryOpIc", fnName, line, column, name,
- old_state, new_state));
-
- }
-
- processToBooleanICfunction(pc, line, column, stub, old_state, new_state) {
- let fnName = this.functionName(pc);
- this.entries.push(new Entry("ToBooleanIC", fnName, line, column, name,
- old_state, new_state));
- }
-
processPatchIC(pc, test, delta) {
}
diff --git a/deps/v8/tools/ic-processor.js b/deps/v8/tools/ic-processor.js
index 6623b69ed5..9f7c4a91cd 100644
--- a/deps/v8/tools/ic-processor.js
+++ b/deps/v8/tools/ic-processor.js
@@ -60,13 +60,6 @@ function IcProcessor() {
parsers : [parseInt, parseInt, parseInt, parseInt, null, null, null,
null, null, null, null],
processor: this.processCompareIC },
- 'BinaryOpIC': {
- parsers : [parseInt, parseInt, parseInt, parseInt, null, null,
- parseInt],
- processor: this.processBinaryOpIC },
- 'ToBooleanIC': {
- parsers : [parseInt, parseInt, parseInt, parseInt, null, null],
- processor: this.processToBooleanIC },
'PatchIC': {
parsers : [parseInt, parseInt, parseInt],
processor: this.processPatchIC },
@@ -79,8 +72,6 @@ function IcProcessor() {
this.KeyedLoadIC = 0;
this.KeyedStoreIC = 0;
this.CompareIC = 0;
- this.BinaryOpIC = 0;
- this.ToBooleanIC = 0;
this.PatchIC = 0;
}
inherits(IcProcessor, LogReader);
@@ -123,8 +114,6 @@ IcProcessor.prototype.processLogFile = function(fileName) {
print("KeyedLoad: " + this.KeyedLoadIC);
print("KeyedStore: " + this.KeyedStoreIC);
print("CompareIC: " + this.CompareIC);
- print("BinaryOpIC: " + this.BinaryOpIC);
- print("ToBooleanIC: " + this.ToBooleanIC);
print("PatchIC: " + this.PatchIC);
};
@@ -192,22 +181,6 @@ IcProcessor.prototype.processCompareIC = function (
this.formatName(entry) + ":" + line + ":" + column);
}
-IcProcessor.prototype.processBinaryOpIC = function (
- pc, line, column, stub, old_state, new_state, allocation_site) {
- var entry = this.profile_.findEntry(pc);
- this.BinaryOpIC++;
- print("BinaryOpIC (" + old_state + "->" + new_state + ") at " +
- this.formatName(entry) + ":" + line + ":" + column);
-}
-
-IcProcessor.prototype.processToBooleanIC = function (
- pc, line, column, stub, old_state, new_state) {
- var entry = this.profile_.findEntry(pc);
- this.ToBooleanIC++;
- print("ToBooleanIC (" + old_state + "->" + new_state + ") at " +
- this.formatName(entry) + ":" + line + ":" + column);
-}
-
IcProcessor.prototype.processPatchIC = function (pc, test, delta) {
var entry = this.profile_.findEntry(pc);
this.PatchIC++;
diff --git a/deps/v8/tools/ignition/linux_perf_bytecode_annotate.py b/deps/v8/tools/ignition/linux_perf_bytecode_annotate.py
index 6681190d99..b2422323b0 100755
--- a/deps/v8/tools/ignition/linux_perf_bytecode_annotate.py
+++ b/deps/v8/tools/ignition/linux_perf_bytecode_annotate.py
@@ -156,8 +156,8 @@ def main():
v8_root_path = os.path.dirname(__file__) + "/../../"
d8_path = "{}/out/{}.debug/d8".format(v8_root_path, program_options.arch)
- d8_codegen = subprocess.Popen([d8_path, "--ignition",
- "--trace-ignition-codegen", "-e", "1"],
+ d8_codegen = subprocess.Popen([d8_path, "--trace-ignition-codegen",
+ "-e", "1"],
stdout=subprocess.PIPE)
bytecode_offsets = bytecode_offset_generator(
diff --git a/deps/v8/tools/ignition/linux_perf_report.py b/deps/v8/tools/ignition/linux_perf_report.py
index d2327ca6b8..268bed55b7 100755
--- a/deps/v8/tools/ignition/linux_perf_report.py
+++ b/deps/v8/tools/ignition/linux_perf_report.py
@@ -24,8 +24,7 @@ examples:
# without considering the time spent compiling JS code, entry trampoline
# samples and other non-Ignition samples.
#
- $ tools/run-perf.sh out/x64.release/d8 \\
- --ignition --noturbo --noopt run.js
+ $ tools/run-perf.sh out/x64.release/d8 --noopt run.js
$ tools/ignition/linux_perf_report.py --flamegraph -o out.collapsed
$ flamegraph.pl --colors js out.collapsed > out.svg
@@ -44,8 +43,7 @@ examples:
# See the hottest bytecodes on Octane benchmark, by number of samples.
#
- $ tools/run-perf.sh out/x64.release/d8 \\
- --ignition --noturbo --noopt octane/run.js
+ $ tools/run-perf.sh out/x64.release/d8 --noopt octane/run.js
$ tools/ignition/linux_perf_report.py
"""
diff --git a/deps/v8/tools/js2c.py b/deps/v8/tools/js2c.py
index b676d662e7..f6dcf90b7b 100755
--- a/deps/v8/tools/js2c.py
+++ b/deps/v8/tools/js2c.py
@@ -152,19 +152,8 @@ class TextMacro:
return mapping[match.group(0)]
return re.sub(any_key_pattern, replace, self.body)
-class PythonMacro:
- def __init__(self, args, fun):
- self.args = args
- self.fun = fun
- def expand(self, mapping):
- args = []
- for arg in self.args:
- args.append(mapping[arg])
- return str(self.fun(*args))
-
CONST_PATTERN = re.compile(r'^define\s+([a-zA-Z0-9_]+)\s*=\s*([^;]*);$')
MACRO_PATTERN = re.compile(r'^macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*=\s*([^;]*);$')
-PYTHON_MACRO_PATTERN = re.compile(r'^python\s+macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*=\s*([^;]*);$')
def ReadMacros(lines):
@@ -188,15 +177,7 @@ def ReadMacros(lines):
body = macro_match.group(3).strip()
macros.append((re.compile("\\b%s\\(" % name), TextMacro(args, body)))
else:
- python_match = PYTHON_MACRO_PATTERN.match(line)
- if python_match:
- name = python_match.group(1)
- args = [match.strip() for match in python_match.group(2).split(',')]
- body = python_match.group(3).strip()
- fun = eval("lambda " + ",".join(args) + ': ' + body)
- macros.append((re.compile("\\b%s\\(" % name), PythonMacro(args, fun)))
- else:
- raise Error("Illegal line: " + line)
+ raise Error("Illegal line: " + line)
return (constants, macros)
diff --git a/deps/v8/tools/lldbinit b/deps/v8/tools/lldbinit
index 7b8853cd74..be828dc630 100755
--- a/deps/v8/tools/lldbinit
+++ b/deps/v8/tools/lldbinit
@@ -26,4 +26,6 @@ command regex -h 'Print a v8 LayoutDescriptor object' jld 's/(.+)/expr -- '_v8_i
# Print TransitionArray.
command regex -h 'Print a v8 TransitionArray object' jta 's/(.+)/expr -- '_v8_internal_Print_TransitionArray((void*)(%1))/'
+command regex -h 'Print the transition tree of a v8 Map' jtt 's/(.+)/expr -- '_v8_internal_Print_TransitionTree((void*)(%1))/'
+
command script import ~/lldb_commands.py
diff --git a/deps/v8/tools/memory/lsan/suppressions.txt b/deps/v8/tools/memory/lsan/suppressions.txt
deleted file mode 100644
index f5c73935be..0000000000
--- a/deps/v8/tools/memory/lsan/suppressions.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-# Do not add new suppressions below.
-# TODO(machenbach): Delete this file as soon as it is empty.
-
-# cctest
-leak:v8::internal::Debug::NextAsyncTaskId
-leak:v8::internal::wasm::DecodeWasmModule
-leak:v8::internal::wasm::WasmInterpreter::WasmInterpreter
-leak:v8::internal::WasmDebugInfo::SetBreakpoint
-
-# debugger
-leak:v8_inspector::WasmTranslation::TranslatorImpl::DisassemblingTranslator::AddFakeScript
-leak:v8::internal::compiler::JumpThreading::ApplyForwarding
-
-# mjsunit
-leak:v8::internal::FuncNameInferrer::FuncNameInferrer
-leak:v8::internal::JSArrayBuffer::SetupAllocatingData
diff --git a/deps/v8/tools/presubmit.py b/deps/v8/tools/presubmit.py
index c95936037b..d32ce9dfa2 100755
--- a/deps/v8/tools/presubmit.py
+++ b/deps/v8/tools/presubmit.py
@@ -70,7 +70,7 @@ LINT_RULES = """
LINT_OUTPUT_PATTERN = re.compile(r'^.+[:(]\d+[:)]|^Done processing')
FLAGS_LINE = re.compile("//\s*Flags:.*--([A-z0-9-])+_[A-z0-9].*\n")
ASSERT_OPTIMIZED_PATTERN = re.compile("assertOptimized")
-FLAGS_ENABLE_OPT = re.compile("//\s*Flags:.*--(opt|turbo)[^-].*\n")
+FLAGS_ENABLE_OPT = re.compile("//\s*Flags:.*--opt[^-].*\n")
ASSERT_UNOPTIMIZED_PATTERN = re.compile("assertUnoptimized")
FLAGS_NO_ALWAYS_OPT = re.compile("//\s*Flags:.*--no-?always-opt.*\n")
@@ -249,7 +249,6 @@ class CppLintProcessor(SourceFileProcessor):
return True
filters = ",".join([n for n in LINT_RULES])
- command = [sys.executable, 'cpplint.py', '--filter', filters]
cpplint = self.GetCpplintScript(TOOLS_PATH)
if cpplint is None:
print('Could not find cpplint.py. Make sure '
@@ -258,7 +257,7 @@ class CppLintProcessor(SourceFileProcessor):
command = [sys.executable, cpplint, '--filter', filters]
- commands = join([command + [file] for file in files])
+ commands = [command + [file] for file in files]
count = multiprocessing.cpu_count()
pool = multiprocessing.Pool(count)
try:
@@ -413,8 +412,8 @@ class SourceProcessor(SourceFileProcessor):
if not "mjsunit/mjsunit.js" in name:
if ASSERT_OPTIMIZED_PATTERN.search(contents) and \
not FLAGS_ENABLE_OPT.search(contents):
- print "%s Flag --opt or --turbo should be set " \
- "if assertOptimized() is used" % name
+ print "%s Flag --opt should be set if " \
+ "assertOptimized() is used" % name
result = False
if ASSERT_UNOPTIMIZED_PATTERN.search(contents) and \
not FLAGS_NO_ALWAYS_OPT.search(contents):
diff --git a/deps/v8/tools/release/backport_node.py b/deps/v8/tools/release/backport_node.py
new file mode 100755
index 0000000000..862da82b1e
--- /dev/null
+++ b/deps/v8/tools/release/backport_node.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# Copyright 2017 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Use this script to cherry-pick a V8 commit to backport to a Node.js checkout.
+
+Requirements:
+ - Node.js checkout to backport to.
+ - V8 checkout that contains the commit to cherry-pick.
+
+Usage:
+ $ backport_node.py <path_to_v8> <path_to_node> <commit-hash>
+
+ This will apply the commit to <path_to_node>/deps/v8 and create a commit in
+ the Node.js checkout, increment patch level, and copy over the original
+ commit message.
+
+Optional flags:
+ --no-review Skip editing the commit message in an editor.
+"""
+
+import argparse
+import os
+import subprocess
+import re
+import sys
+
+from common_includes import *
+
+TARGET_SUBDIR = os.path.join("deps", "v8")
+VERSION_FILE = os.path.join("include", "v8-version.h")
+VERSION_PATTERN = r'(?<=#define V8_PATCH_LEVEL )\d+'
+
+def Clean(options):
+ print ">> Cleaning target directory."
+ subprocess.check_call(["git", "clean", "-fd"],
+ cwd = os.path.join(options.node_path, TARGET_SUBDIR))
+
+def CherryPick(options):
+ print ">> Apply patch."
+ patch = subprocess.Popen(["git", "diff-tree", "-p", options.commit],
+ stdout=subprocess.PIPE, cwd=options.v8_path)
+ patch.wait()
+ try:
+ subprocess.check_output(["git", "apply", "-3", "--directory=%s" % TARGET_SUBDIR],
+ stdin=patch.stdout, cwd=options.node_path)
+ except:
+ print ">> In another shell, please resolve patch conflicts"
+ print ">> and `git add` affected files."
+ print ">> Finally continue by entering RESOLVED."
+ while raw_input("[RESOLVED]") != "RESOLVED":
+ print ">> You need to type RESOLVED"
+
+def UpdateVersion(options):
+ print ">> Increment patch level."
+ version_file = os.path.join(options.node_path, TARGET_SUBDIR, VERSION_FILE)
+ text = FileToText(version_file)
+ def increment(match):
+ patch = int(match.group(0))
+ return str(patch + 1)
+ text = re.sub(VERSION_PATTERN, increment, text, flags=re.MULTILINE)
+ TextToFile(text, version_file)
+
+def CreateCommit(options):
+ print ">> Creating commit."
+ # Find short hash from source.
+ shorthash = subprocess.check_output(
+ ["git", "rev-parse", "--short", options.commit],
+ cwd=options.v8_path).strip()
+
+ # Commit message
+ title = "deps: backport %s from upstream V8" % shorthash
+ body = subprocess.check_output(
+ ["git", "log", options.commit, "-1", "--format=%B"],
+ cwd=options.v8_path).strip()
+ body = '\n'.join(" " + line for line in body.splitlines())
+
+ message = title + "\n\nOriginal commit message:\n\n" + body
+
+ # Create commit at target.
+ review_message = "" if options.no_review else "-e"
+ git_commands = [
+ ["git", "checkout", "-b", "backport_%s" % shorthash], # new branch
+ ["git", "add", TARGET_SUBDIR], # add files
+ ["git", "commit", "-m", message, review_message] # new commit
+ ]
+ for command in git_commands:
+ subprocess.check_call(command, cwd=options.node_path)
+
+def ParseOptions(args):
+ parser = argparse.ArgumentParser(description="Backport V8 commit to Node.js")
+ parser.add_argument("v8_path", help="Path to V8 checkout")
+ parser.add_argument("node_path", help="Path to Node.js checkout")
+ parser.add_argument("commit", help="Commit to backport")
+ parser.add_argument("--no-review", action="store_true",
+ help="Skip editing commit message")
+ options = parser.parse_args(args)
+ options.v8_path = os.path.abspath(options.v8_path)
+ assert os.path.isdir(options.v8_path)
+ options.node_path = os.path.abspath(options.node_path)
+ assert os.path.isdir(options.node_path)
+ return options
+
+def Main(args):
+ options = ParseOptions(args)
+ Clean(options)
+ try:
+ CherryPick(options)
+ UpdateVersion(options)
+ CreateCommit(options)
+ except:
+ print ">> Failed. Resetting."
+ subprocess.check_output(["git", "reset", "--hard"], cwd=options.node_path)
+ raise
+
+if __name__ == "__main__":
+ Main(sys.argv[1:])
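The patch-level bump in UpdateVersion() above relies on a fixed-width lookbehind regexp plus a substitution callback. A minimal sketch of just that technique, using an inline string instead of the real v8-version.h:

    import re

    VERSION_PATTERN = r'(?<=#define V8_PATCH_LEVEL )\d+'

    def bump_patch_level(text):
        # Replace only the number that follows "#define V8_PATCH_LEVEL ".
        return re.sub(VERSION_PATTERN,
                      lambda match: str(int(match.group(0)) + 1),
                      text)

    print(bump_patch_level("#define V8_PATCH_LEVEL 4321"))
    # -> #define V8_PATCH_LEVEL 4322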
diff --git a/deps/v8/tools/release/common_includes.py b/deps/v8/tools/release/common_includes.py
index 8ef77c25ab..d295e37d64 100644
--- a/deps/v8/tools/release/common_includes.py
+++ b/deps/v8/tools/release/common_includes.py
@@ -395,7 +395,7 @@ class GitInterface(VCInterface):
"git updater is lagging behind?")
self.step.Git("tag %s %s" % (tag, commit))
- self.step.Git("push origin %s" % tag)
+ self.step.Git("push origin refs/tags/%s:refs/tags/%s" % (tag, tag))
def CLLand(self):
self.step.GitCLLand()
@@ -549,7 +549,8 @@ class Step(GitRecipesMixin):
def InitialEnvironmentChecks(self, cwd):
# Cancel if this is not a git checkout.
if not os.path.exists(os.path.join(cwd, ".git")): # pragma: no cover
- self.Die("This is not a git checkout, this script won't work for you.")
+ self.Die("%s is not a git checkout. If you know what you're doing, try "
+ "deleting it and rerunning this script." % cwd)
# Cancel if EDITOR is unset or not executable.
if (self._options.requires_editor and (not os.environ.get("EDITOR") or
@@ -767,7 +768,7 @@ class UploadStep(Step):
reviewer = self.ReadLine()
self.GitUpload(reviewer, self._options.author, self._options.force_upload,
bypass_hooks=self._options.bypass_upload_hooks,
- cc=self._options.cc, use_gerrit=not self._options.rietveld)
+ cc=self._options.cc)
def MakeStep(step_class=Step, number=0, state=None, config=None,
@@ -813,15 +814,13 @@ class ScriptsBase(object):
def MakeOptions(self, args=None):
parser = argparse.ArgumentParser(description=self._Description())
parser.add_argument("-a", "--author", default="",
- help="The author email used for rietveld.")
+ help="The author email used for code review.")
parser.add_argument("--dry-run", default=False, action="store_true",
help="Perform only read-only actions.")
parser.add_argument("--json-output",
help="File to write results summary to.")
parser.add_argument("-r", "--reviewer", default="",
help="The account name to be used for reviews.")
- parser.add_argument("--rietveld", default=False, action="store_true",
- help="Whether to use rietveld instead of gerrit.")
parser.add_argument("-s", "--step",
help="Specify the step where to start work. Default: 0.",
default=0, type=int)
diff --git a/deps/v8/tools/release/create_release.py b/deps/v8/tools/release/create_release.py
index 8c0ad489a1..e5c2114b1a 100755
--- a/deps/v8/tools/release/create_release.py
+++ b/deps/v8/tools/release/create_release.py
@@ -77,24 +77,6 @@ class DetectLastRelease(Step):
class PrepareChangeLog(Step):
MESSAGE = "Prepare raw ChangeLog entry."
- def Reload(self, body):
- """Attempts to reload the commit message from rietveld in order to allow
- late changes to the LOG flag. Note: This is brittle to future changes of
- the web page name or structure.
- """
- match = re.search(r"^Review URL: https://codereview\.chromium\.org/(\d+)$",
- body, flags=re.M)
- if match:
- cl_url = ("https://codereview.chromium.org/%s/description"
- % match.group(1))
- try:
- # Fetch from Rietveld but only retry once with one second delay since
- # there might be many revisions.
- body = self.ReadURL(cl_url, wait_plan=[1])
- except urllib2.URLError: # pragma: no cover
- pass
- return body
-
def RunStep(self):
self["date"] = self.GetDate()
output = "%s: Version %s\n\n" % (self["date"], self["version"])
@@ -107,7 +89,7 @@ class PrepareChangeLog(Step):
commit_messages = [
[
self.GitLog(n=1, format="%s", git_hash=commit),
- self.Reload(self.GitLog(n=1, format="%B", git_hash=commit)),
+ self.GitLog(n=1, format="%B", git_hash=commit),
self.GitLog(n=1, format="%an", git_hash=commit),
] for commit in commits.splitlines()
]
@@ -221,6 +203,7 @@ class CommitBranch(Step):
if not text: # pragma: no cover
self.Die("Commit message editing failed.")
+ text += "\n\nTBR=%s" % self._options.reviewer
self["commit_title"] = text.splitlines()[0]
TextToFile(text, self.Config("COMMITMSG_FILE"))
@@ -229,10 +212,17 @@ class CommitBranch(Step):
os.remove(self.Config("CHANGELOG_ENTRY_FILE"))
-class PushBranch(Step):
- MESSAGE = "Push changes."
+class LandBranch(Step):
+ MESSAGE = "Upload and land changes."
def RunStep(self):
+ if self._options.dry_run:
+ print "Dry run - upload CL."
+ else:
+ self.GitUpload(author=self._options.author,
+ force=True,
+ bypass_hooks=True,
+ private=True)
cmd = "cl land --bypass-hooks -f"
if self._options.dry_run:
print "Dry run. Command:\ngit %s" % cmd
@@ -305,7 +295,7 @@ class CreateRelease(ScriptsBase):
SetVersion,
EnableMergeWatchlist,
CommitBranch,
- PushBranch,
+ LandBranch,
TagRevision,
CleanUp,
]
diff --git a/deps/v8/tools/release/filter_build_files.py b/deps/v8/tools/release/filter_build_files.py
new file mode 100755
index 0000000000..7d3f22138a
--- /dev/null
+++ b/deps/v8/tools/release/filter_build_files.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# Copyright 2017 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Enumerates relevant build files for each platform.
+
+This can be used to filter the build directory before making an official
+archive. The archive should only contain files required for running or
+static linking, e.g. executables, startup files, libraries.
+
+The script is limited to release builds and assumes GN.
+"""
+
+import argparse
+import glob
+import itertools
+import json
+import os
+import re
+import sys
+
+EXECUTABLE_FILES = [
+ 'd8',
+]
+
+SUPPLEMENTARY_FILES = [
+ 'icudtl.dat',
+ 'natives_blob.bin',
+ 'snapshot_blob.bin',
+ 'v8_build_config.json',
+]
+
+LIBRARY_FILES = {
+ 'android': ['*.a', '*.so'],
+ 'linux': ['*.a', '*.so'],
+ 'mac': ['*.a', '*.so'],
+ 'win': ['*.lib', '*.dll'],
+}
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description=__doc__)
+
+ parser.add_argument('-d', '--dir', required=True,
+ help='Path to the build directory.')
+ parser.add_argument('-p', '--platform', required=True,
+ help='Target platform name: win|mac|linux.')
+ parser.add_argument('-o', '--json-output', required=True,
+ help='Path to an output file. The files will '
+ 'be stored in json list with absolute paths.')
+ parser.add_argument('-t', '--type',
+ choices=['all', 'exe', 'lib'], default='all',
+ help='Specifies the archive type.')
+ args = parser.parse_args()
+
+ if not os.path.isdir(args.dir):
+ parser.error('%s is not an existing directory.' % args.dir)
+
+ args.dir = os.path.abspath(args.dir)
+
+ # Skip libraries for exe archive type.
+ if args.type == 'exe':
+ library_files = []
+ else:
+ library_files = LIBRARY_FILES[args.platform]
+
+ # Skip executables for lib archive type.
+ if args.type == 'lib':
+ executable_files = []
+ else:
+ executable_files = EXECUTABLE_FILES
+
+ list_of_files = []
+ def add_files_from_globs(globs):
+ list_of_files.extend(itertools.chain(*map(glob.iglob, globs)))
+
+ # Add toplevel executables, supplementary files and libraries.
+ extended_executable_files = [
+ f + '.exe' if args.platform == 'win' else f
+ for f in executable_files]
+ add_files_from_globs(
+ os.path.join(args.dir, f)
+ for f in extended_executable_files +
+ SUPPLEMENTARY_FILES +
+ library_files
+ )
+
+ # Add libraries recursively from obj directory.
+ for root, _, __ in os.walk(os.path.join(args.dir, 'obj'), followlinks=True):
+ add_files_from_globs(os.path.join(root, g) for g in library_files)
+
+ with open(args.json_output, 'w') as f:
+ json.dump(list_of_files, f)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
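A possible way to drive the new filter_build_files.py from another tool, assuming a Linux release build; the paths below are placeholders, not part of this change:

    import json, subprocess

    subprocess.check_call([
        'python', 'tools/release/filter_build_files.py',
        '-d', 'out.gn/x64.release',   # placeholder build directory
        '-p', 'linux',
        '-t', 'exe',                  # executables and supplementary files only
        '-o', '/tmp/files.json',
    ])

    with open('/tmp/files.json') as f:
        for path in json.load(f):
            print(path)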
diff --git a/deps/v8/tools/release/git_recipes.py b/deps/v8/tools/release/git_recipes.py
index ce65b973ac..d831aa3a20 100644
--- a/deps/v8/tools/release/git_recipes.py
+++ b/deps/v8/tools/release/git_recipes.py
@@ -206,7 +206,7 @@ class GitRecipesMixin(object):
self.Git(MakeArgs(args), **kwargs)
def GitUpload(self, reviewer="", author="", force=False, cq=False,
- bypass_hooks=False, cc="", use_gerrit=False, **kwargs):
+ bypass_hooks=False, cc="", private=False, **kwargs):
args = ["cl upload --send-mail"]
if author:
args += ["--email", Quoted(author)]
@@ -220,8 +220,9 @@ class GitRecipesMixin(object):
args.append("--bypass-hooks")
if cc:
args += ["--cc", Quoted(cc)]
- if use_gerrit:
- args += ["--gerrit"]
+ args += ["--gerrit"]
+ if private:
+ args += ["--private"]
# TODO(machenbach): Check output in forced mode. Verify that all required
# base files were uploaded, if not retry.
self.Git(MakeArgs(args), pipe=False, **kwargs)
diff --git a/deps/v8/tools/release/merge_to_branch.py b/deps/v8/tools/release/merge_to_branch.py
index 802409436e..877d121b49 100755
--- a/deps/v8/tools/release/merge_to_branch.py
+++ b/deps/v8/tools/release/merge_to_branch.py
@@ -31,6 +31,7 @@ from collections import OrderedDict
import sys
from common_includes import *
+from git_recipes import GetCommitMessageFooterMap
def IsSvnNumber(rev):
return rev.isdigit() and len(rev) < 8
@@ -134,8 +135,13 @@ class CreateCommitMessage(Step):
msg = self.GitLog(n=1, git_hash=commit_hash)
for bug in re.findall(r"^[ \t]*BUG[ \t]*=[ \t]*(.*?)[ \t]*$", msg, re.M):
bugs.extend(s.strip() for s in bug.split(","))
- bug_aggregate = ",".join(sorted(filter(lambda s: s and s != "none", bugs)))
+ gerrit_bug = GetCommitMessageFooterMap(msg).get('Bug', '')
+ bugs.extend(s.strip() for s in gerrit_bug.split(","))
+ bug_aggregate = ",".join(
+ sorted(filter(lambda s: s and s != "none", set(bugs))))
if bug_aggregate:
+ # TODO(machenbach): Use proper gerrit footer for bug after switch to
+ # gerrit. Keep BUG= for now for backwards-compatibility.
msg_pieces.append("BUG=%s\nLOG=N\n" % bug_aggregate)
msg_pieces.append("NOTRY=true\nNOPRESUBMIT=true\nNOTREECHECKS=true\n")
diff --git a/deps/v8/tools/release/push_to_candidates.py b/deps/v8/tools/release/push_to_candidates.py
index 750794eabd..538b9887d6 100755
--- a/deps/v8/tools/release/push_to_candidates.py
+++ b/deps/v8/tools/release/push_to_candidates.py
@@ -119,24 +119,6 @@ class DetectLastRelease(Step):
class PrepareChangeLog(Step):
MESSAGE = "Prepare raw ChangeLog entry."
- def Reload(self, body):
- """Attempts to reload the commit message from rietveld in order to allow
- late changes to the LOG flag. Note: This is brittle to future changes of
- the web page name or structure.
- """
- match = re.search(r"^Review URL: https://codereview\.chromium\.org/(\d+)$",
- body, flags=re.M)
- if match:
- cl_url = ("https://codereview.chromium.org/%s/description"
- % match.group(1))
- try:
- # Fetch from Rietveld but only retry once with one second delay since
- # there might be many revisions.
- body = self.ReadURL(cl_url, wait_plan=[1])
- except urllib2.URLError: # pragma: no cover
- pass
- return body
-
def RunStep(self):
self["date"] = self.GetDate()
output = "%s: Version %s\n\n" % (self["date"], self["version"])
@@ -149,7 +131,7 @@ class PrepareChangeLog(Step):
commit_messages = [
[
self.GitLog(n=1, format="%s", git_hash=commit),
- self.Reload(self.GitLog(n=1, format="%B", git_hash=commit)),
+ self.GitLog(n=1, format="%B", git_hash=commit),
self.GitLog(n=1, format="%an", git_hash=commit),
] for commit in commits.splitlines()
]
diff --git a/deps/v8/tools/release/test_backport_node.py b/deps/v8/tools/release/test_backport_node.py
new file mode 100755
index 0000000000..a2be9cf33d
--- /dev/null
+++ b/deps/v8/tools/release/test_backport_node.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright 2017 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import unittest
+
+from common_includes import FileToText
+import backport_node
+
+# Base paths.
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+TEST_DATA = os.path.join(BASE_DIR, 'testdata')
+
+def gitify(path):
+ files = os.listdir(path)
+ subprocess.check_call(['git', 'init'], cwd=path)
+ subprocess.check_call(['git', 'add'] + files, cwd=path)
+ subprocess.check_call(['git', 'commit', '-m', 'Initial'], cwd=path)
+
+class TestUpdateNode(unittest.TestCase):
+ def setUp(self):
+ self.workdir = tempfile.mkdtemp(prefix='tmp_test_node_')
+
+ def tearDown(self):
+ shutil.rmtree(self.workdir)
+
+ def testUpdate(self):
+ v8_cwd = os.path.join(self.workdir, 'v8')
+ node_cwd = os.path.join(self.workdir, 'node')
+
+ # Set up V8 test fixture.
+ shutil.copytree(src=os.path.join(TEST_DATA, 'v8'), dst=v8_cwd)
+ gitify(v8_cwd)
+
+ # Set up node test fixture.
+ shutil.copytree(src=os.path.join(TEST_DATA, 'node'), dst=node_cwd)
+ gitify(os.path.join(node_cwd))
+
+ # Add a patch.
+ with open(os.path.join(v8_cwd, 'v8_foo'), 'w') as f:
+ f.write('zonk')
+ subprocess.check_call(['git', 'add', 'v8_foo'], cwd=v8_cwd)
+ subprocess.check_call(['git', 'commit', '-m', "Title\n\nBody"], cwd=v8_cwd)
+ commit = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=v8_cwd).strip()
+
+ # Run update script.
+ backport_node.Main([v8_cwd, node_cwd, commit, "--no-review"])
+
+ # Check message.
+ message = subprocess.check_output(['git', 'log', '-1', '--format=%B'], cwd=node_cwd)
+ self.assertIn('Original commit message:\n\n Title\n\n Body', message)
+
+ # Check patch.
+ gitlog = subprocess.check_output(
+ ['git', 'diff', 'master', '--cached', '--', 'deps/v8/v8_foo'],
+ cwd=node_cwd,
+ )
+ self.assertIn('+zonk', gitlog.strip())
+
+ # Check version.
+ version_file = os.path.join(node_cwd, "deps", "v8", "include", "v8-version.h")
+ self.assertIn('#define V8_PATCH_LEVEL 4322', FileToText(version_file))
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/deps/v8/tools/release/test_scripts.py b/deps/v8/tools/release/test_scripts.py
index bfc68dc818..42bbd5a0a1 100755
--- a/deps/v8/tools/release/test_scripts.py
+++ b/deps/v8/tools/release/test_scripts.py
@@ -586,11 +586,7 @@ class ScriptTest(unittest.TestCase):
Cmd("git log -1 --format=%B rev3", "Title\n\nBUG=321\nLOG=true\n"),
Cmd("git log -1 --format=%an rev3", "author3@chromium.org"),
Cmd("git log -1 --format=%s rev4", "Title text 4"),
- Cmd("git log -1 --format=%B rev4",
- ("Title\n\nBUG=456\nLOG=Y\n\n"
- "Review URL: https://codereview.chromium.org/9876543210\n")),
- URL("https://codereview.chromium.org/9876543210/description",
- "Title\n\nBUG=456\nLOG=N\n\n"),
+ Cmd("git log -1 --format=%B rev4", "Title\n\nBUG=456\nLOG=N"),
Cmd("git log -1 --format=%an rev4", "author4@chromium.org"),
])
@@ -861,7 +857,7 @@ Performance and stability improvements on all platforms."""
"\"Version 3.22.5 (based on push_hash)\""
" origin/candidates", "hsh_to_tag"),
Cmd("git tag 3.22.5 hsh_to_tag", ""),
- Cmd("git push origin 3.22.5", ""),
+ Cmd("git push origin refs/tags/3.22.5:refs/tags/3.22.5", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
Cmd("git branch -D %s" % TEST_CONFIG["CANDIDATESBRANCH"], ""),
@@ -905,7 +901,9 @@ Performance and stability improvements on all platforms."""
Log text 1 (issue 321).
-Performance and stability improvements on all platforms."""
+Performance and stability improvements on all platforms.
+
+TBR=reviewer@chromium.org"""
def ResetChangeLog():
last_change_log = """1999-04-05: Version 3.22.4
@@ -969,12 +967,14 @@ Performance and stability improvements on all platforms."""
cb=self.WriteFakeWatchlistsFile),
Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], "",
cb=CheckVersionCommit),
+ Cmd("git cl upload --send-mail --email \"author@chromium.org\" "
+ "-f --bypass-hooks --gerrit --private", ""),
Cmd("git cl land --bypass-hooks -f", ""),
Cmd("git fetch", ""),
Cmd("git log -1 --format=%H --grep="
"\"Version 3.22.5\" origin/3.22.5", "hsh_to_tag"),
Cmd("git tag 3.22.5 hsh_to_tag", ""),
- Cmd("git push origin 3.22.5", ""),
+ Cmd("git push origin refs/tags/3.22.5:refs/tags/3.22.5", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git branch", "* master\n work-branch\n"),
Cmd("git branch -D work-branch", ""),
@@ -1119,7 +1119,7 @@ deps = {
self.ROLL_COMMIT_MSG),
"", cwd=chrome_dir),
Cmd("git cl upload --send-mail --email \"author@chromium.org\" -f "
- "--use-commit-queue --bypass-hooks", "", cwd=chrome_dir),
+ "--use-commit-queue --bypass-hooks --gerrit", "", cwd=chrome_dir),
Cmd("git checkout -f master", "", cwd=chrome_dir),
Cmd("git branch -D work-branch", "", cwd=chrome_dir),
]
@@ -1288,7 +1288,7 @@ LOG=N
"\" refs/remotes/origin/candidates",
"hsh_to_tag"),
Cmd("git tag 3.22.5.1 hsh_to_tag", ""),
- Cmd("git push origin 3.22.5.1", ""),
+ Cmd("git push origin refs/tags/3.22.5.1:refs/tags/3.22.5.1", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
])
@@ -1626,8 +1626,8 @@ NOTREECHECKS=true
Cmd("git log -1 --format=%s ab56789", "Revert \"Something\""),
Cmd("git log -1 ab12345", "Title4\nBUG=123\nBUG=234"),
Cmd("git log -1 ab23456", "Title2\n BUG = v8:123,345"),
- Cmd("git log -1 ab34567", "Title3\nLOG=n\nBUG=567, 456"),
- Cmd("git log -1 ab45678", "Title1\nBUG="),
+ Cmd("git log -1 ab34567", "Title3\nLOG=n\nBug: 567, 456,345"),
+ Cmd("git log -1 ab45678", "Title1\nBug:"),
Cmd("git log -1 ab56789", "Revert \"Something\"\nBUG=none"),
Cmd("git log -1 -p ab12345", "patch4"),
Cmd(("git apply --index --reject \"%s\"" %
diff --git a/deps/v8/tools/run-deopt-fuzzer.py b/deps/v8/tools/run-deopt-fuzzer.py
index 27f5cc7a5a..b2399ccf06 100755
--- a/deps/v8/tools/run-deopt-fuzzer.py
+++ b/deps/v8/tools/run-deopt-fuzzer.py
@@ -58,11 +58,9 @@ TIMEOUT_SCALEFACTOR = {"debug" : 4,
"release" : 1 }
MODE_FLAGS = {
- "debug" : ["--nohard-abort", "--nodead-code-elimination",
- "--nofold-constants", "--enable-slow-asserts",
+ "debug" : ["--nohard-abort", "--enable-slow-asserts",
"--verify-heap", "--noconcurrent-recompilation"],
- "release" : ["--nohard-abort", "--nodead-code-elimination",
- "--nofold-constants", "--noconcurrent-recompilation"]}
+ "release" : ["--nohard-abort", "--noconcurrent-recompilation"]}
SUPPORTED_ARCHS = ["android_arm",
"android_ia32",
@@ -405,6 +403,7 @@ def Execute(arch, mode, args, options, suites, workspace):
"novfp3": False,
"predictable": False,
"byteorder": sys.byteorder,
+ "no_harness": False,
}
all_tests = []
num_tests = 0
diff --git a/deps/v8/tools/run-tests.py b/deps/v8/tools/run-tests.py
index 7a9e90cd5e..8be8d42cad 100755
--- a/deps/v8/tools/run-tests.py
+++ b/deps/v8/tools/run-tests.py
@@ -109,12 +109,12 @@ TEST_MAP = {
TIMEOUT_DEFAULT = 60
# Variants ordered by expected runtime (slowest first).
-VARIANTS = ["default", "noturbofan"]
+VARIANTS = ["default"]
MORE_VARIANTS = [
"stress",
- "noturbofan_stress",
"nooptimization",
+ "fullcode",
"asm_wasm",
"wasm_traps",
]
@@ -126,15 +126,13 @@ VARIANT_ALIASES = {
"dev": VARIANTS,
# Additional variants, run on all bots.
"more": MORE_VARIANTS,
+ # TODO(machenbach): Deprecate this after the step is removed on infra side.
# Additional variants, run on a subset of bots.
- "extra": ["fullcode"],
+ "extra": [],
}
-DEBUG_FLAGS = ["--nohard-abort", "--nodead-code-elimination",
- "--nofold-constants", "--enable-slow-asserts",
- "--verify-heap"]
-RELEASE_FLAGS = ["--nohard-abort", "--nodead-code-elimination",
- "--nofold-constants"]
+DEBUG_FLAGS = ["--nohard-abort", "--enable-slow-asserts", "--verify-heap"]
+RELEASE_FLAGS = ["--nohard-abort"]
MODES = {
"debug": {
@@ -189,7 +187,6 @@ SUPPORTED_ARCHS = ["android_arm",
"android_x64",
"arm",
"ia32",
- "x87",
"mips",
"mipsel",
"mips64",
@@ -213,7 +210,6 @@ SLOW_ARCHS = ["android_arm",
"mips64el",
"s390",
"s390x",
- "x87",
"arm64"]
@@ -223,11 +219,9 @@ def BuildOptions():
result.description = """TESTS: %s""" % (TEST_MAP["default"])
result.add_option("--arch",
help=("The architecture to run tests for, "
- "'auto' or 'native' for auto-detect: %s" % SUPPORTED_ARCHS),
- default="ia32,x64,arm")
+ "'auto' or 'native' for auto-detect: %s" % SUPPORTED_ARCHS))
result.add_option("--arch-and-mode",
- help="Architecture and mode in the format 'arch.mode'",
- default=None)
+ help="Architecture and mode in the format 'arch.mode'")
result.add_option("--asan",
help="Regard test expectations for ASAN",
default=False, action="store_true")
@@ -276,8 +270,7 @@ def BuildOptions():
default=0, type="int")
result.add_option("-m", "--mode",
help="The test modes in which to run (comma-separated,"
- " uppercase for ninja and buildbot builds): %s" % MODES.keys(),
- default="release,debug")
+ " uppercase for ninja and buildbot builds): %s" % MODES.keys())
result.add_option("--no-harness", "--noharness",
help="Run without test harness of a given suite",
default=False, action="store_true")
@@ -411,10 +404,6 @@ def SetupEnvironment(options):
if not utils.GuessOS() == 'macos':
# LSAN is not available on mac.
asan_options.append('detect_leaks=1')
- os.environ['LSAN_OPTIONS'] = ":".join([
- 'suppressions=%s' % os.path.join(
- BASE_DIR, 'tools', 'memory', 'lsan', 'suppressions.txt'),
- ])
os.environ['ASAN_OPTIONS'] = ":".join(asan_options)
if options.sancov_dir:
@@ -477,6 +466,7 @@ def ProcessOptions(options):
build_config_path = os.path.join(
BASE_DIR, options.outdir, "v8_build_config.json")
+ # Auto-detect test configurations based on the build (GN only).
if os.path.exists(build_config_path):
try:
with open(build_config_path) as f:
@@ -490,19 +480,52 @@ def ProcessOptions(options):
# In auto-detect mode the outdir is always where we found the build config.
# This ensures that we'll also take the build products from there.
options.outdir = os.path.dirname(build_config_path)
-
options.arch_and_mode = None
- options.arch = build_config["v8_target_cpu"]
- if options.arch == 'x86':
- # TODO(machenbach): Transform all to x86 eventually.
- options.arch = 'ia32'
- options.asan = build_config["is_asan"]
- options.dcheck_always_on = build_config["dcheck_always_on"]
- options.mode = 'debug' if build_config["is_debug"] else 'release'
- options.msan = build_config["is_msan"]
- options.no_i18n = not build_config["v8_enable_i18n_support"]
- options.no_snap = not build_config["v8_use_snapshot"]
- options.tsan = build_config["is_tsan"]
+ if options.mode:
+ # In auto-detect mode we don't use the mode for more path-magic.
+ # Therefore transform the buildbot mode here to fit to the GN build
+ # config.
+ options.mode = BuildbotToV8Mode(options.mode)
+
+ # In V8 land, GN's x86 is called ia32.
+ if build_config["v8_target_cpu"] == "x86":
+ build_config["v8_target_cpu"] = "ia32"
+
+ # Update options based on the build config. Sanity check that we're not
+ # trying to use inconsistent options.
+ for param, value in (
+ ('arch', build_config["v8_target_cpu"]),
+ ('asan', build_config["is_asan"]),
+ ('dcheck_always_on', build_config["dcheck_always_on"]),
+ ('gcov_coverage', build_config["is_gcov_coverage"]),
+ ('mode', 'debug' if build_config["is_debug"] else 'release'),
+ ('msan', build_config["is_msan"]),
+ ('no_i18n', not build_config["v8_enable_i18n_support"]),
+ ('no_snap', not build_config["v8_use_snapshot"]),
+ ('tsan', build_config["is_tsan"])):
+ cmd_line_value = getattr(options, param)
+ if cmd_line_value not in [None, True, False] and cmd_line_value != value:
+ # TODO(machenbach): This is for string options only. Requires options
+ # to not have default values. We should make this more modular and
+ # implement it in our own version of the option parser.
+ print "Attempted to set %s to %s, while build is %s." % (
+ param, cmd_line_value, value)
+ return False
+ if cmd_line_value == True and value == False:
+ print "Attempted to turn on %s, but it's not available." % (
+ param)
+ return False
+ if cmd_line_value != value:
+ print ">>> Auto-detected %s=%s" % (param, value)
+ setattr(options, param, value)
+
+ else:
+ # Non-GN build without auto-detect. Set default values for missing
+ # parameters.
+ if not options.mode:
+ options.mode = "release,debug"
+ if not options.arch:
+ options.arch = "ia32,x64,arm"
# Architecture and mode related stuff.
if options.arch_and_mode:
@@ -808,6 +831,7 @@ def Execute(arch, mode, args, options, suites):
"novfp3": options.novfp3,
"predictable": options.predictable,
"byteorder": sys.byteorder,
+ "no_harness": options.no_harness
}
all_tests = []
num_tests = 0
diff --git a/deps/v8/tools/run_perf.py b/deps/v8/tools/run_perf.py
index e6ec9cb38b..b22a4f11ea 100755
--- a/deps/v8/tools/run_perf.py
+++ b/deps/v8/tools/run_perf.py
@@ -23,12 +23,14 @@ The suite json format is expected to be:
"results_regexp": <optional regexp>,
"results_processor": <optional python results processor script>,
"units": <the unit specification for the performance dashboard>,
+ "process_size": <flag - collect maximum memory used by the process>,
"tests": [
{
"name": <name of the trace>,
"results_regexp": <optional more specific regexp>,
"results_processor": <optional python results processor script>,
"units": <the unit specification for the performance dashboard>,
+ "process_size": <flag - collect maximum memory used by the process>,
}, ...
]
}
@@ -186,6 +188,7 @@ class Measurement(object):
self.results = []
self.errors = []
self.stddev = ""
+ self.process_size = False
def ConsumeOutput(self, stdout):
try:
@@ -378,6 +381,7 @@ class DefaultSentinel(Node):
self.graphs = []
self.flags = []
self.test_flags = []
+ self.process_size = False
self.resources = []
self.results_processor = None
self.results_regexp = None
@@ -420,6 +424,7 @@ class GraphConfig(Node):
self.total = suite.get("total", parent.total)
self.results_processor = suite.get(
"results_processor", parent.results_processor)
+ self.process_size = suite.get("process_size", parent.process_size)
# A regular expression for results. If the parent graph provides a
# regexp and the current suite has none, a string place holder for the
@@ -675,7 +680,12 @@ class DesktopPlatform(Platform):
suffix = ' - without patch' if no_patch else ''
shell_dir = self.shell_dir_no_patch if no_patch else self.shell_dir
title = ">>> %%s (#%d)%s:" % ((count + 1), suffix)
- command = self.command_prefix + runnable.GetCommand(shell_dir,
+ if runnable.process_size:
+ command = ["/usr/bin/time", "--format=MaxMemory: %MKB"]
+ else:
+ command = []
+
+ command += self.command_prefix + runnable.GetCommand(shell_dir,
self.extra_flags)
try:
output = commands.Execute(
@@ -702,6 +712,10 @@ class DesktopPlatform(Platform):
subprocess.check_call(tick_tools + " --only-summary", shell=True)
else: # pragma: no cover
print "Profiler option currently supported on Linux and Mac OS."
+
+ # time outputs to stderr
+ if runnable.process_size:
+ return output.stdout + output.stderr
return output.stdout
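Because /usr/bin/time writes its report to stderr, callers that enable process_size receive stdout and stderr concatenated; a rough sketch of pulling the figure back out (the regexp below is an assumption, not the suite's configured results_regexp):

    # Sketch: extract the peak-memory figure appended by
    # /usr/bin/time --format="MaxMemory: %MKB".
    import re

    def max_memory_kb(combined_output):
        match = re.search(r"MaxMemory: (\d+)KB", combined_output)
        return int(match.group(1)) if match else None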
@@ -841,6 +855,8 @@ class AndroidPlatform(Platform): # pragma: no cover
except device_errors.CommandTimeoutError:
print ">>> Test timed out after %ss." % runnable.timeout
stdout = ""
+ if runnable.process_size:
+ return stdout + "MaxMemory: Unsupported"
return stdout
class CustomMachineConfiguration:
diff --git a/deps/v8/tools/sanitizers/sancov_formatter.py b/deps/v8/tools/sanitizers/sancov_formatter.py
index 4f3ea9e5cb..2e168fb0ec 100755
--- a/deps/v8/tools/sanitizers/sancov_formatter.py
+++ b/deps/v8/tools/sanitizers/sancov_formatter.py
@@ -78,12 +78,6 @@ EXE_BLACKLIST = [
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__))))
-# Executable location. TODO(machenbach): Only release is supported for now.
-BUILD_DIR = os.path.join(BASE_DIR, 'out', 'Release')
-
-# Path prefix added by the llvm symbolizer including trailing slash.
-OUTPUT_PATH_PREFIX = os.path.join(BUILD_DIR, '..', '..', '')
-
# The sancov tool location.
SANCOV_TOOL = os.path.join(
BASE_DIR, 'third_party', 'llvm', 'projects', 'compiler-rt',
@@ -105,17 +99,17 @@ CPUS = cpu_count()
SANCOV_FILE_RE = re.compile(r'^(.*)\.result.sancov$')
-def executables():
+def executables(build_dir):
"""Iterates over executable files in the build directory."""
- for f in os.listdir(BUILD_DIR):
- file_path = os.path.join(BUILD_DIR, f)
+ for f in os.listdir(build_dir):
+ file_path = os.path.join(build_dir, f)
if (os.path.isfile(file_path) and
os.access(file_path, os.X_OK) and
f not in EXE_BLACKLIST):
yield file_path
-def process_symbolizer_output(output):
+def process_symbolizer_output(output, build_dir):
"""Post-process llvm symbolizer output.
Excludes files outside the v8 checkout or given in the exclusion list above
@@ -125,13 +119,16 @@ def process_symbolizer_output(output):
have relative paths to the v8 base directory. The lists of line
numbers don't contain duplicate lines and are sorted.
"""
+ # Path prefix added by the llvm symbolizer including trailing slash.
+ output_path_prefix = os.path.join(build_dir, '..', '..', '')
+
# Drop path prefix when iterating lines. The path is redundant and takes
# too much space. Drop files outside that path, e.g. generated files in
# the build dir and absolute paths to c++ library headers.
def iter_lines():
for line in output.strip().splitlines():
- if line.startswith(OUTPUT_PATH_PREFIX):
- yield line[len(OUTPUT_PATH_PREFIX):]
+ if line.startswith(output_path_prefix):
+ yield line[len(output_path_prefix):]
# Map file names to sets of instrumented line numbers.
file_map = {}
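A self-contained sketch of the prefix handling introduced above, assuming a build directory two levels below the checkout (the function name is invented for illustration):

    # Sketch: make symbolizer paths relative to the v8 checkout and drop
    # lines that do not start with the build-dir-derived prefix.
    import os

    def strip_prefix(lines, build_dir):
        prefix = os.path.join(build_dir, '..', '..', '')
        for line in lines:
            if line.startswith(prefix):
                yield line[len(prefix):]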
@@ -168,7 +165,7 @@ def get_instrumented_lines(executable):
process = subprocess.Popen(
'objdump -d %s | '
'grep \'^\s\+[0-9a-f]\+:.*\scall\(q\|\)\s\+[0-9a-f]\+ '
- '<__sanitizer_cov\(_with_check\|\)\(@plt\|\)>\' | '
+ '<__sanitizer_cov\(_with_check\|\|_trace_pc_guard\)\(@plt\|\)>\' | '
'grep \'^\s\+[0-9a-f]\+\' -o | '
'%s | '
'%s --obj %s -functions=none' %
@@ -181,7 +178,7 @@ def get_instrumented_lines(executable):
)
output, _ = process.communicate()
assert process.returncode == 0
- return process_symbolizer_output(output)
+ return process_symbolizer_output(output, os.path.dirname(executable))
def merge_instrumented_line_results(exe_list, results):
@@ -216,7 +213,7 @@ def merge_instrumented_line_results(exe_list, results):
def write_instrumented(options):
"""Implements the 'all' action of this tool."""
- exe_list = list(executables())
+ exe_list = list(executables(options.build_dir))
logging.info('Reading instrumented lines from %d executables.',
len(exe_list))
pool = Pool(CPUS)
@@ -242,8 +239,8 @@ def get_covered_lines(args):
Called through multiprocessing pool. The args are expected to unpack to:
cov_dir: Folder with sancov files merged by sancov_merger.py.
- executable: The executable that was called to produce the given coverage
- data.
+ executable: Absolute path to the executable that was called to produce the
+ given coverage data.
sancov_file: The merged sancov file with coverage data.
Returns: A tuple of post-processed llvm output as returned by
@@ -259,7 +256,7 @@ def get_covered_lines(args):
(SANCOV_TOOL,
os.path.join(cov_dir, sancov_file),
SYMBOLIZER,
- os.path.join(BUILD_DIR, executable)),
+ executable),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
@@ -268,7 +265,10 @@ def get_covered_lines(args):
)
output, _ = process.communicate()
assert process.returncode == 0
- return process_symbolizer_output(output), executable
+ return (
+ process_symbolizer_output(output, os.path.dirname(executable)),
+ os.path.basename(executable),
+ )
def merge_covered_line_results(data, results):
@@ -339,12 +339,16 @@ def merge(options):
os.path.isdir(options.coverage_dir))
# Inputs for multiprocessing. List of tuples of:
- # Coverage dir, executable name, sancov file name.
+ # Coverage dir, absolute path to executable, sancov file name.
inputs = []
- for f in os.listdir(options.coverage_dir):
- match = SANCOV_FILE_RE.match(f)
+ for sancov_file in os.listdir(options.coverage_dir):
+ match = SANCOV_FILE_RE.match(sancov_file)
if match:
- inputs.append((options.coverage_dir, match.group(1), f))
+ inputs.append((
+ options.coverage_dir,
+ os.path.join(options.build_dir, match.group(1)),
+ sancov_file,
+ ))
logging.info('Merging %d sancov files into %s',
len(inputs), options.json_input)
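For clarity, each work item handed to the multiprocessing pool is now a three-tuple carrying an absolute executable path; an illustrative value (paths and file names invented) might be:

    # Sketch: one entry of the 'inputs' list after the change.
    example_input = (
        "/path/to/coverage",          # options.coverage_dir
        "/path/to/out/Release/d8",    # options.build_dir joined with match.group(1)
        "d8.result.sancov",           # merged sancov file name
    )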
@@ -403,6 +407,10 @@ def split(options):
def main(args=None):
parser = argparse.ArgumentParser()
+ # TODO(machenbach): Make this required and deprecate the default.
+ parser.add_argument('--build-dir',
+ default=os.path.join(BASE_DIR, 'out', 'Release'),
+ help='Path to the build output directory.')
parser.add_argument('--coverage-dir',
help='Path to the sancov output files.')
parser.add_argument('--json-input',
@@ -415,6 +423,7 @@ def main(args=None):
help='Action to perform.')
options = parser.parse_args(args)
+ options.build_dir = os.path.abspath(options.build_dir)
if options.action.lower() == 'all':
if not options.json_output:
print '--json-output is required'
diff --git a/deps/v8/tools/sanitizers/sancov_formatter_test.py b/deps/v8/tools/sanitizers/sancov_formatter_test.py
index 6a741c804c..008151d40e 100644
--- a/deps/v8/tools/sanitizers/sancov_formatter_test.py
+++ b/deps/v8/tools/sanitizers/sancov_formatter_test.py
@@ -185,7 +185,8 @@ class FormatterTests(unittest.TestCase):
cls._cov.report()
def test_process_symbolizer_output(self):
- result = sancov_formatter.process_symbolizer_output(SYMBOLIZER_OUTPUT)
+ result = sancov_formatter.process_symbolizer_output(
+ SYMBOLIZER_OUTPUT, BUILD_DIR)
self.assertEquals(EXPECTED_PROCESSED_OUTPUT, result)
def test_merge_instrumented_line_results(self):
diff --git a/deps/v8/tools/testrunner/local/statusfile.py b/deps/v8/tools/testrunner/local/statusfile.py
index b03fcc121e..880837b8a7 100644
--- a/deps/v8/tools/testrunner/local/statusfile.py
+++ b/deps/v8/tools/testrunner/local/statusfile.py
@@ -49,7 +49,7 @@ FAIL_SLOPPY = "FAIL_SLOPPY"
ALWAYS = "ALWAYS"
KEYWORDS = {}
-for key in [SKIP, FAIL, PASS, OKAY, TIMEOUT, CRASH, SLOW, FAIL_OK,
+for key in [SKIP, FAIL, PASS, OKAY, CRASH, SLOW, FAIL_OK,
FAST_VARIANTS, NO_VARIANTS, PASS_OR_FAIL, FAIL_SLOPPY, ALWAYS]:
KEYWORDS[key] = key
@@ -59,10 +59,10 @@ DEFS = {FAIL_OK: [FAIL, OKAY],
# Support arches, modes to be written as keywords instead of strings.
VARIABLES = {ALWAYS: True}
for var in ["debug", "release", "big", "little",
- "android_arm", "android_arm64", "android_ia32", "android_x87",
- "android_x64", "arm", "arm64", "ia32", "mips", "mipsel", "mips64",
- "mips64el", "x64", "x87", "ppc", "ppc64", "s390", "s390x", "macos",
- "windows", "linux", "aix"]:
+ "android_arm", "android_arm64", "android_ia32", "android_x64",
+ "arm", "arm64", "ia32", "mips", "mipsel", "mips64", "mips64el",
+ "x64", "ppc", "ppc64", "s390", "s390x", "macos", "windows",
+ "linux", "aix"]:
VARIABLES[var] = var
# Allow using variants as keywords.
diff --git a/deps/v8/tools/testrunner/local/variants.py b/deps/v8/tools/testrunner/local/variants.py
index 0dba0d9579..30fc3e959e 100644
--- a/deps/v8/tools/testrunner/local/variants.py
+++ b/deps/v8/tools/testrunner/local/variants.py
@@ -6,11 +6,7 @@
ALL_VARIANT_FLAGS = {
"default": [[]],
"stress": [["--stress-opt", "--always-opt"]],
- "turbofan": [["--turbo"]],
- "turbofan_opt": [["--turbo", "--always-opt"]],
- "noturbofan": [["--no-turbo"]],
- "noturbofan_stress": [["--no-turbo", "--stress-opt", "--always-opt"]],
- "fullcode": [["--noopt", "--no-turbo"]],
+ "fullcode": [["--noopt", "--stress-fullcodegen"]],
# No optimization means disable all optimizations. OptimizeFunctionOnNextCall
# would not force optimization too. It turns into a Nop. Please see
# https://chromium-review.googlesource.com/c/452620/ for more discussion.
@@ -23,10 +19,7 @@ ALL_VARIANT_FLAGS = {
FAST_VARIANT_FLAGS = {
"default": [[]],
"stress": [["--stress-opt"]],
- "turbofan": [["--turbo"]],
- "noturbofan": [["--no-turbo"]],
- "noturbofan_stress": [["--no-turbo", "--stress-opt"]],
- "fullcode": [["--noopt", "--no-turbo"]],
+ "fullcode": [["--noopt", "--stress-fullcodegen"]],
# No optimization means disable all optimizations. OptimizeFunctionOnNextCall
# would not force optimization too. It turns into a Nop. Please see
# https://chromium-review.googlesource.com/c/452620/ for more discussion.
@@ -35,6 +28,5 @@ FAST_VARIANT_FLAGS = {
"wasm_traps": [["--wasm_guard_pages", "--wasm_trap_handler", "--invoke-weak-callbacks"]],
}
-ALL_VARIANTS = set(["default", "stress", "turbofan", "turbofan_opt",
- "noturbofan", "noturbofan_stress", "fullcode",
- "nooptimization", "asm_wasm", "wasm_traps"])
+ALL_VARIANTS = set(["default", "stress", "fullcode", "nooptimization",
+ "asm_wasm", "wasm_traps"])
diff --git a/deps/v8/tools/testrunner/testrunner.isolate b/deps/v8/tools/testrunner/testrunner.isolate
index 545d888871..e29f1df98d 100644
--- a/deps/v8/tools/testrunner/testrunner.isolate
+++ b/deps/v8/tools/testrunner/testrunner.isolate
@@ -27,12 +27,5 @@
],
},
}],
- ['lsan==1', {
- 'variables': {
- 'files': [
- '../memory/lsan/suppressions.txt',
- ],
- },
- }],
],
}
diff --git a/deps/v8/tools/tickprocessor.js b/deps/v8/tools/tickprocessor.js
index 91b5436eb5..de4e0296b5 100644
--- a/deps/v8/tools/tickprocessor.js
+++ b/deps/v8/tools/tickprocessor.js
@@ -645,9 +645,11 @@ CppEntriesProvider.prototype.parseVmSymbols = function(
} else if (funcInfo === false) {
break;
}
- funcInfo.start += libASLRSlide;
- if (funcInfo.start < libStart && funcInfo.start < libEnd - libStart) {
+ if (funcInfo.start < libStart - libASLRSlide &&
+ funcInfo.start < libEnd - libStart) {
funcInfo.start += libStart;
+ } else {
+ funcInfo.start += libASLRSlide;
}
if (funcInfo.size) {
funcInfo.end = funcInfo.start + funcInfo.size;
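Restated as a hedged sketch (variable meanings inferred from the surrounding tickprocessor code, not guaranteed): start values that look like library-relative offsets are rebased onto libStart, while values that already look like absolute addresses only receive the ASLR slide.

    # Sketch of the adjusted address heuristic, in Python for brevity.
    def adjust_start(start, lib_start, lib_end, aslr_slide):
        if start < lib_start - aslr_slide and start < lib_end - lib_start:
            return start + lib_start     # library-relative offset
        return start + aslr_slide        # absolute address, apply slide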
diff --git a/deps/v8/tools/try_perf.py b/deps/v8/tools/try_perf.py
index 98d3b067e1..17eb0706e6 100755
--- a/deps/v8/tools/try_perf.py
+++ b/deps/v8/tools/try_perf.py
@@ -14,6 +14,7 @@ BOTS = {
'--linux64': 'v8_linux64_perf_try',
'--linux64_atom': 'v8_linux64_atom_perf_try',
'--linux64_haswell': 'v8_linux64_haswell_perf_try',
+ '--linux64_haswell_cm': 'v8_linux64_haswell_cm_perf_try',
'--nexus5': 'v8_nexus5_perf_try',
'--nexus7': 'v8_nexus7_perf_try',
'--nexus9': 'v8_nexus9_perf_try',
@@ -33,21 +34,17 @@ PUBLIC_BENCHMARKS = [
'emscripten',
'compile',
'jetstream',
- 'jetstream-ignition',
'jsbench',
'jstests',
'kraken_orig',
- 'kraken_orig-ignition',
'massive',
'memory',
'octane',
'octane-noopt',
- 'octane-ignition',
'octane-pr',
'octane-tf',
'octane-tf-pr',
'sunspider',
- 'sunspider-ignition',
'unity',
'wasm',
]
diff --git a/deps/v8/tools/turbolizer/README.md b/deps/v8/tools/turbolizer/README.md
index 54e1051690..d4010d8388 100644
--- a/deps/v8/tools/turbolizer/README.md
+++ b/deps/v8/tools/turbolizer/README.md
@@ -48,7 +48,7 @@ necessary disassembly for linking with the perf profile.
The basic example of generating the required data is as follows:
- perf record -k mono /path/to/d8 --turbo --trace-turbo --perf-prof main.js
+ perf record -k mono /path/to/d8 --trace-turbo --perf-prof main.js
perf inject -j -i perf.data -o perf.data.jitted
perf script -i perf.data.jitted -s turbolizer-perf.py turbo-main.json
@@ -59,4 +59,4 @@ script command must be piped to a file for uploading to turbolizer.
There are many options that can be added to the first command, for example '-e'
can be used to specify the counting of specific events (default: cycles), as
-well as '--cpu' to specify which CPU to sample. \ No newline at end of file
+well as '--cpu' to specify which CPU to sample.
diff --git a/deps/v8/tools/v8heapconst.py b/deps/v8/tools/v8heapconst.py
index d2d6ae9266..87c86f9dc0 100644
--- a/deps/v8/tools/v8heapconst.py
+++ b/deps/v8/tools/v8heapconst.py
@@ -71,16 +71,16 @@ INSTANCE_TYPES = {
167: "MODULE_TYPE",
168: "MODULE_INFO_ENTRY_TYPE",
169: "ASYNC_GENERATOR_REQUEST_TYPE",
- 170: "FIXED_ARRAY_TYPE",
- 171: "TRANSITION_ARRAY_TYPE",
- 172: "SHARED_FUNCTION_INFO_TYPE",
- 173: "CELL_TYPE",
- 174: "WEAK_CELL_TYPE",
- 175: "PROPERTY_CELL_TYPE",
- 176: "PADDING_TYPE_1",
- 177: "PADDING_TYPE_2",
- 178: "PADDING_TYPE_3",
- 179: "PADDING_TYPE_4",
+ 170: "PREPARSED_SCOPE_DATA_TYPE",
+ 171: "FIXED_ARRAY_TYPE",
+ 172: "PROPERTY_ARRAY_TYPE",
+ 173: "TRANSITION_ARRAY_TYPE",
+ 174: "SHARED_FUNCTION_INFO_TYPE",
+ 175: "CELL_TYPE",
+ 176: "WEAK_CELL_TYPE",
+ 177: "PROPERTY_CELL_TYPE",
+ 178: "SMALL_ORDERED_HASH_MAP_TYPE",
+ 179: "SMALL_ORDERED_HASH_SET_TYPE",
180: "JS_PROXY_TYPE",
181: "JS_GLOBAL_OBJECT_TYPE",
182: "JS_GLOBAL_PROXY_TYPE",
@@ -101,53 +101,60 @@ INSTANCE_TYPES = {
197: "JS_DATA_VIEW_TYPE",
198: "JS_SET_TYPE",
199: "JS_MAP_TYPE",
- 200: "JS_SET_ITERATOR_TYPE",
- 201: "JS_MAP_ITERATOR_TYPE",
- 202: "JS_WEAK_MAP_TYPE",
- 203: "JS_WEAK_SET_TYPE",
- 204: "JS_PROMISE_CAPABILITY_TYPE",
- 205: "JS_PROMISE_TYPE",
- 206: "JS_REGEXP_TYPE",
- 207: "JS_ERROR_TYPE",
- 208: "JS_ASYNC_FROM_SYNC_ITERATOR_TYPE",
- 209: "JS_STRING_ITERATOR_TYPE",
- 210: "JS_TYPED_ARRAY_KEY_ITERATOR_TYPE",
- 211: "JS_FAST_ARRAY_KEY_ITERATOR_TYPE",
- 212: "JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE",
- 213: "JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 214: "JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 215: "JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 216: "JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 217: "JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 218: "JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 219: "JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 220: "JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 221: "JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 222: "JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 223: "JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 224: "JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 225: "JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 226: "JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 227: "JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 228: "JS_GENERIC_ARRAY_KEY_VALUE_ITERATOR_TYPE",
- 229: "JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE",
- 230: "JS_INT8_ARRAY_VALUE_ITERATOR_TYPE",
- 231: "JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE",
- 232: "JS_INT16_ARRAY_VALUE_ITERATOR_TYPE",
- 233: "JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE",
- 234: "JS_INT32_ARRAY_VALUE_ITERATOR_TYPE",
- 235: "JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE",
- 236: "JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE",
- 237: "JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE",
- 238: "JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE",
- 239: "JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE",
- 240: "JS_FAST_ARRAY_VALUE_ITERATOR_TYPE",
- 241: "JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE",
- 242: "JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE",
- 243: "JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE",
- 244: "JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE",
- 245: "JS_BOUND_FUNCTION_TYPE",
- 246: "JS_FUNCTION_TYPE",
+ 200: "JS_SET_KEY_VALUE_ITERATOR_TYPE",
+ 201: "JS_SET_VALUE_ITERATOR_TYPE",
+ 202: "JS_MAP_KEY_ITERATOR_TYPE",
+ 203: "JS_MAP_KEY_VALUE_ITERATOR_TYPE",
+ 204: "JS_MAP_VALUE_ITERATOR_TYPE",
+ 205: "JS_WEAK_MAP_TYPE",
+ 206: "JS_WEAK_SET_TYPE",
+ 207: "JS_PROMISE_CAPABILITY_TYPE",
+ 208: "JS_PROMISE_TYPE",
+ 209: "JS_REGEXP_TYPE",
+ 210: "JS_ERROR_TYPE",
+ 211: "JS_ASYNC_FROM_SYNC_ITERATOR_TYPE",
+ 212: "JS_STRING_ITERATOR_TYPE",
+ 213: "JS_TYPED_ARRAY_KEY_ITERATOR_TYPE",
+ 214: "JS_FAST_ARRAY_KEY_ITERATOR_TYPE",
+ 215: "JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE",
+ 216: "JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 217: "JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 218: "JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 219: "JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 220: "JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 221: "JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 222: "JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 223: "JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 224: "JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 225: "JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 226: "JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 227: "JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 228: "JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 229: "JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 230: "JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 231: "JS_GENERIC_ARRAY_KEY_VALUE_ITERATOR_TYPE",
+ 232: "JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE",
+ 233: "JS_INT8_ARRAY_VALUE_ITERATOR_TYPE",
+ 234: "JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE",
+ 235: "JS_INT16_ARRAY_VALUE_ITERATOR_TYPE",
+ 236: "JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE",
+ 237: "JS_INT32_ARRAY_VALUE_ITERATOR_TYPE",
+ 238: "JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE",
+ 239: "JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE",
+ 240: "JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE",
+ 241: "JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE",
+ 242: "JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE",
+ 243: "JS_FAST_ARRAY_VALUE_ITERATOR_TYPE",
+ 244: "JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE",
+ 245: "JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE",
+ 246: "JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE",
+ 247: "JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE",
+ 248: "WASM_INSTANCE_TYPE",
+ 249: "WASM_MEMORY_TYPE",
+ 250: "WASM_MODULE_TYPE",
+ 251: "WASM_TABLE_TYPE",
+ 252: "JS_BOUND_FUNCTION_TYPE",
+ 253: "JS_FUNCTION_TYPE",
}
# List of known V8 maps.
@@ -155,7 +162,7 @@ KNOWN_MAPS = {
0x02201: (137, "FreeSpaceMap"),
0x02259: (131, "MetaMap"),
0x022b1: (130, "NullMap"),
- 0x02309: (170, "FixedArrayMap"),
+ 0x02309: (171, "FixedArrayMap"),
0x02361: (8, "OneByteInternalizedStringMap"),
0x023b9: (148, "OnePointerFillerMap"),
0x02411: (148, "TwoPointerFillerMap"),
@@ -165,98 +172,102 @@ KNOWN_MAPS = {
0x02571: (130, "TheHoleMap"),
0x025c9: (130, "BooleanMap"),
0x02621: (135, "ByteArrayMap"),
- 0x02679: (170, "FixedCOWArrayMap"),
- 0x026d1: (170, "HashTableMap"),
+ 0x02679: (171, "FixedCOWArrayMap"),
+ 0x026d1: (171, "HashTableMap"),
0x02729: (128, "SymbolMap"),
0x02781: (72, "OneByteStringMap"),
- 0x027d9: (170, "ScopeInfoMap"),
- 0x02831: (172, "SharedFunctionInfoMap"),
+ 0x027d9: (171, "ScopeInfoMap"),
+ 0x02831: (174, "SharedFunctionInfoMap"),
0x02889: (132, "CodeMap"),
- 0x028e1: (170, "FunctionContextMap"),
- 0x02939: (173, "CellMap"),
- 0x02991: (174, "WeakCellMap"),
- 0x029e9: (175, "GlobalPropertyCellMap"),
+ 0x028e1: (171, "FunctionContextMap"),
+ 0x02939: (175, "CellMap"),
+ 0x02991: (176, "WeakCellMap"),
+ 0x029e9: (177, "GlobalPropertyCellMap"),
0x02a41: (134, "ForeignMap"),
- 0x02a99: (171, "TransitionArrayMap"),
+ 0x02a99: (173, "TransitionArrayMap"),
0x02af1: (130, "ArgumentsMarkerMap"),
0x02b49: (130, "ExceptionMap"),
0x02ba1: (130, "TerminationExceptionMap"),
0x02bf9: (130, "OptimizedOutMap"),
0x02c51: (130, "StaleRegisterMap"),
- 0x02ca9: (170, "NativeContextMap"),
- 0x02d01: (170, "ModuleContextMap"),
- 0x02d59: (170, "EvalContextMap"),
- 0x02db1: (170, "ScriptContextMap"),
- 0x02e09: (170, "BlockContextMap"),
- 0x02e61: (170, "CatchContextMap"),
- 0x02eb9: (170, "WithContextMap"),
+ 0x02ca9: (171, "NativeContextMap"),
+ 0x02d01: (171, "ModuleContextMap"),
+ 0x02d59: (171, "EvalContextMap"),
+ 0x02db1: (171, "ScriptContextMap"),
+ 0x02e09: (171, "BlockContextMap"),
+ 0x02e61: (171, "CatchContextMap"),
+ 0x02eb9: (171, "WithContextMap"),
0x02f11: (147, "FixedDoubleArrayMap"),
0x02f69: (133, "MutableHeapNumberMap"),
- 0x02fc1: (170, "OrderedHashTableMap"),
- 0x03019: (170, "SloppyArgumentsElementsMap"),
- 0x03071: (185, "JSMessageObjectMap"),
- 0x030c9: (136, "BytecodeArrayMap"),
- 0x03121: (170, "ModuleInfoMap"),
- 0x03179: (173, "NoClosuresCellMap"),
- 0x031d1: (173, "OneClosureCellMap"),
- 0x03229: (173, "ManyClosuresCellMap"),
- 0x03281: (64, "StringMap"),
- 0x032d9: (73, "ConsOneByteStringMap"),
- 0x03331: (65, "ConsStringMap"),
- 0x03389: (77, "ThinOneByteStringMap"),
- 0x033e1: (69, "ThinStringMap"),
- 0x03439: (67, "SlicedStringMap"),
- 0x03491: (75, "SlicedOneByteStringMap"),
- 0x034e9: (66, "ExternalStringMap"),
- 0x03541: (82, "ExternalStringWithOneByteDataMap"),
- 0x03599: (74, "ExternalOneByteStringMap"),
- 0x035f1: (98, "ShortExternalStringMap"),
- 0x03649: (114, "ShortExternalStringWithOneByteDataMap"),
- 0x036a1: (0, "InternalizedStringMap"),
- 0x036f9: (2, "ExternalInternalizedStringMap"),
- 0x03751: (18, "ExternalInternalizedStringWithOneByteDataMap"),
- 0x037a9: (10, "ExternalOneByteInternalizedStringMap"),
- 0x03801: (34, "ShortExternalInternalizedStringMap"),
- 0x03859: (50, "ShortExternalInternalizedStringWithOneByteDataMap"),
- 0x038b1: (42, "ShortExternalOneByteInternalizedStringMap"),
- 0x03909: (106, "ShortExternalOneByteStringMap"),
- 0x03961: (139, "FixedUint8ArrayMap"),
- 0x039b9: (138, "FixedInt8ArrayMap"),
- 0x03a11: (141, "FixedUint16ArrayMap"),
- 0x03a69: (140, "FixedInt16ArrayMap"),
- 0x03ac1: (143, "FixedUint32ArrayMap"),
- 0x03b19: (142, "FixedInt32ArrayMap"),
- 0x03b71: (144, "FixedFloat32ArrayMap"),
- 0x03bc9: (145, "FixedFloat64ArrayMap"),
- 0x03c21: (146, "FixedUint8ClampedArrayMap"),
- 0x03c79: (157, "ScriptMap"),
- 0x03cd1: (170, "FeedbackVectorMap"),
- 0x03d29: (170, "DebugEvaluateContextMap"),
- 0x03d81: (170, "ScriptContextTableMap"),
- 0x03dd9: (170, "UnseededNumberDictionaryMap"),
- 0x03e31: (188, "ExternalMap"),
- 0x03e89: (106, "NativeSourceStringMap"),
- 0x03ee1: (152, "InterceptorInfoMap"),
- 0x03f39: (156, "AllocationMementoMap"),
- 0x03f91: (204, "JSPromiseCapabilityMap"),
- 0x03fe9: (149, "AccessorInfoMap"),
- 0x04041: (150, "AccessorPairMap"),
- 0x04099: (151, "AccessCheckInfoMap"),
- 0x040f1: (153, "FunctionTemplateInfoMap"),
- 0x04149: (154, "ObjectTemplateInfoMap"),
- 0x041a1: (155, "AllocationSiteMap"),
- 0x041f9: (158, "AliasedArgumentsEntryMap"),
- 0x04251: (159, "PromiseResolveThenableJobInfoMap"),
- 0x042a9: (160, "PromiseReactionJobInfoMap"),
- 0x04301: (161, "DebugInfoMap"),
- 0x04359: (162, "StackFrameInfoMap"),
- 0x043b1: (163, "PrototypeInfoMap"),
- 0x04409: (164, "Tuple2Map"),
- 0x04461: (165, "Tuple3Map"),
- 0x044b9: (166, "ContextExtensionMap"),
- 0x04511: (167, "ModuleMap"),
- 0x04569: (168, "ModuleInfoEntryMap"),
- 0x045c1: (169, "AsyncGeneratorRequestMap"),
+ 0x02fc1: (171, "OrderedHashTableMap"),
+ 0x03019: (171, "SloppyArgumentsElementsMap"),
+ 0x03071: (178, "SmallOrderedHashMapMap"),
+ 0x030c9: (179, "SmallOrderedHashSetMap"),
+ 0x03121: (185, "JSMessageObjectMap"),
+ 0x03179: (136, "BytecodeArrayMap"),
+ 0x031d1: (171, "ModuleInfoMap"),
+ 0x03229: (175, "NoClosuresCellMap"),
+ 0x03281: (175, "OneClosureCellMap"),
+ 0x032d9: (175, "ManyClosuresCellMap"),
+ 0x03331: (172, "PropertyArrayMap"),
+ 0x03389: (64, "StringMap"),
+ 0x033e1: (73, "ConsOneByteStringMap"),
+ 0x03439: (65, "ConsStringMap"),
+ 0x03491: (77, "ThinOneByteStringMap"),
+ 0x034e9: (69, "ThinStringMap"),
+ 0x03541: (67, "SlicedStringMap"),
+ 0x03599: (75, "SlicedOneByteStringMap"),
+ 0x035f1: (66, "ExternalStringMap"),
+ 0x03649: (82, "ExternalStringWithOneByteDataMap"),
+ 0x036a1: (74, "ExternalOneByteStringMap"),
+ 0x036f9: (98, "ShortExternalStringMap"),
+ 0x03751: (114, "ShortExternalStringWithOneByteDataMap"),
+ 0x037a9: (0, "InternalizedStringMap"),
+ 0x03801: (2, "ExternalInternalizedStringMap"),
+ 0x03859: (18, "ExternalInternalizedStringWithOneByteDataMap"),
+ 0x038b1: (10, "ExternalOneByteInternalizedStringMap"),
+ 0x03909: (34, "ShortExternalInternalizedStringMap"),
+ 0x03961: (50, "ShortExternalInternalizedStringWithOneByteDataMap"),
+ 0x039b9: (42, "ShortExternalOneByteInternalizedStringMap"),
+ 0x03a11: (106, "ShortExternalOneByteStringMap"),
+ 0x03a69: (139, "FixedUint8ArrayMap"),
+ 0x03ac1: (138, "FixedInt8ArrayMap"),
+ 0x03b19: (141, "FixedUint16ArrayMap"),
+ 0x03b71: (140, "FixedInt16ArrayMap"),
+ 0x03bc9: (143, "FixedUint32ArrayMap"),
+ 0x03c21: (142, "FixedInt32ArrayMap"),
+ 0x03c79: (144, "FixedFloat32ArrayMap"),
+ 0x03cd1: (145, "FixedFloat64ArrayMap"),
+ 0x03d29: (146, "FixedUint8ClampedArrayMap"),
+ 0x03d81: (157, "ScriptMap"),
+ 0x03dd9: (171, "FeedbackVectorMap"),
+ 0x03e31: (171, "DebugEvaluateContextMap"),
+ 0x03e89: (171, "ScriptContextTableMap"),
+ 0x03ee1: (171, "UnseededNumberDictionaryMap"),
+ 0x03f39: (188, "ExternalMap"),
+ 0x03f91: (106, "NativeSourceStringMap"),
+ 0x03fe9: (152, "InterceptorInfoMap"),
+ 0x04041: (207, "JSPromiseCapabilityMap"),
+ 0x04099: (149, "AccessorInfoMap"),
+ 0x040f1: (150, "AccessorPairMap"),
+ 0x04149: (151, "AccessCheckInfoMap"),
+ 0x041a1: (153, "FunctionTemplateInfoMap"),
+ 0x041f9: (154, "ObjectTemplateInfoMap"),
+ 0x04251: (155, "AllocationSiteMap"),
+ 0x042a9: (156, "AllocationMementoMap"),
+ 0x04301: (158, "AliasedArgumentsEntryMap"),
+ 0x04359: (159, "PromiseResolveThenableJobInfoMap"),
+ 0x043b1: (160, "PromiseReactionJobInfoMap"),
+ 0x04409: (161, "DebugInfoMap"),
+ 0x04461: (162, "StackFrameInfoMap"),
+ 0x044b9: (163, "PrototypeInfoMap"),
+ 0x04511: (164, "Tuple2Map"),
+ 0x04569: (165, "Tuple3Map"),
+ 0x045c1: (166, "ContextExtensionMap"),
+ 0x04619: (167, "ModuleMap"),
+ 0x04671: (168, "ModuleInfoEntryMap"),
+ 0x046c9: (169, "AsyncGeneratorRequestMap"),
+ 0x04721: (170, "PreParsedScopeDataMap"),
}
# List of known V8 objects.
@@ -289,21 +300,21 @@ KNOWN_OBJECTS = {
("OLD_SPACE", 0x02749): "EmptyFixedFloat64Array",
("OLD_SPACE", 0x02769): "EmptyFixedUint8ClampedArray",
("OLD_SPACE", 0x02789): "EmptyScript",
- ("OLD_SPACE", 0x02811): "UndefinedCell",
- ("OLD_SPACE", 0x02821): "EmptySloppyArgumentsElements",
- ("OLD_SPACE", 0x02841): "EmptySlowElementDictionary",
- ("OLD_SPACE", 0x02891): "EmptyPropertyCell",
- ("OLD_SPACE", 0x028b1): "EmptyWeakCell",
- ("OLD_SPACE", 0x028c9): "ArrayProtector",
+ ("OLD_SPACE", 0x02809): "UndefinedCell",
+ ("OLD_SPACE", 0x02819): "EmptySloppyArgumentsElements",
+ ("OLD_SPACE", 0x02839): "EmptySlowElementDictionary",
+ ("OLD_SPACE", 0x02881): "EmptyPropertyCell",
+ ("OLD_SPACE", 0x028a9): "EmptyWeakCell",
+ ("OLD_SPACE", 0x028c1): "ArrayProtector",
("OLD_SPACE", 0x028e9): "IsConcatSpreadableProtector",
("OLD_SPACE", 0x028f9): "SpeciesProtector",
- ("OLD_SPACE", 0x02909): "StringLengthProtector",
- ("OLD_SPACE", 0x02929): "FastArrayIterationProtector",
- ("OLD_SPACE", 0x02939): "ArrayIteratorProtector",
- ("OLD_SPACE", 0x02959): "ArrayBufferNeuteringProtector",
- ("OLD_SPACE", 0x02979): "InfinityValue",
- ("OLD_SPACE", 0x02989): "MinusZeroValue",
- ("OLD_SPACE", 0x02999): "MinusInfinityValue",
+ ("OLD_SPACE", 0x02921): "StringLengthProtector",
+ ("OLD_SPACE", 0x02949): "FastArrayIterationProtector",
+ ("OLD_SPACE", 0x02959): "ArrayIteratorProtector",
+ ("OLD_SPACE", 0x02981): "ArrayBufferNeuteringProtector",
+ ("OLD_SPACE", 0x029a9): "InfinityValue",
+ ("OLD_SPACE", 0x029b9): "MinusZeroValue",
+ ("OLD_SPACE", 0x029c9): "MinusInfinityValue",
}
# List of known V8 Frame Markers.
@@ -319,7 +330,8 @@ FRAME_MARKERS = (
"WASM_INTERPRETER_ENTRY",
"INTERPRETED",
"STUB",
- "STUB_FAILURE_TRAMPOLINE",
+ "BUILTIN_CONTINUATION",
+ "JAVA_SCRIPT_BUILTIN_CONTINUATION",
"INTERNAL",
"CONSTRUCT",
"ARGUMENTS_ADAPTOR",
diff --git a/deps/v8/tools/verify_source_deps.py b/deps/v8/tools/verify_source_deps.py
index e3a39c1d17..c49d51ab5d 100755
--- a/deps/v8/tools/verify_source_deps.py
+++ b/deps/v8/tools/verify_source_deps.py
@@ -82,7 +82,6 @@ GN_UNSUPPORTED_FEATURES = [
'solaris',
'vtune',
'v8-version.h',
- 'x87',
]
ALL_GN_PREFIXES = [
diff --git a/deps/v8/tools/wasm/update-wasm-fuzzers.sh b/deps/v8/tools/wasm/update-wasm-fuzzers.sh
index ab9f84be28..ffd7e01633 100755
--- a/deps/v8/tools/wasm/update-wasm-fuzzers.sh
+++ b/deps/v8/tools/wasm/update-wasm-fuzzers.sh
@@ -10,21 +10,12 @@ TOOLS_WASM_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd ${TOOLS_WASM_DIR}/../..
rm -rf test/fuzzer/wasm_corpus
-rm -rf test/fuzzer/wasm_asmjs_corpus
tools/dev/gm.py x64.release all
mkdir -p test/fuzzer/wasm_corpus
-mkdir -p test/fuzzer/wasm_asmjs_corpus
-# asm.js
-./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
- --mode=release --no-presubmit --extra-flags="--dump-wasm-module \
- --dump-wasm-module-path=./test/fuzzer/wasm_asmjs_corpus/" mjsunit/wasm/asm*
-./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
- --mode=release --no-presubmit --extra-flags="--dump-wasm-module \
- --dump-wasm-module-path=./test/fuzzer/wasm_asmjs_corpus/" mjsunit/asm/*
-# WASM
+# wasm
./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
--mode=release --no-presubmit --extra-flags="--dump-wasm-module \
--dump-wasm-module-path=./test/fuzzer/wasm_corpus/" unittests
@@ -45,12 +36,7 @@ for x in $(find ./test/fuzzer/wasm_corpus/ -type f -size +20k)
do
rm $x
done
-for x in $(find ./test/fuzzer/wasm_asmjs_corpus/ -type f -size +20k)
-do
- rm $x
-done
# Upload changes.
cd test/fuzzer
upload_to_google_storage.py -a -b v8-wasm-fuzzer wasm_corpus
-upload_to_google_storage.py -a -b v8-wasm-asmjs-fuzzer wasm_asmjs_corpus
diff --git a/deps/v8/tools/wasm/update-wasm-spec-tests.sh b/deps/v8/tools/wasm/update-wasm-spec-tests.sh
index be277e9ad7..2a144b2d5e 100755
--- a/deps/v8/tools/wasm/update-wasm-spec-tests.sh
+++ b/deps/v8/tools/wasm/update-wasm-spec-tests.sh
@@ -13,7 +13,7 @@ cd ${V8_DIR}
mkdir -p ./test/wasm-spec-tests/tests/
rm -rf ./test/wasm-spec-tests/tests/*
-./tools/dev/gm.py x64.release all
+./tools/dev/gm.py x64.release d8
cd ${V8_DIR}/test/wasm-js/interpreter
make
diff --git a/deps/v8/tools/whitespace.txt b/deps/v8/tools/whitespace.txt
index 4448e29f88..1698df5f80 100644
--- a/deps/v8/tools/whitespace.txt
+++ b/deps/v8/tools/whitespace.txt
@@ -7,5 +7,5 @@ A Smi balks into a war and says:
The doubles heard this and started to unbox.
The Smi looked at them when a crazy v8-autoroll account showed up......
The autoroller bought a round of Himbeerbrause. Suddenly .....
-The bartender starts to shake the bottles........
+The bartender starts to shake the bottles...........
.