summaryrefslogtreecommitdiff
path: root/deps/v8
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8')
-rw-r--r--deps/v8/.gitignore12
-rw-r--r--deps/v8/AUTHORS1
-rw-r--r--deps/v8/ChangeLog41
-rw-r--r--deps/v8/DEPS4
-rw-r--r--deps/v8/Makefile13
-rw-r--r--deps/v8/PRESUBMIT.py3
-rw-r--r--deps/v8/build/standalone.gypi6
-rw-r--r--deps/v8/build/toolchain.gypi47
-rwxr-xr-xdeps/v8/include/v8-debug.h36
-rw-r--r--deps/v8/include/v8-preparser.h44
-rw-r--r--deps/v8/include/v8-profiler.h61
-rw-r--r--deps/v8/include/v8-testing.h34
-rw-r--r--deps/v8/include/v8.h289
-rw-r--r--deps/v8/src/api.cc78
-rw-r--r--deps/v8/src/arm/assembler-arm.cc59
-rw-r--r--deps/v8/src/arm/assembler-arm.h1
-rw-r--r--deps/v8/src/arm/builtins-arm.cc20
-rw-r--r--deps/v8/src/arm/code-stubs-arm.cc48
-rw-r--r--deps/v8/src/arm/codegen-arm.cc4
-rw-r--r--deps/v8/src/arm/debug-arm.cc6
-rw-r--r--deps/v8/src/arm/frames-arm.h5
-rw-r--r--deps/v8/src/arm/full-codegen-arm.cc43
-rw-r--r--deps/v8/src/arm/lithium-arm.cc86
-rw-r--r--deps/v8/src/arm/lithium-arm.h70
-rw-r--r--deps/v8/src/arm/lithium-codegen-arm.cc160
-rw-r--r--deps/v8/src/arm/lithium-codegen-arm.h7
-rw-r--r--deps/v8/src/arm/lithium-gap-resolver-arm.cc2
-rw-r--r--deps/v8/src/arm/macro-assembler-arm.cc55
-rw-r--r--deps/v8/src/arm/macro-assembler-arm.h8
-rw-r--r--deps/v8/src/array-iterator.js7
-rw-r--r--deps/v8/src/ast.cc11
-rw-r--r--deps/v8/src/ast.h5
-rw-r--r--deps/v8/src/atomicops_internals_tsan.h12
-rw-r--r--deps/v8/src/bootstrapper.cc11
-rw-r--r--deps/v8/src/builtins.h2
-rw-r--r--deps/v8/src/checks.h6
-rw-r--r--deps/v8/src/code-stubs-hydrogen.cc189
-rw-r--r--deps/v8/src/code-stubs.cc88
-rw-r--r--deps/v8/src/code-stubs.h68
-rw-r--r--deps/v8/src/collection.js8
-rw-r--r--deps/v8/src/compilation-cache.cc26
-rw-r--r--deps/v8/src/compilation-cache.h5
-rw-r--r--deps/v8/src/compiler.cc27
-rw-r--r--deps/v8/src/compiler.h7
-rw-r--r--deps/v8/src/d8.cc89
-rw-r--r--deps/v8/src/d8.h2
-rw-r--r--deps/v8/src/debug.cc2
-rw-r--r--deps/v8/src/deoptimizer.cc55
-rw-r--r--deps/v8/src/extensions/i18n/date-format.cc329
-rw-r--r--deps/v8/src/extensions/i18n/date-format.js10
-rw-r--r--deps/v8/src/extensions/i18n/footer.js2
-rw-r--r--deps/v8/src/extensions/i18n/header.js2
-rw-r--r--deps/v8/src/extensions/i18n/i18n-extension.cc22
-rw-r--r--deps/v8/src/extensions/i18n/i18n-utils.cc46
-rw-r--r--deps/v8/src/extensions/i18n/i18n-utils.js11
-rw-r--r--deps/v8/src/extensions/i18n/locale.cc251
-rw-r--r--deps/v8/src/extensions/i18n/locale.js4
-rw-r--r--deps/v8/src/factory.cc54
-rw-r--r--deps/v8/src/flag-definitions.h8
-rw-r--r--deps/v8/src/frames.cc10
-rw-r--r--deps/v8/src/frames.h3
-rw-r--r--deps/v8/src/full-codegen.cc16
-rw-r--r--deps/v8/src/full-codegen.h64
-rw-r--r--deps/v8/src/global-handles.cc594
-rw-r--r--deps/v8/src/global-handles.h135
-rw-r--r--deps/v8/src/globals.h392
-rw-r--r--deps/v8/src/harmony-array.js124
-rw-r--r--deps/v8/src/harmony-string.js154
-rw-r--r--deps/v8/src/heap-snapshot-generator.cc24
-rw-r--r--deps/v8/src/heap-snapshot-generator.h10
-rw-r--r--deps/v8/src/heap.cc67
-rw-r--r--deps/v8/src/heap.h27
-rw-r--r--deps/v8/src/hydrogen-bce.cc8
-rw-r--r--deps/v8/src/hydrogen-bch.cc14
-rw-r--r--deps/v8/src/hydrogen-dehoist.cc2
-rw-r--r--deps/v8/src/hydrogen-instructions.cc681
-rw-r--r--deps/v8/src/hydrogen-instructions.h1933
-rw-r--r--deps/v8/src/hydrogen-uint32-analysis.cc6
-rw-r--r--deps/v8/src/hydrogen.cc1090
-rw-r--r--deps/v8/src/hydrogen.h293
-rw-r--r--deps/v8/src/i18n.cc297
-rw-r--r--deps/v8/src/i18n.h (renamed from deps/v8/src/extensions/i18n/date-format.h)48
-rw-r--r--deps/v8/src/ia32/assembler-ia32.cc4
-rw-r--r--deps/v8/src/ia32/builtins-ia32.cc20
-rw-r--r--deps/v8/src/ia32/code-stubs-ia32.cc54
-rw-r--r--deps/v8/src/ia32/codegen-ia32.cc4
-rw-r--r--deps/v8/src/ia32/debug-ia32.cc2
-rw-r--r--deps/v8/src/ia32/deoptimizer-ia32.cc2
-rw-r--r--deps/v8/src/ia32/frames-ia32.h5
-rw-r--r--deps/v8/src/ia32/full-codegen-ia32.cc44
-rw-r--r--deps/v8/src/ia32/ic-ia32.cc2
-rw-r--r--deps/v8/src/ia32/lithium-codegen-ia32.cc175
-rw-r--r--deps/v8/src/ia32/lithium-codegen-ia32.h15
-rw-r--r--deps/v8/src/ia32/lithium-ia32.cc82
-rw-r--r--deps/v8/src/ia32/lithium-ia32.h70
-rw-r--r--deps/v8/src/ia32/macro-assembler-ia32.cc59
-rw-r--r--deps/v8/src/ia32/macro-assembler-ia32.h8
-rw-r--r--deps/v8/src/ia32/stub-cache-ia32.cc2
-rw-r--r--deps/v8/src/ic.cc36
-rw-r--r--deps/v8/src/ic.h8
-rw-r--r--deps/v8/src/icu_util.cc4
-rw-r--r--deps/v8/src/isolate.cc4
-rw-r--r--deps/v8/src/isolate.h3
-rw-r--r--deps/v8/src/lithium.cc4
-rw-r--r--deps/v8/src/log.cc1
-rw-r--r--deps/v8/src/mark-compact.cc13
-rw-r--r--deps/v8/src/mark-compact.h4
-rw-r--r--deps/v8/src/messages.js23
-rw-r--r--deps/v8/src/mips/assembler-mips.h5
-rw-r--r--deps/v8/src/mips/builtins-mips.cc20
-rw-r--r--deps/v8/src/mips/code-stubs-mips.cc58
-rw-r--r--deps/v8/src/mips/codegen-mips.cc4
-rw-r--r--deps/v8/src/mips/debug-mips.cc7
-rw-r--r--deps/v8/src/mips/frames-mips.h5
-rw-r--r--deps/v8/src/mips/full-codegen-mips.cc58
-rw-r--r--deps/v8/src/mips/lithium-codegen-mips.cc174
-rw-r--r--deps/v8/src/mips/lithium-codegen-mips.h6
-rw-r--r--deps/v8/src/mips/lithium-gap-resolver-mips.cc2
-rw-r--r--deps/v8/src/mips/lithium-mips.cc86
-rw-r--r--deps/v8/src/mips/lithium-mips.h70
-rw-r--r--deps/v8/src/mips/macro-assembler-mips.cc63
-rw-r--r--deps/v8/src/mips/macro-assembler-mips.h12
-rw-r--r--deps/v8/src/object-observe.js5
-rw-r--r--deps/v8/src/objects-debug.cc13
-rw-r--r--deps/v8/src/objects-inl.h88
-rw-r--r--deps/v8/src/objects-printer.cc44
-rw-r--r--deps/v8/src/objects-visiting.h2
-rw-r--r--deps/v8/src/objects.cc64
-rw-r--r--deps/v8/src/objects.h320
-rw-r--r--deps/v8/src/parser.cc14
-rw-r--r--deps/v8/src/platform-linux.cc3
-rw-r--r--deps/v8/src/profile-generator.cc12
-rw-r--r--deps/v8/src/profile-generator.h17
-rw-r--r--deps/v8/src/runtime.cc655
-rw-r--r--deps/v8/src/runtime.h29
-rw-r--r--deps/v8/src/runtime.js14
-rw-r--r--deps/v8/src/sampler.cc8
-rw-r--r--deps/v8/src/serialize.cc1
-rw-r--r--deps/v8/src/spaces.h5
-rw-r--r--deps/v8/src/transitions-inl.h24
-rw-r--r--deps/v8/src/transitions.cc5
-rw-r--r--deps/v8/src/transitions.h44
-rw-r--r--deps/v8/src/type-info.cc12
-rw-r--r--deps/v8/src/type-info.h1
-rw-r--r--deps/v8/src/typedarray.js120
-rw-r--r--deps/v8/src/typing.cc12
-rw-r--r--deps/v8/src/v8.cc7
-rw-r--r--deps/v8/src/version.cc4
-rw-r--r--deps/v8/src/x64/assembler-x64-inl.h5
-rw-r--r--deps/v8/src/x64/assembler-x64.cc7
-rw-r--r--deps/v8/src/x64/assembler-x64.h51
-rw-r--r--deps/v8/src/x64/builtins-x64.cc48
-rw-r--r--deps/v8/src/x64/code-stubs-x64.cc112
-rw-r--r--deps/v8/src/x64/codegen-x64.cc4
-rw-r--r--deps/v8/src/x64/debug-x64.cc7
-rw-r--r--deps/v8/src/x64/deoptimizer-x64.cc4
-rw-r--r--deps/v8/src/x64/frames-x64.h6
-rw-r--r--deps/v8/src/x64/full-codegen-x64.cc47
-rw-r--r--deps/v8/src/x64/ic-x64.cc44
-rw-r--r--deps/v8/src/x64/lithium-codegen-x64.cc181
-rw-r--r--deps/v8/src/x64/lithium-codegen-x64.h14
-rw-r--r--deps/v8/src/x64/lithium-x64.cc81
-rw-r--r--deps/v8/src/x64/lithium-x64.h68
-rw-r--r--deps/v8/src/x64/macro-assembler-x64.cc94
-rw-r--r--deps/v8/src/x64/macro-assembler-x64.h18
-rw-r--r--deps/v8/src/x64/regexp-macro-assembler-x64.cc2
-rw-r--r--deps/v8/src/x64/stub-cache-x64.cc36
-rw-r--r--deps/v8/src/zone.h5
-rw-r--r--deps/v8/test/benchmarks/benchmarks.status29
-rw-r--r--deps/v8/test/benchmarks/testcfg.py181
-rw-r--r--deps/v8/test/cctest/cctest.cc5
-rw-r--r--deps/v8/test/cctest/cctest.h53
-rw-r--r--deps/v8/test/cctest/test-api.cc175
-rw-r--r--deps/v8/test/cctest/test-assembler-arm.cc21
-rw-r--r--deps/v8/test/cctest/test-code-stubs-ia32.cc2
-rw-r--r--deps/v8/test/cctest/test-code-stubs-x64.cc2
-rw-r--r--deps/v8/test/cctest/test-compiler.cc1
-rw-r--r--deps/v8/test/cctest/test-cpu-profiler.cc15
-rw-r--r--deps/v8/test/cctest/test-global-handles.cc204
-rw-r--r--deps/v8/test/cctest/test-heap.cc4
-rw-r--r--deps/v8/test/cctest/test-strings.cc52
-rw-r--r--deps/v8/test/intl/break-iterator/default-locale.js2
-rw-r--r--deps/v8/test/intl/break-iterator/wellformed-unsupported-locale.js2
-rw-r--r--deps/v8/test/intl/collator/default-locale.js4
-rw-r--r--deps/v8/test/intl/collator/wellformed-unsupported-locale.js2
-rw-r--r--deps/v8/test/intl/date-format/default-locale.js2
-rw-r--r--deps/v8/test/intl/date-format/wellformed-unsupported-locale.js2
-rw-r--r--deps/v8/test/intl/intl.status10
-rw-r--r--deps/v8/test/intl/number-format/default-locale.js2
-rw-r--r--deps/v8/test/intl/number-format/wellformed-unsupported-locale.js2
-rw-r--r--deps/v8/test/intl/testcfg.py2
-rw-r--r--deps/v8/test/mjsunit/harmony/array-find.js280
-rw-r--r--deps/v8/test/mjsunit/harmony/array-findindex.js280
-rw-r--r--deps/v8/test/mjsunit/harmony/array-iterator.js34
-rw-r--r--deps/v8/test/mjsunit/harmony/collections.js9
-rw-r--r--deps/v8/test/mjsunit/harmony/object-observe.js16
-rw-r--r--deps/v8/test/mjsunit/harmony/proxies-example-membrane.js4
-rw-r--r--deps/v8/test/mjsunit/harmony/proxies-hash.js4
-rw-r--r--deps/v8/test/mjsunit/harmony/string-contains.js151
-rw-r--r--deps/v8/test/mjsunit/harmony/string-endswith.js136
-rw-r--r--deps/v8/test/mjsunit/harmony/string-repeat.js74
-rw-r--r--deps/v8/test/mjsunit/harmony/string-startswith.js135
-rw-r--r--deps/v8/test/mjsunit/math-abs.js11
-rw-r--r--deps/v8/test/mjsunit/regress/regress-264203.js (renamed from deps/v8/test/intl/general/v8Intl-exists.js)22
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2813.js44
-rw-r--r--deps/v8/test/mjsunit/regress/regress-omit-checks.js55
-rw-r--r--deps/v8/test/mjsunit/unary-minus-deopt.js (renamed from deps/v8/src/extensions/i18n/locale.h)57
-rw-r--r--deps/v8/test/webkit/webkit.status3
-rwxr-xr-xdeps/v8/tools/grokdump.py274
-rw-r--r--deps/v8/tools/gyp/v8.gyp10
-rwxr-xr-xdeps/v8/tools/run-tests.py8
-rw-r--r--deps/v8/tools/testrunner/local/old_statusfile.py2
-rw-r--r--deps/v8/tools/testrunner/local/statusfile.py7
-rw-r--r--deps/v8/tools/testrunner/local/testsuite.py47
-rw-r--r--deps/v8/tools/testrunner/local/verbose.py2
-rw-r--r--deps/v8/tools/v8heapconst.py252
-rw-r--r--deps/v8/tools/v8heapconst.py.tmpl30
217 files changed, 8258 insertions, 6632 deletions
diff --git a/deps/v8/.gitignore b/deps/v8/.gitignore
index 2f524bed03..282e463fc5 100644
--- a/deps/v8/.gitignore
+++ b/deps/v8/.gitignore
@@ -27,12 +27,22 @@ d8
d8_g
shell
shell_g
+/_*
/build/Debug
/build/gyp
/build/ipch/
/build/Release
+/hydrogen.cfg
/obj
/out
+/perf.data
+/perf.data.old
+/test/benchmarks/benchmarks.status2
+/test/benchmarks/CHECKED_OUT_*
+/test/benchmarks/downloaded_*
+/test/benchmarks/kraken
+/test/benchmarks/octane
+/test/benchmarks/sunspider
/test/cctest/cctest.status2
/test/message/message.status2
/test/mjsunit/mjsunit.status2
@@ -52,6 +62,7 @@ shell_g
/tools/oom_dump/oom_dump.o
/tools/visual_studio/Debug
/tools/visual_studio/Release
+/v8.log.ll
/xcodebuild
TAGS
*.Makefile
@@ -59,4 +70,3 @@ GTAGS
GRTAGS
GSYMS
GPATH
-/_*
diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS
index 1a927c4573..46e3a14bc1 100644
--- a/deps/v8/AUTHORS
+++ b/deps/v8/AUTHORS
@@ -10,6 +10,7 @@ Hewlett-Packard Development Company, LP
Igalia, S.L.
Joyent, Inc.
Bloomberg Finance L.P.
+NVIDIA Corporation
Akinori MUSHA <knu@FreeBSD.org>
Alexander Botero-Lowry <alexbl@FreeBSD.org>
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index 567afd290c..b19fda2ea5 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,3 +1,44 @@
+2013-08-07: Version 3.20.14
+
+ Exposed eternal handle api.
+
+ Bugfix to solve issues with enabling V8 typed arrays in Blink.
+
+ Fixed Array index dehoisting. (Chromium issue 264203)
+
+ Updated Array Iterator to use numeric indexes (issue 2818)
+
+ Return start/end profiling time in microseconds instead of milliseconds
+ (issue 2824)
+
+ Performance and stability improvements on all platforms.
+
+
+2013-08-06: Version 3.20.14
+
+ Added new Harmony methods to Array.prototype object.
+ (issue 2776,v8:2777)
+
+ Performance and stability improvements on all platforms.
+
+
+2013-08-01: Version 3.20.12
+
+ Removed buggy ToNumber truncation (partial fix for issue 2813)
+
+ Calling Map etc without new should throw TypeError (issue 2819)
+
+ Fixed a crash for large code objects on ARM (Chromium issue 2736)
+
+ Fixed stale unhandlified value in JSObject::SetPropertyForResult.
+ (Chromium issue 265894)
+
+ Added new Harmony methods to String.prototype object.
+ (issue 2796,v8:2797,v8:2798,v8:2799)
+
+ Performance and stability improvements on all platforms.
+
+
2013-07-30: Version 3.20.11
Performance and stability improvements on all platforms.
diff --git a/deps/v8/DEPS b/deps/v8/DEPS
index da87853fa7..b91ae4e766 100644
--- a/deps/v8/DEPS
+++ b/deps/v8/DEPS
@@ -5,10 +5,10 @@
deps = {
# Remember to keep the revision in sync with the Makefile.
"v8/build/gyp":
- "http://gyp.googlecode.com/svn/trunk@1656",
+ "http://gyp.googlecode.com/svn/trunk@1685",
"v8/third_party/icu":
- "https://src.chromium.org/chrome/trunk/deps/third_party/icu46@213354",
+ "https://src.chromium.org/chrome/trunk/deps/third_party/icu46@214189",
}
deps_os = {
diff --git a/deps/v8/Makefile b/deps/v8/Makefile
index a749fd026e..288c257396 100644
--- a/deps/v8/Makefile
+++ b/deps/v8/Makefile
@@ -192,6 +192,7 @@ endif
# ----------------- available targets: --------------------
# - "dependencies": pulls in external dependencies (currently: GYP)
+# - "grokdump": rebuilds heap constants lists used by grokdump
# - any arch listed in ARCHES (see below)
# - any mode listed in MODES
# - every combination <arch>.<mode>, e.g. "ia32.release"
@@ -392,7 +393,7 @@ endif
# Replaces the old with the new environment file if they're different, which
# will trigger GYP to regenerate Makefiles.
$(ENVFILE): $(ENVFILE).new
- @if test -r $(ENVFILE) && cmp $(ENVFILE).new $(ENVFILE) >/dev/null; \
+ @if test -r $(ENVFILE) && cmp $(ENVFILE).new $(ENVFILE) > /dev/null; \
then rm $(ENVFILE).new; \
else mv $(ENVFILE).new $(ENVFILE); fi
@@ -401,11 +402,17 @@ $(ENVFILE).new:
@mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new; \
echo "CXX=$(CXX)" >> $(ENVFILE).new
+# Heap constants for grokdump.
+DUMP_FILE = tools/v8heapconst.py
+grokdump: ia32.release
+ @cat $(DUMP_FILE).tmpl > $(DUMP_FILE)
+ @$(OUTDIR)/ia32.release/d8 --dump-heap-constants >> $(DUMP_FILE)
+
# Dependencies.
# Remember to keep these in sync with the DEPS file.
dependencies:
svn checkout --force http://gyp.googlecode.com/svn/trunk build/gyp \
- --revision 1656
+ --revision 1685
svn checkout --force \
https://src.chromium.org/chrome/trunk/deps/third_party/icu46 \
- third_party/icu --revision 213354
+ third_party/icu --revision 214189
diff --git a/deps/v8/PRESUBMIT.py b/deps/v8/PRESUBMIT.py
index 1f176e08bd..819331f9e5 100644
--- a/deps/v8/PRESUBMIT.py
+++ b/deps/v8/PRESUBMIT.py
@@ -69,4 +69,7 @@ def CheckChangeOnCommit(input_api, output_api):
results.extend(_CommonChecks(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
+ results.extend(input_api.canned_checks.CheckTreeIsOpen(
+ input_api, output_api,
+ json_url='http://v8-status.appspot.com/current?format=json'))
return results
diff --git a/deps/v8/build/standalone.gypi b/deps/v8/build/standalone.gypi
index ab2dfd528e..5c017d5f50 100644
--- a/deps/v8/build/standalone.gypi
+++ b/deps/v8/build/standalone.gypi
@@ -120,6 +120,9 @@
},
'target_conditions': [
['v8_code == 0', {
+ 'defines!': [
+ 'DEBUG',
+ ],
'conditions': [
['os_posix == 1 and OS != "mac"', {
'cflags!': [
@@ -132,9 +135,6 @@
},
}],
['OS == "win"', {
- 'defines!': [
- 'DEBUG',
- ],
'msvs_settings': {
'VCCLCompilerTool': {
'WarnAsError': 'false',
diff --git a/deps/v8/build/toolchain.gypi b/deps/v8/build/toolchain.gypi
index 36d60c28ab..ddb8aafad0 100644
--- a/deps/v8/build/toolchain.gypi
+++ b/deps/v8/build/toolchain.gypi
@@ -61,11 +61,10 @@
'v8_enable_backtrace%': 0,
# Speeds up Debug builds:
- # 0 - compiler optimizations off (debuggable) (default). This may
+ # 0 - Compiler optimizations off (debuggable) (default). This may
# be 5x slower than Release (or worse).
- # 1 - turn on compiler optimizations. and #undef DEBUG/#define NDEBUG.
- # This may be hard or impossible to debug. This may still be
- # 2x slower than Release (or worse).
+ # 1 - Turn on compiler optimizations. This may be hard or impossible to
+ # debug. This may still be 2x slower than Release (or worse).
# 2 - Turn on optimizations, and also #undef DEBUG / #define NDEBUG
# (but leave V8_ENABLE_CHECKS and most other assertions enabled.
# This may cause some v8 tests to fail in the Debug configuration.
@@ -455,14 +454,32 @@
'msvs_settings': {
'VCCLCompilerTool': {
'conditions': [
- ['component=="shared_library"', {
- 'RuntimeLibrary': '3', # /MDd
- }, {
- 'RuntimeLibrary': '1', # /MTd
- }],
['v8_optimized_debug==0', {
'Optimization': '0',
- }, {
+ 'conditions': [
+ ['component=="shared_library"', {
+ 'RuntimeLibrary': '3', # /MDd
+ }, {
+ 'RuntimeLibrary': '1', # /MTd
+ }],
+ ],
+ }],
+ ['v8_optimized_debug==1', {
+ 'Optimization': '1',
+ 'InlineFunctionExpansion': '2',
+ 'EnableIntrinsicFunctions': 'true',
+ 'FavorSizeOrSpeed': '0',
+ 'StringPooling': 'true',
+ 'BasicRuntimeChecks': '0',
+ 'conditions': [
+ ['component=="shared_library"', {
+ 'RuntimeLibrary': '3', # /MDd
+ }, {
+ 'RuntimeLibrary': '1', # /MTd
+ }],
+ ],
+ }],
+ ['v8_optimized_debug==2', {
'Optimization': '2',
'InlineFunctionExpansion': '2',
'EnableIntrinsicFunctions': 'true',
@@ -471,9 +488,9 @@
'BasicRuntimeChecks': '0',
'conditions': [
['component=="shared_library"', {
- 'RuntimeLibrary': '2', #/MD
+ 'RuntimeLibrary': '3', #/MDd
}, {
- 'RuntimeLibrary': '0', #/MT
+ 'RuntimeLibrary': '1', #/MTd
}],
['v8_target_arch=="x64"', {
# TODO(2207): remove this option once the bug is fixed.
@@ -487,7 +504,11 @@
'conditions': [
['v8_optimized_debug==0', {
'LinkIncremental': '2',
- }, {
+ }],
+ ['v8_optimized_debug==1', {
+ 'LinkIncremental': '2',
+ }],
+ ['v8_optimized_debug==2', {
'LinkIncremental': '1',
'OptimizeReferences': '2',
'EnableCOMDATFolding': '2',
diff --git a/deps/v8/include/v8-debug.h b/deps/v8/include/v8-debug.h
index e488aaa889..bacccb61dd 100755
--- a/deps/v8/include/v8-debug.h
+++ b/deps/v8/include/v8-debug.h
@@ -30,40 +30,6 @@
#include "v8.h"
-#ifdef _WIN32
-typedef int int32_t;
-typedef unsigned int uint32_t;
-typedef unsigned short uint16_t; // NOLINT
-typedef long long int64_t; // NOLINT
-
-// Setup for Windows DLL export/import. See v8.h in this directory for
-// information on how to build/use V8 as a DLL.
-#if defined(BUILDING_V8_SHARED) && defined(USING_V8_SHARED)
-#error both BUILDING_V8_SHARED and USING_V8_SHARED are set - please check the\
- build configuration to ensure that at most one of these is set
-#endif
-
-#ifdef BUILDING_V8_SHARED
-#define EXPORT __declspec(dllexport)
-#elif USING_V8_SHARED
-#define EXPORT __declspec(dllimport)
-#else
-#define EXPORT
-#endif
-
-#else // _WIN32
-
-// Setup for Linux shared library export. See v8.h in this directory for
-// information on how to build/use V8 as shared library.
-#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
-#define EXPORT __attribute__ ((visibility("default")))
-#else // defined(__GNUC__) && (__GNUC__ >= 4)
-#define EXPORT
-#endif // defined(__GNUC__) && (__GNUC__ >= 4)
-
-#endif // _WIN32
-
-
/**
* Debugger support for the V8 JavaScript engine.
*/
@@ -81,7 +47,7 @@ enum DebugEvent {
};
-class EXPORT Debug {
+class V8_EXPORT Debug {
public:
/**
* A client object passed to the v8 debugger whose ownership will be taken by
diff --git a/deps/v8/include/v8-preparser.h b/deps/v8/include/v8-preparser.h
index 3e39823d65..1da77185af 100644
--- a/deps/v8/include/v8-preparser.h
+++ b/deps/v8/include/v8-preparser.h
@@ -28,48 +28,14 @@
#ifndef PREPARSER_H
#define PREPARSER_H
+#include "v8.h"
#include "v8stdint.h"
-#ifdef _WIN32
-
-// Setup for Windows DLL export/import. When building the V8 DLL the
-// BUILDING_V8_SHARED needs to be defined. When building a program which uses
-// the V8 DLL USING_V8_SHARED needs to be defined. When either building the V8
-// static library or building a program which uses the V8 static library neither
-// BUILDING_V8_SHARED nor USING_V8_SHARED should be defined.
-#if defined(BUILDING_V8_SHARED) && defined(USING_V8_SHARED)
-#error both BUILDING_V8_SHARED and USING_V8_SHARED are set - please check the\
- build configuration to ensure that at most one of these is set
-#endif
-
-#ifdef BUILDING_V8_SHARED
-#define V8EXPORT __declspec(dllexport)
-#elif USING_V8_SHARED
-#define V8EXPORT __declspec(dllimport)
-#else
-#define V8EXPORT
-#endif // BUILDING_V8_SHARED
-
-#else // _WIN32
-
-// Setup for Linux shared library export. There is no need to distinguish
-// between building or using the V8 shared library, but we should not
-// export symbols when we are building a static library.
-#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
- (__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) && defined(V8_SHARED)
-#define V8EXPORT __attribute__ ((visibility("default")))
-#else
-#define V8EXPORT
-#endif
-
-#endif // _WIN32
-
-
namespace v8 {
// The result of preparsing is either a stack overflow error, or an opaque
// blob of data that can be passed back into the parser.
-class V8EXPORT PreParserData {
+class V8_EXPORT PreParserData {
public:
PreParserData(size_t size, const uint8_t* data)
: data_(data), size_(size) { }
@@ -94,7 +60,7 @@ class V8EXPORT PreParserData {
// Interface for a stream of Unicode characters.
-class V8EXPORT UnicodeInputStream { // NOLINT - Thinks V8EXPORT is class name.
+class V8_EXPORT UnicodeInputStream { // NOLINT - V8_EXPORT is not a class name.
public:
virtual ~UnicodeInputStream();
@@ -110,11 +76,9 @@ class V8EXPORT UnicodeInputStream { // NOLINT - Thinks V8EXPORT is class name.
// more stack space than the limit provided, the result's stack_overflow()
// method will return true. Otherwise the result contains preparser
// data that can be used by the V8 parser to speed up parsing.
-PreParserData V8EXPORT Preparse(UnicodeInputStream* input,
+PreParserData V8_EXPORT Preparse(UnicodeInputStream* input,
size_t max_stack_size);
} // namespace v8.
-#undef V8EXPORT
-
#endif // PREPARSER_H
diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h
index cf28341300..1d7b70d3c4 100644
--- a/deps/v8/include/v8-profiler.h
+++ b/deps/v8/include/v8-profiler.h
@@ -30,36 +30,6 @@
#include "v8.h"
-#ifdef _WIN32
-// Setup for Windows DLL export/import. See v8.h in this directory for
-// information on how to build/use V8 as a DLL.
-#if defined(BUILDING_V8_SHARED) && defined(USING_V8_SHARED)
-#error both BUILDING_V8_SHARED and USING_V8_SHARED are set - please check the\
- build configuration to ensure that at most one of these is set
-#endif
-
-#ifdef BUILDING_V8_SHARED
-#define V8EXPORT __declspec(dllexport)
-#elif USING_V8_SHARED
-#define V8EXPORT __declspec(dllimport)
-#else
-#define V8EXPORT
-#endif
-
-#else // _WIN32
-
-// Setup for Linux shared library export. See v8.h in this directory for
-// information on how to build/use V8 as shared library.
-#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
- (__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) && defined(V8_SHARED)
-#define V8EXPORT __attribute__ ((visibility("default")))
-#else
-#define V8EXPORT
-#endif
-
-#endif // _WIN32
-
-
/**
* Profiler support for the V8 JavaScript engine.
*/
@@ -70,7 +40,7 @@ typedef uint32_t SnapshotObjectId;
/**
* CpuProfileNode represents a node in a call graph.
*/
-class V8EXPORT CpuProfileNode {
+class V8_EXPORT CpuProfileNode {
public:
/** Returns function name (empty string for anonymous functions.) */
Handle<String> GetFunctionName() const;
@@ -125,7 +95,7 @@ class V8EXPORT CpuProfileNode {
* CpuProfile contains a CPU profile in a form of top-down call tree
* (from main() down to functions that do all the work).
*/
-class V8EXPORT CpuProfile {
+class V8_EXPORT CpuProfile {
public:
/** Returns CPU profile UID (assigned by the profiler.) */
unsigned GetUid() const;
@@ -149,6 +119,18 @@ class V8EXPORT CpuProfile {
const CpuProfileNode* GetSample(int index) const;
/**
+ * Returns time when the profile recording started (in microseconds
+ * since the Epoch).
+ */
+ int64_t GetStartTime() const;
+
+ /**
+ * Returns time when the profile recording was stopped (in microseconds
+ * since the Epoch).
+ */
+ int64_t GetEndTime() const;
+
+ /**
* Deletes the profile and removes it from CpuProfiler's list.
* All pointers to nodes previously returned become invalid.
* Profiles with the same uid but obtained using different
@@ -164,7 +146,7 @@ class V8EXPORT CpuProfile {
* Interface for controlling CPU profiling. Instance of the
* profiler can be retrieved using v8::Isolate::GetCpuProfiler.
*/
-class V8EXPORT CpuProfiler {
+class V8_EXPORT CpuProfiler {
public:
/**
* A note on security tokens usage. As scripts from different
@@ -225,7 +207,7 @@ class HeapGraphNode;
* HeapSnapshotEdge represents a directed connection between heap
* graph nodes: from retainers to retained nodes.
*/
-class V8EXPORT HeapGraphEdge {
+class V8_EXPORT HeapGraphEdge {
public:
enum Type {
kContextVariable = 0, // A variable from a function context.
@@ -261,7 +243,7 @@ class V8EXPORT HeapGraphEdge {
/**
* HeapGraphNode represents a node in a heap graph.
*/
-class V8EXPORT HeapGraphNode {
+class V8_EXPORT HeapGraphNode {
public:
enum Type {
kHidden = 0, // Hidden node, may be filtered when shown to user.
@@ -313,7 +295,7 @@ class V8EXPORT HeapGraphNode {
/**
* HeapSnapshots record the state of the JS heap at some moment.
*/
-class V8EXPORT HeapSnapshot {
+class V8_EXPORT HeapSnapshot {
public:
enum SerializationFormat {
kJSON = 0 // See format description near 'Serialize' method.
@@ -383,7 +365,7 @@ class RetainedObjectInfo;
* Interface for controlling heap profiling. Instance of the
* profiler can be retrieved using v8::Isolate::GetHeapProfiler.
*/
-class V8EXPORT HeapProfiler {
+class V8_EXPORT HeapProfiler {
public:
/**
* Callback function invoked for obtaining RetainedObjectInfo for
@@ -521,7 +503,7 @@ class V8EXPORT HeapProfiler {
* keeps them alive only during snapshot collection. Afterwards, they
* are freed by calling the Dispose class function.
*/
-class V8EXPORT RetainedObjectInfo { // NOLINT
+class V8_EXPORT RetainedObjectInfo { // NOLINT
public:
/** Called by V8 when it no longer needs an instance. */
virtual void Dispose() = 0;
@@ -587,7 +569,4 @@ struct HeapStatsUpdate {
} // namespace v8
-#undef V8EXPORT
-
-
#endif // V8_V8_PROFILER_H_
diff --git a/deps/v8/include/v8-testing.h b/deps/v8/include/v8-testing.h
index 59eebf9db4..97b467a91b 100644
--- a/deps/v8/include/v8-testing.h
+++ b/deps/v8/include/v8-testing.h
@@ -30,42 +30,12 @@
#include "v8.h"
-#ifdef _WIN32
-// Setup for Windows DLL export/import. See v8.h in this directory for
-// information on how to build/use V8 as a DLL.
-#if defined(BUILDING_V8_SHARED) && defined(USING_V8_SHARED)
-#error both BUILDING_V8_SHARED and USING_V8_SHARED are set - please check the\
- build configuration to ensure that at most one of these is set
-#endif
-
-#ifdef BUILDING_V8_SHARED
-#define V8EXPORT __declspec(dllexport)
-#elif USING_V8_SHARED
-#define V8EXPORT __declspec(dllimport)
-#else
-#define V8EXPORT
-#endif
-
-#else // _WIN32
-
-// Setup for Linux shared library export. See v8.h in this directory for
-// information on how to build/use V8 as shared library.
-#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
- (__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) && defined(V8_SHARED)
-#define V8EXPORT __attribute__ ((visibility("default")))
-#else
-#define V8EXPORT
-#endif
-
-#endif // _WIN32
-
-
/**
* Testing support for the V8 JavaScript engine.
*/
namespace v8 {
-class V8EXPORT Testing {
+class V8_EXPORT Testing {
public:
enum StressType {
kStressTypeOpt,
@@ -99,7 +69,7 @@ class V8EXPORT Testing {
} // namespace v8
-#undef V8EXPORT
+#undef V8_EXPORT
#endif // V8_V8_TEST_H_
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index eb166ab68d..4b31e87273 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -40,6 +40,9 @@
#include "v8stdint.h"
+// We reserve the V8_* prefix for macros defined in V8 public API and
+// assume there are no name conflicts with the embedder's code.
+
#ifdef _WIN32
// Setup for Windows DLL export/import. When building the V8 DLL the
@@ -53,11 +56,11 @@
#endif
#ifdef BUILDING_V8_SHARED
-#define V8EXPORT __declspec(dllexport)
+#define V8_EXPORT __declspec(dllexport)
#elif USING_V8_SHARED
-#define V8EXPORT __declspec(dllimport)
+#define V8_EXPORT __declspec(dllimport)
#else
-#define V8EXPORT
+#define V8_EXPORT
#endif // BUILDING_V8_SHARED
#else // _WIN32
@@ -66,12 +69,12 @@
#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
(__GNUC__ == 3 && __GNUC_MINOR__ >= 3)) && defined(V8_SHARED)
#ifdef BUILDING_V8_SHARED
-#define V8EXPORT __attribute__ ((visibility("default")))
+#define V8_EXPORT __attribute__ ((visibility("default")))
#else
-#define V8EXPORT
+#define V8_EXPORT
#endif
#else
-#define V8EXPORT
+#define V8_EXPORT
#endif
#endif // _WIN32
@@ -385,6 +388,11 @@ template <class T> class Handle {
};
+// A value which will never be returned by Local::Eternalize
+// Useful for static initialization
+const int kUninitializedEternalIndex = -1;
+
+
/**
* A light-weight stack-allocated object handle. All operations
* that return objects from within v8 return them in local handles. They
@@ -430,6 +438,11 @@ template <class T> class Local : public Handle<T> {
return Local<S>::Cast(*this);
}
+ // Keep this Local alive for the lifetime of the Isolate.
+ // It remains retrievable via the returned index,
+ V8_INLINE(int Eternalize(Isolate* isolate));
+ V8_INLINE(static Local<T> GetEternal(Isolate* isolate, int index));
+
/**
* Create a local handle for the content of another handle.
* The referee is kept alive by the local handle even when
@@ -801,7 +814,7 @@ template <class T> class Persistent // NOLINT
* handle and may deallocate it. The behavior of accessing a handle
* for which the handle scope has been deleted is undefined.
*/
-class V8EXPORT HandleScope {
+class V8_EXPORT HandleScope {
public:
// TODO(svenpanne) Deprecate me when Chrome is fixed!
HandleScope();
@@ -840,7 +853,7 @@ class V8EXPORT HandleScope {
// This Data class is accessible internally as HandleScopeData through a
// typedef in the ImplementationUtilities class.
- class V8EXPORT Data {
+ class V8_EXPORT Data {
public:
internal::Object** next;
internal::Object** limit;
@@ -873,7 +886,7 @@ class V8EXPORT HandleScope {
/**
* The superclass of values and API object templates.
*/
-class V8EXPORT Data {
+class V8_EXPORT Data {
private:
Data();
};
@@ -885,7 +898,7 @@ class V8EXPORT Data {
* compiling it, and can be stored between compilations. When script
* data is given to the compile method compilation will be faster.
*/
-class V8EXPORT ScriptData { // NOLINT
+class V8_EXPORT ScriptData { // NOLINT
public:
virtual ~ScriptData() { }
@@ -942,24 +955,28 @@ class ScriptOrigin {
V8_INLINE(ScriptOrigin(
Handle<Value> resource_name,
Handle<Integer> resource_line_offset = Handle<Integer>(),
- Handle<Integer> resource_column_offset = Handle<Integer>()))
+ Handle<Integer> resource_column_offset = Handle<Integer>(),
+ Handle<Boolean> resource_is_shared_cross_origin = Handle<Boolean>()))
: resource_name_(resource_name),
resource_line_offset_(resource_line_offset),
- resource_column_offset_(resource_column_offset) { }
+ resource_column_offset_(resource_column_offset),
+ resource_is_shared_cross_origin_(resource_is_shared_cross_origin) { }
V8_INLINE(Handle<Value> ResourceName() const);
V8_INLINE(Handle<Integer> ResourceLineOffset() const);
V8_INLINE(Handle<Integer> ResourceColumnOffset() const);
+ V8_INLINE(Handle<Boolean> ResourceIsSharedCrossOrigin() const);
private:
Handle<Value> resource_name_;
Handle<Integer> resource_line_offset_;
Handle<Integer> resource_column_offset_;
+ Handle<Boolean> resource_is_shared_cross_origin_;
};
/**
* A compiled JavaScript script.
*/
-class V8EXPORT Script {
+class V8_EXPORT Script {
public:
/**
* Compiles the specified script (context-independent).
@@ -1077,7 +1094,7 @@ class V8EXPORT Script {
/**
* An error message.
*/
-class V8EXPORT Message {
+class V8_EXPORT Message {
public:
Local<String> Get() const;
Local<String> GetSourceLine() const;
@@ -1130,6 +1147,12 @@ class V8EXPORT Message {
*/
int GetEndColumn() const;
+ /**
+ * Passes on the value set by the embedder when it fed the script from which
+ * this Message was generated to V8.
+ */
+ bool IsSharedCrossOrigin() const;
+
// TODO(1245381): Print to a string instead of on a FILE.
static void PrintCurrentStackTrace(FILE* out);
@@ -1143,7 +1166,7 @@ class V8EXPORT Message {
* snapshot of the execution stack and the information remains valid after
* execution continues.
*/
-class V8EXPORT StackTrace {
+class V8_EXPORT StackTrace {
public:
/**
* Flags that determine what information is placed captured for each
@@ -1192,7 +1215,7 @@ class V8EXPORT StackTrace {
/**
* A single JavaScript stack frame.
*/
-class V8EXPORT StackFrame {
+class V8_EXPORT StackFrame {
public:
/**
* Returns the number, 1-based, of the line for the associate function call.
@@ -1244,13 +1267,29 @@ class V8EXPORT StackFrame {
};
+/**
+ * A JSON Parser.
+ */
+class V8_EXPORT JSON {
+ public:
+ /**
+ * Tries to parse the string |json_string| and returns it as object if
+ * successful.
+ *
+ * \param json_string The string to parse.
+ * \return The corresponding object if successfully parsed.
+ */
+ static Local<Object> Parse(Local<String> json_string);
+};
+
+
// --- Value ---
/**
* The superclass of all JavaScript values and objects.
*/
-class V8EXPORT Value : public Data {
+class V8_EXPORT Value : public Data {
public:
/**
* Returns true if this value is the undefined value. See ECMA-262
@@ -1481,14 +1520,14 @@ class V8EXPORT Value : public Data {
/**
* The superclass of primitive values. See ECMA-262 4.3.2.
*/
-class V8EXPORT Primitive : public Value { };
+class V8_EXPORT Primitive : public Value { };
/**
* A primitive boolean value (ECMA-262, 4.3.14). Either the true
* or false value.
*/
-class V8EXPORT Boolean : public Primitive {
+class V8_EXPORT Boolean : public Primitive {
public:
bool Value() const;
V8_INLINE(static Handle<Boolean> New(bool value));
@@ -1498,7 +1537,7 @@ class V8EXPORT Boolean : public Primitive {
/**
* A JavaScript string value (ECMA-262, 4.3.17).
*/
-class V8EXPORT String : public Primitive {
+class V8_EXPORT String : public Primitive {
public:
enum Encoding {
UNKNOWN_ENCODING = 0x1,
@@ -1604,7 +1643,7 @@ class V8EXPORT String : public Primitive {
*/
bool IsExternalAscii() const;
- class V8EXPORT ExternalStringResourceBase { // NOLINT
+ class V8_EXPORT ExternalStringResourceBase { // NOLINT
public:
virtual ~ExternalStringResourceBase() {}
@@ -1633,7 +1672,7 @@ class V8EXPORT String : public Primitive {
* ExternalStringResource to manage the life cycle of the underlying
* buffer. Note that the string data must be immutable.
*/
- class V8EXPORT ExternalStringResource
+ class V8_EXPORT ExternalStringResource
: public ExternalStringResourceBase {
public:
/**
@@ -1667,7 +1706,7 @@ class V8EXPORT String : public Primitive {
* Use String::New or convert to 16 bit data for non-ASCII.
*/
- class V8EXPORT ExternalAsciiStringResource
+ class V8_EXPORT ExternalAsciiStringResource
: public ExternalStringResourceBase {
public:
/**
@@ -1820,7 +1859,7 @@ class V8EXPORT String : public Primitive {
* then the length() method returns 0 and the * operator returns
* NULL.
*/
- class V8EXPORT Utf8Value {
+ class V8_EXPORT Utf8Value {
public:
explicit Utf8Value(Handle<v8::Value> obj);
~Utf8Value();
@@ -1843,7 +1882,7 @@ class V8EXPORT String : public Primitive {
* method of the object) then the length() method returns 0 and the * operator
* returns NULL.
*/
- class V8EXPORT AsciiValue {
+ class V8_EXPORT AsciiValue {
public:
// TODO(dcarney): deprecate
explicit AsciiValue(Handle<v8::Value> obj);
@@ -1866,7 +1905,7 @@ class V8EXPORT String : public Primitive {
* method of the object) then the length() method returns 0 and the * operator
* returns NULL.
*/
- class V8EXPORT Value {
+ class V8_EXPORT Value {
public:
explicit Value(Handle<v8::Value> obj);
~Value();
@@ -1895,7 +1934,7 @@ class V8EXPORT String : public Primitive {
*
* This is an experimental feature. Use at your own risk.
*/
-class V8EXPORT Symbol : public Primitive {
+class V8_EXPORT Symbol : public Primitive {
public:
// Returns the print name string of the symbol, or undefined if none.
Local<Value> Name() const;
@@ -1916,7 +1955,7 @@ class V8EXPORT Symbol : public Primitive {
/**
* A JavaScript number value (ECMA-262, 4.3.20)
*/
-class V8EXPORT Number : public Primitive {
+class V8_EXPORT Number : public Primitive {
public:
double Value() const;
static Local<Number> New(double value);
@@ -1931,7 +1970,7 @@ class V8EXPORT Number : public Primitive {
/**
* A JavaScript value representing a signed integer.
*/
-class V8EXPORT Integer : public Number {
+class V8_EXPORT Integer : public Number {
public:
static Local<Integer> New(int32_t value);
static Local<Integer> NewFromUnsigned(uint32_t value);
@@ -1948,7 +1987,7 @@ class V8EXPORT Integer : public Number {
/**
* A JavaScript value representing a 32-bit signed integer.
*/
-class V8EXPORT Int32 : public Integer {
+class V8_EXPORT Int32 : public Integer {
public:
int32_t Value() const;
private:
@@ -1959,7 +1998,7 @@ class V8EXPORT Int32 : public Integer {
/**
* A JavaScript value representing a 32-bit unsigned integer.
*/
-class V8EXPORT Uint32 : public Integer {
+class V8_EXPORT Uint32 : public Integer {
public:
uint32_t Value() const;
private:
@@ -2031,7 +2070,7 @@ enum AccessControl {
/**
* A JavaScript object (ECMA-262, 4.3.3)
*/
-class V8EXPORT Object : public Value {
+class V8_EXPORT Object : public Value {
public:
bool Set(Handle<Value> key,
Handle<Value> value,
@@ -2303,7 +2342,7 @@ class V8EXPORT Object : public Value {
/**
* An instance of the built-in array constructor (ECMA-262, 15.4.2).
*/
-class V8EXPORT Array : public Object {
+class V8_EXPORT Array : public Object {
public:
uint32_t Length() const;
@@ -2329,7 +2368,7 @@ class V8EXPORT Array : public Object {
/**
* A JavaScript function object (ECMA-262, 15.3).
*/
-class V8EXPORT Function : public Object {
+class V8_EXPORT Function : public Object {
public:
Local<Object> NewInstance() const;
Local<Object> NewInstance(int argc, Handle<Value> argv[]) const;
@@ -2385,7 +2424,7 @@ class V8EXPORT Function : public Object {
* An instance of the built-in ArrayBuffer constructor (ES6 draft 15.13.5).
* This API is experimental and may change significantly.
*/
-class V8EXPORT ArrayBuffer : public Object {
+class V8_EXPORT ArrayBuffer : public Object {
public:
/**
* Allocator that V8 uses to allocate |ArrayBuffer|'s memory.
@@ -2394,19 +2433,41 @@ class V8EXPORT ArrayBuffer : public Object {
*
* This API is experimental and may change significantly.
*/
- class V8EXPORT Allocator { // NOLINT
+ class V8_EXPORT Allocator { // NOLINT
public:
virtual ~Allocator() {}
/**
* Allocate |length| bytes. Return NULL if allocation is not successful.
+ * Memory should be initialized to zeroes.
*/
virtual void* Allocate(size_t length) = 0;
+
/**
- * Free the memory pointed to |data|. That memory is guaranteed to be
- * previously allocated by |Allocate|.
+ * Allocate |length| bytes. Return NULL if allocation is not successful.
+ * Memory does not have to be initialized.
*/
- virtual void Free(void* data) = 0;
+ virtual void* AllocateUninitialized(size_t length) {
+ // Override with call to |Allocate| for compatibility
+ // with legacy version.
+ return Allocate(length);
+ }
+
+ /**
+ * Free the memory block of size |length|, pointed to by |data|.
+ * That memory is guaranteed to be previously allocated by |Allocate|.
+ */
+ virtual void Free(void* data, size_t length) {
+ // Override with call to |Free(void*)| for compatibility
+ // with legacy version.
+ Free(data);
+ }
+
+ /**
+ * Deprecated. Never called directly by V8.
+ * For compatibility with legacy version of this interface.
+ */
+ virtual void Free(void* data);
};
/**
@@ -2419,7 +2480,7 @@ class V8EXPORT ArrayBuffer : public Object {
*
* This API is experimental and may change significantly.
*/
- class V8EXPORT Contents { // NOLINT
+ class V8_EXPORT Contents { // NOLINT
public:
Contents() : data_(NULL), byte_length_(0) {}
@@ -2502,7 +2563,7 @@ class V8EXPORT ArrayBuffer : public Object {
*
* This API is experimental and may change significantly.
*/
-class V8EXPORT ArrayBufferView : public Object {
+class V8_EXPORT ArrayBufferView : public Object {
public:
/**
* Returns underlying ArrayBuffer.
@@ -2537,7 +2598,7 @@ class V8EXPORT ArrayBufferView : public Object {
* (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT TypedArray : public ArrayBufferView {
+class V8_EXPORT TypedArray : public ArrayBufferView {
public:
/**
* Number of elements in this typed array
@@ -2557,7 +2618,7 @@ class V8EXPORT TypedArray : public ArrayBufferView {
* An instance of Uint8Array constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Uint8Array : public TypedArray {
+class V8_EXPORT Uint8Array : public TypedArray {
public:
static Local<Uint8Array> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2573,7 +2634,7 @@ class V8EXPORT Uint8Array : public TypedArray {
* An instance of Uint8ClampedArray constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Uint8ClampedArray : public TypedArray {
+class V8_EXPORT Uint8ClampedArray : public TypedArray {
public:
static Local<Uint8ClampedArray> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2588,7 +2649,7 @@ class V8EXPORT Uint8ClampedArray : public TypedArray {
* An instance of Int8Array constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Int8Array : public TypedArray {
+class V8_EXPORT Int8Array : public TypedArray {
public:
static Local<Int8Array> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2604,7 +2665,7 @@ class V8EXPORT Int8Array : public TypedArray {
* An instance of Uint16Array constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Uint16Array : public TypedArray {
+class V8_EXPORT Uint16Array : public TypedArray {
public:
static Local<Uint16Array> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2620,7 +2681,7 @@ class V8EXPORT Uint16Array : public TypedArray {
* An instance of Int16Array constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Int16Array : public TypedArray {
+class V8_EXPORT Int16Array : public TypedArray {
public:
static Local<Int16Array> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2636,7 +2697,7 @@ class V8EXPORT Int16Array : public TypedArray {
* An instance of Uint32Array constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Uint32Array : public TypedArray {
+class V8_EXPORT Uint32Array : public TypedArray {
public:
static Local<Uint32Array> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2652,7 +2713,7 @@ class V8EXPORT Uint32Array : public TypedArray {
* An instance of Int32Array constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Int32Array : public TypedArray {
+class V8_EXPORT Int32Array : public TypedArray {
public:
static Local<Int32Array> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2668,7 +2729,7 @@ class V8EXPORT Int32Array : public TypedArray {
* An instance of Float32Array constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Float32Array : public TypedArray {
+class V8_EXPORT Float32Array : public TypedArray {
public:
static Local<Float32Array> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2684,7 +2745,7 @@ class V8EXPORT Float32Array : public TypedArray {
* An instance of Float64Array constructor (ES6 draft 15.13.6).
* This API is experimental and may change significantly.
*/
-class V8EXPORT Float64Array : public TypedArray {
+class V8_EXPORT Float64Array : public TypedArray {
public:
static Local<Float64Array> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2700,7 +2761,7 @@ class V8EXPORT Float64Array : public TypedArray {
* An instance of DataView constructor (ES6 draft 15.13.7).
* This API is experimental and may change significantly.
*/
-class V8EXPORT DataView : public ArrayBufferView {
+class V8_EXPORT DataView : public ArrayBufferView {
public:
static Local<DataView> New(Handle<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
@@ -2715,7 +2776,7 @@ class V8EXPORT DataView : public ArrayBufferView {
/**
* An instance of the built-in Date constructor (ECMA-262, 15.9).
*/
-class V8EXPORT Date : public Object {
+class V8_EXPORT Date : public Object {
public:
static Local<Value> New(double time);
@@ -2753,7 +2814,7 @@ class V8EXPORT Date : public Object {
/**
* A Number object (ECMA-262, 4.3.21).
*/
-class V8EXPORT NumberObject : public Object {
+class V8_EXPORT NumberObject : public Object {
public:
static Local<Value> New(double value);
@@ -2776,7 +2837,7 @@ class V8EXPORT NumberObject : public Object {
/**
* A Boolean object (ECMA-262, 4.3.15).
*/
-class V8EXPORT BooleanObject : public Object {
+class V8_EXPORT BooleanObject : public Object {
public:
static Local<Value> New(bool value);
@@ -2799,7 +2860,7 @@ class V8EXPORT BooleanObject : public Object {
/**
* A String object (ECMA-262, 4.3.18).
*/
-class V8EXPORT StringObject : public Object {
+class V8_EXPORT StringObject : public Object {
public:
static Local<Value> New(Handle<String> value);
@@ -2824,7 +2885,7 @@ class V8EXPORT StringObject : public Object {
*
* This is an experimental feature. Use at your own risk.
*/
-class V8EXPORT SymbolObject : public Object {
+class V8_EXPORT SymbolObject : public Object {
public:
static Local<Value> New(Isolate* isolate, Handle<Symbol> value);
@@ -2847,7 +2908,7 @@ class V8EXPORT SymbolObject : public Object {
/**
* An instance of the built-in RegExp constructor (ECMA-262, 15.10).
*/
-class V8EXPORT RegExp : public Object {
+class V8_EXPORT RegExp : public Object {
public:
/**
* Regular expression flag bits. They can be or'ed to enable a set
@@ -2894,7 +2955,7 @@ class V8EXPORT RegExp : public Object {
* A JavaScript value that wraps a C++ void*. This type of value is mainly used
* to associate C++ data structures with JavaScript objects.
*/
-class V8EXPORT External : public Value {
+class V8_EXPORT External : public Value {
public:
static Local<External> New(void* value);
V8_INLINE(static External* Cast(Value* obj));
@@ -2910,7 +2971,7 @@ class V8EXPORT External : public Value {
/**
* The superclass of object and function templates.
*/
-class V8EXPORT Template : public Data {
+class V8_EXPORT Template : public Data {
public:
/** Adds a property to each instance created by this template.*/
void Set(Handle<String> name, Handle<Data> value,
@@ -2998,7 +3059,7 @@ class FunctionCallbackInfo {
};
-class V8EXPORT Arguments : public FunctionCallbackInfo<Value> {
+class V8_EXPORT Arguments : public FunctionCallbackInfo<Value> {
private:
friend class internal::FunctionCallbackArguments;
V8_INLINE(Arguments(internal::Object** implicit_args,
@@ -3039,7 +3100,7 @@ class PropertyCallbackInfo {
};
-class V8EXPORT AccessorInfo : public PropertyCallbackInfo<Value> {
+class V8_EXPORT AccessorInfo : public PropertyCallbackInfo<Value> {
private:
friend class internal::PropertyCallbackArguments;
V8_INLINE(AccessorInfo(internal::Object** args))
@@ -3287,7 +3348,7 @@ typedef bool (*IndexedSecurityCallback)(Local<Object> host,
* child_instance.instance_property == 3;
* \endcode
*/
-class V8EXPORT FunctionTemplate : public Template {
+class V8_EXPORT FunctionTemplate : public Template {
public:
/** Creates a function template.*/
V8_DEPRECATED(static Local<FunctionTemplate> New(
@@ -3378,7 +3439,7 @@ class V8EXPORT FunctionTemplate : public Template {
* Properties added to an ObjectTemplate are added to each object
* created from the ObjectTemplate.
*/
-class V8EXPORT ObjectTemplate : public Template {
+class V8_EXPORT ObjectTemplate : public Template {
public:
/** Creates an ObjectTemplate. */
static Local<ObjectTemplate> New();
@@ -3564,7 +3625,7 @@ class V8EXPORT ObjectTemplate : public Template {
* A Signature specifies which receivers and arguments are valid
* parameters to a function.
*/
-class V8EXPORT Signature : public Data {
+class V8_EXPORT Signature : public Data {
public:
static Local<Signature> New(Handle<FunctionTemplate> receiver =
Handle<FunctionTemplate>(),
@@ -3579,7 +3640,7 @@ class V8EXPORT Signature : public Data {
* An AccessorSignature specifies which receivers are valid parameters
* to an accessor callback.
*/
-class V8EXPORT AccessorSignature : public Data {
+class V8_EXPORT AccessorSignature : public Data {
public:
static Local<AccessorSignature> New(Handle<FunctionTemplate> receiver =
Handle<FunctionTemplate>());
@@ -3588,13 +3649,13 @@ class V8EXPORT AccessorSignature : public Data {
};
-class V8EXPORT DeclaredAccessorDescriptor : public Data {
+class V8_EXPORT DeclaredAccessorDescriptor : public Data {
private:
DeclaredAccessorDescriptor();
};
-class V8EXPORT ObjectOperationDescriptor : public Data {
+class V8_EXPORT ObjectOperationDescriptor : public Data {
public:
// This function is not yet stable and should not be used at this time.
static Local<RawOperationDescriptor> NewInternalFieldDereference(
@@ -3614,7 +3675,7 @@ enum DeclaredAccessorDescriptorDataType {
};
-class V8EXPORT RawOperationDescriptor : public Data {
+class V8_EXPORT RawOperationDescriptor : public Data {
public:
Local<DeclaredAccessorDescriptor> NewHandleDereference(Isolate* isolate);
Local<RawOperationDescriptor> NewRawDereference(Isolate* isolate);
@@ -3647,7 +3708,7 @@ class V8EXPORT RawOperationDescriptor : public Data {
* A utility for determining the type of objects based on the template
* they were constructed from.
*/
-class V8EXPORT TypeSwitch : public Data {
+class V8_EXPORT TypeSwitch : public Data {
public:
static Local<TypeSwitch> New(Handle<FunctionTemplate> type);
static Local<TypeSwitch> New(int argc, Handle<FunctionTemplate> types[]);
@@ -3659,7 +3720,7 @@ class V8EXPORT TypeSwitch : public Data {
// --- Extensions ---
-class V8EXPORT ExternalAsciiStringResourceImpl
+class V8_EXPORT ExternalAsciiStringResourceImpl
: public String::ExternalAsciiStringResource {
public:
ExternalAsciiStringResourceImpl() : data_(0), length_(0) {}
@@ -3676,7 +3737,7 @@ class V8EXPORT ExternalAsciiStringResourceImpl
/**
* Ignore
*/
-class V8EXPORT Extension { // NOLINT
+class V8_EXPORT Extension { // NOLINT
public:
// Note that the strings passed into this constructor must live as long
// as the Extension itself.
@@ -3714,13 +3775,13 @@ class V8EXPORT Extension { // NOLINT
};
-void V8EXPORT RegisterExtension(Extension* extension);
+void V8_EXPORT RegisterExtension(Extension* extension);
/**
* Ignore
*/
-class V8EXPORT DeclareExtension {
+class V8_EXPORT DeclareExtension {
public:
V8_INLINE(DeclareExtension(Extension* extension)) {
RegisterExtension(extension);
@@ -3731,10 +3792,10 @@ class V8EXPORT DeclareExtension {
// --- Statics ---
-Handle<Primitive> V8EXPORT Undefined();
-Handle<Primitive> V8EXPORT Null();
-Handle<Boolean> V8EXPORT True();
-Handle<Boolean> V8EXPORT False();
+Handle<Primitive> V8_EXPORT Undefined();
+Handle<Primitive> V8_EXPORT Null();
+Handle<Boolean> V8_EXPORT True();
+Handle<Boolean> V8_EXPORT False();
V8_INLINE(Handle<Primitive> Undefined(Isolate* isolate));
V8_INLINE(Handle<Primitive> Null(Isolate* isolate));
@@ -3751,7 +3812,7 @@ V8_INLINE(Handle<Boolean> False(Isolate* isolate));
* setting the stack limit and you must set a non-default stack limit separately
* for each thread.
*/
-class V8EXPORT ResourceConstraints {
+class V8_EXPORT ResourceConstraints {
public:
ResourceConstraints();
int max_young_space_size() const { return max_young_space_size_; }
@@ -3771,7 +3832,7 @@ class V8EXPORT ResourceConstraints {
};
-bool V8EXPORT SetResourceConstraints(ResourceConstraints* constraints);
+bool V8_EXPORT SetResourceConstraints(ResourceConstraints* constraints);
// --- Exceptions ---
@@ -3789,13 +3850,13 @@ typedef void (*MessageCallback)(Handle<Message> message, Handle<Value> error);
* operation; the caller must return immediately and only after the exception
* has been handled does it become legal to invoke JavaScript operations.
*/
-Handle<Value> V8EXPORT ThrowException(Handle<Value> exception);
+Handle<Value> V8_EXPORT ThrowException(Handle<Value> exception);
/**
* Create new error objects by calling the corresponding error object
* constructor with the message.
*/
-class V8EXPORT Exception {
+class V8_EXPORT Exception {
public:
static Local<Value> RangeError(Handle<String> message);
static Local<Value> ReferenceError(Handle<String> message);
@@ -3889,7 +3950,7 @@ typedef void (*GCCallback)();
* Instances of this class can be passed to v8::V8::HeapStatistics to
* get heap statistics from V8.
*/
-class V8EXPORT HeapStatistics {
+class V8_EXPORT HeapStatistics {
public:
HeapStatistics();
size_t total_heap_size() { return total_heap_size_; }
@@ -3921,13 +3982,13 @@ class RetainedObjectInfo;
* threads. An isolate can be entered by at most one thread at any
* given time. The Locker/Unlocker API must be used to synchronize.
*/
-class V8EXPORT Isolate {
+class V8_EXPORT Isolate {
public:
/**
* Stack-allocated class which sets the isolate for all operations
* executed within a local scope.
*/
- class V8EXPORT Scope {
+ class V8_EXPORT Scope {
public:
explicit Scope(Isolate* isolate) : isolate_(isolate) {
isolate->Enter();
@@ -4075,7 +4136,7 @@ class V8EXPORT Isolate {
};
-class V8EXPORT StartupData {
+class V8_EXPORT StartupData {
public:
enum CompressionAlgorithm {
kUncompressed,
@@ -4096,7 +4157,7 @@ class V8EXPORT StartupData {
*
* For an example of the class usage, see the "shell.cc" sample application.
*/
-class V8EXPORT StartupDataDecompressor { // NOLINT
+class V8_EXPORT StartupDataDecompressor { // NOLINT
public:
StartupDataDecompressor();
virtual ~StartupDataDecompressor();
@@ -4240,7 +4301,7 @@ typedef void (*JitCodeEventHandler)(const JitCodeEvent* event);
/**
* Interface for iterating through all external resources in the heap.
*/
-class V8EXPORT ExternalResourceVisitor { // NOLINT
+class V8_EXPORT ExternalResourceVisitor { // NOLINT
public:
virtual ~ExternalResourceVisitor() {}
virtual void VisitExternalString(Handle<String> string) {}
@@ -4250,7 +4311,7 @@ class V8EXPORT ExternalResourceVisitor { // NOLINT
/**
* Interface for iterating through all the persistent handles in the heap.
*/
-class V8EXPORT PersistentHandleVisitor { // NOLINT
+class V8_EXPORT PersistentHandleVisitor { // NOLINT
public:
virtual ~PersistentHandleVisitor() {}
virtual void VisitPersistentHandle(Persistent<Value>* value,
@@ -4263,7 +4324,7 @@ class V8EXPORT PersistentHandleVisitor { // NOLINT
* to be modified. Useful when otherwise unsafe handle operations need to
* be performed.
*/
-class V8EXPORT AssertNoGCScope {
+class V8_EXPORT AssertNoGCScope {
#ifndef DEBUG
// TODO(yangguo): remove isolate argument.
V8_INLINE(AssertNoGCScope(Isolate* isolate)) { }
@@ -4279,7 +4340,7 @@ class V8EXPORT AssertNoGCScope {
/**
* Container class for static utility functions.
*/
-class V8EXPORT V8 {
+class V8_EXPORT V8 {
public:
/** Set the callback to invoke in case of fatal errors. */
static void SetFatalErrorHandler(FatalErrorCallback that);
@@ -4736,6 +4797,9 @@ class V8EXPORT V8 {
void* data,
RevivableCallback weak_reference_callback);
static void ClearWeak(internal::Object** global_handle);
+ static int Eternalize(internal::Isolate* isolate,
+ internal::Object** handle);
+ static internal::Object** GetEternal(internal::Isolate* isolate, int index);
template <class T> friend class Handle;
template <class T> friend class Local;
@@ -4747,7 +4811,7 @@ class V8EXPORT V8 {
/**
* An external exception handler.
*/
-class V8EXPORT TryCatch {
+class V8_EXPORT TryCatch {
public:
/**
* Creates a new try/catch block and registers it with v8. Note that
@@ -4880,7 +4944,7 @@ class V8EXPORT TryCatch {
/**
* Ignore
*/
-class V8EXPORT ExtensionConfiguration {
+class V8_EXPORT ExtensionConfiguration {
public:
ExtensionConfiguration(int name_count, const char* names[])
: name_count_(name_count), names_(names) { }
@@ -4895,7 +4959,7 @@ class V8EXPORT ExtensionConfiguration {
* A sandboxed execution context with its own set of built-in objects
* and functions.
*/
-class V8EXPORT Context {
+class V8_EXPORT Context {
public:
/**
* Returns the global proxy object or global object itself for
@@ -5180,7 +5244,7 @@ class V8EXPORT Context {
* // V8 Now no longer locked.
* \endcode
*/
-class V8EXPORT Unlocker {
+class V8_EXPORT Unlocker {
public:
/**
* Initialize Unlocker for a given Isolate.
@@ -5198,7 +5262,7 @@ class V8EXPORT Unlocker {
};
-class V8EXPORT Locker {
+class V8_EXPORT Locker {
public:
/**
* Initialize Locker for a given Isolate.
@@ -5259,7 +5323,7 @@ struct HeapStatsUpdate;
/**
* An interface for exporting data from V8, using "push" model.
*/
-class V8EXPORT OutputStream { // NOLINT
+class V8_EXPORT OutputStream { // NOLINT
public:
enum OutputEncoding {
kAscii = 0 // 7-bit ASCII.
@@ -5296,7 +5360,7 @@ class V8EXPORT OutputStream { // NOLINT
* An interface for reporting progress and controlling long-running
* activities.
*/
-class V8EXPORT ActivityControl { // NOLINT
+class V8_EXPORT ActivityControl { // NOLINT
public:
enum ControlOption {
kContinue = 0,
@@ -5419,12 +5483,13 @@ class Internals {
static const int kNullValueRootIndex = 7;
static const int kTrueValueRootIndex = 8;
static const int kFalseValueRootIndex = 9;
- static const int kEmptyStringRootIndex = 134;
+ static const int kEmptyStringRootIndex = 133;
static const int kNodeClassIdOffset = 1 * kApiPointerSize;
static const int kNodeFlagsOffset = 1 * kApiPointerSize + 3;
static const int kNodeStateMask = 0xf;
static const int kNodeStateIsWeakValue = 2;
+ static const int kNodeStateIsPendingValue = 3;
static const int kNodeStateIsNearDeathValue = 4;
static const int kNodeIsIndependentShift = 4;
static const int kNodeIsPartiallyDependentShift = 5;
@@ -5598,6 +5663,21 @@ Local<T> Local<T>::New(Isolate* isolate, T* that) {
}
+template<class T>
+int Local<T>::Eternalize(Isolate* isolate) {
+ return V8::Eternalize(reinterpret_cast<internal::Isolate*>(isolate),
+ reinterpret_cast<internal::Object**>(this->val_));
+}
+
+
+template<class T>
+Local<T> Local<T>::GetEternal(Isolate* isolate, int index) {
+ internal::Object** handle =
+ V8::GetEternal(reinterpret_cast<internal::Isolate*>(isolate), index);
+ return Local<T>(T::Cast(reinterpret_cast<Value*>(handle)));
+}
+
+
#ifdef V8_USE_UNSAFE_HANDLES
template <class T>
Persistent<T> Persistent<T>::New(Handle<T> that) {
@@ -5640,8 +5720,10 @@ template <class T>
bool Persistent<T>::IsNearDeath() const {
typedef internal::Internals I;
if (this->IsEmpty()) return false;
- return I::GetNodeState(reinterpret_cast<internal::Object**>(this->val_)) ==
- I::kNodeStateIsNearDeathValue;
+ uint8_t node_state =
+ I::GetNodeState(reinterpret_cast<internal::Object**>(this->val_));
+ return node_state == I::kNodeStateIsNearDeathValue ||
+ node_state == I::kNodeStateIsPendingValue;
}
@@ -5996,6 +6078,10 @@ Handle<Integer> ScriptOrigin::ResourceColumnOffset() const {
return resource_column_offset_;
}
+Handle<Boolean> ScriptOrigin::ResourceIsSharedCrossOrigin() const {
+ return resource_is_shared_cross_origin_;
+}
+
Handle<Boolean> Boolean::New(bool value) {
return value ? True() : False();
@@ -6503,7 +6589,6 @@ void* Context::GetAlignedPointerFromEmbedderData(int index) {
} // namespace v8
-#undef V8EXPORT
#undef TYPE_CHECK
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc
index d587b81fd3..e04fbef23b 100644
--- a/deps/v8/src/api.cc
+++ b/deps/v8/src/api.cc
@@ -46,6 +46,7 @@
#include "heap-profiler.h"
#include "heap-snapshot-generator-inl.h"
#include "icu_util.h"
+#include "json-parser.h"
#include "messages.h"
#ifdef COMPRESS_STARTUP_DATA_BZ2
#include "natives.h"
@@ -398,7 +399,7 @@ enum CompressedStartupDataItems {
kSnapshotContext,
kLibraries,
kExperimentalLibraries,
-#if defined(ENABLE_I18N_SUPPORT)
+#if defined(V8_I18N_SUPPORT)
kI18NExtension,
#endif
kCompressedStartupDataCount
@@ -442,7 +443,7 @@ void V8::GetCompressedStartupData(StartupData* compressed_data) {
compressed_data[kExperimentalLibraries].raw_size =
i::ExperimentalNatives::GetRawScriptsSize();
-#if defined(ENABLE_I18N_SUPPORT)
+#if defined(V8_I18N_SUPPORT)
  i::Vector<const i::byte> i18n_extension_source =
i::I18NNatives::GetScriptsSource();
compressed_data[kI18NExtension].data =
@@ -482,7 +483,7 @@ void V8::SetDecompressedStartupData(StartupData* decompressed_data) {
decompressed_data[kExperimentalLibraries].raw_size);
i::ExperimentalNatives::SetRawScriptsSource(exp_libraries_source);
-#if defined(ENABLE_I18N_SUPPORT)
+#if defined(V8_I18N_SUPPORT)
ASSERT_EQ(i::I18NNatives::GetRawScriptsSize(),
decompressed_data[kI18NExtension].raw_size);
i::Vector<const char> i18n_extension_source(
@@ -675,6 +676,16 @@ void V8::DisposeGlobal(i::Object** obj) {
}
+int V8::Eternalize(i::Isolate* isolate, i::Object** handle) {
+ return isolate->eternal_handles()->Create(isolate, *handle);
+}
+
+
+i::Object** V8::GetEternal(i::Isolate* isolate, int index) {
+ return isolate->eternal_handles()->Get(index).location();
+}
+
+
// --- H a n d l e s ---
@@ -1918,6 +1929,7 @@ Local<Script> Script::New(v8::Handle<String> source,
i::Handle<i::Object> name_obj;
int line_offset = 0;
int column_offset = 0;
+ bool is_shared_cross_origin = false;
if (origin != NULL) {
if (!origin->ResourceName().IsEmpty()) {
name_obj = Utils::OpenHandle(*origin->ResourceName());
@@ -1929,6 +1941,10 @@ Local<Script> Script::New(v8::Handle<String> source,
column_offset =
static_cast<int>(origin->ResourceColumnOffset()->Value());
}
+ if (!origin->ResourceIsSharedCrossOrigin().IsEmpty()) {
+ is_shared_cross_origin =
+ origin->ResourceIsSharedCrossOrigin() == v8::True();
+ }
}
EXCEPTION_PREAMBLE(isolate);
i::ScriptDataImpl* pre_data_impl =
@@ -1945,6 +1961,7 @@ Local<Script> Script::New(v8::Handle<String> source,
name_obj,
line_offset,
column_offset,
+ is_shared_cross_origin,
isolate->global_context(),
NULL,
pre_data_impl,
@@ -2412,6 +2429,20 @@ int Message::GetEndColumn() const {
}
+bool Message::IsSharedCrossOrigin() const {
+ i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+ if (IsDeadCheck(isolate, "v8::Message::IsSharedCrossOrigin()")) return 0;
+ ENTER_V8(isolate);
+ i::HandleScope scope(isolate);
+ i::Handle<i::JSMessageObject> message =
+ i::Handle<i::JSMessageObject>::cast(Utils::OpenHandle(this));
+ i::Handle<i::JSValue> script =
+ i::Handle<i::JSValue>::cast(i::Handle<i::Object>(message->script(),
+ isolate));
+ return i::Script::cast(script->value())->is_shared_cross_origin();
+}
+
+
Local<String> Message::GetSourceLine() const {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ON_BAILOUT(isolate, "v8::Message::GetSourceLine()", return Local<String>());
@@ -2587,6 +2618,29 @@ bool StackFrame::IsConstructor() const {
}
+// --- J S O N ---
+
+Local<Object> JSON::Parse(Local<String> json_string) {
+ i::Isolate* isolate = i::Isolate::Current();
+ EnsureInitializedForIsolate(isolate, "v8::JSON::Parse");
+ ENTER_V8(isolate);
+ i::HandleScope scope(isolate);
+ i::Handle<i::String> source = i::Handle<i::String>(
+ FlattenGetString(Utils::OpenHandle(*json_string)));
+ EXCEPTION_PREAMBLE(isolate);
+ i::Handle<i::Object> result;
+ if (source->IsSeqOneByteString()) {
+ result = i::JsonParser<true>::Parse(source);
+ } else {
+ result = i::JsonParser<false>::Parse(source);
+ }
+ has_pending_exception = result.is_null();
+ EXCEPTION_BAILOUT_CHECK(isolate, Local<Object>());
+ return Utils::ToLocal(
+ i::Handle<i::JSObject>::cast(scope.CloseAndEscape(result)));
+}
+
+
// --- D a t a ---
bool Value::FullIsUndefined() const {
@@ -3051,6 +3105,12 @@ void v8::ArrayBuffer::CheckCast(Value* that) {
}
+void v8::ArrayBuffer::Allocator::Free(void* data) {
+ API_Fatal("v8::ArrayBuffer::Allocator::Free",
+ "Override Allocator::Free(void*, size_t)");
+}
+
+
void v8::ArrayBufferView::CheckCast(Value* that) {
i::Handle<i::Object> obj = Utils::OpenHandle(that);
ApiCheck(obj->IsJSArrayBufferView(),
@@ -7541,6 +7601,18 @@ const CpuProfileNode* CpuProfile::GetSample(int index) const {
}
+int64_t CpuProfile::GetStartTime() const {
+ const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
+ return profile->start_time_us();
+}
+
+
+int64_t CpuProfile::GetEndTime() const {
+ const i::CpuProfile* profile = reinterpret_cast<const i::CpuProfile*>(this);
+ return profile->end_time_us();
+}
+
+
int CpuProfile::GetSamplesCount() const {
return reinterpret_cast<const i::CpuProfile*>(this)->samples_count();
}
diff --git a/deps/v8/src/arm/assembler-arm.cc b/deps/v8/src/arm/assembler-arm.cc
index ba0dc4b81d..a9db5a5994 100644
--- a/deps/v8/src/arm/assembler-arm.cc
+++ b/deps/v8/src/arm/assembler-arm.cc
@@ -764,10 +764,13 @@ int Assembler::GetCmpImmediateRawImmediate(Instr instr) {
// Linked labels refer to unknown positions in the code
// to be generated; pos() is the position of the last
// instruction using the label.
-
-
-// The link chain is terminated by a negative code position (must be aligned)
-const int kEndOfChain = -4;
+//
+// The linked labels form a link chain by making the branch offset
+// in the instruction stream to point to the previous branch
+// instruction using the same label.
+//
+// The link chain is terminated by a branch offset pointing to the
+// same position.
int Assembler::target_at(int pos) {
@@ -790,7 +793,7 @@ int Assembler::target_at(int pos) {
void Assembler::target_at_put(int pos, int target_pos) {
Instr instr = instr_at(pos);
if ((instr & ~kImm24Mask) == 0) {
- ASSERT(target_pos == kEndOfChain || target_pos >= 0);
+ ASSERT(target_pos == pos || target_pos >= 0);
// Emitted label constant, not part of a branch.
// Make label relative to Code* of generated Code object.
instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag));
@@ -886,27 +889,6 @@ void Assembler::bind_to(Label* L, int pos) {
}
-void Assembler::link_to(Label* L, Label* appendix) {
- if (appendix->is_linked()) {
- if (L->is_linked()) {
- // Append appendix to L's list.
- int fixup_pos;
- int link = L->pos();
- do {
- fixup_pos = link;
- link = target_at(fixup_pos);
- } while (link > 0);
- ASSERT(link == kEndOfChain);
- target_at_put(fixup_pos, appendix->pos());
- } else {
- // L is empty, simply use appendix.
- *L = *appendix;
- }
- }
- appendix->Unuse(); // appendix should not be used anymore
-}
-
-
void Assembler::bind(Label* L) {
ASSERT(!L->is_bound()); // label can only be bound once
bind_to(L, pc_offset());
@@ -916,7 +898,9 @@ void Assembler::bind(Label* L) {
void Assembler::next(Label* L) {
ASSERT(L->is_linked());
int link = target_at(L->pos());
- if (link == kEndOfChain) {
+ if (link == L->pos()) {
+    // Branch target points to the same instruction. This is the end of the link
+ // chain.
L->Unuse();
} else {
ASSERT(link >= 0);
@@ -1229,9 +1213,11 @@ int Assembler::branch_offset(Label* L, bool jump_elimination_allowed) {
target_pos = L->pos();
} else {
if (L->is_linked()) {
- target_pos = L->pos(); // L's link
+ // Point to previous instruction that uses the link.
+ target_pos = L->pos();
} else {
- target_pos = kEndOfChain;
+ // First entry of the link chain points to itself.
+ target_pos = pc_offset();
}
L->link_to(pc_offset());
}
@@ -1245,17 +1231,16 @@ int Assembler::branch_offset(Label* L, bool jump_elimination_allowed) {
void Assembler::label_at_put(Label* L, int at_offset) {
int target_pos;
- if (L->is_bound()) {
+ ASSERT(!L->is_bound());
+ if (L->is_linked()) {
+ // Point to previous instruction that uses the link.
target_pos = L->pos();
} else {
- if (L->is_linked()) {
- target_pos = L->pos(); // L's link
- } else {
- target_pos = kEndOfChain;
- }
- L->link_to(at_offset);
- instr_at_put(at_offset, target_pos + (Code::kHeaderSize - kHeapObjectTag));
+ // First entry of the link chain points to itself.
+ target_pos = at_offset;
}
+ L->link_to(at_offset);
+ instr_at_put(at_offset, target_pos + (Code::kHeaderSize - kHeapObjectTag));
}
diff --git a/deps/v8/src/arm/assembler-arm.h b/deps/v8/src/arm/assembler-arm.h
index 496eb3e880..f647848de5 100644
--- a/deps/v8/src/arm/assembler-arm.h
+++ b/deps/v8/src/arm/assembler-arm.h
@@ -1548,7 +1548,6 @@ class Assembler : public AssemblerBase {
// Labels
void print(Label* L);
void bind_to(Label* L, int pos);
- void link_to(Label* L, Label* appendix);
void next(Label* L);
enum UseConstantPoolMode {
diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc
index eff47e2692..5f3a999f56 100644
--- a/deps/v8/src/arm/builtins-arm.cc
+++ b/deps/v8/src/arm/builtins-arm.cc
@@ -119,9 +119,9 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
// Initial map for the builtin InternalArray functions should be maps.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
__ SmiTst(r2);
- __ Assert(ne, "Unexpected initial map for InternalArray function");
+ __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
__ CompareObjectType(r2, r3, r4, MAP_TYPE);
- __ Assert(eq, "Unexpected initial map for InternalArray function");
+ __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
}
// Run the native code for the InternalArray function called as a normal
@@ -147,9 +147,9 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// Initial map for the builtin Array functions should be maps.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
__ SmiTst(r2);
- __ Assert(ne, "Unexpected initial map for Array function");
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
__ CompareObjectType(r2, r3, r4, MAP_TYPE);
- __ Assert(eq, "Unexpected initial map for Array function");
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
}
// Run the native code for the Array function called as a normal function.
@@ -178,7 +178,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
__ cmp(function, Operand(r2));
- __ Assert(eq, "Unexpected String function");
+ __ Assert(eq, kUnexpectedStringFunction);
}
// Load the first arguments in r0 and get rid of the rest.
@@ -224,10 +224,10 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
__ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
- __ Assert(eq, "Unexpected string wrapper instance size");
+ __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
__ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
__ cmp(r4, Operand::Zero());
- __ Assert(eq, "Unexpected unused properties of string wrapper");
+ __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
}
__ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));
@@ -471,7 +471,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// r0: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
__ cmp(r0, r6);
- __ Assert(le, "Unexpected number of pre-allocated property fields.");
+ __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
}
__ InitializeFieldsWithFiller(r5, r0, r7);
// To allow for truncation.
@@ -503,7 +503,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Done if no extra properties are to be allocated.
__ b(eq, &allocated);
- __ Assert(pl, "Property allocation count failed.");
+ __ Assert(pl, kPropertyAllocationCountFailed);
// Scale the number of elements by pointer size and add the header for
// FixedArrays to the start of the next object calculation from above.
@@ -547,7 +547,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
} else if (FLAG_debug_code) {
__ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
__ cmp(r7, r8);
- __ Assert(eq, "Undefined value not loaded.");
+ __ Assert(eq, kUndefinedValueNotLoaded);
}
__ b(&entry);
__ bind(&loop);
diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc
index ba98b96315..98a835fd1a 100644
--- a/deps/v8/src/arm/code-stubs-arm.cc
+++ b/deps/v8/src/arm/code-stubs-arm.cc
@@ -246,17 +246,6 @@ void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
}
-void UnaryOpStub::InitializeInterfaceDescriptor(
- Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
- static Register registers[] = { r0 };
- descriptor->register_param_count_ = 1;
- descriptor->register_params_ = registers;
- descriptor->deoptimization_handler_ =
- FUNCTION_ADDR(UnaryOpIC_Miss);
-}
-
-
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -520,9 +509,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
Label after_sentinel;
__ JumpIfNotSmi(r3, &after_sentinel);
if (FLAG_debug_code) {
- const char* message = "Expected 0 as a Smi sentinel";
__ cmp(r3, Operand::Zero());
- __ Assert(eq, message);
+ __ Assert(eq, kExpected0AsASmiSentinel);
}
__ ldr(r3, GlobalObjectOperand());
__ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
@@ -3917,9 +3905,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
if (FLAG_debug_code) {
__ SmiTst(regexp_data);
- __ Check(ne, "Unexpected type for RegExp data, FixedArray expected");
+ __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
__ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
- __ Check(eq, "Unexpected type for RegExp data, FixedArray expected");
+ __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
}
// regexp_data: RegExp data (FixedArray)
@@ -4261,7 +4249,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ tst(r0, Operand(kIsIndirectStringMask));
- __ Assert(eq, "external string expected, but not found");
+ __ Assert(eq, kExternalStringExpectedButNotFound);
}
__ ldr(subject,
FieldMemOperand(subject, ExternalString::kResourceDataOffset));
@@ -4643,7 +4631,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
// Index is not a smi.
__ bind(&index_not_smi_);
@@ -4688,7 +4676,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort("Unexpected fallthrough from CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
@@ -4718,7 +4706,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -4728,7 +4716,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort("Unexpected fallthrough from CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
@@ -4785,7 +4773,7 @@ void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
// Check that destination is actually word aligned if the flag says
// that it is.
__ tst(dest, Operand(kPointerAlignmentMask));
- __ Check(eq, "Destination of copy not aligned.");
+ __ Check(eq, kDestinationOfCopyNotAligned);
}
const int kReadAlignment = 4;
@@ -5014,7 +5002,7 @@ void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
if (FLAG_debug_code) {
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(ip, candidate);
- __ Assert(eq, "oddball in string table is not undefined or the hole");
+ __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole);
}
__ jmp(&next_probe[i]);
@@ -6912,7 +6900,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -6969,7 +6957,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -7030,9 +7018,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ tst(r3, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for Array function");
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
__ CompareObjectType(r3, r3, r4, MAP_TYPE);
- __ Assert(eq, "Unexpected initial map for Array function");
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in ebx or a valid cell
Label okay_here;
@@ -7041,7 +7029,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ b(eq, &okay_here);
__ ldr(r3, FieldMemOperand(r2, 0));
__ cmp(r3, Operand(cell_map));
- __ Assert(eq, "Expected property cell in register ebx");
+ __ Assert(eq, kExpectedPropertyCellInRegisterEbx);
__ bind(&okay_here);
}
@@ -7144,9 +7132,9 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ tst(r3, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for Array function");
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
__ CompareObjectType(r3, r3, r4, MAP_TYPE);
- __ Assert(eq, "Unexpected initial map for Array function");
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
}
// Figure out the right elements kind
@@ -7163,7 +7151,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ b(eq, &done);
__ cmp(r3, Operand(FAST_HOLEY_ELEMENTS));
__ Assert(eq,
- "Invalid ElementsKind for InternalArray or InternalPackedArray");
+ kInvalidElementsKindForInternalArrayOrInternalPackedArray);
__ bind(&done);
}
diff --git a/deps/v8/src/arm/codegen-arm.cc b/deps/v8/src/arm/codegen-arm.cc
index 7559373ee9..1bcf3e3a60 100644
--- a/deps/v8/src/arm/codegen-arm.cc
+++ b/deps/v8/src/arm/codegen-arm.cc
@@ -532,7 +532,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
__ SmiTag(r9);
__ orr(r9, r9, Operand(1));
__ CompareRoot(r9, Heap::kTheHoleValueRootIndex);
- __ Assert(eq, "object found in smi-only array");
+ __ Assert(eq, kObjectFoundInSmiOnlyArray);
}
__ Strd(r4, r5, MemOperand(r7, 8, PostIndex));
@@ -728,7 +728,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ tst(result, Operand(kIsIndirectStringMask));
- __ Assert(eq, "external string expected, but not found");
+ __ Assert(eq, kExternalStringExpectedButNotFound);
}
// Rule out short external strings.
STATIC_CHECK(kShortExternalStringTag != 0);
diff --git a/deps/v8/src/arm/debug-arm.cc b/deps/v8/src/arm/debug-arm.cc
index 7faea08034..108435f0a9 100644
--- a/deps/v8/src/arm/debug-arm.cc
+++ b/deps/v8/src/arm/debug-arm.cc
@@ -130,7 +130,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ tst(reg, Operand(0xc0000000));
- __ Assert(eq, "Unable to encode value as smi");
+ __ Assert(eq, kUnableToEncodeValueAsSmi);
}
__ SmiTag(reg);
}
@@ -313,12 +313,12 @@ void Debug::GenerateSlotDebugBreak(MacroAssembler* masm) {
void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) {
- masm->Abort("LiveEdit frame dropping is not supported on arm");
+ masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnArm);
}
void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
- masm->Abort("LiveEdit frame dropping is not supported on arm");
+ masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnArm);
}
const bool Debug::kFrameDropperSupported = false;
diff --git a/deps/v8/src/arm/frames-arm.h b/deps/v8/src/arm/frames-arm.h
index 19b29b8553..d022b414b4 100644
--- a/deps/v8/src/arm/frames-arm.h
+++ b/deps/v8/src/arm/frames-arm.h
@@ -171,6 +171,11 @@ inline Object* JavaScriptFrame::function_slot_object() const {
}
+inline void StackHandler::SetFp(Address slot, Address fp) {
+ Memory::Address_at(slot) = fp;
+}
+
+
} } // namespace v8::internal
#endif // V8_ARM_FRAMES_ARM_H_
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index ea7b73f2fe..b73006a17d 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -786,9 +786,9 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// Check that we're not inside a with or catch context.
__ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ CompareRoot(r1, Heap::kWithContextMapRootIndex);
- __ Check(ne, "Declaration in with context.");
+ __ Check(ne, kDeclarationInWithContext);
__ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
- __ Check(ne, "Declaration in catch context.");
+ __ Check(ne, kDeclarationInCatchContext);
}
}
@@ -2512,7 +2512,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// Check for an uninitialized let binding.
__ ldr(r2, location);
__ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
- __ Check(eq, "Let binding re-initialization.");
+ __ Check(eq, kLetBindingReInitialization);
}
// Perform the assignment.
__ str(r0, location);
@@ -3473,23 +3473,23 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
Register value,
uint32_t encoding_mask) {
__ SmiTst(index);
- __ Check(eq, "Non-smi index");
+ __ Check(eq, kNonSmiIndex);
__ SmiTst(value);
- __ Check(eq, "Non-smi value");
+ __ Check(eq, kNonSmiValue);
__ ldr(ip, FieldMemOperand(string, String::kLengthOffset));
__ cmp(index, ip);
- __ Check(lt, "Index is too large");
+ __ Check(lt, kIndexIsTooLarge);
__ cmp(index, Operand(Smi::FromInt(0)));
- __ Check(ge, "Index is negative");
+ __ Check(ge, kIndexIsNegative);
__ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
__ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
__ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
__ cmp(ip, Operand(encoding_mask));
- __ Check(eq, "Unexpected string type");
+ __ Check(eq, kUnexpectedStringType);
}
@@ -3849,7 +3849,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort("Attempt to use undefined cache.");
+ __ Abort(kAttemptToUseUndefinedCache);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
context()->Plug(r0);
return;
@@ -4030,7 +4030,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// elements_end: Array end.
if (generate_debug_code_) {
__ cmp(array_length, Operand::Zero());
- __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
+ __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
}
__ bind(&loop);
__ ldr(string, MemOperand(element, kPointerSize, PostIndex));
@@ -4349,35 +4349,12 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
break;
}
- case Token::SUB:
- EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
- break;
-
- case Token::BIT_NOT:
- EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
- break;
-
default:
UNREACHABLE();
}
}
-void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
- const char* comment) {
- // TODO(svenpanne): Allowing format strings in Comment would be nice here...
- Comment cmt(masm_, comment);
- UnaryOpStub stub(expr->op());
- // UnaryOpStub expects the argument to be in the
- // accumulator register r0.
- VisitForAccumulatorValue(expr->expression());
- SetSourcePosition(expr->position());
- CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
- expr->UnaryOperationFeedbackId());
- context()->Plug(r0);
-}
-
-
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
diff --git a/deps/v8/src/arm/lithium-arm.cc b/deps/v8/src/arm/lithium-arm.cc
index b55679ee01..43f0fd3293 100644
--- a/deps/v8/src/arm/lithium-arm.cc
+++ b/deps/v8/src/arm/lithium-arm.cc
@@ -272,24 +272,6 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
}
-ExternalReference LLinkObjectInList::GetReference(Isolate* isolate) {
- switch (hydrogen()->known_list()) {
- case HLinkObjectInList::ALLOCATION_SITE_LIST:
- return ExternalReference::allocation_sites_list_address(isolate);
- }
-
- UNREACHABLE();
- // Return a dummy value
- return ExternalReference::isolate_address(isolate);
-}
-
-
-void LLinkObjectInList::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add(" offset %d", hydrogen()->store_field().offset());
-}
-
-
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
@@ -455,7 +437,7 @@ LPlatformChunk* LChunkBuilder::Build() {
}
-void LChunkBuilder::Abort(const char* reason) {
+void LChunkBuilder::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -663,7 +645,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort("Out of virtual registers while trying to allocate temp register.");
+ Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1343,15 +1325,6 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
}
-LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
- ASSERT(instr->value()->representation().IsInteger32());
- ASSERT(instr->representation().IsInteger32());
- if (instr->HasNoUses()) return NULL;
- LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LBitNotI(value));
-}
-
-
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1850,17 +1823,6 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
}
-LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
- return NULL;
-}
-
-
-LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
- HInductionVariableAnnotation* instr) {
- return NULL;
-}
-
-
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = UseRegister(instr->length());
@@ -2034,19 +1996,6 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
}
-LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LUnallocated* temp1 = NULL;
- LOperand* temp2 = NULL;
- if (!instr->CanOmitPrototypeChecks()) {
- temp1 = TempRegister();
- temp2 = TempRegister();
- }
- LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
- if (instr->CanOmitPrototypeChecks()) return result;
- return AssignEnvironment(result);
-}
-
-
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckFunction(value));
@@ -2055,10 +2004,16 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
- LInstruction* result = new(zone()) LCheckMaps(value);
- if (instr->CanOmitMapChecks()) return result;
- return AssignEnvironment(result);
+ if (!instr->CanOmitMapChecks()) {
+ value = UseRegisterAtStart(instr->value());
+ if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+ }
+ LCheckMaps* result = new(zone()) LCheckMaps(value);
+ if (!instr->CanOmitMapChecks()) {
+ AssignEnvironment(result);
+ if (instr->has_migration_target()) return AssignPointerMap(result);
+ }
+ return result;
}
@@ -2140,13 +2095,6 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoLinkObjectInList(HLinkObjectInList* instr) {
- LOperand* object = UseRegister(instr->value());
- LLinkObjectInList* result = new(zone()) LLinkObjectInList(object);
- return result;
-}
-
-
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
LInstruction* result =
@@ -2347,7 +2295,7 @@ LInstruction* LChunkBuilder::DoTrapAllocationMemento(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool is_in_object = instr->access().IsInobject();
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ bool needs_write_barrier_for_map = instr->has_transition() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2418,12 +2366,6 @@ LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
}
-LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
- LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LStringLength(string));
-}
-
-
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
info()->MarkAsDeferredCalling();
LOperand* size = instr->size()->IsConstant()
@@ -2473,7 +2415,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort("Too many spill slots needed for OSR");
+ Abort(kTooManySpillSlotsNeededForOSR);
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/deps/v8/src/arm/lithium-arm.h b/deps/v8/src/arm/lithium-arm.h
index eecacec1a8..7ce907a7ab 100644
--- a/deps/v8/src/arm/lithium-arm.h
+++ b/deps/v8/src/arm/lithium-arm.h
@@ -50,7 +50,6 @@ class LCodeGen;
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
- V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -68,7 +67,6 @@ class LCodeGen;
V(CheckNonSmi) \
V(CheckMaps) \
V(CheckMapValue) \
- V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -119,7 +117,6 @@ class LCodeGen;
V(IsUndetectableAndBranch) \
V(Label) \
V(LazyBailout) \
- V(LinkObjectInList) \
V(LoadContextSlot) \
V(LoadExternalArrayPointer) \
V(LoadFieldByIndex) \
@@ -176,7 +173,6 @@ class LCodeGen;
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringCompareAndBranch) \
- V(StringLength) \
V(SubI) \
V(RSubI) \
V(TaggedToI) \
@@ -1380,18 +1376,6 @@ class LThrow: public LTemplateInstruction<0, 1, 0> {
};
-class LBitNotI: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LBitNotI(LOperand* value) {
- inputs_[0] = value;
- }
-
- LOperand* value() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
-};
-
-
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -1676,23 +1660,6 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
};
-class LLinkObjectInList: public LTemplateInstruction<0, 1, 0> {
- public:
- explicit LLinkObjectInList(LOperand* object) {
- inputs_[0] = object;
- }
-
- LOperand* object() { return inputs_[0]; }
-
- ExternalReference GetReference(Isolate* isolate);
-
- DECLARE_CONCRETE_INSTRUCTION(LinkObjectInList, "link-object-in-list")
- DECLARE_HYDROGEN_ACCESSOR(LinkObjectInList)
-
- virtual void PrintDataTo(StringStream* stream);
-};
-
-
class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
public:
explicit LLoadContextSlot(LOperand* context) {
@@ -2169,7 +2136,7 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition(); }
+ Handle<Map> transition() const { return hydrogen()->transition_map(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2332,19 +2299,6 @@ class LStringCharFromCode: public LTemplateInstruction<1, 1, 0> {
};
-class LStringLength: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LStringLength(LOperand* string) {
- inputs_[0] = string;
- }
-
- LOperand* string() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
- DECLARE_HYDROGEN_ACCESSOR(StringLength)
-};
-
-
class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
public:
explicit LCheckFunction(LOperand* value) {
@@ -2384,26 +2338,6 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 2> {
- public:
- LCheckPrototypeMaps(LOperand* temp, LOperand* temp2) {
- temps_[0] = temp;
- temps_[1] = temp2;
- }
-
- LOperand* temp() { return temps_[0]; }
- LOperand* temp2() { return temps_[1]; }
-
- DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
- DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
-
- ZoneList<Handle<JSObject> >* prototypes() const {
- return hydrogen()->prototypes();
- }
- ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
-};
-
-
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2702,7 +2636,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc
index 929d04de13..0b704d07ef 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.cc
+++ b/deps/v8/src/arm/lithium-codegen-arm.cc
@@ -91,7 +91,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LCodeGen::Abort(const char* reason) {
+void LCodeGen::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -334,7 +334,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
// 32bit data after it.
if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
deopt_jump_table_.length() * 7)) {
- Abort("Generated code is too large");
+ Abort(kGeneratedCodeIsTooLarge);
}
if (deopt_jump_table_.length() > 0) {
@@ -423,7 +423,7 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
ASSERT(literal->IsNumber());
__ mov(scratch, Operand(static_cast<int32_t>(literal->Number())));
} else if (r.IsDouble()) {
- Abort("EmitLoadRegister: Unsupported double immediate.");
+ Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
} else {
ASSERT(r.IsTagged());
__ LoadObject(scratch, literal);
@@ -461,9 +461,9 @@ DwVfpRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
__ vcvt_f64_s32(dbl_scratch, flt_scratch);
return dbl_scratch;
} else if (r.IsDouble()) {
- Abort("unsupported double immediate");
+ Abort(kUnsupportedDoubleImmediate);
} else if (r.IsTagged()) {
- Abort("unsupported tagged immediate");
+ Abort(kUnsupportedTaggedImmediate);
}
} else if (op->IsStackSlot() || op->IsArgument()) {
// TODO(regis): Why is vldr not taking a MemOperand?
@@ -534,14 +534,14 @@ Operand LCodeGen::ToOperand(LOperand* op) {
ASSERT(constant->HasInteger32Value());
return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
- Abort("ToOperand Unsupported double immediate.");
+ Abort(kToOperandUnsupportedDoubleImmediate);
}
ASSERT(r.IsTagged());
return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
- Abort("ToOperand IsDoubleRegister unimplemented");
+ Abort(kToOperandIsDoubleRegisterUnimplemented);
return Operand::Zero();
}
// Stack slots not implemented, use ToMemOperand instead.
@@ -772,7 +772,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort("bailout was not prepared");
+ Abort(kBailoutWasNotPrepared);
return;
}
@@ -1669,7 +1669,11 @@ void LCodeGen::DoBitI(LBitI* instr) {
__ orr(result, left, right);
break;
case Token::BIT_XOR:
- __ eor(result, left, right);
+ if (right_op->IsConstantOperand() && right.immediate() == int32_t(~0)) {
+ __ mvn(result, Operand(left));
+ } else {
+ __ eor(result, left, right);
+ }
break;
default:
UNREACHABLE();
@@ -1744,8 +1748,12 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
if (shift_count != 0) {
if (instr->hydrogen_value()->representation().IsSmi() &&
instr->can_deopt()) {
- __ mov(result, Operand(left, LSL, shift_count - 1));
- __ SmiTag(result, result, SetCC);
+ if (shift_count != 1) {
+ __ mov(result, Operand(left, LSL, shift_count - 1));
+ __ SmiTag(result, result, SetCC);
+ } else {
+ __ SmiTag(result, left, SetCC);
+ }
DeoptimizeIf(vs, instr->environment());
} else {
__ mov(result, Operand(left, LSL, shift_count));
@@ -1932,7 +1940,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ cmp(ip, Operand(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(eq, "Unexpected string type");
+ __ Check(eq, kUnexpectedStringType);
}
__ add(ip,
@@ -1949,13 +1957,6 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
}
-void LCodeGen::DoBitNotI(LBitNotI* instr) {
- Register input = ToRegister(instr->value());
- Register result = ToRegister(instr->result());
- __ mvn(result, Operand(input));
-}
-
-
void LCodeGen::DoThrow(LThrow* instr) {
Register input_reg = EmitLoadRegister(instr->value(), ip);
__ push(input_reg);
@@ -2936,19 +2937,6 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
}
-void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
- Register object = ToRegister(instr->object());
- ExternalReference sites_list_address = instr->GetReference(isolate());
-
- __ mov(ip, Operand(sites_list_address));
- __ ldr(ip, MemOperand(ip));
- __ str(ip, FieldMemOperand(object,
- instr->hydrogen()->store_field().offset()));
- __ mov(ip, Operand(sites_list_address));
- __ str(object, MemOperand(ip));
-}
-
-
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
@@ -3209,7 +3197,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -3293,7 +3281,7 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -3554,7 +3542,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
- Abort("DoPushArgument not implemented for double type.");
+ Abort(kDoPushArgumentNotImplementedForDoubleType);
} else {
Register argument_reg = EmitLoadRegister(argument, ip);
__ push(argument_reg);
@@ -3774,7 +3762,7 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) {
DwVfpRegister input = ToDoubleRegister(instr->value());
DwVfpRegister result = ToDoubleRegister(instr->result());
__ vabs(result, input);
- } else if (r.IsInteger32()) {
+ } else if (r.IsSmiOrInteger32()) {
EmitIntegerMathAbs(instr);
} else {
// Representation is tagged.
@@ -4328,7 +4316,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -4401,7 +4389,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -4424,7 +4412,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
if (masm()->emit_debug_code()) {
__ vmrs(ip);
__ tst(ip, Operand(kVFPDefaultNaNModeControlBit));
- __ Assert(ne, "Default NaN mode not set");
+ __ Assert(ne, kDefaultNaNModeNotSet);
}
__ VFPCanonicalizeNaN(value);
}
@@ -4654,13 +4642,6 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
}
-void LCodeGen::DoStringLength(LStringLength* instr) {
- Register string = ToRegister(instr->string());
- Register result = ToRegister(instr->result());
- __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
-}
-
-
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
LOperand* input = instr->value();
ASSERT(input->IsRegister() || input->IsStackSlot());
@@ -5230,33 +5211,67 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
}
-void LCodeGen::DoCheckMapCommon(Register map_reg,
- Handle<Map> map,
- LEnvironment* env) {
- Label success;
- __ CompareMap(map_reg, map, &success);
- DeoptimizeIf(ne, env);
- __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
+ {
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ push(object);
+ CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
+ __ StoreToSafepointRegisterSlot(r0, scratch0());
+ }
+ __ tst(scratch0(), Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment());
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+ class DeferredCheckMaps: public LDeferredCode {
+ public:
+ DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
+ : LDeferredCode(codegen), instr_(instr), object_(object) {
+ SetExit(check_maps());
+ }
+ virtual void Generate() {
+ codegen()->DoDeferredInstanceMigration(instr_, object_);
+ }
+ Label* check_maps() { return &check_maps_; }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LCheckMaps* instr_;
+ Label check_maps_;
+ Register object_;
+ };
+
if (instr->hydrogen()->CanOmitMapChecks()) return;
Register map_reg = scratch0();
+
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
__ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
+
+ DeferredCheckMaps* deferred = NULL;
+ if (instr->hydrogen()->has_migration_target()) {
+ deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+ __ bind(deferred->check_maps());
+ }
+
+ Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(map_reg, map, &success);
__ b(eq, &success);
}
+
Handle<Map> map = map_set->last();
- DoCheckMapCommon(map_reg, map, instr->environment());
+ __ CompareMap(map_reg, map, &success);
+ if (instr->hydrogen()->has_migration_target()) {
+ __ b(ne, deferred->entry());
+ } else {
+ DeoptimizeIf(ne, instr->environment());
+ }
+
__ bind(&success);
}
@@ -5311,25 +5326,6 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
}
-void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
-
- Register prototype_reg = ToRegister(instr->temp());
- Register map_reg = ToRegister(instr->temp2());
-
- ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
- ZoneList<Handle<Map> >* maps = instr->maps();
-
- ASSERT(prototypes->length() == maps->length());
-
- for (int i = 0; i < prototypes->length(); i++) {
- __ LoadHeapObject(prototype_reg, prototypes->at(i));
- __ ldr(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset));
- DoCheckMapCommon(map_reg, maps->at(i), instr->environment());
- }
-}
-
-
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
@@ -5353,10 +5349,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
}
@@ -5415,10 +5413,12 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
__ Push(Smi::FromInt(size));
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
} else {
CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
diff --git a/deps/v8/src/arm/lithium-codegen-arm.h b/deps/v8/src/arm/lithium-codegen-arm.h
index 21f792153b..143109c92d 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.h
+++ b/deps/v8/src/arm/lithium-codegen-arm.h
@@ -115,7 +115,7 @@ class LCodeGen BASE_EMBEDDED {
DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
SwVfpRegister flt_scratch,
DwVfpRegister dbl_scratch);
- int ToRepresentation(LConstantOperand* op, const Representation& r) const;
+ int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
Smi* ToSmi(LConstantOperand* op) const;
double ToDouble(LConstantOperand* op) const;
@@ -154,8 +154,7 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
-
- void DoCheckMapCommon(Register map_reg, Handle<Map> map, LEnvironment* env);
+ void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -214,7 +213,7 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
diff --git a/deps/v8/src/arm/lithium-gap-resolver-arm.cc b/deps/v8/src/arm/lithium-gap-resolver-arm.cc
index 7a3c96892c..88ac7a2a21 100644
--- a/deps/v8/src/arm/lithium-gap-resolver-arm.cc
+++ b/deps/v8/src/arm/lithium-gap-resolver-arm.cc
@@ -254,7 +254,7 @@ void LGapResolver::EmitMove(int index) {
} else {
__ LoadObject(dst, cgen_->ToHandle(constant_source));
}
- } else if (source->IsDoubleRegister()) {
+ } else if (destination->IsDoubleRegister()) {
DwVfpRegister result = cgen_->ToDoubleRegister(destination);
double v = cgen_->ToDouble(constant_source);
__ Vmov(result, v, ip);
diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc
index cd124610f9..b9728ed04f 100644
--- a/deps/v8/src/arm/macro-assembler-arm.cc
+++ b/deps/v8/src/arm/macro-assembler-arm.cc
@@ -489,7 +489,7 @@ void MacroAssembler::RecordWrite(Register object,
if (emit_debug_code()) {
ldr(ip, MemOperand(address));
cmp(ip, value);
- Check(eq, "Wrong address or value passed to RecordWrite");
+ Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
Label done;
@@ -1490,7 +1490,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// In debug mode, make sure the lexical context is set.
#ifdef DEBUG
cmp(scratch, Operand::Zero());
- Check(ne, "we should not have an empty lexical context");
+ Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
#endif
// Load the native context of the current context.
@@ -1508,7 +1508,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
LoadRoot(ip, Heap::kNativeContextMapRootIndex);
cmp(holder_reg, ip);
- Check(eq, "JSGlobalObject::native_context should be a native context.");
+ Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
pop(holder_reg); // Restore holder.
}
@@ -1525,12 +1525,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
mov(holder_reg, ip); // Move ip to its holding place.
LoadRoot(ip, Heap::kNullValueRootIndex);
cmp(holder_reg, ip);
- Check(ne, "JSGlobalProxy::context() should not be null.");
+ Check(ne, kJSGlobalProxyContextShouldNotBeNull);
ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
LoadRoot(ip, Heap::kNativeContextMapRootIndex);
cmp(holder_reg, ip);
- Check(eq, "JSGlobalObject::native_context should be a native context.");
+ Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
// Restore ip is not needed. ip is reloaded below.
pop(holder_reg); // Restore holder.
// Restore ip to holder's context.
@@ -1727,7 +1727,7 @@ void MacroAssembler::Allocate(int object_size,
// respect to register content between debug and release mode.
ldr(ip, MemOperand(topaddr));
cmp(result, ip);
- Check(eq, "Unexpected allocation top");
+ Check(eq, kUnexpectedAllocationTop);
}
// Load allocation limit into ip. Result already contains allocation top.
ldr(ip, MemOperand(topaddr, limit - top));
@@ -1825,7 +1825,7 @@ void MacroAssembler::Allocate(Register object_size,
// respect to register content between debug and release mode.
ldr(ip, MemOperand(topaddr));
cmp(result, ip);
- Check(eq, "Unexpected allocation top");
+ Check(eq, kUnexpectedAllocationTop);
}
// Load allocation limit into ip. Result already contains allocation top.
ldr(ip, MemOperand(topaddr, limit - top));
@@ -1859,7 +1859,7 @@ void MacroAssembler::Allocate(Register object_size,
// Update allocation top. result temporarily holds the new top.
if (emit_debug_code()) {
tst(scratch2, Operand(kObjectAlignmentMask));
- Check(eq, "Unaligned allocation in new space");
+ Check(eq, kUnalignedAllocationInNewSpace);
}
str(scratch2, MemOperand(topaddr));
@@ -1882,7 +1882,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object,
mov(scratch, Operand(new_space_allocation_top));
ldr(scratch, MemOperand(scratch));
cmp(object, scratch);
- Check(lt, "Undo allocation of non allocated memory");
+ Check(lt, kUndoAllocationOfNonAllocatedMemory);
#endif
// Write the address of the object to un-allocate as the current top.
mov(scratch, Operand(new_space_allocation_top));
@@ -2131,7 +2131,7 @@ void MacroAssembler::StoreNumberToDoubleElements(
if (emit_debug_code()) {
vmrs(ip);
tst(ip, Operand(kVFPDefaultNaNModeControlBit));
- Assert(ne, "Default NaN mode not set");
+ Assert(ne, kDefaultNaNModeNotSet);
}
VFPCanonicalizeNaN(double_scratch);
b(&store);
@@ -2381,7 +2381,7 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
if (emit_debug_code()) {
ldr(r1, MemOperand(r7, kLevelOffset));
cmp(r1, r6);
- Check(eq, "Unexpected level after return from api call");
+ Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
}
sub(r6, r6, Operand(1));
str(r6, MemOperand(r7, kLevelOffset));
@@ -2782,9 +2782,9 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
}
-void MacroAssembler::Assert(Condition cond, const char* msg) {
+void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
if (emit_debug_code())
- Check(cond, msg);
+ Check(cond, reason);
}
@@ -2803,23 +2803,23 @@ void MacroAssembler::AssertFastElements(Register elements) {
LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
cmp(elements, ip);
b(eq, &ok);
- Abort("JSObject with fast elements map has slow elements");
+ Abort(kJSObjectWithFastElementsMapHasSlowElements);
bind(&ok);
pop(elements);
}
}
-void MacroAssembler::Check(Condition cond, const char* msg) {
+void MacroAssembler::Check(Condition cond, BailoutReason reason) {
Label L;
b(cond, &L);
- Abort(msg);
+ Abort(reason);
// will not return here
bind(&L);
}
-void MacroAssembler::Abort(const char* msg) {
+void MacroAssembler::Abort(BailoutReason reason) {
Label abort_start;
bind(&abort_start);
// We want to pass the msg string like a smi to avoid GC
@@ -2827,6 +2827,7 @@ void MacroAssembler::Abort(const char* msg) {
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
+ const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
@@ -2969,7 +2970,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
b(&ok);
bind(&fail);
- Abort("Global functions must have initial map");
+ Abort(kGlobalFunctionsMustHaveInitialMap);
bind(&ok);
}
}
@@ -3038,7 +3039,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
- Check(ne, "Operand is a smi");
+ Check(ne, kOperandIsASmi);
}
}
@@ -3047,7 +3048,7 @@ void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
- Check(eq, "Operand is not smi");
+ Check(eq, kOperandIsNotSmi);
}
}
@@ -3056,12 +3057,12 @@ void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
- Check(ne, "Operand is a smi and not a string");
+ Check(ne, kOperandIsASmiAndNotAString);
push(object);
ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
pop(object);
- Check(lo, "Operand is not a string");
+ Check(lo, kOperandIsNotAString);
}
}
@@ -3070,12 +3071,12 @@ void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
- Check(ne, "Operand is a smi and not a name");
+ Check(ne, kOperandIsASmiAndNotAName);
push(object);
ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
CompareInstanceType(object, object, LAST_NAME_TYPE);
pop(object);
- Check(le, "Operand is not a name");
+ Check(le, kOperandIsNotAName);
}
}
@@ -3084,7 +3085,7 @@ void MacroAssembler::AssertName(Register object) {
void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
if (emit_debug_code()) {
CompareRoot(reg, index);
- Check(eq, "HeapNumberMap register clobbered.");
+ Check(eq, kHeapNumberMapRegisterClobbered);
}
}
@@ -3230,7 +3231,7 @@ void MacroAssembler::CopyBytes(Register src,
bind(&word_loop);
if (emit_debug_code()) {
tst(src, Operand(kPointerSize - 1));
- Assert(eq, "Expecting alignment for CopyBytes");
+ Assert(eq, kExpectingAlignmentForCopyBytes);
}
cmp(length, Operand(kPointerSize));
b(lt, &byte_loop);
@@ -3494,7 +3495,7 @@ void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
// Check that the instruction is a ldr reg, [pc + offset] .
and_(result, result, Operand(kLdrPCPattern));
cmp(result, Operand(kLdrPCPattern));
- Check(eq, "The instruction to patch should be a load from pc.");
+ Check(eq, kTheInstructionToPatchShouldBeALoadFromPc);
// Result was clobbered. Restore it.
ldr(result, MemOperand(ldr_location));
}
diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h
index 38308e5cde..8b9fa2b221 100644
--- a/deps/v8/src/arm/macro-assembler-arm.h
+++ b/deps/v8/src/arm/macro-assembler-arm.h
@@ -144,6 +144,8 @@ class MacroAssembler: public Assembler {
Condition cond = al);
void Call(Label* target);
+ void Push(Register src) { push(src); }
+ void Pop(Register dst) { pop(dst); }
// Register move. May do nothing if the registers are identical.
void Move(Register dst, Handle<Object> value);
@@ -1136,14 +1138,14 @@ class MacroAssembler: public Assembler {
// Calls Abort(msg) if the condition cond is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cond, const char* msg);
+ void Assert(Condition cond, BailoutReason reason);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cond, const char* msg);
+ void Check(Condition cond, BailoutReason reason);
// Print a message to stdout and abort execution.
- void Abort(const char* msg);
+ void Abort(BailoutReason msg);
// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
diff --git a/deps/v8/src/array-iterator.js b/deps/v8/src/array-iterator.js
index 8f1ab47b8a..defd7342ab 100644
--- a/deps/v8/src/array-iterator.js
+++ b/deps/v8/src/array-iterator.js
@@ -77,16 +77,15 @@ function ArrayIteratorNext() {
return CreateIteratorResultObject(void 0, true);
}
- var elementKey = ToString(index);
iterator[arrayIteratorNextIndexSymbol] = index + 1;
if (itemKind == ARRAY_ITERATOR_KIND_VALUES)
- return CreateIteratorResultObject(array[elementKey], false);
+ return CreateIteratorResultObject(array[index], false);
if (itemKind == ARRAY_ITERATOR_KIND_ENTRIES)
- return CreateIteratorResultObject([elementKey, array[elementKey]], false);
+ return CreateIteratorResultObject([index, array[index]], false);
- return CreateIteratorResultObject(elementKey, false);
+ return CreateIteratorResultObject(index, false);
}
function ArrayEntries() {
diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc
index e0bca67aab..2077f87d74 100644
--- a/deps/v8/src/ast.cc
+++ b/deps/v8/src/ast.cc
@@ -304,17 +304,6 @@ void UnaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
}
-bool UnaryOperation::ResultOverwriteAllowed() {
- switch (op_) {
- case Token::BIT_NOT:
- case Token::SUB:
- return true;
- default:
- return false;
- }
-}
-
-
void BinaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
// TODO(olivf) If this Operation is used in a test context, then the right
// hand side has a ToBoolean stub and we want to collect the type information.
diff --git a/deps/v8/src/ast.h b/deps/v8/src/ast.h
index f14156f93c..0812472d0e 100644
--- a/deps/v8/src/ast.h
+++ b/deps/v8/src/ast.h
@@ -291,7 +291,6 @@ class SmallMapList {
}
void Add(Handle<Map> handle, Zone* zone) {
- ASSERT(!handle->is_deprecated());
list_.Add(handle.location(), zone);
}
@@ -1827,8 +1826,6 @@ class UnaryOperation: public Expression {
public:
DECLARE_NODE_TYPE(UnaryOperation)
- virtual bool ResultOverwriteAllowed();
-
Token::Value op() const { return op_; }
Expression* expression() const { return expression_; }
virtual int position() const { return pos_; }
@@ -1836,8 +1833,6 @@ class UnaryOperation: public Expression {
BailoutId MaterializeTrueId() { return materialize_true_id_; }
BailoutId MaterializeFalseId() { return materialize_false_id_; }
- TypeFeedbackId UnaryOperationFeedbackId() const { return reuse(id()); }
-
virtual void RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle);
protected:
diff --git a/deps/v8/src/atomicops_internals_tsan.h b/deps/v8/src/atomicops_internals_tsan.h
index e52c26c2fe..b5162bad9f 100644
--- a/deps/v8/src/atomicops_internals_tsan.h
+++ b/deps/v8/src/atomicops_internals_tsan.h
@@ -32,6 +32,12 @@
#ifndef V8_ATOMICOPS_INTERNALS_TSAN_H_
#define V8_ATOMICOPS_INTERNALS_TSAN_H_
+namespace v8 {
+namespace internal {
+
+#ifndef TSAN_INTERFACE_ATOMIC_H
+#define TSAN_INTERFACE_ATOMIC_H
+
// This struct is not part of the public API of this module; clients may not
// use it. (However, it's exported via BASE_EXPORT because clients implicitly
// do use it at link time by inlining these functions.)
@@ -47,12 +53,6 @@ extern struct AtomicOps_x86CPUFeatureStruct
#define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")
-namespace v8 {
-namespace internal {
-
-#ifndef TSAN_INTERFACE_ATOMIC_H
-#define TSAN_INTERFACE_ATOMIC_H
-
#ifdef __cplusplus
extern "C" {
#endif
diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc
index dda5fe42a5..2a385aa486 100644
--- a/deps/v8/src/bootstrapper.cc
+++ b/deps/v8/src/bootstrapper.cc
@@ -1525,6 +1525,7 @@ bool Genesis::CompileScriptCached(Isolate* isolate,
script_name,
0,
0,
+ false,
top_context,
extension,
NULL,
@@ -2079,6 +2080,16 @@ bool Genesis::InstallExperimentalNatives() {
"native array-iterator.js") == 0) {
if (!CompileExperimentalBuiltin(isolate(), i)) return false;
}
+ if (FLAG_harmony_strings &&
+ strcmp(ExperimentalNatives::GetScriptName(i).start(),
+ "native harmony-string.js") == 0) {
+ if (!CompileExperimentalBuiltin(isolate(), i)) return false;
+ }
+ if (FLAG_harmony_arrays &&
+ strcmp(ExperimentalNatives::GetScriptName(i).start(),
+ "native harmony-array.js") == 0) {
+ if (!CompileExperimentalBuiltin(isolate(), i)) return false;
+ }
}
InstallExperimentalNativeFunctions();
diff --git a/deps/v8/src/builtins.h b/deps/v8/src/builtins.h
index 73a2e96459..bb36c0251d 100644
--- a/deps/v8/src/builtins.h
+++ b/deps/v8/src/builtins.h
@@ -259,8 +259,6 @@ enum BuiltinExtraArguments {
V(BIT_OR, 1) \
V(BIT_AND, 1) \
V(BIT_XOR, 1) \
- V(UNARY_MINUS, 0) \
- V(BIT_NOT, 0) \
V(SHL, 1) \
V(SAR, 1) \
V(SHR, 1) \
diff --git a/deps/v8/src/checks.h b/deps/v8/src/checks.h
index 1feecf3a92..b309e2c42c 100644
--- a/deps/v8/src/checks.h
+++ b/deps/v8/src/checks.h
@@ -30,8 +30,6 @@
#include <string.h>
-#include "globals.h"
-
#include "../include/v8stdint.h"
extern "C" void V8_Fatal(const char* file, int line, const char* format, ...);
@@ -234,7 +232,7 @@ inline void CheckNonEqualsHelper(const char* file,
// Use C++11 static_assert if possible, which gives error
// messages that are easier to understand on first sight.
-#if V8_CXX_STATIC_ASSERT
+#if __cplusplus >= 201103L
#define STATIC_CHECK(test) static_assert(test, #test)
#else
// This is inspired by the static assertion facility in boost. This
@@ -256,7 +254,7 @@ template <int> class StaticAssertionHelper { };
typedef \
StaticAssertionHelper<sizeof(StaticAssertion<static_cast<bool>((test))>)> \
SEMI_STATIC_JOIN(__StaticAssertTypedef__, __LINE__)
-#endif // V8_CXX_STATIC_ASSERT
+#endif
extern bool FLAG_enable_slow_asserts;
diff --git a/deps/v8/src/code-stubs-hydrogen.cc b/deps/v8/src/code-stubs-hydrogen.cc
index 651ce0a0e8..852f7b5697 100644
--- a/deps/v8/src/code-stubs-hydrogen.cc
+++ b/deps/v8/src/code-stubs-hydrogen.cc
@@ -41,13 +41,13 @@ static LChunk* OptimizeGraph(HGraph* graph) {
DisallowHandleDereference no_deref;
ASSERT(graph != NULL);
- SmartArrayPointer<char> bailout_reason;
+ BailoutReason bailout_reason = kNoReason;
if (!graph->Optimize(&bailout_reason)) {
- FATAL(bailout_reason.is_empty() ? "unknown" : *bailout_reason);
+ FATAL(GetBailoutReason(bailout_reason));
}
LChunk* chunk = LChunk::NewChunk(graph);
if (chunk == NULL) {
- FATAL(graph->info()->bailout_reason());
+ FATAL(GetBailoutReason(graph->info()->bailout_reason()));
}
return chunk;
}
@@ -136,7 +136,6 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
isolate()->GetHTracer()->TraceCompilation(&info_);
}
- Zone* zone = this->zone();
int param_count = descriptor_->register_param_count_;
HEnvironment* start_environment = graph()->start_environment();
HBasicBlock* next_block = CreateBasicBlock(start_environment);
@@ -144,15 +143,13 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
next_block->SetJoinId(BailoutId::StubEntry());
set_current_block(next_block);
- HConstant* undefined_constant = new(zone) HConstant(
- isolate()->factory()->undefined_value());
- AddInstruction(undefined_constant);
+ HConstant* undefined_constant =
+ Add<HConstant>(isolate()->factory()->undefined_value());
graph()->set_undefined_constant(undefined_constant);
for (int i = 0; i < param_count; ++i) {
HParameter* param =
- new(zone) HParameter(i, HParameter::REGISTER_PARAMETER);
- AddInstruction(param);
+ Add<HParameter>(i, HParameter::REGISTER_PARAMETER);
start_environment->Bind(i, param);
parameters_[i] = param;
}
@@ -160,9 +157,9 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
HInstruction* stack_parameter_count;
if (descriptor_->stack_parameter_count_ != NULL) {
ASSERT(descriptor_->environment_length() == (param_count + 1));
- stack_parameter_count = new(zone) HParameter(param_count,
- HParameter::REGISTER_PARAMETER,
- Representation::Integer32());
+ stack_parameter_count = New<HParameter>(param_count,
+ HParameter::REGISTER_PARAMETER,
+ Representation::Integer32());
stack_parameter_count->set_type(HType::Smi());
// It's essential to bind this value to the environment in case of deopt.
AddInstruction(stack_parameter_count);
@@ -174,7 +171,7 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
arguments_length_ = graph()->GetConstant0();
}
- context_ = new(zone) HContext();
+ context_ = New<HContext>();
AddInstruction(context_);
start_environment->BindContext(context_);
@@ -191,20 +188,18 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
if (!stack_parameter_count->IsConstant() &&
descriptor_->hint_stack_parameter_count_ < 0) {
HInstruction* amount = graph()->GetConstant1();
- stack_pop_count = AddInstruction(
- HAdd::New(zone, context_, stack_parameter_count, amount));
+ stack_pop_count = Add<HAdd>(stack_parameter_count, amount);
stack_pop_count->ChangeRepresentation(Representation::Integer32());
stack_pop_count->ClearFlag(HValue::kCanOverflow);
} else {
int count = descriptor_->hint_stack_parameter_count_;
- stack_pop_count = AddInstruction(new(zone) HConstant(count));
+ stack_pop_count = Add<HConstant>(count);
}
}
if (current_block() != NULL) {
- HReturn* hreturn_instruction = new(zone) HReturn(return_value,
- context_,
- stack_pop_count);
+ HReturn* hreturn_instruction = New<HReturn>(return_value,
+ stack_pop_count);
current_block()->Finish(hreturn_instruction);
set_current_block(NULL);
}
@@ -322,9 +317,9 @@ HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
if_number.Else();
// Convert the parameter to number using the builtin.
- HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER, context());
+ HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
Add<HPushArgument>(value);
- Push(Add<HInvokeFunction>(context(), function, 1));
+ Push(Add<HInvokeFunction>(function, 1));
if_number.End();
@@ -339,32 +334,30 @@ Handle<Code> ToNumberStub::GenerateCode() {
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
- Zone* zone = this->zone();
Factory* factory = isolate()->factory();
HValue* undefined = graph()->GetConstantUndefined();
AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
int length = casted_stub()->length();
- HInstruction* allocation_site =
- AddInstruction(new(zone) HLoadKeyed(GetParameter(0),
- GetParameter(1),
- NULL,
- FAST_ELEMENTS));
+ HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
+ GetParameter(1),
+ static_cast<HValue*>(NULL),
+ FAST_ELEMENTS);
IfBuilder checker(this);
- checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site, undefined);
+ checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
+ undefined);
checker.Then();
HObjectAccess access = HObjectAccess::ForAllocationSiteTransitionInfo();
- HInstruction* boilerplate = AddLoad(allocation_site, access);
+ HInstruction* boilerplate = Add<HLoadNamedField>(allocation_site, access);
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
HValue* elements = AddLoadElements(boilerplate);
IfBuilder if_fixed_cow(this);
if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
if_fixed_cow.Then();
- environment()->Push(BuildCloneShallowArray(context(),
- boilerplate,
+ environment()->Push(BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS,
@@ -374,23 +367,20 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
IfBuilder if_fixed(this);
if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
if_fixed.Then();
- environment()->Push(BuildCloneShallowArray(context(),
- boilerplate,
+ environment()->Push(BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS,
length));
if_fixed.Else();
- environment()->Push(BuildCloneShallowArray(context(),
- boilerplate,
+ environment()->Push(BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_DOUBLE_ELEMENTS,
length));
} else {
ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
- environment()->Push(BuildCloneShallowArray(context(),
- boilerplate,
+ environment()->Push(BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
elements_kind,
@@ -414,38 +404,33 @@ HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
Zone* zone = this->zone();
HValue* undefined = graph()->GetConstantUndefined();
- HInstruction* boilerplate =
- AddInstruction(new(zone) HLoadKeyed(GetParameter(0),
- GetParameter(1),
- NULL,
- FAST_ELEMENTS));
+ HInstruction* boilerplate = Add<HLoadKeyed>(GetParameter(0),
+ GetParameter(1),
+ static_cast<HValue*>(NULL),
+ FAST_ELEMENTS);
IfBuilder checker(this);
- checker.IfNot<HCompareObjectEqAndBranch, HValue*>(boilerplate, undefined);
+ checker.IfNot<HCompareObjectEqAndBranch, HValue*>(boilerplate,
+ undefined);
checker.And();
int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
HValue* boilerplate_size =
AddInstruction(new(zone) HInstanceSize(boilerplate));
- HValue* size_in_words =
- AddInstruction(new(zone) HConstant(size >> kPointerSizeLog2));
+ HValue* size_in_words = Add<HConstant>(size >> kPointerSizeLog2);
checker.If<HCompareNumericAndBranch>(boilerplate_size,
size_in_words, Token::EQ);
checker.Then();
- HValue* size_in_bytes = AddInstruction(new(zone) HConstant(size));
- HAllocate::Flags flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
- if (isolate()->heap()->ShouldGloballyPretenure()) {
- flags = static_cast<HAllocate::Flags>(
- flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
- }
+ HValue* size_in_bytes = Add<HConstant>(size);
- HInstruction* object = AddInstruction(new(zone)
- HAllocate(context(), size_in_bytes, HType::JSObject(), flags));
+ HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
+ isolate()->heap()->GetPretenureMode(), JS_OBJECT_TYPE);
for (int i = 0; i < size; i += kPointerSize) {
HObjectAccess access = HObjectAccess::ForJSObjectOffset(i);
- AddStore(object, access, AddLoad(boilerplate, access));
+ Add<HStoreNamedField>(object, access,
+ Add<HLoadNamedField>(boilerplate, access));
}
environment()->Push(object);
@@ -463,14 +448,9 @@ Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
- Zone* zone = this->zone();
-
- HValue* size = AddInstruction(new(zone) HConstant(AllocationSite::kSize));
- HAllocate::Flags flags = HAllocate::DefaultFlags();
- flags = static_cast<HAllocate::Flags>(
- flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
- HInstruction* object = AddInstruction(new(zone)
- HAllocate(context(), size, HType::JSObject(), flags));
+ HValue* size = Add<HConstant>(AllocationSite::kSize);
+ HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
+ JS_OBJECT_TYPE);
// Store the map
Handle<Map> allocation_site_map(isolate()->heap()->allocation_site_map(),
@@ -478,14 +458,22 @@ HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
AddStoreMapConstant(object, allocation_site_map);
// Store the payload (smi elements kind)
- HValue* initial_elements_kind = AddInstruction(new(zone) HConstant(
- GetInitialFastElementsKind()));
+ HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
Add<HStoreNamedField>(object,
HObjectAccess::ForAllocationSiteTransitionInfo(),
initial_elements_kind);
- Add<HLinkObjectInList>(object, HObjectAccess::ForAllocationSiteWeakNext(),
- HLinkObjectInList::ALLOCATION_SITE_LIST);
+ // Link the object to the allocation site list
+ HValue* site_list = Add<HConstant>(
+ ExternalReference::allocation_sites_list_address(isolate()));
+ HValue* site = Add<HLoadNamedField>(site_list,
+ HObjectAccess::ForAllocationSiteList());
+ HStoreNamedField* store =
+ Add<HStoreNamedField>(object, HObjectAccess::ForAllocationSiteWeakNext(),
+ site);
+ store->SkipWriteBarrier();
+ Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
+ object);
// We use a hammer (SkipWriteBarrier()) to indicate that we know the input
// cell is really a Cell, and so no write barrier is needed.
@@ -493,7 +481,7 @@ HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
// a cell. (perhaps with a new instruction, HAssert).
HInstruction* cell = GetParameter(0);
HObjectAccess access = HObjectAccess::ForCellValue();
- HStoreNamedField* store = AddStore(cell, access, object);
+ store = Add<HStoreNamedField>(cell, access, object);
store->SkipWriteBarrier();
return cell;
}
@@ -590,14 +578,14 @@ HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
ArgumentClass argument_class) {
HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
if (context_mode == CONTEXT_CHECK_REQUIRED) {
- HInstruction* array_function = BuildGetArrayFunction(context());
+ HInstruction* array_function = BuildGetArrayFunction();
ArrayContextChecker checker(this, constructor, array_function);
}
HValue* property_cell = GetParameter(ArrayConstructorStubBase::kPropertyCell);
// Walk through the property cell to the AllocationSite
- HValue* alloc_site = AddInstruction(new(zone()) HLoadNamedField(property_cell,
- HObjectAccess::ForCellValue()));
+ HValue* alloc_site = Add<HLoadNamedField>(property_cell,
+ HObjectAccess::ForCellValue());
JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
override_mode);
HValue* result = NULL;
@@ -645,19 +633,17 @@ HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
HValue* constant_one = graph()->GetConstant1();
HValue* constant_zero = graph()->GetConstant0();
- HInstruction* elements = AddInstruction(
- new(zone()) HArgumentsElements(false));
+ HInstruction* elements = Add<HArgumentsElements>(false);
HInstruction* argument = AddInstruction(
new(zone()) HAccessArgumentsAt(elements, constant_one, constant_zero));
HConstant* max_alloc_length =
- new(zone()) HConstant(JSObject::kInitialMaxFastElementArray);
- AddInstruction(max_alloc_length);
+ Add<HConstant>(JSObject::kInitialMaxFastElementArray);
const int initial_capacity = JSArray::kPreallocatedArrayElements;
- HConstant* initial_capacity_node = new(zone()) HConstant(initial_capacity);
+ HConstant* initial_capacity_node = New<HConstant>(initial_capacity);
AddInstruction(initial_capacity_node);
- HBoundsCheck* checked_arg = Add<HBoundsCheck>(argument, max_alloc_length);
+ HInstruction* checked_arg = Add<HBoundsCheck>(argument, max_alloc_length);
IfBuilder if_builder(this);
if_builder.If<HCompareNumericAndBranch>(checked_arg, constant_zero,
Token::EQ);
@@ -697,12 +683,11 @@ HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
LoopBuilder::kPostIncrement);
HValue* start = graph()->GetConstant0();
HValue* key = builder.BeginBody(start, length, Token::LT);
- HInstruction* argument_elements = AddInstruction(
- new(zone()) HArgumentsElements(false));
+ HInstruction* argument_elements = Add<HArgumentsElements>(false);
HInstruction* argument = AddInstruction(new(zone()) HAccessArgumentsAt(
argument_elements, length, key));
- AddInstruction(new(zone()) HStoreKeyed(elements, key, argument, kind));
+ Add<HStoreKeyed>(elements, key, argument, kind);
builder.EndBody();
return new_object;
}
@@ -817,45 +802,6 @@ Handle<Code> CompareNilICStub::GenerateCode() {
template <>
-HValue* CodeStubGraphBuilder<UnaryOpStub>::BuildCodeInitializedStub() {
- UnaryOpStub* stub = casted_stub();
- Handle<Type> type = stub->GetType(graph()->isolate());
- HValue* input = GetParameter(0);
-
- // Prevent unwanted HChange being inserted to ensure that the stub
- // deopts on newly encountered types.
- if (!type->Maybe(Type::Double())) {
- input = AddInstruction(new(zone())
- HForceRepresentation(input, Representation::Smi()));
- }
-
- if (!type->Is(Type::Number())) {
- // If we expect to see other things than Numbers, we will create a generic
- // stub, which handles all numbers and calls into the runtime for the rest.
- IfBuilder if_number(this);
- if_number.If<HIsNumberAndBranch>(input);
- if_number.Then();
- HInstruction* res = BuildUnaryMathOp(input, type, stub->operation());
- if_number.Return(AddInstruction(res));
- if_number.Else();
- HValue* function = AddLoadJSBuiltin(stub->ToJSBuiltin(), context());
- Add<HPushArgument>(GetParameter(0));
- HValue* result = Add<HInvokeFunction>(context(), function, 1);
- if_number.Return(result);
- if_number.End();
- return graph()->GetConstantUndefined();
- }
-
- return AddInstruction(BuildUnaryMathOp(input, type, stub->operation()));
-}
-
-
-Handle<Code> UnaryOpStub::GenerateCode() {
- return DoGenerateCode(this);
-}
-
-
-template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
ToBooleanStub* stub = casted_stub();
@@ -888,10 +834,9 @@ HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
// Check that the map of the global has not changed: use a placeholder map
// that will be replaced later with the global object's map.
Handle<Map> placeholder_map = isolate()->factory()->meta_map();
- AddInstruction(HCheckMaps::New(
- receiver, placeholder_map, zone(), top_info()));
+ Add<HCheckMaps>(receiver, placeholder_map, top_info());
- HValue* cell = Add<HConstant>(placeholder_cell, Representation::Tagged());
+ HValue* cell = Add<HConstant>(placeholder_cell);
HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
HValue* cell_contents = Add<HLoadNamedField>(cell, access);
@@ -906,7 +851,7 @@ HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
// property has been deleted and that the store must be handled by the
// runtime.
IfBuilder builder(this);
- HValue* hole_value = Add<HConstant>(hole, Representation::Tagged());
+ HValue* hole_value = Add<HConstant>(hole);
builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
builder.Then();
builder.Deopt();
diff --git a/deps/v8/src/code-stubs.cc b/deps/v8/src/code-stubs.cc
index 5f6616ea07..d472fa287f 100644
--- a/deps/v8/src/code-stubs.cc
+++ b/deps/v8/src/code-stubs.cc
@@ -204,71 +204,6 @@ void CodeStub::PrintName(StringStream* stream) {
}
-Builtins::JavaScript UnaryOpStub::ToJSBuiltin() {
- switch (operation_) {
- default:
- UNREACHABLE();
- case Token::SUB:
- return Builtins::UNARY_MINUS;
- case Token::BIT_NOT:
- return Builtins::BIT_NOT;
- }
-}
-
-
-Handle<JSFunction> UnaryOpStub::ToJSFunction(Isolate* isolate) {
- Handle<JSBuiltinsObject> builtins(isolate->js_builtins_object());
- Object* builtin = builtins->javascript_builtin(ToJSBuiltin());
- return Handle<JSFunction>(JSFunction::cast(builtin), isolate);
-}
-
-
-MaybeObject* UnaryOpStub::Result(Handle<Object> object, Isolate* isolate) {
- Handle<JSFunction> builtin_function = ToJSFunction(isolate);
- bool caught_exception;
- Handle<Object> result = Execution::Call(builtin_function, object,
- 0, NULL, &caught_exception);
- if (caught_exception) {
- return Failure::Exception();
- }
- return *result;
-}
-
-
-void UnaryOpStub::UpdateStatus(Handle<Object> object) {
- State old_state(state_);
- if (object->IsSmi()) {
- state_.Add(SMI);
- if (operation_ == Token::SUB && *object == 0) {
- // The result (-0) has to be represented as double.
- state_.Add(HEAP_NUMBER);
- }
- } else if (object->IsHeapNumber()) {
- state_.Add(HEAP_NUMBER);
- } else {
- state_.Add(GENERIC);
- }
- TraceTransition(old_state, state_);
-}
-
-
-Handle<Type> UnaryOpStub::GetType(Isolate* isolate) {
- if (state_.Contains(GENERIC)) {
- return handle(Type::Any(), isolate);
- }
- Handle<Type> type = handle(Type::None(), isolate);
- if (state_.Contains(SMI)) {
- type = handle(
- Type::Union(type, handle(Type::Smi(), isolate)), isolate);
- }
- if (state_.Contains(HEAP_NUMBER)) {
- type = handle(
- Type::Union(type, handle(Type::Double(), isolate)), isolate);
- }
- return type;
-}
-
-
void BinaryOpStub::Generate(MacroAssembler* masm) {
// Explicitly allow generation of nested stubs. It is safe here because
// generation code does not use any raw pointers.
@@ -354,29 +289,6 @@ void BinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) {
#undef __
-void UnaryOpStub::PrintBaseName(StringStream* stream) {
- CodeStub::PrintBaseName(stream);
- if (operation_ == Token::SUB) stream->Add("Minus");
- if (operation_ == Token::BIT_NOT) stream->Add("Not");
-}
-
-
-void UnaryOpStub::PrintState(StringStream* stream) {
- state_.Print(stream);
-}
-
-
-void UnaryOpStub::State::Print(StringStream* stream) const {
- stream->Add("(");
- SimpleListPrinter printer(stream);
- if (IsEmpty()) printer.Add("None");
- if (Contains(GENERIC)) printer.Add("Generic");
- if (Contains(HEAP_NUMBER)) printer.Add("HeapNumber");
- if (Contains(SMI)) printer.Add("Smi");
- stream->Add(")");
-}
-
-
void BinaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name;
diff --git a/deps/v8/src/code-stubs.h b/deps/v8/src/code-stubs.h
index 84d9b023b3..c58acd6b16 100644
--- a/deps/v8/src/code-stubs.h
+++ b/deps/v8/src/code-stubs.h
@@ -40,7 +40,6 @@ namespace internal {
#define CODE_STUB_LIST_ALL_PLATFORMS(V) \
V(CallFunction) \
V(CallConstruct) \
- V(UnaryOp) \
V(BinaryOp) \
V(StringAdd) \
V(SubString) \
@@ -593,73 +592,6 @@ class StoreGlobalStub : public HydrogenCodeStub {
};
-class UnaryOpStub : public HydrogenCodeStub {
- public:
- // Stub without type info available -> construct uninitialized
- explicit UnaryOpStub(Token::Value operation)
- : HydrogenCodeStub(UNINITIALIZED), operation_(operation) { }
- explicit UnaryOpStub(Code::ExtraICState ic_state) :
- state_(StateBits::decode(ic_state)),
- operation_(OperatorBits::decode(ic_state)) { }
-
- virtual void InitializeInterfaceDescriptor(
- Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor);
-
- virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; }
- virtual InlineCacheState GetICState() {
- if (state_.Contains(GENERIC)) {
- return MEGAMORPHIC;
- } else if (state_.IsEmpty()) {
- return PREMONOMORPHIC;
- } else {
- return MONOMORPHIC;
- }
- }
- virtual Code::ExtraICState GetExtraICState() {
- return OperatorBits::encode(operation_) |
- StateBits::encode(state_.ToIntegral());
- }
-
- Token::Value operation() { return operation_; }
- Handle<JSFunction> ToJSFunction(Isolate* isolate);
- Builtins::JavaScript ToJSBuiltin();
-
- void UpdateStatus(Handle<Object> object);
- MaybeObject* Result(Handle<Object> object, Isolate* isolate);
- Handle<Code> GenerateCode();
- Handle<Type> GetType(Isolate* isolate);
-
- protected:
- void PrintState(StringStream* stream);
- void PrintBaseName(StringStream* stream);
-
- private:
- enum UnaryOpType {
- SMI,
- HEAP_NUMBER,
- GENERIC,
- NUMBER_OF_TYPES
- };
-
- class State : public EnumSet<UnaryOpType, byte> {
- public:
- State() : EnumSet<UnaryOpType, byte>() { }
- explicit State(byte bits) : EnumSet<UnaryOpType, byte>(bits) { }
- void Print(StringStream* stream) const;
- };
-
- class StateBits : public BitField<int, 0, NUMBER_OF_TYPES> { };
- class OperatorBits : public BitField<Token::Value, NUMBER_OF_TYPES, 8> { };
-
- State state_;
- Token::Value operation_;
-
- virtual CodeStub::Major MajorKey() { return UnaryOp; }
- virtual int NotMissMinorKey() { return GetExtraICState(); }
-};
-
-
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
// Maximum length of copied elements array.
diff --git a/deps/v8/src/collection.js b/deps/v8/src/collection.js
index 63ddbbb966..01537e87b0 100644
--- a/deps/v8/src/collection.js
+++ b/deps/v8/src/collection.js
@@ -47,7 +47,7 @@ function SetConstructor() {
if (%_IsConstructCall()) {
%SetInitialize(this);
} else {
- return new $Set();
+ throw MakeTypeError('constructor_not_function', ['Set']);
}
}
@@ -141,7 +141,7 @@ function MapConstructor() {
if (%_IsConstructCall()) {
%MapInitialize(this);
} else {
- return new $Map();
+ throw MakeTypeError('constructor_not_function', ['Map']);
}
}
@@ -243,7 +243,7 @@ function WeakMapConstructor() {
if (%_IsConstructCall()) {
%WeakCollectionInitialize(this);
} else {
- return new $WeakMap();
+ throw MakeTypeError('constructor_not_function', ['WeakMap']);
}
}
@@ -335,7 +335,7 @@ function WeakSetConstructor() {
if (%_IsConstructCall()) {
%WeakCollectionInitialize(this);
} else {
- return new $WeakSet();
+ throw MakeTypeError('constructor_not_function', ['WeakSet']);
}
}
diff --git a/deps/v8/src/compilation-cache.cc b/deps/v8/src/compilation-cache.cc
index 18c82e95fd..fffe5da71d 100644
--- a/deps/v8/src/compilation-cache.cc
+++ b/deps/v8/src/compilation-cache.cc
@@ -144,7 +144,8 @@ bool CompilationCacheScript::HasOrigin(
Handle<SharedFunctionInfo> function_info,
Handle<Object> name,
int line_offset,
- int column_offset) {
+ int column_offset,
+ bool is_shared_cross_origin) {
Handle<Script> script =
Handle<Script>(Script::cast(function_info->script()), isolate());
// If the script name isn't set, the boilerplate script should have
@@ -157,6 +158,8 @@ bool CompilationCacheScript::HasOrigin(
if (column_offset != script->column_offset()->value()) return false;
// Check that both names are strings. If not, no match.
if (!name->IsString() || !script->name()->IsString()) return false;
+ // Were both scripts tagged by the embedder as being shared cross-origin?
+ if (is_shared_cross_origin != script->is_shared_cross_origin()) return false;
// Compare the two name strings for equality.
return String::cast(*name)->Equals(String::cast(script->name()));
}
@@ -171,6 +174,7 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(
Handle<Object> name,
int line_offset,
int column_offset,
+ bool is_shared_cross_origin,
Handle<Context> context) {
Object* result = NULL;
int generation;
@@ -186,7 +190,11 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(
Handle<SharedFunctionInfo>::cast(probe);
// Break when we've found a suitable shared function info that
// matches the origin.
- if (HasOrigin(function_info, name, line_offset, column_offset)) {
+ if (HasOrigin(function_info,
+ name,
+ line_offset,
+ column_offset,
+ is_shared_cross_origin)) {
result = *function_info;
break;
}
@@ -214,7 +222,11 @@ Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(
if (result != NULL) {
Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(result),
isolate());
- ASSERT(HasOrigin(shared, name, line_offset, column_offset));
+ ASSERT(HasOrigin(shared,
+ name,
+ line_offset,
+ column_offset,
+ is_shared_cross_origin));
// If the script was found in a later generation, we promote it to
// the first generation to let it survive longer in the cache.
if (generation != 0) Put(source, context, shared);
@@ -391,12 +403,18 @@ Handle<SharedFunctionInfo> CompilationCache::LookupScript(
Handle<Object> name,
int line_offset,
int column_offset,
+ bool is_shared_cross_origin,
Handle<Context> context) {
if (!IsEnabled()) {
return Handle<SharedFunctionInfo>::null();
}
- return script_.Lookup(source, name, line_offset, column_offset, context);
+ return script_.Lookup(source,
+ name,
+ line_offset,
+ column_offset,
+ is_shared_cross_origin,
+ context);
}
diff --git a/deps/v8/src/compilation-cache.h b/deps/v8/src/compilation-cache.h
index 7a236e8fbf..414e09e655 100644
--- a/deps/v8/src/compilation-cache.h
+++ b/deps/v8/src/compilation-cache.h
@@ -99,6 +99,7 @@ class CompilationCacheScript : public CompilationSubCache {
Handle<Object> name,
int line_offset,
int column_offset,
+ bool is_shared_cross_origin,
Handle<Context> context);
void Put(Handle<String> source,
Handle<Context> context,
@@ -119,7 +120,8 @@ class CompilationCacheScript : public CompilationSubCache {
bool HasOrigin(Handle<SharedFunctionInfo> function_info,
Handle<Object> name,
int line_offset,
- int column_offset);
+ int column_offset,
+ bool is_shared_cross_origin);
void* script_histogram_;
bool script_histogram_initialized_;
@@ -212,6 +214,7 @@ class CompilationCache {
Handle<Object> name,
int line_offset,
int column_offset,
+ bool is_shared_cross_origin,
Handle<Context> context);
// Finds the shared function info for a source string for eval in a
diff --git a/deps/v8/src/compiler.cc b/deps/v8/src/compiler.cc
index 4cac73f7b6..ebd4995871 100644
--- a/deps/v8/src/compiler.cc
+++ b/deps/v8/src/compiler.cc
@@ -127,7 +127,7 @@ void CompilationInfo::Initialize(Isolate* isolate,
ASSERT(language_mode() == CLASSIC_MODE);
SetLanguageMode(shared_info_->language_mode());
}
- set_bailout_reason("unknown");
+ set_bailout_reason(kUnknown);
}
@@ -342,7 +342,7 @@ OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
const int kMaxOptCount =
FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
if (info()->opt_count() > kMaxOptCount) {
- info()->set_bailout_reason("optimized too many times");
+ info()->set_bailout_reason(kOptimizedTooManyTimes);
return AbortOptimization();
}
@@ -356,14 +356,14 @@ OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
Scope* scope = info()->scope();
if ((scope->num_parameters() + 1) > parameter_limit) {
- info()->set_bailout_reason("too many parameters");
+ info()->set_bailout_reason(kTooManyParameters);
return AbortOptimization();
}
const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
if (!info()->osr_ast_id().IsNone() &&
scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
- info()->set_bailout_reason("too many parameters/locals");
+ info()->set_bailout_reason(kTooManyParametersLocals);
return AbortOptimization();
}
@@ -458,9 +458,9 @@ OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() {
ASSERT(last_status() == SUCCEEDED);
Timer t(this, &time_taken_to_optimize_);
ASSERT(graph_ != NULL);
- SmartArrayPointer<char> bailout_reason;
+ BailoutReason bailout_reason = kNoReason;
if (!graph_->Optimize(&bailout_reason)) {
- if (!bailout_reason.is_empty()) graph_builder_->Bailout(*bailout_reason);
+ if (bailout_reason == kNoReason) graph_builder_->Bailout(bailout_reason);
return SetLastStatus(BAILED_OUT);
} else {
chunk_ = LChunk::NewChunk(graph_);
@@ -485,7 +485,9 @@ OptimizingCompiler::Status OptimizingCompiler::GenerateAndInstallCode() {
DisallowDeferredHandleDereference no_deferred_handle_deref;
Handle<Code> optimized_code = chunk_->Codegen();
if (optimized_code.is_null()) {
- info()->set_bailout_reason("code generation failed");
+ if (info()->bailout_reason() != kNoReason) {
+ info()->set_bailout_reason(kCodeGenerationFailed);
+ }
return AbortOptimization();
}
info()->SetCode(optimized_code);
@@ -667,6 +669,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
Handle<Object> script_name,
int line_offset,
int column_offset,
+ bool is_shared_cross_origin,
Handle<Context> context,
v8::Extension* extension,
ScriptDataImpl* pre_data,
@@ -689,6 +692,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
script_name,
line_offset,
column_offset,
+ is_shared_cross_origin,
context);
}
@@ -712,6 +716,7 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
script->set_line_offset(Smi::FromInt(line_offset));
script->set_column_offset(Smi::FromInt(column_offset));
}
+ script->set_is_shared_cross_origin(is_shared_cross_origin);
script->set_data(script_data.is_null() ? HEAP->undefined_value()
: *script_data);
@@ -777,7 +782,7 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
if (!result.is_null()) {
// Explicitly disable optimization for eval code. We're not yet prepared
// to handle eval-code in the optimizing compiler.
- result->DisableOptimization("eval");
+ result->DisableOptimization(kEval);
// If caller is strict mode, the result must be in strict mode or
// extended mode as well, but not the other way around. Consider:
@@ -1052,13 +1057,13 @@ void Compiler::InstallOptimizedCode(OptimizingCompiler* optimizing_compiler) {
// the unoptimized code.
OptimizingCompiler::Status status = optimizing_compiler->last_status();
if (info->HasAbortedDueToDependencyChange()) {
- info->set_bailout_reason("bailed out due to dependent map");
+ info->set_bailout_reason(kBailedOutDueToDependentMap);
status = optimizing_compiler->AbortOptimization();
} else if (status != OptimizingCompiler::SUCCEEDED) {
- info->set_bailout_reason("failed/bailed out last time");
+ info->set_bailout_reason(kFailedBailedOutLastTime);
status = optimizing_compiler->AbortOptimization();
} else if (isolate->DebuggerHasBreakPoints()) {
- info->set_bailout_reason("debugger is active");
+ info->set_bailout_reason(kDebuggerIsActive);
status = optimizing_compiler->AbortOptimization();
} else {
status = optimizing_compiler->GenerateAndInstallCode();
diff --git a/deps/v8/src/compiler.h b/deps/v8/src/compiler.h
index 332d575dc2..50053e5625 100644
--- a/deps/v8/src/compiler.h
+++ b/deps/v8/src/compiler.h
@@ -258,8 +258,8 @@ class CompilationInfo {
SaveHandle(&script_);
}
- const char* bailout_reason() const { return bailout_reason_; }
- void set_bailout_reason(const char* reason) { bailout_reason_ = reason; }
+ BailoutReason bailout_reason() const { return bailout_reason_; }
+ void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }
int prologue_offset() const {
ASSERT_NE(kPrologueOffsetNotSet, prologue_offset_);
@@ -412,7 +412,7 @@ class CompilationInfo {
}
}
- const char* bailout_reason_;
+ BailoutReason bailout_reason_;
int prologue_offset_;
@@ -572,6 +572,7 @@ class Compiler : public AllStatic {
Handle<Object> script_name,
int line_offset,
int column_offset,
+ bool is_shared_cross_origin,
Handle<Context> context,
v8::Extension* extension,
ScriptDataImpl* pre_data,
diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc
index 1efe2ae0b4..c7b66c2a15 100644
--- a/deps/v8/src/d8.cc
+++ b/deps/v8/src/d8.cc
@@ -1406,6 +1406,14 @@ bool Shell::SetOptions(int argc, char* argv[]) {
#else
options.num_parallel_files++;
#endif // V8_SHARED
+ } else if (strcmp(argv[i], "--dump-heap-constants") == 0) {
+#ifdef V8_SHARED
+ printf("D8 with shared library does not support constant dumping\n");
+ return false;
+#else
+ options.dump_heap_constants = true;
+ argv[i] = NULL;
+#endif
}
#ifdef V8_SHARED
else if (strcmp(argv[i], "--dump-counters") == 0) {
@@ -1560,10 +1568,80 @@ static void SetStandaloneFlagsViaCommandLine() {
#endif
+#ifndef V8_SHARED
+static void DumpHeapConstants(i::Isolate* isolate) {
+ i::Heap* heap = isolate->heap();
+
+ // Dump the INSTANCE_TYPES table to the console.
+ printf("# List of known V8 instance types.\n");
+#define DUMP_TYPE(T) printf(" %d: \"%s\",\n", i::T, #T);
+ printf("INSTANCE_TYPES = {\n");
+ INSTANCE_TYPE_LIST(DUMP_TYPE)
+ printf("}\n");
+#undef DUMP_TYPE
+
+ // Dump the KNOWN_MAP table to the console.
+ printf("\n# List of known V8 maps.\n");
+#define ROOT_LIST_CASE(type, name, camel_name) \
+ if (n == NULL && o == heap->name()) n = #camel_name;
+#define STRUCT_LIST_CASE(upper_name, camel_name, name) \
+ if (n == NULL && o == heap->name##_map()) n = #camel_name "Map";
+ i::HeapObjectIterator it(heap->map_space());
+ printf("KNOWN_MAPS = {\n");
+ for (i::Object* o = it.Next(); o != NULL; o = it.Next()) {
+ i::Map* m = i::Map::cast(o);
+ const char* n = NULL;
+ intptr_t p = reinterpret_cast<intptr_t>(m) & 0xfffff;
+ int t = m->instance_type();
+ ROOT_LIST(ROOT_LIST_CASE)
+ STRUCT_LIST(STRUCT_LIST_CASE)
+ if (n == NULL) continue;
+ printf(" 0x%05" V8PRIxPTR ": (%d, \"%s\"),\n", p, t, n);
+ }
+ printf("}\n");
+#undef STRUCT_LIST_CASE
+#undef ROOT_LIST_CASE
+
+ // Dump the KNOWN_OBJECTS table to the console.
+ printf("\n# List of known V8 objects.\n");
+#define ROOT_LIST_CASE(type, name, camel_name) \
+ if (n == NULL && o == heap->name()) n = #camel_name;
+ i::OldSpaces spit(heap);
+ printf("KNOWN_OBJECTS = {\n");
+ for (i::PagedSpace* s = spit.next(); s != NULL; s = spit.next()) {
+ i::HeapObjectIterator it(s);
+ const char* sname = AllocationSpaceName(s->identity());
+ for (i::Object* o = it.Next(); o != NULL; o = it.Next()) {
+ const char* n = NULL;
+ intptr_t p = reinterpret_cast<intptr_t>(o) & 0xfffff;
+ ROOT_LIST(ROOT_LIST_CASE)
+ if (n == NULL) continue;
+ printf(" (\"%s\", 0x%05" V8PRIxPTR "): \"%s\",\n", sname, p, n);
+ }
+ }
+ printf("}\n");
+#undef ROOT_LIST_CASE
+}
+#endif // V8_SHARED
+
+
class ShellArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
public:
- virtual void* Allocate(size_t length) { return malloc(length); }
- virtual void Free(void* data) { free(data); }
+ virtual void* Allocate(size_t length) {
+ void* result = malloc(length);
+ memset(result, 0, length);
+ return result;
+ }
+ virtual void* AllocateUninitialized(size_t length) {
+ return malloc(length);
+ }
+ virtual void Free(void* data, size_t) { free(data); }
+ // TODO(dslomov): Remove when v8:2823 is fixed.
+ virtual void Free(void* data) {
+#ifndef V8_SHARED
+ UNREACHABLE();
+#endif
+ }
};
@@ -1590,6 +1668,13 @@ int Shell::Main(int argc, char* argv[]) {
PerIsolateData data(isolate);
InitializeDebugger(isolate);
+#ifndef V8_SHARED
+ if (options.dump_heap_constants) {
+ DumpHeapConstants(reinterpret_cast<i::Isolate*>(isolate));
+ return 0;
+ }
+#endif
+
if (options.stress_opt || options.stress_deopt) {
Testing::SetStressRunType(options.stress_opt
? Testing::kStressTypeOpt
diff --git a/deps/v8/src/d8.h b/deps/v8/src/d8.h
index 4f04342cf4..3b06985ca2 100644
--- a/deps/v8/src/d8.h
+++ b/deps/v8/src/d8.h
@@ -231,6 +231,7 @@ class ShellOptions {
stress_deopt(false),
interactive_shell(false),
test_shell(false),
+ dump_heap_constants(false),
num_isolates(1),
isolate_sources(NULL) { }
@@ -254,6 +255,7 @@ class ShellOptions {
bool stress_deopt;
bool interactive_shell;
bool test_shell;
+ bool dump_heap_constants;
int num_isolates;
SourceGroup* isolate_sources;
};
diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc
index 04f8a7a027..a349502343 100644
--- a/deps/v8/src/debug.cc
+++ b/deps/v8/src/debug.cc
@@ -159,7 +159,6 @@ void BreakLocationIterator::Next() {
Code* code = Code::GetCodeFromTargetAddress(target);
if ((code->is_inline_cache_stub() &&
!code->is_binary_op_stub() &&
- !code->is_unary_op_stub() &&
!code->is_compare_ic_stub() &&
!code->is_to_boolean_ic_stub()) ||
RelocInfo::IsConstructCall(rmode())) {
@@ -786,6 +785,7 @@ bool Debug::CompileDebuggerScript(int index) {
function_info = Compiler::Compile(source_code,
script_name,
0, 0,
+ false,
context,
NULL, NULL,
Handle<String>::null(),
diff --git a/deps/v8/src/deoptimizer.cc b/deps/v8/src/deoptimizer.cc
index 53b9b76377..50d6f0b399 100644
--- a/deps/v8/src/deoptimizer.cc
+++ b/deps/v8/src/deoptimizer.cc
@@ -2426,25 +2426,19 @@ void Deoptimizer::PatchInterruptCode(Code* unoptimized_code,
Code* replacement_code) {
// Iterate over the back edge table and patch every interrupt
// call to an unconditional call to the replacement code.
- ASSERT(unoptimized_code->kind() == Code::FUNCTION);
int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
- Address back_edge_cursor = unoptimized_code->instruction_start() +
- unoptimized_code->back_edge_table_offset();
- uint32_t table_length = Memory::uint32_at(back_edge_cursor);
- back_edge_cursor += kIntSize;
- for (uint32_t i = 0; i < table_length; ++i) {
- uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
- if (static_cast<int>(loop_depth) == loop_nesting_level) {
- // Loop back edge has the loop depth that we want to patch.
- uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
- Address pc_after = unoptimized_code->instruction_start() + pc_offset;
+
+ for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
+ !back_edges.Done();
+ back_edges.Next()) {
+ if (static_cast<int>(back_edges.loop_depth()) == loop_nesting_level) {
PatchInterruptCodeAt(unoptimized_code,
- pc_after,
+ back_edges.pc(),
interrupt_code,
replacement_code);
}
- back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
+
unoptimized_code->set_back_edges_patched_for_osr(true);
#ifdef DEBUG
Deoptimizer::VerifyInterruptCode(
@@ -2457,25 +2451,20 @@ void Deoptimizer::RevertInterruptCode(Code* unoptimized_code,
Code* interrupt_code,
Code* replacement_code) {
// Iterate over the back edge table and revert the patched interrupt calls.
- ASSERT(unoptimized_code->kind() == Code::FUNCTION);
ASSERT(unoptimized_code->back_edges_patched_for_osr());
int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
- Address back_edge_cursor = unoptimized_code->instruction_start() +
- unoptimized_code->back_edge_table_offset();
- uint32_t table_length = Memory::uint32_at(back_edge_cursor);
- back_edge_cursor += kIntSize;
- for (uint32_t i = 0; i < table_length; ++i) {
- uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
- if (static_cast<int>(loop_depth) <= loop_nesting_level) {
- uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
- Address pc_after = unoptimized_code->instruction_start() + pc_offset;
+
+ for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
+ !back_edges.Done();
+ back_edges.Next()) {
+ if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) {
RevertInterruptCodeAt(unoptimized_code,
- pc_after,
+ back_edges.pc(),
interrupt_code,
replacement_code);
}
- back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
+
unoptimized_code->set_back_edges_patched_for_osr(false);
unoptimized_code->set_allow_osr_at_loop_nesting_level(0);
#ifdef DEBUG
@@ -2491,24 +2480,18 @@ void Deoptimizer::VerifyInterruptCode(Code* unoptimized_code,
Code* interrupt_code,
Code* replacement_code,
int loop_nesting_level) {
- CHECK(unoptimized_code->kind() == Code::FUNCTION);
- Address back_edge_cursor = unoptimized_code->instruction_start() +
- unoptimized_code->back_edge_table_offset();
- uint32_t table_length = Memory::uint32_at(back_edge_cursor);
- back_edge_cursor += kIntSize;
- for (uint32_t i = 0; i < table_length; ++i) {
- uint32_t loop_depth = Memory::uint32_at(back_edge_cursor + 2 * kIntSize);
+ for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
+ !back_edges.Done();
+ back_edges.Next()) {
+ uint32_t loop_depth = back_edges.loop_depth();
CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
// Assert that all back edges for shallower loops (and only those)
// have already been patched.
- uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
- Address pc_after = unoptimized_code->instruction_start() + pc_offset;
CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
InterruptCodeIsPatched(unoptimized_code,
- pc_after,
+ back_edges.pc(),
interrupt_code,
replacement_code));
- back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
}
#endif // DEBUG
diff --git a/deps/v8/src/extensions/i18n/date-format.cc b/deps/v8/src/extensions/i18n/date-format.cc
deleted file mode 100644
index 1058e37a58..0000000000
--- a/deps/v8/src/extensions/i18n/date-format.cc
+++ /dev/null
@@ -1,329 +0,0 @@
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// limitations under the License.
-
-#include "date-format.h"
-
-#include <string.h>
-
-#include "i18n-utils.h"
-#include "unicode/calendar.h"
-#include "unicode/dtfmtsym.h"
-#include "unicode/dtptngen.h"
-#include "unicode/locid.h"
-#include "unicode/numsys.h"
-#include "unicode/smpdtfmt.h"
-#include "unicode/timezone.h"
-
-namespace v8_i18n {
-
-static icu::SimpleDateFormat* InitializeDateTimeFormat(v8::Handle<v8::String>,
- v8::Handle<v8::Object>,
- v8::Handle<v8::Object>);
-static icu::SimpleDateFormat* CreateICUDateFormat(const icu::Locale&,
- v8::Handle<v8::Object>);
-static void SetResolvedSettings(const icu::Locale&,
- icu::SimpleDateFormat*,
- v8::Handle<v8::Object>);
-
-icu::SimpleDateFormat* DateFormat::UnpackDateFormat(
- v8::Handle<v8::Object> obj) {
- v8::HandleScope handle_scope;
-
- if (obj->HasOwnProperty(v8::String::New("dateFormat"))) {
- return static_cast<icu::SimpleDateFormat*>(
- obj->GetAlignedPointerFromInternalField(0));
- }
-
- return NULL;
-}
-
-void DateFormat::DeleteDateFormat(v8::Isolate* isolate,
- v8::Persistent<v8::Object>* object,
- void* param) {
- // First delete the hidden C++ object.
- // Unpacking should never return NULL here. That would only happen if
- // this method is used as the weak callback for persistent handles not
- // pointing to a date time formatter.
- v8::HandleScope handle_scope(isolate);
- v8::Local<v8::Object> handle = v8::Local<v8::Object>::New(isolate, *object);
- delete UnpackDateFormat(handle);
-
- // Then dispose of the persistent handle to JS object.
- object->Dispose(isolate);
-}
-
-void DateFormat::JSInternalFormat(
- const v8::FunctionCallbackInfo<v8::Value>& args) {
- double millis = 0.0;
- if (args.Length() != 2 || !args[0]->IsObject() || !args[1]->IsDate()) {
- v8::ThrowException(v8::Exception::Error(
- v8::String::New(
- "Internal error. Formatter and date value have to be specified.")));
- return;
- } else {
- millis = v8::Date::Cast(*args[1])->NumberValue();
- }
-
- icu::SimpleDateFormat* date_format = UnpackDateFormat(args[0]->ToObject());
- if (!date_format) {
- v8::ThrowException(v8::Exception::Error(
- v8::String::New("DateTimeFormat method called on an object "
- "that is not a DateTimeFormat.")));
- return;
- }
-
- icu::UnicodeString result;
- date_format->format(millis, result);
-
- args.GetReturnValue().Set(v8::String::New(
- reinterpret_cast<const uint16_t*>(result.getBuffer()), result.length()));
-}
-
-void DateFormat::JSInternalParse(
- const v8::FunctionCallbackInfo<v8::Value>& args) {
- icu::UnicodeString string_date;
- if (args.Length() != 2 || !args[0]->IsObject() || !args[1]->IsString()) {
- v8::ThrowException(v8::Exception::Error(
- v8::String::New(
- "Internal error. Formatter and string have to be specified.")));
- return;
- } else {
- if (!Utils::V8StringToUnicodeString(args[1], &string_date)) {
- string_date = "";
- }
- }
-
- icu::SimpleDateFormat* date_format = UnpackDateFormat(args[0]->ToObject());
- if (!date_format) {
- v8::ThrowException(v8::Exception::Error(
- v8::String::New("DateTimeFormat method called on an object "
- "that is not a DateTimeFormat.")));
- return;
- }
-
- UErrorCode status = U_ZERO_ERROR;
- UDate date = date_format->parse(string_date, status);
- if (U_FAILURE(status)) {
- return;
- }
-
- args.GetReturnValue().Set(v8::Date::New(static_cast<double>(date)));
-}
-
-void DateFormat::JSCreateDateTimeFormat(
- const v8::FunctionCallbackInfo<v8::Value>& args) {
- if (args.Length() != 3 ||
- !args[0]->IsString() ||
- !args[1]->IsObject() ||
- !args[2]->IsObject()) {
- v8::ThrowException(v8::Exception::Error(
- v8::String::New("Internal error, wrong parameters.")));
- return;
- }
-
- v8::Isolate* isolate = args.GetIsolate();
- v8::Local<v8::ObjectTemplate> date_format_template =
- Utils::GetTemplate(isolate);
-
- // Create an empty object wrapper.
- v8::Local<v8::Object> local_object = date_format_template->NewInstance();
- // But the handle shouldn't be empty.
- // That can happen if there was a stack overflow when creating the object.
- if (local_object.IsEmpty()) {
- args.GetReturnValue().Set(local_object);
- return;
- }
-
- // Set date time formatter as internal field of the resulting JS object.
- icu::SimpleDateFormat* date_format = InitializeDateTimeFormat(
- args[0]->ToString(), args[1]->ToObject(), args[2]->ToObject());
-
- if (!date_format) {
- v8::ThrowException(v8::Exception::Error(v8::String::New(
- "Internal error. Couldn't create ICU date time formatter.")));
- return;
- } else {
- local_object->SetAlignedPointerInInternalField(0, date_format);
-
- v8::TryCatch try_catch;
- local_object->Set(v8::String::New("dateFormat"), v8::String::New("valid"));
- if (try_catch.HasCaught()) {
- v8::ThrowException(v8::Exception::Error(
- v8::String::New("Internal error, couldn't set property.")));
- return;
- }
- }
-
- v8::Persistent<v8::Object> wrapper(isolate, local_object);
- // Make object handle weak so we can delete iterator once GC kicks in.
- wrapper.MakeWeak<void>(NULL, &DeleteDateFormat);
- args.GetReturnValue().Set(wrapper);
- wrapper.ClearAndLeak();
-}
-
-static icu::SimpleDateFormat* InitializeDateTimeFormat(
- v8::Handle<v8::String> locale,
- v8::Handle<v8::Object> options,
- v8::Handle<v8::Object> resolved) {
- // Convert BCP47 into ICU locale format.
- UErrorCode status = U_ZERO_ERROR;
- icu::Locale icu_locale;
- char icu_result[ULOC_FULLNAME_CAPACITY];
- int icu_length = 0;
- v8::String::AsciiValue bcp47_locale(locale);
- if (bcp47_locale.length() != 0) {
- uloc_forLanguageTag(*bcp47_locale, icu_result, ULOC_FULLNAME_CAPACITY,
- &icu_length, &status);
- if (U_FAILURE(status) || icu_length == 0) {
- return NULL;
- }
- icu_locale = icu::Locale(icu_result);
- }
-
- icu::SimpleDateFormat* date_format = CreateICUDateFormat(icu_locale, options);
- if (!date_format) {
- // Remove extensions and try again.
- icu::Locale no_extension_locale(icu_locale.getBaseName());
- date_format = CreateICUDateFormat(no_extension_locale, options);
-
- // Set resolved settings (pattern, numbering system, calendar).
- SetResolvedSettings(no_extension_locale, date_format, resolved);
- } else {
- SetResolvedSettings(icu_locale, date_format, resolved);
- }
-
- return date_format;
-}
-
-static icu::SimpleDateFormat* CreateICUDateFormat(
- const icu::Locale& icu_locale, v8::Handle<v8::Object> options) {
- // Create time zone as specified by the user. We have to re-create time zone
- // since calendar takes ownership.
- icu::TimeZone* tz = NULL;
- icu::UnicodeString timezone;
- if (Utils::ExtractStringSetting(options, "timeZone", &timezone)) {
- tz = icu::TimeZone::createTimeZone(timezone);
- } else {
- tz = icu::TimeZone::createDefault();
- }
-
- // Create a calendar using locale, and apply time zone to it.
- UErrorCode status = U_ZERO_ERROR;
- icu::Calendar* calendar =
- icu::Calendar::createInstance(tz, icu_locale, status);
-
- // Make formatter from skeleton. Calendar and numbering system are added
- // to the locale as Unicode extension (if they were specified at all).
- icu::SimpleDateFormat* date_format = NULL;
- icu::UnicodeString skeleton;
- if (Utils::ExtractStringSetting(options, "skeleton", &skeleton)) {
- icu::DateTimePatternGenerator* generator =
- icu::DateTimePatternGenerator::createInstance(icu_locale, status);
- icu::UnicodeString pattern;
- if (U_SUCCESS(status)) {
- pattern = generator->getBestPattern(skeleton, status);
- delete generator;
- }
-
- date_format = new icu::SimpleDateFormat(pattern, icu_locale, status);
- if (U_SUCCESS(status)) {
- date_format->adoptCalendar(calendar);
- }
- }
-
- if (U_FAILURE(status)) {
- delete calendar;
- delete date_format;
- date_format = NULL;
- }
-
- return date_format;
-}
-
-static void SetResolvedSettings(const icu::Locale& icu_locale,
- icu::SimpleDateFormat* date_format,
- v8::Handle<v8::Object> resolved) {
- UErrorCode status = U_ZERO_ERROR;
- icu::UnicodeString pattern;
- date_format->toPattern(pattern);
- resolved->Set(v8::String::New("pattern"),
- v8::String::New(reinterpret_cast<const uint16_t*>(
- pattern.getBuffer()), pattern.length()));
-
- // Set time zone and calendar.
- if (date_format) {
- const icu::Calendar* calendar = date_format->getCalendar();
- const char* calendar_name = calendar->getType();
- resolved->Set(v8::String::New("calendar"), v8::String::New(calendar_name));
-
- const icu::TimeZone& tz = calendar->getTimeZone();
- icu::UnicodeString time_zone;
- tz.getID(time_zone);
-
- icu::UnicodeString canonical_time_zone;
- icu::TimeZone::getCanonicalID(time_zone, canonical_time_zone, status);
- if (U_SUCCESS(status)) {
- if (canonical_time_zone == UNICODE_STRING_SIMPLE("Etc/GMT")) {
- resolved->Set(v8::String::New("timeZone"), v8::String::New("UTC"));
- } else {
- resolved->Set(v8::String::New("timeZone"),
- v8::String::New(reinterpret_cast<const uint16_t*>(
- canonical_time_zone.getBuffer()),
- canonical_time_zone.length()));
- }
- }
- }
-
- // Ugly hack. ICU doesn't expose numbering system in any way, so we have
- // to assume that for given locale NumberingSystem constructor produces the
- // same digits as NumberFormat/Calendar would.
- status = U_ZERO_ERROR;
- icu::NumberingSystem* numbering_system =
- icu::NumberingSystem::createInstance(icu_locale, status);
- if (U_SUCCESS(status)) {
- const char* ns = numbering_system->getName();
- resolved->Set(v8::String::New("numberingSystem"), v8::String::New(ns));
- } else {
- resolved->Set(v8::String::New("numberingSystem"), v8::Undefined());
- }
- delete numbering_system;
-
- // Set the locale
- char result[ULOC_FULLNAME_CAPACITY];
- status = U_ZERO_ERROR;
- uloc_toLanguageTag(
- icu_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status);
- if (U_SUCCESS(status)) {
- resolved->Set(v8::String::New("locale"), v8::String::New(result));
- } else {
- // This would never happen, since we got the locale from ICU.
- resolved->Set(v8::String::New("locale"), v8::String::New("und"));
- }
-}
-
-} // namespace v8_i18n
diff --git a/deps/v8/src/extensions/i18n/date-format.js b/deps/v8/src/extensions/i18n/date-format.js
index 04e7a7c7b9..b1d28e535c 100644
--- a/deps/v8/src/extensions/i18n/date-format.js
+++ b/deps/v8/src/extensions/i18n/date-format.js
@@ -235,7 +235,6 @@ function toDateTimeOptions(options, required, defaults) {
* Useful for subclassing.
*/
function initializeDateTimeFormat(dateFormat, locales, options) {
- native function NativeJSCreateDateTimeFormat();
if (dateFormat.hasOwnProperty('__initializedIntlObject')) {
throw new TypeError('Trying to re-initialize DateTimeFormat object.');
@@ -292,7 +291,7 @@ function initializeDateTimeFormat(dateFormat, locales, options) {
year: {writable: true}
});
- var formatter = NativeJSCreateDateTimeFormat(
+ var formatter = %CreateDateTimeFormat(
requestedLocale, {skeleton: ldmlString, timeZone: tz}, resolved);
if (tz !== undefined && tz !== resolved.timeZone) {
@@ -409,8 +408,6 @@ function initializeDateTimeFormat(dateFormat, locales, options) {
* DateTimeFormat.
*/
function formatDate(formatter, dateValue) {
- native function NativeJSInternalDateFormat();
-
var dateMs;
if (dateValue === undefined) {
dateMs = Date.now();
@@ -422,7 +419,7 @@ function formatDate(formatter, dateValue) {
throw new RangeError('Provided date is not in valid range.');
}
- return NativeJSInternalDateFormat(formatter.formatter, new Date(dateMs));
+ return %InternalDateFormat(formatter.formatter, new Date(dateMs));
}
@@ -433,8 +430,7 @@ function formatDate(formatter, dateValue) {
* Returns undefined if date string cannot be parsed.
*/
function parseDate(formatter, value) {
- native function NativeJSInternalDateParse();
- return NativeJSInternalDateParse(formatter.formatter, String(value));
+ return %InternalDateParse(formatter.formatter, String(value));
}
diff --git a/deps/v8/src/extensions/i18n/footer.js b/deps/v8/src/extensions/i18n/footer.js
index ac33f1e242..adaa633462 100644
--- a/deps/v8/src/extensions/i18n/footer.js
+++ b/deps/v8/src/extensions/i18n/footer.js
@@ -37,4 +37,4 @@ var CLEANUP_RE = new RegExp('');
CLEANUP_RE.test('');
return Intl;
-}());
+}())});
diff --git a/deps/v8/src/extensions/i18n/header.js b/deps/v8/src/extensions/i18n/header.js
index 1c0a2d8874..b854ce5ead 100644
--- a/deps/v8/src/extensions/i18n/header.js
+++ b/deps/v8/src/extensions/i18n/header.js
@@ -34,7 +34,7 @@
* Intl object is a single object that has some named properties,
* all of which are constructors.
*/
-var Intl = (function() {
+Object.defineProperty(this, "Intl", { enumerable: false, value: (function() {
'use strict';
diff --git a/deps/v8/src/extensions/i18n/i18n-extension.cc b/deps/v8/src/extensions/i18n/i18n-extension.cc
index 1c77b8899f..b110b7d80e 100644
--- a/deps/v8/src/extensions/i18n/i18n-extension.cc
+++ b/deps/v8/src/extensions/i18n/i18n-extension.cc
@@ -30,8 +30,6 @@
#include "break-iterator.h"
#include "collator.h"
-#include "date-format.h"
-#include "locale.h"
#include "natives.h"
#include "number-format.h"
@@ -49,26 +47,6 @@ Extension::Extension()
v8::Handle<v8::FunctionTemplate> Extension::GetNativeFunction(
v8::Handle<v8::String> name) {
- // Standalone, helper methods.
- if (name->Equals(v8::String::New("NativeJSCanonicalizeLanguageTag"))) {
- return v8::FunctionTemplate::New(JSCanonicalizeLanguageTag);
- } else if (name->Equals(v8::String::New("NativeJSAvailableLocalesOf"))) {
- return v8::FunctionTemplate::New(JSAvailableLocalesOf);
- } else if (name->Equals(v8::String::New("NativeJSGetDefaultICULocale"))) {
- return v8::FunctionTemplate::New(JSGetDefaultICULocale);
- } else if (name->Equals(v8::String::New("NativeJSGetLanguageTagVariants"))) {
- return v8::FunctionTemplate::New(JSGetLanguageTagVariants);
- }
-
- // Date format and parse.
- if (name->Equals(v8::String::New("NativeJSCreateDateTimeFormat"))) {
- return v8::FunctionTemplate::New(DateFormat::JSCreateDateTimeFormat);
- } else if (name->Equals(v8::String::New("NativeJSInternalDateFormat"))) {
- return v8::FunctionTemplate::New(DateFormat::JSInternalFormat);
- } else if (name->Equals(v8::String::New("NativeJSInternalDateParse"))) {
- return v8::FunctionTemplate::New(DateFormat::JSInternalParse);
- }
-
// Number format and parse.
if (name->Equals(v8::String::New("NativeJSCreateNumberFormat"))) {
return v8::FunctionTemplate::New(NumberFormat::JSCreateNumberFormat);
diff --git a/deps/v8/src/extensions/i18n/i18n-utils.cc b/deps/v8/src/extensions/i18n/i18n-utils.cc
index eac1166904..8c87f0715b 100644
--- a/deps/v8/src/extensions/i18n/i18n-utils.cc
+++ b/deps/v8/src/extensions/i18n/i18n-utils.cc
@@ -141,35 +141,37 @@ void Utils::AsciiToUChar(const char* source,
}
-// static
-v8::Local<v8::ObjectTemplate> Utils::GetTemplate(v8::Isolate* isolate) {
- i::Isolate* internal = reinterpret_cast<i::Isolate*>(isolate);
- if (internal->heap()->i18n_template_one() ==
- internal->heap()->the_hole_value()) {
- v8::Local<v8::ObjectTemplate> raw_template(v8::ObjectTemplate::New());
- raw_template->SetInternalFieldCount(1);
- internal->heap()
- ->SetI18nTemplateOne(*v8::Utils::OpenHandle(*raw_template));
+static v8::Local<v8::ObjectTemplate> ToLocal(i::Handle<i::Object> handle) {
+ return v8::Utils::ToLocal(i::Handle<i::ObjectTemplateInfo>::cast(handle));
+}
+
+
+template<int internal_fields, i::EternalHandles::SingletonHandle field>
+static v8::Local<v8::ObjectTemplate> GetEternal(v8::Isolate* external) {
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(external);
+ if (isolate->eternal_handles()->Exists(field)) {
+ return ToLocal(isolate->eternal_handles()->GetSingleton(field));
}
+ v8::Local<v8::ObjectTemplate> raw_template(v8::ObjectTemplate::New());
+ raw_template->SetInternalFieldCount(internal_fields);
+ return ToLocal(
+ isolate->eternal_handles()->CreateSingleton(
+ isolate,
+ *v8::Utils::OpenHandle(*raw_template),
+ field));
+}
- return v8::Utils::ToLocal(i::Handle<i::ObjectTemplateInfo>::cast(
- internal->factory()->i18n_template_one()));
+
+// static
+v8::Local<v8::ObjectTemplate> Utils::GetTemplate(v8::Isolate* isolate) {
+ return GetEternal<1, i::EternalHandles::I18N_TEMPLATE_ONE>(isolate);
}
// static
v8::Local<v8::ObjectTemplate> Utils::GetTemplate2(v8::Isolate* isolate) {
- i::Isolate* internal = reinterpret_cast<i::Isolate*>(isolate);
- if (internal->heap()->i18n_template_two() ==
- internal->heap()->the_hole_value()) {
- v8::Local<v8::ObjectTemplate> raw_template(v8::ObjectTemplate::New());
- raw_template->SetInternalFieldCount(2);
- internal->heap()
- ->SetI18nTemplateTwo(*v8::Utils::OpenHandle(*raw_template));
- }
-
- return v8::Utils::ToLocal(i::Handle<i::ObjectTemplateInfo>::cast(
- internal->factory()->i18n_template_two()));
+ return GetEternal<2, i::EternalHandles::I18N_TEMPLATE_TWO>(isolate);
}
+
} // namespace v8_i18n
diff --git a/deps/v8/src/extensions/i18n/i18n-utils.js b/deps/v8/src/extensions/i18n/i18n-utils.js
index d7e9486c50..545082ecbb 100644
--- a/deps/v8/src/extensions/i18n/i18n-utils.js
+++ b/deps/v8/src/extensions/i18n/i18n-utils.js
@@ -255,8 +255,6 @@ function resolveLocale(service, requestedLocales, options) {
* lookup algorithm.
*/
function lookupMatcher(service, requestedLocales) {
- native function NativeJSGetDefaultICULocale();
-
if (service.match(SERVICE_RE) === null) {
throw new Error('Internal error, wrong service type: ' + service);
}
@@ -287,7 +285,7 @@ function lookupMatcher(service, requestedLocales) {
// Didn't find a match, return default.
if (DEFAULT_ICU_LOCALE === undefined) {
- DEFAULT_ICU_LOCALE = NativeJSGetDefaultICULocale();
+ DEFAULT_ICU_LOCALE = %GetDefaultICULocale();
}
return {'locale': DEFAULT_ICU_LOCALE, 'extension': '', 'position': -1};
@@ -446,14 +444,12 @@ function getOptimalLanguageTag(original, resolved) {
// Returns Array<Object>, where each object has maximized and base properties.
// Maximized: zh -> zh-Hans-CN
// Base: zh-CN-u-ca-gregory -> zh-CN
- native function NativeJSGetLanguageTagVariants();
-
// Take care of grandfathered or simple cases.
if (original === resolved) {
return original;
}
- var locales = NativeJSGetLanguageTagVariants([original, resolved]);
+ var locales = %GetLanguageTagVariants([original, resolved]);
if (locales[0].maximized !== locales[1].maximized) {
return resolved;
}
@@ -471,8 +467,7 @@ function getOptimalLanguageTag(original, resolved) {
* that is supported. This is required by the spec.
*/
function getAvailableLocalesOf(service) {
- native function NativeJSAvailableLocalesOf();
- var available = NativeJSAvailableLocalesOf(service);
+ var available = %AvailableLocalesOf(service);
for (var i in available) {
if (available.hasOwnProperty(i)) {
diff --git a/deps/v8/src/extensions/i18n/locale.cc b/deps/v8/src/extensions/i18n/locale.cc
deleted file mode 100644
index 6b6f9ac314..0000000000
--- a/deps/v8/src/extensions/i18n/locale.cc
+++ /dev/null
@@ -1,251 +0,0 @@
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// limitations under the License.
-
-#include "locale.h"
-
-#include <string.h>
-
-#include "unicode/brkiter.h"
-#include "unicode/coll.h"
-#include "unicode/datefmt.h"
-#include "unicode/numfmt.h"
-#include "unicode/uloc.h"
-#include "unicode/uversion.h"
-
-namespace v8_i18n {
-
-void JSCanonicalizeLanguageTag(
- const v8::FunctionCallbackInfo<v8::Value>& args) {
- // Expect locale id which is a string.
- if (args.Length() != 1 || !args[0]->IsString()) {
- v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Locale identifier, as a string, is required.")));
- return;
- }
-
- UErrorCode error = U_ZERO_ERROR;
-
- char icu_result[ULOC_FULLNAME_CAPACITY];
- int icu_length = 0;
-
- // Return value which denotes invalid language tag.
- const char* const kInvalidTag = "invalid-tag";
-
- v8::String::AsciiValue locale_id(args[0]->ToString());
- if (*locale_id == NULL) {
- args.GetReturnValue().Set(v8::String::New(kInvalidTag));
- return;
- }
-
- uloc_forLanguageTag(*locale_id, icu_result, ULOC_FULLNAME_CAPACITY,
- &icu_length, &error);
- if (U_FAILURE(error) || icu_length == 0) {
- args.GetReturnValue().Set(v8::String::New(kInvalidTag));
- return;
- }
-
- char result[ULOC_FULLNAME_CAPACITY];
-
- // Force strict BCP47 rules.
- uloc_toLanguageTag(icu_result, result, ULOC_FULLNAME_CAPACITY, TRUE, &error);
-
- if (U_FAILURE(error)) {
- args.GetReturnValue().Set(v8::String::New(kInvalidTag));
- return;
- }
-
- args.GetReturnValue().Set(v8::String::New(result));
-}
-
-
-void JSAvailableLocalesOf(const v8::FunctionCallbackInfo<v8::Value>& args) {
- // Expect service name which is a string.
- if (args.Length() != 1 || !args[0]->IsString()) {
- v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Service identifier, as a string, is required.")));
- return;
- }
-
- const icu::Locale* available_locales = NULL;
-
- int32_t count = 0;
- v8::String::AsciiValue service(args[0]->ToString());
- if (strcmp(*service, "collator") == 0) {
- available_locales = icu::Collator::getAvailableLocales(count);
- } else if (strcmp(*service, "numberformat") == 0) {
- available_locales = icu::NumberFormat::getAvailableLocales(count);
- } else if (strcmp(*service, "dateformat") == 0) {
- available_locales = icu::DateFormat::getAvailableLocales(count);
- } else if (strcmp(*service, "breakiterator") == 0) {
- available_locales = icu::BreakIterator::getAvailableLocales(count);
- }
-
- v8::TryCatch try_catch;
- UErrorCode error = U_ZERO_ERROR;
- char result[ULOC_FULLNAME_CAPACITY];
- v8::Handle<v8::Object> locales = v8::Object::New();
-
- for (int32_t i = 0; i < count; ++i) {
- const char* icu_name = available_locales[i].getName();
-
- error = U_ZERO_ERROR;
- // No need to force strict BCP47 rules.
- uloc_toLanguageTag(icu_name, result, ULOC_FULLNAME_CAPACITY, FALSE, &error);
- if (U_FAILURE(error)) {
- // This shouldn't happen, but lets not break the user.
- continue;
- }
-
- // Index is just a dummy value for the property value.
- locales->Set(v8::String::New(result), v8::Integer::New(i));
- if (try_catch.HasCaught()) {
- // Ignore error, but stop processing and return.
- break;
- }
- }
-
- args.GetReturnValue().Set(locales);
-}
-
-
-void JSGetDefaultICULocale(const v8::FunctionCallbackInfo<v8::Value>& args) {
- icu::Locale default_locale;
-
- // Set the locale
- char result[ULOC_FULLNAME_CAPACITY];
- UErrorCode status = U_ZERO_ERROR;
- uloc_toLanguageTag(
- default_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status);
- if (U_SUCCESS(status)) {
- args.GetReturnValue().Set(v8::String::New(result));
- return;
- }
-
- args.GetReturnValue().Set(v8::String::New("und"));
-}
-
-
-void JSGetLanguageTagVariants(const v8::FunctionCallbackInfo<v8::Value>& args) {
- v8::TryCatch try_catch;
-
- // Expect an array of strings.
- if (args.Length() != 1 || !args[0]->IsArray()) {
- v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Internal error. Expected Array<String>.")));
- return;
- }
-
- v8::Local<v8::Array> input = v8::Local<v8::Array>::Cast(args[0]);
- v8::Handle<v8::Array> output = v8::Array::New(input->Length());
- for (unsigned int i = 0; i < input->Length(); ++i) {
- v8::Local<v8::Value> locale_id = input->Get(i);
- if (try_catch.HasCaught()) {
- break;
- }
-
- if (!locale_id->IsString()) {
- v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Internal error. Array element is missing "
- "or it isn't a string.")));
- return;
- }
-
- v8::String::AsciiValue ascii_locale_id(locale_id);
- if (*ascii_locale_id == NULL) {
- v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Internal error. Non-ASCII locale identifier.")));
- return;
- }
-
- UErrorCode error = U_ZERO_ERROR;
-
- // Convert from BCP47 to ICU format.
- // de-DE-u-co-phonebk -> de_DE@collation=phonebook
- char icu_locale[ULOC_FULLNAME_CAPACITY];
- int icu_locale_length = 0;
- uloc_forLanguageTag(*ascii_locale_id, icu_locale, ULOC_FULLNAME_CAPACITY,
- &icu_locale_length, &error);
- if (U_FAILURE(error) || icu_locale_length == 0) {
- v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Internal error. Failed to convert locale to ICU.")));
- return;
- }
-
- // Maximize the locale.
- // de_DE@collation=phonebook -> de_Latn_DE@collation=phonebook
- char icu_max_locale[ULOC_FULLNAME_CAPACITY];
- uloc_addLikelySubtags(
- icu_locale, icu_max_locale, ULOC_FULLNAME_CAPACITY, &error);
-
- // Remove extensions from maximized locale.
- // de_Latn_DE@collation=phonebook -> de_Latn_DE
- char icu_base_max_locale[ULOC_FULLNAME_CAPACITY];
- uloc_getBaseName(
- icu_max_locale, icu_base_max_locale, ULOC_FULLNAME_CAPACITY, &error);
-
- // Get original name without extensions.
- // de_DE@collation=phonebook -> de_DE
- char icu_base_locale[ULOC_FULLNAME_CAPACITY];
- uloc_getBaseName(
- icu_locale, icu_base_locale, ULOC_FULLNAME_CAPACITY, &error);
-
- // Convert from ICU locale format to BCP47 format.
- // de_Latn_DE -> de-Latn-DE
- char base_max_locale[ULOC_FULLNAME_CAPACITY];
- uloc_toLanguageTag(icu_base_max_locale, base_max_locale,
- ULOC_FULLNAME_CAPACITY, FALSE, &error);
-
- // de_DE -> de-DE
- char base_locale[ULOC_FULLNAME_CAPACITY];
- uloc_toLanguageTag(
- icu_base_locale, base_locale, ULOC_FULLNAME_CAPACITY, FALSE, &error);
-
- if (U_FAILURE(error)) {
- v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Internal error. Couldn't generate maximized "
- "or base locale.")));
- return;
- }
-
- v8::Handle<v8::Object> result = v8::Object::New();
- result->Set(v8::String::New("maximized"), v8::String::New(base_max_locale));
- result->Set(v8::String::New("base"), v8::String::New(base_locale));
- if (try_catch.HasCaught()) {
- break;
- }
-
- output->Set(i, result);
- if (try_catch.HasCaught()) {
- break;
- }
- }
-
- args.GetReturnValue().Set(output);
-}
-
-} // namespace v8_i18n
diff --git a/deps/v8/src/extensions/i18n/locale.js b/deps/v8/src/extensions/i18n/locale.js
index ea95b87192..e4783277e6 100644
--- a/deps/v8/src/extensions/i18n/locale.js
+++ b/deps/v8/src/extensions/i18n/locale.js
@@ -34,8 +34,6 @@
* Canonicalizes the language tag, or throws in case the tag is invalid.
*/
function canonicalizeLanguageTag(localeID) {
- native function NativeJSCanonicalizeLanguageTag();
-
// null is typeof 'object' so we have to do extra check.
if (typeof localeID !== 'string' && typeof localeID !== 'object' ||
localeID === null) {
@@ -52,7 +50,7 @@ function canonicalizeLanguageTag(localeID) {
// ICU bug filled - http://bugs.icu-project.org/trac/ticket/9265.
// TODO(cira): check if -u-kn-true-kc-true-kh-true still throws after
// upgrade to ICU 4.9.
- var tag = NativeJSCanonicalizeLanguageTag(localeString);
+ var tag = %CanonicalizeLanguageTag(localeString);
if (tag === 'invalid-tag') {
throw new RangeError('Invalid language tag: ' + localeString);
}
diff --git a/deps/v8/src/factory.cc b/deps/v8/src/factory.cc
index c5a1fddb88..3ca0efa210 100644
--- a/deps/v8/src/factory.cc
+++ b/deps/v8/src/factory.cc
@@ -1097,73 +1097,69 @@ void Factory::EnsureCanContainElements(Handle<JSArray> array,
Handle<JSArrayBuffer> Factory::NewJSArrayBuffer() {
- JSFunction* array_buffer_fun =
- isolate()->context()->native_context()->array_buffer_fun();
+ Handle<JSFunction> array_buffer_fun(
+ isolate()->context()->native_context()->array_buffer_fun());
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->AllocateJSObject(array_buffer_fun),
+ isolate()->heap()->AllocateJSObject(*array_buffer_fun),
JSArrayBuffer);
}
Handle<JSDataView> Factory::NewJSDataView() {
- JSFunction* data_view_fun =
- isolate()->context()->native_context()->data_view_fun();
+ Handle<JSFunction> data_view_fun(
+ isolate()->context()->native_context()->data_view_fun());
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->AllocateJSObject(data_view_fun),
+ isolate()->heap()->AllocateJSObject(*data_view_fun),
JSDataView);
}
-Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type) {
- JSFunction* typed_array_fun;
- Context* native_context = isolate()->context()->native_context();
+static JSFunction* GetTypedArrayFun(ExternalArrayType type,
+ Isolate* isolate) {
+ Context* native_context = isolate->context()->native_context();
switch (type) {
case kExternalUnsignedByteArray:
- typed_array_fun = native_context->uint8_array_fun();
- break;
+ return native_context->uint8_array_fun();
case kExternalByteArray:
- typed_array_fun = native_context->int8_array_fun();
- break;
+ return native_context->int8_array_fun();
case kExternalUnsignedShortArray:
- typed_array_fun = native_context->uint16_array_fun();
- break;
+ return native_context->uint16_array_fun();
case kExternalShortArray:
- typed_array_fun = native_context->int16_array_fun();
- break;
+ return native_context->int16_array_fun();
case kExternalUnsignedIntArray:
- typed_array_fun = native_context->uint32_array_fun();
- break;
+ return native_context->uint32_array_fun();
case kExternalIntArray:
- typed_array_fun = native_context->int32_array_fun();
- break;
+ return native_context->int32_array_fun();
case kExternalFloatArray:
- typed_array_fun = native_context->float_array_fun();
- break;
+ return native_context->float_array_fun();
case kExternalDoubleArray:
- typed_array_fun = native_context->double_array_fun();
- break;
+ return native_context->double_array_fun();
case kExternalPixelArray:
- typed_array_fun = native_context->uint8c_array_fun();
- break;
+ return native_context->uint8c_array_fun();
default:
UNREACHABLE();
- return Handle<JSTypedArray>();
+ return NULL;
}
+}
+
+
+Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type) {
+ Handle<JSFunction> typed_array_fun_handle(GetTypedArrayFun(type, isolate()));
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->AllocateJSObject(typed_array_fun),
+ isolate()->heap()->AllocateJSObject(*typed_array_fun_handle),
JSTypedArray);
}
diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h
index fa202f921a..c68beb5ed9 100644
--- a/deps/v8/src/flag-definitions.h
+++ b/deps/v8/src/flag-definitions.h
@@ -173,6 +173,8 @@ DEFINE_bool(harmony_generators, false, "enable harmony generators")
DEFINE_bool(harmony_iteration, false, "enable harmony iteration (for-of)")
DEFINE_bool(harmony_numeric_literals, false,
"enable harmony numeric literals (0o77, 0b11)")
+DEFINE_bool(harmony_strings, false, "enable harmony string")
+DEFINE_bool(harmony_arrays, false, "enable harmony arrays")
DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
DEFINE_implication(harmony, harmony_scoping)
DEFINE_implication(harmony, harmony_modules)
@@ -183,6 +185,8 @@ DEFINE_implication(harmony, harmony_observation)
DEFINE_implication(harmony, harmony_generators)
DEFINE_implication(harmony, harmony_iteration)
DEFINE_implication(harmony, harmony_numeric_literals)
+DEFINE_implication(harmony, harmony_strings)
+DEFINE_implication(harmony, harmony_arrays)
DEFINE_implication(harmony_modules, harmony_scoping)
DEFINE_implication(harmony_observation, harmony_collections)
// TODO[dslomov] add harmony => harmony_typed_arrays
@@ -263,7 +267,6 @@ DEFINE_bool(trap_on_deopt, false, "put a break point before deoptimizing")
DEFINE_bool(deoptimize_uncommon_cases, true, "deoptimize uncommon cases")
DEFINE_bool(polymorphic_inlining, true, "polymorphic inlining")
DEFINE_bool(use_osr, true, "use on-stack replacement")
-DEFINE_bool(idefs, false, "use informative definitions")
DEFINE_bool(array_bounds_checks_elimination, true,
"perform array bounds checks elimination")
DEFINE_bool(array_bounds_checks_hoisting, false,
@@ -307,9 +310,6 @@ DEFINE_int(parallel_recompilation_queue_length, 8,
"the length of the parallel compilation queue")
DEFINE_int(parallel_recompilation_delay, 0,
"artificial compilation delay in ms")
-DEFINE_bool(omit_prototype_checks_for_leaf_maps, true,
- "do not emit prototype checks if all prototypes have leaf maps, "
- "deoptimize the optimized code if the layout of the maps changes.")
DEFINE_bool(omit_map_checks_for_leaf_maps, true,
"do not emit check maps for constant values that have a leaf map, "
"deoptimize the optimized code if the layout of the maps changes.")
diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc
index 61792a628c..c17a9d5f82 100644
--- a/deps/v8/src/frames.cc
+++ b/deps/v8/src/frames.cc
@@ -1521,9 +1521,9 @@ void StackHandler::Unwind(Isolate* isolate,
FixedArray* array,
int offset,
int previous_handler_offset) const {
- STATIC_ASSERT(StackHandlerConstants::kSlotCount == 5);
+ STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
ASSERT_LE(0, offset);
- ASSERT_GE(array->length(), offset + 5);
+ ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
// Unwinding a stack handler into an array chains it in the opposite
// direction, re-using the "next" slot as a "previous" link, so that stack
// handlers can be later re-wound in the correct order. Decode the "state"
@@ -1542,9 +1542,9 @@ int StackHandler::Rewind(Isolate* isolate,
FixedArray* array,
int offset,
Address fp) {
- STATIC_ASSERT(StackHandlerConstants::kSlotCount == 5);
+ STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
ASSERT_LE(0, offset);
- ASSERT_GE(array->length(), offset + 5);
+ ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
Smi* prev_handler_offset = Smi::cast(array->get(offset));
Code* code = Code::cast(array->get(offset + 1));
Smi* smi_index = Smi::cast(array->get(offset + 2));
@@ -1560,7 +1560,7 @@ int StackHandler::Rewind(Isolate* isolate,
Memory::uintptr_at(address() + StackHandlerConstants::kStateOffset) = state;
Memory::Object_at(address() + StackHandlerConstants::kContextOffset) =
context;
- Memory::Address_at(address() + StackHandlerConstants::kFPOffset) = fp;
+ SetFp(address() + StackHandlerConstants::kFPOffset, fp);
*isolate->handler_address() = address();
diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h
index 634ff8a7cb..2bbbd98ac0 100644
--- a/deps/v8/src/frames.h
+++ b/deps/v8/src/frames.h
@@ -145,6 +145,7 @@ class StackHandler BASE_EMBEDDED {
inline Object** context_address() const;
inline Object** code_address() const;
+ inline void SetFp(Address slot, Address fp);
DISALLOW_IMPLICIT_CONSTRUCTORS(StackHandler);
};
@@ -176,7 +177,7 @@ class StandardFrameConstants : public AllStatic {
static const int kContextOffset = -1 * kPointerSize;
static const int kCallerFPOffset = 0 * kPointerSize;
static const int kCallerPCOffset = +1 * kFPOnStackSize;
- static const int kCallerSPOffset = +2 * kPCOnStackSize;
+ static const int kCallerSPOffset = kCallerPCOffset + 1 * kPCOnStackSize;
};
diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc
index 6d802e965d..f5539e8b18 100644
--- a/deps/v8/src/full-codegen.cc
+++ b/deps/v8/src/full-codegen.cc
@@ -512,7 +512,7 @@ void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
- __ push(reg);
+ __ Push(reg);
}
@@ -530,7 +530,7 @@ void FullCodeGenerator::EffectContext::PlugTOS() const {
void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
- __ pop(result_register());
+ __ Pop(result_register());
}
@@ -540,7 +540,7 @@ void FullCodeGenerator::StackValueContext::PlugTOS() const {
void FullCodeGenerator::TestContext::PlugTOS() const {
// For simplicity we always test the accumulator register.
- __ pop(result_register());
+ __ Pop(result_register());
codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
codegen()->DoTest(this);
}
@@ -1006,7 +1006,7 @@ void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
VisitForAccumulatorValue(left);
// We want the value in the accumulator for the test, and on the stack in
// case we need it.
- __ push(result_register());
+ __ Push(result_register());
Label discard, restore;
if (is_logical_and) {
DoTest(left, &discard, &restore, &restore);
@@ -1014,7 +1014,7 @@ void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
DoTest(left, &restore, &discard, &restore);
}
__ bind(&restore);
- __ pop(result_register());
+ __ Pop(result_register());
__ jmp(&done);
__ bind(&discard);
__ Drop(1);
@@ -1024,7 +1024,7 @@ void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
VisitForAccumulatorValue(left);
// We want the value in the accumulator for the test, and on the stack in
// case we need it.
- __ push(result_register());
+ __ Push(result_register());
Label discard;
if (is_logical_and) {
DoTest(left, &discard, &done, &discard);
@@ -1416,7 +1416,7 @@ void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
// Extend the context before executing the catch block.
{ Comment cmnt(masm_, "[ Extend catch context");
__ Push(stmt->variable()->name());
- __ push(result_register());
+ __ Push(result_register());
PushFunctionArgumentForContextAllocation();
__ CallRuntime(Runtime::kPushCatchContext, 3);
StoreToFrameField(StandardFrameConstants::kContextOffset,
@@ -1481,7 +1481,7 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
// preserved by the finally block. Call the finally block and then
// rethrow the exception if it returns.
__ Call(&finally_entry);
- __ push(result_register());
+ __ Push(result_register());
__ CallRuntime(Runtime::kReThrow, 1);
// Finally block implementation.
diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h
index a9db54e32c..af63aedfbf 100644
--- a/deps/v8/src/full-codegen.h
+++ b/deps/v8/src/full-codegen.h
@@ -31,11 +31,14 @@
#include "v8.h"
#include "allocation.h"
+#include "assert-scope.h"
#include "ast.h"
#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "data-flow.h"
+#include "globals.h"
+#include "objects.h"
namespace v8 {
namespace internal {
@@ -136,7 +139,64 @@ class FullCodeGenerator: public AstVisitor {
#error Unsupported target architecture.
#endif
- static const int kBackEdgeEntrySize = 3 * kIntSize;
+ class BackEdgeTableIterator {
+ public:
+ explicit BackEdgeTableIterator(Code* unoptimized) {
+ ASSERT(unoptimized->kind() == Code::FUNCTION);
+ instruction_start_ = unoptimized->instruction_start();
+ cursor_ = instruction_start_ + unoptimized->back_edge_table_offset();
+ ASSERT(cursor_ < instruction_start_ + unoptimized->instruction_size());
+ table_length_ = Memory::uint32_at(cursor_);
+ cursor_ += kTableLengthSize;
+ end_ = cursor_ + table_length_ * kEntrySize;
+ }
+
+ bool Done() { return cursor_ >= end_; }
+
+ void Next() {
+ ASSERT(!Done());
+ cursor_ += kEntrySize;
+ }
+
+ BailoutId ast_id() {
+ ASSERT(!Done());
+ return BailoutId(static_cast<int>(
+ Memory::uint32_at(cursor_ + kAstIdOffset)));
+ }
+
+ uint32_t loop_depth() {
+ ASSERT(!Done());
+ return Memory::uint32_at(cursor_ + kLoopDepthOffset);
+ }
+
+ uint32_t pc_offset() {
+ ASSERT(!Done());
+ return Memory::uint32_at(cursor_ + kPcOffsetOffset);
+ }
+
+ Address pc() {
+ ASSERT(!Done());
+ return instruction_start_ + pc_offset();
+ }
+
+ uint32_t table_length() { return table_length_; }
+
+ private:
+ static const int kTableLengthSize = kIntSize;
+ static const int kAstIdOffset = 0 * kIntSize;
+ static const int kPcOffsetOffset = 1 * kIntSize;
+ static const int kLoopDepthOffset = 2 * kIntSize;
+ static const int kEntrySize = 3 * kIntSize;
+
+ Address cursor_;
+ Address end_;
+ Address instruction_start_;
+ uint32_t table_length_;
+ DisallowHeapAllocation no_gc_while_iterating_over_raw_addresses_;
+
+ DISALLOW_COPY_AND_ASSIGN(BackEdgeTableIterator);
+ };
+
private:
class Breakable;
@@ -625,8 +685,6 @@ class FullCodeGenerator: public AstVisitor {
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
- void EmitUnaryOperation(UnaryOperation* expr, const char* comment);
-
void VisitComma(BinaryOperation* expr);
void VisitLogicalExpression(BinaryOperation* expr);
void VisitArithmeticExpression(BinaryOperation* expr);
diff --git a/deps/v8/src/global-handles.cc b/deps/v8/src/global-handles.cc
index 88ebe31647..2eae474510 100644
--- a/deps/v8/src/global-handles.cc
+++ b/deps/v8/src/global-handles.cc
@@ -56,7 +56,9 @@ class GlobalHandles::Node {
NORMAL, // Normal global handle.
WEAK, // Flagged as weak but not yet finalized.
PENDING, // Has been recognized as only reachable by weak handles.
- NEAR_DEATH // Callback has informed the handle is near death.
+ NEAR_DEATH, // Callback has informed the handle is near death.
+
+ NUMBER_OF_STATES
};
// Maps handle location (slot) to the containing node.
@@ -71,6 +73,7 @@ class GlobalHandles::Node {
STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
Internals::kNodeStateMask);
STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
+ STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
Internals::kNodeIsIndependentShift);
@@ -93,13 +96,12 @@ class GlobalHandles::Node {
}
#endif
- void Initialize(int index, Node** first_free) {
+ void Initialize(int index, Node* first_free) {
index_ = static_cast<uint8_t>(index);
ASSERT(static_cast<int>(index_) == index);
set_state(FREE);
set_in_new_space_list(false);
- parameter_or_next_free_.next_free = *first_free;
- *first_free = this;
+ parameter_or_next_free_.next_free = first_free;
}
void Acquire(Object* object) {
@@ -111,7 +113,6 @@ class GlobalHandles::Node {
set_state(NORMAL);
parameter_or_next_free_.parameter = NULL;
weak_reference_callback_ = NULL;
- IncreaseBlockUses();
}
void Release() {
@@ -125,7 +126,7 @@ class GlobalHandles::Node {
set_partially_dependent(false);
weak_reference_callback_ = NULL;
#endif
- DecreaseBlockUses();
+ ReleaseFromBlock();
}
// Object slot accessors.
@@ -204,10 +205,6 @@ class GlobalHandles::Node {
}
void clear_partially_dependent() { set_partially_dependent(false); }
- // Callback accessor.
- // TODO(svenpanne) Re-enable or nuke later.
- // WeakReferenceCallback callback() { return callback_; }
-
// Callback parameter accessors.
void set_parameter(void* parameter) {
ASSERT(state() != FREE);
@@ -276,8 +273,7 @@ class GlobalHandles::Node {
private:
inline NodeBlock* FindBlock();
inline GlobalHandles* GetGlobalHandles();
- inline void IncreaseBlockUses();
- inline void DecreaseBlockUses();
+ inline void ReleaseFromBlock();
// Storage for object pointer.
// Placed first to avoid offset computation.
@@ -315,163 +311,404 @@ class GlobalHandles::Node {
};
-class GlobalHandles::NodeBlock {
+class GlobalHandles::BlockListIterator {
public:
- static const int kSize = 256;
+ explicit inline BlockListIterator(BlockList* anchor)
+ : anchor_(anchor), current_(anchor->next()) {
+ ASSERT(anchor->IsAnchor());
+ }
+ inline BlockList* block() const {
+ ASSERT(!done());
+ return current_;
+ }
+ inline bool done() const {
+ ASSERT_EQ(anchor_ == current_, current_->IsAnchor());
+ return anchor_ == current_;
+ }
+ inline void Advance() {
+ ASSERT(!done());
+ current_ = current_->next();
+ }
+
+ private:
+ BlockList* const anchor_;
+ BlockList* current_;
+ DISALLOW_COPY_AND_ASSIGN(BlockListIterator);
+};
+
+
+GlobalHandles::BlockList::BlockList()
+ : prev_block_(this),
+ next_block_(this),
+ first_free_(NULL),
+ used_nodes_(0) {}
+
+
+void GlobalHandles::BlockList::InsertAsNext(BlockList* const block) {
+ ASSERT(block != this);
+ ASSERT(!block->IsAnchor());
+ ASSERT(block->IsDetached());
+ block->prev_block_ = this;
+ block->next_block_ = next_block_;
+ next_block_->prev_block_ = block;
+ next_block_ = block;
+ ASSERT(!IsDetached());
+ ASSERT(!block->IsDetached());
+}
+
+
+void GlobalHandles::BlockList::Detach() {
+ ASSERT(!IsAnchor());
+ ASSERT(!IsDetached());
+ prev_block_->next_block_ = next_block_;
+ next_block_->prev_block_ = prev_block_;
+ prev_block_ = this;
+ next_block_ = this;
+ ASSERT(IsDetached());
+}
- explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
- : next_(next),
- used_nodes_(0),
- next_used_(NULL),
- prev_used_(NULL),
- global_handles_(global_handles) {}
- void PutNodesOnFreeList(Node** first_free) {
+bool GlobalHandles::BlockList::HasAtLeastLength(int length) {
+ ASSERT(IsAnchor());
+ ASSERT(length > 0);
+ for (BlockListIterator it(this); !it.done(); it.Advance()) {
+ if (--length <= 0) return true;
+ }
+ return false;
+}
+
+
+#ifdef DEBUG
+int GlobalHandles::BlockList::LengthOfFreeList() {
+ int count = 0;
+ Node* node = first_free_;
+ while (node != NULL) {
+ count++;
+ node = node->next_free();
+ }
+ return count;
+}
+#endif
+
+
+int GlobalHandles::BlockList::CompareBlocks(const void* a, const void* b) {
+ const BlockList* block_a =
+ *reinterpret_cast<const BlockList**>(reinterpret_cast<uintptr_t>(a));
+ const BlockList* block_b =
+ *reinterpret_cast<const BlockList**>(reinterpret_cast<uintptr_t>(b));
+ if (block_a->used_nodes() > block_b->used_nodes()) return -1;
+ if (block_a->used_nodes() == block_b->used_nodes()) return 0;
+ return 1;
+}
+
+
+class GlobalHandles::NodeBlock : public BlockList {
+ public:
+ static const int kSize = 256;
+
+ explicit NodeBlock(GlobalHandles* global_handles)
+ : global_handles_(global_handles) {
+ // Initialize nodes
+ Node* first_free = first_free_;
for (int i = kSize - 1; i >= 0; --i) {
nodes_[i].Initialize(i, first_free);
+ first_free = &nodes_[i];
}
+ first_free_ = first_free;
+ ASSERT(!IsAnchor());
+ // Link into global_handles
+ ASSERT(global_handles->non_full_blocks_.IsDetached());
+ global_handles->non_full_blocks_.InsertAsHead(this);
+ global_handles->number_of_blocks_++;
}
- Node* node_at(int index) {
- ASSERT(0 <= index && index < kSize);
- return &nodes_[index];
- }
-
- void IncreaseUses() {
+ Node* Acquire(Object* o) {
ASSERT(used_nodes_ < kSize);
- if (used_nodes_++ == 0) {
- NodeBlock* old_first = global_handles_->first_used_block_;
- global_handles_->first_used_block_ = this;
- next_used_ = old_first;
- prev_used_ = NULL;
- if (old_first == NULL) return;
- old_first->prev_used_ = this;
+ ASSERT(first_free_ != NULL);
+ ASSERT(global_handles_->non_full_blocks_.next() == this);
+ // Remove from free list
+ Node* node = first_free_;
+ first_free_ = node->next_free();
+ // Increment counters
+ global_handles_->isolate()->counters()->global_handles()->Increment();
+ global_handles_->number_of_global_handles_++;
+ // Initialize node with value
+ node->Acquire(o);
+ bool now_full = ++used_nodes_ == kSize;
+ ASSERT_EQ(now_full, first_free_ == NULL);
+ if (now_full) {
+ // Move block to tail of non_full_blocks_
+ Detach();
+ global_handles_->full_blocks_.InsertAsTail(this);
}
+ return node;
}
- void DecreaseUses() {
+ void Release(Node* node) {
ASSERT(used_nodes_ > 0);
- if (--used_nodes_ == 0) {
- if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
- if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
- if (this == global_handles_->first_used_block_) {
- global_handles_->first_used_block_ = next_used_;
- }
+ // Add to free list
+ node->set_next_free(first_free_);
+ first_free_ = node;
+ // Decrement counters
+ global_handles_->isolate()->counters()->global_handles()->Decrement();
+ global_handles_->number_of_global_handles_--;
+ bool was_full = used_nodes_-- == kSize;
+ ASSERT_EQ(was_full, first_free_->next_free() == NULL);
+ if (was_full) {
+ // Move this block to head of non_full_blocks_
+ Detach();
+ global_handles_->non_full_blocks_.InsertAsHead(this);
}
}
+ Node* node_at(int index) {
+ ASSERT(0 <= index && index < kSize);
+ return &nodes_[index];
+ }
+
GlobalHandles* global_handles() { return global_handles_; }
- // Next block in the list of all blocks.
- NodeBlock* next() const { return next_; }
+ static NodeBlock* Cast(BlockList* block_list) {
+ ASSERT(!block_list->IsAnchor());
+ return static_cast<NodeBlock*>(block_list);
+ }
- // Next/previous block in the list of blocks with used nodes.
- NodeBlock* next_used() const { return next_used_; }
- NodeBlock* prev_used() const { return prev_used_; }
+ static NodeBlock* From(Node* node, uint8_t index) {
+ uintptr_t ptr = reinterpret_cast<uintptr_t>(node - index);
+ ptr -= OFFSET_OF(NodeBlock, nodes_);
+ NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
+ ASSERT(block->node_at(index) == node);
+ return block;
+ }
private:
Node nodes_[kSize];
- NodeBlock* const next_;
- int used_nodes_;
- NodeBlock* next_used_;
- NodeBlock* prev_used_;
GlobalHandles* global_handles_;
};
-GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
- return FindBlock()->global_handles();
+void GlobalHandles::BlockList::SortBlocks(GlobalHandles* global_handles,
+ bool prune) {
+ // Always sort at least 2 blocks
+ if (!global_handles->non_full_blocks_.HasAtLeastLength(2)) return;
+ // build a vector that could contain the upper bound of the block count
+ int number_of_blocks = global_handles->block_count();
+ // Build array of blocks and update number_of_blocks to actual count
+ ScopedVector<BlockList*> blocks(number_of_blocks);
+ {
+ int i = 0;
+ BlockList* anchor = &global_handles->non_full_blocks_;
+ for (BlockListIterator it(anchor); !it.done(); it.Advance()) {
+ blocks[i++] = it.block();
+ }
+ number_of_blocks = i;
+ }
+ // Nothing to do.
+ if (number_of_blocks <= 1) return;
+ // Sort blocks
+ qsort(blocks.start(), number_of_blocks, sizeof(blocks[0]), CompareBlocks);
+ // Prune empties
+ if (prune) {
+ static const double kUnusedPercentage = 0.30;
+ static const double kUsedPercentage = 1.30;
+ int total_slots = global_handles->number_of_blocks_ * NodeBlock::kSize;
+ const int total_used = global_handles->number_of_global_handles_;
+ const int target_unused = static_cast<int>(Max(
+ total_used * kUsedPercentage,
+ total_slots * kUnusedPercentage));
+ // Reverse through empty blocks. Note: always leave one block free.
+ int blocks_deleted = 0;
+ for (int i = number_of_blocks - 1; i > 0 && blocks[i]->IsUnused(); i--) {
+ // Not worth deleting
+ if (total_slots - total_used < target_unused) break;
+ blocks[i]->Detach();
+ delete blocks[i];
+ blocks_deleted++;
+ total_slots -= NodeBlock::kSize;
+ }
+ global_handles->number_of_blocks_ -= blocks_deleted;
+ number_of_blocks -= blocks_deleted;
+ }
+ // Relink all blocks
+ for (int i = 0; i < number_of_blocks; i++) {
+ blocks[i]->Detach();
+ global_handles->non_full_blocks_.InsertAsTail(blocks[i]);
+ }
+#ifdef DEBUG
+ // Check sorting
+ BlockList* anchor = &global_handles->non_full_blocks_;
+ int last_size = NodeBlock::kSize;
+ for (BlockListIterator it(anchor); !it.done(); it.Advance()) {
+ ASSERT(it.block()->used_nodes() <= last_size);
+ last_size = it.block()->used_nodes();
+ }
+#endif
}
-GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
- intptr_t ptr = reinterpret_cast<intptr_t>(this);
- ptr = ptr - index_ * sizeof(Node);
- NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
- ASSERT(block->node_at(index_) == this);
- return block;
+#ifdef DEBUG
+void GlobalHandles::VerifyBlockInvariants() {
+ int number_of_blocks = 0;
+ int number_of_handles = 0;
+ for (int i = 0; i < kAllAnchorsSize; i++) {
+ for (BlockListIterator it(all_anchors_[i]); !it.done(); it.Advance()) {
+ BlockList* block = it.block();
+ number_of_blocks++;
+ int used_nodes = block->used_nodes();
+ number_of_handles += used_nodes;
+ int unused_nodes = block->LengthOfFreeList();
+ ASSERT_EQ(used_nodes + unused_nodes, NodeBlock::kSize);
+ if (all_anchors_[i] == &full_blocks_) {
+ ASSERT_EQ(NodeBlock::kSize, used_nodes);
+ } else {
+ ASSERT_NE(NodeBlock::kSize, used_nodes);
+ }
+ }
+ }
+ ASSERT_EQ(number_of_handles, number_of_global_handles_);
+ ASSERT_EQ(number_of_blocks, number_of_blocks_);
}
+#endif
-void GlobalHandles::Node::IncreaseBlockUses() {
- NodeBlock* node_block = FindBlock();
- node_block->IncreaseUses();
- GlobalHandles* global_handles = node_block->global_handles();
- global_handles->isolate()->counters()->global_handles()->Increment();
- global_handles->number_of_global_handles_++;
+void GlobalHandles::SortBlocks(bool shouldPrune) {
+#ifdef DEBUG
+ VerifyBlockInvariants();
+#endif
+ BlockList::SortBlocks(this, shouldPrune);
+#ifdef DEBUG
+ VerifyBlockInvariants();
+#endif
}
-void GlobalHandles::Node::DecreaseBlockUses() {
- NodeBlock* node_block = FindBlock();
- GlobalHandles* global_handles = node_block->global_handles();
- parameter_or_next_free_.next_free = global_handles->first_free_;
- global_handles->first_free_ = this;
- node_block->DecreaseUses();
- global_handles->isolate()->counters()->global_handles()->Decrement();
- global_handles->number_of_global_handles_--;
+GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
+ return FindBlock()->global_handles();
+}
+
+
+GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
+ return NodeBlock::From(this, index_);
+}
+
+
+void GlobalHandles::Node::ReleaseFromBlock() {
+ FindBlock()->Release(this);
}
class GlobalHandles::NodeIterator {
public:
explicit NodeIterator(GlobalHandles* global_handles)
- : block_(global_handles->first_used_block_),
- index_(0) {}
+ : all_anchors_(global_handles->all_anchors_),
+ block_(all_anchors_[0]),
+ anchor_index_(0),
+ node_index_(0) {
+ AdvanceBlock();
+ }
- bool done() const { return block_ == NULL; }
+ bool done() const {
+ return anchor_index_ == kAllAnchorsSize;
+ }
Node* node() const {
ASSERT(!done());
- return block_->node_at(index_);
+ return NodeBlock::Cast(block_)->node_at(node_index_);
}
void Advance() {
ASSERT(!done());
- if (++index_ < NodeBlock::kSize) return;
- index_ = 0;
- block_ = block_->next_used();
+ if (++node_index_ < NodeBlock::kSize) return;
+ node_index_ = 0;
+ AdvanceBlock();
}
+ typedef int CountArray[Node::NUMBER_OF_STATES];
+ static int CollectStats(GlobalHandles* global_handles, CountArray counts);
+
private:
- NodeBlock* block_;
- int index_;
+ void AdvanceBlock() {
+ ASSERT(!done());
+ while (true) {
+ block_ = block_->next();
+ // block is valid
+ if (block_ != all_anchors_[anchor_index_]) {
+ ASSERT(!done());
+ ASSERT(!block_->IsAnchor());
+ // skip empty blocks
+ if (block_->IsUnused()) continue;
+ return;
+ }
+ // jump lists
+ anchor_index_++;
+ if (anchor_index_ == kAllAnchorsSize) break;
+ block_ = all_anchors_[anchor_index_];
+ }
+ ASSERT(done());
+ }
+
+ BlockList* const * const all_anchors_;
+ BlockList* block_;
+ int anchor_index_;
+ int node_index_;
DISALLOW_COPY_AND_ASSIGN(NodeIterator);
};
+int GlobalHandles::NodeIterator::CollectStats(GlobalHandles* global_handles,
+ CountArray counts) {
+ static const int kSize = Node::NUMBER_OF_STATES;
+ for (int i = 0; i < kSize; i++) {
+ counts[i] = 0;
+ }
+ int total = 0;
+ for (NodeIterator it(global_handles); !it.done(); it.Advance()) {
+ total++;
+ Node::State state = it.node()->state();
+ ASSERT(state >= 0 && state < kSize);
+ counts[state]++;
+ }
+ // NodeIterator skips empty blocks
+ int skipped = global_handles->number_of_blocks_ * NodeBlock::kSize - total;
+ total += skipped;
+ counts[Node::FREE] += total;
+ return total;
+}
+
+
GlobalHandles::GlobalHandles(Isolate* isolate)
: isolate_(isolate),
+ number_of_blocks_(0),
number_of_global_handles_(0),
- first_block_(NULL),
- first_used_block_(NULL),
- first_free_(NULL),
post_gc_processing_count_(0),
- object_group_connections_(kObjectGroupConnectionsCapacity) {}
+ object_group_connections_(kObjectGroupConnectionsCapacity) {
+ all_anchors_[0] = &full_blocks_;
+ all_anchors_[1] = &non_full_blocks_;
+}
GlobalHandles::~GlobalHandles() {
- NodeBlock* block = first_block_;
- while (block != NULL) {
- NodeBlock* tmp = block->next();
- delete block;
- block = tmp;
+ for (int i = 0; i < kAllAnchorsSize; i++) {
+ BlockList* block = all_anchors_[i]->next();
+ while (block != all_anchors_[i]) {
+ BlockList* tmp = block->next();
+ block->Detach();
+ delete NodeBlock::Cast(block);
+ block = tmp;
+ }
}
- first_block_ = NULL;
}
Handle<Object> GlobalHandles::Create(Object* value) {
- if (first_free_ == NULL) {
- first_block_ = new NodeBlock(this, first_block_);
- first_block_->PutNodesOnFreeList(&first_free_);
- }
- ASSERT(first_free_ != NULL);
- // Take the first node in the free list.
- Node* result = first_free_;
- first_free_ = result->next_free();
- result->Acquire(value);
+ if (non_full_blocks_.IsDetached()) {
+ new NodeBlock(this);
+ ASSERT(!non_full_blocks_.IsDetached());
+ }
+ ASSERT(non_full_blocks_.IsAnchor());
+ ASSERT(!non_full_blocks_.next()->IsAnchor());
+ Node* result = NodeBlock::Cast(non_full_blocks_.next())->Acquire(value);
if (isolate_->heap()->InNewSpace(value) &&
!result->is_in_new_space_list()) {
new_space_nodes_.Add(result);
@@ -661,21 +898,32 @@ bool GlobalHandles::PostGarbageCollectionProcessing(
}
}
} else {
- for (NodeIterator it(this); !it.done(); it.Advance()) {
- if (!it.node()->IsRetainer()) {
- // Free nodes do not have weak callbacks. Do not use them to compute
- // the next_gc_likely_to_collect_more.
- continue;
+ // Must cache all blocks, as NodeIterator can't survive mutation.
+ List<NodeBlock*> blocks(number_of_blocks_);
+ for (int i = 0; i < kAllAnchorsSize; i++) {
+ for (BlockListIterator it(all_anchors_[i]); !it.done(); it.Advance()) {
+ blocks.Add(NodeBlock::Cast(it.block()));
}
- it.node()->clear_partially_dependent();
- if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
- if (initial_post_gc_processing_count != post_gc_processing_count_) {
- // See the comment above.
- return next_gc_likely_to_collect_more;
+ }
+ for (int block_index = 0; block_index < blocks.length(); block_index++) {
+ NodeBlock* block = blocks[block_index];
+ for (int node_index = 0; node_index < NodeBlock::kSize; node_index++) {
+ Node* node = block->node_at(node_index);
+ if (!node->IsRetainer()) {
+ // Free nodes do not have weak callbacks. Do not use them to compute
+ // the next_gc_likely_to_collect_more.
+ continue;
+ }
+ node->clear_partially_dependent();
+ if (node->PostGarbageCollectionProcessing(isolate_)) {
+ if (initial_post_gc_processing_count != post_gc_processing_count_) {
+ // See the comment above.
+ return next_gc_likely_to_collect_more;
+ }
+ }
+ if (!node->IsRetainer()) {
+ next_gc_likely_to_collect_more = true;
}
- }
- if (!it.node()->IsRetainer()) {
- next_gc_likely_to_collect_more = true;
}
}
}
@@ -698,6 +946,8 @@ bool GlobalHandles::PostGarbageCollectionProcessing(
}
}
new_space_nodes_.Rewind(last);
+ bool shouldPruneBlocks = collector != SCAVENGER;
+ SortBlocks(shouldPruneBlocks);
return next_gc_likely_to_collect_more;
}
@@ -765,48 +1015,30 @@ int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
void GlobalHandles::RecordStats(HeapStats* stats) {
- *stats->global_handle_count = 0;
- *stats->weak_global_handle_count = 0;
- *stats->pending_global_handle_count = 0;
- *stats->near_death_global_handle_count = 0;
- *stats->free_global_handle_count = 0;
- for (NodeIterator it(this); !it.done(); it.Advance()) {
- *stats->global_handle_count += 1;
- if (it.node()->state() == Node::WEAK) {
- *stats->weak_global_handle_count += 1;
- } else if (it.node()->state() == Node::PENDING) {
- *stats->pending_global_handle_count += 1;
- } else if (it.node()->state() == Node::NEAR_DEATH) {
- *stats->near_death_global_handle_count += 1;
- } else if (it.node()->state() == Node::FREE) {
- *stats->free_global_handle_count += 1;
- }
- }
+ NodeIterator::CountArray counts;
+ int total = NodeIterator::CollectStats(this, counts);
+ *stats->global_handle_count = total;
+ *stats->weak_global_handle_count = counts[Node::WEAK];
+ *stats->pending_global_handle_count = counts[Node::PENDING];
+ *stats->near_death_global_handle_count = counts[Node::NEAR_DEATH];
+ *stats->free_global_handle_count = counts[Node::FREE];
}
+
#ifdef DEBUG
void GlobalHandles::PrintStats() {
- int total = 0;
- int weak = 0;
- int pending = 0;
- int near_death = 0;
- int destroyed = 0;
-
- for (NodeIterator it(this); !it.done(); it.Advance()) {
- total++;
- if (it.node()->state() == Node::WEAK) weak++;
- if (it.node()->state() == Node::PENDING) pending++;
- if (it.node()->state() == Node::NEAR_DEATH) near_death++;
- if (it.node()->state() == Node::FREE) destroyed++;
- }
-
+ NodeIterator::CountArray counts;
+ int total = NodeIterator::CollectStats(this, counts);
+ size_t total_consumed = sizeof(NodeBlock) * number_of_blocks_;
PrintF("Global Handle Statistics:\n");
- PrintF(" allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
- PrintF(" # weak = %d\n", weak);
- PrintF(" # pending = %d\n", pending);
- PrintF(" # near_death = %d\n", near_death);
- PrintF(" # free = %d\n", destroyed);
+ PrintF(" allocated blocks = %d\n", number_of_blocks_);
+ PrintF(" allocated memory = %" V8_PTR_PREFIX "dB\n", total_consumed);
+ PrintF(" # normal = %d\n", counts[Node::NORMAL]);
+ PrintF(" # weak = %d\n", counts[Node::WEAK]);
+ PrintF(" # pending = %d\n", counts[Node::PENDING]);
+ PrintF(" # near_death = %d\n", counts[Node::NEAR_DEATH]);
+ PrintF(" # free = %d\n", counts[Node::FREE]);
PrintF(" # total = %d\n", total);
}
@@ -1018,4 +1250,68 @@ void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
}
+EternalHandles::EternalHandles() : size_(0) {
+ STATIC_ASSERT(v8::kUninitializedEternalIndex == kInvalidIndex);
+ for (unsigned i = 0; i < ARRAY_SIZE(singleton_handles_); i++) {
+ singleton_handles_[i] = kInvalidIndex;
+ }
+}
+
+
+EternalHandles::~EternalHandles() {
+ for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
+}
+
+
+void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
+ int limit = size_;
+ for (int i = 0; i < blocks_.length(); i++) {
+ ASSERT(limit > 0);
+ Object** block = blocks_[i];
+ visitor->VisitPointers(block, block + Min(limit, kSize));
+ limit -= kSize;
+ }
+}
+
+
+void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
+ for (int i = 0; i < new_space_indices_.length(); i++) {
+ visitor->VisitPointer(GetLocation(new_space_indices_[i]));
+ }
+}
+
+
+void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
+ int last = 0;
+ for (int i = 0; i < new_space_indices_.length(); i++) {
+ int index = new_space_indices_[i];
+ if (heap->InNewSpace(*GetLocation(index))) {
+ new_space_indices_[last++] = index;
+ }
+ }
+ new_space_indices_.Rewind(last);
+}
+
+
+int EternalHandles::Create(Isolate* isolate, Object* object) {
+ if (object == NULL) return kInvalidIndex;
+ ASSERT_NE(isolate->heap()->the_hole_value(), object);
+ int block = size_ >> kShift;
+ int offset = size_ & kMask;
+ // need to resize
+ if (offset == 0) {
+ Object** next_block = new Object*[kSize];
+ Object* the_hole = isolate->heap()->the_hole_value();
+ MemsetPointer(next_block, the_hole, kSize);
+ blocks_.Add(next_block);
+ }
+ ASSERT_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
+ blocks_[block][offset] = object;
+ if (isolate->heap()->InNewSpace(object)) {
+ new_space_indices_.Add(size_);
+ }
+ return size_++;
+}
+
+
} } // namespace v8::internal
diff --git a/deps/v8/src/global-handles.h b/deps/v8/src/global-handles.h
index cd75133a24..2c20711ea1 100644
--- a/deps/v8/src/global-handles.h
+++ b/deps/v8/src/global-handles.h
@@ -31,6 +31,7 @@
#include "../include/v8.h"
#include "../include/v8-profiler.h"
+#include "handles.h"
#include "list.h"
#include "v8utils.h"
@@ -156,6 +157,9 @@ class GlobalHandles {
return number_of_global_handles_;
}
+ // Returns the current number of allocated blocks
+ int block_count() const { return number_of_blocks_; }
+
// Clear the weakness of a global handle.
static void ClearWeakness(Object** location);
@@ -275,11 +279,14 @@ class GlobalHandles {
#ifdef DEBUG
void PrintStats();
void Print();
+ void VerifyBlockInvariants();
#endif
private:
explicit GlobalHandles(Isolate* isolate);
+ void SortBlocks(bool shouldPrune);
+
// Migrates data from the internal representation (object_group_connections_,
// retainer_infos_ and implicit_ref_connections_) to the public and more
// efficient representation (object_groups_ and implicit_ref_groups_).
@@ -293,20 +300,64 @@ class GlobalHandles {
class Node;
class NodeBlock;
class NodeIterator;
+ class BlockListIterator;
+ // Base class for NodeBlock
+ class BlockList {
+ public:
+ BlockList();
+ ~BlockList() { ASSERT(IsDetached()); }
+ void Detach();
+ void InsertAsHead(BlockList* block) {
+ ASSERT(IsAnchor());
+ InsertAsNext(block);
+ }
+ void InsertAsTail(BlockList* block) {
+ ASSERT(IsAnchor());
+ prev_block_->InsertAsNext(block);
+ }
+ inline bool IsAnchor() { return first_free_ == NULL && used_nodes_ == 0; }
+ inline bool IsDetached() {
+ ASSERT_EQ(prev_block_ == this, next_block_ == this);
+ return prev_block_ == this;
+ }
+ bool HasAtLeastLength(int length);
+ bool IsUnused() { return used_nodes_ == 0; }
+ int used_nodes() const { return used_nodes_; }
+ BlockList* next() { return next_block_; }
+ BlockList* prev() { return prev_block_; }
+#ifdef DEBUG
+ int LengthOfFreeList();
+#endif
+ static void SortBlocks(GlobalHandles* global_handles, bool prune);
+
+ protected:
+ BlockList* prev_block_;
+ BlockList* next_block_;
+ Node* first_free_;
+ int used_nodes_;
+
+ private:
+ // Needed for quicksort
+ static int CompareBlocks(const void* a, const void* b);
+ void InsertAsNext(BlockList* block);
+ DISALLOW_COPY_AND_ASSIGN(BlockList);
+ };
Isolate* isolate_;
+ // Field always containing the number of blocks allocated.
+ int number_of_blocks_;
// Field always containing the number of handles to global objects.
int number_of_global_handles_;
- // List of all allocated node blocks.
- NodeBlock* first_block_;
+ // Anchors for doubly linked lists of blocks
+ BlockList full_blocks_;
+ BlockList non_full_blocks_;
- // List of node blocks with used nodes.
- NodeBlock* first_used_block_;
-
- // Free list of nodes.
- Node* first_free_;
+ // An array of all the anchors held by GlobalHandles.
+ // This simplifies iteration across all blocks.
+ static const int kAllAnchorsSize = 2;
+ BlockList* all_anchors_[kAllAnchorsSize];
// Contains all nodes holding new space objects. Note: when the list
// is accessed, some of the objects may have been promoted already.
@@ -331,6 +382,76 @@ class GlobalHandles {
};
+class EternalHandles {
+ public:
+ enum SingletonHandle {
+ I18N_TEMPLATE_ONE,
+ I18N_TEMPLATE_TWO,
+
+ NUMBER_OF_SINGLETON_HANDLES
+ };
+
+ EternalHandles();
+ ~EternalHandles();
+
+ int NumberOfHandles() { return size_; }
+
+ // Create an EternalHandle, returning the index.
+ int Create(Isolate* isolate, Object* object);
+
+ // Grab the handle for an existing EternalHandle.
+ inline Handle<Object> Get(int index) {
+ return Handle<Object>(GetLocation(index));
+ }
+
+ // Grab the handle for an existing SingletonHandle.
+ inline Handle<Object> GetSingleton(SingletonHandle singleton) {
+ ASSERT(Exists(singleton));
+ return Get(singleton_handles_[singleton]);
+ }
+
+ // Checks whether a SingletonHandle has been assigned.
+ inline bool Exists(SingletonHandle singleton) {
+ return singleton_handles_[singleton] != kInvalidIndex;
+ }
+
+ // Assign a SingletonHandle to an empty slot and returns the handle.
+ Handle<Object> CreateSingleton(Isolate* isolate,
+ Object* object,
+ SingletonHandle singleton) {
+ ASSERT(singleton_handles_[singleton] == kInvalidIndex);
+ singleton_handles_[singleton] = Create(isolate, object);
+ return Get(singleton_handles_[singleton]);
+ }
+
+ // Iterates over all handles.
+ void IterateAllRoots(ObjectVisitor* visitor);
+ // Iterates over all handles which might be in new space.
+ void IterateNewSpaceRoots(ObjectVisitor* visitor);
+ // Rebuilds new space list.
+ void PostGarbageCollectionProcessing(Heap* heap);
+
+ private:
+ static const int kInvalidIndex = -1;
+ static const int kShift = 8;
+ static const int kSize = 1 << kShift;
+ static const int kMask = 0xff;
+
+ // Gets the slot for an index
+ inline Object** GetLocation(int index) {
+ ASSERT(index >= 0 && index < size_);
+ return &blocks_[index >> kShift][index & kMask];
+ }
+
+ int size_;
+ List<Object**> blocks_;
+ List<int> new_space_indices_;
+ int singleton_handles_[NUMBER_OF_SINGLETON_HANDLES];
+
+ DISALLOW_COPY_AND_ASSIGN(EternalHandles);
+};
+
+
} } // namespace v8::internal
#endif // V8_GLOBAL_HANDLES_H_
diff --git a/deps/v8/src/globals.h b/deps/v8/src/globals.h
index 627c951236..26fd53114c 100644
--- a/deps/v8/src/globals.h
+++ b/deps/v8/src/globals.h
@@ -28,172 +28,72 @@
#ifndef V8_GLOBALS_H_
#define V8_GLOBALS_H_
-// ----------------------------------------------------------------------------
-// Operating system detection (V8_OS_x)
-//
-// ANDROID - Android
-// BSD4 - Any BSD 4.4 system
-// CYGWIN - Cygwin
-// DARWIN - Darwin / Mac OS X
-// FREEBSD - FreeBSD
-// LINUX - Linux
-// NACL - Native Client
-// NETBSD - NetBSD
-// OPENBSD - OpenBSD
-// SOLARIS - Solaris
-// UNIX - Any UNIX BSD/SYSV system
-// WIN32 - Win32 (Windows 2000/XP/Vista/7 and Windows Server 2003/2008)
-
-#if defined(ANDROID) || defined(__ANDROID__)
-# define V8_OS_ANDROID 1
-# define V8_OS_LINUX 1
-# define V8_OS_UNIX 1
-#elif defined(__APPLE__) && defined(__MACH__)
-# define V8_OS_DARWIN 1
-# define V8_OS_BSD4 1
-# define V8_OS_UNIX 1
-#elif defined(__CYGWIN__)
-# define V8_OS_CYGWIN 1
-# define V8_OS_UNIX 1
-#elif defined(WIN64) || defined(_WIN64) || defined(__WIN64__)
-# define V8_OS_WIN32 1
-# define V8_OS_WIN64 1
-#elif defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || \
- (defined(__MWERKS__) && defined(__INTEL__))
-# define V8_OS_WIN32 1
-#elif defined(__sun) || defined(sun)
-# define V8_OS_SOLARIS 1
-# define V8_OS_UNIX 1
-#elif defined(__native_client__)
-# define V8_OS_NACL 1
-#elif defined(__linux__) || defined(__linux)
-# define V8_OS_LINUX 1
-# define V8_OS_UNIX 1
-#elif defined(__FreeBSD__) || defined(__DragonFly__)
-# define V8_OS_FREEBSD 1
-# define V8_OS_BSD4 1
-# define V8_OS_UNIX 1
-#elif defined(__NetBSD__)
-# define V8_OS_NETBSD 1
-# define V8_OS_BSD4 1
-# define V8_OS_UNIX 1
-#elif defined(__OpenBSD__)
-# define V8_OS_OPENBSD 1
-# define V8_OS_BSD4 1
-# define V8_OS_UNIX 1
-#else
-# error Operating system was not detected as supported by v8
-#endif
+// Define V8_INFINITY
+#define V8_INFINITY INFINITY
+// GCC specific stuff
+#ifdef __GNUC__
-// ----------------------------------------------------------------------------
-// Compiler detection (V8_CC_x)
-//
-// CLANG - C++ front-end for the LLVM compiler
-// GNU - GNU C++ or compatible
-// INTEL - Intel C++ for Linux or Windows
-// MINGW - Minimalistic GNU for Windows Compiler
-// MIPS - MIPSpro C++
-// MSVC - Microsoft Visual C/C++ or compatible
-// RVCT - ARM Realview Compiler Suite
+#define __GNUC_VERSION_FOR_INFTY__ (__GNUC__ * 10000 + __GNUC_MINOR__ * 100)
-#if defined(_MSC_VER)
-# define V8_CC_MSVC 1
-# if defined(__INTEL_COMPILER)
-# define V8_CC_INTEL 1
-# endif
-#elif defined(__GNUC__)
-# define V8_CC_GNU 1
-# if defined(__MINGW64__)
-# define V8_CC_MINGW 1
-# define V8_CC_MINGW64 1
-# elif defined(__MINGW32__)
-# define V8_CC_MINGW 1
-# define V8_CC_MINGW32 1
-# elif defined(__ARMCC__) || defined(__CC_ARM)
-# define V8_CC_RVCT 1 // ARM Realview Compiler Suite also masquerades as GCC
-# elif defined(__INTEL_COMPILER)
-# define V8_CC_INTEL 1 // Intel C++ also masquerades as GCC 3.2.0
-# elif defined(__clang__)
-# define V8_CC_CLANG 1 // Clang also masquerades as GCC 4.2.1
-# endif
-#elif defined(__ARMCC__) || defined(__CC_ARM)
-# define V8_CC_RVCT 1
-#elif defined(__INTEL_COMPILER)
-# define V8_CC_INTEL 1
-#elif defined(__SUNPRO_CC) || defined(__SUNPRO_C)
-# define V8_CC_SUN 1
-#else
-# error Compiler was not detected as supported by v8
+// Unfortunately, the INFINITY macro cannot be used with the '-pedantic'
+// warning flag and certain versions of GCC due to a bug:
+// http://gcc.gnu.org/bugzilla/show_bug.cgi?id=11931
+// For now, we use the more involved template-based version from <limits>, but
+// only when compiling with GCC versions affected by the bug (2.96.x - 4.0.x)
+// __GNUC_PREREQ is not defined in GCC for Mac OS X, so we define our own macro
+#if __GNUC_VERSION_FOR_INFTY__ >= 29600 && __GNUC_VERSION_FOR_INFTY__ < 40100
+#include <limits>
+#undef V8_INFINITY
+#define V8_INFINITY std::numeric_limits<double>::infinity()
#endif
+#undef __GNUC_VERSION_FOR_INFTY__
-#if V8_CC_GNU
-# define V8_GNUC_PREREQ(major, minor) \
- (__GNUC__ > (major) || (__GNUC__ == (major) && __GNUC_MINOR__ >= (minor)))
-#else
-# define V8_GNUC_PREREQ(major, minor) 0
-#endif // V8_CC_GNU
-
-
-// ----------------------------------------------------------------------------
-// Compiler features
+#endif // __GNUC__
-// C++11 deleted functions
-#if __cplusplus >= 201103L
-# define V8_CXX_DELETED_FUNCTIONS 1
-#elif V8_CC_CLANG
-# define V8_CXX_DELETED_FUNCTIONS __has_feature(cxx_deleted_functions)
-#else
-# define V8_CXX_DELETED_FUNCTIONS (defined(__GXX_EXPERIMENTAL_CXX0X__) && \
- V8_GNUC_PREREQ(4, 4))
+#ifdef _MSC_VER
+#undef V8_INFINITY
+#define V8_INFINITY HUGE_VAL
#endif
-// C++11 static_assert()
-#if __cplusplus >= 201103L
-# define V8_CXX_STATIC_ASSERT 1
-#elif V8_CC_CLANG
-# define V8_CXX_STATIC_ASSERT (__has_extension(cxx_static_assert) || \
- __has_feature(cxx_static_assert))
-#else
-# define V8_CXX_STATIC_ASSERT (defined(__GXX_EXPERIMENTAL_CXX0X__) && \
- V8_GNUC_PREREQ(4, 3))
-#endif
+#include "../include/v8stdint.h"
+
+namespace v8 {
+namespace internal {
-// ----------------------------------------------------------------------------
-// Host architecture detection. For more info on what's defined, see:
+// Processor architecture detection. For more info on what's defined, see:
// http://msdn.microsoft.com/en-us/library/b0084kay.aspx
// http://www.agner.org/optimize/calling_conventions.pdf
// or with gcc, run: "echo | gcc -E -dM -"
-
#if defined(_M_X64) || defined(__x86_64__)
-# if V8_OS_NACL
+#if defined(__native_client__)
// For Native Client builds of V8, use V8_TARGET_ARCH_ARM, so that V8
// generates ARM machine code, together with a portable ARM simulator
// compiled for the host architecture in question.
//
// Since Native Client is ILP-32 on all architectures we use
// V8_HOST_ARCH_IA32 on both 32- and 64-bit x86.
-# define V8_HOST_ARCH_IA32 1
-# define V8_HOST_ARCH_32_BIT 1
-# define V8_HOST_CAN_READ_UNALIGNED 1
-# else
-# define V8_HOST_ARCH_X64 1
-# define V8_HOST_ARCH_64_BIT 1
-# define V8_HOST_CAN_READ_UNALIGNED 1
-# endif // V8_OS_NACL
+#define V8_HOST_ARCH_IA32 1
+#define V8_HOST_ARCH_32_BIT 1
+#define V8_HOST_CAN_READ_UNALIGNED 1
+#else
+#define V8_HOST_ARCH_X64 1
+#define V8_HOST_ARCH_64_BIT 1
+#define V8_HOST_CAN_READ_UNALIGNED 1
+#endif // __native_client__
#elif defined(_M_IX86) || defined(__i386__)
-# define V8_HOST_ARCH_IA32 1
-# define V8_HOST_ARCH_32_BIT 1
-# define V8_HOST_CAN_READ_UNALIGNED 1
+#define V8_HOST_ARCH_IA32 1
+#define V8_HOST_ARCH_32_BIT 1
+#define V8_HOST_CAN_READ_UNALIGNED 1
#elif defined(__ARMEL__)
-# define V8_HOST_ARCH_ARM 1
-# define V8_HOST_ARCH_32_BIT 1
+#define V8_HOST_ARCH_ARM 1
+#define V8_HOST_ARCH_32_BIT 1
#elif defined(__MIPSEL__)
-# define V8_HOST_ARCH_MIPS 1
-# define V8_HOST_ARCH_32_BIT 1
+#define V8_HOST_ARCH_MIPS 1
+#define V8_HOST_ARCH_32_BIT 1
#else
-# error Host architecture was not detected as supported by v8
+#error Host architecture was not detected as supported by v8
#endif
#if defined(__ARM_ARCH_7A__) || \
@@ -201,125 +101,68 @@
defined(__ARM_ARCH_7__)
# define CAN_USE_ARMV7_INSTRUCTIONS 1
# ifndef CAN_USE_VFP3_INSTRUCTIONS
-# define CAN_USE_VFP3_INSTRUCTIONS 1
+# define CAN_USE_VFP3_INSTRUCTIONS
# endif
#endif
-// ----------------------------------------------------------------------------
// Target architecture detection. This may be set externally. If not, detect
// in the same way as the host architecture, that is, target the native
// environment as presented by the compiler.
-
#if !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_IA32 && \
!V8_TARGET_ARCH_ARM && !V8_TARGET_ARCH_MIPS
-# if V8_HOST_ARCH_X64
-# define V8_TARGET_ARCH_X64 1
-# elif V8_HOST_ARCH_IA32
-# define V8_TARGET_ARCH_IA32 1
-# elif V8_HOST_ARCH_ARM
-# define V8_TARGET_ARCH_ARM 1
-# elif V8_HOST_ARCH_MIPS
-# define V8_TARGET_ARCH_MIPS 1
-# else
-# error Target architecture was not detected as supported by v8
-# endif
+#if defined(_M_X64) || defined(__x86_64__)
+#define V8_TARGET_ARCH_X64 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define V8_TARGET_ARCH_IA32 1
+#elif defined(__ARMEL__)
+#define V8_TARGET_ARCH_ARM 1
+#elif defined(__MIPSEL__)
+#define V8_TARGET_ARCH_MIPS 1
+#else
+#error Target architecture was not detected as supported by v8
+#endif
#endif
// Check for supported combinations of host and target architectures.
#if V8_TARGET_ARCH_IA32 && !V8_HOST_ARCH_IA32
-# error Target architecture ia32 is only supported on ia32 host
-#elif V8_TARGET_ARCH_X64 && !V8_HOST_ARCH_X64
-# error Target architecture x64 is only supported on x64 host
-#elif V8_TARGET_ARCH_ARM && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_ARM)
-# error Target architecture arm is only supported on arm and ia32 host
-#elif V8_TARGET_ARCH_MIPS && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_MIPS)
-# error Target architecture mips is only supported on mips and ia32 host
+#error Target architecture ia32 is only supported on ia32 host
+#endif
+#if V8_TARGET_ARCH_X64 && !V8_HOST_ARCH_X64
+#error Target architecture x64 is only supported on x64 host
+#endif
+#if (V8_TARGET_ARCH_ARM && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_ARM))
+#error Target architecture arm is only supported on arm and ia32 host
+#endif
+#if (V8_TARGET_ARCH_MIPS && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_MIPS))
+#error Target architecture mips is only supported on mips and ia32 host
#endif
// Determine whether we are running in a simulated environment.
// Setting USE_SIMULATOR explicitly from the build script will force
// the use of a simulated environment.
#if !defined(USE_SIMULATOR)
-# if V8_TARGET_ARCH_ARM && !V8_HOST_ARCH_ARM
-# define USE_SIMULATOR 1
-# elif V8_TARGET_ARCH_MIPS && !V8_HOST_ARCH_MIPS
-# define USE_SIMULATOR 1
-# endif
+#if (V8_TARGET_ARCH_ARM && !V8_HOST_ARCH_ARM)
+#define USE_SIMULATOR 1
+#endif
+#if (V8_TARGET_ARCH_MIPS && !V8_HOST_ARCH_MIPS)
+#define USE_SIMULATOR 1
#endif
-
-// Determine architecture endiannes (we only support little-endian).
-#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || \
- V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_MIPS
-# define V8_TARGET_LITTLE_ENDIAN 1
-#else
-# error Unknown target architecture endiannes
#endif
-
-// ----------------------------------------------------------------------------
-// Define our own macros for writing 64-bit constants. This is less fragile
-// than defining __STDC_CONSTANT_MACROS before including <stdint.h>, and it
-// works on compilers that don't have it (like MSVC).
-#if V8_HOST_ARCH_64_BIT
-# if V8_CC_MSVC
-# define V8_UINT64_C(x) (x ## UI64)
-# define V8_INT64_C(x) (x ## I64)
-# define V8_INTPTR_C(x) (x ## I64)
-# define V8_PTR_PREFIX "ll"
-# elif V8_CC_MINGW
-# define V8_UINT64_C(x) (x ## ULL)
-# define V8_INT64_C(x) (x ## LL)
-# define V8_INTPTR_C(x) (x ## LL)
-# define V8_PTR_PREFIX "I64"
-# else
-# define V8_UINT64_C(x) (x ## UL)
-# define V8_INT64_C(x) (x ## L)
-# define V8_INTPTR_C(x) (x ## L)
-# define V8_PTR_PREFIX "l"
-# endif
-#else // V8_HOST_ARCH_64_BIT
-# define V8_INTPTR_C(x) (x)
-# define V8_PTR_PREFIX ""
-#endif // V8_HOST_ARCH_64_BIT
-
-// The following macro works on both 32 and 64-bit platforms.
-// Usage: instead of writing 0x1234567890123456
-// write V8_2PART_UINT64_C(0x12345678,90123456);
-#define V8_2PART_UINT64_C(a, b) (((static_cast<uint64_t>(a) << 32) + 0x##b##u))
-
-#if V8_OS_DARWIN
-// Fix for Mac OS X defining uintptr_t as "unsigned long":
-# define V8PRIxPTR "lx"
-#else
-# define V8PRIxPTR V8_PTR_PREFIX "x"
-#endif // V8_OS_DARWIN
-#define V8PRIdPTR V8_PTR_PREFIX "d"
-#define V8PRIuPTR V8_PTR_PREFIX "u"
-
-
-// ----------------------------------------------------------------------------
-// Define V8_INFINITY
-#if V8_GNUC_PREREQ(2, 96) && !V8_GNUC_PREREQ(4, 1)
-// Unfortunately, the INFINITY macro cannot be used with the '-pedantic'
-// warning flag and certain versions of GCC due to a bug:
-// http://gcc.gnu.org/bugzilla/show_bug.cgi?id=11931
-// For now, we use the more involved template-based version from <limits>, but
-// only when compiling with GCC versions affected by the bug (2.96.x - 4.0.x)
-# include <limits>
-# define V8_INFINITY std::numeric_limits<double>::infinity()
-#elif V8_CC_MSVC
-# define V8_INFINITY HUGE_VAL
+// Determine architecture endiannes (we only support little-endian).
+#if V8_TARGET_ARCH_IA32
+#define V8_TARGET_LITTLE_ENDIAN 1
+#elif V8_TARGET_ARCH_X64
+#define V8_TARGET_LITTLE_ENDIAN 1
+#elif V8_TARGET_ARCH_ARM
+#define V8_TARGET_LITTLE_ENDIAN 1
+#elif V8_TARGET_ARCH_MIPS
+#define V8_TARGET_LITTLE_ENDIAN 1
#else
-# define V8_INFINITY INFINITY
+#error Unknown target architecture endiannes
#endif
-
-#include "../include/v8stdint.h"
-
-namespace v8 {
-namespace internal {
-
// Support for alternative bool type. This is only enabled if the code is
// compiled with USE_MYBOOL defined. This catches some nasty type bugs.
// For instance, 'bool b = "false";' results in b == true! This is a hidden
@@ -340,6 +183,51 @@ typedef unsigned int __my_bool__;
typedef uint8_t byte;
typedef byte* Address;
+// Define our own macros for writing 64-bit constants. This is less fragile
+// than defining __STDC_CONSTANT_MACROS before including <stdint.h>, and it
+// works on compilers that don't have it (like MSVC).
+#if V8_HOST_ARCH_64_BIT
+#if defined(_MSC_VER)
+#define V8_UINT64_C(x) (x ## UI64)
+#define V8_INT64_C(x) (x ## I64)
+#define V8_INTPTR_C(x) (x ## I64)
+#define V8_PTR_PREFIX "ll"
+#elif defined(__MINGW64__)
+#define V8_UINT64_C(x) (x ## ULL)
+#define V8_INT64_C(x) (x ## LL)
+#define V8_INTPTR_C(x) (x ## LL)
+#define V8_PTR_PREFIX "I64"
+#else
+#define V8_UINT64_C(x) (x ## UL)
+#define V8_INT64_C(x) (x ## L)
+#define V8_INTPTR_C(x) (x ## L)
+#define V8_PTR_PREFIX "l"
+#endif
+#else // V8_HOST_ARCH_64_BIT
+#define V8_INTPTR_C(x) (x)
+#define V8_PTR_PREFIX ""
+#endif // V8_HOST_ARCH_64_BIT
+
+// The following macro works on both 32 and 64-bit platforms.
+// Usage: instead of writing 0x1234567890123456
+// write V8_2PART_UINT64_C(0x12345678,90123456);
+#define V8_2PART_UINT64_C(a, b) (((static_cast<uint64_t>(a) << 32) + 0x##b##u))
+
+#define V8PRIxPTR V8_PTR_PREFIX "x"
+#define V8PRIdPTR V8_PTR_PREFIX "d"
+#define V8PRIuPTR V8_PTR_PREFIX "u"
+
+// Fix for Mac OS X defining uintptr_t as "unsigned long":
+#if defined(__APPLE__) && defined(__MACH__)
+#undef V8PRIxPTR
+#define V8PRIxPTR "lx"
+#endif
+
+#if (defined(__APPLE__) && defined(__MACH__)) || \
+ defined(__FreeBSD__) || defined(__OpenBSD__)
+#define USING_BSD_ABI
+#endif
+
// -----------------------------------------------------------------------------
// Constants
@@ -442,10 +330,10 @@ F FUNCTION_CAST(Address addr) {
}
-#if V8_CXX_DELETED_FUNCTIONS
-# define DISALLOW_BY_DELETE = delete
+#if __cplusplus >= 201103L
+#define DISALLOW_BY_DELETE = delete
#else
-# define DISALLOW_BY_DELETE
+#define DISALLOW_BY_DELETE
#endif
@@ -470,22 +358,24 @@ F FUNCTION_CAST(Address addr) {
// Define used for helping GCC to make better inlining. Don't bother for debug
// builds. On GCC 3.4.5 using __attribute__((always_inline)) causes compilation
// errors in debug build.
-#if V8_GNUC_PREREQ(4, 0) && !defined(DEBUG)
-# define INLINE(header) inline header __attribute__((always_inline))
-# define NO_INLINE(header) header __attribute__((noinline))
-#elif V8_CC_GNU && !defined(DEBUG)
-# define INLINE(header) inline __attribute__((always_inline)) header
-# define NO_INLINE(header) __attribute__((noinline)) header
-#elif V8_CC_MSVC && !defined(DEBUG)
-# define INLINE(header) __forceinline header
-# define NO_INLINE(header) header
+#if defined(__GNUC__) && !defined(DEBUG)
+#if (__GNUC__ >= 4)
+#define INLINE(header) inline header __attribute__((always_inline))
+#define NO_INLINE(header) header __attribute__((noinline))
+#else
+#define INLINE(header) inline __attribute__((always_inline)) header
+#define NO_INLINE(header) __attribute__((noinline)) header
+#endif
+#elif defined(_MSC_VER) && !defined(DEBUG)
+#define INLINE(header) __forceinline header
+#define NO_INLINE(header) header
#else
-# define INLINE(header) inline header
-# define NO_INLINE(header) header
+#define INLINE(header) inline header
+#define NO_INLINE(header) header
#endif
-#if V8_GNUC_PREREQ(4, 0)
+#if defined(__GNUC__) && __GNUC__ >= 4
#define MUST_USE_RESULT __attribute__ ((warn_unused_result))
#else
#define MUST_USE_RESULT
diff --git a/deps/v8/src/harmony-array.js b/deps/v8/src/harmony-array.js
new file mode 100644
index 0000000000..e440299ff6
--- /dev/null
+++ b/deps/v8/src/harmony-array.js
@@ -0,0 +1,124 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+'use strict';
+
+// This file relies on the fact that the following declaration has been made
+// in runtime.js:
+// var $Array = global.Array;
+
+// -------------------------------------------------------------------
+
+// ES6 draft 07-15-13, section 15.4.3.23
+function ArrayFind(predicate /* thisArg */) { // length == 1
+ if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
+ throw MakeTypeError("called_on_null_or_undefined",
+ ["Array.prototype.find"]);
+ }
+
+ var array = ToObject(this);
+ var length = ToInteger(array.length);
+
+ if (!IS_SPEC_FUNCTION(predicate)) {
+ throw MakeTypeError('called_non_callable', [predicate]);
+ }
+
+ var thisArg;
+ if (%_ArgumentsLength() > 1) {
+ thisArg = %_Arguments(1);
+ }
+
+ if (IS_NULL_OR_UNDEFINED(thisArg)) {
+ thisArg = %GetDefaultReceiver(predicate) || thisArg;
+ } else if (!IS_SPEC_OBJECT(thisArg) && %IsClassicModeFunction(predicate)) {
+ thisArg = ToObject(thisArg);
+ }
+
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ if (%_CallFunction(thisArg, element, i, array, predicate)) {
+ return element;
+ }
+ }
+ }
+
+ return;
+}
+
+
+// ES6 draft 07-15-13, section 15.4.3.24
+function ArrayFindIndex(predicate /* thisArg */) { // length == 1
+ if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
+ throw MakeTypeError("called_on_null_or_undefined",
+ ["Array.prototype.findIndex"]);
+ }
+
+ var array = ToObject(this);
+ var length = ToInteger(array.length);
+
+ if (!IS_SPEC_FUNCTION(predicate)) {
+ throw MakeTypeError('called_non_callable', [predicate]);
+ }
+
+ var thisArg;
+ if (%_ArgumentsLength() > 1) {
+ thisArg = %_Arguments(1);
+ }
+
+ if (IS_NULL_OR_UNDEFINED(thisArg)) {
+ thisArg = %GetDefaultReceiver(predicate) || thisArg;
+ } else if (!IS_SPEC_OBJECT(thisArg) && %IsClassicModeFunction(predicate)) {
+ thisArg = ToObject(thisArg);
+ }
+
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ if (%_CallFunction(thisArg, element, i, array, predicate)) {
+ return i;
+ }
+ }
+ }
+
+ return -1;
+}
+
+
+// -------------------------------------------------------------------
+
+function HarmonyArrayExtendArrayPrototype() {
+ %CheckIsBootstrapping();
+
+ // Set up the non-enumerable functions on the Array prototype object.
+ InstallFunctions($Array.prototype, DONT_ENUM, $Array(
+ "find", ArrayFind,
+ "findIndex", ArrayFindIndex
+ ));
+}
+
+HarmonyArrayExtendArrayPrototype(); \ No newline at end of file
diff --git a/deps/v8/src/harmony-string.js b/deps/v8/src/harmony-string.js
new file mode 100644
index 0000000000..a5c6f4e2ec
--- /dev/null
+++ b/deps/v8/src/harmony-string.js
@@ -0,0 +1,154 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+'use strict';
+
+// This file relies on the fact that the following declaration has been made
+// in runtime.js:
+// var $String = global.String;
+// var $Array = global.Array;
+
+// -------------------------------------------------------------------
+
+// ES6 draft 07-15-13, section 15.5.3.21
+function StringRepeat(count) {
+ if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
+ throw MakeTypeError("called_on_null_or_undefined",
+ ["String.prototype.repeat"]);
+ }
+
+ var s = TO_STRING_INLINE(this);
+ var n = ToInteger(count);
+ if (n < 0 || !NUMBER_IS_FINITE(n)) {
+ throw MakeRangeError("invalid_count_value", []);
+ }
+
+ var elements = new InternalArray(n);
+ for (var i = 0; i < n; i++) {
+ elements[i] = s;
+ }
+
+ return %StringBuilderConcat(elements, n, "");
+}
+
+
+// ES6 draft 07-15-13, section 15.5.3.22
+function StringStartsWith(searchString /* position */) { // length == 1
+ if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
+ throw MakeTypeError("called_on_null_or_undefined",
+ ["String.prototype.startsWith"]);
+ }
+
+ var s = TO_STRING_INLINE(this);
+ var ss = TO_STRING_INLINE(searchString);
+ var pos = 0;
+ if (%_ArgumentsLength() > 1) {
+ pos = %_Arguments(1); // position
+ pos = ToInteger(pos);
+ }
+
+ var s_len = s.length;
+ var start = MathMin(MathMax(pos, 0), s_len);
+ var ss_len = ss.length;
+ if (ss_len + start > s_len) {
+ return false;
+ }
+
+ return %StringIndexOf(s, ss, start) === start;
+}
+
+
+// ES6 draft 07-15-13, section 15.5.3.23
+function StringEndsWith(searchString /* position */) { // length == 1
+ if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
+ throw MakeTypeError("called_on_null_or_undefined",
+ ["String.prototype.endsWith"]);
+ }
+
+ var s = TO_STRING_INLINE(this);
+ var ss = TO_STRING_INLINE(searchString);
+ var s_len = s.length;
+ var pos = s_len;
+ if (%_ArgumentsLength() > 1) {
+ var arg = %_Arguments(1); // position
+ if (!IS_UNDEFINED(arg)) {
+ pos = ToInteger(arg);
+ }
+ }
+
+ var end = MathMin(MathMax(pos, 0), s_len);
+ var ss_len = ss.length;
+ var start = end - ss_len;
+ if (start < 0) {
+ return false;
+ }
+
+ return %StringLastIndexOf(s, ss, start) === start;
+}
+
+
+// ES6 draft 07-15-13, section 15.5.3.24
+function StringContains(searchString /* position */) { // length == 1
+ if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
+ throw MakeTypeError("called_on_null_or_undefined",
+ ["String.prototype.contains"]);
+ }
+
+ var s = TO_STRING_INLINE(this);
+ var ss = TO_STRING_INLINE(searchString);
+ var pos = 0;
+ if (%_ArgumentsLength() > 1) {
+ pos = %_Arguments(1); // position
+ pos = ToInteger(pos);
+ }
+
+ var s_len = s.length;
+ var start = MathMin(MathMax(pos, 0), s_len);
+ var ss_len = ss.length;
+ if (ss_len + start > s_len) {
+ return false;
+ }
+
+ return %StringIndexOf(s, ss, start) !== -1;
+}
+
+
+// -------------------------------------------------------------------
+
+function ExtendStringPrototype() {
+ %CheckIsBootstrapping();
+
+ // Set up the non-enumerable functions on the String prototype object.
+ InstallFunctions($String.prototype, DONT_ENUM, $Array(
+ "repeat", StringRepeat,
+ "startsWith", StringStartsWith,
+ "endsWith", StringEndsWith,
+ "contains", StringContains
+ ));
+}
+
+ExtendStringPrototype(); \ No newline at end of file
diff --git a/deps/v8/src/heap-snapshot-generator.cc b/deps/v8/src/heap-snapshot-generator.cc
index 9f9f84a01d..1c8a7b3dc4 100644
--- a/deps/v8/src/heap-snapshot-generator.cc
+++ b/deps/v8/src/heap-snapshot-generator.cc
@@ -369,6 +369,12 @@ const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
HeapObjectsMap::kGcRootsFirstSubrootId +
VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
+
+static bool AddressesMatch(void* key1, void* key2) {
+ return key1 == key2;
+}
+
+
HeapObjectsMap::HeapObjectsMap(Heap* heap)
: next_id_(kFirstAvailableObjectId),
entries_map_(AddressesMatch),
@@ -393,19 +399,20 @@ void HeapObjectsMap::MoveObject(Address from, Address to) {
ASSERT(to != NULL);
ASSERT(from != NULL);
if (from == to) return;
- void* from_value = entries_map_.Remove(from, AddressHash(from));
+ void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
if (from_value == NULL) {
// It may occur that some untracked object moves to an address X and there
// is a tracked object at that address. In this case we should remove the
// entry as we know that the object has died.
- void* to_value = entries_map_.Remove(to, AddressHash(to));
+ void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
if (to_value != NULL) {
int to_entry_info_index =
static_cast<int>(reinterpret_cast<intptr_t>(to_value));
entries_.at(to_entry_info_index).addr = NULL;
}
} else {
- HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
+ HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
+ true);
if (to_entry->value != NULL) {
// We found the existing entry with to address for an old object.
// Without this operation we will have two EntryInfo's with the same
@@ -425,7 +432,8 @@ void HeapObjectsMap::MoveObject(Address from, Address to) {
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
- HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
+ HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
+ false);
if (entry == NULL) return 0;
int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
EntryInfo& entry_info = entries_.at(entry_index);
@@ -437,7 +445,8 @@ SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
unsigned int size) {
ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
- HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
+ HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
+ true);
if (entry->value != NULL) {
int entry_index =
static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
@@ -532,13 +541,14 @@ void HeapObjectsMap::RemoveDeadEntries() {
}
entries_.at(first_free_entry).accessed = false;
HashMap::Entry* entry = entries_map_.Lookup(
- entry_info.addr, AddressHash(entry_info.addr), false);
+ entry_info.addr, ComputePointerHash(entry_info.addr), false);
ASSERT(entry);
entry->value = reinterpret_cast<void*>(first_free_entry);
++first_free_entry;
} else {
if (entry_info.addr) {
- entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
+ entries_map_.Remove(entry_info.addr,
+ ComputePointerHash(entry_info.addr));
}
}
}
diff --git a/deps/v8/src/heap-snapshot-generator.h b/deps/v8/src/heap-snapshot-generator.h
index 31d808856d..cea995820f 100644
--- a/deps/v8/src/heap-snapshot-generator.h
+++ b/deps/v8/src/heap-snapshot-generator.h
@@ -266,16 +266,6 @@ class HeapObjectsMap {
void UpdateHeapObjectsMap();
void RemoveDeadEntries();
- static bool AddressesMatch(void* key1, void* key2) {
- return key1 == key2;
- }
-
- static uint32_t AddressHash(Address addr) {
- return ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)),
- v8::internal::kZeroHashSeed);
- }
-
SnapshotObjectId next_id_;
HashMap entries_map_;
List<EntryInfo> entries_;
diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc
index 692ec21820..53088e289f 100644
--- a/deps/v8/src/heap.cc
+++ b/deps/v8/src/heap.cc
@@ -1014,6 +1014,8 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
}
gc_post_processing_depth_--;
+ isolate_->eternal_handles()->PostGarbageCollectionProcessing(this);
+
// Update relocatables.
Relocatable::PostGarbageCollectionProcessing();
@@ -2006,7 +2008,6 @@ class ScavengingVisitor : public StaticVisitorBase {
private:
enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
- enum SizeRestriction { SMALL, UNKNOWN_SIZE };
static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
bool should_record = false;
@@ -2058,15 +2059,12 @@ class ScavengingVisitor : public StaticVisitorBase {
}
- template<ObjectContents object_contents,
- SizeRestriction size_restriction,
- int alignment>
+ template<ObjectContents object_contents, int alignment>
static inline void EvacuateObject(Map* map,
HeapObject** slot,
HeapObject* object,
int object_size) {
- SLOW_ASSERT((size_restriction != SMALL) ||
- (object_size <= Page::kMaxNonCodeHeapObjectSize));
+ SLOW_ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
SLOW_ASSERT(object->Size() == object_size);
int allocation_size = object_size;
@@ -2079,17 +2077,11 @@ class ScavengingVisitor : public StaticVisitorBase {
if (heap->ShouldBePromoted(object->address(), object_size)) {
MaybeObject* maybe_result;
- if ((size_restriction != SMALL) &&
- (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
- maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
- NOT_EXECUTABLE);
+ if (object_contents == DATA_OBJECT) {
+ maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
} else {
- if (object_contents == DATA_OBJECT) {
- maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
- } else {
- maybe_result =
- heap->old_pointer_space()->AllocateRaw(allocation_size);
- }
+ maybe_result =
+ heap->old_pointer_space()->AllocateRaw(allocation_size);
}
Object* result = NULL; // Initialization to please compiler.
@@ -2163,10 +2155,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject** slot,
HeapObject* object) {
int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
- EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
- slot,
- object,
- object_size);
+ EvacuateObject<POINTER_OBJECT, kObjectAlignment>(
+ map, slot, object, object_size);
}
@@ -2175,11 +2165,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) {
int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
int object_size = FixedDoubleArray::SizeFor(length);
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
- map,
- slot,
- object,
- object_size);
+ EvacuateObject<DATA_OBJECT, kDoubleAlignment>(
+ map, slot, object, object_size);
}
@@ -2187,7 +2174,7 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject** slot,
HeapObject* object) {
int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+ EvacuateObject<DATA_OBJECT, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2197,7 +2184,7 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) {
int object_size = SeqOneByteString::cast(object)->
SeqOneByteStringSize(map->instance_type());
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+ EvacuateObject<DATA_OBJECT, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2207,7 +2194,7 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) {
int object_size = SeqTwoByteString::cast(object)->
SeqTwoByteStringSize(map->instance_type());
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+ EvacuateObject<DATA_OBJECT, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2251,7 +2238,7 @@ class ScavengingVisitor : public StaticVisitorBase {
}
int object_size = ConsString::kSize;
- EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
+ EvacuateObject<POINTER_OBJECT, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2262,7 +2249,7 @@ class ScavengingVisitor : public StaticVisitorBase {
static inline void VisitSpecialized(Map* map,
HeapObject** slot,
HeapObject* object) {
- EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+ EvacuateObject<object_contents, kObjectAlignment>(
map, slot, object, object_size);
}
@@ -2270,7 +2257,7 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject** slot,
HeapObject* object) {
int object_size = map->instance_size();
- EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+ EvacuateObject<object_contents, kObjectAlignment>(
map, slot, object, object_size);
}
};
@@ -3202,6 +3189,11 @@ bool Heap::CreateInitialObjects() {
}
set_frozen_symbol(Symbol::cast(obj));
+ { MaybeObject* maybe_obj = AllocateSymbol();
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_elements_transition_symbol(Symbol::cast(obj));
+
{ MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED);
if (!maybe_obj->ToObject(&obj)) return false;
}
@@ -3213,9 +3205,6 @@ bool Heap::CreateInitialObjects() {
}
set_observed_symbol(Symbol::cast(obj));
- set_i18n_template_one(the_hole_value());
- set_i18n_template_two(the_hole_value());
-
// Handling of script id generation is in Factory::NewScript.
set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId));
@@ -6603,6 +6592,14 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
}
v->Synchronize(VisitorSynchronization::kGlobalHandles);
+ // Iterate over eternal handles.
+ if (mode == VISIT_ALL_IN_SCAVENGE) {
+ isolate_->eternal_handles()->IterateNewSpaceRoots(v);
+ } else {
+ isolate_->eternal_handles()->IterateAllRoots(v);
+ }
+ v->Synchronize(VisitorSynchronization::kEternalHandles);
+
// Iterate over pointers being held by inactive threads.
isolate_->thread_manager()->Iterate(v);
v->Synchronize(VisitorSynchronization::kThreadManager);
@@ -6945,6 +6942,8 @@ void Heap::TearDown() {
external_string_table_.TearDown();
+ mark_compact_collector()->TearDown();
+
new_space_.TearDown();
if (old_pointer_space_ != NULL) {
diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h
index fbe0531014..1b6bf8eb01 100644
--- a/deps/v8/src/heap.h
+++ b/deps/v8/src/heap.h
@@ -178,7 +178,7 @@ namespace internal {
V(Smi, last_script_id, LastScriptId) \
V(Script, empty_script, EmptyScript) \
V(Smi, real_stack_limit, RealStackLimit) \
- V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
+ V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
@@ -186,11 +186,10 @@ namespace internal {
V(JSObject, observation_state, ObservationState) \
V(Map, external_map, ExternalMap) \
V(Symbol, frozen_symbol, FrozenSymbol) \
+ V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
V(SeededNumberDictionary, empty_slow_element_dictionary, \
EmptySlowElementDictionary) \
- V(Symbol, observed_symbol, ObservedSymbol) \
- V(HeapObject, i18n_template_one, I18nTemplateOne) \
- V(HeapObject, i18n_template_two, I18nTemplateTwo)
+ V(Symbol, observed_symbol, ObservedSymbol)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
@@ -482,6 +481,7 @@ enum ArrayStorageAllocationMode {
INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};
+
class Heap {
public:
// Configure heap size before setup. Return false if the heap has been
@@ -1300,12 +1300,6 @@ class Heap {
ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
global_gc_epilogue_callback_ = callback;
}
- void SetI18nTemplateOne(ObjectTemplateInfo* tmpl) {
- set_i18n_template_one(tmpl);
- }
- void SetI18nTemplateTwo(ObjectTemplateInfo* tmpl) {
- set_i18n_template_two(tmpl);
- }
// Heap root getters. We have versions with and without type::cast() here.
// You can't use type::cast during GC because the assert fails.
@@ -1398,7 +1392,7 @@ class Heap {
// Finds out which space an object should get promoted to based on its type.
inline OldSpace* TargetSpace(HeapObject* object);
- inline AllocationSpace TargetSpaceId(InstanceType type);
+ static inline AllocationSpace TargetSpaceId(InstanceType type);
// Sets the stub_cache_ (only used when expanding the dictionary).
void public_set_code_stubs(UnseededNumberDictionary* value) {
@@ -1545,19 +1539,16 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
PretenureFlag pretenure);
- // Predicate that governs global pre-tenuring decisions based on observed
- // promotion rates of previous collections.
- inline bool ShouldGloballyPretenure() {
- return FLAG_pretenuring && new_space_high_promotion_mode_active_;
- }
-
// This is only needed for testing high promotion mode.
void SetNewSpaceHighPromotionModeActive(bool mode) {
new_space_high_promotion_mode_active_ = mode;
}
+ // Returns the allocation mode (pre-tenuring) based on observed promotion
+ // rates of previous collections.
inline PretenureFlag GetPretenureMode() {
- return new_space_high_promotion_mode_active_ ? TENURED : NOT_TENURED;
+ return FLAG_pretenuring && new_space_high_promotion_mode_active_
+ ? TENURED : NOT_TENURED;
}
inline Address* NewSpaceHighPromotionModeActiveAddress() {
diff --git a/deps/v8/src/hydrogen-bce.cc b/deps/v8/src/hydrogen-bce.cc
index ff0b072ce0..7c81ec145c 100644
--- a/deps/v8/src/hydrogen-bce.cc
+++ b/deps/v8/src/hydrogen-bce.cc
@@ -260,12 +260,12 @@ class BoundsCheckBbData: public ZoneObject {
HValue* index_context = IndexContext(*add, check);
if (index_context == NULL) return false;
- HConstant* new_constant = new(BasicBlock()->zone()) HConstant(
- new_offset, representation);
+ Zone* zone = BasicBlock()->zone();
+ HConstant* new_constant = HConstant::New(zone, index_context,
+ new_offset, representation);
if (*add == NULL) {
new_constant->InsertBefore(check);
- (*add) = HAdd::New(
- BasicBlock()->zone(), index_context, original_value, new_constant);
+ (*add) = HAdd::New(zone, index_context, original_value, new_constant);
(*add)->AssumeRepresentation(representation);
(*add)->InsertBefore(check);
} else {
diff --git a/deps/v8/src/hydrogen-bch.cc b/deps/v8/src/hydrogen-bch.cc
index 8646747caf..137d629547 100644
--- a/deps/v8/src/hydrogen-bch.cc
+++ b/deps/v8/src/hydrogen-bch.cc
@@ -278,10 +278,12 @@ class InductionVariableBlocksTable BASE_EMBEDDED {
}
// Choose the appropriate limit.
+ Zone* zone = graph()->zone();
+ HValue* context = graph()->GetInvalidContext();
HValue* limit = data->limit();
if (has_upper_constant_limit) {
- HConstant* new_limit = new(pre_header->graph()->zone()) HConstant(
- upper_constant_limit, length->representation());
+ HConstant* new_limit = HConstant::New(zone, context,
+ upper_constant_limit);
new_limit->InsertBefore(pre_header->end());
limit = new_limit;
}
@@ -290,15 +292,15 @@ class InductionVariableBlocksTable BASE_EMBEDDED {
if (limit->IsInteger32Constant() &&
limit->block() != pre_header &&
!limit->block()->Dominates(pre_header)) {
- HConstant* new_limit = new(pre_header->graph()->zone()) HConstant(
- limit->GetInteger32Constant(), length->representation());
+ HConstant* new_limit = HConstant::New(zone, context,
+ limit->GetInteger32Constant());
new_limit->InsertBefore(pre_header->end());
limit = new_limit;
}
// Do the hoisting.
- HBoundsCheck* hoisted_check = new(pre_header->zone()) HBoundsCheck(
- limit, check->check()->length());
+ HBoundsCheck* hoisted_check = HBoundsCheck::New(
+ zone, context, limit, check->check()->length());
hoisted_check->InsertBefore(pre_header->end());
hoisted_check->set_allow_equality(true);
hoisted_check->block()->graph()->isolate()->counters()->
diff --git a/deps/v8/src/hydrogen-dehoist.cc b/deps/v8/src/hydrogen-dehoist.cc
index 696d22c608..67e6718998 100644
--- a/deps/v8/src/hydrogen-dehoist.cc
+++ b/deps/v8/src/hydrogen-dehoist.cc
@@ -38,7 +38,7 @@ static void DehoistArrayIndex(ArrayInstructionInterface* array_operation) {
HConstant* constant;
HValue* subexpression;
HBinaryOperation* binary_operation = HBinaryOperation::cast(index);
- if (binary_operation->left()->IsConstant()) {
+ if (binary_operation->left()->IsConstant() && index->IsAdd()) {
subexpression = binary_operation->right();
constant = HConstant::cast(binary_operation->left());
} else if (binary_operation->right()->IsConstant()) {
diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc
index 54b53db594..997b7c2fda 100644
--- a/deps/v8/src/hydrogen-instructions.cc
+++ b/deps/v8/src/hydrogen-instructions.cc
@@ -149,116 +149,6 @@ void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
}
-// This method is recursive but it is guaranteed to terminate because
-// RedefinedOperand() always dominates "this".
-bool HValue::IsRelationTrue(NumericRelation relation,
- HValue* other,
- int offset,
- int scale) {
- if (this == other) {
- return scale == 0 && relation.IsExtendable(offset);
- }
-
- // Test the direct relation.
- if (IsRelationTrueInternal(relation, other, offset, scale)) return true;
-
- // If scale is 0 try the reversed relation.
- if (scale == 0 &&
- // TODO(mmassi): do we need the full, recursive IsRelationTrue?
- other->IsRelationTrueInternal(relation.Reversed(), this, -offset)) {
- return true;
- }
-
- // Try decomposition (but do not accept scaled compounds).
- DecompositionResult decomposition;
- if (TryDecompose(&decomposition) &&
- decomposition.scale() == 0 &&
- decomposition.base()->IsRelationTrue(relation, other,
- offset + decomposition.offset(),
- scale)) {
- return true;
- }
-
- // Pass the request to the redefined value.
- HValue* redefined = RedefinedOperand();
- return redefined != NULL && redefined->IsRelationTrue(relation, other,
- offset, scale);
-}
-
-
-bool HValue::TryGuaranteeRange(HValue* upper_bound) {
- RangeEvaluationContext context = RangeEvaluationContext(this, upper_bound);
- TryGuaranteeRangeRecursive(&context);
- bool result = context.is_range_satisfied();
- if (result) {
- context.lower_bound_guarantee()->SetResponsibilityForRange(DIRECTION_LOWER);
- context.upper_bound_guarantee()->SetResponsibilityForRange(DIRECTION_UPPER);
- }
- return result;
-}
-
-
-void HValue::TryGuaranteeRangeRecursive(RangeEvaluationContext* context) {
- // Check if we already know that this value satisfies the lower bound.
- if (context->lower_bound_guarantee() == NULL) {
- if (IsRelationTrueInternal(NumericRelation::Ge(), context->lower_bound(),
- context->offset(), context->scale())) {
- context->set_lower_bound_guarantee(this);
- }
- }
-
- // Check if we already know that this value satisfies the upper bound.
- if (context->upper_bound_guarantee() == NULL) {
- if (IsRelationTrueInternal(NumericRelation::Lt(), context->upper_bound(),
- context->offset(), context->scale()) ||
- (context->scale() == 0 &&
- context->upper_bound()->IsRelationTrue(NumericRelation::Gt(),
- this, -context->offset()))) {
- context->set_upper_bound_guarantee(this);
- }
- }
-
- if (context->is_range_satisfied()) return;
-
- // See if our RedefinedOperand() satisfies the constraints.
- if (RedefinedOperand() != NULL) {
- RedefinedOperand()->TryGuaranteeRangeRecursive(context);
- }
- if (context->is_range_satisfied()) return;
-
- // See if the constraints can be satisfied by decomposition.
- DecompositionResult decomposition;
- if (TryDecompose(&decomposition)) {
- context->swap_candidate(&decomposition);
- context->candidate()->TryGuaranteeRangeRecursive(context);
- context->swap_candidate(&decomposition);
- }
- if (context->is_range_satisfied()) return;
-
- // Try to modify this to satisfy the constraint.
-
- TryGuaranteeRangeChanging(context);
-}
-
-
-RangeEvaluationContext::RangeEvaluationContext(HValue* value, HValue* upper)
- : lower_bound_(upper->block()->graph()->GetConstant0()),
- lower_bound_guarantee_(NULL),
- candidate_(value),
- upper_bound_(upper),
- upper_bound_guarantee_(NULL),
- offset_(0),
- scale_(0) {
-}
-
-
-HValue* RangeEvaluationContext::ConvertGuarantee(HValue* guarantee) {
- return guarantee->IsBoundsCheckBaseIndexInformation()
- ? HBoundsCheckBaseIndexInformation::cast(guarantee)->bounds_check()
- : guarantee;
-}
-
-
static int32_t ConvertAndSetOverflow(Representation r,
int64_t result,
bool* overflow) {
@@ -484,55 +374,6 @@ HType HType::TypeFromValue(Handle<Object> value) {
}
-bool HValue::Dominates(HValue* dominator, HValue* dominated) {
- if (dominator->block() != dominated->block()) {
- // If they are in different blocks we can use the dominance relation
- // between the blocks.
- return dominator->block()->Dominates(dominated->block());
- } else {
- // Otherwise we must see which instruction comes first, considering
- // that phis always precede regular instructions.
- if (dominator->IsInstruction()) {
- if (dominated->IsInstruction()) {
- for (HInstruction* next = HInstruction::cast(dominator)->next();
- next != NULL;
- next = next->next()) {
- if (next == dominated) return true;
- }
- return false;
- } else if (dominated->IsPhi()) {
- return false;
- } else {
- UNREACHABLE();
- }
- } else if (dominator->IsPhi()) {
- if (dominated->IsInstruction()) {
- return true;
- } else {
- // We cannot compare which phi comes first.
- UNREACHABLE();
- }
- } else {
- UNREACHABLE();
- }
- return false;
- }
-}
-
-
-bool HValue::TestDominanceUsingProcessedFlag(HValue* dominator,
- HValue* dominated) {
- if (dominator->block() != dominated->block()) {
- return dominator->block()->Dominates(dominated->block());
- } else {
- // If both arguments are in the same block we check if dominator is a phi
- // or if dominated has not already been processed: in either case we know
- // that dominator precedes dominated.
- return dominator->IsPhi() || !dominated->CheckFlag(kIDefsProcessingDone);
- }
-}
-
-
bool HValue::IsDefinedAfter(HBasicBlock* other) const {
return block()->block_id() > other->block_id();
}
@@ -960,58 +801,6 @@ void HInstruction::Verify() {
#endif
-HNumericConstraint* HNumericConstraint::AddToGraph(
- HValue* constrained_value,
- NumericRelation relation,
- HValue* related_value,
- HInstruction* insertion_point) {
- if (insertion_point == NULL) {
- if (constrained_value->IsInstruction()) {
- insertion_point = HInstruction::cast(constrained_value);
- } else if (constrained_value->IsPhi()) {
- insertion_point = constrained_value->block()->first();
- } else {
- UNREACHABLE();
- }
- }
- HNumericConstraint* result =
- new(insertion_point->block()->zone()) HNumericConstraint(
- constrained_value, relation, related_value);
- result->InsertAfter(insertion_point);
- return result;
-}
-
-
-void HNumericConstraint::PrintDataTo(StringStream* stream) {
- stream->Add("(");
- constrained_value()->PrintNameTo(stream);
- stream->Add(" %s ", relation().Mnemonic());
- related_value()->PrintNameTo(stream);
- stream->Add(")");
-}
-
-
-HInductionVariableAnnotation* HInductionVariableAnnotation::AddToGraph(
- HPhi* phi,
- NumericRelation relation,
- int operand_index) {
- HInductionVariableAnnotation* result =
- new(phi->block()->zone()) HInductionVariableAnnotation(phi, relation,
- operand_index);
- result->InsertAfter(phi->block()->first());
- return result;
-}
-
-
-void HInductionVariableAnnotation::PrintDataTo(StringStream* stream) {
- stream->Add("(");
- RedefinedOperand()->PrintNameTo(stream);
- stream->Add(" %s ", relation().Mnemonic());
- induction_base()->PrintNameTo(stream);
- stream->Add(")");
-}
-
-
void HDummyUse::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
}
@@ -1038,40 +827,6 @@ void HBinaryCall::PrintDataTo(StringStream* stream) {
}
-void HBoundsCheck::TryGuaranteeRangeChanging(RangeEvaluationContext* context) {
- if (context->candidate()->ActualValue() != base()->ActualValue() ||
- context->scale() < scale()) {
- return;
- }
-
- // TODO(mmassi)
- // Instead of checking for "same basic block" we should check for
- // "dominates and postdominates".
- if (context->upper_bound() == length() &&
- context->lower_bound_guarantee() != NULL &&
- context->lower_bound_guarantee() != this &&
- context->lower_bound_guarantee()->block() != block() &&
- offset() < context->offset() &&
- index_can_increase() &&
- context->upper_bound_guarantee() == NULL) {
- offset_ = context->offset();
- SetResponsibilityForRange(DIRECTION_UPPER);
- context->set_upper_bound_guarantee(this);
- isolate()->counters()->bounds_checks_eliminated()->Increment();
- } else if (context->upper_bound_guarantee() != NULL &&
- context->upper_bound_guarantee() != this &&
- context->upper_bound_guarantee()->block() != block() &&
- offset() > context->offset() &&
- index_can_decrease() &&
- context->lower_bound_guarantee() == NULL) {
- offset_ = context->offset();
- SetResponsibilityForRange(DIRECTION_LOWER);
- context->set_lower_bound_guarantee(this);
- isolate()->counters()->bounds_checks_eliminated()->Increment();
- }
-}
-
-
void HBoundsCheck::ApplyIndexChange() {
if (skip_check()) return;
@@ -1091,12 +846,13 @@ void HBoundsCheck::ApplyIndexChange() {
int actual_offset = decomposition.offset() + offset();
int actual_scale = decomposition.scale() + scale();
+ Zone* zone = block()->graph()->zone();
+ HValue* context = block()->graph()->GetInvalidContext();
if (actual_offset != 0) {
- HConstant* add_offset = new(block()->graph()->zone()) HConstant(
- actual_offset, index()->representation());
+ HConstant* add_offset = HConstant::New(zone, context, actual_offset);
add_offset->InsertBefore(this);
- HInstruction* add = HAdd::New(block()->graph()->zone(),
- block()->graph()->GetInvalidContext(), current_index, add_offset);
+ HInstruction* add = HAdd::New(zone, context,
+ current_index, add_offset);
add->InsertBefore(this);
add->AssumeRepresentation(index()->representation());
add->ClearFlag(kCanOverflow);
@@ -1104,11 +860,10 @@ void HBoundsCheck::ApplyIndexChange() {
}
if (actual_scale != 0) {
- HConstant* sar_scale = new(block()->graph()->zone()) HConstant(
- actual_scale, index()->representation());
+ HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
sar_scale->InsertBefore(this);
- HInstruction* sar = HSar::New(block()->graph()->zone(),
- block()->graph()->GetInvalidContext(), current_index, sar_scale);
+ HInstruction* sar = HSar::New(zone, context,
+ current_index, sar_scale);
sar->InsertBefore(this);
sar->AssumeRepresentation(index()->representation());
current_index = sar;
@@ -1119,40 +874,6 @@ void HBoundsCheck::ApplyIndexChange() {
base_ = NULL;
offset_ = 0;
scale_ = 0;
- responsibility_direction_ = DIRECTION_NONE;
-}
-
-
-void HBoundsCheck::AddInformativeDefinitions() {
- // TODO(mmassi): Executing this code during AddInformativeDefinitions
- // is a hack. Move it to some other HPhase.
- if (FLAG_array_bounds_checks_elimination) {
- if (index()->TryGuaranteeRange(length())) {
- set_skip_check();
- }
- if (DetectCompoundIndex()) {
- HBoundsCheckBaseIndexInformation* base_index_info =
- new(block()->graph()->zone())
- HBoundsCheckBaseIndexInformation(this);
- base_index_info->InsertAfter(this);
- }
- }
-}
-
-
-bool HBoundsCheck::IsRelationTrueInternal(NumericRelation relation,
- HValue* related_value,
- int offset,
- int scale) {
- if (related_value == length()) {
- // A HBoundsCheck is smaller than the length it compared against.
- return NumericRelation::Lt().CompoundImplies(relation, 0, 0, offset, scale);
- } else if (related_value == block()->graph()->GetConstant0()) {
- // A HBoundsCheck is greater than or equal to zero.
- return NumericRelation::Ge().CompoundImplies(relation, 0, 0, offset, scale);
- } else {
- return false;
- }
}
@@ -1195,25 +916,6 @@ void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
}
-bool HBoundsCheckBaseIndexInformation::IsRelationTrueInternal(
- NumericRelation relation,
- HValue* related_value,
- int offset,
- int scale) {
- if (related_value == bounds_check()->length()) {
- return NumericRelation::Lt().CompoundImplies(
- relation,
- bounds_check()->offset(), bounds_check()->scale(), offset, scale);
- } else if (related_value == block()->graph()->GetConstant0()) {
- return NumericRelation::Ge().CompoundImplies(
- relation,
- bounds_check()->offset(), bounds_check()->scale(), offset, scale);
- } else {
- return false;
- }
-}
-
-
void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
stream->Add("base: ");
base_index()->PrintNameTo(stream);
@@ -1453,6 +1155,29 @@ void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
}
+static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
+ if (!l->EqualsInteger32Constant(~0)) return false;
+ *negated = r;
+ return true;
+}
+
+
+static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
+ if (!instr->IsBitwise()) return false;
+ HBitwise* b = HBitwise::cast(instr);
+ return (b->op() == Token::BIT_XOR) &&
+ (MatchLeftIsOnes(b->left(), b->right(), negated) ||
+ MatchLeftIsOnes(b->right(), b->left(), negated));
+}
+
+
+static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
+ HValue* negated;
+ return MatchNegationViaXor(instr, &negated) &&
+ MatchNegationViaXor(negated, arg);
+}
+
+
HValue* HBitwise::Canonicalize() {
if (!representation().IsSmiOrInteger32()) return this;
// If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
@@ -1465,18 +1190,10 @@ HValue* HBitwise::Canonicalize() {
!left()->CheckFlag(kUint32)) {
return left();
}
- return this;
-}
-
-
-HValue* HBitNot::Canonicalize() {
- // Optimize ~~x, a common pattern used for ToInt32(x).
- if (value()->IsBitNot()) {
- HValue* result = HBitNot::cast(value())->value();
- ASSERT(result->representation().IsInteger32());
- if (!result->CheckFlag(kUint32)) {
- return result;
- }
+ // Optimize double negation, a common pattern used for ToInt32(x).
+ HValue* arg;
+ if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
+ return arg;
}
return this;
}
@@ -1612,8 +1329,8 @@ HValue* HUnaryMathOperation::Canonicalize() {
!HInstruction::cast(new_right)->IsLinked()) {
HInstruction::cast(new_right)->InsertBefore(this);
}
- HMathFloorOfDiv* instr = new(block()->zone())
- HMathFloorOfDiv(context(), new_left, new_right);
+ HMathFloorOfDiv* instr =
+ HMathFloorOfDiv::New(block()->zone(), context(), new_left, new_right);
// Replace this HMathFloor instruction by the new HMathFloorOfDiv.
instr->InsertBefore(this);
ReplaceAllUsesWith(instr);
@@ -1633,11 +1350,13 @@ HValue* HUnaryMathOperation::Canonicalize() {
HValue* HCheckInstanceType::Canonicalize() {
if (check_ == IS_STRING && value()->type().IsString()) {
- return NULL;
+ return value();
}
if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
- if (HConstant::cast(value())->HasInternalizedStringValue()) return NULL;
+ if (HConstant::cast(value())->HasInternalizedStringValue()) {
+ return value();
+ }
}
return this;
}
@@ -1685,10 +1404,10 @@ void HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect,
// for which the map is known.
if (HasNoUses() && dominator->IsStoreNamedField()) {
HStoreNamedField* store = HStoreNamedField::cast(dominator);
- UniqueValueId map_unique_id = store->transition_unique_id();
- if (!map_unique_id.IsInitialized() || store->object() != value()) return;
+ if (!store->has_transition() || store->object() != value()) return;
+ HConstant* transition = HConstant::cast(store->transition());
for (int i = 0; i < map_set()->length(); i++) {
- if (map_unique_id == map_unique_ids_.at(i)) {
+ if (transition->UniqueValueIdsMatch(map_unique_ids_.at(i))) {
DeleteAndReplaceWith(NULL);
return;
}
@@ -1739,13 +1458,6 @@ void HCheckInstanceType::PrintDataTo(StringStream* stream) {
}
-void HCheckPrototypeMaps::PrintDataTo(StringStream* stream) {
- stream->Add("[receiver_prototype=%p,holder=%p]%s",
- *prototypes_.first(), *prototypes_.last(),
- CanOmitPrototypeChecks() ? " (omitted)" : "");
-}
-
-
void HCallStub::PrintDataTo(StringStream* stream) {
stream->Add("%s ",
CodeStub::MajorName(major_key_, false));
@@ -1948,60 +1660,6 @@ Range* HMod::InferRange(Zone* zone) {
}
-void HPhi::AddInformativeDefinitions() {
- if (OperandCount() == 2) {
- // If one of the operands is an OSR block give up (this cannot be an
- // induction variable).
- if (OperandAt(0)->block()->is_osr_entry() ||
- OperandAt(1)->block()->is_osr_entry()) return;
-
- for (int operand_index = 0; operand_index < 2; operand_index++) {
- int other_operand_index = (operand_index + 1) % 2;
-
- static NumericRelation relations[] = {
- NumericRelation::Ge(),
- NumericRelation::Le()
- };
-
- // Check if this phi is an induction variable. If, e.g., we know that
- // its first input is greater than the phi itself, then that must be
- // the back edge, and the phi is always greater than its second input.
- for (int relation_index = 0; relation_index < 2; relation_index++) {
- if (OperandAt(operand_index)->IsRelationTrue(relations[relation_index],
- this)) {
- HInductionVariableAnnotation::AddToGraph(this,
- relations[relation_index],
- other_operand_index);
- }
- }
- }
- }
-}
-
-
-bool HPhi::IsRelationTrueInternal(NumericRelation relation,
- HValue* other,
- int offset,
- int scale) {
- if (CheckFlag(kNumericConstraintEvaluationInProgress)) return false;
-
- SetFlag(kNumericConstraintEvaluationInProgress);
- bool result = true;
- for (int i = 0; i < OperandCount(); i++) {
- // Skip OSR entry blocks
- if (OperandAt(i)->block()->is_osr_entry()) continue;
-
- if (!OperandAt(i)->IsRelationTrue(relation, other, offset, scale)) {
- result = false;
- break;
- }
- }
- ClearFlag(kNumericConstraintEvaluationInProgress);
-
- return result;
-}
-
-
InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
if (phi->block()->loop_information() == NULL) return NULL;
if (phi->OperandCount() != 2) return NULL;
@@ -2129,8 +1787,8 @@ void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
HValue* previous_index = first_check_in_block()->index();
ASSERT(context != NULL);
- set_added_constant(new(index_base->block()->graph()->zone()) HConstant(
- mask, index_base->representation()));
+ Zone* zone = index_base->block()->graph()->zone();
+ set_added_constant(HConstant::New(zone, context, mask));
if (added_index() != NULL) {
added_constant()->InsertBefore(added_index());
} else {
@@ -2139,9 +1797,8 @@ void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
if (added_index() == NULL) {
first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
- HInstruction* new_index = HBitwise::New(
- index_base->block()->graph()->zone(),
- token, context, index_base, added_constant());
+ HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
+ added_constant());
ASSERT(new_index->IsBitwise());
new_index->ClearAllSideEffects();
new_index->AssumeRepresentation(Representation::Integer32());
@@ -2640,7 +2297,8 @@ static bool IsInteger32(double value) {
HConstant::HConstant(Handle<Object> handle, Representation r)
- : handle_(handle),
+ : HTemplateInstruction<0>(HType::TypeFromValue(handle)),
+ handle_(handle),
unique_id_(),
has_smi_value_(false),
has_int32_value_(false),
@@ -2650,8 +2308,6 @@ HConstant::HConstant(Handle<Object> handle, Representation r)
is_not_in_new_space_(true),
is_cell_(false),
boolean_value_(handle->BooleanValue()) {
- set_type(HType::TypeFromValue(handle));
-
if (handle_->IsHeapObject()) {
Heap* heap = Handle<HeapObject>::cast(handle)->GetHeap();
is_not_in_new_space_ = !heap->InNewSpace(*handle);
@@ -2681,7 +2337,8 @@ HConstant::HConstant(Handle<Object> handle,
bool is_not_in_new_space,
bool is_cell,
bool boolean_value)
- : handle_(handle),
+ : HTemplateInstruction<0>(type),
+ handle_(handle),
unique_id_(unique_id),
has_smi_value_(false),
has_int32_value_(false),
@@ -2693,7 +2350,6 @@ HConstant::HConstant(Handle<Object> handle,
boolean_value_(boolean_value) {
ASSERT(!handle.is_null());
ASSERT(!type.IsTaggedNumber());
- set_type(type);
Initialize(r);
}
@@ -2741,7 +2397,8 @@ HConstant::HConstant(double double_value,
HConstant::HConstant(ExternalReference reference)
- : has_smi_value_(false),
+ : HTemplateInstruction<0>(HType::None()),
+ has_smi_value_(false),
has_int32_value_(false),
has_double_value_(false),
has_external_reference_value_(true),
@@ -2750,11 +2407,18 @@ HConstant::HConstant(ExternalReference reference)
is_cell_(false),
boolean_value_(true),
external_reference_value_(reference) {
- set_type(HType::None());
Initialize(Representation::External());
}
+static void PrepareConstant(Handle<Object> object) {
+ if (!object->IsJSObject()) return;
+ Handle<JSObject> js_object = Handle<JSObject>::cast(object);
+ if (!js_object->map()->is_deprecated()) return;
+ JSObject::TryMigrateInstance(js_object);
+}
+
+
void HConstant::Initialize(Representation r) {
if (r.IsNone()) {
if (has_smi_value_ && kSmiValueSize == 31) {
@@ -2766,6 +2430,7 @@ void HConstant::Initialize(Representation r) {
} else if (has_external_reference_value_) {
r = Representation::External();
} else {
+ PrepareConstant(handle_);
r = Representation::Tagged();
}
}
@@ -3050,6 +2715,14 @@ Range* HShl::InferRange(Zone* zone) {
}
+Range* HLoadNamedField::InferRange(Zone* zone) {
+ if (access().IsStringLength()) {
+ return new(zone) Range(0, String::kMaxLength);
+ }
+ return HValue::InferRange(zone);
+}
+
+
Range* HLoadKeyed::InferRange(Zone* zone) {
switch (elements_kind()) {
case EXTERNAL_PIXEL_ELEMENTS:
@@ -3082,16 +2755,6 @@ void HStringCompareAndBranch::PrintDataTo(StringStream* stream) {
}
-void HCompareNumericAndBranch::AddInformativeDefinitions() {
- NumericRelation r = NumericRelation::FromToken(token());
- if (r.IsNone()) return;
-
- HNumericConstraint::AddToGraph(left(), r, right(), SuccessorAt(0)->first());
- HNumericConstraint::AddToGraph(
- left(), r.Negated(), right(), SuccessorAt(1)->first());
-}
-
-
void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
stream->Add(Token::Name(token()));
stream->Add(" ");
@@ -3282,13 +2945,15 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
}
-HCheckMaps* HCheckMaps::New(HValue* value,
+HCheckMaps* HCheckMaps::New(Zone* zone,
+ HValue* context,
+ HValue* value,
Handle<Map> map,
- Zone* zone,
CompilationInfo* info,
HValue* typecheck) {
HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
check_map->map_set_.Add(map, zone);
+ check_map->has_migration_target_ = map->is_migration_target();
if (map->CanOmitMapChecks() &&
value->IsConstant() &&
HConstant::cast(value)->InstanceOf(map)) {
@@ -3298,39 +2963,6 @@ HCheckMaps* HCheckMaps::New(HValue* value,
}
-HCheckMaps* HCheckMaps::NewWithTransitions(HValue* value,
- Handle<Map> map,
- Zone* zone,
- CompilationInfo* info) {
- HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, value);
- check_map->map_set_.Add(map, zone);
-
- // Since transitioned elements maps of the initial map don't fail the map
- // check, the CheckMaps instruction doesn't need to depend on ElementsKinds.
- check_map->ClearGVNFlag(kDependsOnElementsKind);
-
- ElementsKind kind = map->elements_kind();
- bool packed = IsFastPackedElementsKind(kind);
- while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
- kind = GetNextMoreGeneralFastElementsKind(kind, packed);
- Map* transitioned_map =
- map->LookupElementsTransitionMap(kind);
- if (transitioned_map) {
- check_map->map_set_.Add(Handle<Map>(transitioned_map), zone);
- }
- };
-
- if (map->CanOmitMapChecks() &&
- value->IsConstant() &&
- HConstant::cast(value)->InstanceOf(map)) {
- check_map->omit(info);
- }
-
- check_map->map_set_.Sort();
- return check_map;
-}
-
-
void HCheckMaps::FinalizeUniqueValueId() {
if (!map_unique_ids_.is_empty()) return;
Zone* zone = block()->zone();
@@ -3503,8 +3135,13 @@ HValue* HLoadKeyedGeneric::Canonicalize() {
HForInCacheArray* index_cache =
names_cache->index_cache();
HCheckMapValue* map_check =
- new(block()->zone()) HCheckMapValue(object(), names_cache->map());
- HInstruction* index = new(block()->zone()) HLoadKeyed(
+ HCheckMapValue::New(block()->graph()->zone(),
+ block()->graph()->GetInvalidContext(),
+ object(),
+ names_cache->map());
+ HInstruction* index = HLoadKeyed::New(
+ block()->graph()->zone(),
+ block()->graph()->GetInvalidContext(),
index_cache,
key_load->key(),
key_load->key(),
@@ -3541,8 +3178,8 @@ void HStoreNamedField::PrintDataTo(StringStream* stream) {
if (NeedsWriteBarrier()) {
stream->Add(" (write-barrier)");
}
- if (!transition().is_null()) {
- stream->Add(" (transition map %p)", *transition());
+ if (has_transition()) {
+ stream->Add(" (transition map %p)", *transition_map());
}
}
@@ -3633,12 +3270,6 @@ void HStoreGlobalGeneric::PrintDataTo(StringStream* stream) {
}
-void HLinkObjectInList::PrintDataTo(StringStream* stream) {
- value()->PrintNameTo(stream);
- stream->Add(" offset %d", store_field_.offset());
-}
-
-
void HLoadContextSlot::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
stream->Add("[%d]", slot_index());
@@ -3660,26 +3291,6 @@ HType HValue::CalculateInferredType() {
}
-HType HCheckMaps::CalculateInferredType() {
- return value()->type();
-}
-
-
-HType HCheckFunction::CalculateInferredType() {
- return value()->type();
-}
-
-
-HType HCheckHeapObject::CalculateInferredType() {
- return HType::NonPrimitive();
-}
-
-
-HType HCheckSmi::CalculateInferredType() {
- return HType::Smi();
-}
-
-
HType HPhi::CalculateInferredType() {
if (OperandCount() == 0) return HType::Tagged();
HType result = OperandAt(0)->type();
@@ -3691,52 +3302,12 @@ HType HPhi::CalculateInferredType() {
}
-HType HCompareGeneric::CalculateInferredType() {
- return HType::Boolean();
-}
-
-
-HType HInstanceOf::CalculateInferredType() {
- return HType::Boolean();
-}
-
-
-HType HInstanceOfKnownGlobal::CalculateInferredType() {
- return HType::Boolean();
-}
-
-
HType HChange::CalculateInferredType() {
if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
return type();
}
-HType HBitwiseBinaryOperation::CalculateInferredType() {
- return HType::TaggedNumber();
-}
-
-
-HType HArithmeticBinaryOperation::CalculateInferredType() {
- return HType::TaggedNumber();
-}
-
-
-HType HAdd::CalculateInferredType() {
- return HType::Tagged();
-}
-
-
-HType HBitNot::CalculateInferredType() {
- return HType::TaggedNumber();
-}
-
-
-HType HUnaryMathOperation::CalculateInferredType() {
- return HType::TaggedNumber();
-}
-
-
Representation HUnaryMathOperation::RepresentationFromInputs() {
Representation rep = representation();
// If any of the actual input representation is more general than what we
@@ -3766,8 +3337,8 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
HValue* current_size = size();
// We can just fold allocations that are guaranteed in new space.
// TODO(hpayer): Add support for non-constant allocation in dominator.
- if (!GuaranteedInNewSpace() || !current_size->IsInteger32Constant() ||
- !dominator_allocate_instr->GuaranteedInNewSpace() ||
+ if (!IsNewSpaceAllocation() || !current_size->IsInteger32Constant() ||
+ !dominator_allocate_instr->IsNewSpaceAllocation() ||
!dominator_size->IsInteger32Constant()) {
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) cannot fold into #%d (%s)\n",
@@ -3785,7 +3356,7 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
if (MustAllocateDoubleAligned()) {
if (!dominator_allocate_instr->MustAllocateDoubleAligned()) {
- dominator_allocate_instr->SetFlags(HAllocate::ALLOCATE_DOUBLE_ALIGNED);
+ dominator_allocate_instr->MakeDoubleAligned();
}
if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
dominator_size_constant += kDoubleSize / 2;
@@ -3803,22 +3374,24 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
}
HBasicBlock* block = dominator->block();
Zone* zone = block->zone();
- HInstruction* new_dominator_size_constant = new(zone) HConstant(
- new_dominator_size);
+ HInstruction* new_dominator_size_constant =
+ HConstant::New(zone, context(), new_dominator_size);
new_dominator_size_constant->InsertBefore(dominator_allocate_instr);
dominator_allocate_instr->UpdateSize(new_dominator_size_constant);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
- dominator_allocate_instr->SetFlags(HAllocate::PREFILL_WITH_FILLER);
+ dominator_allocate_instr->MakePrefillWithFiller();
}
#endif
// After that replace the dominated allocate instruction.
HInstruction* dominated_allocate_instr =
- new(zone) HInnerAllocatedObject(dominator_allocate_instr,
- dominator_size_constant,
- type());
+ HInnerAllocatedObject::New(zone,
+ context(),
+ dominator_allocate_instr,
+ dominator_size_constant,
+ type());
dominated_allocate_instr->InsertBefore(this);
DeleteAndReplaceWith(dominated_allocate_instr);
if (FLAG_trace_allocation_folding) {
@@ -3830,17 +3403,13 @@ void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
void HAllocate::PrintDataTo(StringStream* stream) {
size()->PrintNameTo(stream);
- if (!GuaranteedInNewSpace()) stream->Add(" (pretenure)");
-}
-
-
-HType HRegExpLiteral::CalculateInferredType() {
- return HType::JSObject();
-}
-
-
-HType HFunctionLiteral::CalculateInferredType() {
- return HType::JSObject();
+ stream->Add(" (");
+ if (IsNewSpaceAllocation()) stream->Add("N");
+ if (IsOldPointerSpaceAllocation()) stream->Add("P");
+ if (IsOldDataSpaceAllocation()) stream->Add("D");
+ if (MustAllocateDoubleAligned()) stream->Add("A");
+ if (MustPrefillWithFiller()) stream->Add("F");
+ stream->Add(")");
}
@@ -3966,10 +3535,10 @@ bool HStoreKeyed::NeedsCanonicalization() {
}
-#define H_CONSTANT_INT(val) \
-new(zone) HConstant(static_cast<int32_t>(val))
+#define H_CONSTANT_INT(val) \
+HConstant::New(zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val) \
-new(zone) HConstant(static_cast<double>(val), Representation::Double())
+HConstant::New(zone, context, static_cast<double>(val))
#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
HInstruction* HInstr::New( \
@@ -3980,7 +3549,7 @@ HInstruction* HInstr::New( \
if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
if (TypeInfo::IsInt32Double(double_res)) { \
- return H_CONSTANT_INT(double_res); \
+ return H_CONSTANT_INT(double_res); \
} \
return H_CONSTANT_DOUBLE(double_res); \
} \
@@ -4007,7 +3576,7 @@ HInstruction* HStringAdd::New(Zone* zone,
if (c_left->HasStringValue() && c_right->HasStringValue()) {
Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
c_left->StringValue(), c_right->StringValue());
- return new(zone) HConstant(concat, Representation::Tagged());
+ return HConstant::New(zone, context, concat);
}
}
return new(zone) HStringAdd(context, left, right, flags);
@@ -4022,29 +3591,16 @@ HInstruction* HStringCharFromCode::New(
if (c_code->HasNumberValue()) {
if (std::isfinite(c_code->DoubleValue())) {
uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
- return new(zone) HConstant(LookupSingleCharacterStringFromCode(isolate,
- code),
- Representation::Tagged());
+ return HConstant::New(zone, context,
+ LookupSingleCharacterStringFromCode(isolate, code));
}
- return new(zone) HConstant(isolate->factory()->empty_string(),
- Representation::Tagged());
+ return HConstant::New(zone, context, isolate->factory()->empty_string());
}
}
return new(zone) HStringCharFromCode(context, char_code);
}
-HInstruction* HStringLength::New(Zone* zone, HValue* string) {
- if (FLAG_fold_constants && string->IsConstant()) {
- HConstant* c_string = HConstant::cast(string);
- if (c_string->HasStringValue()) {
- return new(zone) HConstant(c_string->StringValue()->length());
- }
- }
- return new(zone) HStringLength(string);
-}
-
-
HInstruction* HUnaryMathOperation::New(
Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
do {
@@ -4113,7 +3669,10 @@ HInstruction* HUnaryMathOperation::New(
}
-HInstruction* HPower::New(Zone* zone, HValue* left, HValue* right) {
+HInstruction* HPower::New(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right) {
if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
HConstant* c_left = HConstant::cast(left);
HConstant* c_right = HConstant::cast(right);
@@ -4212,7 +3771,7 @@ HInstruction* HDiv::New(
HInstruction* HBitwise::New(
- Zone* zone, Token::Value op, HValue* context, HValue* left, HValue* right) {
+ Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
HConstant* c_left = HConstant::cast(left);
HConstant* c_right = HConstant::cast(right);
@@ -4237,7 +3796,7 @@ HInstruction* HBitwise::New(
return H_CONSTANT_INT(result);
}
}
- return new(zone) HBitwise(op, context, left, right);
+ return new(zone) HBitwise(context, op, left, right);
}
@@ -4306,7 +3865,8 @@ void HPhi::SimplifyConstantInputs() {
continue;
} else if (operand->HasDoubleValue()) {
HConstant* integer_input =
- new(graph->zone()) HConstant(DoubleToInt32(operand->DoubleValue()));
+ HConstant::New(graph->zone(), graph->GetInvalidContext(),
+ DoubleToInt32(operand->DoubleValue()));
integer_input->InsertAfter(operand);
SetOperandAt(i, integer_input);
} else if (operand == graph->GetConstantTrue()) {
@@ -4518,6 +4078,10 @@ void HObjectAccess::SetGVNFlags(HValue *instr, bool is_store) {
instr->SetGVNFlag(is_store
? kChangesArrayLengths : kDependsOnArrayLengths);
break;
+ case kStringLengths:
+ instr->SetGVNFlag(is_store
+ ? kChangesStringLengths : kDependsOnStringLengths);
+ break;
case kInobject:
instr->SetGVNFlag(is_store
? kChangesInobjectFields : kDependsOnInobjectFields);
@@ -4551,6 +4115,7 @@ void HObjectAccess::PrintTo(StringStream* stream) {
switch (portion()) {
case kArrayLengths:
+ case kStringLengths:
stream->Add("%length");
break;
case kElementsPointer:
diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h
index e71b7cdf41..3fae45bcb7 100644
--- a/deps/v8/src/hydrogen-instructions.h
+++ b/deps/v8/src/hydrogen-instructions.h
@@ -72,7 +72,6 @@ class LChunkBuilder;
V(ArgumentsLength) \
V(ArgumentsObject) \
V(Bitwise) \
- V(BitNot) \
V(BlockEntry) \
V(BoundsCheck) \
V(BoundsCheckBaseIndexInformation) \
@@ -93,7 +92,6 @@ class LChunkBuilder;
V(CheckInstanceType) \
V(CheckMaps) \
V(CheckMapValue) \
- V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampToUint8) \
V(ClassOfTestAndBranch) \
@@ -122,7 +120,6 @@ class LChunkBuilder;
V(Goto) \
V(HasCachedArrayIndexAndBranch) \
V(HasInstanceTypeAndBranch) \
- V(InductionVariableAnnotation) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
@@ -135,7 +132,6 @@ class LChunkBuilder;
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
V(LeaveInlined) \
- V(LinkObjectInList) \
V(LoadContextSlot) \
V(LoadExternalArrayPointer) \
V(LoadFieldByIndex) \
@@ -152,7 +148,6 @@ class LChunkBuilder;
V(MathMinMax) \
V(Mod) \
V(Mul) \
- V(NumericConstraint) \
V(OsrEntry) \
V(OuterContext) \
V(Parameter) \
@@ -179,7 +174,6 @@ class LChunkBuilder;
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringCompareAndBranch) \
- V(StringLength) \
V(Sub) \
V(ThisFunction) \
V(Throw) \
@@ -201,6 +195,7 @@ class LChunkBuilder;
#define GVN_UNTRACKED_FLAG_LIST(V) \
V(ArrayElements) \
V(ArrayLengths) \
+ V(StringLengths) \
V(BackingStoreFields) \
V(Calls) \
V(ContextSlots) \
@@ -543,158 +538,6 @@ enum GVNFlag {
};
-class NumericRelation {
- public:
- enum Kind { NONE, EQ, GT, GE, LT, LE, NE };
- static const char* MnemonicFromKind(Kind kind) {
- switch (kind) {
- case NONE: return "NONE";
- case EQ: return "EQ";
- case GT: return "GT";
- case GE: return "GE";
- case LT: return "LT";
- case LE: return "LE";
- case NE: return "NE";
- }
- UNREACHABLE();
- return NULL;
- }
- const char* Mnemonic() const { return MnemonicFromKind(kind_); }
-
- static NumericRelation None() { return NumericRelation(NONE); }
- static NumericRelation Eq() { return NumericRelation(EQ); }
- static NumericRelation Gt() { return NumericRelation(GT); }
- static NumericRelation Ge() { return NumericRelation(GE); }
- static NumericRelation Lt() { return NumericRelation(LT); }
- static NumericRelation Le() { return NumericRelation(LE); }
- static NumericRelation Ne() { return NumericRelation(NE); }
-
- bool IsNone() { return kind_ == NONE; }
-
- static NumericRelation FromToken(Token::Value token) {
- switch (token) {
- case Token::EQ: return Eq();
- case Token::EQ_STRICT: return Eq();
- case Token::LT: return Lt();
- case Token::GT: return Gt();
- case Token::LTE: return Le();
- case Token::GTE: return Ge();
- case Token::NE: return Ne();
- case Token::NE_STRICT: return Ne();
- default: return None();
- }
- }
-
- // The semantics of "Reversed" is that if "x rel y" is true then also
- // "y rel.Reversed() x" is true, and that rel.Reversed().Reversed() == rel.
- NumericRelation Reversed() {
- switch (kind_) {
- case NONE: return None();
- case EQ: return Eq();
- case GT: return Lt();
- case GE: return Le();
- case LT: return Gt();
- case LE: return Ge();
- case NE: return Ne();
- }
- UNREACHABLE();
- return None();
- }
-
- // The semantics of "Negated" is that if "x rel y" is true then also
- // "!(x rel.Negated() y)" is true.
- NumericRelation Negated() {
- switch (kind_) {
- case NONE: return None();
- case EQ: return Ne();
- case GT: return Le();
- case GE: return Lt();
- case LT: return Ge();
- case LE: return Gt();
- case NE: return Eq();
- }
- UNREACHABLE();
- return None();
- }
-
- // The semantics of "Implies" is that if "x rel y" is true
- // then also "x other_relation y" is true.
- bool Implies(NumericRelation other_relation) {
- switch (kind_) {
- case NONE: return false;
- case EQ: return (other_relation.kind_ == EQ)
- || (other_relation.kind_ == GE)
- || (other_relation.kind_ == LE);
- case GT: return (other_relation.kind_ == GT)
- || (other_relation.kind_ == GE)
- || (other_relation.kind_ == NE);
- case LT: return (other_relation.kind_ == LT)
- || (other_relation.kind_ == LE)
- || (other_relation.kind_ == NE);
- case GE: return (other_relation.kind_ == GE);
- case LE: return (other_relation.kind_ == LE);
- case NE: return (other_relation.kind_ == NE);
- }
- UNREACHABLE();
- return false;
- }
-
- // The semantics of "IsExtendable" is that if
- // "rel.IsExtendable(direction)" is true then
- // "x rel y" implies "(x + direction) rel y" .
- bool IsExtendable(int direction) {
- switch (kind_) {
- case NONE: return false;
- case EQ: return false;
- case GT: return (direction >= 0);
- case GE: return (direction >= 0);
- case LT: return (direction <= 0);
- case LE: return (direction <= 0);
- case NE: return false;
- }
- UNREACHABLE();
- return false;
- }
-
- // CompoundImplies returns true when
- // "((x + my_offset) >> my_scale) rel y" implies
- // "((x + other_offset) >> other_scale) other_relation y".
- bool CompoundImplies(NumericRelation other_relation,
- int my_offset,
- int my_scale,
- int other_offset = 0,
- int other_scale = 0) {
- return Implies(other_relation) && ComponentsImply(
- my_offset, my_scale, other_offset, other_scale);
- }
-
- private:
- // ComponentsImply returns true when
- // "((x + my_offset) >> my_scale) rel y" implies
- // "((x + other_offset) >> other_scale) rel y".
- bool ComponentsImply(int my_offset,
- int my_scale,
- int other_offset,
- int other_scale) {
- switch (kind_) {
- case NONE: break; // Fall through to UNREACHABLE().
- case EQ:
- case NE: return my_offset == other_offset && my_scale == other_scale;
- case GT:
- case GE: return my_offset <= other_offset && my_scale >= other_scale;
- case LT:
- case LE: return my_offset >= other_offset && my_scale <= other_scale;
- }
- UNREACHABLE();
- return false;
- }
-
- explicit NumericRelation(Kind kind) : kind_(kind) {}
-
- Kind kind_;
-};
-
-
class DecompositionResult BASE_EMBEDDED {
public:
DecompositionResult() : base_(NULL), offset_(0), scale_(0) {}
@@ -740,46 +583,6 @@ class DecompositionResult BASE_EMBEDDED {
};
-class RangeEvaluationContext BASE_EMBEDDED {
- public:
- RangeEvaluationContext(HValue* value, HValue* upper);
-
- HValue* lower_bound() { return lower_bound_; }
- HValue* lower_bound_guarantee() { return lower_bound_guarantee_; }
- HValue* candidate() { return candidate_; }
- HValue* upper_bound() { return upper_bound_; }
- HValue* upper_bound_guarantee() { return upper_bound_guarantee_; }
- int offset() { return offset_; }
- int scale() { return scale_; }
-
- bool is_range_satisfied() {
- return lower_bound_guarantee() != NULL && upper_bound_guarantee() != NULL;
- }
-
- void set_lower_bound_guarantee(HValue* guarantee) {
- lower_bound_guarantee_ = ConvertGuarantee(guarantee);
- }
- void set_upper_bound_guarantee(HValue* guarantee) {
- upper_bound_guarantee_ = ConvertGuarantee(guarantee);
- }
-
- void swap_candidate(DecompositionResult* other_candicate) {
- other_candicate->SwapValues(&candidate_, &offset_, &scale_);
- }
-
- private:
- HValue* ConvertGuarantee(HValue* guarantee);
-
- HValue* lower_bound_;
- HValue* lower_bound_guarantee_;
- HValue* candidate_;
- HValue* upper_bound_;
- HValue* upper_bound_guarantee_;
- int offset_;
- int scale_;
-};
-
-
typedef EnumSet<GVNFlag> GVNFlagSet;
@@ -817,12 +620,6 @@ class HValue: public ZoneObject {
// HGraph::ComputeSafeUint32Operations is responsible for setting this
// flag.
kUint32,
- // If a phi is involved in the evaluation of a numeric constraint the
- // recursion can cause an endless cycle: we use this flag to exit the loop.
- kNumericConstraintEvaluationInProgress,
- // This flag is set to true after the SetupInformativeDefinitions() pass
- // has processed this instruction.
- kIDefsProcessingDone,
kHasNoObservableSideEffects,
// Indicates the instruction is live during dead code elimination.
kIsLive,
@@ -873,12 +670,13 @@ class HValue: public ZoneObject {
HYDROGEN_ABSTRACT_INSTRUCTION_LIST(DECLARE_PREDICATE)
#undef DECLARE_PREDICATE
- HValue() : block_(NULL),
- id_(kNoNumber),
- type_(HType::Tagged()),
- use_list_(NULL),
- range_(NULL),
- flags_(0) {}
+ HValue(HType type = HType::Tagged())
+ : block_(NULL),
+ id_(kNoNumber),
+ type_(type),
+ use_list_(NULL),
+ range_(NULL),
+ flags_(0) {}
virtual ~HValue() {}
HBasicBlock* block() const { return block_; }
@@ -959,8 +757,8 @@ class HValue: public ZoneObject {
return RedefinedOperandIndex() != kNoRedefinedOperand;
}
HValue* RedefinedOperand() {
- return IsInformativeDefinition() ? OperandAt(RedefinedOperandIndex())
- : NULL;
+ int index = RedefinedOperandIndex();
+ return index == kNoRedefinedOperand ? NULL : OperandAt(index);
}
// A purely informative definition is an idef that will not emit code and
@@ -971,17 +769,8 @@ class HValue: public ZoneObject {
// This method must always return the original HValue SSA definition
// (regardless of any iDef of this value).
HValue* ActualValue() {
- return IsInformativeDefinition() ? RedefinedOperand()->ActualValue()
- : this;
- }
-
- virtual void AddInformativeDefinitions() {}
-
- void UpdateRedefinedUsesWhileSettingUpInformativeDefinitions() {
- UpdateRedefinedUsesInner<TestDominanceUsingProcessedFlag>();
- }
- void UpdateRedefinedUses() {
- UpdateRedefinedUsesInner<Dominates>();
+ int index = RedefinedOperandIndex();
+ return index == kNoRedefinedOperand ? this : OperandAt(index);
}
bool IsInteger32Constant();
@@ -1132,12 +921,6 @@ class HValue: public ZoneObject {
virtual void Verify() = 0;
#endif
- bool IsRelationTrue(NumericRelation relation,
- HValue* other,
- int offset = 0,
- int scale = 0);
-
- bool TryGuaranteeRange(HValue* upper_bound);
virtual bool TryDecompose(DecompositionResult* decomposition) {
if (RedefinedOperand() != NULL) {
return RedefinedOperand()->TryDecompose(decomposition);
@@ -1159,17 +942,6 @@ class HValue: public ZoneObject {
}
protected:
- void TryGuaranteeRangeRecursive(RangeEvaluationContext* context);
-
- enum RangeGuaranteeDirection {
- DIRECTION_NONE = 0,
- DIRECTION_UPPER = 1,
- DIRECTION_LOWER = 2,
- DIRECTION_BOTH = DIRECTION_UPPER | DIRECTION_LOWER
- };
- virtual void SetResponsibilityForRange(RangeGuaranteeDirection direction) {}
- virtual void TryGuaranteeRangeChanging(RangeEvaluationContext* context) {}
-
// This function must be overridden for instructions with flag kUseGVN, to
// compare the non-Operand parts of the instruction.
virtual bool DataEquals(HValue* other) {
@@ -1203,47 +975,6 @@ class HValue: public ZoneObject {
representation_ = r;
}
- // Signature of a function testing if a HValue properly dominates another.
- typedef bool (*DominanceTest)(HValue*, HValue*);
-
- // Simple implementation of DominanceTest implemented walking the chain
- // of Hinstructions (used in UpdateRedefinedUsesInner).
- static bool Dominates(HValue* dominator, HValue* dominated);
-
- // A fast implementation of DominanceTest that works only for the
- // "current" instruction in the SetupInformativeDefinitions() phase.
- // During that phase we use a flag to mark processed instructions, and by
- // checking the flag we can quickly test if an instruction comes before or
- // after the "current" one.
- static bool TestDominanceUsingProcessedFlag(HValue* dominator,
- HValue* dominated);
-
- // If we are redefining an operand, update all its dominated uses (the
- // function that checks if a use is dominated is the template argument).
- template<DominanceTest TestDominance>
- void UpdateRedefinedUsesInner() {
- HValue* input = RedefinedOperand();
- if (input != NULL) {
- for (HUseIterator uses = input->uses(); !uses.Done(); uses.Advance()) {
- HValue* use = uses.value();
- if (TestDominance(this, use)) {
- use->SetOperandAt(uses.index(), this);
- }
- }
- }
- }
-
- // Informative definitions can override this method to state any numeric
- // relation they provide on the redefined value.
- // Returns true if it is guaranteed that:
- // ((this + offset) >> scale) relation other
- virtual bool IsRelationTrueInternal(NumericRelation relation,
- HValue* other,
- int offset = 0,
- int scale = 0) {
- return false;
- }
-
static GVNFlagSet AllDependsOnFlagSet() {
GVNFlagSet result;
// Create changes mask.
@@ -1308,6 +1039,48 @@ class HValue: public ZoneObject {
};
+#define DECLARE_INSTRUCTION_FACTORY_P0(I) \
+ static I* New(Zone* zone, HValue* context) { \
+ return new(zone) I(); \
+}
+
+#define DECLARE_INSTRUCTION_FACTORY_P1(I, P1) \
+ static I* New(Zone* zone, HValue* context, P1 p1) { \
+ return new(zone) I(p1); \
+ }
+
+#define DECLARE_INSTRUCTION_FACTORY_P2(I, P1, P2) \
+ static I* New(Zone* zone, HValue* context, P1 p1, P2 p2) { \
+ return new(zone) I(p1, p2); \
+ }
+
+#define DECLARE_INSTRUCTION_FACTORY_P3(I, P1, P2, P3) \
+ static I* New(Zone* zone, HValue* context, P1 p1, P2 p2, P3 p3) { \
+ return new(zone) I(p1, p2, p3); \
+ }
+
+#define DECLARE_INSTRUCTION_FACTORY_P4(I, P1, P2, P3, P4) \
+ static I* New(Zone* zone, \
+ HValue* context, \
+ P1 p1, \
+ P2 p2, \
+ P3 p3, \
+ P4 p4) { \
+ return new(zone) I(p1, p2, p3, p4); \
+ }
+
+#define DECLARE_INSTRUCTION_FACTORY_P5(I, P1, P2, P3, P4, P5) \
+ static I* New(Zone* zone, \
+ HValue* context, \
+ P1 p1, \
+ P2 p2, \
+ P3 p3, \
+ P4 p4, \
+ P5 p5) { \
+ return new(zone) I(p1, p2, p3, p4, p5); \
+ }
+
+
class HInstruction: public HValue {
public:
HInstruction* next() const { return next_; }
@@ -1343,8 +1116,9 @@ class HInstruction: public HValue {
DECLARE_ABSTRACT_INSTRUCTION(Instruction)
protected:
- HInstruction()
- : next_(NULL),
+ HInstruction(HType type = HType::Tagged())
+ : HValue(type),
+ next_(NULL),
previous_(NULL),
position_(RelocInfo::kNoPosition) {
SetGVNFlag(kDependsOnOsrEntries);
@@ -1375,6 +1149,8 @@ class HTemplateInstruction : public HInstruction {
HValue* OperandAt(int i) const { return inputs_[i]; }
protected:
+ HTemplateInstruction(HType type = HType::Tagged()) : HInstruction(type) {}
+
void InternalSetOperandAt(int i, HValue* value) { inputs_[i] = value; }
private:
@@ -1448,12 +1224,12 @@ class HBlockEntry: public HTemplateInstruction<0> {
class HDummyUse: public HTemplateInstruction<1> {
public:
- explicit HDummyUse(HValue* value) {
+ explicit HDummyUse(HValue* value)
+ : HTemplateInstruction<1>(HType::Smi()) {
SetOperandAt(0, value);
// Pretend to be a Smi so that the HChange instructions inserted
// before any use generate as little code as possible.
set_representation(Representation::Tagged());
- set_type(HType::Smi());
}
HValue* value() { return OperandAt(0); }
@@ -1469,55 +1245,9 @@ class HDummyUse: public HTemplateInstruction<1> {
};
-class HNumericConstraint : public HTemplateInstruction<2> {
- public:
- static HNumericConstraint* AddToGraph(HValue* constrained_value,
- NumericRelation relation,
- HValue* related_value,
- HInstruction* insertion_point = NULL);
-
- HValue* constrained_value() { return OperandAt(0); }
- HValue* related_value() { return OperandAt(1); }
- NumericRelation relation() { return relation_; }
-
- virtual int RedefinedOperandIndex() { return 0; }
- virtual bool IsPurelyInformativeDefinition() { return true; }
-
- virtual Representation RequiredInputRepresentation(int index) {
- return representation();
- }
-
- virtual void PrintDataTo(StringStream* stream);
-
- virtual bool IsRelationTrueInternal(NumericRelation other_relation,
- HValue* other_related_value,
- int offset = 0,
- int scale = 0) {
- if (related_value() == other_related_value) {
- return relation().CompoundImplies(other_relation, offset, scale);
- } else {
- return false;
- }
- }
-
- DECLARE_CONCRETE_INSTRUCTION(NumericConstraint)
-
- private:
- HNumericConstraint(HValue* constrained_value,
- NumericRelation relation,
- HValue* related_value)
- : relation_(relation) {
- SetOperandAt(0, constrained_value);
- SetOperandAt(1, related_value);
- }
-
- NumericRelation relation_;
-};
-
-
class HDeoptimize: public HTemplateInstruction<0> {
public:
- explicit HDeoptimize(Deoptimizer::BailoutType type) : type_(type) {}
+ DECLARE_INSTRUCTION_FACTORY_P1(HDeoptimize, Deoptimizer::BailoutType);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
@@ -1528,6 +1258,8 @@ class HDeoptimize: public HTemplateInstruction<0> {
DECLARE_CONCRETE_INSTRUCTION(Deoptimize)
private:
+ explicit HDeoptimize(Deoptimizer::BailoutType type) : type_(type) {}
+
Deoptimizer::BailoutType type_;
};
@@ -1628,12 +1360,44 @@ class HCompareMap: public HUnaryControlInstruction {
};
+class HContext: public HTemplateInstruction<0> {
+ public:
+ static HContext* New(Zone* zone) {
+ return new(zone) HContext();
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::None();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(Context)
+
+ protected:
+ virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ HContext() {
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ }
+
+ virtual bool IsDeletable() const { return true; }
+};
+
+
class HReturn: public HTemplateControlInstruction<0, 3> {
public:
- HReturn(HValue* value, HValue* context, HValue* parameter_count) {
- SetOperandAt(0, value);
- SetOperandAt(1, context);
- SetOperandAt(2, parameter_count);
+ static HInstruction* New(Zone* zone,
+ HValue* context,
+ HValue* value,
+ HValue* parameter_count) {
+ return new(zone) HReturn(value, context, parameter_count);
+ }
+
+ static HInstruction* New(Zone* zone,
+ HValue* context,
+ HValue* value) {
+ return new(zone) HReturn(value, context, 0);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -1647,6 +1411,13 @@ class HReturn: public HTemplateControlInstruction<0, 3> {
HValue* parameter_count() { return OperandAt(2); }
DECLARE_CONCRETE_INSTRUCTION(Return)
+
+ private:
+ HReturn(HValue* value, HValue* context, HValue* parameter_count) {
+ SetOperandAt(0, value);
+ SetOperandAt(1, context);
+ SetOperandAt(2, parameter_count);
+ }
};
@@ -1662,7 +1433,8 @@ class HAbnormalExit: public HTemplateControlInstruction<0, 0> {
class HUnaryOperation: public HTemplateInstruction<1> {
public:
- explicit HUnaryOperation(HValue* value) {
+ HUnaryOperation(HValue* value, HType type = HType::Tagged())
+ : HTemplateInstruction<1>(type) {
SetOperandAt(0, value);
}
@@ -1677,10 +1449,10 @@ class HUnaryOperation: public HTemplateInstruction<1> {
class HThrow: public HTemplateInstruction<2> {
public:
- HThrow(HValue* context, HValue* value) {
- SetOperandAt(0, context);
- SetOperandAt(1, value);
- SetAllSideEffects();
+ static HThrow* New(Zone* zone,
+ HValue* context,
+ HValue* value) {
+ return new(zone) HThrow(context, value);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -1691,27 +1463,34 @@ class HThrow: public HTemplateInstruction<2> {
HValue* value() { return OperandAt(1); }
DECLARE_CONCRETE_INSTRUCTION(Throw)
+
+ private:
+ HThrow(HValue* context, HValue* value) {
+ SetOperandAt(0, context);
+ SetOperandAt(1, value);
+ SetAllSideEffects();
+ }
};
class HUseConst: public HUnaryOperation {
public:
- explicit HUseConst(HValue* old_value) : HUnaryOperation(old_value) { }
+ DECLARE_INSTRUCTION_FACTORY_P1(HUseConst, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
}
DECLARE_CONCRETE_INSTRUCTION(UseConst)
+
+ private:
+ explicit HUseConst(HValue* old_value) : HUnaryOperation(old_value) { }
};
class HForceRepresentation: public HTemplateInstruction<1> {
public:
- HForceRepresentation(HValue* value, Representation required_representation) {
- SetOperandAt(0, value);
- set_representation(required_representation);
- }
+ DECLARE_INSTRUCTION_FACTORY_P2(HForceRepresentation, HValue*, Representation);
HValue* value() { return OperandAt(0); }
@@ -1724,6 +1503,12 @@ class HForceRepresentation: public HTemplateInstruction<1> {
virtual void PrintDataTo(StringStream* stream);
DECLARE_CONCRETE_INSTRUCTION(ForceRepresentation)
+
+ private:
+ HForceRepresentation(HValue* value, Representation required_representation) {
+ SetOperandAt(0, value);
+ set_representation(required_representation);
+ }
};
@@ -1785,12 +1570,7 @@ class HChange: public HUnaryOperation {
class HClampToUint8: public HUnaryOperation {
public:
- explicit HClampToUint8(HValue* value)
- : HUnaryOperation(value) {
- set_representation(Representation::Integer32());
- SetFlag(kAllowUndefinedAsNaN);
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HClampToUint8, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
@@ -1802,6 +1582,13 @@ class HClampToUint8: public HUnaryOperation {
virtual bool DataEquals(HValue* other) { return true; }
private:
+ explicit HClampToUint8(HValue* value)
+ : HUnaryOperation(value) {
+ set_representation(Representation::Integer32());
+ SetFlag(kAllowUndefinedAsNaN);
+ SetFlag(kUseGVN);
+ }
+
virtual bool IsDeletable() const { return true; }
};
@@ -1958,10 +1745,7 @@ class HStackCheck: public HTemplateInstruction<1> {
kBackwardsBranch
};
- HStackCheck(HValue* context, Type type) : type_(type) {
- SetOperandAt(0, context);
- SetGVNFlag(kChangesNewSpacePromotion);
- }
+ DECLARE_INSTRUCTION_FACTORY_P2(HStackCheck, HValue*, Type);
HValue* context() { return OperandAt(0); }
@@ -1983,6 +1767,11 @@ class HStackCheck: public HTemplateInstruction<1> {
DECLARE_CONCRETE_INSTRUCTION(StackCheck)
private:
+ HStackCheck(HValue* context, Type type) : type_(type) {
+ SetOperandAt(0, context);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
+
Type type_;
};
@@ -2001,23 +1790,18 @@ class HArgumentsObject;
class HEnterInlined: public HTemplateInstruction<0> {
public:
- HEnterInlined(Handle<JSFunction> closure,
- int arguments_count,
- FunctionLiteral* function,
- InliningKind inlining_kind,
- Variable* arguments_var,
- HArgumentsObject* arguments_object,
- bool undefined_receiver,
- Zone* zone)
- : closure_(closure),
- arguments_count_(arguments_count),
- arguments_pushed_(false),
- function_(function),
- inlining_kind_(inlining_kind),
- arguments_var_(arguments_var),
- arguments_object_(arguments_object),
- undefined_receiver_(undefined_receiver),
- return_targets_(2, zone) {
+ static HEnterInlined* New(Zone* zone,
+ HValue* context,
+ Handle<JSFunction> closure,
+ int arguments_count,
+ FunctionLiteral* function,
+ InliningKind inlining_kind,
+ Variable* arguments_var,
+ HArgumentsObject* arguments_object,
+ bool undefined_receiver) {
+ return new(zone) HEnterInlined(closure, arguments_count, function,
+ inlining_kind, arguments_var,
+ arguments_object, undefined_receiver, zone);
}
void RegisterReturnTarget(HBasicBlock* return_target, Zone* zone);
@@ -2043,6 +1827,25 @@ class HEnterInlined: public HTemplateInstruction<0> {
DECLARE_CONCRETE_INSTRUCTION(EnterInlined)
private:
+ HEnterInlined(Handle<JSFunction> closure,
+ int arguments_count,
+ FunctionLiteral* function,
+ InliningKind inlining_kind,
+ Variable* arguments_var,
+ HArgumentsObject* arguments_object,
+ bool undefined_receiver,
+ Zone* zone)
+ : closure_(closure),
+ arguments_count_(arguments_count),
+ arguments_pushed_(false),
+ function_(function),
+ inlining_kind_(inlining_kind),
+ arguments_var_(arguments_var),
+ arguments_object_(arguments_object),
+ undefined_receiver_(undefined_receiver),
+ return_targets_(2, zone) {
+ }
+
Handle<JSFunction> closure_;
int arguments_count_;
bool arguments_pushed_;
@@ -2069,9 +1872,7 @@ class HLeaveInlined: public HTemplateInstruction<0> {
class HPushArgument: public HUnaryOperation {
public:
- explicit HPushArgument(HValue* value) : HUnaryOperation(value) {
- set_representation(Representation::Tagged());
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HPushArgument, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -2080,6 +1881,11 @@ class HPushArgument: public HUnaryOperation {
HValue* argument() { return OperandAt(0); }
DECLARE_CONCRETE_INSTRUCTION(PushArgument)
+
+ private:
+ explicit HPushArgument(HValue* value) : HUnaryOperation(value) {
+ set_representation(Representation::Tagged());
+ }
};
@@ -2104,44 +1910,25 @@ class HThisFunction: public HTemplateInstruction<0> {
};
-class HContext: public HTemplateInstruction<0> {
+class HOuterContext: public HUnaryOperation {
public:
- HContext() {
- set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HOuterContext, HValue*);
+
+ DECLARE_CONCRETE_INSTRUCTION(OuterContext);
virtual Representation RequiredInputRepresentation(int index) {
- return Representation::None();
+ return Representation::Tagged();
}
- DECLARE_CONCRETE_INSTRUCTION(Context)
-
protected:
virtual bool DataEquals(HValue* other) { return true; }
private:
- virtual bool IsDeletable() const { return true; }
-};
-
-
-class HOuterContext: public HUnaryOperation {
- public:
explicit HOuterContext(HValue* inner) : HUnaryOperation(inner) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
}
- DECLARE_CONCRETE_INSTRUCTION(OuterContext);
-
- virtual Representation RequiredInputRepresentation(int index) {
- return Representation::Tagged();
- }
-
- protected:
- virtual bool DataEquals(HValue* other) { return true; }
-
- private:
virtual bool IsDeletable() const { return true; }
};
@@ -2158,6 +1945,13 @@ class HDeclareGlobals: public HUnaryOperation {
SetAllSideEffects();
}
+ static HDeclareGlobals* New(Zone* zone,
+ HValue* context,
+ Handle<FixedArray> pairs,
+ int flags) {
+ return new(zone) HDeclareGlobals(context, pairs, flags);
+ }
+
HValue* context() { return OperandAt(0); }
Handle<FixedArray> pairs() const { return pairs_; }
int flags() const { return flags_; }
@@ -2167,6 +1961,7 @@ class HDeclareGlobals: public HUnaryOperation {
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
+
private:
Handle<FixedArray> pairs_;
int flags_;
@@ -2180,6 +1975,10 @@ class HGlobalObject: public HUnaryOperation {
SetFlag(kUseGVN);
}
+ static HGlobalObject* New(Zone* zone, HValue* context) {
+ return new(zone) HGlobalObject(context);
+ }
+
DECLARE_CONCRETE_INSTRUCTION(GlobalObject)
virtual Representation RequiredInputRepresentation(int index) {
@@ -2196,11 +1995,7 @@ class HGlobalObject: public HUnaryOperation {
class HGlobalReceiver: public HUnaryOperation {
public:
- explicit HGlobalReceiver(HValue* global_object)
- : HUnaryOperation(global_object) {
- set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HGlobalReceiver, HValue*);
DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver)
@@ -2212,6 +2007,12 @@ class HGlobalReceiver: public HUnaryOperation {
virtual bool DataEquals(HValue* other) { return true; }
private:
+ explicit HGlobalReceiver(HValue* global_object)
+ : HUnaryOperation(global_object) {
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ }
+
virtual bool IsDeletable() const { return true; }
};
@@ -2278,6 +2079,13 @@ class HInvokeFunction: public HBinaryCall {
: HBinaryCall(context, function, argument_count) {
}
+ static HInvokeFunction* New(Zone* zone,
+ HValue* context,
+ HValue* function,
+ int argument_count) {
+ return new(zone) HInvokeFunction(context, function, argument_count);
+ }
+
HInvokeFunction(HValue* context,
HValue* function,
Handle<JSFunction> known_function,
@@ -2288,6 +2096,15 @@ class HInvokeFunction: public HBinaryCall {
? 0 : known_function->shared()->formal_parameter_count();
}
+ static HInvokeFunction* New(Zone* zone,
+ HValue* context,
+ HValue* function,
+ Handle<JSFunction> known_function,
+ int argument_count) {
+ return new(zone) HInvokeFunction(context, function,
+ known_function, argument_count);
+ }
+
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
@@ -2379,6 +2196,13 @@ class HCallFunction: public HBinaryCall {
: HBinaryCall(context, function, argument_count) {
}
+ static HCallFunction* New(Zone* zone,
+ HValue* context,
+ HValue* function,
+ int argument_count) {
+ return new(zone) HCallFunction(context, function, argument_count);
+ }
+
HValue* context() { return first(); }
HValue* function() { return second(); }
@@ -2396,6 +2220,13 @@ class HCallGlobal: public HUnaryCall {
: HUnaryCall(context, argument_count), name_(name) {
}
+ static HCallGlobal* New(Zone* zone,
+ HValue* context,
+ Handle<String> name,
+ int argument_count) {
+ return new(zone) HCallGlobal(context, name, argument_count);
+ }
+
virtual void PrintDataTo(StringStream* stream);
HValue* context() { return value(); }
@@ -2479,12 +2310,12 @@ class HCallNewArray: public HCallNew {
class HCallRuntime: public HCall<1> {
public:
- HCallRuntime(HValue* context,
- Handle<String> name,
- const Runtime::Function* c_function,
- int argument_count)
- : HCall<1>(argument_count), c_function_(c_function), name_(name) {
- SetOperandAt(0, context);
+ static HCallRuntime* New(Zone* zone,
+ HValue* context,
+ Handle<String> name,
+ const Runtime::Function* c_function,
+ int argument_count) {
+ return new(zone) HCallRuntime(context, name, c_function, argument_count);
}
virtual void PrintDataTo(StringStream* stream);
@@ -2500,6 +2331,14 @@ class HCallRuntime: public HCall<1> {
DECLARE_CONCRETE_INSTRUCTION(CallRuntime)
private:
+ HCallRuntime(HValue* context,
+ Handle<String> name,
+ const Runtime::Function* c_function,
+ int argument_count)
+ : HCall<1>(argument_count), c_function_(c_function), name_(name) {
+ SetOperandAt(0, context);
+ }
+
const Runtime::Function* c_function_;
Handle<String> name_;
};
@@ -2507,12 +2346,7 @@ class HCallRuntime: public HCall<1> {
class HMapEnumLength: public HUnaryOperation {
public:
- explicit HMapEnumLength(HValue* value) : HUnaryOperation(value) {
- set_type(HType::Smi());
- set_representation(Representation::Smi());
- SetFlag(kUseGVN);
- SetGVNFlag(kDependsOnMaps);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HMapEnumLength, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -2524,6 +2358,13 @@ class HMapEnumLength: public HUnaryOperation {
virtual bool DataEquals(HValue* other) { return true; }
private:
+ explicit HMapEnumLength(HValue* value)
+ : HUnaryOperation(value, HType::Smi()) {
+ set_representation(Representation::Smi());
+ SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
+ }
+
virtual bool IsDeletable() const { return true; }
};
@@ -2550,35 +2391,6 @@ class HElementsKind: public HUnaryOperation {
};
-class HBitNot: public HUnaryOperation {
- public:
- explicit HBitNot(HValue* value) : HUnaryOperation(value) {
- set_representation(Representation::Integer32());
- SetFlag(kUseGVN);
- SetFlag(kTruncatingToInt32);
- SetFlag(kAllowUndefinedAsNaN);
- }
-
- virtual Representation RequiredInputRepresentation(int index) {
- return Representation::Integer32();
- }
- virtual Representation observed_input_representation(int index) {
- return Representation::Integer32();
- }
- virtual HType CalculateInferredType();
-
- virtual HValue* Canonicalize();
-
- DECLARE_CONCRETE_INSTRUCTION(BitNot)
-
- protected:
- virtual bool DataEquals(HValue* other) { return true; }
-
- private:
- virtual bool IsDeletable() const { return true; }
-};
-
-
class HUnaryMathOperation: public HTemplateInstruction<2> {
public:
static HInstruction* New(Zone* zone,
@@ -2591,8 +2403,6 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
virtual void PrintDataTo(StringStream* stream);
- virtual HType CalculateInferredType();
-
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
virtual Representation RequiredInputRepresentation(int index) {
@@ -2637,7 +2447,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
private:
HUnaryMathOperation(HValue* context, HValue* value, BuiltinFunctionId op)
- : op_(op) {
+ : HTemplateInstruction<2>(HType::TaggedNumber()), op_(op) {
SetOperandAt(0, context);
SetOperandAt(1, value);
switch (op) {
@@ -2681,15 +2491,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
class HLoadExternalArrayPointer: public HUnaryOperation {
public:
- explicit HLoadExternalArrayPointer(HValue* value)
- : HUnaryOperation(value) {
- set_representation(Representation::External());
- // The result of this instruction is idempotent as long as its inputs don't
- // change. The external array of a specialized array elements object cannot
- // change once set, so it's no necessary to introduce any additional
- // dependencies on top of the inputs.
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HLoadExternalArrayPointer, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -2705,27 +2507,37 @@ class HLoadExternalArrayPointer: public HUnaryOperation {
virtual bool DataEquals(HValue* other) { return true; }
private:
+ explicit HLoadExternalArrayPointer(HValue* value)
+ : HUnaryOperation(value) {
+ set_representation(Representation::External());
+ // The result of this instruction is idempotent as long as its inputs don't
+ // change. The external array of a specialized array elements object cannot
+ // change once set, so it's no necessary to introduce any additional
+ // dependencies on top of the inputs.
+ SetFlag(kUseGVN);
+ }
+
virtual bool IsDeletable() const { return true; }
};
class HCheckMaps: public HTemplateInstruction<2> {
public:
- static HCheckMaps* New(HValue* value, Handle<Map> map, Zone* zone,
- CompilationInfo* info, HValue *typecheck = NULL);
- static HCheckMaps* New(HValue* value, SmallMapList* maps, Zone* zone,
+ static HCheckMaps* New(Zone* zone, HValue* context, HValue* value,
+ Handle<Map> map, CompilationInfo* info,
+ HValue *typecheck = NULL);
+ static HCheckMaps* New(Zone* zone, HValue* context,
+ HValue* value, SmallMapList* maps,
HValue *typecheck = NULL) {
HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
for (int i = 0; i < maps->length(); i++) {
check_map->map_set_.Add(maps->at(i), zone);
+ check_map->has_migration_target_ |= maps->at(i)->is_migration_target();
}
check_map->map_set_.Sort();
return check_map;
}
- static HCheckMaps* NewWithTransitions(HValue* value, Handle<Map> map,
- Zone* zone, CompilationInfo* info);
-
bool CanOmitMapChecks() { return omit_; }
virtual bool HasEscapingOperandAt(int index) { return false; }
@@ -2735,11 +2547,14 @@ class HCheckMaps: public HTemplateInstruction<2> {
virtual void HandleSideEffectDominator(GVNFlag side_effect,
HValue* dominator);
virtual void PrintDataTo(StringStream* stream);
- virtual HType CalculateInferredType();
HValue* value() { return OperandAt(0); }
SmallMapList* map_set() { return &map_set_; }
+ bool has_migration_target() {
+ return has_migration_target_;
+ }
+
virtual void FinalizeUniqueValueId();
DECLARE_CONCRETE_INSTRUCTION(CheckMaps)
@@ -2763,7 +2578,8 @@ class HCheckMaps: public HTemplateInstruction<2> {
private:
// Clients should use one of the static New* methods above.
HCheckMaps(HValue* value, Zone *zone, HValue* typecheck)
- : omit_(false), map_unique_ids_(0, zone) {
+ : HTemplateInstruction<2>(value->type()),
+ omit_(false), has_migration_target_(false), map_unique_ids_(0, zone) {
SetOperandAt(0, value);
// Use the object value for the dependency if NULL is passed.
// TODO(titzer): do GVN flags already express this dependency?
@@ -2785,6 +2601,7 @@ class HCheckMaps: public HTemplateInstruction<2> {
}
bool omit_;
+ bool has_migration_target_;
SmallMapList map_set_;
ZoneList<UniqueValueId> map_unique_ids_;
};
@@ -2792,18 +2609,12 @@ class HCheckMaps: public HTemplateInstruction<2> {
class HCheckFunction: public HUnaryOperation {
public:
- HCheckFunction(HValue* value, Handle<JSFunction> function)
- : HUnaryOperation(value), target_(function), target_unique_id_() {
- set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
- target_in_new_space_ = Isolate::Current()->heap()->InNewSpace(*function);
- }
+ DECLARE_INSTRUCTION_FACTORY_P2(HCheckFunction, HValue*, Handle<JSFunction>);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
virtual void PrintDataTo(StringStream* stream);
- virtual HType CalculateInferredType();
virtual HValue* Canonicalize();
@@ -2827,6 +2638,14 @@ class HCheckFunction: public HUnaryOperation {
}
private:
+ HCheckFunction(HValue* value, Handle<JSFunction> function)
+ : HUnaryOperation(value, value->type()),
+ target_(function), target_unique_id_() {
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ target_in_new_space_ = Isolate::Current()->heap()->InNewSpace(*function);
+ }
+
Handle<JSFunction> target_;
UniqueValueId target_unique_id_;
bool target_in_new_space_;
@@ -2895,17 +2714,12 @@ class HCheckInstanceType: public HUnaryOperation {
class HCheckSmi: public HUnaryOperation {
public:
- explicit HCheckSmi(HValue* value) : HUnaryOperation(value) {
- set_representation(Representation::Smi());
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HCheckSmi, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
- virtual HType CalculateInferredType();
-
virtual HValue* Canonicalize() {
HType value_type = value()->type();
if (value_type.IsSmi()) {
@@ -2918,6 +2732,12 @@ class HCheckSmi: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ private:
+ explicit HCheckSmi(HValue* value) : HUnaryOperation(value, HType::Smi()) {
+ set_representation(Representation::Smi());
+ SetFlag(kUseGVN);
+ }
};
@@ -2938,17 +2758,12 @@ class HIsNumberAndBranch: public HUnaryControlInstruction {
class HCheckHeapObject: public HUnaryOperation {
public:
- explicit HCheckHeapObject(HValue* value) : HUnaryOperation(value) {
- set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HCheckHeapObject, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
- virtual HType CalculateInferredType();
-
#ifdef DEBUG
virtual void Verify();
#endif
@@ -2961,79 +2776,13 @@ class HCheckHeapObject: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
-};
-
-class HCheckPrototypeMaps: public HTemplateInstruction<0> {
- public:
- HCheckPrototypeMaps(Handle<JSObject> prototype,
- Handle<JSObject> holder,
- Zone* zone,
- CompilationInfo* info)
- : prototypes_(2, zone),
- maps_(2, zone),
- first_prototype_unique_id_(),
- last_prototype_unique_id_(),
- can_omit_prototype_maps_(true) {
+ private:
+ explicit HCheckHeapObject(HValue* value)
+ : HUnaryOperation(value, HType::NonPrimitive()) {
+ set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetGVNFlag(kDependsOnMaps);
- // Keep a list of all objects on the prototype chain up to the holder
- // and the expected maps.
- while (true) {
- prototypes_.Add(prototype, zone);
- Handle<Map> map(prototype->map());
- maps_.Add(map, zone);
- can_omit_prototype_maps_ &= map->CanOmitPrototypeChecks();
- if (prototype.is_identical_to(holder)) break;
- prototype = Handle<JSObject>(JSObject::cast(prototype->GetPrototype()));
- }
- if (can_omit_prototype_maps_) {
- // Mark in-flight compilation as dependent on those maps.
- for (int i = 0; i < maps()->length(); i++) {
- Handle<Map> map = maps()->at(i);
- map->AddDependentCompilationInfo(DependentCode::kPrototypeCheckGroup,
- info);
- }
- }
}
-
- ZoneList<Handle<JSObject> >* prototypes() { return &prototypes_; }
-
- ZoneList<Handle<Map> >* maps() { return &maps_; }
-
- DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps)
-
- virtual Representation RequiredInputRepresentation(int index) {
- return Representation::None();
- }
-
- virtual void PrintDataTo(StringStream* stream);
-
- virtual intptr_t Hashcode() {
- return first_prototype_unique_id_.Hashcode() * 17 +
- last_prototype_unique_id_.Hashcode();
- }
-
- virtual void FinalizeUniqueValueId() {
- first_prototype_unique_id_ = UniqueValueId(prototypes_.first());
- last_prototype_unique_id_ = UniqueValueId(prototypes_.last());
- }
-
- bool CanOmitPrototypeChecks() { return can_omit_prototype_maps_; }
-
- protected:
- virtual bool DataEquals(HValue* other) {
- HCheckPrototypeMaps* b = HCheckPrototypeMaps::cast(other);
- return first_prototype_unique_id_ == b->first_prototype_unique_id_ &&
- last_prototype_unique_id_ == b->last_prototype_unique_id_;
- }
-
- private:
- ZoneList<Handle<JSObject> > prototypes_;
- ZoneList<Handle<Map> > maps_;
- UniqueValueId first_prototype_unique_id_;
- UniqueValueId last_prototype_unique_id_;
- bool can_omit_prototype_maps_;
};
@@ -3310,8 +3059,6 @@ class HPhi: public HValue {
induction_variable_data_ = InductionVariableData::ExaminePhi(this);
}
- virtual void AddInformativeDefinitions();
-
virtual void PrintTo(StringStream* stream);
#ifdef DEBUG
@@ -3362,11 +3109,6 @@ class HPhi: public HValue {
inputs_[index] = value;
}
- virtual bool IsRelationTrueInternal(NumericRelation relation,
- HValue* other,
- int offset = 0,
- int scale = 0);
-
private:
ZoneList<HValue*> inputs_;
int merged_index_;
@@ -3381,58 +3123,12 @@ class HPhi: public HValue {
};
-class HInductionVariableAnnotation : public HUnaryOperation {
- public:
- static HInductionVariableAnnotation* AddToGraph(HPhi* phi,
- NumericRelation relation,
- int operand_index);
-
- NumericRelation relation() { return relation_; }
- HValue* induction_base() { return phi_->OperandAt(operand_index_); }
-
- virtual int RedefinedOperandIndex() { return 0; }
- virtual bool IsPurelyInformativeDefinition() { return true; }
- virtual Representation RequiredInputRepresentation(int index) {
- return representation();
- }
-
- virtual void PrintDataTo(StringStream* stream);
-
- virtual bool IsRelationTrueInternal(NumericRelation other_relation,
- HValue* other_related_value,
- int offset = 0,
- int scale = 0) {
- if (induction_base() == other_related_value) {
- return relation().CompoundImplies(other_relation, offset, scale);
- } else {
- return false;
- }
- }
-
- DECLARE_CONCRETE_INSTRUCTION(InductionVariableAnnotation)
-
- private:
- HInductionVariableAnnotation(HPhi* phi,
- NumericRelation relation,
- int operand_index)
- : HUnaryOperation(phi),
- phi_(phi), relation_(relation), operand_index_(operand_index) {
- }
-
- // We need to store the phi both here and in the instruction operand because
- // the operand can change if a new idef of the phi is added between the phi
- // and this instruction (inserting an idef updates every use).
- HPhi* phi_;
- NumericRelation relation_;
- int operand_index_;
-};
-
-
class HArgumentsObject: public HTemplateInstruction<0> {
public:
- HArgumentsObject(int count, Zone* zone) : values_(count, zone) {
- set_representation(Representation::Tagged());
- SetFlag(kIsArguments);
+ static HArgumentsObject* New(Zone* zone,
+ HValue* context,
+ int count) {
+ return new(zone) HArgumentsObject(count, zone);
}
const ZoneList<HValue*>* arguments_values() const { return &values_; }
@@ -3459,6 +3155,11 @@ class HArgumentsObject: public HTemplateInstruction<0> {
}
private:
+ HArgumentsObject(int count, Zone* zone) : values_(count, zone) {
+ set_representation(Representation::Tagged());
+ SetFlag(kIsArguments);
+ }
+
virtual bool IsDeletable() const { return true; }
ZoneList<HValue*> values_;
@@ -3467,24 +3168,11 @@ class HArgumentsObject: public HTemplateInstruction<0> {
class HConstant: public HTemplateInstruction<0> {
public:
- HConstant(Handle<Object> handle, Representation r = Representation::None());
- HConstant(int32_t value,
- Representation r = Representation::None(),
- bool is_not_in_new_space = true,
- Handle<Object> optional_handle = Handle<Object>::null());
- HConstant(double value,
- Representation r = Representation::None(),
- bool is_not_in_new_space = true,
- Handle<Object> optional_handle = Handle<Object>::null());
- HConstant(Handle<Object> handle,
- UniqueValueId unique_id,
- Representation r,
- HType type,
- bool is_internalized_string,
- bool is_not_in_new_space,
- bool is_cell,
- bool boolean_value);
- explicit HConstant(ExternalReference reference);
+ DECLARE_INSTRUCTION_FACTORY_P1(HConstant, int32_t);
+ DECLARE_INSTRUCTION_FACTORY_P2(HConstant, int32_t, Representation);
+ DECLARE_INSTRUCTION_FACTORY_P1(HConstant, double);
+ DECLARE_INSTRUCTION_FACTORY_P1(HConstant, Handle<Object>);
+ DECLARE_INSTRUCTION_FACTORY_P1(HConstant, ExternalReference);
Handle<Object> handle() {
if (handle_.is_null()) {
@@ -3524,6 +3212,9 @@ class HConstant: public HTemplateInstruction<0> {
}
return false;
}
+ if (has_external_reference_value_) {
+ return false;
+ }
ASSERT(!handle_.is_null());
Heap* heap = isolate()->heap();
@@ -3665,6 +3356,26 @@ class HConstant: public HTemplateInstruction<0> {
}
private:
+ friend class HGraph;
+ HConstant(Handle<Object> handle, Representation r = Representation::None());
+ HConstant(int32_t value,
+ Representation r = Representation::None(),
+ bool is_not_in_new_space = true,
+ Handle<Object> optional_handle = Handle<Object>::null());
+ HConstant(double value,
+ Representation r = Representation::None(),
+ bool is_not_in_new_space = true,
+ Handle<Object> optional_handle = Handle<Object>::null());
+ HConstant(Handle<Object> handle,
+ UniqueValueId unique_id,
+ Representation r,
+ HType type,
+ bool is_internalized_string,
+ bool is_not_in_new_space,
+ bool is_cell,
+ bool boolean_value);
+ explicit HConstant(ExternalReference reference);
+
void Initialize(Representation r);
virtual bool IsDeletable() const { return true; }
@@ -3697,8 +3408,10 @@ class HConstant: public HTemplateInstruction<0> {
class HBinaryOperation: public HTemplateInstruction<3> {
public:
- HBinaryOperation(HValue* context, HValue* left, HValue* right)
- : observed_output_representation_(Representation::None()) {
+ HBinaryOperation(HValue* context, HValue* left, HValue* right,
+ HType type = HType::Tagged())
+ : HTemplateInstruction<3>(type),
+ observed_output_representation_(Representation::None()) {
ASSERT(left != NULL && right != NULL);
SetOperandAt(0, context);
SetOperandAt(1, left);
@@ -3782,11 +3495,7 @@ class HBinaryOperation: public HTemplateInstruction<3> {
class HWrapReceiver: public HTemplateInstruction<2> {
public:
- HWrapReceiver(HValue* receiver, HValue* function) {
- set_representation(Representation::Tagged());
- SetOperandAt(0, receiver);
- SetOperandAt(1, function);
- }
+ DECLARE_INSTRUCTION_FACTORY_P2(HWrapReceiver, HValue*, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -3800,6 +3509,13 @@ class HWrapReceiver: public HTemplateInstruction<2> {
virtual void PrintDataTo(StringStream* stream);
DECLARE_CONCRETE_INSTRUCTION(WrapReceiver)
+
+ private:
+ HWrapReceiver(HValue* receiver, HValue* function) {
+ set_representation(Representation::Tagged());
+ SetOperandAt(0, receiver);
+ SetOperandAt(1, function);
+ }
};
@@ -3835,12 +3551,7 @@ class HApplyArguments: public HTemplateInstruction<4> {
class HArgumentsElements: public HTemplateInstruction<0> {
public:
- explicit HArgumentsElements(bool from_inlined) : from_inlined_(from_inlined) {
- // The value produced by this instruction is a pointer into the stack
- // that looks as if it was a smi because of alignment.
- set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HArgumentsElements, bool);
DECLARE_CONCRETE_INSTRUCTION(ArgumentsElements)
@@ -3854,6 +3565,13 @@ class HArgumentsElements: public HTemplateInstruction<0> {
virtual bool DataEquals(HValue* other) { return true; }
private:
+ explicit HArgumentsElements(bool from_inlined) : from_inlined_(from_inlined) {
+ // The value produced by this instruction is a pointer into the stack
+ // that looks as if it was a smi because of alignment.
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ }
+
virtual bool IsDeletable() const { return true; }
bool from_inlined_;
@@ -3862,10 +3580,7 @@ class HArgumentsElements: public HTemplateInstruction<0> {
class HArgumentsLength: public HUnaryOperation {
public:
- explicit HArgumentsLength(HValue* value) : HUnaryOperation(value) {
- set_representation(Representation::Integer32());
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HArgumentsLength, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -3877,6 +3592,11 @@ class HArgumentsLength: public HUnaryOperation {
virtual bool DataEquals(HValue* other) { return true; }
private:
+ explicit HArgumentsLength(HValue* value) : HUnaryOperation(value) {
+ set_representation(Representation::Integer32());
+ SetFlag(kUseGVN);
+ }
+
virtual bool IsDeletable() const { return true; }
};
@@ -3915,32 +3635,14 @@ class HBoundsCheckBaseIndexInformation;
class HBoundsCheck: public HTemplateInstruction<2> {
public:
- // Normally HBoundsCheck should be created using the
- // HGraphBuilder::AddBoundsCheck() helper.
- // However when building stubs, where we know that the arguments are Int32,
- // it makes sense to invoke this constructor directly.
- HBoundsCheck(HValue* index, HValue* length)
- : skip_check_(false),
- base_(NULL), offset_(0), scale_(0),
- responsibility_direction_(DIRECTION_NONE),
- allow_equality_(false) {
- SetOperandAt(0, index);
- SetOperandAt(1, length);
- SetFlag(kFlexibleRepresentation);
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P2(HBoundsCheck, HValue*, HValue*);
bool skip_check() const { return skip_check_; }
void set_skip_check() { skip_check_ = true; }
+
HValue* base() { return base_; }
int offset() { return offset_; }
int scale() { return scale_; }
- bool index_can_increase() {
- return (responsibility_direction_ & DIRECTION_LOWER) == 0;
- }
- bool index_can_decrease() {
- return (responsibility_direction_ & DIRECTION_UPPER) == 0;
- }
void ApplyIndexChange();
bool DetectCompoundIndex() {
@@ -3964,11 +3666,6 @@ class HBoundsCheck: public HTemplateInstruction<2> {
return representation();
}
- virtual bool IsRelationTrueInternal(NumericRelation relation,
- HValue* related_value,
- int offset = 0,
- int scale = 0);
-
virtual void PrintDataTo(StringStream* stream);
virtual void InferRepresentation(HInferRepresentationPhase* h_infer);
@@ -3979,28 +3676,34 @@ class HBoundsCheck: public HTemplateInstruction<2> {
virtual int RedefinedOperandIndex() { return 0; }
virtual bool IsPurelyInformativeDefinition() { return skip_check(); }
- virtual void AddInformativeDefinitions();
DECLARE_CONCRETE_INSTRUCTION(BoundsCheck)
protected:
friend class HBoundsCheckBaseIndexInformation;
- virtual void SetResponsibilityForRange(RangeGuaranteeDirection direction) {
- responsibility_direction_ = static_cast<RangeGuaranteeDirection>(
- responsibility_direction_ | direction);
- }
-
virtual bool DataEquals(HValue* other) { return true; }
- virtual void TryGuaranteeRangeChanging(RangeEvaluationContext* context);
bool skip_check_;
HValue* base_;
int offset_;
int scale_;
- RangeGuaranteeDirection responsibility_direction_;
bool allow_equality_;
private:
+ // Normally HBoundsCheck should be created using the
+ // HGraphBuilder::AddBoundsCheck() helper.
+ // However when building stubs, where we know that the arguments are Int32,
+ // it makes sense to invoke this constructor directly.
+ HBoundsCheck(HValue* index, HValue* length)
+ : skip_check_(false),
+ base_(NULL), offset_(0), scale_(0),
+ allow_equality_(false) {
+ SetOperandAt(0, index);
+ SetOperandAt(1, length);
+ SetFlag(kFlexibleRepresentation);
+ SetFlag(kUseGVN);
+ }
+
virtual bool IsDeletable() const {
return skip_check() && !FLAG_debug_code;
}
@@ -4028,29 +3731,18 @@ class HBoundsCheckBaseIndexInformation: public HTemplateInstruction<2> {
return representation();
}
- virtual bool IsRelationTrueInternal(NumericRelation relation,
- HValue* related_value,
- int offset = 0,
- int scale = 0);
virtual void PrintDataTo(StringStream* stream);
virtual int RedefinedOperandIndex() { return 0; }
virtual bool IsPurelyInformativeDefinition() { return true; }
-
- protected:
- virtual void SetResponsibilityForRange(RangeGuaranteeDirection direction) {
- bounds_check()->SetResponsibilityForRange(direction);
- }
- virtual void TryGuaranteeRangeChanging(RangeEvaluationContext* context) {
- bounds_check()->TryGuaranteeRangeChanging(context);
- }
};
class HBitwiseBinaryOperation: public HBinaryOperation {
public:
- HBitwiseBinaryOperation(HValue* context, HValue* left, HValue* right)
- : HBinaryOperation(context, left, right) {
+ HBitwiseBinaryOperation(HValue* context, HValue* left, HValue* right,
+ HType type = HType::Tagged())
+ : HBinaryOperation(context, left, right, type) {
SetFlag(kFlexibleRepresentation);
SetFlag(kTruncatingToInt32);
SetFlag(kAllowUndefinedAsNaN);
@@ -4087,8 +3779,6 @@ class HBitwiseBinaryOperation: public HBinaryOperation {
HBinaryOperation::initialize_output_representation(observed);
}
- virtual HType CalculateInferredType();
-
DECLARE_ABSTRACT_INSTRUCTION(BitwiseBinaryOperation)
private:
@@ -4098,15 +3788,11 @@ class HBitwiseBinaryOperation: public HBinaryOperation {
class HMathFloorOfDiv: public HBinaryOperation {
public:
- HMathFloorOfDiv(HValue* context, HValue* left, HValue* right)
- : HBinaryOperation(context, left, right) {
- set_representation(Representation::Integer32());
- SetFlag(kUseGVN);
- SetFlag(kCanOverflow);
- if (!right->IsConstant()) {
- SetFlag(kCanBeDivByZero);
- }
- SetFlag(kAllowUndefinedAsNaN);
+ static HMathFloorOfDiv* New(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right) {
+ return new(zone) HMathFloorOfDiv(context, left, right);
}
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
@@ -4121,6 +3807,17 @@ class HMathFloorOfDiv: public HBinaryOperation {
virtual bool DataEquals(HValue* other) { return true; }
private:
+ HMathFloorOfDiv(HValue* context, HValue* left, HValue* right)
+ : HBinaryOperation(context, left, right) {
+ set_representation(Representation::Integer32());
+ SetFlag(kUseGVN);
+ SetFlag(kCanOverflow);
+ if (!right->IsConstant()) {
+ SetFlag(kCanBeDivByZero);
+ }
+ SetFlag(kAllowUndefinedAsNaN);
+ }
+
virtual bool IsDeletable() const { return true; }
};
@@ -4128,7 +3825,7 @@ class HMathFloorOfDiv: public HBinaryOperation {
class HArithmeticBinaryOperation: public HBinaryOperation {
public:
HArithmeticBinaryOperation(HValue* context, HValue* left, HValue* right)
- : HBinaryOperation(context, left, right) {
+ : HBinaryOperation(context, left, right, HType::TaggedNumber()) {
SetAllSideEffects();
SetFlag(kFlexibleRepresentation);
SetFlag(kAllowUndefinedAsNaN);
@@ -4144,8 +3841,6 @@ class HArithmeticBinaryOperation: public HBinaryOperation {
}
}
- virtual HType CalculateInferredType();
-
DECLARE_ABSTRACT_INSTRUCTION(ArithmeticBinaryOperation)
private:
@@ -4159,7 +3854,8 @@ class HCompareGeneric: public HBinaryOperation {
HValue* left,
HValue* right,
Token::Value token)
- : HBinaryOperation(context, left, right), token_(token) {
+ : HBinaryOperation(context, left, right, HType::Boolean()),
+ token_(token) {
ASSERT(Token::IsCompareOp(token));
set_representation(Representation::Tagged());
SetAllSideEffects();
@@ -4174,8 +3870,6 @@ class HCompareGeneric: public HBinaryOperation {
Token::Value token() const { return token_; }
virtual void PrintDataTo(StringStream* stream);
- virtual HType CalculateInferredType();
-
DECLARE_CONCRETE_INSTRUCTION(CompareGeneric)
private:
@@ -4213,8 +3907,6 @@ class HCompareNumericAndBranch: public HTemplateControlInstruction<2, 2> {
}
virtual void PrintDataTo(StringStream* stream);
- virtual void AddInformativeDefinitions();
-
DECLARE_CONCRETE_INSTRUCTION(CompareNumericAndBranch)
private:
@@ -4225,11 +3917,16 @@ class HCompareNumericAndBranch: public HTemplateControlInstruction<2, 2> {
class HCompareObjectEqAndBranch: public HTemplateControlInstruction<2, 2> {
public:
- HCompareObjectEqAndBranch(HValue* left, HValue* right) {
+ // TODO(danno): make this private when the IfBuilder properly constructs
+ // control flow instructions.
+ HCompareObjectEqAndBranch(HValue* left,
+ HValue* right) {
SetOperandAt(0, left);
SetOperandAt(1, right);
}
+ DECLARE_INSTRUCTION_FACTORY_P2(HCompareObjectEqAndBranch, HValue*, HValue*);
+
HValue* left() { return OperandAt(0); }
HValue* right() { return OperandAt(1); }
@@ -4451,7 +4148,7 @@ class HTypeofIsAndBranch: public HUnaryControlInstruction {
class HInstanceOf: public HBinaryOperation {
public:
HInstanceOf(HValue* context, HValue* left, HValue* right)
- : HBinaryOperation(context, left, right) {
+ : HBinaryOperation(context, left, right, HType::Boolean()) {
set_representation(Representation::Tagged());
SetAllSideEffects();
}
@@ -4460,8 +4157,6 @@ class HInstanceOf: public HBinaryOperation {
return Representation::Tagged();
}
- virtual HType CalculateInferredType();
-
virtual void PrintDataTo(StringStream* stream);
DECLARE_CONCRETE_INSTRUCTION(InstanceOf)
@@ -4473,7 +4168,7 @@ class HInstanceOfKnownGlobal: public HTemplateInstruction<2> {
HInstanceOfKnownGlobal(HValue* context,
HValue* left,
Handle<JSFunction> right)
- : function_(right) {
+ : HTemplateInstruction<2>(HType::Boolean()), function_(right) {
SetOperandAt(0, context);
SetOperandAt(1, left);
set_representation(Representation::Tagged());
@@ -4488,8 +4183,6 @@ class HInstanceOfKnownGlobal: public HTemplateInstruction<2> {
return Representation::Tagged();
}
- virtual HType CalculateInferredType();
-
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal)
private:
@@ -4519,7 +4212,10 @@ class HInstanceSize: public HTemplateInstruction<1> {
class HPower: public HTemplateInstruction<2> {
public:
- static HInstruction* New(Zone* zone, HValue* left, HValue* right);
+ static HInstruction* New(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
HValue* left() { return OperandAt(0); }
HValue* right() const { return OperandAt(1); }
@@ -4588,8 +4284,6 @@ class HAdd: public HArithmeticBinaryOperation {
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
- virtual HType CalculateInferredType();
-
virtual HValue* Canonicalize();
virtual bool TryDecompose(DecompositionResult* decomposition) {
@@ -4856,8 +4550,8 @@ class HMathMinMax: public HArithmeticBinaryOperation {
class HBitwise: public HBitwiseBinaryOperation {
public:
static HInstruction* New(Zone* zone,
- Token::Value op,
HValue* context,
+ Token::Value op,
HValue* left,
HValue* right);
@@ -4879,8 +4573,12 @@ class HBitwise: public HBitwiseBinaryOperation {
virtual Range* InferRange(Zone* zone);
private:
- HBitwise(Token::Value op, HValue* context, HValue* left, HValue* right)
- : HBitwiseBinaryOperation(context, left, right), op_(op) {
+ HBitwise(HValue* context,
+ Token::Value op,
+ HValue* left,
+ HValue* right)
+ : HBitwiseBinaryOperation(context, left, right, HType::TaggedNumber()),
+ op_(op) {
ASSERT(op == Token::BIT_AND || op == Token::BIT_OR || op == Token::BIT_XOR);
// BIT_AND with a smi-range positive value will always unset the
// entire sign-extension of the smi-sign.
@@ -5041,10 +4739,7 @@ class HRor: public HBitwiseBinaryOperation {
class HOsrEntry: public HTemplateInstruction<0> {
public:
- explicit HOsrEntry(BailoutId ast_id) : ast_id_(ast_id) {
- SetGVNFlag(kChangesOsrEntries);
- SetGVNFlag(kChangesNewSpacePromotion);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HOsrEntry, BailoutId);
BailoutId ast_id() const { return ast_id_; }
@@ -5055,6 +4750,11 @@ class HOsrEntry: public HTemplateInstruction<0> {
DECLARE_CONCRETE_INSTRUCTION(OsrEntry)
private:
+ explicit HOsrEntry(BailoutId ast_id) : ast_id_(ast_id) {
+ SetGVNFlag(kChangesOsrEntries);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
+
BailoutId ast_id_;
};
@@ -5066,6 +4766,23 @@ class HParameter: public HTemplateInstruction<0> {
REGISTER_PARAMETER
};
+ DECLARE_INSTRUCTION_FACTORY_P1(HParameter, unsigned);
+ DECLARE_INSTRUCTION_FACTORY_P2(HParameter, unsigned, ParameterKind);
+ DECLARE_INSTRUCTION_FACTORY_P3(HParameter, unsigned, ParameterKind,
+ Representation);
+
+ unsigned index() const { return index_; }
+ ParameterKind kind() const { return kind_; }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::None();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(Parameter)
+
+ private:
explicit HParameter(unsigned index,
ParameterKind kind = STACK_PARAMETER)
: index_(index),
@@ -5081,18 +4798,6 @@ class HParameter: public HTemplateInstruction<0> {
set_representation(r);
}
- unsigned index() const { return index_; }
- ParameterKind kind() const { return kind_; }
-
- virtual void PrintDataTo(StringStream* stream);
-
- virtual Representation RequiredInputRepresentation(int index) {
- return Representation::None();
- }
-
- DECLARE_CONCRETE_INSTRUCTION(Parameter)
-
- private:
unsigned index_;
ParameterKind kind_;
};
@@ -5133,10 +4838,7 @@ class HCallStub: public HUnaryCall {
class HUnknownOSRValue: public HTemplateInstruction<0> {
public:
- HUnknownOSRValue()
- : incoming_value_(NULL) {
- set_representation(Representation::Tagged());
- }
+ DECLARE_INSTRUCTION_FACTORY_P0(HUnknownOSRValue)
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
@@ -5158,6 +4860,11 @@ class HUnknownOSRValue: public HTemplateInstruction<0> {
DECLARE_CONCRETE_INSTRUCTION(UnknownOSRValue)
private:
+ HUnknownOSRValue()
+ : incoming_value_(NULL) {
+ set_representation(Representation::Tagged());
+ }
+
HPhi* incoming_value_;
};
@@ -5240,41 +4947,19 @@ class HLoadGlobalGeneric: public HTemplateInstruction<2> {
class HAllocate: public HTemplateInstruction<2> {
public:
- enum Flags {
- CAN_ALLOCATE_IN_NEW_SPACE = 1 << 0,
- CAN_ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
- CAN_ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
- ALLOCATE_DOUBLE_ALIGNED = 1 << 3,
- PREFILL_WITH_FILLER = 1 << 4
- };
-
- HAllocate(HValue* context, HValue* size, HType type, Flags flags)
- : flags_(flags) {
- SetOperandAt(0, context);
- SetOperandAt(1, size);
- set_type(type);
- set_representation(Representation::Tagged());
- SetFlag(kTrackSideEffectDominators);
- SetGVNFlag(kChangesNewSpacePromotion);
- SetGVNFlag(kDependsOnNewSpacePromotion);
+ static HAllocate* New(Zone* zone,
+ HValue* context,
+ HValue* size,
+ HType type,
+ PretenureFlag pretenure_flag,
+ InstanceType instance_type) {
+ return new(zone) HAllocate(context, size, type, pretenure_flag,
+ instance_type);
}
// Maximum instance size for which allocations will be inlined.
static const int kMaxInlineSize = 64 * kPointerSize;
- static Flags DefaultFlags() {
- return CAN_ALLOCATE_IN_NEW_SPACE;
- }
-
- static Flags DefaultFlags(ElementsKind kind) {
- Flags flags = CAN_ALLOCATE_IN_NEW_SPACE;
- if (IsFastDoubleElementsKind(kind)) {
- flags = static_cast<HAllocate::Flags>(
- flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
- }
- return flags;
- }
-
HValue* context() { return OperandAt(0); }
HValue* size() { return OperandAt(1); }
@@ -5294,25 +4979,16 @@ class HAllocate: public HTemplateInstruction<2> {
known_initial_map_ = known_initial_map;
}
- bool CanAllocateInNewSpace() const {
- return (flags_ & CAN_ALLOCATE_IN_NEW_SPACE) != 0;
- }
-
- bool CanAllocateInOldDataSpace() const {
- return (flags_ & CAN_ALLOCATE_IN_OLD_DATA_SPACE) != 0;
- }
-
- bool CanAllocateInOldPointerSpace() const {
- return (flags_ & CAN_ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
+ bool IsNewSpaceAllocation() const {
+ return (flags_ & ALLOCATE_IN_NEW_SPACE) != 0;
}
- bool CanAllocateInOldSpace() const {
- return CanAllocateInOldDataSpace() ||
- CanAllocateInOldPointerSpace();
+ bool IsOldDataSpaceAllocation() const {
+ return (flags_ & ALLOCATE_IN_OLD_DATA_SPACE) != 0;
}
- bool GuaranteedInNewSpace() const {
- return CanAllocateInNewSpace() && !CanAllocateInOldSpace();
+ bool IsOldPointerSpaceAllocation() const {
+ return (flags_ & ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
}
bool MustAllocateDoubleAligned() const {
@@ -5323,8 +4999,12 @@ class HAllocate: public HTemplateInstruction<2> {
return (flags_ & PREFILL_WITH_FILLER) != 0;
}
- void SetFlags(Flags flags) {
- flags_ = static_cast<HAllocate::Flags>(flags_ | flags);
+ void MakePrefillWithFiller() {
+ flags_ = static_cast<HAllocate::Flags>(flags_ | PREFILL_WITH_FILLER);
+ }
+
+ void MakeDoubleAligned() {
+ flags_ = static_cast<HAllocate::Flags>(flags_ | ALLOCATE_DOUBLE_ALIGNED);
}
void UpdateSize(HValue* size) {
@@ -5339,6 +5019,36 @@ class HAllocate: public HTemplateInstruction<2> {
DECLARE_CONCRETE_INSTRUCTION(Allocate)
private:
+ enum Flags {
+ ALLOCATE_IN_NEW_SPACE = 1 << 0,
+ ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
+ ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
+ ALLOCATE_DOUBLE_ALIGNED = 1 << 3,
+ PREFILL_WITH_FILLER = 1 << 4
+ };
+
+ HAllocate(HValue* context,
+ HValue* size,
+ HType type,
+ PretenureFlag pretenure_flag,
+ InstanceType instance_type)
+ : HTemplateInstruction<2>(type) {
+ SetOperandAt(0, context);
+ SetOperandAt(1, size);
+ set_representation(Representation::Tagged());
+ SetFlag(kTrackSideEffectDominators);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ SetGVNFlag(kDependsOnNewSpacePromotion);
+ flags_ = pretenure_flag == TENURED
+ ? (Heap::TargetSpaceId(instance_type) == OLD_POINTER_SPACE
+ ? ALLOCATE_IN_OLD_POINTER_SPACE : ALLOCATE_IN_OLD_DATA_SPACE)
+ : ALLOCATE_IN_NEW_SPACE;
+ if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
+ flags_ = static_cast<HAllocate::Flags>(flags_ |
+ ALLOCATE_DOUBLE_ALIGNED);
+ }
+ }
+
Flags flags_;
Handle<Map> known_initial_map_;
};
@@ -5346,12 +5056,12 @@ class HAllocate: public HTemplateInstruction<2> {
class HInnerAllocatedObject: public HTemplateInstruction<1> {
public:
- HInnerAllocatedObject(HValue* value, int offset, HType type = HType::Tagged())
- : offset_(offset) {
- ASSERT(value->IsAllocate());
- SetOperandAt(0, value);
- set_type(type);
- set_representation(Representation::Tagged());
+ static HInnerAllocatedObject* New(Zone* zone,
+ HValue* context,
+ HValue* value,
+ int offset,
+ HType type = HType::Tagged()) {
+ return new(zone) HInnerAllocatedObject(value, offset, type);
}
HValue* base_object() { return OperandAt(0); }
@@ -5366,6 +5076,14 @@ class HInnerAllocatedObject: public HTemplateInstruction<1> {
DECLARE_CONCRETE_INSTRUCTION(InnerAllocatedObject)
private:
+ HInnerAllocatedObject(HValue* value, int offset, HType type = HType::Tagged())
+ : HTemplateInstruction<1>(type), offset_(offset) {
+ ASSERT(value->IsAllocate());
+ SetOperandAt(0, value);
+ set_type(type);
+ set_representation(Representation::Tagged());
+ }
+
int offset_;
};
@@ -5387,9 +5105,14 @@ inline bool ReceiverObjectNeedsWriteBarrier(HValue* object,
if (object->IsConstant() && HConstant::cast(object)->IsCell()) {
return false;
}
+ if (object->IsConstant() &&
+ HConstant::cast(object)->HasExternalReferenceValue()) {
+ // Stores to external references require no write barriers
+ return false;
+ }
if (object != new_space_dominator) return true;
if (object->IsAllocate()) {
- return !HAllocate::cast(object)->GuaranteedInNewSpace();
+ return !HAllocate::cast(object)->IsNewSpaceAllocation();
}
return true;
}
@@ -5397,14 +5120,8 @@ inline bool ReceiverObjectNeedsWriteBarrier(HValue* object,
class HStoreGlobalCell: public HUnaryOperation {
public:
- HStoreGlobalCell(HValue* value,
- Handle<PropertyCell> cell,
- PropertyDetails details)
- : HUnaryOperation(value),
- cell_(cell),
- details_(details) {
- SetGVNFlag(kChangesGlobalVars);
- }
+ DECLARE_INSTRUCTION_FACTORY_P3(HStoreGlobalCell, HValue*,
+ Handle<PropertyCell>, PropertyDetails);
Handle<PropertyCell> cell() const { return cell_; }
bool RequiresHoleCheck() {
@@ -5422,6 +5139,15 @@ class HStoreGlobalCell: public HUnaryOperation {
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell)
private:
+ HStoreGlobalCell(HValue* value,
+ Handle<PropertyCell> cell,
+ PropertyDetails details)
+ : HUnaryOperation(value),
+ cell_(cell),
+ details_(details) {
+ SetGVNFlag(kChangesGlobalVars);
+ }
+
Handle<PropertyCell> cell_;
PropertyDetails details_;
};
@@ -5429,18 +5155,14 @@ class HStoreGlobalCell: public HUnaryOperation {
class HStoreGlobalGeneric: public HTemplateInstruction<3> {
public:
- HStoreGlobalGeneric(HValue* context,
- HValue* global_object,
- Handle<Object> name,
- HValue* value,
- StrictModeFlag strict_mode_flag)
- : name_(name),
- strict_mode_flag_(strict_mode_flag) {
- SetOperandAt(0, context);
- SetOperandAt(1, global_object);
- SetOperandAt(2, value);
- set_representation(Representation::Tagged());
- SetAllSideEffects();
+ inline static HStoreGlobalGeneric* New(Zone* zone,
+ HValue* context,
+ HValue* global_object,
+ Handle<Object> name,
+ HValue* value,
+ StrictModeFlag strict_mode_flag) {
+ return new(zone) HStoreGlobalGeneric(context, global_object,
+ name, value, strict_mode_flag);
}
HValue* context() { return OperandAt(0); }
@@ -5458,6 +5180,20 @@ class HStoreGlobalGeneric: public HTemplateInstruction<3> {
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalGeneric)
private:
+ HStoreGlobalGeneric(HValue* context,
+ HValue* global_object,
+ Handle<Object> name,
+ HValue* value,
+ StrictModeFlag strict_mode_flag)
+ : name_(name),
+ strict_mode_flag_(strict_mode_flag) {
+ SetOperandAt(0, context);
+ SetOperandAt(1, global_object);
+ SetOperandAt(2, value);
+ set_representation(Representation::Tagged());
+ SetAllSideEffects();
+ }
+
Handle<Object> name_;
StrictModeFlag strict_mode_flag_;
};
@@ -5545,12 +5281,8 @@ class HStoreContextSlot: public HTemplateInstruction<2> {
kCheckIgnoreAssignment
};
- HStoreContextSlot(HValue* context, int slot_index, Mode mode, HValue* value)
- : slot_index_(slot_index), mode_(mode) {
- SetOperandAt(0, context);
- SetOperandAt(1, value);
- SetGVNFlag(kChangesContextSlots);
- }
+ DECLARE_INSTRUCTION_FACTORY_P4(HStoreContextSlot, HValue*, int,
+ Mode, HValue*);
HValue* context() { return OperandAt(0); }
HValue* value() { return OperandAt(1); }
@@ -5578,6 +5310,13 @@ class HStoreContextSlot: public HTemplateInstruction<2> {
DECLARE_CONCRETE_INSTRUCTION(StoreContextSlot)
private:
+ HStoreContextSlot(HValue* context, int slot_index, Mode mode, HValue* value)
+ : slot_index_(slot_index), mode_(mode) {
+ SetOperandAt(0, context);
+ SetOperandAt(1, value);
+ SetGVNFlag(kChangesContextSlots);
+ }
+
int slot_index_;
Mode mode_;
};
@@ -5595,6 +5334,10 @@ class HObjectAccess {
return portion() == kExternalMemory;
}
+ inline bool IsStringLength() const {
+ return portion() == kStringLengths;
+ }
+
inline int offset() const {
return OffsetField::decode(value_);
}
@@ -5637,6 +5380,10 @@ class HObjectAccess {
return HObjectAccess(kInobject, AllocationSite::kWeakNextOffset);
}
+ static HObjectAccess ForAllocationSiteList() {
+ return HObjectAccess(kExternalMemory, 0, Representation::Tagged());
+ }
+
static HObjectAccess ForFixedArrayLength() {
return HObjectAccess(
kArrayLengths,
@@ -5644,6 +5391,14 @@ class HObjectAccess {
FLAG_track_fields ? Representation::Smi() : Representation::Tagged());
}
+ static HObjectAccess ForStringLength() {
+ STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
+ return HObjectAccess(
+ kStringLengths,
+ String::kLengthOffset,
+ FLAG_track_fields ? Representation::Smi() : Representation::Tagged());
+ }
+
static HObjectAccess ForPropertiesPointer() {
return HObjectAccess(kInobject, JSObject::kPropertiesOffset);
}
@@ -5707,6 +5462,7 @@ class HObjectAccess {
enum Portion {
kMaps, // map of an object
kArrayLengths, // the length of an array
+ kStringLengths, // the length of a string
kElementsPointer, // elements pointer
kBackingStore, // some field in the backing store
kDouble, // some double field
@@ -5743,40 +5499,44 @@ class HObjectAccess {
};
-class HLinkObjectInList: public HUnaryOperation {
+class HLoadNamedField: public HTemplateInstruction<2> {
public:
- // There needs to be a mapping from every KnownList to an external reference
- enum KnownList {
- ALLOCATION_SITE_LIST
- };
+ DECLARE_INSTRUCTION_FACTORY_P2(HLoadNamedField, HValue*, HObjectAccess);
+ DECLARE_INSTRUCTION_FACTORY_P3(HLoadNamedField, HValue*, HObjectAccess,
+ HValue*);
- HLinkObjectInList(HValue* object, HObjectAccess store_field,
- KnownList known_list)
- : HUnaryOperation(object),
- store_field_(store_field),
- known_list_(known_list) {
- set_representation(Representation::Tagged());
+ HValue* object() { return OperandAt(0); }
+ HValue* typecheck() {
+ ASSERT(HasTypeCheck());
+ return OperandAt(1);
}
- HObjectAccess store_field() const { return store_field_; }
- KnownList known_list() const { return known_list_; }
+ bool HasTypeCheck() const { return OperandAt(0) != OperandAt(1); }
+ HObjectAccess access() const { return access_; }
+ Representation field_representation() const {
+ return access_.representation();
+ }
+ virtual bool HasEscapingOperandAt(int index) { return false; }
virtual Representation RequiredInputRepresentation(int index) {
+ if (index == 0 && access().IsExternalMemory()) {
+ // object must be external in case of external memory access
+ return Representation::External();
+ }
return Representation::Tagged();
}
-
+ virtual Range* InferRange(Zone* zone);
virtual void PrintDataTo(StringStream* stream);
- DECLARE_CONCRETE_INSTRUCTION(LinkObjectInList)
-
- private:
- HObjectAccess store_field_;
- KnownList known_list_;
-};
+ DECLARE_CONCRETE_INSTRUCTION(LoadNamedField)
+ protected:
+ virtual bool DataEquals(HValue* other) {
+ HLoadNamedField* b = HLoadNamedField::cast(other);
+ return access_.Equals(b->access_);
+ }
-class HLoadNamedField: public HTemplateInstruction<2> {
- public:
+ private:
HLoadNamedField(HValue* object,
HObjectAccess access,
HValue* typecheck = NULL)
@@ -5803,37 +5563,6 @@ class HLoadNamedField: public HTemplateInstruction<2> {
access.SetGVNFlags(this, false);
}
- HValue* object() { return OperandAt(0); }
- HValue* typecheck() {
- ASSERT(HasTypeCheck());
- return OperandAt(1);
- }
-
- bool HasTypeCheck() const { return OperandAt(0) != OperandAt(1); }
- HObjectAccess access() const { return access_; }
- Representation field_representation() const {
- return access_.representation();
- }
-
- virtual bool HasEscapingOperandAt(int index) { return false; }
- virtual Representation RequiredInputRepresentation(int index) {
- if (index == 0 && access().IsExternalMemory()) {
- // object must be external in case of external memory access
- return Representation::External();
- }
- return Representation::Tagged();
- }
- virtual void PrintDataTo(StringStream* stream);
-
- DECLARE_CONCRETE_INSTRUCTION(LoadNamedField)
-
- protected:
- virtual bool DataEquals(HValue* other) {
- HLoadNamedField* b = HLoadNamedField::cast(other);
- return access_.Equals(b->access_);
- }
-
- private:
virtual bool IsDeletable() const { return true; }
HObjectAccess access_;
@@ -5952,55 +5681,10 @@ enum LoadKeyedHoleMode {
class HLoadKeyed
: public HTemplateInstruction<3>, public ArrayInstructionInterface {
public:
- HLoadKeyed(HValue* obj,
- HValue* key,
- HValue* dependency,
- ElementsKind elements_kind,
- LoadKeyedHoleMode mode = NEVER_RETURN_HOLE)
- : bit_field_(0) {
- bit_field_ = ElementsKindField::encode(elements_kind) |
- HoleModeField::encode(mode);
-
- SetOperandAt(0, obj);
- SetOperandAt(1, key);
- SetOperandAt(2, dependency != NULL ? dependency : obj);
-
- if (!is_external()) {
- // I can detect the case between storing double (holey and fast) and
- // smi/object by looking at elements_kind_.
- ASSERT(IsFastSmiOrObjectElementsKind(elements_kind) ||
- IsFastDoubleElementsKind(elements_kind));
-
- if (IsFastSmiOrObjectElementsKind(elements_kind)) {
- if (IsFastSmiElementsKind(elements_kind) &&
- (!IsHoleyElementsKind(elements_kind) ||
- mode == NEVER_RETURN_HOLE)) {
- set_type(HType::Smi());
- set_representation(Representation::Smi());
- } else {
- set_representation(Representation::Tagged());
- }
-
- SetGVNFlag(kDependsOnArrayElements);
- } else {
- set_representation(Representation::Double());
- SetGVNFlag(kDependsOnDoubleArrayElements);
- }
- } else {
- if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
- elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
- set_representation(Representation::Double());
- } else {
- set_representation(Representation::Integer32());
- }
-
- SetGVNFlag(kDependsOnExternalMemory);
- // Native code could change the specialized array.
- SetGVNFlag(kDependsOnCalls);
- }
-
- SetFlag(kUseGVN);
- }
+ DECLARE_INSTRUCTION_FACTORY_P4(HLoadKeyed, HValue*, HValue*, HValue*,
+ ElementsKind);
+ DECLARE_INSTRUCTION_FACTORY_P5(HLoadKeyed, HValue*, HValue*, HValue*,
+ ElementsKind, LoadKeyedHoleMode);
bool is_external() const {
return IsExternalArrayElementsKind(elements_kind());
@@ -6069,6 +5753,56 @@ class HLoadKeyed
}
private:
+ HLoadKeyed(HValue* obj,
+ HValue* key,
+ HValue* dependency,
+ ElementsKind elements_kind,
+ LoadKeyedHoleMode mode = NEVER_RETURN_HOLE)
+ : bit_field_(0) {
+ bit_field_ = ElementsKindField::encode(elements_kind) |
+ HoleModeField::encode(mode);
+
+ SetOperandAt(0, obj);
+ SetOperandAt(1, key);
+ SetOperandAt(2, dependency != NULL ? dependency : obj);
+
+ if (!is_external()) {
+ // I can detect the case between storing double (holey and fast) and
+ // smi/object by looking at elements_kind_.
+ ASSERT(IsFastSmiOrObjectElementsKind(elements_kind) ||
+ IsFastDoubleElementsKind(elements_kind));
+
+ if (IsFastSmiOrObjectElementsKind(elements_kind)) {
+ if (IsFastSmiElementsKind(elements_kind) &&
+ (!IsHoleyElementsKind(elements_kind) ||
+ mode == NEVER_RETURN_HOLE)) {
+ set_type(HType::Smi());
+ set_representation(Representation::Smi());
+ } else {
+ set_representation(Representation::Tagged());
+ }
+
+ SetGVNFlag(kDependsOnArrayElements);
+ } else {
+ set_representation(Representation::Double());
+ SetGVNFlag(kDependsOnDoubleArrayElements);
+ }
+ } else {
+ if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
+ elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
+ set_representation(Representation::Double());
+ } else {
+ set_representation(Representation::Integer32());
+ }
+
+ SetGVNFlag(kDependsOnExternalMemory);
+ // Native code could change the specialized array.
+ SetGVNFlag(kDependsOnCalls);
+ }
+
+ SetFlag(kUseGVN);
+ }
+
virtual bool IsDeletable() const {
return !RequiresHoleCheck();
}
@@ -6132,20 +5866,10 @@ class HLoadKeyedGeneric: public HTemplateInstruction<3> {
};
-class HStoreNamedField: public HTemplateInstruction<2> {
+class HStoreNamedField: public HTemplateInstruction<3> {
public:
- HStoreNamedField(HValue* obj,
- HObjectAccess access,
- HValue* val)
- : access_(access),
- transition_(),
- transition_unique_id_(),
- new_space_dominator_(NULL),
- write_barrier_mode_(UPDATE_WRITE_BARRIER) {
- SetOperandAt(0, obj);
- SetOperandAt(1, val);
- access.SetGVNFlags(this, true);
- }
+ DECLARE_INSTRUCTION_FACTORY_P3(HStoreNamedField, HValue*,
+ HObjectAccess, HValue*);
DECLARE_CONCRETE_INSTRUCTION(StoreNamedField)
@@ -6174,28 +5898,40 @@ class HStoreNamedField: public HTemplateInstruction<2> {
return write_barrier_mode_ == SKIP_WRITE_BARRIER;
}
- HValue* object() { return OperandAt(0); }
- HValue* value() { return OperandAt(1); }
+ HValue* object() const { return OperandAt(0); }
+ HValue* value() const { return OperandAt(1); }
+ HValue* transition() const { return OperandAt(2); }
HObjectAccess access() const { return access_; }
- Handle<Map> transition() const { return transition_; }
- UniqueValueId transition_unique_id() const { return transition_unique_id_; }
- void SetTransition(Handle<Map> map, CompilationInfo* info) {
- ASSERT(transition_.is_null()); // Only set once.
+ HValue* new_space_dominator() const { return new_space_dominator_; }
+ bool has_transition() const { return has_transition_; }
+
+ Handle<Map> transition_map() const {
+ if (has_transition()) {
+ return Handle<Map>::cast(HConstant::cast(transition())->handle());
+ } else {
+ return Handle<Map>();
+ }
+ }
+
+ void SetTransition(HConstant* map_constant, CompilationInfo* info) {
+ ASSERT(!has_transition()); // Only set once.
+ Handle<Map> map = Handle<Map>::cast(map_constant->handle());
if (map->CanBeDeprecated()) {
map->AddDependentCompilationInfo(DependentCode::kTransitionGroup, info);
}
- transition_ = map;
+ SetOperandAt(2, map_constant);
+ has_transition_ = true;
}
- HValue* new_space_dominator() const { return new_space_dominator_; }
bool NeedsWriteBarrier() {
ASSERT(!(FLAG_track_double_fields && field_representation().IsDouble()) ||
- transition_.is_null());
+ !has_transition());
if (IsSkipWriteBarrier()) return false;
if (field_representation().IsDouble()) return false;
if (field_representation().IsSmi()) return false;
if (field_representation().IsInteger32()) return false;
+ if (field_representation().IsExternal()) return false;
return StoringValueNeedsWriteBarrier(value()) &&
ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator());
}
@@ -6205,20 +5941,28 @@ class HStoreNamedField: public HTemplateInstruction<2> {
return ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator());
}
- virtual void FinalizeUniqueValueId() {
- transition_unique_id_ = UniqueValueId(transition_);
- }
-
Representation field_representation() const {
return access_.representation();
}
private:
+ HStoreNamedField(HValue* obj,
+ HObjectAccess access,
+ HValue* val)
+ : access_(access),
+ new_space_dominator_(NULL),
+ write_barrier_mode_(UPDATE_WRITE_BARRIER),
+ has_transition_(false) {
+ SetOperandAt(0, obj);
+ SetOperandAt(1, val);
+ SetOperandAt(2, obj);
+ access.SetGVNFlags(this, true);
+ }
+
HObjectAccess access_;
- Handle<Map> transition_;
- UniqueValueId transition_unique_id_;
HValue* new_space_dominator_;
- WriteBarrierMode write_barrier_mode_;
+ WriteBarrierMode write_barrier_mode_ : 1;
+ bool has_transition_ : 1;
};
@@ -6260,38 +6004,8 @@ class HStoreNamedGeneric: public HTemplateInstruction<3> {
class HStoreKeyed
: public HTemplateInstruction<3>, public ArrayInstructionInterface {
public:
- HStoreKeyed(HValue* obj, HValue* key, HValue* val,
- ElementsKind elements_kind)
- : elements_kind_(elements_kind),
- index_offset_(0),
- is_dehoisted_(false),
- is_uninitialized_(false),
- new_space_dominator_(NULL) {
- SetOperandAt(0, obj);
- SetOperandAt(1, key);
- SetOperandAt(2, val);
-
- if (IsFastObjectElementsKind(elements_kind)) {
- SetFlag(kTrackSideEffectDominators);
- SetGVNFlag(kDependsOnNewSpacePromotion);
- }
- if (is_external()) {
- SetGVNFlag(kChangesExternalMemory);
- SetFlag(kAllowUndefinedAsNaN);
- } else if (IsFastDoubleElementsKind(elements_kind)) {
- SetGVNFlag(kChangesDoubleArrayElements);
- } else if (IsFastSmiElementsKind(elements_kind)) {
- SetGVNFlag(kChangesArrayElements);
- } else {
- SetGVNFlag(kChangesArrayElements);
- }
-
- // EXTERNAL_{UNSIGNED_,}{BYTE,SHORT,INT}_ELEMENTS are truncating.
- if (elements_kind >= EXTERNAL_BYTE_ELEMENTS &&
- elements_kind <= EXTERNAL_UNSIGNED_INT_ELEMENTS) {
- SetFlag(kTruncatingToInt32);
- }
- }
+ DECLARE_INSTRUCTION_FACTORY_P4(HStoreKeyed, HValue*, HValue*, HValue*,
+ ElementsKind);
virtual bool HasEscapingOperandAt(int index) { return index != 0; }
virtual Representation RequiredInputRepresentation(int index) {
@@ -6388,6 +6102,39 @@ class HStoreKeyed
DECLARE_CONCRETE_INSTRUCTION(StoreKeyed)
private:
+ HStoreKeyed(HValue* obj, HValue* key, HValue* val,
+ ElementsKind elements_kind)
+ : elements_kind_(elements_kind),
+ index_offset_(0),
+ is_dehoisted_(false),
+ is_uninitialized_(false),
+ new_space_dominator_(NULL) {
+ SetOperandAt(0, obj);
+ SetOperandAt(1, key);
+ SetOperandAt(2, val);
+
+ if (IsFastObjectElementsKind(elements_kind)) {
+ SetFlag(kTrackSideEffectDominators);
+ SetGVNFlag(kDependsOnNewSpacePromotion);
+ }
+ if (is_external()) {
+ SetGVNFlag(kChangesExternalMemory);
+ SetFlag(kAllowUndefinedAsNaN);
+ } else if (IsFastDoubleElementsKind(elements_kind)) {
+ SetGVNFlag(kChangesDoubleArrayElements);
+ } else if (IsFastSmiElementsKind(elements_kind)) {
+ SetGVNFlag(kChangesArrayElements);
+ } else {
+ SetGVNFlag(kChangesArrayElements);
+ }
+
+ // EXTERNAL_{UNSIGNED_,}{BYTE,SHORT,INT}_ELEMENTS are truncating.
+ if (elements_kind >= EXTERNAL_BYTE_ELEMENTS &&
+ elements_kind <= EXTERNAL_UNSIGNED_INT_ELEMENTS) {
+ SetFlag(kTruncatingToInt32);
+ }
+ }
+
ElementsKind elements_kind_;
uint32_t index_offset_;
bool is_dehoisted_ : 1;
@@ -6433,29 +6180,13 @@ class HStoreKeyedGeneric: public HTemplateInstruction<4> {
class HTransitionElementsKind: public HTemplateInstruction<2> {
public:
- HTransitionElementsKind(HValue* context,
- HValue* object,
- Handle<Map> original_map,
- Handle<Map> transitioned_map)
- : original_map_(original_map),
- transitioned_map_(transitioned_map),
- original_map_unique_id_(),
- transitioned_map_unique_id_(),
- from_kind_(original_map->elements_kind()),
- to_kind_(transitioned_map->elements_kind()) {
- SetOperandAt(0, object);
- SetOperandAt(1, context);
- SetFlag(kUseGVN);
- SetGVNFlag(kChangesElementsKind);
- if (original_map->has_fast_double_elements()) {
- SetGVNFlag(kChangesElementsPointer);
- SetGVNFlag(kChangesNewSpacePromotion);
- }
- if (transitioned_map->has_fast_double_elements()) {
- SetGVNFlag(kChangesElementsPointer);
- SetGVNFlag(kChangesNewSpacePromotion);
- }
- set_representation(Representation::Tagged());
+ inline static HTransitionElementsKind* New(Zone* zone,
+ HValue* context,
+ HValue* object,
+ Handle<Map> original_map,
+ Handle<Map> transitioned_map) {
+ return new(zone) HTransitionElementsKind(context, object,
+ original_map, transitioned_map);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -6486,6 +6217,31 @@ class HTransitionElementsKind: public HTemplateInstruction<2> {
}
private:
+ HTransitionElementsKind(HValue* context,
+ HValue* object,
+ Handle<Map> original_map,
+ Handle<Map> transitioned_map)
+ : original_map_(original_map),
+ transitioned_map_(transitioned_map),
+ original_map_unique_id_(),
+ transitioned_map_unique_id_(),
+ from_kind_(original_map->elements_kind()),
+ to_kind_(transitioned_map->elements_kind()) {
+ SetOperandAt(0, object);
+ SetOperandAt(1, context);
+ SetFlag(kUseGVN);
+ SetGVNFlag(kChangesElementsKind);
+ if (original_map->has_fast_double_elements()) {
+ SetGVNFlag(kChangesElementsPointer);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
+ if (transitioned_map->has_fast_double_elements()) {
+ SetGVNFlag(kChangesElementsPointer);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
+ set_representation(Representation::Tagged());
+ }
+
Handle<Map> original_map_;
Handle<Map> transitioned_map_;
UniqueValueId original_map_unique_id_;
@@ -6509,10 +6265,6 @@ class HStringAdd: public HBinaryOperation {
return Representation::Tagged();
}
- virtual HType CalculateInferredType() {
- return HType::String();
- }
-
DECLARE_CONCRETE_INSTRUCTION(StringAdd)
protected:
@@ -6520,7 +6272,7 @@ class HStringAdd: public HBinaryOperation {
private:
HStringAdd(HValue* context, HValue* left, HValue* right, StringAddFlags flags)
- : HBinaryOperation(context, left, right), flags_(flags) {
+ : HBinaryOperation(context, left, right, HType::String()), flags_(flags) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
@@ -6537,14 +6289,11 @@ class HStringAdd: public HBinaryOperation {
class HStringCharCodeAt: public HTemplateInstruction<3> {
public:
- HStringCharCodeAt(HValue* context, HValue* string, HValue* index) {
- SetOperandAt(0, context);
- SetOperandAt(1, string);
- SetOperandAt(2, index);
- set_representation(Representation::Integer32());
- SetFlag(kUseGVN);
- SetGVNFlag(kDependsOnMaps);
- SetGVNFlag(kChangesNewSpacePromotion);
+ static HStringCharCodeAt* New(Zone* zone,
+ HValue* context,
+ HValue* string,
+ HValue* index) {
+ return new(zone) HStringCharCodeAt(context, string, index);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -6568,6 +6317,16 @@ class HStringCharCodeAt: public HTemplateInstruction<3> {
}
private:
+ HStringCharCodeAt(HValue* context, HValue* string, HValue* index) {
+ SetOperandAt(0, context);
+ SetOperandAt(1, string);
+ SetOperandAt(2, index);
+ set_representation(Representation::Integer32());
+ SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
+
// No side effects: runtime function assumes string + number inputs.
virtual bool IsDeletable() const { return true; }
};
@@ -6584,7 +6343,6 @@ class HStringCharFromCode: public HTemplateInstruction<2> {
? Representation::Tagged()
: Representation::Integer32();
}
- virtual HType CalculateInferredType() { return HType::String(); }
HValue* context() const { return OperandAt(0); }
HValue* value() const { return OperandAt(1); }
@@ -6594,7 +6352,8 @@ class HStringCharFromCode: public HTemplateInstruction<2> {
DECLARE_CONCRETE_INSTRUCTION(StringCharFromCode)
private:
- HStringCharFromCode(HValue* context, HValue* char_code) {
+ HStringCharFromCode(HValue* context, HValue* char_code)
+ : HTemplateInstruction<2>(HType::String()) {
SetOperandAt(0, context);
SetOperandAt(1, char_code);
set_representation(Representation::Tagged());
@@ -6608,39 +6367,6 @@ class HStringCharFromCode: public HTemplateInstruction<2> {
};
-class HStringLength: public HUnaryOperation {
- public:
- static HInstruction* New(Zone* zone, HValue* string);
-
- virtual Representation RequiredInputRepresentation(int index) {
- return Representation::Tagged();
- }
-
- virtual HType CalculateInferredType() {
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
- return HType::Smi();
- }
-
- DECLARE_CONCRETE_INSTRUCTION(StringLength)
-
- protected:
- virtual bool DataEquals(HValue* other) { return true; }
-
- virtual Range* InferRange(Zone* zone) {
- return new(zone) Range(0, String::kMaxLength);
- }
-
- private:
- explicit HStringLength(HValue* string) : HUnaryOperation(string) {
- set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
- SetGVNFlag(kDependsOnMaps);
- }
-
- virtual bool IsDeletable() const { return true; }
-};
-
-
template <int V>
class HMaterializedLiteral: public HTemplateInstruction<V> {
public:
@@ -6683,6 +6409,7 @@ class HRegExpLiteral: public HMaterializedLiteral<1> {
flags_(flags) {
SetOperandAt(0, context);
SetAllSideEffects();
+ set_type(HType::JSObject());
}
HValue* context() { return OperandAt(0); }
@@ -6693,7 +6420,6 @@ class HRegExpLiteral: public HMaterializedLiteral<1> {
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
- virtual HType CalculateInferredType();
DECLARE_CONCRETE_INSTRUCTION(RegExpLiteral)
@@ -6709,7 +6435,8 @@ class HFunctionLiteral: public HTemplateInstruction<1> {
HFunctionLiteral(HValue* context,
Handle<SharedFunctionInfo> shared,
bool pretenure)
- : shared_info_(shared),
+ : HTemplateInstruction<1>(HType::JSObject()),
+ shared_info_(shared),
pretenure_(pretenure),
has_no_literals_(shared->num_literals() == 0),
is_generator_(shared->is_generator()),
@@ -6724,7 +6451,6 @@ class HFunctionLiteral: public HTemplateInstruction<1> {
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
- virtual HType CalculateInferredType();
DECLARE_CONCRETE_INSTRUCTION(FunctionLiteral)
@@ -6771,9 +6497,7 @@ class HTypeof: public HTemplateInstruction<2> {
class HTrapAllocationMemento : public HTemplateInstruction<1> {
public:
- explicit HTrapAllocationMemento(HValue* obj) {
- SetOperandAt(0, obj);
- }
+ DECLARE_INSTRUCTION_FACTORY_P1(HTrapAllocationMemento, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -6782,11 +6506,25 @@ class HTrapAllocationMemento : public HTemplateInstruction<1> {
HValue* object() { return OperandAt(0); }
DECLARE_CONCRETE_INSTRUCTION(TrapAllocationMemento)
+
+ private:
+ explicit HTrapAllocationMemento(HValue* obj) {
+ SetOperandAt(0, obj);
+ }
};
class HToFastProperties: public HUnaryOperation {
public:
+ DECLARE_INSTRUCTION_FACTORY_P1(HToFastProperties, HValue*);
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(ToFastProperties)
+
+ private:
explicit HToFastProperties(HValue* value) : HUnaryOperation(value) {
// This instruction is not marked as having side effects, but
// changes the map of the input operand. Use it only when creating
@@ -6800,13 +6538,6 @@ class HToFastProperties: public HUnaryOperation {
set_representation(Representation::Tagged());
}
- virtual Representation RequiredInputRepresentation(int index) {
- return Representation::Tagged();
- }
-
- DECLARE_CONCRETE_INSTRUCTION(ToFastProperties)
-
- private:
virtual bool IsDeletable() const { return true; }
};
@@ -6879,15 +6610,7 @@ class HSeqStringSetChar: public HTemplateInstruction<3> {
class HCheckMapValue: public HTemplateInstruction<2> {
public:
- HCheckMapValue(HValue* value,
- HValue* map) {
- SetOperandAt(0, value);
- SetOperandAt(1, map);
- set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
- SetGVNFlag(kDependsOnMaps);
- SetGVNFlag(kDependsOnElementsKind);
- }
+ DECLARE_INSTRUCTION_FACTORY_P2(HCheckMapValue, HValue*, HValue*);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -6908,17 +6631,26 @@ class HCheckMapValue: public HTemplateInstruction<2> {
virtual bool DataEquals(HValue* other) {
return true;
}
+
+ private:
+ HCheckMapValue(HValue* value,
+ HValue* map) {
+ SetOperandAt(0, value);
+ SetOperandAt(1, map);
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnElementsKind);
+ }
};
class HForInPrepareMap : public HTemplateInstruction<2> {
public:
- HForInPrepareMap(HValue* context,
- HValue* object) {
- SetOperandAt(0, context);
- SetOperandAt(1, object);
- set_representation(Representation::Tagged());
- SetAllSideEffects();
+ static HForInPrepareMap* New(Zone* zone,
+ HValue* context,
+ HValue* object) {
+ return new(zone) HForInPrepareMap(context, object);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -6935,18 +6667,21 @@ class HForInPrepareMap : public HTemplateInstruction<2> {
}
DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap);
+
+ private:
+ HForInPrepareMap(HValue* context,
+ HValue* object) {
+ SetOperandAt(0, context);
+ SetOperandAt(1, object);
+ set_representation(Representation::Tagged());
+ SetAllSideEffects();
+ }
};
class HForInCacheArray : public HTemplateInstruction<2> {
public:
- HForInCacheArray(HValue* enumerable,
- HValue* keys,
- int idx) : idx_(idx) {
- SetOperandAt(0, enumerable);
- SetOperandAt(1, keys);
- set_representation(Representation::Tagged());
- }
+ DECLARE_INSTRUCTION_FACTORY_P3(HForInCacheArray, HValue*, HValue*, int);
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -6973,6 +6708,14 @@ class HForInCacheArray : public HTemplateInstruction<2> {
DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray);
private:
+ HForInCacheArray(HValue* enumerable,
+ HValue* keys,
+ int idx) : idx_(idx) {
+ SetOperandAt(0, enumerable);
+ SetOperandAt(1, keys);
+ set_representation(Representation::Tagged());
+ }
+
int idx_;
HForInCacheArray* index_cache_;
};
diff --git a/deps/v8/src/hydrogen-uint32-analysis.cc b/deps/v8/src/hydrogen-uint32-analysis.cc
index 67219f55df..835a198d4d 100644
--- a/deps/v8/src/hydrogen-uint32-analysis.cc
+++ b/deps/v8/src/hydrogen-uint32-analysis.cc
@@ -33,11 +33,7 @@ namespace internal {
bool HUint32AnalysisPhase::IsSafeUint32Use(HValue* val, HValue* use) {
// Operations that operate on bits are safe.
- if (use->IsBitwise() ||
- use->IsShl() ||
- use->IsSar() ||
- use->IsShr() ||
- use->IsBitNot()) {
+ if (use->IsBitwise() || use->IsShl() || use->IsSar() || use->IsShr()) {
return true;
} else if (use->IsChange() || use->IsSimulate()) {
// Conversions and deoptimization have special support for unt32.
diff --git a/deps/v8/src/hydrogen.cc b/deps/v8/src/hydrogen.cc
index 0875f29112..837c9780c5 100644
--- a/deps/v8/src/hydrogen.cc
+++ b/deps/v8/src/hydrogen.cc
@@ -609,7 +609,9 @@ void HGraph::Verify(bool do_full_verify) const {
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
int32_t value) {
if (!pointer->is_set()) {
- HConstant* constant = new(zone()) HConstant(value);
+ // Can't pass GetInvalidContext() to HConstant::New, because that will
+ // recursively call GetConstant
+ HConstant* constant = HConstant::New(zone(), NULL, value);
constant->InsertAfter(GetConstantUndefined());
pointer->set(constant);
}
@@ -835,9 +837,8 @@ void HGraphBuilder::IfBuilder::Deopt() {
void HGraphBuilder::IfBuilder::Return(HValue* value) {
HBasicBlock* block = builder_->current_block();
- HValue* context = builder_->environment()->LookupContext();
HValue* parameter_count = builder_->graph()->GetConstantMinus1();
- block->FinishExit(new(zone()) HReturn(value, context, parameter_count));
+ block->FinishExit(builder_->New<HReturn>(value, parameter_count));
builder_->set_current_block(NULL);
if (did_else_) {
first_false_block_ = NULL;
@@ -956,8 +957,9 @@ void HGraphBuilder::LoopBuilder::EndBody() {
// Push the new increment value on the expression stack to merge into the phi.
builder_->environment()->Push(increment_);
- builder_->current_block()->GotoNoSimulate(header_block_);
- header_block_->loop_information()->RegisterBackEdge(body_block_);
+ HBasicBlock* last_block = builder_->current_block();
+ last_block->GotoNoSimulate(header_block_);
+ header_block_->loop_information()->RegisterBackEdge(last_block);
builder_->set_current_block(exit_block_);
// Pop the phi from the expression stack
@@ -991,15 +993,24 @@ void HGraphBuilder::AddIncrementCounter(StatsCounter* counter,
HValue* context) {
if (FLAG_native_code_counters && counter->Enabled()) {
HValue* reference = Add<HConstant>(ExternalReference(counter));
- HValue* old_value = AddLoad(reference, HObjectAccess::ForCounter(), NULL);
- HValue* new_value = AddInstruction(
- HAdd::New(zone(), context, old_value, graph()->GetConstant1()));
+ HValue* old_value = Add<HLoadNamedField>(reference,
+ HObjectAccess::ForCounter());
+ HValue* new_value = Add<HAdd>(old_value, graph()->GetConstant1());
new_value->ClearFlag(HValue::kCanOverflow); // Ignore counter overflow
- AddStore(reference, HObjectAccess::ForCounter(), new_value);
+ Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
+ new_value);
}
}
+void HGraphBuilder::AddSimulate(BailoutId id,
+ RemovableSimulate removable) {
+ ASSERT(current_block() != NULL);
+ ASSERT(no_side_effects_scope_count_ == 0);
+ current_block()->AddSimulate(id, removable);
+}
+
+
HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
HBasicBlock* b = graph()->CreateBasicBlock();
b->SetInitialEnvironment(env);
@@ -1039,12 +1050,14 @@ void HGraphBuilder::PadEnvironmentForContinuation(
HBasicBlock* continuation) {
if (continuation->last_environment() != NULL) {
// When merging from a deopt block to a continuation, resolve differences in
- // environment by pushing undefined and popping extra values so that the
- // environments match during the join.
+ // environment by pushing constant 0 and popping extra values so that the
+ // environments match during the join. Push 0 since it has the most specific
+ // representation, and will not influence representation inference of the
+ // phi.
int continuation_env_length = continuation->last_environment()->length();
while (continuation_env_length != from->last_environment()->length()) {
if (continuation_env_length > from->last_environment()->length()) {
- from->last_environment()->Push(graph()->GetConstantUndefined());
+ from->last_environment()->Push(graph()->GetConstant0());
} else {
from->last_environment()->Pop();
}
@@ -1056,9 +1069,7 @@ void HGraphBuilder::PadEnvironmentForContinuation(
HValue* HGraphBuilder::BuildCheckMap(HValue* obj, Handle<Map> map) {
- HCheckMaps* check = HCheckMaps::New(obj, map, zone(), top_info());
- AddInstruction(check);
- return check;
+ return Add<HCheckMaps>(obj, map, top_info());
}
@@ -1090,18 +1101,17 @@ HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
Token::GTE);
capacity_checker.Then();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
- HValue* max_capacity = AddInstruction(
- HAdd::New(zone, context, current_capacity, max_gap));
+ HValue* max_capacity = Add<HAdd>(current_capacity, max_gap);
IfBuilder key_checker(this);
key_checker.If<HCompareNumericAndBranch>(key, max_capacity, Token::LT);
key_checker.Then();
key_checker.ElseDeopt();
key_checker.End();
- HValue* new_capacity = BuildNewElementsCapacity(context, key);
+ HValue* new_capacity = BuildNewElementsCapacity(key);
HValue* new_elements = BuildGrowElementsCapacity(object, elements,
kind, kind, length,
new_capacity);
@@ -1117,7 +1127,8 @@ HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
HAdd::New(zone, context, key, graph_->GetConstant1()));
new_length->ClearFlag(HValue::kCanOverflow);
- AddStore(object, HObjectAccess::ForArrayLength(kind), new_length);
+ Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
+ new_length);
}
length_checker.Else();
@@ -1174,7 +1185,7 @@ void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
HInstruction* elements = AddLoadElements(object);
HInstruction* empty_fixed_array = Add<HConstant>(
- isolate()->factory()->empty_fixed_array(), Representation::Tagged());
+ isolate()->factory()->empty_fixed_array());
IfBuilder if_builder(this);
@@ -1185,7 +1196,7 @@ void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
HInstruction* elements_length = AddLoadFixedArrayLength(elements);
HInstruction* array_length = is_jsarray
- ? AddLoad(object, HObjectAccess::ForArrayLength(from_kind), NULL)
+ ? Add<HLoadNamedField>(object, HObjectAccess::ForArrayLength(from_kind))
: elements_length;
BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
@@ -1194,7 +1205,7 @@ void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
if_builder.End();
}
- AddStore(object, HObjectAccess::ForMap(), map);
+ Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}
@@ -1209,7 +1220,6 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
LoadKeyedHoleMode load_mode,
KeyedAccessStoreMode store_mode) {
ASSERT(!IsExternalArrayElementsKind(elements_kind) || !is_js_array);
- Zone* zone = this->zone();
// No GVNFlag is necessary for ElementsKind if there is an explicit dependency
// on a HElementsTransition instruction. The flag can also be removed if the
// map to check has FAST_HOLEY_ELEMENTS, since there can be no further
@@ -1227,15 +1237,14 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
HValue* elements = AddLoadElements(object, mapcheck);
if (is_store && (fast_elements || fast_smi_only_elements) &&
store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
- HCheckMaps* check_cow_map = HCheckMaps::New(
- elements, isolate()->factory()->fixed_array_map(), zone, top_info());
+ HCheckMaps* check_cow_map = Add<HCheckMaps>(
+ elements, isolate()->factory()->fixed_array_map(), top_info());
check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
- AddInstruction(check_cow_map);
}
HInstruction* length = NULL;
if (is_js_array) {
- length = AddLoad(object, HObjectAccess::ForArrayLength(elements_kind),
- mapcheck);
+ length = Add<HLoadNamedField>(object,
+ HObjectAccess::ForArrayLength(elements_kind), mapcheck);
} else {
length = AddLoadFixedArrayLength(elements);
}
@@ -1244,8 +1253,8 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
if (IsExternalArrayElementsKind(elements_kind)) {
if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
NoObservableSideEffectsScope no_effects(this);
- HLoadExternalArrayPointer* external_elements =
- Add<HLoadExternalArrayPointer>(elements);
+ HLoadExternalArrayPointer* external_elements =
+ Add<HLoadExternalArrayPointer>(elements);
IfBuilder length_checker(this);
length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
length_checker.Then();
@@ -1295,11 +1304,10 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
elements = BuildCopyElementsOnWrite(object, elements, elements_kind,
length);
} else {
- HCheckMaps* check_cow_map = HCheckMaps::New(
+ HCheckMaps* check_cow_map = Add<HCheckMaps>(
elements, isolate()->factory()->fixed_array_map(),
- zone, top_info());
+ top_info());
check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
- AddInstruction(check_cow_map);
}
}
}
@@ -1308,37 +1316,29 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
}
-HValue* HGraphBuilder::BuildAllocateElements(HValue* context,
- ElementsKind kind,
+HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
HValue* capacity) {
- Zone* zone = this->zone();
+ int elements_size;
+ InstanceType instance_type;
+
+ if (IsFastDoubleElementsKind(kind)) {
+ elements_size = kDoubleSize;
+ instance_type = FIXED_DOUBLE_ARRAY_TYPE;
+ } else {
+ elements_size = kPointerSize;
+ instance_type = FIXED_ARRAY_TYPE;
+ }
- int elements_size = IsFastDoubleElementsKind(kind)
- ? kDoubleSize : kPointerSize;
HConstant* elements_size_value = Add<HConstant>(elements_size);
- HValue* mul = AddInstruction(
- HMul::New(zone, context, capacity, elements_size_value));
+ HValue* mul = Add<HMul>(capacity, elements_size_value);
mul->ClearFlag(HValue::kCanOverflow);
HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
- HValue* total_size = AddInstruction(
- HAdd::New(zone, context, mul, header_size));
+ HValue* total_size = Add<HAdd>(mul, header_size);
total_size->ClearFlag(HValue::kCanOverflow);
- HAllocate::Flags flags = HAllocate::DefaultFlags(kind);
- if (isolate()->heap()->ShouldGloballyPretenure()) {
- // TODO(hpayer): When pretenuring can be internalized, flags can become
- // private to HAllocate.
- if (IsFastDoubleElementsKind(kind)) {
- flags = static_cast<HAllocate::Flags>(
- flags | HAllocate::CAN_ALLOCATE_IN_OLD_DATA_SPACE);
- } else {
- flags = static_cast<HAllocate::Flags>(
- flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
- }
- }
-
- return Add<HAllocate>(context, total_size, HType::JSArray(), flags);
+ return Add<HAllocate>(total_size, HType::JSArray(),
+ isolate()->heap()->GetPretenureMode(), instance_type);
}
@@ -1351,18 +1351,18 @@ void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
: factory->fixed_array_map();
AddStoreMapConstant(elements, map);
- AddStore(elements, HObjectAccess::ForFixedArrayLength(), capacity);
+ Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
+ capacity);
}
HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
- HValue* context,
ElementsKind kind,
HValue* capacity) {
// The HForceRepresentation is to prevent possible deopt on int-smi
// conversion after allocation but before the new object fields are set.
capacity = Add<HForceRepresentation>(capacity, Representation::Smi());
- HValue* new_elements = BuildAllocateElements(context, kind, capacity);
+ HValue* new_elements = BuildAllocateElements(kind, capacity);
BuildInitializeElementsHeader(new_elements, kind, capacity);
return new_elements;
}
@@ -1375,14 +1375,15 @@ HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
HValue* allocation_site_payload,
HValue* length_field) {
- AddStore(array, HObjectAccess::ForMap(), array_map);
+ Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
HConstant* empty_fixed_array =
- Add<HConstant>(isolate()->factory()->empty_fixed_array());
+ Add<HConstant>(isolate()->factory()->empty_fixed_array());
HObjectAccess access = HObjectAccess::ForPropertiesPointer();
- AddStore(array, access, empty_fixed_array);
- AddStore(array, HObjectAccess::ForArrayLength(elements_kind), length_field);
+ Add<HStoreNamedField>(array, access, empty_fixed_array);
+ Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind),
+ length_field);
if (mode == TRACK_ALLOCATION_SITE) {
BuildCreateAllocationMemento(array,
@@ -1395,10 +1396,9 @@ HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
elements_location += AllocationMemento::kSize;
}
- HInnerAllocatedObject* elements =
- Add<HInnerAllocatedObject>(array, elements_location);
- AddStore(array, HObjectAccess::ForElementsPointer(), elements);
- return elements;
+ HValue* elements = Add<HInnerAllocatedObject>(array, elements_location);
+ Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements);
+ return static_cast<HInnerAllocatedObject*>(elements);
}
@@ -1441,8 +1441,10 @@ HInstruction* HGraphBuilder::AddExternalArrayElementAccess(
return Add<HStoreKeyed>(external_elements, checked_key, val, elements_kind);
} else {
ASSERT(val == NULL);
- HLoadKeyed* load = Add<HLoadKeyed>(external_elements, checked_key,
- dependency, elements_kind);
+ HLoadKeyed* load = Add<HLoadKeyed>(external_elements,
+ checked_key,
+ dependency,
+ elements_kind);
if (FLAG_opt_safe_uint32_operations &&
elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
graph()->RecordUint32Instruction(load);
@@ -1484,30 +1486,27 @@ HInstruction* HGraphBuilder::AddFastElementAccess(
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
HValue* typecheck) {
- return AddLoad(object, HObjectAccess::ForElementsPointer(), typecheck);
+ return Add<HLoadNamedField>(object,
+ HObjectAccess::ForElementsPointer(),
+ typecheck);
}
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
- return AddLoad(object, HObjectAccess::ForFixedArrayLength());
+ return Add<HLoadNamedField>(object,
+ HObjectAccess::ForFixedArrayLength());
}
-HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* context,
- HValue* old_capacity) {
- Zone* zone = this->zone();
- HValue* half_old_capacity =
- AddInstruction(HShr::New(zone, context, old_capacity,
- graph_->GetConstant1()));
+HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
+ HValue* half_old_capacity = Add<HShr>(old_capacity, graph_->GetConstant1());
- HValue* new_capacity = AddInstruction(
- HAdd::New(zone, context, half_old_capacity, old_capacity));
+ HValue* new_capacity = Add<HAdd>(half_old_capacity, old_capacity);
new_capacity->ClearFlag(HValue::kCanOverflow);
HValue* min_growth = Add<HConstant>(16);
- new_capacity = AddInstruction(
- HAdd::New(zone, context, new_capacity, min_growth));
+ new_capacity = Add<HAdd>(new_capacity, min_growth);
new_capacity->ClearFlag(HValue::kCanOverflow);
return new_capacity;
@@ -1531,25 +1530,23 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
ElementsKind new_kind,
HValue* length,
HValue* new_capacity) {
- HValue* context = environment()->LookupContext();
-
BuildNewSpaceArrayCheck(new_capacity, new_kind);
HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
- context, new_kind, new_capacity);
+ new_kind, new_capacity);
- BuildCopyElements(context, elements, kind,
+ BuildCopyElements(elements, kind,
new_elements, new_kind,
length, new_capacity);
- AddStore(object, HObjectAccess::ForElementsPointer(), new_elements);
+ Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
+ new_elements);
return new_elements;
}
-void HGraphBuilder::BuildFillElementsWithHole(HValue* context,
- HValue* elements,
+void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
ElementsKind elements_kind,
HValue* from,
HValue* to) {
@@ -1591,7 +1588,7 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* context,
Add<HStoreKeyed>(elements, key, hole, elements_kind);
}
} else {
- LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
+ LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
HValue* key = builder.BeginBody(from, to, Token::LT);
@@ -1602,8 +1599,7 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* context,
}
-void HGraphBuilder::BuildCopyElements(HValue* context,
- HValue* from_elements,
+void HGraphBuilder::BuildCopyElements(HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
@@ -1617,11 +1613,11 @@ void HGraphBuilder::BuildCopyElements(HValue* context,
// If the copy might trigger a GC, make sure that the FixedArray is
// pre-initialized with holes to make sure that it's always in a consistent
// state.
- BuildFillElementsWithHole(context, to_elements, to_elements_kind,
+ BuildFillElementsWithHole(to_elements, to_elements_kind,
graph()->GetConstant0(), capacity);
}
- LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
+ LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
@@ -1643,14 +1639,13 @@ void HGraphBuilder::BuildCopyElements(HValue* context,
if (!pre_fill_with_holes && length != capacity) {
// Fill unused capacity with the hole.
- BuildFillElementsWithHole(context, to_elements, to_elements_kind,
+ BuildFillElementsWithHole(to_elements, to_elements_kind,
key, capacity);
}
}
-HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
- HValue* boilerplate,
+HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind,
@@ -1663,26 +1658,28 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
size += AllocationMemento::kSize;
}
int elems_offset = size;
+ InstanceType instance_type = IsFastDoubleElementsKind(kind) ?
+ FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
if (length > 0) {
size += IsFastDoubleElementsKind(kind)
? FixedDoubleArray::SizeFor(length)
: FixedArray::SizeFor(length);
}
- HAllocate::Flags allocate_flags = HAllocate::DefaultFlags(kind);
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
HValue* size_in_bytes = Add<HConstant>(size);
- HInstruction* object = Add<HAllocate>(context,
- size_in_bytes,
+ HInstruction* object = Add<HAllocate>(size_in_bytes,
HType::JSObject(),
- allocate_flags);
+ NOT_TENURED,
+ instance_type);
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
if ((i != JSArray::kElementsOffset) || (length == 0)) {
HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
- AddStore(object, access, AddLoad(boilerplate, access));
+ Add<HStoreNamedField>(object, access,
+ Add<HLoadNamedField>(boilerplate, access));
}
}
@@ -1696,12 +1693,14 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
// elements pointer in the resulting object.
HValue* boilerplate_elements = AddLoadElements(boilerplate);
HValue* object_elements = Add<HInnerAllocatedObject>(object, elems_offset);
- AddStore(object, HObjectAccess::ForElementsPointer(), object_elements);
+ Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
+ object_elements);
// Copy the elements array header.
for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
- AddStore(object_elements, access, AddLoad(boilerplate_elements, access));
+ Add<HStoreNamedField>(object_elements, access,
+ Add<HLoadNamedField>(boilerplate_elements, access));
}
// Copy the elements array contents.
@@ -1720,39 +1719,6 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
}
-HInstruction* HGraphBuilder::BuildUnaryMathOp(
- HValue* input, Handle<Type> type, Token::Value operation) {
- // We only handle the numeric cases here
- type = handle(
- Type::Intersect(type, handle(Type::Number(), isolate())), isolate());
-
- switch (operation) {
- default:
- UNREACHABLE();
- case Token::SUB: {
- HInstruction* instr =
- HMul::New(zone(), environment()->LookupContext(),
- input, graph()->GetConstantMinus1());
- Representation rep = Representation::FromType(type);
- if (type->Is(Type::None())) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
- }
- if (instr->IsBinaryOperation()) {
- HBinaryOperation* binop = HBinaryOperation::cast(instr);
- binop->set_observed_input_representation(1, rep);
- binop->set_observed_input_representation(2, rep);
- }
- return instr;
- }
- case Token::BIT_NOT:
- if (type->Is(Type::None())) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
- }
- return new(zone()) HBitNot(input);
- }
-}
-
-
void HGraphBuilder::BuildCompareNil(
HValue* value,
Handle<Type> type,
@@ -1803,22 +1769,22 @@ HValue* HGraphBuilder::BuildCreateAllocationMemento(HValue* previous_object,
isolate()->heap()->allocation_memento_map());
AddStoreMapConstant(alloc_memento, alloc_memento_map);
HObjectAccess access = HObjectAccess::ForAllocationMementoSite();
- AddStore(alloc_memento, access, alloc_site);
+ Add<HStoreNamedField>(alloc_memento, access, alloc_site);
return alloc_memento;
}
-HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* context) {
+HInstruction* HGraphBuilder::BuildGetNativeContext() {
// Get the global context, then the native context
- HInstruction* global_object = Add<HGlobalObject>(context);
+ HInstruction* global_object = Add<HGlobalObject>();
HObjectAccess access = HObjectAccess::ForJSObjectOffset(
GlobalObject::kNativeContextOffset);
- return AddLoad(global_object, access);
+ return Add<HLoadNamedField>(global_object, access);
}
-HInstruction* HGraphBuilder::BuildGetArrayFunction(HValue* context) {
- HInstruction* native_context = BuildGetNativeContext(context);
+HInstruction* HGraphBuilder::BuildGetArrayFunction() {
+ HInstruction* native_context = BuildGetNativeContext();
HInstruction* index =
Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
return Add<HLoadKeyed>(
@@ -1852,7 +1818,7 @@ HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
}
-HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode(HValue* context) {
+HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
if (kind_ == GetInitialFastElementsKind()) {
// No need for a context lookup if the kind_ matches the initial
// map, because we can just load the map in that case.
@@ -1861,7 +1827,7 @@ HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode(HValue* context) {
builder()->BuildLoadNamedField(constructor_function_, access));
}
- HInstruction* native_context = builder()->BuildGetNativeContext(context);
+ HInstruction* native_context = builder()->BuildGetNativeContext();
HInstruction* index = builder()->Add<HConstant>(
static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));
@@ -1885,7 +1851,6 @@ HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
HValue* length_node) {
- HValue* context = builder()->environment()->LookupContext();
ASSERT(length_node != NULL);
int base_size = JSArray::kSize;
@@ -1898,15 +1863,12 @@ HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
HInstruction* elements_size_value =
builder()->Add<HConstant>(elements_size());
- HInstruction* mul = HMul::New(zone(), context, length_node,
- elements_size_value);
+ HInstruction* mul = builder()->Add<HMul>(length_node, elements_size_value);
mul->ClearFlag(HValue::kCanOverflow);
- builder()->AddInstruction(mul);
HInstruction* base = builder()->Add<HConstant>(base_size);
- HInstruction* total_size = HAdd::New(zone(), context, base, mul);
+ HInstruction* total_size = builder()->Add<HAdd>(base, mul);
total_size->ClearFlag(HValue::kCanOverflow);
- builder()->AddInstruction(total_size);
return total_size;
}
@@ -1947,8 +1909,6 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
HValue* capacity,
HValue* length_field,
bool fill_with_hole) {
- HValue* context = builder()->environment()->LookupContext();
-
// These HForceRepresentations are because we store these as fields in the
// objects we construct, and an int32-to-smi HChange could deopt. Accept
// the deopt possibility now, before allocation occurs.
@@ -1956,18 +1916,16 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
Representation::Smi());
length_field = builder()->Add<HForceRepresentation>(length_field,
Representation::Smi());
-
// Allocate (dealing with failure appropriately)
- HAllocate::Flags flags = HAllocate::DefaultFlags(kind_);
- HAllocate* new_object = builder()->Add<HAllocate>(context, size_in_bytes,
- HType::JSArray(), flags);
+ HAllocate* new_object = builder()->Add<HAllocate>(size_in_bytes,
+ HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);
// Fill in the fields: map, properties, length
HValue* map;
if (allocation_site_payload_ == NULL) {
map = EmitInternalMapCode();
} else {
- map = EmitMapCode(context);
+ map = EmitMapCode();
}
elements_location_ = builder()->BuildJSArrayHeader(new_object,
map,
@@ -1980,7 +1938,7 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);
if (fill_with_hole) {
- builder()->BuildFillElementsWithHole(context, elements_location_, kind_,
+ builder()->BuildFillElementsWithHole(elements_location_, kind_,
graph()->GetConstant0(), capacity);
}
@@ -1988,20 +1946,6 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
}
-HStoreNamedField* HGraphBuilder::AddStore(HValue *object,
- HObjectAccess access,
- HValue *val) {
- return Add<HStoreNamedField>(object, access, val);
-}
-
-
-HLoadNamedField* HGraphBuilder::AddLoad(HValue *object,
- HObjectAccess access,
- HValue *typecheck) {
- return Add<HLoadNamedField>(object, access, typecheck);
-}
-
-
HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
Handle<Map> map) {
return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
@@ -2009,15 +1953,14 @@ HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
}
-HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin,
- HValue* context) {
- HGlobalObject* global_object = Add<HGlobalObject>(context);
+HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
+ HGlobalObject* global_object = Add<HGlobalObject>();
HObjectAccess access = HObjectAccess::ForJSObjectOffset(
GlobalObject::kBuiltinsOffset);
- HValue* builtins = AddLoad(global_object, access);
+ HValue* builtins = Add<HLoadNamedField>(global_object, access);
HObjectAccess function_access = HObjectAccess::ForJSObjectOffset(
JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
- return AddLoad(builtins, function_access);
+ return Add<HLoadNamedField>(builtins, function_access);
}
@@ -2606,7 +2549,7 @@ void ValueContext::ReturnValue(HValue* value) {
// The value is tracked in the bailout environment, and communicated
// through the environment as the result of the expression.
if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
- owner()->Bailout("bad value context for arguments value");
+ owner()->Bailout(kBadValueContextForArgumentsValue);
}
owner()->Push(value);
}
@@ -2658,7 +2601,7 @@ void EffectContext::ReturnContinuation(HIfContinuation* continuation,
void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->IsControlInstruction());
if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
- return owner()->Bailout("bad value context for arguments object value");
+ return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
}
owner()->AddInstruction(instr);
owner()->Push(instr);
@@ -2671,7 +2614,7 @@ void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
ASSERT(!instr->HasObservableSideEffects());
if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
- return owner()->Bailout("bad value context for arguments object value");
+ return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
}
HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
@@ -2761,7 +2704,7 @@ void TestContext::BuildBranch(HValue* value) {
// branch.
HOptimizedGraphBuilder* builder = owner();
if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
- builder->Bailout("arguments object value in a test context");
+ builder->Bailout(kArgumentsObjectValueInATestContext);
}
if (value->IsConstant()) {
HConstant* constant_value = HConstant::cast(value);
@@ -2807,7 +2750,7 @@ void TestContext::BuildBranch(HValue* value) {
} while (false)
-void HOptimizedGraphBuilder::Bailout(const char* reason) {
+void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
current_info()->set_bailout_reason(reason);
SetStackOverflow();
}
@@ -2866,16 +2809,16 @@ void HOptimizedGraphBuilder::VisitExpressions(
bool HOptimizedGraphBuilder::BuildGraph() {
if (current_info()->function()->is_generator()) {
- Bailout("function is a generator");
+ Bailout(kFunctionIsAGenerator);
return false;
}
Scope* scope = current_info()->scope();
if (scope->HasIllegalRedeclaration()) {
- Bailout("function with illegal redeclaration");
+ Bailout(kFunctionWithIllegalRedeclaration);
return false;
}
if (scope->calls_eval()) {
- Bailout("function calls eval");
+ Bailout(kFunctionCallsEval);
return false;
}
SetUpScope(scope);
@@ -2909,7 +2852,7 @@ bool HOptimizedGraphBuilder::BuildGraph() {
VisitDeclarations(scope->declarations());
Add<HSimulate>(BailoutId::Declarations());
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
Add<HStackCheck>(context, HStackCheck::kFunctionEntry);
VisitStatements(current_info()->function()->body());
@@ -2941,8 +2884,7 @@ bool HOptimizedGraphBuilder::BuildGraph() {
}
-bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
- *bailout_reason = SmartArrayPointer<char>();
+bool HGraph::Optimize(BailoutReason* bailout_reason) {
OrderBlocks();
AssignDominators();
@@ -2963,14 +2905,12 @@ bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
Run<HPropagateDeoptimizingMarkPhase>();
if (!CheckConstPhiUses()) {
- *bailout_reason = SmartArrayPointer<char>(StrDup(
- "Unsupported phi use of const variable"));
+ *bailout_reason = kUnsupportedPhiUseOfConstVariable;
return false;
}
Run<HRedundantPhiEliminationPhase>();
if (!CheckArgumentsPhiUses()) {
- *bailout_reason = SmartArrayPointer<char>(StrDup(
- "Unsupported phi use of arguments"));
+ *bailout_reason = kUnsupportedPhiUseOfArguments;
return false;
}
@@ -3010,11 +2950,10 @@ bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
// Eliminate redundant stack checks on backwards branches.
Run<HStackCheckEliminationPhase>();
- if (FLAG_idefs) SetupInformativeDefinitions();
- if (FLAG_array_bounds_checks_elimination && !FLAG_idefs) {
+ if (FLAG_array_bounds_checks_elimination) {
Run<HBoundsCheckEliminationPhase>();
}
- if (FLAG_array_bounds_checks_hoisting && !FLAG_idefs) {
+ if (FLAG_array_bounds_checks_hoisting) {
Run<HBoundsCheckHoistingPhase>();
}
if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
@@ -3026,50 +2965,6 @@ bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
}
-void HGraph::SetupInformativeDefinitionsInBlock(HBasicBlock* block) {
- for (int phi_index = 0; phi_index < block->phis()->length(); phi_index++) {
- HPhi* phi = block->phis()->at(phi_index);
- phi->AddInformativeDefinitions();
- phi->SetFlag(HValue::kIDefsProcessingDone);
- // We do not support phis that "redefine just one operand".
- ASSERT(!phi->IsInformativeDefinition());
- }
-
- for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
- HInstruction* i = it.Current();
- i->AddInformativeDefinitions();
- i->SetFlag(HValue::kIDefsProcessingDone);
- i->UpdateRedefinedUsesWhileSettingUpInformativeDefinitions();
- }
-}
-
-
-// This method is recursive, so if its stack frame is large it could
-// cause a stack overflow.
-// To keep the individual stack frames small we do the actual work inside
-// SetupInformativeDefinitionsInBlock();
-void HGraph::SetupInformativeDefinitionsRecursively(HBasicBlock* block) {
- SetupInformativeDefinitionsInBlock(block);
- for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
- SetupInformativeDefinitionsRecursively(block->dominated_blocks()->at(i));
- }
-
- for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
- HInstruction* i = it.Current();
- if (i->IsBoundsCheck()) {
- HBoundsCheck* check = HBoundsCheck::cast(i);
- check->ApplyIndexChange();
- }
- }
-}
-
-
-void HGraph::SetupInformativeDefinitions() {
- HPhase phase("H_Setup informative definitions", this);
- SetupInformativeDefinitionsRecursively(entry_block());
-}
-
-
void HGraph::RestoreActualValues() {
HPhase phase("H_Restore actual values", this);
@@ -3120,15 +3015,19 @@ HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
- HConstant* undefined_constant = Add<HConstant>(
- isolate()->factory()->undefined_value());
+ // First special is HContext.
+ HInstruction* context = Add<HContext>();
+ environment()->BindContext(context);
+
+ HConstant* undefined_constant = HConstant::cast(Add<HConstant>(
+ isolate()->factory()->undefined_value()));
graph()->set_undefined_constant(undefined_constant);
// Create an arguments object containing the initial parameters. Set the
// initial values of parameters including "this" having parameter index 0.
ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
HArgumentsObject* arguments_object =
- new(zone()) HArgumentsObject(environment()->parameter_count(), zone());
+ New<HArgumentsObject>(environment()->parameter_count());
for (int i = 0; i < environment()->parameter_count(); ++i) {
HInstruction* parameter = Add<HParameter>(i);
arguments_object->AddArgument(parameter, zone());
@@ -3137,10 +3036,6 @@ void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
AddInstruction(arguments_object);
graph()->SetArgumentsObject(arguments_object);
- // First special is HContext.
- HInstruction* context = Add<HContext>();
- environment()->BindContext(context);
-
// Initialize specials and locals to undefined.
for (int i = environment()->parameter_count() + 1;
i < environment()->length();
@@ -3152,7 +3047,7 @@ void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
// not have declarations).
if (scope->arguments() != NULL) {
if (!scope->arguments()->IsStackAllocated()) {
- return Bailout("context-allocated arguments");
+ return Bailout(kContextAllocatedArguments);
}
environment()->Bind(scope->arguments(),
@@ -3173,7 +3068,7 @@ void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
if (stmt->scope() != NULL) {
- return Bailout("ScopedBlock");
+ return Bailout(kScopedBlock);
}
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
@@ -3385,7 +3280,7 @@ void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout("WithStatement");
+ return Bailout(kWithStatement);
}
@@ -3400,15 +3295,15 @@ void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
ZoneList<CaseClause*>* clauses = stmt->cases();
int clause_count = clauses->length();
if (clause_count > kCaseClauseLimit) {
- return Bailout("SwitchStatement: too many clauses");
+ return Bailout(kSwitchStatementTooManyClauses);
}
ASSERT(stmt->switch_type() != SwitchStatement::UNKNOWN_SWITCH);
if (stmt->switch_type() == SwitchStatement::GENERIC_SWITCH) {
- return Bailout("SwitchStatement: mixed or non-literal switch labels");
+ return Bailout(kSwitchStatementMixedOrNonLiteralSwitchLabels);
}
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
CHECK_ALIVE(VisitForValue(stmt->tag()));
Add<HSimulate>(stmt->EntryId());
@@ -3556,9 +3451,9 @@ void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
BreakAndContinueInfo* break_info) {
BreakAndContinueScope push(break_info, this);
Add<HSimulate>(stmt->StackCheckId());
- HValue* context = environment()->LookupContext();
- HStackCheck* stack_check = Add<HStackCheck>(
- context, HStackCheck::kBackwardsBranch);
+ HValue* context = environment()->context();
+ HStackCheck* stack_check = HStackCheck::cast(Add<HStackCheck>(
+ context, HStackCheck::kBackwardsBranch));
ASSERT(loop_entry->IsLoopHeader());
loop_entry->loop_information()->set_stack_check(stack_check);
CHECK_BAILOUT(Visit(stmt->body()));
@@ -3697,16 +3592,16 @@ void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
ASSERT(current_block()->HasPredecessor());
if (!FLAG_optimize_for_in) {
- return Bailout("ForInStatement optimization is disabled");
+ return Bailout(kForInStatementOptimizationIsDisabled);
}
if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
- return Bailout("ForInStatement is not fast case");
+ return Bailout(kForInStatementIsNotFastCase);
}
if (!stmt->each()->IsVariableProxy() ||
!stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
- return Bailout("ForInStatement with non-local each variable");
+ return Bailout(kForInStatementWithNonLocalEachVariable);
}
Variable* each_var = stmt->each()->AsVariableProxy()->var();
@@ -3714,8 +3609,7 @@ void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
CHECK_ALIVE(VisitForValue(stmt->enumerable()));
HValue* enumerable = Top(); // Leave enumerable at the top.
- HInstruction* map = Add<HForInPrepareMap>(
- environment()->LookupContext(), enumerable);
+ HInstruction* map = Add<HForInPrepareMap>(enumerable);
Add<HSimulate>(stmt->PrepareId());
HInstruction* array = Add<HForInCacheArray>(
@@ -3781,9 +3675,7 @@ void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
set_current_block(body_exit);
HValue* current_index = Pop();
- HInstruction* new_index = HAdd::New(zone(),
- environment()->LookupContext(),
- current_index,
+ HInstruction* new_index = New<HAdd>(current_index,
graph()->GetConstant1());
PushAndAdd(new_index);
body_exit = current_block();
@@ -3803,7 +3695,7 @@ void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout("ForOfStatement");
+ return Bailout(kForOfStatement);
}
@@ -3811,7 +3703,7 @@ void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout("TryCatchStatement");
+ return Bailout(kTryCatchStatement);
}
@@ -3820,7 +3712,7 @@ void HOptimizedGraphBuilder::VisitTryFinallyStatement(
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout("TryFinallyStatement");
+ return Bailout(kTryFinallyStatement);
}
@@ -3828,7 +3720,7 @@ void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout("DebuggerStatement");
+ return Bailout(kDebuggerStatement);
}
@@ -3862,7 +3754,7 @@ void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
}
// We also have a stack overflow if the recursive compilation did.
if (HasStackOverflow()) return;
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HFunctionLiteral* instr =
new(zone()) HFunctionLiteral(context, shared_info, expr->pretenure());
return ast_context()->ReturnInstruction(instr, expr->id());
@@ -3874,7 +3766,7 @@ void HOptimizedGraphBuilder::VisitSharedFunctionInfoLiteral(
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout("SharedFunctionInfoLiteral");
+ return Bailout(kSharedFunctionInfoLiteral);
}
@@ -3936,7 +3828,7 @@ HOptimizedGraphBuilder::GlobalPropertyAccess
HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
ASSERT(var->IsContextSlot());
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
int length = current_info()->scope()->ContextChainLength(var->scope());
while (length-- > 0) {
context = Add<HOuterContext>(context);
@@ -3954,14 +3846,14 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
case Variable::UNALLOCATED: {
if (IsLexicalVariableMode(variable->mode())) {
// TODO(rossberg): should this be an ASSERT?
- return Bailout("reference to global lexical variable");
+ return Bailout(kReferenceToGlobalLexicalVariable);
}
// Handle known global constants like 'undefined' specially to avoid a
// load from a global cell for them.
Handle<Object> constant_value =
isolate()->factory()->GlobalConstantFor(variable->name());
if (!constant_value.is_null()) {
- HConstant* instr = new(zone()) HConstant(constant_value);
+ HConstant* instr = New<HConstant>(constant_value);
return ast_context()->ReturnInstruction(instr, expr->id());
}
@@ -3984,7 +3876,7 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
constant_object =
FlattenGetString(Handle<String>::cast(constant_object));
}
- HConstant* constant = new(zone()) HConstant(constant_object);
+ HConstant* constant = New<HConstant>(constant_object);
return ast_context()->ReturnInstruction(constant, expr->id());
} else {
HLoadGlobalCell* instr =
@@ -3992,7 +3884,7 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
return ast_context()->ReturnInstruction(instr, expr->id());
}
} else {
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
AddInstruction(global_object);
HLoadGlobalGeneric* instr =
@@ -4011,7 +3903,7 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
if (value == graph()->GetConstantHole()) {
ASSERT(IsDeclaredVariableMode(variable->mode()) &&
variable->mode() != VAR);
- return Bailout("reference to uninitialized variable");
+ return Bailout(kReferenceToUninitializedVariable);
}
return ast_context()->ReturnValue(value);
}
@@ -4023,7 +3915,7 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
}
case Variable::LOOKUP:
- return Bailout("reference to a variable which requires dynamic lookup");
+ return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
}
}
@@ -4032,7 +3924,7 @@ void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- HConstant* instr = new(zone()) HConstant(expr->value());
+ HConstant* instr = New<HConstant>(expr->value());
return ast_context()->ReturnInstruction(instr, expr->id());
}
@@ -4043,7 +3935,7 @@ void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
ASSERT(current_block()->HasPredecessor());
Handle<JSFunction> closure = function_state()->compilation_info()->closure();
Handle<FixedArray> literals(closure->literals());
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HRegExpLiteral* instr = new(zone()) HRegExpLiteral(context,
literals,
@@ -4144,8 +4036,7 @@ static bool IsFastLiteral(Handle<JSObject> boilerplate,
int* data_size,
int* pointer_size) {
if (boilerplate->map()->is_deprecated()) {
- Handle<Object> result =
- JSObject::TryMigrateInstance(boilerplate);
+ Handle<Object> result = JSObject::TryMigrateInstance(boilerplate);
if (result->IsSmi()) return false;
}
@@ -4220,7 +4111,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
Handle<JSFunction> closure = function_state()->compilation_info()->closure();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* literal;
// Check whether to use fast or slow deep-copying for boilerplate.
@@ -4265,8 +4156,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
Runtime::FunctionId function_id =
(expr->depth() > 1 || expr->may_store_doubles())
? Runtime::kCreateObjectLiteral : Runtime::kCreateObjectLiteralShallow;
- literal = Add<HCallRuntime>(context,
- isolate()->factory()->empty_string(),
+ literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
Runtime::FunctionForId(function_id),
4);
}
@@ -4323,7 +4213,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
case ObjectLiteral::Property::PROTOTYPE:
case ObjectLiteral::Property::SETTER:
case ObjectLiteral::Property::GETTER:
- return Bailout("Object literal with complex property");
+ return Bailout(kObjectLiteralWithComplexProperty);
default: UNREACHABLE();
}
}
@@ -4348,7 +4238,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT(current_block()->HasPredecessor());
ZoneList<Expression*>* subexprs = expr->values();
int length = subexprs->length();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* literal;
Handle<AllocationSite> site;
@@ -4362,7 +4252,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
isolate(), literals, expr->constant_elements());
if (raw_boilerplate.is_null()) {
- return Bailout("array boilerplate creation failed");
+ return Bailout(kArrayBoilerplateCreationFailed);
}
site = isolate()->factory()->NewAllocationSite();
@@ -4427,15 +4317,14 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
Runtime::FunctionId function_id = (expr->depth() > 1)
? Runtime::kCreateArrayLiteral : Runtime::kCreateArrayLiteralShallow;
- literal = Add<HCallRuntime>(context,
- isolate()->factory()->empty_string(),
+ literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
Runtime::FunctionForId(function_id),
3);
// De-opt if elements kind changed from boilerplate_elements_kind.
Handle<Map> map = Handle<Map>(original_boilerplate_object->map(),
isolate());
- AddInstruction(HCheckMaps::New(literal, map, zone(), top_info()));
+ Add<HCheckMaps>(literal, map, top_info());
}
// The array is expected in the bailout environment during computation
@@ -4454,7 +4343,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
CHECK_ALIVE(VisitForValue(subexpr));
HValue* value = Pop();
- if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
+ if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
elements = AddLoadElements(literal);
@@ -4512,15 +4401,7 @@ static bool ComputeLoadStoreField(Handle<Map> type,
void HOptimizedGraphBuilder::AddCheckMap(HValue* object, Handle<Map> map) {
BuildCheckHeapObject(object);
- AddInstruction(HCheckMaps::New(object, map, zone(), top_info()));
-}
-
-
-void HOptimizedGraphBuilder::AddCheckMapsWithTransitions(HValue* object,
- Handle<Map> map) {
- BuildCheckHeapObject(object);
- AddInstruction(HCheckMaps::NewWithTransitions(
- object, map, zone(), top_info()));
+ Add<HCheckMaps>(object, map, top_info());
}
@@ -4542,7 +4423,7 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
if (proto_result.IsProperty()) {
// If the inherited property could induce readonly-ness, bail out.
if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
- Bailout("improper object on prototype chain for store");
+ Bailout(kImproperObjectOnPrototypeChainForStore);
return NULL;
}
// We only need to check up to the preexisting property.
@@ -4555,9 +4436,9 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
ASSERT(proto->GetPrototype(isolate())->IsNull());
}
ASSERT(proto->IsJSObject());
- Add<HCheckPrototypeMaps>(Handle<JSObject>(JSObject::cast(map->prototype())),
- Handle<JSObject>(JSObject::cast(proto)),
- zone(), top_info());
+ BuildCheckPrototypeMaps(
+ Handle<JSObject>(JSObject::cast(map->prototype())),
+ Handle<JSObject>(JSObject::cast(proto)));
}
HObjectAccess field_access = HObjectAccess::ForField(map, lookup, name);
@@ -4571,28 +4452,32 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
// The store requires a mutable HeapNumber to be allocated.
NoObservableSideEffectsScope no_side_effects(this);
HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
- HInstruction* heap_number = Add<HAllocate>(
- environment()->LookupContext(), heap_number_size,
- HType::HeapNumber(), HAllocate::CAN_ALLOCATE_IN_NEW_SPACE);
+ HInstruction* heap_number = Add<HAllocate>(heap_number_size,
+ HType::HeapNumber(), isolate()->heap()->GetPretenureMode(),
+ HEAP_NUMBER_TYPE);
AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
- AddStore(heap_number, HObjectAccess::ForHeapNumberValue(), value);
- instr = new(zone()) HStoreNamedField(
- object, heap_number_access, heap_number);
+ Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
+ value);
+ instr = New<HStoreNamedField>(object, heap_number_access,
+ heap_number);
} else {
// Already holds a HeapNumber; load the box and write its value field.
- HInstruction* heap_number = AddLoad(object, heap_number_access);
+ HInstruction* heap_number = Add<HLoadNamedField>(object,
+ heap_number_access);
heap_number->set_type(HType::HeapNumber());
- instr = new(zone()) HStoreNamedField(heap_number,
- HObjectAccess::ForHeapNumberValue(), value);
+ instr = New<HStoreNamedField>(heap_number,
+ HObjectAccess::ForHeapNumberValue(),
+ value);
}
} else {
// This is a normal store.
- instr = new(zone()) HStoreNamedField(object, field_access, value);
+ instr = New<HStoreNamedField>(object, field_access, value);
}
if (transition_to_field) {
Handle<Map> transition(lookup->GetTransitionMapFromMap(*map));
- instr->SetTransition(transition, top_info());
+ HConstant* transition_constant = Add<HConstant>(transition);
+ instr->SetTransition(transition_constant, top_info());
// TODO(fschneider): Record the new map type of the object in the IR to
// enable elimination of redundant checks after the transition store.
instr->SetGVNFlag(kChangesMaps);
@@ -4605,7 +4490,7 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedGeneric(
HValue* object,
Handle<String> name,
HValue* value) {
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
return new(zone()) HStoreNamedGeneric(
context,
object,
@@ -4623,7 +4508,7 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedMonomorphic(
// Handle a store to a known field.
LookupResult lookup(isolate());
if (ComputeLoadStoreField(map, name, &lookup, true)) {
- AddCheckMapsWithTransitions(object, map);
+ AddCheckMap(object, map);
return BuildStoreNamedField(object, name, value, map, &lookup);
}
@@ -4682,7 +4567,7 @@ HInstruction* HOptimizedGraphBuilder::TryLoadPolymorphicAsMonomorphic(
if (count == types->length()) {
// Everything matched; can use monomorphic load.
BuildCheckHeapObject(object);
- AddInstruction(HCheckMaps::New(object, types, zone()));
+ Add<HCheckMaps>(object, types);
return BuildLoadNamedField(object, access);
}
@@ -4704,13 +4589,12 @@ HInstruction* HOptimizedGraphBuilder::TryLoadPolymorphicAsMonomorphic(
if (!lookup.IsField()) return NULL;
BuildCheckHeapObject(object);
- AddInstruction(HCheckMaps::New(object, types, zone()));
+ Add<HCheckMaps>(object, types);
Handle<JSObject> holder(lookup.holder());
Handle<Map> holder_map(holder->map());
- AddInstruction(new(zone()) HCheckPrototypeMaps(
- Handle<JSObject>::cast(prototype), holder, zone(), top_info()));
- HValue* holder_value = AddInstruction(new(zone()) HConstant(holder));
+ BuildCheckPrototypeMaps(Handle<JSObject>::cast(prototype), holder);
+ HValue* holder_value = Add<HConstant>(holder);
return BuildLoadNamedField(holder_value,
HObjectAccess::ForField(holder_map, &lookup, name));
}
@@ -4726,7 +4610,7 @@ void HOptimizedGraphBuilder::HandlePolymorphicLoadNamedField(
if (instr == NULL) {
// Something did not match; must use a polymorphic load.
BuildCheckHeapObject(object);
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
instr = new(zone()) HLoadNamedFieldPolymorphic(
context, object, types, name, zone());
}
@@ -4783,7 +4667,7 @@ bool HOptimizedGraphBuilder::TryStorePolymorphicAsMonomorphic(
// Everything matched; can use monomorphic store.
BuildCheckHeapObject(object);
- AddInstruction(HCheckMaps::New(object, types, zone()));
+ Add<HCheckMaps>(object, types);
HInstruction* store;
CHECK_ALIVE_OR_RETURN(
store = BuildStoreNamedField(
@@ -4955,10 +4839,9 @@ void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
}
} else {
- HValue* context = environment()->LookupContext();
- HGlobalObject* global_object = Add<HGlobalObject>(context);
+ HGlobalObject* global_object = Add<HGlobalObject>();
HStoreGlobalGeneric* instr =
- Add<HStoreGlobalGeneric>(context, global_object, var->name(),
+ Add<HStoreGlobalGeneric>(global_object, var->name(),
value, function_strict_mode_flag());
instr->set_position(position);
ASSERT(instr->HasObservableSideEffects());
@@ -5044,7 +4927,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
if (proxy != NULL) {
Variable* var = proxy->var();
if (var->mode() == LET) {
- return Bailout("unsupported let compound assignment");
+ return Bailout(kUnsupportedLetCompoundAssignment);
}
CHECK_ALIVE(VisitForValue(operation));
@@ -5060,7 +4943,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
case Variable::PARAMETER:
case Variable::LOCAL:
if (var->mode() == CONST) {
- return Bailout("unsupported const compound assignment");
+ return Bailout(kUnsupportedConstCompoundAssignment);
}
BindIfLive(var, Top());
break;
@@ -5076,8 +4959,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
int count = current_info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == current_info()->scope()->parameter(i)) {
- Bailout(
- "assignment to parameter, function uses arguments object");
+ Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
}
}
}
@@ -5099,8 +4981,8 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
}
HValue* context = BuildContextChainWalk(var);
- HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
- mode, Top());
+ HStoreContextSlot* instr = Add<HStoreContextSlot>(
+ context, var->index(), mode, Top());
if (instr->HasObservableSideEffects()) {
Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
}
@@ -5108,7 +4990,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
}
case Variable::LOOKUP:
- return Bailout("compound assignment to lookup slot");
+ return Bailout(kCompoundAssignmentToLookupSlot);
}
return ast_context()->ReturnValue(Pop());
@@ -5197,7 +5079,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
}
} else {
- return Bailout("invalid lhs in compound assignment");
+ return Bailout(kInvalidLhsInCompoundAssignment);
}
}
@@ -5234,11 +5116,11 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
}
} else if (var->mode() == CONST_HARMONY) {
if (expr->op() != Token::INIT_CONST_HARMONY) {
- return Bailout("non-initializer assignment to const");
+ return Bailout(kNonInitializerAssignmentToConst);
}
}
- if (proxy->IsArguments()) return Bailout("assignment to arguments");
+ if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
// Handle the assignment.
switch (var->location()) {
@@ -5257,7 +5139,7 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
if (var->mode() == LET && expr->op() == Token::ASSIGN) {
HValue* env_value = environment()->Lookup(var);
if (env_value == graph()->GetConstantHole()) {
- return Bailout("assignment to let variable before initialization");
+ return Bailout(kAssignmentToLetVariableBeforeInitialization);
}
}
// We do not allow the arguments object to occur in a context where it
@@ -5279,7 +5161,7 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
int count = current_info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == current_info()->scope()->parameter(i)) {
- return Bailout("assignment to parameter in arguments object");
+ return Bailout(kAssignmentToParameterInArgumentsObject);
}
}
}
@@ -5311,8 +5193,8 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
}
HValue* context = BuildContextChainWalk(var);
- HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
- mode, Top());
+ HStoreContextSlot* instr = Add<HStoreContextSlot>(
+ context, var->index(), mode, Top());
if (instr->HasObservableSideEffects()) {
Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
}
@@ -5320,10 +5202,10 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
}
case Variable::LOOKUP:
- return Bailout("assignment to LOOKUP variable");
+ return Bailout(kAssignmentToLOOKUPVariable);
}
} else {
- return Bailout("invalid left-hand side in assignment");
+ return Bailout(kInvalidLeftHandSideInAssignment);
}
}
@@ -5344,9 +5226,8 @@ void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
ASSERT(ast_context()->IsEffect());
CHECK_ALIVE(VisitForValue(expr->exception()));
- HValue* context = environment()->LookupContext();
HValue* value = environment()->Pop();
- HThrow* instr = Add<HThrow>(context, value);
+ HThrow* instr = Add<HThrow>(value);
instr->set_position(expr->position());
Add<HSimulate>(expr->id());
current_block()->FinishExit(new(zone()) HAbnormalExit);
@@ -5355,17 +5236,32 @@ void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
- HObjectAccess access) {
+ HObjectAccess access,
+ HValue* typecheck) {
if (FLAG_track_double_fields && access.representation().IsDouble()) {
// load the heap number
- HLoadNamedField* heap_number =
- AddLoad(object, access.WithRepresentation(Representation::Tagged()));
+ HLoadNamedField* heap_number = Add<HLoadNamedField>(
+ object, access.WithRepresentation(Representation::Tagged()));
heap_number->set_type(HType::HeapNumber());
// load the double value from it
- return new(zone()) HLoadNamedField(heap_number,
- HObjectAccess::ForHeapNumberValue(), NULL);
+ return New<HLoadNamedField>(heap_number,
+ HObjectAccess::ForHeapNumberValue(),
+ typecheck);
+ }
+ return New<HLoadNamedField>(object, access, typecheck);
+}
+
+
+HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* object,
+ HValue* typecheck) {
+ if (FLAG_fold_constants && object->IsConstant()) {
+ HConstant* constant = HConstant::cast(object);
+ if (constant->HasStringValue()) {
+ return New<HConstant>(constant->StringValue()->length());
+ }
}
- return new(zone()) HLoadNamedField(object, access, NULL);
+ return BuildLoadNamedField(
+ object, HObjectAccess::ForStringLength(), typecheck);
}
@@ -5376,7 +5272,7 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedGeneric(
if (expr->IsUninitialized()) {
Add<HDeoptimize>(Deoptimizer::SOFT);
}
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
return new(zone()) HLoadNamedGeneric(context, object, name);
}
@@ -5403,8 +5299,8 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedMonomorphic(
// Handle access to various length properties
if (name->Equals(isolate()->heap()->length_string())) {
if (map->instance_type() == JS_ARRAY_TYPE) {
- AddCheckMapsWithTransitions(object, map);
- return new(zone()) HLoadNamedField(object,
+ AddCheckMap(object, map);
+ return New<HLoadNamedField>(object,
HObjectAccess::ForArrayLength(map->elements_kind()));
}
}
@@ -5421,7 +5317,7 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedMonomorphic(
if (lookup.IsConstant()) {
AddCheckMap(object, map);
Handle<Object> constant(lookup.GetConstantFromMap(*map), isolate());
- return new(zone()) HConstant(constant);
+ return New<HConstant>(constant);
}
// Handle a load from a known field somewhere in the prototype chain.
@@ -5431,7 +5327,7 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedMonomorphic(
Handle<JSObject> holder(lookup.holder());
Handle<Map> holder_map(holder->map());
AddCheckMap(object, map);
- Add<HCheckPrototypeMaps>(prototype, holder, zone(), top_info());
+ BuildCheckPrototypeMaps(prototype, holder);
HValue* holder_value = Add<HConstant>(holder);
return BuildLoadNamedField(holder_value,
HObjectAccess::ForField(holder_map, &lookup, name));
@@ -5443,9 +5339,9 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedMonomorphic(
Handle<JSObject> holder(lookup.holder());
Handle<Map> holder_map(holder->map());
AddCheckMap(object, map);
- Add<HCheckPrototypeMaps>(prototype, holder, zone(), top_info());
+ BuildCheckPrototypeMaps(prototype, holder);
Handle<Object> constant(lookup.GetConstantFromMap(*holder_map), isolate());
- return new(zone()) HConstant(constant);
+ return New<HConstant>(constant);
}
// No luck, do a generic load.
@@ -5455,7 +5351,7 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedMonomorphic(
HInstruction* HOptimizedGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
HValue* key) {
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
return new(zone()) HLoadKeyedGeneric(context, object, key);
}
@@ -5468,9 +5364,7 @@ HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
Handle<Map> map,
bool is_store,
KeyedAccessStoreMode store_mode) {
- HCheckMaps* mapcheck = HCheckMaps::New(
- object, map, zone(), top_info(), dependency);
- AddInstruction(mapcheck);
+ HCheckMaps* mapcheck = Add<HCheckMaps>(object, map, top_info(), dependency);
if (dependency) {
mapcheck->ClearGVNFlag(kDependsOnElementsKind);
}
@@ -5481,7 +5375,7 @@ HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
- Add<HCheckPrototypeMaps>(prototype, object_prototype, zone(), top_info());
+ BuildCheckPrototypeMaps(prototype, object_prototype);
load_mode = ALLOW_RETURN_HOLE;
graph()->MarkDependsOnEmptyArrayProtoElements();
}
@@ -5538,8 +5432,7 @@ HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
}
if (!has_double_maps && !has_smi_or_object_maps) return NULL;
- HCheckMaps* check_maps = HCheckMaps::New(object, maps, zone());
- AddInstruction(check_maps);
+ HCheckMaps* check_maps = Add<HCheckMaps>(object, maps);
HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
object, key, val, check_maps,
most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
@@ -5604,8 +5497,7 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
ASSERT(Map::IsValidElementsTransition(
map->elements_kind(),
transition_target.at(i)->elements_kind()));
- HValue* context = environment()->LookupContext();
- transition = Add<HTransitionElementsKind>(context, object, map,
+ transition = Add<HTransitionElementsKind>(object, map,
transition_target.at(i));
} else {
untransitionable_maps.Add(map);
@@ -5651,12 +5543,12 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
HInstruction* access = NULL;
if (IsFastElementsKind(elements_kind)) {
if (is_store && !IsFastDoubleElementsKind(elements_kind)) {
- AddInstruction(HCheckMaps::New(
+ Add<HCheckMaps>(
elements, isolate()->factory()->fixed_array_map(),
- zone(), top_info(), mapcompare));
+ top_info(), mapcompare);
}
if (map->instance_type() == JS_ARRAY_TYPE) {
- HInstruction* length = AddLoad(
+ HInstruction* length = Add<HLoadNamedField>(
object, HObjectAccess::ForArrayLength(elements_kind), mapcompare);
checked_key = Add<HBoundsCheck>(key, length);
} else {
@@ -5752,7 +5644,7 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreKeyedGeneric(
HValue* object,
HValue* key,
HValue* value) {
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
return new(zone()) HStoreKeyedGeneric(
context,
object,
@@ -5778,13 +5670,12 @@ void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
HInstruction* insert_after = entry;
for (int i = 0; i < arguments_values->length(); i++) {
HValue* argument = arguments_values->at(i);
- HInstruction* push_argument = new(zone()) HPushArgument(argument);
+ HInstruction* push_argument = New<HPushArgument>(argument);
push_argument->InsertAfter(insert_after);
insert_after = push_argument;
}
- HArgumentsElements* arguments_elements =
- new(zone()) HArgumentsElements(true);
+ HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
arguments_elements->ClearFlag(HValue::kUseGVN);
arguments_elements->InsertAfter(insert_after);
function_state()->set_arguments_elements(arguments_elements);
@@ -5806,12 +5697,12 @@ bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
if (function_state()->outer() == NULL) {
HInstruction* elements = Add<HArgumentsElements>(false);
- result = new(zone()) HArgumentsLength(elements);
+ result = New<HArgumentsLength>(elements);
} else {
// Number of arguments without receiver.
int argument_count = environment()->
arguments_environment()->parameter_count() - 1;
- result = new(zone()) HConstant(argument_count);
+ result = New<HConstant>(argument_count);
}
} else {
Push(graph()->GetArgumentsObject());
@@ -5854,15 +5745,16 @@ void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
if (expr->IsStringLength()) {
HValue* string = Pop();
BuildCheckHeapObject(string);
- AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
- instr = HStringLength::New(zone(), string);
+ HInstruction* checkstring =
+ AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
+ instr = BuildLoadStringLength(string, checkstring);
} else if (expr->IsStringAccess()) {
CHECK_ALIVE(VisitForValue(expr->key()));
HValue* index = Pop();
HValue* string = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* char_code =
- BuildStringCharCodeAt(context, string, index);
+ BuildStringCharCodeAt(string, index);
AddInstruction(char_code);
instr = HStringCharFromCode::New(zone(), context, char_code);
@@ -5929,11 +5821,38 @@ void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
}
+void HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant,
+ CompilationInfo* info) {
+ HConstant* constant_value = New<HConstant>(constant);
+
+ if (constant->map()->CanOmitMapChecks()) {
+ constant->map()->AddDependentCompilationInfo(
+ DependentCode::kPrototypeCheckGroup, info);
+ return;
+ }
+
+ AddInstruction(constant_value);
+ HCheckMaps* check =
+ Add<HCheckMaps>(constant_value, handle(constant->map()), info);
+ check->ClearGVNFlag(kDependsOnElementsKind);
+}
+
+
+void HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
+ Handle<JSObject> holder) {
+ BuildConstantMapCheck(prototype, top_info());
+ while (!prototype.is_identical_to(holder)) {
+ prototype = handle(JSObject::cast(prototype->GetPrototype()));
+ BuildConstantMapCheck(prototype, top_info());
+ }
+}
+
+
void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
Handle<Map> receiver_map) {
if (!holder.is_null()) {
Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
- Add<HCheckPrototypeMaps>(prototype, holder, zone(), top_info());
+ BuildCheckPrototypeMaps(prototype, holder);
}
}
@@ -5945,7 +5864,7 @@ void HOptimizedGraphBuilder::AddCheckConstantFunction(
// Constant functions have the nice property that the map will change if they
// are overwritten. Therefore it is enough to check the map of the holder and
// its prototypes.
- AddCheckMapsWithTransitions(receiver, receiver_map);
+ AddCheckMap(receiver, receiver_map);
AddCheckPrototypeMaps(holder, receiver_map);
}
@@ -6002,7 +5921,7 @@ bool HOptimizedGraphBuilder::TryCallPolymorphicAsMonomorphic(
if (!expr->ComputeTarget(map, name)) return false;
BuildCheckHeapObject(receiver);
- AddInstruction(HCheckMaps::New(receiver, types, zone()));
+ Add<HCheckMaps>(receiver, types);
AddCheckPrototypeMaps(expr->holder(), map);
if (FLAG_trace_inlining) {
Handle<JSFunction> caller = current_info()->closure();
@@ -6158,7 +6077,7 @@ void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
Drop(argument_count - (ast_context()->IsEffect() ? 0 : 1));
FinishExitWithHardDeoptimization(join);
} else {
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallNamed* call = new(zone()) HCallNamed(context, name, argument_count);
call->set_position(expr->position());
PreProcessCall(call);
@@ -6305,7 +6224,7 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
if (target_info.isolate()->has_pending_exception()) {
// Parse or scope error, never optimize this function.
SetStackOverflow();
- target_shared->DisableOptimization("parse/scope error");
+ target_shared->DisableOptimization(kParseScopeError);
}
TraceInline(target, caller, "parse failure");
return false;
@@ -6424,7 +6343,7 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
ASSERT(function->scope()->arguments()->IsStackAllocated());
HEnvironment* arguments_env = inner_env->arguments_environment();
int arguments_count = arguments_env->parameter_count();
- arguments_object = Add<HArgumentsObject>(arguments_count, zone());
+ arguments_object = Add<HArgumentsObject>(arguments_count);
inner_env->Bind(function->scope()->arguments(), arguments_object);
for (int i = 0; i < arguments_count; i++) {
arguments_object->AddArgument(arguments_env->Lookup(i), zone());
@@ -6435,7 +6354,7 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
Add<HEnterInlined>(target, arguments_count, function,
function_state()->inlining_kind(),
function->scope()->arguments(),
- arguments_object, undefined_receiver, zone());
+ arguments_object, undefined_receiver);
function_state()->set_entry(enter_inlined);
VisitDeclarations(target_info.scope()->declarations());
@@ -6444,7 +6363,7 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
// Bail out if the inline function did, as we cannot residualize a call
// instead.
TraceInline(target, caller, "inline graph construction failed");
- target_shared->DisableOptimization("inlining bailed out");
+ target_shared->DisableOptimization(kInliningBailedOut);
inline_bailout_ = true;
delete target_state;
return true;
@@ -6626,7 +6545,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr,
case kMathTan:
if (expr->arguments()->length() == 1) {
HValue* argument = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
Drop(1); // Receiver.
HInstruction* op =
HUnaryMathOperation::New(zone(), context, argument, id);
@@ -6641,7 +6560,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr,
HValue* right = Pop();
HValue* left = Pop();
Drop(1); // Receiver.
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* op = HMul::NewImul(zone(), context, left, right);
if (drop_extra) Drop(1); // Optionally drop the function.
ast_context()->ReturnInstruction(op, expr->id());
@@ -6672,13 +6591,13 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
if (argument_count == 2 && check_type == STRING_CHECK) {
HValue* index = Pop();
HValue* string = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
ASSERT(!expr->holder().is_null());
- Add<HCheckPrototypeMaps>(Call::GetPrototypeForPrimitiveCheck(
+ BuildCheckPrototypeMaps(Call::GetPrototypeForPrimitiveCheck(
STRING_CHECK, expr->holder()->GetIsolate()),
- expr->holder(), zone(), top_info());
+ expr->holder());
HInstruction* char_code =
- BuildStringCharCodeAt(context, string, index);
+ BuildStringCharCodeAt(string, index);
if (id == kStringCharCodeAt) {
ast_context()->ReturnInstruction(char_code, expr->id());
return true;
@@ -6694,7 +6613,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
HValue* argument = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
Drop(1); // Receiver.
HInstruction* result =
HStringCharFromCode::New(zone(), context, argument);
@@ -6716,7 +6635,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
HValue* argument = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
Drop(1); // Receiver.
HInstruction* op =
HUnaryMathOperation::New(zone(), context, argument, id);
@@ -6731,7 +6650,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
HValue* right = Pop();
HValue* left = Pop();
Pop(); // Pop receiver.
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* result = NULL;
// Use sqrt() if exponent is 0.5 or -0.5.
if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
@@ -6756,7 +6675,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
}
if (result == NULL) {
- result = HPower::New(zone(), left, right);
+ result = HPower::New(zone(), context, left, right);
}
ast_context()->ReturnInstruction(result, expr->id());
return true;
@@ -6766,8 +6685,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
Drop(1); // Receiver.
- HValue* context = environment()->LookupContext();
- HGlobalObject* global_object = Add<HGlobalObject>(context);
+ HGlobalObject* global_object = Add<HGlobalObject>();
HRandom* result = new(zone()) HRandom(global_object);
ast_context()->ReturnInstruction(result, expr->id());
return true;
@@ -6780,7 +6698,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
HValue* right = Pop();
HValue* left = Pop();
Drop(1); // Receiver.
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
: HMathMinMax::kMathMax;
HInstruction* result =
@@ -6795,7 +6713,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
HValue* right = Pop();
HValue* left = Pop();
Drop(1); // Receiver.
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* result = HMul::NewImul(zone(), context, left, right);
ast_context()->ReturnInstruction(result, expr->id());
return true;
@@ -6879,12 +6797,12 @@ bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
}
Drop(arguments_count - 1);
- PushAndAdd(new(zone()) HPushArgument(Pop()));
+ PushAndAdd(New<HPushArgument>(Pop()));
for (int i = 1; i < arguments_count; i++) {
- PushAndAdd(new(zone()) HPushArgument(arguments_values->at(i)));
+ PushAndAdd(New<HPushArgument>(arguments_values->at(i)));
}
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInvokeFunction* call = new(zone()) HInvokeFunction(
context,
function,
@@ -6898,55 +6816,6 @@ bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
}
-// Checks if all maps in |types| are from the same family, i.e., are elements
-// transitions of each other. Returns either NULL if they are not from the same
-// family, or a Map* indicating the map with the first elements kind of the
-// family that is in the list.
-static Map* CheckSameElementsFamily(SmallMapList* types) {
- if (types->length() <= 1) return NULL;
- // Check if all maps belong to the same transition family.
- Map* kinds[kFastElementsKindCount];
- Map* first_map = *types->first();
- ElementsKind first_kind = first_map->elements_kind();
- if (!IsFastElementsKind(first_kind)) return NULL;
- int first_index = GetSequenceIndexFromFastElementsKind(first_kind);
- int last_index = first_index;
-
- for (int i = 0; i < kFastElementsKindCount; i++) kinds[i] = NULL;
-
- kinds[first_index] = first_map;
-
- for (int i = 1; i < types->length(); ++i) {
- Map* map = *types->at(i);
- ElementsKind elements_kind = map->elements_kind();
- if (!IsFastElementsKind(elements_kind)) return NULL;
- int index = GetSequenceIndexFromFastElementsKind(elements_kind);
- if (index < first_index) {
- first_index = index;
- } else if (index > last_index) {
- last_index = index;
- } else if (kinds[index] != map) {
- return NULL;
- }
- kinds[index] = map;
- }
-
- Map* current = kinds[first_index];
- for (int i = first_index + 1; i <= last_index; i++) {
- Map* next = kinds[i];
- if (next != NULL) {
- ElementsKind current_kind = next->elements_kind();
- if (next != current->LookupElementsTransitionMap(current_kind)) {
- return NULL;
- }
- current = next;
- }
- }
-
- return kinds[first_index];
-}
-
-
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
@@ -6970,7 +6839,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
call = new(zone()) HCallKeyed(context, key, argument_count);
call->set_position(expr->position());
Drop(argument_count + 1); // 1 is the key.
@@ -6992,12 +6861,6 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
receiver_map = (types == NULL || types->is_empty())
? Handle<Map>::null()
: types->first();
- } else {
- Map* family_map = CheckSameElementsFamily(types);
- if (family_map != NULL) {
- receiver_map = Handle<Map>(family_map);
- monomorphic = expr->ComputeTarget(receiver_map, name);
- }
}
HValue* receiver =
@@ -7020,7 +6883,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
// When the target has a custom call IC generator, use the IC,
// because it is likely to generate better code. Also use the IC
// when a primitive receiver check is required.
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
call = PreProcessCall(
new(zone()) HCallNamed(context, name, argument_count));
} else {
@@ -7037,7 +6900,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
return;
} else {
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
call = PreProcessCall(
new(zone()) HCallNamed(context, name, argument_count));
}
@@ -7045,7 +6908,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
} else {
VariableProxy* proxy = expr->expression()->AsVariableProxy();
if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
- return Bailout("possible direct call to eval");
+ return Bailout(kPossibleDirectCallToEval);
}
bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
@@ -7065,7 +6928,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
if (known_global_function) {
// Push the global object instead of the global receiver because
// code generated by the full code generator expects it.
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
PushAndAdd(global_object);
CHECK_ALIVE(VisitExpressions(expr->arguments()));
@@ -7099,7 +6962,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
if (CallStubCompiler::HasCustomCallGenerator(expr->target())) {
// When the target has a custom call IC generator, use the IC,
// because it is likely to generate better code.
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
call = PreProcessCall(
new(zone()) HCallNamed(context, var->name(), argument_count));
} else {
@@ -7107,12 +6970,11 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
argument_count));
}
} else {
- HValue* context = environment()->LookupContext();
- HGlobalObject* receiver = Add<HGlobalObject>(context);
- PushAndAdd(new(zone()) HPushArgument(receiver));
+ HGlobalObject* receiver = Add<HGlobalObject>();
+ PushAndAdd(New<HPushArgument>(receiver));
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
- call = new(zone()) HCallGlobal(context, var->name(), argument_count);
+ call = New<HCallGlobal>(var->name(), argument_count);
Drop(argument_count);
}
@@ -7121,9 +6983,8 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
// evaluation of the arguments.
CHECK_ALIVE(VisitForValue(expr->expression()));
HValue* function = Top();
- HValue* context = environment()->LookupContext();
- HGlobalObject* global = Add<HGlobalObject>(context);
- HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
+ HGlobalObject* global = Add<HGlobalObject>();
+ HGlobalReceiver* receiver = New<HGlobalReceiver>(global);
PushAndAdd(receiver);
CHECK_ALIVE(VisitExpressions(expr->arguments()));
Add<HCheckFunction>(function, expr->target());
@@ -7140,24 +7001,20 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
if (TryInlineCall(expr, true)) { // Drop function from environment.
return;
} else {
- call = PreProcessCall(
- new(zone()) HInvokeFunction(context,
- function,
- expr->target(),
- argument_count));
+ call = PreProcessCall(New<HInvokeFunction>(function, expr->target(),
+ argument_count));
Drop(1); // The function.
}
} else {
CHECK_ALIVE(VisitForValue(expr->expression()));
HValue* function = Top();
- HValue* context = environment()->LookupContext();
- HGlobalObject* global_object = Add<HGlobalObject>(context);
+ HGlobalObject* global_object = Add<HGlobalObject>();
HGlobalReceiver* receiver = Add<HGlobalReceiver>(global_object);
- PushAndAdd(new(zone()) HPushArgument(receiver));
+ PushAndAdd(New<HPushArgument>(receiver));
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
- call = new(zone()) HCallFunction(context, function, argument_count);
+ call = New<HCallFunction>(function, argument_count);
Drop(argument_count + 1);
}
}
@@ -7181,7 +7038,7 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
int argument_count = expr->arguments()->length() + 1; // Plus constructor.
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
Factory* factory = isolate()->factory();
if (FLAG_inline_construct &&
@@ -7209,42 +7066,41 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
// Allocate an instance of the implicit receiver object.
HValue* size_in_bytes = Add<HConstant>(instance_size);
- HAllocate::Flags flags = HAllocate::DefaultFlags();
- if (FLAG_pretenuring_call_new &&
- isolate()->heap()->ShouldGloballyPretenure()) {
- flags = static_cast<HAllocate::Flags>(
- flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
- }
+ PretenureFlag pretenure_flag =
+ (FLAG_pretenuring_call_new &&
+ isolate()->heap()->GetPretenureMode() == TENURED)
+ ? TENURED : NOT_TENURED;
HAllocate* receiver =
- Add<HAllocate>(context, size_in_bytes, HType::JSObject(), flags);
+ Add<HAllocate>(size_in_bytes, HType::JSObject(), pretenure_flag,
+ JS_OBJECT_TYPE);
receiver->set_known_initial_map(initial_map);
// Load the initial map from the constructor.
HValue* constructor_value = Add<HConstant>(constructor);
HValue* initial_map_value =
- AddLoad(constructor_value, HObjectAccess::ForJSObjectOffset(
+ Add<HLoadNamedField>(constructor_value, HObjectAccess::ForJSObjectOffset(
JSFunction::kPrototypeOrInitialMapOffset));
// Initialize map and fields of the newly allocated object.
{ NoObservableSideEffectsScope no_effects(this);
ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
- AddStore(receiver,
- HObjectAccess::ForJSObjectOffset(JSObject::kMapOffset),
- initial_map_value);
+ Add<HStoreNamedField>(receiver,
+ HObjectAccess::ForJSObjectOffset(JSObject::kMapOffset),
+ initial_map_value);
HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
- AddStore(receiver,
- HObjectAccess::ForJSObjectOffset(JSObject::kPropertiesOffset),
- empty_fixed_array);
- AddStore(receiver,
- HObjectAccess::ForJSObjectOffset(JSObject::kElementsOffset),
- empty_fixed_array);
+ Add<HStoreNamedField>(receiver,
+ HObjectAccess::ForJSObjectOffset(JSObject::kPropertiesOffset),
+ empty_fixed_array);
+ Add<HStoreNamedField>(receiver,
+ HObjectAccess::ForJSObjectOffset(JSObject::kElementsOffset),
+ empty_fixed_array);
if (initial_map->inobject_properties() != 0) {
HConstant* undefined = graph()->GetConstantUndefined();
for (int i = 0; i < initial_map->inobject_properties(); i++) {
int property_offset = JSObject::kHeaderSize + i * kPointerSize;
- AddStore(receiver,
- HObjectAccess::ForJSObjectOffset(property_offset),
- undefined);
+ Add<HStoreNamedField>(receiver,
+ HObjectAccess::ForJSObjectOffset(property_offset),
+ undefined);
}
}
}
@@ -7320,7 +7176,7 @@ void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
if (expr->is_jsruntime()) {
- return Bailout("call to a JavaScript runtime function");
+ return Bailout(kCallToAJavaScriptRuntimeFunction);
}
const Runtime::Function* function = expr->function();
@@ -7342,11 +7198,10 @@ void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
ASSERT(function->intrinsic_type == Runtime::RUNTIME);
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
- HValue* context = environment()->LookupContext();
Handle<String> name = expr->name();
int argument_count = expr->arguments()->length();
- HCallRuntime* call =
- new(zone()) HCallRuntime(context, name, function, argument_count);
+ HCallRuntime* call = New<HCallRuntime>(name, function,
+ argument_count);
Drop(argument_count);
return ast_context()->ReturnInstruction(call, expr->id());
}
@@ -7361,8 +7216,6 @@ void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
case Token::DELETE: return VisitDelete(expr);
case Token::VOID: return VisitVoid(expr);
case Token::TYPEOF: return VisitTypeof(expr);
- case Token::SUB: return VisitSub(expr);
- case Token::BIT_NOT: return VisitBitNot(expr);
case Token::NOT: return VisitNot(expr);
default: UNREACHABLE();
}
@@ -7377,19 +7230,18 @@ void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
CHECK_ALIVE(VisitForValue(prop->key()));
HValue* key = Pop();
HValue* obj = Pop();
- HValue* context = environment()->LookupContext();
- HValue* function = AddLoadJSBuiltin(Builtins::DELETE, context);
+ HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
Add<HPushArgument>(obj);
Add<HPushArgument>(key);
Add<HPushArgument>(Add<HConstant>(function_strict_mode_flag()));
// TODO(olivf) InvokeFunction produces a check for the parameter count,
// even though we are certain to pass the correct number of arguments here.
- HInstruction* instr = new(zone()) HInvokeFunction(context, function, 3);
+ HInstruction* instr = New<HInvokeFunction>(function, 3);
return ast_context()->ReturnInstruction(instr, expr->id());
} else if (proxy != NULL) {
Variable* var = proxy->var();
if (var->IsUnallocated()) {
- Bailout("delete with global variable");
+ Bailout(kDeleteWithGlobalVariable);
} else if (var->IsStackAllocated() || var->IsContextSlot()) {
// Result of deleting non-global variables is false. 'this' is not
// really a variable, though we implement it as one. The
@@ -7399,7 +7251,7 @@ void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
: graph()->GetConstantFalse();
return ast_context()->ReturnValue(value);
} else {
- Bailout("delete with non-global variable");
+ Bailout(kDeleteWithNonGlobalVariable);
}
} else {
// Result of deleting non-property, non-variable reference is true.
@@ -7419,30 +7271,12 @@ void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
CHECK_ALIVE(VisitForTypeOf(expr->expression()));
HValue* value = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* instr = new(zone()) HTypeof(context, value);
return ast_context()->ReturnInstruction(instr, expr->id());
}
-void HOptimizedGraphBuilder::VisitSub(UnaryOperation* expr) {
- CHECK_ALIVE(VisitForValue(expr->expression()));
- Handle<Type> operand_type = expr->expression()->bounds().lower;
- HValue* value = TruncateToNumber(Pop(), &operand_type);
- HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::SUB);
- return ast_context()->ReturnInstruction(instr, expr->id());
-}
-
-
-void HOptimizedGraphBuilder::VisitBitNot(UnaryOperation* expr) {
- CHECK_ALIVE(VisitForValue(expr->expression()));
- Handle<Type> operand_type = expr->expression()->bounds().lower;
- HValue* value = TruncateToNumber(Pop(), &operand_type);
- HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::BIT_NOT);
- return ast_context()->ReturnInstruction(instr, expr->id());
-}
-
-
void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
if (ast_context()->IsTest()) {
TestContext* context = TestContext::cast(ast_context());
@@ -7516,11 +7350,9 @@ HInstruction* HOptimizedGraphBuilder::BuildIncrement(
HConstant* delta = (expr->op() == Token::INC)
? graph()->GetConstant1()
: graph()->GetConstantMinus1();
- HValue* context = environment()->LookupContext();
- HInstruction* instr = HAdd::New(zone(), context, Top(), delta);
+ HInstruction* instr = Add<HAdd>(Top(), delta);
instr->SetFlag(HInstruction::kCannotBeTagged);
instr->ClearAllSideEffects();
- AddInstruction(instr);
return instr;
}
@@ -7533,7 +7365,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
VariableProxy* proxy = target->AsVariableProxy();
Property* prop = target->AsProperty();
if (proxy == NULL && prop == NULL) {
- return Bailout("invalid lhs in count operation");
+ return Bailout(kInvalidLhsInCountOperation);
}
// Match the full code generator stack by simulating an extra stack
@@ -7547,7 +7379,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
if (proxy != NULL) {
Variable* var = proxy->var();
if (var->mode() == CONST) {
- return Bailout("unsupported count operation with const");
+ return Bailout(kUnsupportedCountOperationWithConst);
}
// Argument of the count operation is a variable, not a property.
ASSERT(prop == NULL);
@@ -7581,7 +7413,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
int count = current_info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == current_info()->scope()->parameter(i)) {
- return Bailout("assignment to parameter in arguments object");
+ return Bailout(kAssignmentToParameterInArgumentsObject);
}
}
}
@@ -7598,7 +7430,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
}
case Variable::LOOKUP:
- return Bailout("lookup variable in count operation");
+ return Bailout(kLookupVariableInCountOperation);
}
} else {
@@ -7685,7 +7517,6 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
- HValue* context,
HValue* string,
HValue* index) {
if (string->IsConstant() && index->IsConstant()) {
@@ -7695,17 +7526,18 @@ HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
int32_t i = c_index->NumberValueAsInteger32();
Handle<String> s = c_string->StringValue();
if (i < 0 || i >= s->length()) {
- return new(zone()) HConstant(OS::nan_value());
+ return New<HConstant>(OS::nan_value());
}
- return new(zone()) HConstant(s->Get(i));
+ return New<HConstant>(s->Get(i));
}
}
BuildCheckHeapObject(string);
- AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
- HInstruction* length = HStringLength::New(zone(), string);
+ HValue* checkstring =
+ AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
+ HInstruction* length = BuildLoadStringLength(string, checkstring);
AddInstruction(length);
HInstruction* checked_index = Add<HBoundsCheck>(index, length);
- return new(zone()) HStringCharCodeAt(context, string, checked_index);
+ return New<HStringCharCodeAt>(string, checked_index);
}
@@ -7775,26 +7607,6 @@ HValue* HGraphBuilder::TruncateToNumber(HValue* value, Handle<Type>* expected) {
*expected = handle(Type::Number(), isolate());
return AddInstruction(number.value);
}
- return value;
- }
-
- Handle<Type> expected_type = *expected;
- Representation rep = Representation::FromType(expected_type);
- if (!rep.IsTagged()) return value;
-
- // If our type feedback suggests that we can non-observably truncate to number
- // we introduce the appropriate check here. This avoids 'value' having a
- // tagged representation later on.
- if (expected_type->Is(Type::Oddball())) {
- // TODO(olivf) The BinaryOpStub only records undefined. It might pay off to
- // also record booleans and convert them to 0/1 here.
- IfBuilder if_nan(this);
- if_nan.If<HCompareObjectEqAndBranch>(value,
- graph()->GetConstantUndefined());
- if_nan.Then();
- if_nan.ElseDeopt();
- if_nan.End();
- return Add<HConstant>(OS::nan_value(), Representation::Double());
}
return value;
@@ -7805,7 +7617,7 @@ HInstruction* HOptimizedGraphBuilder::BuildBinaryOperation(
BinaryOperation* expr,
HValue* left,
HValue* right) {
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
Handle<Type> left_type = expr->left()->bounds().lower;
Handle<Type> right_type = expr->right()->bounds().lower;
Handle<Type> result_type = expr->bounds().lower;
@@ -7858,7 +7670,7 @@ HInstruction* HOptimizedGraphBuilder::BuildBinaryOperation(
break;
case Token::BIT_XOR:
case Token::BIT_AND:
- instr = HBitwise::New(zone(), expr->op(), context, left, right);
+ instr = NewUncasted<HBitwise>(expr->op(), left, right);
break;
case Token::BIT_OR: {
HValue* operand, *shift_amount;
@@ -7867,7 +7679,7 @@ HInstruction* HOptimizedGraphBuilder::BuildBinaryOperation(
MatchRotateRight(left, right, &operand, &shift_amount)) {
instr = new(zone()) HRor(context, operand, shift_amount);
} else {
- instr = HBitwise::New(zone(), expr->op(), context, left, right);
+ instr = NewUncasted<HBitwise>(expr->op(), left, right);
}
break;
}
@@ -8112,14 +7924,14 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
CHECK_ALIVE(VisitForValue(expr->left()));
CHECK_ALIVE(VisitForValue(expr->right()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HValue* right = Pop();
HValue* left = Pop();
Token::Value op = expr->op();
if (IsLiteralCompareBool(left, op, right)) {
HCompareObjectEqAndBranch* result =
- new(zone()) HCompareObjectEqAndBranch(left, right);
+ New<HCompareObjectEqAndBranch>(left, right);
result->set_position(expr->position());
return ast_context()->ReturnControl(result, expr->id());
}
@@ -8165,7 +7977,7 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
// Code below assumes that we don't fall through.
UNREACHABLE();
} else if (op == Token::IN) {
- HValue* function = AddLoadJSBuiltin(Builtins::IN, context);
+ HValue* function = AddLoadJSBuiltin(Builtins::IN);
Add<HPushArgument>(left);
Add<HPushArgument>(right);
// TODO(olivf) InvokeFunction produces a check for the parameter count,
@@ -8189,10 +8001,10 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
// Can we get away with map check and not instance type check?
if (combined_type->IsClass()) {
Handle<Map> map = combined_type->AsClass();
- AddCheckMapsWithTransitions(left, map);
- AddCheckMapsWithTransitions(right, map);
+ AddCheckMap(left, map);
+ AddCheckMap(right, map);
HCompareObjectEqAndBranch* result =
- new(zone()) HCompareObjectEqAndBranch(left, right);
+ New<HCompareObjectEqAndBranch>(left, right);
result->set_position(expr->position());
return ast_context()->ReturnControl(result, expr->id());
} else {
@@ -8207,7 +8019,7 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
}
}
default:
- return Bailout("Unsupported non-primitive compare");
+ return Bailout(kUnsupportedNonPrimitiveCompare);
}
} else if (combined_type->Is(Type::InternalizedString()) &&
Token::IsEqualityOp(op)) {
@@ -8273,7 +8085,7 @@ HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
// If we share optimized code between different closures, the
// this-function is not a constant, except inside an inlined body.
if (function_state()->outer() != NULL) {
- return new(zone()) HConstant(
+ return New<HConstant>(
function_state()->compilation_info()->closure());
} else {
return new(zone()) HThisFunction;
@@ -8294,34 +8106,27 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
HInstruction* target = NULL;
HInstruction* data_target = NULL;
- ElementsKind kind = boilerplate_object->map()->elements_kind();
-
- if (isolate()->heap()->ShouldGloballyPretenure()) {
+ if (isolate()->heap()->GetPretenureMode() == TENURED) {
if (data_size != 0) {
- HAllocate::Flags data_flags =
- static_cast<HAllocate::Flags>(HAllocate::DefaultFlags(kind) |
- HAllocate::CAN_ALLOCATE_IN_OLD_DATA_SPACE);
HValue* size_in_bytes = Add<HConstant>(data_size);
- data_target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(),
- data_flags);
+ data_target = Add<HAllocate>(size_in_bytes, HType::JSObject(), TENURED,
+ FIXED_DOUBLE_ARRAY_TYPE);
Handle<Map> free_space_map = isolate()->factory()->free_space_map();
AddStoreMapConstant(data_target, free_space_map);
HObjectAccess access =
HObjectAccess::ForJSObjectOffset(FreeSpace::kSizeOffset);
- AddStore(data_target, access, size_in_bytes);
+ Add<HStoreNamedField>(data_target, access, size_in_bytes);
}
if (pointer_size != 0) {
- HAllocate::Flags pointer_flags =
- static_cast<HAllocate::Flags>(HAllocate::DefaultFlags() |
- HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
HValue* size_in_bytes = Add<HConstant>(pointer_size);
- target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(),
- pointer_flags);
+ target = Add<HAllocate>(size_in_bytes, HType::JSObject(), TENURED,
+ JS_OBJECT_TYPE);
}
} else {
- HAllocate::Flags flags = HAllocate::DefaultFlags(kind);
+ InstanceType instance_type = boilerplate_object->map()->instance_type();
HValue* size_in_bytes = Add<HConstant>(data_size + pointer_size);
- target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(), flags);
+ target = Add<HAllocate>(size_in_bytes, HType::JSObject(), NOT_TENURED,
+ instance_type);
}
int offset = 0;
@@ -8342,8 +8147,6 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy(
HInstruction* data_target,
int* data_offset,
AllocationSiteMode mode) {
- Zone* zone = this->zone();
-
bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
boilerplate_object->map()->CanTrackAllocationSite();
@@ -8355,8 +8158,7 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy(
HInstruction* allocation_site = NULL;
if (create_allocation_site_info) {
- allocation_site = AddInstruction(new(zone) HConstant(
- allocation_site_object, Representation::Tagged()));
+ allocation_site = Add<HConstant>(allocation_site_object);
}
// Only elements backing stores for non-COW arrays need to be copied.
@@ -8437,14 +8239,15 @@ HValue* HOptimizedGraphBuilder::BuildEmitObjectHeader(
}
result = elements;
}
- AddStore(object_header, HObjectAccess::ForElementsPointer(), elements);
+ Add<HStoreNamedField>(object_header, HObjectAccess::ForElementsPointer(),
+ elements);
Handle<Object> properties_field =
Handle<Object>(boilerplate_object->properties(), isolate());
ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
HInstruction* properties = Add<HConstant>(properties_field);
HObjectAccess access = HObjectAccess::ForPropertiesPointer();
- AddStore(object_header, access, properties);
+ Add<HStoreNamedField>(object_header, access, properties);
if (boilerplate_object->IsJSArray()) {
Handle<JSArray> boilerplate_array =
@@ -8454,7 +8257,7 @@ HValue* HOptimizedGraphBuilder::BuildEmitObjectHeader(
HInstruction* length = Add<HConstant>(length_field);
ASSERT(boilerplate_array->length()->IsSmi());
- AddStore(object_header, HObjectAccess::ForArrayLength(
+ Add<HStoreNamedField>(object_header, HObjectAccess::ForArrayLength(
boilerplate_array->GetElementsKind()), length);
}
@@ -8499,7 +8302,7 @@ void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
HInstruction* value_instruction = Add<HInnerAllocatedObject>(target,
*offset);
- AddStore(object_properties, access, value_instruction);
+ Add<HStoreNamedField>(object_properties, access, value_instruction);
BuildEmitDeepCopy(value_object, original_value_object,
Handle<Object>::null(), target,
offset, data_target, data_offset,
@@ -8520,12 +8323,12 @@ void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
}
AddStoreMapConstant(double_box,
isolate()->factory()->heap_number_map());
- AddStore(double_box, HObjectAccess::ForHeapNumberValue(),
+ Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
value_instruction);
value_instruction = double_box;
}
- AddStore(object_properties, access, value_instruction);
+ Add<HStoreNamedField>(object_properties, access, value_instruction);
}
}
@@ -8536,7 +8339,7 @@ void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
ASSERT(boilerplate_object->IsJSObject());
int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
HObjectAccess access = HObjectAccess::ForJSObjectOffset(property_offset);
- AddStore(object_properties, access, value_instruction);
+ Add<HStoreNamedField>(object_properties, access, value_instruction);
}
}
@@ -8645,7 +8448,7 @@ void HOptimizedGraphBuilder::VisitDeclarations(
int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
DeclareGlobalsLanguageMode::encode(current_info()->language_mode());
- Add<HDeclareGlobals>(environment()->LookupContext(), array, flags);
+ Add<HDeclareGlobals>(array, flags);
globals_.Clear();
}
}
@@ -8674,7 +8477,7 @@ void HOptimizedGraphBuilder::VisitVariableDeclaration(
case Variable::CONTEXT:
if (hole_init) {
HValue* value = graph()->GetConstantHole();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HStoreContextSlot* store = Add<HStoreContextSlot>(
context, variable->index(), HStoreContextSlot::kNoCheck, value);
if (store->HasObservableSideEffects()) {
@@ -8683,7 +8486,7 @@ void HOptimizedGraphBuilder::VisitVariableDeclaration(
}
break;
case Variable::LOOKUP:
- return Bailout("unsupported lookup slot in declaration");
+ return Bailout(kUnsupportedLookupSlotInDeclaration);
}
}
@@ -8712,7 +8515,7 @@ void HOptimizedGraphBuilder::VisitFunctionDeclaration(
case Variable::CONTEXT: {
CHECK_ALIVE(VisitForValue(declaration->fun()));
HValue* value = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HStoreContextSlot* store = Add<HStoreContextSlot>(
context, variable->index(), HStoreContextSlot::kNoCheck, value);
if (store->HasObservableSideEffects()) {
@@ -8721,7 +8524,7 @@ void HOptimizedGraphBuilder::VisitFunctionDeclaration(
break;
}
case Variable::LOOKUP:
- return Bailout("unsupported lookup slot in declaration");
+ return Bailout(kUnsupportedLookupSlotInDeclaration);
}
}
@@ -8842,7 +8645,7 @@ void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
- return Bailout("inlined runtime function: IsNonNegativeSmi");
+ return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
}
@@ -8858,8 +8661,7 @@ void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
CallRuntime* call) {
- return Bailout(
- "inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
+ return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
}
@@ -8887,7 +8689,7 @@ void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
ASSERT(function_state()->outer() == NULL);
ASSERT(call->arguments()->length() == 0);
HInstruction* elements = Add<HArgumentsElements>(false);
- HArgumentsLength* result = new(zone()) HArgumentsLength(elements);
+ HArgumentsLength* result = New<HArgumentsLength>(elements);
return ast_context()->ReturnInstruction(result, call->id());
}
@@ -8913,7 +8715,7 @@ void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
// The special form detected by IsClassOfTest is detected before we get here
// and does not cause a bailout.
- return Bailout("inlined runtime function: ClassOf");
+ return Bailout(kInlinedRuntimeFunctionClassOf);
}
@@ -8996,7 +8798,7 @@ void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
// Create in-object property store to kValueOffset.
set_current_block(if_js_value);
- AddStore(object,
+ Add<HStoreNamedField>(object,
HObjectAccess::ForJSObjectOffset(JSValue::kValueOffset), value);
if_js_value->Goto(join);
join->SetJoinId(call->id());
@@ -9012,8 +8814,7 @@ void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
HValue* index = Pop();
HValue* string = Pop();
- HValue* context = environment()->LookupContext();
- HInstruction* result = BuildStringCharCodeAt(context, string, index);
+ HInstruction* result = BuildStringCharCodeAt(string, index);
return ast_context()->ReturnInstruction(result, call->id());
}
@@ -9023,8 +8824,7 @@ void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
HValue* char_code = Pop();
- HValue* context = environment()->LookupContext();
- HInstruction* result = HStringCharFromCode::New(zone(), context, char_code);
+ HInstruction* result = New<HStringCharFromCode>(char_code);
return ast_context()->ReturnInstruction(result, call->id());
}
@@ -9036,10 +8836,9 @@ void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
HValue* index = Pop();
HValue* string = Pop();
- HValue* context = environment()->LookupContext();
- HInstruction* char_code = BuildStringCharCodeAt(context, string, index);
+ HInstruction* char_code = BuildStringCharCodeAt(string, index);
AddInstruction(char_code);
- HInstruction* result = HStringCharFromCode::New(zone(), context, char_code);
+ HInstruction* result = New<HStringCharFromCode>(char_code);
return ast_context()->ReturnInstruction(result, call->id());
}
@@ -9052,7 +8851,7 @@ void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
HValue* right = Pop();
HValue* left = Pop();
HCompareObjectEqAndBranch* result =
- new(zone()) HCompareObjectEqAndBranch(left, right);
+ New<HCompareObjectEqAndBranch>(left, right);
return ast_context()->ReturnControl(result, call->id());
}
@@ -9065,8 +8864,7 @@ void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) {
// Fast support for Math.random().
void HOptimizedGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
- HValue* context = environment()->LookupContext();
- HGlobalObject* global_object = Add<HGlobalObject>(context);
+ HGlobalObject* global_object = Add<HGlobalObject>();
HRandom* result = new(zone()) HRandom(global_object);
return ast_context()->ReturnInstruction(result, call->id());
}
@@ -9079,7 +8877,7 @@ void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
HValue* right = Pop();
HValue* left = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* result = HStringAdd::New(
zone(), context, left, right, STRING_ADD_CHECK_BOTH);
return ast_context()->ReturnInstruction(result, call->id());
@@ -9090,7 +8888,7 @@ void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
ASSERT_EQ(3, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result = new(zone()) HCallStub(context, CodeStub::SubString, 3);
Drop(3);
return ast_context()->ReturnInstruction(result, call->id());
@@ -9101,7 +8899,7 @@ void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
ASSERT_EQ(2, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result =
new(zone()) HCallStub(context, CodeStub::StringCompare, 2);
Drop(2);
@@ -9113,7 +8911,7 @@ void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
ASSERT_EQ(4, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result = new(zone()) HCallStub(context, CodeStub::RegExpExec, 4);
Drop(4);
return ast_context()->ReturnInstruction(result, call->id());
@@ -9124,7 +8922,7 @@ void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
ASSERT_EQ(3, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result =
new(zone()) HCallStub(context, CodeStub::RegExpConstructResult, 3);
Drop(3);
@@ -9134,7 +8932,7 @@ void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
// Support for fast native caches.
void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
- return Bailout("inlined runtime function: GetFromCache");
+ return Bailout(kInlinedRuntimeFunctionGetFromCache);
}
@@ -9142,7 +8940,7 @@ void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result =
new(zone()) HCallStub(context, CodeStub::NumberToString, 1);
Drop(1);
@@ -9162,7 +8960,6 @@ void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
CHECK_ALIVE(VisitForValue(call->arguments()->last()));
HValue* function = Pop();
- HValue* context = environment()->LookupContext();
// Branch for function proxies, or other non-functions.
HHasInstanceTypeAndBranch* typecheck =
@@ -9175,14 +8972,13 @@ void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
current_block()->Finish(typecheck);
set_current_block(if_jsfunction);
- HInstruction* invoke_result =
- Add<HInvokeFunction>(context, function, arg_count);
+ HInstruction* invoke_result = Add<HInvokeFunction>(function, arg_count);
Drop(arg_count);
Push(invoke_result);
if_jsfunction->Goto(join);
set_current_block(if_nonfunction);
- HInstruction* call_result = Add<HCallFunction>(context, function, arg_count);
+ HInstruction* call_result = Add<HCallFunction>(function, arg_count);
Drop(arg_count);
Push(call_result);
if_nonfunction->Goto(join);
@@ -9200,7 +8996,7 @@ void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
HValue* right = Pop();
HValue* left = Pop();
- HInstruction* result = HPower::New(zone(), left, right);
+ HInstruction* result = HPower::New(zone(), context(), left, right);
return ast_context()->ReturnInstruction(result, call->id());
}
@@ -9208,7 +9004,7 @@ void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateMathSin(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result =
new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
result->set_transcendental_type(TranscendentalCache::SIN);
@@ -9220,7 +9016,7 @@ void HOptimizedGraphBuilder::GenerateMathSin(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateMathCos(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result =
new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
result->set_transcendental_type(TranscendentalCache::COS);
@@ -9232,7 +9028,7 @@ void HOptimizedGraphBuilder::GenerateMathCos(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateMathTan(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result =
new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
result->set_transcendental_type(TranscendentalCache::TAN);
@@ -9244,7 +9040,7 @@ void HOptimizedGraphBuilder::GenerateMathTan(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateMathLog(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HCallStub* result =
new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
result->set_transcendental_type(TranscendentalCache::LOG);
@@ -9257,7 +9053,7 @@ void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
HValue* value = Pop();
- HValue* context = environment()->LookupContext();
+ HValue* context = environment()->context();
HInstruction* result =
HUnaryMathOperation::New(zone(), context, value, kMathSqrt);
return ast_context()->ReturnInstruction(result, call->id());
@@ -9266,7 +9062,7 @@ void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
// Check whether two RegExps are equivalent
void HOptimizedGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
- return Bailout("inlined runtime function: IsRegExpEquivalent");
+ return Bailout(kInlinedRuntimeFunctionIsRegExpEquivalent);
}
@@ -9280,18 +9076,18 @@ void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
- return Bailout("inlined runtime function: FastAsciiArrayJoin");
+ return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
}
// Support for generators.
void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
- return Bailout("inlined runtime function: GeneratorNext");
+ return Bailout(kInlinedRuntimeFunctionGeneratorNext);
}
void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
- return Bailout("inlined runtime function: GeneratorThrow");
+ return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
}
@@ -9567,7 +9363,7 @@ HEnvironment* HEnvironment::CopyForInlining(
if (undefined_receiver) {
inner->SetValueAt(0, undefined);
}
- inner->SetValueAt(arity + 1, LookupContext());
+ inner->SetValueAt(arity + 1, context());
for (int i = arity + 2; i < inner->length(); ++i) {
inner->SetValueAt(i, undefined);
}
diff --git a/deps/v8/src/hydrogen.h b/deps/v8/src/hydrogen.h
index 8484cd1eb6..6312a52376 100644
--- a/deps/v8/src/hydrogen.h
+++ b/deps/v8/src/hydrogen.h
@@ -367,7 +367,7 @@ class HGraph: public ZoneObject {
return NULL;
}
- bool Optimize(SmartArrayPointer<char>* bailout_reason);
+ bool Optimize(BailoutReason* bailout_reason);
#ifdef DEBUG
void Verify(bool do_full_verify) const;
@@ -583,7 +583,7 @@ class HEnvironment: public ZoneObject {
return result;
}
- HValue* LookupContext() const {
+ HValue* context() const {
// Return first special.
return Lookup(parameter_count());
}
@@ -990,57 +990,205 @@ class HGraphBuilder {
void Push(HValue* value) { environment()->Push(value); }
HValue* Pop() { return environment()->Pop(); }
+ virtual HValue* context() = 0;
+
// Adding instructions.
HInstruction* AddInstruction(HInstruction* instr);
template<class I>
- I* Add() { return static_cast<I*>(AddInstruction(new(zone()) I())); }
+ HInstruction* NewUncasted() { return I::New(zone(), context()); }
+
+ template<class I>
+ I* New() { return I::cast(NewUncasted<I>()); }
+
+ template<class I>
+ HInstruction* AddUncasted() { return AddInstruction(NewUncasted<I>());}
+
+ template<class I>
+ I* Add() { return I::cast(AddUncasted<I>());}
+
+ template<class I, class P1>
+ HInstruction* NewUncasted(P1 p1) {
+ return I::New(zone(), context(), p1);
+ }
+
+ template<class I, class P1>
+ I* New(P1 p1) { return I::cast(NewUncasted<I>(p1)); }
+
+ template<class I, class P1>
+ HInstruction* AddUncasted(P1 p1) {
+ HInstruction* result = AddInstruction(NewUncasted<I>(p1));
+ // Specializations must have their parameters properly casted
+ // to avoid landing here.
+ ASSERT(!result->IsReturn() && !result->IsSimulate() &&
+ !result->IsDeoptimize());
+ return result;
+ }
template<class I, class P1>
I* Add(P1 p1) {
- return static_cast<I*>(AddInstruction(new(zone()) I(p1)));
+ return I::cast(AddUncasted<I>(p1));
+ }
+
+ template<class I, class P1, class P2>
+ HInstruction* NewUncasted(P1 p1, P2 p2) {
+ return I::New(zone(), context(), p1, p2);
+ }
+
+ template<class I, class P1, class P2>
+ I* New(P1 p1, P2 p2) {
+ return I::cast(NewUncasted<I>(p1, p2));
+ }
+
+ template<class I, class P1, class P2>
+ HInstruction* AddUncasted(P1 p1, P2 p2) {
+ HInstruction* result = AddInstruction(NewUncasted<I>(p1, p2));
+ // Specializations must have their parameters properly casted
+ // to avoid landing here.
+ ASSERT(!result->IsSimulate());
+ return result;
}
template<class I, class P1, class P2>
I* Add(P1 p1, P2 p2) {
- return static_cast<I*>(AddInstruction(new(zone()) I(p1, p2)));
+ return static_cast<I*>(AddUncasted<I>(p1, p2));
+ }
+
+ template<class I, class P1, class P2, class P3>
+ HInstruction* NewUncasted(P1 p1, P2 p2, P3 p3) {
+ return I::New(zone(), context(), p1, p2, p3);
+ }
+
+ template<class I, class P1, class P2, class P3>
+ I* New(P1 p1, P2 p2, P3 p3) {
+ return I::cast(NewUncasted<I>(p1, p2, p3));
+ }
+
+ template<class I, class P1, class P2, class P3>
+ HInstruction* AddUncasted(P1 p1, P2 p2, P3 p3) {
+ return AddInstruction(NewUncasted<I>(p1, p2, p3));
}
template<class I, class P1, class P2, class P3>
I* Add(P1 p1, P2 p2, P3 p3) {
- return static_cast<I*>(AddInstruction(new(zone()) I(p1, p2, p3)));
+ return I::cast(AddUncasted<I>(p1, p2, p3));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4>
+ HInstruction* NewUncasted(P1 p1, P2 p2, P3 p3, P4 p4) {
+ return I::New(zone(), context(), p1, p2, p3, p4);
+ }
+
+ template<class I, class P1, class P2, class P3, class P4>
+ I* New(P1 p1, P2 p2, P3 p3, P4 p4) {
+ return I::cast(NewUncasted<I>(p1, p2, p3, p4));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4>
+ HInstruction* AddUncasted(P1 p1, P2 p2, P3 p3, P4 p4) {
+ return AddInstruction(NewUncasted<I>(p1, p2, p3, p4));
}
template<class I, class P1, class P2, class P3, class P4>
I* Add(P1 p1, P2 p2, P3 p3, P4 p4) {
- return static_cast<I*>(AddInstruction(new(zone()) I(p1, p2, p3, p4)));
+ return I::cast(AddUncasted<I>(p1, p2, p3, p4));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4, class P5>
+ HInstruction* NewUncasted(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
+ return I::New(zone(), context(), p1, p2, p3, p4, p5);
+ }
+
+ template<class I, class P1, class P2, class P3, class P4, class P5>
+ I* New(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
+ return I::cast(NewUncasted<I>(p1, p2, p3, p4, p5));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4, class P5>
+ HInstruction* AddUncasted(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
+ return AddInstruction(NewUncasted<I>(p1, p2, p3, p4, p5));
}
template<class I, class P1, class P2, class P3, class P4, class P5>
I* Add(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
- return static_cast<I*>(AddInstruction(new(zone()) I(p1, p2, p3, p4, p5)));
+ return I::cast(AddUncasted<I>(p1, p2, p3, p4, p5));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4, class P5, class P6>
+ HInstruction* NewUncasted(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6) {
+ return I::New(zone(), context(), p1, p2, p3, p4, p5, p6);
+ }
+
+ template<class I, class P1, class P2, class P3, class P4, class P5, class P6>
+ I* New(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6) {
+ return I::cast(NewUncasted<I>(p1, p2, p3, p4, p5, p6));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4, class P5, class P6>
+ HInstruction* AddUncasted(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6) {
+ return AddInstruction(NewUncasted<I>(p1, p2, p3, p4, p5, p6));
}
template<class I, class P1, class P2, class P3, class P4, class P5, class P6>
I* Add(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6) {
- return static_cast<I*>(AddInstruction(
- new(zone()) I(p1, p2, p3, p4, p5, p6)));
+ return I::cast(AddInstruction(NewUncasted<I>(p1, p2, p3, p4, p5, p6)));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4,
+ class P5, class P6, class P7>
+ HInstruction* NewUncasted(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7) {
+ return I::New(zone(), context(), p1, p2, p3, p4, p5, p6, p7);
+ }
+
+ template<class I, class P1, class P2, class P3, class P4,
+ class P5, class P6, class P7>
+ I* New(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7) {
+ return I::cast(NewUncasted<I>(p1, p2, p3, p4, p5, p6, p7));
+ }
+
+ template<class I, class P1, class P2, class P3,
+ class P4, class P5, class P6, class P7>
+ HInstruction* AddUncasted(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7) {
+ return AddInstruction(NewUncasted<I>(p1, p2, p3, p4, p5, p6, p7));
}
template<class I, class P1, class P2, class P3,
class P4, class P5, class P6, class P7>
I* Add(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7) {
- return static_cast<I*>(AddInstruction(
- new(zone()) I(p1, p2, p3, p4, p5, p6, p7)));
+ return I::cast(AddInstruction(NewUncasted<I>(p1, p2, p3, p4,
+ p5, p6, p7)));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4,
+ class P5, class P6, class P7, class P8>
+ HInstruction* NewUncasted(P1 p1, P2 p2, P3 p3, P4 p4,
+ P5 p5, P6 p6, P7 p7, P8 p8) {
+ return I::New(zone(), context(), p1, p2, p3, p4, p5, p6, p7, p8);
+ }
+
+ template<class I, class P1, class P2, class P3, class P4,
+ class P5, class P6, class P7, class P8>
+ I* New(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8) {
+ return I::cast(NewUncasted<I>(p1, p2, p3, p4, p5, p6, p7, p8));
+ }
+
+ template<class I, class P1, class P2, class P3, class P4,
+ class P5, class P6, class P7, class P8>
+ HInstruction* AddUncasted(P1 p1, P2 p2, P3 p3, P4 p4,
+ P5 p5, P6 p6, P7 p7, P8 p8) {
+ return AddInstruction(NewUncasted<I>(p1, p2, p3, p4, p5, p6, p7, p8));
}
template<class I, class P1, class P2, class P3, class P4,
class P5, class P6, class P7, class P8>
I* Add(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8) {
- return static_cast<I*>(AddInstruction(
- new(zone()) I(p1, p2, p3, p4, p5, p6, p7, p8)));
+ return I::cast(
+ AddInstruction(NewUncasted<I>(p1, p2, p3, p4, p5, p6, p7, p8)));
}
+ void AddSimulate(BailoutId id,
+ RemovableSimulate removable = FIXED_SIMULATE);
+
void IncrementInNoSideEffectsScope() {
no_side_effects_scope_count_++;
}
@@ -1089,16 +1237,6 @@ class HGraphBuilder {
LoadKeyedHoleMode load_mode,
KeyedAccessStoreMode store_mode);
- HLoadNamedField* AddLoad(
- HValue *object,
- HObjectAccess access,
- HValue *typecheck = NULL);
-
- HLoadNamedField* BuildLoadNamedField(
- HValue* object,
- HObjectAccess access,
- Representation representation);
-
HInstruction* AddExternalArrayElementAccess(
HValue* external_elements,
HValue* checked_key,
@@ -1117,13 +1255,16 @@ class HGraphBuilder {
LoadKeyedHoleMode load_mode,
KeyedAccessStoreMode store_mode);
- HLoadNamedField* BuildLoadNamedField(HValue* object, HObjectAccess access);
- HStoreNamedField* AddStore(HValue *object, HObjectAccess access, HValue *val);
+ HLoadNamedField* BuildLoadNamedField(
+ HValue* object,
+ HObjectAccess access,
+ HValue* typecheck = NULL);
+ HInstruction* BuildLoadStringLength(HValue* object, HValue* typecheck = NULL);
HStoreNamedField* AddStoreMapConstant(HValue *object, Handle<Map>);
HLoadNamedField* AddLoadElements(HValue *object, HValue *typecheck = NULL);
HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
- HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin, HValue* context);
+ HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin);
HValue* TruncateToNumber(HValue* value, Handle<Type>* expected);
@@ -1314,8 +1455,7 @@ class HGraphBuilder {
HGraphBuilder* builder_;
};
- HValue* BuildNewElementsCapacity(HValue* context,
- HValue* old_capacity);
+ HValue* BuildNewElementsCapacity(HValue* old_capacity);
void BuildNewSpaceArrayCheck(HValue* length,
ElementsKind kind);
@@ -1349,7 +1489,7 @@ class HGraphBuilder {
return JSArray::kPreallocatedArrayElements;
}
- HValue* EmitMapCode(HValue* context);
+ HValue* EmitMapCode();
HValue* EmitInternalMapCode();
HValue* EstablishEmptyArrayAllocationSize();
HValue* EstablishAllocationSize(HValue* length_node);
@@ -1364,16 +1504,14 @@ class HGraphBuilder {
HInnerAllocatedObject* elements_location_;
};
- HValue* BuildAllocateElements(HValue* context,
- ElementsKind kind,
+ HValue* BuildAllocateElements(ElementsKind kind,
HValue* capacity);
void BuildInitializeElementsHeader(HValue* elements,
ElementsKind kind,
HValue* capacity);
- HValue* BuildAllocateElementsAndInitializeElementsHeader(HValue* context,
- ElementsKind kind,
+ HValue* BuildAllocateElementsAndInitializeElementsHeader(ElementsKind kind,
HValue* capacity);
// array must have been allocated with enough room for
@@ -1394,30 +1532,24 @@ class HGraphBuilder {
HValue* length,
HValue* new_capacity);
- void BuildFillElementsWithHole(HValue* context,
- HValue* elements,
+ void BuildFillElementsWithHole(HValue* elements,
ElementsKind elements_kind,
HValue* from,
HValue* to);
- void BuildCopyElements(HValue* context,
- HValue* from_elements,
+ void BuildCopyElements(HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity);
- HValue* BuildCloneShallowArray(HContext* context,
- HValue* boilerplate,
+ HValue* BuildCloneShallowArray(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind,
int length);
- HInstruction* BuildUnaryMathOp(
- HValue* value, Handle<Type> type, Token::Value token);
-
void BuildCompareNil(
HValue* value,
Handle<Type> type,
@@ -1428,8 +1560,12 @@ class HGraphBuilder {
int previous_object_size,
HValue* payload);
- HInstruction* BuildGetNativeContext(HValue* context);
- HInstruction* BuildGetArrayFunction(HValue* context);
+ void BuildConstantMapCheck(Handle<JSObject> constant, CompilationInfo* info);
+ void BuildCheckPrototypeMaps(Handle<JSObject> prototype,
+ Handle<JSObject> holder);
+
+ HInstruction* BuildGetNativeContext();
+ HInstruction* BuildGetArrayFunction();
private:
HGraphBuilder();
@@ -1445,13 +1581,14 @@ class HGraphBuilder {
template<>
-inline HDeoptimize* HGraphBuilder::Add(Deoptimizer::BailoutType type) {
+inline HInstruction* HGraphBuilder::AddUncasted<HDeoptimize>(
+ Deoptimizer::BailoutType type) {
if (type == Deoptimizer::SOFT) {
isolate()->counters()->soft_deopts_requested()->Increment();
if (FLAG_always_opt) return NULL;
}
if (current_block()->IsDeoptimizing()) return NULL;
- HDeoptimize* instr = new(zone()) HDeoptimize(type);
+ HDeoptimize* instr = New<HDeoptimize>(type);
AddInstruction(instr);
if (type == Deoptimizer::SOFT) {
isolate()->counters()->soft_deopts_inserted()->Increment();
@@ -1463,8 +1600,16 @@ inline HDeoptimize* HGraphBuilder::Add(Deoptimizer::BailoutType type) {
template<>
-inline HSimulate* HGraphBuilder::Add(BailoutId id,
- RemovableSimulate removable) {
+inline HDeoptimize* HGraphBuilder::Add<HDeoptimize>(
+ Deoptimizer::BailoutType type) {
+ return static_cast<HDeoptimize*>(AddUncasted<HDeoptimize>(type));
+}
+
+
+template<>
+inline HInstruction* HGraphBuilder::AddUncasted<HSimulate>(
+ BailoutId id,
+ RemovableSimulate removable) {
HSimulate* instr = current_block()->CreateSimulate(id, removable);
AddInstruction(instr);
return instr;
@@ -1472,26 +1617,46 @@ inline HSimulate* HGraphBuilder::Add(BailoutId id,
template<>
-inline HSimulate* HGraphBuilder::Add(BailoutId id) {
- return Add<HSimulate>(id, FIXED_SIMULATE);
+inline HInstruction* HGraphBuilder::NewUncasted<HLoadNamedField>(
+ HValue* object, HObjectAccess access) {
+ return NewUncasted<HLoadNamedField>(object, access,
+ static_cast<HValue*>(NULL));
}
template<>
-inline HReturn* HGraphBuilder::Add(HValue* value) {
- HValue* context = environment()->LookupContext();
+inline HInstruction* HGraphBuilder::AddUncasted<HLoadNamedField>(
+ HValue* object, HObjectAccess access) {
+ return AddUncasted<HLoadNamedField>(object, access,
+ static_cast<HValue*>(NULL));
+}
+
+
+template<>
+inline HInstruction* HGraphBuilder::AddUncasted<HSimulate>(BailoutId id) {
+ return AddUncasted<HSimulate>(id, FIXED_SIMULATE);
+}
+
+
+template<>
+inline HInstruction* HGraphBuilder::AddUncasted<HReturn>(HValue* value) {
int num_parameters = graph()->info()->num_parameters();
- HValue* params = Add<HConstant>(num_parameters);
- HReturn* return_instruction = new(graph()->zone())
- HReturn(value, context, params);
+ HValue* params = AddUncasted<HConstant>(num_parameters);
+ HReturn* return_instruction = New<HReturn>(value, params);
current_block()->FinishExit(return_instruction);
return return_instruction;
}
template<>
-inline HReturn* HGraphBuilder::Add(HConstant* p1) {
- return Add<HReturn>(static_cast<HValue*>(p1));
+inline HInstruction* HGraphBuilder::AddUncasted<HReturn>(HConstant* value) {
+ return AddUncasted<HReturn>(static_cast<HValue*>(value));
+}
+
+
+template<>
+inline HInstruction* HGraphBuilder::NewUncasted<HContext>() {
+ return HContext::New(zone());
}
@@ -1560,7 +1725,9 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor {
bool inline_bailout() { return inline_bailout_; }
- void Bailout(const char* reason);
+ HValue* context() { return environment()->context(); }
+
+ void Bailout(BailoutReason reason);
HBasicBlock* CreateJoin(HBasicBlock* first,
HBasicBlock* second,
@@ -1641,8 +1808,6 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor {
void VisitDelete(UnaryOperation* expr);
void VisitVoid(UnaryOperation* expr);
void VisitTypeof(UnaryOperation* expr);
- void VisitSub(UnaryOperation* expr);
- void VisitBitNot(UnaryOperation* expr);
void VisitNot(UnaryOperation* expr);
void VisitComma(BinaryOperation* expr);
@@ -1835,8 +2000,7 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor {
Expression* sub_expr,
NilValue nil);
- HInstruction* BuildStringCharCodeAt(HValue* context,
- HValue* string,
+ HInstruction* BuildStringCharCodeAt(HValue* string,
HValue* index);
HInstruction* BuildBinaryOperation(BinaryOperation* expr,
HValue* left,
@@ -1892,9 +2056,6 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor {
void AddCheckMap(HValue* object, Handle<Map> map);
- void AddCheckMapsWithTransitions(HValue* object,
- Handle<Map> map);
-
void BuildStoreNamed(Expression* expression,
BailoutId id,
int position,
diff --git a/deps/v8/src/i18n.cc b/deps/v8/src/i18n.cc
new file mode 100644
index 0000000000..b2ccfd4985
--- /dev/null
+++ b/deps/v8/src/i18n.cc
@@ -0,0 +1,297 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// limitations under the License.
+
+#include "i18n.h"
+
+#include "unicode/calendar.h"
+#include "unicode/dtfmtsym.h"
+#include "unicode/dtptngen.h"
+#include "unicode/locid.h"
+#include "unicode/numsys.h"
+#include "unicode/smpdtfmt.h"
+#include "unicode/timezone.h"
+
+namespace v8 {
+namespace internal {
+
+namespace {
+
+icu::SimpleDateFormat* CreateICUDateFormat(
+ Isolate* isolate,
+ const icu::Locale& icu_locale,
+ Handle<Object> options) {
+ // Create time zone as specified by the user. We have to re-create time zone
+ // since calendar takes ownership.
+ icu::TimeZone* tz = NULL;
+ MaybeObject* maybe_object = options->GetProperty(
+ *isolate->factory()->NewStringFromAscii(CStrVector("timeZone")));
+ Object* timezone;
+ if (maybe_object->ToObject(&timezone) && timezone->IsString()) {
+ v8::String::Utf8Value utf8_timezone(
+ v8::Utils::ToLocal(Handle<String>(String::cast(timezone))));
+ icu::UnicodeString u_timezone(icu::UnicodeString::fromUTF8(*utf8_timezone));
+ tz = icu::TimeZone::createTimeZone(u_timezone);
+ } else {
+ tz = icu::TimeZone::createDefault();
+ }
+
+ // Create a calendar using locale, and apply time zone to it.
+ UErrorCode status = U_ZERO_ERROR;
+ icu::Calendar* calendar =
+ icu::Calendar::createInstance(tz, icu_locale, status);
+
+ // Make formatter from skeleton. Calendar and numbering system are added
+ // to the locale as Unicode extension (if they were specified at all).
+ icu::SimpleDateFormat* date_format = NULL;
+ Object* skeleton;
+ maybe_object = options->GetProperty(
+ *isolate->factory()->NewStringFromAscii(CStrVector("skeleton")));
+ if (maybe_object->ToObject(&skeleton) && skeleton->IsString()) {
+ v8::String::Utf8Value utf8_skeleton(
+ v8::Utils::ToLocal(Handle<String>(String::cast(skeleton))));
+ icu::UnicodeString u_skeleton(icu::UnicodeString::fromUTF8(*utf8_skeleton));
+ icu::DateTimePatternGenerator* generator =
+ icu::DateTimePatternGenerator::createInstance(icu_locale, status);
+ icu::UnicodeString pattern;
+ if (U_SUCCESS(status)) {
+ pattern = generator->getBestPattern(u_skeleton, status);
+ delete generator;
+ }
+
+ date_format = new icu::SimpleDateFormat(pattern, icu_locale, status);
+ if (U_SUCCESS(status)) {
+ date_format->adoptCalendar(calendar);
+ }
+ }
+
+ if (U_FAILURE(status)) {
+ delete calendar;
+ delete date_format;
+ date_format = NULL;
+ }
+
+ return date_format;
+}
+
+
+void SetResolvedSettings(Isolate* isolate,
+ const icu::Locale& icu_locale,
+ icu::SimpleDateFormat* date_format,
+ Handle<JSObject> resolved) {
+ UErrorCode status = U_ZERO_ERROR;
+ icu::UnicodeString pattern;
+ date_format->toPattern(pattern);
+ JSObject::SetProperty(
+ resolved,
+ isolate->factory()->NewStringFromAscii(CStrVector("pattern")),
+ isolate->factory()->NewStringFromTwoByte(
+ Vector<const uint16_t>(
+ reinterpret_cast<const uint16_t*>(pattern.getBuffer()),
+ pattern.length())),
+ NONE,
+ kNonStrictMode);
+
+ // Set time zone and calendar.
+ const icu::Calendar* calendar = date_format->getCalendar();
+ const char* calendar_name = calendar->getType();
+ JSObject::SetProperty(
+ resolved,
+ isolate->factory()->NewStringFromAscii(CStrVector("calendar")),
+ isolate->factory()->NewStringFromAscii(CStrVector(calendar_name)),
+ NONE,
+ kNonStrictMode);
+
+ const icu::TimeZone& tz = calendar->getTimeZone();
+ icu::UnicodeString time_zone;
+ tz.getID(time_zone);
+
+ icu::UnicodeString canonical_time_zone;
+ icu::TimeZone::getCanonicalID(time_zone, canonical_time_zone, status);
+ if (U_SUCCESS(status)) {
+ if (canonical_time_zone == UNICODE_STRING_SIMPLE("Etc/GMT")) {
+ JSObject::SetProperty(
+ resolved,
+ isolate->factory()->NewStringFromAscii(CStrVector("timeZone")),
+ isolate->factory()->NewStringFromAscii(CStrVector("UTC")),
+ NONE,
+ kNonStrictMode);
+ } else {
+ JSObject::SetProperty(
+ resolved,
+ isolate->factory()->NewStringFromAscii(CStrVector("timeZone")),
+ isolate->factory()->NewStringFromTwoByte(
+ Vector<const uint16_t>(
+ reinterpret_cast<const uint16_t*>(
+ canonical_time_zone.getBuffer()),
+ canonical_time_zone.length())),
+ NONE,
+ kNonStrictMode);
+ }
+ }
+
+ // Ugly hack. ICU doesn't expose numbering system in any way, so we have
+ // to assume that for given locale NumberingSystem constructor produces the
+ // same digits as NumberFormat/Calendar would.
+ status = U_ZERO_ERROR;
+ icu::NumberingSystem* numbering_system =
+ icu::NumberingSystem::createInstance(icu_locale, status);
+ if (U_SUCCESS(status)) {
+ const char* ns = numbering_system->getName();
+ JSObject::SetProperty(
+ resolved,
+ isolate->factory()->NewStringFromAscii(CStrVector("numberingSystem")),
+ isolate->factory()->NewStringFromAscii(CStrVector(ns)),
+ NONE,
+ kNonStrictMode);
+ } else {
+ JSObject::SetProperty(
+ resolved,
+ isolate->factory()->NewStringFromAscii(CStrVector("numberingSystem")),
+ isolate->factory()->undefined_value(),
+ NONE,
+ kNonStrictMode);
+ }
+ delete numbering_system;
+
+ // Set the locale
+ char result[ULOC_FULLNAME_CAPACITY];
+ status = U_ZERO_ERROR;
+ uloc_toLanguageTag(
+ icu_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status);
+ if (U_SUCCESS(status)) {
+ JSObject::SetProperty(
+ resolved,
+ isolate->factory()->NewStringFromAscii(CStrVector("locale")),
+ isolate->factory()->NewStringFromAscii(CStrVector(result)),
+ NONE,
+ kNonStrictMode);
+ } else {
+ // This would never happen, since we got the locale from ICU.
+ JSObject::SetProperty(
+ resolved,
+ isolate->factory()->NewStringFromAscii(CStrVector("locale")),
+ isolate->factory()->NewStringFromAscii(CStrVector("und")),
+ NONE,
+ kNonStrictMode);
+ }
+}
+
+
+template<int internal_fields, EternalHandles::SingletonHandle field>
+Handle<ObjectTemplateInfo> GetEternal(Isolate* isolate) {
+ if (isolate->eternal_handles()->Exists(field)) {
+ return Handle<ObjectTemplateInfo>::cast(
+ isolate->eternal_handles()->GetSingleton(field));
+ }
+ v8::Local<v8::ObjectTemplate> raw_template(v8::ObjectTemplate::New());
+ raw_template->SetInternalFieldCount(internal_fields);
+ return Handle<ObjectTemplateInfo>::cast(
+ isolate->eternal_handles()->CreateSingleton(
+ isolate,
+ *v8::Utils::OpenHandle(*raw_template),
+ field));
+}
+
+} // namespace
+
+
+// static
+Handle<ObjectTemplateInfo> I18N::GetTemplate(Isolate* isolate) {
+ return GetEternal<1, i::EternalHandles::I18N_TEMPLATE_ONE>(isolate);
+}
+
+
+// static
+Handle<ObjectTemplateInfo> I18N::GetTemplate2(Isolate* isolate) {
+ return GetEternal<2, i::EternalHandles::I18N_TEMPLATE_TWO>(isolate);
+}
+
+
+// static
+icu::SimpleDateFormat* DateFormat::InitializeDateTimeFormat(
+ Isolate* isolate,
+ Handle<String> locale,
+ Handle<JSObject> options,
+ Handle<JSObject> resolved) {
+ // Convert BCP47 into ICU locale format.
+ UErrorCode status = U_ZERO_ERROR;
+ icu::Locale icu_locale;
+ char icu_result[ULOC_FULLNAME_CAPACITY];
+ int icu_length = 0;
+ v8::String::Utf8Value bcp47_locale(v8::Utils::ToLocal(locale));
+ if (bcp47_locale.length() != 0) {
+ uloc_forLanguageTag(*bcp47_locale, icu_result, ULOC_FULLNAME_CAPACITY,
+ &icu_length, &status);
+ if (U_FAILURE(status) || icu_length == 0) {
+ return NULL;
+ }
+ icu_locale = icu::Locale(icu_result);
+ }
+
+ icu::SimpleDateFormat* date_format = CreateICUDateFormat(
+ isolate, icu_locale, options);
+ if (!date_format) {
+ // Remove extensions and try again.
+ icu::Locale no_extension_locale(icu_locale.getBaseName());
+ date_format = CreateICUDateFormat(isolate, no_extension_locale, options);
+
+ // Set resolved settings (pattern, numbering system, calendar).
+ SetResolvedSettings(isolate, no_extension_locale, date_format, resolved);
+ } else {
+ SetResolvedSettings(isolate, icu_locale, date_format, resolved);
+ }
+
+ return date_format;
+}
+
+
+icu::SimpleDateFormat* DateFormat::UnpackDateFormat(
+ Isolate* isolate,
+ Handle<JSObject> obj) {
+ if (obj->HasLocalProperty(
+ *isolate->factory()->NewStringFromAscii(CStrVector("dateFormat")))) {
+ return reinterpret_cast<icu::SimpleDateFormat*>(
+ obj->GetInternalField(0));
+ }
+
+ return NULL;
+}
+
+
+void DateFormat::DeleteDateFormat(v8::Isolate* isolate,
+ Persistent<v8::Object>* object,
+ void* param) {
+ // First delete the hidden C++ object.
+ delete reinterpret_cast<icu::SimpleDateFormat*>(Handle<JSObject>::cast(
+ v8::Utils::OpenPersistent(object))->GetInternalField(0));
+
+ // Then dispose of the persistent handle to JS object.
+ object->Dispose(isolate);
+}
+
+} } // namespace v8::internal
diff --git a/deps/v8/src/extensions/i18n/date-format.h b/deps/v8/src/i18n.h
index daa5964e25..37c57b135b 100644
--- a/deps/v8/src/extensions/i18n/date-format.h
+++ b/deps/v8/src/i18n.h
@@ -26,8 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// limitations under the License.
-#ifndef V8_EXTENSIONS_I18N_DATE_FORMAT_H_
-#define V8_EXTENSIONS_I18N_DATE_FORMAT_H_
+#ifndef V8_I18N_H_
+#define V8_I18N_H_
#include "unicode/uversion.h"
#include "v8.h"
@@ -36,36 +36,44 @@ namespace U_ICU_NAMESPACE {
class SimpleDateFormat;
}
-namespace v8_i18n {
+namespace v8 {
+namespace internal {
-class DateFormat {
+class I18N {
public:
- static void JSCreateDateTimeFormat(
- const v8::FunctionCallbackInfo<v8::Value>& args);
+ // Creates an ObjectTemplate with one internal field.
+ static Handle<ObjectTemplateInfo> GetTemplate(Isolate* isolate);
+
+ // Creates an ObjectTemplate with two internal fields.
+ static Handle<ObjectTemplateInfo> GetTemplate2(Isolate* isolate);
+
+ private:
+ I18N();
+};
- // Helper methods for various bindings.
+class DateFormat {
+ public:
+ // Create a formatter for the specificied locale and options. Returns the
+ // resolved settings for the locale / options.
+ static icu::SimpleDateFormat* InitializeDateTimeFormat(
+ Isolate* isolate,
+ Handle<String> locale,
+ Handle<JSObject> options,
+ Handle<JSObject> resolved);
// Unpacks date format object from corresponding JavaScript object.
- static icu::SimpleDateFormat* UnpackDateFormat(
- v8::Handle<v8::Object> obj);
+ static icu::SimpleDateFormat* UnpackDateFormat(Isolate* isolate,
+ Handle<JSObject> obj);
// Release memory we allocated for the DateFormat once the JS object that
// holds the pointer gets garbage collected.
static void DeleteDateFormat(v8::Isolate* isolate,
- v8::Persistent<v8::Object>* object,
+ Persistent<v8::Object>* object,
void* param);
-
- // Formats date and returns corresponding string.
- static void JSInternalFormat(const v8::FunctionCallbackInfo<v8::Value>& args);
-
- // Parses date and returns corresponding Date object or undefined if parse
- // failed.
- static void JSInternalParse(const v8::FunctionCallbackInfo<v8::Value>& args);
-
private:
DateFormat();
};
-} // namespace v8_i18n
+} } // namespace v8::internal
-#endif // V8_EXTENSIONS_I18N_DATE_FORMAT_H_
+#endif // V8_I18N_H_
diff --git a/deps/v8/src/ia32/assembler-ia32.cc b/deps/v8/src/ia32/assembler-ia32.cc
index e0ae006655..7bea373025 100644
--- a/deps/v8/src/ia32/assembler-ia32.cc
+++ b/deps/v8/src/ia32/assembler-ia32.cc
@@ -1227,6 +1227,10 @@ void Assembler::test_b(Register reg, const Operand& op) {
void Assembler::test(const Operand& op, const Immediate& imm) {
+ if (op.is_reg_only()) {
+ test(op.reg(), imm);
+ return;
+ }
EnsureSpace ensure_space(this);
EMIT(0xF7);
emit_operand(eax, op);
diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc
index b90a17f6c3..59124eab75 100644
--- a/deps/v8/src/ia32/builtins-ia32.cc
+++ b/deps/v8/src/ia32/builtins-ia32.cc
@@ -241,7 +241,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
if (FLAG_debug_code) {
__ cmp(esi, edi);
__ Assert(less_equal,
- "Unexpected number of pre-allocated property fields.");
+ kUnexpectedNumberOfPreAllocatedPropertyFields);
}
__ InitializeFieldsWithFiller(ecx, esi, edx);
__ mov(edx, factory->one_pointer_filler_map());
@@ -272,7 +272,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ sub(edx, ecx);
// Done if no extra properties are to be allocated.
__ j(zero, &allocated);
- __ Assert(positive, "Property allocation count failed.");
+ __ Assert(positive, kPropertyAllocationCountFailed);
// Scale the number of elements by pointer size and add the header for
// FixedArrays to the start of the next object calculation from above.
@@ -654,7 +654,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
__ ret(2 * kPointerSize); // Remove state, eax.
__ bind(&not_tos_eax);
- __ Abort("no cases left");
+ __ Abort(kNoCasesLeft);
}
@@ -1033,9 +1033,9 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
- __ Assert(not_zero, "Unexpected initial map for InternalArray function");
+ __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
__ CmpObjectType(ebx, MAP_TYPE, ecx);
- __ Assert(equal, "Unexpected initial map for InternalArray function");
+ __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
}
// Run the native code for the InternalArray function called as a normal
@@ -1062,9 +1062,9 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
- __ Assert(not_zero, "Unexpected initial map for Array function");
+ __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(ebx, MAP_TYPE, ecx);
- __ Assert(equal, "Unexpected initial map for Array function");
+ __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
}
// Run the native code for the Array function called as a normal function.
@@ -1092,7 +1092,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
- __ Assert(equal, "Unexpected String function");
+ __ Assert(equal, kUnexpectedStringFunction);
}
// Load the first argument into eax and get rid of the rest
@@ -1137,9 +1137,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ cmpb(FieldOperand(ecx, Map::kInstanceSizeOffset),
JSValue::kSize >> kPointerSizeLog2);
- __ Assert(equal, "Unexpected string wrapper instance size");
+ __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
__ cmpb(FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset), 0);
- __ Assert(equal, "Unexpected unused properties of string wrapper");
+ __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
}
__ mov(FieldOperand(eax, HeapObject::kMapOffset), ecx);
diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc
index 5789f49216..8721656634 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.cc
+++ b/deps/v8/src/ia32/code-stubs-ia32.cc
@@ -250,17 +250,6 @@ void ToBooleanStub::InitializeInterfaceDescriptor(
}
-void UnaryOpStub::InitializeInterfaceDescriptor(
- Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
- static Register registers[] = { eax };
- descriptor->register_param_count_ = 1;
- descriptor->register_params_ = registers;
- descriptor->deoptimization_handler_ =
- FUNCTION_ADDR(UnaryOpIC_Miss);
-}
-
-
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -511,9 +500,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
Label after_sentinel;
__ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
if (FLAG_debug_code) {
- const char* message = "Expected 0 as a Smi sentinel";
__ cmp(ecx, 0);
- __ Assert(equal, message);
+ __ Assert(equal, kExpected0AsASmiSentinel);
}
__ mov(ecx, GlobalObjectOperand());
__ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
@@ -3469,9 +3457,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
if (FLAG_debug_code) {
__ test(ecx, Immediate(kSmiTagMask));
- __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
+ __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
__ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
- __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
+ __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
}
// ecx: RegExp data (FixedArray)
@@ -3831,7 +3819,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ test_b(ebx, kIsIndirectStringMask);
- __ Assert(zero, "external string expected, but not found");
+ __ Assert(zero, kExternalStringExpectedButNotFound);
}
__ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
// Move the pointer so that offset-wise, it looks like a sequential string.
@@ -4326,7 +4314,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
edi);
}
#ifdef DEBUG
- __ Abort("Unexpected fall-through from string comparison");
+ __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif
__ bind(&check_unequal_objects);
@@ -5085,9 +5073,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
- __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
+ __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
__ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
- __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
+ __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
}
__ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
__ mov(Operand(scratch, 0), map);
@@ -5120,7 +5108,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
- __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
+ __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
@@ -5142,7 +5130,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
- __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
+ __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
@@ -5255,7 +5243,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
// Index is not a smi.
__ bind(&index_not_smi_);
@@ -5305,7 +5293,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort("Unexpected fallthrough from CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
@@ -5340,7 +5328,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -5352,7 +5340,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort("Unexpected fallthrough from CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
@@ -7482,7 +7470,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -7545,7 +7533,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -7610,9 +7598,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ecx, Immediate(kSmiTagMask));
- __ Assert(not_zero, "Unexpected initial map for Array function");
+ __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(ecx, MAP_TYPE, ecx);
- __ Assert(equal, "Unexpected initial map for Array function");
+ __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in ebx or a valid cell
Label okay_here;
@@ -7620,7 +7608,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ cmp(ebx, Immediate(undefined_sentinel));
__ j(equal, &okay_here);
__ cmp(FieldOperand(ebx, 0), Immediate(cell_map));
- __ Assert(equal, "Expected property cell in register ebx");
+ __ Assert(equal, kExpectedPropertyCellInRegisterEbx);
__ bind(&okay_here);
}
@@ -7724,9 +7712,9 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ecx, Immediate(kSmiTagMask));
- __ Assert(not_zero, "Unexpected initial map for Array function");
+ __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(ecx, MAP_TYPE, ecx);
- __ Assert(equal, "Unexpected initial map for Array function");
+ __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
}
// Figure out the right elements kind
@@ -7745,7 +7733,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ j(equal, &done);
__ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
__ Assert(equal,
- "Invalid ElementsKind for InternalArray or InternalPackedArray");
+ kInvalidElementsKindForInternalArrayOrInternalPackedArray);
__ bind(&done);
}
diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc
index f488718dc6..28b0f4ad82 100644
--- a/deps/v8/src/ia32/codegen-ia32.cc
+++ b/deps/v8/src/ia32/codegen-ia32.cc
@@ -779,7 +779,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
if (FLAG_debug_code) {
__ cmp(ebx, masm->isolate()->factory()->the_hole_value());
- __ Assert(equal, "object found in smi-only array");
+ __ Assert(equal, kObjectFoundInSmiOnlyArray);
}
if (CpuFeatures::IsSupported(SSE2)) {
@@ -1011,7 +1011,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ test(result, Immediate(kIsIndirectStringMask));
- __ Assert(zero, "external string expected, but not found");
+ __ Assert(zero, kExternalStringExpectedButNotFound);
}
// Rule out short external strings.
STATIC_CHECK(kShortExternalStringTag != 0);
diff --git a/deps/v8/src/ia32/debug-ia32.cc b/deps/v8/src/ia32/debug-ia32.cc
index 68199f905b..fd703dcc0c 100644
--- a/deps/v8/src/ia32/debug-ia32.cc
+++ b/deps/v8/src/ia32/debug-ia32.cc
@@ -128,7 +128,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ test(reg, Immediate(0xc0000000));
- __ Assert(zero, "Unable to encode value as smi");
+ __ Assert(zero, kUnableToEncodeValueAsSmi);
}
__ SmiTag(reg);
__ push(reg);
diff --git a/deps/v8/src/ia32/deoptimizer-ia32.cc b/deps/v8/src/ia32/deoptimizer-ia32.cc
index 48968064aa..a9bd8c50b7 100644
--- a/deps/v8/src/ia32/deoptimizer-ia32.cc
+++ b/deps/v8/src/ia32/deoptimizer-ia32.cc
@@ -625,7 +625,7 @@ void Deoptimizer::EntryGenerator::Generate() {
__ pop(ecx);
if (FLAG_debug_code) {
__ cmp(ecx, Immediate(kAlignmentZapValue));
- __ Assert(equal, "alignment marker expected");
+ __ Assert(equal, kAlignmentMarkerExpected);
}
__ bind(&no_padding);
} else {
diff --git a/deps/v8/src/ia32/frames-ia32.h b/deps/v8/src/ia32/frames-ia32.h
index 6223748d64..8606125101 100644
--- a/deps/v8/src/ia32/frames-ia32.h
+++ b/deps/v8/src/ia32/frames-ia32.h
@@ -136,6 +136,11 @@ inline Object* JavaScriptFrame::function_slot_object() const {
}
+inline void StackHandler::SetFp(Address slot, Address fp) {
+ Memory::Address_at(slot) = fp;
+}
+
+
} } // namespace v8::internal
#endif // V8_IA32_FRAMES_IA32_H_
diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc
index 8f11acc1be..f08a269e85 100644
--- a/deps/v8/src/ia32/full-codegen-ia32.cc
+++ b/deps/v8/src/ia32/full-codegen-ia32.cc
@@ -745,9 +745,9 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// Check that we're not inside a with or catch context.
__ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
__ cmp(ebx, isolate()->factory()->with_context_map());
- __ Check(not_equal, "Declaration in with context.");
+ __ Check(not_equal, kDeclarationInWithContext);
__ cmp(ebx, isolate()->factory()->catch_context_map());
- __ Check(not_equal, "Declaration in catch context.");
+ __ Check(not_equal, kDeclarationInCatchContext);
}
}
@@ -2169,7 +2169,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
__ Push(Smi::FromInt(resume_mode));
__ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
// Not reached: the runtime call returns elsewhere.
- __ Abort("Generator failed to resume.");
+ __ Abort(kGeneratorFailedToResume);
// Throw error if we attempt to operate on a running generator.
__ bind(&wrong_state);
@@ -2468,7 +2468,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// Check for an uninitialized let binding.
__ mov(edx, location);
__ cmp(edx, isolate()->factory()->the_hole_value());
- __ Check(equal, "Let binding re-initialization.");
+ __ Check(equal, kLetBindingReInitialization);
}
// Perform the assignment.
__ mov(location, eax);
@@ -3430,15 +3430,15 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
Register value,
uint32_t encoding_mask) {
__ test(index, Immediate(kSmiTagMask));
- __ Check(zero, "Non-smi index");
+ __ Check(zero, kNonSmiIndex);
__ test(value, Immediate(kSmiTagMask));
- __ Check(zero, "Non-smi value");
+ __ Check(zero, kNonSmiValue);
__ cmp(index, FieldOperand(string, String::kLengthOffset));
- __ Check(less, "Index is too large");
+ __ Check(less, kIndexIsTooLarge);
__ cmp(index, Immediate(Smi::FromInt(0)));
- __ Check(greater_equal, "Index is negative");
+ __ Check(greater_equal, kIndexIsNegative);
__ push(value);
__ mov(value, FieldOperand(string, HeapObject::kMapOffset));
@@ -3446,7 +3446,7 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
__ and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
__ cmp(value, Immediate(encoding_mask));
- __ Check(equal, "Unexpected string type");
+ __ Check(equal, kUnexpectedStringType);
__ pop(value);
}
@@ -3818,7 +3818,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort("Attempt to use undefined cache.");
+ __ Abort(kAttemptToUseUndefinedCache);
__ mov(eax, isolate()->factory()->undefined_value());
context()->Plug(eax);
return;
@@ -4000,7 +4000,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// scratch, string_length, elements.
if (generate_debug_code_) {
__ cmp(index, array_length);
- __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
+ __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
}
__ bind(&loop);
__ mov(string, FieldOperand(elements,
@@ -4347,34 +4347,12 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
break;
}
- case Token::SUB:
- EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
- break;
-
- case Token::BIT_NOT:
- EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
- break;
-
default:
UNREACHABLE();
}
}
-void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
- const char* comment) {
- Comment cmt(masm_, comment);
- UnaryOpStub stub(expr->op());
- // UnaryOpStub expects the argument to be in the
- // accumulator register eax.
- VisitForAccumulatorValue(expr->expression());
- SetSourcePosition(expr->position());
- CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
- expr->UnaryOperationFeedbackId());
- context()->Plug(eax);
-}
-
-
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc
index bf0c80b2b4..1e0f14e768 100644
--- a/deps/v8/src/ia32/ic-ia32.cc
+++ b/deps/v8/src/ia32/ic-ia32.cc
@@ -483,7 +483,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// based on 32 bits of the map pointer and the string hash.
if (FLAG_debug_code) {
__ cmp(eax, FieldOperand(edx, HeapObject::kMapOffset));
- __ Check(equal, "Map is no longer in eax.");
+ __ Check(equal, kMapIsNoLongerInEax);
}
__ mov(ebx, eax); // Keep the map around for later.
__ shr(eax, KeyedLookupCache::kMapHashShift);
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc
index 3ddad068bf..061ec9b6d3 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc
@@ -113,7 +113,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LCodeGen::Abort(const char* reason) {
+void LCodeGen::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -137,6 +137,16 @@ void LCodeGen::Comment(const char* format, ...) {
}
+#ifdef _MSC_VER
+void LCodeGen::MakeSureStackPagesMapped(int offset) {
+ const int kPageSize = 4 * KB;
+ for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
+ __ mov(Operand(esp, offset), eax);
+ }
+}
+#endif
+
+
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
@@ -210,7 +220,7 @@ bool LCodeGen::GeneratePrologue() {
dynamic_frame_alignment_ &&
FLAG_debug_code) {
__ test(esp, Immediate(kPointerSize));
- __ Assert(zero, "frame is expected to be aligned");
+ __ Assert(zero, kFrameIsExpectedToBeAligned);
}
// Reserve space for the stack slots needed by the code.
@@ -226,6 +236,9 @@ bool LCodeGen::GeneratePrologue() {
} else {
if (FLAG_debug_code) {
__ sub(Operand(esp), Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+ MakeSureStackPagesMapped(slots * kPointerSize);
+#endif
__ push(eax);
__ mov(Operand(eax), Immediate(slots));
Label loop;
@@ -238,15 +251,7 @@ bool LCodeGen::GeneratePrologue() {
} else {
__ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
- // On windows, you may not access the stack more than one page below
- // the most recently mapped page. To make the allocated area randomly
- // accessible, we write to each page in turn (the value is irrelevant).
- const int kPageSize = 4 * KB;
- for (int offset = slots * kPointerSize - kPageSize;
- offset > 0;
- offset -= kPageSize) {
- __ mov(Operand(esp, offset), eax);
- }
+ MakeSureStackPagesMapped(slots * kPointerSize);
#endif
}
@@ -877,7 +882,7 @@ void LCodeGen::LoadContextFromDeferred(LOperand* context) {
} else if (context->IsConstantOperand()) {
HConstant* constant =
chunk_->LookupConstant(LConstantOperand::cast(context));
- __ LoadHeapObject(esi, Handle<Context>::cast(constant->handle()));
+ __ LoadObject(esi, Handle<Object>::cast(constant->handle()));
} else {
UNREACHABLE();
}
@@ -943,7 +948,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort("bailout was not prepared");
+ Abort(kBailoutWasNotPrepared);
return;
}
@@ -1674,8 +1679,9 @@ void LCodeGen::DoBitI(LBitI* instr) {
ASSERT(left->IsRegister());
if (right->IsConstantOperand()) {
- int right_operand = ToRepresentation(LConstantOperand::cast(right),
- instr->hydrogen()->representation());
+ int32_t right_operand =
+ ToRepresentation(LConstantOperand::cast(right),
+ instr->hydrogen()->representation());
switch (instr->op()) {
case Token::BIT_AND:
__ and_(ToRegister(left), right_operand);
@@ -1684,7 +1690,11 @@ void LCodeGen::DoBitI(LBitI* instr) {
__ or_(ToRegister(left), right_operand);
break;
case Token::BIT_XOR:
- __ xor_(ToRegister(left), right_operand);
+ if (right_operand == int32_t(~0)) {
+ __ not_(ToRegister(left));
+ } else {
+ __ xor_(ToRegister(left), right_operand);
+ }
break;
default:
UNREACHABLE();
@@ -1771,7 +1781,9 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
if (shift_count != 0) {
if (instr->hydrogen_value()->representation().IsSmi() &&
instr->can_deopt()) {
- __ shl(ToRegister(left), shift_count - 1);
+ if (shift_count != 1) {
+ __ shl(ToRegister(left), shift_count - 1);
+ }
__ SmiTag(ToRegister(left));
DeoptimizeIf(overflow, instr->environment());
} else {
@@ -1969,7 +1981,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ cmp(value, Immediate(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(equal, "Unexpected string type");
+ __ Check(equal, kUnexpectedStringType);
__ pop(value);
}
@@ -1983,13 +1995,6 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
}
-void LCodeGen::DoBitNotI(LBitNotI* instr) {
- LOperand* input = instr->value();
- ASSERT(input->Equals(instr->result()));
- __ not_(ToRegister(input));
-}
-
-
void LCodeGen::DoThrow(LThrow* instr) {
__ push(ToOperand(instr->value()));
ASSERT(ToRegister(instr->context()).is(esi));
@@ -2856,7 +2861,7 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) {
__ cmp(Operand(esp,
(parameter_count + extra_value_count) * kPointerSize),
Immediate(kAlignmentZapValue));
- __ Assert(equal, "expected alignment marker");
+ __ Assert(equal, kExpectedAlignmentMarker);
}
__ Ret((parameter_count + extra_value_count) * kPointerSize, ecx);
} else {
@@ -2869,7 +2874,7 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) {
__ cmp(Operand(esp, reg, times_pointer_size,
extra_value_count * kPointerSize),
Immediate(kAlignmentZapValue));
- __ Assert(equal, "expected alignment marker");
+ __ Assert(equal, kExpectedAlignmentMarker);
}
// emit code to restore stack based on instr->parameter_count()
@@ -2989,20 +2994,6 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
}
-void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
- Register object = ToRegister(instr->object());
- Register temp = ToRegister(instr->temp());
- ExternalReference sites_list_address = instr->GetReference(isolate());
-
- __ mov(temp, Immediate(sites_list_address));
- __ mov(temp, Operand(temp, 0));
- __ mov(FieldOperand(object, instr->hydrogen()->store_field().offset()),
- temp);
- __ mov(temp, Immediate(sites_list_address));
- __ mov(Operand(temp, 0), object);
-}
-
-
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
@@ -3163,9 +3154,6 @@ static bool CompactEmit(SmallMapList* list,
int i,
Isolate* isolate) {
Handle<Map> map = list->at(i);
- // If the map has ElementsKind transitions, we will generate map checks
- // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
- if (map->HasElementsTransition()) return false;
LookupResult lookup(isolate);
map->LookupDescriptor(NULL, *name, &lookup);
return lookup.IsField() || lookup.IsConstant();
@@ -3457,7 +3445,7 @@ Operand LCodeGen::BuildFastArrayOperand(
if (key->IsConstantOperand()) {
int constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
- Abort("array index constant value too big");
+ Abort(kArrayIndexConstantValueTooBig);
}
return Operand(elements_pointer_reg,
((constant_value + additional_index) << shift_size)
@@ -3831,7 +3819,7 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) {
__ xorps(scratch, scratch);
__ subsd(scratch, input_reg);
__ pand(input_reg, scratch);
- } else if (r.IsInteger32()) {
+ } else if (r.IsSmiOrInteger32()) {
EmitIntegerMathAbs(instr);
} else { // Tagged case.
DeferredMathAbsTaggedHeapNumber* deferred =
@@ -4357,6 +4345,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
int offset = access.offset();
if (access.IsExternalMemory()) {
+ ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
MemOperand operand = instr->object()->IsConstantOperand()
? MemOperand::StaticVariable(
ToExternalReference(LConstantOperand::cast(instr->object())))
@@ -4876,13 +4865,6 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
}
-void LCodeGen::DoStringLength(LStringLength* instr) {
- Register string = ToRegister(instr->string());
- Register result = ToRegister(instr->result());
- __ mov(result, FieldOperand(string, String::kLengthOffset));
-}
-
-
void LCodeGen::DoStringAdd(LStringAdd* instr) {
EmitPushTaggedOperand(instr->left());
EmitPushTaggedOperand(instr->right());
@@ -5809,31 +5791,68 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
}
-void LCodeGen::DoCheckMapCommon(Register reg,
- Handle<Map> map,
- LInstruction* instr) {
- Label success;
- __ CompareMap(reg, map, &success);
- DeoptimizeIf(not_equal, instr->environment());
- __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
+ {
+ PushSafepointRegistersScope scope(this);
+ __ push(object);
+ __ xor_(esi, esi);
+ __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
+
+ __ test(eax, Immediate(kSmiTagMask));
+ }
+ DeoptimizeIf(zero, instr->environment());
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+ class DeferredCheckMaps: public LDeferredCode {
+ public:
+ DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
+ : LDeferredCode(codegen), instr_(instr), object_(object) {
+ SetExit(check_maps());
+ }
+ virtual void Generate() {
+ codegen()->DoDeferredInstanceMigration(instr_, object_);
+ }
+ Label* check_maps() { return &check_maps_; }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LCheckMaps* instr_;
+ Label check_maps_;
+ Register object_;
+ };
+
if (instr->hydrogen()->CanOmitMapChecks()) return;
+
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
+
+ DeferredCheckMaps* deferred = NULL;
+ if (instr->hydrogen()->has_migration_target()) {
+ deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+ __ bind(deferred->check_maps());
+ }
+
+ Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(reg, map, &success);
__ j(equal, &success);
}
+
Handle<Map> map = map_set->last();
- DoCheckMapCommon(reg, map, instr);
+ __ CompareMap(reg, map, &success);
+ if (instr->hydrogen()->has_migration_target()) {
+ __ j(not_equal, deferred->entry());
+ } else {
+ DeoptimizeIf(not_equal, instr->environment());
+ }
+
__ bind(&success);
}
@@ -6010,22 +6029,6 @@ void LCodeGen::DoClampTToUint8NoSSE2(LClampTToUint8NoSSE2* instr) {
}
-void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
- Register reg = ToRegister(instr->temp());
-
- ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
- ZoneList<Handle<Map> >* maps = instr->maps();
-
- ASSERT(prototypes->length() == maps->length());
-
- for (int i = 0; i < prototypes->length(); i++) {
- __ LoadHeapObject(reg, prototypes->at(i));
- DoCheckMapCommon(reg, maps->at(i), instr);
- }
-}
-
-
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
@@ -6048,10 +6051,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
}
@@ -6103,11 +6108,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
__ push(Immediate(Smi::FromInt(size)));
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(
Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context());
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(
Runtime::kAllocateInOldDataSpace, 1, instr, instr->context());
} else {
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.h b/deps/v8/src/ia32/lithium-codegen-ia32.h
index 27295da8b8..c9a78997f1 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.h
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.h
@@ -163,8 +163,7 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
-
- void DoCheckMapCommon(Register reg, Handle<Map> map, LInstruction* instr);
+ void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -212,7 +211,7 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
@@ -295,7 +294,7 @@ class LCodeGen BASE_EMBEDDED {
Register ToRegister(int index) const;
XMMRegister ToDoubleRegister(int index) const;
X87Register ToX87Register(int index) const;
- int ToRepresentation(LConstantOperand* op, const Representation& r) const;
+ int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
ExternalReference ToExternalReference(LConstantOperand* op) const;
@@ -408,6 +407,14 @@ class LCodeGen BASE_EMBEDDED {
int X87ArrayIndex(X87Register reg);
int x87_st2idx(int pos);
+#ifdef _MSC_VER
+ // On windows, you may not access the stack more than one page below
+ // the most recently mapped page. To make the allocated area randomly
+ // accessible, we write an arbitrary value to each page in range
+ // esp + offset - page_size .. esp in turn.
+ void MakeSureStackPagesMapped(int offset);
+#endif
+
Zone* zone_;
LPlatformChunk* const chunk_;
MacroAssembler* const masm_;
diff --git a/deps/v8/src/ia32/lithium-ia32.cc b/deps/v8/src/ia32/lithium-ia32.cc
index 8c8103f619..52f39d4244 100644
--- a/deps/v8/src/ia32/lithium-ia32.cc
+++ b/deps/v8/src/ia32/lithium-ia32.cc
@@ -302,24 +302,6 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
}
-ExternalReference LLinkObjectInList::GetReference(Isolate* isolate) {
- switch (hydrogen()->known_list()) {
- case HLinkObjectInList::ALLOCATION_SITE_LIST:
- return ExternalReference::allocation_sites_list_address(isolate);
- }
-
- UNREACHABLE();
- // Return a dummy value
- return ExternalReference::isolate_address(isolate);
-}
-
-
-void LLinkObjectInList::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add(" offset %d", hydrogen()->store_field().offset());
-}
-
-
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
@@ -505,7 +487,7 @@ LPlatformChunk* LChunkBuilder::Build() {
}
-void LChunkBuilder::Abort(const char* reason) {
+void LChunkBuilder::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -716,7 +698,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort("Out of virtual registers while trying to allocate temp register.");
+ Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1432,16 +1414,6 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
}
-LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
- ASSERT(instr->value()->representation().IsInteger32());
- ASSERT(instr->representation().IsInteger32());
- if (instr->HasNoUses()) return NULL;
- LOperand* input = UseRegisterAtStart(instr->value());
- LBitNotI* result = new(zone()) LBitNotI(input);
- return DefineSameAsFirst(result);
-}
-
-
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1869,17 +1841,6 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
}
-LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
- return NULL;
-}
-
-
-LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
- HInductionVariableAnnotation* instr) {
- return NULL;
-}
-
-
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
return AssignEnvironment(new(zone()) LBoundsCheck(
UseRegisterOrConstantAtStart(instr->index()),
@@ -2067,15 +2028,6 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
}
-LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LUnallocated* temp = NULL;
- if (!instr->CanOmitPrototypeChecks()) temp = TempRegister();
- LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
- if (instr->CanOmitPrototypeChecks()) return result;
- return AssignEnvironment(result);
-}
-
-
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
// If the target is in new space, we'll emit a global cell compare and so
// want the value in a register. If the target gets promoted before we
@@ -2089,10 +2041,16 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
+ if (!instr->CanOmitMapChecks()) {
+ value = UseRegisterAtStart(instr->value());
+ if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+ }
LCheckMaps* result = new(zone()) LCheckMaps(value);
- if (instr->CanOmitMapChecks()) return result;
- return AssignEnvironment(result);
+ if (!instr->CanOmitMapChecks()) {
+ AssignEnvironment(result);
+ if (instr->has_migration_target()) return AssignPointerMap(result);
+ }
+ return result;
}
@@ -2191,14 +2149,6 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoLinkObjectInList(HLinkObjectInList* instr) {
- LOperand* object = UseRegister(instr->value());
- LOperand* temp = TempRegister();
- LLinkObjectInList* result = new(zone()) LLinkObjectInList(object, temp);
- return result;
-}
-
-
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
LInstruction* result =
@@ -2444,7 +2394,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool is_external_location = instr->access().IsExternalMemory() &&
instr->access().offset() == 0;
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ bool needs_write_barrier_for_map = instr->has_transition() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2540,12 +2490,6 @@ LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
}
-LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
- LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LStringLength(string));
-}
-
-
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
info()->MarkAsDeferredCalling();
LOperand* context = UseAny(instr->context());
@@ -2599,7 +2543,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort("Too many spill slots needed for OSR");
+ Abort(kTooManySpillSlotsNeededForOSR);
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/deps/v8/src/ia32/lithium-ia32.h b/deps/v8/src/ia32/lithium-ia32.h
index 6a2aa00654..effecb73e3 100644
--- a/deps/v8/src/ia32/lithium-ia32.h
+++ b/deps/v8/src/ia32/lithium-ia32.h
@@ -50,7 +50,6 @@ class LCodeGen;
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
- V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -68,7 +67,6 @@ class LCodeGen;
V(CheckMaps) \
V(CheckMapValue) \
V(CheckNonSmi) \
- V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -120,7 +118,6 @@ class LCodeGen;
V(IsUndetectableAndBranch) \
V(Label) \
V(LazyBailout) \
- V(LinkObjectInList) \
V(LoadContextSlot) \
V(LoadExternalArrayPointer) \
V(LoadFieldByIndex) \
@@ -175,7 +172,6 @@ class LCodeGen;
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringCompareAndBranch) \
- V(StringLength) \
V(SubI) \
V(TaggedToI) \
V(TaggedToINoSSE2) \
@@ -1360,18 +1356,6 @@ class LThrow: public LTemplateInstruction<0, 2, 0> {
};
-class LBitNotI: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LBitNotI(LOperand* value) {
- inputs_[0] = value;
- }
-
- LOperand* value() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
-};
-
-
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -1705,25 +1689,6 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 3, 0> {
};
-class LLinkObjectInList: public LTemplateInstruction<0, 1, 1> {
- public:
- explicit LLinkObjectInList(LOperand* object, LOperand* temp) {
- inputs_[0] = object;
- temps_[0] = temp;
- }
-
- LOperand* object() { return inputs_[0]; }
- LOperand* temp() { return temps_[0]; }
-
- ExternalReference GetReference(Isolate* isolate);
-
- DECLARE_CONCRETE_INSTRUCTION(LinkObjectInList, "link-object-in-list")
- DECLARE_HYDROGEN_ACCESSOR(LinkObjectInList)
-
- virtual void PrintDataTo(StringStream* stream);
-};
-
-
class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
public:
explicit LLoadContextSlot(LOperand* context) {
@@ -2257,7 +2222,7 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 2> {
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition(); }
+ Handle<Map> transition() const { return hydrogen()->transition_map(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2431,19 +2396,6 @@ class LStringCharFromCode: public LTemplateInstruction<1, 2, 0> {
};
-class LStringLength: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LStringLength(LOperand* string) {
- inputs_[0] = string;
- }
-
- LOperand* string() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
- DECLARE_HYDROGEN_ACCESSOR(StringLength)
-};
-
-
class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
public:
explicit LCheckFunction(LOperand* value) {
@@ -2485,24 +2437,6 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 1> {
- public:
- explicit LCheckPrototypeMaps(LOperand* temp) {
- temps_[0] = temp;
- }
-
- LOperand* temp() { return temps_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
- DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
-
- ZoneList<Handle<JSObject> >* prototypes() const {
- return hydrogen()->prototypes();
- }
- ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
-};
-
-
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2834,7 +2768,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc
index 6d1cb16c62..8b1be3cf17 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/macro-assembler-ia32.cc
@@ -678,7 +678,7 @@ void MacroAssembler::AssertNumber(Register object) {
JumpIfSmi(object, &ok);
cmp(FieldOperand(object, HeapObject::kMapOffset),
isolate()->factory()->heap_number_map());
- Check(equal, "Operand not a number");
+ Check(equal, kOperandNotANumber);
bind(&ok);
}
}
@@ -687,7 +687,7 @@ void MacroAssembler::AssertNumber(Register object) {
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(equal, "Operand is not a smi");
+ Check(equal, kOperandIsNotASmi);
}
}
@@ -695,12 +695,12 @@ void MacroAssembler::AssertSmi(Register object) {
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, "Operand is a smi and not a string");
+ Check(not_equal, kOperandIsASmiAndNotAString);
push(object);
mov(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, FIRST_NONSTRING_TYPE);
pop(object);
- Check(below, "Operand is not a string");
+ Check(below, kOperandIsNotAString);
}
}
@@ -708,12 +708,12 @@ void MacroAssembler::AssertString(Register object) {
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, "Operand is a smi and not a name");
+ Check(not_equal, kOperandIsASmiAndNotAName);
push(object);
mov(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, LAST_NAME_TYPE);
pop(object);
- Check(below_equal, "Operand is not a name");
+ Check(below_equal, kOperandIsNotAName);
}
}
@@ -721,7 +721,7 @@ void MacroAssembler::AssertName(Register object) {
void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
- Check(not_equal, "Operand is a smi");
+ Check(not_equal, kOperandIsASmi);
}
}
@@ -734,7 +734,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
push(Immediate(CodeObject()));
if (emit_debug_code()) {
cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
- Check(not_equal, "code object not properly patched");
+ Check(not_equal, kCodeObjectNotProperlyPatched);
}
}
@@ -743,7 +743,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
if (emit_debug_code()) {
cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
Immediate(Smi::FromInt(type)));
- Check(equal, "stack frame types must match");
+ Check(equal, kStackFrameTypesMustMatch);
}
leave();
}
@@ -1024,7 +1024,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
cmp(scratch1, Immediate(0));
- Check(not_equal, "we should not have an empty lexical context");
+ Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
}
// Load the native context of the current context.
int offset =
@@ -1037,7 +1037,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Read the first word and compare to native_context_map.
cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, "JSGlobalObject::native_context should be a native context.");
+ Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
}
// Check if both contexts are the same.
@@ -1056,12 +1056,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Check the context is a native context.
if (emit_debug_code()) {
cmp(scratch2, isolate()->factory()->null_value());
- Check(not_equal, "JSGlobalProxy::context() should not be null.");
+ Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
// Read the first word and compare to native_context_map(),
cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, "JSGlobalObject::native_context should be a native context.");
+ Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
}
int token_offset = Context::kHeaderSize +
@@ -1206,7 +1206,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
#ifdef DEBUG
// Assert that result actually contains top on entry.
cmp(result, Operand::StaticVariable(allocation_top));
- Check(equal, "Unexpected allocation top");
+ Check(equal, kUnexpectedAllocationTop);
#endif
return;
}
@@ -1226,7 +1226,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
AllocationFlags flags) {
if (emit_debug_code()) {
test(result_end, Immediate(kObjectAlignmentMask));
- Check(zero, "Unaligned allocation in new space");
+ Check(zero, kUnalignedAllocationInNewSpace);
}
ExternalReference allocation_top =
@@ -1458,7 +1458,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) {
and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
cmp(object, Operand::StaticVariable(new_space_allocation_top));
- Check(below, "Undo allocation of non allocated memory");
+ Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
mov(Operand::StaticVariable(new_space_allocation_top), object);
}
@@ -1933,7 +1933,7 @@ void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
// If false, it is returned as a pointer to a preallocated by caller memory
// region. Pointer to this region should be passed to a function as an
// implicit first argument.
-#if V8_OS_BSD4 || V8_OS_MINGW32 || V8_OS_CYGWIN
+#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
@@ -2062,7 +2062,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
// previous handle scope.
mov(Operand::StaticVariable(next_address), ebx);
sub(Operand::StaticVariable(level_address), Immediate(1));
- Assert(above_equal, "Invalid HandleScope level");
+ Assert(above_equal, kInvalidHandleScopeLevel);
cmp(edi, Operand::StaticVariable(limit_address));
j(not_equal, &delete_allocated_handles);
bind(&leave_exit_frame);
@@ -2104,7 +2104,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
cmp(return_value, isolate()->factory()->null_value());
j(equal, &ok, Label::kNear);
- Abort("API call returned invalid object");
+ Abort(kAPICallReturnedInvalidObject);
bind(&ok);
#endif
@@ -2390,7 +2390,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
if (emit_debug_code()) {
cmp(FieldOperand(dst, HeapObject::kMapOffset),
isolate()->factory()->with_context_map());
- Check(not_equal, "Variable resolved to with context.");
+ Check(not_equal, kVariableResolvedToWithContext);
}
}
@@ -2477,7 +2477,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
jmp(&ok);
bind(&fail);
- Abort("Global functions must have initial map");
+ Abort(kGlobalFunctionsMustHaveInitialMap);
bind(&ok);
}
}
@@ -2578,7 +2578,7 @@ void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
and_(eax, kTopMask);
shr(eax, 11);
cmp(eax, Immediate(tos));
- Check(equal, "Unexpected FPU stack depth after instruction");
+ Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
fnclex();
pop(eax);
}
@@ -2661,8 +2661,8 @@ void MacroAssembler::DecrementCounter(Condition cc,
}
-void MacroAssembler::Assert(Condition cc, const char* msg) {
- if (emit_debug_code()) Check(cc, msg);
+void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
+ if (emit_debug_code()) Check(cc, reason);
}
@@ -2679,16 +2679,16 @@ void MacroAssembler::AssertFastElements(Register elements) {
cmp(FieldOperand(elements, HeapObject::kMapOffset),
Immediate(factory->fixed_cow_array_map()));
j(equal, &ok);
- Abort("JSObject with fast elements map has slow elements");
+ Abort(kJSObjectWithFastElementsMapHasSlowElements);
bind(&ok);
}
}
-void MacroAssembler::Check(Condition cc, const char* msg) {
+void MacroAssembler::Check(Condition cc, BailoutReason reason) {
Label L;
j(cc, &L);
- Abort(msg);
+ Abort(reason);
// will not return here
bind(&L);
}
@@ -2709,12 +2709,13 @@ void MacroAssembler::CheckStackAlignment() {
}
-void MacroAssembler::Abort(const char* msg) {
+void MacroAssembler::Abort(BailoutReason reason) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
+ const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
@@ -3118,7 +3119,7 @@ void MacroAssembler::EnsureNotWhite(
if (emit_debug_code()) {
mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
- Check(less_equal, "Live Bytes Count overflow chunk size");
+ Check(less_equal, kLiveBytesCountOverflowChunkSize);
}
bind(&done);
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h
index 3bca930d66..165c9ce6d2 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/macro-assembler-ia32.h
@@ -807,6 +807,8 @@ class MacroAssembler: public Assembler {
void Drop(int element_count);
void Call(Label* target) { call(target); }
+ void Push(Register src) { push(src); }
+ void Pop(Register dst) { pop(dst); }
// Emit call to the code we are currently generating.
void CallSelf() {
@@ -844,15 +846,15 @@ class MacroAssembler: public Assembler {
// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cc, const char* msg);
+ void Assert(Condition cc, BailoutReason reason);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cc, const char* msg);
+ void Check(Condition cc, BailoutReason reason);
// Print a message to stdout and abort execution.
- void Abort(const char* msg);
+ void Abort(BailoutReason reason);
// Check that the stack is aligned.
void CheckStackAlignment();
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index 123506fa62..b7828b81ab 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -3153,7 +3153,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
__ j(equal, &miss);
} else if (FLAG_debug_code) {
__ cmp(eax, factory()->the_hole_value());
- __ Check(not_equal, "DontDelete cells can't contain the hole");
+ __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
}
HandlerFrontendFooter(name, &success, &miss);
diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc
index 269754b7d6..3c22580c2c 100644
--- a/deps/v8/src/ic.cc
+++ b/deps/v8/src/ic.cc
@@ -233,16 +233,22 @@ static bool TryRemoveInvalidPrototypeDependentStub(Code* target,
// The stub is not in the cache. We've ruled out all other kinds of failure
// except for proptotype chain changes, a deprecated map, a map that's
- // different from the one that the stub expects, or a constant global property
- // that will become mutable. Threat all those situations as prototype failures
- // (stay monomorphic if possible).
+ // different from the one that the stub expects, elements kind changes, or a
+ // constant global property that will become mutable. Threat all those
+ // situations as prototype failures (stay monomorphic if possible).
// If the IC is shared between multiple receivers (slow dictionary mode), then
// the map cannot be deprecated and the stub invalidated.
if (cache_holder == OWN_MAP) {
Map* old_map = target->FindFirstMap();
if (old_map == map) return true;
- if (old_map != NULL && old_map->is_deprecated()) return true;
+ if (old_map != NULL) {
+ if (old_map->is_deprecated()) return true;
+ if (IsMoreGeneralElementsKindTransition(old_map->elements_kind(),
+ map->elements_kind())) {
+ return true;
+ }
+ }
}
if (receiver->IsGlobalObject()) {
@@ -384,7 +390,6 @@ void IC::Clear(Address address) {
case Code::KEYED_CALL_IC: return KeyedCallIC::Clear(address, target);
case Code::COMPARE_IC: return CompareIC::Clear(address, target);
case Code::COMPARE_NIL_IC: return CompareNilIC::Clear(address, target);
- case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::TO_BOOLEAN_IC:
// Clearing these is tricky and does not
@@ -2583,27 +2588,6 @@ void BinaryOpIC::StubInfoToType(int minor_key,
}
-MaybeObject* UnaryOpIC::Transition(Handle<Object> object) {
- Code::ExtraICState extra_ic_state = target()->extended_extra_ic_state();
- UnaryOpStub stub(extra_ic_state);
-
- stub.UpdateStatus(object);
-
- Handle<Code> code = stub.GetCode(isolate());
- set_target(*code);
-
- return stub.Result(object, isolate());
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss) {
- HandleScope scope(isolate);
- Handle<Object> object = args.at<Object>(0);
- UnaryOpIC ic(isolate);
- return ic.Transition(object);
-}
-
-
static BinaryOpIC::TypeInfo TypeInfoFromValue(Handle<Object> value,
Token::Value op) {
v8::internal::TypeInfo type = v8::internal::TypeInfo::FromValue(value);
diff --git a/deps/v8/src/ic.h b/deps/v8/src/ic.h
index 7820d407ec..fcf0de58f1 100644
--- a/deps/v8/src/ic.h
+++ b/deps/v8/src/ic.h
@@ -714,14 +714,6 @@ class KeyedStoreIC: public StoreIC {
};
-class UnaryOpIC: public IC {
- public:
- explicit UnaryOpIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { }
-
- MUST_USE_RESULT MaybeObject* Transition(Handle<Object> object);
-};
-
-
// Type Recording BinaryOpIC, that records the types of the inputs and outputs.
class BinaryOpIC: public IC {
public:
diff --git a/deps/v8/src/icu_util.cc b/deps/v8/src/icu_util.cc
index 91f45278ee..b9bd65edc6 100644
--- a/deps/v8/src/icu_util.cc
+++ b/deps/v8/src/icu_util.cc
@@ -27,7 +27,7 @@
#include "icu_util.h"
-#if defined(_WIN32) && defined(ENABLE_I18N_SUPPORT)
+#if defined(_WIN32) && defined(V8_I18N_SUPPORT)
#include <windows.h>
#include "unicode/putil.h"
@@ -42,7 +42,7 @@ namespace v8 {
namespace internal {
bool InitializeICU() {
-#if defined(_WIN32) && defined(ENABLE_I18N_SUPPORT)
+#if defined(_WIN32) && defined(V8_I18N_SUPPORT)
// We expect to find the ICU data module alongside the current module.
HMODULE module = LoadLibraryA(ICU_UTIL_DATA_SHARED_MODULE_NAME);
if (!module) return false;
diff --git a/deps/v8/src/isolate.cc b/deps/v8/src/isolate.cc
index 61f1e2dcfa..448c719c1a 100644
--- a/deps/v8/src/isolate.cc
+++ b/deps/v8/src/isolate.cc
@@ -1774,6 +1774,7 @@ Isolate::Isolate()
inner_pointer_to_code_cache_(NULL),
write_iterator_(NULL),
global_handles_(NULL),
+ eternal_handles_(NULL),
context_switcher_(NULL),
thread_manager_(NULL),
fp_stubs_generated_(false),
@@ -2052,6 +2053,8 @@ Isolate::~Isolate() {
code_range_ = NULL;
delete global_handles_;
global_handles_ = NULL;
+ delete eternal_handles_;
+ eternal_handles_ = NULL;
delete string_stream_debug_object_cache_;
string_stream_debug_object_cache_ = NULL;
@@ -2183,6 +2186,7 @@ bool Isolate::Init(Deserializer* des) {
inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
write_iterator_ = new ConsStringIteratorOp();
global_handles_ = new GlobalHandles(this);
+ eternal_handles_ = new EternalHandles();
bootstrapper_ = new Bootstrapper(this);
handle_scope_implementer_ = new HandleScopeImplementer(this);
stub_cache_ = new StubCache(this);
diff --git a/deps/v8/src/isolate.h b/deps/v8/src/isolate.h
index c008317737..74bfc29174 100644
--- a/deps/v8/src/isolate.h
+++ b/deps/v8/src/isolate.h
@@ -922,6 +922,8 @@ class Isolate {
GlobalHandles* global_handles() { return global_handles_; }
+ EternalHandles* eternal_handles() { return eternal_handles_; }
+
ThreadManager* thread_manager() { return thread_manager_; }
ContextSwitcher* context_switcher() { return context_switcher_; }
@@ -1295,6 +1297,7 @@ class Isolate {
InnerPointerToCodeCache* inner_pointer_to_code_cache_;
ConsStringIteratorOp* write_iterator_;
GlobalHandles* global_handles_;
+ EternalHandles* eternal_handles_;
ContextSwitcher* context_switcher_;
ThreadManager* thread_manager_;
RuntimeState runtime_state_;
diff --git a/deps/v8/src/lithium.cc b/deps/v8/src/lithium.cc
index e9c3531e38..790a2182b1 100644
--- a/deps/v8/src/lithium.cc
+++ b/deps/v8/src/lithium.cc
@@ -425,7 +425,7 @@ LChunk* LChunk::NewChunk(HGraph* graph) {
int values = graph->GetMaximumValueID();
CompilationInfo* info = graph->info();
if (values > LUnallocated::kMaxVirtualRegisters) {
- info->set_bailout_reason("not enough virtual registers for values");
+ info->set_bailout_reason(kNotEnoughVirtualRegistersForValues);
return NULL;
}
LAllocator allocator(values, graph);
@@ -434,7 +434,7 @@ LChunk* LChunk::NewChunk(HGraph* graph) {
if (chunk == NULL) return NULL;
if (!allocator.Allocate(chunk)) {
- info->set_bailout_reason("not enough virtual registers (regalloc)");
+ info->set_bailout_reason(kNotEnoughVirtualRegistersRegalloc);
return NULL;
}
diff --git a/deps/v8/src/log.cc b/deps/v8/src/log.cc
index b89c2bfba0..a1e5a6752b 100644
--- a/deps/v8/src/log.cc
+++ b/deps/v8/src/log.cc
@@ -1644,7 +1644,6 @@ void Logger::LogCodeObject(Object* object) {
case Code::FUNCTION:
case Code::OPTIMIZED_FUNCTION:
return; // We log this later using LogCompiledFunctions.
- case Code::UNARY_OP_IC: // fall through
case Code::BINARY_OP_IC: // fall through
case Code::COMPARE_IC: // fall through
case Code::COMPARE_NIL_IC: // fall through
diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc
index 911e73b12a..0e84267028 100644
--- a/deps/v8/src/mark-compact.cc
+++ b/deps/v8/src/mark-compact.cc
@@ -337,6 +337,11 @@ static void VerifyNativeContextSeparation(Heap* heap) {
#endif
+void MarkCompactCollector::TearDown() {
+ AbortCompaction();
+}
+
+
void MarkCompactCollector::AddEvacuationCandidate(Page* p) {
p->MarkEvacuationCandidate();
evacuation_candidates_.Add(p);
@@ -426,8 +431,8 @@ void MarkCompactCollector::CollectGarbage() {
heap()->weak_embedded_maps_verification_enabled()) {
VerifyWeakEmbeddedMapsInOptimizedCode();
}
- if (FLAG_collect_maps && FLAG_omit_prototype_checks_for_leaf_maps) {
- VerifyOmittedPrototypeChecks();
+ if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
+ VerifyOmittedMapChecks();
}
#endif
@@ -498,13 +503,13 @@ void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
}
-void MarkCompactCollector::VerifyOmittedPrototypeChecks() {
+void MarkCompactCollector::VerifyOmittedMapChecks() {
HeapObjectIterator iterator(heap()->map_space());
for (HeapObject* obj = iterator.Next();
obj != NULL;
obj = iterator.Next()) {
Map* map = Map::cast(obj);
- map->VerifyOmittedPrototypeChecks();
+ map->VerifyOmittedMapChecks();
}
}
#endif // VERIFY_HEAP
diff --git a/deps/v8/src/mark-compact.h b/deps/v8/src/mark-compact.h
index 3c4dfb688f..ee845a0837 100644
--- a/deps/v8/src/mark-compact.h
+++ b/deps/v8/src/mark-compact.h
@@ -571,6 +571,8 @@ class MarkCompactCollector {
static void Initialize();
+ void TearDown();
+
void CollectEvacuationCandidates(PagedSpace* space);
void AddEvacuationCandidate(Page* p);
@@ -636,7 +638,7 @@ class MarkCompactCollector {
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
void VerifyWeakEmbeddedMapsInOptimizedCode();
- void VerifyOmittedPrototypeChecks();
+ void VerifyOmittedMapChecks();
#endif
// Sweep a single page from the given space conservatively.
diff --git a/deps/v8/src/messages.js b/deps/v8/src/messages.js
index 92ac1bc46a..2debbf8654 100644
--- a/deps/v8/src/messages.js
+++ b/deps/v8/src/messages.js
@@ -126,6 +126,7 @@ var kMessages = {
stack_overflow: ["Maximum call stack size exceeded"],
invalid_time_value: ["Invalid time value"],
+ invalid_count_value: ["Invalid count value"],
// SyntaxError
paren_in_arg_string: ["Function arg string contains parenthesis"],
not_isvar: ["builtin %IS_VAR: not a variable"],
@@ -227,16 +228,18 @@ function NoSideEffectToString(obj) {
}
}
}
- if (IsNativeErrorObject(obj)) return %_CallFunction(obj, ErrorToString);
+ if (CanBeSafelyTreatedAsAnErrorObject(obj)) {
+ return %_CallFunction(obj, ErrorToString);
+ }
return %_CallFunction(obj, ObjectToString);
}
-
-// To check if something is a native error we need to check the
-// concrete native error types. It is not sufficient to use instanceof
-// since it possible to create an object that has Error.prototype on
-// its prototype chain. This is the case for DOMException for example.
-function IsNativeErrorObject(obj) {
+// To determine whether we can safely stringify an object using ErrorToString
+// without the risk of side-effects, we need to check whether the object is
+// either an instance of a native error type (via '%_ClassOf'), or has $Error
+// in its prototype chain and hasn't overwritten 'toString' with something
+// strange and unusual.
+function CanBeSafelyTreatedAsAnErrorObject(obj) {
switch (%_ClassOf(obj)) {
case 'Error':
case 'EvalError':
@@ -247,7 +250,9 @@ function IsNativeErrorObject(obj) {
case 'URIError':
return true;
}
- return false;
+
+ var objToString = %GetDataProperty(obj, "toString");
+ return obj instanceof $Error && objToString === ErrorToString;
}
@@ -256,7 +261,7 @@ function IsNativeErrorObject(obj) {
// the error to string method. This is to avoid leaking error
// objects between script tags in a browser setting.
function ToStringCheckErrorObject(obj) {
- if (IsNativeErrorObject(obj)) {
+ if (CanBeSafelyTreatedAsAnErrorObject(obj)) {
return %_CallFunction(obj, ErrorToString);
} else {
return ToString(obj);
diff --git a/deps/v8/src/mips/assembler-mips.h b/deps/v8/src/mips/assembler-mips.h
index 8d533b36f4..cb0896a8de 100644
--- a/deps/v8/src/mips/assembler-mips.h
+++ b/deps/v8/src/mips/assembler-mips.h
@@ -358,6 +358,11 @@ class Operand BASE_EMBEDDED {
// Return true if this is a register operand.
INLINE(bool is_reg() const);
+ inline int32_t immediate() const {
+ ASSERT(!is_reg());
+ return imm32_;
+ }
+
Register rm() const { return rm_; }
private:
diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc
index 3f5dca0009..d424cbc726 100644
--- a/deps/v8/src/mips/builtins-mips.cc
+++ b/deps/v8/src/mips/builtins-mips.cc
@@ -123,10 +123,10 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
// Initial map for the builtin InternalArray functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
__ And(t0, a2, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for InternalArray function",
+ __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
- __ Assert(eq, "Unexpected initial map for InternalArray function",
+ __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
t0, Operand(MAP_TYPE));
}
@@ -153,10 +153,10 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// Initial map for the builtin Array functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
__ And(t0, a2, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for Array function (1)",
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
- __ Assert(eq, "Unexpected initial map for Array function (2)",
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
t0, Operand(MAP_TYPE));
}
@@ -185,7 +185,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
Register function = a1;
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
- __ Assert(eq, "Unexpected String function", function, Operand(a2));
+ __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
}
// Load the first arguments in a0 and get rid of the rest.
@@ -231,10 +231,10 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
__ LoadGlobalFunctionInitialMap(function, map, t0);
if (FLAG_debug_code) {
__ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
- __ Assert(eq, "Unexpected string wrapper instance size",
+ __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
t0, Operand(JSValue::kSize >> kPointerSizeLog2));
__ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
- __ Assert(eq, "Unexpected unused properties of string wrapper",
+ __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
t0, Operand(zero_reg));
}
__ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));
@@ -489,7 +489,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ addu(a0, t5, t0);
// a0: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
- __ Assert(le, "Unexpected number of pre-allocated property fields.",
+ __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
a0, Operand(t6));
}
__ InitializeFieldsWithFiller(t5, a0, t7);
@@ -522,7 +522,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Done if no extra properties are to be allocated.
__ Branch(&allocated, eq, a3, Operand(zero_reg));
- __ Assert(greater_equal, "Property allocation count failed.",
+ __ Assert(greater_equal, kPropertyAllocationCountFailed,
a3, Operand(zero_reg));
// Scale the number of elements by pointer size and add the header for
@@ -569,7 +569,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
} else if (FLAG_debug_code) {
__ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
- __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8));
+ __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));
}
__ jmp(&entry);
__ bind(&loop);
diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc
index 0e1b224ead..8a03a9a31a 100644
--- a/deps/v8/src/mips/code-stubs-mips.cc
+++ b/deps/v8/src/mips/code-stubs-mips.cc
@@ -247,17 +247,6 @@ void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
}
-void UnaryOpStub::InitializeInterfaceDescriptor(
- Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
- static Register registers[] = { a0 };
- descriptor->register_param_count_ = 1;
- descriptor->register_params_ = registers;
- descriptor->deoptimization_handler_ =
- FUNCTION_ADDR(UnaryOpIC_Miss);
-}
-
-
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -520,8 +509,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
Label after_sentinel;
__ JumpIfNotSmi(a3, &after_sentinel);
if (FLAG_debug_code) {
- const char* message = "Expected 0 as a Smi sentinel";
- __ Assert(eq, message, a3, Operand(zero_reg));
+ __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg));
}
__ lw(a3, GlobalObjectOperand());
__ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
@@ -679,7 +667,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
Label* not_number) {
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
Label is_smi, done;
@@ -729,7 +717,7 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
Label* not_number) {
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
Label done;
Label not_in_int32_range;
@@ -806,7 +794,7 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
__ bind(&obj_is_not_smi);
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
// Load the number.
@@ -853,7 +841,7 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined);
@@ -4279,12 +4267,12 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ And(t0, regexp_data, Operand(kSmiTagMask));
__ Check(nz,
- "Unexpected type for RegExp data, FixedArray expected",
+ kUnexpectedTypeForRegExpDataFixedArrayExpected,
t0,
Operand(zero_reg));
__ GetObjectType(regexp_data, a0, a0);
__ Check(eq,
- "Unexpected type for RegExp data, FixedArray expected",
+ kUnexpectedTypeForRegExpDataFixedArrayExpected,
a0,
Operand(FIXED_ARRAY_TYPE));
}
@@ -4639,7 +4627,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Sequential strings have already been ruled out.
__ And(at, a0, Operand(kIsIndirectStringMask));
__ Assert(eq,
- "external string expected, but not found",
+ kExternalStringExpectedButNotFound,
at,
Operand(zero_reg));
}
@@ -5020,7 +5008,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
// Index is not a smi.
__ bind(&index_not_smi_);
@@ -5069,7 +5057,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort("Unexpected fallthrough from CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
@@ -5106,7 +5094,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -5117,7 +5105,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ Branch(&exit_);
- __ Abort("Unexpected fallthrough from CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
@@ -5172,7 +5160,7 @@ void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
// that it is.
__ And(scratch4, dest, Operand(kPointerAlignmentMask));
__ Check(eq,
- "Destination of copy not aligned.",
+ kDestinationOfCopyNotAligned,
scratch4,
Operand(zero_reg));
}
@@ -5372,7 +5360,7 @@ void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
// Must be the hole (deleted entry).
if (FLAG_debug_code) {
__ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
- __ Assert(eq, "oddball in string table is not undefined or the hole",
+ __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole,
scratch, Operand(candidate));
}
__ jmp(&next_probe[i]);
@@ -6580,7 +6568,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
// filled with kZapValue by the GC.
// Dereference the address and check for this.
__ lw(t0, MemOperand(t9));
- __ Assert(ne, "Received invalid return address.", t0,
+ __ Assert(ne, kReceivedInvalidReturnAddress, t0,
Operand(reinterpret_cast<uint32_t>(kZapValue)));
}
__ Jump(t9);
@@ -7331,7 +7319,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -7386,7 +7374,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -7447,10 +7435,10 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ And(at, a3, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for Array function",
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
at, Operand(zero_reg));
__ GetObjectType(a3, a3, t0);
- __ Assert(eq, "Unexpected initial map for Array function",
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
t0, Operand(MAP_TYPE));
// We should either have undefined in a2 or a valid cell.
@@ -7459,7 +7447,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
__ Branch(&okay_here, eq, a2, Operand(at));
__ lw(a3, FieldMemOperand(a2, 0));
- __ Assert(eq, "Expected property cell in register a2",
+ __ Assert(eq, kExpectedPropertyCellInRegisterA2,
a3, Operand(cell_map));
__ bind(&okay_here);
}
@@ -7559,10 +7547,10 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ And(at, a3, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for Array function",
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
at, Operand(zero_reg));
__ GetObjectType(a3, a3, t0);
- __ Assert(eq, "Unexpected initial map for Array function",
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
t0, Operand(MAP_TYPE));
}
@@ -7579,7 +7567,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
Label done;
__ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
__ Assert(
- eq, "Invalid ElementsKind for InternalArray or InternalPackedArray",
+ eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
a3, Operand(FAST_HOLEY_ELEMENTS));
__ bind(&done);
}
diff --git a/deps/v8/src/mips/codegen-mips.cc b/deps/v8/src/mips/codegen-mips.cc
index 3f74154f58..10490e7a7b 100644
--- a/deps/v8/src/mips/codegen-mips.cc
+++ b/deps/v8/src/mips/codegen-mips.cc
@@ -289,7 +289,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
__ SmiTag(t5);
__ Or(t5, t5, Operand(1));
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- __ Assert(eq, "object found in smi-only array", at, Operand(t5));
+ __ Assert(eq, kObjectFoundInSmiOnlyArray, at, Operand(t5));
}
__ sw(t0, MemOperand(t3)); // mantissa
__ sw(t1, MemOperand(t3, kIntSize)); // exponent
@@ -489,7 +489,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ And(at, result, Operand(kIsIndirectStringMask));
- __ Assert(eq, "external string expected, but not found",
+ __ Assert(eq, kExternalStringExpectedButNotFound,
at, Operand(zero_reg));
}
// Rule out short external strings.
diff --git a/deps/v8/src/mips/debug-mips.cc b/deps/v8/src/mips/debug-mips.cc
index 30cc4db634..020228fc6b 100644
--- a/deps/v8/src/mips/debug-mips.cc
+++ b/deps/v8/src/mips/debug-mips.cc
@@ -142,8 +142,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ And(at, reg, 0xc0000000);
- __ Assert(
- eq, "Unable to encode value as smi", at, Operand(zero_reg));
+ __ Assert(eq, kUnableToEncodeValueAsSmi, at, Operand(zero_reg));
}
__ sll(reg, reg, kSmiTagSize);
}
@@ -325,12 +324,12 @@ void Debug::GenerateSlotDebugBreak(MacroAssembler* masm) {
void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) {
- masm->Abort("LiveEdit frame dropping is not supported on mips");
+ masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnMips);
}
void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
- masm->Abort("LiveEdit frame dropping is not supported on mips");
+ masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnMips);
}
diff --git a/deps/v8/src/mips/frames-mips.h b/deps/v8/src/mips/frames-mips.h
index f6f20cd20c..437bf3a9f1 100644
--- a/deps/v8/src/mips/frames-mips.h
+++ b/deps/v8/src/mips/frames-mips.h
@@ -230,6 +230,11 @@ inline Object* JavaScriptFrame::function_slot_object() const {
}
+inline void StackHandler::SetFp(Address slot, Address fp) {
+ Memory::Address_at(slot) = fp;
+}
+
+
} } // namespace v8::internal
#endif
diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc
index 9c610c32f9..b60502c9a5 100644
--- a/deps/v8/src/mips/full-codegen-mips.cc
+++ b/deps/v8/src/mips/full-codegen-mips.cc
@@ -786,10 +786,10 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// Check that we're not inside a with or catch context.
__ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ LoadRoot(t0, Heap::kWithContextMapRootIndex);
- __ Check(ne, "Declaration in with context.",
+ __ Check(ne, kDeclarationInWithContext,
a1, Operand(t0));
__ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
- __ Check(ne, "Declaration in catch context.",
+ __ Check(ne, kDeclarationInCatchContext,
a1, Operand(t0));
}
}
@@ -2234,7 +2234,7 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
Handle<Map> map(isolate()->native_context()->generator_result_map());
- __ Allocate(map->instance_size(), a0, a2, a3, &gc_required, TAG_OBJECT);
+ __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&gc_required);
@@ -2249,19 +2249,18 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
__ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
__ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
- __ sw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
- __ sw(t0, FieldMemOperand(a0, JSObject::kPropertiesOffset));
- __ sw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
+ __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
+ __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
+ __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
__ sw(a2,
- FieldMemOperand(a0, JSGeneratorObject::kResultValuePropertyOffset));
+ FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
__ sw(a3,
- FieldMemOperand(a0, JSGeneratorObject::kResultDonePropertyOffset));
+ FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
// Only the value field needs a write barrier, as the other values are in the
// root set.
- __ RecordWriteField(a0, JSGeneratorObject::kResultValuePropertyOffset,
+ __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
- __ mov(result_register(), a0);
}
@@ -2530,7 +2529,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// Check for an uninitialized let binding.
__ lw(a2, location);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
- __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
+ __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
}
// Perform the assignment.
__ sw(v0, location);
@@ -3493,21 +3492,21 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
Register value,
uint32_t encoding_mask) {
__ And(at, index, Operand(kSmiTagMask));
- __ Check(eq, "Non-smi index", at, Operand(zero_reg));
+ __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
__ And(at, value, Operand(kSmiTagMask));
- __ Check(eq, "Non-smi value", at, Operand(zero_reg));
+ __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
__ lw(at, FieldMemOperand(string, String::kLengthOffset));
- __ Check(lt, "Index is too large", index, Operand(at));
+ __ Check(lt, kIndexIsTooLarge, index, Operand(at));
- __ Check(ge, "Index is negative", index, Operand(zero_reg));
+ __ Check(ge, kIndexIsNegative, index, Operand(zero_reg));
__ lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
__ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));
__ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask));
__ Subu(at, at, Operand(encoding_mask));
- __ Check(eq, "Unexpected string type", at, Operand(zero_reg));
+ __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
}
@@ -3882,7 +3881,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort("Attempt to use undefined cache.");
+ __ Abort(kAttemptToUseUndefinedCache);
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
context()->Plug(v0);
return;
@@ -4064,7 +4063,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// element: Current array element.
// elements_end: Array end.
if (generate_debug_code_) {
- __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
+ __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
array_length, Operand(zero_reg));
}
__ bind(&loop);
@@ -4383,35 +4382,12 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
break;
}
- case Token::SUB:
- EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
- break;
-
- case Token::BIT_NOT:
- EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
- break;
-
default:
UNREACHABLE();
}
}
-void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
- const char* comment) {
- // TODO(svenpanne): Allowing format strings in Comment would be nice here...
- Comment cmt(masm_, comment);
- UnaryOpStub stub(expr->op());
- // GenericUnaryOpStub expects the argument to be in a0.
- VisitForAccumulatorValue(expr->expression());
- SetSourcePosition(expr->position());
- __ mov(a0, result_register());
- CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
- expr->UnaryOperationFeedbackId());
- context()->Plug(v0);
-}
-
-
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc
index 8db5f00fbf..34e601ccaa 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.cc
+++ b/deps/v8/src/mips/lithium-codegen-mips.cc
@@ -91,7 +91,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LChunkBuilder::Abort(const char* reason) {
+void LChunkBuilder::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -324,7 +324,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
// end of the jump table.
if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) +
deopt_jump_table_.length() * 12)) {
- Abort("Generated code is too large");
+ Abort(kGeneratedCodeIsTooLarge);
}
if (deopt_jump_table_.length() > 0) {
@@ -411,7 +411,7 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
ASSERT(constant->HasSmiValue());
__ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
} else if (r.IsDouble()) {
- Abort("EmitLoadRegister: Unsupported double immediate.");
+ Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
} else {
ASSERT(r.IsTagged());
__ LoadObject(scratch, literal);
@@ -449,9 +449,9 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
__ cvt_d_w(dbl_scratch, flt_scratch);
return dbl_scratch;
} else if (r.IsDouble()) {
- Abort("unsupported double immediate");
+ Abort(kUnsupportedDoubleImmediate);
} else if (r.IsTagged()) {
- Abort("unsupported tagged immediate");
+ Abort(kUnsupportedTaggedImmediate);
}
} else if (op->IsStackSlot() || op->IsArgument()) {
MemOperand mem_op = ToMemOperand(op);
@@ -520,14 +520,14 @@ Operand LCodeGen::ToOperand(LOperand* op) {
ASSERT(constant->HasInteger32Value());
return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
- Abort("ToOperand Unsupported double immediate.");
+ Abort(kToOperandUnsupportedDoubleImmediate);
}
ASSERT(r.IsTagged());
return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
- Abort("ToOperand IsDoubleRegister unimplemented");
+ Abort(kToOperandIsDoubleRegisterUnimplemented);
return Operand(0);
}
// Stack slots not implemented, use ToMemOperand instead.
@@ -748,7 +748,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort("bailout was not prepared");
+ Abort(kBailoutWasNotPrepared);
return;
}
@@ -1057,20 +1057,16 @@ void LCodeGen::DoModI(LModI* instr) {
HValue* left = hmod->left();
HValue* right = hmod->right();
if (hmod->HasPowerOf2Divisor()) {
- const Register scratch = scratch0();
const Register left_reg = ToRegister(instr->left());
- ASSERT(!left_reg.is(scratch));
const Register result_reg = ToRegister(instr->result());
// Note: The code below even works when right contains kMinInt.
int32_t divisor = Abs(right->GetInteger32Constant());
- __ mov(scratch, left_reg);
-
Label left_is_not_negative, done;
if (left->CanBeNegative()) {
- __ Branch(USE_DELAY_SLOT, &left_is_not_negative,
- ge, left_reg, Operand(zero_reg));
+ __ Branch(left_reg.is(result_reg) ? PROTECT : USE_DELAY_SLOT,
+ &left_is_not_negative, ge, left_reg, Operand(zero_reg));
__ subu(result_reg, zero_reg, left_reg);
__ And(result_reg, result_reg, divisor - 1);
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -1081,15 +1077,13 @@ void LCodeGen::DoModI(LModI* instr) {
}
__ bind(&left_is_not_negative);
- __ And(result_reg, scratch, divisor - 1);
+ __ And(result_reg, left_reg, divisor - 1);
__ bind(&done);
} else if (hmod->fixed_right_arg().has_value) {
- const Register scratch = scratch0();
const Register left_reg = ToRegister(instr->left());
const Register result_reg = ToRegister(instr->result());
-
- Register right_reg = EmitLoadRegister(instr->right(), scratch);
+ const Register right_reg = ToRegister(instr->right());
int32_t divisor = hmod->fixed_right_arg().value;
ASSERT(IsPowerOf2(divisor));
@@ -1099,8 +1093,8 @@ void LCodeGen::DoModI(LModI* instr) {
Label left_is_not_negative, done;
if (left->CanBeNegative()) {
- __ Branch(USE_DELAY_SLOT, &left_is_not_negative,
- ge, left_reg, Operand(zero_reg));
+ __ Branch(left_reg.is(result_reg) ? PROTECT : USE_DELAY_SLOT,
+ &left_is_not_negative, ge, left_reg, Operand(zero_reg));
__ subu(result_reg, zero_reg, left_reg);
__ And(result_reg, result_reg, divisor - 1);
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -1509,7 +1503,11 @@ void LCodeGen::DoBitI(LBitI* instr) {
__ Or(result, left, right);
break;
case Token::BIT_XOR:
- __ Xor(result, left, right);
+ if (right_op->IsConstantOperand() && right.immediate() == int32_t(~0)) {
+ __ Nor(result, zero_reg, left);
+ } else {
+ __ Xor(result, left, right);
+ }
break;
default:
UNREACHABLE();
@@ -1583,8 +1581,12 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
if (shift_count != 0) {
if (instr->hydrogen_value()->representation().IsSmi() &&
instr->can_deopt()) {
- __ sll(result, left, shift_count - 1);
- __ SmiTagCheckOverflow(result, result, scratch);
+ if (shift_count != 1) {
+ __ sll(result, left, shift_count - 1);
+ __ SmiTagCheckOverflow(result, result, scratch);
+ } else {
+ __ SmiTagCheckOverflow(result, left, scratch);
+ }
DeoptimizeIf(lt, instr->environment(), scratch, Operand(zero_reg));
} else {
__ sll(result, left, shift_count);
@@ -1766,7 +1768,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(eq, "Unexpected string type", at, Operand(zero_reg));
+ __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
}
__ Addu(scratch,
@@ -1783,13 +1785,6 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
}
-void LCodeGen::DoBitNotI(LBitNotI* instr) {
- Register input = ToRegister(instr->value());
- Register result = ToRegister(instr->result());
- __ Nor(result, zero_reg, Operand(input));
-}
-
-
void LCodeGen::DoThrow(LThrow* instr) {
Register input_reg = EmitLoadRegister(instr->value(), at);
__ push(input_reg);
@@ -2808,19 +2803,6 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
}
-void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
- Register object = ToRegister(instr->object());
- ExternalReference sites_list_address = instr->GetReference(isolate());
-
- __ li(at, Operand(sites_list_address));
- __ lw(at, MemOperand(at));
- __ sw(at, FieldMemOperand(object,
- instr->hydrogen()->store_field().offset()));
- __ li(at, Operand(sites_list_address));
- __ sw(object, MemOperand(at));
-}
-
-
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
@@ -3085,7 +3067,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -3171,7 +3153,7 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -3442,7 +3424,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
- Abort("DoPushArgument not implemented for double type.");
+ Abort(kDoPushArgumentNotImplementedForDoubleType);
} else {
Register argument_reg = EmitLoadRegister(argument, at);
__ push(argument_reg);
@@ -3661,7 +3643,7 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) {
FPURegister input = ToDoubleRegister(instr->value());
FPURegister result = ToDoubleRegister(instr->result());
__ abs_d(result, input);
- } else if (r.IsInteger32()) {
+ } else if (r.IsSmiOrInteger32()) {
EmitIntegerMathAbs(instr);
} else {
// Representation is tagged.
@@ -4267,7 +4249,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -4345,7 +4327,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -4605,13 +4587,6 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
}
-void LCodeGen::DoStringLength(LStringLength* instr) {
- Register string = ToRegister(instr->string());
- Register result = ToRegister(instr->result());
- __ lw(result, FieldMemOperand(string, String::kLengthOffset));
-}
-
-
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
LOperand* input = instr->value();
ASSERT(input->IsRegister() || input->IsStackSlot());
@@ -5209,31 +5184,63 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
}
-void LCodeGen::DoCheckMapCommon(Register map_reg,
- Handle<Map> map,
- LEnvironment* env) {
- Label success;
- __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
- DeoptimizeIf(al, env);
- __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
+ {
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ push(object);
+ CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
+ __ StoreToSafepointRegisterSlot(v0, scratch0());
+ }
+ __ And(at, scratch0(), Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+ class DeferredCheckMaps: public LDeferredCode {
+ public:
+ DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
+ : LDeferredCode(codegen), instr_(instr), object_(object) {
+ SetExit(check_maps());
+ }
+ virtual void Generate() {
+ codegen()->DoDeferredInstanceMigration(instr_, object_);
+ }
+ Label* check_maps() { return &check_maps_; }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LCheckMaps* instr_;
+ Label check_maps_;
+ Register object_;
+ };
+
if (instr->hydrogen()->CanOmitMapChecks()) return;
Register map_reg = scratch0();
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
__ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
+
+ DeferredCheckMaps* deferred = NULL;
+ if (instr->hydrogen()->has_migration_target()) {
+ deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+ __ bind(deferred->check_maps());
+ }
+
+ Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMapAndBranch(map_reg, map, &success, eq, &success);
}
Handle<Map> map = map_set->last();
- DoCheckMapCommon(map_reg, map, instr->environment());
+ __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
+ if (instr->hydrogen()->has_migration_target()) {
+ __ Branch(deferred->entry());
+ } else {
+ DeoptimizeIf(al, instr->environment());
+ }
+
__ bind(&success);
}
@@ -5288,25 +5295,6 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
}
-void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
-
- Register prototype_reg = ToRegister(instr->temp());
- Register map_reg = ToRegister(instr->temp2());
-
- ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
- ZoneList<Handle<Map> >* maps = instr->maps();
-
- ASSERT(prototypes->length() == maps->length());
-
- for (int i = 0; i < prototypes->length(); i++) {
- __ LoadHeapObject(prototype_reg, prototypes->at(i));
- __ lw(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset));
- DoCheckMapCommon(map_reg, maps->at(i), instr->environment());
- }
-}
-
-
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
@@ -5330,10 +5318,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
}
if (instr->size()->IsConstantOperand()) {
@@ -5391,10 +5381,12 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
__ Push(Smi::FromInt(size));
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
} else {
CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
diff --git a/deps/v8/src/mips/lithium-codegen-mips.h b/deps/v8/src/mips/lithium-codegen-mips.h
index a485b67db9..670c4cc87a 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.h
+++ b/deps/v8/src/mips/lithium-codegen-mips.h
@@ -114,7 +114,7 @@ class LCodeGen BASE_EMBEDDED {
DoubleRegister EmitLoadDoubleRegister(LOperand* op,
FloatRegister flt_scratch,
DoubleRegister dbl_scratch);
- int ToRepresentation(LConstantOperand* op, const Representation& r) const;
+ int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
Smi* ToSmi(LConstantOperand* op) const;
double ToDouble(LConstantOperand* op) const;
@@ -153,7 +153,7 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
- void DoCheckMapCommon(Register map_reg, Handle<Map> map, LEnvironment* env);
+ void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -213,7 +213,7 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
diff --git a/deps/v8/src/mips/lithium-gap-resolver-mips.cc b/deps/v8/src/mips/lithium-gap-resolver-mips.cc
index 771b22862e..460e13bf0a 100644
--- a/deps/v8/src/mips/lithium-gap-resolver-mips.cc
+++ b/deps/v8/src/mips/lithium-gap-resolver-mips.cc
@@ -258,7 +258,7 @@ void LGapResolver::EmitMove(int index) {
} else {
__ LoadObject(dst, cgen_->ToHandle(constant_source));
}
- } else if (source->IsDoubleRegister()) {
+ } else if (destination->IsDoubleRegister()) {
DoubleRegister result = cgen_->ToDoubleRegister(destination);
double v = cgen_->ToDouble(constant_source);
__ Move(result, v);
diff --git a/deps/v8/src/mips/lithium-mips.cc b/deps/v8/src/mips/lithium-mips.cc
index 5cfca00010..38ac19f609 100644
--- a/deps/v8/src/mips/lithium-mips.cc
+++ b/deps/v8/src/mips/lithium-mips.cc
@@ -277,24 +277,6 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
}
-ExternalReference LLinkObjectInList::GetReference(Isolate* isolate) {
- switch (hydrogen()->known_list()) {
- case HLinkObjectInList::ALLOCATION_SITE_LIST:
- return ExternalReference::allocation_sites_list_address(isolate);
- }
-
- UNREACHABLE();
- // Return a dummy value
- return ExternalReference::isolate_address(isolate);
-}
-
-
-void LLinkObjectInList::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add(" offset %d", hydrogen()->store_field().offset());
-}
-
-
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
@@ -460,7 +442,7 @@ LPlatformChunk* LChunkBuilder::Build() {
}
-void LCodeGen::Abort(const char* reason) {
+void LCodeGen::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -668,7 +650,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort("Out of virtual registers while trying to allocate temp register.");
+ Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1345,15 +1327,6 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
}
-LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
- ASSERT(instr->value()->representation().IsInteger32());
- ASSERT(instr->representation().IsInteger32());
- if (instr->HasNoUses()) return NULL;
- LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LBitNotI(value));
-}
-
-
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1771,17 +1744,6 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
}
-LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
- return NULL;
-}
-
-
-LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
- HInductionVariableAnnotation* instr) {
- return NULL;
-}
-
-
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = UseRegister(instr->length());
@@ -1955,19 +1917,6 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
}
-LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LUnallocated* temp1 = NULL;
- LOperand* temp2 = NULL;
- if (!instr->CanOmitPrototypeChecks()) {
- temp1 = TempRegister();
- temp2 = TempRegister();
- }
- LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
- if (instr->CanOmitPrototypeChecks()) return result;
- return AssignEnvironment(result);
-}
-
-
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckFunction(value));
@@ -1976,10 +1925,16 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
- LInstruction* result = new(zone()) LCheckMaps(value);
- if (instr->CanOmitMapChecks()) return result;
- return AssignEnvironment(result);
+ if (!instr->CanOmitMapChecks()) {
+ value = UseRegisterAtStart(instr->value());
+ if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+ }
+ LCheckMaps* result = new(zone()) LCheckMaps(value);
+ if (!instr->CanOmitMapChecks()) {
+ AssignEnvironment(result);
+ if (instr->has_migration_target()) return AssignPointerMap(result);
+ }
+ return result;
}
@@ -2062,13 +2017,6 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoLinkObjectInList(HLinkObjectInList* instr) {
- LOperand* object = UseRegister(instr->value());
- LLinkObjectInList* result = new(zone()) LLinkObjectInList(object);
- return result;
-}
-
-
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
LInstruction* result =
@@ -2270,7 +2218,7 @@ LInstruction* LChunkBuilder::DoTrapAllocationMemento(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool is_in_object = instr->access().IsInobject();
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ bool needs_write_barrier_for_map = instr->has_transition() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2341,12 +2289,6 @@ LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
}
-LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
- LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LStringLength(string));
-}
-
-
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
info()->MarkAsDeferredCalling();
LOperand* size = instr->size()->IsConstant()
@@ -2396,7 +2338,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort("Too many spill slots needed for OSR");
+ Abort(kTooManySpillSlotsNeededForOSR);
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/deps/v8/src/mips/lithium-mips.h b/deps/v8/src/mips/lithium-mips.h
index 2618c46992..a21c32342e 100644
--- a/deps/v8/src/mips/lithium-mips.h
+++ b/deps/v8/src/mips/lithium-mips.h
@@ -50,7 +50,6 @@ class LCodeGen;
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
- V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -68,7 +67,6 @@ class LCodeGen;
V(CheckMaps) \
V(CheckMapValue) \
V(CheckNonSmi) \
- V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -119,7 +117,6 @@ class LCodeGen;
V(IsUndetectableAndBranch) \
V(Label) \
V(LazyBailout) \
- V(LinkObjectInList) \
V(LoadContextSlot) \
V(LoadExternalArrayPointer) \
V(LoadFieldByIndex) \
@@ -175,7 +172,6 @@ class LCodeGen;
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringCompareAndBranch) \
- V(StringLength) \
V(SubI) \
V(TaggedToI) \
V(ThisFunction) \
@@ -1358,18 +1354,6 @@ class LThrow: public LTemplateInstruction<0, 1, 0> {
};
-class LBitNotI: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LBitNotI(LOperand* value) {
- inputs_[0] = value;
- }
-
- LOperand* value() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
-};
-
-
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -1654,23 +1638,6 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
};
-class LLinkObjectInList: public LTemplateInstruction<0, 1, 0> {
- public:
- explicit LLinkObjectInList(LOperand* object) {
- inputs_[0] = object;
- }
-
- LOperand* object() { return inputs_[0]; }
-
- ExternalReference GetReference(Isolate* isolate);
-
- DECLARE_CONCRETE_INSTRUCTION(LinkObjectInList, "link-object-in-list")
- DECLARE_HYDROGEN_ACCESSOR(LinkObjectInList)
-
- virtual void PrintDataTo(StringStream* stream);
-};
-
-
class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
public:
explicit LLoadContextSlot(LOperand* context) {
@@ -2147,7 +2114,7 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition(); }
+ Handle<Map> transition() const { return hydrogen()->transition_map(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2304,19 +2271,6 @@ class LStringCharFromCode: public LTemplateInstruction<1, 1, 0> {
};
-class LStringLength: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LStringLength(LOperand* string) {
- inputs_[0] = string;
- }
-
- LOperand* string() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
- DECLARE_HYDROGEN_ACCESSOR(StringLength)
-};
-
-
class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
public:
explicit LCheckFunction(LOperand* value) {
@@ -2356,26 +2310,6 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 2> {
- public:
- LCheckPrototypeMaps(LOperand* temp, LOperand* temp2) {
- temps_[0] = temp;
- temps_[1] = temp2;
- }
-
- LOperand* temp() { return temps_[0]; }
- LOperand* temp2() { return temps_[1]; }
-
- DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
- DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
-
- ZoneList<Handle<JSObject> >* prototypes() const {
- return hydrogen()->prototypes();
- }
- ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
-};
-
-
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2674,7 +2608,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc
index ea08a552be..a7ec713b35 100644
--- a/deps/v8/src/mips/macro-assembler-mips.cc
+++ b/deps/v8/src/mips/macro-assembler-mips.cc
@@ -256,7 +256,7 @@ void MacroAssembler::RecordWrite(Register object,
if (emit_debug_code()) {
lw(at, MemOperand(address));
Assert(
- eq, "Wrong address or value passed to RecordWrite", at, Operand(value));
+ eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
}
Label done;
@@ -358,7 +358,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
// In debug mode, make sure the lexical context is set.
#ifdef DEBUG
- Check(ne, "we should not have an empty lexical context",
+ Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
scratch, Operand(zero_reg));
#endif
@@ -374,7 +374,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Read the first word and compare to the native_context_map.
lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
LoadRoot(at, Heap::kNativeContextMapRootIndex);
- Check(eq, "JSGlobalObject::native_context should be a native context.",
+ Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
holder_reg, Operand(at));
pop(holder_reg); // Restore holder.
}
@@ -388,12 +388,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
push(holder_reg); // Temporarily save holder on the stack.
mov(holder_reg, at); // Move at to its holding place.
LoadRoot(at, Heap::kNullValueRootIndex);
- Check(ne, "JSGlobalProxy::context() should not be null.",
+ Check(ne, kJSGlobalProxyContextShouldNotBeNull,
holder_reg, Operand(at));
lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
LoadRoot(at, Heap::kNativeContextMapRootIndex);
- Check(eq, "JSGlobalObject::native_context should be a native context.",
+ Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
holder_reg, Operand(at));
// Restore at is not needed. at is reloaded below.
pop(holder_reg); // Restore holder.
@@ -2938,7 +2938,7 @@ void MacroAssembler::Allocate(int object_size,
// immediately below so this use of t9 does not cause difference with
// respect to register content between debug and release mode.
lw(t9, MemOperand(topaddr));
- Check(eq, "Unexpected allocation top", result, Operand(t9));
+ Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
}
// Load allocation limit into t9. Result already contains allocation top.
lw(t9, MemOperand(topaddr, limit - top));
@@ -3008,7 +3008,7 @@ void MacroAssembler::Allocate(Register object_size,
// immediately below so this use of t9 does not cause difference with
// respect to register content between debug and release mode.
lw(t9, MemOperand(topaddr));
- Check(eq, "Unexpected allocation top", result, Operand(t9));
+ Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
}
// Load allocation limit into t9. Result already contains allocation top.
lw(t9, MemOperand(topaddr, limit - top));
@@ -3028,7 +3028,7 @@ void MacroAssembler::Allocate(Register object_size,
// Update allocation top. result temporarily holds the new top.
if (emit_debug_code()) {
And(t9, scratch2, Operand(kObjectAlignmentMask));
- Check(eq, "Unaligned allocation in new space", t9, Operand(zero_reg));
+ Check(eq, kUnalignedAllocationInNewSpace, t9, Operand(zero_reg));
}
sw(scratch2, MemOperand(topaddr));
@@ -3050,7 +3050,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object,
// Check that the object un-allocated is below the current top.
li(scratch, Operand(new_space_allocation_top));
lw(scratch, MemOperand(scratch));
- Check(less, "Undo allocation of non allocated memory",
+ Check(less, kUndoAllocationOfNonAllocatedMemory,
object, Operand(scratch));
#endif
// Write the address of the object to un-allocate as the current top.
@@ -3303,7 +3303,7 @@ void MacroAssembler::CopyBytes(Register src,
bind(&word_loop);
if (emit_debug_code()) {
And(scratch, src, kPointerSize - 1);
- Assert(eq, "Expecting alignment for CopyBytes",
+ Assert(eq, kExpectingAlignmentForCopyBytes,
scratch, Operand(zero_reg));
}
Branch(&byte_loop, lt, length, Operand(kPointerSize));
@@ -4029,7 +4029,7 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
sw(s0, MemOperand(s3, kNextOffset));
if (emit_debug_code()) {
lw(a1, MemOperand(s3, kLevelOffset));
- Check(eq, "Unexpected level after return from api call", a1, Operand(s2));
+ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
}
Subu(s2, s2, Operand(1));
sw(s2, MemOperand(s3, kLevelOffset));
@@ -4383,10 +4383,10 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
// -----------------------------------------------------------------------------
// Debugging.
-void MacroAssembler::Assert(Condition cc, const char* msg,
+void MacroAssembler::Assert(Condition cc, BailoutReason reason,
Register rs, Operand rt) {
if (emit_debug_code())
- Check(cc, msg, rs, rt);
+ Check(cc, reason, rs, rt);
}
@@ -4394,7 +4394,7 @@ void MacroAssembler::AssertRegisterIsRoot(Register reg,
Heap::RootListIndex index) {
if (emit_debug_code()) {
LoadRoot(at, index);
- Check(eq, "Register did not match expected root", reg, Operand(at));
+ Check(eq, kRegisterDidNotMatchExpectedRoot, reg, Operand(at));
}
}
@@ -4411,24 +4411,24 @@ void MacroAssembler::AssertFastElements(Register elements) {
Branch(&ok, eq, elements, Operand(at));
LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
Branch(&ok, eq, elements, Operand(at));
- Abort("JSObject with fast elements map has slow elements");
+ Abort(kJSObjectWithFastElementsMapHasSlowElements);
bind(&ok);
pop(elements);
}
}
-void MacroAssembler::Check(Condition cc, const char* msg,
+void MacroAssembler::Check(Condition cc, BailoutReason reason,
Register rs, Operand rt) {
Label L;
Branch(&L, cc, rs, rt);
- Abort(msg);
+ Abort(reason);
// Will not return here.
bind(&L);
}
-void MacroAssembler::Abort(const char* msg) {
+void MacroAssembler::Abort(BailoutReason reason) {
Label abort_start;
bind(&abort_start);
// We want to pass the msg string like a smi to avoid GC
@@ -4436,6 +4436,7 @@ void MacroAssembler::Abort(const char* msg) {
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
+ const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
@@ -4579,7 +4580,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
Branch(&ok);
bind(&fail);
- Abort("Global functions must have initial map");
+ Abort(kGlobalFunctionsMustHaveInitialMap);
bind(&ok);
}
}
@@ -4862,7 +4863,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
andi(at, object, kSmiTagMask);
- Check(ne, "Operand is a smi", at, Operand(zero_reg));
+ Check(ne, kOperandIsASmi, at, Operand(zero_reg));
}
}
@@ -4871,7 +4872,7 @@ void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
andi(at, object, kSmiTagMask);
- Check(eq, "Operand is a smi", at, Operand(zero_reg));
+ Check(eq, kOperandIsASmi, at, Operand(zero_reg));
}
}
@@ -4880,11 +4881,11 @@ void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
And(t0, object, Operand(kSmiTagMask));
- Check(ne, "Operand is a smi and not a string", t0, Operand(zero_reg));
+ Check(ne, kOperandIsASmiAndNotAString, t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
- Check(lo, "Operand is not a string", object, Operand(FIRST_NONSTRING_TYPE));
+ Check(lo, kOperandIsNotAString, object, Operand(FIRST_NONSTRING_TYPE));
pop(object);
}
}
@@ -4894,11 +4895,11 @@ void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
And(t0, object, Operand(kSmiTagMask));
- Check(ne, "Operand is a smi and not a name", t0, Operand(zero_reg));
+ Check(ne, kOperandIsASmiAndNotAName, t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
- Check(le, "Operand is not a name", object, Operand(LAST_NAME_TYPE));
+ Check(le, kOperandIsNotAName, object, Operand(LAST_NAME_TYPE));
pop(object);
}
}
@@ -4906,11 +4907,11 @@ void MacroAssembler::AssertName(Register object) {
void MacroAssembler::AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- const char* message) {
+ BailoutReason reason) {
if (emit_debug_code()) {
ASSERT(!src.is(at));
LoadRoot(at, root_value_index);
- Check(eq, message, src, Operand(at));
+ Check(eq, reason, src, Operand(at));
}
}
@@ -5127,7 +5128,7 @@ void MacroAssembler::PatchRelocatedValue(Register li_location,
// At this point scratch is a lui(at, ...) instruction.
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, "The instruction to patch should be a lui.",
+ Check(eq, kTheInstructionToPatchShouldBeALui,
scratch, Operand(LUI));
lw(scratch, MemOperand(li_location));
}
@@ -5139,7 +5140,7 @@ void MacroAssembler::PatchRelocatedValue(Register li_location,
// scratch is now ori(at, ...).
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, "The instruction to patch should be an ori.",
+ Check(eq, kTheInstructionToPatchShouldBeAnOri,
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
@@ -5156,7 +5157,7 @@ void MacroAssembler::GetRelocatedValue(Register li_location,
lw(value, MemOperand(li_location));
if (emit_debug_code()) {
And(value, value, kOpcodeMask);
- Check(eq, "The instruction should be a lui.",
+ Check(eq, kTheInstructionShouldBeALui,
value, Operand(LUI));
lw(value, MemOperand(li_location));
}
@@ -5167,7 +5168,7 @@ void MacroAssembler::GetRelocatedValue(Register li_location,
lw(scratch, MemOperand(li_location, kInstrSize));
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, "The instruction should be an ori.",
+ Check(eq, kTheInstructionShouldBeAnOri,
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h
index bc3e7c48b4..ac37db2aaa 100644
--- a/deps/v8/src/mips/macro-assembler-mips.h
+++ b/deps/v8/src/mips/macro-assembler-mips.h
@@ -627,11 +627,11 @@ class MacroAssembler: public Assembler {
void MultiPushFPU(RegList regs);
void MultiPushReversedFPU(RegList regs);
- // Lower case push() for compatibility with arch-independent code.
void push(Register src) {
Addu(sp, sp, Operand(-kPointerSize));
sw(src, MemOperand(sp, 0));
}
+ void Push(Register src) { push(src); }
// Push a handle.
void Push(Handle<Object> handle);
@@ -676,11 +676,11 @@ class MacroAssembler: public Assembler {
void MultiPopFPU(RegList regs);
void MultiPopReversedFPU(RegList regs);
- // Lower case pop() for compatibility with arch-independent code.
void pop(Register dst) {
lw(dst, MemOperand(sp, 0));
Addu(sp, sp, Operand(kPointerSize));
}
+ void Pop(Register dst) { pop(dst); }
// Pop two registers. Pops rightmost register first (from lower address).
void Pop(Register src1, Register src2) {
@@ -1286,15 +1286,15 @@ class MacroAssembler: public Assembler {
// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cc, const char* msg, Register rs, Operand rt);
+ void Assert(Condition cc, BailoutReason reason, Register rs, Operand rt);
void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cc, const char* msg, Register rs, Operand rt);
+ void Check(Condition cc, BailoutReason reason, Register rs, Operand rt);
// Print a message to stdout and abort execution.
- void Abort(const char* msg);
+ void Abort(BailoutReason msg);
// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
@@ -1378,7 +1378,7 @@ class MacroAssembler: public Assembler {
// enabled via --debug-code.
void AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- const char* message);
+ BailoutReason reason);
// ---------------------------------------------------------------------------
// HeapNumber utilities.
diff --git a/deps/v8/src/object-observe.js b/deps/v8/src/object-observe.js
index a5c12bf009..f5e0d9d563 100644
--- a/deps/v8/src/object-observe.js
+++ b/deps/v8/src/object-observe.js
@@ -394,7 +394,10 @@ function ObjectGetNotifier(object) {
if (ObjectIsFrozen(object)) return null;
var objectInfo = objectInfoMap.get(object);
- if (IS_UNDEFINED(objectInfo)) objectInfo = CreateObjectInfo(object);
+ if (IS_UNDEFINED(objectInfo)) {
+ objectInfo = CreateObjectInfo(object);
+ %SetIsObserved(object);
+ }
if (IS_NULL(objectInfo.notifier)) {
objectInfo.notifier = { __proto__: notifierPrototype };
diff --git a/deps/v8/src/objects-debug.cc b/deps/v8/src/objects-debug.cc
index cb5f2b7900..e0cb8c9294 100644
--- a/deps/v8/src/objects-debug.cc
+++ b/deps/v8/src/objects-debug.cc
@@ -366,9 +366,12 @@ void Map::SharedMapVerify() {
}
-void Map::VerifyOmittedPrototypeChecks() {
- if (!FLAG_omit_prototype_checks_for_leaf_maps) return;
- if (HasTransitionArray() || is_dictionary_map()) {
+void Map::VerifyOmittedMapChecks() {
+ if (!FLAG_omit_map_checks_for_leaf_maps) return;
+ if (!is_stable() ||
+ is_deprecated() ||
+ HasTransitionArray() ||
+ is_dictionary_map()) {
CHECK_EQ(0, dependent_code()->number_of_entries(
DependentCode::kPrototypeCheckGroup));
}
@@ -1162,10 +1165,6 @@ static bool CheckOneBackPointer(Map* current_map, Object* target) {
bool TransitionArray::IsConsistentWithBackPointers(Map* current_map) {
- if (HasElementsTransition() &&
- !CheckOneBackPointer(current_map, elements_transition())) {
- return false;
- }
for (int i = 0; i < number_of_transitions(); ++i) {
if (!CheckOneBackPointer(current_map, GetTarget(i))) return false;
}
diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h
index ef30496963..169475791d 100644
--- a/deps/v8/src/objects-inl.h
+++ b/deps/v8/src/objects-inl.h
@@ -3563,6 +3563,7 @@ bool Map::is_shared() {
void Map::set_dictionary_map(bool value) {
+ if (value) mark_unstable();
set_bit_field3(DictionaryMap::update(bit_field3(), value));
}
@@ -3616,6 +3617,17 @@ bool Map::is_deprecated() {
}
+void Map::set_migration_target(bool value) {
+ set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
+}
+
+
+bool Map::is_migration_target() {
+ if (!FLAG_track_fields) return false;
+ return IsMigrationTarget::decode(bit_field3());
+}
+
+
void Map::freeze() {
set_bit_field3(IsFrozen::update(bit_field3(), true));
}
@@ -3626,6 +3638,16 @@ bool Map::is_frozen() {
}
+void Map::mark_unstable() {
+ set_bit_field3(IsUnstable::update(bit_field3(), true));
+}
+
+
+bool Map::is_stable() {
+ return !IsUnstable::decode(bit_field3());
+}
+
+
bool Map::has_code_cache() {
return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}
@@ -3657,21 +3679,17 @@ bool Map::CanBeDeprecated() {
void Map::NotifyLeafMapLayoutChange() {
- dependent_code()->DeoptimizeDependentCodeGroup(
- GetIsolate(),
- DependentCode::kPrototypeCheckGroup);
-}
-
-
-bool Map::CanOmitPrototypeChecks() {
- return !HasTransitionArray() && !is_dictionary_map() &&
- FLAG_omit_prototype_checks_for_leaf_maps;
+ if (is_stable()) {
+ mark_unstable();
+ dependent_code()->DeoptimizeDependentCodeGroup(
+ GetIsolate(),
+ DependentCode::kPrototypeCheckGroup);
+ }
}
bool Map::CanOmitMapChecks() {
- return !HasTransitionArray() && !is_dictionary_map() &&
- FLAG_omit_map_checks_for_leaf_maps;
+ return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
@@ -3804,7 +3822,6 @@ inline void Code::set_is_crankshafted(bool value) {
int Code::major_key() {
ASSERT(kind() == STUB ||
- kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC ||
kind() == COMPARE_NIL_IC ||
@@ -3819,7 +3836,6 @@ int Code::major_key() {
void Code::set_major_key(int major) {
ASSERT(kind() == STUB ||
- kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC ||
kind() == COMPARE_NIL_IC ||
@@ -4009,21 +4025,6 @@ void Code::set_check_type(CheckType value) {
}
-byte Code::unary_op_type() {
- ASSERT(is_unary_op_stub());
- return UnaryOpTypeField::decode(
- READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
-}
-
-
-void Code::set_unary_op_type(byte value) {
- ASSERT(is_unary_op_stub());
- int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
- int updated = UnaryOpTypeField::update(previous, value);
- WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
-}
-
-
byte Code::to_boolean_state() {
return extended_extra_ic_state();
}
@@ -4208,7 +4209,20 @@ void Map::InitializeDescriptors(DescriptorArray* descriptors) {
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
-SMI_ACCESSORS(Map, bit_field3, kBitField3Offset)
+
+
+void Map::set_bit_field3(uint32_t bits) {
+ // Ensure the upper 2 bits have the same value by sign extending it. This is
+ // necessary to be able to use the 31st bit.
+ int value = bits << 1;
+ WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
+}
+
+
+uint32_t Map::bit_field3() {
+ Object* value = READ_FIELD(this, kBitField3Offset);
+ return Smi::cast(value)->value();
+}
void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
@@ -4257,7 +4271,8 @@ bool Map::HasTransitionArray() {
Map* Map::elements_transition_map() {
- return transitions()->elements_transition();
+ int index = transitions()->Search(GetHeap()->elements_transition_symbol());
+ return transitions()->GetTarget(index);
}
@@ -4288,10 +4303,14 @@ Map* Map::GetTransition(int transition_index) {
MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
- MaybeObject* allow_elements = EnsureHasTransitionArray(this);
- if (allow_elements->IsFailure()) return allow_elements;
- transitions()->set_elements_transition(transitioned_map);
- return this;
+ TransitionArray* transitions;
+ MaybeObject* maybe_transitions = AddTransition(
+ GetHeap()->elements_transition_symbol(),
+ transitioned_map,
+ FULL_TRANSITION);
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
+ set_transitions(transitions);
+ return transitions;
}
@@ -4487,6 +4506,7 @@ ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
+BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
Script::CompilationType Script::compilation_type() {
return BooleanBit::get(flags(), kCompilationTypeBit) ?
diff --git a/deps/v8/src/objects-printer.cc b/deps/v8/src/objects-printer.cc
index 6b2a3f0d4f..87b2811e41 100644
--- a/deps/v8/src/objects-printer.cc
+++ b/deps/v8/src/objects-printer.cc
@@ -180,6 +180,12 @@ void HeapObject::HeapObjectPrint(FILE* out) {
case JS_FUNCTION_PROXY_TYPE:
JSFunctionProxy::cast(this)->JSFunctionProxyPrint(out);
break;
+ case JS_SET_TYPE:
+ JSSet::cast(this)->JSSetPrint(out);
+ break;
+ case JS_MAP_TYPE:
+ JSMap::cast(this)->JSMapPrint(out);
+ break;
case JS_WEAK_MAP_TYPE:
JSWeakMap::cast(this)->JSWeakMapPrint(out);
break;
@@ -488,7 +494,7 @@ void JSObject::JSObjectPrint(FILE* out) {
void JSModule::JSModulePrint(FILE* out) {
HeapObject::PrintHeader(out, "JSModule");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
PrintF(out, " - context = ");
context()->Print(out);
PrintF(out, " - scope_info = ");
@@ -561,6 +567,8 @@ static const char* TypeToString(InstanceType type) {
case CODE_TYPE: return "CODE";
case JS_ARRAY_TYPE: return "JS_ARRAY";
case JS_PROXY_TYPE: return "JS_PROXY";
+ case JS_SET_TYPE: return "JS_SET";
+ case JS_MAP_TYPE: return "JS_MAP";
case JS_WEAK_MAP_TYPE: return "JS_WEAK_MAP";
case JS_WEAK_SET_TYPE: return "JS_WEAK_SET";
case JS_REGEXP_TYPE: return "JS_REGEXP";
@@ -777,7 +785,7 @@ static const char* const weekdays[] = {
void JSDate::JSDatePrint(FILE* out) {
HeapObject::PrintHeader(out, "JSDate");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
PrintF(out, " - value = ");
value()->Print(out);
if (!year()->IsSmi()) {
@@ -797,7 +805,7 @@ void JSDate::JSDatePrint(FILE* out) {
void JSProxy::JSProxyPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSProxy");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
PrintF(out, " - handler = ");
handler()->Print(out);
PrintF(out, " - hash = ");
@@ -808,7 +816,7 @@ void JSProxy::JSProxyPrint(FILE* out) {
void JSFunctionProxy::JSFunctionProxyPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSFunctionProxy");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
PrintF(out, " - handler = ");
handler()->Print(out);
PrintF(out, " - call_trap = ");
@@ -819,9 +827,27 @@ void JSFunctionProxy::JSFunctionProxyPrint(FILE* out) {
}
+void JSSet::JSSetPrint(FILE* out) {
+ HeapObject::PrintHeader(out, "JSSet");
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - table = ");
+ table()->ShortPrint(out);
+ PrintF(out, "\n");
+}
+
+
+void JSMap::JSMapPrint(FILE* out) {
+ HeapObject::PrintHeader(out, "JSMap");
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - table = ");
+ table()->ShortPrint(out);
+ PrintF(out, "\n");
+}
+
+
void JSWeakMap::JSWeakMapPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSWeakMap");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
PrintF(out, " - table = ");
table()->ShortPrint(out);
PrintF(out, "\n");
@@ -830,7 +856,7 @@ void JSWeakMap::JSWeakMapPrint(FILE* out) {
void JSWeakSet::JSWeakSetPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSWeakSet");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
PrintF(out, " - table = ");
table()->ShortPrint(out);
PrintF(out, "\n");
@@ -839,8 +865,8 @@ void JSWeakSet::JSWeakSetPrint(FILE* out) {
void JSArrayBuffer::JSArrayBufferPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSArrayBuffer");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
- PrintF(out, " - backing_store = -0x%p\n", backing_store());
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - backing_store = %p\n", backing_store());
PrintF(out, " - byte_length = ");
byte_length()->ShortPrint(out);
PrintF(out, "\n");
@@ -878,7 +904,7 @@ void JSDataView::JSDataViewPrint(FILE* out) {
void JSFunction::JSFunctionPrint(FILE* out) {
HeapObject::PrintHeader(out, "Function");
- PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - map = %p\n", reinterpret_cast<void*>(map()));
PrintF(out, " - initial_map = ");
if (has_initial_map()) {
initial_map()->ShortPrint(out);
diff --git a/deps/v8/src/objects-visiting.h b/deps/v8/src/objects-visiting.h
index 32e457b869..21757377a4 100644
--- a/deps/v8/src/objects-visiting.h
+++ b/deps/v8/src/objects-visiting.h
@@ -141,7 +141,7 @@ class StaticVisitorBase : public AllStatic {
(base == kVisitJSObject));
ASSERT(IsAligned(object_size, kPointerSize));
ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
- ASSERT(object_size < Page::kMaxNonCodeHeapObjectSize);
+ ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
const VisitorId specialization = static_cast<VisitorId>(
base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc
index 7839faaddf..734bf40814 100644
--- a/deps/v8/src/objects.cc
+++ b/deps/v8/src/objects.cc
@@ -2486,8 +2486,7 @@ void Map::DeprecateTransitionTree() {
deprecate();
dependent_code()->DeoptimizeDependentCodeGroup(
GetIsolate(), DependentCode::kTransitionGroup);
- dependent_code()->DeoptimizeDependentCodeGroup(
- GetIsolate(), DependentCode::kPrototypeCheckGroup);
+ NotifyLeafMapLayoutChange();
}
@@ -2720,6 +2719,7 @@ MaybeObject* Map::GeneralizeRepresentation(int modify_index,
Handle<Map>(new_map);
return maybe_map;
}
+ new_map->set_migration_target(true);
}
new_map->set_owns_descriptors(true);
@@ -3949,7 +3949,7 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup,
Handle<Object> hresult;
if (!result->ToHandle(&hresult, isolate)) return result;
- if (FLAG_harmony_observation && map()->is_observed()) {
+ if (FLAG_harmony_observation && self->map()->is_observed()) {
if (lookup->IsTransition()) {
EnqueueChangeRecord(self, "new", name, old_value);
} else {
@@ -6495,6 +6495,7 @@ MaybeObject* Map::RawCopy(int instance_size) {
new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
new_bit_field3 = EnumLengthBits::update(new_bit_field3, kInvalidEnumCache);
new_bit_field3 = Deprecated::update(new_bit_field3, false);
+ new_bit_field3 = IsUnstable::update(new_bit_field3, false);
result->set_bit_field3(new_bit_field3);
return result;
}
@@ -6517,6 +6518,7 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
result->set_dictionary_map(true);
+ result->set_migration_target(false);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && result->is_shared()) {
@@ -7023,12 +7025,6 @@ class IntrusiveMapTransitionIterator {
return transition_array_->GetTarget(index);
}
- if (index == number_of_transitions &&
- transition_array_->HasElementsTransition()) {
- Map* elements_transition = transition_array_->elements_transition();
- *TransitionArrayHeader() = Smi::FromInt(index + 1);
- return elements_transition;
- }
*TransitionArrayHeader() = transition_array_->GetHeap()->fixed_array_map();
return NULL;
}
@@ -9145,18 +9141,10 @@ void Map::ClearNonLiveTransitions(Heap* heap) {
}
}
- if (t->HasElementsTransition() &&
- ClearBackPointer(heap, t->elements_transition())) {
- if (t->elements_transition()->instance_descriptors() == descriptors) {
- descriptors_owner_died = true;
- }
- t->ClearElementsTransition();
- } else {
- // If there are no transitions to be cleared, return.
- // TODO(verwaest) Should be an assert, otherwise back pointers are not
- // properly cleared.
- if (transition_index == t->number_of_transitions()) return;
- }
+ // If there are no transitions to be cleared, return.
+ // TODO(verwaest) Should be an assert, otherwise back pointers are not
+ // properly cleared.
+ if (transition_index == t->number_of_transitions()) return;
int number_of_own_descriptors = NumberOfOwnDescriptors();
@@ -9818,7 +9806,7 @@ void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
}
-void SharedFunctionInfo::DisableOptimization(const char* reason) {
+void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
// Disable optimization for the shared function info and mark the
// code as non-optimizable. The marker on the shared function info
// is there because we flush non-optimized code thereby loosing the
@@ -9836,7 +9824,7 @@ void SharedFunctionInfo::DisableOptimization(const char* reason) {
if (FLAG_trace_opt) {
PrintF("[disabled optimization for ");
ShortPrint();
- PrintF(", reason: %s]\n", reason);
+ PrintF(", reason: %s]\n", GetBailoutReason(reason));
}
}
@@ -10807,18 +10795,17 @@ void Code::Disassemble(const char* name, FILE* out) {
// If there is no back edge table, the "table start" will be at or after
// (due to alignment) the end of the instruction stream.
if (static_cast<int>(offset) < instruction_size()) {
- Address back_edge_cursor = instruction_start() + offset;
- uint32_t table_length = Memory::uint32_at(back_edge_cursor);
- PrintF(out, "Back edges (size = %u)\n", table_length);
+ FullCodeGenerator::BackEdgeTableIterator back_edges(this);
+
+ PrintF(out, "Back edges (size = %u)\n", back_edges.table_length());
PrintF(out, "ast_id pc_offset loop_depth\n");
- for (uint32_t i = 0; i < table_length; ++i) {
- uint32_t ast_id = Memory::uint32_at(back_edge_cursor);
- uint32_t pc_offset = Memory::uint32_at(back_edge_cursor + kIntSize);
- uint32_t loop_depth = Memory::uint32_at(back_edge_cursor +
- 2 * kIntSize);
- PrintF(out, "%6u %9u %10u\n", ast_id, pc_offset, loop_depth);
- back_edge_cursor += FullCodeGenerator::kBackEdgeEntrySize;
+
+ for ( ; !back_edges.Done(); back_edges.Next()) {
+ PrintF(out, "%6d %9u %10u\n", back_edges.ast_id().ToInt(),
+ back_edges.pc_offset(),
+ back_edges.loop_depth());
}
+
PrintF(out, "\n");
}
#ifdef OBJECT_PRINT
@@ -15978,4 +15965,15 @@ void PropertyCell::AddDependentCode(Handle<Code> code) {
}
+const char* GetBailoutReason(BailoutReason reason) {
+ ASSERT(reason < kLastErrorMessage);
+#define ERROR_MESSAGES_TEXTS(C, T) T,
+ static const char* error_messages_[] = {
+ ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
+ };
+#undef ERROR_MESSAGES_TEXTS
+ return error_messages_[reason];
+}
+
+
} } // namespace v8::internal
diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h
index 388187445f..b2dc1816f8 100644
--- a/deps/v8/src/objects.h
+++ b/deps/v8/src/objects.h
@@ -1046,7 +1046,287 @@ class MaybeObject BASE_EMBEDDED {
V(AccessCheckNeeded) \
V(Cell) \
V(PropertyCell) \
- V(ObjectHashTable) \
+ V(ObjectHashTable)
+
+
+#define ERROR_MESSAGES_LIST(V) \
+ V(kNoReason, "no reason") \
+ \
+ V(k32BitValueInRegisterIsNotZeroExtended, \
+ "32 bit value in register is not zero-extended") \
+ V(kAlignmentMarkerExpected, "alignment marker expected") \
+ V(kAllocationIsNotDoubleAligned, "Allocation is not double aligned") \
+ V(kAPICallReturnedInvalidObject, "API call returned invalid object") \
+ V(kArgumentsObjectValueInATestContext, \
+ "arguments object value in a test context") \
+ V(kArrayBoilerplateCreationFailed, "array boilerplate creation failed") \
+ V(kArrayIndexConstantValueTooBig, "array index constant value too big") \
+ V(kAssignmentToArguments, "assignment to arguments") \
+ V(kAssignmentToLetVariableBeforeInitialization, \
+ "assignment to let variable before initialization") \
+ V(kAssignmentToLOOKUPVariable, "assignment to LOOKUP variable") \
+ V(kAssignmentToParameterFunctionUsesArgumentsObject, \
+ "assignment to parameter, function uses arguments object") \
+ V(kAssignmentToParameterInArgumentsObject, \
+ "assignment to parameter in arguments object") \
+ V(kAttemptToUseUndefinedCache, "Attempt to use undefined cache") \
+ V(kBadValueContextForArgumentsObjectValue, \
+ "bad value context for arguments object value") \
+ V(kBadValueContextForArgumentsValue, \
+ "bad value context for arguments value") \
+ V(kBailedOutDueToDependentMap, "bailed out due to dependent map") \
+ V(kBailoutWasNotPrepared, "bailout was not prepared") \
+ V(kBinaryStubGenerateFloatingPointCode, \
+ "BinaryStub_GenerateFloatingPointCode") \
+ V(kBothRegistersWereSmisInSelectNonSmi, \
+ "Both registers were smis in SelectNonSmi") \
+ V(kCallToAJavaScriptRuntimeFunction, \
+ "call to a JavaScript runtime function") \
+ V(kCannotTranslatePositionInChangedArea, \
+ "Cannot translate position in changed area") \
+ V(kCodeGenerationFailed, "code generation failed") \
+ V(kCodeObjectNotProperlyPatched, "code object not properly patched") \
+ V(kCompoundAssignmentToLookupSlot, "compound assignment to lookup slot") \
+ V(kContextAllocatedArguments, "context-allocated arguments") \
+ V(kDebuggerIsActive, "debugger is active") \
+ V(kDebuggerStatement, "DebuggerStatement") \
+ V(kDeclarationInCatchContext, "Declaration in catch context") \
+ V(kDeclarationInWithContext, "Declaration in with context") \
+ V(kDefaultNaNModeNotSet, "Default NaN mode not set") \
+ V(kDeleteWithGlobalVariable, "delete with global variable") \
+ V(kDeleteWithNonGlobalVariable, "delete with non-global variable") \
+ V(kDestinationOfCopyNotAligned, "Destination of copy not aligned") \
+ V(kDontDeleteCellsCannotContainTheHole, \
+ "DontDelete cells can't contain the hole") \
+ V(kDoPushArgumentNotImplementedForDoubleType, \
+ "DoPushArgument not implemented for double type") \
+ V(kEmitLoadRegisterUnsupportedDoubleImmediate, \
+ "EmitLoadRegister: Unsupported double immediate") \
+ V(kEval, "eval") \
+ V(kExpected0AsASmiSentinel, "Expected 0 as a Smi sentinel") \
+ V(kExpectedAlignmentMarker, "expected alignment marker") \
+ V(kExpectedPropertyCellInRegisterA2, \
+ "Expected property cell in register a2") \
+ V(kExpectedPropertyCellInRegisterEbx, \
+ "Expected property cell in register ebx") \
+ V(kExpectedPropertyCellInRegisterRbx, \
+ "Expected property cell in register rbx") \
+ V(kExpectingAlignmentForCopyBytes, \
+ "Expecting alignment for CopyBytes") \
+ V(kExternalStringExpectedButNotFound, \
+ "external string expected, but not found") \
+ V(kFailedBailedOutLastTime, "failed/bailed out last time") \
+ V(kForInStatementIsNotFastCase, "ForInStatement is not fast case") \
+ V(kForInStatementOptimizationIsDisabled, \
+ "ForInStatement optimization is disabled") \
+ V(kForInStatementWithNonLocalEachVariable, \
+ "ForInStatement with non-local each variable") \
+ V(kForOfStatement, "ForOfStatement") \
+ V(kFrameIsExpectedToBeAligned, "frame is expected to be aligned") \
+ V(kFunctionCallsEval, "function calls eval") \
+ V(kFunctionIsAGenerator, "function is a generator") \
+ V(kFunctionWithIllegalRedeclaration, "function with illegal redeclaration") \
+ V(kGeneratedCodeIsTooLarge, "Generated code is too large") \
+ V(kGeneratorFailedToResume, "Generator failed to resume") \
+ V(kGenerator, "generator") \
+ V(kGlobalFunctionsMustHaveInitialMap, \
+ "Global functions must have initial map") \
+ V(kHeapNumberMapRegisterClobbered, "HeapNumberMap register clobbered") \
+ V(kImproperObjectOnPrototypeChainForStore, \
+ "improper object on prototype chain for store") \
+ V(kIndexIsNegative, "Index is negative") \
+ V(kIndexIsTooLarge, "Index is too large") \
+ V(kInlinedRuntimeFunctionClassOf, "inlined runtime function: ClassOf") \
+ V(kInlinedRuntimeFunctionFastAsciiArrayJoin, \
+ "inlined runtime function: FastAsciiArrayJoin") \
+ V(kInlinedRuntimeFunctionGeneratorNext, \
+ "inlined runtime function: GeneratorNext") \
+ V(kInlinedRuntimeFunctionGeneratorThrow, \
+ "inlined runtime function: GeneratorThrow") \
+ V(kInlinedRuntimeFunctionGetFromCache, \
+ "inlined runtime function: GetFromCache") \
+ V(kInlinedRuntimeFunctionIsNonNegativeSmi, \
+ "inlined runtime function: IsNonNegativeSmi") \
+ V(kInlinedRuntimeFunctionIsRegExpEquivalent, \
+ "inlined runtime function: IsRegExpEquivalent") \
+ V(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf, \
+ "inlined runtime function: IsStringWrapperSafeForDefaultValueOf") \
+ V(kInliningBailedOut, "inlining bailed out") \
+ V(kInputGPRIsExpectedToHaveUpper32Cleared, \
+ "input GPR is expected to have upper32 cleared") \
+ V(kInstanceofStubUnexpectedCallSiteCacheCheck, \
+ "InstanceofStub unexpected call site cache (check)") \
+ V(kInstanceofStubUnexpectedCallSiteCacheCmp1, \
+ "InstanceofStub unexpected call site cache (cmp 1)") \
+ V(kInstanceofStubUnexpectedCallSiteCacheCmp2, \
+ "InstanceofStub unexpected call site cache (cmp 2)") \
+ V(kInstanceofStubUnexpectedCallSiteCacheMov, \
+ "InstanceofStub unexpected call site cache (mov)") \
+ V(kInteger32ToSmiFieldWritingToNonSmiLocation, \
+ "Integer32ToSmiField writing to non-smi location") \
+ V(kInvalidCaptureReferenced, "Invalid capture referenced") \
+ V(kInvalidElementsKindForInternalArrayOrInternalPackedArray, \
+ "Invalid ElementsKind for InternalArray or InternalPackedArray") \
+ V(kInvalidHandleScopeLevel, "Invalid HandleScope level") \
+ V(kInvalidLeftHandSideInAssignment, "invalid left-hand side in assignment") \
+ V(kInvalidLhsInCompoundAssignment, "invalid lhs in compound assignment") \
+ V(kInvalidLhsInCountOperation, "invalid lhs in count operation") \
+ V(kInvalidMinLength, "Invalid min_length") \
+ V(kJSGlobalObjectNativeContextShouldBeANativeContext, \
+ "JSGlobalObject::native_context should be a native context") \
+ V(kJSGlobalProxyContextShouldNotBeNull, \
+ "JSGlobalProxy::context() should not be null") \
+ V(kJSObjectWithFastElementsMapHasSlowElements, \
+ "JSObject with fast elements map has slow elements") \
+ V(kLetBindingReInitialization, "Let binding re-initialization") \
+ V(kLiveBytesCountOverflowChunkSize, "Live Bytes Count overflow chunk size") \
+ V(kLiveEditFrameDroppingIsNotSupportedOnArm, \
+ "LiveEdit frame dropping is not supported on arm") \
+ V(kLiveEditFrameDroppingIsNotSupportedOnMips, \
+ "LiveEdit frame dropping is not supported on mips") \
+ V(kLiveEdit, "LiveEdit") \
+ V(kLookupVariableInCountOperation, \
+ "lookup variable in count operation") \
+ V(kMapIsNoLongerInEax, "Map is no longer in eax") \
+ V(kNoCasesLeft, "no cases left") \
+ V(kNoEmptyArraysHereInEmitFastAsciiArrayJoin, \
+ "No empty arrays here in EmitFastAsciiArrayJoin") \
+ V(kNonInitializerAssignmentToConst, \
+ "non-initializer assignment to const") \
+ V(kNonSmiIndex, "Non-smi index") \
+ V(kNonSmiKeyInArrayLiteral, "Non-smi key in array literal") \
+ V(kNonSmiValue, "Non-smi value") \
+ V(kNotEnoughVirtualRegistersForValues, \
+ "not enough virtual registers for values") \
+ V(kNotEnoughVirtualRegistersRegalloc, \
+ "not enough virtual registers (regalloc)") \
+ V(kObjectFoundInSmiOnlyArray, "object found in smi-only array") \
+ V(kObjectLiteralWithComplexProperty, \
+ "Object literal with complex property") \
+ V(kOddballInStringTableIsNotUndefinedOrTheHole, \
+ "oddball in string table is not undefined or the hole") \
+ V(kOperandIsASmiAndNotAName, "Operand is a smi and not a name") \
+ V(kOperandIsASmiAndNotAString, "Operand is a smi and not a string") \
+ V(kOperandIsASmi, "Operand is a smi") \
+ V(kOperandIsNotAName, "Operand is not a name") \
+ V(kOperandIsNotANumber, "Operand is not a number") \
+ V(kOperandIsNotASmi, "Operand is not a smi") \
+ V(kOperandIsNotAString, "Operand is not a string") \
+ V(kOperandIsNotSmi, "Operand is not smi") \
+ V(kOperandNotANumber, "Operand not a number") \
+ V(kOptimizedTooManyTimes, "optimized too many times") \
+ V(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister, \
+ "Out of virtual registers while trying to allocate temp register") \
+ V(kParseScopeError, "parse/scope error") \
+ V(kPossibleDirectCallToEval, "possible direct call to eval") \
+ V(kPropertyAllocationCountFailed, "Property allocation count failed") \
+ V(kReceivedInvalidReturnAddress, "Received invalid return address") \
+ V(kReferenceToAVariableWhichRequiresDynamicLookup, \
+ "reference to a variable which requires dynamic lookup") \
+ V(kReferenceToGlobalLexicalVariable, \
+ "reference to global lexical variable") \
+ V(kReferenceToUninitializedVariable, "reference to uninitialized variable") \
+ V(kRegisterDidNotMatchExpectedRoot, "Register did not match expected root") \
+ V(kRegisterWasClobbered, "register was clobbered") \
+ V(kScopedBlock, "ScopedBlock") \
+ V(kSharedFunctionInfoLiteral, "SharedFunctionInfoLiteral") \
+ V(kSmiAdditionOverflow, "Smi addition overflow") \
+ V(kSmiSubtractionOverflow, "Smi subtraction overflow") \
+ V(kStackFrameTypesMustMatch, "stack frame types must match") \
+ V(kSwitchStatementMixedOrNonLiteralSwitchLabels, \
+ "SwitchStatement: mixed or non-literal switch labels") \
+ V(kSwitchStatementTooManyClauses, "SwitchStatement: too many clauses") \
+ V(kTheInstructionShouldBeALui, "The instruction should be a lui") \
+ V(kTheInstructionShouldBeAnOri, "The instruction should be an ori") \
+ V(kTheInstructionToPatchShouldBeALoadFromPc, \
+ "The instruction to patch should be a load from pc") \
+ V(kTheInstructionToPatchShouldBeALui, \
+ "The instruction to patch should be a lui") \
+ V(kTheInstructionToPatchShouldBeAnOri, \
+ "The instruction to patch should be an ori") \
+ V(kTooManyParametersLocals, "too many parameters/locals") \
+ V(kTooManyParameters, "too many parameters") \
+ V(kTooManySpillSlotsNeededForOSR, "Too many spill slots needed for OSR") \
+ V(kToOperandIsDoubleRegisterUnimplemented, \
+ "ToOperand IsDoubleRegister unimplemented") \
+ V(kToOperandUnsupportedDoubleImmediate, \
+ "ToOperand Unsupported double immediate") \
+ V(kTryCatchStatement, "TryCatchStatement") \
+ V(kTryFinallyStatement, "TryFinallyStatement") \
+ V(kUnableToEncodeValueAsSmi, "Unable to encode value as smi") \
+ V(kUnalignedAllocationInNewSpace, "Unaligned allocation in new space") \
+ V(kUndefinedValueNotLoaded, "Undefined value not loaded") \
+ V(kUndoAllocationOfNonAllocatedMemory, \
+ "Undo allocation of non allocated memory") \
+ V(kUnexpectedAllocationTop, "Unexpected allocation top") \
+ V(kUnexpectedElementsKindInArrayConstructor, \
+ "Unexpected ElementsKind in array constructor") \
+ V(kUnexpectedFallthroughFromCharCodeAtSlowCase, \
+ "Unexpected fallthrough from CharCodeAt slow case") \
+ V(kUnexpectedFallthroughFromCharFromCodeSlowCase, \
+ "Unexpected fallthrough from CharFromCode slow case") \
+ V(kUnexpectedFallThroughFromStringComparison, \
+ "Unexpected fall-through from string comparison") \
+ V(kUnexpectedFallThroughInBinaryStubGenerateFloatingPointCode, \
+ "Unexpected fall-through in BinaryStub_GenerateFloatingPointCode") \
+ V(kUnexpectedFallthroughToCharCodeAtSlowCase, \
+ "Unexpected fallthrough to CharCodeAt slow case") \
+ V(kUnexpectedFallthroughToCharFromCodeSlowCase, \
+ "Unexpected fallthrough to CharFromCode slow case") \
+ V(kUnexpectedFPUStackDepthAfterInstruction, \
+ "Unexpected FPU stack depth after instruction") \
+ V(kUnexpectedInitialMapForArrayFunction1, \
+ "Unexpected initial map for Array function (1)") \
+ V(kUnexpectedInitialMapForArrayFunction2, \
+ "Unexpected initial map for Array function (2)") \
+ V(kUnexpectedInitialMapForArrayFunction, \
+ "Unexpected initial map for Array function") \
+ V(kUnexpectedInitialMapForInternalArrayFunction, \
+ "Unexpected initial map for InternalArray function") \
+ V(kUnexpectedLevelAfterReturnFromApiCall, \
+ "Unexpected level after return from api call") \
+ V(kUnexpectedNumberOfPreAllocatedPropertyFields, \
+ "Unexpected number of pre-allocated property fields") \
+ V(kUnexpectedStringFunction, "Unexpected String function") \
+ V(kUnexpectedStringType, "Unexpected string type") \
+ V(kUnexpectedStringWrapperInstanceSize, \
+ "Unexpected string wrapper instance size") \
+ V(kUnexpectedTypeForRegExpDataFixedArrayExpected, \
+ "Unexpected type for RegExp data, FixedArray expected") \
+ V(kUnexpectedUnusedPropertiesOfStringWrapper, \
+ "Unexpected unused properties of string wrapper") \
+ V(kUninitializedKSmiConstantRegister, "Uninitialized kSmiConstantRegister") \
+ V(kUnknown, "unknown") \
+ V(kUnsupportedConstCompoundAssignment, \
+ "unsupported const compound assignment") \
+ V(kUnsupportedCountOperationWithConst, \
+ "unsupported count operation with const") \
+ V(kUnsupportedDoubleImmediate, "unsupported double immediate") \
+ V(kUnsupportedLetCompoundAssignment, "unsupported let compound assignment") \
+ V(kUnsupportedLookupSlotInDeclaration, \
+ "unsupported lookup slot in declaration") \
+ V(kUnsupportedNonPrimitiveCompare, "Unsupported non-primitive compare") \
+ V(kUnsupportedPhiUseOfArguments, "Unsupported phi use of arguments") \
+ V(kUnsupportedPhiUseOfConstVariable, \
+ "Unsupported phi use of const variable") \
+ V(kUnsupportedTaggedImmediate, "unsupported tagged immediate") \
+ V(kVariableResolvedToWithContext, "Variable resolved to with context") \
+ V(kWeShouldNotHaveAnEmptyLexicalContext, \
+ "we should not have an empty lexical context") \
+ V(kWithStatement, "WithStatement") \
+ V(kWrongAddressOrValuePassedToRecordWrite, \
+ "Wrong address or value passed to RecordWrite")
+
+
+#define ERROR_MESSAGES_CONSTANTS(C, T) C,
+enum BailoutReason {
+ ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
+ kLastErrorMessage
+};
+#undef ERROR_MESSAGES_CONSTANTS
+
+
+const char* GetBailoutReason(BailoutReason reason);
// Object is the abstract superclass for all classes in the
@@ -4501,7 +4781,6 @@ class Code: public HeapObject {
V(KEYED_CALL_IC) \
V(STORE_IC) \
V(KEYED_STORE_IC) \
- V(UNARY_OP_IC) \
V(BINARY_OP_IC) \
V(COMPARE_IC) \
V(COMPARE_NIL_IC) \
@@ -4620,8 +4899,7 @@ class Code: public HeapObject {
// TODO(danno): This is a bit of a hack right now since there are still
// clients of this API that pass "extra" values in for argc. These clients
// should be retrofitted to used ExtendedExtraICState.
- return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC ||
- kind == UNARY_OP_IC;
+ return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC;
}
inline StubType type(); // Only valid for monomorphic IC stubs.
@@ -4636,7 +4914,6 @@ class Code: public HeapObject {
inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
inline bool is_call_stub() { return kind() == CALL_IC; }
inline bool is_keyed_call_stub() { return kind() == KEYED_CALL_IC; }
- inline bool is_unary_op_stub() { return kind() == UNARY_OP_IC; }
inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; }
inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; }
inline bool is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
@@ -4710,10 +4987,6 @@ class Code: public HeapObject {
inline CheckType check_type();
inline void set_check_type(CheckType value);
- // [type-recording unary op type]: For kind UNARY_OP_IC.
- inline byte unary_op_type();
- inline void set_unary_op_type(byte value);
-
// [to_boolean_foo]: For kind TO_BOOLEAN_IC tells what state the stub is in.
inline byte to_boolean_state();
@@ -4952,9 +5225,6 @@ class Code: public HeapObject {
// KindSpecificFlags1 layout (STUB and OPTIMIZED_FUNCTION)
static const int kStackSlotsFirstBit = 0;
static const int kStackSlotsBitCount = 24;
- static const int kUnaryOpTypeFirstBit =
- kStackSlotsFirstBit + kStackSlotsBitCount;
- static const int kUnaryOpTypeBitCount = 3;
static const int kHasFunctionCacheFirstBit =
kStackSlotsFirstBit + kStackSlotsBitCount;
static const int kHasFunctionCacheBitCount = 1;
@@ -4963,15 +5233,12 @@ class Code: public HeapObject {
static const int kMarkedForDeoptimizationBitCount = 1;
STATIC_ASSERT(kStackSlotsFirstBit + kStackSlotsBitCount <= 32);
- STATIC_ASSERT(kUnaryOpTypeFirstBit + kUnaryOpTypeBitCount <= 32);
STATIC_ASSERT(kHasFunctionCacheFirstBit + kHasFunctionCacheBitCount <= 32);
STATIC_ASSERT(kMarkedForDeoptimizationFirstBit +
kMarkedForDeoptimizationBitCount <= 32);
class StackSlotsField: public BitField<int,
kStackSlotsFirstBit, kStackSlotsBitCount> {}; // NOLINT
- class UnaryOpTypeField: public BitField<int,
- kUnaryOpTypeFirstBit, kUnaryOpTypeBitCount> {}; // NOLINT
class HasFunctionCacheField: public BitField<bool,
kHasFunctionCacheFirstBit, kHasFunctionCacheBitCount> {}; // NOLINT
class MarkedForDeoptimizationField: public BitField<bool,
@@ -5176,8 +5443,8 @@ class Map: public HeapObject {
inline void set_bit_field2(byte value);
// Bit field 3.
- inline int bit_field3();
- inline void set_bit_field3(int value);
+ inline uint32_t bit_field3();
+ inline void set_bit_field3(uint32_t bits);
class EnumLengthBits: public BitField<int, 0, 11> {};
class NumberOfOwnDescriptorsBits: public BitField<int, 11, 11> {};
@@ -5188,6 +5455,8 @@ class Map: public HeapObject {
class IsObserved: public BitField<bool, 26, 1> {};
class Deprecated: public BitField<bool, 27, 1> {};
class IsFrozen: public BitField<bool, 28, 1> {};
+ class IsUnstable: public BitField<bool, 29, 1> {};
+ class IsMigrationTarget: public BitField<bool, 30, 1> {};
// Tells whether the object in the prototype property will be used
// for instances created from this function. If the prototype
@@ -5492,6 +5761,10 @@ class Map: public HeapObject {
inline void set_is_observed(bool is_observed);
inline void freeze();
inline bool is_frozen();
+ inline void mark_unstable();
+ inline bool is_stable();
+ inline void set_migration_target(bool value);
+ inline bool is_migration_target();
inline void deprecate();
inline bool is_deprecated();
inline bool CanBeDeprecated();
@@ -5638,7 +5911,6 @@ class Map: public HeapObject {
// the descriptor array.
inline void NotifyLeafMapLayoutChange();
- inline bool CanOmitPrototypeChecks();
inline bool CanOmitMapChecks();
void AddDependentCompilationInfo(DependentCode::DependencyGroup group,
@@ -5655,7 +5927,7 @@ class Map: public HeapObject {
#ifdef VERIFY_HEAP
void SharedMapVerify();
- void VerifyOmittedPrototypeChecks();
+ void VerifyOmittedMapChecks();
#endif
inline int visitor_id();
@@ -5873,6 +6145,12 @@ class Script: public Struct {
inline CompilationState compilation_state();
inline void set_compilation_state(CompilationState state);
+ // [is_shared_cross_origin]: An opaque boolean set by the embedder via
+ // ScriptOrigin, and used by the embedder to make decisions about the
+ // script's level of privilege. V8 just passes this through. Encoded in
+ // the 'flags' field.
+ DECL_BOOLEAN_ACCESSORS(is_shared_cross_origin)
+
static inline Script* cast(Object* obj);
// If script source is an external string, check that the underlying
@@ -5904,6 +6182,7 @@ class Script: public Struct {
// Bit positions in the flags field.
static const int kCompilationTypeBit = 0;
static const int kCompilationStateBit = 1;
+ static const int kIsSharedCrossOriginBit = 2;
DISALLOW_IMPLICIT_CONSTRUCTORS(Script);
};
@@ -6295,7 +6574,7 @@ class SharedFunctionInfo: public HeapObject {
// Disable (further) attempted optimization of all functions sharing this
// shared function info.
- void DisableOptimization(const char* reason);
+ void DisableOptimization(BailoutReason reason);
// Lookup the bailout ID and ASSERT that it exists in the non-optimized
// code, returns whether it asserted (i.e., always true if assertions are
@@ -9777,6 +10056,7 @@ class BreakPointInfo: public Struct {
V(kHandleScope, "handlescope", "(Handle scope)") \
V(kBuiltins, "builtins", "(Builtins)") \
V(kGlobalHandles, "globalhandles", "(Global handles)") \
+ V(kEternalHandles, "eternalhandles", "(Eternal handles)") \
V(kThreadManager, "threadmanager", "(Thread manager)") \
V(kExtensions, "Extensions", "(Extensions)")
diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc
index df568ef1bb..4947790395 100644
--- a/deps/v8/src/parser.cc
+++ b/deps/v8/src/parser.cc
@@ -3197,6 +3197,20 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
factory()->NewNumberLiteral(1),
position);
}
+ // The same idea for '-foo' => 'foo*(-1)'.
+ if (op == Token::SUB) {
+ return factory()->NewBinaryOperation(Token::MUL,
+ expression,
+ factory()->NewNumberLiteral(-1),
+ position);
+ }
+ // ...and one more time for '~foo' => 'foo^(~0)'.
+ if (op == Token::BIT_NOT) {
+ return factory()->NewBinaryOperation(Token::BIT_XOR,
+ expression,
+ factory()->NewNumberLiteral(~0),
+ position);
+ }
return factory()->NewUnaryOperation(op, expression, position);
diff --git a/deps/v8/src/platform-linux.cc b/deps/v8/src/platform-linux.cc
index 5c252bbf88..885683398e 100644
--- a/deps/v8/src/platform-linux.cc
+++ b/deps/v8/src/platform-linux.cc
@@ -239,7 +239,8 @@ bool OS::ArmUsingHardFloat() {
#else
#if defined(__ARM_PCS_VFP)
return true;
-#elif defined(__ARM_PCS) || defined(__SOFTFP) || !defined(__VFP_FP__)
+#elif defined(__ARM_PCS) || defined(__SOFTFP__) || defined(__SOFTFP) || \
+ !defined(__VFP_FP__)
return false;
#else
#error "Your version of GCC does not report the FP ABI compiled for." \
diff --git a/deps/v8/src/profile-generator.cc b/deps/v8/src/profile-generator.cc
index 4e2e38988a..e772a54647 100644
--- a/deps/v8/src/profile-generator.cc
+++ b/deps/v8/src/profile-generator.cc
@@ -376,8 +376,8 @@ CpuProfile::CpuProfile(const char* title, unsigned uid, bool record_samples)
: title_(title),
uid_(uid),
record_samples_(record_samples),
- start_time_ms_(OS::TimeCurrentMillis()),
- end_time_ms_(0) {
+ start_time_us_(OS::Ticks()),
+ end_time_us_(0) {
}
@@ -388,13 +388,13 @@ void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
void CpuProfile::CalculateTotalTicksAndSamplingRate() {
- end_time_ms_ = OS::TimeCurrentMillis();
+ end_time_us_ = OS::Ticks();
top_down_.CalculateTotalTicks();
- double duration = end_time_ms_ - start_time_ms_;
- if (duration < 1) duration = 1;
+ double duration_ms = (end_time_us_ - start_time_us_) / 1000.;
+ if (duration_ms < 1) duration_ms = 1;
unsigned ticks = top_down_.root()->total_ticks();
- double rate = ticks / duration;
+ double rate = ticks / duration_ms;
top_down_.SetTickRatePerMs(rate);
}
diff --git a/deps/v8/src/profile-generator.h b/deps/v8/src/profile-generator.h
index 7861ccd817..0cc397ed9b 100644
--- a/deps/v8/src/profile-generator.h
+++ b/deps/v8/src/profile-generator.h
@@ -209,12 +209,15 @@ class CpuProfile {
void AddPath(const Vector<CodeEntry*>& path);
void CalculateTotalTicksAndSamplingRate();
- INLINE(const char* title() const) { return title_; }
- INLINE(unsigned uid() const) { return uid_; }
- INLINE(const ProfileTree* top_down() const) { return &top_down_; }
+ const char* title() const { return title_; }
+ unsigned uid() const { return uid_; }
+ const ProfileTree* top_down() const { return &top_down_; }
- INLINE(int samples_count() const) { return samples_.length(); }
- INLINE(ProfileNode* sample(int index) const) { return samples_.at(index); }
+ int samples_count() const { return samples_.length(); }
+ ProfileNode* sample(int index) const { return samples_.at(index); }
+
+ int64_t start_time_us() const { return start_time_us_; }
+ int64_t end_time_us() const { return end_time_us_; }
void UpdateTicksScale();
@@ -225,8 +228,8 @@ class CpuProfile {
const char* title_;
unsigned uid_;
bool record_samples_;
- double start_time_ms_;
- double end_time_ms_;
+ int64_t start_time_us_;
+ int64_t end_time_us_;
List<ProfileNode*> samples_;
ProfileTree top_down_;
diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc
index ed3527fa92..0916b93989 100644
--- a/deps/v8/src/runtime.cc
+++ b/deps/v8/src/runtime.cc
@@ -66,6 +66,23 @@
#include "v8threads.h"
#include "vm-state-inl.h"
+#ifdef V8_I18N_SUPPORT
+#include "i18n.h"
+#include "unicode/brkiter.h"
+#include "unicode/calendar.h"
+#include "unicode/coll.h"
+#include "unicode/datefmt.h"
+#include "unicode/dtfmtsym.h"
+#include "unicode/dtptngen.h"
+#include "unicode/locid.h"
+#include "unicode/numfmt.h"
+#include "unicode/numsys.h"
+#include "unicode/smpdtfmt.h"
+#include "unicode/timezone.h"
+#include "unicode/uloc.h"
+#include "unicode/uversion.h"
+#endif
+
#ifndef _STLP_VENDOR_CSTD
// STLPort doesn't import fpclassify and isless into the std namespace.
using std::fpclassify;
@@ -680,7 +697,9 @@ void Runtime::FreeArrayBuffer(Isolate* isolate,
isolate->heap()->AdjustAmountOfExternalAllocatedMemory(
-static_cast<intptr_t>(allocated_length));
CHECK(V8::ArrayBufferAllocator() != NULL);
- V8::ArrayBufferAllocator()->Free(phantom_array_buffer->backing_store());
+ V8::ArrayBufferAllocator()->Free(
+ phantom_array_buffer->backing_store(),
+ allocated_length);
}
@@ -712,13 +731,18 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
bool Runtime::SetupArrayBufferAllocatingData(
Isolate* isolate,
Handle<JSArrayBuffer> array_buffer,
- size_t allocated_length) {
+ size_t allocated_length,
+ bool initialize) {
void* data;
CHECK(V8::ArrayBufferAllocator() != NULL);
if (allocated_length != 0) {
- data = V8::ArrayBufferAllocator()->Allocate(allocated_length);
+ if (initialize) {
+ data = V8::ArrayBufferAllocator()->Allocate(allocated_length);
+ } else {
+ data =
+ V8::ArrayBufferAllocator()->AllocateUninitialized(allocated_length);
+ }
if (data == NULL) return false;
- memset(data, 0, allocated_length);
} else {
data = NULL;
}
@@ -805,74 +829,78 @@ enum TypedArrayId {
ARRAY_ID_UINT8C = 9
};
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) {
- HandleScope scope(isolate);
- ASSERT(args.length() == 5);
- CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, holder, 0);
- CONVERT_SMI_ARG_CHECKED(arrayId, 1);
- CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, buffer, 2);
- CONVERT_ARG_HANDLE_CHECKED(Object, byte_offset_object, 3);
- CONVERT_ARG_HANDLE_CHECKED(Object, byte_length_object, 4);
-
- ASSERT(holder->GetInternalFieldCount() ==
- v8::ArrayBufferView::kInternalFieldCount);
- for (int i = 0; i < v8::ArrayBufferView::kInternalFieldCount; i++) {
- holder->SetInternalField(i, Smi::FromInt(0));
- }
-
- ExternalArrayType arrayType;
- size_t elementSize;
+static void ArrayIdToTypeAndSize(
+ int arrayId, ExternalArrayType* array_type, size_t* element_size) {
switch (arrayId) {
case ARRAY_ID_UINT8:
- arrayType = kExternalUnsignedByteArray;
- elementSize = 1;
+ *array_type = kExternalUnsignedByteArray;
+ *element_size = 1;
break;
case ARRAY_ID_INT8:
- arrayType = kExternalByteArray;
- elementSize = 1;
+ *array_type = kExternalByteArray;
+ *element_size = 1;
break;
case ARRAY_ID_UINT16:
- arrayType = kExternalUnsignedShortArray;
- elementSize = 2;
+ *array_type = kExternalUnsignedShortArray;
+ *element_size = 2;
break;
case ARRAY_ID_INT16:
- arrayType = kExternalShortArray;
- elementSize = 2;
+ *array_type = kExternalShortArray;
+ *element_size = 2;
break;
case ARRAY_ID_UINT32:
- arrayType = kExternalUnsignedIntArray;
- elementSize = 4;
+ *array_type = kExternalUnsignedIntArray;
+ *element_size = 4;
break;
case ARRAY_ID_INT32:
- arrayType = kExternalIntArray;
- elementSize = 4;
+ *array_type = kExternalIntArray;
+ *element_size = 4;
break;
case ARRAY_ID_FLOAT32:
- arrayType = kExternalFloatArray;
- elementSize = 4;
+ *array_type = kExternalFloatArray;
+ *element_size = 4;
break;
case ARRAY_ID_FLOAT64:
- arrayType = kExternalDoubleArray;
- elementSize = 8;
+ *array_type = kExternalDoubleArray;
+ *element_size = 8;
break;
case ARRAY_ID_UINT8C:
- arrayType = kExternalPixelArray;
- elementSize = 1;
+ *array_type = kExternalPixelArray;
+ *element_size = 1;
break;
default:
UNREACHABLE();
- return NULL;
+ }
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 5);
+ CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, holder, 0);
+ CONVERT_SMI_ARG_CHECKED(arrayId, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, buffer, 2);
+ CONVERT_ARG_HANDLE_CHECKED(Object, byte_offset_object, 3);
+ CONVERT_ARG_HANDLE_CHECKED(Object, byte_length_object, 4);
+
+ ASSERT(holder->GetInternalFieldCount() ==
+ v8::ArrayBufferView::kInternalFieldCount);
+ for (int i = 0; i < v8::ArrayBufferView::kInternalFieldCount; i++) {
+ holder->SetInternalField(i, Smi::FromInt(0));
}
+ ExternalArrayType array_type = kExternalByteArray; // Bogus initialization.
+ size_t element_size = 1; // Bogus initialization.
+ ArrayIdToTypeAndSize(arrayId, &array_type, &element_size);
+
holder->set_buffer(*buffer);
holder->set_byte_offset(*byte_offset_object);
holder->set_byte_length(*byte_length_object);
size_t byte_offset = NumberToSize(isolate, *byte_offset_object);
size_t byte_length = NumberToSize(isolate, *byte_length_object);
- ASSERT(byte_length % elementSize == 0);
- size_t length = byte_length / elementSize;
+ ASSERT(byte_length % element_size == 0);
+ size_t length = byte_length / element_size;
Handle<Object> length_obj = isolate->factory()->NewNumberFromSize(length);
holder->set_length(*length_obj);
@@ -881,13 +909,99 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) {
Handle<ExternalArray> elements =
isolate->factory()->NewExternalArray(
- static_cast<int>(length), arrayType,
+ static_cast<int>(length), array_type,
static_cast<uint8_t*>(buffer->backing_store()) + byte_offset);
holder->set_elements(*elements);
return isolate->heap()->undefined_value();
}
+// Initializes a typed array from an array-like object.
+// If an array-like object happens to be a typed array of the same type,
+// initializes backing store using memove.
+//
+// Returns true if backing store was initialized or false otherwise.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitializeFromArrayLike) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 4);
+ CONVERT_ARG_HANDLE_CHECKED(JSTypedArray, holder, 0);
+ CONVERT_SMI_ARG_CHECKED(arrayId, 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, source, 2);
+ CONVERT_ARG_HANDLE_CHECKED(Object, length_obj, 3);
+
+ ASSERT(holder->GetInternalFieldCount() ==
+ v8::ArrayBufferView::kInternalFieldCount);
+ for (int i = 0; i < v8::ArrayBufferView::kInternalFieldCount; i++) {
+ holder->SetInternalField(i, Smi::FromInt(0));
+ }
+
+ ExternalArrayType array_type = kExternalByteArray; // Bogus initialization.
+ size_t element_size = 1; // Bogus initialization.
+ ArrayIdToTypeAndSize(arrayId, &array_type, &element_size);
+
+ Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
+ size_t length = NumberToSize(isolate, *length_obj);
+ size_t byte_length = length * element_size;
+ if (byte_length < length) { // Overflow
+ return isolate->Throw(*isolate->factory()->
+ NewRangeError("invalid_array_buffer_length",
+ HandleVector<Object>(NULL, 0)));
+ }
+
+ // We assume that the caller of this function will initialize holder
+ // with the loop
+ // for(i = 0; i < length; i++) { holder[i] = source[i]; }
+ // If source is a typed array, this loop will always run to completion,
+ // so we are sure that the backing store will be initialized.
+ // Otherwise, we do not know (the indexing operation might throw).
+ // Hence we require zero initialization unless our source is a typed array.
+ bool should_zero_initialize = !source->IsJSTypedArray();
+
+ if (!Runtime::SetupArrayBufferAllocatingData(
+ isolate, buffer, byte_length, should_zero_initialize)) {
+ return isolate->Throw(*isolate->factory()->
+ NewRangeError("invalid_array_buffer_length",
+ HandleVector<Object>(NULL, 0)));
+ }
+
+ holder->set_buffer(*buffer);
+ holder->set_byte_offset(Smi::FromInt(0));
+ Handle<Object> byte_length_obj(
+ isolate->factory()->NewNumberFromSize(byte_length));
+ holder->set_byte_length(*byte_length_obj);
+ holder->set_length(*length_obj);
+ holder->set_weak_next(buffer->weak_first_view());
+ buffer->set_weak_first_view(*holder);
+
+ Handle<ExternalArray> elements =
+ isolate->factory()->NewExternalArray(
+ static_cast<int>(length), array_type,
+ static_cast<uint8_t*>(buffer->backing_store()));
+ holder->set_elements(*elements);
+
+ if (source->IsJSTypedArray()) {
+ Handle<JSTypedArray> typed_array(JSTypedArray::cast(*source));
+
+ if (typed_array->type() == holder->type()) {
+ uint8_t* backing_store =
+ static_cast<uint8_t*>(
+ JSArrayBuffer::cast(typed_array->buffer())->backing_store());
+ size_t source_byte_offset =
+ NumberToSize(isolate, typed_array->byte_offset());
+ OS::MemCopy(
+ buffer->backing_store(),
+ backing_store + source_byte_offset,
+ byte_length);
+ return *isolate->factory()->true_value();
+ } else {
+ return *isolate->factory()->false_value();
+ }
+ }
+
+ return *isolate->factory()->false_value();
+}
+
+
#define TYPED_ARRAY_GETTER(getter, accessor) \
RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayGet##getter) { \
HandleScope scope(isolate); \
@@ -907,6 +1021,21 @@ TYPED_ARRAY_GETTER(Length, length)
#undef TYPED_ARRAY_GETTER
+// Return codes for Runtime_TypedArraySetFastCases.
+// Should be synchronized with typedarray.js natives.
+enum TypedArraySetResultCodes {
+ // Set from typed array of the same type.
+ // This is processed by TypedArraySetFastCases
+ TYPED_ARRAY_SET_TYPED_ARRAY_SAME_TYPE = 0,
+ // Set from typed array of the different type, overlapping in memory.
+ TYPED_ARRAY_SET_TYPED_ARRAY_OVERLAPPING = 1,
+ // Set from typed array of the different type, non-overlapping.
+ TYPED_ARRAY_SET_TYPED_ARRAY_NONOVERLAPPING = 2,
+ // Set from non-typed array.
+ TYPED_ARRAY_SET_NON_TYPED_ARRAY = 3
+};
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArraySetFastCases) {
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(Object, target_obj, 0);
@@ -918,7 +1047,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArraySetFastCases) {
"not_typed_array", HandleVector<Object>(NULL, 0)));
if (!source_obj->IsJSTypedArray())
- return isolate->heap()->false_value();
+ return Smi::FromInt(TYPED_ARRAY_SET_NON_TYPED_ARRAY);
Handle<JSTypedArray> target(JSTypedArray::cast(*target_obj));
Handle<JSTypedArray> source(JSTypedArray::cast(*source_obj));
@@ -933,20 +1062,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArraySetFastCases) {
return isolate->Throw(*isolate->factory()->NewRangeError(
"typed_array_set_source_too_large", HandleVector<Object>(NULL, 0)));
- Handle<JSArrayBuffer> target_buffer(JSArrayBuffer::cast(target->buffer()));
- Handle<JSArrayBuffer> source_buffer(JSArrayBuffer::cast(source->buffer()));
size_t target_offset = NumberToSize(isolate, target->byte_offset());
size_t source_offset = NumberToSize(isolate, source->byte_offset());
uint8_t* target_base =
- static_cast<uint8_t*>(target_buffer->backing_store()) + target_offset;
+ static_cast<uint8_t*>(
+ JSArrayBuffer::cast(target->buffer())->backing_store()) + target_offset;
uint8_t* source_base =
- static_cast<uint8_t*>(source_buffer->backing_store()) + source_offset;
+ static_cast<uint8_t*>(
+ JSArrayBuffer::cast(source->buffer())->backing_store()) + source_offset;
// Typed arrays of the same type: use memmove.
if (target->type() == source->type()) {
memmove(target_base + offset * target->element_size(),
source_base, source_byte_length);
- return isolate->heap()->true_value();
+ return Smi::FromInt(TYPED_ARRAY_SET_TYPED_ARRAY_SAME_TYPE);
}
// Typed arrays of different types over the same backing store
@@ -954,78 +1083,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArraySetFastCases) {
source_base + source_byte_length > target_base) ||
(target_base <= source_base &&
target_base + target_byte_length > source_base)) {
- size_t target_element_size = target->element_size();
- size_t source_element_size = source->element_size();
-
- size_t source_length = NumberToSize(isolate, source->length());
-
- // Copy left part
- size_t left_index;
- {
- // First un-mutated byte after the next write
- uint8_t* target_ptr = target_base + (offset + 1) * target_element_size;
- // Next read at source_ptr. We do not care for memory changing before
- // source_ptr - we have already copied it.
- uint8_t* source_ptr = source_base;
- for (left_index = 0;
- left_index < source_length && target_ptr <= source_ptr;
- left_index++) {
- Handle<Object> v = Object::GetElement(
- source, static_cast<uint32_t>(left_index));
- JSObject::SetElement(
- target, static_cast<uint32_t>(offset + left_index), v,
- NONE, kNonStrictMode);
- target_ptr += target_element_size;
- source_ptr += source_element_size;
- }
- }
- // Copy right part
- size_t right_index;
- {
- // First unmutated byte before the next write
- uint8_t* target_ptr =
- target_base + (offset + source_length - 1) * target_element_size;
- // Next read before source_ptr. We do not care for memory changing after
- // source_ptr - we have already copied it.
- uint8_t* source_ptr =
- source_base + source_length * source_element_size;
- for (right_index = source_length - 1;
- right_index >= left_index && target_ptr >= source_ptr;
- right_index--) {
- Handle<Object> v = Object::GetElement(
- source, static_cast<uint32_t>(right_index));
- JSObject::SetElement(
- target, static_cast<uint32_t>(offset + right_index), v,
- NONE, kNonStrictMode);
- target_ptr -= target_element_size;
- source_ptr -= source_element_size;
- }
- }
- // There can be at most 8 entries left in the middle that need buffering
- // (because the largest element_size is 8 times the smallest).
- ASSERT((right_index + 1) - left_index <= 8);
- Handle<Object> temp[8];
- size_t idx;
- for (idx = left_index; idx <= right_index; idx++) {
- temp[idx - left_index] = Object::GetElement(
- source, static_cast<uint32_t>(idx));
- }
- for (idx = left_index; idx <= right_index; idx++) {
- JSObject::SetElement(
- target, static_cast<uint32_t>(offset + idx), temp[idx-left_index],
- NONE, kNonStrictMode);
- }
+ // We do not support overlapping ArrayBuffers
+ ASSERT(
+ JSArrayBuffer::cast(target->buffer())->backing_store() ==
+ JSArrayBuffer::cast(source->buffer())->backing_store());
+ return Smi::FromInt(TYPED_ARRAY_SET_TYPED_ARRAY_OVERLAPPING);
} else { // Non-overlapping typed arrays
- for (size_t idx = 0; idx < source_length; idx++) {
- Handle<Object> value = Object::GetElement(
- source, static_cast<uint32_t>(idx));
- JSObject::SetElement(
- target, static_cast<uint32_t>(offset + idx), value,
- NONE, kNonStrictMode);
- }
+ return Smi::FromInt(TYPED_ARRAY_SET_TYPED_ARRAY_NONOVERLAPPING);
}
-
- return isolate->heap()->true_value();
}
@@ -7188,15 +7253,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberXor) {
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberNot) {
- SealHandleScope shs(isolate);
- ASSERT(args.length() == 1);
-
- CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
- return isolate->heap()->NumberFromInt32(~x);
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberShl) {
SealHandleScope shs(isolate);
ASSERT(args.length() == 2);
@@ -8499,23 +8555,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
// Use linear search of the unoptimized code's back edge table to find
// the AST id matching the PC.
- Address start = unoptimized->instruction_start();
- unsigned target_pc_offset = static_cast<unsigned>(frame->pc() - start);
- Address table_cursor = start + unoptimized->back_edge_table_offset();
- uint32_t table_length = Memory::uint32_at(table_cursor);
- table_cursor += kIntSize;
+ uint32_t target_pc_offset =
+ static_cast<uint32_t>(frame->pc() - unoptimized->instruction_start());
uint32_t loop_depth = 0;
- for (unsigned i = 0; i < table_length; ++i) {
- // Table entries are (AST id, pc offset) pairs.
- uint32_t pc_offset = Memory::uint32_at(table_cursor + kIntSize);
- if (pc_offset == target_pc_offset) {
- ast_id = BailoutId(static_cast<int>(Memory::uint32_at(table_cursor)));
- loop_depth = Memory::uint32_at(table_cursor + 2 * kIntSize);
+
+ for (FullCodeGenerator::BackEdgeTableIterator back_edges(*unoptimized);
+ !back_edges.Done();
+ back_edges.Next()) {
+ if (back_edges.pc_offset() == target_pc_offset) {
+ ast_id = back_edges.ast_id();
+ loop_depth = back_edges.loop_depth();
break;
}
- table_cursor += FullCodeGenerator::kBackEdgeEntrySize;
}
ASSERT(!ast_id.IsNone());
+
if (FLAG_trace_osr) {
PrintF("[replacing on-stack at AST id %d, loop depth %d in ",
ast_id.ToInt(), loop_depth);
@@ -8632,8 +8686,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Apply) {
CONVERT_ARG_HANDLE_CHECKED(JSObject, arguments, 2);
CONVERT_SMI_ARG_CHECKED(offset, 3);
CONVERT_SMI_ARG_CHECKED(argc, 4);
- ASSERT(offset >= 0);
- ASSERT(argc >= 0);
+ RUNTIME_ASSERT(offset >= 0);
+ RUNTIME_ASSERT(argc >= 0);
// If there are too many arguments, allocate argv via malloc.
const int argv_small_size = 10;
@@ -9426,7 +9480,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ParseJson) {
ASSERT_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
- source = Handle<String>(source->TryFlattenGetString());
+ source = Handle<String>(FlattenGetString(source));
// Optimized fast case where we only have ASCII characters.
Handle<Object> result;
if (source->IsSeqOneByteString()) {
@@ -13316,6 +13370,304 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetHeapUsage) {
#endif // ENABLE_DEBUGGER_SUPPORT
+#ifdef V8_I18N_SUPPORT
+RUNTIME_FUNCTION(MaybeObject*, Runtime_CanonicalizeLanguageTag) {
+ HandleScope scope(isolate);
+
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, locale_id_str, 0);
+
+ v8::String::Utf8Value locale_id(v8::Utils::ToLocal(locale_id_str));
+
+ // Return value which denotes invalid language tag.
+ const char* const kInvalidTag = "invalid-tag";
+
+ UErrorCode error = U_ZERO_ERROR;
+ char icu_result[ULOC_FULLNAME_CAPACITY];
+ int icu_length = 0;
+
+ uloc_forLanguageTag(*locale_id, icu_result, ULOC_FULLNAME_CAPACITY,
+ &icu_length, &error);
+ if (U_FAILURE(error) || icu_length == 0) {
+ return isolate->heap()->AllocateStringFromOneByte(CStrVector(kInvalidTag));
+ }
+
+ char result[ULOC_FULLNAME_CAPACITY];
+
+ // Force strict BCP47 rules.
+ uloc_toLanguageTag(icu_result, result, ULOC_FULLNAME_CAPACITY, TRUE, &error);
+
+ if (U_FAILURE(error)) {
+ return isolate->heap()->AllocateStringFromOneByte(CStrVector(kInvalidTag));
+ }
+
+ return isolate->heap()->AllocateStringFromOneByte(CStrVector(result));
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_AvailableLocalesOf) {
+ HandleScope scope(isolate);
+
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, service, 0);
+
+ const icu::Locale* available_locales = NULL;
+ int32_t count = 0;
+
+ if (service->IsUtf8EqualTo(CStrVector("collator"))) {
+ available_locales = icu::Collator::getAvailableLocales(count);
+ } else if (service->IsUtf8EqualTo(CStrVector("numberformat"))) {
+ available_locales = icu::NumberFormat::getAvailableLocales(count);
+ } else if (service->IsUtf8EqualTo(CStrVector("dateformat"))) {
+ available_locales = icu::DateFormat::getAvailableLocales(count);
+ } else if (service->IsUtf8EqualTo(CStrVector("breakiterator"))) {
+ available_locales = icu::BreakIterator::getAvailableLocales(count);
+ }
+
+ UErrorCode error = U_ZERO_ERROR;
+ char result[ULOC_FULLNAME_CAPACITY];
+ Handle<JSObject> locales =
+ isolate->factory()->NewJSObject(isolate->object_function());
+
+ for (int32_t i = 0; i < count; ++i) {
+ const char* icu_name = available_locales[i].getName();
+
+ error = U_ZERO_ERROR;
+ // No need to force strict BCP47 rules.
+ uloc_toLanguageTag(icu_name, result, ULOC_FULLNAME_CAPACITY, FALSE, &error);
+ if (U_FAILURE(error)) {
+ // This shouldn't happen, but lets not break the user.
+ continue;
+ }
+
+ RETURN_IF_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ locales,
+ isolate->factory()->NewStringFromAscii(CStrVector(result)),
+ isolate->factory()->NewNumber(i),
+ NONE));
+ }
+
+ return *locales;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultICULocale) {
+ SealHandleScope shs(isolate);
+
+ ASSERT(args.length() == 0);
+
+ icu::Locale default_locale;
+
+ // Set the locale
+ char result[ULOC_FULLNAME_CAPACITY];
+ UErrorCode status = U_ZERO_ERROR;
+ uloc_toLanguageTag(
+ default_locale.getName(), result, ULOC_FULLNAME_CAPACITY, FALSE, &status);
+ if (U_SUCCESS(status)) {
+ return isolate->heap()->AllocateStringFromOneByte(CStrVector(result));
+ }
+
+ return isolate->heap()->AllocateStringFromOneByte(CStrVector("und"));
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLanguageTagVariants) {
+ HandleScope scope(isolate);
+
+ ASSERT(args.length() == 1);
+
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, input, 0);
+
+ uint32_t length = static_cast<uint32_t>(input->length()->Number());
+ Handle<FixedArray> output = isolate->factory()->NewFixedArray(length);
+ Handle<Name> maximized =
+ isolate->factory()->NewStringFromAscii(CStrVector("maximized"));
+ Handle<Name> base =
+ isolate->factory()->NewStringFromAscii(CStrVector("base"));
+ for (unsigned int i = 0; i < length; ++i) {
+ MaybeObject* maybe_string = input->GetElement(i);
+ Object* locale_id;
+ if (!maybe_string->ToObject(&locale_id) || !locale_id->IsString()) {
+ return isolate->Throw(isolate->heap()->illegal_argument_string());
+ }
+
+ v8::String::Utf8Value utf8_locale_id(
+ v8::Utils::ToLocal(Handle<String>(String::cast(locale_id))));
+
+ UErrorCode error = U_ZERO_ERROR;
+
+ // Convert from BCP47 to ICU format.
+ // de-DE-u-co-phonebk -> de_DE@collation=phonebook
+ char icu_locale[ULOC_FULLNAME_CAPACITY];
+ int icu_locale_length = 0;
+ uloc_forLanguageTag(*utf8_locale_id, icu_locale, ULOC_FULLNAME_CAPACITY,
+ &icu_locale_length, &error);
+ if (U_FAILURE(error) || icu_locale_length == 0) {
+ return isolate->Throw(isolate->heap()->illegal_argument_string());
+ }
+
+ // Maximize the locale.
+ // de_DE@collation=phonebook -> de_Latn_DE@collation=phonebook
+ char icu_max_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_addLikelySubtags(
+ icu_locale, icu_max_locale, ULOC_FULLNAME_CAPACITY, &error);
+
+ // Remove extensions from maximized locale.
+ // de_Latn_DE@collation=phonebook -> de_Latn_DE
+ char icu_base_max_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_getBaseName(
+ icu_max_locale, icu_base_max_locale, ULOC_FULLNAME_CAPACITY, &error);
+
+ // Get original name without extensions.
+ // de_DE@collation=phonebook -> de_DE
+ char icu_base_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_getBaseName(
+ icu_locale, icu_base_locale, ULOC_FULLNAME_CAPACITY, &error);
+
+ // Convert from ICU locale format to BCP47 format.
+ // de_Latn_DE -> de-Latn-DE
+ char base_max_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_toLanguageTag(icu_base_max_locale, base_max_locale,
+ ULOC_FULLNAME_CAPACITY, FALSE, &error);
+
+ // de_DE -> de-DE
+ char base_locale[ULOC_FULLNAME_CAPACITY];
+ uloc_toLanguageTag(
+ icu_base_locale, base_locale, ULOC_FULLNAME_CAPACITY, FALSE, &error);
+
+ if (U_FAILURE(error)) {
+ return isolate->Throw(isolate->heap()->illegal_argument_string());
+ }
+
+ Handle<JSObject> result =
+ isolate->factory()->NewJSObject(isolate->object_function());
+ RETURN_IF_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ result,
+ maximized,
+ isolate->factory()->NewStringFromAscii(CStrVector(base_max_locale)),
+ NONE));
+ RETURN_IF_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ result,
+ base,
+ isolate->factory()->NewStringFromAscii(CStrVector(base_locale)),
+ NONE));
+ output->set(i, *result);
+ }
+
+ Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(output);
+ result->set_length(Smi::FromInt(length));
+ return *result;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateDateTimeFormat) {
+ HandleScope scope(isolate);
+
+ ASSERT(args.length() == 3);
+
+ CONVERT_ARG_HANDLE_CHECKED(String, locale, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, options, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, resolved, 2);
+
+ Handle<ObjectTemplateInfo> date_format_template =
+ I18N::GetTemplate(isolate);
+
+ // Create an empty object wrapper.
+ bool has_pending_exception = false;
+ Handle<JSObject> local_object = Execution::InstantiateObject(
+ date_format_template, &has_pending_exception);
+ if (has_pending_exception) {
+ ASSERT(isolate->has_pending_exception());
+ return Failure::Exception();
+ }
+
+ // Set date time formatter as internal field of the resulting JS object.
+ icu::SimpleDateFormat* date_format = DateFormat::InitializeDateTimeFormat(
+ isolate, locale, options, resolved);
+
+ if (!date_format) return isolate->ThrowIllegalOperation();
+
+ local_object->SetInternalField(0, reinterpret_cast<Smi*>(date_format));
+
+ RETURN_IF_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ local_object,
+ isolate->factory()->NewStringFromAscii(CStrVector("dateFormat")),
+ isolate->factory()->NewStringFromAscii(CStrVector("valid")),
+ NONE));
+
+ Persistent<v8::Object> wrapper(reinterpret_cast<v8::Isolate*>(isolate),
+ v8::Utils::ToLocal(local_object));
+ // Make object handle weak so we can delete the data format once GC kicks in.
+ wrapper.MakeWeak<void>(NULL, &DateFormat::DeleteDateFormat);
+ Handle<Object> result = Utils::OpenPersistent(wrapper);
+ wrapper.ClearAndLeak();
+ return *result;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateFormat) {
+ HandleScope scope(isolate);
+
+ ASSERT(args.length() == 2);
+
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, date_format_holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSDate, date, 1);
+
+ bool has_pending_exception = false;
+ double millis = Execution::ToNumber(date, &has_pending_exception)->Number();
+ if (has_pending_exception) {
+ ASSERT(isolate->has_pending_exception());
+ return Failure::Exception();
+ }
+
+ icu::SimpleDateFormat* date_format =
+ DateFormat::UnpackDateFormat(isolate, date_format_holder);
+ if (!date_format) return isolate->ThrowIllegalOperation();
+
+ icu::UnicodeString result;
+ date_format->format(millis, result);
+
+ return *isolate->factory()->NewStringFromTwoByte(
+ Vector<const uint16_t>(
+ reinterpret_cast<const uint16_t*>(result.getBuffer()),
+ result.length()));
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_InternalDateParse) {
+ HandleScope scope(isolate);
+
+ ASSERT(args.length() == 2);
+
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, date_format_holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, date_string, 1);
+
+ v8::String::Utf8Value utf8_date(v8::Utils::ToLocal(date_string));
+ icu::UnicodeString u_date(icu::UnicodeString::fromUTF8(*utf8_date));
+ icu::SimpleDateFormat* date_format =
+ DateFormat::UnpackDateFormat(isolate, date_format_holder);
+ if (!date_format) return isolate->ThrowIllegalOperation();
+
+ UErrorCode status = U_ZERO_ERROR;
+ UDate date = date_format->parse(u_date, status);
+ if (U_FAILURE(status)) return isolate->heap()->undefined_value();
+
+ bool has_pending_exception = false;
+ Handle<JSDate> result = Handle<JSDate>::cast(
+ Execution::NewDate(static_cast<double>(date), &has_pending_exception));
+ if (has_pending_exception) {
+ ASSERT(isolate->has_pending_exception());
+ return Failure::Exception();
+ }
+ return *result;
+}
+#endif // V8_I18N_SUPPORT
+
+
// Finds the script object from the script data. NOTE: This operation uses
// heap traversal to find the function generated for the source position
// for the requested break point. For lazily compiled functions several heap
@@ -13434,6 +13786,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FlattenString) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MigrateInstance) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, object, 0);
+ if (!object->IsJSObject()) return Smi::FromInt(0);
+ Handle<JSObject> js_object = Handle<JSObject>::cast(object);
+ if (!js_object->map()->is_deprecated()) return Smi::FromInt(0);
+ JSObject::MigrateInstance(js_object);
+ return *object;
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
SealHandleScope shs(isolate);
// This is only called from codegen, so checks might be more lax.
@@ -13677,6 +14041,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetIsObserved) {
ASSERT(proto->IsJSGlobalObject());
obj = JSReceiver::cast(proto);
}
+ if (obj->IsJSProxy())
+ return isolate->heap()->undefined_value();
+
ASSERT(!(obj->map()->is_observed() && obj->IsJSObject() &&
JSObject::cast(obj)->HasFastElements()));
ASSERT(obj->IsJSObject());
diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h
index 17e3b43561..ade7e732c0 100644
--- a/deps/v8/src/runtime.h
+++ b/deps/v8/src/runtime.h
@@ -109,6 +109,7 @@ namespace internal {
F(DebugCallbackSupportsStepping, 1, 1) \
F(DebugPrepareStepInIfStepping, 1, 1) \
F(FlattenString, 1, 1) \
+ F(MigrateInstance, 1, 1) \
\
/* Array join support */ \
F(PushIfAbsent, 2, 1) \
@@ -157,7 +158,6 @@ namespace internal {
F(NumberOr, 2, 1) \
F(NumberAnd, 2, 1) \
F(NumberXor, 2, 1) \
- F(NumberNot, 1, 1) \
\
F(NumberShl, 2, 1) \
F(NumberShr, 2, 1) \
@@ -361,6 +361,7 @@ namespace internal {
F(ArrayBufferSliceImpl, 3, 1) \
\
F(TypedArrayInitialize, 5, 1) \
+ F(TypedArrayInitializeFromArrayLike, 4, 1) \
F(TypedArrayGetBuffer, 1, 1) \
F(TypedArrayGetByteLength, 1, 1) \
F(TypedArrayGetByteOffset, 1, 1) \
@@ -533,6 +534,26 @@ namespace internal {
#define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
#endif
+
+#ifdef V8_I18N_SUPPORT
+#define RUNTIME_FUNCTION_LIST_I18N_SUPPORT(F) \
+ /* i18n support */ \
+ /* Standalone, helper methods. */ \
+ F(CanonicalizeLanguageTag, 1, 1) \
+ F(AvailableLocalesOf, 1, 1) \
+ F(GetDefaultICULocale, 0, 1) \
+ F(GetLanguageTagVariants, 1, 1) \
+ \
+ /* Date format and parse. */ \
+ F(CreateDateTimeFormat, 3, 1) \
+ F(InternalDateFormat, 2, 1) \
+ F(InternalDateParse, 2, 1) \
+
+#else
+#define RUNTIME_FUNCTION_LIST_I18N_SUPPORT(F)
+#endif
+
+
#ifdef DEBUG
#define RUNTIME_FUNCTION_LIST_DEBUG(F) \
/* Testing */ \
@@ -550,7 +571,8 @@ namespace internal {
RUNTIME_FUNCTION_LIST_ALWAYS_1(F) \
RUNTIME_FUNCTION_LIST_ALWAYS_2(F) \
RUNTIME_FUNCTION_LIST_DEBUG(F) \
- RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
+ RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F) \
+ RUNTIME_FUNCTION_LIST_I18N_SUPPORT(F)
// ----------------------------------------------------------------------------
// INLINE_FUNCTION_LIST defines all inlined functions accessed
@@ -784,7 +806,8 @@ class Runtime : public AllStatic {
static bool SetupArrayBufferAllocatingData(
Isolate* isolate,
Handle<JSArrayBuffer> array_buffer,
- size_t allocated_length);
+ size_t allocated_length,
+ bool initialize = true);
static void FreeArrayBuffer(
Isolate* isolate,
diff --git a/deps/v8/src/runtime.js b/deps/v8/src/runtime.js
index 90fb36b422..5339570ef6 100644
--- a/deps/v8/src/runtime.js
+++ b/deps/v8/src/runtime.js
@@ -294,20 +294,6 @@ function BIT_XOR(y) {
}
-// ECMA-262, section 11.4.7, page 47.
-function UNARY_MINUS() {
- var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
- return %NumberUnaryMinus(x);
-}
-
-
-// ECMA-262, section 11.4.8, page 48.
-function BIT_NOT() {
- var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
- return %NumberNot(x);
-}
-
-
// ECMA-262, section 11.7.1, page 51.
function SHL(y) {
var x = IS_NUMBER(this) ? this : %NonNumberToNumber(this);
diff --git a/deps/v8/src/sampler.cc b/deps/v8/src/sampler.cc
index 6e451f0bde..d72ed1acdb 100644
--- a/deps/v8/src/sampler.cc
+++ b/deps/v8/src/sampler.cc
@@ -38,10 +38,7 @@
#include <signal.h>
#include <sys/time.h>
#include <sys/syscall.h>
-// OpenBSD doesn't have <ucontext.h>. ucontext_t lives in <signal.h>
-// and is a typedef for struct sigcontext. There is no uc_mcontext.
-#if (!defined(__ANDROID__) || defined(__BIONIC_HAVE_UCONTEXT_T)) \
- && !defined(__OpenBSD__)
+#if !defined(__ANDROID__) || defined(__BIONIC_HAVE_UCONTEXT_T)
#include <ucontext.h>
#endif
#include <unistd.h>
@@ -333,9 +330,7 @@ void SignalHandler::HandleProfilerSignal(int signal, siginfo_t* info,
#else
// Extracting the sample from the context is extremely machine dependent.
ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
-#if !defined(__OpenBSD__)
mcontext_t& mcontext = ucontext->uc_mcontext;
-#endif
#if defined(__linux__) || defined(__ANDROID__)
#if V8_HOST_ARCH_IA32
state.pc = reinterpret_cast<Address>(mcontext.gregs[REG_EIP]);
@@ -389,6 +384,7 @@ void SignalHandler::HandleProfilerSignal(int signal, siginfo_t* info,
state.fp = reinterpret_cast<Address>(mcontext.__gregs[_REG_RBP]);
#endif // V8_HOST_ARCH_*
#elif defined(__OpenBSD__)
+ USE(mcontext);
#if V8_HOST_ARCH_IA32
state.pc = reinterpret_cast<Address>(ucontext->sc_eip);
state.sp = reinterpret_cast<Address>(ucontext->sc_esp);
diff --git a/deps/v8/src/serialize.cc b/deps/v8/src/serialize.cc
index 6c5a620a41..746c926653 100644
--- a/deps/v8/src/serialize.cc
+++ b/deps/v8/src/serialize.cc
@@ -1304,6 +1304,7 @@ void StartupSerializer::SerializeStrongReferences() {
// No active or weak handles.
CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
+ CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
// We don't support serializing installed extensions.
CHECK(!isolate->has_installed_extensions());
diff --git a/deps/v8/src/spaces.h b/deps/v8/src/spaces.h
index b47452e421..aa864b66ba 100644
--- a/deps/v8/src/spaces.h
+++ b/deps/v8/src/spaces.h
@@ -784,8 +784,9 @@ class Page : public MemoryChunk {
// Maximum object size that fits in a page. Objects larger than that size
// are allocated in large object space and are never moved in memory. This
// also applies to new space allocation, since objects are never migrated
- // from new space to large object space.
- static const int kMaxNonCodeHeapObjectSize = kNonCodeObjectAreaSize;
+ // from new space to large object space. Takes double alignment into account.
+ static const int kMaxNonCodeHeapObjectSize =
+ kNonCodeObjectAreaSize - kPointerSize;
// Page size mask.
static const intptr_t kPageAlignmentMask = (1 << kPageSizeBits) - 1;
diff --git a/deps/v8/src/transitions-inl.h b/deps/v8/src/transitions-inl.h
index 45b6457245..c4825fcf73 100644
--- a/deps/v8/src/transitions-inl.h
+++ b/deps/v8/src/transitions-inl.h
@@ -57,30 +57,8 @@ TransitionArray* TransitionArray::cast(Object* object) {
}
-Map* TransitionArray::elements_transition() {
- Object* transition_map = get(kElementsTransitionIndex);
- return Map::cast(transition_map);
-}
-
-
-void TransitionArray::ClearElementsTransition() {
- WRITE_FIELD(this, kElementsTransitionOffset, Smi::FromInt(0));
-}
-
-
bool TransitionArray::HasElementsTransition() {
- return IsFullTransitionArray() &&
- get(kElementsTransitionIndex) != Smi::FromInt(0);
-}
-
-
-void TransitionArray::set_elements_transition(Map* transition_map,
- WriteBarrierMode mode) {
- ASSERT(IsFullTransitionArray());
- Heap* heap = GetHeap();
- WRITE_FIELD(this, kElementsTransitionOffset, transition_map);
- CONDITIONAL_WRITE_BARRIER(
- heap, this, kElementsTransitionOffset, transition_map, mode);
+ return Search(GetHeap()->elements_transition_symbol()) != kNotFound;
}
diff --git a/deps/v8/src/transitions.cc b/deps/v8/src/transitions.cc
index df53178dd3..086edcb994 100644
--- a/deps/v8/src/transitions.cc
+++ b/deps/v8/src/transitions.cc
@@ -50,7 +50,6 @@ MaybeObject* TransitionArray::Allocate(int number_of_transitions) {
FixedArray* array;
MaybeObject* maybe_array = AllocateRaw(ToKeyIndex(number_of_transitions));
if (!maybe_array->To(&array)) return maybe_array;
- array->set(kElementsTransitionIndex, Smi::FromInt(0));
array->set(kPrototypeTransitionsIndex, Smi::FromInt(0));
return array;
}
@@ -120,10 +119,6 @@ MaybeObject* TransitionArray::CopyInsert(Name* name, Map* target) {
maybe_array = TransitionArray::Allocate(new_size);
if (!maybe_array->To(&result)) return maybe_array;
- if (HasElementsTransition()) {
- result->set_elements_transition(elements_transition());
- }
-
if (HasPrototypeTransitions()) {
result->SetPrototypeTransitions(GetPrototypeTransitions());
}
diff --git a/deps/v8/src/transitions.h b/deps/v8/src/transitions.h
index 7abef47346..fde1279895 100644
--- a/deps/v8/src/transitions.h
+++ b/deps/v8/src/transitions.h
@@ -41,10 +41,10 @@ namespace internal {
// TransitionArrays are fixed arrays used to hold map transitions for property,
// constant, and element changes. They can either be simple transition arrays
// that store a single property transition, or a full transition array that has
-// space for elements transitions, prototype transitions and multiple property
-// transitons. The details related to property transitions are accessed in the
-// descriptor array of the target map. In the case of a simple transition, the
-// key is also read from the descriptor array of the target map.
+// prototype transitions and multiple property transitions. The details related
+// to property transitions are accessed in the descriptor array of the target
+// map. In the case of a simple transition, the key is also read from the
+// descriptor array of the target map.
//
// The simple format of the these objects is:
// [0] Undefined or back pointer map
@@ -52,9 +52,8 @@ namespace internal {
//
// The full format is:
// [0] Undefined or back pointer map
-// [1] Smi(0) or elements transition map
-// [2] Smi(0) or fixed array of prototype transitions
-// [3] First transition
+// [1] Smi(0) or fixed array of prototype transitions
+// [2] First transition
// [length() - kTransitionSize] Last transition
class TransitionArray: public FixedArray {
public:
@@ -73,12 +72,7 @@ class TransitionArray: public FixedArray {
inline PropertyDetails GetTargetDetails(int transition_number);
- inline Map* elements_transition();
- inline void set_elements_transition(
- Map* target,
- WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline bool HasElementsTransition();
- inline void ClearElementsTransition();
inline Object* back_pointer_storage();
inline void set_back_pointer_storage(
@@ -127,8 +121,21 @@ class TransitionArray: public FixedArray {
// Allocates a TransitionArray.
MUST_USE_RESULT static MaybeObject* Allocate(int number_of_transitions);
- bool IsSimpleTransition() { return length() == kSimpleTransitionSize; }
- bool IsFullTransitionArray() { return length() >= kFirstIndex; }
+ bool IsSimpleTransition() {
+ return length() == kSimpleTransitionSize &&
+ get(kSimpleTransitionTarget)->IsHeapObject() &&
+ // The IntrusivePrototypeTransitionIterator may have set the map of the
+ // prototype transitions array to a smi. In that case, there are
+ // prototype transitions, hence this transition array is a full
+ // transition array.
+ HeapObject::cast(get(kSimpleTransitionTarget))->map()->IsMap() &&
+ get(kSimpleTransitionTarget)->IsMap();
+ }
+
+ bool IsFullTransitionArray() {
+ return length() > kFirstIndex ||
+ (length() == kFirstIndex && !IsSimpleTransition());
+ }
// Casting.
static inline TransitionArray* cast(Object* obj);
@@ -139,9 +146,8 @@ class TransitionArray: public FixedArray {
static const int kBackPointerStorageIndex = 0;
// Layout for full transition arrays.
- static const int kElementsTransitionIndex = 1;
- static const int kPrototypeTransitionsIndex = 2;
- static const int kFirstIndex = 3;
+ static const int kPrototypeTransitionsIndex = 1;
+ static const int kFirstIndex = 2;
// Layout for simple transition arrays.
static const int kSimpleTransitionTarget = 1;
@@ -152,9 +158,7 @@ class TransitionArray: public FixedArray {
static const int kBackPointerStorageOffset = FixedArray::kHeaderSize;
// Layout for the full transition array header.
- static const int kElementsTransitionOffset = kBackPointerStorageOffset +
- kPointerSize;
- static const int kPrototypeTransitionsOffset = kElementsTransitionOffset +
+ static const int kPrototypeTransitionsOffset = kBackPointerStorageOffset +
kPointerSize;
// Layout of map transition entries in full transition arrays.
diff --git a/deps/v8/src/type-info.cc b/deps/v8/src/type-info.cc
index 769df07e4f..336b459d6b 100644
--- a/deps/v8/src/type-info.cc
+++ b/deps/v8/src/type-info.cc
@@ -384,17 +384,6 @@ void TypeFeedbackOracle::CompareType(TypeFeedbackId id,
}
-Handle<Type> TypeFeedbackOracle::UnaryType(TypeFeedbackId id) {
- Handle<Object> object = GetInfo(id);
- if (!object->IsCode()) {
- return handle(Type::None(), isolate());
- }
- Handle<Code> code = Handle<Code>::cast(object);
- ASSERT(code->is_unary_op_stub());
- return UnaryOpStub(code->extended_extra_ic_state()).GetType(isolate());
-}
-
-
void TypeFeedbackOracle::BinaryType(TypeFeedbackId id,
Handle<Type>* left,
Handle<Type>* right,
@@ -658,7 +647,6 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
}
break;
- case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::COMPARE_IC:
case Code::TO_BOOLEAN_IC:
diff --git a/deps/v8/src/type-info.h b/deps/v8/src/type-info.h
index 1a7c67dfb8..4b376c84bd 100644
--- a/deps/v8/src/type-info.h
+++ b/deps/v8/src/type-info.h
@@ -297,7 +297,6 @@ class TypeFeedbackOracle: public ZoneObject {
byte ToBooleanTypes(TypeFeedbackId id);
// Get type information for arithmetic operations and compares.
- Handle<Type> UnaryType(TypeFeedbackId id);
void BinaryType(TypeFeedbackId id,
Handle<Type>* left,
Handle<Type>* right,
diff --git a/deps/v8/src/typedarray.js b/deps/v8/src/typedarray.js
index d5357b4f2c..7bd16f670b 100644
--- a/deps/v8/src/typedarray.js
+++ b/deps/v8/src/typedarray.js
@@ -77,11 +77,10 @@ function CreateTypedArrayConstructor(name, elementSize, arrayId, constructor) {
function ConstructByArrayLike(obj, arrayLike) {
var length = arrayLike.length;
var l = ToPositiveInteger(length, "invalid_typed_array_length");
- var byteLength = l * elementSize;
- var buffer = new $ArrayBuffer(byteLength);
- %TypedArrayInitialize(obj, arrayId, buffer, 0, byteLength);
- for (var i = 0; i < l; i++) {
- obj[i] = arrayLike[i];
+ if(!%TypedArrayInitializeFromArrayLike(obj, arrayId, arrayLike, l)) {
+ for (var i = 0; i < l; i++) {
+ obj[i] = arrayLike[i];
+ }
}
}
@@ -144,30 +143,103 @@ function CreateSubArray(elementSize, constructor) {
}
}
+function TypedArraySetFromArrayLike(target, source, sourceLength, offset) {
+ if (offset > 0) {
+ for (var i = 0; i < sourceLength; i++) {
+ target[offset + i] = source[i];
+ }
+ }
+ else {
+ for (var i = 0; i < sourceLength; i++) {
+ target[i] = source[i];
+ }
+ }
+}
+
+function TypedArraySetFromOverlappingTypedArray(target, source, offset) {
+ var sourceElementSize = source.BYTES_PER_ELEMENT;
+ var targetElementSize = target.BYTES_PER_ELEMENT;
+ var sourceLength = source.length;
+
+ // Copy left part.
+ function CopyLeftPart() {
+ // First un-mutated byte after the next write
+ var targetPtr = target.byteOffset + (offset + 1) * targetElementSize;
+ // Next read at sourcePtr. We do not care for memory changing before
+ // sourcePtr - we have already copied it.
+ var sourcePtr = source.byteOffset;
+ for (var leftIndex = 0;
+ leftIndex < sourceLength && targetPtr <= sourcePtr;
+ leftIndex++) {
+ target[offset + leftIndex] = source[leftIndex];
+ targetPtr += targetElementSize;
+ sourcePtr += sourceElementSize;
+ }
+ return leftIndex;
+ }
+ var leftIndex = CopyLeftPart();
+
+  // Copy right part.
+ function CopyRightPart() {
+ // First unmutated byte before the next write
+ var targetPtr =
+ target.byteOffset + (offset + sourceLength - 1) * targetElementSize;
+ // Next read before sourcePtr. We do not care for memory changing after
+ // sourcePtr - we have already copied it.
+ var sourcePtr =
+ source.byteOffset + sourceLength * sourceElementSize;
+ for(var rightIndex = sourceLength - 1;
+ rightIndex >= leftIndex && targetPtr >= sourcePtr;
+ rightIndex--) {
+ target[offset + rightIndex] = source[rightIndex];
+ targetPtr -= targetElementSize;
+ sourcePtr -= sourceElementSize;
+ }
+ return rightIndex;
+ }
+ var rightIndex = CopyRightPart();
+
+ var temp = new $Array(rightIndex + 1 - leftIndex);
+ for (var i = leftIndex; i <= rightIndex; i++) {
+ temp[i - leftIndex] = source[i];
+ }
+ for (i = leftIndex; i <= rightIndex; i++) {
+ target[offset + i] = temp[i - leftIndex];
+ }
+}
+
function TypedArraySet(obj, offset) {
var intOffset = IS_UNDEFINED(offset) ? 0 : TO_INTEGER(offset);
if (intOffset < 0) {
throw MakeTypeError("typed_array_set_negative_offset");
}
- if (%TypedArraySetFastCases(this, obj, intOffset))
- return;
-
- var l = obj.length;
- if (IS_UNDEFINED(l)) {
- if (IS_NUMBER(obj)) {
- // For number as a first argument, throw TypeError
- // instead of silently ignoring the call, so that
- // the user knows (s)he did something wrong.
- // (Consistent with Firefox and Blink/WebKit)
- throw MakeTypeError("invalid_argument");
- }
- return;
- }
- if (intOffset + l > this.length) {
- throw MakeRangeError("typed_array_set_source_too_large");
- }
- for (var i = 0; i < l; i++) {
- this[intOffset + i] = obj[i];
+ switch (%TypedArraySetFastCases(this, obj, intOffset)) {
+ // These numbers should be synchronized with runtime.cc.
+ case 0: // TYPED_ARRAY_SET_TYPED_ARRAY_SAME_TYPE
+ return;
+ case 1: // TYPED_ARRAY_SET_TYPED_ARRAY_OVERLAPPING
+ TypedArraySetFromOverlappingTypedArray(this, obj, intOffset);
+ return;
+ case 2: // TYPED_ARRAY_SET_TYPED_ARRAY_NONOVERLAPPING
+ TypedArraySetFromArrayLike(this, obj, obj.length, intOffset);
+ return;
+ case 3: // TYPED_ARRAY_SET_NON_TYPED_ARRAY
+ var l = obj.length;
+ if (IS_UNDEFINED(l)) {
+ if (IS_NUMBER(obj)) {
+ // For number as a first argument, throw TypeError
+ // instead of silently ignoring the call, so that
+ // the user knows (s)he did something wrong.
+ // (Consistent with Firefox and Blink/WebKit)
+ throw MakeTypeError("invalid_argument");
+ }
+ return;
+ }
+ if (intOffset + l > this.length) {
+ throw MakeRangeError("typed_array_set_source_too_large");
+ }
+ TypedArraySetFromArrayLike(this, obj, l, intOffset);
+ return;
}
}
diff --git a/deps/v8/src/typing.cc b/deps/v8/src/typing.cc
index 4220d2110d..727c104ab5 100644
--- a/deps/v8/src/typing.cc
+++ b/deps/v8/src/typing.cc
@@ -404,8 +404,6 @@ void AstTyper::VisitUnaryOperation(UnaryOperation* expr) {
RECURSE(Visit(expr->expression()));
// Collect type feedback.
- Handle<Type> op_type = oracle()->UnaryType(expr->UnaryOperationFeedbackId());
- NarrowLowerType(expr->expression(), op_type);
if (expr->op() == Token::NOT) {
// TODO(rossberg): only do in test or value context.
expr->expression()->RecordToBooleanTypeFeedback(oracle());
@@ -419,16 +417,6 @@ void AstTyper::VisitUnaryOperation(UnaryOperation* expr) {
case Token::VOID:
NarrowType(expr, Bounds(Type::Undefined(), isolate_));
break;
- case Token::ADD:
- case Token::SUB: {
- Type* upper = *expr->expression()->bounds().upper;
- if (!upper->Is(Type::Number())) upper = Type::Number();
- NarrowType(expr, Bounds(Type::Smi(), upper, isolate_));
- break;
- }
- case Token::BIT_NOT:
- NarrowType(expr, Bounds(Type::Smi(), Type::Signed32(), isolate_));
- break;
case Token::TYPEOF:
NarrowType(expr, Bounds(Type::InternalizedString(), isolate_));
break;
diff --git a/deps/v8/src/v8.cc b/deps/v8/src/v8.cc
index cfec0c0c41..93f3efb2e3 100644
--- a/deps/v8/src/v8.cc
+++ b/deps/v8/src/v8.cc
@@ -271,7 +271,12 @@ void V8::InitializeOncePerProcessImpl() {
FLAG_gc_global = true;
FLAG_max_new_space_size = (1 << (kPageSizeBits - 10)) * 2;
}
- if (FLAG_trace_hydrogen) FLAG_parallel_recompilation = false;
+
+ if (FLAG_parallel_recompilation &&
+ (FLAG_trace_hydrogen || FLAG_trace_hydrogen_stubs)) {
+ FLAG_parallel_recompilation = false;
+ PrintF("Parallel recompilation has been disabled for tracing.\n");
+ }
if (FLAG_sweeper_threads <= 0) {
if (FLAG_concurrent_sweeping) {
diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc
index 2111181306..e9f1dff4c3 100644
--- a/deps/v8/src/version.cc
+++ b/deps/v8/src/version.cc
@@ -34,8 +34,8 @@
// system so their names cannot be changed without changing the scripts.
#define MAJOR_VERSION 3
#define MINOR_VERSION 20
-#define BUILD_NUMBER 11
-#define PATCH_LEVEL 0
+#define BUILD_NUMBER 14
+#define PATCH_LEVEL 1
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/deps/v8/src/x64/assembler-x64-inl.h b/deps/v8/src/x64/assembler-x64-inl.h
index ae9aeee812..826c06e5ba 100644
--- a/deps/v8/src/x64/assembler-x64-inl.h
+++ b/deps/v8/src/x64/assembler-x64-inl.h
@@ -373,13 +373,14 @@ void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
bool RelocInfo::IsPatchedReturnSequence() {
// The recognized call sequence is:
- // movq(kScratchRegister, immediate64); call(kScratchRegister);
+ // movq(kScratchRegister, address); call(kScratchRegister);
// It only needs to be distinguished from a return sequence
// movq(rsp, rbp); pop(rbp); ret(n); int3 *6
// The 11th byte is int3 (0xCC) in the return sequence and
// REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
- return pc_[2 + kPointerSize] != 0xCC;
+ return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
+ 0xCC;
#else
return false;
#endif
diff --git a/deps/v8/src/x64/assembler-x64.cc b/deps/v8/src/x64/assembler-x64.cc
index f5939c3b7e..8969d89a6a 100644
--- a/deps/v8/src/x64/assembler-x64.cc
+++ b/deps/v8/src/x64/assembler-x64.cc
@@ -164,10 +164,7 @@ void CpuFeatures::Probe() {
// Patch the code at the current PC with a call to the target address.
// Additional guard int3 instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
- // Load register with immediate 64 and call through a register instructions
- // takes up 13 bytes and int3 takes up one byte.
- static const int kCallCodeSize = 13;
- int code_size = kCallCodeSize + guard_bytes;
+ int code_size = Assembler::kCallSequenceLength + guard_bytes;
// Create a code patcher.
CodePatcher patcher(pc_, code_size);
@@ -183,7 +180,7 @@ void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
patcher.masm()->call(r10);
// Check that the size of the code generated is as expected.
- ASSERT_EQ(kCallCodeSize,
+ ASSERT_EQ(Assembler::kCallSequenceLength,
patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
// Add the requested number of int3 instructions after the call.
diff --git a/deps/v8/src/x64/assembler-x64.h b/deps/v8/src/x64/assembler-x64.h
index 07afc129dc..4e36b6e4bc 100644
--- a/deps/v8/src/x64/assembler-x64.h
+++ b/deps/v8/src/x64/assembler-x64.h
@@ -579,29 +579,36 @@ class Assembler : public AssemblerBase {
// Distance between the address of the code target in the call instruction
// and the return address pushed on the stack.
static const int kCallTargetAddressOffset = 4; // Use 32-bit displacement.
- // Distance between the start of the JS return sequence and where the
- // 32-bit displacement of a near call would be, relative to the pushed
- // return address. TODO: Use return sequence length instead.
- // Should equal Debug::kX64JSReturnSequenceLength - kCallTargetAddressOffset;
- static const int kPatchReturnSequenceAddressOffset = 13 - 4;
- // Distance between start of patched debug break slot and where the
- // 32-bit displacement of a near call would be, relative to the pushed
- // return address. TODO: Use return sequence length instead.
- // Should equal Debug::kX64JSReturnSequenceLength - kCallTargetAddressOffset;
- static const int kPatchDebugBreakSlotAddressOffset = 13 - 4;
- // TODO(X64): Rename this, removing the "Real", after changing the above.
- static const int kRealPatchReturnSequenceAddressOffset = 2;
-
- // Some x64 JS code is padded with int3 to make it large
- // enough to hold an instruction when the debugger patches it.
- static const int kJumpInstructionLength = 13;
- static const int kCallInstructionLength = 13;
- static const int kJSReturnSequenceLength = 13;
+ // The length of call(kScratchRegister).
+ static const int kCallScratchRegisterInstructionLength = 3;
+ // The length of call(Immediate32).
static const int kShortCallInstructionLength = 5;
- static const int kPatchDebugBreakSlotReturnOffset = 4;
-
- // The debug break slot must be able to contain a call instruction.
- static const int kDebugBreakSlotLength = kCallInstructionLength;
+ // The length of movq(kScratchRegister, address).
+ static const int kMoveAddressIntoScratchRegisterInstructionLength =
+ 2 + kPointerSize;
+ // The length of movq(kScratchRegister, address) and call(kScratchRegister).
+ static const int kCallSequenceLength =
+ kMoveAddressIntoScratchRegisterInstructionLength +
+ kCallScratchRegisterInstructionLength;
+
+ // The js return and debug break slot must be able to contain an indirect
+ // call sequence, some x64 JS code is padded with int3 to make it large
+ // enough to hold an instruction when the debugger patches it.
+ static const int kJSReturnSequenceLength = kCallSequenceLength;
+ static const int kDebugBreakSlotLength = kCallSequenceLength;
+ static const int kPatchDebugBreakSlotReturnOffset = kCallTargetAddressOffset;
+ // Distance between the start of the JS return sequence and where the
+ // 32-bit displacement of a short call would be. The short call is from
+ // SetDebugBreakAtIC from debug-x64.cc.
+ static const int kPatchReturnSequenceAddressOffset =
+ kJSReturnSequenceLength - kPatchDebugBreakSlotReturnOffset;
+ // Distance between the start of the JS return sequence and where the
+ // 32-bit displacement of a short call would be. The short call is from
+ // SetDebugBreakAtIC from debug-x64.cc.
+ static const int kPatchDebugBreakSlotAddressOffset =
+ kDebugBreakSlotLength - kPatchDebugBreakSlotReturnOffset;
+ static const int kRealPatchReturnSequenceAddressOffset =
+ kMoveAddressIntoScratchRegisterInstructionLength - kPointerSize;
// One byte opcode for test eax,0xXXXXXXXX.
static const byte kTestEaxByte = 0xA9;
diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc
index d34e4f70d9..18a6e566c6 100644
--- a/deps/v8/src/x64/builtins-x64.cc
+++ b/deps/v8/src/x64/builtins-x64.cc
@@ -59,9 +59,9 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
int num_extra_args = 0;
if (extra_args == NEEDS_CALLED_FUNCTION) {
num_extra_args = 1;
- __ pop(kScratchRegister); // Save return address.
+ __ PopReturnAddressTo(kScratchRegister);
__ push(rdi);
- __ push(kScratchRegister); // Restore return address.
+ __ PushReturnAddressFrom(kScratchRegister);
} else {
ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
}
@@ -249,7 +249,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
if (FLAG_debug_code) {
__ cmpq(rsi, rdi);
__ Assert(less_equal,
- "Unexpected number of pre-allocated property fields.");
+ kUnexpectedNumberOfPreAllocatedPropertyFields);
}
__ InitializeFieldsWithFiller(rcx, rsi, rdx);
__ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
@@ -280,7 +280,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ subq(rdx, rcx);
// Done if no extra properties are to be allocated.
__ j(zero, &allocated);
- __ Assert(positive, "Property allocation count failed.");
+ __ Assert(positive, kPropertyAllocationCountFailed);
// Scale the number of elements by pointer size and add the header for
// FixedArrays to the start of the next object calculation from above.
@@ -429,10 +429,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
}
// Remove caller arguments from the stack and return.
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->constructed_objects(), 1);
__ ret(0);
@@ -723,7 +723,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
__ ret(2 * kPointerSize); // Remove state, rax.
__ bind(&not_tos_rax);
- __ Abort("no cases left");
+ __ Abort(kNoCasesLeft);
}
@@ -772,9 +772,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
{ Label done;
__ testq(rax, rax);
__ j(not_zero, &done);
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ Push(masm->isolate()->factory()->undefined_value());
- __ push(rbx);
+ __ PushReturnAddressFrom(rbx);
__ incq(rax);
__ bind(&done);
}
@@ -895,9 +895,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ cmpq(rdx, Immediate(1));
__ j(not_equal, &non_proxy);
- __ pop(rdx); // return address
+ __ PopReturnAddressTo(rdx);
__ push(rdi); // re-add proxy object as additional argument
- __ push(rdx);
+ __ PushReturnAddressFrom(rdx);
__ incq(rax);
__ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
__ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
@@ -1113,9 +1113,9 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
- __ Check(not_smi, "Unexpected initial map for InternalArray function");
+ __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
__ CmpObjectType(rbx, MAP_TYPE, rcx);
- __ Check(equal, "Unexpected initial map for InternalArray function");
+ __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
}
// Run the native code for the InternalArray function called as a normal
@@ -1143,9 +1143,9 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
- __ Check(not_smi, "Unexpected initial map for Array function");
+ __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(rbx, MAP_TYPE, rcx);
- __ Check(equal, "Unexpected initial map for Array function");
+ __ Check(equal, kUnexpectedInitialMapForArrayFunction);
}
// Run the native code for the Array function called as a normal function.
@@ -1173,7 +1173,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
__ cmpq(rdi, rcx);
- __ Assert(equal, "Unexpected String function");
+ __ Assert(equal, kUnexpectedStringFunction);
}
// Load the first argument into rax and get rid of the rest
@@ -1182,9 +1182,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
__ testq(rax, rax);
__ j(zero, &no_arguments);
__ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
__ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
__ movq(rax, rbx);
// Lookup the argument in the number to string cache.
@@ -1219,9 +1219,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
Immediate(JSValue::kSize >> kPointerSizeLog2));
- __ Assert(equal, "Unexpected string wrapper instance size");
+ __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
__ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
- __ Assert(equal, "Unexpected unused properties of string wrapper");
+ __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
}
__ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
@@ -1268,9 +1268,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// stack, and jump back to the case where the argument is a string.
__ bind(&no_arguments);
__ LoadRoot(rbx, Heap::kempty_stringRootIndex);
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
__ lea(rsp, Operand(rsp, kPointerSize));
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
__ jmp(&argument_is_string);
// At this point the argument is already a string. Call runtime to
@@ -1313,10 +1313,10 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
__ pop(rbp);
// Remove caller arguments from the stack.
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
}
diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc
index 551a71690e..ad33a8c631 100644
--- a/deps/v8/src/x64/code-stubs-x64.cc
+++ b/deps/v8/src/x64/code-stubs-x64.cc
@@ -246,17 +246,6 @@ void ToBooleanStub::InitializeInterfaceDescriptor(
}
-void UnaryOpStub::InitializeInterfaceDescriptor(
- Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
- static Register registers[] = { rax };
- descriptor->register_param_count_ = 1;
- descriptor->register_params_ = registers;
- descriptor->deoptimization_handler_ =
- FUNCTION_ADDR(UnaryOpIC_Miss);
-}
-
-
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -430,12 +419,12 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Create a new closure through the slower runtime call.
__ bind(&gc);
- __ pop(rcx); // Temporarily remove return address.
+ __ PopReturnAddressTo(rcx);
__ pop(rdx);
__ push(rsi);
__ push(rdx);
__ PushRoot(Heap::kFalseValueRootIndex);
- __ push(rcx); // Restore return address.
+ __ PushReturnAddressFrom(rcx);
__ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}
@@ -511,9 +500,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
Label after_sentinel;
__ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
if (FLAG_debug_code) {
- const char* message = "Expected 0 as a Smi sentinel";
__ cmpq(rcx, Immediate(0));
- __ Assert(equal, message);
+ __ Assert(equal, kExpected0AsASmiSentinel);
}
__ movq(rcx, GlobalObjectOperand());
__ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
@@ -695,13 +683,13 @@ void BinaryOpStub::Initialize() {}
void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
- __ pop(rcx); // Save return address.
+ __ PopReturnAddressTo(rcx);
__ push(rdx);
__ push(rax);
// Left and right arguments are now on top.
__ Push(Smi::FromInt(MinorKey()));
- __ push(rcx); // Push return address.
+ __ PushReturnAddressFrom(rcx);
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
@@ -954,7 +942,7 @@ static void BinaryOpStub_GenerateFloatingPointCode(MacroAssembler* masm,
// Set the map.
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
__ movq(FieldOperand(rax, HeapObject::kMapOffset),
heap_number_map);
__ cvtqsi2sd(xmm0, rbx);
@@ -974,8 +962,7 @@ static void BinaryOpStub_GenerateFloatingPointCode(MacroAssembler* masm,
}
// No fall-through from this generated code.
if (FLAG_debug_code) {
- __ Abort("Unexpected fall-through in "
- "BinaryStub_GenerateFloatingPointCode.");
+ __ Abort(kUnexpectedFallThroughInBinaryStubGenerateFloatingPointCode);
}
}
@@ -984,10 +971,10 @@ static void BinaryOpStub_GenerateRegisterArgsPushUnderReturn(
MacroAssembler* masm) {
// Push arguments, but ensure they are under the return address
// for a tail call.
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
__ push(rdx);
__ push(rax);
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
}
@@ -2155,10 +2142,10 @@ void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
__ JumpIfNotSmi(value, &miss);
// Prepare tail call to StoreIC_ArrayLength.
- __ pop(scratch);
+ __ PopReturnAddressTo(scratch);
__ push(receiver);
__ push(value);
- __ push(scratch); // return address
+ __ PushReturnAddressFrom(scratch);
ExternalReference ref =
ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
@@ -2224,9 +2211,9 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// Slow-case: Handle non-smi or out-of-bounds access to arguments
// by calling the runtime system.
__ bind(&slow);
- __ pop(rbx); // Return address.
+ __ PopReturnAddressTo(rbx);
__ push(rdx);
- __ push(rbx);
+ __ PushReturnAddressFrom(rbx);
__ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
@@ -2616,9 +2603,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
if (FLAG_debug_code) {
Condition is_smi = masm->CheckSmi(rax);
__ Check(NegateCondition(is_smi),
- "Unexpected type for RegExp data, FixedArray expected");
+ kUnexpectedTypeForRegExpDataFixedArrayExpected);
__ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
- __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
+ __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
}
// rax: RegExp data (FixedArray)
@@ -2984,7 +2971,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ testb(rbx, Immediate(kIsIndirectStringMask));
- __ Assert(zero, "external string expected, but not found");
+ __ Assert(zero, kExternalStringExpectedButNotFound);
}
__ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
// Move the pointer so that offset-wise, it looks like a sequential string.
@@ -3448,7 +3435,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
}
#ifdef DEBUG
- __ Abort("Unexpected fall-through from string comparison");
+ __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif
__ bind(&check_unequal_objects);
@@ -3486,7 +3473,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
}
// Push arguments below the return address to prepare jump to builtin.
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
__ push(rdx);
__ push(rax);
@@ -3499,8 +3486,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
__ Push(Smi::FromInt(NegativeComparisonResult(cc)));
}
- // Restore return address on the stack.
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
// Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
// tagged as a small integer.
@@ -3669,9 +3655,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Check for function proxy.
__ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
__ j(not_equal, &non_function);
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
__ push(rdi); // put proxy as additional argument under return address
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
__ Set(rax, argc_ + 1);
__ Set(rbx, 0);
__ SetCallKind(rcx, CALL_AS_METHOD);
@@ -4275,7 +4261,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
- __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
+ __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
}
__ movq(kScratchRegister,
Operand(kScratchRegister, kOffsetToMapCheckValue));
@@ -4317,7 +4303,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
- __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
+ __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
__ Set(rax, 0);
}
@@ -4340,7 +4326,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
- __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
+ __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
}
__ ret(2 * kPointerSize + extra_stack_space);
@@ -4349,9 +4335,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ bind(&slow);
if (HasCallSiteInlineCheck()) {
// Remove extra value from the stack.
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
__ pop(rax);
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
}
@@ -4404,7 +4390,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
Factory* factory = masm->isolate()->factory();
// Index is not a smi.
@@ -4454,7 +4440,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort("Unexpected fallthrough from CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
@@ -4480,7 +4466,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -4492,7 +4478,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort("Unexpected fallthrough from CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
@@ -4822,10 +4808,10 @@ void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm,
Register temp) {
- __ pop(temp);
+ __ PopReturnAddressTo(temp);
__ pop(rdx);
__ pop(rax);
- __ push(temp);
+ __ PushReturnAddressFrom(temp);
}
@@ -5040,7 +5026,7 @@ void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
if (FLAG_debug_code) {
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ cmpq(kScratchRegister, candidate);
- __ Assert(equal, "oddball in string table is not undefined or the hole");
+ __ Assert(equal, kOddballInStringTableIsNotUndefinedOrTheHole);
}
__ jmp(&next_probe[i]);
@@ -5529,9 +5515,9 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
// Inline comparison of ASCII strings.
__ IncrementCounter(counters->string_compare_native(), 1);
// Drop arguments from the stack
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
__ addq(rsp, Immediate(2 * kPointerSize));
- __ push(rcx);
+ __ PushReturnAddressFrom(rcx);
GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
// Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
@@ -5800,10 +5786,10 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
// Handle more complex cases in runtime.
__ bind(&runtime);
- __ pop(tmp1); // Return address.
+ __ PopReturnAddressTo(tmp1);
__ push(left);
__ push(right);
- __ push(tmp1);
+ __ PushReturnAddressFrom(tmp1);
if (equality) {
__ TailCallRuntime(Runtime::kStringEquals, 2, 1);
} else {
@@ -6411,16 +6397,14 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
// the runtime.
__ bind(&slow_elements);
- __ pop(rdi); // Pop return address and remember to put back later for tail
- // call.
+ __ PopReturnAddressTo(rdi);
__ push(rbx);
__ push(rcx);
__ push(rax);
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ push(rdx);
- __ push(rdi); // Return return address so that tail call returns to right
- // place.
+ __ PushReturnAddressFrom(rdi);
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
// Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
@@ -6467,7 +6451,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ movq(rbx, MemOperand(rbp, parameter_count_offset));
masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
? kPointerSize
: 0;
@@ -6539,7 +6523,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -6602,7 +6586,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -6668,9 +6652,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
- __ Check(not_smi, "Unexpected initial map for Array function");
+ __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(rcx, MAP_TYPE, rcx);
- __ Check(equal, "Unexpected initial map for Array function");
+ __ Check(equal, kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in rbx or a valid cell
Label okay_here;
@@ -6678,7 +6662,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ Cmp(rbx, undefined_sentinel);
__ j(equal, &okay_here);
__ Cmp(FieldOperand(rbx, 0), cell_map);
- __ Assert(equal, "Expected property cell in register rbx");
+ __ Assert(equal, kExpectedPropertyCellInRegisterRbx);
__ bind(&okay_here);
}
@@ -6783,9 +6767,9 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
- __ Check(not_smi, "Unexpected initial map for Array function");
+ __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(rcx, MAP_TYPE, rcx);
- __ Check(equal, "Unexpected initial map for Array function");
+ __ Check(equal, kUnexpectedInitialMapForArrayFunction);
}
// Figure out the right elements kind
@@ -6804,7 +6788,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ j(equal, &done);
__ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
__ Assert(equal,
- "Invalid ElementsKind for InternalArray or InternalPackedArray");
+ kInvalidElementsKindForInternalArrayOrInternalPackedArray);
__ bind(&done);
}
diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc
index a823bf2e6d..a39f14b075 100644
--- a/deps/v8/src/x64/codegen-x64.cc
+++ b/deps/v8/src/x64/codegen-x64.cc
@@ -394,7 +394,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
if (FLAG_debug_code) {
__ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
- __ Assert(equal, "object found in smi-only array");
+ __ Assert(equal, kObjectFoundInSmiOnlyArray);
}
__ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15);
@@ -577,7 +577,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ testb(result, Immediate(kIsIndirectStringMask));
- __ Assert(zero, "external string expected, but not found");
+ __ Assert(zero, kExternalStringExpectedButNotFound);
}
// Rule out short external strings.
STATIC_CHECK(kShortExternalStringTag != 0);
diff --git a/deps/v8/src/x64/debug-x64.cc b/deps/v8/src/x64/debug-x64.cc
index a337b0d052..e6bc92950a 100644
--- a/deps/v8/src/x64/debug-x64.cc
+++ b/deps/v8/src/x64/debug-x64.cc
@@ -48,11 +48,10 @@ bool BreakLocationIterator::IsDebugBreakAtReturn() {
// CodeGenerator::VisitReturnStatement and VirtualFrame::Exit in codegen-x64.cc
// for the precise return instructions sequence.
void BreakLocationIterator::SetDebugBreakAtReturn() {
- ASSERT(Assembler::kJSReturnSequenceLength >=
- Assembler::kCallInstructionLength);
+ ASSERT(Assembler::kJSReturnSequenceLength >= Assembler::kCallSequenceLength);
rinfo()->PatchCodeWithCall(
Isolate::Current()->debug()->debug_break_return()->entry(),
- Assembler::kJSReturnSequenceLength - Assembler::kCallInstructionLength);
+ Assembler::kJSReturnSequenceLength - Assembler::kCallSequenceLength);
}
@@ -82,7 +81,7 @@ void BreakLocationIterator::SetDebugBreakAtSlot() {
ASSERT(IsDebugBreakSlot());
rinfo()->PatchCodeWithCall(
Isolate::Current()->debug()->debug_break_slot()->entry(),
- Assembler::kDebugBreakSlotLength - Assembler::kCallInstructionLength);
+ Assembler::kDebugBreakSlotLength - Assembler::kCallSequenceLength);
}
diff --git a/deps/v8/src/x64/deoptimizer-x64.cc b/deps/v8/src/x64/deoptimizer-x64.cc
index b45e9663e2..e9cf567f7e 100644
--- a/deps/v8/src/x64/deoptimizer-x64.cc
+++ b/deps/v8/src/x64/deoptimizer-x64.cc
@@ -42,7 +42,7 @@ const int Deoptimizer::table_entry_size_ = 10;
int Deoptimizer::patch_size() {
- return Assembler::kCallInstructionLength;
+ return Assembler::kCallSequenceLength;
}
@@ -69,7 +69,7 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
Address call_address = instruction_start + deopt_data->Pc(i)->value();
// There is room enough to write a long call instruction because we pad
// LLazyBailout instructions with nops if necessary.
- CodePatcher patcher(call_address, Assembler::kCallInstructionLength);
+ CodePatcher patcher(call_address, Assembler::kCallSequenceLength);
patcher.masm()->Call(GetDeoptimizationEntry(isolate, i, LAZY),
RelocInfo::NONE64);
ASSERT(prev_call_address == NULL ||
diff --git a/deps/v8/src/x64/frames-x64.h b/deps/v8/src/x64/frames-x64.h
index a24ab53107..2af5a81bb5 100644
--- a/deps/v8/src/x64/frames-x64.h
+++ b/deps/v8/src/x64/frames-x64.h
@@ -126,6 +126,12 @@ inline Object* JavaScriptFrame::function_slot_object() const {
return Memory::Object_at(fp() + offset);
}
+
+inline void StackHandler::SetFp(Address slot, Address fp) {
+ Memory::Address_at(slot) = fp;
+}
+
+
} } // namespace v8::internal
#endif // V8_X64_FRAMES_X64_H_
diff --git a/deps/v8/src/x64/full-codegen-x64.cc b/deps/v8/src/x64/full-codegen-x64.cc
index bac4e793b2..6333e87bea 100644
--- a/deps/v8/src/x64/full-codegen-x64.cc
+++ b/deps/v8/src/x64/full-codegen-x64.cc
@@ -753,9 +753,9 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// Check that we're not inside a with or catch context.
__ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
__ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
- __ Check(not_equal, "Declaration in with context.");
+ __ Check(not_equal, kDeclarationInWithContext);
__ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
- __ Check(not_equal, "Declaration in catch context.");
+ __ Check(not_equal, kDeclarationInCatchContext);
}
}
@@ -2192,7 +2192,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
__ Push(Smi::FromInt(resume_mode));
__ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
// Not reached: the runtime call returns elsewhere.
- __ Abort("Generator failed to resume.");
+ __ Abort(kGeneratorFailedToResume);
// Throw error if we attempt to operate on a running generator.
__ bind(&wrong_state);
@@ -2456,7 +2456,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// Check for an uninitialized let binding.
__ movq(rdx, location);
__ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ Check(equal, "Let binding re-initialization.");
+ __ Check(equal, kLetBindingReInitialization);
}
// Perform the assignment.
__ movq(location, rax);
@@ -3398,14 +3398,14 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
Register index,
Register value,
uint32_t encoding_mask) {
- __ Check(masm()->CheckSmi(index), "Non-smi index");
- __ Check(masm()->CheckSmi(value), "Non-smi value");
+ __ Check(masm()->CheckSmi(index), kNonSmiIndex);
+ __ Check(masm()->CheckSmi(value), kNonSmiValue);
__ SmiCompare(index, FieldOperand(string, String::kLengthOffset));
- __ Check(less, "Index is too large");
+ __ Check(less, kIndexIsTooLarge);
__ SmiCompare(index, Smi::FromInt(0));
- __ Check(greater_equal, "Index is negative");
+ __ Check(greater_equal, kIndexIsNegative);
__ push(value);
__ movq(value, FieldOperand(string, HeapObject::kMapOffset));
@@ -3413,7 +3413,7 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
__ andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
__ cmpq(value, Immediate(encoding_mask));
- __ Check(equal, "Unexpected string type");
+ __ Check(equal, kUnexpectedStringType);
__ pop(value);
}
@@ -3777,7 +3777,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort("Attempt to use undefined cache.");
+ __ Abort(kAttemptToUseUndefinedCache);
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
context()->Plug(rax);
return;
@@ -3971,7 +3971,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// scratch, string_length(int32), elements(FixedArray*).
if (generate_debug_code_) {
__ cmpq(index, array_length);
- __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
+ __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
}
__ bind(&loop);
__ movq(string, FieldOperand(elements,
@@ -4335,35 +4335,12 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
break;
}
- case Token::SUB:
- EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
- break;
-
- case Token::BIT_NOT:
- EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
- break;
-
default:
UNREACHABLE();
}
}
-void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
- const char* comment) {
- // TODO(svenpanne): Allowing format strings in Comment would be nice here...
- Comment cmt(masm_, comment);
- UnaryOpStub stub(expr->op());
- // UnaryOpStub expects the argument to be in the
- // accumulator register rax.
- VisitForAccumulatorValue(expr->expression());
- SetSourcePosition(expr->position());
- CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
- expr->UnaryOperationFeedbackId());
- context()->Plug(rax);
-}
-
-
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
@@ -4819,7 +4796,7 @@ void FullCodeGenerator::EnterFinallyBlock() {
ASSERT(!result_register().is(rdx));
ASSERT(!result_register().is(rcx));
// Cook return address on top of stack (smi encoded Code* delta)
- __ pop(rdx);
+ __ PopReturnAddressTo(rdx);
__ Move(rcx, masm_->CodeObject());
__ subq(rdx, rcx);
__ Integer32ToSmi(rdx, rdx);
diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc
index 6e238c76ec..4837b9aa9a 100644
--- a/deps/v8/src/x64/ic-x64.cc
+++ b/deps/v8/src/x64/ic-x64.cc
@@ -570,10 +570,10 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
__ j(not_zero, &slow);
// Everything is fine, call runtime.
- __ pop(rcx);
+ __ PopReturnAddressTo(rcx);
__ push(rdx); // receiver
__ push(rax); // key
- __ push(rcx); // return address
+ __ PushReturnAddressFrom(rcx);
// Perform tail call to the entry.
__ TailCallExternalReference(
@@ -1369,10 +1369,10 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->load_miss(), 1);
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rax); // receiver
__ push(rcx); // name
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Perform tail call to the entry.
ExternalReference ref =
@@ -1388,10 +1388,10 @@ void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
// -- rsp[0] : return address
// -----------------------------------
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rax); // receiver
__ push(rcx); // name
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Perform tail call to the entry.
__ TailCallRuntime(Runtime::kGetProperty, 2, 1);
@@ -1408,10 +1408,10 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->keyed_load_miss(), 1);
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rdx); // receiver
__ push(rax); // name
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Perform tail call to the entry.
ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
@@ -1429,10 +1429,10 @@ void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
// -- rsp[0] : return address
// -----------------------------------
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rdx); // receiver
__ push(rax); // name
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Perform tail call to the entry.
__ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
@@ -1468,11 +1468,11 @@ void StoreIC::GenerateMiss(MacroAssembler* masm) {
// -- rsp[0] : return address
// -----------------------------------
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rdx); // receiver
__ push(rcx); // name
__ push(rax); // value
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Perform tail call to the entry.
ExternalReference ref =
@@ -1512,13 +1512,13 @@ void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rdx);
__ push(rcx);
__ push(rax);
__ Push(Smi::FromInt(NONE)); // PropertyAttributes
__ Push(Smi::FromInt(strict_mode));
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Do tail-call to runtime routine.
__ TailCallRuntime(Runtime::kSetProperty, 5, 1);
@@ -1534,13 +1534,13 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
// -- rsp[0] : return address
// -----------------------------------
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
__ Push(Smi::FromInt(NONE)); // PropertyAttributes
__ Push(Smi::FromInt(strict_mode)); // Strict mode.
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Do tail-call to runtime routine.
__ TailCallRuntime(Runtime::kSetProperty, 5, 1);
@@ -1555,11 +1555,11 @@ void StoreIC::GenerateSlow(MacroAssembler* masm) {
// -- rsp[0] : return address
// -----------------------------------
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Do tail-call to runtime routine.
ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate());
@@ -1575,11 +1575,11 @@ void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
// -- rsp[0] : return address
// -----------------------------------
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Do tail-call to runtime routine.
ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
@@ -1595,11 +1595,11 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
// -- rsp[0] : return address
// -----------------------------------
- __ pop(rbx);
+ __ PopReturnAddressTo(rbx);
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
- __ push(rbx); // return address
+ __ PushReturnAddressFrom(rbx);
// Do tail-call to runtime routine.
ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc
index e9210a9eed..d4c125bcdc 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.cc
+++ b/deps/v8/src/x64/lithium-codegen-x64.cc
@@ -96,7 +96,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LChunkBuilder::Abort(const char* reason) {
+void LChunkBuilder::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -120,6 +120,16 @@ void LCodeGen::Comment(const char* format, ...) {
}
+#ifdef _MSC_VER
+void LCodeGen::MakeSureStackPagesMapped(int offset) {
+ const int kPageSize = 4 * KB;
+ for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
+ __ movq(Operand(rsp, offset), rax);
+ }
+}
+#endif
+
+
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
@@ -169,6 +179,9 @@ bool LCodeGen::GeneratePrologue() {
if (slots > 0) {
if (FLAG_debug_code) {
__ subq(rsp, Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+ MakeSureStackPagesMapped(slots * kPointerSize);
+#endif
__ push(rax);
__ Set(rax, slots);
__ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE64);
@@ -182,15 +195,7 @@ bool LCodeGen::GeneratePrologue() {
} else {
__ subq(rsp, Immediate(slots * kPointerSize));
#ifdef _MSC_VER
- // On windows, you may not access the stack more than one page below
- // the most recently mapped page. To make the allocated area randomly
- // accessible, we write to each page in turn (the value is irrelevant).
- const int kPageSize = 4 * KB;
- for (int offset = slots * kPointerSize - kPageSize;
- offset > 0;
- offset -= kPageSize) {
- __ movq(Operand(rsp, offset), rax);
- }
+ MakeSureStackPagesMapped(slots * kPointerSize);
#endif
}
@@ -656,7 +661,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort("bailout was not prepared");
+ Abort(kBailoutWasNotPrepared);
return;
}
@@ -1265,7 +1270,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
bool can_overflow =
instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
if (right->IsConstantOperand()) {
- int right_value = ToInteger32(LConstantOperand::cast(right));
+ int32_t right_value = ToInteger32(LConstantOperand::cast(right));
if (right_value == -1) {
__ negl(left);
} else if (right_value == 0) {
@@ -1357,7 +1362,7 @@ void LCodeGen::DoBitI(LBitI* instr) {
ASSERT(left->IsRegister());
if (right->IsConstantOperand()) {
- int right_operand = ToInteger32(LConstantOperand::cast(right));
+ int32_t right_operand = ToInteger32(LConstantOperand::cast(right));
switch (instr->op()) {
case Token::BIT_AND:
__ andl(ToRegister(left), Immediate(right_operand));
@@ -1366,7 +1371,11 @@ void LCodeGen::DoBitI(LBitI* instr) {
__ orl(ToRegister(left), Immediate(right_operand));
break;
case Token::BIT_XOR:
- __ xorl(ToRegister(left), Immediate(right_operand));
+ if (right_operand == int32_t(~0)) {
+ __ not_(ToRegister(left));
+ } else {
+ __ xorl(ToRegister(left), Immediate(right_operand));
+ }
break;
default:
UNREACHABLE();
@@ -1437,7 +1446,7 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
break;
}
} else {
- int value = ToInteger32(LConstantOperand::cast(right));
+ int32_t value = ToInteger32(LConstantOperand::cast(right));
uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
switch (instr->op()) {
case Token::ROR:
@@ -1637,7 +1646,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ cmpq(value, Immediate(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(equal, "Unexpected string type");
+ __ Check(equal, kUnexpectedStringType);
__ pop(value);
}
@@ -1651,13 +1660,6 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
}
-void LCodeGen::DoBitNotI(LBitNotI* instr) {
- LOperand* input = instr->value();
- ASSERT(input->Equals(instr->result()));
- __ not_(ToRegister(input));
-}
-
-
void LCodeGen::DoThrow(LThrow* instr) {
__ push(ToRegister(instr->value()));
CallRuntime(Runtime::kThrow, 1, instr);
@@ -2563,7 +2565,7 @@ void LCodeGen::DoReturn(LReturn* instr) {
// The argument count parameter is a smi
__ SmiToInteger32(reg, reg);
Register return_addr_reg = reg.is(rcx) ? rbx : rcx;
- __ pop(return_addr_reg);
+ __ PopReturnAddressTo(return_addr_reg);
__ shl(reg, Immediate(kPointerSizeLog2));
__ addq(rsp, reg);
__ jmp(return_addr_reg);
@@ -2634,16 +2636,6 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
}
-void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
- Register object = ToRegister(instr->object());
- ExternalReference sites_list_address = instr->GetReference(isolate());
- __ Load(kScratchRegister, sites_list_address);
- __ movq(FieldOperand(object, instr->hydrogen()->store_field().offset()),
- kScratchRegister);
- __ Store(sites_list_address, object);
-}
-
-
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
@@ -2780,9 +2772,6 @@ static bool CompactEmit(SmallMapList* list,
int i,
Isolate* isolate) {
Handle<Map> map = list->at(i);
- // If the map has ElementsKind transitions, we will generate map checks
- // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
- if (map->HasElementsTransition()) return false;
LookupResult lookup(isolate);
map->LookupDescriptor(NULL, *name, &lookup);
return lookup.IsField() || lookup.IsConstant();
@@ -2906,8 +2895,8 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
if (instr->length()->IsConstantOperand() &&
instr->index()->IsConstantOperand()) {
- int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
- int const_length = ToInteger32(LConstantOperand::cast(instr->length()));
+ int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
+ int32_t const_length = ToInteger32(LConstantOperand::cast(instr->length()));
int index = (const_length - const_index) + 1;
__ movq(result, Operand(arguments, index * kPointerSize));
} else {
@@ -3094,9 +3083,9 @@ Operand LCodeGen::BuildFastArrayOperand(
Register elements_pointer_reg = ToRegister(elements_pointer);
int shift_size = ElementsKindToShiftSize(elements_kind);
if (key->IsConstantOperand()) {
- int constant_value = ToInteger32(LConstantOperand::cast(key));
+ int32_t constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
- Abort("array index constant value too big");
+ Abort(kArrayIndexConstantValueTooBig);
}
return Operand(elements_pointer_reg,
((constant_value + additional_index) << shift_size)
@@ -3434,6 +3423,17 @@ void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
}
+void LCodeGen::EmitInteger64MathAbs(LMathAbs* instr) {
+ Register input_reg = ToRegister(instr->value());
+ __ testq(input_reg, input_reg);
+ Label is_positive;
+ __ j(not_sign, &is_positive, Label::kNear);
+ __ neg(input_reg); // Sets flags.
+ DeoptimizeIf(negative, instr->environment());
+ __ bind(&is_positive);
+}
+
+
void LCodeGen::DoMathAbs(LMathAbs* instr) {
// Class for deferred case.
class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
@@ -3459,6 +3459,8 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) {
__ andpd(input_reg, scratch);
} else if (r.IsInteger32()) {
EmitIntegerMathAbs(instr);
+ } else if (r.IsSmi()) {
+ EmitInteger64MathAbs(instr);
} else { // Tagged case.
DeferredMathAbsTaggedHeapNumber* deferred =
new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
@@ -3955,6 +3957,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
int offset = access.offset();
if (access.IsExternalMemory()) {
+ ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
Register value = ToRegister(instr->value());
if (instr->object()->IsConstantOperand()) {
ASSERT(value.is(rax));
@@ -4090,7 +4093,7 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
__ AssertZeroExtended(reg);
}
if (instr->index()->IsConstantOperand()) {
- int constant_index =
+ int32_t constant_index =
ToInteger32(LConstantOperand::cast(instr->index()));
if (instr->hydrogen()->length()->representation().IsSmi()) {
__ Cmp(reg, Smi::FromInt(constant_index));
@@ -4107,7 +4110,7 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
} else {
Operand length = ToOperand(instr->length());
if (instr->index()->IsConstantOperand()) {
- int constant_index =
+ int32_t constant_index =
ToInteger32(LConstantOperand::cast(instr->index()));
if (instr->hydrogen()->length()->representation().IsSmi()) {
__ Cmp(length, Smi::FromInt(constant_index));
@@ -4394,7 +4397,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
// DoStringCharCodeAt above.
STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
if (instr->index()->IsConstantOperand()) {
- int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
+ int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
__ Push(Smi::FromInt(const_index));
} else {
Register index = ToRegister(instr->index());
@@ -4457,13 +4460,6 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
}
-void LCodeGen::DoStringLength(LStringLength* instr) {
- Register string = ToRegister(instr->string());
- Register result = ToRegister(instr->result());
- __ movq(result, FieldOperand(string, String::kLengthOffset));
-}
-
-
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
LOperand* input = instr->value();
ASSERT(input->IsRegister() || input->IsStackSlot());
@@ -4975,31 +4971,64 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
}
-void LCodeGen::DoCheckMapCommon(Register reg,
- Handle<Map> map,
- LInstruction* instr) {
- Label success;
- __ CompareMap(reg, map, &success);
- DeoptimizeIf(not_equal, instr->environment());
- __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
+ {
+ PushSafepointRegistersScope scope(this);
+ __ push(object);
+ CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
+ __ testq(rax, Immediate(kSmiTagMask));
+ }
+ DeoptimizeIf(zero, instr->environment());
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+ class DeferredCheckMaps: public LDeferredCode {
+ public:
+ DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
+ : LDeferredCode(codegen), instr_(instr), object_(object) {
+ SetExit(check_maps());
+ }
+ virtual void Generate() {
+ codegen()->DoDeferredInstanceMigration(instr_, object_);
+ }
+ Label* check_maps() { return &check_maps_; }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LCheckMaps* instr_;
+ Label check_maps_;
+ Register object_;
+ };
+
if (instr->hydrogen()->CanOmitMapChecks()) return;
+
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
+
+ DeferredCheckMaps* deferred = NULL;
+ if (instr->hydrogen()->has_migration_target()) {
+ deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+ __ bind(deferred->check_maps());
+ }
+
+ Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(reg, map, &success);
__ j(equal, &success);
}
+
Handle<Map> map = map_set->last();
- DoCheckMapCommon(reg, map, instr);
+ __ CompareMap(reg, map, &success);
+ if (instr->hydrogen()->has_migration_target()) {
+ __ j(not_equal, deferred->entry());
+ } else {
+ DeoptimizeIf(not_equal, instr->environment());
+ }
+
__ bind(&success);
}
@@ -5053,22 +5082,6 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
}
-void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
- Register reg = ToRegister(instr->temp());
-
- ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
- ZoneList<Handle<Map> >* maps = instr->maps();
-
- ASSERT(prototypes->length() == maps->length());
-
- for (int i = 0; i < prototypes->length(); i++) {
- __ LoadHeapObject(reg, prototypes->at(i));
- DoCheckMapCommon(reg, maps->at(i), instr);
- }
-}
-
-
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
@@ -5091,10 +5104,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
}
@@ -5146,10 +5161,12 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
__ Push(Smi::FromInt(size));
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
} else {
CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
diff --git a/deps/v8/src/x64/lithium-codegen-x64.h b/deps/v8/src/x64/lithium-codegen-x64.h
index 4eab56c5b4..e13422950f 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.h
+++ b/deps/v8/src/x64/lithium-codegen-x64.h
@@ -102,7 +102,6 @@ class LCodeGen BASE_EMBEDDED {
XMMRegister ToDoubleRegister(LOperand* op) const;
bool IsInteger32Constant(LConstantOperand* op) const;
bool IsSmiConstant(LConstantOperand* op) const;
- int ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
Smi* ToSmi(LConstantOperand* op) const;
double ToDouble(LConstantOperand* op) const;
@@ -132,8 +131,7 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
-
- void DoCheckMapCommon(Register reg, Handle<Map> map, LInstruction* instr);
+ void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -179,7 +177,7 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
@@ -268,6 +266,7 @@ class LCodeGen BASE_EMBEDDED {
uint32_t additional_index = 0);
void EmitIntegerMathAbs(LMathAbs* instr);
+ void EmitInteger64MathAbs(LMathAbs* instr);
// Support for recording safepoint and position information.
void RecordSafepoint(LPointerMap* pointers,
@@ -345,6 +344,13 @@ class LCodeGen BASE_EMBEDDED {
void DoStoreKeyedExternalArray(LStoreKeyed* instr);
void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
void DoStoreKeyedFixedArray(LStoreKeyed* instr);
+#ifdef _MSC_VER
+ // On windows, you may not access the stack more than one page below
+ // the most recently mapped page. To make the allocated area randomly
+ // accessible, we write an arbitrary value to each page in range
+ // rsp + offset - page_size .. rsp in turn.
+ void MakeSureStackPagesMapped(int offset);
+#endif
Zone* zone_;
LPlatformChunk* const chunk_;
diff --git a/deps/v8/src/x64/lithium-x64.cc b/deps/v8/src/x64/lithium-x64.cc
index 4153417473..913e170595 100644
--- a/deps/v8/src/x64/lithium-x64.cc
+++ b/deps/v8/src/x64/lithium-x64.cc
@@ -275,24 +275,6 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
}
-ExternalReference LLinkObjectInList::GetReference(Isolate* isolate) {
- switch (hydrogen()->known_list()) {
- case HLinkObjectInList::ALLOCATION_SITE_LIST:
- return ExternalReference::allocation_sites_list_address(isolate);
- }
-
- UNREACHABLE();
- // Return a dummy value
- return ExternalReference::isolate_address(isolate);
-}
-
-
-void LLinkObjectInList::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add(" offset %d", hydrogen()->store_field().offset());
-}
-
-
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
@@ -461,7 +443,7 @@ LPlatformChunk* LChunkBuilder::Build() {
}
-void LCodeGen::Abort(const char* reason) {
+void LCodeGen::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -672,7 +654,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort("Out of virtual registers while trying to allocate temp register.");
+ Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1339,16 +1321,6 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
}
-LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
- ASSERT(instr->value()->representation().IsInteger32());
- ASSERT(instr->representation().IsInteger32());
- if (instr->HasNoUses()) return NULL;
- LOperand* input = UseRegisterAtStart(instr->value());
- LBitNotI* result = new(zone()) LBitNotI(input);
- return DefineSameAsFirst(result);
-}
-
-
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1763,17 +1735,6 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
}
-LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
- return NULL;
-}
-
-
-LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
- HInductionVariableAnnotation* instr) {
- return NULL;
-}
-
-
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = Use(instr->length());
@@ -1938,15 +1899,6 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
}
-LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LUnallocated* temp = NULL;
- if (!instr->CanOmitPrototypeChecks()) temp = TempRegister();
- LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
- if (instr->CanOmitPrototypeChecks()) return result;
- return AssignEnvironment(result);
-}
-
-
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckFunction(value));
@@ -1955,10 +1907,16 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
+ if (!instr->CanOmitMapChecks()) {
+ value = UseRegisterAtStart(instr->value());
+ if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+ }
LCheckMaps* result = new(zone()) LCheckMaps(value);
- if (instr->CanOmitMapChecks()) return result;
- return AssignEnvironment(result);
+ if (!instr->CanOmitMapChecks()) {
+ AssignEnvironment(result);
+ if (instr->has_migration_target()) return AssignPointerMap(result);
+ }
+ return result;
}
@@ -2042,13 +2000,6 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoLinkObjectInList(HLinkObjectInList* instr) {
- LOperand* object = UseRegister(instr->value());
- LLinkObjectInList* result = new(zone()) LLinkObjectInList(object);
- return result;
-}
-
-
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
LInstruction* result =
@@ -2258,7 +2209,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool is_external_location = instr->access().IsExternalMemory() &&
instr->access().offset() == 0;
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ bool needs_write_barrier_for_map = instr->has_transition() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2345,12 +2296,6 @@ LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
}
-LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
- LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LStringLength(string));
-}
-
-
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
info()->MarkAsDeferredCalling();
LOperand* size = instr->size()->IsConstant()
@@ -2399,7 +2344,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort("Too many spill slots needed for OSR");
+ Abort(kTooManySpillSlotsNeededForOSR);
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/deps/v8/src/x64/lithium-x64.h b/deps/v8/src/x64/lithium-x64.h
index cb3a2b05d7..c3b9db4c59 100644
--- a/deps/v8/src/x64/lithium-x64.h
+++ b/deps/v8/src/x64/lithium-x64.h
@@ -50,7 +50,6 @@ class LCodeGen;
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
- V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -68,7 +67,6 @@ class LCodeGen;
V(CheckMaps) \
V(CheckMapValue) \
V(CheckNonSmi) \
- V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -119,7 +117,6 @@ class LCodeGen;
V(IsUndetectableAndBranch) \
V(Label) \
V(LazyBailout) \
- V(LinkObjectInList) \
V(LoadContextSlot) \
V(LoadExternalArrayPointer) \
V(LoadFieldByIndex) \
@@ -174,7 +171,6 @@ class LCodeGen;
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringCompareAndBranch) \
- V(StringLength) \
V(SubI) \
V(TaggedToI) \
V(ThisFunction) \
@@ -1314,18 +1310,6 @@ class LThrow: public LTemplateInstruction<0, 1, 0> {
};
-class LBitNotI: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LBitNotI(LOperand* value) {
- inputs_[0] = value;
- }
-
- LOperand* value() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
-};
-
-
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -1613,23 +1597,6 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
};
-class LLinkObjectInList: public LTemplateInstruction<0, 1, 0> {
- public:
- explicit LLinkObjectInList(LOperand* object) {
- inputs_[0] = object;
- }
-
- LOperand* object() { return inputs_[0]; }
-
- ExternalReference GetReference(Isolate* isolate);
-
- DECLARE_CONCRETE_INSTRUCTION(LinkObjectInList, "link-object-in-list")
- DECLARE_HYDROGEN_ACCESSOR(LinkObjectInList)
-
- virtual void PrintDataTo(StringStream* stream);
-};
-
-
class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
public:
explicit LLoadContextSlot(LOperand* context) {
@@ -2085,7 +2052,7 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition(); }
+ Handle<Map> transition() const { return hydrogen()->transition_map(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2242,19 +2209,6 @@ class LStringCharFromCode: public LTemplateInstruction<1, 1, 0> {
};
-class LStringLength: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LStringLength(LOperand* string) {
- inputs_[0] = string;
- }
-
- LOperand* string() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
- DECLARE_HYDROGEN_ACCESSOR(StringLength)
-};
-
-
class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
public:
explicit LCheckFunction(LOperand* value) {
@@ -2294,24 +2248,6 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 1> {
- public:
- explicit LCheckPrototypeMaps(LOperand* temp) {
- temps_[0] = temp;
- }
-
- LOperand* temp() { return temps_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
- DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
-
- ZoneList<Handle<JSObject> >* prototypes() const {
- return hydrogen()->prototypes();
- }
- ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
-};
-
-
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2605,7 +2541,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc
index 13d7ddaa68..9c9b1620e5 100644
--- a/deps/v8/src/x64/macro-assembler-x64.cc
+++ b/deps/v8/src/x64/macro-assembler-x64.cc
@@ -155,7 +155,7 @@ int MacroAssembler::LoadAddressSize(ExternalReference source) {
}
}
// Size of movq(destination, src);
- return 10;
+ return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
}
@@ -449,8 +449,8 @@ void MacroAssembler::RecordWrite(Register object,
}
-void MacroAssembler::Assert(Condition cc, const char* msg) {
- if (emit_debug_code()) Check(cc, msg);
+void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
+ if (emit_debug_code()) Check(cc, reason);
}
@@ -466,16 +466,16 @@ void MacroAssembler::AssertFastElements(Register elements) {
CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
Heap::kFixedCOWArrayMapRootIndex);
j(equal, &ok, Label::kNear);
- Abort("JSObject with fast elements map has slow elements");
+ Abort(kJSObjectWithFastElementsMapHasSlowElements);
bind(&ok);
}
}
-void MacroAssembler::Check(Condition cc, const char* msg) {
+void MacroAssembler::Check(Condition cc, BailoutReason reason) {
Label L;
j(cc, &L, Label::kNear);
- Abort(msg);
+ Abort(reason);
// Control will not return here.
bind(&L);
}
@@ -508,12 +508,13 @@ void MacroAssembler::NegativeZeroTest(Register result,
}
-void MacroAssembler::Abort(const char* msg) {
+void MacroAssembler::Abort(BailoutReason reason) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
+ const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
// Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
@@ -838,7 +839,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
CompareRoot(return_value, Heap::kNullValueRootIndex);
j(equal, &ok, Label::kNear);
- Abort("API call returned invalid object");
+ Abort(kAPICallReturnedInvalidObject);
bind(&ok);
#endif
@@ -1038,7 +1039,7 @@ void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
RelocInfo::NONE64);
cmpq(dst, kSmiConstantRegister);
if (allow_stub_calls()) {
- Assert(equal, "Uninitialized kSmiConstantRegister");
+ Assert(equal, kUninitializedKSmiConstantRegister);
} else {
Label ok;
j(equal, &ok, Label::kNear);
@@ -1106,7 +1107,7 @@ void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
Label ok;
j(zero, &ok, Label::kNear);
if (allow_stub_calls()) {
- Abort("Integer32ToSmiField writing to non-smi location");
+ Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
} else {
int3();
}
@@ -1689,12 +1690,12 @@ void MacroAssembler::SmiAdd(Register dst,
if (emit_debug_code()) {
movq(kScratchRegister, src1);
addq(kScratchRegister, src2);
- Check(no_overflow, "Smi addition overflow");
+ Check(no_overflow, kSmiAdditionOverflow);
}
lea(dst, Operand(src1, src2, times_1, 0));
} else {
addq(dst, src2);
- Assert(no_overflow, "Smi addition overflow");
+ Assert(no_overflow, kSmiAdditionOverflow);
}
}
@@ -1726,7 +1727,7 @@ void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
movq(dst, src1);
}
subq(dst, src2);
- Assert(no_overflow, "Smi subtraction overflow");
+ Assert(no_overflow, kSmiSubtractionOverflow);
}
@@ -1758,7 +1759,7 @@ void MacroAssembler::SmiSub(Register dst,
movq(dst, src1);
}
subq(dst, src2);
- Assert(no_overflow, "Smi subtraction overflow");
+ Assert(no_overflow, kSmiSubtractionOverflow);
}
@@ -2155,7 +2156,7 @@ void MacroAssembler::SelectNonSmi(Register dst,
#ifdef DEBUG
if (allow_stub_calls()) { // Check contains a stub call.
Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
- Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
+ Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
}
#endif
STATIC_ASSERT(kSmiTag == 0);
@@ -2510,8 +2511,8 @@ void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
int MacroAssembler::CallSize(ExternalReference ext) {
// Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
- const int kCallInstructionSize = 3;
- return LoadAddressSize(ext) + kCallInstructionSize;
+ return LoadAddressSize(ext) +
+ Assembler::kCallScratchRegisterInstructionLength;
}
@@ -2798,9 +2799,9 @@ void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
if (is_uint16(bytes_dropped)) {
ret(bytes_dropped);
} else {
- pop(scratch);
+ PopReturnAddressTo(scratch);
addq(rsp, Immediate(bytes_dropped));
- push(scratch);
+ PushReturnAddressFrom(scratch);
ret(0);
}
}
@@ -2984,7 +2985,7 @@ void MacroAssembler::LoadUint32(XMMRegister dst,
XMMRegister scratch) {
if (FLAG_debug_code) {
cmpq(src, Immediate(0xffffffff));
- Assert(below_equal, "input GPR is expected to have upper32 cleared");
+ Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
}
cvtqsi2sd(dst, src);
}
@@ -3033,7 +3034,7 @@ void MacroAssembler::AssertNumber(Register object) {
j(is_smi, &ok, Label::kNear);
Cmp(FieldOperand(object, HeapObject::kMapOffset),
isolate()->factory()->heap_number_map());
- Check(equal, "Operand is not a number");
+ Check(equal, kOperandIsNotANumber);
bind(&ok);
}
}
@@ -3042,7 +3043,7 @@ void MacroAssembler::AssertNumber(Register object) {
void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(NegateCondition(is_smi), "Operand is a smi");
+ Check(NegateCondition(is_smi), kOperandIsASmi);
}
}
@@ -3050,7 +3051,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(is_smi, "Operand is not a smi");
+ Check(is_smi, kOperandIsNotASmi);
}
}
@@ -3058,7 +3059,7 @@ void MacroAssembler::AssertSmi(Register object) {
void MacroAssembler::AssertSmi(const Operand& object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
- Check(is_smi, "Operand is not a smi");
+ Check(is_smi, kOperandIsNotASmi);
}
}
@@ -3068,7 +3069,7 @@ void MacroAssembler::AssertZeroExtended(Register int32_register) {
ASSERT(!int32_register.is(kScratchRegister));
movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64);
cmpq(kScratchRegister, int32_register);
- Check(above_equal, "32 bit value in register is not zero-extended");
+ Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
}
}
@@ -3076,12 +3077,12 @@ void MacroAssembler::AssertZeroExtended(Register int32_register) {
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
testb(object, Immediate(kSmiTagMask));
- Check(not_equal, "Operand is a smi and not a string");
+ Check(not_equal, kOperandIsASmiAndNotAString);
push(object);
movq(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, FIRST_NONSTRING_TYPE);
pop(object);
- Check(below, "Operand is not a string");
+ Check(below, kOperandIsNotAString);
}
}
@@ -3089,24 +3090,24 @@ void MacroAssembler::AssertString(Register object) {
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
testb(object, Immediate(kSmiTagMask));
- Check(not_equal, "Operand is a smi and not a name");
+ Check(not_equal, kOperandIsASmiAndNotAName);
push(object);
movq(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, LAST_NAME_TYPE);
pop(object);
- Check(below_equal, "Operand is not a name");
+ Check(below_equal, kOperandIsNotAName);
}
}
void MacroAssembler::AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- const char* message) {
+ BailoutReason reason) {
if (emit_debug_code()) {
ASSERT(!src.is(kScratchRegister));
LoadRoot(kScratchRegister, root_value_index);
cmpq(src, kScratchRegister);
- Check(equal, message);
+ Check(equal, reason);
}
}
@@ -3457,7 +3458,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
isolate()->factory()->undefined_value(),
RelocInfo::EMBEDDED_OBJECT);
cmpq(Operand(rsp, 0), kScratchRegister);
- Check(not_equal, "code object not properly patched");
+ Check(not_equal, kCodeObjectNotProperlyPatched);
}
}
@@ -3466,7 +3467,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
if (emit_debug_code()) {
Move(kScratchRegister, Smi::FromInt(type));
cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
- Check(equal, "stack frame types must match");
+ Check(equal, kStackFrameTypesMustMatch);
}
movq(rsp, rbp);
pop(rbp);
@@ -3567,8 +3568,7 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles) {
// from the caller stack.
lea(rsp, Operand(r15, 1 * kPointerSize));
- // Push the return address to get ready to return.
- push(rcx);
+ PushReturnAddressFrom(rcx);
LeaveExitFrameEpilogue();
}
@@ -3612,7 +3612,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
cmpq(scratch, Immediate(0));
- Check(not_equal, "we should not have an empty lexical context");
+ Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
}
// Load the native context of the current context.
int offset =
@@ -3624,7 +3624,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
if (emit_debug_code()) {
Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
- Check(equal, "JSGlobalObject::native_context should be a native context.");
+ Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
}
// Check if both contexts are the same.
@@ -3643,12 +3643,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
movq(holder_reg,
FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
CompareRoot(holder_reg, Heap::kNullValueRootIndex);
- Check(not_equal, "JSGlobalProxy::context() should not be null.");
+ Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
// Read the first word and compare to native_context_map(),
movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
- Check(equal, "JSGlobalObject::native_context should be a native context.");
+ Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
pop(holder_reg);
}
@@ -3794,7 +3794,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
// Assert that result actually contains top on entry.
Operand top_operand = ExternalOperand(allocation_top);
cmpq(result, top_operand);
- Check(equal, "Unexpected allocation top");
+ Check(equal, kUnexpectedAllocationTop);
#endif
return;
}
@@ -3815,7 +3815,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
AllocationFlags flags) {
if (emit_debug_code()) {
testq(result_end, Immediate(kObjectAlignmentMask));
- Check(zero, "Unaligned allocation in new space");
+ Check(zero, kUnalignedAllocationInNewSpace);
}
ExternalReference allocation_top =
@@ -3862,7 +3862,7 @@ void MacroAssembler::Allocate(int object_size,
// always safe because the limit of the heap is always aligned.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
- Check(zero, "Allocation is not double aligned");
+ Check(zero, kAllocationIsNotDoubleAligned);
}
// Calculate new top and bail out if new space is exhausted.
@@ -3941,7 +3941,7 @@ void MacroAssembler::Allocate(Register object_size,
// always safe because the limit of the heap is always aligned.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
- Check(zero, "Allocation is not double aligned");
+ Check(zero, kAllocationIsNotDoubleAligned);
}
// Calculate new top and bail out if new space is exhausted.
@@ -3975,7 +3975,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) {
Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
cmpq(object, top_operand);
- Check(below, "Undo allocation of non allocated memory");
+ Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
movq(top_operand, object);
}
@@ -4165,7 +4165,7 @@ void MacroAssembler::CopyBytes(Register destination,
ASSERT(min_length >= 0);
if (emit_debug_code()) {
cmpl(length, Immediate(min_length));
- Assert(greater_equal, "Invalid min_length");
+ Assert(greater_equal, kInvalidMinLength);
}
Label loop, done, short_string, short_loop;
@@ -4249,7 +4249,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
if (emit_debug_code()) {
CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
Heap::kWithContextMapRootIndex);
- Check(not_equal, "Variable resolved to with context.");
+ Check(not_equal, kVariableResolvedToWithContext);
}
}
@@ -4340,7 +4340,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
jmp(&ok);
bind(&fail);
- Abort("Global functions must have initial map");
+ Abort(kGlobalFunctionsMustHaveInitialMap);
bind(&ok);
}
}
diff --git a/deps/v8/src/x64/macro-assembler-x64.h b/deps/v8/src/x64/macro-assembler-x64.h
index 9d5d2a31c5..61abc206e1 100644
--- a/deps/v8/src/x64/macro-assembler-x64.h
+++ b/deps/v8/src/x64/macro-assembler-x64.h
@@ -823,6 +823,10 @@ class MacroAssembler: public Assembler {
void Drop(int stack_elements);
void Call(Label* target) { call(target); }
+ void Push(Register src) { push(src); }
+ void Pop(Register dst) { pop(dst); }
+ void PushReturnAddressFrom(Register src) { push(src); }
+ void PopReturnAddressTo(Register dst) { pop(dst); }
// Control Flow
void Jump(Address destination, RelocInfo::Mode rmode);
@@ -837,7 +841,7 @@ class MacroAssembler: public Assembler {
// The size of the code generated for different call instructions.
int CallSize(Address destination, RelocInfo::Mode rmode) {
- return kCallInstructionLength;
+ return kCallSequenceLength;
}
int CallSize(ExternalReference ext);
int CallSize(Handle<Code> code_object) {
@@ -1002,7 +1006,7 @@ class MacroAssembler: public Assembler {
// enabled via --debug-code.
void AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- const char* message);
+ BailoutReason reason);
// ---------------------------------------------------------------------------
// Exception handling
@@ -1319,15 +1323,15 @@ class MacroAssembler: public Assembler {
// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cc, const char* msg);
+ void Assert(Condition cc, BailoutReason reason);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cc, const char* msg);
+ void Check(Condition cc, BailoutReason reason);
// Print a message to stdout and abort execution.
- void Abort(const char* msg);
+ void Abort(BailoutReason msg);
// Check that the stack is aligned.
void CheckStackAlignment();
@@ -1518,6 +1522,10 @@ inline Operand StackSpaceOperand(int index) {
}
+inline Operand StackOperandForReturnAddress(int32_t disp) {
+ return Operand(rsp, disp);
+}
+
#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.cc b/deps/v8/src/x64/regexp-macro-assembler-x64.cc
index 106ffb76da..dcd317c666 100644
--- a/deps/v8/src/x64/regexp-macro-assembler-x64.cc
+++ b/deps/v8/src/x64/regexp-macro-assembler-x64.cc
@@ -397,7 +397,7 @@ void RegExpMacroAssemblerX64::CheckNotBackReference(
// Fail on partial or illegal capture (start of capture after end of capture).
// This must not happen (no back-reference can reference a capture that wasn't
// closed before in the reg-exp).
- __ Check(greater_equal, "Invalid capture referenced");
+ __ Check(greater_equal, kInvalidCaptureReferenced);
// Succeed on empty capture (including non-participating capture)
__ j(equal, &fallthrough);
diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc
index 542018fddd..7ad250a4ad 100644
--- a/deps/v8/src/x64/stub-cache-x64.cc
+++ b/deps/v8/src/x64/stub-cache-x64.cc
@@ -410,9 +410,9 @@ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
// -- rsp[0] : return address
// -- rsp[8] : last argument in the internal frame of the caller
// -----------------------------------
- __ movq(scratch, Operand(rsp, 0));
+ __ movq(scratch, StackOperandForReturnAddress(0));
__ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
- __ movq(Operand(rsp, 0), scratch);
+ __ movq(StackOperandForReturnAddress(0), scratch);
__ Move(scratch, Smi::FromInt(0));
for (int i = 1; i <= kFastApiCallArguments; i++) {
__ movq(Operand(rsp, i * kPointerSize), scratch);
@@ -431,8 +431,9 @@ static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
// -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
// frame.
// -----------------------------------
- __ movq(scratch, Operand(rsp, 0));
- __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
+ __ movq(scratch, StackOperandForReturnAddress(0));
+ __ movq(StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize),
+ scratch);
__ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
}
@@ -829,11 +830,11 @@ void BaseStoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
object->map()->unused_property_fields() == 0) {
// The properties must be extended before we can store the value.
// We jump to a runtime call that extends the properties array.
- __ pop(scratch1); // Return address.
+ __ PopReturnAddressTo(scratch1);
__ push(receiver_reg);
__ Push(transition);
__ push(value_reg);
- __ push(scratch1);
+ __ PushReturnAddressFrom(scratch1);
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
masm->isolate()),
@@ -1283,7 +1284,7 @@ void BaseLoadStubCompiler::GenerateLoadCallback(
Handle<ExecutableAccessorInfo> callback) {
// Insert additional parameters into the stack frame above return address.
ASSERT(!scratch4().is(reg));
- __ pop(scratch4()); // Get return address to place it below.
+ __ PopReturnAddressTo(scratch4());
__ push(receiver()); // receiver
__ push(reg); // holder
@@ -1323,7 +1324,7 @@ void BaseLoadStubCompiler::GenerateLoadCallback(
ASSERT(!name_arg.is(scratch4()));
__ movq(name_arg, rsp);
- __ push(scratch4()); // Restore return address.
+ __ PushReturnAddressFrom(scratch4());
// v8::Arguments::values_ and handler for name.
const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;
@@ -1443,10 +1444,10 @@ void BaseLoadStubCompiler::GenerateLoadInterceptor(
} else { // !compile_followup_inline
// Call the runtime system to load the interceptor.
// Check that the maps haven't changed.
- __ pop(scratch2()); // save old return address
+ __ PopReturnAddressTo(scratch2());
PushInterceptorArguments(masm(), receiver(), holder_reg,
this->name(), interceptor_holder);
- __ push(scratch2()); // restore old return address
+ __ PushReturnAddressFrom(scratch2());
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
@@ -2350,8 +2351,9 @@ Handle<Code> CallStubCompiler::CompileFastApiCall(
name, depth, &miss);
// Move the return address on top of the stack.
- __ movq(rax, Operand(rsp, kFastApiCallArguments * kPointerSize));
- __ movq(Operand(rsp, 0 * kPointerSize), rax);
+ __ movq(rax,
+ StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize));
+ __ movq(StackOperandForReturnAddress(0), rax);
GenerateFastApiCall(masm(), optimization, argc);
@@ -2648,12 +2650,12 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
HandlerFrontend(object, receiver(), holder, name, &success);
__ bind(&success);
- __ pop(scratch1()); // remove the return address
+ __ PopReturnAddressTo(scratch1());
__ push(receiver());
__ Push(callback); // callback info
__ Push(name);
__ push(value());
- __ push(scratch1()); // restore return address
+ __ PushReturnAddressFrom(scratch1());
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
@@ -2715,12 +2717,12 @@ void StoreStubCompiler::GenerateStoreViaSetter(
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
Handle<JSObject> object,
Handle<Name> name) {
- __ pop(scratch1()); // remove the return address
+ __ PopReturnAddressTo(scratch1());
__ push(receiver());
__ push(this->name());
__ push(value());
__ Push(Smi::FromInt(strict_mode()));
- __ push(scratch1()); // restore return address
+ __ PushReturnAddressFrom(scratch1());
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
@@ -2936,7 +2938,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
__ j(equal, &miss);
} else if (FLAG_debug_code) {
__ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
- __ Check(not_equal, "DontDelete cells can't contain the hole");
+ __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
}
HandlerFrontendFooter(name, &success, &miss);
diff --git a/deps/v8/src/zone.h b/deps/v8/src/zone.h
index a12ed79312..1f14115208 100644
--- a/deps/v8/src/zone.h
+++ b/deps/v8/src/zone.h
@@ -246,6 +246,11 @@ class ZoneSplayTree: public SplayTree<Config, ZoneAllocationPolicy> {
explicit ZoneSplayTree(Zone* zone)
: SplayTree<Config, ZoneAllocationPolicy>(ZoneAllocationPolicy(zone)) {}
~ZoneSplayTree();
+
+ INLINE(void* operator new(size_t size, Zone* zone));
+
+ void operator delete(void* pointer) { UNREACHABLE(); }
+ void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
};
diff --git a/deps/v8/test/benchmarks/benchmarks.status b/deps/v8/test/benchmarks/benchmarks.status
new file mode 100644
index 0000000000..651b8d7ad1
--- /dev/null
+++ b/deps/v8/test/benchmarks/benchmarks.status
@@ -0,0 +1,29 @@
+# Copyright 2013 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Too slow in Debug mode.
+octane/mandreel: PASS, SKIP if $mode == debug
diff --git a/deps/v8/test/benchmarks/testcfg.py b/deps/v8/test/benchmarks/testcfg.py
new file mode 100644
index 0000000000..5fb3f51c75
--- /dev/null
+++ b/deps/v8/test/benchmarks/testcfg.py
@@ -0,0 +1,181 @@
+# Copyright 2013 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+import os
+import shutil
+import subprocess
+import tarfile
+
+from testrunner.local import testsuite
+from testrunner.objects import testcase
+
+
+class BenchmarksTestSuite(testsuite.TestSuite):
+
+ def __init__(self, name, root):
+ super(BenchmarksTestSuite, self).__init__(name, root)
+ self.testroot = root
+
+ def ListTests(self, context):
+ tests = []
+ for test in [
+ "kraken/ai-astar",
+ "kraken/audio-beat-detection",
+ "kraken/audio-dft",
+ "kraken/audio-fft",
+ "kraken/audio-oscillator",
+ "kraken/imaging-darkroom",
+ "kraken/imaging-desaturate",
+ "kraken/imaging-gaussian-blur",
+ "kraken/json-parse-financial",
+ "kraken/json-stringify-tinderbox",
+ "kraken/stanford-crypto-aes",
+ "kraken/stanford-crypto-ccm",
+ "kraken/stanford-crypto-pbkdf2",
+ "kraken/stanford-crypto-sha256-iterative",
+
+ "octane/box2d",
+ "octane/code-load",
+ "octane/crypto",
+ "octane/deltablue",
+ "octane/earley-boyer",
+ "octane/gbemu",
+ "octane/mandreel",
+ "octane/navier-stokes",
+ "octane/pdfjs",
+ "octane/raytrace",
+ "octane/regexp",
+ "octane/richards",
+ "octane/splay",
+
+ "sunspider/3d-cube",
+ "sunspider/3d-morph",
+ "sunspider/3d-raytrace",
+ "sunspider/access-binary-trees",
+ "sunspider/access-fannkuch",
+ "sunspider/access-nbody",
+ "sunspider/access-nsieve",
+ "sunspider/bitops-3bit-bits-in-byte",
+ "sunspider/bitops-bits-in-byte",
+ "sunspider/bitops-bitwise-and",
+ "sunspider/bitops-nsieve-bits",
+ "sunspider/controlflow-recursive",
+ "sunspider/crypto-aes",
+ "sunspider/crypto-md5",
+ "sunspider/crypto-sha1",
+ "sunspider/date-format-tofte",
+ "sunspider/date-format-xparb",
+ "sunspider/math-cordic",
+ "sunspider/math-partial-sums",
+ "sunspider/math-spectral-norm",
+ "sunspider/regexp-dna",
+ "sunspider/string-base64",
+ "sunspider/string-fasta",
+ "sunspider/string-tagcloud",
+ "sunspider/string-unpack-code",
+ "sunspider/string-validate-input"]:
+ tests.append(testcase.TestCase(self, test))
+ return tests
+
+ def GetFlagsForTestCase(self, testcase, context):
+ result = []
+ result += context.mode_flags
+ if testcase.path.startswith("kraken"):
+ result.append(os.path.join(self.testroot, "%s-data.js" % testcase.path))
+ result.append(os.path.join(self.testroot, "%s.js" % testcase.path))
+ elif testcase.path.startswith("octane"):
+ result.append(os.path.join(self.testroot, "octane/base.js"))
+ result.append(os.path.join(self.testroot, "%s.js" % testcase.path))
+ result += ["-e", "BenchmarkSuite.RunSuites({});"]
+ elif testcase.path.startswith("sunspider"):
+ result.append(os.path.join(self.testroot, "%s.js" % testcase.path))
+ return testcase.flags + result
+
+ def GetSourceForTest(self, testcase):
+ filename = os.path.join(self.testroot, testcase.path + ".js")
+ with open(filename) as f:
+ return f.read()
+
+ def _DownloadIfNecessary(self, url, revision, target_dir):
+ # Maybe we're still up to date?
+ revision_file = "CHECKED_OUT_%s" % target_dir
+ checked_out_revision = None
+ if os.path.exists(revision_file):
+ with open(revision_file) as f:
+ checked_out_revision = f.read()
+ if checked_out_revision == revision:
+ return
+
+ # If we have a local archive file with the test data, extract it.
+ if os.path.exists(target_dir):
+ shutil.rmtree(target_dir)
+ archive_file = "downloaded_%s_%s.tar.gz" % (target_dir, revision)
+ if os.path.exists(archive_file):
+ with tarfile.open(archive_file, "r:gz") as tar:
+ tar.extractall()
+ with open(revision_file, "w") as f:
+ f.write(revision)
+ return
+
+ # No cached copy. Check out via SVN, and pack as .tar.gz for later use.
+ command = "svn co %s -r %s %s" % (url, revision, target_dir)
+ code = subprocess.call(command, shell=True)
+ if code != 0:
+ raise Exception("Error checking out %s benchmark" % target_dir)
+ with tarfile.open(archive_file, "w:gz") as tar:
+ tar.add("%s" % target_dir)
+ with open(revision_file, "w") as f:
+ f.write(revision)
+
+ def DownloadData(self):
+ old_cwd = os.getcwd()
+ os.chdir(os.path.abspath(self.root))
+
+ self._DownloadIfNecessary(
+ ("http://svn.webkit.org/repository/webkit/trunk/PerformanceTests/"
+ "SunSpider/tests/sunspider-1.0/"),
+ "153700", "sunspider")
+
+ self._DownloadIfNecessary(
+ ("http://kraken-mirror.googlecode.com/svn/trunk/kraken/tests/"
+ "kraken-1.1/"),
+ "8", "kraken")
+
+ self._DownloadIfNecessary(
+ "http://octane-benchmark.googlecode.com/svn/trunk/",
+ "22", "octane")
+
+ os.chdir(old_cwd)
+
+ def VariantFlags(self):
+ # Both --nocrankshaft and --stressopt are very slow.
+ return [[]]
+
+
+def GetSuite(name, root):
+ return BenchmarksTestSuite(name, root)
diff --git a/deps/v8/test/cctest/cctest.cc b/deps/v8/test/cctest/cctest.cc
index 94dcce1305..a2caf0f3ba 100644
--- a/deps/v8/test/cctest/cctest.cc
+++ b/deps/v8/test/cctest/cctest.cc
@@ -99,9 +99,10 @@ v8::Isolate* CcTest::default_isolate_;
class CcTestArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
- public:
virtual void* Allocate(size_t length) { return malloc(length); }
- virtual void Free(void* data) { free(data); }
+ virtual void Free(void* data, size_t length) { free(data); }
+ // TODO(dslomov): Remove when v8:2823 is fixed.
+ virtual void Free(void* data) { UNREACHABLE(); }
};
diff --git a/deps/v8/test/cctest/cctest.h b/deps/v8/test/cctest/cctest.h
index 193126a081..1282d7da0f 100644
--- a/deps/v8/test/cctest/cctest.h
+++ b/deps/v8/test/cctest/cctest.h
@@ -300,4 +300,57 @@ static inline void SimulateFullSpace(v8::internal::PagedSpace* space) {
}
+// Adapted from http://en.wikipedia.org/wiki/Multiply-with-carry
+class RandomNumberGenerator {
+ public:
+ RandomNumberGenerator() {
+ init();
+ }
+
+ void init(uint32_t seed = 0x5688c73e) {
+ static const uint32_t phi = 0x9e3779b9;
+ c = 362436;
+ i = kQSize-1;
+ Q[0] = seed;
+ Q[1] = seed + phi;
+ Q[2] = seed + phi + phi;
+ for (unsigned j = 3; j < kQSize; j++) {
+ Q[j] = Q[j - 3] ^ Q[j - 2] ^ phi ^ j;
+ }
+ }
+
+ uint32_t next() {
+ uint64_t a = 18782;
+ uint32_t r = 0xfffffffe;
+ i = (i + 1) & (kQSize-1);
+ uint64_t t = a * Q[i] + c;
+ c = (t >> 32);
+ uint32_t x = static_cast<uint32_t>(t + c);
+ if (x < c) {
+ x++;
+ c++;
+ }
+ return (Q[i] = r - x);
+ }
+
+ uint32_t next(int max) {
+ return next() % max;
+ }
+
+ bool next(double threshold) {
+ ASSERT(threshold >= 0.0 && threshold <= 1.0);
+ if (threshold == 1.0) return true;
+ if (threshold == 0.0) return false;
+ uint32_t value = next() % 100000;
+ return threshold > static_cast<double>(value)/100000.0;
+ }
+
+ private:
+ static const uint32_t kQSize = 4096;
+ uint32_t Q[kQSize];
+ uint32_t c;
+ uint32_t i;
+};
+
+
#endif // ifndef CCTEST_H_
diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc
index 3c6f85ed72..664f905105 100644
--- a/deps/v8/test/cctest/test-api.cc
+++ b/deps/v8/test/cctest/test-api.cc
@@ -3565,6 +3565,7 @@ static void check_message_0(v8::Handle<v8::Message> message,
CHECK_EQ(5.76, data->NumberValue());
CHECK_EQ(6.75, message->GetScriptResourceName()->NumberValue());
CHECK_EQ(7.56, message->GetScriptData()->NumberValue());
+ CHECK(!message->IsSharedCrossOrigin());
message_received = true;
}
@@ -3591,6 +3592,7 @@ static void check_message_1(v8::Handle<v8::Message> message,
v8::Handle<Value> data) {
CHECK(data->IsNumber());
CHECK_EQ(1337, data->Int32Value());
+ CHECK(!message->IsSharedCrossOrigin());
message_received = true;
}
@@ -3615,6 +3617,7 @@ static void check_message_2(v8::Handle<v8::Message> message,
v8::Local<v8::Value> hidden_property =
v8::Object::Cast(*data)->GetHiddenValue(v8_str("hidden key"));
CHECK(v8_str("hidden value")->Equals(hidden_property));
+ CHECK(!message->IsSharedCrossOrigin());
message_received = true;
}
@@ -3636,6 +3639,112 @@ TEST(MessageHandler2) {
}
+static void check_message_3(v8::Handle<v8::Message> message,
+ v8::Handle<Value> data) {
+ CHECK(message->IsSharedCrossOrigin());
+ CHECK_EQ(6.75, message->GetScriptResourceName()->NumberValue());
+ message_received = true;
+}
+
+
+TEST(MessageHandler3) {
+ message_received = false;
+ v8::HandleScope scope(v8::Isolate::GetCurrent());
+ CHECK(!message_received);
+ v8::V8::AddMessageListener(check_message_3);
+ LocalContext context;
+ v8::ScriptOrigin origin =
+ v8::ScriptOrigin(v8_str("6.75"),
+ v8::Integer::New(1),
+ v8::Integer::New(2),
+ v8::True());
+ v8::Handle<v8::Script> script = Script::Compile(v8_str("throw 'error'"),
+ &origin);
+ script->Run();
+ CHECK(message_received);
+ // clear out the message listener
+ v8::V8::RemoveMessageListeners(check_message_3);
+}
+
+
+static void check_message_4(v8::Handle<v8::Message> message,
+ v8::Handle<Value> data) {
+ CHECK(!message->IsSharedCrossOrigin());
+ CHECK_EQ(6.75, message->GetScriptResourceName()->NumberValue());
+ message_received = true;
+}
+
+
+TEST(MessageHandler4) {
+ message_received = false;
+ v8::HandleScope scope(v8::Isolate::GetCurrent());
+ CHECK(!message_received);
+ v8::V8::AddMessageListener(check_message_4);
+ LocalContext context;
+ v8::ScriptOrigin origin =
+ v8::ScriptOrigin(v8_str("6.75"),
+ v8::Integer::New(1),
+ v8::Integer::New(2),
+ v8::False());
+ v8::Handle<v8::Script> script = Script::Compile(v8_str("throw 'error'"),
+ &origin);
+ script->Run();
+ CHECK(message_received);
+ // clear out the message listener
+ v8::V8::RemoveMessageListeners(check_message_4);
+}
+
+
+static void check_message_5a(v8::Handle<v8::Message> message,
+ v8::Handle<Value> data) {
+ CHECK(message->IsSharedCrossOrigin());
+ CHECK_EQ(6.75, message->GetScriptResourceName()->NumberValue());
+ message_received = true;
+}
+
+
+static void check_message_5b(v8::Handle<v8::Message> message,
+ v8::Handle<Value> data) {
+ CHECK(!message->IsSharedCrossOrigin());
+ CHECK_EQ(6.75, message->GetScriptResourceName()->NumberValue());
+ message_received = true;
+}
+
+
+TEST(MessageHandler5) {
+ message_received = false;
+ v8::HandleScope scope(v8::Isolate::GetCurrent());
+ CHECK(!message_received);
+ v8::V8::AddMessageListener(check_message_5a);
+ LocalContext context;
+ v8::ScriptOrigin origin =
+ v8::ScriptOrigin(v8_str("6.75"),
+ v8::Integer::New(1),
+ v8::Integer::New(2),
+ v8::True());
+ v8::Handle<v8::Script> script = Script::Compile(v8_str("throw 'error'"),
+ &origin);
+ script->Run();
+ CHECK(message_received);
+ // clear out the message listener
+ v8::V8::RemoveMessageListeners(check_message_5a);
+
+ message_received = false;
+ v8::V8::AddMessageListener(check_message_5b);
+ origin =
+ v8::ScriptOrigin(v8_str("6.75"),
+ v8::Integer::New(1),
+ v8::Integer::New(2),
+ v8::False());
+ script = Script::Compile(v8_str("throw 'error'"),
+ &origin);
+ script->Run();
+ CHECK(message_received);
+ // clear out the message listener
+ v8::V8::RemoveMessageListeners(check_message_5b);
+}
+
+
THREADED_TEST(GetSetProperty) {
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
@@ -4279,7 +4388,7 @@ TEST(APIThrowMessageOverwrittenToString) {
}
-static void check_custom_error_message(
+static void check_custom_error_tostring(
v8::Handle<v8::Message> message,
v8::Handle<v8::Value> data) {
const char* uncaught_error = "Uncaught MyError toString";
@@ -4290,7 +4399,7 @@ static void check_custom_error_message(
TEST(CustomErrorToString) {
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
- v8::V8::AddMessageListener(check_custom_error_message);
+ v8::V8::AddMessageListener(check_custom_error_tostring);
CompileRun(
"function MyError(name, message) { "
" this.name = name; "
@@ -4301,6 +4410,58 @@ TEST(CustomErrorToString) {
" return 'MyError toString'; "
"}; "
"throw new MyError('my name', 'my message'); ");
+ v8::V8::RemoveMessageListeners(check_custom_error_tostring);
+}
+
+
+static void check_custom_error_message(
+ v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ const char* uncaught_error = "Uncaught MyError: my message";
+ printf("%s\n", *v8::String::Utf8Value(message->Get()));
+ CHECK(message->Get()->Equals(v8_str(uncaught_error)));
+}
+
+
+TEST(CustomErrorMessage) {
+ LocalContext context;
+ v8::HandleScope scope(context->GetIsolate());
+ v8::V8::AddMessageListener(check_custom_error_message);
+
+ // Handlebars.
+ CompileRun(
+ "function MyError(msg) { "
+ " this.name = 'MyError'; "
+ " this.message = msg; "
+ "} "
+ "MyError.prototype = new Error(); "
+ "throw new MyError('my message'); ");
+
+ // Closure.
+ CompileRun(
+ "function MyError(msg) { "
+ " this.name = 'MyError'; "
+ " this.message = msg; "
+ "} "
+ "inherits = function(childCtor, parentCtor) { "
+ " function tempCtor() {}; "
+ " tempCtor.prototype = parentCtor.prototype; "
+ " childCtor.superClass_ = parentCtor.prototype; "
+ " childCtor.prototype = new tempCtor(); "
+ " childCtor.prototype.constructor = childCtor; "
+ "}; "
+ "inherits(MyError, Error); "
+ "throw new MyError('my message'); ");
+
+ // Object.create.
+ CompileRun(
+ "function MyError(msg) { "
+ " this.name = 'MyError'; "
+ " this.message = msg; "
+ "} "
+ "MyError.prototype = Object.create(Error.prototype); "
+ "throw new MyError('my message'); ");
+
v8::V8::RemoveMessageListeners(check_custom_error_message);
}
@@ -19754,6 +19915,16 @@ THREADED_TEST(Regress260106) {
}
+THREADED_TEST(JSONParse) {
+ LocalContext context;
+ HandleScope scope(context->GetIsolate());
+ Local<Object> obj = v8::JSON::Parse(v8_str("{\"x\":42}"));
+ Handle<Object> global = context->Global();
+ global->Set(v8_str("obj"), obj);
+ ExpectString("JSON.stringify(obj)", "{\"x\":42}");
+}
+
+
#ifndef WIN32
class ThreadInterruptTest {
public:
diff --git a/deps/v8/test/cctest/test-assembler-arm.cc b/deps/v8/test/cctest/test-assembler-arm.cc
index cb677b3bb6..cac162e018 100644
--- a/deps/v8/test/cctest/test-assembler-arm.cc
+++ b/deps/v8/test/cctest/test-assembler-arm.cc
@@ -1418,4 +1418,25 @@ TEST(16) {
CHECK_EQ(0x11121313, t.dst4);
}
+
+TEST(17) {
+ // Test generating labels at high addresses.
+ // Should not assert.
+ CcTest::InitializeVM();
+ Isolate* isolate = Isolate::Current();
+ HandleScope scope(isolate);
+
+ // Generate a code segment that will be longer than 2^24 bytes.
+ Assembler assm(isolate, NULL, 0);
+ for (size_t i = 0; i < 1 << 23 ; ++i) { // 2^23
+ __ nop();
+ }
+
+ Label target;
+ __ b(eq, &target);
+ __ bind(&target);
+ __ nop();
+}
+
+
#undef __
diff --git a/deps/v8/test/cctest/test-code-stubs-ia32.cc b/deps/v8/test/cctest/test-code-stubs-ia32.cc
index 6f8de60471..a3c0b54e25 100644
--- a/deps/v8/test/cctest/test-code-stubs-ia32.cc
+++ b/deps/v8/test/cctest/test-code-stubs-ia32.cc
@@ -94,7 +94,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
Register reg = Register::from_code(reg_num);
if (!reg.is(esp) && !reg.is(ebp) && !reg.is(destination_reg)) {
__ cmp(reg, MemOperand(esp, 0));
- __ Assert(equal, "register was clobbered");
+ __ Assert(equal, kRegisterWasClobbered);
__ add(esp, Immediate(kPointerSize));
}
}
diff --git a/deps/v8/test/cctest/test-code-stubs-x64.cc b/deps/v8/test/cctest/test-code-stubs-x64.cc
index e30c160e7a..0bffb87fef 100644
--- a/deps/v8/test/cctest/test-code-stubs-x64.cc
+++ b/deps/v8/test/cctest/test-code-stubs-x64.cc
@@ -93,7 +93,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
Register reg = Register::from_code(reg_num);
if (!reg.is(rsp) && !reg.is(rbp) && !reg.is(destination_reg)) {
__ cmpq(reg, MemOperand(rsp, 0));
- __ Assert(equal, "register was clobbered");
+ __ Assert(equal, kRegisterWasClobbered);
__ addq(rsp, Immediate(kPointerSize));
}
}
diff --git a/deps/v8/test/cctest/test-compiler.cc b/deps/v8/test/cctest/test-compiler.cc
index bed8a6c92b..b5ba46c4cb 100644
--- a/deps/v8/test/cctest/test-compiler.cc
+++ b/deps/v8/test/cctest/test-compiler.cc
@@ -105,6 +105,7 @@ static Handle<JSFunction> Compile(const char* source) {
Handle<String>(),
0,
0,
+ false,
Handle<Context>(isolate->native_context()),
NULL,
NULL,
diff --git a/deps/v8/test/cctest/test-cpu-profiler.cc b/deps/v8/test/cctest/test-cpu-profiler.cc
index d9ecc41a74..daf8db6151 100644
--- a/deps/v8/test/cctest/test-cpu-profiler.cc
+++ b/deps/v8/test/cctest/test-cpu-profiler.cc
@@ -410,6 +410,21 @@ TEST(GetProfilerWhenIsolateIsNotInitialized) {
}
+TEST(ProfileStartEndTime) {
+ LocalContext env;
+ v8::HandleScope scope(env->GetIsolate());
+ v8::CpuProfiler* cpu_profiler = env->GetIsolate()->GetCpuProfiler();
+
+ int64_t time_before_profiling = i::OS::Ticks();
+ v8::Local<v8::String> profile_name = v8::String::New("test");
+ cpu_profiler->StartCpuProfiling(profile_name);
+ const v8::CpuProfile* profile = cpu_profiler->StopCpuProfiling(profile_name);
+ CHECK(time_before_profiling <= profile->GetStartTime());
+ CHECK(profile->GetStartTime() <= profile->GetEndTime());
+ CHECK(profile->GetEndTime() <= i::OS::Ticks());
+}
+
+
static const v8::CpuProfile* RunProfiler(
LocalContext& env, v8::Handle<v8::Function> function,
v8::Handle<v8::Value> argv[], int argc,
diff --git a/deps/v8/test/cctest/test-global-handles.cc b/deps/v8/test/cctest/test-global-handles.cc
index a274d7546c..ea11dbcf3a 100644
--- a/deps/v8/test/cctest/test-global-handles.cc
+++ b/deps/v8/test/cctest/test-global-handles.cc
@@ -25,6 +25,9 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#include <map>
+#include <vector>
+
#include "global-handles.h"
#include "cctest.h"
@@ -315,3 +318,204 @@ TEST(ImplicitReferences) {
ASSERT(implicit_refs->at(1)->length == 1);
ASSERT(implicit_refs->at(1)->children[0] == g2c1.location());
}
+
+
+static const int kBlockSize = 256;
+
+
+TEST(BlockCollection) {
+ v8::V8::Initialize();
+ Isolate* isolate = Isolate::Current();
+ GlobalHandles* global_handles = isolate->global_handles();
+ CHECK_EQ(0, global_handles->block_count());
+ CHECK_EQ(0, global_handles->global_handles_count());
+ Object* object = isolate->heap()->undefined_value();
+ const int kNumberOfBlocks = 5;
+ typedef Handle<Object> Block[kBlockSize];
+ for (int round = 0; round < 3; round++) {
+ Block blocks[kNumberOfBlocks];
+ for (int i = 0; i < kNumberOfBlocks; i++) {
+ for (int j = 0; j < kBlockSize; j++) {
+ blocks[i][j] = global_handles->Create(object);
+ }
+ }
+ CHECK_EQ(kNumberOfBlocks, global_handles->block_count());
+ for (int i = 0; i < kNumberOfBlocks; i++) {
+ for (int j = 0; j < kBlockSize; j++) {
+ global_handles->Destroy(blocks[i][j].location());
+ }
+ }
+ isolate->heap()->CollectAllAvailableGarbage("BlockCollection");
+ CHECK_EQ(0, global_handles->global_handles_count());
+ CHECK_EQ(1, global_handles->block_count());
+ }
+}
+
+
+class RandomMutationData {
+ public:
+ explicit RandomMutationData(Isolate* isolate)
+ : isolate_(isolate), weak_offset_(0) {}
+
+ void Mutate(double strong_growth_tendency,
+ double weak_growth_tendency = 0.05) {
+ for (int i = 0; i < kBlockSize * 100; i++) {
+ if (rng_.next(strong_growth_tendency)) {
+ AddStrong();
+ } else if (strong_nodes_.size() != 0) {
+ size_t to_remove = rng_.next(static_cast<int>(strong_nodes_.size()));
+ RemoveStrong(to_remove);
+ }
+ if (rng_.next(weak_growth_tendency)) AddWeak();
+ if (rng_.next(0.05)) {
+#ifdef DEBUG
+ isolate_->global_handles()->VerifyBlockInvariants();
+#endif
+ }
+ if (rng_.next(0.0001)) {
+ isolate_->heap()->PerformScavenge();
+ } else if (rng_.next(0.00003)) {
+ isolate_->heap()->CollectAllAvailableGarbage();
+ }
+ CheckSizes();
+ }
+ }
+
+ void RemoveAll() {
+ while (strong_nodes_.size() != 0) {
+ RemoveStrong(strong_nodes_.size() - 1);
+ }
+ isolate_->heap()->PerformScavenge();
+ isolate_->heap()->CollectAllAvailableGarbage();
+ CheckSizes();
+ }
+
+ private:
+ typedef std::vector<Object**> NodeVector;
+ typedef std::map<int32_t, Object**> NodeMap;
+
+ void CheckSizes() {
+ int stored_sizes =
+ static_cast<int>(strong_nodes_.size() + weak_nodes_.size());
+ CHECK_EQ(isolate_->global_handles()->global_handles_count(), stored_sizes);
+ }
+
+ void AddStrong() {
+ Object* object = isolate_->heap()->undefined_value();
+ Object** location = isolate_->global_handles()->Create(object).location();
+ strong_nodes_.push_back(location);
+ }
+
+ void RemoveStrong(size_t offset) {
+ isolate_->global_handles()->Destroy(strong_nodes_.at(offset));
+ strong_nodes_.erase(strong_nodes_.begin() + offset);
+ }
+
+ void AddWeak() {
+ v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(isolate_);
+ v8::HandleScope scope(isolate);
+ v8::Local<v8::Object> object = v8::Object::New();
+ int32_t offset = ++weak_offset_;
+ object->Set(7, v8::Integer::New(offset, isolate));
+ v8::Persistent<v8::Object> persistent(isolate, object);
+ persistent.MakeWeak(isolate, this, WeakCallback);
+ persistent.MarkIndependent();
+ Object** location = v8::Utils::OpenPersistent(persistent).location();
+ bool inserted =
+ weak_nodes_.insert(std::make_pair(offset, location)).second;
+ CHECK(inserted);
+ }
+
+ static void WeakCallback(v8::Isolate* isolate,
+ v8::Persistent<v8::Object>* persistent,
+ RandomMutationData* data) {
+ v8::Local<v8::Object> object =
+ v8::Local<v8::Object>::New(isolate, *persistent);
+ int32_t offset =
+ v8::Local<v8::Integer>::Cast(object->Get(7))->Int32Value();
+ Object** location = v8::Utils::OpenPersistent(persistent).location();
+ NodeMap& weak_nodes = data->weak_nodes_;
+ NodeMap::iterator it = weak_nodes.find(offset);
+ CHECK(it != weak_nodes.end());
+ CHECK(it->second == location);
+ weak_nodes.erase(it);
+ persistent->Dispose();
+ }
+
+ Isolate* isolate_;
+ RandomNumberGenerator rng_;
+ NodeVector strong_nodes_;
+ NodeMap weak_nodes_;
+ int32_t weak_offset_;
+};
+
+
+TEST(RandomMutation) {
+ v8::V8::Initialize();
+ Isolate* isolate = Isolate::Current();
+ CHECK_EQ(0, isolate->global_handles()->block_count());
+ HandleScope handle_scope(isolate);
+ v8::Context::Scope context_scope(
+ v8::Context::New(reinterpret_cast<v8::Isolate*>(isolate)));
+ RandomMutationData data(isolate);
+ // grow some
+ data.Mutate(0.65);
+ data.Mutate(0.55);
+ // balanced mutation
+ for (int i = 0; i < 3; i++) data.Mutate(0.50);
+ // shrink some
+ data.Mutate(0.45);
+ data.Mutate(0.35);
+ // clear everything
+ data.RemoveAll();
+}
+
+
+TEST(EternalHandles) {
+ CcTest::InitializeVM();
+ Isolate* isolate = Isolate::Current();
+ v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
+ EternalHandles* eternals = isolate->eternal_handles();
+
+ // Create a number of handles that will not be on a block boundary
+ const int kArrayLength = 2048-1;
+ int indices[kArrayLength];
+
+ CHECK_EQ(0, eternals->NumberOfHandles());
+ for (int i = 0; i < kArrayLength; i++) {
+ HandleScope scope(isolate);
+ v8::Local<v8::Object> object = v8::Object::New();
+ object->Set(i, v8::Integer::New(i, v8_isolate));
+ if (i % 2 == 0) {
+ // Create with internal api
+ indices[i] = eternals->Create(isolate, *v8::Utils::OpenHandle(*object));
+ } else {
+ // Create with external api
+ indices[i] = object.Eternalize(v8_isolate);
+ }
+ }
+
+ isolate->heap()->CollectAllAvailableGarbage();
+
+ for (int i = 0; i < kArrayLength; i++) {
+ for (int j = 0; j < 2; j++) {
+ HandleScope scope(isolate);
+ v8::Local<v8::Object> object;
+ if (j == 0) {
+ // Test internal api
+ v8::Local<v8::Value> local =
+ v8::Utils::ToLocal(eternals->Get(indices[i]));
+ object = v8::Handle<v8::Object>::Cast(local);
+ } else {
+ // Test external api
+ object = v8::Local<v8::Object>::GetEternal(v8_isolate, indices[i]);
+ }
+ v8::Local<v8::Value> value = object->Get(i);
+ CHECK(value->IsInt32());
+ CHECK_EQ(i, value->Int32Value());
+ }
+ }
+
+ CHECK_EQ(kArrayLength, eternals->NumberOfHandles());
+}
+
diff --git a/deps/v8/test/cctest/test-heap.cc b/deps/v8/test/cctest/test-heap.cc
index 6af9962bd1..5f713503ee 100644
--- a/deps/v8/test/cctest/test-heap.cc
+++ b/deps/v8/test/cctest/test-heap.cc
@@ -960,7 +960,7 @@ TEST(Regression39128) {
Factory* factory = isolate->factory();
// Increase the chance of 'bump-the-pointer' allocation in old space.
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
v8::HandleScope scope(CcTest::isolate());
@@ -984,7 +984,7 @@ TEST(Regression39128) {
// just enough room to allocate JSObject and thus fill the newspace.
int allocation_amount = Min(FixedArray::kMaxSize,
- Page::kMaxNonCodeHeapObjectSize);
+ Page::kMaxNonCodeHeapObjectSize + kPointerSize);
int allocation_len = LenFromSize(allocation_amount);
NewSpace* new_space = HEAP->new_space();
Address* top_addr = new_space->allocation_top_address();
diff --git a/deps/v8/test/cctest/test-strings.cc b/deps/v8/test/cctest/test-strings.cc
index 310d93c04e..d6591a09b9 100644
--- a/deps/v8/test/cctest/test-strings.cc
+++ b/deps/v8/test/cctest/test-strings.cc
@@ -40,58 +40,6 @@
#include "cctest.h"
#include "zone-inl.h"
-// Adapted from http://en.wikipedia.org/wiki/Multiply-with-carry
-class RandomNumberGenerator {
- public:
- RandomNumberGenerator() {
- init();
- }
-
- void init(uint32_t seed = 0x5688c73e) {
- static const uint32_t phi = 0x9e3779b9;
- c = 362436;
- i = kQSize-1;
- Q[0] = seed;
- Q[1] = seed + phi;
- Q[2] = seed + phi + phi;
- for (unsigned j = 3; j < kQSize; j++) {
- Q[j] = Q[j - 3] ^ Q[j - 2] ^ phi ^ j;
- }
- }
-
- uint32_t next() {
- uint64_t a = 18782;
- uint32_t r = 0xfffffffe;
- i = (i + 1) & (kQSize-1);
- uint64_t t = a * Q[i] + c;
- c = (t >> 32);
- uint32_t x = static_cast<uint32_t>(t + c);
- if (x < c) {
- x++;
- c++;
- }
- return (Q[i] = r - x);
- }
-
- uint32_t next(int max) {
- return next() % max;
- }
-
- bool next(double threshold) {
- ASSERT(threshold >= 0.0 && threshold <= 1.0);
- if (threshold == 1.0) return true;
- if (threshold == 0.0) return false;
- uint32_t value = next() % 100000;
- return threshold > static_cast<double>(value)/100000.0;
- }
-
- private:
- static const uint32_t kQSize = 4096;
- uint32_t Q[kQSize];
- uint32_t c;
- uint32_t i;
-};
-
using namespace v8::internal;
diff --git a/deps/v8/test/intl/break-iterator/default-locale.js b/deps/v8/test/intl/break-iterator/default-locale.js
index 39a88574fe..d8d5aeadb2 100644
--- a/deps/v8/test/intl/break-iterator/default-locale.js
+++ b/deps/v8/test/intl/break-iterator/default-locale.js
@@ -38,7 +38,7 @@ assertFalse(options.locale === '');
assertFalse(options.locale === undefined);
// Then check for equality.
-assertEquals(options.locale, getDefaultLocale());
+assertEquals(options.locale, %GetDefaultICULocale());
var iteratorNone = new Intl.v8BreakIterator();
assertEquals(options.locale, iteratorNone.resolvedOptions().locale);
diff --git a/deps/v8/test/intl/break-iterator/wellformed-unsupported-locale.js b/deps/v8/test/intl/break-iterator/wellformed-unsupported-locale.js
index 56457b4829..5ac8fbcd41 100644
--- a/deps/v8/test/intl/break-iterator/wellformed-unsupported-locale.js
+++ b/deps/v8/test/intl/break-iterator/wellformed-unsupported-locale.js
@@ -29,4 +29,4 @@
var iterator = Intl.v8BreakIterator(['xx']);
-assertEquals(iterator.resolvedOptions().locale, getDefaultLocale());
+assertEquals(iterator.resolvedOptions().locale, %GetDefaultICULocale());
diff --git a/deps/v8/test/intl/collator/default-locale.js b/deps/v8/test/intl/collator/default-locale.js
index f6ffba8e1d..db9b1e7330 100644
--- a/deps/v8/test/intl/collator/default-locale.js
+++ b/deps/v8/test/intl/collator/default-locale.js
@@ -38,7 +38,7 @@ assertFalse(options.locale === '');
assertFalse(options.locale === undefined);
// Then check for equality.
-assertEquals(options.locale, getDefaultLocale());
+assertEquals(options.locale, %GetDefaultICULocale());
var collatorNone = new Intl.Collator();
assertEquals(options.locale, collatorNone.resolvedOptions().locale);
@@ -48,5 +48,5 @@ var collatorBraket = new Intl.Collator({});
assertEquals(options.locale, collatorBraket.resolvedOptions().locale);
var collatorWithOptions = new Intl.Collator(undefined, {usage: 'search'});
-assertEquals(getDefaultLocale() + '-u-co-search',
+assertEquals(%GetDefaultICULocale() + '-u-co-search',
collatorWithOptions.resolvedOptions().locale);
diff --git a/deps/v8/test/intl/collator/wellformed-unsupported-locale.js b/deps/v8/test/intl/collator/wellformed-unsupported-locale.js
index ea143fdc63..3963d47a61 100644
--- a/deps/v8/test/intl/collator/wellformed-unsupported-locale.js
+++ b/deps/v8/test/intl/collator/wellformed-unsupported-locale.js
@@ -29,4 +29,4 @@
var collator = Intl.Collator(['xx']);
-assertEquals(collator.resolvedOptions().locale, getDefaultLocale());
+assertEquals(collator.resolvedOptions().locale, %GetDefaultICULocale());
diff --git a/deps/v8/test/intl/date-format/default-locale.js b/deps/v8/test/intl/date-format/default-locale.js
index 2dcb0f8ae7..8e9b7fcec3 100644
--- a/deps/v8/test/intl/date-format/default-locale.js
+++ b/deps/v8/test/intl/date-format/default-locale.js
@@ -38,7 +38,7 @@ assertFalse(options.locale === '');
assertFalse(options.locale === undefined);
// Then check for equality.
-assertEquals(options.locale, getDefaultLocale());
+assertEquals(options.locale, %GetDefaultICULocale());
var dtfNone = new Intl.DateTimeFormat();
assertEquals(options.locale, dtfNone.resolvedOptions().locale);
diff --git a/deps/v8/test/intl/date-format/wellformed-unsupported-locale.js b/deps/v8/test/intl/date-format/wellformed-unsupported-locale.js
index 8867ec6442..6f063abbd1 100644
--- a/deps/v8/test/intl/date-format/wellformed-unsupported-locale.js
+++ b/deps/v8/test/intl/date-format/wellformed-unsupported-locale.js
@@ -29,4 +29,4 @@
var dtf = Intl.DateTimeFormat(['xx']);
-assertEquals(dtf.resolvedOptions().locale, getDefaultLocale());
+assertEquals(dtf.resolvedOptions().locale, %GetDefaultICULocale());
diff --git a/deps/v8/test/intl/intl.status b/deps/v8/test/intl/intl.status
index 913626b1f4..7ef0abb4d2 100644
--- a/deps/v8/test/intl/intl.status
+++ b/deps/v8/test/intl/intl.status
@@ -27,15 +27,7 @@
prefix intl
-# The following tests use getDefaultLocale() or getDefaultTimezone().
-break-iterator/default-locale: FAIL
-break-iterator/wellformed-unsupported-locale: FAIL
-collator/default-locale: FAIL
-collator/wellformed-unsupported-locale: FAIL
-date-format/default-locale: FAIL
+# The following tests use getDefaultTimeZone().
date-format/resolved-options: FAIL
date-format/timezone: FAIL
-date-format/wellformed-unsupported-locale: FAIL
general/v8Intl-exists: FAIL
-number-format/default-locale: FAIL
-number-format/wellformed-unsupported-locale: FAIL
diff --git a/deps/v8/test/intl/number-format/default-locale.js b/deps/v8/test/intl/number-format/default-locale.js
index 0d5e24dd70..cd67ba724f 100644
--- a/deps/v8/test/intl/number-format/default-locale.js
+++ b/deps/v8/test/intl/number-format/default-locale.js
@@ -38,7 +38,7 @@ assertFalse(options.locale === '');
assertFalse(options.locale === undefined);
// Then check for equality.
-assertEquals(options.locale, getDefaultLocale());
+assertEquals(options.locale, %GetDefaultICULocale());
var nfNone = new Intl.NumberFormat();
assertEquals(options.locale, nfNone.resolvedOptions().locale);
diff --git a/deps/v8/test/intl/number-format/wellformed-unsupported-locale.js b/deps/v8/test/intl/number-format/wellformed-unsupported-locale.js
index e3fe9cc087..195eba4c19 100644
--- a/deps/v8/test/intl/number-format/wellformed-unsupported-locale.js
+++ b/deps/v8/test/intl/number-format/wellformed-unsupported-locale.js
@@ -29,4 +29,4 @@
var nf = Intl.NumberFormat(['xx']);
-assertEquals(nf.resolvedOptions().locale, getDefaultLocale());
+assertEquals(nf.resolvedOptions().locale, %GetDefaultICULocale());
diff --git a/deps/v8/test/intl/testcfg.py b/deps/v8/test/intl/testcfg.py
index d25683bed2..09d29d0bee 100644
--- a/deps/v8/test/intl/testcfg.py
+++ b/deps/v8/test/intl/testcfg.py
@@ -52,7 +52,7 @@ class IntlTestSuite(testsuite.TestSuite):
return tests
def GetFlagsForTestCase(self, testcase, context):
- flags = [] + context.mode_flags
+ flags = ["--allow-natives-syntax"] + context.mode_flags
files = []
files.append(os.path.join(self.root, "assert.js"))
diff --git a/deps/v8/test/mjsunit/harmony/array-find.js b/deps/v8/test/mjsunit/harmony/array-find.js
new file mode 100644
index 0000000000..906c9cde7b
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/array-find.js
@@ -0,0 +1,280 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-arrays
+
+assertEquals(1, Array.prototype.find.length);
+
+var a = [21, 22, 23, 24];
+assertEquals(undefined, a.find(function() { return false; }));
+assertEquals(21, a.find(function() { return true; }));
+assertEquals(undefined, a.find(function(val) { return 121 === val; }));
+assertEquals(24, a.find(function(val) { return 24 === val; }));
+assertEquals(23, a.find(function(val) { return 23 === val; }), null);
+assertEquals(22, a.find(function(val) { return 22 === val; }), undefined);
+
+
+//
+// Test predicate is not called when array is empty
+//
+(function() {
+ var a = [];
+ var l = -1;
+ var o = -1;
+ var v = -1;
+ var k = -1;
+
+ a.find(function(val, key, obj) {
+ o = obj;
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return false;
+ });
+
+ assertEquals(-1, l);
+ assertEquals(-1, o);
+ assertEquals(-1, v);
+ assertEquals(-1, k);
+})();
+
+
+//
+// Test predicate is called with correct arguments
+//
+(function() {
+ var a = ["b"];
+ var l = -1;
+ var o = -1;
+ var v = -1;
+ var k = -1;
+
+ var found = a.find(function(val, key, obj) {
+ o = obj;
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return false;
+ });
+
+ assertArrayEquals(a, o);
+ assertEquals(a.length, l);
+ assertEquals("b", v);
+ assertEquals(0, k);
+ assertEquals(undefined, found);
+})();
+
+
+//
+// Test predicate is called array.length times
+//
+(function() {
+ var a = [1, 2, 3, 4, 5];
+ var l = 0;
+ var found = a.find(function() {
+ l++;
+ return false;
+ });
+
+ assertEquals(a.length, l);
+ assertEquals(undefined, found);
+})();
+
+
+//
+// Test Array.prototype.find works with String
+//
+(function() {
+ var a = "abcd";
+ var l = -1;
+ var o = -1;
+ var v = -1;
+ var k = -1;
+ var found = Array.prototype.find.call(a, function(val, key, obj) {
+ o = obj.toString();
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return false;
+ });
+
+ assertEquals(a, o);
+ assertEquals(a.length, l);
+ assertEquals("d", v);
+ assertEquals(3, k);
+ assertEquals(undefined, found);
+
+ found = Array.prototype.find.apply(a, [function(val, key, obj) {
+ o = obj.toString();
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return true;
+ }]);
+
+ assertEquals(a, o);
+ assertEquals(a.length, l);
+ assertEquals("a", v);
+ assertEquals(0, k);
+ assertEquals("a", found);
+})();
+
+
+//
+// Test Array.prototype.find works with exotic object
+//
+(function() {
+ var l = -1;
+ var o = -1;
+ var v = -1;
+ var k = -1;
+ var a = {
+ prop1: "val1",
+ prop2: "val2",
+ isValid: function() {
+ return this.prop1 === "val1" && this.prop2 === "val2";
+ }
+ };
+
+ Array.prototype.push.apply(a, [30, 31, 32]);
+ var found = Array.prototype.find.call(a, function(val, key, obj) {
+ o = obj;
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return !obj.isValid();
+ });
+
+ assertArrayEquals(a, o);
+ assertEquals(3, l);
+ assertEquals(32, v);
+ assertEquals(2, k);
+ assertEquals(undefined, found);
+})();
+
+
+//
+// Test array modifications
+//
+(function() {
+ var a = [1, 2, 3];
+ var found = a.find(function(val) { a.push(val); return false; });
+ assertArrayEquals([1, 2, 3, 1, 2, 3], a);
+ assertEquals(6, a.length);
+ assertEquals(undefined, found);
+
+ a = [1, 2, 3];
+ found = a.find(function(val, key) { a[key] = ++val; return false; });
+ assertArrayEquals([2, 3, 4], a);
+ assertEquals(3, a.length);
+ assertEquals(undefined, found);
+})();
+
+
+//
+// Test predicate is only called for existing elements
+//
+(function() {
+ var a = new Array(30);
+ a[11] = 21;
+ a[7] = 10;
+ a[29] = 31;
+
+ var count = 0;
+ a.find(function() { count++; return false; });
+ assertEquals(3, count);
+})();
+
+
+//
+// Test thisArg
+//
+(function() {
+ // Test String as a thisArg
+ var found = [1, 2, 3].find(function(val, key) {
+ return this.charAt(Number(key)) === String(val);
+ }, "321");
+ assertEquals(2, found);
+
+ // Test object as a thisArg
+ var thisArg = {
+ elementAt: function(key) {
+ return this[key];
+ }
+ };
+ Array.prototype.push.apply(thisArg, ["c", "b", "a"]);
+
+ found = ["a", "b", "c"].find(function(val, key) {
+ return this.elementAt(key) === val;
+ }, thisArg);
+ assertEquals("b", found);
+})();
+
+// Test exceptions
+assertThrows('Array.prototype.find.call(null, function() { })',
+ TypeError);
+assertThrows('Array.prototype.find.call(undefined, function() { })',
+ TypeError);
+assertThrows('Array.prototype.find.apply(null, function() { }, [])',
+ TypeError);
+assertThrows('Array.prototype.find.apply(undefined, function() { }, [])',
+ TypeError);
+
+assertThrows('[].find(null)', TypeError);
+assertThrows('[].find(undefined)', TypeError);
+assertThrows('[].find(0)', TypeError);
+assertThrows('[].find(true)', TypeError);
+assertThrows('[].find(false)', TypeError);
+assertThrows('[].find("")', TypeError);
+assertThrows('[].find({})', TypeError);
+assertThrows('[].find([])', TypeError);
+assertThrows('[].find(/\d+/)', TypeError);
+
+assertThrows('Array.prototype.find.call({}, null)', TypeError);
+assertThrows('Array.prototype.find.call({}, undefined)', TypeError);
+assertThrows('Array.prototype.find.call({}, 0)', TypeError);
+assertThrows('Array.prototype.find.call({}, true)', TypeError);
+assertThrows('Array.prototype.find.call({}, false)', TypeError);
+assertThrows('Array.prototype.find.call({}, "")', TypeError);
+assertThrows('Array.prototype.find.call({}, {})', TypeError);
+assertThrows('Array.prototype.find.call({}, [])', TypeError);
+assertThrows('Array.prototype.find.call({}, /\d+/)', TypeError);
+
+assertThrows('Array.prototype.find.apply({}, null, [])', TypeError);
+assertThrows('Array.prototype.find.apply({}, undefined, [])', TypeError);
+assertThrows('Array.prototype.find.apply({}, 0, [])', TypeError);
+assertThrows('Array.prototype.find.apply({}, true, [])', TypeError);
+assertThrows('Array.prototype.find.apply({}, false, [])', TypeError);
+assertThrows('Array.prototype.find.apply({}, "", [])', TypeError);
+assertThrows('Array.prototype.find.apply({}, {}, [])', TypeError);
+assertThrows('Array.prototype.find.apply({}, [], [])', TypeError);
+assertThrows('Array.prototype.find.apply({}, /\d+/, [])', TypeError); \ No newline at end of file
diff --git a/deps/v8/test/mjsunit/harmony/array-findindex.js b/deps/v8/test/mjsunit/harmony/array-findindex.js
new file mode 100644
index 0000000000..928cad79e4
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/array-findindex.js
@@ -0,0 +1,280 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-arrays
+
+assertEquals(1, Array.prototype.findIndex.length);
+
+var a = [21, 22, 23, 24];
+assertEquals(-1, a.findIndex(function() { return false; }));
+assertEquals(-1, a.findIndex(function(val) { return 121 === val; }));
+assertEquals(0, a.findIndex(function() { return true; }));
+assertEquals(1, a.findIndex(function(val) { return 22 === val; }), undefined);
+assertEquals(2, a.findIndex(function(val) { return 23 === val; }), null);
+assertEquals(3, a.findIndex(function(val) { return 24 === val; }));
+
+
+//
+// Test predicate is not called when array is empty
+//
+(function() {
+ var a = [];
+ var l = -1;
+ var o = -1;
+ var v = -1;
+ var k = -1;
+
+ a.findIndex(function(val, key, obj) {
+ o = obj;
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return false;
+ });
+
+ assertEquals(-1, l);
+ assertEquals(-1, o);
+ assertEquals(-1, v);
+ assertEquals(-1, k);
+})();
+
+
+//
+// Test predicate is called with correct arguments
+//
+(function() {
+ var a = ["b"];
+ var l = -1;
+ var o = -1;
+ var v = -1;
+ var k = -1;
+
+ var index = a.findIndex(function(val, key, obj) {
+ o = obj;
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return false;
+ });
+
+ assertArrayEquals(a, o);
+ assertEquals(a.length, l);
+ assertEquals("b", v);
+ assertEquals(0, k);
+ assertEquals(-1, index);
+})();
+
+
+//
+// Test predicate is called array.length times
+//
+(function() {
+ var a = [1, 2, 3, 4, 5];
+ var l = 0;
+
+ a.findIndex(function() {
+ l++;
+ return false;
+ });
+
+ assertEquals(a.length, l);
+})();
+
+
+//
+// Test Array.prototype.findIndex works with String
+//
+(function() {
+ var a = "abcd";
+ var l = -1;
+ var o = -1;
+ var v = -1;
+ var k = -1;
+
+ var index = Array.prototype.findIndex.call(a, function(val, key, obj) {
+ o = obj.toString();
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return false;
+ });
+
+ assertEquals(a, o);
+ assertEquals(a.length, l);
+ assertEquals("d", v);
+ assertEquals(3, k);
+ assertEquals(-1, index);
+
+ index = Array.prototype.findIndex.apply(a, [function(val, key, obj) {
+ o = obj.toString();
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return true;
+ }]);
+
+ assertEquals(a, o);
+ assertEquals(a.length, l);
+ assertEquals("a", v);
+ assertEquals(0, k);
+ assertEquals(0, index);
+})();
+
+
+//
+// Test Array.prototype.findIndex works with exotic object
+//
+(function() {
+ var l = -1;
+ var o = -1;
+ var v = -1;
+ var k = -1;
+ var a = {
+ prop1: "val1",
+ prop2: "val2",
+ isValid: function() {
+ return this.prop1 === "val1" && this.prop2 === "val2";
+ }
+ };
+
+ Array.prototype.push.apply(a, [30, 31, 32]);
+
+ var index = Array.prototype.findIndex.call(a, function(val, key, obj) {
+ o = obj;
+ l = obj.length;
+ v = val;
+ k = key;
+
+ return !obj.isValid();
+ });
+
+ assertArrayEquals(a, o);
+ assertEquals(3, l);
+ assertEquals(32, v);
+ assertEquals(2, k);
+ assertEquals(-1, index);
+})();
+
+
+//
+// Test array modifications
+//
+(function() {
+ var a = [1, 2, 3];
+ a.findIndex(function(val) { a.push(val); return false; });
+ assertArrayEquals([1, 2, 3, 1, 2, 3], a);
+ assertEquals(6, a.length);
+
+ a = [1, 2, 3];
+ a.findIndex(function(val, key) { a[key] = ++val; return false; });
+ assertArrayEquals([2, 3, 4], a);
+ assertEquals(3, a.length);
+})();
+
+
+//
+// Test predicate is only called for existing elements
+//
+(function() {
+ var a = new Array(30);
+ a[11] = 21;
+ a[7] = 10;
+ a[29] = 31;
+
+ var count = 0;
+ a.findIndex(function() { count++; return false; });
+ assertEquals(3, count);
+})();
+
+
+//
+// Test thisArg
+//
+(function() {
+ // Test String as a thisArg
+ var index = [1, 2, 3].findIndex(function(val, key) {
+ return this.charAt(Number(key)) === String(val);
+ }, "321");
+ assertEquals(1, index);
+
+ // Test object as a thisArg
+ var thisArg = {
+ elementAt: function(key) {
+ return this[key];
+ }
+ };
+ Array.prototype.push.apply(thisArg, ["c", "b", "a"]);
+
+ index = ["a", "b", "c"].findIndex(function(val, key) {
+ return this.elementAt(key) === val;
+ }, thisArg);
+ assertEquals(1, index);
+})();
+
+// Test exceptions
+assertThrows('Array.prototype.findIndex.call(null, function() { })',
+ TypeError);
+assertThrows('Array.prototype.findIndex.call(undefined, function() { })',
+ TypeError);
+assertThrows('Array.prototype.findIndex.apply(null, function() { }, [])',
+ TypeError);
+assertThrows('Array.prototype.findIndex.apply(undefined, function() { }, [])',
+ TypeError);
+
+assertThrows('[].findIndex(null)', TypeError);
+assertThrows('[].findIndex(undefined)', TypeError);
+assertThrows('[].findIndex(0)', TypeError);
+assertThrows('[].findIndex(true)', TypeError);
+assertThrows('[].findIndex(false)', TypeError);
+assertThrows('[].findIndex("")', TypeError);
+assertThrows('[].findIndex({})', TypeError);
+assertThrows('[].findIndex([])', TypeError);
+assertThrows('[].findIndex(/\d+/)', TypeError);
+
+assertThrows('Array.prototype.findIndex.call({}, null)', TypeError);
+assertThrows('Array.prototype.findIndex.call({}, undefined)', TypeError);
+assertThrows('Array.prototype.findIndex.call({}, 0)', TypeError);
+assertThrows('Array.prototype.findIndex.call({}, true)', TypeError);
+assertThrows('Array.prototype.findIndex.call({}, false)', TypeError);
+assertThrows('Array.prototype.findIndex.call({}, "")', TypeError);
+assertThrows('Array.prototype.findIndex.call({}, {})', TypeError);
+assertThrows('Array.prototype.findIndex.call({}, [])', TypeError);
+assertThrows('Array.prototype.findIndex.call({}, /\d+/)', TypeError);
+
+assertThrows('Array.prototype.findIndex.apply({}, null, [])', TypeError);
+assertThrows('Array.prototype.findIndex.apply({}, undefined, [])', TypeError);
+assertThrows('Array.prototype.findIndex.apply({}, 0, [])', TypeError);
+assertThrows('Array.prototype.findIndex.apply({}, true, [])', TypeError);
+assertThrows('Array.prototype.findIndex.apply({}, false, [])', TypeError);
+assertThrows('Array.prototype.findIndex.apply({}, "", [])', TypeError);
+assertThrows('Array.prototype.findIndex.apply({}, {}, [])', TypeError);
+assertThrows('Array.prototype.findIndex.apply({}, [], [])', TypeError);
+assertThrows('Array.prototype.findIndex.apply({}, /\d+/, [])', TypeError); \ No newline at end of file
diff --git a/deps/v8/test/mjsunit/harmony/array-iterator.js b/deps/v8/test/mjsunit/harmony/array-iterator.js
index f3a2627b57..6a402e7393 100644
--- a/deps/v8/test/mjsunit/harmony/array-iterator.js
+++ b/deps/v8/test/mjsunit/harmony/array-iterator.js
@@ -39,7 +39,7 @@ function TestArrayPrototype() {
TestArrayPrototype();
function assertIteratorResult(value, done, result) {
- assertEquals({ value: value, done: done}, result);
+ assertEquals({value: value, done: done}, result);
}
function TestValues() {
@@ -70,9 +70,9 @@ TestValuesMutate();
function TestKeys() {
var array = ['a', 'b', 'c'];
var iterator = array.keys();
- assertIteratorResult('0', false, iterator.next());
- assertIteratorResult('1', false, iterator.next());
- assertIteratorResult('2', false, iterator.next());
+ assertIteratorResult(0, false, iterator.next());
+ assertIteratorResult(1, false, iterator.next());
+ assertIteratorResult(2, false, iterator.next());
assertIteratorResult(void 0, true, iterator.next());
array.push('d');
@@ -83,11 +83,11 @@ TestKeys();
function TestKeysMutate() {
var array = ['a', 'b', 'c'];
var iterator = array.keys();
- assertIteratorResult('0', false, iterator.next());
- assertIteratorResult('1', false, iterator.next());
- assertIteratorResult('2', false, iterator.next());
+ assertIteratorResult(0, false, iterator.next());
+ assertIteratorResult(1, false, iterator.next());
+ assertIteratorResult(2, false, iterator.next());
array.push('d');
- assertIteratorResult('3', false, iterator.next());
+ assertIteratorResult(3, false, iterator.next());
assertIteratorResult(void 0, true, iterator.next());
}
TestKeysMutate();
@@ -95,9 +95,9 @@ TestKeysMutate();
function TestEntries() {
var array = ['a', 'b', 'c'];
var iterator = array.entries();
- assertIteratorResult(['0', 'a'], false, iterator.next());
- assertIteratorResult(['1', 'b'], false, iterator.next());
- assertIteratorResult(['2', 'c'], false, iterator.next());
+ assertIteratorResult([0, 'a'], false, iterator.next());
+ assertIteratorResult([1, 'b'], false, iterator.next());
+ assertIteratorResult([2, 'c'], false, iterator.next());
assertIteratorResult(void 0, true, iterator.next());
array.push('d');
@@ -108,11 +108,11 @@ TestEntries();
function TestEntriesMutate() {
var array = ['a', 'b', 'c'];
var iterator = array.entries();
- assertIteratorResult(['0', 'a'], false, iterator.next());
- assertIteratorResult(['1', 'b'], false, iterator.next());
- assertIteratorResult(['2', 'c'], false, iterator.next());
+ assertIteratorResult([0, 'a'], false, iterator.next());
+ assertIteratorResult([1, 'b'], false, iterator.next());
+ assertIteratorResult([2, 'c'], false, iterator.next());
array.push('d');
- assertIteratorResult(['3', 'd'], false, iterator.next());
+ assertIteratorResult([3, 'd'], false, iterator.next());
assertIteratorResult(void 0, true, iterator.next());
}
TestEntriesMutate();
@@ -168,7 +168,7 @@ function TestForArrayKeys() {
assertEquals(8, buffer.length);
for (var i = 0; i < buffer.length; i++) {
- assertEquals(String(i), buffer[i]);
+ assertEquals(i, buffer[i]);
}
}
TestForArrayKeys();
@@ -189,7 +189,7 @@ function TestForArrayEntries() {
assertTrue(isNaN(buffer[buffer.length - 1][1]));
for (var i = 0; i < buffer.length; i++) {
- assertEquals(String(i), buffer[i][0]);
+ assertEquals(i, buffer[i][0]);
}
}
TestForArrayEntries();
diff --git a/deps/v8/test/mjsunit/harmony/collections.js b/deps/v8/test/mjsunit/harmony/collections.js
index 3e87e6b533..174d3d1dc7 100644
--- a/deps/v8/test/mjsunit/harmony/collections.js
+++ b/deps/v8/test/mjsunit/harmony/collections.js
@@ -207,10 +207,10 @@ TestArbitrary(new WeakMap);
// Test direct constructor call
-assertTrue(Set() instanceof Set);
-assertTrue(Map() instanceof Map);
-assertTrue(WeakMap() instanceof WeakMap);
-assertTrue(WeakSet() instanceof WeakSet);
+assertThrows(function() { Set(); }, TypeError);
+assertThrows(function() { Map(); }, TypeError);
+assertThrows(function() { WeakMap(); }, TypeError);
+assertThrows(function() { WeakSet(); }, TypeError);
// Test whether NaN values as keys are treated correctly.
@@ -308,7 +308,6 @@ TestPrototype(WeakSet);
function TestConstructor(C) {
assertFalse(C === Object.prototype.constructor);
assertSame(C, C.prototype.constructor);
- assertSame(C, C().__proto__.constructor);
assertSame(C, (new C).__proto__.constructor);
}
TestConstructor(Set);
diff --git a/deps/v8/test/mjsunit/harmony/object-observe.js b/deps/v8/test/mjsunit/harmony/object-observe.js
index 103dda6567..06254ee6d8 100644
--- a/deps/v8/test/mjsunit/harmony/object-observe.js
+++ b/deps/v8/test/mjsunit/harmony/object-observe.js
@@ -259,6 +259,16 @@ records = undefined;
Object.deliverChangeRecords(observer.callback);
observer.assertRecordCount(1);
+// Get notifier prior to observing
+reset();
+var obj = {};
+Object.getNotifier(obj);
+Object.observe(obj, observer.callback);
+obj.id = 1;
+Object.deliverChangeRecords(observer.callback);
+observer.assertCallbackRecords([
+ { object: obj, type: 'new', name: 'id' },
+]);
// Observing a continuous stream of changes, while intermittently unobserving.
reset();
@@ -636,8 +646,8 @@ function recursiveObserver2(r) {
Object.observe(obj1, recursiveObserver2);
Object.observe(obj2, recursiveObserver2);
++obj1.a;
-Object.deliverChangeRecords(recursiveObserver2);
// TODO(verwaest): Disabled because of bug 2774.
+// Object.deliverChangeRecords(recursiveObserver2);
// assertEquals(199, recordCount);
@@ -783,6 +793,8 @@ observer.assertNotCalled();
// Test all kinds of objects generically.
function TestObserveConfigurable(obj, prop) {
reset();
+ Object.observe(obj, observer.callback);
+ Object.unobserve(obj, observer.callback);
obj[prop] = 1;
Object.observe(obj, observer.callback);
obj[prop] = 2;
@@ -852,6 +864,8 @@ function TestObserveConfigurable(obj, prop) {
function TestObserveNonConfigurable(obj, prop, desc) {
reset();
+ Object.observe(obj, observer.callback);
+ Object.unobserve(obj, observer.callback);
obj[prop] = 1;
Object.observe(obj, observer.callback);
obj[prop] = 4;
diff --git a/deps/v8/test/mjsunit/harmony/proxies-example-membrane.js b/deps/v8/test/mjsunit/harmony/proxies-example-membrane.js
index c6e7f9f9b1..9e2228a638 100644
--- a/deps/v8/test/mjsunit/harmony/proxies-example-membrane.js
+++ b/deps/v8/test/mjsunit/harmony/proxies-example-membrane.js
@@ -285,8 +285,8 @@ assertEquals(4, wh4.q);
// http://wiki.ecmascript.org/doku.php?id=harmony:proxies#an_identity-preserving_membrane
function createMembrane(wetTarget) {
- var wet2dry = WeakMap();
- var dry2wet = WeakMap();
+ var wet2dry = new WeakMap();
+ var dry2wet = new WeakMap();
function asDry(obj) {
registerObject(obj)
diff --git a/deps/v8/test/mjsunit/harmony/proxies-hash.js b/deps/v8/test/mjsunit/harmony/proxies-hash.js
index abfc0f5f0e..789de35f6d 100644
--- a/deps/v8/test/mjsunit/harmony/proxies-hash.js
+++ b/deps/v8/test/mjsunit/harmony/proxies-hash.js
@@ -51,7 +51,7 @@ function TestSet2(construct, fix, create) {
var p3 = create(handler)
fix(p3)
- var s = construct();
+ var s = new construct();
s.add(p1);
s.add(p2);
assertTrue(s.has(p1));
@@ -88,7 +88,7 @@ function TestMap2(construct, fix, create) {
var p3 = create(handler)
fix(p3)
- var m = construct();
+ var m = new construct();
m.set(p1, 123);
m.set(p2, 321);
assertTrue(m.has(p1));
diff --git a/deps/v8/test/mjsunit/harmony/string-contains.js b/deps/v8/test/mjsunit/harmony/string-contains.js
new file mode 100644
index 0000000000..700a6ed6bc
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/string-contains.js
@@ -0,0 +1,151 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-strings
+
+assertEquals(1, String.prototype.contains.length);
+
+var reString = "asdf[a-z]+(asdf)?";
+assertTrue(reString.contains("[a-z]+"));
+assertTrue(reString.contains("(asdf)?"));
+
+// Random greek letters
+var twoByteString = "\u039a\u0391\u03a3\u03a3\u0395";
+
+// Test single char pattern
+assertTrue(twoByteString.contains("\u039a"), "Lamda");
+assertTrue(twoByteString.contains("\u0391"), "Alpha");
+assertTrue(twoByteString.contains("\u03a3"), "First Sigma");
+assertTrue(twoByteString.contains("\u03a3",3), "Second Sigma");
+assertTrue(twoByteString.contains("\u0395"), "Epsilon");
+assertFalse(twoByteString.contains("\u0392"), "Not beta");
+
+// Test multi-char pattern
+assertTrue(twoByteString.contains("\u039a\u0391"), "lambda Alpha");
+assertTrue(twoByteString.contains("\u0391\u03a3"), "Alpha Sigma");
+assertTrue(twoByteString.contains("\u03a3\u03a3"), "Sigma Sigma");
+assertTrue(twoByteString.contains("\u03a3\u0395"), "Sigma Epsilon");
+
+assertFalse(twoByteString.contains("\u0391\u03a3\u0395"),
+ "Not Alpha Sigma Epsilon");
+
+// Single char pattern
+assertTrue(twoByteString.contains("\u0395"));
+
+assertThrows("String.prototype.contains.call(null, 'test')", TypeError);
+assertThrows("String.prototype.contains.call(null, null)", TypeError);
+assertThrows("String.prototype.contains.call(undefined, undefined)", TypeError);
+
+assertThrows("String.prototype.contains.apply(null, ['test'])", TypeError);
+assertThrows("String.prototype.contains.apply(null, [null])", TypeError);
+assertThrows("String.prototype.contains.apply(undefined, [undefined])", TypeError);
+
+var TEST_INPUT = [{
+ msg: "Empty string", val: ""
+}, {
+ msg: "Number 1234.34", val: 1234.34
+}, {
+ msg: "Integer number 0", val: 0
+}, {
+ msg: "Negative number -1", val: -1
+}, {
+ msg: "Boolean true", val: true
+}, {
+ msg: "Boolean false", val: false
+}, {
+ msg: "Regular expression /\d+/", val: /\d+/
+}, {
+ msg: "Empty array []", val: []
+}, {
+ msg: "Empty object {}", val: {}
+}, {
+ msg: "Array of size 3", val: new Array(3)
+}];
+
+var i = 0;
+var l = TEST_INPUT.length;
+
+for (; i < l; i++) {
+ var e = TEST_INPUT[i];
+ var v = e.val;
+ var s = String(v);
+ assertTrue(s.contains(v), e.msg);
+ assertTrue(String.prototype.contains.call(v, v), e.msg);
+ assertTrue(String.prototype.contains.apply(v, [v]), e.msg);
+}
+
+// Test cases found in FF
+assertTrue("abc".contains("a"));
+assertTrue("abc".contains("b"));
+assertTrue("abc".contains("abc"));
+assertTrue("abc".contains("bc"));
+assertFalse("abc".contains("d"));
+assertFalse("abc".contains("abcd"));
+assertFalse("abc".contains("ac"));
+assertTrue("abc".contains("abc", 0));
+assertTrue("abc".contains("bc", 0));
+assertFalse("abc".contains("de", 0));
+assertTrue("abc".contains("bc", 1));
+assertTrue("abc".contains("c", 1));
+assertFalse("abc".contains("a", 1));
+assertFalse("abc".contains("abc", 1));
+assertTrue("abc".contains("c", 2));
+assertFalse("abc".contains("d", 2));
+assertFalse("abc".contains("dcd", 2));
+assertFalse("abc".contains("a", 42));
+assertFalse("abc".contains("a", Infinity));
+assertTrue("abc".contains("ab", -43));
+assertFalse("abc".contains("cd", -42));
+assertTrue("abc".contains("ab", -Infinity));
+assertFalse("abc".contains("cd", -Infinity));
+assertTrue("abc".contains("ab", NaN));
+assertFalse("abc".contains("cd", NaN));
+assertFalse("xyzzy".contains("zy\0", 2));
+
+var dots = Array(10000).join('.');
+assertFalse(dots.contains("\x01", 10000));
+assertFalse(dots.contains("\0", 10000));
+
+var myobj = {
+ toString: function () {
+ return "abc";
+ },
+ contains: String.prototype.contains
+};
+assertTrue(myobj.contains("abc"));
+assertFalse(myobj.contains("cd"));
+
+var gotStr = false;
+var gotPos = false;
+myobj = {
+ toString: function () {
+ assertFalse(gotPos);
+ gotStr = true;
+ return "xyz";
+ },
+ contains: String.prototype.contains
+};
diff --git a/deps/v8/test/mjsunit/harmony/string-endswith.js b/deps/v8/test/mjsunit/harmony/string-endswith.js
new file mode 100644
index 0000000000..128cf1d023
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/string-endswith.js
@@ -0,0 +1,136 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-strings
+
+assertEquals(1, String.prototype.endsWith.length);
+
+var testString = "Hello World";
+assertTrue(testString.endsWith(""));
+assertTrue(testString.endsWith("World"));
+assertFalse(testString.endsWith("world"));
+assertFalse(testString.endsWith("Hello World!"));
+assertFalse(testString.endsWith(null));
+assertFalse(testString.endsWith(undefined));
+
+assertTrue("null".endsWith(null));
+assertTrue("undefined".endsWith(undefined));
+
+var georgianUnicodeString = "\u10D0\u10D1\u10D2\u10D3\u10D4\u10D5\u10D6\u10D7";
+assertTrue(georgianUnicodeString.endsWith(georgianUnicodeString));
+assertTrue(georgianUnicodeString.endsWith("\u10D4\u10D5\u10D6\u10D7"));
+assertFalse(georgianUnicodeString.endsWith("\u10D0"));
+
+assertThrows("String.prototype.endsWith.call(null, 'test')", TypeError);
+assertThrows("String.prototype.endsWith.call(null, null)", TypeError);
+assertThrows("String.prototype.endsWith.call(undefined, undefined)", TypeError);
+
+assertThrows("String.prototype.endsWith.apply(null, ['test'])", TypeError);
+assertThrows("String.prototype.endsWith.apply(null, [null])", TypeError);
+assertThrows("String.prototype.endsWith.apply(undefined, [undefined])", TypeError);
+
+var TEST_INPUT = [{
+ msg: "Empty string", val: ""
+}, {
+ msg: "Number 1234.34", val: 1234.34
+}, {
+ msg: "Integer number 0", val: 0
+}, {
+ msg: "Negative number -1", val: -1
+}, {
+ msg: "Boolean true", val: true
+}, {
+ msg: "Boolean false", val: false
+}, {
+ msg: "Regular expression /\d+/", val: /\d+/
+}, {
+ msg: "Empty array []", val: []
+}, {
+ msg: "Empty object {}", val: {}
+}, {
+ msg: "Array of size 3", val: new Array(3)
+}];
+
+function testNonStringValues() {
+ var i = 0;
+ var l = TEST_INPUT.length;
+
+ for (; i < l; i++) {
+ var e = TEST_INPUT[i];
+ var v = e.val;
+ var s = String(v);
+ assertTrue(s.endsWith(v), e.msg);
+ assertTrue(String.prototype.endsWith.call(v, v), e.msg);
+ assertTrue(String.prototype.endsWith.apply(v, [v]), e.msg);
+ }
+}
+testNonStringValues();
+
+var CustomType = function(value) {
+ this.endsWith = String.prototype.endsWith;
+ this.toString = function() {
+ return String(value);
+ }
+};
+
+function testCutomType() {
+ var i = 0;
+ var l = TEST_INPUT.length;
+
+ for (; i < l; i++) {
+ var e = TEST_INPUT[i];
+ var v = e.val;
+ var o = new CustomType(v);
+ assertTrue(o.endsWith(v), e.msg);
+ }
+}
+testCutomType();
+
+
+// Test cases found in FF
+assertTrue("abc".endsWith("abc"));
+assertTrue("abcd".endsWith("bcd"));
+assertTrue("abc".endsWith("c"));
+assertFalse("abc".endsWith("abcd"));
+assertFalse("abc".endsWith("bbc"));
+assertFalse("abc".endsWith("b"));
+assertTrue("abc".endsWith("abc", 3));
+assertTrue("abc".endsWith("bc", 3));
+assertFalse("abc".endsWith("a", 3));
+assertTrue("abc".endsWith("bc", 3));
+assertTrue("abc".endsWith("a", 1));
+assertFalse("abc".endsWith("abc", 1));
+assertTrue("abc".endsWith("b", 2));
+assertFalse("abc".endsWith("d", 2));
+assertFalse("abc".endsWith("dcd", 2));
+assertFalse("abc".endsWith("a", 42));
+assertTrue("abc".endsWith("bc", Infinity));
+assertFalse("abc".endsWith("a", Infinity));
+assertTrue("abc".endsWith("bc", undefined));
+assertFalse("abc".endsWith("bc", -43));
+assertFalse("abc".endsWith("bc", -Infinity));
+assertFalse("abc".endsWith("bc", NaN));
diff --git a/deps/v8/test/mjsunit/harmony/string-repeat.js b/deps/v8/test/mjsunit/harmony/string-repeat.js
new file mode 100644
index 0000000000..182e5c0e0e
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/string-repeat.js
@@ -0,0 +1,74 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-strings
+
+assertEquals("000", String.prototype.repeat.call(0, 3));
+assertEquals("-1-1-1", String.prototype.repeat.call(-1, 3));
+assertEquals("2.12.12.1", String.prototype.repeat.call(2.1, 3));
+assertEquals("", String.prototype.repeat.call([], 3));
+assertEquals("1,2,3", String.prototype.repeat.call([1, 2, 3], 1));
+assertEquals("true", String.prototype.repeat.call(true, 1));
+assertEquals("false", String.prototype.repeat.call(false, 1));
+assertEquals("[object Object]", String.prototype.repeat.call({}, 1));
+
+assertEquals("000", String.prototype.repeat.apply(0, [3]));
+assertEquals("-1-1-1", String.prototype.repeat.apply(-1, [3]));
+assertEquals("2.12.12.1", String.prototype.repeat.apply(2.1, [3]));
+assertEquals("", String.prototype.repeat.apply([], [3]));
+assertEquals("1,2,3", String.prototype.repeat.apply([1, 2, 3], [1]));
+assertEquals("true", String.prototype.repeat.apply(true, [1]));
+assertEquals("false", String.prototype.repeat.apply(false, [1]));
+assertEquals("[object Object]", String.prototype.repeat.apply({}, [1]));
+
+assertEquals("\u10D8\u10D8\u10D8", "\u10D8".repeat(3));
+
+assertThrows('String.prototype.repeat.call(null, 1)', TypeError);
+assertThrows('String.prototype.repeat.call(undefined, 1)', TypeError);
+assertThrows('String.prototype.repeat.apply(null, [1])', TypeError);
+assertThrows('String.prototype.repeat.apply(undefined, [1])', TypeError);
+
+// Test cases found in FF
+assertEquals("abc", "abc".repeat(1));
+assertEquals("abcabc", "abc".repeat(2));
+assertEquals("abcabcabc", "abc".repeat(3));
+assertEquals("aaaaaaaaaa", "a".repeat(10));
+assertEquals("", "".repeat(5));
+assertEquals("", "abc".repeat(0));
+assertEquals("abcabc", "abc".repeat(2.0));
+
+assertThrows('"a".repeat(-1)', RangeError);
+assertThrows('"a".repeat(Number.POSITIVE_INFINITY)', RangeError);
+
+var myobj = {
+ toString: function() {
+ return "abc";
+ },
+ repeat : String.prototype.repeat
+};
+assertEquals("abc", myobj.repeat(1));
+assertEquals("abcabc", myobj.repeat(2)); \ No newline at end of file
diff --git a/deps/v8/test/mjsunit/harmony/string-startswith.js b/deps/v8/test/mjsunit/harmony/string-startswith.js
new file mode 100644
index 0000000000..60c85d31b3
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/string-startswith.js
@@ -0,0 +1,135 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-strings
+
+assertEquals(1, String.prototype.startsWith.length);
+
+var testString = "Hello World";
+assertTrue(testString.startsWith(""));
+assertTrue(testString.startsWith("Hello"));
+assertFalse(testString.startsWith("hello"));
+assertFalse(testString.startsWith("Hello World!"));
+assertFalse(testString.startsWith(null));
+assertFalse(testString.startsWith(undefined));
+
+assertTrue("null".startsWith(null));
+assertTrue("undefined".startsWith(undefined));
+
+var georgianUnicodeString = "\u10D0\u10D1\u10D2\u10D3\u10D4\u10D5\u10D6\u10D7";
+assertTrue(georgianUnicodeString.startsWith(georgianUnicodeString));
+assertTrue(georgianUnicodeString.startsWith("\u10D0\u10D1\u10D2"));
+assertFalse(georgianUnicodeString.startsWith("\u10D8"));
+
+assertThrows("String.prototype.startsWith.call(null, 'test')", TypeError);
+assertThrows("String.prototype.startsWith.call(null, null)", TypeError);
+assertThrows("String.prototype.startsWith.call(undefined, undefined)", TypeError);
+
+assertThrows("String.prototype.startsWith.apply(null, ['test'])", TypeError);
+assertThrows("String.prototype.startsWith.apply(null, [null])", TypeError);
+assertThrows("String.prototype.startsWith.apply(undefined, [undefined])", TypeError);
+
+var TEST_INPUT = [{
+ msg: "Empty string", val: ""
+}, {
+ msg: "Number 1234.34", val: 1234.34
+}, {
+ msg: "Integer number 0", val: 0
+}, {
+ msg: "Negative number -1", val: -1
+}, {
+ msg: "Boolean true", val: true
+}, {
+ msg: "Boolean false", val: false
+}, {
+ msg: "Regular expression /\d+/", val: /\d+/
+}, {
+ msg: "Empty array []", val: []
+}, {
+ msg: "Empty object {}", val: {}
+}, {
+ msg: "Array of size 3", val: new Array(3)
+}];
+
+function testNonStringValues() {
+ var i = 0;
+ var l = TEST_INPUT.length;
+
+ for (; i < l; i++) {
+ var e = TEST_INPUT[i];
+ var v = e.val;
+ var s = String(v);
+ assertTrue(s.startsWith(v), e.msg);
+ assertTrue(String.prototype.startsWith.call(v, v), e.msg);
+ assertTrue(String.prototype.startsWith.apply(v, [v]), e.msg);
+ }
+}
+testNonStringValues();
+
+var CustomType = function(value) {
+ this.startsWith = String.prototype.startsWith;
+ this.toString = function() {
+ return String(value);
+ }
+};
+
+function testCutomType() {
+ var i = 0;
+ var l = TEST_INPUT.length;
+
+ for (; i < l; i++) {
+ var e = TEST_INPUT[i];
+ var v = e.val;
+ var o = new CustomType(v);
+ assertTrue(o.startsWith(v), e.msg);
+ }
+}
+testCutomType();
+
+// Test cases found in FF
+assertTrue("abc".startsWith("abc"));
+assertTrue("abcd".startsWith("abc"));
+assertTrue("abc".startsWith("a"));
+assertFalse("abc".startsWith("abcd"));
+assertFalse("abc".startsWith("bcde"));
+assertFalse("abc".startsWith("b"));
+assertTrue("abc".startsWith("abc", 0));
+assertFalse("abc".startsWith("bc", 0));
+assertTrue("abc".startsWith("bc", 1));
+assertFalse("abc".startsWith("c", 1));
+assertFalse("abc".startsWith("abc", 1));
+assertTrue("abc".startsWith("c", 2));
+assertFalse("abc".startsWith("d", 2));
+assertFalse("abc".startsWith("dcd", 2));
+assertFalse("abc".startsWith("a", 42));
+assertFalse("abc".startsWith("a", Infinity));
+assertTrue("abc".startsWith("a", NaN));
+assertFalse("abc".startsWith("b", NaN));
+assertTrue("abc".startsWith("ab", -43));
+assertTrue("abc".startsWith("ab", -Infinity));
+assertFalse("abc".startsWith("bc", -42));
+assertFalse("abc".startsWith("bc", -Infinity));
diff --git a/deps/v8/test/mjsunit/math-abs.js b/deps/v8/test/mjsunit/math-abs.js
index 2b079546ea..d6ee3f2da7 100644
--- a/deps/v8/test/mjsunit/math-abs.js
+++ b/deps/v8/test/mjsunit/math-abs.js
@@ -109,3 +109,14 @@ for(var i = 0; i < 1000; i++) {
assertEquals(42, foo(-42));
%OptimizeFunctionOnNextCall(foo)
assertEquals(42, foo(-42));
+
+// Regression test for SMI input of Math.abs on X64, see:
+// https://codereview.chromium.org/21180004/
+var a = [-1, -2];
+function foo2() {
+ return Math.abs(a[0]);
+}
+assertEquals(1, foo2());
+assertEquals(1, foo2());
+%OptimizeFunctionOnNextCall(foo2);
+assertEquals(1, foo2());
diff --git a/deps/v8/test/intl/general/v8Intl-exists.js b/deps/v8/test/mjsunit/regress/regress-264203.js
index 610767e376..fa00756625 100644
--- a/deps/v8/test/intl/general/v8Intl-exists.js
+++ b/deps/v8/test/mjsunit/regress/regress-264203.js
@@ -25,12 +25,20 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Make sure that v8Intl is mapped into Intl for backward compatibility.
+// Flags: --allow-natives-syntax
-assertEquals(v8Intl, Intl);
+function foo(x) {
+ var a = [1, 2, 3, 4, 5, 6, 7, 8];
+ a[x + 5];
+ var result;
+ for (var i = 0; i < 3; i++) {
+ result = a[0 - x];
+ }
+ return result;
+}
-// Extra checks.
-assertTrue(v8Intl.hasOwnProperty('DateTimeFormat'));
-assertTrue(v8Intl.hasOwnProperty('NumberFormat'));
-assertTrue(v8Intl.hasOwnProperty('Collator'));
-assertTrue(v8Intl.hasOwnProperty('v8BreakIterator'));
+foo(0);
+foo(0);
+%OptimizeFunctionOnNextCall(foo);
+var r = foo(-2);
+assertEquals(3, r);
diff --git a/deps/v8/test/mjsunit/regress/regress-2813.js b/deps/v8/test/mjsunit/regress/regress-2813.js
new file mode 100644
index 0000000000..97ae43b316
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2813.js
@@ -0,0 +1,44 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function foo(x) {
+ var a = x + 1;
+ var b = x + 2;
+ if (x != 0) {
+ if (x > 0 & x < 100) {
+ return a;
+ }
+ }
+ return 0;
+}
+
+assertEquals(0, foo(0));
+assertEquals(0, foo(0));
+%OptimizeFunctionOnNextCall(foo);
+assertEquals(3, foo(2));
diff --git a/deps/v8/test/mjsunit/regress/regress-omit-checks.js b/deps/v8/test/mjsunit/regress/regress-omit-checks.js
new file mode 100644
index 0000000000..e5d5074988
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-omit-checks.js
@@ -0,0 +1,55 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var a = {x:1};
+var a_deprecate = {x:1};
+a_deprecate.x = 1.5;
+function create() {
+ return {__proto__:a, y:1};
+}
+var b1 = create();
+var b2 = create();
+var b3 = create();
+var b4 = create();
+
+function set(b) {
+ b.x = 5;
+ b.z = 10;
+}
+
+set(b1);
+set(b2);
+%OptimizeFunctionOnNextCall(set);
+set(b3);
+var called = false;
+a.x = 1.5;
+Object.defineProperty(a, "z", {set:function(v) { called = true; }});
+set(b4);
+assertTrue(called);
+assertEquals(undefined, b4.z);
diff --git a/deps/v8/src/extensions/i18n/locale.h b/deps/v8/test/mjsunit/unary-minus-deopt.js
index c39568e5d9..367ef75c83 100644
--- a/deps/v8/src/extensions/i18n/locale.h
+++ b/deps/v8/test/mjsunit/unary-minus-deopt.js
@@ -24,33 +24,32 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// limitations under the License.
-#ifndef V8_EXTENSIONS_I18N_SRC_LOCALE_H_
-#define V8_EXTENSIONS_I18N_SRC_LOCALE_H_
-
-#include "unicode/uversion.h"
-#include "v8.h"
-
-namespace v8_i18n {
-
-// Canonicalizes the BCP47 language tag using BCP47 rules.
-// Returns 'invalid-tag' in case input was not well formed.
-void JSCanonicalizeLanguageTag(const v8::FunctionCallbackInfo<v8::Value>& args);
-
-// Returns a list of available locales for collator, date or number formatter.
-void JSAvailableLocalesOf(const v8::FunctionCallbackInfo<v8::Value>& args);
-
-// Returns default ICU locale.
-void JSGetDefaultICULocale(const v8::FunctionCallbackInfo<v8::Value>& args);
-
-// Returns an array of objects, that have maximized and base names of inputs.
-// Unicode extensions are dropped from both.
-// Input: ['zh-TW-u-nu-thai', 'sr']
-// Output: [{maximized: 'zh-Hant-TW', base: 'zh-TW'},
-// {maximized: 'sr-Cyrl-RS', base: 'sr'}]
-void JSGetLanguageTagVariants(const v8::FunctionCallbackInfo<v8::Value>& args);
-
-} // namespace v8_i18n
-
-#endif // V8_EXTENSIONS_I18N_LOCALE_H_
+// Flags: --allow-natives-syntax
+
+// This is a boiled-down example happening in the Epic Citadel demo:
+// After deopting, the multiplication for unary minus stayed in Smi
+// mode instead of going to double mode, leading to deopt loops.
+
+function unaryMinusTest(x) {
+ var g = (1 << x) | 0;
+ // Optimized code will contain a LMulI with -1 as right operand.
+ return (g & -g) - 1 | 0;
+}
+
+unaryMinusTest(3);
+unaryMinusTest(3);
+%OptimizeFunctionOnNextCall(unaryMinusTest);
+unaryMinusTest(3);
+assertOptimized(unaryMinusTest);
+
+// Deopt on kMinInt
+unaryMinusTest(31);
+// The following is normally true, but not with --stress-opt. :-/
+// assertUnoptimized(unaryMinusTest);
+
+// We should have learned something from the deopt.
+unaryMinusTest(31);
+%OptimizeFunctionOnNextCall(unaryMinusTest);
+unaryMinusTest(31);
+assertOptimized(unaryMinusTest);
diff --git a/deps/v8/test/webkit/webkit.status b/deps/v8/test/webkit/webkit.status
index 4aaf8a97fb..a437e93744 100644
--- a/deps/v8/test/webkit/webkit.status
+++ b/deps/v8/test/webkit/webkit.status
@@ -33,3 +33,6 @@ sort-large-array: PASS, SKIP if $mode == debug
##############################################################################
[ $deopt_fuzzer == True ]
+
+# Issue 2815.
+fast/js/kde/encode_decode_uri: SKIP
diff --git a/deps/v8/tools/grokdump.py b/deps/v8/tools/grokdump.py
index 9719376d7f..12ccefdef7 100755
--- a/deps/v8/tools/grokdump.py
+++ b/deps/v8/tools/grokdump.py
@@ -40,6 +40,7 @@ import re
import struct
import sys
import types
+import v8heapconst
USAGE="""usage: %prog [OPTIONS] [DUMP-FILE]
@@ -163,6 +164,11 @@ def FullDump(reader, heap):
reader.ForEachMemoryRegion(dump_region)
+# Heap constants generated by 'make grokdump' in v8heapconst module.
+INSTANCE_TYPES = v8heapconst.INSTANCE_TYPES
+KNOWN_MAPS = v8heapconst.KNOWN_MAPS
+KNOWN_OBJECTS = v8heapconst.KNOWN_OBJECTS
+
# Set of structures and constants that describe the layout of minidump
# files. Based on MSDN and Google Breakpad.
@@ -754,6 +760,14 @@ class MinidumpReader(object):
elif self.arch == MD_CPU_ARCHITECTURE_X86:
return self.exception_context.esp
+ def ExceptionFP(self):
+ if self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ return self.exception_context.rbp
+ elif self.arch == MD_CPU_ARCHITECTURE_ARM:
+ return None
+ elif self.arch == MD_CPU_ARCHITECTURE_X86:
+ return self.exception_context.ebp
+
def FormatIntPtr(self, value):
if self.arch == MD_CPU_ARCHITECTURE_AMD64:
return "%016x" % value
@@ -834,262 +848,6 @@ class MinidumpReader(object):
return "%s+0x%x" % (symbol.name, diff)
-
-# List of V8 instance types. Obtained by adding the code below to any .cc file.
-#
-# #define DUMP_TYPE(T) printf(" %d: \"%s\",\n", T, #T);
-# struct P {
-# P() {
-# printf("INSTANCE_TYPES = {\n");
-# INSTANCE_TYPE_LIST(DUMP_TYPE)
-# printf("}\n");
-# }
-# };
-# static P p;
-INSTANCE_TYPES = {
- 0: "STRING_TYPE",
- 4: "ASCII_STRING_TYPE",
- 1: "CONS_STRING_TYPE",
- 5: "CONS_ASCII_STRING_TYPE",
- 3: "SLICED_STRING_TYPE",
- 2: "EXTERNAL_STRING_TYPE",
- 6: "EXTERNAL_ASCII_STRING_TYPE",
- 10: "EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
- 18: "SHORT_EXTERNAL_STRING_TYPE",
- 22: "SHORT_EXTERNAL_ASCII_STRING_TYPE",
- 26: "SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
- 64: "INTERNALIZED_STRING_TYPE",
- 68: "ASCII_INTERNALIZED_STRING_TYPE",
- 65: "CONS_INTERNALIZED_STRING_TYPE",
- 69: "CONS_ASCII_INTERNALIZED_STRING_TYPE",
- 66: "EXTERNAL_INTERNALIZED_STRING_TYPE",
- 70: "EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE",
- 74: "EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE",
- 82: "SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE",
- 86: "SHORT_EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE",
- 90: "SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ASCII_DATA_TYPE",
- 128: "SYMBOL_TYPE",
- 129: "MAP_TYPE",
- 130: "CODE_TYPE",
- 131: "ODDBALL_TYPE",
- 132: "JS_GLOBAL_PROPERTY_CELL_TYPE",
- 133: "HEAP_NUMBER_TYPE",
- 134: "FOREIGN_TYPE",
- 135: "BYTE_ARRAY_TYPE",
- 136: "FREE_SPACE_TYPE",
- 137: "EXTERNAL_BYTE_ARRAY_TYPE",
- 138: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
- 139: "EXTERNAL_SHORT_ARRAY_TYPE",
- 140: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
- 141: "EXTERNAL_INT_ARRAY_TYPE",
- 142: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
- 143: "EXTERNAL_FLOAT_ARRAY_TYPE",
- 145: "EXTERNAL_PIXEL_ARRAY_TYPE",
- 147: "FILLER_TYPE",
- 148: "DECLARED_ACCESSOR_DESCRIPTOR_TYPE",
- 149: "DECLARED_ACCESSOR_INFO_TYPE",
- 150: "EXECUTABLE_ACCESSOR_INFO_TYPE",
- 151: "ACCESSOR_PAIR_TYPE",
- 152: "ACCESS_CHECK_INFO_TYPE",
- 153: "INTERCEPTOR_INFO_TYPE",
- 154: "CALL_HANDLER_INFO_TYPE",
- 155: "FUNCTION_TEMPLATE_INFO_TYPE",
- 156: "OBJECT_TEMPLATE_INFO_TYPE",
- 157: "SIGNATURE_INFO_TYPE",
- 158: "TYPE_SWITCH_INFO_TYPE",
- 159: "ALLOCATION_SITE_INFO_TYPE",
- 160: "SCRIPT_TYPE",
- 161: "CODE_CACHE_TYPE",
- 162: "POLYMORPHIC_CODE_CACHE_TYPE",
- 163: "TYPE_FEEDBACK_INFO_TYPE",
- 164: "ALIASED_ARGUMENTS_ENTRY_TYPE",
- 167: "FIXED_ARRAY_TYPE",
- 146: "FIXED_DOUBLE_ARRAY_TYPE",
- 168: "SHARED_FUNCTION_INFO_TYPE",
- 169: "JS_MESSAGE_OBJECT_TYPE",
- 172: "JS_VALUE_TYPE",
- 173: "JS_DATE_TYPE",
- 174: "JS_OBJECT_TYPE",
- 175: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
- 176: "JS_MODULE_TYPE",
- 177: "JS_GLOBAL_OBJECT_TYPE",
- 178: "JS_BUILTINS_OBJECT_TYPE",
- 179: "JS_GLOBAL_PROXY_TYPE",
- 180: "JS_ARRAY_TYPE",
- 171: "JS_PROXY_TYPE",
- 183: "JS_WEAK_MAP_TYPE",
- 184: "JS_WEAK_SET_TYPE",
- 185: "JS_REGEXP_TYPE",
- 186: "JS_FUNCTION_TYPE",
- 170: "JS_FUNCTION_PROXY_TYPE",
- 165: "DEBUG_INFO_TYPE",
- 166: "BREAK_POINT_INFO_TYPE",
-}
-
-
-# List of known V8 maps. Used to determine the instance type and name
-# for maps that are part of the root-set and hence on the first page of
-# the map-space. Obtained by adding the code below to an IA32 release
-# build with enabled snapshots to the end of the Isolate::Init method.
-#
-# #define ROOT_LIST_CASE(type, name, camel_name) \
-# if (o == heap_.name()) n = #camel_name;
-# #define STRUCT_LIST_CASE(upper_name, camel_name, name) \
-# if (o == heap_.name##_map()) n = #camel_name "Map";
-# HeapObjectIterator it(heap_.map_space());
-# printf("KNOWN_MAPS = {\n");
-# for (Object* o = it.Next(); o != NULL; o = it.Next()) {
-# Map* m = Map::cast(o);
-# const char* n = "";
-# intptr_t p = reinterpret_cast<intptr_t>(m) & 0xfffff;
-# int t = m->instance_type();
-# ROOT_LIST(ROOT_LIST_CASE)
-# STRUCT_LIST(STRUCT_LIST_CASE)
-# printf(" 0x%05x: (%d, \"%s\"),\n", p, t, n);
-# }
-# printf("}\n");
-KNOWN_MAPS = {
- 0x08081: (135, "ByteArrayMap"),
- 0x080a9: (129, "MetaMap"),
- 0x080d1: (131, "OddballMap"),
- 0x080f9: (68, "AsciiInternalizedStringMap"),
- 0x08121: (167, "FixedArrayMap"),
- 0x08149: (133, "HeapNumberMap"),
- 0x08171: (136, "FreeSpaceMap"),
- 0x08199: (147, "OnePointerFillerMap"),
- 0x081c1: (147, "TwoPointerFillerMap"),
- 0x081e9: (132, "GlobalPropertyCellMap"),
- 0x08211: (168, "SharedFunctionInfoMap"),
- 0x08239: (167, "NativeContextMap"),
- 0x08261: (130, "CodeMap"),
- 0x08289: (167, "ScopeInfoMap"),
- 0x082b1: (167, "FixedCOWArrayMap"),
- 0x082d9: (146, "FixedDoubleArrayMap"),
- 0x08301: (167, "HashTableMap"),
- 0x08329: (128, "SymbolMap"),
- 0x08351: (0, "StringMap"),
- 0x08379: (4, "AsciiStringMap"),
- 0x083a1: (1, "ConsStringMap"),
- 0x083c9: (5, "ConsAsciiStringMap"),
- 0x083f1: (3, "SlicedStringMap"),
- 0x08419: (7, "SlicedAsciiStringMap"),
- 0x08441: (2, "ExternalStringMap"),
- 0x08469: (10, "ExternalStringWithAsciiDataMap"),
- 0x08491: (6, "ExternalAsciiStringMap"),
- 0x084b9: (18, "ShortExternalStringMap"),
- 0x084e1: (26, "ShortExternalStringWithAsciiDataMap"),
- 0x08509: (64, "InternalizedStringMap"),
- 0x08531: (65, "ConsInternalizedStringMap"),
- 0x08559: (69, "ConsAsciiInternalizedStringMap"),
- 0x08581: (66, "ExternalInternalizedStringMap"),
- 0x085a9: (74, "ExternalInternalizedStringWithAsciiDataMap"),
- 0x085d1: (70, "ExternalAsciiInternalizedStringMap"),
- 0x085f9: (82, "ShortExternalInternalizedStringMap"),
- 0x08621: (90, "ShortExternalInternalizedStringWithAsciiDataMap"),
- 0x08649: (86, "ShortExternalAsciiInternalizedStringMap"),
- 0x08671: (22, "ShortExternalAsciiStringMap"),
- 0x08699: (0, "UndetectableStringMap"),
- 0x086c1: (4, "UndetectableAsciiStringMap"),
- 0x086e9: (145, "ExternalPixelArrayMap"),
- 0x08711: (137, "ExternalByteArrayMap"),
- 0x08739: (138, "ExternalUnsignedByteArrayMap"),
- 0x08761: (139, "ExternalShortArrayMap"),
- 0x08789: (140, "ExternalUnsignedShortArrayMap"),
- 0x087b1: (141, "ExternalIntArrayMap"),
- 0x087d9: (142, "ExternalUnsignedIntArrayMap"),
- 0x08801: (143, "ExternalFloatArrayMap"),
- 0x08829: (144, "ExternalDoubleArrayMap"),
- 0x08851: (167, "NonStrictArgumentsElementsMap"),
- 0x08879: (167, "FunctionContextMap"),
- 0x088a1: (167, "CatchContextMap"),
- 0x088c9: (167, "WithContextMap"),
- 0x088f1: (167, "BlockContextMap"),
- 0x08919: (167, "ModuleContextMap"),
- 0x08941: (167, "GlobalContextMap"),
- 0x08969: (169, "JSMessageObjectMap"),
- 0x08991: (134, "ForeignMap"),
- 0x089b9: (174, "NeanderMap"),
- 0x089e1: (159, "AllocationSiteInfoMap"),
- 0x08a09: (162, "PolymorphicCodeCacheMap"),
- 0x08a31: (160, "ScriptMap"),
- 0x08a59: (174, ""),
- 0x08a81: (174, "ExternalMap"),
- 0x08aa9: (148, "DeclaredAccessorDescriptorMap"),
- 0x08ad1: (149, "DeclaredAccessorInfoMap"),
- 0x08af9: (150, "ExecutableAccessorInfoMap"),
- 0x08b21: (151, "AccessorPairMap"),
- 0x08b49: (152, "AccessCheckInfoMap"),
- 0x08b71: (153, "InterceptorInfoMap"),
- 0x08b99: (154, "CallHandlerInfoMap"),
- 0x08bc1: (155, "FunctionTemplateInfoMap"),
- 0x08be9: (156, "ObjectTemplateInfoMap"),
- 0x08c11: (157, "SignatureInfoMap"),
- 0x08c39: (158, "TypeSwitchInfoMap"),
- 0x08c61: (161, "CodeCacheMap"),
- 0x08c89: (163, "TypeFeedbackInfoMap"),
- 0x08cb1: (164, "AliasedArgumentsEntryMap"),
- 0x08cd9: (165, "DebugInfoMap"),
- 0x08d01: (166, "BreakPointInfoMap"),
-}
-
-
-# List of known V8 objects. Used to determine name for objects that are
-# part of the root-set and hence on the first page of various old-space
-# paged. Obtained by adding the code below to an IA32 release build with
-# enabled snapshots to the end of the Isolate::Init method.
-#
-# #define ROOT_LIST_CASE(type, name, camel_name) \
-# if (o == heap_.name()) n = #camel_name;
-# OldSpaces spit(heap());
-# printf("KNOWN_OBJECTS = {\n");
-# for (PagedSpace* s = spit.next(); s != NULL; s = spit.next()) {
-# HeapObjectIterator it(s);
-# const char* sname = AllocationSpaceName(s->identity());
-# for (Object* o = it.Next(); o != NULL; o = it.Next()) {
-# const char* n = NULL;
-# intptr_t p = reinterpret_cast<intptr_t>(o) & 0xfffff;
-# ROOT_LIST(ROOT_LIST_CASE)
-# if (n != NULL) {
-# printf(" (\"%s\", 0x%05x): \"%s\",\n", sname, p, n);
-# }
-# }
-# }
-# printf("}\n");
-KNOWN_OBJECTS = {
- ("OLD_POINTER_SPACE", 0x08081): "NullValue",
- ("OLD_POINTER_SPACE", 0x08091): "UndefinedValue",
- ("OLD_POINTER_SPACE", 0x080a1): "InstanceofCacheMap",
- ("OLD_POINTER_SPACE", 0x080b1): "TrueValue",
- ("OLD_POINTER_SPACE", 0x080c1): "FalseValue",
- ("OLD_POINTER_SPACE", 0x080d1): "NoInterceptorResultSentinel",
- ("OLD_POINTER_SPACE", 0x080e1): "ArgumentsMarker",
- ("OLD_POINTER_SPACE", 0x080f1): "NumberStringCache",
- ("OLD_POINTER_SPACE", 0x088f9): "SingleCharacterStringCache",
- ("OLD_POINTER_SPACE", 0x08b01): "StringSplitCache",
- ("OLD_POINTER_SPACE", 0x08f09): "RegExpMultipleCache",
- ("OLD_POINTER_SPACE", 0x09311): "TerminationException",
- ("OLD_POINTER_SPACE", 0x09321): "MessageListeners",
- ("OLD_POINTER_SPACE", 0x0933d): "CodeStubs",
- ("OLD_POINTER_SPACE", 0x09fa5): "NonMonomorphicCache",
- ("OLD_POINTER_SPACE", 0x0a5b9): "PolymorphicCodeCache",
- ("OLD_POINTER_SPACE", 0x0a5c1): "NativesSourceCache",
- ("OLD_POINTER_SPACE", 0x0a601): "EmptyScript",
- ("OLD_POINTER_SPACE", 0x0a63d): "IntrinsicFunctionNames",
- ("OLD_POINTER_SPACE", 0x0d659): "ObservationState",
- ("OLD_POINTER_SPACE", 0x27415): "SymbolTable",
- ("OLD_DATA_SPACE", 0x08099): "EmptyDescriptorArray",
- ("OLD_DATA_SPACE", 0x080a1): "EmptyFixedArray",
- ("OLD_DATA_SPACE", 0x080a9): "NanValue",
- ("OLD_DATA_SPACE", 0x08125): "EmptyByteArray",
- ("OLD_DATA_SPACE", 0x0812d): "EmptyString",
- ("OLD_DATA_SPACE", 0x08259): "InfinityValue",
- ("OLD_DATA_SPACE", 0x08265): "MinusZeroValue",
- ("OLD_DATA_SPACE", 0x08271): "PrototypeAccessors",
- ("CODE_SPACE", 0x0aea1): "JsEntryCode",
- ("CODE_SPACE", 0x0b5c1): "JsConstructEntryCode",
-}
-
-
class Printer(object):
"""Printer with indentation support."""
@@ -2201,11 +1959,15 @@ def AnalyzeMinidump(options, minidump_name):
print "Kthxbye."
elif not options.command:
if reader.exception is not None:
+ frame_pointer = reader.ExceptionFP()
print "Annotated stack (from exception.esp to bottom):"
for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
maybe_address = reader.ReadUIntPtr(slot)
heap_object = heap.FindObject(maybe_address)
maybe_symbol = reader.FindSymbol(maybe_address)
+ if slot == frame_pointer:
+ maybe_symbol = "<---- frame pointer"
+ frame_pointer = maybe_address
print "%s: %s %s" % (reader.FormatIntPtr(slot),
reader.FormatIntPtr(maybe_address),
maybe_symbol or "")
diff --git a/deps/v8/tools/gyp/v8.gyp b/deps/v8/tools/gyp/v8.gyp
index 0c1ad6843f..cbf948448c 100644
--- a/deps/v8/tools/gyp/v8.gyp
+++ b/deps/v8/tools/gyp/v8.gyp
@@ -822,18 +822,16 @@
}],
['v8_enable_i18n_support==1', {
'sources': [
+ '../../src/i18n.cc',
+ '../../src/i18n.h',
'../../src/extensions/i18n/break-iterator.cc',
'../../src/extensions/i18n/break-iterator.h',
'../../src/extensions/i18n/collator.cc',
'../../src/extensions/i18n/collator.h',
- '../../src/extensions/i18n/date-format.cc',
- '../../src/extensions/i18n/date-format.h',
'../../src/extensions/i18n/i18n-extension.cc',
'../../src/extensions/i18n/i18n-extension.h',
'../../src/extensions/i18n/i18n-utils.cc',
'../../src/extensions/i18n/i18n-utils.h',
- '../../src/extensions/i18n/locale.cc',
- '../../src/extensions/i18n/locale.h',
'../../src/extensions/i18n/number-format.cc',
'../../src/extensions/i18n/number-format.h',
],
@@ -906,7 +904,9 @@
'../../src/arraybuffer.js',
'../../src/typedarray.js',
'../../src/generator.js',
- '../../src/array-iterator.js'
+ '../../src/array-iterator.js',
+ '../../src/harmony-string.js',
+ '../../src/harmony-array.js',
],
'i18n_library_files': [
'../../src/extensions/i18n/header.js',
diff --git a/deps/v8/tools/run-tests.py b/deps/v8/tools/run-tests.py
index 761d03fe33..48682d4444 100755
--- a/deps/v8/tools/run-tests.py
+++ b/deps/v8/tools/run-tests.py
@@ -94,6 +94,9 @@ def BuildOptions():
default=False, action="store_true")
result.add_option("--cat", help="Print the source of the tests",
default=False, action="store_true")
+ result.add_option("--flaky-tests",
+ help="Regard tests marked as flaky (run|skip|dontcare)",
+ default="dontcare")
result.add_option("--command-prefix",
help="Prepended to each shell command used to run a test",
default="")
@@ -204,6 +207,9 @@ def ProcessOptions(options):
# This is OK for distributed running, so we don't need to set no_network.
options.command_prefix = (["python", "-u", run_valgrind] +
options.command_prefix)
+ if not options.flaky_tests in ["run", "skip", "dontcare"]:
+ print "Unknown flaky test mode %s" % options.flaky_tests
+ return False
return True
@@ -315,7 +321,7 @@ def Execute(arch, mode, args, options, suites, workspace):
if len(args) > 0:
s.FilterTestCasesByArgs(args)
all_tests += s.tests
- s.FilterTestCasesByStatus(options.warn_unused)
+ s.FilterTestCasesByStatus(options.warn_unused, options.flaky_tests)
if options.cat:
verbose.PrintTestSource(s.tests)
continue
diff --git a/deps/v8/tools/testrunner/local/old_statusfile.py b/deps/v8/tools/testrunner/local/old_statusfile.py
index a9a62036ec..d634e3ec95 100644
--- a/deps/v8/tools/testrunner/local/old_statusfile.py
+++ b/deps/v8/tools/testrunner/local/old_statusfile.py
@@ -37,6 +37,7 @@ OKAY = 'OKAY'
TIMEOUT = 'TIMEOUT'
CRASH = 'CRASH'
SLOW = 'SLOW'
+FLAKY = 'FLAKY'
# These are just for the status files and are mapped below in DEFS:
FAIL_OK = 'FAIL_OK'
PASS_OR_FAIL = 'PASS_OR_FAIL'
@@ -48,6 +49,7 @@ KEYWORDS = {SKIP: SKIP,
TIMEOUT: TIMEOUT,
CRASH: CRASH,
SLOW: SLOW,
+ FLAKY: FLAKY,
FAIL_OK: FAIL_OK,
PASS_OR_FAIL: PASS_OR_FAIL}
diff --git a/deps/v8/tools/testrunner/local/statusfile.py b/deps/v8/tools/testrunner/local/statusfile.py
index 634fe6a08a..1d30fe3d3c 100644
--- a/deps/v8/tools/testrunner/local/statusfile.py
+++ b/deps/v8/tools/testrunner/local/statusfile.py
@@ -42,6 +42,7 @@ OKAY = "OKAY"
TIMEOUT = "TIMEOUT"
CRASH = "CRASH"
SLOW = "SLOW"
+FLAKY = "FLAKY"
# These are just for the status files and are mapped below in DEFS:
FAIL_OK = "FAIL_OK"
PASS_OR_FAIL = "PASS_OR_FAIL"
@@ -49,7 +50,7 @@ PASS_OR_FAIL = "PASS_OR_FAIL"
ALWAYS = "ALWAYS"
KEYWORDS = {}
-for key in [SKIP, FAIL, PASS, OKAY, TIMEOUT, CRASH, SLOW, FAIL_OK,
+for key in [SKIP, FAIL, PASS, OKAY, TIMEOUT, CRASH, SLOW, FLAKY, FAIL_OK,
PASS_OR_FAIL, ALWAYS]:
KEYWORDS[key] = key
@@ -68,6 +69,10 @@ def DoSkip(outcomes):
def IsFlaky(outcomes):
+ return FLAKY in outcomes
+
+
+def IsPassOrFail(outcomes):
return ((PASS in outcomes) and (FAIL in outcomes) and
(not CRASH in outcomes) and (not OKAY in outcomes))
diff --git a/deps/v8/tools/testrunner/local/testsuite.py b/deps/v8/tools/testrunner/local/testsuite.py
index 473e8b1efe..b0372e7f73 100644
--- a/deps/v8/tools/testrunner/local/testsuite.py
+++ b/deps/v8/tools/testrunner/local/testsuite.py
@@ -66,7 +66,10 @@ class TestSuite(object):
# Used in the status file and for stdout printing.
def CommonTestName(self, testcase):
- return testcase.path
+ if utils.IsWindows():
+ return testcase.path.replace("\\", "/")
+ else:
+ return testcase.path
def ListTests(self, context):
raise NotImplementedError
@@ -84,32 +87,36 @@ class TestSuite(object):
def ReadTestCases(self, context):
self.tests = self.ListTests(context)
- def FilterTestCasesByStatus(self, warn_unused_rules):
+ @staticmethod
+ def _FilterFlaky(flaky, mode):
+ return (mode == "run" and not flaky) or (mode == "skip" and flaky)
+
+ def FilterTestCasesByStatus(self, warn_unused_rules, flaky_tests="dontcare"):
filtered = []
used_rules = set()
for t in self.tests:
+ flaky = False
testname = self.CommonTestName(t)
- if utils.IsWindows():
- testname = testname.replace("\\", "/")
if testname in self.rules:
used_rules.add(testname)
- outcomes = self.rules[testname]
- t.outcomes = outcomes # Even for skipped tests, as the TestCase
- # object stays around and PrintReport() uses it.
- if statusfile.DoSkip(outcomes):
+ # Even for skipped tests, as the TestCase object stays around and
+ # PrintReport() uses it.
+ t.outcomes = self.rules[testname]
+ if statusfile.DoSkip(t.outcomes):
continue # Don't add skipped tests to |filtered|.
- if len(self.wildcards) != 0:
- skip = False
- for rule in self.wildcards:
- assert rule[-1] == '*'
- if testname.startswith(rule[:-1]):
- used_rules.add(rule)
- outcomes = self.wildcards[rule]
- t.outcomes = outcomes
- if statusfile.DoSkip(outcomes):
- skip = True
- break # "for rule in self.wildcards"
- if skip: continue # "for t in self.tests"
+ flaky = statusfile.IsFlaky(t.outcomes)
+ skip = False
+ for rule in self.wildcards:
+ assert rule[-1] == '*'
+ if testname.startswith(rule[:-1]):
+ used_rules.add(rule)
+ t.outcomes = self.wildcards[rule]
+ if statusfile.DoSkip(t.outcomes):
+ skip = True
+ break # "for rule in self.wildcards"
+ flaky = flaky or statusfile.IsFlaky(t.outcomes)
+ if skip or self._FilterFlaky(flaky, flaky_tests):
+ continue # "for t in self.tests"
filtered.append(t)
self.tests = filtered
diff --git a/deps/v8/tools/testrunner/local/verbose.py b/deps/v8/tools/testrunner/local/verbose.py
index f693467523..00c330d2d9 100644
--- a/deps/v8/tools/testrunner/local/verbose.py
+++ b/deps/v8/tools/testrunner/local/verbose.py
@@ -54,7 +54,7 @@ def PrintReport(tests):
skipped += 1
continue
if statusfile.TIMEOUT in o: timeout += 1
- if statusfile.IsFlaky(o): nocrash += 1
+ if statusfile.IsPassOrFail(o): nocrash += 1
if list(o) == [statusfile.PASS]: passes += 1
if statusfile.IsFailOk(o): fail_ok += 1
if list(o) == [statusfile.FAIL]: fail += 1
diff --git a/deps/v8/tools/v8heapconst.py b/deps/v8/tools/v8heapconst.py
new file mode 100644
index 0000000000..591bf99ca9
--- /dev/null
+++ b/deps/v8/tools/v8heapconst.py
@@ -0,0 +1,252 @@
+# Copyright 2013 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This file is automatically generated from the V8 source and should not
+# be modified manually, run 'make grokdump' instead to update this file.
+
+# List of known V8 instance types.
+INSTANCE_TYPES = {
+ 64: "STRING_TYPE",
+ 68: "ASCII_STRING_TYPE",
+ 65: "CONS_STRING_TYPE",
+ 69: "CONS_ASCII_STRING_TYPE",
+ 67: "SLICED_STRING_TYPE",
+ 66: "EXTERNAL_STRING_TYPE",
+ 70: "EXTERNAL_ASCII_STRING_TYPE",
+ 74: "EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE",
+ 82: "SHORT_EXTERNAL_STRING_TYPE",
+ 86: "SHORT_EXTERNAL_ASCII_STRING_TYPE",
+ 90: "SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE",
+ 0: "INTERNALIZED_STRING_TYPE",
+ 4: "ASCII_INTERNALIZED_STRING_TYPE",
+ 1: "CONS_INTERNALIZED_STRING_TYPE",
+ 5: "CONS_ASCII_INTERNALIZED_STRING_TYPE",
+ 2: "EXTERNAL_INTERNALIZED_STRING_TYPE",
+ 6: "EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE",
+ 10: "EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE",
+ 18: "SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE",
+ 22: "SHORT_EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE",
+ 26: "SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE",
+ 128: "SYMBOL_TYPE",
+ 129: "MAP_TYPE",
+ 130: "CODE_TYPE",
+ 131: "ODDBALL_TYPE",
+ 132: "CELL_TYPE",
+ 133: "PROPERTY_CELL_TYPE",
+ 134: "HEAP_NUMBER_TYPE",
+ 135: "FOREIGN_TYPE",
+ 136: "BYTE_ARRAY_TYPE",
+ 137: "FREE_SPACE_TYPE",
+ 138: "EXTERNAL_BYTE_ARRAY_TYPE",
+ 139: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
+ 140: "EXTERNAL_SHORT_ARRAY_TYPE",
+ 141: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
+ 142: "EXTERNAL_INT_ARRAY_TYPE",
+ 143: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
+ 144: "EXTERNAL_FLOAT_ARRAY_TYPE",
+ 145: "EXTERNAL_DOUBLE_ARRAY_TYPE",
+ 146: "EXTERNAL_PIXEL_ARRAY_TYPE",
+ 148: "FILLER_TYPE",
+ 149: "DECLARED_ACCESSOR_DESCRIPTOR_TYPE",
+ 150: "DECLARED_ACCESSOR_INFO_TYPE",
+ 151: "EXECUTABLE_ACCESSOR_INFO_TYPE",
+ 152: "ACCESSOR_PAIR_TYPE",
+ 153: "ACCESS_CHECK_INFO_TYPE",
+ 154: "INTERCEPTOR_INFO_TYPE",
+ 155: "CALL_HANDLER_INFO_TYPE",
+ 156: "FUNCTION_TEMPLATE_INFO_TYPE",
+ 157: "OBJECT_TEMPLATE_INFO_TYPE",
+ 158: "SIGNATURE_INFO_TYPE",
+ 159: "TYPE_SWITCH_INFO_TYPE",
+ 161: "ALLOCATION_MEMENTO_TYPE",
+ 160: "ALLOCATION_SITE_TYPE",
+ 162: "SCRIPT_TYPE",
+ 163: "CODE_CACHE_TYPE",
+ 164: "POLYMORPHIC_CODE_CACHE_TYPE",
+ 165: "TYPE_FEEDBACK_INFO_TYPE",
+ 166: "ALIASED_ARGUMENTS_ENTRY_TYPE",
+ 167: "BOX_TYPE",
+ 170: "FIXED_ARRAY_TYPE",
+ 147: "FIXED_DOUBLE_ARRAY_TYPE",
+ 171: "SHARED_FUNCTION_INFO_TYPE",
+ 172: "JS_MESSAGE_OBJECT_TYPE",
+ 175: "JS_VALUE_TYPE",
+ 176: "JS_DATE_TYPE",
+ 177: "JS_OBJECT_TYPE",
+ 178: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
+ 179: "JS_GENERATOR_OBJECT_TYPE",
+ 180: "JS_MODULE_TYPE",
+ 181: "JS_GLOBAL_OBJECT_TYPE",
+ 182: "JS_BUILTINS_OBJECT_TYPE",
+ 183: "JS_GLOBAL_PROXY_TYPE",
+ 184: "JS_ARRAY_TYPE",
+ 185: "JS_ARRAY_BUFFER_TYPE",
+ 186: "JS_TYPED_ARRAY_TYPE",
+ 187: "JS_DATA_VIEW_TYPE",
+ 174: "JS_PROXY_TYPE",
+ 190: "JS_WEAK_MAP_TYPE",
+ 191: "JS_WEAK_SET_TYPE",
+ 192: "JS_REGEXP_TYPE",
+ 193: "JS_FUNCTION_TYPE",
+ 173: "JS_FUNCTION_PROXY_TYPE",
+ 168: "DEBUG_INFO_TYPE",
+ 169: "BREAK_POINT_INFO_TYPE",
+}
+
+# List of known V8 maps.
+KNOWN_MAPS = {
+ 0x08081: (136, "ByteArrayMap"),
+ 0x080a9: (129, "MetaMap"),
+ 0x080d1: (131, "OddballMap"),
+ 0x080f9: (4, "AsciiInternalizedStringMap"),
+ 0x08121: (170, "FixedArrayMap"),
+ 0x08149: (134, "HeapNumberMap"),
+ 0x08171: (137, "FreeSpaceMap"),
+ 0x08199: (148, "OnePointerFillerMap"),
+ 0x081c1: (148, "TwoPointerFillerMap"),
+ 0x081e9: (132, "CellMap"),
+ 0x08211: (133, "GlobalPropertyCellMap"),
+ 0x08239: (171, "SharedFunctionInfoMap"),
+ 0x08261: (170, "NativeContextMap"),
+ 0x08289: (130, "CodeMap"),
+ 0x082b1: (170, "ScopeInfoMap"),
+ 0x082d9: (170, "FixedCOWArrayMap"),
+ 0x08301: (147, "FixedDoubleArrayMap"),
+ 0x08329: (170, "HashTableMap"),
+ 0x08351: (128, "SymbolMap"),
+ 0x08379: (64, "StringMap"),
+ 0x083a1: (68, "AsciiStringMap"),
+ 0x083c9: (65, "ConsStringMap"),
+ 0x083f1: (69, "ConsAsciiStringMap"),
+ 0x08419: (67, "SlicedStringMap"),
+ 0x08441: (71, "SlicedAsciiStringMap"),
+ 0x08469: (66, "ExternalStringMap"),
+ 0x08491: (74, "ExternalStringWithOneByteDataMap"),
+ 0x084b9: (70, "ExternalAsciiStringMap"),
+ 0x084e1: (82, "ShortExternalStringMap"),
+ 0x08509: (90, "ShortExternalStringWithOneByteDataMap"),
+ 0x08531: (0, "InternalizedStringMap"),
+ 0x08559: (1, "ConsInternalizedStringMap"),
+ 0x08581: (5, "ConsAsciiInternalizedStringMap"),
+ 0x085a9: (2, "ExternalInternalizedStringMap"),
+ 0x085d1: (10, "ExternalInternalizedStringWithOneByteDataMap"),
+ 0x085f9: (6, "ExternalAsciiInternalizedStringMap"),
+ 0x08621: (18, "ShortExternalInternalizedStringMap"),
+ 0x08649: (26, "ShortExternalInternalizedStringWithOneByteDataMap"),
+ 0x08671: (22, "ShortExternalAsciiInternalizedStringMap"),
+ 0x08699: (86, "ShortExternalAsciiStringMap"),
+ 0x086c1: (64, "UndetectableStringMap"),
+ 0x086e9: (68, "UndetectableAsciiStringMap"),
+ 0x08711: (138, "ExternalByteArrayMap"),
+ 0x08739: (139, "ExternalUnsignedByteArrayMap"),
+ 0x08761: (140, "ExternalShortArrayMap"),
+ 0x08789: (141, "ExternalUnsignedShortArrayMap"),
+ 0x087b1: (142, "ExternalIntArrayMap"),
+ 0x087d9: (143, "ExternalUnsignedIntArrayMap"),
+ 0x08801: (144, "ExternalFloatArrayMap"),
+ 0x08829: (145, "ExternalDoubleArrayMap"),
+ 0x08851: (146, "ExternalPixelArrayMap"),
+ 0x08879: (170, "NonStrictArgumentsElementsMap"),
+ 0x088a1: (170, "FunctionContextMap"),
+ 0x088c9: (170, "CatchContextMap"),
+ 0x088f1: (170, "WithContextMap"),
+ 0x08919: (170, "BlockContextMap"),
+ 0x08941: (170, "ModuleContextMap"),
+ 0x08969: (170, "GlobalContextMap"),
+ 0x08991: (172, "JSMessageObjectMap"),
+ 0x089b9: (135, "ForeignMap"),
+ 0x089e1: (177, "NeanderMap"),
+ 0x08a09: (161, "AllocationMementoMap"),
+ 0x08a31: (160, "AllocationSiteMap"),
+ 0x08a59: (164, "PolymorphicCodeCacheMap"),
+ 0x08a81: (162, "ScriptMap"),
+ 0x08ad1: (177, "ExternalMap"),
+ 0x08af9: (167, "BoxMap"),
+ 0x08b21: (149, "DeclaredAccessorDescriptorMap"),
+ 0x08b49: (150, "DeclaredAccessorInfoMap"),
+ 0x08b71: (151, "ExecutableAccessorInfoMap"),
+ 0x08b99: (152, "AccessorPairMap"),
+ 0x08bc1: (153, "AccessCheckInfoMap"),
+ 0x08be9: (154, "InterceptorInfoMap"),
+ 0x08c11: (155, "CallHandlerInfoMap"),
+ 0x08c39: (156, "FunctionTemplateInfoMap"),
+ 0x08c61: (157, "ObjectTemplateInfoMap"),
+ 0x08c89: (158, "SignatureInfoMap"),
+ 0x08cb1: (159, "TypeSwitchInfoMap"),
+ 0x08cd9: (163, "CodeCacheMap"),
+ 0x08d01: (165, "TypeFeedbackInfoMap"),
+ 0x08d29: (166, "AliasedArgumentsEntryMap"),
+ 0x08d51: (168, "DebugInfoMap"),
+ 0x08d79: (169, "BreakPointInfoMap"),
+}
+
+# List of known V8 objects.
+KNOWN_OBJECTS = {
+ ("OLD_POINTER_SPACE", 0x08081): "NullValue",
+ ("OLD_POINTER_SPACE", 0x08091): "UndefinedValue",
+ ("OLD_POINTER_SPACE", 0x080a1): "TheHoleValue",
+ ("OLD_POINTER_SPACE", 0x080b1): "TrueValue",
+ ("OLD_POINTER_SPACE", 0x080c1): "FalseValue",
+ ("OLD_POINTER_SPACE", 0x080d1): "UninitializedValue",
+ ("OLD_POINTER_SPACE", 0x080e1): "NoInterceptorResultSentinel",
+ ("OLD_POINTER_SPACE", 0x080f1): "ArgumentsMarker",
+ ("OLD_POINTER_SPACE", 0x08101): "NumberStringCache",
+ ("OLD_POINTER_SPACE", 0x08909): "SingleCharacterStringCache",
+ ("OLD_POINTER_SPACE", 0x08d11): "StringSplitCache",
+ ("OLD_POINTER_SPACE", 0x09119): "RegExpMultipleCache",
+ ("OLD_POINTER_SPACE", 0x09521): "TerminationException",
+ ("OLD_POINTER_SPACE", 0x09531): "MessageListeners",
+ ("OLD_POINTER_SPACE", 0x0954d): "CodeStubs",
+ ("OLD_POINTER_SPACE", 0x0a9d9): "NonMonomorphicCache",
+ ("OLD_POINTER_SPACE", 0x0afed): "PolymorphicCodeCache",
+ ("OLD_POINTER_SPACE", 0x0aff5): "NativesSourceCache",
+ ("OLD_POINTER_SPACE", 0x0b035): "EmptyScript",
+ ("OLD_POINTER_SPACE", 0x0b06d): "IntrinsicFunctionNames",
+ ("OLD_POINTER_SPACE", 0x0e089): "ObservationState",
+ ("OLD_POINTER_SPACE", 0x0e095): "FrozenSymbol",
+ ("OLD_POINTER_SPACE", 0x0e0a1): "ElementsTransitionSymbol",
+ ("OLD_POINTER_SPACE", 0x0e0ad): "EmptySlowElementDictionary",
+ ("OLD_POINTER_SPACE", 0x0e249): "ObservedSymbol",
+ ("OLD_POINTER_SPACE", 0x27585): "StringTable",
+ ("OLD_DATA_SPACE", 0x08099): "EmptyDescriptorArray",
+ ("OLD_DATA_SPACE", 0x080a1): "EmptyFixedArray",
+ ("OLD_DATA_SPACE", 0x080a9): "NanValue",
+ ("OLD_DATA_SPACE", 0x08141): "EmptyByteArray",
+ ("OLD_DATA_SPACE", 0x08269): "EmptyExternalByteArray",
+ ("OLD_DATA_SPACE", 0x08275): "EmptyExternalUnsignedByteArray",
+ ("OLD_DATA_SPACE", 0x08281): "EmptyExternalShortArray",
+ ("OLD_DATA_SPACE", 0x0828d): "EmptyExternalUnsignedShortArray",
+ ("OLD_DATA_SPACE", 0x08299): "EmptyExternalIntArray",
+ ("OLD_DATA_SPACE", 0x082a5): "EmptyExternalUnsignedIntArray",
+ ("OLD_DATA_SPACE", 0x082b1): "EmptyExternalFloatArray",
+ ("OLD_DATA_SPACE", 0x082bd): "EmptyExternalDoubleArray",
+ ("OLD_DATA_SPACE", 0x082c9): "EmptyExternalPixelArray",
+ ("OLD_DATA_SPACE", 0x082d5): "InfinityValue",
+ ("OLD_DATA_SPACE", 0x082e1): "MinusZeroValue",
+ ("CODE_SPACE", 0x0eb41): "JsConstructEntryCode",
+ ("CODE_SPACE", 0x177a1): "JsEntryCode",
+}
diff --git a/deps/v8/tools/v8heapconst.py.tmpl b/deps/v8/tools/v8heapconst.py.tmpl
new file mode 100644
index 0000000000..a773f47c8b
--- /dev/null
+++ b/deps/v8/tools/v8heapconst.py.tmpl
@@ -0,0 +1,30 @@
+# Copyright 2013 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This file is automatically generated from the V8 source and should not
+# be modified manually, run 'make grokdump' instead to update this file.
+