summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRyan Dahl <ry@tinyclouds.org>2011-02-16 08:38:33 -0800
committerRyan Dahl <ry@tinyclouds.org>2011-02-16 10:38:49 -0800
commit550f73ae3e3b29aa36e793e8ffc5cd23478df099 (patch)
tree3f4d8f9d7648169df967a820406923a9c4321cba
parent3ef6433255cfeabdeb70bbfa51ac32a287c5d243 (diff)
downloadandroid-node-v8-550f73ae3e3b29aa36e793e8ffc5cd23478df099.tar.gz
android-node-v8-550f73ae3e3b29aa36e793e8ffc5cd23478df099.tar.bz2
android-node-v8-550f73ae3e3b29aa36e793e8ffc5cd23478df099.zip
Upgrade V8 to 3.1.5
-rw-r--r--deps/v8/.gitignore2
-rw-r--r--deps/v8/AUTHORS1
-rw-r--r--deps/v8/ChangeLog65
-rw-r--r--deps/v8/SConstruct21
-rw-r--r--deps/v8/include/v8.h1
-rw-r--r--deps/v8/samples/shell.cc3
-rwxr-xr-xdeps/v8/src/SConscript5
-rw-r--r--deps/v8/src/accessors.cc1
-rw-r--r--deps/v8/src/api.cc68
-rw-r--r--deps/v8/src/arguments.h2
-rw-r--r--deps/v8/src/arm/assembler-arm-inl.h4
-rw-r--r--deps/v8/src/arm/assembler-arm.cc58
-rw-r--r--deps/v8/src/arm/assembler-arm.h9
-rw-r--r--deps/v8/src/arm/builtins-arm.cc58
-rw-r--r--deps/v8/src/arm/code-stubs-arm.cc381
-rw-r--r--deps/v8/src/arm/code-stubs-arm.h46
-rw-r--r--deps/v8/src/arm/codegen-arm.cc24
-rw-r--r--deps/v8/src/arm/constants-arm.h2
-rw-r--r--deps/v8/src/arm/deoptimizer-arm.cc204
-rw-r--r--deps/v8/src/arm/full-codegen-arm.cc557
-rw-r--r--deps/v8/src/arm/ic-arm.cc117
-rw-r--r--deps/v8/src/arm/lithium-arm.cc88
-rw-r--r--deps/v8/src/arm/lithium-arm.h106
-rw-r--r--deps/v8/src/arm/lithium-codegen-arm.cc208
-rw-r--r--deps/v8/src/arm/lithium-codegen-arm.h8
-rw-r--r--deps/v8/src/arm/macro-assembler-arm.cc172
-rw-r--r--deps/v8/src/arm/macro-assembler-arm.h27
-rw-r--r--deps/v8/src/arm/regexp-macro-assembler-arm.cc71
-rw-r--r--deps/v8/src/arm/regexp-macro-assembler-arm.h3
-rw-r--r--deps/v8/src/arm/simulator-arm.h15
-rw-r--r--deps/v8/src/arm/stub-cache-arm.cc41
-rw-r--r--deps/v8/src/arm/virtual-frame-arm.cc13
-rw-r--r--deps/v8/src/arm/virtual-frame-arm.h3
-rw-r--r--deps/v8/src/array.js10
-rw-r--r--deps/v8/src/assembler.cc2
-rw-r--r--deps/v8/src/assembler.h8
-rw-r--r--deps/v8/src/ast.cc4
-rw-r--r--deps/v8/src/bignum.cc11
-rw-r--r--deps/v8/src/bootstrapper.cc51
-rw-r--r--deps/v8/src/builtins.cc45
-rw-r--r--deps/v8/src/builtins.h170
-rw-r--r--deps/v8/src/code-stubs.cc4
-rw-r--r--deps/v8/src/code-stubs.h3
-rw-r--r--deps/v8/src/codegen.cc1
-rwxr-xr-xdeps/v8/src/compiler.cc25
-rw-r--r--deps/v8/src/compiler.h4
-rw-r--r--deps/v8/src/d8.cc2
-rw-r--r--deps/v8/src/date.js7
-rw-r--r--deps/v8/src/debug.cc4
-rw-r--r--deps/v8/src/deoptimizer.cc14
-rw-r--r--deps/v8/src/execution.cc1
-rw-r--r--deps/v8/src/execution.h2
-rw-r--r--deps/v8/src/factory.cc9
-rw-r--r--deps/v8/src/factory.h2
-rw-r--r--deps/v8/src/flag-definitions.h10
-rw-r--r--deps/v8/src/full-codegen.cc2
-rw-r--r--deps/v8/src/full-codegen.h6
-rw-r--r--deps/v8/src/gdb-jit.cc121
-rw-r--r--deps/v8/src/handles.cc12
-rw-r--r--deps/v8/src/handles.h7
-rw-r--r--deps/v8/src/heap.cc2
-rw-r--r--deps/v8/src/heap.h1
-rw-r--r--deps/v8/src/hydrogen-instructions.cc186
-rw-r--r--deps/v8/src/hydrogen-instructions.h441
-rw-r--r--deps/v8/src/hydrogen.cc462
-rw-r--r--deps/v8/src/hydrogen.h38
-rw-r--r--deps/v8/src/ia32/assembler-ia32.cc17
-rw-r--r--deps/v8/src/ia32/assembler-ia32.h53
-rw-r--r--deps/v8/src/ia32/builtins-ia32.cc15
-rw-r--r--deps/v8/src/ia32/code-stubs-ia32.cc223
-rw-r--r--deps/v8/src/ia32/code-stubs-ia32.h38
-rw-r--r--deps/v8/src/ia32/codegen-ia32.cc27
-rw-r--r--deps/v8/src/ia32/deoptimizer-ia32.cc58
-rw-r--r--deps/v8/src/ia32/disasm-ia32.cc12
-rw-r--r--deps/v8/src/ia32/full-codegen-ia32.cc133
-rw-r--r--deps/v8/src/ia32/ic-ia32.cc49
-rw-r--r--deps/v8/src/ia32/lithium-codegen-ia32.cc332
-rw-r--r--deps/v8/src/ia32/lithium-codegen-ia32.h26
-rw-r--r--deps/v8/src/ia32/lithium-ia32.cc133
-rw-r--r--deps/v8/src/ia32/lithium-ia32.h234
-rw-r--r--deps/v8/src/ia32/macro-assembler-ia32.cc133
-rw-r--r--deps/v8/src/ia32/macro-assembler-ia32.h9
-rw-r--r--deps/v8/src/ia32/simulator-ia32.h9
-rw-r--r--deps/v8/src/ia32/stub-cache-ia32.cc36
-rw-r--r--deps/v8/src/ia32/virtual-frame-ia32.cc14
-rw-r--r--deps/v8/src/ia32/virtual-frame-ia32.h3
-rw-r--r--deps/v8/src/ic.cc89
-rw-r--r--deps/v8/src/ic.h23
-rw-r--r--deps/v8/src/liveedit.cc1
-rw-r--r--deps/v8/src/macro-assembler.h7
-rw-r--r--deps/v8/src/math.js2
-rw-r--r--deps/v8/src/messages.cc9
-rw-r--r--deps/v8/src/messages.js4
-rw-r--r--deps/v8/src/objects-debug.cc2
-rw-r--r--deps/v8/src/objects-inl.h22
-rw-r--r--deps/v8/src/objects.cc131
-rw-r--r--deps/v8/src/objects.h66
-rw-r--r--deps/v8/src/oprofile-agent.cc108
-rw-r--r--deps/v8/src/parser.cc48
-rw-r--r--deps/v8/src/parser.h5
-rw-r--r--deps/v8/src/platform-cygwin.cc776
-rw-r--r--deps/v8/src/platform.h4
-rw-r--r--deps/v8/src/profile-generator.cc2
-rw-r--r--deps/v8/src/regexp-macro-assembler.cc6
-rw-r--r--deps/v8/src/runtime.cc310
-rw-r--r--deps/v8/src/runtime.h3
-rw-r--r--deps/v8/src/runtime.js4
-rwxr-xr-xdeps/v8/src/scanner.cc13
-rw-r--r--deps/v8/src/serialize.cc2
-rw-r--r--deps/v8/src/string.js19
-rw-r--r--deps/v8/src/stub-cache.cc77
-rw-r--r--deps/v8/src/stub-cache.h32
-rw-r--r--deps/v8/src/top.cc118
-rw-r--r--deps/v8/src/top.h23
-rw-r--r--deps/v8/src/uri.js28
-rw-r--r--deps/v8/src/v8-counters.h1
-rw-r--r--deps/v8/src/v8.cc3
-rw-r--r--deps/v8/src/v8globals.h18
-rw-r--r--deps/v8/src/v8natives.js74
-rw-r--r--deps/v8/src/version.cc2
-rw-r--r--deps/v8/src/x64/assembler-x64-inl.h4
-rw-r--r--deps/v8/src/x64/assembler-x64.cc23
-rw-r--r--deps/v8/src/x64/assembler-x64.h12
-rw-r--r--deps/v8/src/x64/builtins-x64.cc81
-rw-r--r--deps/v8/src/x64/code-stubs-x64.cc323
-rw-r--r--deps/v8/src/x64/code-stubs-x64.h40
-rw-r--r--deps/v8/src/x64/codegen-x64.cc29
-rw-r--r--deps/v8/src/x64/cpu-x64.cc3
-rw-r--r--deps/v8/src/x64/deoptimizer-x64.cc41
-rw-r--r--deps/v8/src/x64/full-codegen-x64.cc259
-rw-r--r--deps/v8/src/x64/ic-x64.cc98
-rw-r--r--deps/v8/src/x64/lithium-codegen-x64.cc517
-rw-r--r--deps/v8/src/x64/lithium-codegen-x64.h4
-rw-r--r--deps/v8/src/x64/lithium-x64.cc157
-rw-r--r--deps/v8/src/x64/lithium-x64.h237
-rw-r--r--deps/v8/src/x64/macro-assembler-x64.cc139
-rw-r--r--deps/v8/src/x64/macro-assembler-x64.h19
-rw-r--r--deps/v8/src/x64/simulator-x64.h7
-rw-r--r--deps/v8/src/x64/stub-cache-x64.cc37
-rw-r--r--deps/v8/src/x64/virtual-frame-x64.cc13
-rw-r--r--deps/v8/src/x64/virtual-frame-x64.h3
-rw-r--r--deps/v8/test/cctest/cctest.status12
-rw-r--r--deps/v8/test/cctest/test-api.cc206
-rw-r--r--deps/v8/test/cctest/test-debug.cc2
-rw-r--r--deps/v8/test/cctest/test-disasm-ia32.cc8
-rw-r--r--deps/v8/test/cctest/test-strtod.cc19
-rw-r--r--deps/v8/test/es5conform/es5conform.status239
-rw-r--r--deps/v8/test/mjsunit/compiler/regress-arguments.js5
-rw-r--r--deps/v8/test/mjsunit/fuzz-natives.js3
-rw-r--r--deps/v8/test/mjsunit/getter-in-prototype.js8
-rw-r--r--deps/v8/test/mjsunit/indexed-value-properties.js56
-rw-r--r--deps/v8/test/mjsunit/json.js14
-rw-r--r--deps/v8/test/mjsunit/mjsunit.status10
-rw-r--r--deps/v8/test/mjsunit/regexp.js14
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1103.js32
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1104.js37
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1105.js38
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1106.js50
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1107.js32
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1110.js38
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1112.js36
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1117.js35
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1118.js (renamed from deps/v8/src/oprofile-agent.h)63
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1119.js45
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1120.js33
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1121.js34
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1122.js55
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1125.js41
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1126.js35
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1129.js44
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1130.js38
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1131.js29
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1132.js48
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1146.js48
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1149.js39
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1150.js33
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1151.js32
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1156.js49
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1160.js46
-rw-r--r--deps/v8/test/mjsunit/regress/regress-crbug-72736.js37
-rw-r--r--deps/v8/test/mjsunit/strict-mode.js174
-rw-r--r--deps/v8/test/mjsunit/tools/codemap.js14
-rw-r--r--deps/v8/test/mjsunit/tools/csvparser.js2
-rw-r--r--deps/v8/test/mjsunit/tools/profile.js2
-rw-r--r--deps/v8/test/mjsunit/tools/profile_view.js4
-rw-r--r--deps/v8/test/mjsunit/tools/splaytree.js18
-rw-r--r--deps/v8/tools/codemap.js69
-rw-r--r--deps/v8/tools/csvparser.js17
-rw-r--r--deps/v8/tools/gyp/v8.gyp2
-rw-r--r--deps/v8/tools/logreader.js24
-rwxr-xr-xdeps/v8/tools/oprofile/annotate7
-rwxr-xr-xdeps/v8/tools/oprofile/common19
-rwxr-xr-xdeps/v8/tools/oprofile/dump7
-rwxr-xr-xdeps/v8/tools/oprofile/report7
-rwxr-xr-xdeps/v8/tools/oprofile/reset7
-rwxr-xr-xdeps/v8/tools/oprofile/run14
-rwxr-xr-xdeps/v8/tools/oprofile/shutdown7
-rwxr-xr-xdeps/v8/tools/oprofile/start7
-rw-r--r--deps/v8/tools/profile.js153
-rw-r--r--deps/v8/tools/profile_view.js61
-rw-r--r--deps/v8/tools/splaytree.js60
-rw-r--r--deps/v8/tools/tickprocessor.js44
-rw-r--r--deps/v8/tools/utils.py2
-rw-r--r--deps/v8/tools/v8.xcodeproj/project.pbxproj14
-rw-r--r--deps/v8/tools/visual_studio/common.vsprops2
-rw-r--r--deps/v8/tools/visual_studio/v8_base.vcproj8
-rw-r--r--deps/v8/tools/visual_studio/v8_base_arm.vcproj8
-rw-r--r--deps/v8/tools/visual_studio/v8_base_x64.vcproj8
208 files changed, 8294 insertions, 3793 deletions
diff --git a/deps/v8/.gitignore b/deps/v8/.gitignore
index c68dadbe98..db57d1bb32 100644
--- a/deps/v8/.gitignore
+++ b/deps/v8/.gitignore
@@ -20,6 +20,8 @@ d8_g
shell
shell_g
/obj/
+/test/es5conform/data/
+/test/mozilla/data/
/test/sputnik/sputniktests/
/tools/oom_dump/oom_dump
/tools/oom_dump/oom_dump.o
diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS
index 1b756caf27..92b69cb686 100644
--- a/deps/v8/AUTHORS
+++ b/deps/v8/AUTHORS
@@ -9,6 +9,7 @@ ARM Ltd.
Hewlett-Packard Development Company, LP
Alexander Botero-Lowry <alexbl@FreeBSD.org>
+Alexander Karpinsky <homm86@gmail.com>
Alexandre Vassalotti <avassalotti@gmail.com>
Andreas Anyuru <andreas.anyuru@gmail.com>
Bert Belder <bertbelder@gmail.com>
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index d48ded840c..f69be973f0 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,3 +1,64 @@
+2011-02-16: Version 3.1.5
+
+ Change RegExp parsing to disallow /(*)/.
+
+ Added GDB JIT support for ARM.
+
+ Fixed several crash bugs.
+
+ Performance improvements on the IA32 platform.
+
+
+2011-02-14: Version 3.1.4
+
+ Fixed incorrect compare of prototypes of the global object (issue
+ 1082).
+
+ Fixed a bug in optimizing calls to global functions (issue 1106).
+
+ Made optimized Function.prototype.apply safe for non-JSObject first
+ arguments (issue 1128).
+
+ Fixed an error related to element accessors on Object.prototype and
+ parser errors (issue 1130).
+
+ Fixed a bug in sorting an array with large array indices (issue 1131).
+
+ Properly treat exceptions thrown while compiling (issue 1132).
+
+ Fixed bug in register requirements for function.apply (issue 1133).
+
+ Fixed a representation change bug in the Hydrogen graph construction
+ (issue 1134).
+
+ Fixed the semantics of delete on parameters (issue 1136).
+
+ Fixed a optimizer bug related to moving instructions with side effects
+ (issue 1138).
+
+ Added support for the global object in Object.keys (issue 1150).
+
+ Fixed incorrect value for Math.LOG10E
+ (issue http://code.google.com/p/chromium/issues/detail?id=72555)
+
+ Performance improvements on the IA32 platform.
+
+ Implement assignment to undefined reference in ES5 Strict Mode.
+
+
+2011-02-09: Version 3.1.3
+
+ Fixed a bug triggered by functions with huge numbers of declared
+ arguments.
+
+ Fixed zap value aliasing a real object - debug mode only (issue 866).
+
+ Fixed issue where Array.prototype.__proto__ had been set to null
+ (issue 1121).
+
+ Fixed stability bugs in Crankshaft for x86.
+
+
2011-02-07: Version 3.1.2
Added better security checks when accessing properties via
@@ -56,8 +117,8 @@
Introduced partial strict mode support.
- Changed formatting of recursive error messages to match Firefox and Safari
- (issue http://crbug.com/70334).
+ Changed formatting of recursive error messages to match Firefox and
+ Safari (issue http://crbug.com/70334).
Fixed incorrect rounding for float-to-integer conversions for external
array types, which implement the Typed Array spec
diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct
index f877392941..bffbba648d 100644
--- a/deps/v8/SConstruct
+++ b/deps/v8/SConstruct
@@ -136,7 +136,7 @@ LIBRARY_FLAGS = {
'gcc': {
'all': {
'CCFLAGS': ['$DIALECTFLAGS', '$WARNINGFLAGS'],
- 'CXXFLAGS': ['$CCFLAGS', '-fno-rtti', '-fno-exceptions', '-fno-builtin-memcpy'],
+ 'CXXFLAGS': ['$CCFLAGS', '-fno-rtti', '-fno-exceptions'],
},
'visibility:hidden': {
# Use visibility=default to disable this.
@@ -234,9 +234,6 @@ LIBRARY_FLAGS = {
'CCFLAGS': ['-m64'],
'LINKFLAGS': ['-m64'],
},
- 'prof:oprofile': {
- 'CPPDEFINES': ['ENABLE_OPROFILE_AGENT']
- },
'gdbjit:on': {
'CPPDEFINES': ['ENABLE_GDB_JIT_INTERFACE']
}
@@ -538,10 +535,6 @@ SAMPLE_FLAGS = {
'CCFLAGS': ['-g', '-O0'],
'CPPDEFINES': ['DEBUG']
},
- 'prof:oprofile': {
- 'LIBPATH': ['/usr/lib32', '/usr/lib32/oprofile'],
- 'LIBS': ['opagent']
- }
},
'msvc': {
'all': {
@@ -669,8 +662,8 @@ def GuessToolchain(os):
def GuessVisibility(os, toolchain):
- if (os == 'win32' or os == 'cygwin') and toolchain == 'gcc':
- # MinGW / Cygwin can't do it.
+ if os == 'win32' and toolchain == 'gcc':
+ # MinGW can't do it.
return 'default'
elif os == 'solaris':
return 'default'
@@ -691,7 +684,7 @@ SIMPLE_OPTIONS = {
'help': 'the toolchain to use (%s)' % TOOLCHAIN_GUESS
},
'os': {
- 'values': ['freebsd', 'linux', 'macos', 'win32', 'android', 'openbsd', 'solaris', 'cygwin'],
+ 'values': ['freebsd', 'linux', 'macos', 'win32', 'android', 'openbsd', 'solaris'],
'default': OS_GUESS,
'help': 'the os to build for (%s)' % OS_GUESS
},
@@ -711,7 +704,7 @@ SIMPLE_OPTIONS = {
'help': 'build using snapshots for faster start-up'
},
'prof': {
- 'values': ['on', 'off', 'oprofile'],
+ 'values': ['on', 'off'],
'default': 'off',
'help': 'enable profiling of build target'
},
@@ -896,10 +889,8 @@ def VerifyOptions(env):
return False
if env['os'] == 'win32' and env['library'] == 'shared' and env['prof'] == 'on':
Abort("Profiling on windows only supported for static library.")
- if env['gdbjit'] == 'on' and (env['os'] != 'linux' or (env['arch'] != 'ia32' and env['arch'] != 'x64')):
+ if env['gdbjit'] == 'on' and (env['os'] != 'linux' or (env['arch'] != 'ia32' and env['arch'] != 'x64' and env['arch'] != 'arm')):
Abort("GDBJIT interface is supported only for Intel-compatible (ia32 or x64) Linux target.")
- if env['prof'] == 'oprofile' and env['os'] != 'linux':
- Abort("OProfile is only supported on Linux.")
if env['os'] == 'win32' and env['soname'] == 'on':
Abort("Shared Object soname not applicable for Windows.")
if env['soname'] == 'on' and env['library'] == 'static':
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index 44ff2c00e0..83a5744278 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -462,7 +462,6 @@ class V8EXPORT HandleScope {
void Leave();
-
internal::Object** prev_next_;
internal::Object** prev_limit_;
diff --git a/deps/v8/samples/shell.cc b/deps/v8/samples/shell.cc
index 6b67df6c6c..64f78f02c6 100644
--- a/deps/v8/samples/shell.cc
+++ b/deps/v8/samples/shell.cc
@@ -27,6 +27,7 @@
#include <v8.h>
#include <v8-testing.h>
+#include <assert.h>
#include <fcntl.h>
#include <string.h>
#include <stdio.h>
@@ -290,11 +291,13 @@ bool ExecuteString(v8::Handle<v8::String> source,
} else {
v8::Handle<v8::Value> result = script->Run();
if (result.IsEmpty()) {
+ assert(try_catch.HasCaught());
// Print errors that happened during execution.
if (report_exceptions)
ReportException(&try_catch);
return false;
} else {
+ assert(!try_catch.HasCaught());
if (print_result && !result->IsUndefined()) {
// If all went well and the result wasn't undefined then print
// the returned value.
diff --git a/deps/v8/src/SConscript b/deps/v8/src/SConscript
index 44129f67ae..c3561be340 100755
--- a/deps/v8/src/SConscript
+++ b/deps/v8/src/SConscript
@@ -97,7 +97,6 @@ SOURCES = {
objects.cc
objects-printer.cc
objects-visiting.cc
- oprofile-agent.cc
parser.cc
preparser.cc
preparse-data.cc
@@ -234,7 +233,6 @@ SOURCES = {
'os:android': ['platform-linux.cc', 'platform-posix.cc'],
'os:macos': ['platform-macos.cc', 'platform-posix.cc'],
'os:solaris': ['platform-solaris.cc', 'platform-posix.cc'],
- 'os:cygwin': ['platform-cygwin.cc', 'platform-posix.cc'],
'os:nullos': ['platform-nullos.cc'],
'os:win32': ['platform-win32.cc'],
'mode:release': [],
@@ -266,9 +264,6 @@ D8_FILES = {
'os:solaris': [
'd8-posix.cc'
],
- 'os:cygwin': [
- 'd8-posix.cc'
- ],
'os:win32': [
'd8-windows.cc'
],
diff --git a/deps/v8/src/accessors.cc b/deps/v8/src/accessors.cc
index 2b205d5d74..f6d1daf67a 100644
--- a/deps/v8/src/accessors.cc
+++ b/deps/v8/src/accessors.cc
@@ -447,6 +447,7 @@ MaybeObject* Accessors::FunctionGetPrototype(Object* object, void*) {
JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it);
if (!found_it) return Heap::undefined_value();
if (!function->has_prototype()) {
+ if (!function->should_have_prototype()) return Heap::undefined_value();
Object* prototype;
{ MaybeObject* maybe_prototype = Heap::AllocateFunctionPrototype(function);
if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc
index c16244038b..d718c8875b 100644
--- a/deps/v8/src/api.cc
+++ b/deps/v8/src/api.cc
@@ -115,7 +115,9 @@ static FatalErrorCallback exception_behavior = NULL;
static void DefaultFatalErrorHandler(const char* location,
const char* message) {
- ENTER_V8;
+#ifdef ENABLE_VMSTATE_TRACKING
+ i::VMState __state__(i::OTHER);
+#endif
API_Fatal(location, message);
}
@@ -668,7 +670,7 @@ static void InitializeTemplate(i::Handle<i::TemplateInfo> that, int type) {
void Template::Set(v8::Handle<String> name, v8::Handle<Data> value,
v8::PropertyAttribute attribute) {
- if (IsDeadCheck("v8::Template::SetProperty()")) return;
+ if (IsDeadCheck("v8::Template::Set()")) return;
ENTER_V8;
HandleScope scope;
i::Handle<i::Object> list(Utils::OpenHandle(this)->property_list());
@@ -2204,6 +2206,12 @@ bool Value::Equals(Handle<Value> that) const {
ENTER_V8;
i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> other = Utils::OpenHandle(*that);
+ // If both obj and other are JSObjects, we'd better compare by identity
+ // immediately when going into JS builtin. The reason is Invoke
+ // would overwrite global object receiver with global proxy.
+ if (obj->IsJSObject() && other->IsJSObject()) {
+ return *obj == *other;
+ }
i::Object** args[1] = { other.location() };
EXCEPTION_PREAMBLE();
i::Handle<i::Object> result =
@@ -2653,26 +2661,38 @@ int v8::Object::GetIdentityHash() {
ENTER_V8;
HandleScope scope;
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, true));
- i::Handle<i::Object> hash_symbol = i::Factory::identity_hash_symbol();
- i::Handle<i::Object> hash = i::GetProperty(hidden_props, hash_symbol);
- int hash_value;
- if (hash->IsSmi()) {
- hash_value = i::Smi::cast(*hash)->value();
- } else {
- int attempts = 0;
- do {
- // Generate a random 32-bit hash value but limit range to fit
- // within a smi.
- hash_value = i::V8::Random() & i::Smi::kMaxValue;
- attempts++;
- } while (hash_value == 0 && attempts < 30);
- hash_value = hash_value != 0 ? hash_value : 1; // never return 0
- i::SetProperty(hidden_props,
- hash_symbol,
- i::Handle<i::Object>(i::Smi::FromInt(hash_value)),
- static_cast<PropertyAttributes>(None));
+ i::Handle<i::Object> hidden_props_obj(i::GetHiddenProperties(self, true));
+ if (!hidden_props_obj->IsJSObject()) {
+ // We failed to create hidden properties. That's a detached
+ // global proxy.
+ ASSERT(hidden_props_obj->IsUndefined());
+ return 0;
}
+ i::Handle<i::JSObject> hidden_props =
+ i::Handle<i::JSObject>::cast(hidden_props_obj);
+ i::Handle<i::String> hash_symbol = i::Factory::identity_hash_symbol();
+ if (hidden_props->HasLocalProperty(*hash_symbol)) {
+ i::Handle<i::Object> hash = i::GetProperty(hidden_props, hash_symbol);
+ CHECK(!hash.is_null());
+ CHECK(hash->IsSmi());
+ return i::Smi::cast(*hash)->value();
+ }
+
+ int hash_value;
+ int attempts = 0;
+ do {
+ // Generate a random 32-bit hash value but limit range to fit
+ // within a smi.
+ hash_value = i::V8::Random() & i::Smi::kMaxValue;
+ attempts++;
+ } while (hash_value == 0 && attempts < 30);
+ hash_value = hash_value != 0 ? hash_value : 1; // never return 0
+ CHECK(!i::SetLocalPropertyIgnoreAttributes(
+ hidden_props,
+ hash_symbol,
+ i::Handle<i::Object>(i::Smi::FromInt(hash_value)),
+ static_cast<PropertyAttributes>(None)).is_null());
+
return hash_value;
}
@@ -2749,9 +2769,9 @@ void v8::Object::SetIndexedPropertiesToPixelData(uint8_t* data, int length) {
return;
}
i::Handle<i::PixelArray> pixels = i::Factory::NewPixelArray(length, data);
- i::Handle<i::Map> slow_map =
- i::Factory::GetSlowElementsMap(i::Handle<i::Map>(self->map()));
- self->set_map(*slow_map);
+ i::Handle<i::Map> pixel_array_map =
+ i::Factory::GetPixelArrayElementsMap(i::Handle<i::Map>(self->map()));
+ self->set_map(*pixel_array_map);
self->set_elements(*pixels);
}
diff --git a/deps/v8/src/arguments.h b/deps/v8/src/arguments.h
index d51c9e4cb1..5cf8deaa59 100644
--- a/deps/v8/src/arguments.h
+++ b/deps/v8/src/arguments.h
@@ -78,7 +78,7 @@ class Arguments BASE_EMBEDDED {
class CustomArguments : public Relocatable {
public:
inline CustomArguments(Object* data,
- JSObject* self,
+ Object* self,
JSObject* holder) {
values_[2] = self;
values_[1] = holder;
diff --git a/deps/v8/src/arm/assembler-arm-inl.h b/deps/v8/src/arm/assembler-arm-inl.h
index 68d32f1ebf..3b811021b3 100644
--- a/deps/v8/src/arm/assembler-arm-inl.h
+++ b/deps/v8/src/arm/assembler-arm-inl.h
@@ -198,6 +198,8 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
visitor->VisitPointer(target_object_address());
} else if (RelocInfo::IsCodeTarget(mode)) {
visitor->VisitCodeTarget(this);
+ } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
+ visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
visitor->VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
@@ -221,6 +223,8 @@ void RelocInfo::Visit() {
StaticVisitor::VisitPointer(target_object_address());
} else if (RelocInfo::IsCodeTarget(mode)) {
StaticVisitor::VisitCodeTarget(this);
+ } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
+ StaticVisitor::VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
diff --git a/deps/v8/src/arm/assembler-arm.cc b/deps/v8/src/arm/assembler-arm.cc
index 243ba4978a..fb9bb488c9 100644
--- a/deps/v8/src/arm/assembler-arm.cc
+++ b/deps/v8/src/arm/assembler-arm.cc
@@ -272,7 +272,6 @@ static byte* spare_buffer_ = NULL;
Assembler::Assembler(void* buffer, int buffer_size)
: positions_recorder_(this),
allow_peephole_optimization_(false) {
- // BUG(3245989): disable peephole optimization if crankshaft is enabled.
allow_peephole_optimization_ = FLAG_peephole_optimization;
if (buffer == NULL) {
// Do our own buffer management.
@@ -352,6 +351,11 @@ void Assembler::CodeTargetAlign() {
}
+Condition Assembler::GetCondition(Instr instr) {
+ return Instruction::ConditionField(instr);
+}
+
+
bool Assembler::IsBranch(Instr instr) {
return (instr & (B27 | B25)) == (B27 | B25);
}
@@ -428,6 +432,20 @@ Register Assembler::GetRd(Instr instr) {
}
+Register Assembler::GetRn(Instr instr) {
+ Register reg;
+ reg.code_ = Instruction::RnValue(instr);
+ return reg;
+}
+
+
+Register Assembler::GetRm(Instr instr) {
+ Register reg;
+ reg.code_ = Instruction::RmValue(instr);
+ return reg;
+}
+
+
bool Assembler::IsPush(Instr instr) {
return ((instr & ~kRdMask) == kPushRegPattern);
}
@@ -465,6 +483,35 @@ bool Assembler::IsLdrPcImmediateOffset(Instr instr) {
}
+bool Assembler::IsTstImmediate(Instr instr) {
+ return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) ==
+ (I | TST | S);
+}
+
+
+bool Assembler::IsCmpRegister(Instr instr) {
+ return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) ==
+ (CMP | S);
+}
+
+
+bool Assembler::IsCmpImmediate(Instr instr) {
+ return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) ==
+ (I | CMP | S);
+}
+
+
+Register Assembler::GetCmpImmediateRegister(Instr instr) {
+ ASSERT(IsCmpImmediate(instr));
+ return GetRn(instr);
+}
+
+
+int Assembler::GetCmpImmediateRawImmediate(Instr instr) {
+ ASSERT(IsCmpImmediate(instr));
+ return instr & kOff12Mask;
+}
+
// Labels refer to positions in the (to be) generated code.
// There are bound, linked, and unused labels.
//
@@ -1052,6 +1099,13 @@ void Assembler::cmp(Register src1, const Operand& src2, Condition cond) {
}
+void Assembler::cmp_raw_immediate(
+ Register src, int raw_immediate, Condition cond) {
+ ASSERT(is_uint12(raw_immediate));
+ emit(cond | I | CMP | S | src.code() << 16 | raw_immediate);
+}
+
+
void Assembler::cmn(Register src1, const Operand& src2, Condition cond) {
addrmod1(cond | CMN | S, src1, r0, src2);
}
@@ -2363,7 +2417,7 @@ void Assembler::nop(int type) {
bool Assembler::IsNop(Instr instr, int type) {
- // Check for mov rx, rx.
+ // Check for mov rx, rx where x = type.
ASSERT(0 <= type && type <= 14); // mov pc, pc is not a nop.
return instr == (al | 13*B21 | type*B12 | type);
}
diff --git a/deps/v8/src/arm/assembler-arm.h b/deps/v8/src/arm/assembler-arm.h
index fc826c727e..3941c84b34 100644
--- a/deps/v8/src/arm/assembler-arm.h
+++ b/deps/v8/src/arm/assembler-arm.h
@@ -729,6 +729,7 @@ class Assembler : public Malloced {
void cmp(Register src1, Register src2, Condition cond = al) {
cmp(src1, Operand(src2), cond);
}
+ void cmp_raw_immediate(Register src1, int raw_immediate, Condition cond = al);
void cmn(Register src1, const Operand& src2, Condition cond = al);
@@ -1099,6 +1100,7 @@ class Assembler : public Malloced {
static void instr_at_put(byte* pc, Instr instr) {
*reinterpret_cast<Instr*>(pc) = instr;
}
+ static Condition GetCondition(Instr instr);
static bool IsBranch(Instr instr);
static int GetBranchOffset(Instr instr);
static bool IsLdrRegisterImmediate(Instr instr);
@@ -1109,6 +1111,8 @@ class Assembler : public Malloced {
static bool IsAddRegisterImmediate(Instr instr);
static Instr SetAddRegisterImmediateOffset(Instr instr, int offset);
static Register GetRd(Instr instr);
+ static Register GetRn(Instr instr);
+ static Register GetRm(Instr instr);
static bool IsPush(Instr instr);
static bool IsPop(Instr instr);
static bool IsStrRegFpOffset(Instr instr);
@@ -1116,6 +1120,11 @@ class Assembler : public Malloced {
static bool IsStrRegFpNegOffset(Instr instr);
static bool IsLdrRegFpNegOffset(Instr instr);
static bool IsLdrPcImmediateOffset(Instr instr);
+ static bool IsTstImmediate(Instr instr);
+ static bool IsCmpRegister(Instr instr);
+ static bool IsCmpImmediate(Instr instr);
+ static Register GetCmpImmediateRegister(Instr instr);
+ static int GetCmpImmediateRawImmediate(Instr instr);
static bool IsNop(Instr instr, int type = NON_MARKING_NOP);
// Check if is time to emit a constant pool for pending reloc info entries
diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc
index dbb8242c55..6e8fe28a2b 100644
--- a/deps/v8/src/arm/builtins-arm.cc
+++ b/deps/v8/src/arm/builtins-arm.cc
@@ -1156,12 +1156,48 @@ void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
- __ stop("builtins-arm.cc: NotifyOSR");
+ // For now, we are relying on the fact that Runtime::NotifyOSR
+ // doesn't do any garbage collection which allows us to save/restore
+ // the registers without worrying about which of them contain
+ // pointers. This seems a bit fragile.
+ __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
+ __ EnterInternalFrame();
+ __ CallRuntime(Runtime::kNotifyOSR, 0);
+ __ LeaveInternalFrame();
+ __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
+ __ Ret();
}
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
- __ stop("builtins-arm.cc: OnStackReplacement");
+ // Probe the CPU to set the supported features, because this builtin
+ // may be called before the initialization performs CPU setup.
+ CpuFeatures::Probe(false);
+
+ // Lookup the function in the JavaScript frame and push it as an
+ // argument to the on-stack replacement function.
+ __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ EnterInternalFrame();
+ __ push(r0);
+ __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
+ __ LeaveInternalFrame();
+
+ // If the result was -1 it means that we couldn't optimize the
+ // function. Just return and continue in the unoptimized version.
+ Label skip;
+ __ cmp(r0, Operand(Smi::FromInt(-1)));
+ __ b(ne, &skip);
+ __ Ret();
+
+ __ bind(&skip);
+ // Untag the AST id and push it on the stack.
+ __ SmiUntag(r0);
+ __ push(r0);
+
+ // Generate the code for doing the frame-to-frame translation using
+ // the deoptimizer infrastructure.
+ Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
+ generator.Generate();
}
@@ -1195,6 +1231,14 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// Change context eagerly in case we need the global receiver.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+ // Do not transform the receiver for strict mode functions.
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
+ __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
+ kSmiTagSize)));
+ __ b(ne, &shift_arguments);
+
+ // Compute the receiver in non-strict mode.
__ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
__ ldr(r2, MemOperand(r2, -kPointerSize));
// r0: actual number of arguments
@@ -1358,10 +1402,20 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Change context eagerly to get the right global object if necessary.
__ ldr(r0, MemOperand(fp, kFunctionOffset));
__ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
+ // Load the shared function info while the function is still in r0.
+ __ ldr(r1, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
// Compute the receiver.
Label call_to_object, use_global_receiver, push_receiver;
__ ldr(r0, MemOperand(fp, kRecvOffset));
+
+ // Do not transform the receiver for strict mode functions.
+ __ ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCompilerHintsOffset));
+ __ tst(r1, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
+ kSmiTagSize)));
+ __ b(ne, &push_receiver);
+
+ // Compute the receiver in non-strict mode.
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &call_to_object);
__ LoadRoot(r1, Heap::kNullValueRootIndex);
diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc
index 437dfd2733..cc49f7e4e5 100644
--- a/deps/v8/src/arm/code-stubs-arm.cc
+++ b/deps/v8/src/arm/code-stubs-arm.cc
@@ -1298,7 +1298,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result;
Label not_heap_number;
- Register scratch = r7;
+ Register scratch = r9.is(tos_) ? r7 : r9;
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(tos_, ip);
@@ -2588,6 +2588,39 @@ void TypeRecordingBinaryOpStub::GenerateSmiSmiOperation(
__ eor(right, left, Operand(right));
__ Ret();
break;
+ case Token::SAR:
+ // Remove tags from right operand.
+ __ GetLeastBitsFromSmi(scratch1, right, 5);
+ __ mov(right, Operand(left, ASR, scratch1));
+ // Smi tag result.
+ __ bic(right, right, Operand(kSmiTagMask));
+ __ Ret();
+ break;
+ case Token::SHR:
+ // Remove tags from operands. We can't do this on a 31 bit number
+ // because then the 0s get shifted into bit 30 instead of bit 31.
+ __ SmiUntag(scratch1, left);
+ __ GetLeastBitsFromSmi(scratch2, right, 5);
+ __ mov(scratch1, Operand(scratch1, LSR, scratch2));
+ // Unsigned shift is not allowed to produce a negative number, so
+ // check the sign bit and the sign bit after Smi tagging.
+ __ tst(scratch1, Operand(0xc0000000));
+ __ b(ne, &not_smi_result);
+ // Smi tag result.
+ __ SmiTag(right, scratch1);
+ __ Ret();
+ break;
+ case Token::SHL:
+ // Remove tags from operands.
+ __ SmiUntag(scratch1, left);
+ __ GetLeastBitsFromSmi(scratch2, right, 5);
+ __ mov(scratch1, Operand(scratch1, LSL, scratch2));
+ // Check that the signed result fits in a Smi.
+ __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
+ __ b(mi, &not_smi_result);
+ __ SmiTag(right, scratch1);
+ __ Ret();
+ break;
default:
UNREACHABLE();
}
@@ -2703,7 +2736,10 @@ void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
}
case Token::BIT_OR:
case Token::BIT_XOR:
- case Token::BIT_AND: {
+ case Token::BIT_AND:
+ case Token::SAR:
+ case Token::SHR:
+ case Token::SHL: {
if (smi_operands) {
__ SmiUntag(r3, left);
__ SmiUntag(r2, right);
@@ -2726,6 +2762,8 @@ void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
d0,
not_numbers);
}
+
+ Label result_not_a_smi;
switch (op_) {
case Token::BIT_OR:
__ orr(r2, r3, Operand(r2));
@@ -2736,11 +2774,35 @@ void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
case Token::BIT_AND:
__ and_(r2, r3, Operand(r2));
break;
+ case Token::SAR:
+ // Use only the 5 least significant bits of the shift count.
+ __ and_(r2, r2, Operand(0x1f));
+ __ GetLeastBitsFromInt32(r2, r2, 5);
+ __ mov(r2, Operand(r3, ASR, r2));
+ break;
+ case Token::SHR:
+ // Use only the 5 least significant bits of the shift count.
+ __ GetLeastBitsFromInt32(r2, r2, 5);
+ __ mov(r2, Operand(r3, LSR, r2), SetCC);
+ // SHR is special because it is required to produce a positive answer.
+ // The code below for writing into heap numbers isn't capable of
+ // writing the register as an unsigned int so we go to slow case if we
+ // hit this case.
+ if (CpuFeatures::IsSupported(VFP3)) {
+ __ b(mi, &result_not_a_smi);
+ } else {
+ __ b(mi, not_numbers);
+ }
+ break;
+ case Token::SHL:
+ // Use only the 5 least significant bits of the shift count.
+ __ GetLeastBitsFromInt32(r2, r2, 5);
+ __ mov(r2, Operand(r3, LSL, r2));
+ break;
default:
UNREACHABLE();
}
- Label result_not_a_smi;
// Check that the *signed* result fits in a smi.
__ add(r3, r2, Operand(0x40000000), SetCC);
__ b(mi, &result_not_a_smi);
@@ -2760,10 +2822,15 @@ void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
__ mov(r0, Operand(r5));
if (CpuFeatures::IsSupported(VFP3)) {
- // Convert the int32 in r2 to the heap number in r0. r3 is corrupted.
+ // Convert the int32 in r2 to the heap number in r0. r3 is corrupted. As
+ // mentioned above SHR needs to always produce a positive result.
CpuFeatures::Scope scope(VFP3);
__ vmov(s0, r2);
- __ vcvt_f64_s32(d0, s0);
+ if (op_ == Token::SHR) {
+ __ vcvt_f64_u32(d0, s0);
+ } else {
+ __ vcvt_f64_s32(d0, s0);
+ }
__ sub(r3, r0, Operand(kHeapObjectTag));
__ vstr(d0, r3, HeapNumber::kValueOffset);
__ Ret();
@@ -2790,15 +2857,6 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
Label not_smis;
- ASSERT(op_ == Token::ADD ||
- op_ == Token::SUB ||
- op_ == Token::MUL ||
- op_ == Token::DIV ||
- op_ == Token::MOD ||
- op_ == Token::BIT_OR ||
- op_ == Token::BIT_AND ||
- op_ == Token::BIT_XOR);
-
Register left = r1;
Register right = r0;
Register scratch1 = r7;
@@ -2825,15 +2883,6 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
Label not_smis, call_runtime;
- ASSERT(op_ == Token::ADD ||
- op_ == Token::SUB ||
- op_ == Token::MUL ||
- op_ == Token::DIV ||
- op_ == Token::MOD ||
- op_ == Token::BIT_OR ||
- op_ == Token::BIT_AND ||
- op_ == Token::BIT_XOR);
-
if (result_type_ == TRBinaryOpIC::UNINITIALIZED ||
result_type_ == TRBinaryOpIC::SMI) {
// Only allow smi results.
@@ -2864,15 +2913,6 @@ void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
- ASSERT(op_ == Token::ADD ||
- op_ == Token::SUB ||
- op_ == Token::MUL ||
- op_ == Token::DIV ||
- op_ == Token::MOD ||
- op_ == Token::BIT_OR ||
- op_ == Token::BIT_AND ||
- op_ == Token::BIT_XOR);
-
ASSERT(operands_type_ == TRBinaryOpIC::INT32);
GenerateTypeTransition(masm);
@@ -2880,15 +2920,6 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
- ASSERT(op_ == Token::ADD ||
- op_ == Token::SUB ||
- op_ == Token::MUL ||
- op_ == Token::DIV ||
- op_ == Token::MOD ||
- op_ == Token::BIT_OR ||
- op_ == Token::BIT_AND ||
- op_ == Token::BIT_XOR);
-
Label not_numbers, call_runtime;
ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER);
@@ -2903,15 +2934,6 @@ void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
- ASSERT(op_ == Token::ADD ||
- op_ == Token::SUB ||
- op_ == Token::MUL ||
- op_ == Token::DIV ||
- op_ == Token::MOD ||
- op_ == Token::BIT_OR ||
- op_ == Token::BIT_AND ||
- op_ == Token::BIT_XOR);
-
Label call_runtime;
GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
@@ -2984,6 +3006,15 @@ void TypeRecordingBinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) {
case Token::BIT_XOR:
__ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
break;
+ case Token::SAR:
+ __ InvokeBuiltin(Builtins::SAR, JUMP_JS);
+ break;
+ case Token::SHR:
+ __ InvokeBuiltin(Builtins::SHR, JUMP_JS);
+ break;
+ case Token::SHL:
+ __ InvokeBuiltin(Builtins::SHL, JUMP_JS);
+ break;
default:
UNREACHABLE();
}
@@ -3268,105 +3299,13 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
- // r0 holds the exception.
-
- // Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
-
- // Drop the sp to the top of the handler.
- __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
- __ ldr(sp, MemOperand(r3));
-
- // Restore the next handler and frame pointer, discard handler state.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
- __ pop(r2);
- __ str(r2, MemOperand(r3));
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- __ ldm(ia_w, sp, r3.bit() | fp.bit()); // r3: discarded state.
-
- // Before returning we restore the context from the frame pointer if
- // not NULL. The frame pointer is NULL in the exception handler of a
- // JS entry frame.
- __ cmp(fp, Operand(0, RelocInfo::NONE));
- // Set cp to NULL if fp is NULL.
- __ mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq);
- // Restore cp otherwise.
- __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
-#ifdef DEBUG
- if (FLAG_debug_code) {
- __ mov(lr, Operand(pc));
- }
-#endif
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
- __ pop(pc);
+ __ Throw(r0);
}
void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
UncatchableExceptionType type) {
- // Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
-
- // Drop sp to the top stack handler.
- __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
- __ ldr(sp, MemOperand(r3));
-
- // Unwind the handlers until the ENTRY handler is found.
- Label loop, done;
- __ bind(&loop);
- // Load the type of the current stack handler.
- const int kStateOffset = StackHandlerConstants::kStateOffset;
- __ ldr(r2, MemOperand(sp, kStateOffset));
- __ cmp(r2, Operand(StackHandler::ENTRY));
- __ b(eq, &done);
- // Fetch the next handler in the list.
- const int kNextOffset = StackHandlerConstants::kNextOffset;
- __ ldr(sp, MemOperand(sp, kNextOffset));
- __ jmp(&loop);
- __ bind(&done);
-
- // Set the top handler address to next handler past the current ENTRY handler.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
- __ pop(r2);
- __ str(r2, MemOperand(r3));
-
- if (type == OUT_OF_MEMORY) {
- // Set external caught exception to false.
- ExternalReference external_caught(Top::k_external_caught_exception_address);
- __ mov(r0, Operand(false, RelocInfo::NONE));
- __ mov(r2, Operand(external_caught));
- __ str(r0, MemOperand(r2));
-
- // Set pending exception and r0 to out of memory exception.
- Failure* out_of_memory = Failure::OutOfMemoryException();
- __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
- __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
- __ str(r0, MemOperand(r2));
- }
-
- // Stack layout at this point. See also StackHandlerConstants.
- // sp -> state (ENTRY)
- // fp
- // lr
-
- // Discard handler state (r2 is not used) and restore frame pointer.
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- __ ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state.
- // Before returning we restore the context from the frame pointer if
- // not NULL. The frame pointer is NULL in the exception handler of a
- // JS entry frame.
- __ cmp(fp, Operand(0, RelocInfo::NONE));
- // Set cp to NULL if fp is NULL.
- __ mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq);
- // Restore cp otherwise.
- __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
-#ifdef DEBUG
- if (FLAG_debug_code) {
- __ mov(lr, Operand(pc));
- }
-#endif
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
- __ pop(pc);
+ __ ThrowUncatchable(type, r0);
}
@@ -3453,7 +3392,9 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// r0:r1: result
// sp: stack pointer
// fp: frame pointer
- __ LeaveExitFrame(save_doubles_);
+ // Callee-saved register r4 still holds argc.
+ __ LeaveExitFrame(save_doubles_, r4);
+ __ mov(pc, lr);
// check if we should retry or throw exception
Label retry;
@@ -4232,24 +4173,27 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(&Counters::regexp_entry_native, 1, r0, r2);
static const int kRegExpExecuteArguments = 7;
- __ push(lr);
- __ PrepareCallCFunction(kRegExpExecuteArguments, r0);
+ static const int kParameterRegisters = 4;
+ __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
- // Argument 7 (sp[8]): Indicate that this is a direct call from JavaScript.
+ // Stack pointer now points to cell where return address is to be written.
+ // Arguments are before that on the stack or in registers.
+
+ // Argument 7 (sp[12]): Indicate that this is a direct call from JavaScript.
__ mov(r0, Operand(1));
- __ str(r0, MemOperand(sp, 2 * kPointerSize));
+ __ str(r0, MemOperand(sp, 3 * kPointerSize));
- // Argument 6 (sp[4]): Start (high end) of backtracking stack memory area.
+ // Argument 6 (sp[8]): Start (high end) of backtracking stack memory area.
__ mov(r0, Operand(address_of_regexp_stack_memory_address));
__ ldr(r0, MemOperand(r0, 0));
__ mov(r2, Operand(address_of_regexp_stack_memory_size));
__ ldr(r2, MemOperand(r2, 0));
__ add(r0, r0, Operand(r2));
- __ str(r0, MemOperand(sp, 1 * kPointerSize));
+ __ str(r0, MemOperand(sp, 2 * kPointerSize));
- // Argument 5 (sp[0]): static offsets vector buffer.
+ // Argument 5 (sp[4]): static offsets vector buffer.
__ mov(r0, Operand(ExternalReference::address_of_static_offsets_vector()));
- __ str(r0, MemOperand(sp, 0 * kPointerSize));
+ __ str(r0, MemOperand(sp, 1 * kPointerSize));
// For arguments 4 and 3 get string length, calculate start of string data and
// calculate the shift of the index (0 for ASCII and 1 for two byte).
@@ -4271,8 +4215,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Locate the code entry and call it.
__ add(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ CallCFunction(r7, kRegExpExecuteArguments);
- __ pop(lr);
+ DirectCEntryStub stub;
+ stub.GenerateCall(masm, r7);
+
+ __ LeaveExitFrame(false, no_reg);
// r0: result
// subject: subject string (callee saved)
@@ -4281,6 +4227,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
+
__ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS));
__ b(eq, &success);
Label failure;
@@ -4293,12 +4240,26 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
- __ mov(r0, Operand(ExternalReference::the_hole_value_location()));
- __ ldr(r0, MemOperand(r0, 0));
- __ mov(r1, Operand(ExternalReference(Top::k_pending_exception_address)));
+ __ mov(r1, Operand(ExternalReference::the_hole_value_location()));
__ ldr(r1, MemOperand(r1, 0));
+ __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
+ __ ldr(r0, MemOperand(r2, 0));
__ cmp(r0, r1);
__ b(eq, &runtime);
+
+ __ str(r1, MemOperand(r2, 0)); // Clear pending exception.
+
+ // Check if the exception is a termination. If so, throw as uncatchable.
+ __ LoadRoot(ip, Heap::kTerminationExceptionRootIndex);
+ __ cmp(r0, ip);
+ Label termination_exception;
+ __ b(eq, &termination_exception);
+
+ __ Throw(r0); // Expects thrown value in r0.
+
+ __ bind(&termination_exception);
+ __ ThrowUncatchable(TERMINATION, r0); // Expects thrown value in r0.
+
__ bind(&failure);
// For failure and exception return null.
__ mov(r0, Operand(Factory::null_value()));
@@ -5809,10 +5770,9 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
// For equality we do not care about the sign of the result.
__ sub(r0, r0, r1, SetCC);
} else {
- __ sub(r1, r1, r0, SetCC);
- // Correct sign of result in case of overflow.
- __ rsb(r1, r1, Operand(0), SetCC, vs);
- __ mov(r0, r1);
+ // Untag before subtracting to avoid handling overflow.
+ __ SmiUntag(r1);
+ __ sub(r0, r1, SmiUntagOperand(r0));
}
__ Ret();
@@ -5923,14 +5883,24 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
ApiFunction *function) {
__ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
RelocInfo::CODE_TARGET));
- // Push return address (accessible to GC through exit frame pc).
__ mov(r2,
Operand(ExternalReference(function, ExternalReference::DIRECT_CALL)));
+ // Push return address (accessible to GC through exit frame pc).
__ str(pc, MemOperand(sp, 0));
__ Jump(r2); // Call the api function.
}
+void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
+ Register target) {
+ __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
+ RelocInfo::CODE_TARGET));
+ // Push return address (accessible to GC through exit frame pc).
+ __ str(pc, MemOperand(sp, 0));
+ __ Jump(target); // Call the C++ function.
+}
+
+
void GenerateFastPixelArrayLoad(MacroAssembler* masm,
Register receiver,
Register key,
@@ -5998,6 +5968,91 @@ void GenerateFastPixelArrayLoad(MacroAssembler* masm,
}
+void GenerateFastPixelArrayStore(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register value,
+ Register elements,
+ Register elements_map,
+ Register scratch1,
+ Register scratch2,
+ bool load_elements_from_receiver,
+ bool load_elements_map_from_elements,
+ Label* key_not_smi,
+ Label* value_not_smi,
+ Label* not_pixel_array,
+ Label* out_of_range) {
+ // Register use:
+ // receiver - holds the receiver and is unchanged unless the
+ // store succeeds.
+ // key - holds the key (must be a smi) and is unchanged.
+ // value - holds the value (must be a smi) and is unchanged.
+ // elements - holds the element object of the receiver on entry if
+ // load_elements_from_receiver is false, otherwise used
+ // internally to store the pixel arrays elements and
+ // external array pointer.
+ // elements_map - holds the map of the element object if
+ // load_elements_map_from_elements is false, otherwise
+ // loaded with the element map.
+ //
+ Register external_pointer = elements;
+ Register untagged_key = scratch1;
+ Register untagged_value = scratch2;
+
+ if (load_elements_from_receiver) {
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ }
+
+ // By passing NULL as not_pixel_array, callers signal that they have already
+ // verified that the receiver has pixel array elements.
+ if (not_pixel_array != NULL) {
+ if (load_elements_map_from_elements) {
+ __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
+ }
+ __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
+ __ cmp(elements_map, ip);
+ __ b(ne, not_pixel_array);
+ } else {
+ if (FLAG_debug_code) {
+ // Map check should have already made sure that elements is a pixel array.
+ __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
+ __ cmp(elements_map, ip);
+ __ Assert(eq, "Elements isn't a pixel array");
+ }
+ }
+
+ // Some callers already have verified that the key is a smi. key_not_smi is
+ // set to NULL as a sentinel for that case. Otherwise, add an explicit check
+ // to ensure the key is a smi must be added.
+ if (key_not_smi != NULL) {
+ __ JumpIfNotSmi(key, key_not_smi);
+ } else {
+ if (FLAG_debug_code) {
+ __ AbortIfNotSmi(key);
+ }
+ }
+
+ __ SmiUntag(untagged_key, key);
+
+ // Perform bounds check.
+ __ ldr(scratch2, FieldMemOperand(elements, PixelArray::kLengthOffset));
+ __ cmp(untagged_key, scratch2);
+ __ b(hs, out_of_range); // unsigned check handles negative keys.
+
+ __ JumpIfNotSmi(value, value_not_smi);
+ __ SmiUntag(untagged_value, value);
+
+ // Clamp the value to [0..255].
+ __ Usat(untagged_value, 8, Operand(untagged_value));
+ // Get the pointer to the external array. This clobbers elements.
+ __ ldr(external_pointer,
+ FieldMemOperand(elements, PixelArray::kExternalPointerOffset));
+ __ strb(untagged_value, MemOperand(external_pointer, untagged_key));
+ __ Ret();
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/deps/v8/src/arm/code-stubs-arm.h b/deps/v8/src/arm/code-stubs-arm.h
index bf7d635487..baaa2f2bda 100644
--- a/deps/v8/src/arm/code-stubs-arm.h
+++ b/deps/v8/src/arm/code-stubs-arm.h
@@ -581,6 +581,7 @@ class DirectCEntryStub: public CodeStub {
DirectCEntryStub() {}
void Generate(MacroAssembler* masm);
void GenerateCall(MacroAssembler* masm, ApiFunction *function);
+ void GenerateCall(MacroAssembler* masm, Register target);
private:
Major MajorKey() { return DirectCEntry; }
@@ -589,14 +590,14 @@ class DirectCEntryStub: public CodeStub {
};
-// Generate code the to load an element from a pixel array. The receiver is
-// assumed to not be a smi and to have elements, the caller must guarantee this
-// precondition. If the receiver does not have elements that are pixel arrays,
-// the generated code jumps to not_pixel_array. If key is not a smi, then the
-// generated code branches to key_not_smi. Callers can specify NULL for
-// key_not_smi to signal that a smi check has already been performed on key so
-// that the smi check is not generated . If key is not a valid index within the
-// bounds of the pixel array, the generated code jumps to out_of_range.
+// Generate code to load an element from a pixel array. The receiver is assumed
+// to not be a smi and to have elements, the caller must guarantee this
+// precondition. If key is not a smi, then the generated code branches to
+// key_not_smi. Callers can specify NULL for key_not_smi to signal that a smi
+// check has already been performed on key so that the smi check is not
+// generated. If key is not a valid index within the bounds of the pixel array,
+// the generated code jumps to out_of_range. receiver, key and elements are
+// unchanged throughout the generated code sequence.
void GenerateFastPixelArrayLoad(MacroAssembler* masm,
Register receiver,
Register key,
@@ -609,6 +610,35 @@ void GenerateFastPixelArrayLoad(MacroAssembler* masm,
Label* key_not_smi,
Label* out_of_range);
+// Generate code to store an element into a pixel array, clamping values between
+// [0..255]. The receiver is assumed to not be a smi and to have elements, the
+// caller must guarantee this precondition. If key is not a smi, then the
+// generated code branches to key_not_smi. Callers can specify NULL for
+// key_not_smi to signal that a smi check has already been performed on key so
+// that the smi check is not generated. If value is not a smi, the generated
+// code will branch to value_not_smi. If the receiver doesn't have pixel array
+// elements, the generated code will branch to not_pixel_array, unless
+// not_pixel_array is NULL, in which case the caller must ensure that the
+// receiver has pixel array elements. If key is not a valid index within the
+// bounds of the pixel array, the generated code jumps to out_of_range. If
+// load_elements_from_receiver is true, then the elements of receiver is loaded
+// into elements, otherwise elements is assumed to already be the receiver's
+// elements. If load_elements_map_from_elements is true, elements_map is loaded
+// from elements, otherwise it is assumed to already contain the element map.
+void GenerateFastPixelArrayStore(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register value,
+ Register elements,
+ Register elements_map,
+ Register scratch1,
+ Register scratch2,
+ bool load_elements_from_receiver,
+ bool load_elements_map_from_elements,
+ Label* key_not_smi,
+ Label* value_not_smi,
+ Label* not_pixel_array,
+ Label* out_of_range);
} } // namespace v8::internal
diff --git a/deps/v8/src/arm/codegen-arm.cc b/deps/v8/src/arm/codegen-arm.cc
index 12842230bf..a3921d8efc 100644
--- a/deps/v8/src/arm/codegen-arm.cc
+++ b/deps/v8/src/arm/codegen-arm.cc
@@ -2192,15 +2192,10 @@ void CodeGenerator::GenerateReturnSequence() {
DeleteFrame();
#ifdef DEBUG
- // Check that the size of the code used for returning matches what is
- // expected by the debugger. If the sp_delts above cannot be encoded in
- // the add instruction the add will generate two instructions.
- int return_sequence_length =
- masm_->InstructionsGeneratedSince(&check_exit_codesize);
- CHECK(return_sequence_length ==
- Assembler::kJSReturnSequenceInstructions ||
- return_sequence_length ==
- Assembler::kJSReturnSequenceInstructions + 1);
+ // Check that the size of the code used for returning is large enough
+ // for the debugger's requirements.
+ ASSERT(Assembler::kJSReturnSequenceInstructions <=
+ masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
}
}
@@ -5849,15 +5844,20 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
if (property != NULL) {
Load(property->obj());
Load(property->key());
- frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
+ frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
+ frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 3);
frame_->EmitPush(r0);
} else if (variable != NULL) {
+ // Delete of an unqualified identifier is disallowed in strict mode
+ // so this code can only be reached in non-strict mode.
+ ASSERT(strict_mode_flag() == kNonStrictMode);
Slot* slot = variable->AsSlot();
if (variable->is_global()) {
LoadGlobal();
frame_->EmitPush(Operand(variable->name()));
- frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
+ frame_->EmitPush(Operand(Smi::FromInt(kNonStrictMode)));
+ frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 3);
frame_->EmitPush(r0);
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
@@ -6931,7 +6931,7 @@ void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
Result result;
if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
- frame()->CallStoreIC(name, is_contextual);
+ frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
} else {
// Inline the in-object property case.
JumpTarget slow, done;
diff --git a/deps/v8/src/arm/constants-arm.h b/deps/v8/src/arm/constants-arm.h
index 5671feecba..7ac38ed3ea 100644
--- a/deps/v8/src/arm/constants-arm.h
+++ b/deps/v8/src/arm/constants-arm.h
@@ -582,6 +582,7 @@ class Instruction {
inline int TypeValue() const { return Bits(27, 25); }
inline int RnValue() const { return Bits(19, 16); }
+ DECLARE_STATIC_ACCESSOR(RnValue);
inline int RdValue() const { return Bits(15, 12); }
DECLARE_STATIC_ACCESSOR(RdValue);
@@ -625,6 +626,7 @@ class Instruction {
inline int SValue() const { return Bit(20); }
// with register
inline int RmValue() const { return Bits(3, 0); }
+ DECLARE_STATIC_ACCESSOR(RmValue);
inline int ShiftValue() const { return static_cast<ShiftOp>(Bits(6, 5)); }
inline ShiftOp ShiftField() const {
return static_cast<ShiftOp>(BitField(6, 5));
diff --git a/deps/v8/src/arm/deoptimizer-arm.cc b/deps/v8/src/arm/deoptimizer-arm.cc
index e05001f3c3..9af7a8d190 100644
--- a/deps/v8/src/arm/deoptimizer-arm.cc
+++ b/deps/v8/src/arm/deoptimizer-arm.cc
@@ -124,19 +124,204 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
void Deoptimizer::PatchStackCheckCodeAt(Address pc_after,
Code* check_code,
Code* replacement_code) {
- UNIMPLEMENTED();
+ const int kInstrSize = Assembler::kInstrSize;
+ // The call of the stack guard check has the following form:
+ // e1 5d 00 0c cmp sp, <limit>
+ // 2a 00 00 01 bcs ok
+ // e5 9f c? ?? ldr ip, [pc, <stack guard address>]
+ // e1 2f ff 3c blx ip
+ ASSERT(Memory::int32_at(pc_after - kInstrSize) ==
+ (al | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BLX | ip.code()));
+ ASSERT(Assembler::IsLdrPcImmediateOffset(
+ Assembler::instr_at(pc_after - 2 * kInstrSize)));
+
+ // We patch the code to the following form:
+ // e1 5d 00 0c cmp sp, <limit>
+ // e1 a0 00 00 mov r0, r0 (NOP)
+ // e5 9f c? ?? ldr ip, [pc, <on-stack replacement address>]
+ // e1 2f ff 3c blx ip
+ // and overwrite the constant containing the
+ // address of the stack check stub.
+
+ // Replace conditional jump with NOP.
+ CodePatcher patcher(pc_after - 3 * kInstrSize, 1);
+ patcher.masm()->nop();
+
+ // Replace the stack check address in the constant pool
+ // with the entry address of the replacement code.
+ uint32_t stack_check_address_offset = Memory::uint16_at(pc_after -
+ 2 * kInstrSize) & 0xfff;
+ Address stack_check_address_pointer = pc_after + stack_check_address_offset;
+ ASSERT(Memory::uint32_at(stack_check_address_pointer) ==
+ reinterpret_cast<uint32_t>(check_code->entry()));
+ Memory::uint32_at(stack_check_address_pointer) =
+ reinterpret_cast<uint32_t>(replacement_code->entry());
}
void Deoptimizer::RevertStackCheckCodeAt(Address pc_after,
Code* check_code,
Code* replacement_code) {
- UNIMPLEMENTED();
+ const int kInstrSize = Assembler::kInstrSize;
+ ASSERT(Memory::uint32_at(pc_after - kInstrSize) == 0xe12fff3c);
+ ASSERT(Memory::uint8_at(pc_after - kInstrSize - 1) == 0xe5);
+ ASSERT(Memory::uint8_at(pc_after - kInstrSize - 2) == 0x9f);
+
+ // Replace NOP with conditional jump.
+ CodePatcher patcher(pc_after - 3 * kInstrSize, 1);
+ patcher.masm()->b(+4, cs);
+
+ // Replace the stack check address in the constant pool
+ // with the entry address of the replacement code.
+ uint32_t stack_check_address_offset = Memory::uint16_at(pc_after -
+ 2 * kInstrSize) & 0xfff;
+ Address stack_check_address_pointer = pc_after + stack_check_address_offset;
+ ASSERT(Memory::uint32_at(stack_check_address_pointer) ==
+ reinterpret_cast<uint32_t>(replacement_code->entry()));
+ Memory::uint32_at(stack_check_address_pointer) =
+ reinterpret_cast<uint32_t>(check_code->entry());
+}
+
+
+static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
+ ByteArray* translations = data->TranslationByteArray();
+ int length = data->DeoptCount();
+ for (int i = 0; i < length; i++) {
+ if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
+ TranslationIterator it(translations, data->TranslationIndex(i)->value());
+ int value = it.Next();
+ ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
+ // Read the number of frames.
+ value = it.Next();
+ if (value == 1) return i;
+ }
+ }
+ UNREACHABLE();
+ return -1;
}
void Deoptimizer::DoComputeOsrOutputFrame() {
- UNIMPLEMENTED();
+ DeoptimizationInputData* data = DeoptimizationInputData::cast(
+ optimized_code_->deoptimization_data());
+ unsigned ast_id = data->OsrAstId()->value();
+
+ int bailout_id = LookupBailoutId(data, ast_id);
+ unsigned translation_index = data->TranslationIndex(bailout_id)->value();
+ ByteArray* translations = data->TranslationByteArray();
+
+ TranslationIterator iterator(translations, translation_index);
+ Translation::Opcode opcode =
+ static_cast<Translation::Opcode>(iterator.Next());
+ ASSERT(Translation::BEGIN == opcode);
+ USE(opcode);
+ int count = iterator.Next();
+ ASSERT(count == 1);
+ USE(count);
+
+ opcode = static_cast<Translation::Opcode>(iterator.Next());
+ USE(opcode);
+ ASSERT(Translation::FRAME == opcode);
+ unsigned node_id = iterator.Next();
+ USE(node_id);
+ ASSERT(node_id == ast_id);
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
+ USE(function);
+ ASSERT(function == function_);
+ unsigned height = iterator.Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ USE(height_in_bytes);
+
+ unsigned fixed_size = ComputeFixedSize(function_);
+ unsigned input_frame_size = input_->GetFrameSize();
+ ASSERT(fixed_size + height_in_bytes == input_frame_size);
+
+ unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
+ unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
+ unsigned outgoing_size = outgoing_height * kPointerSize;
+ unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
+ ASSERT(outgoing_size == 0); // OSR does not happen in the middle of a call.
+
+ if (FLAG_trace_osr) {
+ PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
+ reinterpret_cast<intptr_t>(function_));
+ function_->PrintName();
+ PrintF(" => node=%u, frame=%d->%d]\n",
+ ast_id,
+ input_frame_size,
+ output_frame_size);
+ }
+
+ // There's only one output frame in the OSR case.
+ output_count_ = 1;
+ output_ = new FrameDescription*[1];
+ output_[0] = new(output_frame_size) FrameDescription(
+ output_frame_size, function_);
+
+ // Clear the incoming parameters in the optimized frame to avoid
+ // confusing the garbage collector.
+ unsigned output_offset = output_frame_size - kPointerSize;
+ int parameter_count = function_->shared()->formal_parameter_count() + 1;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_[0]->SetFrameSlot(output_offset, 0);
+ output_offset -= kPointerSize;
+ }
+
+ // Translate the incoming parameters. This may overwrite some of the
+ // incoming argument slots we've just cleared.
+ int input_offset = input_frame_size - kPointerSize;
+ bool ok = true;
+ int limit = input_offset - (parameter_count * kPointerSize);
+ while (ok && input_offset > limit) {
+ ok = DoOsrTranslateCommand(&iterator, &input_offset);
+ }
+
+ // There are no translation commands for the caller's pc and fp, the
+ // context, and the function. Set them up explicitly.
+ for (int i = 0; ok && i < 4; i++) {
+ uint32_t input_value = input_->GetFrameSlot(input_offset);
+ if (FLAG_trace_osr) {
+ PrintF(" [sp + %d] <- 0x%08x ; [sp + %d] (fixed part)\n",
+ output_offset,
+ input_value,
+ input_offset);
+ }
+ output_[0]->SetFrameSlot(output_offset, input_->GetFrameSlot(input_offset));
+ input_offset -= kPointerSize;
+ output_offset -= kPointerSize;
+ }
+
+ // Translate the rest of the frame.
+ while (ok && input_offset >= 0) {
+ ok = DoOsrTranslateCommand(&iterator, &input_offset);
+ }
+
+ // If translation of any command failed, continue using the input frame.
+ if (!ok) {
+ delete output_[0];
+ output_[0] = input_;
+ output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
+ } else {
+ // Setup the frame pointer and the context pointer.
+ output_[0]->SetRegister(fp.code(), input_->GetRegister(fp.code()));
+ output_[0]->SetRegister(cp.code(), input_->GetRegister(cp.code()));
+
+ unsigned pc_offset = data->OsrPcOffset()->value();
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ optimized_code_->entry() + pc_offset);
+ output_[0]->SetPc(pc);
+ }
+ Code* continuation = Builtins::builtin(Builtins::NotifyOSR);
+ output_[0]->SetContinuation(
+ reinterpret_cast<uint32_t>(continuation->entry()));
+
+ if (FLAG_trace_osr) {
+ PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
+ ok ? "finished" : "aborted",
+ reinterpret_cast<intptr_t>(function));
+ function->PrintName();
+ PrintF(" => pc=0x%0x]\n", output_[0]->GetPc());
+ }
}
@@ -318,7 +503,6 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
// easily ported.
void Deoptimizer::EntryGenerator::Generate() {
GeneratePrologue();
- // TOS: bailout-id; TOS+1: return address if not EAGER.
CpuFeatures::Scope scope(VFP3);
// Save all general purpose registers before messing with them.
const int kNumberOfRegisters = Register::kNumRegisters;
@@ -353,6 +537,10 @@ void Deoptimizer::EntryGenerator::Generate() {
__ mov(r3, Operand(0));
// Correct one word for bailout id.
__ add(r4, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
+ } else if (type() == OSR) {
+ __ mov(r3, lr);
+ // Correct one word for bailout id.
+ __ add(r4, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
} else {
__ mov(r3, lr);
// Correct two words for bailout id and return address.
@@ -375,7 +563,6 @@ void Deoptimizer::EntryGenerator::Generate() {
// frame descriptor pointer to r1 (deoptimizer->input_);
__ ldr(r1, MemOperand(r0, Deoptimizer::input_offset()));
-
// Copy core registers into FrameDescription::registers_[kNumRegisters].
ASSERT(Register::kNumRegisters == kNumberOfRegisters);
for (int i = 0; i < kNumberOfRegisters; i++) {
@@ -396,7 +583,7 @@ void Deoptimizer::EntryGenerator::Generate() {
// Remove the bailout id, eventually return address, and the saved registers
// from the stack.
- if (type() == EAGER) {
+ if (type() == EAGER || type() == OSR) {
__ add(sp, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
} else {
__ add(sp, sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
@@ -450,11 +637,6 @@ void Deoptimizer::EntryGenerator::Generate() {
__ cmp(r0, r1);
__ b(lt, &outer_push_loop);
- // In case of OSR, we have to restore the XMM registers.
- if (type() == OSR) {
- UNIMPLEMENTED();
- }
-
// Push state, pc, and continuation from the last output frame.
if (type() != OSR) {
__ ldr(r6, MemOperand(r2, FrameDescription::state_offset()));
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index ff446c5e4b..f04a00e052 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -45,6 +45,67 @@ namespace internal {
#define __ ACCESS_MASM(masm_)
+
+// A patch site is a location in the code which it is possible to patch. This
+// class has a number of methods to emit the code which is patchable and the
+// method EmitPatchInfo to record a marker back to the patchable code. This
+// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
+// immediate value is used) is the delta from the pc to the first instruction of
+// the patchable code.
+class JumpPatchSite BASE_EMBEDDED {
+ public:
+ explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
+#ifdef DEBUG
+ info_emitted_ = false;
+#endif
+ }
+
+ ~JumpPatchSite() {
+ ASSERT(patch_site_.is_bound() == info_emitted_);
+ }
+
+ // When initially emitting this ensure that a jump is always generated to skip
+ // the inlined smi code.
+ void EmitJumpIfNotSmi(Register reg, Label* target) {
+ ASSERT(!patch_site_.is_bound() && !info_emitted_);
+ __ bind(&patch_site_);
+ __ cmp(reg, Operand(reg));
+ // Don't use b(al, ...) as that might emit the constant pool right after the
+ // branch. After patching when the branch is no longer unconditional
+ // execution can continue into the constant pool.
+ __ b(eq, target); // Always taken before patched.
+ }
+
+ // When initially emitting this ensure that a jump is never generated to skip
+ // the inlined smi code.
+ void EmitJumpIfSmi(Register reg, Label* target) {
+ ASSERT(!patch_site_.is_bound() && !info_emitted_);
+ __ bind(&patch_site_);
+ __ cmp(reg, Operand(reg));
+ __ b(ne, target); // Never taken before patched.
+ }
+
+ void EmitPatchInfo() {
+ int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
+ Register reg;
+ reg.set_code(delta_to_patch_site / kOff12Mask);
+ __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
+#ifdef DEBUG
+ info_emitted_ = true;
+#endif
+ }
+
+ bool is_bound() const { return patch_site_.is_bound(); }
+
+ private:
+ MacroAssembler* masm_;
+ Label patch_site_;
+#ifdef DEBUG
+ bool info_emitted_;
+#endif
+};
+
+
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
@@ -268,15 +329,10 @@ void FullCodeGenerator::EmitReturnSequence() {
}
#ifdef DEBUG
- // Check that the size of the code used for returning matches what is
- // expected by the debugger. If the sp_delts above cannot be encoded in the
- // add instruction the add will generate two instructions.
- int return_sequence_length =
- masm_->InstructionsGeneratedSince(&check_exit_codesize);
- CHECK(return_sequence_length ==
- Assembler::kJSReturnSequenceInstructions ||
- return_sequence_length ==
- Assembler::kJSReturnSequenceInstructions + 1);
+ // Check that the size of the code used for returning is large enough
+ // for the debugger's requirements.
+ ASSERT(Assembler::kJSReturnSequenceInstructions <=
+ masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
}
}
@@ -285,7 +341,17 @@ void FullCodeGenerator::EmitReturnSequence() {
FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
Token::Value op, Expression* left, Expression* right) {
ASSERT(ShouldInlineSmiCase(op));
- return kNoConstants;
+ if (op == Token::DIV || op == Token::MOD || op == Token::MUL) {
+ // We never generate inlined constant smi operations for these.
+ return kNoConstants;
+ } else if (right->IsSmiLiteral()) {
+ return kRightConstant;
+ } else if (left->IsSmiLiteral() && !Token::IsShiftOp(op)) {
+ // Don't inline shifts with constant left hand side.
+ return kLeftConstant;
+ } else {
+ return kNoConstants;
+ }
}
@@ -681,18 +747,24 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
} else if (prop != NULL) {
if (function != NULL || mode == Variable::CONST) {
// We are declaring a function or constant that rewrites to a
- // property. Use (keyed) IC to set the initial value.
- VisitForStackValue(prop->obj());
+ // property. Use (keyed) IC to set the initial value. We
+ // cannot visit the rewrite because it's shared and we risk
+ // recording duplicate AST IDs for bailouts from optimized code.
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
if (function != NULL) {
- VisitForStackValue(prop->key());
+ __ push(r0);
VisitForAccumulatorValue(function);
- __ pop(r1); // Key.
+ __ pop(r2);
} else {
- VisitForAccumulatorValue(prop->key());
- __ mov(r1, result_register()); // Key.
- __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
+ __ mov(r2, r0);
+ __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
}
- __ pop(r2); // Receiver.
+ ASSERT(prop->key()->AsLiteral() != NULL &&
+ prop->key()->AsLiteral()->handle()->IsSmi());
+ __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -752,24 +824,24 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Perform the comparison as if via '==='.
__ ldr(r1, MemOperand(sp, 0)); // Switch value.
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
+ JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
Label slow_case;
__ orr(r2, r1, r0);
- __ tst(r2, Operand(kSmiTagMask));
- __ b(ne, &slow_case);
+ patch_site.EmitJumpIfNotSmi(r2, &slow_case);
+
__ cmp(r1, r0);
__ b(ne, &next_test);
__ Drop(1); // Switch value is no longer needed.
__ b(clause->body_target()->entry_label());
- __ bind(&slow_case);
+ __ bind(&slow_case);
}
- CompareFlags flags = inline_smi_code
- ? NO_SMI_COMPARE_IN_STUB
- : NO_COMPARE_FLAGS;
- CompareStub stub(eq, true, flags, r1, r0);
- __ CallStub(&stub);
- __ cmp(r0, Operand(0, RelocInfo::NONE));
+ // Record position before stub call for type feedback.
+ SetSourcePosition(clause->position());
+ Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
+ EmitCallIC(ic, &patch_site);
+ __ cmp(r0, Operand(0));
__ b(ne, &next_test);
__ Drop(1); // Switch value is no longer needed.
__ b(clause->body_target()->entry_label());
@@ -1536,34 +1608,316 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
}
+void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
+ OverwriteMode mode,
+ bool left_is_constant_smi,
+ Smi* value) {
+ Label call_stub, done;
+ // Optimistically add smi value with unknown object. If result overflows or is
+ // not a smi then we had either a smi overflow or added a smi with a tagged
+ // pointer.
+ __ mov(r1, Operand(value));
+ __ add(r2, r0, r1, SetCC);
+ __ b(vs, &call_stub);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJumpIfNotSmi(r2, &call_stub);
+ __ mov(r0, r2);
+ __ b(&done);
+
+ // Call the shared stub.
+ __ bind(&call_stub);
+ if (!left_is_constant_smi) {
+ __ Swap(r0, r1, r2);
+ }
+ TypeRecordingBinaryOpStub stub(Token::ADD, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
+
+ __ bind(&done);
+ context()->Plug(r0);
+}
+
+
+void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
+ OverwriteMode mode,
+ bool left_is_constant_smi,
+ Smi* value) {
+ Label call_stub, done;
+ // Optimistically subtract smi value and unknown object. If result overflows
+ // or is not a smi then we had either a smi overflow or subtraction between a
+ // smi and a tagged pointer.
+ __ mov(r1, Operand(value));
+ if (left_is_constant_smi) {
+ __ sub(r2, r1, r0, SetCC);
+ } else {
+ __ sub(r2, r0, r1, SetCC);
+ }
+ __ b(vs, &call_stub);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJumpIfNotSmi(r2, &call_stub);
+ __ mov(r0, r2);
+ __ b(&done);
+
+ // Call the shared stub.
+ __ bind(&call_stub);
+ if (!left_is_constant_smi) {
+ __ Swap(r0, r1, r2);
+ }
+ TypeRecordingBinaryOpStub stub(Token::SUB, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
+
+ __ bind(&done);
+ context()->Plug(r0);
+}
+
+
+void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
+ Token::Value op,
+ OverwriteMode mode,
+ Smi* value) {
+ Label call_stub, smi_case, done;
+ int shift_value = value->value() & 0x1f;
+
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJumpIfSmi(r0, &smi_case);
+
+ // Call stub.
+ __ bind(&call_stub);
+ __ mov(r1, r0);
+ __ mov(r0, Operand(value));
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
+ __ b(&done);
+
+ // Smi case.
+ __ bind(&smi_case);
+ switch (op) {
+ case Token::SHL:
+ if (shift_value != 0) {
+ __ mov(r1, r0);
+ if (shift_value > 1) {
+ __ mov(r1, Operand(r1, LSL, shift_value - 1));
+ }
+ // Convert int result to smi, checking that it is in int range.
+ __ SmiTag(r1, SetCC);
+ __ b(vs, &call_stub);
+ __ mov(r0, r1); // Put result back into r0.
+ }
+ break;
+ case Token::SAR:
+ if (shift_value != 0) {
+ __ mov(r0, Operand(r0, ASR, shift_value));
+ __ bic(r0, r0, Operand(kSmiTagMask));
+ }
+ break;
+ case Token::SHR:
+ // SHR must return a positive value. When shifting by 0 or 1 we need to
+ // check that smi tagging the result will not create a negative value.
+ if (shift_value < 2) {
+ __ mov(r2, Operand(shift_value));
+ __ SmiUntag(r1, r0);
+ if (shift_value != 0) {
+ __ mov(r1, Operand(r1, LSR, shift_value));
+ }
+ __ tst(r1, Operand(0xc0000000));
+ __ b(ne, &call_stub);
+ __ SmiTag(r0, r1); // result in r0.
+ } else {
+ __ SmiUntag(r0);
+ __ mov(r0, Operand(r0, LSR, shift_value));
+ __ SmiTag(r0);
+ }
+ break;
+ default:
+ UNREACHABLE();
+ }
+
+ __ bind(&done);
+ context()->Plug(r0);
+}
+
+
+void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
+ Token::Value op,
+ OverwriteMode mode,
+ Smi* value) {
+ Label smi_case, done;
+
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJumpIfSmi(r0, &smi_case);
+
+ // The order of the arguments does not matter for bit-ops with a
+ // constant operand.
+ __ mov(r1, Operand(value));
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
+ __ jmp(&done);
+
+ // Smi case.
+ __ bind(&smi_case);
+ __ mov(r1, Operand(value));
+ switch (op) {
+ case Token::BIT_OR:
+ __ orr(r0, r0, Operand(r1));
+ break;
+ case Token::BIT_XOR:
+ __ eor(r0, r0, Operand(r1));
+ break;
+ case Token::BIT_AND:
+ __ and_(r0, r0, Operand(r1));
+ break;
+ default:
+ UNREACHABLE();
+ }
+
+ __ bind(&done);
+ context()->Plug(r0);
+}
+
+
+void FullCodeGenerator::EmitConstantSmiBinaryOp(Expression* expr,
+ Token::Value op,
+ OverwriteMode mode,
+ bool left_is_constant_smi,
+ Smi* value) {
+ switch (op) {
+ case Token::BIT_OR:
+ case Token::BIT_XOR:
+ case Token::BIT_AND:
+ EmitConstantSmiBitOp(expr, op, mode, value);
+ break;
+ case Token::SHL:
+ case Token::SAR:
+ case Token::SHR:
+ ASSERT(!left_is_constant_smi);
+ EmitConstantSmiShiftOp(expr, op, mode, value);
+ break;
+ case Token::ADD:
+ EmitConstantSmiAdd(expr, mode, left_is_constant_smi, value);
+ break;
+ case Token::SUB:
+ EmitConstantSmiSub(expr, mode, left_is_constant_smi, value);
+ break;
+ default:
+ UNREACHABLE();
+ }
+}
+
+
void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
- Expression* left,
- Expression* right,
+ Expression* left_expr,
+ Expression* right_expr,
ConstantOperand constant) {
- ASSERT(constant == kNoConstants); // Only handled case.
- EmitBinaryOp(op, mode);
+ if (constant == kRightConstant) {
+ Smi* value = Smi::cast(*right_expr->AsLiteral()->handle());
+ EmitConstantSmiBinaryOp(expr, op, mode, false, value);
+ return;
+ } else if (constant == kLeftConstant) {
+ Smi* value = Smi::cast(*left_expr->AsLiteral()->handle());
+ EmitConstantSmiBinaryOp(expr, op, mode, true, value);
+ return;
+ }
+
+ Label done, smi_case, stub_call;
+
+ Register scratch1 = r2;
+ Register scratch2 = r3;
+
+ // Get the arguments.
+ Register left = r1;
+ Register right = r0;
+ __ pop(left);
+
+ // Perform combined smi check on both operands.
+ __ orr(scratch1, left, Operand(right));
+ STATIC_ASSERT(kSmiTag == 0);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJumpIfSmi(scratch1, &smi_case);
+
+ __ bind(&stub_call);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
+ __ jmp(&done);
+
+ __ bind(&smi_case);
+ // Smi case. This code works the same way as the smi-smi case in the type
+ // recording binary operation stub, see
+ // TypeRecordingBinaryOpStub::GenerateSmiSmiOperation for comments.
+ switch (op) {
+ case Token::SAR:
+ __ b(&stub_call);
+ __ GetLeastBitsFromSmi(scratch1, right, 5);
+ __ mov(right, Operand(left, ASR, scratch1));
+ __ bic(right, right, Operand(kSmiTagMask));
+ break;
+ case Token::SHL: {
+ __ b(&stub_call);
+ __ SmiUntag(scratch1, left);
+ __ GetLeastBitsFromSmi(scratch2, right, 5);
+ __ mov(scratch1, Operand(scratch1, LSL, scratch2));
+ __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
+ __ b(mi, &stub_call);
+ __ SmiTag(right, scratch1);
+ break;
+ }
+ case Token::SHR: {
+ __ b(&stub_call);
+ __ SmiUntag(scratch1, left);
+ __ GetLeastBitsFromSmi(scratch2, right, 5);
+ __ mov(scratch1, Operand(scratch1, LSR, scratch2));
+ __ tst(scratch1, Operand(0xc0000000));
+ __ b(ne, &stub_call);
+ __ SmiTag(right, scratch1);
+ break;
+ }
+ case Token::ADD:
+ __ add(scratch1, left, Operand(right), SetCC);
+ __ b(vs, &stub_call);
+ __ mov(right, scratch1);
+ break;
+ case Token::SUB:
+ __ sub(scratch1, left, Operand(right), SetCC);
+ __ b(vs, &stub_call);
+ __ mov(right, scratch1);
+ break;
+ case Token::MUL: {
+ __ SmiUntag(ip, right);
+ __ smull(scratch1, scratch2, left, ip);
+ __ mov(ip, Operand(scratch1, ASR, 31));
+ __ cmp(ip, Operand(scratch2));
+ __ b(ne, &stub_call);
+ __ tst(scratch1, Operand(scratch1));
+ __ mov(right, Operand(scratch1), LeaveCC, ne);
+ __ b(ne, &done);
+ __ add(scratch2, right, Operand(left), SetCC);
+ __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
+ __ b(mi, &stub_call);
+ break;
+ }
+ case Token::BIT_OR:
+ __ orr(right, left, Operand(right));
+ break;
+ case Token::BIT_AND:
+ __ and_(right, left, Operand(right));
+ break;
+ case Token::BIT_XOR:
+ __ eor(right, left, Operand(right));
+ break;
+ default:
+ UNREACHABLE();
+ }
+
+ __ bind(&done);
+ context()->Plug(r0);
}
void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) {
__ pop(r1);
- if (op == Token::ADD ||
- op == Token::SUB ||
- op == Token::MUL ||
- op == Token::DIV ||
- op == Token::MOD ||
- op == Token::BIT_OR ||
- op == Token::BIT_AND ||
- op == Token::BIT_XOR) {
- TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
- } else {
- GenericBinaryOpStub stub(op, mode, r1, r0);
- __ CallStub(&stub);
- }
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), NULL);
context()->Plug(r0);
}
@@ -1606,10 +1960,20 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
case KEYED_PROPERTY: {
__ push(r0); // Preserve value.
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
- __ mov(r1, r0);
- __ pop(r2);
+ if (prop->is_synthetic()) {
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ ASSERT(prop->key()->AsLiteral() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
+ __ mov(r2, r0);
+ __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ __ mov(r1, r0);
+ __ pop(r2);
+ }
__ pop(r0); // Restore value.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1635,8 +1999,10 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// r2, and the global object in r1.
__ mov(r2, Operand(var->name()));
__ ldr(r1, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic(Builtins::builtin(is_strict()
+ ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
// Perform the assignment for non-const variables and for initialization
@@ -2991,39 +3357,50 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
Property* prop = expr->expression()->AsProperty();
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
- if (prop == NULL && var == NULL) {
- // Result of deleting non-property, non-variable reference is true.
- // The subexpression may have side effects.
- VisitForEffect(expr->expression());
- context()->Plug(true);
- } else if (var != NULL &&
- !var->is_global() &&
- var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
- // Result of deleting non-global, non-dynamic variables is false.
- // The subexpression does not have side effects.
- context()->Plug(false);
- } else {
- // Property or variable reference. Call the delete builtin with
- // object and property name as arguments.
- if (prop != NULL) {
+
+ if (prop != NULL) {
+ if (prop->is_synthetic()) {
+ // Result of deleting parameters is false, even when they rewrite
+ // to accesses on the arguments object.
+ context()->Plug(false);
+ } else {
VisitForStackValue(prop->obj());
VisitForStackValue(prop->key());
+ __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+ __ push(r1);
__ InvokeBuiltin(Builtins::DELETE, CALL_JS);
- } else if (var->is_global()) {
- __ ldr(r1, GlobalObjectOperand());
- __ mov(r0, Operand(var->name()));
- __ Push(r1, r0);
+ context()->Plug(r0);
+ }
+ } else if (var != NULL) {
+ // Delete of an unqualified identifier is disallowed in strict mode
+ // so this code can only be reached in non-strict mode.
+ ASSERT(strict_mode_flag() == kNonStrictMode);
+ if (var->is_global()) {
+ __ ldr(r2, GlobalObjectOperand());
+ __ mov(r1, Operand(var->name()));
+ __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
+ __ Push(r2, r1, r0);
__ InvokeBuiltin(Builtins::DELETE, CALL_JS);
+ context()->Plug(r0);
+ } else if (var->AsSlot() != NULL &&
+ var->AsSlot()->type() != Slot::LOOKUP) {
+ // Result of deleting non-global, non-dynamic variables is false.
+ // The subexpression does not have side effects.
+ context()->Plug(false);
} else {
- // Non-global variable. Call the runtime to delete from the
+ // Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
__ push(context_register());
__ mov(r2, Operand(var->name()));
__ push(r2);
__ CallRuntime(Runtime::kDeleteContextSlot, 2);
+ context()->Plug(r0);
}
- context()->Plug(r0);
+ } else {
+ // Result of deleting non-property, non-variable reference is true.
+ // The subexpression may have side effects.
+ VisitForEffect(expr->expression());
+ context()->Plug(true);
}
break;
}
@@ -3214,13 +3591,16 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Inline smi case if we are in a loop.
Label stub_call, done;
+ JumpPatchSite patch_site(masm_);
+
int count_value = expr->op() == Token::INC ? 1 : -1;
if (ShouldInlineSmiCase(expr->op())) {
__ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
__ b(vs, &stub_call);
// We could eliminate this smi check if we split the code at
// the first smi check before calling ToNumber.
- __ JumpIfSmi(r0, &done);
+ patch_site.EmitJumpIfSmi(r0, &done);
+
__ bind(&stub_call);
// Call stub. Undo operation first.
__ sub(r0, r0, Operand(Smi::FromInt(count_value)));
@@ -3230,8 +3610,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Record position before stub call.
SetSourcePosition(expr->position());
- GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE, r1, r0);
- __ CallStub(&stub);
+ TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
// Store the value returned in r0.
@@ -3510,21 +3890,22 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
}
bool inline_smi_code = ShouldInlineSmiCase(op);
+ JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
Label slow_case;
__ orr(r2, r0, Operand(r1));
- __ JumpIfNotSmi(r2, &slow_case);
+ patch_site.EmitJumpIfNotSmi(r2, &slow_case);
__ cmp(r1, r0);
Split(cond, if_true, if_false, NULL);
__ bind(&slow_case);
}
- CompareFlags flags = inline_smi_code
- ? NO_SMI_COMPARE_IN_STUB
- : NO_COMPARE_FLAGS;
- CompareStub stub(cond, strict, flags, r1, r0);
- __ CallStub(&stub);
+
+ // Record position and call the compare IC.
+ SetSourcePosition(expr->position());
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ EmitCallIC(ic, &patch_site);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- __ cmp(r0, Operand(0, RelocInfo::NONE));
+ __ cmp(r0, Operand(0));
Split(cond, if_true, if_false, fall_through);
}
}
@@ -3591,6 +3972,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
}
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ __ Call(ic, RelocInfo::CODE_TARGET);
+ if (patch_site != NULL && patch_site->is_bound()) {
+ patch_site->EmitPatchInfo();
+ } else {
+ __ nop(); // Signals no inlined code.
+ }
+}
+
+
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
__ str(value, MemOperand(fp, frame_offset));
diff --git a/deps/v8/src/arm/ic-arm.cc b/deps/v8/src/arm/ic-arm.cc
index 1aa031d39b..6c7aa0643a 100644
--- a/deps/v8/src/arm/ic-arm.cc
+++ b/deps/v8/src/arm/ic-arm.cc
@@ -115,6 +115,9 @@ static void GenerateStringDictionaryProbes(MacroAssembler* masm,
Register name,
Register scratch1,
Register scratch2) {
+ // Assert that name contains a string.
+ if (FLAG_debug_code) __ AbortIfNotString(name);
+
// Compute the capacity mask.
const int kCapacityOffset = StringDictionary::kHeaderSize +
StringDictionary::kCapacityIndex * kPointerSize;
@@ -843,7 +846,14 @@ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// -- lr : return address
// -----------------------------------
+ // Check if the name is a string.
+ Label miss;
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(eq, &miss);
+ __ IsObjectJSStringType(r2, r0, &miss);
+
GenerateCallNormal(masm, argc);
+ __ bind(&miss);
GenerateMiss(masm, argc);
}
@@ -1465,24 +1475,20 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// Check whether the elements is a pixel array.
// r4: elements map.
__ bind(&check_pixel_array);
- __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
- __ cmp(r4, ip);
- __ b(ne, &slow);
- // Check that the value is a smi. If a conversion is needed call into the
- // runtime to convert and clamp.
- __ JumpIfNotSmi(value, &slow);
- __ mov(r4, Operand(key, ASR, kSmiTagSize)); // Untag the key.
- __ ldr(ip, FieldMemOperand(elements, PixelArray::kLengthOffset));
- __ cmp(r4, Operand(ip));
- __ b(hs, &slow);
- __ mov(r5, Operand(value, ASR, kSmiTagSize)); // Untag the value.
- __ Usat(r5, 8, Operand(r5)); // Clamp the value to [0..255].
-
- // Get the pointer to the external array. This clobbers elements.
- __ ldr(elements,
- FieldMemOperand(elements, PixelArray::kExternalPointerOffset));
- __ strb(r5, MemOperand(elements, r4)); // Elements is now external array.
- __ Ret();
+ GenerateFastPixelArrayStore(masm,
+ r2,
+ r1,
+ r0,
+ elements,
+ r4,
+ r5,
+ r6,
+ false,
+ false,
+ NULL,
+ &slow,
+ &slow,
+ &slow);
// Extra capacity case: Check if there is extra capacity to
// perform the store and update the length. Used for adding one
@@ -1533,7 +1539,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
}
-void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
+void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
+ Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : receiver
@@ -1544,7 +1551,8 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
// Get the receiver from the stack and probe the stub cache.
Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
NOT_IN_LOOP,
- MONOMORPHIC);
+ MONOMORPHIC,
+ extra_ic_state);
StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
// Cache miss: Jump to runtime.
@@ -1700,11 +1708,78 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
Token::Name(op_));
}
#endif
+
+ // Activate inlined smi code.
+ if (previous_state == UNINITIALIZED) {
+ PatchInlinedSmiCode(address());
+ }
}
void PatchInlinedSmiCode(Address address) {
- // Currently there is no smi inlining in the ARM full code generator.
+ Address cmp_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+
+ // If the instruction following the call is not a cmp rx, #yyy, nothing
+ // was inlined.
+ Instr instr = Assembler::instr_at(cmp_instruction_address);
+ if (!Assembler::IsCmpImmediate(instr)) {
+ return;
+ }
+
+ // The delta to the start of the map check instruction and the
+ // condition code uses at the patched jump.
+ int delta = Assembler::GetCmpImmediateRawImmediate(instr);
+ delta +=
+ Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
+ // If the delta is 0 the instruction is cmp r0, #0 which also signals that
+ // nothing was inlined.
+ if (delta == 0) {
+ return;
+ }
+
+#ifdef DEBUG
+ if (FLAG_trace_ic) {
+ PrintF("[ patching ic at %p, cmp=%p, delta=%d\n",
+ address, cmp_instruction_address, delta);
+ }
+#endif
+
+ Address patch_address =
+ cmp_instruction_address - delta * Instruction::kInstrSize;
+ Instr instr_at_patch = Assembler::instr_at(patch_address);
+ Instr branch_instr =
+ Assembler::instr_at(patch_address + Instruction::kInstrSize);
+ ASSERT(Assembler::IsCmpRegister(instr_at_patch));
+ ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
+ Assembler::GetRm(instr_at_patch).code());
+ ASSERT(Assembler::IsBranch(branch_instr));
+ if (Assembler::GetCondition(branch_instr) == eq) {
+ // This is patching a "jump if not smi" site to be active.
+ // Changing
+ // cmp rx, rx
+ // b eq, <target>
+ // to
+ // tst rx, #kSmiTagMask
+ // b ne, <target>
+ CodePatcher patcher(patch_address, 2);
+ Register reg = Assembler::GetRn(instr_at_patch);
+ patcher.masm()->tst(reg, Operand(kSmiTagMask));
+ patcher.EmitCondition(ne);
+ } else {
+ ASSERT(Assembler::GetCondition(branch_instr) == ne);
+ // This is patching a "jump if smi" site to be active.
+ // Changing
+ // cmp rx, rx
+ // b ne, <target>
+ // to
+ // tst rx, #kSmiTagMask
+ // b eq, <target>
+ CodePatcher patcher(patch_address, 2);
+ Register reg = Assembler::GetRn(instr_at_patch);
+ patcher.masm()->tst(reg, Operand(kSmiTagMask));
+ patcher.EmitCondition(eq);
+ }
}
diff --git a/deps/v8/src/arm/lithium-arm.cc b/deps/v8/src/arm/lithium-arm.cc
index f672d4908e..903f77bbf0 100644
--- a/deps/v8/src/arm/lithium-arm.cc
+++ b/deps/v8/src/arm/lithium-arm.cc
@@ -62,15 +62,13 @@ void LInstruction::VerifyCall() {
// Call instructions can use only fixed registers as
// temporaries and outputs because all registers
// are blocked by the calling convention.
- // Inputs can use either fixed register or have a short lifetime (be
- // used at start of the instruction).
+ // Inputs must use a fixed register.
ASSERT(Output() == NULL ||
LUnallocated::cast(Output())->HasFixedPolicy() ||
!LUnallocated::cast(Output())->HasRegisterPolicy());
for (UseIterator it(this); it.HasNext(); it.Advance()) {
LOperand* operand = it.Next();
ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
- LUnallocated::cast(operand)->IsUsedAtStart() ||
!LUnallocated::cast(operand)->HasRegisterPolicy());
}
for (TempIterator it(this); it.HasNext(); it.Advance()) {
@@ -186,6 +184,9 @@ const char* LArithmeticT::Mnemonic() const {
case Token::BIT_AND: return "bit-and-t";
case Token::BIT_OR: return "bit-or-t";
case Token::BIT_XOR: return "bit-xor-t";
+ case Token::SHL: return "shl-t";
+ case Token::SAR: return "sar-t";
+ case Token::SHR: return "shr-t";
default:
UNREACHABLE();
return NULL;
@@ -802,6 +803,16 @@ LInstruction* LChunkBuilder::DoBit(Token::Value op,
LInstruction* LChunkBuilder::DoShift(Token::Value op,
HBitwiseBinaryOperation* instr) {
+ if (instr->representation().IsTagged()) {
+ ASSERT(instr->left()->representation().IsTagged());
+ ASSERT(instr->right()->representation().IsTagged());
+
+ LOperand* left = UseFixed(instr->left(), r1);
+ LOperand* right = UseFixed(instr->right(), r0);
+ LArithmeticT* result = new LArithmeticT(op, left, right);
+ return MarkAsCall(DefineFixed(result, r0), instr);
+ }
+
ASSERT(instr->representation().IsInteger32());
ASSERT(instr->OperandAt(0)->representation().IsInteger32());
ASSERT(instr->OperandAt(1)->representation().IsInteger32());
@@ -1021,7 +1032,7 @@ LInstruction* LChunkBuilder::DoTest(HTest* instr) {
ASSERT(left->representation().IsInteger32());
ASSERT(right->representation().IsInteger32());
return new LCmpIDAndBranch(UseRegisterAtStart(left),
- UseOrConstantAtStart(right));
+ UseRegisterAtStart(right));
} else if (r.IsDouble()) {
ASSERT(left->representation().IsDouble());
ASSERT(right->representation().IsDouble());
@@ -1077,6 +1088,8 @@ LInstruction* LChunkBuilder::DoTest(HTest* instr) {
} else if (v->IsTypeofIs()) {
HTypeofIs* typeof_is = HTypeofIs::cast(v);
return new LTypeofIsAndBranch(UseTempRegister(typeof_is->value()));
+ } else if (v->IsIsConstructCall()) {
+ return new LIsConstructCallAndBranch(TempRegister());
} else {
if (v->IsConstant()) {
if (HConstant::cast(v)->handle()->IsTrue()) {
@@ -1131,8 +1144,8 @@ LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* function = UseFixed(instr->function(), r1);
LOperand* receiver = UseFixed(instr->receiver(), r0);
- LOperand* length = UseRegisterAtStart(instr->length());
- LOperand* elements = UseRegisterAtStart(instr->elements());
+ LOperand* length = UseFixed(instr->length(), r2);
+ LOperand* elements = UseFixed(instr->elements(), r3);
LApplyArguments* result = new LApplyArguments(function,
receiver,
length,
@@ -1304,10 +1317,10 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
// the generated code, which requires registers r0
// and r1 to be used. We should remove that
// when we provide a native implementation.
- LOperand* value = UseFixed(instr->left(), r0);
+ LOperand* dividend = UseFixed(instr->left(), r0);
LOperand* divisor = UseFixed(instr->right(), r1);
return AssignEnvironment(AssignPointerMap(
- DefineFixed(new LDivI(value, divisor), r0)));
+ DefineFixed(new LDivI(dividend, divisor), r0)));
} else {
return DoArithmeticT(Token::DIV, instr);
}
@@ -1417,7 +1430,7 @@ LInstruction* LChunkBuilder::DoCompare(HCompare* instr) {
ASSERT(instr->left()->representation().IsInteger32());
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->left());
- LOperand* right = UseOrConstantAtStart(instr->right());
+ LOperand* right = UseRegisterAtStart(instr->right());
return DefineAsRegister(new LCmpID(left, right));
} else if (r.IsDouble()) {
ASSERT(instr->left()->representation().IsDouble());
@@ -1478,6 +1491,15 @@ LInstruction* LChunkBuilder::DoHasInstanceType(HHasInstanceType* instr) {
}
+LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
+ HGetCachedArrayIndex* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegister(instr->value());
+
+ return DefineAsRegister(new LGetCachedArrayIndex(value));
+}
+
+
LInstruction* LChunkBuilder::DoHasCachedArrayIndex(
HHasCachedArrayIndex* instr) {
ASSERT(instr->value()->representation().IsTagged());
@@ -1500,6 +1522,12 @@ LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
}
+LInstruction* LChunkBuilder::DoPixelArrayLength(HPixelArrayLength* instr) {
+ LOperand* array = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LPixelArrayLength(array));
+}
+
+
LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
return DefineAsRegister(new LFixedArrayLength(array));
@@ -1642,13 +1670,11 @@ LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- int32_t value = instr->Integer32Value();
- return DefineAsRegister(new LConstantI(value));
+ return DefineAsRegister(new LConstantI);
} else if (r.IsDouble()) {
- double value = instr->DoubleValue();
- return DefineAsRegister(new LConstantD(value));
+ return DefineAsRegister(new LConstantD);
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT(instr->handle()));
+ return DefineAsRegister(new LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1716,7 +1742,14 @@ LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineSameAsFirst(new LLoadElements(input));
+ return DefineAsRegister(new LLoadElements(input));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadPixelArrayExternalPointer(
+ HLoadPixelArrayExternalPointer* instr) {
+ LOperand* input = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LLoadPixelArrayExternalPointer(input));
}
@@ -1731,6 +1764,19 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
}
+LInstruction* LChunkBuilder::DoLoadPixelArrayElement(
+ HLoadPixelArrayElement* instr) {
+ ASSERT(instr->representation().IsInteger32());
+ ASSERT(instr->key()->representation().IsInteger32());
+ LOperand* external_pointer =
+ UseRegisterAtStart(instr->external_pointer());
+ LOperand* key = UseRegisterAtStart(instr->key());
+ LLoadPixelArrayElement* result =
+ new LLoadPixelArrayElement(external_pointer, key);
+ return DefineAsRegister(result);
+}
+
+
LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), r1);
LOperand* key = UseFixed(instr->key(), r0);
@@ -1832,8 +1878,8 @@ LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
- LOperand* object = UseRegisterAtStart(instr->object());
- LOperand* key = UseRegisterAtStart(instr->key());
+ LOperand* object = UseFixed(instr->object(), r0);
+ LOperand* key = UseFixed(instr->key(), r1);
LDeleteProperty* result = new LDeleteProperty(object, key);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1881,7 +1927,7 @@ LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
- LTypeof* result = new LTypeof(UseRegisterAtStart(instr->value()));
+ LTypeof* result = new LTypeof(UseFixed(instr->value(), r0));
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1890,6 +1936,12 @@ LInstruction* LChunkBuilder::DoTypeofIs(HTypeofIs* instr) {
return DefineSameAsFirst(new LTypeofIs(UseRegister(instr->value())));
}
+
+LInstruction* LChunkBuilder::DoIsConstructCall(HIsConstructCall* instr) {
+ return DefineAsRegister(new LIsConstructCall());
+}
+
+
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
HEnvironment* env = current_block_->last_environment();
ASSERT(env != NULL);
diff --git a/deps/v8/src/arm/lithium-arm.h b/deps/v8/src/arm/lithium-arm.h
index a076c80c75..57338f16d5 100644
--- a/deps/v8/src/arm/lithium-arm.h
+++ b/deps/v8/src/arm/lithium-arm.h
@@ -41,7 +41,6 @@ class LCodeGen;
#define LITHIUM_ALL_INSTRUCTION_LIST(V) \
V(ControlInstruction) \
- V(Constant) \
V(Call) \
V(StoreKeyed) \
V(StoreNamed) \
@@ -95,6 +94,7 @@ class LCodeGen;
V(FixedArrayLength) \
V(FunctionLiteral) \
V(Gap) \
+ V(GetCachedArrayIndex) \
V(GlobalObject) \
V(GlobalReceiver) \
V(Goto) \
@@ -123,6 +123,8 @@ class LCodeGen;
V(LoadKeyedGeneric) \
V(LoadNamedField) \
V(LoadNamedGeneric) \
+ V(LoadPixelArrayElement) \
+ V(LoadPixelArrayExternalPointer) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
@@ -132,6 +134,7 @@ class LCodeGen;
V(OsrEntry) \
V(OuterContext) \
V(Parameter) \
+ V(PixelArrayLength) \
V(PushArgument) \
V(RegExpLiteral) \
V(Return) \
@@ -153,6 +156,8 @@ class LCodeGen;
V(Typeof) \
V(TypeofIs) \
V(TypeofIsAndBranch) \
+ V(IsConstructCall) \
+ V(IsConstructCallAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
V(ValueOf)
@@ -735,6 +740,17 @@ class LHasCachedArrayIndex: public LTemplateInstruction<1, 1, 0> {
};
+class LGetCachedArrayIndex: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LGetCachedArrayIndex(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(GetCachedArrayIndex, "get-cached-array-index")
+ DECLARE_HYDROGEN_ACCESSOR(GetCachedArrayIndex)
+};
+
+
class LHasCachedArrayIndexAndBranch: public LControlInstruction<1, 0> {
public:
explicit LHasCachedArrayIndexAndBranch(LOperand* value) {
@@ -903,44 +919,30 @@ class LSubI: public LTemplateInstruction<1, 2, 0> {
};
-class LConstant: public LTemplateInstruction<1, 0, 0> {
- DECLARE_INSTRUCTION(Constant)
-};
-
-
-class LConstantI: public LConstant {
+class LConstantI: public LTemplateInstruction<1, 0, 0> {
public:
- explicit LConstantI(int32_t value) : value_(value) { }
- int32_t value() const { return value_; }
-
DECLARE_CONCRETE_INSTRUCTION(ConstantI, "constant-i")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- int32_t value_;
+ int32_t value() const { return hydrogen()->Integer32Value(); }
};
-class LConstantD: public LConstant {
+class LConstantD: public LTemplateInstruction<1, 0, 0> {
public:
- explicit LConstantD(double value) : value_(value) { }
- double value() const { return value_; }
-
DECLARE_CONCRETE_INSTRUCTION(ConstantD, "constant-d")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- double value_;
+ double value() const { return hydrogen()->DoubleValue(); }
};
-class LConstantT: public LConstant {
+class LConstantT: public LTemplateInstruction<1, 0, 0> {
public:
- explicit LConstantT(Handle<Object> value) : value_(value) { }
- Handle<Object> value() const { return value_; }
-
DECLARE_CONCRETE_INSTRUCTION(ConstantT, "constant-t")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- Handle<Object> value_;
+ Handle<Object> value() const { return hydrogen()->handle(); }
};
@@ -990,6 +992,17 @@ class LJSArrayLength: public LTemplateInstruction<1, 1, 0> {
};
+class LPixelArrayLength: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LPixelArrayLength(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(PixelArrayLength, "pixel-array-length")
+ DECLARE_HYDROGEN_ACCESSOR(PixelArrayLength)
+};
+
+
class LFixedArrayLength: public LTemplateInstruction<1, 1, 0> {
public:
explicit LFixedArrayLength(LOperand* value) {
@@ -1139,6 +1152,17 @@ class LLoadElements: public LTemplateInstruction<1, 1, 0> {
};
+class LLoadPixelArrayExternalPointer: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LLoadPixelArrayExternalPointer(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayExternalPointer,
+ "load-pixel-array-external-pointer")
+};
+
+
class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
public:
LLoadKeyedFastElement(LOperand* elements, LOperand* key) {
@@ -1154,6 +1178,22 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
};
+class LLoadPixelArrayElement: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LLoadPixelArrayElement(LOperand* external_pointer, LOperand* key) {
+ inputs_[0] = external_pointer;
+ inputs_[1] = key;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayElement,
+ "load-pixel-array-element")
+ DECLARE_HYDROGEN_ACCESSOR(LoadPixelArrayElement)
+
+ LOperand* external_pointer() { return inputs_[0]; }
+ LOperand* key() { return inputs_[1]; }
+};
+
+
class LLoadKeyedGeneric: public LTemplateInstruction<1, 2, 0> {
public:
LLoadKeyedGeneric(LOperand* obj, LOperand* key) {
@@ -1716,6 +1756,24 @@ class LTypeofIsAndBranch: public LControlInstruction<1, 0> {
};
+class LIsConstructCall: public LTemplateInstruction<1, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(IsConstructCall, "is-construct-call")
+ DECLARE_HYDROGEN_ACCESSOR(IsConstructCall)
+};
+
+
+class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
+ public:
+ explicit LIsConstructCallAndBranch(LOperand* temp) {
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsConstructCallAndBranch,
+ "is-construct-call-and-branch")
+};
+
+
class LDeleteProperty: public LTemplateInstruction<1, 2, 0> {
public:
LDeleteProperty(LOperand* obj, LOperand* key) {
diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc
index 855ed461b5..1bfb3ad943 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.cc
+++ b/deps/v8/src/arm/lithium-codegen-arm.cc
@@ -647,7 +647,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
return;
}
- if (cc == kNoCondition) {
+ if (cc == al) {
if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
__ Jump(entry, RelocInfo::RUNTIME_ENTRY);
} else {
@@ -1188,8 +1188,8 @@ void LCodeGen::DoMulI(LMulI* instr) {
__ tst(left, Operand(left));
__ b(ne, &done);
if (instr->InputAt(1)->IsConstantOperand()) {
- if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) < 0) {
- DeoptimizeIf(kNoCondition, instr->environment());
+ if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) <= 0) {
+ DeoptimizeIf(al, instr->environment());
}
} else {
// Test the non-zero operand for negative sign.
@@ -1322,6 +1322,13 @@ void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
}
+void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) {
+ Register result = ToRegister(instr->result());
+ Register array = ToRegister(instr->InputAt(0));
+ __ ldr(result, FieldMemOperand(array, PixelArray::kLengthOffset));
+}
+
+
void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
Register result = ToRegister(instr->result());
Register array = ToRegister(instr->InputAt(0));
@@ -1605,7 +1612,7 @@ Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
- __ cmp(ToRegister(left), ToOperand(right));
+ __ cmp(ToRegister(left), ToRegister(right));
}
@@ -1619,8 +1626,7 @@ void LCodeGen::DoCmpID(LCmpID* instr) {
if (instr->is_double()) {
// Compare left and right as doubles and load the
// resulting flags into the normal status register.
- __ vcmp(ToDoubleRegister(left), ToDoubleRegister(right));
- __ vmrs(pc);
+ __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
// If a NaN is involved, i.e. the result is unordered (V set),
// jump to unordered to return false.
__ b(vs, &unordered);
@@ -1647,8 +1653,7 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
if (instr->is_double()) {
// Compare left and right as doubles and load the
// resulting flags into the normal status register.
- __ vcmp(ToDoubleRegister(left), ToDoubleRegister(right));
- __ vmrs(pc);
+ __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
// If a NaN is involved, i.e. the result is unordered (V set),
// jump to false block label.
__ b(vs, chunk_->GetAssemblyLabel(false_block));
@@ -1891,14 +1896,42 @@ void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
}
+void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
+ Register input = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Register scratch = scratch0();
+
+ __ ldr(scratch, FieldMemOperand(input, String::kHashFieldOffset));
+ __ IndexFromHash(scratch, result);
+}
+
+
void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
- Abort("DoHasCachedArrayIndex unimplemented.");
+ Register input = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Register scratch = scratch0();
+
+ ASSERT(instr->hydrogen()->value()->representation().IsTagged());
+ __ ldr(scratch,
+ FieldMemOperand(input, String::kHashFieldOffset));
+ __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
+ __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
+ __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}
void LCodeGen::DoHasCachedArrayIndexAndBranch(
LHasCachedArrayIndexAndBranch* instr) {
- Abort("DoHasCachedArrayIndexAndBranch unimplemented.");
+ Register input = ToRegister(instr->InputAt(0));
+ Register scratch = scratch0();
+
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ __ ldr(scratch,
+ FieldMemOperand(input, String::kHashFieldOffset));
+ __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
+ EmitBranch(true_block, false_block, eq);
}
@@ -2180,12 +2213,12 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
Handle<Code> ic = CompareIC::GetUninitialized(op);
CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ __ cmp(r0, Operand(0)); // This instruction also signals no smi code inlined.
Condition condition = ComputeCompareCondition(op);
if (op == Token::GT || op == Token::LTE) {
condition = ReverseCondition(condition);
}
- __ cmp(r0, Operand(0));
__ LoadRoot(ToRegister(instr->result()),
Heap::kTrueValueRootIndex,
condition);
@@ -2196,7 +2229,21 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
- Abort("DoCmpTAndBranch unimplemented.");
+ Token::Value op = instr->op();
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+
+ // The compare stub expects compare condition and the input operands
+ // reversed for GT and LTE.
+ Condition condition = ComputeCompareCondition(op);
+ if (op == Token::GT || op == Token::LTE) {
+ condition = ReverseCondition(condition);
+ }
+ __ cmp(r0, Operand(0));
+ EmitBranch(true_block, false_block, condition);
}
@@ -2342,17 +2389,20 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
void LCodeGen::DoLoadElements(LLoadElements* instr) {
- ASSERT(instr->result()->Equals(instr->InputAt(0)));
- Register reg = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Register input = ToRegister(instr->InputAt(0));
Register scratch = scratch0();
- __ ldr(reg, FieldMemOperand(reg, JSObject::kElementsOffset));
+ __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
if (FLAG_debug_code) {
Label done;
- __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
+ __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(scratch, ip);
__ b(eq, &done);
+ __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
+ __ cmp(scratch, ip);
+ __ b(eq, &done);
__ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
__ cmp(scratch, ip);
__ Check(eq, "Check for fast elements failed.");
@@ -2361,6 +2411,14 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
}
+void LCodeGen::DoLoadPixelArrayExternalPointer(
+ LLoadPixelArrayExternalPointer* instr) {
+ Register to_reg = ToRegister(instr->result());
+ Register from_reg = ToRegister(instr->InputAt(0));
+ __ ldr(to_reg, FieldMemOperand(from_reg, PixelArray::kExternalPointerOffset));
+}
+
+
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Register arguments = ToRegister(instr->arguments());
Register length = ToRegister(instr->length());
@@ -2397,6 +2455,16 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
}
+void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) {
+ Register external_elements = ToRegister(instr->external_pointer());
+ Register key = ToRegister(instr->key());
+ Register result = ToRegister(instr->result());
+
+ // Load the result.
+ __ ldrb(result, MemOperand(external_elements, key));
+}
+
+
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
ASSERT(ToRegister(instr->object()).is(r1));
ASSERT(ToRegister(instr->key()).is(r0));
@@ -2448,29 +2516,33 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
Register receiver = ToRegister(instr->receiver());
Register function = ToRegister(instr->function());
+ Register length = ToRegister(instr->length());
+ Register elements = ToRegister(instr->elements());
Register scratch = scratch0();
-
- ASSERT(receiver.is(r0));
- ASSERT(function.is(r1));
+ ASSERT(receiver.is(r0)); // Used for parameter count.
+ ASSERT(function.is(r1)); // Required by InvokeFunction.
ASSERT(ToRegister(instr->result()).is(r0));
- // If the receiver is null or undefined, we have to pass the
- // global object as a receiver.
- Label global_receiver, receiver_ok;
+ // If the receiver is null or undefined, we have to pass the global object
+ // as a receiver.
+ Label global_object, receiver_ok;
__ LoadRoot(scratch, Heap::kNullValueRootIndex);
__ cmp(receiver, scratch);
- __ b(eq, &global_receiver);
+ __ b(eq, &global_object);
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
__ cmp(receiver, scratch);
- __ b(ne, &receiver_ok);
- __ bind(&global_receiver);
- __ ldr(receiver, GlobalObjectOperand());
- __ bind(&receiver_ok);
+ __ b(eq, &global_object);
- Register length = ToRegister(instr->length());
- Register elements = ToRegister(instr->elements());
+ // Deoptimize if the receiver is not a JS object.
+ __ tst(receiver, Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment());
+ __ CompareObjectType(receiver, scratch, scratch, FIRST_JS_OBJECT_TYPE);
+ DeoptimizeIf(lo, instr->environment());
+ __ jmp(&receiver_ok);
- Label invoke;
+ __ bind(&global_object);
+ __ ldr(receiver, GlobalObjectOperand());
+ __ bind(&receiver_ok);
// Copy the arguments to this function possibly from the
// adaptor frame below it.
@@ -2487,7 +2559,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
// Loop through the arguments pushing them onto the execution
// stack.
- Label loop;
+ Label invoke, loop;
// length is a small non-negative integer, due to the test above.
__ tst(length, Operand(length));
__ b(eq, &invoke);
@@ -2510,6 +2582,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
// by InvokeFunction.
v8::internal::ParameterCount actual(receiver);
__ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
@@ -2899,7 +2972,9 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
// Name is always in r2.
__ mov(r2, Operand(instr->name()));
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(info_->is_strict()
+ ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
@@ -3778,6 +3853,55 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
}
+void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
+ Register result = ToRegister(instr->result());
+ Label true_label;
+ Label false_label;
+ Label done;
+
+ EmitIsConstructCall(result, scratch0());
+ __ b(eq, &true_label);
+
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
+ __ b(&done);
+
+
+ __ bind(&true_label);
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
+
+ __ bind(&done);
+}
+
+
+void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
+ Register temp1 = ToRegister(instr->TempAt(0));
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ EmitIsConstructCall(temp1, scratch0());
+ EmitBranch(true_block, false_block, eq);
+}
+
+
+void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
+ ASSERT(!temp1.is(temp2));
+ // Get the frame pointer for the calling frame.
+ __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+
+ // Skip the arguments adaptor frame if it exists.
+ Label check_frame_marker;
+ __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
+ __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ b(ne, &check_frame_marker);
+ __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
+
+ // Check the marker in the calling frame.
+ __ bind(&check_frame_marker);
+ __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
+ __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
+}
+
+
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
// No code for lazy bailout instruction. Used to capture environment after a
// call for populating the safepoint data with deoptimization data.
@@ -3785,14 +3909,16 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
- DeoptimizeIf(kNoCondition, instr->environment());
+ DeoptimizeIf(al, instr->environment());
}
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
Register object = ToRegister(instr->object());
Register key = ToRegister(instr->key());
- __ Push(object, key);
+ Register strict = scratch0();
+ __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(object, key, strict);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
LEnvironment* env = instr->deoptimization_environment();
@@ -3818,7 +3944,19 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
- Abort("DoOsrEntry unimplemented.");
+ // This is a pseudo-instruction that ensures that the environment here is
+ // properly registered for deoptimization and records the assembler's PC
+ // offset.
+ LEnvironment* environment = instr->environment();
+ environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
+ instr->SpilledDoubleRegisterArray());
+
+ // If the environment were already registered, we would have no way of
+ // backpatching it with the spill slot operands.
+ ASSERT(!environment->HasBeenRegistered());
+ RegisterEnvironmentForDeoptimization(environment);
+ ASSERT(osr_pc_offset_ == -1);
+ osr_pc_offset_ = masm()->pc_offset();
}
diff --git a/deps/v8/src/arm/lithium-codegen-arm.h b/deps/v8/src/arm/lithium-codegen-arm.h
index 3f7fe4519b..732db44517 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.h
+++ b/deps/v8/src/arm/lithium-codegen-arm.h
@@ -129,6 +129,10 @@ class LCodeGen BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
+ int strict_mode_flag() const {
+ return info_->is_strict() ? kStrictMode : kNonStrictMode;
+ }
+
LChunk* chunk() const { return chunk_; }
Scope* scope() const { return scope_; }
HGraph* graph() const { return chunk_->graph(); }
@@ -264,6 +268,10 @@ class LCodeGen BASE_EMBEDDED {
Label* is_not_object,
Label* is_object);
+ // Emits optimized code for %_IsConstructCall().
+ // Caller should branch on equal condition.
+ void EmitIsConstructCall(Register temp1, Register temp2);
+
LChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc
index c11d664f07..eb850cd948 100644
--- a/deps/v8/src/arm/macro-assembler-arm.cc
+++ b/deps/v8/src/arm/macro-assembler-arm.cc
@@ -714,7 +714,8 @@ int MacroAssembler::ActivationFrameAlignment() {
}
-void MacroAssembler::LeaveExitFrame(bool save_doubles) {
+void MacroAssembler::LeaveExitFrame(bool save_doubles,
+ Register argument_count) {
// Optionally restore all double registers.
if (save_doubles) {
for (int i = 0; i < DwVfpRegister::kNumRegisters; i++) {
@@ -736,12 +737,12 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles) {
str(r3, MemOperand(ip));
#endif
- // Tear down the exit frame, pop the arguments, and return. Callee-saved
- // register r4 still holds argc.
+ // Tear down the exit frame, pop the arguments, and return.
mov(sp, Operand(fp));
ldm(ia_w, sp, fp.bit() | lr.bit());
- add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
- mov(pc, lr);
+ if (argument_count.is_valid()) {
+ add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
+ }
}
@@ -929,8 +930,8 @@ void MacroAssembler::IsInstanceJSObjectType(Register map,
void MacroAssembler::IsObjectJSStringType(Register object,
- Register scratch,
- Label* fail) {
+ Register scratch,
+ Label* fail) {
ASSERT(kNotStringTag != 0);
ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
@@ -1005,6 +1006,117 @@ void MacroAssembler::PopTryHandler() {
}
+void MacroAssembler::Throw(Register value) {
+ // r0 is expected to hold the exception.
+ if (!value.is(r0)) {
+ mov(r0, value);
+ }
+
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+
+ // Drop the sp to the top of the handler.
+ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
+ ldr(sp, MemOperand(r3));
+
+ // Restore the next handler and frame pointer, discard handler state.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ pop(r2);
+ str(r2, MemOperand(r3));
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ ldm(ia_w, sp, r3.bit() | fp.bit()); // r3: discarded state.
+
+ // Before returning we restore the context from the frame pointer if
+ // not NULL. The frame pointer is NULL in the exception handler of a
+ // JS entry frame.
+ cmp(fp, Operand(0, RelocInfo::NONE));
+ // Set cp to NULL if fp is NULL.
+ mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+ // Restore cp otherwise.
+ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+#ifdef DEBUG
+ if (FLAG_debug_code) {
+ mov(lr, Operand(pc));
+ }
+#endif
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+ pop(pc);
+}
+
+
+void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
+ Register value) {
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+
+ // r0 is expected to hold the exception.
+ if (!value.is(r0)) {
+ mov(r0, value);
+ }
+
+ // Drop sp to the top stack handler.
+ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
+ ldr(sp, MemOperand(r3));
+
+ // Unwind the handlers until the ENTRY handler is found.
+ Label loop, done;
+ bind(&loop);
+ // Load the type of the current stack handler.
+ const int kStateOffset = StackHandlerConstants::kStateOffset;
+ ldr(r2, MemOperand(sp, kStateOffset));
+ cmp(r2, Operand(StackHandler::ENTRY));
+ b(eq, &done);
+ // Fetch the next handler in the list.
+ const int kNextOffset = StackHandlerConstants::kNextOffset;
+ ldr(sp, MemOperand(sp, kNextOffset));
+ jmp(&loop);
+ bind(&done);
+
+ // Set the top handler address to next handler past the current ENTRY handler.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ pop(r2);
+ str(r2, MemOperand(r3));
+
+ if (type == OUT_OF_MEMORY) {
+ // Set external caught exception to false.
+ ExternalReference external_caught(Top::k_external_caught_exception_address);
+ mov(r0, Operand(false, RelocInfo::NONE));
+ mov(r2, Operand(external_caught));
+ str(r0, MemOperand(r2));
+
+ // Set pending exception and r0 to out of memory exception.
+ Failure* out_of_memory = Failure::OutOfMemoryException();
+ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
+ str(r0, MemOperand(r2));
+ }
+
+ // Stack layout at this point. See also StackHandlerConstants.
+ // sp -> state (ENTRY)
+ // fp
+ // lr
+
+ // Discard handler state (r2 is not used) and restore frame pointer.
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state.
+ // Before returning we restore the context from the frame pointer if
+ // not NULL. The frame pointer is NULL in the exception handler of a
+ // JS entry frame.
+ cmp(fp, Operand(0, RelocInfo::NONE));
+ // Set cp to NULL if fp is NULL.
+ mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+ // Restore cp otherwise.
+ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+#ifdef DEBUG
+ if (FLAG_debug_code) {
+ mov(lr, Operand(pc));
+ }
+#endif
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+ pop(pc);
+}
+
+
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
Register scratch,
Label* miss) {
@@ -1150,7 +1262,8 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
// Calculate new top and bail out if new space is exhausted. Use result
// to calculate the new top.
- add(scratch2, result, Operand(obj_size_reg));
+ add(scratch2, result, Operand(obj_size_reg), SetCC);
+ b(cs, gc_required);
cmp(scratch2, Operand(ip));
b(hi, gc_required);
str(scratch2, MemOperand(topaddr));
@@ -1229,10 +1342,11 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
// to calculate the new top. Object size may be in words so a shift is
// required to get the number of bytes.
if ((flags & SIZE_IN_WORDS) != 0) {
- add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2));
+ add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC);
} else {
- add(scratch2, result, Operand(object_size));
+ add(scratch2, result, Operand(object_size), SetCC);
}
+ b(cs, gc_required);
cmp(scratch2, Operand(ip));
b(hi, gc_required);
@@ -1552,9 +1666,10 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
cmp(r4, r5);
b(ne, &promote_scheduled_exception);
- // LeaveExitFrame expects unwind space to be in r4.
+ // LeaveExitFrame expects unwind space to be in a register.
mov(r4, Operand(stack_space));
- LeaveExitFrame(false);
+ LeaveExitFrame(false, r4);
+ mov(pc, lr);
bind(&promote_scheduled_exception);
MaybeObject* result = TryTailCallExternalReference(
@@ -1771,6 +1886,13 @@ void MacroAssembler::GetLeastBitsFromSmi(Register dst,
}
+void MacroAssembler::GetLeastBitsFromInt32(Register dst,
+ Register src,
+ int num_least_bits) {
+ and_(dst, src, Operand((1 << num_least_bits) - 1));
+}
+
+
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
// All parameters are on the stack. r0 has the return value after call.
@@ -2113,6 +2235,19 @@ void MacroAssembler::AbortIfNotSmi(Register object) {
}
+void MacroAssembler::AbortIfNotString(Register object) {
+ STATIC_ASSERT(kSmiTag == 0);
+ tst(object, Operand(kSmiTagMask));
+ Assert(ne, "Operand is not a string");
+ push(object);
+ ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
+ CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
+ pop(object);
+ Assert(lo, "Operand is not a string");
+}
+
+
+
void MacroAssembler::AbortIfNotRootValue(Register src,
Heap::RootListIndex root_value_index,
const char* message) {
@@ -2379,7 +2514,6 @@ void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
}
-#ifdef ENABLE_DEBUGGER_SUPPORT
CodePatcher::CodePatcher(byte* address, int instructions)
: address_(address),
instructions_(instructions),
@@ -2402,15 +2536,21 @@ CodePatcher::~CodePatcher() {
}
-void CodePatcher::Emit(Instr x) {
- masm()->emit(x);
+void CodePatcher::Emit(Instr instr) {
+ masm()->emit(instr);
}
void CodePatcher::Emit(Address addr) {
masm()->emit(reinterpret_cast<Instr>(addr));
}
-#endif // ENABLE_DEBUGGER_SUPPORT
+
+
+void CodePatcher::EmitCondition(Condition cond) {
+ Instr instr = Assembler::instr_at(masm_.pc_);
+ instr = (instr & ~kCondMask) | cond;
+ masm_.emit(instr);
+}
} } // namespace v8::internal
diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h
index c9ffde8981..354662da32 100644
--- a/deps/v8/src/arm/macro-assembler-arm.h
+++ b/deps/v8/src/arm/macro-assembler-arm.h
@@ -45,6 +45,12 @@ static inline MemOperand FieldMemOperand(Register object, int offset) {
}
+static inline Operand SmiUntagOperand(Register object) {
+ return Operand(object, ASR, kSmiTagSize);
+}
+
+
+
// Give alias names to registers
const Register cp = { 8 }; // JavaScript context pointer
const Register roots = { 10 }; // Roots array pointer.
@@ -291,7 +297,9 @@ class MacroAssembler: public Assembler {
void EnterExitFrame(bool save_doubles, int stack_space = 0);
// Leave the current exit frame. Expects the return value in r0.
- void LeaveExitFrame(bool save_doubles);
+ // Expect the number of values, pushed prior to the exit frame, to
+ // remove in a register (or no_reg, if there is nothing to remove).
+ void LeaveExitFrame(bool save_doubles, Register argument_count);
// Get the actual activation frame alignment for target environment.
static int ActivationFrameAlignment();
@@ -365,6 +373,13 @@ class MacroAssembler: public Assembler {
// Must preserve the result register.
void PopTryHandler();
+ // Passes thrown value (in r0) to the handler of top of the try handler chain.
+ void Throw(Register value);
+
+ // Propagates an uncatchable exception to the top of the current JS stack's
+ // handler chain.
+ void ThrowUncatchable(UncatchableExceptionType type, Register value);
+
// ---------------------------------------------------------------------------
// Inline caching support
@@ -558,6 +573,7 @@ class MacroAssembler: public Assembler {
// Get the number of least significant bits from a register
void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
+ void GetLeastBitsFromInt32(Register dst, Register src, int mun_least_bits);
// Uses VFP instructions to Convert a Smi to a double.
void IntegerToDoubleConversionWithVFP3(Register inReg,
@@ -784,6 +800,9 @@ class MacroAssembler: public Assembler {
void AbortIfSmi(Register object);
void AbortIfNotSmi(Register object);
+ // Abort execution if argument is a string. Used in debug code.
+ void AbortIfNotString(Register object);
+
// Abort execution if argument is not the root value with the given index.
void AbortIfNotRootValue(Register src,
Heap::RootListIndex root_value_index,
@@ -886,11 +905,15 @@ class CodePatcher {
MacroAssembler* masm() { return &masm_; }
// Emit an instruction directly.
- void Emit(Instr x);
+ void Emit(Instr instr);
// Emit an address directly.
void Emit(Address addr);
+ // Emit the condition part of an instruction leaving the rest of the current
+ // instruction unchanged.
+ void EmitCondition(Condition cond);
+
private:
byte* address_; // The address of the code being patched.
int instructions_; // Number of instructions of the expected patch size.
diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/arm/regexp-macro-assembler-arm.cc
index 94da04240d..1f6ed6712d 100644
--- a/deps/v8/src/arm/regexp-macro-assembler-arm.cc
+++ b/deps/v8/src/arm/regexp-macro-assembler-arm.cc
@@ -57,48 +57,57 @@ namespace internal {
* - r13/sp : points to tip of C stack.
*
* The remaining registers are free for computations.
- *
* Each call to a public method should retain this convention.
+ *
* The stack will have the following structure:
- * - direct_call (if 1, direct call from JavaScript code, if 0 call
- * through the runtime system)
- * - stack_area_base (High end of the memory area to use as
- * backtracking stack)
- * - int* capture_array (int[num_saved_registers_], for output).
- * --- sp when called ---
- * - link address
- * - backup of registers r4..r11
- * - end of input (Address of end of string)
- * - start of input (Address of first character in string)
- * - start index (character index of start)
- * --- frame pointer ----
- * - void* input_string (location of a handle containing the string)
- * - Offset of location before start of input (effectively character
- * position -1). Used to initialize capture registers to a non-position.
- * - At start (if 1, we are starting at the start of the
- * string, otherwise 0)
- * - register 0 (Only positions must be stored in the first
- * - register 1 num_saved_registers_ registers)
- * - ...
- * - register num_registers-1
- * --- sp ---
+ * - fp[48] direct_call (if 1, direct call from JavaScript code,
+ * if 0, call through the runtime system).
+ * - fp[44] stack_area_base (High end of the memory area to use as
+ * backtracking stack).
+ * - fp[40] int* capture_array (int[num_saved_registers_], for output).
+ * - fp[36] secondary link/return address used by native call.
+ * --- sp when called ---
+ * - fp[32] return address (lr).
+ * - fp[28] old frame pointer (r11).
+ * - fp[0..24] backup of registers r4..r10.
+ * --- frame pointer ----
+ * - fp[-4] end of input (Address of end of string).
+ * - fp[-8] start of input (Address of first character in string).
+ * - fp[-12] start index (character index of start).
+ * - fp[-16] void* input_string (location of a handle containing the string).
+ * - fp[-20] Offset of location before start of input (effectively character
+ * position -1). Used to initialize capture registers to a
+ * non-position.
+ * - fp[-24] At start (if 1, we are starting at the start of the
+ * string, otherwise 0)
+ * - fp[-28] register 0 (Only positions must be stored in the first
+ * - register 1 num_saved_registers_ registers)
+ * - ...
+ * - register num_registers-1
+ * --- sp ---
*
* The first num_saved_registers_ registers are initialized to point to
* "character -1" in the string (i.e., char_size() bytes before the first
* character of the string). The remaining registers start out as garbage.
*
* The data up to the return address must be placed there by the calling
- * code, by calling the code entry as cast to a function with the signature:
+ * code and the remaining arguments are passed in registers, e.g. by calling the
+ * code entry as cast to a function with the signature:
* int (*match)(String* input_string,
* int start_index,
* Address start,
* Address end,
+ * Address secondary_return_address, // Only used by native call.
* int* capture_output_array,
- * bool at_start,
* byte* stack_area_base,
- * bool direct_call)
+ * bool direct_call = false)
* The call is performed by NativeRegExpMacroAssembler::Execute()
- * (in regexp-macro-assembler.cc).
+ * (in regexp-macro-assembler.cc) via the CALL_GENERATED_REGEXP_CODE macro
+ * in arm/simulator-arm.h.
+ * When calling as a non-direct call (i.e., from C++ code), the return address
+ * area is overwritten with the LR register by the RegExp code. When doing a
+ * direct call from generated code, the return address is placed there by
+ * the calling code, as in a normal exit frame.
*/
#define __ ACCESS_MASM(masm_)
@@ -598,16 +607,17 @@ Handle<Object> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Entry code:
__ bind(&entry_label_);
- // Push Link register.
// Push arguments
// Save callee-save registers.
// Start new stack frame.
+ // Store link register in existing stack-cell.
// Order here should correspond to order of offset constants in header file.
RegList registers_to_retain = r4.bit() | r5.bit() | r6.bit() |
r7.bit() | r8.bit() | r9.bit() | r10.bit() | fp.bit();
RegList argument_registers = r0.bit() | r1.bit() | r2.bit() | r3.bit();
__ stm(db_w, sp, argument_registers | registers_to_retain | lr.bit());
- // Set frame pointer just above the arguments.
+ // Set frame pointer in space for it if this is not a direct call
+ // from generated code.
__ add(frame_pointer(), sp, Operand(4 * kPointerSize));
__ push(r0); // Make room for "position - 1" constant (value is irrelevant).
__ push(r0); // Make room for "at start" constant (value is irrelevant).
@@ -764,10 +774,9 @@ Handle<Object> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
if (stack_overflow_label_.is_linked()) {
SafeCallTarget(&stack_overflow_label_);
// Reached if the backtrack-stack limit has been hit.
-
Label grow_failed;
- // Call GrowStack(backtrack_stackpointer())
+ // Call GrowStack(backtrack_stackpointer(), &stack_base)
static const int num_arguments = 2;
__ PrepareCallCFunction(num_arguments, r0);
__ mov(r0, backtrack_stackpointer());
diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.h b/deps/v8/src/arm/regexp-macro-assembler-arm.h
index b487ba59d1..d9d0b3562e 100644
--- a/deps/v8/src/arm/regexp-macro-assembler-arm.h
+++ b/deps/v8/src/arm/regexp-macro-assembler-arm.h
@@ -122,8 +122,9 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
static const int kStoredRegisters = kFramePointer;
// Return address (stored from link register, read into pc on return).
static const int kReturnAddress = kStoredRegisters + 8 * kPointerSize;
+ static const int kSecondaryReturnAddress = kReturnAddress + kPointerSize;
// Stack parameters placed by caller.
- static const int kRegisterOutput = kReturnAddress + kPointerSize;
+ static const int kRegisterOutput = kSecondaryReturnAddress + kPointerSize;
static const int kStackHighEnd = kRegisterOutput + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize;
diff --git a/deps/v8/src/arm/simulator-arm.h b/deps/v8/src/arm/simulator-arm.h
index 5256ae35b9..bdf1f8a106 100644
--- a/deps/v8/src/arm/simulator-arm.h
+++ b/deps/v8/src/arm/simulator-arm.h
@@ -48,10 +48,16 @@ namespace internal {
#define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
(entry(p0, p1, p2, p3, p4))
-// Call the generated regexp code directly. The entry function pointer should
-// expect seven int/pointer sized arguments and return an int.
+typedef int (*arm_regexp_matcher)(String*, int, const byte*, const byte*,
+ void*, int*, Address, int);
+
+
+// Call the generated regexp code directly. The code at the entry address
+// should act as a function matching the type arm_regexp_matcher.
+// The fifth argument is a dummy that reserves the space used for
+// the return address added by the ExitFrame in native calls.
#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
- (entry(p0, p1, p2, p3, p4, p5, p6))
+ (FUNCTION_CAST<arm_regexp_matcher>(entry)(p0, p1, p2, p3, NULL, p4, p5, p6))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
(reinterpret_cast<TryCatch*>(try_catch_address))
@@ -362,8 +368,7 @@ class Simulator {
FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
- Simulator::current()->Call( \
- FUNCTION_ADDR(entry), 7, p0, p1, p2, p3, p4, p5, p6)
+ Simulator::current()->Call(entry, 8, p0, p1, p2, p3, NULL, p4, p5, p6)
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
try_catch_address == \
diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc
index 9ef61158ea..675fdf49b2 100644
--- a/deps/v8/src/arm/stub-cache-arm.cc
+++ b/deps/v8/src/arm/stub-cache-arm.cc
@@ -3259,6 +3259,47 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
}
+MaybeObject* KeyedStoreStubCompiler::CompileStorePixelArray(
+ JSObject* receiver) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : key
+ // -- r2 : receiver
+ // -- r3 : scratch
+ // -- r4 : scratch
+ // -- r5 : scratch
+ // -- r6 : scratch
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map matches.
+ __ CheckMap(r2, r6, Handle<Map>(receiver->map()), &miss, false);
+
+ GenerateFastPixelArrayStore(masm(),
+ r2,
+ r1,
+ r0,
+ r3,
+ r4,
+ r5,
+ r6,
+ true,
+ true,
+ &miss,
+ &miss,
+ NULL,
+ &miss);
+
+ __ bind(&miss);
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL);
+}
+
+
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
// ----------- S t a t e -------------
// -- r0 : argc
diff --git a/deps/v8/src/arm/virtual-frame-arm.cc b/deps/v8/src/arm/virtual-frame-arm.cc
index 3266a16e3a..b4b518cff6 100644
--- a/deps/v8/src/arm/virtual-frame-arm.cc
+++ b/deps/v8/src/arm/virtual-frame-arm.cc
@@ -329,18 +329,25 @@ void VirtualFrame::CallLoadIC(Handle<String> name, RelocInfo::Mode mode) {
}
-void VirtualFrame::CallStoreIC(Handle<String> name, bool is_contextual) {
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+void VirtualFrame::CallStoreIC(Handle<String> name,
+ bool is_contextual,
+ StrictModeFlag strict_mode) {
+ Handle<Code> ic(Builtins::builtin(strict_mode == kStrictMode
+ ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
PopToR0();
+ RelocInfo::Mode mode;
if (is_contextual) {
SpillAll();
__ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ mode = RelocInfo::CODE_TARGET_CONTEXT;
} else {
EmitPop(r1);
SpillAll();
+ mode = RelocInfo::CODE_TARGET;
}
__ mov(r2, Operand(name));
- CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
+ CallCodeObject(ic, mode, 0);
}
diff --git a/deps/v8/src/arm/virtual-frame-arm.h b/deps/v8/src/arm/virtual-frame-arm.h
index 82b4d08ab7..b6e794a5c0 100644
--- a/deps/v8/src/arm/virtual-frame-arm.h
+++ b/deps/v8/src/arm/virtual-frame-arm.h
@@ -294,7 +294,8 @@ class VirtualFrame : public ZoneObject {
// Call store IC. If the load is contextual, value is found on top of the
// frame. If not, value and receiver are on the frame. Both are consumed.
// Result is returned in r0.
- void CallStoreIC(Handle<String> name, bool is_contextual);
+ void CallStoreIC(Handle<String> name, bool is_contextual,
+ StrictModeFlag strict_mode);
// Call keyed load IC. Key and receiver are on the stack. Both are consumed.
// Result is returned in r0.
diff --git a/deps/v8/src/array.js b/deps/v8/src/array.js
index 1298434d59..ef82674d78 100644
--- a/deps/v8/src/array.js
+++ b/deps/v8/src/array.js
@@ -161,15 +161,7 @@ function Join(array, length, separator, convert) {
var result = %_FastAsciiArrayJoin(elements, separator);
if (!IS_UNDEFINED(result)) return result;
- var length2 = (length << 1) - 1;
- var j = length2;
- var i = length;
- elements[--j] = elements[--i];
- while (i > 0) {
- elements[--j] = separator;
- elements[--j] = elements[--i];
- }
- return %StringBuilderConcat(elements, length2, '');
+ return %StringBuilderJoin(elements, length, separator);
} finally {
// Make sure to remove the last element of the visited array no
// matter what happens.
diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc
index ef2094f63a..42a61c2b8d 100644
--- a/deps/v8/src/assembler.cc
+++ b/deps/v8/src/assembler.cc
@@ -68,7 +68,7 @@ const double DoubleConstant::min_int = kMinInt;
const double DoubleConstant::one_half = 0.5;
const double DoubleConstant::minus_zero = -0.0;
const double DoubleConstant::negative_infinity = -V8_INFINITY;
-
+const char* RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
// -----------------------------------------------------------------------------
// Implementation of Label
diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h
index e8bc5d6caa..1b71dfc5a1 100644
--- a/deps/v8/src/assembler.h
+++ b/deps/v8/src/assembler.h
@@ -178,10 +178,16 @@ class RelocInfo BASE_EMBEDDED {
// invalid/uninitialized position value.
static const int kNoPosition = -1;
+ // This string is used to add padding comments to the reloc info in cases
+ // where we are not sure to have enough space for patching in during
+ // lazy deoptimization. This is the case if we have indirect calls for which
+ // we do not normally record relocation info.
+ static const char* kFillerCommentString;
+
enum Mode {
// Please note the order is important (see IsCodeTarget, IsGCRelocMode).
CONSTRUCT_CALL, // code target that is a call to a JavaScript constructor.
- CODE_TARGET_CONTEXT, // Code target used for contextual loads.
+ CODE_TARGET_CONTEXT, // Code target used for contextual loads and stores.
DEBUG_BREAK, // Code target for the debugger statement.
CODE_TARGET, // Code target which is not any of the above.
EMBEDDED_OBJECT,
diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc
index ccfa2b4ecf..772684cf95 100644
--- a/deps/v8/src/ast.cc
+++ b/deps/v8/src/ast.cc
@@ -618,7 +618,9 @@ bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
cell_ = Handle<JSGlobalPropertyCell>::null();
LookupResult lookup;
global->Lookup(*name, &lookup);
- if (lookup.IsProperty() && lookup.type() == NORMAL) {
+ if (lookup.IsProperty() &&
+ lookup.type() == NORMAL &&
+ lookup.holder() == *global) {
cell_ = Handle<JSGlobalPropertyCell>(global->GetPropertyCell(&lookup));
if (cell_->value()->IsJSFunction()) {
Handle<JSFunction> candidate(JSFunction::cast(cell_->value()));
diff --git a/deps/v8/src/bignum.cc b/deps/v8/src/bignum.cc
index dd1537a25a..a973974550 100644
--- a/deps/v8/src/bignum.cc
+++ b/deps/v8/src/bignum.cc
@@ -67,7 +67,7 @@ void Bignum::AssignUInt64(uint64_t value) {
int needed_bigits = kUInt64Size / kBigitSize + 1;
EnsureCapacity(needed_bigits);
for (int i = 0; i < needed_bigits; ++i) {
- bigits_[i] = value & kBigitMask;
+ bigits_[i] = static_cast<Chunk>(value & kBigitMask);
value = value >> kBigitSize;
}
used_digits_ = needed_bigits;
@@ -266,7 +266,7 @@ void Bignum::MultiplyByUInt32(uint32_t factor) {
}
while (carry != 0) {
EnsureCapacity(used_digits_ + 1);
- bigits_[used_digits_] = carry & kBigitMask;
+ bigits_[used_digits_] = static_cast<Chunk>(carry & kBigitMask);
used_digits_++;
carry >>= kBigitSize;
}
@@ -287,13 +287,13 @@ void Bignum::MultiplyByUInt64(uint64_t factor) {
uint64_t product_low = low * bigits_[i];
uint64_t product_high = high * bigits_[i];
uint64_t tmp = (carry & kBigitMask) + product_low;
- bigits_[i] = tmp & kBigitMask;
+ bigits_[i] = static_cast<Chunk>(tmp & kBigitMask);
carry = (carry >> kBigitSize) + (tmp >> kBigitSize) +
(product_high << (32 - kBigitSize));
}
while (carry != 0) {
EnsureCapacity(used_digits_ + 1);
- bigits_[used_digits_] = carry & kBigitMask;
+ bigits_[used_digits_] = static_cast<Chunk>(carry & kBigitMask);
used_digits_++;
carry >>= kBigitSize;
}
@@ -748,7 +748,8 @@ void Bignum::SubtractTimes(const Bignum& other, int factor) {
for (int i = 0; i < other.used_digits_; ++i) {
DoubleChunk product = static_cast<DoubleChunk>(factor) * other.bigits_[i];
DoubleChunk remove = borrow + product;
- Chunk difference = bigits_[i + exponent_diff] - (remove & kBigitMask);
+ Chunk difference =
+ bigits_[i + exponent_diff] - static_cast<Chunk>(remove & kBigitMask);
bigits_[i + exponent_diff] = difference & kBigitMask;
borrow = static_cast<Chunk>((difference >> (kChunkSize - 1)) +
(remove >> kBigitSize));
diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc
index 6db9a4819a..415d2dd8cb 100644
--- a/deps/v8/src/bootstrapper.cc
+++ b/deps/v8/src/bootstrapper.cc
@@ -349,7 +349,7 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target,
prototype,
call_code,
is_ecma_native);
- SetProperty(target, symbol, function, DONT_ENUM);
+ SetLocalPropertyNoThrow(target, symbol, function, DONT_ENUM);
if (is_ecma_native) {
function->shared()->set_instance_class_name(*symbol);
}
@@ -580,8 +580,8 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
Handle<JSObject> prototype =
Handle<JSObject>(
JSObject::cast(js_global_function->instance_prototype()));
- SetProperty(prototype, Factory::constructor_symbol(),
- Top::object_function(), NONE);
+ SetLocalPropertyNoThrow(
+ prototype, Factory::constructor_symbol(), Top::object_function(), NONE);
} else {
Handle<FunctionTemplateInfo> js_global_constructor(
FunctionTemplateInfo::cast(js_global_template->constructor()));
@@ -683,7 +683,8 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
global_context()->set_security_token(*inner_global);
Handle<String> object_name = Handle<String>(Heap::Object_symbol());
- SetProperty(inner_global, object_name, Top::object_function(), DONT_ENUM);
+ SetLocalPropertyNoThrow(inner_global, object_name,
+ Top::object_function(), DONT_ENUM);
Handle<JSObject> global = Handle<JSObject>(global_context()->global());
@@ -851,7 +852,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
cons->SetInstanceClassName(*name);
Handle<JSObject> json_object = Factory::NewJSObject(cons, TENURED);
ASSERT(json_object->IsJSObject());
- SetProperty(global, name, json_object, DONT_ENUM);
+ SetLocalPropertyNoThrow(global, name, json_object, DONT_ENUM);
global_context()->set_json_object(*json_object);
}
@@ -880,12 +881,12 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
global_context()->set_arguments_boilerplate(*result);
// Note: callee must be added as the first property and
// length must be added as the second property.
- SetProperty(result, Factory::callee_symbol(),
- Factory::undefined_value(),
- DONT_ENUM);
- SetProperty(result, Factory::length_symbol(),
- Factory::undefined_value(),
- DONT_ENUM);
+ SetLocalPropertyNoThrow(result, Factory::callee_symbol(),
+ Factory::undefined_value(),
+ DONT_ENUM);
+ SetLocalPropertyNoThrow(result, Factory::length_symbol(),
+ Factory::undefined_value(),
+ DONT_ENUM);
#ifdef DEBUG
LookupResult lookup;
@@ -1085,10 +1086,8 @@ bool Genesis::InstallNatives() {
static const PropertyAttributes attributes =
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
Handle<String> global_symbol = Factory::LookupAsciiSymbol("global");
- SetProperty(builtins,
- global_symbol,
- Handle<Object>(global_context()->global()),
- attributes);
+ Handle<Object> global_obj(global_context()->global());
+ SetLocalPropertyNoThrow(builtins, global_symbol, global_obj, attributes);
// Setup the reference from the global object to the builtins object.
JSGlobalObject::cast(global_context()->global())->set_builtins(*builtins);
@@ -1480,17 +1479,17 @@ void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
if (FLAG_expose_natives_as != NULL && strlen(FLAG_expose_natives_as) != 0) {
Handle<String> natives_string =
Factory::LookupAsciiSymbol(FLAG_expose_natives_as);
- SetProperty(js_global, natives_string,
- Handle<JSObject>(js_global->builtins()), DONT_ENUM);
+ SetLocalPropertyNoThrow(js_global, natives_string,
+ Handle<JSObject>(js_global->builtins()), DONT_ENUM);
}
Handle<Object> Error = GetProperty(js_global, "Error");
if (Error->IsJSObject()) {
Handle<String> name = Factory::LookupAsciiSymbol("stackTraceLimit");
- SetProperty(Handle<JSObject>::cast(Error),
- name,
- Handle<Smi>(Smi::FromInt(FLAG_stack_trace_limit)),
- NONE);
+ SetLocalPropertyNoThrow(Handle<JSObject>::cast(Error),
+ name,
+ Handle<Smi>(Smi::FromInt(FLAG_stack_trace_limit)),
+ NONE);
}
#ifdef ENABLE_DEBUGGER_SUPPORT
@@ -1507,8 +1506,8 @@ void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
Handle<String> debug_string =
Factory::LookupAsciiSymbol(FLAG_expose_debug_as);
- SetProperty(js_global, debug_string,
- Handle<Object>(Debug::debug_context()->global_proxy()), DONT_ENUM);
+ Handle<Object> global_proxy(Debug::debug_context()->global_proxy());
+ SetLocalPropertyNoThrow(js_global, debug_string, global_proxy, DONT_ENUM);
}
#endif
}
@@ -1679,7 +1678,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
Handle<String> key = Handle<String>(descs->GetKey(i));
int index = descs->GetFieldIndex(i);
Handle<Object> value = Handle<Object>(from->FastPropertyAt(index));
- SetProperty(to, key, value, details.attributes());
+ SetLocalPropertyNoThrow(to, key, value, details.attributes());
break;
}
case CONSTANT_FUNCTION: {
@@ -1687,7 +1686,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
Handle<String> key = Handle<String>(descs->GetKey(i));
Handle<JSFunction> fun =
Handle<JSFunction>(descs->GetConstantFunction(i));
- SetProperty(to, key, fun, details.attributes());
+ SetLocalPropertyNoThrow(to, key, fun, details.attributes());
break;
}
case CALLBACKS: {
@@ -1737,7 +1736,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
value = Handle<Object>(JSGlobalPropertyCell::cast(*value)->value());
}
PropertyDetails details = properties->DetailsAt(i);
- SetProperty(to, key, value, details.attributes());
+ SetLocalPropertyNoThrow(to, key, value, details.attributes());
}
}
}
diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc
index d604226d75..8fdc1b1382 100644
--- a/deps/v8/src/builtins.cc
+++ b/deps/v8/src/builtins.cc
@@ -368,7 +368,9 @@ static bool ArrayPrototypeHasNoElements(Context* global_context,
array_proto = JSObject::cast(array_proto->GetPrototype());
ASSERT(array_proto->elements() == Heap::empty_fixed_array());
// Object.prototype
- array_proto = JSObject::cast(array_proto->GetPrototype());
+ Object* proto = array_proto->GetPrototype();
+ if (proto == Heap::null_value()) return false;
+ array_proto = JSObject::cast(proto);
if (array_proto != global_context->initial_object_prototype()) return false;
if (array_proto->elements() != Heap::empty_fixed_array()) return false;
ASSERT(array_proto->GetPrototype()->IsNull());
@@ -1305,6 +1307,11 @@ static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
}
+static void Generate_StoreIC_Initialize_Strict(MacroAssembler* masm) {
+ StoreIC::GenerateInitialize(masm);
+}
+
+
static void Generate_StoreIC_Miss(MacroAssembler* masm) {
StoreIC::GenerateMiss(masm);
}
@@ -1315,8 +1322,18 @@ static void Generate_StoreIC_Normal(MacroAssembler* masm) {
}
+static void Generate_StoreIC_Normal_Strict(MacroAssembler* masm) {
+ StoreIC::GenerateNormal(masm);
+}
+
+
static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) {
- StoreIC::GenerateMegamorphic(masm);
+ StoreIC::GenerateMegamorphic(masm, StoreIC::kStoreICNonStrict);
+}
+
+
+static void Generate_StoreIC_Megamorphic_Strict(MacroAssembler* masm) {
+ StoreIC::GenerateMegamorphic(masm, StoreIC::kStoreICStrict);
}
@@ -1325,11 +1342,21 @@ static void Generate_StoreIC_ArrayLength(MacroAssembler* masm) {
}
+static void Generate_StoreIC_ArrayLength_Strict(MacroAssembler* masm) {
+ StoreIC::GenerateArrayLength(masm);
+}
+
+
static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) {
StoreIC::GenerateGlobalProxy(masm);
}
+static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) {
+ StoreIC::GenerateGlobalProxy(masm);
+}
+
+
static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
KeyedStoreIC::GenerateGeneric(masm);
}
@@ -1442,13 +1469,13 @@ void Builtins::Setup(bool create_heap_objects) {
extra_args \
},
-#define DEF_FUNCTION_PTR_A(name, kind, state) \
- { FUNCTION_ADDR(Generate_##name), \
- NULL, \
- #name, \
- name, \
- Code::ComputeFlags(Code::kind, NOT_IN_LOOP, state), \
- NO_EXTRA_ARGUMENTS \
+#define DEF_FUNCTION_PTR_A(name, kind, state, extra) \
+ { FUNCTION_ADDR(Generate_##name), \
+ NULL, \
+ #name, \
+ name, \
+ Code::ComputeFlags(Code::kind, NOT_IN_LOOP, state, extra), \
+ NO_EXTRA_ARGUMENTS \
},
// Define array of pointers to generators and C builtin functions.
diff --git a/deps/v8/src/builtins.h b/deps/v8/src/builtins.h
index 88d31c7612..2733410ea9 100644
--- a/deps/v8/src/builtins.h
+++ b/deps/v8/src/builtins.h
@@ -63,73 +63,135 @@ enum BuiltinExtraArguments {
// Define list of builtins implemented in assembly.
#define BUILTIN_LIST_A(V) \
- V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED) \
- V(JSConstructCall, BUILTIN, UNINITIALIZED) \
- V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED) \
- V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED) \
- V(JSConstructStubApi, BUILTIN, UNINITIALIZED) \
- V(JSEntryTrampoline, BUILTIN, UNINITIALIZED) \
- V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED) \
- V(LazyCompile, BUILTIN, UNINITIALIZED) \
- V(LazyRecompile, BUILTIN, UNINITIALIZED) \
- V(NotifyDeoptimized, BUILTIN, UNINITIALIZED) \
- V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED) \
- V(NotifyOSR, BUILTIN, UNINITIALIZED) \
+ V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructCall, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructStubApi, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(LazyCompile, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(LazyRecompile, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(NotifyOSR, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
\
- V(LoadIC_Miss, BUILTIN, UNINITIALIZED) \
- V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED) \
- V(StoreIC_Miss, BUILTIN, UNINITIALIZED) \
- V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED) \
+ V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(StoreIC_Miss, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
\
- V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED) \
- V(LoadIC_PreMonomorphic, LOAD_IC, PREMONOMORPHIC) \
- V(LoadIC_Normal, LOAD_IC, MONOMORPHIC) \
- V(LoadIC_ArrayLength, LOAD_IC, MONOMORPHIC) \
- V(LoadIC_StringLength, LOAD_IC, MONOMORPHIC) \
- V(LoadIC_StringWrapperLength, LOAD_IC, MONOMORPHIC) \
- V(LoadIC_FunctionPrototype, LOAD_IC, MONOMORPHIC) \
- V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC) \
+ V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(LoadIC_PreMonomorphic, LOAD_IC, PREMONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_Normal, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_ArrayLength, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_StringLength, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_StringWrapperLength, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_FunctionPrototype, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
\
- V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED) \
- V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC) \
- V(KeyedLoadIC_Generic, KEYED_LOAD_IC, MEGAMORPHIC) \
- V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC) \
- V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC) \
+ V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_Generic, KEYED_LOAD_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
\
- V(StoreIC_Initialize, STORE_IC, UNINITIALIZED) \
- V(StoreIC_ArrayLength, STORE_IC, MONOMORPHIC) \
- V(StoreIC_Normal, STORE_IC, MONOMORPHIC) \
- V(StoreIC_Megamorphic, STORE_IC, MEGAMORPHIC) \
- V(StoreIC_GlobalProxy, STORE_IC, MEGAMORPHIC) \
+ V(StoreIC_Initialize, STORE_IC, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(StoreIC_ArrayLength, STORE_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(StoreIC_Normal, STORE_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(StoreIC_Megamorphic, STORE_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ V(StoreIC_GlobalProxy, STORE_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ V(StoreIC_Initialize_Strict, STORE_IC, UNINITIALIZED, \
+ StoreIC::kStoreICStrict) \
+ V(StoreIC_ArrayLength_Strict, STORE_IC, MONOMORPHIC, \
+ StoreIC::kStoreICStrict) \
+ V(StoreIC_Normal_Strict, STORE_IC, MONOMORPHIC, \
+ StoreIC::kStoreICStrict) \
+ V(StoreIC_Megamorphic_Strict, STORE_IC, MEGAMORPHIC, \
+ StoreIC::kStoreICStrict) \
+ V(StoreIC_GlobalProxy_Strict, STORE_IC, MEGAMORPHIC, \
+ StoreIC::kStoreICStrict) \
\
- V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED) \
- V(KeyedStoreIC_Generic, KEYED_STORE_IC, MEGAMORPHIC) \
+ V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(KeyedStoreIC_Generic, KEYED_STORE_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
\
/* Uses KeyedLoadIC_Initialize; must be after in list. */ \
- V(FunctionCall, BUILTIN, UNINITIALIZED) \
- V(FunctionApply, BUILTIN, UNINITIALIZED) \
+ V(FunctionCall, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(FunctionApply, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
\
- V(ArrayCode, BUILTIN, UNINITIALIZED) \
- V(ArrayConstructCode, BUILTIN, UNINITIALIZED) \
+ V(ArrayCode, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
\
- V(StringConstructCode, BUILTIN, UNINITIALIZED) \
+ V(StringConstructCode, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
\
- V(OnStackReplacement, BUILTIN, UNINITIALIZED)
+ V(OnStackReplacement, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState)
#ifdef ENABLE_DEBUGGER_SUPPORT
// Define list of builtins used by the debugger implemented in assembly.
#define BUILTIN_LIST_DEBUG_A(V) \
- V(Return_DebugBreak, BUILTIN, DEBUG_BREAK) \
- V(ConstructCall_DebugBreak, BUILTIN, DEBUG_BREAK) \
- V(StubNoRegisters_DebugBreak, BUILTIN, DEBUG_BREAK) \
- V(LoadIC_DebugBreak, LOAD_IC, DEBUG_BREAK) \
- V(KeyedLoadIC_DebugBreak, KEYED_LOAD_IC, DEBUG_BREAK) \
- V(StoreIC_DebugBreak, STORE_IC, DEBUG_BREAK) \
- V(KeyedStoreIC_DebugBreak, KEYED_STORE_IC, DEBUG_BREAK) \
- V(Slot_DebugBreak, BUILTIN, DEBUG_BREAK) \
- V(PlainReturn_LiveEdit, BUILTIN, DEBUG_BREAK) \
- V(FrameDropper_LiveEdit, BUILTIN, DEBUG_BREAK)
+ V(Return_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(ConstructCall_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(StubNoRegisters_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(LoadIC_DebugBreak, LOAD_IC, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_DebugBreak, KEYED_LOAD_IC, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(StoreIC_DebugBreak, STORE_IC, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(KeyedStoreIC_DebugBreak, KEYED_STORE_IC, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(Slot_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(PlainReturn_LiveEdit, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(FrameDropper_LiveEdit, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState)
#else
#define BUILTIN_LIST_DEBUG_A(V)
#endif
@@ -152,7 +214,7 @@ enum BuiltinExtraArguments {
V(SHL, 1) \
V(SAR, 1) \
V(SHR, 1) \
- V(DELETE, 1) \
+ V(DELETE, 2) \
V(IN, 1) \
V(INSTANCE_OF, 1) \
V(GET_KEYS, 0) \
@@ -186,7 +248,7 @@ class Builtins : public AllStatic {
enum Name {
#define DEF_ENUM_C(name, ignore) name,
-#define DEF_ENUM_A(name, kind, state) name,
+#define DEF_ENUM_A(name, kind, state, extra) name,
BUILTIN_LIST_C(DEF_ENUM_C)
BUILTIN_LIST_A(DEF_ENUM_A)
BUILTIN_LIST_DEBUG_A(DEF_ENUM_A)
diff --git a/deps/v8/src/code-stubs.cc b/deps/v8/src/code-stubs.cc
index 69f8477f89..ba77b21c60 100644
--- a/deps/v8/src/code-stubs.cc
+++ b/deps/v8/src/code-stubs.cc
@@ -32,7 +32,6 @@
#include "factory.h"
#include "gdb-jit.h"
#include "macro-assembler.h"
-#include "oprofile-agent.h"
namespace v8 {
namespace internal {
@@ -63,9 +62,6 @@ void CodeStub::GenerateCode(MacroAssembler* masm) {
void CodeStub::RecordCodeGeneration(Code* code, MacroAssembler* masm) {
code->set_major_key(MajorKey());
- OPROFILE(CreateNativeCodeRegion(GetName(),
- code->instruction_start(),
- code->instruction_size()));
PROFILE(CodeCreateEvent(Logger::STUB_TAG, code, GetName()));
GDBJIT(AddCode(GDBJITInterface::STUB, GetName(), code));
Counters::total_stubs_code_size.Increment(code->instruction_size());
diff --git a/deps/v8/src/code-stubs.h b/deps/v8/src/code-stubs.h
index 0d0e37ffac..96ac7335cf 100644
--- a/deps/v8/src/code-stubs.h
+++ b/deps/v8/src/code-stubs.h
@@ -86,9 +86,6 @@ namespace internal {
CODE_STUB_LIST_ALL_PLATFORMS(V) \
CODE_STUB_LIST_ARM(V)
-// Types of uncatchable exceptions.
-enum UncatchableExceptionType { OUT_OF_MEMORY, TERMINATION };
-
// Mode to overwrite BinaryExpression values.
enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };
enum UnaryOverwriteMode { UNARY_OVERWRITE, UNARY_NO_OVERWRITE };
diff --git a/deps/v8/src/codegen.cc b/deps/v8/src/codegen.cc
index f9a2453a09..e6fcecde7b 100644
--- a/deps/v8/src/codegen.cc
+++ b/deps/v8/src/codegen.cc
@@ -31,7 +31,6 @@
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
-#include "oprofile-agent.h"
#include "prettyprinter.h"
#include "register-allocator-inl.h"
#include "rewriter.h"
diff --git a/deps/v8/src/compiler.cc b/deps/v8/src/compiler.cc
index 77111a842e..ae7b2b9f98 100755
--- a/deps/v8/src/compiler.cc
+++ b/deps/v8/src/compiler.cc
@@ -39,7 +39,6 @@
#include "hydrogen.h"
#include "lithium.h"
#include "liveedit.h"
-#include "oprofile-agent.h"
#include "parser.h"
#include "rewriter.h"
#include "runtime-profiler.h"
@@ -289,6 +288,11 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
HGraphBuilder builder(&oracle);
HPhase phase(HPhase::kTotal);
HGraph* graph = builder.CreateGraph(info);
+ if (Top::has_pending_exception()) {
+ info->SetCode(Handle<Code>::null());
+ return false;
+ }
+
if (graph != NULL && FLAG_build_lithium) {
Handle<Code> code = graph->Compile();
if (!code.is_null()) {
@@ -419,9 +423,6 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
: Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
*info->code(),
String::cast(script->name())));
- OPROFILE(CreateNativeCodeRegion(String::cast(script->name()),
- info->code()->instruction_start(),
- info->code()->instruction_size()));
GDBJIT(AddCode(Handle<String>(String::cast(script->name())),
script,
info->code()));
@@ -432,9 +433,6 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
: Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
*info->code(),
""));
- OPROFILE(CreateNativeCodeRegion(info->is_eval() ? "Eval" : "Script",
- info->code()->instruction_start(),
- info->code()->instruction_size()));
GDBJIT(AddCode(Handle<String>(), script, info->code()));
}
@@ -608,7 +606,9 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
// Compile the code.
if (!MakeCode(info)) {
- Top::StackOverflow();
+ if (!Top::has_pending_exception()) {
+ Top::StackOverflow();
+ }
} else {
ASSERT(!info->code().is_null());
Handle<Code> code = info->code();
@@ -783,7 +783,6 @@ void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
// script name and line number. Check explicitly whether logging is
// enabled as finding the line number is not free.
if (Logger::is_logging() ||
- OProfileAgent::is_enabled() ||
CpuProfiler::is_profiling()) {
Handle<Script> script = info->script();
Handle<Code> code = info->code();
@@ -795,18 +794,10 @@ void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
*name,
String::cast(script->name()),
line_num));
- OPROFILE(CreateNativeCodeRegion(*name,
- String::cast(script->name()),
- line_num,
- code->instruction_start(),
- code->instruction_size()));
} else {
PROFILE(CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
*code,
*name));
- OPROFILE(CreateNativeCodeRegion(*name,
- code->instruction_start(),
- code->instruction_size()));
}
}
diff --git a/deps/v8/src/compiler.h b/deps/v8/src/compiler.h
index 9843dd6452..239bea35c2 100644
--- a/deps/v8/src/compiler.h
+++ b/deps/v8/src/compiler.h
@@ -71,7 +71,6 @@ class CompilationInfo BASE_EMBEDDED {
flags_ |= IsGlobal::encode(true);
}
void MarkAsStrict() {
- ASSERT(!is_lazy());
flags_ |= IsStrict::encode(true);
}
StrictModeFlag StrictMode() {
@@ -153,6 +152,9 @@ class CompilationInfo BASE_EMBEDDED {
void Initialize(Mode mode) {
mode_ = V8::UseCrankshaft() ? mode : NONOPT;
+ if (!shared_info_.is_null() && shared_info_->strict_mode()) {
+ MarkAsStrict();
+ }
}
void SetMode(Mode mode) {
diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc
index f0da7ac8bc..4dcc794bb3 100644
--- a/deps/v8/src/d8.cc
+++ b/deps/v8/src/d8.cc
@@ -127,11 +127,13 @@ bool Shell::ExecuteString(Handle<String> source,
} else {
Handle<Value> result = script->Run();
if (result.IsEmpty()) {
+ ASSERT(try_catch.HasCaught());
// Print errors that happened during execution.
if (report_exceptions && !i::FLAG_debugger)
ReportException(&try_catch);
return false;
} else {
+ ASSERT(!try_catch.HasCaught());
if (print_result && !result->IsUndefined()) {
// If all went well and the result wasn't undefined then print
// the returned value.
diff --git a/deps/v8/src/date.js b/deps/v8/src/date.js
index 1fb4897921..242ab7bbcb 100644
--- a/deps/v8/src/date.js
+++ b/deps/v8/src/date.js
@@ -81,12 +81,7 @@ function TimeFromYear(year) {
function InLeapYear(time) {
- return DaysInYear(YearFromTime(time)) == 366 ? 1 : 0;
-}
-
-
-function DayWithinYear(time) {
- return DAY(time) - DayFromYear(YearFromTime(time));
+ return DaysInYear(YearFromTime(time)) - 365; // Returns 1 or 0.
}
diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc
index 8ec77e77e5..d8201a1894 100644
--- a/deps/v8/src/debug.cc
+++ b/deps/v8/src/debug.cc
@@ -835,7 +835,9 @@ bool Debug::Load() {
// Expose the builtins object in the debugger context.
Handle<String> key = Factory::LookupAsciiSymbol("builtins");
Handle<GlobalObject> global = Handle<GlobalObject>(context->global());
- SetProperty(global, key, Handle<Object>(global->builtins()), NONE);
+ RETURN_IF_EMPTY_HANDLE_VALUE(
+ SetProperty(global, key, Handle<Object>(global->builtins()), NONE),
+ false);
// Compile the JavaScript for the debugger in the debugger context.
Debugger::set_compiling_natives(true);
diff --git a/deps/v8/src/deoptimizer.cc b/deps/v8/src/deoptimizer.cc
index 00e7d0ee2c..af2f42e466 100644
--- a/deps/v8/src/deoptimizer.cc
+++ b/deps/v8/src/deoptimizer.cc
@@ -663,7 +663,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
case Translation::REGISTER: {
int output_reg = iterator->Next();
if (FLAG_trace_osr) {
- PrintF(" %s <- 0x%08" V8PRIxPTR " ; [esp + %d]\n",
+ PrintF(" %s <- 0x%08" V8PRIxPTR " ; [sp + %d]\n",
converter.NameOfCPURegister(output_reg),
input_value,
*input_offset);
@@ -690,7 +690,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
return false;
}
if (FLAG_trace_osr) {
- PrintF(" %s <- %d (int32) ; [esp + %d]\n",
+ PrintF(" %s <- %d (int32) ; [sp + %d]\n",
converter.NameOfCPURegister(output_reg),
int32_value,
*input_offset);
@@ -706,7 +706,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
int output_reg = iterator->Next();
double double_value = input_object->Number();
if (FLAG_trace_osr) {
- PrintF(" %s <- %g (double) ; [esp + %d]\n",
+ PrintF(" %s <- %g (double) ; [sp + %d]\n",
DoubleRegister::AllocationIndexToString(output_reg),
double_value,
*input_offset);
@@ -720,7 +720,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
unsigned output_offset =
output->GetOffsetFromSlotIndex(this, output_index);
if (FLAG_trace_osr) {
- PrintF(" [esp + %d] <- 0x%08" V8PRIxPTR " ; [esp + %d]\n",
+ PrintF(" [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d]\n",
output_offset,
input_value,
*input_offset);
@@ -749,7 +749,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
return false;
}
if (FLAG_trace_osr) {
- PrintF(" [esp + %d] <- %d (int32) ; [esp + %d]\n",
+ PrintF(" [sp + %d] <- %d (int32) ; [sp + %d]\n",
output_offset,
int32_value,
*input_offset);
@@ -773,12 +773,12 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
int32_t lower = static_cast<int32_t>(int_value);
int32_t upper = static_cast<int32_t>(int_value >> kBitsPerInt);
if (FLAG_trace_osr) {
- PrintF(" [esp + %d] <- 0x%08x (upper bits of %g) ; [esp + %d]\n",
+ PrintF(" [sp + %d] <- 0x%08x (upper bits of %g) ; [sp + %d]\n",
output_offset + kUpperOffset,
upper,
double_value,
*input_offset);
- PrintF(" [esp + %d] <- 0x%08x (lower bits of %g) ; [esp + %d]\n",
+ PrintF(" [sp + %d] <- 0x%08x (lower bits of %g) ; [sp + %d]\n",
output_offset + kLowerOffset,
lower,
double_value,
diff --git a/deps/v8/src/execution.cc b/deps/v8/src/execution.cc
index 11dacfeede..f484d8d9b8 100644
--- a/deps/v8/src/execution.cc
+++ b/deps/v8/src/execution.cc
@@ -403,6 +403,7 @@ void StackGuard::ThreadLocal::Initialize() {
if (real_climit_ == kIllegalLimit) {
// Takes the address of the limit variable in order to find out where
// the top of stack is right now.
+ const uintptr_t kLimitSize = FLAG_stack_size * KB;
uintptr_t limit = reinterpret_cast<uintptr_t>(&limit) - kLimitSize;
ASSERT(reinterpret_cast<uintptr_t>(&limit) > kLimitSize);
real_jslimit_ = SimulatorStack::JsLimitFromCLimit(limit);
diff --git a/deps/v8/src/execution.h b/deps/v8/src/execution.h
index af8ad9aff3..cb07807c06 100644
--- a/deps/v8/src/execution.h
+++ b/deps/v8/src/execution.h
@@ -243,8 +243,6 @@ class StackGuard : public AllStatic {
static void EnableInterrupts();
static void DisableInterrupts();
- static const uintptr_t kLimitSize = kPointerSize * 128 * KB;
-
#ifdef V8_TARGET_ARCH_X64
static const uintptr_t kInterruptLimit = V8_UINT64_C(0xfffffffffffffffe);
static const uintptr_t kIllegalLimit = V8_UINT64_C(0xfffffffffffffff8);
diff --git a/deps/v8/src/factory.cc b/deps/v8/src/factory.cc
index a3d55921b6..96c757a373 100644
--- a/deps/v8/src/factory.cc
+++ b/deps/v8/src/factory.cc
@@ -334,6 +334,11 @@ Handle<Map> Factory::GetSlowElementsMap(Handle<Map> src) {
}
+Handle<Map> Factory::GetPixelArrayElementsMap(Handle<Map> src) {
+ CALL_HEAP_FUNCTION(src->GetPixelArrayElementsMap(), Map);
+}
+
+
Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {
CALL_HEAP_FUNCTION(array->Copy(), FixedArray);
}
@@ -580,7 +585,9 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name,
// Set function.prototype and give the prototype a constructor
// property that refers to the function.
SetPrototypeProperty(function, prototype);
- SetProperty(prototype, Factory::constructor_symbol(), function, DONT_ENUM);
+ // Currently safe because it is only invoked from Genesis.
+ SetLocalPropertyNoThrow(
+ prototype, Factory::constructor_symbol(), function, DONT_ENUM);
return function;
}
diff --git a/deps/v8/src/factory.h b/deps/v8/src/factory.h
index ba2b181a34..7547f7c452 100644
--- a/deps/v8/src/factory.h
+++ b/deps/v8/src/factory.h
@@ -196,6 +196,8 @@ class Factory : public AllStatic {
static Handle<Map> GetSlowElementsMap(Handle<Map> map);
+ static Handle<Map> GetPixelArrayElementsMap(Handle<Map> map);
+
static Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array);
// Numbers (eg, literals) are pretenured by the parser.
diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h
index b90534c0cc..6a5e2a5756 100644
--- a/deps/v8/src/flag-definitions.h
+++ b/deps/v8/src/flag-definitions.h
@@ -134,11 +134,7 @@ DEFINE_bool(deoptimize_uncommon_cases, true, "deoptimize uncommon cases")
DEFINE_bool(polymorphic_inlining, true, "polymorphic inlining")
DEFINE_bool(aggressive_loop_invariant_motion, true,
"aggressive motion of instructions out of loops")
-#ifdef V8_TARGET_ARCH_IA32
DEFINE_bool(use_osr, true, "use on-stack replacement")
-#else
-DEFINE_bool(use_osr, false, "use on-stack replacement")
-#endif
DEFINE_bool(trace_osr, false, "trace on-stack replacement")
DEFINE_int(stress_runs, 0, "number of stress runs")
DEFINE_bool(optimize_closures, true, "optimize closures")
@@ -231,6 +227,10 @@ DEFINE_bool(debugger_auto_break, true,
"in the queue")
DEFINE_bool(enable_liveedit, true, "enable liveedit experimental feature")
+// execution.cc
+DEFINE_int(stack_size, kPointerSize * 128,
+ "default size of stack region v8 is allowed to use (in kBytes)")
+
// frames.cc
DEFINE_int(max_stack_trace_source_length, 300,
"maximum length of function source code printed in a stack trace.")
@@ -374,6 +374,7 @@ DEFINE_bool(debug_script_collected_events, true,
DEFINE_bool(gdbjit, false, "enable GDBJIT interface (disables compacting GC)")
DEFINE_bool(gdbjit_full, false, "enable GDBJIT interface for all code objects")
+DEFINE_bool(gdbjit_dump, false, "dump elf objects with debug info to disk")
//
// Debug only flags
@@ -493,7 +494,6 @@ DEFINE_bool(log_regexp, false, "Log regular expression execution.")
DEFINE_bool(sliding_state_window, false,
"Update sliding state window counters.")
DEFINE_string(logfile, "v8.log", "Specify the name of the log file.")
-DEFINE_bool(oprofile, false, "Enable JIT agent for OProfile.")
DEFINE_bool(ll_prof, false, "Enable low-level linux profiler.")
//
diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc
index 4ed3fecfe8..252fb9257a 100644
--- a/deps/v8/src/full-codegen.cc
+++ b/deps/v8/src/full-codegen.cc
@@ -913,7 +913,7 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
Breakable nested_statement(this, stmt);
SetStatementPosition(stmt);
- PrepareForBailoutForId(stmt->EntryId(), TOS_REG);
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
VisitStatements(stmt->statements());
__ bind(nested_statement.break_target());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h
index 2d0998d8cf..655e560eb5 100644
--- a/deps/v8/src/full-codegen.h
+++ b/deps/v8/src/full-codegen.h
@@ -531,8 +531,9 @@ class FullCodeGenerator: public AstVisitor {
Handle<Script> script() { return info_->script(); }
bool is_eval() { return info_->is_eval(); }
+ bool is_strict() { return function()->strict_mode(); }
StrictModeFlag strict_mode_flag() {
- return function()->strict_mode() ? kStrictMode : kNonStrictMode;
+ return is_strict() ? kStrictMode : kNonStrictMode;
}
FunctionLiteral* function() { return info_->function(); }
Scope* scope() { return info_->scope(); }
@@ -544,7 +545,8 @@ class FullCodeGenerator: public AstVisitor {
void EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode);
// Calling an IC stub with a patch site. Passing NULL for patch_site
- // indicates no inlined smi code and emits a nop after the IC call.
+ // or non NULL patch_site which is not activated indicates no inlined smi code
+ // and emits a nop after the IC call.
void EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site);
// Set fields in the stack frame. Offsets are the frame pointer relative
diff --git a/deps/v8/src/gdb-jit.cc b/deps/v8/src/gdb-jit.cc
index c26ecf5ea8..88a9939722 100644
--- a/deps/v8/src/gdb-jit.cc
+++ b/deps/v8/src/gdb-jit.cc
@@ -395,7 +395,7 @@ class ELF BASE_EMBEDDED {
void WriteHeader(Writer* w) {
ASSERT(w->position() == 0);
Writer::Slot<ELFHeader> header = w->CreateSlotHere<ELFHeader>();
-#if defined(V8_TARGET_ARCH_IA32)
+#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_ARM)
const uint8_t ident[16] =
{ 0x7f, 'E', 'L', 'F', 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#elif defined(V8_TARGET_ARCH_X64)
@@ -413,6 +413,10 @@ class ELF BASE_EMBEDDED {
// System V ABI, AMD64 Supplement
// http://www.x86-64.org/documentation/abi.pdf
header->machine = 62;
+#elif defined(V8_TARGET_ARCH_ARM)
+ // Set to EM_ARM, defined as 40, in "ARM ELF File Format" at
+ // infocenter.arm.com/help/topic/com.arm.doc.dui0101a/DUI0101A_Elf.pdf
+ header->machine = 40;
#else
#error Unsupported target architecture.
#endif
@@ -503,8 +507,7 @@ class ELFSymbol BASE_EMBEDDED {
Binding binding() const {
return static_cast<Binding>(info >> 4);
}
-
-#if defined(V8_TARGET_ARCH_IA32)
+#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_ARM)
struct SerializedLayout {
SerializedLayout(uint32_t name,
uintptr_t value,
@@ -857,14 +860,20 @@ class DebugLineSection : public ELFSection {
Writer::Slot<uint32_t> total_length = w->CreateSlotHere<uint32_t>();
uintptr_t start = w->position();
+ // Used for special opcodes
+ const int8_t line_base = 1;
+ const uint8_t line_range = 7;
+ const int8_t max_line_incr = (line_base + line_range - 1);
+ const uint8_t opcode_base = DW_LNS_NEGATE_STMT + 1;
+
w->Write<uint16_t>(2); // Field version.
Writer::Slot<uint32_t> prologue_length = w->CreateSlotHere<uint32_t>();
uintptr_t prologue_start = w->position();
w->Write<uint8_t>(1); // Field minimum_instruction_length.
w->Write<uint8_t>(1); // Field default_is_stmt.
- w->Write<int8_t>(0); // Field line_base.
- w->Write<uint8_t>(2); // Field line_range.
- w->Write<uint8_t>(DW_LNS_NEGATE_STMT + 1); // Field opcode_base.
+ w->Write<int8_t>(line_base); // Field line_base.
+ w->Write<uint8_t>(line_range); // Field line_range.
+ w->Write<uint8_t>(opcode_base); // Field opcode_base.
w->Write<uint8_t>(0); // DW_LNS_COPY operands count.
w->Write<uint8_t>(1); // DW_LNS_ADVANCE_PC operands count.
w->Write<uint8_t>(1); // DW_LNS_ADVANCE_LINE operands count.
@@ -881,6 +890,7 @@ class DebugLineSection : public ELFSection {
WriteExtendedOpcode(w, DW_LNE_SET_ADDRESS, sizeof(intptr_t));
w->Write<intptr_t>(desc_->CodeStart());
+ w->Write<uint8_t>(DW_LNS_COPY);
intptr_t pc = 0;
intptr_t line = 1;
@@ -888,29 +898,66 @@ class DebugLineSection : public ELFSection {
List<GDBJITLineInfo::PCInfo>* pc_info = desc_->lineinfo()->pc_info();
pc_info->Sort(&ComparePCInfo);
- for (int i = 0; i < pc_info->length(); i++) {
+
+ int pc_info_length = pc_info->length();
+ for (int i = 0; i < pc_info_length; i++) {
GDBJITLineInfo::PCInfo* info = &pc_info->at(i);
- uintptr_t pc_diff = info->pc_ - pc;
ASSERT(info->pc_ >= pc);
- if (pc_diff != 0) {
- w->Write<uint8_t>(DW_LNS_ADVANCE_PC);
- w->WriteSLEB128(pc_diff);
- pc += pc_diff;
- }
- intptr_t line_diff = desc_->GetScriptLineNumber(info->pos_) - line;
- if (line_diff != 0) {
- w->Write<uint8_t>(DW_LNS_ADVANCE_LINE);
- w->WriteSLEB128(line_diff);
- line += line_diff;
+
+ // Reduce bloating in the debug line table by removing duplicate line
+ // entries (per DWARF2 standard).
+ intptr_t new_line = desc_->GetScriptLineNumber(info->pos_);
+ if (new_line == line) {
+ continue;
}
- if (is_statement != info->is_statement_) {
+
+ // Mark statement boundaries. For a better debugging experience, mark
+ // the last pc address in the function as a statement (e.g. "}"), so that
+ // a user can see the result of the last line executed in the function,
+ // should control reach the end.
+ if ((i+1) == pc_info_length) {
+ if (!is_statement) {
+ w->Write<uint8_t>(DW_LNS_NEGATE_STMT);
+ }
+ } else if (is_statement != info->is_statement_) {
w->Write<uint8_t>(DW_LNS_NEGATE_STMT);
is_statement = !is_statement;
}
- if (pc_diff != 0 || i == 0) {
+
+ // Generate special opcodes, if possible. This results in more compact
+ // debug line tables. See the DWARF 2.0 standard to learn more about
+ // special opcodes.
+ uintptr_t pc_diff = info->pc_ - pc;
+ intptr_t line_diff = new_line - line;
+
+ // Compute special opcode (see DWARF 2.0 standard)
+ intptr_t special_opcode = (line_diff - line_base) +
+ (line_range * pc_diff) + opcode_base;
+
+ // If special_opcode is less than or equal to 255, it can be used as a
+ // special opcode. If line_diff is larger than the max line increment
+ // allowed for a special opcode, or if line_diff is less than the minimum
+ // line that can be added to the line register (i.e. line_base), then
+ // special_opcode can't be used.
+ if ((special_opcode >= opcode_base) && (special_opcode <= 255) &&
+ (line_diff <= max_line_incr) && (line_diff >= line_base)) {
+ w->Write<uint8_t>(special_opcode);
+ } else {
+ w->Write<uint8_t>(DW_LNS_ADVANCE_PC);
+ w->WriteSLEB128(pc_diff);
+ w->Write<uint8_t>(DW_LNS_ADVANCE_LINE);
+ w->WriteSLEB128(line_diff);
w->Write<uint8_t>(DW_LNS_COPY);
}
+
+ // Increment the pc and line operands.
+ pc += pc_diff;
+ line += line_diff;
}
+ // Advance the pc to the end of the routine, since the end sequence opcode
+ // requires this.
+ w->Write<uint8_t>(DW_LNS_ADVANCE_PC);
+ w->WriteSLEB128(desc_->CodeSize() - pc);
WriteExtendedOpcode(w, DW_LNE_END_SEQUENCE, 0);
total_length.set(static_cast<uint32_t>(w->position() - start));
return true;
@@ -1237,6 +1284,20 @@ static void DestroyCodeEntry(JITCodeEntry* entry) {
static void RegisterCodeEntry(JITCodeEntry* entry) {
+#if defined(DEBUG) && !defined(WIN32)
+ static int file_num = 0;
+ if (FLAG_gdbjit_dump) {
+ static const int kMaxFileNameSize = 64;
+ static const char* kElfFilePrefix = "/tmp/elfdump";
+ static const char* kObjFileExt = ".o";
+ char file_name[kMaxFileNameSize];
+
+ OS::SNPrintF(Vector<char>(file_name, kMaxFileNameSize), "%s%d%s",
+ kElfFilePrefix, file_num++, kObjFileExt);
+ WriteBytes(file_name, entry->symfile_addr_, entry->symfile_size_);
+ }
+#endif
+
entry->next_ = __jit_debug_descriptor.first_entry_;
if (entry->next_ != NULL) entry->next_->prev_ = entry;
__jit_debug_descriptor.first_entry_ =
@@ -1294,7 +1355,13 @@ static bool SameCodeObjects(void* key1, void* key2) {
}
-static HashMap entries(&SameCodeObjects);
+static HashMap* GetEntries() {
+ static HashMap* entries = NULL;
+ if (entries == NULL) {
+ entries = new HashMap(&SameCodeObjects);
+ }
+ return entries;
+}
static uint32_t HashForCodeObject(Code* code) {
@@ -1398,7 +1465,7 @@ void GDBJITInterface::AddCode(const char* name,
if (!FLAG_gdbjit) return;
AssertNoAllocation no_gc;
- HashMap::Entry* e = entries.Lookup(code, HashForCodeObject(code), true);
+ HashMap::Entry* e = GetEntries()->Lookup(code, HashForCodeObject(code), true);
if (e->value != NULL && !IsLineInfoTagged(e->value)) return;
GDBJITLineInfo* lineinfo = UntagLineInfo(e->value);
@@ -1411,7 +1478,7 @@ void GDBJITInterface::AddCode(const char* name,
if (!FLAG_gdbjit_full && !code_desc.IsLineInfoAvailable()) {
delete lineinfo;
- entries.Remove(code, HashForCodeObject(code));
+ GetEntries()->Remove(code, HashForCodeObject(code));
return;
}
@@ -1464,7 +1531,9 @@ void GDBJITInterface::AddCode(GDBJITInterface::CodeTag tag, Code* code) {
void GDBJITInterface::RemoveCode(Code* code) {
if (!FLAG_gdbjit) return;
- HashMap::Entry* e = entries.Lookup(code, HashForCodeObject(code), false);
+ HashMap::Entry* e = GetEntries()->Lookup(code,
+ HashForCodeObject(code),
+ false);
if (e == NULL) return;
if (IsLineInfoTagged(e->value)) {
@@ -1475,14 +1544,14 @@ void GDBJITInterface::RemoveCode(Code* code) {
DestroyCodeEntry(entry);
}
e->value = NULL;
- entries.Remove(code, HashForCodeObject(code));
+ GetEntries()->Remove(code, HashForCodeObject(code));
}
void GDBJITInterface::RegisterDetailedLineInfo(Code* code,
GDBJITLineInfo* line_info) {
ASSERT(!IsLineInfoTagged(line_info));
- HashMap::Entry* e = entries.Lookup(code, HashForCodeObject(code), true);
+ HashMap::Entry* e = GetEntries()->Lookup(code, HashForCodeObject(code), true);
ASSERT(e->value == NULL);
e->value = TagLineInfo(line_info);
}
diff --git a/deps/v8/src/handles.cc b/deps/v8/src/handles.cc
index 274c34ddeb..d625d644c7 100644
--- a/deps/v8/src/handles.cc
+++ b/deps/v8/src/handles.cc
@@ -290,6 +290,17 @@ Handle<Object> SetLocalPropertyIgnoreAttributes(
}
+void SetLocalPropertyNoThrow(Handle<JSObject> object,
+ Handle<String> key,
+ Handle<Object> value,
+ PropertyAttributes attributes) {
+ ASSERT(!Top::has_pending_exception());
+ CHECK(!SetLocalPropertyIgnoreAttributes(
+ object, key, value, attributes).is_null());
+ CHECK(!Top::has_pending_exception());
+}
+
+
Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
Handle<String> key,
Handle<Object> value,
@@ -808,6 +819,7 @@ static bool CompileLazyHelper(CompilationInfo* info,
ClearExceptionFlag flag) {
// Compile the source information to a code object.
ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled());
+ ASSERT(!Top::has_pending_exception());
bool result = Compiler::CompileLazy(info);
ASSERT(result != Top::has_pending_exception());
if (!result && flag == CLEAR_EXCEPTION) Top::clear_pending_exception();
diff --git a/deps/v8/src/handles.h b/deps/v8/src/handles.h
index aa9d8b9999..d95ca91170 100644
--- a/deps/v8/src/handles.h
+++ b/deps/v8/src/handles.h
@@ -223,6 +223,13 @@ Handle<Object> SetLocalPropertyIgnoreAttributes(
Handle<Object> value,
PropertyAttributes attributes);
+// Used to set local properties on the object we totally control
+// and which therefore has no accessors and alikes.
+void SetLocalPropertyNoThrow(Handle<JSObject> object,
+ Handle<String> key,
+ Handle<Object> value,
+ PropertyAttributes attributes = NONE);
+
Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
Handle<String> key,
Handle<Object> value,
diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc
index 0e3a2b870e..f88ebda53d 100644
--- a/deps/v8/src/heap.cc
+++ b/deps/v8/src/heap.cc
@@ -4128,7 +4128,7 @@ bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
#ifdef DEBUG
void Heap::ZapFromSpace() {
- ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsHeapObject());
+ ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsFailure());
for (Address a = new_space_.FromSpaceLow();
a < new_space_.FromSpaceHigh();
a += kPointerSize) {
diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h
index dcd813b774..f50c3f9ac8 100644
--- a/deps/v8/src/heap.h
+++ b/deps/v8/src/heap.h
@@ -184,6 +184,7 @@ namespace internal {
V(KeyedLoadSpecialized_symbol, "KeyedLoadSpecialized") \
V(KeyedStoreSpecialized_symbol, "KeyedStoreSpecialized") \
V(KeyedLoadPixelArray_symbol, "KeyedLoadPixelArray") \
+ V(KeyedStorePixelArray_symbol, "KeyedStorePixelArray") \
V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
V(illegal_access_symbol, "illegal access") \
V(out_of_memory_symbol, "out-of-memory") \
diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc
index 0ff41ba238..5accc77f0b 100644
--- a/deps/v8/src/hydrogen-instructions.cc
+++ b/deps/v8/src/hydrogen-instructions.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -281,6 +281,33 @@ void HValue::SetOperandAt(int index, HValue* value) {
}
+void HLoadKeyedGeneric::InternalSetOperandAt(int index, HValue* value) {
+ if (index < 2) {
+ operands_[index] = value;
+ } else {
+ context_ = value;
+ }
+}
+
+
+void HStoreKeyedGeneric::InternalSetOperandAt(int index, HValue* value) {
+ if (index < 3) {
+ operands_[index] = value;
+ } else {
+ context_ = value;
+ }
+}
+
+
+void HStoreNamedGeneric::InternalSetOperandAt(int index, HValue* value) {
+ if (index < 2) {
+ operands_[index] = value;
+ } else {
+ context_ = value;
+ }
+}
+
+
void HValue::ReplaceAndDelete(HValue* other) {
ReplaceValue(other);
Delete();
@@ -438,9 +465,16 @@ void HInstruction::PrintTo(StringStream* stream) const {
void HInstruction::Unlink() {
ASSERT(IsLinked());
ASSERT(!IsControlInstruction()); // Must never move control instructions.
+ ASSERT(!IsBlockEntry()); // Doesn't make sense to delete these.
+ ASSERT(previous_ != NULL);
+ previous_->next_ = next_;
+ if (next_ == NULL) {
+ ASSERT(block()->last() == this);
+ block()->set_last(previous_);
+ } else {
+ next_->previous_ = previous_;
+ }
clear_block();
- if (previous_ != NULL) previous_->next_ = next_;
- if (next_ != NULL) next_->previous_ = previous_;
}
@@ -527,26 +561,64 @@ void HInstruction::Verify() {
#endif
-HCall::HCall(int count) : arguments_(Zone::NewArray<HValue*>(count), count) {
- for (int i = 0; i < count; ++i) arguments_[i] = NULL;
- set_representation(Representation::Tagged());
- SetAllSideEffects();
+void HCall::PrintDataTo(StringStream* stream) const {
+ stream->Add("#%d", argument_count());
}
-void HCall::PrintDataTo(StringStream* stream) const {
- stream->Add("(");
- for (int i = 0; i < arguments_.length(); ++i) {
- if (i != 0) stream->Add(", ");
- arguments_.at(i)->PrintNameTo(stream);
+void HUnaryCall::PrintDataTo(StringStream* stream) const {
+ value()->PrintNameTo(stream);
+ stream->Add(" ");
+ HCall::PrintDataTo(stream);
+}
+
+
+void HBinaryCall::PrintDataTo(StringStream* stream) const {
+ first()->PrintNameTo(stream);
+ stream->Add(" ");
+ second()->PrintNameTo(stream);
+ stream->Add(" ");
+ HCall::PrintDataTo(stream);
+}
+
+
+void HCallConstantFunction::PrintDataTo(StringStream* stream) const {
+ if (IsApplyFunction()) {
+ stream->Add("optimized apply ");
+ } else {
+ stream->Add("%o ", function()->shared()->DebugName());
}
- stream->Add(")");
+ HCall::PrintDataTo(stream);
+}
+
+
+void HCallNamed::PrintDataTo(StringStream* stream) const {
+ stream->Add("%o ", *name());
+ HUnaryCall::PrintDataTo(stream);
+}
+
+
+void HCallGlobal::PrintDataTo(StringStream* stream) const {
+ stream->Add("%o ", *name());
+ HUnaryCall::PrintDataTo(stream);
+}
+
+
+void HCallKnownGlobal::PrintDataTo(StringStream* stream) const {
+ stream->Add("%o ", target()->shared()->DebugName());
+ HCall::PrintDataTo(stream);
+}
+
+
+void HCallRuntime::PrintDataTo(StringStream* stream) const {
+ stream->Add("%o ", *name());
+ HCall::PrintDataTo(stream);
}
void HClassOfTest::PrintDataTo(StringStream* stream) const {
stream->Add("class_of_test(");
- value()->PrintTo(stream);
+ value()->PrintNameTo(stream);
stream->Add(", \"%o\")", *class_name());
}
@@ -560,22 +632,6 @@ void HAccessArgumentsAt::PrintDataTo(StringStream* stream) const {
}
-void HCall::SetArgumentAt(int index, HPushArgument* push_argument) {
- push_argument->set_argument_index(index);
- SetOperandAt(index, push_argument);
-}
-
-
-void HCallConstantFunction::PrintDataTo(StringStream* stream) const {
- if (IsApplyFunction()) {
- stream->Add("SPECIAL function: apply");
- } else {
- stream->Add("%s", *(function()->shared()->DebugName()->ToCString()));
- }
- HCall::PrintDataTo(stream);
-}
-
-
void HControlInstruction::PrintDataTo(StringStream* stream) const {
if (FirstSuccessor() != NULL) {
int first_id = FirstSuccessor()->block_id();
@@ -663,14 +719,6 @@ void HTypeofIs::PrintDataTo(StringStream* stream) const {
}
-void HPushArgument::PrintDataTo(StringStream* stream) const {
- HUnaryOperation::PrintDataTo(stream);
- if (argument_index() != -1) {
- stream->Add(" [%d]", argument_index_);
- }
-}
-
-
void HChange::PrintDataTo(StringStream* stream) const {
HUnaryOperation::PrintDataTo(stream);
stream->Add(" %s to %s", from_.Mnemonic(), to_.Mnemonic());
@@ -699,42 +747,19 @@ void HCheckFunction::PrintDataTo(StringStream* stream) const {
}
-void HCallKeyed::PrintDataTo(StringStream* stream) const {
- stream->Add("[");
- key()->PrintNameTo(stream);
- stream->Add("](");
- for (int i = 1; i < arguments_.length(); ++i) {
- if (i != 1) stream->Add(", ");
- arguments_.at(i)->PrintNameTo(stream);
- }
- stream->Add(")");
-}
-
-
-void HCallNamed::PrintDataTo(StringStream* stream) const {
- SmartPointer<char> name_string = name()->ToCString();
- stream->Add("%s ", *name_string);
- HCall::PrintDataTo(stream);
-}
-
-
-void HCallGlobal::PrintDataTo(StringStream* stream) const {
- SmartPointer<char> name_string = name()->ToCString();
- stream->Add("%s ", *name_string);
- HCall::PrintDataTo(stream);
+void HCallStub::PrintDataTo(StringStream* stream) const {
+ stream->Add("%s ",
+ CodeStub::MajorName(major_key_, false));
+ HUnaryCall::PrintDataTo(stream);
}
-void HCallRuntime::PrintDataTo(StringStream* stream) const {
- SmartPointer<char> name_string = name()->ToCString();
- stream->Add("%s ", *name_string);
- HCall::PrintDataTo(stream);
-}
-
-void HCallStub::PrintDataTo(StringStream* stream) const {
- stream->Add("%s(%d)",
- CodeStub::MajorName(major_key_, false),
- argument_count_);
+void HInstanceOf::PrintDataTo(StringStream* stream) const {
+ left()->PrintNameTo(stream);
+ stream->Add(" ");
+ right()->PrintNameTo(stream);
+ stream->Add(" ");
+ context()->PrintNameTo(stream);
}
@@ -900,17 +925,6 @@ void HPhi::AddInput(HValue* value) {
}
-bool HPhi::HasReceiverOperand() {
- for (int i = 0; i < OperandCount(); i++) {
- if (OperandAt(i)->IsParameter() &&
- HParameter::cast(OperandAt(i))->index() == 0) {
- return true;
- }
- }
- return false;
-}
-
-
HValue* HPhi::GetRedundantReplacement() const {
HValue* candidate = NULL;
int count = OperandCount();
@@ -1153,6 +1167,14 @@ void HLoadKeyed::PrintDataTo(StringStream* stream) const {
}
+void HLoadPixelArrayElement::PrintDataTo(StringStream* stream) const {
+ external_pointer()->PrintNameTo(stream);
+ stream->Add("[");
+ key()->PrintNameTo(stream);
+ stream->Add("]");
+}
+
+
void HStoreNamed::PrintDataTo(StringStream* stream) const {
object()->PrintNameTo(stream);
stream->Add(".");
diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h
index f1093a004c..9f5170ca2b 100644
--- a/deps/v8/src/hydrogen-instructions.h
+++ b/deps/v8/src/hydrogen-instructions.h
@@ -48,6 +48,7 @@ class LChunkBuilder;
#define HYDROGEN_ALL_INSTRUCTION_LIST(V) \
V(ArithmeticBinaryOperation) \
+ V(BinaryCall) \
V(BinaryOperation) \
V(BitwiseBinaryOperation) \
V(Call) \
@@ -58,6 +59,7 @@ class LChunkBuilder;
V(Phi) \
V(StoreKeyed) \
V(StoreNamed) \
+ V(UnaryCall) \
V(UnaryControlInstruction) \
V(UnaryOperation) \
HYDROGEN_CONCRETE_INSTRUCTION_LIST(V)
@@ -105,6 +107,7 @@ class LChunkBuilder;
V(EnterInlined) \
V(FixedArrayLength) \
V(FunctionLiteral) \
+ V(GetCachedArrayIndex) \
V(GlobalObject) \
V(GlobalReceiver) \
V(Goto) \
@@ -113,6 +116,7 @@ class LChunkBuilder;
V(IsNull) \
V(IsObject) \
V(IsSmi) \
+ V(IsConstructCall) \
V(HasInstanceType) \
V(HasCachedArrayIndex) \
V(JSArrayLength) \
@@ -126,12 +130,15 @@ class LChunkBuilder;
V(LoadKeyedGeneric) \
V(LoadNamedField) \
V(LoadNamedGeneric) \
+ V(LoadPixelArrayElement) \
+ V(LoadPixelArrayExternalPointer) \
V(Mod) \
V(Mul) \
V(ObjectLiteral) \
V(OsrEntry) \
V(OuterContext) \
V(Parameter) \
+ V(PixelArrayLength) \
V(Power) \
V(PushArgument) \
V(RegExpLiteral) \
@@ -163,6 +170,7 @@ class LChunkBuilder;
V(InobjectFields) \
V(BackingStoreFields) \
V(ArrayElements) \
+ V(PixelArrayElements) \
V(GlobalVars) \
V(Maps) \
V(ArrayLengths) \
@@ -288,6 +296,7 @@ class Representation {
kTagged,
kDouble,
kInteger32,
+ kExternal,
kNumRepresentations
};
@@ -297,6 +306,7 @@ class Representation {
static Representation Tagged() { return Representation(kTagged); }
static Representation Integer32() { return Representation(kInteger32); }
static Representation Double() { return Representation(kDouble); }
+ static Representation External() { return Representation(kExternal); }
bool Equals(const Representation& other) const {
return kind_ == other.kind_;
@@ -307,6 +317,7 @@ class Representation {
bool IsTagged() const { return kind_ == kTagged; }
bool IsInteger32() const { return kind_ == kInteger32; }
bool IsDouble() const { return kind_ == kDouble; }
+ bool IsExternal() const { return kind_ == kExternal; }
bool IsSpecialization() const {
return kind_ == kInteger32 || kind_ == kDouble;
}
@@ -601,9 +612,6 @@ class HValue: public ZoneObject {
virtual HType CalculateInferredType() const;
- // Helper for type conversions used by normal and phi instructions.
- void InsertInputConversion(HInstruction* previous, int index, HType type);
-
#ifdef DEBUG
virtual void Verify() = 0;
#endif
@@ -1040,27 +1048,15 @@ class HLeaveInlined: public HInstruction {
class HPushArgument: public HUnaryOperation {
public:
- explicit HPushArgument(HValue* value)
- : HUnaryOperation(value), argument_index_(-1) {
- set_representation(Representation::Tagged());
- }
+ explicit HPushArgument(HValue* value) : HUnaryOperation(value) { }
virtual Representation RequiredInputRepresentation(int index) const {
return Representation::Tagged();
}
- virtual void PrintDataTo(StringStream* stream) const;
HValue* argument() const { return OperandAt(0); }
- int argument_index() const { return argument_index_; }
- void set_argument_index(int index) {
- ASSERT(argument_index_ == -1 || index == argument_index_);
- argument_index_ = index;
- }
DECLARE_CONCRETE_INSTRUCTION(PushArgument, "push_argument")
-
- private:
- int argument_index_;
};
@@ -1123,36 +1119,80 @@ class HGlobalReceiver: public HUnaryOperation {
class HCall: public HInstruction {
public:
- // Construct a call with uninitialized arguments. The argument count
- // includes the receiver.
- explicit HCall(int count);
+ // The argument count includes the receiver.
+ explicit HCall(int argument_count) : argument_count_(argument_count) {
+ set_representation(Representation::Tagged());
+ SetAllSideEffects();
+ }
virtual HType CalculateInferredType() const { return HType::Tagged(); }
- // TODO(3190496): This needs a cleanup. We don't want the arguments
- // be operands of the call instruction. This results in bad code quality.
- virtual int argument_count() const { return arguments_.length(); }
- virtual int OperandCount() const { return argument_count(); }
- virtual HValue* OperandAt(int index) const { return arguments_[index]; }
- virtual HPushArgument* PushArgumentAt(int index) const {
- return HPushArgument::cast(OperandAt(index));
+ virtual int argument_count() const { return argument_count_; }
+
+ virtual void PrintDataTo(StringStream* stream) const;
+
+ DECLARE_INSTRUCTION(Call)
+
+ private:
+ int argument_count_;
+};
+
+
+class HUnaryCall: public HCall {
+ public:
+ HUnaryCall(HValue* value, int argument_count)
+ : HCall(argument_count), value_(NULL) {
+ SetOperandAt(0, value);
+ }
+
+ virtual void PrintDataTo(StringStream* stream) const;
+
+ HValue* value() const { return value_; }
+
+ virtual int OperandCount() const { return 1; }
+ virtual HValue* OperandAt(int index) const {
+ ASSERT(index == 0);
+ return value_;
}
- virtual HValue* ArgumentAt(int index) const {
- return PushArgumentAt(index)->argument();
+
+ DECLARE_INSTRUCTION(UnaryCall)
+
+ protected:
+ virtual void InternalSetOperandAt(int index, HValue* value) {
+ ASSERT(index == 0);
+ value_ = value;
+ }
+
+ private:
+ HValue* value_;
+};
+
+
+class HBinaryCall: public HCall {
+ public:
+ HBinaryCall(HValue* first, HValue* second, int argument_count)
+ : HCall(argument_count) {
+ SetOperandAt(0, first);
+ SetOperandAt(1, second);
}
- virtual void SetArgumentAt(int index, HPushArgument* push_argument);
virtual void PrintDataTo(StringStream* stream) const;
- DECLARE_INSTRUCTION(Call)
+ HValue* first() const { return operands_[0]; }
+ HValue* second() const { return operands_[1]; }
+
+ virtual int OperandCount() const { return 2; }
+ virtual HValue* OperandAt(int index) const { return operands_[index]; }
+
+ DECLARE_INSTRUCTION(BinaryCall)
protected:
virtual void InternalSetOperandAt(int index, HValue* value) {
- arguments_[index] = value;
+ operands_[index] = value;
}
- int argument_count_;
- Vector<HValue*> arguments_;
+ private:
+ HOperandVector<2> operands_;
};
@@ -1162,6 +1202,7 @@ class HCallConstantFunction: public HCall {
: HCall(argument_count), function_(function) { }
Handle<JSFunction> function() const { return function_; }
+
bool IsApplyFunction() const {
return function_->code() == Builtins::builtin(Builtins::FunctionApply);
}
@@ -1175,42 +1216,32 @@ class HCallConstantFunction: public HCall {
};
-class HCallKeyed: public HCall {
+class HCallKeyed: public HBinaryCall {
public:
- HCallKeyed(HValue* key, int argument_count)
- : HCall(argument_count + 1) {
- SetOperandAt(0, key);
+ HCallKeyed(HValue* context, HValue* key, int argument_count)
+ : HBinaryCall(context, key, argument_count) {
}
virtual Representation RequiredInputRepresentation(int index) const {
return Representation::Tagged();
}
- // TODO(3190496): This is a hack to get an additional operand that
- // is not an argument to work with the current setup. This _needs_ a cleanup.
- // (see HCall)
- virtual void PrintDataTo(StringStream* stream) const;
- HValue* key() const { return OperandAt(0); }
- virtual int argument_count() const { return arguments_.length() - 1; }
- virtual int OperandCount() const { return arguments_.length(); }
- virtual HValue* OperandAt(int index) const { return arguments_[index]; }
- virtual HPushArgument* PushArgumentAt(int index) const {
- return HPushArgument::cast(OperandAt(index + 1));
- }
- virtual void SetArgumentAt(int index, HPushArgument* push_argument) {
- HCall::SetArgumentAt(index + 1, push_argument);
- }
+ HValue* context() const { return first(); }
+ HValue* key() const { return second(); }
DECLARE_CONCRETE_INSTRUCTION(CallKeyed, "call_keyed")
};
-class HCallNamed: public HCall {
+class HCallNamed: public HUnaryCall {
public:
- HCallNamed(Handle<String> name, int argument_count)
- : HCall(argument_count), name_(name) { }
+ HCallNamed(HValue* context, Handle<String> name, int argument_count)
+ : HUnaryCall(context, argument_count), name_(name) {
+ }
+
virtual void PrintDataTo(StringStream* stream) const;
+ HValue* context() const { return value(); }
Handle<String> name() const { return name_; }
DECLARE_CONCRETE_INSTRUCTION(CallNamed, "call_named")
@@ -1220,21 +1251,27 @@ class HCallNamed: public HCall {
};
-class HCallFunction: public HCall {
+class HCallFunction: public HUnaryCall {
public:
- explicit HCallFunction(int argument_count) : HCall(argument_count) { }
+ HCallFunction(HValue* context, int argument_count)
+ : HUnaryCall(context, argument_count) {
+ }
+
+ HValue* context() const { return value(); }
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call_function")
};
-class HCallGlobal: public HCall {
+class HCallGlobal: public HUnaryCall {
public:
- HCallGlobal(Handle<String> name, int argument_count)
- : HCall(argument_count), name_(name) { }
+ HCallGlobal(HValue* context, Handle<String> name, int argument_count)
+ : HUnaryCall(context, argument_count), name_(name) {
+ }
virtual void PrintDataTo(StringStream* stream) const;
+ HValue* context() const { return value(); }
Handle<String> name() const { return name_; }
DECLARE_CONCRETE_INSTRUCTION(CallGlobal, "call_global")
@@ -1246,10 +1283,11 @@ class HCallGlobal: public HCall {
class HCallKnownGlobal: public HCall {
public:
- HCallKnownGlobal(Handle<JSFunction> target,
- int argument_count)
+ HCallKnownGlobal(Handle<JSFunction> target, int argument_count)
: HCall(argument_count), target_(target) { }
+ virtual void PrintDataTo(StringStream* stream) const;
+
Handle<JSFunction> target() const { return target_; }
DECLARE_CONCRETE_INSTRUCTION(CallKnownGlobal, "call_known_global")
@@ -1259,15 +1297,18 @@ class HCallKnownGlobal: public HCall {
};
-class HCallNew: public HCall {
+class HCallNew: public HBinaryCall {
public:
- explicit HCallNew(int argument_count) : HCall(argument_count) { }
+ HCallNew(HValue* context, HValue* constructor, int argument_count)
+ : HBinaryCall(context, constructor, argument_count) {
+ }
virtual Representation RequiredInputRepresentation(int index) const {
return Representation::Tagged();
}
- HValue* constructor() const { return ArgumentAt(0); }
+ HValue* context() const { return first(); }
+ HValue* constructor() const { return second(); }
DECLARE_CONCRETE_INSTRUCTION(CallNew, "call_new")
};
@@ -1333,6 +1374,27 @@ class HFixedArrayLength: public HUnaryOperation {
};
+class HPixelArrayLength: public HUnaryOperation {
+ public:
+ explicit HPixelArrayLength(HValue* value) : HUnaryOperation(value) {
+ set_representation(Representation::Integer32());
+ // The result of this instruction is idempotent as long as its inputs don't
+ // change. The length of a pixel array cannot change once set, so it's not
+ // necessary to introduce a kDependsOnArrayLengths or any other dependency.
+ SetFlag(kUseGVN);
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) const {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(PixelArrayLength, "pixel_array_length")
+
+ protected:
+ virtual bool DataEquals(HValue* other) const { return true; }
+};
+
+
class HBitNot: public HUnaryOperation {
public:
explicit HBitNot(HValue* value) : HUnaryOperation(value) {
@@ -1451,6 +1513,30 @@ class HLoadElements: public HUnaryOperation {
};
+class HLoadPixelArrayExternalPointer: public HUnaryOperation {
+ public:
+ explicit HLoadPixelArrayExternalPointer(HValue* value)
+ : HUnaryOperation(value) {
+ set_representation(Representation::External());
+ // The result of this instruction is idempotent as long as its inputs don't
+ // change. The external array of a pixel array elements object cannot
+ // change once set, so it's no necessary to introduce any additional
+ // dependencies on top of the inputs.
+ SetFlag(kUseGVN);
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) const {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayExternalPointer,
+ "load-pixel-array-external-pointer")
+
+ protected:
+ virtual bool DataEquals(HValue* other) const { return true; }
+};
+
+
class HCheckMap: public HUnaryOperation {
public:
HCheckMap(HValue* value, Handle<Map> map)
@@ -1701,7 +1787,7 @@ class HPhi: public HValue {
HValue* GetRedundantReplacement() const;
void AddInput(HValue* value);
- bool HasReceiverOperand();
+ bool IsReceiver() { return merged_index_ == 0; }
int merged_index() const { return merged_index_; }
@@ -1860,7 +1946,6 @@ class HBinaryOperation: public HInstruction {
operands_[index] = value;
}
- private:
HOperandVector<2> operands_;
};
@@ -2072,7 +2157,11 @@ class HCompare: public HBinaryOperation {
}
void SetInputRepresentation(Representation r);
- virtual bool EmitAtUses() const { return uses()->length() <= 1; }
+
+ virtual bool EmitAtUses() const {
+ return !HasSideEffects() && (uses()->length() <= 1);
+ }
+
virtual Representation RequiredInputRepresentation(int index) const {
return input_representation_;
}
@@ -2110,7 +2199,10 @@ class HCompareJSObjectEq: public HBinaryOperation {
SetFlag(kUseGVN);
}
- virtual bool EmitAtUses() const { return uses()->length() <= 1; }
+ virtual bool EmitAtUses() const {
+ return !HasSideEffects() && (uses()->length() <= 1);
+ }
+
virtual Representation RequiredInputRepresentation(int index) const {
return Representation::Tagged();
}
@@ -2129,7 +2221,11 @@ class HUnaryPredicate: public HUnaryOperation {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
}
- virtual bool EmitAtUses() const { return uses()->length() <= 1; }
+
+ virtual bool EmitAtUses() const {
+ return !HasSideEffects() && (uses()->length() <= 1);
+ }
+
virtual Representation RequiredInputRepresentation(int index) const {
return Representation::Tagged();
}
@@ -2179,6 +2275,24 @@ class HIsSmi: public HUnaryPredicate {
};
+class HIsConstructCall: public HInstruction {
+ public:
+ HIsConstructCall() {
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ }
+
+ virtual bool EmitAtUses() const {
+ return !HasSideEffects() && (uses()->length() <= 1);
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsConstructCall, "is_construct_call")
+
+ protected:
+ virtual bool DataEquals(HValue* other) const { return true; }
+};
+
+
class HHasInstanceType: public HUnaryPredicate {
public:
HHasInstanceType(HValue* value, InstanceType type)
@@ -2218,6 +2332,17 @@ class HHasCachedArrayIndex: public HUnaryPredicate {
};
+class HGetCachedArrayIndex: public HUnaryPredicate {
+ public:
+ explicit HGetCachedArrayIndex(HValue* value) : HUnaryPredicate(value) { }
+
+ DECLARE_CONCRETE_INSTRUCTION(GetCachedArrayIndex, "get_cached_array_index")
+
+ protected:
+ virtual bool DataEquals(HValue* other) const { return true; }
+};
+
+
class HClassOfTest: public HUnaryPredicate {
public:
HClassOfTest(HValue* value, Handle<String> class_name)
@@ -2261,20 +2386,42 @@ class HTypeofIs: public HUnaryPredicate {
};
-class HInstanceOf: public HBinaryOperation {
+class HInstanceOf: public HInstruction {
public:
- HInstanceOf(HValue* left, HValue* right) : HBinaryOperation(left, right) {
+ HInstanceOf(HValue* context, HValue* left, HValue* right) {
+ SetOperandAt(0, context);
+ SetOperandAt(1, left);
+ SetOperandAt(2, right);
set_representation(Representation::Tagged());
SetAllSideEffects();
}
- virtual bool EmitAtUses() const { return uses()->length() <= 1; }
+ HValue* context() const { return operands_[0]; }
+ HValue* left() const { return operands_[1]; }
+ HValue* right() const { return operands_[2]; }
+
+ virtual bool EmitAtUses() const {
+ return !HasSideEffects() && (uses()->length() <= 1);
+ }
virtual Representation RequiredInputRepresentation(int index) const {
return Representation::Tagged();
}
+ virtual void PrintDataTo(StringStream* stream) const;
+
+ virtual int OperandCount() const { return 3; }
+ virtual HValue* OperandAt(int index) const { return operands_[index]; }
+
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance_of")
+
+ protected:
+ virtual void InternalSetOperandAt(int index, HValue* value) {
+ operands_[index] = value;
+ }
+
+ private:
+ HOperandVector<3> operands_;
};
@@ -2543,18 +2690,17 @@ class HParameter: public HInstruction {
};
-class HCallStub: public HInstruction {
+class HCallStub: public HUnaryCall {
public:
- HCallStub(CodeStub::Major major_key, int argument_count)
- : major_key_(major_key),
- argument_count_(argument_count),
+ HCallStub(HValue* context, CodeStub::Major major_key, int argument_count)
+ : HUnaryCall(context, argument_count),
+ major_key_(major_key),
transcendental_type_(TranscendentalCache::kNumberOfCaches) {
- set_representation(Representation::Tagged());
- SetAllSideEffects();
}
CodeStub::Major major_key() { return major_key_; }
- int argument_count() { return argument_count_; }
+
+ HValue* context() const { return value(); }
void set_transcendental_type(TranscendentalCache::Type transcendental_type) {
transcendental_type_ = transcendental_type;
@@ -2562,13 +2708,13 @@ class HCallStub: public HInstruction {
TranscendentalCache::Type transcendental_type() {
return transcendental_type_;
}
+
virtual void PrintDataTo(StringStream* stream) const;
DECLARE_CONCRETE_INSTRUCTION(CallStub, "call_stub")
private:
CodeStub::Major major_key_;
- int argument_count_;
TranscendentalCache::Type transcendental_type_;
};
@@ -2746,15 +2892,16 @@ class HLoadNamedField: public HUnaryOperation {
};
-class HLoadNamedGeneric: public HUnaryOperation {
+class HLoadNamedGeneric: public HBinaryOperation {
public:
- HLoadNamedGeneric(HValue* object, Handle<Object> name)
- : HUnaryOperation(object), name_(name) {
+ HLoadNamedGeneric(HValue* context, HValue* object, Handle<Object> name)
+ : HBinaryOperation(context, object), name_(name) {
set_representation(Representation::Tagged());
SetAllSideEffects();
}
- HValue* object() const { return OperandAt(0); }
+ HValue* context() const { return OperandAt(0); }
+ HValue* object() const { return OperandAt(1); }
Handle<Object> name() const { return name_; }
virtual Representation RequiredInputRepresentation(int index) const {
@@ -2829,19 +2976,67 @@ class HLoadKeyedFastElement: public HLoadKeyed {
};
+class HLoadPixelArrayElement: public HBinaryOperation {
+ public:
+ HLoadPixelArrayElement(HValue* external_elements, HValue* key)
+ : HBinaryOperation(external_elements, key) {
+ set_representation(Representation::Integer32());
+ SetFlag(kDependsOnPixelArrayElements);
+ // Native code could change the pixel array.
+ SetFlag(kDependsOnCalls);
+ SetFlag(kUseGVN);
+ }
+
+ virtual void PrintDataTo(StringStream* stream) const;
+
+ virtual Representation RequiredInputRepresentation(int index) const {
+ // The key is supposed to be Integer32, but the base pointer
+ // for the element load is a naked pointer.
+ return (index == 1) ? Representation::Integer32()
+ : Representation::External();
+ }
+
+ HValue* external_pointer() const { return OperandAt(0); }
+ HValue* key() const { return OperandAt(1); }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayElement,
+ "load_pixel_array_element")
+
+ protected:
+ virtual bool DataEquals(HValue* other) const { return true; }
+};
+
+
class HLoadKeyedGeneric: public HLoadKeyed {
public:
- HLoadKeyedGeneric(HValue* obj, HValue* key) : HLoadKeyed(obj, key) {
+ HLoadKeyedGeneric(HContext* context, HValue* obj, HValue* key)
+ : HLoadKeyed(obj, key), context_(NULL) {
+ SetOperandAt(2, context);
SetAllSideEffects();
}
+ HValue* context() const { return context_; }
+ HValue* object() const { return operands_[0]; }
+ HValue* key() const { return operands_[1]; }
+
+ virtual int OperandCount() const { return 3; }
+ virtual HValue* OperandAt(int index) const {
+ return (index < 2) ? operands_[index] : context_;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load_keyed_generic")
+
+ protected:
+ virtual void InternalSetOperandAt(int index, HValue* value);
+
+ private:
+ HValue* context_;
};
class HStoreNamed: public HBinaryOperation {
public:
- HStoreNamed(HValue* obj, Handle<Object> name, HValue* val)
+ HStoreNamed(HValue* obj, Handle<String> name, HValue* val)
: HBinaryOperation(obj, val), name_(name) {
}
@@ -2852,21 +3047,21 @@ class HStoreNamed: public HBinaryOperation {
virtual void PrintDataTo(StringStream* stream) const;
HValue* object() const { return OperandAt(0); }
- Handle<Object> name() const { return name_; }
+ Handle<String> name() const { return name_; }
HValue* value() const { return OperandAt(1); }
void set_value(HValue* value) { SetOperandAt(1, value); }
DECLARE_INSTRUCTION(StoreNamed)
private:
- Handle<Object> name_;
+ Handle<String> name_;
};
class HStoreNamedField: public HStoreNamed {
public:
HStoreNamedField(HValue* obj,
- Handle<Object> name,
+ Handle<String> name,
HValue* val,
bool in_object,
int offset)
@@ -2905,12 +3100,32 @@ class HStoreNamedField: public HStoreNamed {
class HStoreNamedGeneric: public HStoreNamed {
public:
- HStoreNamedGeneric(HValue* obj, Handle<Object> name, HValue* val)
- : HStoreNamed(obj, name, val) {
+ HStoreNamedGeneric(HValue* context,
+ HValue* object,
+ Handle<String> name,
+ HValue* value)
+ : HStoreNamed(object, name, value), context_(NULL) {
+ SetOperandAt(2, context);
SetAllSideEffects();
}
+ HValue* context() const { return context_; }
+ HValue* object() const { return operands_[0]; }
+ HValue* value() const { return operands_[1]; }
+
+ virtual int OperandCount() const { return 3; }
+
+ virtual HValue* OperandAt(int index) const {
+ return (index < 2) ? operands_[index] : context_;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store_named_generic")
+
+ protected:
+ virtual void InternalSetOperandAt(int index, HValue* value);
+
+ private:
+ HValue* context_;
};
@@ -2945,7 +3160,6 @@ class HStoreKeyed: public HInstruction {
operands_[index] = value;
}
- private:
HOperandVector<3> operands_;
};
@@ -2970,12 +3184,33 @@ class HStoreKeyedFastElement: public HStoreKeyed {
class HStoreKeyedGeneric: public HStoreKeyed {
public:
- HStoreKeyedGeneric(HValue* obj, HValue* key, HValue* val)
- : HStoreKeyed(obj, key, val) {
+ HStoreKeyedGeneric(HValue* context,
+ HValue* object,
+ HValue* key,
+ HValue* value)
+ : HStoreKeyed(object, key, value), context_(NULL) {
+ SetOperandAt(3, context);
SetAllSideEffects();
}
+ HValue* context() const { return context_; }
+ HValue* object() const { return operands_[0]; }
+ HValue* key() const { return operands_[1]; }
+ HValue* value() const { return operands_[2]; }
+
+ virtual int OperandCount() const { return 4; }
+
+ virtual HValue* OperandAt(int index) const {
+ return (index < 3) ? operands_[index] : context_;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store_keyed_generic")
+
+ protected:
+ virtual void InternalSetOperandAt(int index, HValue* value);
+
+ private:
+ HValue* context_;
};
@@ -3077,22 +3312,36 @@ class HArrayLiteral: public HMaterializedLiteral {
class HObjectLiteral: public HMaterializedLiteral {
public:
- HObjectLiteral(Handle<FixedArray> constant_properties,
+ HObjectLiteral(HValue* context,
+ Handle<FixedArray> constant_properties,
bool fast_elements,
int literal_index,
int depth)
: HMaterializedLiteral(literal_index, depth),
+ context_(NULL),
constant_properties_(constant_properties),
- fast_elements_(fast_elements) {}
+ fast_elements_(fast_elements) {
+ SetOperandAt(0, context);
+ }
+ HValue* context() const { return context_; }
Handle<FixedArray> constant_properties() const {
return constant_properties_;
}
bool fast_elements() const { return fast_elements_; }
+ virtual int OperandCount() const { return 1; }
+ virtual HValue* OperandAt(int index) const { return context_; }
+
DECLARE_CONCRETE_INSTRUCTION(ObjectLiteral, "object_literal")
+ protected:
+ virtual void InternalSetOperandAt(int index, HValue* value) {
+ context_ = value;
+ }
+
private:
+ HValue* context_;
Handle<FixedArray> constant_properties_;
bool fast_elements_;
};
diff --git a/deps/v8/src/hydrogen.cc b/deps/v8/src/hydrogen.cc
index 4044f7fff1..3ebd580adc 100644
--- a/deps/v8/src/hydrogen.cc
+++ b/deps/v8/src/hydrogen.cc
@@ -65,6 +65,7 @@ HBasicBlock::HBasicBlock(HGraph* graph)
first_instruction_index_(-1),
last_instruction_index_(-1),
deleted_phis_(4),
+ parent_loop_header_(NULL),
is_inline_return_target_(false) {
}
@@ -105,18 +106,10 @@ void HBasicBlock::AddInstruction(HInstruction* instr) {
if (first_ == NULL) {
HBlockEntry* entry = new HBlockEntry();
entry->InitializeAsFirst(this);
- first_ = entry;
+ first_ = last_ = entry;
}
- instr->InsertAfter(GetLastInstruction());
-}
-
-
-HInstruction* HBasicBlock::GetLastInstruction() {
- if (end_ != NULL) return end_->previous();
- if (first_ == NULL) return NULL;
- if (last_ == NULL) last_ = first_;
- while (last_->next() != NULL) last_ = last_->next();
- return last_;
+ instr->InsertAfter(last_);
+ last_ = instr;
}
@@ -177,7 +170,7 @@ void HBasicBlock::SetJoinId(int id) {
for (int i = 0; i < length; i++) {
HBasicBlock* predecessor = predecessors_[i];
ASSERT(predecessor->end()->IsGoto());
- HSimulate* simulate = HSimulate::cast(predecessor->GetLastInstruction());
+ HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
// We only need to verify the ID once.
ASSERT(i != 0 ||
predecessor->last_environment()->closure()->shared()
@@ -293,20 +286,6 @@ void HBasicBlock::Verify() {
// Check that every block is finished.
ASSERT(IsFinished());
ASSERT(block_id() >= 0);
-
- // Verify that all blocks targetting a branch target, have the same boolean
- // value on top of their expression stack.
- if (!cond().is_null()) {
- ASSERT(predecessors()->length() > 0);
- for (int i = 1; i < predecessors()->length(); i++) {
- HBasicBlock* pred = predecessors()->at(i);
- HValue* top = pred->last_environment()->Top();
- ASSERT(top->IsConstant());
- Object* a = *HConstant::cast(top)->handle();
- Object* b = *cond();
- ASSERT(a == b);
- }
- }
}
#endif
@@ -870,13 +849,11 @@ void HGraph::EliminateRedundantPhis() {
}
uses_to_replace.Rewind(0);
block->RemovePhi(phi);
- } else if (phi->HasNoUses() &&
- !phi->HasReceiverOperand() &&
- FLAG_eliminate_dead_phis) {
- // We can't eliminate phis that have the receiver as an operand
- // because in case of throwing an error we need the correct
- // receiver value in the environment to construct a corrent
- // stack trace.
+ } else if (FLAG_eliminate_dead_phis && phi->HasNoUses() &&
+ !phi->IsReceiver()) {
+ // We can't eliminate phis in the receiver position in the environment
+ // because in case of throwing an error we need this value to
+ // construct a stack trace.
block->RemovePhi(phi);
block->RecordDeletedPhi(phi->merged_index());
}
@@ -1815,17 +1792,15 @@ void HGraph::InsertRepresentationChangeForUse(HValue* value,
bool is_truncating) {
// Insert the representation change right before its use. For phi-uses we
// insert at the end of the corresponding predecessor.
- HBasicBlock* insert_block = use->block();
+ HInstruction* next = NULL;
if (use->IsPhi()) {
int index = 0;
while (use->OperandAt(index) != value) ++index;
- insert_block = insert_block->predecessors()->at(index);
+ next = use->block()->predecessors()->at(index)->end();
+ } else {
+ next = HInstruction::cast(use);
}
- HInstruction* next = (insert_block == use->block())
- ? HInstruction::cast(use)
- : insert_block->end();
-
// For constants we try to make the representation change at compile
// time. When a representation change is not possible without loss of
// information we treat constants like normal instructions and insert the
@@ -2197,10 +2172,8 @@ void HGraphBuilder::VisitForControl(Expression* expr,
}
-HValue* HGraphBuilder::VisitArgument(Expression* expr) {
+void HGraphBuilder::VisitArgument(Expression* expr) {
VisitForValue(expr);
- if (HasStackOverflow() || !subgraph()->HasExit()) return NULL;
- return environment()->Top();
}
@@ -2319,29 +2292,15 @@ void HGraphBuilder::PushAndAdd(HInstruction* instr) {
}
-void HGraphBuilder::PushArgumentsForStubCall(int argument_count) {
- const int kMaxStubArguments = 4;
- ASSERT_GE(kMaxStubArguments, argument_count);
- // Push the arguments on the stack.
- HValue* arguments[kMaxStubArguments];
- for (int i = argument_count - 1; i >= 0; i--) {
- arguments[i] = Pop();
- }
- for (int i = 0; i < argument_count; i++) {
- AddInstruction(new HPushArgument(arguments[i]));
- }
-}
-
-
-void HGraphBuilder::ProcessCall(HCall* call) {
- for (int i = call->argument_count() - 1; i >= 0; --i) {
- HValue* value = Pop();
- HPushArgument* push = new HPushArgument(value);
- call->SetArgumentAt(i, push);
+void HGraphBuilder::PreProcessCall(HCall* call) {
+ int count = call->argument_count();
+ ZoneList<HValue*> arguments(count);
+ for (int i = 0; i < count; ++i) {
+ arguments.Add(Pop());
}
- for (int i = 0; i < call->argument_count(); ++i) {
- AddInstruction(call->PushArgumentAt(i));
+ while (!arguments.is_empty()) {
+ AddInstruction(new HPushArgument(arguments.RemoveLast()));
}
}
@@ -2952,6 +2911,9 @@ void HGraphBuilder::LookupGlobalPropertyCell(Variable* var,
if (is_store && lookup->IsReadOnly()) {
BAILOUT("read-only global variable");
}
+ if (lookup->holder() != *global) {
+ BAILOUT("global property on prototype of global object");
+ }
}
@@ -3021,7 +2983,10 @@ void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
- HObjectLiteral* literal = (new HObjectLiteral(expr->constant_properties(),
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HObjectLiteral* literal = (new HObjectLiteral(context,
+ expr->constant_properties(),
expr->fast_elements(),
expr->literal_index(),
expr->depth()));
@@ -3048,7 +3013,9 @@ void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
VISIT_FOR_VALUE(value);
HValue* value = Pop();
Handle<String> name = Handle<String>::cast(key->handle());
- AddInstruction(new HStoreNamedGeneric(literal, name, value));
+ HStoreNamedGeneric* store =
+ new HStoreNamedGeneric(context, literal, name, value);
+ AddInstruction(store);
AddSimulate(key->id());
} else {
VISIT_FOR_EFFECT(value);
@@ -3111,53 +3078,47 @@ void HGraphBuilder::VisitCatchExtensionObject(CatchExtensionObject* expr) {
}
-HBasicBlock* HGraphBuilder::BuildTypeSwitch(ZoneMapList* maps,
- ZoneList<HSubgraph*>* subgraphs,
- HValue* receiver,
+HBasicBlock* HGraphBuilder::BuildTypeSwitch(HValue* receiver,
+ ZoneMapList* maps,
+ ZoneList<HSubgraph*>* body_graphs,
+ HSubgraph* default_graph,
int join_id) {
- ASSERT(subgraphs->length() == (maps->length() + 1));
-
- // Build map compare subgraphs for all but the first map.
- ZoneList<HSubgraph*> map_compare_subgraphs(maps->length() - 1);
- for (int i = maps->length() - 1; i > 0; --i) {
- HSubgraph* subgraph = CreateBranchSubgraph(environment());
- SubgraphScope scope(this, subgraph);
- HSubgraph* else_subgraph =
- (i == (maps->length() - 1))
- ? subgraphs->last()
- : map_compare_subgraphs.last();
- HCompareMap* compare = new HCompareMap(receiver,
- maps->at(i),
- subgraphs->at(i)->entry_block(),
- else_subgraph->entry_block());
- current_subgraph_->exit_block()->Finish(compare);
- map_compare_subgraphs.Add(subgraph);
- }
-
- // Generate first map check to end the current block.
+ ASSERT(maps->length() == body_graphs->length());
+ HBasicBlock* join_block = graph()->CreateBasicBlock();
AddInstruction(new HCheckNonSmi(receiver));
- HSubgraph* else_subgraph =
- (maps->length() == 1) ? subgraphs->at(1) : map_compare_subgraphs.last();
- HCompareMap* compare = new HCompareMap(receiver,
- Handle<Map>(maps->first()),
- subgraphs->first()->entry_block(),
- else_subgraph->entry_block());
- current_subgraph_->exit_block()->Finish(compare);
-
- // Join all the call subgraphs in a new basic block and make
- // this basic block the current basic block.
- HBasicBlock* join_block = graph_->CreateBasicBlock();
- for (int i = 0; i < subgraphs->length(); ++i) {
- HSubgraph* subgraph = subgraphs->at(i);
- if (subgraph->HasExit()) {
+
+ for (int i = 0; i < maps->length(); ++i) {
+ // Build the branches, connect all the target subgraphs to the join
+ // block. Use the default as a target of the last branch.
+ HSubgraph* if_true = body_graphs->at(i);
+ HSubgraph* if_false = (i == maps->length() - 1)
+ ? default_graph
+ : CreateBranchSubgraph(environment());
+ HCompareMap* compare =
+ new HCompareMap(receiver,
+ maps->at(i),
+ if_true->entry_block(),
+ if_false->entry_block());
+ subgraph()->exit_block()->Finish(compare);
+
+ if (if_true->HasExit()) {
// In an effect context the value of the type switch is not needed.
// There is no need to merge it at the join block only to discard it.
- HBasicBlock* subgraph_exit = subgraph->exit_block();
if (ast_context()->IsEffect()) {
- subgraph_exit->last_environment()->Drop(1);
+ if_true->exit_block()->last_environment()->Drop(1);
}
- subgraph_exit->Goto(join_block);
+ if_true->exit_block()->Goto(join_block);
+ }
+
+ subgraph()->set_exit_block(if_false->exit_block());
+ }
+
+ // Connect the default if necessary.
+ if (subgraph()->HasExit()) {
+ if (ast_context()->IsEffect()) {
+ environment()->Drop(1);
}
+ subgraph()->exit_block()->Goto(join_block);
}
if (join_block->predecessors()->is_empty()) return NULL;
@@ -3228,7 +3189,9 @@ HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object,
Handle<String> name,
HValue* value) {
- return new HStoreNamedGeneric(object, name, value);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ return new HStoreNamedGeneric(context, object, name, value);
}
@@ -3261,7 +3224,7 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
Handle<String> name) {
int number_of_types = Min(types->length(), kMaxStorePolymorphism);
ZoneMapList maps(number_of_types);
- ZoneList<HSubgraph*> subgraphs(number_of_types + 1);
+ ZoneList<HSubgraph*> subgraphs(number_of_types);
bool needs_generic = (types->length() > kMaxStorePolymorphism);
// Build subgraphs for each of the specific maps.
@@ -3273,7 +3236,6 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
Handle<Map> map = types->at(i);
LookupResult lookup;
if (ComputeStoredField(map, name, &lookup)) {
- maps.Add(map);
HSubgraph* subgraph = CreateBranchSubgraph(environment());
SubgraphScope scope(this, subgraph);
HInstruction* instr =
@@ -3281,6 +3243,7 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
Push(value);
instr->set_position(expr->position());
AddInstruction(instr);
+ maps.Add(map);
subgraphs.Add(subgraph);
} else {
needs_generic = true;
@@ -3290,7 +3253,7 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
// If none of the properties were named fields we generate a
// generic store.
if (maps.length() == 0) {
- HInstruction* instr = new HStoreNamedGeneric(object, name, value);
+ HInstruction* instr = BuildStoreNamedGeneric(object, name, value);
Push(value);
instr->set_position(expr->position());
AddInstruction(instr);
@@ -3298,22 +3261,20 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
ast_context()->ReturnValue(Pop());
} else {
// Build subgraph for generic store through IC.
- {
- HSubgraph* subgraph = CreateBranchSubgraph(environment());
- SubgraphScope scope(this, subgraph);
+ HSubgraph* default_graph = CreateBranchSubgraph(environment());
+ { SubgraphScope scope(this, default_graph);
if (!needs_generic && FLAG_deoptimize_uncommon_cases) {
- subgraph->FinishExit(new HDeoptimize());
+ default_graph->FinishExit(new HDeoptimize());
} else {
- HInstruction* instr = new HStoreNamedGeneric(object, name, value);
+ HInstruction* instr = BuildStoreNamedGeneric(object, name, value);
Push(value);
instr->set_position(expr->position());
AddInstruction(instr);
}
- subgraphs.Add(subgraph);
}
HBasicBlock* new_exit_block =
- BuildTypeSwitch(&maps, &subgraphs, object, expr->id());
+ BuildTypeSwitch(object, &maps, &subgraphs, default_graph, expr->id());
subgraph()->set_exit_block(new_exit_block);
// In an effect context, we did not materialized the value in the
// predecessor environments so there's no need to handle it here.
@@ -3354,7 +3315,7 @@ void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
return;
} else {
- instr = new HStoreNamedGeneric(object, name, value);
+ instr = BuildStoreNamedGeneric(object, name, value);
}
} else {
@@ -3414,10 +3375,6 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
BinaryOperation* operation = expr->binary_operation();
if (var != NULL) {
- if (!var->is_global() && !var->IsStackAllocated()) {
- BAILOUT("non-stack/non-global in compound assignment");
- }
-
VISIT_FOR_VALUE(operation);
if (var->is_global()) {
@@ -3425,8 +3382,16 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
Top(),
expr->position(),
expr->AssignmentId());
- } else {
+ } else if (var->IsStackAllocated()) {
Bind(var, Top());
+ } else if (var->IsContextSlot()) {
+ HValue* context = BuildContextChainWalk(var);
+ int index = var->AsSlot()->index();
+ HStoreContextSlot* instr = new HStoreContextSlot(context, index, Top());
+ AddInstruction(instr);
+ if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
+ } else {
+ BAILOUT("compound assignment to lookup slot");
}
ast_context()->ReturnValue(Pop());
@@ -3474,7 +3439,6 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
bool is_fast_elements = prop->IsMonomorphic() &&
prop->GetMonomorphicReceiverType()->has_fast_elements();
-
HInstruction* load = is_fast_elements
? BuildLoadKeyedFastElement(obj, key, prop)
: BuildLoadKeyedGeneric(obj, key);
@@ -3589,7 +3553,7 @@ void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
Handle<String> name) {
int number_of_types = Min(types->length(), kMaxLoadPolymorphism);
ZoneMapList maps(number_of_types);
- ZoneList<HSubgraph*> subgraphs(number_of_types + 1);
+ ZoneList<HSubgraph*> subgraphs(number_of_types);
bool needs_generic = (types->length() > kMaxLoadPolymorphism);
// Build subgraphs for each of the specific maps.
@@ -3602,7 +3566,6 @@ void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
LookupResult lookup;
map->LookupInDescriptors(NULL, *name, &lookup);
if (lookup.IsProperty() && lookup.type() == FIELD) {
- maps.Add(map);
HSubgraph* subgraph = CreateBranchSubgraph(environment());
SubgraphScope scope(this, subgraph);
HLoadNamedField* instr =
@@ -3610,6 +3573,7 @@ void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
instr->set_position(expr->position());
instr->ClearFlag(HValue::kUseGVN); // Don't do GVN on polymorphic loads.
PushAndAdd(instr);
+ maps.Add(map);
subgraphs.Add(subgraph);
} else {
needs_generic = true;
@@ -3624,21 +3588,19 @@ void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
ast_context()->ReturnInstruction(instr, expr->id());
} else {
// Build subgraph for generic load through IC.
- {
- HSubgraph* subgraph = CreateBranchSubgraph(environment());
- SubgraphScope scope(this, subgraph);
+ HSubgraph* default_graph = CreateBranchSubgraph(environment());
+ { SubgraphScope scope(this, default_graph);
if (!needs_generic && FLAG_deoptimize_uncommon_cases) {
- subgraph->FinishExit(new HDeoptimize());
+ default_graph->FinishExit(new HDeoptimize());
} else {
HInstruction* instr = BuildLoadNamedGeneric(object, expr);
instr->set_position(expr->position());
PushAndAdd(instr);
}
- subgraphs.Add(subgraph);
}
HBasicBlock* new_exit_block =
- BuildTypeSwitch(&maps, &subgraphs, object, expr->id());
+ BuildTypeSwitch(object, &maps, &subgraphs, default_graph, expr->id());
subgraph()->set_exit_block(new_exit_block);
// In an effect context, we did not materialized the value in the
// predecessor environments so there's no need to handle it here.
@@ -3677,7 +3639,9 @@ HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* obj,
Property* expr) {
ASSERT(expr->key()->IsPropertyName());
Handle<Object> name = expr->key()->AsLiteral()->handle();
- return new HLoadNamedGeneric(obj, name);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ return new HLoadNamedGeneric(context, obj, name);
}
@@ -3706,7 +3670,9 @@ HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj,
HInstruction* HGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
HValue* key) {
- return new HLoadKeyedGeneric(object, key);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ return new HLoadKeyedGeneric(context, object, key);
}
@@ -3734,10 +3700,34 @@ HInstruction* HGraphBuilder::BuildLoadKeyedFastElement(HValue* object,
}
+HInstruction* HGraphBuilder::BuildLoadKeyedPixelArrayElement(HValue* object,
+ HValue* key,
+ Property* expr) {
+ ASSERT(!expr->key()->IsPropertyName() && expr->IsMonomorphic());
+ AddInstruction(new HCheckNonSmi(object));
+ Handle<Map> map = expr->GetMonomorphicReceiverType();
+ ASSERT(!map->has_fast_elements());
+ ASSERT(map->has_pixel_array_elements());
+ AddInstruction(new HCheckMap(object, map));
+ HLoadElements* elements = new HLoadElements(object);
+ AddInstruction(elements);
+ HInstruction* length = AddInstruction(new HPixelArrayLength(elements));
+ AddInstruction(new HBoundsCheck(key, length));
+ HLoadPixelArrayExternalPointer* external_elements =
+ new HLoadPixelArrayExternalPointer(elements);
+ AddInstruction(external_elements);
+ HLoadPixelArrayElement* pixel_array_value =
+ new HLoadPixelArrayElement(external_elements, key);
+ return pixel_array_value;
+}
+
+
HInstruction* HGraphBuilder::BuildStoreKeyedGeneric(HValue* object,
HValue* key,
HValue* value) {
- return new HStoreKeyedGeneric(object, key, value);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ return new HStoreKeyedGeneric(context, object, key, value);
}
@@ -3841,12 +3831,20 @@ void HGraphBuilder::VisitProperty(Property* expr) {
HValue* key = Pop();
HValue* obj = Pop();
- bool is_fast_elements = expr->IsMonomorphic() &&
- expr->GetMonomorphicReceiverType()->has_fast_elements();
-
- instr = is_fast_elements
- ? BuildLoadKeyedFastElement(obj, key, expr)
- : BuildLoadKeyedGeneric(obj, key);
+ if (expr->IsMonomorphic()) {
+ Handle<Map> receiver_type(expr->GetMonomorphicReceiverType());
+ // An object has either fast elements or pixel array elements, but never
+ // both. Pixel array maps that are assigned to pixel array elements are
+ // always created with the fast elements flag cleared.
+ if (receiver_type->has_pixel_array_elements()) {
+ instr = BuildLoadKeyedPixelArrayElement(obj, key, expr);
+ } else if (receiver_type->has_fast_elements()) {
+ instr = BuildLoadKeyedFastElement(obj, key, expr);
+ }
+ }
+ if (instr == NULL) {
+ instr = BuildLoadKeyedGeneric(obj, key);
+ }
}
instr->set_position(expr->position());
ast_context()->ReturnInstruction(instr, expr->id());
@@ -3879,7 +3877,7 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
int argument_count = expr->arguments()->length() + 1; // Plus receiver.
int number_of_types = Min(types->length(), kMaxCallPolymorphism);
ZoneMapList maps(number_of_types);
- ZoneList<HSubgraph*> subgraphs(number_of_types + 1);
+ ZoneList<HSubgraph*> subgraphs(number_of_types);
bool needs_generic = (types->length() > kMaxCallPolymorphism);
// Build subgraphs for each of the specific maps.
@@ -3890,7 +3888,6 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
for (int i = 0; i < number_of_types; ++i) {
Handle<Map> map = types->at(i);
if (expr->ComputeTarget(map, name)) {
- maps.Add(map);
HSubgraph* subgraph = CreateBranchSubgraph(environment());
SubgraphScope scope(this, subgraph);
AddCheckConstantFunction(expr, receiver, map, false);
@@ -3904,9 +3901,10 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
CHECK_BAILOUT;
HCall* call = new HCallConstantFunction(expr->target(), argument_count);
call->set_position(expr->position());
- ProcessCall(call);
+ PreProcessCall(call);
PushAndAdd(call);
}
+ maps.Add(map);
subgraphs.Add(subgraph);
} else {
needs_generic = true;
@@ -3916,28 +3914,30 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
// If we couldn't compute the target for any of the maps just perform an
// IC call.
if (maps.length() == 0) {
- HCall* call = new HCallNamed(name, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HCall* call = new HCallNamed(context, name, argument_count);
call->set_position(expr->position());
- ProcessCall(call);
+ PreProcessCall(call);
ast_context()->ReturnInstruction(call, expr->id());
} else {
// Build subgraph for generic call through IC.
- {
- HSubgraph* subgraph = CreateBranchSubgraph(environment());
- SubgraphScope scope(this, subgraph);
+ HSubgraph* default_graph = CreateBranchSubgraph(environment());
+ { SubgraphScope scope(this, default_graph);
if (!needs_generic && FLAG_deoptimize_uncommon_cases) {
- subgraph->FinishExit(new HDeoptimize());
+ default_graph->FinishExit(new HDeoptimize());
} else {
- HCall* call = new HCallNamed(name, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HCall* call = new HCallNamed(context, name, argument_count);
call->set_position(expr->position());
- ProcessCall(call);
+ PreProcessCall(call);
PushAndAdd(call);
}
- subgraphs.Add(subgraph);
}
HBasicBlock* new_exit_block =
- BuildTypeSwitch(&maps, &subgraphs, receiver, expr->id());
+ BuildTypeSwitch(receiver, &maps, &subgraphs, default_graph, expr->id());
subgraph()->set_exit_block(new_exit_block);
// In an effect context, we did not materialized the value in the
// predecessor environments so there's no need to handle it here.
@@ -4004,6 +4004,9 @@ bool HGraphBuilder::TryInline(Call* expr) {
CompilationInfo inner_info(target);
if (!ParserApi::Parse(&inner_info) ||
!Scope::Analyze(&inner_info)) {
+ if (Top::has_pending_exception()) {
+ SetStackOverflow();
+ }
return false;
}
FunctionLiteral* function = inner_info.function();
@@ -4348,9 +4351,11 @@ void HGraphBuilder::VisitCall(Call* expr) {
VisitArgumentList(expr->arguments());
CHECK_BAILOUT;
- call = new HCallKeyed(key, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ call = new HCallKeyed(context, key, argument_count);
call->set_position(expr->position());
- ProcessCall(call);
+ PreProcessCall(call);
Drop(1); // Key.
ast_context()->ReturnInstruction(call, expr->id());
return;
@@ -4362,7 +4367,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
if (TryCallApply(expr)) return;
CHECK_BAILOUT;
- HValue* receiver = VisitArgument(prop->obj());
+ VisitArgument(prop->obj());
CHECK_BAILOUT;
VisitArgumentList(expr->arguments());
CHECK_BAILOUT;
@@ -4372,6 +4377,8 @@ void HGraphBuilder::VisitCall(Call* expr) {
expr->RecordTypeFeedback(oracle());
ZoneMapList* types = expr->GetReceiverTypes();
+ HValue* receiver =
+ environment()->ExpressionStackAt(expr->arguments()->length());
if (expr->IsMonomorphic()) {
Handle<Map> receiver_map =
(types == NULL) ? Handle<Map>::null() : types->first();
@@ -4387,7 +4394,9 @@ void HGraphBuilder::VisitCall(Call* expr) {
// When the target has a custom call IC generator, use the IC,
// because it is likely to generate better code. Also use the
// IC when a primitive receiver check is required.
- call = new HCallNamed(name, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ call = new HCallNamed(context, name, argument_count);
} else {
AddCheckConstantFunction(expr, receiver, receiver_map, true);
@@ -4416,7 +4425,9 @@ void HGraphBuilder::VisitCall(Call* expr) {
return;
} else {
- call = new HCallNamed(name, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ call = new HCallNamed(context, name, argument_count);
}
} else {
@@ -4486,7 +4497,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
VisitArgumentList(expr->arguments());
CHECK_BAILOUT;
- call = new HCallGlobal(var->name(), argument_count);
+ call = new HCallGlobal(context, var->name(), argument_count);
}
} else {
@@ -4498,12 +4509,12 @@ void HGraphBuilder::VisitCall(Call* expr) {
VisitArgumentList(expr->arguments());
CHECK_BAILOUT;
- call = new HCallFunction(argument_count);
+ call = new HCallFunction(context, argument_count);
}
}
call->set_position(expr->position());
- ProcessCall(call);
+ PreProcessCall(call);
ast_context()->ReturnInstruction(call, expr->id());
}
@@ -4516,10 +4527,16 @@ void HGraphBuilder::VisitCallNew(CallNew* expr) {
VisitArgumentList(expr->arguments());
CHECK_BAILOUT;
- int argument_count = expr->arguments()->length() + 1; // Plus constructor.
- HCall* call = new HCallNew(argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+
+ // The constructor is both an operand to the instruction and an argument
+ // to the construct call.
+ int arg_count = expr->arguments()->length() + 1; // Plus constructor.
+ HValue* constructor = environment()->ExpressionStackAt(arg_count - 1);
+ HCall* call = new HCallNew(context, constructor, arg_count);
call->set_position(expr->position());
- ProcessCall(call);
+ PreProcessCall(call);
ast_context()->ReturnInstruction(call, expr->id());
}
@@ -4573,7 +4590,7 @@ void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
ASSERT(function->intrinsic_type == Runtime::RUNTIME);
HCall* call = new HCallRuntime(name, expr->function(), argument_count);
call->set_position(RelocInfo::kNoPosition);
- ProcessCall(call);
+ PreProcessCall(call);
ast_context()->ReturnInstruction(call, expr->id());
}
}
@@ -4600,12 +4617,18 @@ void HGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
// The subexpression does not have side effects.
ast_context()->ReturnValue(graph()->GetConstantFalse());
} else if (prop != NULL) {
- VISIT_FOR_VALUE(prop->obj());
- VISIT_FOR_VALUE(prop->key());
- HValue* key = Pop();
- HValue* obj = Pop();
- ast_context()->ReturnInstruction(new HDeleteProperty(obj, key),
- expr->id());
+ if (prop->is_synthetic()) {
+ // Result of deleting parameters is false, even when they rewrite
+ // to accesses on the arguments object.
+ ast_context()->ReturnValue(graph()->GetConstantFalse());
+ } else {
+ VISIT_FOR_VALUE(prop->obj());
+ VISIT_FOR_VALUE(prop->key());
+ HValue* key = Pop();
+ HValue* obj = Pop();
+ HDeleteProperty* instr = new HDeleteProperty(obj, key);
+ ast_context()->ReturnInstruction(instr, expr->id());
+ }
} else if (var->is_global()) {
BAILOUT("delete with global variable");
} else {
@@ -4685,10 +4708,6 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
bool inc = expr->op() == Token::INC;
if (var != NULL) {
- if (!var->is_global() && !var->IsStackAllocated()) {
- BAILOUT("non-stack/non-global variable in count operation");
- }
-
VISIT_FOR_VALUE(target);
// Match the full code generator stack by simulating an extra stack
@@ -4704,9 +4723,16 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
after,
expr->position(),
expr->AssignmentId());
- } else {
- ASSERT(var->IsStackAllocated());
+ } else if (var->IsStackAllocated()) {
Bind(var, after);
+ } else if (var->IsContextSlot()) {
+ HValue* context = BuildContextChainWalk(var);
+ int index = var->AsSlot()->index();
+ HStoreContextSlot* instr = new HStoreContextSlot(context, index, after);
+ AddInstruction(instr);
+ if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
+ } else {
+ BAILOUT("lookup variable in count operation");
}
Drop(has_extra ? 2 : 1);
ast_context()->ReturnValue(expr->is_postfix() ? before : after);
@@ -4785,7 +4811,7 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
HInstruction* store = is_fast_elements
? BuildStoreKeyedFastElement(obj, key, after, prop)
- : new HStoreKeyedGeneric(obj, key, after);
+ : BuildStoreKeyedGeneric(obj, key, after);
AddInstruction(store);
// Drop the key from the bailout environment. Overwrite the receiver
@@ -5042,7 +5068,9 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
// If the target is not null we have found a known global function that is
// assumed to stay the same for this instanceof.
if (target.is_null()) {
- instr = new HInstanceOf(left, right);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ instr = new HInstanceOf(context, left, right);
} else {
AddInstruction(new HCheckFunction(right, target));
instr = new HInstanceOfKnownGlobal(left, target);
@@ -5185,9 +5213,10 @@ void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
}
- // Support for construct call checks.
+// Support for construct call checks.
void HGraphBuilder::GenerateIsConstructCall(int argument_count, int ast_id) {
- BAILOUT("inlined runtime function: IsConstructCall");
+ ASSERT(argument_count == 0);
+ ast_context()->ReturnInstruction(new HIsConstructCall, ast_id);
}
@@ -5251,8 +5280,11 @@ void HGraphBuilder::GenerateStringCharFromCode(int argument_count,
// Fast support for string.charAt(n) and string[n].
void HGraphBuilder::GenerateStringCharAt(int argument_count, int ast_id) {
ASSERT_EQ(2, argument_count);
- PushArgumentsForStubCall(argument_count);
- HCallStub* result = new HCallStub(CodeStub::StringCharAt, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HCallStub* result =
+ new HCallStub(context, CodeStub::StringCharAt, argument_count);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
@@ -5281,8 +5313,11 @@ void HGraphBuilder::GenerateRandomHeapNumber(int argument_count, int ast_id) {
// Fast support for StringAdd.
void HGraphBuilder::GenerateStringAdd(int argument_count, int ast_id) {
ASSERT_EQ(2, argument_count);
- PushArgumentsForStubCall(argument_count);
- HCallStub* result = new HCallStub(CodeStub::StringAdd, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HCallStub* result =
+ new HCallStub(context, CodeStub::StringAdd, argument_count);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
@@ -5290,8 +5325,11 @@ void HGraphBuilder::GenerateStringAdd(int argument_count, int ast_id) {
// Fast support for SubString.
void HGraphBuilder::GenerateSubString(int argument_count, int ast_id) {
ASSERT_EQ(3, argument_count);
- PushArgumentsForStubCall(argument_count);
- HCallStub* result = new HCallStub(CodeStub::SubString, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HCallStub* result =
+ new HCallStub(context, CodeStub::SubString, argument_count);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
@@ -5299,8 +5337,11 @@ void HGraphBuilder::GenerateSubString(int argument_count, int ast_id) {
// Fast support for StringCompare.
void HGraphBuilder::GenerateStringCompare(int argument_count, int ast_id) {
ASSERT_EQ(2, argument_count);
- PushArgumentsForStubCall(argument_count);
- HCallStub* result = new HCallStub(CodeStub::StringCompare, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HCallStub* result =
+ new HCallStub(context, CodeStub::StringCompare, argument_count);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
@@ -5308,8 +5349,11 @@ void HGraphBuilder::GenerateStringCompare(int argument_count, int ast_id) {
// Support for direct calls from JavaScript to native RegExp code.
void HGraphBuilder::GenerateRegExpExec(int argument_count, int ast_id) {
ASSERT_EQ(4, argument_count);
- PushArgumentsForStubCall(argument_count);
- HCallStub* result = new HCallStub(CodeStub::RegExpExec, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HCallStub* result =
+ new HCallStub(context, CodeStub::RegExpExec, argument_count);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
@@ -5318,9 +5362,11 @@ void HGraphBuilder::GenerateRegExpExec(int argument_count, int ast_id) {
void HGraphBuilder::GenerateRegExpConstructResult(int argument_count,
int ast_id) {
ASSERT_EQ(3, argument_count);
- PushArgumentsForStubCall(argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
HCallStub* result =
- new HCallStub(CodeStub::RegExpConstructResult, argument_count);
+ new HCallStub(context, CodeStub::RegExpConstructResult, argument_count);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
@@ -5334,8 +5380,11 @@ void HGraphBuilder::GenerateGetFromCache(int argument_count, int ast_id) {
// Fast support for number to string.
void HGraphBuilder::GenerateNumberToString(int argument_count, int ast_id) {
ASSERT_EQ(1, argument_count);
- PushArgumentsForStubCall(argument_count);
- HCallStub* result = new HCallStub(CodeStub::NumberToString, argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
+ HCallStub* result =
+ new HCallStub(context, CodeStub::NumberToString, argument_count);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
@@ -5366,30 +5415,36 @@ void HGraphBuilder::GenerateMathPow(int argument_count, int ast_id) {
void HGraphBuilder::GenerateMathSin(int argument_count, int ast_id) {
ASSERT_EQ(1, argument_count);
- PushArgumentsForStubCall(argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
HCallStub* result =
- new HCallStub(CodeStub::TranscendentalCache, argument_count);
+ new HCallStub(context, CodeStub::TranscendentalCache, argument_count);
result->set_transcendental_type(TranscendentalCache::SIN);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
void HGraphBuilder::GenerateMathCos(int argument_count, int ast_id) {
ASSERT_EQ(1, argument_count);
- PushArgumentsForStubCall(argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
HCallStub* result =
- new HCallStub(CodeStub::TranscendentalCache, argument_count);
+ new HCallStub(context, CodeStub::TranscendentalCache, argument_count);
result->set_transcendental_type(TranscendentalCache::COS);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
void HGraphBuilder::GenerateMathLog(int argument_count, int ast_id) {
ASSERT_EQ(1, argument_count);
- PushArgumentsForStubCall(argument_count);
+ HContext* context = new HContext;
+ AddInstruction(context);
HCallStub* result =
- new HCallStub(CodeStub::TranscendentalCache, argument_count);
+ new HCallStub(context, CodeStub::TranscendentalCache, argument_count);
result->set_transcendental_type(TranscendentalCache::LOG);
+ PreProcessCall(result);
ast_context()->ReturnInstruction(result, ast_id);
}
@@ -5408,7 +5463,10 @@ void HGraphBuilder::GenerateIsRegExpEquivalent(int argument_count,
void HGraphBuilder::GenerateGetCachedArrayIndex(int argument_count,
int ast_id) {
- BAILOUT("inlined runtime function: GetCachedArrayIndex");
+ ASSERT(argument_count == 1);
+ HValue* value = Pop();
+ HGetCachedArrayIndex* result = new HGetCachedArrayIndex(value);
+ ast_context()->ReturnInstruction(result, ast_id);
}
diff --git a/deps/v8/src/hydrogen.h b/deps/v8/src/hydrogen.h
index 0178f3f85b..c911b6c1fc 100644
--- a/deps/v8/src/hydrogen.h
+++ b/deps/v8/src/hydrogen.h
@@ -60,6 +60,8 @@ class HBasicBlock: public ZoneObject {
HGraph* graph() const { return graph_; }
const ZoneList<HPhi*>* phis() const { return &phis_; }
HInstruction* first() const { return first_; }
+ HInstruction* last() const { return last_; }
+ void set_last(HInstruction* instr) { last_ = instr; }
HInstruction* GetLastInstruction();
HControlInstruction* end() const { return end_; }
HLoopInformation* loop_information() const { return loop_information_; }
@@ -103,16 +105,14 @@ class HBasicBlock: public ZoneObject {
void ClearEnvironment() { last_environment_ = NULL; }
bool HasEnvironment() const { return last_environment_ != NULL; }
void UpdateEnvironment(HEnvironment* env) { last_environment_ = env; }
- HBasicBlock* parent_loop_header() const {
- if (!HasParentLoopHeader()) return NULL;
- return parent_loop_header_.get();
- }
+ HBasicBlock* parent_loop_header() const { return parent_loop_header_; }
void set_parent_loop_header(HBasicBlock* block) {
- parent_loop_header_.set(block);
+ ASSERT(parent_loop_header_ == NULL);
+ parent_loop_header_ = block;
}
- bool HasParentLoopHeader() const { return parent_loop_header_.is_set(); }
+ bool HasParentLoopHeader() const { return parent_loop_header_ != NULL; }
void SetJoinId(int id);
@@ -136,9 +136,6 @@ class HBasicBlock: public ZoneObject {
bool IsInlineReturnTarget() const { return is_inline_return_target_; }
void MarkAsInlineReturnTarget() { is_inline_return_target_ = true; }
- Handle<Object> cond() { return cond_; }
- void set_cond(Handle<Object> value) { cond_ = value; }
-
#ifdef DEBUG
void Verify();
#endif
@@ -153,7 +150,7 @@ class HBasicBlock: public ZoneObject {
HGraph* graph_;
ZoneList<HPhi*> phis_;
HInstruction* first_;
- HInstruction* last_; // Last non-control instruction of the block.
+ HInstruction* last_;
HControlInstruction* end_;
HLoopInformation* loop_information_;
ZoneList<HBasicBlock*> predecessors_;
@@ -166,9 +163,8 @@ class HBasicBlock: public ZoneObject {
int first_instruction_index_;
int last_instruction_index_;
ZoneList<int> deleted_phis_;
- SetOncePointer<HBasicBlock> parent_loop_header_;
+ HBasicBlock* parent_loop_header_;
bool is_inline_return_target_;
- Handle<Object> cond_;
};
@@ -706,19 +702,17 @@ class HGraphBuilder: public AstVisitor {
HBasicBlock* true_block,
HBasicBlock* false_block);
- // Visit an argument and wrap it in a PushArgument instruction.
- HValue* VisitArgument(Expression* expr);
+ // Visit an argument subexpression.
+ void VisitArgument(Expression* expr);
void VisitArgumentList(ZoneList<Expression*>* arguments);
void AddPhi(HPhi* phi);
void PushAndAdd(HInstruction* instr);
- void PushArgumentsForStubCall(int argument_count);
-
// Remove the arguments from the bailout environment and emit instructions
// to push them as outgoing parameters.
- void ProcessCall(HCall* call);
+ void PreProcessCall(HCall* call);
void AssumeRepresentation(HValue* value, Representation r);
static Representation ToRepresentation(TypeInfo info);
@@ -791,6 +785,9 @@ class HGraphBuilder: public AstVisitor {
HInstruction* BuildLoadKeyedFastElement(HValue* object,
HValue* key,
Property* expr);
+ HInstruction* BuildLoadKeyedPixelArrayElement(HValue* object,
+ HValue* key,
+ Property* expr);
HInstruction* BuildLoadKeyedGeneric(HValue* object,
HValue* key);
@@ -831,9 +828,10 @@ class HGraphBuilder: public AstVisitor {
bool smi_and_map_check);
- HBasicBlock* BuildTypeSwitch(ZoneMapList* maps,
- ZoneList<HSubgraph*>* subgraphs,
- HValue* receiver,
+ HBasicBlock* BuildTypeSwitch(HValue* receiver,
+ ZoneMapList* maps,
+ ZoneList<HSubgraph*>* body_graphs,
+ HSubgraph* default_graph,
int join_id);
TypeFeedbackOracle* oracle_;
diff --git a/deps/v8/src/ia32/assembler-ia32.cc b/deps/v8/src/ia32/assembler-ia32.cc
index 552d7b5eea..6652df27cf 100644
--- a/deps/v8/src/ia32/assembler-ia32.cc
+++ b/deps/v8/src/ia32/assembler-ia32.cc
@@ -2559,6 +2559,19 @@ void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t offset) {
}
+void Assembler::pinsrd(XMMRegister dst, const Operand& src, int8_t offset) {
+ ASSERT(CpuFeatures::IsEnabled(SSE4_1));
+ EnsureSpace ensure_space(this);
+ last_pc_ = pc_;
+ EMIT(0x66);
+ EMIT(0x0F);
+ EMIT(0x3A);
+ EMIT(0x22);
+ emit_sse_operand(dst, src);
+ EMIT(offset);
+}
+
+
void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
Register ireg = { reg.code() };
emit_operand(ireg, adr);
@@ -2594,8 +2607,8 @@ void Assembler::RecordDebugBreakSlot() {
}
-void Assembler::RecordComment(const char* msg) {
- if (FLAG_code_comments) {
+void Assembler::RecordComment(const char* msg, bool force) {
+ if (FLAG_code_comments || force) {
EnsureSpace ensure_space(this);
RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
}
diff --git a/deps/v8/src/ia32/assembler-ia32.h b/deps/v8/src/ia32/assembler-ia32.h
index 20446b0085..568b4d84fe 100644
--- a/deps/v8/src/ia32/assembler-ia32.h
+++ b/deps/v8/src/ia32/assembler-ia32.h
@@ -30,7 +30,7 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// A light-weight IA32 Assembler.
@@ -64,30 +64,14 @@ namespace internal {
// and best performance in optimized code.
//
struct Register {
- static const int kNumAllocatableRegisters = 5;
+ static const int kNumAllocatableRegisters = 6;
static const int kNumRegisters = 8;
- static int ToAllocationIndex(Register reg) {
- ASSERT(reg.code() < 4 || reg.code() == 7);
- return (reg.code() == 7) ? 4 : reg.code();
- }
+ static inline const char* AllocationIndexToString(int index);
- static Register FromAllocationIndex(int index) {
- ASSERT(index >= 0 && index < kNumAllocatableRegisters);
- return (index == 4) ? from_code(7) : from_code(index);
- }
+ static inline int ToAllocationIndex(Register reg);
- static const char* AllocationIndexToString(int index) {
- ASSERT(index >= 0 && index < kNumAllocatableRegisters);
- const char* const names[] = {
- "eax",
- "ecx",
- "edx",
- "ebx",
- "edi"
- };
- return names[index];
- }
+ static inline Register FromAllocationIndex(int index);
static Register from_code(int code) {
Register r = { code };
@@ -110,6 +94,7 @@ struct Register {
int code_;
};
+
const Register eax = { 0 };
const Register ecx = { 1 };
const Register edx = { 2 };
@@ -121,6 +106,26 @@ const Register edi = { 7 };
const Register no_reg = { -1 };
+inline const char* Register::AllocationIndexToString(int index) {
+ ASSERT(index >= 0 && index < kNumAllocatableRegisters);
+ // This is the mapping of allocation indices to registers.
+ const char* const kNames[] = { "eax", "ecx", "edx", "ebx", "esi", "edi" };
+ return kNames[index];
+}
+
+
+inline int Register::ToAllocationIndex(Register reg) {
+ ASSERT(reg.is_valid() && !reg.is(esp) && !reg.is(ebp));
+ return (reg.code() >= 6) ? reg.code() - 2 : reg.code();
+}
+
+
+inline Register Register::FromAllocationIndex(int index) {
+ ASSERT(index >= 0 && index < kNumAllocatableRegisters);
+ return (index >= 4) ? from_code(index + 2) : from_code(index);
+}
+
+
struct XMMRegister {
static const int kNumAllocatableRegisters = 7;
static const int kNumRegisters = 8;
@@ -928,6 +933,7 @@ class Assembler : public Malloced {
void psrlq(XMMRegister dst, XMMRegister src);
void pshufd(XMMRegister dst, XMMRegister src, int8_t shuffle);
void pextrd(const Operand& dst, XMMRegister src, int8_t offset);
+ void pinsrd(XMMRegister dst, const Operand& src, int8_t offset);
// Parallel XMM operations.
void movntdqa(XMMRegister src, const Operand& dst);
@@ -951,8 +957,9 @@ class Assembler : public Malloced {
void RecordDebugBreakSlot();
// Record a comment relocation entry that can be used by a disassembler.
- // Use --code-comments to enable.
- void RecordComment(const char* msg);
+ // Use --code-comments to enable, or provide "force = true" flag to always
+ // write a comment.
+ void RecordComment(const char* msg, bool force = false);
// Writes a single byte or word of data in the code stream. Used for
// inline tables, e.g., jump-tables.
diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc
index 0a3e093056..f15fd1cd84 100644
--- a/deps/v8/src/ia32/builtins-ia32.cc
+++ b/deps/v8/src/ia32/builtins-ia32.cc
@@ -589,6 +589,13 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// Change context eagerly in case we need the global receiver.
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+ // Do not transform the receiver for strict mode functions.
+ __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ test_b(FieldOperand(ebx, SharedFunctionInfo::kStrictModeByteOffset),
+ 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
+ __ j(not_equal, &shift_arguments);
+
+ // Compute the receiver in non-strict mode.
__ mov(ebx, Operand(esp, eax, times_4, 0)); // First argument.
__ test(ebx, Immediate(kSmiTagMask));
__ j(zero, &convert_to_object);
@@ -736,6 +743,14 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Compute the receiver.
Label call_to_object, use_global_receiver, push_receiver;
__ mov(ebx, Operand(ebp, 3 * kPointerSize));
+
+ // Do not transform the receiver for strict mode functions.
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
+ 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
+ __ j(not_equal, &push_receiver);
+
+ // Compute the receiver in non-strict mode.
__ test(ebx, Immediate(kSmiTagMask));
__ j(zero, &call_to_object);
__ cmp(ebx, Factory::null_value());
diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc
index cfee9709b7..6331a6e2ea 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.cc
+++ b/deps/v8/src/ia32/code-stubs-ia32.cc
@@ -3887,7 +3887,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(&Counters::regexp_entry_native, 1);
static const int kRegExpExecuteArguments = 7;
- __ PrepareCallCFunction(kRegExpExecuteArguments, ecx);
+ __ EnterApiExitFrame(kRegExpExecuteArguments);
// Argument 7: Indicate that this is a direct call from JavaScript.
__ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
@@ -3932,7 +3932,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Locate the code entry and call it.
__ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
- __ CallCFunction(edx, kRegExpExecuteArguments);
+ __ call(Operand(edx));
+
+ // Drop arguments and come back to JS mode.
+ __ LeaveApiExitFrame();
// Check the result.
Label success;
@@ -3949,12 +3952,30 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
ExternalReference pending_exception(Top::k_pending_exception_address);
- __ mov(eax,
+ __ mov(edx,
Operand::StaticVariable(ExternalReference::the_hole_value_location()));
- __ cmp(eax, Operand::StaticVariable(pending_exception));
+ __ mov(eax, Operand::StaticVariable(pending_exception));
+ __ cmp(edx, Operand(eax));
__ j(equal, &runtime);
+ // For exception, throw the exception again.
+
+ // Clear the pending exception variable.
+ __ mov(Operand::StaticVariable(pending_exception), edx);
+
+ // Special handling of termination exceptions which are uncatchable
+ // by javascript code.
+ __ cmp(eax, Factory::termination_exception());
+ Label throw_termination_exception;
+ __ j(equal, &throw_termination_exception);
+
+ // Handle normal exception by following handler chain.
+ __ Throw(eax);
+
+ __ bind(&throw_termination_exception);
+ __ ThrowUncatchable(TERMINATION, eax);
+
__ bind(&failure);
- // For failure and exception return null.
+ // For failure to match, return null.
__ mov(Operand(eax), Factory::null_value());
__ ret(4 * kPointerSize);
@@ -4628,34 +4649,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
- // eax holds the exception.
-
- // Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
-
- // Drop the sp to the top of the handler.
- ExternalReference handler_address(Top::k_handler_address);
- __ mov(esp, Operand::StaticVariable(handler_address));
-
- // Restore next handler and frame pointer, discard handler state.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
- __ pop(Operand::StaticVariable(handler_address));
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
- __ pop(ebp);
- __ pop(edx); // Remove state.
-
- // Before returning we restore the context from the frame pointer if
- // not NULL. The frame pointer is NULL in the exception handler of
- // a JS entry frame.
- __ Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
- NearLabel skip;
- __ cmp(ebp, 0);
- __ j(equal, &skip, not_taken);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- __ bind(&skip);
-
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
- __ ret(0);
+ __ Throw(eax);
}
@@ -4723,6 +4717,23 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
__ test(ecx, Immediate(kFailureTagMask));
__ j(zero, &failure_returned, not_taken);
+ ExternalReference pending_exception_address(Top::k_pending_exception_address);
+
+ // Check that there is no pending exception, otherwise we
+ // should have returned some failure value.
+ if (FLAG_debug_code) {
+ __ push(edx);
+ __ mov(edx, Operand::StaticVariable(
+ ExternalReference::the_hole_value_location()));
+ NearLabel okay;
+ __ cmp(edx, Operand::StaticVariable(pending_exception_address));
+ // Cannot use check here as it attempts to generate call into runtime.
+ __ j(equal, &okay);
+ __ int3();
+ __ bind(&okay);
+ __ pop(edx);
+ }
+
// Exit the JavaScript to C++ exit frame.
__ LeaveExitFrame(save_doubles_);
__ ret(0);
@@ -4741,7 +4752,6 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
__ j(equal, throw_out_of_memory_exception);
// Retrieve the pending exception and clear the variable.
- ExternalReference pending_exception_address(Top::k_pending_exception_address);
__ mov(eax, Operand::StaticVariable(pending_exception_address));
__ mov(edx,
Operand::StaticVariable(ExternalReference::the_hole_value_location()));
@@ -4762,52 +4772,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
UncatchableExceptionType type) {
- // Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
-
- // Drop sp to the top stack handler.
- ExternalReference handler_address(Top::k_handler_address);
- __ mov(esp, Operand::StaticVariable(handler_address));
-
- // Unwind the handlers until the ENTRY handler is found.
- NearLabel loop, done;
- __ bind(&loop);
- // Load the type of the current stack handler.
- const int kStateOffset = StackHandlerConstants::kStateOffset;
- __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
- __ j(equal, &done);
- // Fetch the next handler in the list.
- const int kNextOffset = StackHandlerConstants::kNextOffset;
- __ mov(esp, Operand(esp, kNextOffset));
- __ jmp(&loop);
- __ bind(&done);
-
- // Set the top handler address to next handler past the current ENTRY handler.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
- __ pop(Operand::StaticVariable(handler_address));
-
- if (type == OUT_OF_MEMORY) {
- // Set external caught exception to false.
- ExternalReference external_caught(Top::k_external_caught_exception_address);
- __ mov(eax, false);
- __ mov(Operand::StaticVariable(external_caught), eax);
-
- // Set pending exception and eax to out of memory exception.
- ExternalReference pending_exception(Top::k_pending_exception_address);
- __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
- __ mov(Operand::StaticVariable(pending_exception), eax);
- }
-
- // Clear the context pointer.
- __ Set(esi, Immediate(0));
-
- // Restore fp from handler and discard handler state.
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
- __ pop(ebp);
- __ pop(edx); // State.
-
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
- __ ret(0);
+ __ ThrowUncatchable(type, eax);
}
@@ -6543,9 +6508,19 @@ void GenerateFastPixelArrayLoad(MacroAssembler* masm,
__ mov(untagged_key, key);
__ SmiUntag(untagged_key);
- // Verify that the receiver has pixel array elements.
__ mov(elements, FieldOperand(receiver, JSObject::kElementsOffset));
- __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
+ // By passing NULL as not_pixel_array, callers signal that they have already
+ // verified that the receiver has pixel array elements.
+ if (not_pixel_array != NULL) {
+ __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
+ } else {
+ if (FLAG_debug_code) {
+ // Map check should have already made sure that elements is a pixel array.
+ __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
+ Immediate(Factory::pixel_array_map()));
+ __ Assert(equal, "Elements isn't a pixel array");
+ }
+ }
// Key must be in range.
__ cmp(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
@@ -6559,6 +6534,90 @@ void GenerateFastPixelArrayLoad(MacroAssembler* masm,
}
+// Stores an indexed element into a pixel array, clamping the stored value.
+void GenerateFastPixelArrayStore(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register value,
+ Register elements,
+ Register scratch1,
+ bool load_elements_from_receiver,
+ Label* key_not_smi,
+ Label* value_not_smi,
+ Label* not_pixel_array,
+ Label* out_of_range) {
+ // Register use:
+ // receiver - holds the receiver and is unchanged unless the
+ // store succeeds.
+ // key - holds the key (must be a smi) and is unchanged.
+ // value - holds the value (must be a smi) and is unchanged.
+ // elements - holds the element object of the receiver on entry if
+ // load_elements_from_receiver is false, otherwise used
+ // internally to store the pixel arrays elements and
+ // external array pointer.
+ //
+ // receiver, key and value remain unmodified until it's guaranteed that the
+ // store will succeed.
+ Register external_pointer = elements;
+ Register untagged_key = scratch1;
+ Register untagged_value = receiver; // Only set once success guaranteed.
+
+ // Fetch the receiver's elements if the caller hasn't already done so.
+ if (load_elements_from_receiver) {
+ __ mov(elements, FieldOperand(receiver, JSObject::kElementsOffset));
+ }
+
+ // By passing NULL as not_pixel_array, callers signal that they have already
+ // verified that the receiver has pixel array elements.
+ if (not_pixel_array != NULL) {
+ __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
+ } else {
+ if (FLAG_debug_code) {
+ // Map check should have already made sure that elements is a pixel array.
+ __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
+ Immediate(Factory::pixel_array_map()));
+ __ Assert(equal, "Elements isn't a pixel array");
+ }
+ }
+
+ // Some callers already have verified that the key is a smi. key_not_smi is
+ // set to NULL as a sentinel for that case. Otherwise, add an explicit check
+ // to ensure the key is a smi must be added.
+ if (key_not_smi != NULL) {
+ __ JumpIfNotSmi(key, key_not_smi);
+ } else {
+ if (FLAG_debug_code) {
+ __ AbortIfNotSmi(key);
+ }
+ }
+
+ // Key must be a smi and it must be in range.
+ __ mov(untagged_key, key);
+ __ SmiUntag(untagged_key);
+ __ cmp(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
+ __ j(above_equal, out_of_range); // unsigned check handles negative keys.
+
+ // Value must be a smi.
+ __ JumpIfNotSmi(value, value_not_smi);
+ __ mov(untagged_value, value);
+ __ SmiUntag(untagged_value);
+
+ { // Clamp the value to [0..255].
+ NearLabel done;
+ __ test(untagged_value, Immediate(0xFFFFFF00));
+ __ j(zero, &done);
+ __ setcc(negative, untagged_value); // 1 if negative, 0 if positive.
+ __ dec_b(untagged_value); // 0 if negative, 255 if positive.
+ __ bind(&done);
+ }
+
+ __ mov(external_pointer,
+ FieldOperand(elements, PixelArray::kExternalPointerOffset));
+ __ mov_b(Operand(external_pointer, untagged_key, times_1, 0), untagged_value);
+ __ ret(0); // Return value in eax.
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/deps/v8/src/ia32/code-stubs-ia32.h b/deps/v8/src/ia32/code-stubs-ia32.h
index 2064574ce8..ae36f9959e 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.h
+++ b/deps/v8/src/ia32/code-stubs-ia32.h
@@ -490,14 +490,14 @@ class NumberToStringStub: public CodeStub {
};
-// Generate code the to load an element from a pixel array. The receiver is
-// assumed to not be a smi and to have elements, the caller must guarantee this
-// precondition. If the receiver does not have elements that are pixel arrays,
-// the generated code jumps to not_pixel_array. If key is not a smi, then the
-// generated code branches to key_not_smi. Callers can specify NULL for
-// key_not_smi to signal that a smi check has already been performed on key so
-// that the smi check is not generated . If key is not a valid index within the
-// bounds of the pixel array, the generated code jumps to out_of_range.
+// Generate code to load an element from a pixel array. The receiver is assumed
+// to not be a smi and to have elements, the caller must guarantee this
+// precondition. If key is not a smi, then the generated code branches to
+// key_not_smi. Callers can specify NULL for key_not_smi to signal that a smi
+// check has already been performed on key so that the smi check is not
+// generated. If key is not a valid index within the bounds of the pixel array,
+// the generated code jumps to out_of_range. receiver, key and elements are
+// unchanged throughout the generated code sequence.
void GenerateFastPixelArrayLoad(MacroAssembler* masm,
Register receiver,
Register key,
@@ -508,6 +508,28 @@ void GenerateFastPixelArrayLoad(MacroAssembler* masm,
Label* key_not_smi,
Label* out_of_range);
+// Generate code to store an element into a pixel array, clamping values between
+// [0..255]. The receiver is assumed to not be a smi and to have elements, the
+// caller must guarantee this precondition. If key is not a smi, then the
+// generated code branches to key_not_smi. Callers can specify NULL for
+// key_not_smi to signal that a smi check has already been performed on key so
+// that the smi check is not generated. If the value is not a smi, the generated
+// code will branch to value_not_smi. If the receiver doesn't have pixel array
+// elements, the generated code will branch to not_pixel_array, unless
+// not_pixel_array is NULL, in which case the caller must ensure that the
+// receiver has pixel array elements. If key is not a valid index within the
+// bounds of the pixel array, the generated code jumps to out_of_range.
+void GenerateFastPixelArrayStore(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register value,
+ Register elements,
+ Register scratch1,
+ bool load_elements_from_receiver,
+ Label* key_not_smi,
+ Label* value_not_smi,
+ Label* not_pixel_array,
+ Label* out_of_range);
} } // namespace v8::internal
diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc
index 52de32b6f3..02e29191dc 100644
--- a/deps/v8/src/ia32/codegen-ia32.cc
+++ b/deps/v8/src/ia32/codegen-ia32.cc
@@ -3771,14 +3771,15 @@ void CodeGenerator::GenerateReturnSequence(Result* return_value) {
// Leave the frame and return popping the arguments and the
// receiver.
frame_->Exit();
- masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
+ int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
+ __ Ret(arguments_bytes, ecx);
DeleteFrame();
#ifdef ENABLE_DEBUGGER_SUPPORT
- // Check that the size of the code used for returning matches what is
- // expected by the debugger.
- ASSERT_EQ(Assembler::kJSReturnSequenceLength,
- masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
+ // Check that the size of the code used for returning is large enough
+ // for the debugger's requirements.
+ ASSERT(Assembler::kJSReturnSequenceLength <=
+ masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}
@@ -5587,7 +5588,8 @@ void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
Load(property->value());
if (property->emit_store()) {
Result ignored =
- frame_->CallStoreIC(Handle<String>::cast(key), false);
+ frame_->CallStoreIC(Handle<String>::cast(key), false,
+ strict_mode_flag());
// A test eax instruction following the store IC call would
// indicate the presence of an inlined version of the
// store. Add a nop to indicate that there is no such
@@ -8223,19 +8225,24 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
if (property != NULL) {
Load(property->obj());
Load(property->key());
- Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
+ frame_->Push(Smi::FromInt(strict_mode_flag()));
+ Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 3);
frame_->Push(&answer);
return;
}
Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
if (variable != NULL) {
+ // Delete of an unqualified identifier is disallowed in strict mode
+ // so this code can only be reached in non-strict mode.
+ ASSERT(strict_mode_flag() == kNonStrictMode);
Slot* slot = variable->AsSlot();
if (variable->is_global()) {
LoadGlobal();
frame_->Push(variable->name());
+ frame_->Push(Smi::FromInt(kNonStrictMode));
Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
- CALL_FUNCTION, 2);
+ CALL_FUNCTION, 3);
frame_->Push(&answer);
return;
@@ -9670,7 +9677,7 @@ Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
Result result;
if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
- result = frame()->CallStoreIC(name, is_contextual);
+ result = frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
// A test eax instruction following the call signals that the inobject
// property case was inlined. Ensure that there is not a test eax
// instruction here.
@@ -9754,7 +9761,7 @@ Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
slow.Bind(&value, &receiver);
frame()->Push(&receiver);
frame()->Push(&value);
- result = frame()->CallStoreIC(name, is_contextual);
+ result = frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
// Encode the offset to the map check instruction and the offset
// to the write barrier store address computation in a test eax
// instruction.
diff --git a/deps/v8/src/ia32/deoptimizer-ia32.cc b/deps/v8/src/ia32/deoptimizer-ia32.cc
index a646052eee..322993ee61 100644
--- a/deps/v8/src/ia32/deoptimizer-ia32.cc
+++ b/deps/v8/src/ia32/deoptimizer-ia32.cc
@@ -80,6 +80,7 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
Address prev_address = code_start_address;
for (unsigned i = 0; i < table.length(); ++i) {
Address curr_address = code_start_address + table.GetPcOffset(i);
+ ASSERT_GE(curr_address, prev_address);
ZapCodeRange(prev_address, curr_address);
SafepointEntry safepoint_entry = table.GetEntry(i);
@@ -97,7 +98,8 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
RelocInfo::RUNTIME_ENTRY,
reinterpret_cast<intptr_t>(deopt_entry));
reloc_info_writer.Write(&rinfo);
-
+ ASSERT_GE(reloc_info_writer.pos(),
+ reloc_info->address() + ByteArray::kHeaderSize);
curr_address += patch_size();
}
prev_address = curr_address;
@@ -137,39 +139,39 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
void Deoptimizer::PatchStackCheckCodeAt(Address pc_after,
Code* check_code,
Code* replacement_code) {
- Address call_target_address = pc_after - kPointerSize;
- ASSERT(check_code->entry() ==
- Assembler::target_address_at(call_target_address));
- // The stack check code matches the pattern:
- //
- // cmp esp, <limit>
- // jae ok
- // call <stack guard>
- // test eax, <loop nesting depth>
- // ok: ...
- //
- // We will patch away the branch so the code is:
- //
- // cmp esp, <limit> ;; Not changed
- // nop
- // nop
- // call <on-stack replacment>
- // test eax, <loop nesting depth>
- // ok:
- ASSERT(*(call_target_address - 3) == 0x73 && // jae
- *(call_target_address - 2) == 0x07 && // offset
- *(call_target_address - 1) == 0xe8); // call
- *(call_target_address - 3) = 0x90; // nop
- *(call_target_address - 2) = 0x90; // nop
- Assembler::set_target_address_at(call_target_address,
- replacement_code->entry());
+ Address call_target_address = pc_after - kIntSize;
+ ASSERT(check_code->entry() ==
+ Assembler::target_address_at(call_target_address));
+ // The stack check code matches the pattern:
+ //
+ // cmp esp, <limit>
+ // jae ok
+ // call <stack guard>
+ // test eax, <loop nesting depth>
+ // ok: ...
+ //
+ // We will patch away the branch so the code is:
+ //
+ // cmp esp, <limit> ;; Not changed
+ // nop
+ // nop
+ // call <on-stack replacment>
+ // test eax, <loop nesting depth>
+ // ok:
+ ASSERT(*(call_target_address - 3) == 0x73 && // jae
+ *(call_target_address - 2) == 0x07 && // offset
+ *(call_target_address - 1) == 0xe8); // call
+ *(call_target_address - 3) = 0x90; // nop
+ *(call_target_address - 2) = 0x90; // nop
+ Assembler::set_target_address_at(call_target_address,
+ replacement_code->entry());
}
void Deoptimizer::RevertStackCheckCodeAt(Address pc_after,
Code* check_code,
Code* replacement_code) {
- Address call_target_address = pc_after - kPointerSize;
+ Address call_target_address = pc_after - kIntSize;
ASSERT(replacement_code->entry() ==
Assembler::target_address_at(call_target_address));
// Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
diff --git a/deps/v8/src/ia32/disasm-ia32.cc b/deps/v8/src/ia32/disasm-ia32.cc
index 4028a93421..e0cbe35c0d 100644
--- a/deps/v8/src/ia32/disasm-ia32.cc
+++ b/deps/v8/src/ia32/disasm-ia32.cc
@@ -1115,10 +1115,20 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
get_modrm(*data, &mod, &regop, &rm);
int8_t imm8 = static_cast<int8_t>(data[1]);
AppendToBuffer("pextrd %s,%s,%d",
- NameOfXMMRegister(regop),
+ NameOfCPURegister(regop),
NameOfXMMRegister(rm),
static_cast<int>(imm8));
data += 2;
+ } else if (*data == 0x22) {
+ data++;
+ int mod, regop, rm;
+ get_modrm(*data, &mod, &regop, &rm);
+ int8_t imm8 = static_cast<int8_t>(data[1]);
+ AppendToBuffer("pinsrd %s,%s,%d",
+ NameOfXMMRegister(regop),
+ NameOfCPURegister(rm),
+ static_cast<int>(imm8));
+ data += 2;
} else {
UnimplementedInstruction();
}
diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc
index 3c094a4e12..a5c94c6bf5 100644
--- a/deps/v8/src/ia32/full-codegen-ia32.cc
+++ b/deps/v8/src/ia32/full-codegen-ia32.cc
@@ -47,8 +47,7 @@ namespace internal {
class JumpPatchSite BASE_EMBEDDED {
public:
- explicit JumpPatchSite(MacroAssembler* masm)
- : masm_(masm) {
+ explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
info_emitted_ = false;
#endif
@@ -60,7 +59,7 @@ class JumpPatchSite BASE_EMBEDDED {
void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
__ test(reg, Immediate(kSmiTagMask));
- EmitJump(not_carry, target); // Always taken before patched.
+ EmitJump(not_carry, target); // Always taken before patched.
}
void EmitJumpIfSmi(Register reg, NearLabel* target) {
@@ -310,12 +309,14 @@ void FullCodeGenerator::EmitReturnSequence() {
// patch with the code required by the debugger.
__ mov(esp, ebp);
__ pop(ebp);
- __ ret((scope()->num_parameters() + 1) * kPointerSize);
+
+ int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
+ __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT
- // Check that the size of the code used for returning matches what is
- // expected by the debugger.
- ASSERT_EQ(Assembler::kJSReturnSequenceLength,
- masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
+ // Check that the size of the code used for returning is large enough
+ // for the debugger's requirements.
+ ASSERT(Assembler::kJSReturnSequenceLength <=
+ masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}
}
@@ -330,6 +331,7 @@ FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
} else if (right->IsSmiLiteral()) {
return kRightConstant;
} else if (left->IsSmiLiteral() && !Token::IsShiftOp(op)) {
+ // Don't inline shifts with constant left hand side.
return kLeftConstant;
} else {
return kNoConstants;
@@ -614,6 +616,7 @@ void FullCodeGenerator::Move(Slot* dst,
// Emit the write barrier code if the location is in the heap.
if (dst->type() == Slot::CONTEXT) {
int offset = Context::SlotOffset(dst->index());
+ ASSERT(!scratch1.is(esi) && !src.is(esi) && !scratch2.is(esi));
__ RecordWrite(scratch1, offset, src, scratch2);
}
}
@@ -717,18 +720,25 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
} else if (prop != NULL) {
if (function != NULL || mode == Variable::CONST) {
// We are declaring a function or constant that rewrites to a
- // property. Use (keyed) IC to set the initial value.
- VisitForStackValue(prop->obj());
+ // property. Use (keyed) IC to set the initial value. We cannot
+ // visit the rewrite because it's shared and we risk recording
+ // duplicate AST IDs for bailouts from optimized code.
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
+
if (function != NULL) {
- VisitForStackValue(prop->key());
+ __ push(eax);
VisitForAccumulatorValue(function);
- __ pop(ecx);
+ __ pop(edx);
} else {
- VisitForAccumulatorValue(prop->key());
- __ mov(ecx, result_register());
- __ mov(result_register(), Factory::the_hole_value());
+ __ mov(edx, eax);
+ __ mov(eax, Factory::the_hole_value());
}
- __ pop(edx);
+ ASSERT(prop->key()->AsLiteral() != NULL &&
+ prop->key()->AsLiteral()->handle()->IsSmi());
+ __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -1635,6 +1645,9 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
bool left_is_constant_smi,
Smi* value) {
NearLabel call_stub, done;
+ // Optimistically add smi value with unknown object. If result overflows or is
+ // not a smi then we had either a smi overflow or added a smi with a tagged
+ // pointer.
__ add(Operand(eax), Immediate(value));
__ j(overflow, &call_stub);
JumpPatchSite patch_site(masm_);
@@ -1643,8 +1656,7 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
// Undo the optimistic add operation and call the shared stub.
__ bind(&call_stub);
__ sub(Operand(eax), Immediate(value));
- Token::Value op = Token::ADD;
- TypeRecordingBinaryOpStub stub(op, mode);
+ TypeRecordingBinaryOpStub stub(Token::ADD, mode);
if (left_is_constant_smi) {
__ mov(edx, Immediate(value));
} else {
@@ -1663,6 +1675,9 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
bool left_is_constant_smi,
Smi* value) {
NearLabel call_stub, done;
+ // Optimistically subtract smi value with unknown object. If result overflows
+ // or is not a smi then we had either a smi overflow or added a smi with a
+ // tagged pointer.
if (left_is_constant_smi) {
__ mov(ecx, eax);
__ mov(eax, Immediate(value));
@@ -1683,8 +1698,7 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
__ mov(edx, eax);
__ mov(eax, Immediate(value));
}
- Token::Value op = Token::SUB;
- TypeRecordingBinaryOpStub stub(op, mode);
+ TypeRecordingBinaryOpStub stub(Token::SUB, mode);
EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
@@ -1720,7 +1734,7 @@ void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
__ shl(edx, shift_value - 1);
}
// Convert int result to smi, checking that it is in int range.
- ASSERT(kSmiTagSize == 1); // Adjust code if not the case.
+ STATIC_ASSERT(kSmiTagSize == 1); // Adjust code if not the case.
__ add(edx, Operand(edx));
__ j(overflow, &call_stub);
__ mov(eax, edx); // Put result back into eax.
@@ -1733,6 +1747,8 @@ void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
}
break;
case Token::SHR:
+ // SHR must return a positive value. When shifting by 0 or 1 we need to
+ // check that smi tagging the result will not create a negative value.
if (shift_value < 2) {
__ mov(edx, eax);
__ SmiUntag(edx);
@@ -1975,10 +1991,20 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
case KEYED_PROPERTY: {
__ push(eax); // Preserve value.
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
- __ mov(ecx, eax);
- __ pop(edx);
+ if (prop->is_synthetic()) {
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ ASSERT(prop->key()->AsLiteral() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
+ __ mov(edx, eax);
+ __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ __ mov(ecx, eax);
+ __ pop(edx);
+ }
__ pop(eax); // Restore value.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -2004,8 +2030,10 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// ecx, and the global object on the stack.
__ mov(ecx, var->name());
__ mov(edx, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Like var declarations, const declarations are hoisted to function
@@ -3700,37 +3728,47 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
Property* prop = expr->expression()->AsProperty();
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
- if (prop == NULL && var == NULL) {
- // Result of deleting non-property, non-variable reference is true.
- // The subexpression may have side effects.
- VisitForEffect(expr->expression());
- context()->Plug(true);
- } else if (var != NULL &&
- !var->is_global() &&
- var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
- // Result of deleting non-global, non-dynamic variables is false.
- // The subexpression does not have side effects.
- context()->Plug(false);
- } else {
- // Property or variable reference. Call the delete builtin with
- // object and property name as arguments.
- if (prop != NULL) {
+
+ if (prop != NULL) {
+ if (prop->is_synthetic()) {
+ // Result of deleting parameters is false, even when they rewrite
+ // to accesses on the arguments object.
+ context()->Plug(false);
+ } else {
VisitForStackValue(prop->obj());
VisitForStackValue(prop->key());
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
- } else if (var->is_global()) {
+ context()->Plug(eax);
+ }
+ } else if (var != NULL) {
+ // Delete of an unqualified identifier is disallowed in strict mode
+ // so this code can only be reached in non-strict mode.
+ ASSERT(strict_mode_flag() == kNonStrictMode);
+ if (var->is_global()) {
__ push(GlobalObjectOperand());
__ push(Immediate(var->name()));
+ __ push(Immediate(Smi::FromInt(kNonStrictMode)));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
+ context()->Plug(eax);
+ } else if (var->AsSlot() != NULL &&
+ var->AsSlot()->type() != Slot::LOOKUP) {
+ // Result of deleting non-global, non-dynamic variables is false.
+ // The subexpression does not have side effects.
+ context()->Plug(false);
} else {
- // Non-global variable. Call the runtime to delete from the
+ // Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
__ push(context_register());
__ push(Immediate(var->name()));
__ CallRuntime(Runtime::kDeleteContextSlot, 2);
+ context()->Plug(eax);
}
- context()->Plug(eax);
+ } else {
+ // Result of deleting non-property, non-variable reference is true.
+ // The subexpression may have side effects.
+ VisitForEffect(expr->expression());
+ context()->Plug(true);
}
break;
}
@@ -3949,8 +3987,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Call stub for +1/-1.
__ mov(edx, eax);
__ mov(eax, Immediate(Smi::FromInt(1)));
- TypeRecordingBinaryOpStub stub(expr->binary_op(),
- NO_OVERWRITE);
+ TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc
index 3fe8fdb969..73cd60df56 100644
--- a/deps/v8/src/ia32/ic-ia32.cc
+++ b/deps/v8/src/ia32/ic-ia32.cc
@@ -108,6 +108,9 @@ static void GenerateStringDictionaryProbes(MacroAssembler* masm,
Register name,
Register r0,
Register r1) {
+ // Assert that name contains a string.
+ if (FLAG_debug_code) __ AbortIfNotString(name);
+
// Compute the capacity mask.
const int kCapacityOffset =
StringDictionary::kHeaderSize +
@@ -806,28 +809,17 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// ecx: key (a smi)
// edx: receiver
// edi: elements array
- __ CheckMap(edi, Factory::pixel_array_map(), &slow, true);
- // Check that the value is a smi. If a conversion is needed call into the
- // runtime to convert and clamp.
- __ test(eax, Immediate(kSmiTagMask));
- __ j(not_zero, &slow);
- __ mov(ebx, ecx);
- __ SmiUntag(ebx);
- __ cmp(ebx, FieldOperand(edi, PixelArray::kLengthOffset));
- __ j(above_equal, &slow);
- __ mov(ecx, eax); // Save the value. Key is not longer needed.
- __ SmiUntag(ecx);
- { // Clamp the value to [0..255].
- Label done;
- __ test(ecx, Immediate(0xFFFFFF00));
- __ j(zero, &done);
- __ setcc(negative, ecx); // 1 if negative, 0 if positive.
- __ dec_b(ecx); // 0 if negative, 255 if positive.
- __ bind(&done);
- }
- __ mov(edi, FieldOperand(edi, PixelArray::kExternalPointerOffset));
- __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
- __ ret(0); // Return value in eax.
+ GenerateFastPixelArrayStore(masm,
+ edx,
+ ecx,
+ eax,
+ edi,
+ ebx,
+ false,
+ NULL,
+ &slow,
+ &slow,
+ &slow);
// Extra capacity case: Check if there is extra capacity to
// perform the store and update the length. Used for adding one
@@ -1208,7 +1200,14 @@ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
+ // Check if the name is a string.
+ Label miss;
+ __ test(ecx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ Condition cond = masm->IsObjectStringType(ecx, eax, eax);
+ __ j(NegateCondition(cond), &miss);
GenerateCallNormal(masm, argc);
+ __ bind(&miss);
GenerateMiss(masm, argc);
}
@@ -1488,7 +1487,8 @@ void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
}
-void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
+void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
+ Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : name
@@ -1498,7 +1498,8 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
NOT_IN_LOOP,
- MONOMORPHIC);
+ MONOMORPHIC,
+ extra_ic_state);
StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
// Cache miss: Jump to runtime.
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc
index 9e4bada4f4..a59b1a5bad 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc
@@ -43,13 +43,20 @@ class SafepointGenerator : public PostCallGenerator {
public:
SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers,
- int deoptimization_index)
+ int deoptimization_index,
+ bool ensure_reloc_space = false)
: codegen_(codegen),
pointers_(pointers),
- deoptimization_index_(deoptimization_index) { }
+ deoptimization_index_(deoptimization_index),
+ ensure_reloc_space_(ensure_reloc_space) { }
virtual ~SafepointGenerator() { }
virtual void Generate() {
+ // Ensure that we have enough space in the reloc info to patch
+ // this with calls when doing deoptimization.
+ if (ensure_reloc_space_) {
+ codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
+ }
codegen_->RecordSafepoint(pointers_, deoptimization_index_);
}
@@ -57,6 +64,7 @@ class SafepointGenerator : public PostCallGenerator {
LCodeGen* codegen_;
LPointerMap* pointers_;
int deoptimization_index_;
+ bool ensure_reloc_space_;
};
@@ -157,6 +165,8 @@ bool LCodeGen::GeneratePrologue() {
// Trace the call.
if (FLAG_trace) {
+ // We have not executed any compiled code yet, so esi still holds the
+ // incoming context.
__ CallRuntime(Runtime::kTraceEnter, 0);
}
return !is_aborted();
@@ -367,10 +377,14 @@ void LCodeGen::AddToTranslation(Translation* translation,
void LCodeGen::CallCode(Handle<Code> code,
RelocInfo::Mode mode,
- LInstruction* instr) {
+ LInstruction* instr,
+ bool adjusted) {
ASSERT(instr != NULL);
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
+ if (!adjusted) {
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
__ call(code, mode);
RegisterLazyDeoptimization(instr);
@@ -383,15 +397,19 @@ void LCodeGen::CallCode(Handle<Code> code,
}
-void LCodeGen::CallRuntime(Runtime::Function* function,
- int num_arguments,
- LInstruction* instr) {
+void LCodeGen::CallRuntime(Runtime::Function* fun,
+ int argc,
+ LInstruction* instr,
+ bool adjusted) {
ASSERT(instr != NULL);
ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
- __ CallRuntime(function, num_arguments);
+ if (!adjusted) {
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
+ __ CallRuntime(fun, argc);
RegisterLazyDeoptimization(instr);
}
@@ -568,10 +586,6 @@ void LCodeGen::RecordSafepoint(
safepoint.DefinePointerRegister(ToRegister(pointer));
}
}
- if (kind & Safepoint::kWithRegisters) {
- // Register esi always contains a pointer to the context.
- safepoint.DefinePointerRegister(esi);
- }
}
@@ -635,6 +649,7 @@ void LCodeGen::DoParameter(LParameter* instr) {
void LCodeGen::DoCallStub(LCallStub* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->result()).is(eax));
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpConstructResult: {
@@ -804,7 +819,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
__ test(left, Operand(left));
__ j(not_zero, &done);
if (right->IsConstantOperand()) {
- if (ToInteger32(LConstantOperand::cast(right)) < 0) {
+ if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
DeoptimizeIf(no_condition, instr->environment());
}
} else {
@@ -945,19 +960,31 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
if (BitCast<uint64_t, double>(v) == 0) {
__ xorpd(res, res);
} else {
- int32_t v_int32 = static_cast<int32_t>(v);
- if (static_cast<double>(v_int32) == v) {
- __ push_imm32(v_int32);
- __ cvtsi2sd(res, Operand(esp, 0));
- __ add(Operand(esp), Immediate(kPointerSize));
+ Register temp = ToRegister(instr->TempAt(0));
+ uint64_t int_val = BitCast<uint64_t, double>(v);
+ int32_t lower = static_cast<int32_t>(int_val);
+ int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
+ if (CpuFeatures::IsSupported(SSE4_1)) {
+ CpuFeatures::Scope scope(SSE4_1);
+ if (lower != 0) {
+ __ Set(temp, Immediate(lower));
+ __ movd(res, Operand(temp));
+ __ Set(temp, Immediate(upper));
+ __ pinsrd(res, Operand(temp), 1);
+ } else {
+ __ xorpd(res, res);
+ __ Set(temp, Immediate(upper));
+ __ pinsrd(res, Operand(temp), 1);
+ }
} else {
- uint64_t int_val = BitCast<uint64_t, double>(v);
- int32_t lower = static_cast<int32_t>(int_val);
- int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
- __ push_imm32(upper);
- __ push_imm32(lower);
- __ movdbl(res, Operand(esp, 0));
- __ add(Operand(esp), Immediate(2 * kPointerSize));
+ __ Set(temp, Immediate(upper));
+ __ movd(res, Operand(temp));
+ __ psllq(res, 32);
+ if (lower != 0) {
+ __ Set(temp, Immediate(lower));
+ __ movd(xmm0, Operand(temp));
+ __ por(res, xmm0);
+ }
}
}
}
@@ -983,6 +1010,13 @@ void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
}
+void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) {
+ Register result = ToRegister(instr->result());
+ Register array = ToRegister(instr->InputAt(0));
+ __ mov(result, FieldOperand(array, PixelArray::kLengthOffset));
+}
+
+
void LCodeGen::DoValueOf(LValueOf* instr) {
Register input = ToRegister(instr->InputAt(0));
Register result = ToRegister(instr->result());
@@ -1011,7 +1045,7 @@ void LCodeGen::DoBitNotI(LBitNotI* instr) {
void LCodeGen::DoThrow(LThrow* instr) {
__ push(ToOperand(instr->InputAt(0)));
- CallRuntime(Runtime::kThrow, 1, instr);
+ CallRuntime(Runtime::kThrow, 1, instr, false);
if (FLAG_debug_code) {
Comment("Unreachable code.");
@@ -1083,7 +1117,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
ASSERT(ToRegister(instr->result()).is(eax));
TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
@@ -1196,6 +1230,7 @@ void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
__ pushad();
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kStackGuard);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
@@ -1686,6 +1721,7 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
// Object and function are in fixed registers defined by the stub.
+ ASSERT(ToRegister(instr->context()).is(esi));
InstanceofStub stub(InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
@@ -1701,6 +1737,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1735,11 +1772,11 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Register object = ToRegister(instr->InputAt(0));
Register temp = ToRegister(instr->TempAt(0));
- // A Smi is not instance of anything.
+ // A Smi is not an instance of anything.
__ test(object, Immediate(kSmiTagMask));
__ j(zero, &false_result, not_taken);
- // This is the inlined call site instanceof cache. The two occourences of the
+ // This is the inlined call site instanceof cache. The two occurences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
NearLabel cache_miss;
@@ -1751,10 +1788,10 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
__ mov(eax, Factory::the_hole_value()); // Patched to either true or false.
__ jmp(&done);
- // The inlined call site cache did not match. Check null and string before
- // calling the deferred code.
+ // The inlined call site cache did not match. Check for null and string
+ // before calling the deferred code.
__ bind(&cache_miss);
- // Null is not instance of anything.
+ // Null is not an instance of anything.
__ cmp(object, Factory::null_value());
__ j(equal, &false_result);
@@ -1794,12 +1831,13 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Register temp = ToRegister(instr->TempAt(0));
ASSERT(temp.is(edi));
__ mov(InstanceofStub::right(), Immediate(instr->function()));
- static const int kAdditionalDelta = 13;
+ static const int kAdditionalDelta = 16;
int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta;
__ bind(&before_push_delta);
__ mov(temp, Immediate(delta));
__ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ call(stub.GetCode(), RelocInfo::CODE_TARGET);
ASSERT_EQ(kAdditionalDelta,
masm_->SizeOfCodeGeneratedSince(&before_push_delta));
@@ -1836,7 +1874,7 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
Condition condition = ComputeCompareCondition(op);
if (op == Token::GT || op == Token::LTE) {
@@ -1859,7 +1897,7 @@ void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
int false_block = chunk_->LookupDestination(instr->false_block_id());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
// The compare stub expects compare condition and the input operands
// reversed for GT and LTE.
@@ -1874,14 +1912,17 @@ void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
void LCodeGen::DoReturn(LReturn* instr) {
if (FLAG_trace) {
- // Preserve the return value on the stack and rely on the runtime
- // call to return the value in the same register.
+ // Preserve the return value on the stack and rely on the runtime call
+ // to return the value in the same register. We're leaving the code
+ // managed by the register allocator and tearing down the frame, it's
+ // safe to write to the context register.
__ push(eax);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntime(Runtime::kTraceExit, 1);
}
__ mov(esp, ebp);
__ pop(ebp);
- __ ret((ParameterCount() + 1) * kPointerSize);
+ __ Ret((ParameterCount() + 1) * kPointerSize, ecx);
}
@@ -1945,6 +1986,7 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(eax));
ASSERT(ToRegister(instr->result()).is(eax));
@@ -1997,22 +2039,33 @@ void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
void LCodeGen::DoLoadElements(LLoadElements* instr) {
- ASSERT(instr->result()->Equals(instr->InputAt(0)));
- Register reg = ToRegister(instr->InputAt(0));
- __ mov(reg, FieldOperand(reg, JSObject::kElementsOffset));
+ Register result = ToRegister(instr->result());
+ Register input = ToRegister(instr->InputAt(0));
+ __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
if (FLAG_debug_code) {
NearLabel done;
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
+ __ cmp(FieldOperand(result, HeapObject::kMapOffset),
Immediate(Factory::fixed_array_map()));
__ j(equal, &done);
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
+ __ cmp(FieldOperand(result, HeapObject::kMapOffset),
+ Immediate(Factory::pixel_array_map()));
+ __ j(equal, &done);
+ __ cmp(FieldOperand(result, HeapObject::kMapOffset),
Immediate(Factory::fixed_cow_array_map()));
- __ Check(equal, "Check for fast elements failed.");
+ __ Check(equal, "Check for fast elements or pixel array failed.");
__ bind(&done);
}
}
+void LCodeGen::DoLoadPixelArrayExternalPointer(
+ LLoadPixelArrayExternalPointer* instr) {
+ Register result = ToRegister(instr->result());
+ Register input = ToRegister(instr->InputAt(0));
+ __ mov(result, FieldOperand(input, PixelArray::kExternalPointerOffset));
+}
+
+
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Register arguments = ToRegister(instr->arguments());
Register length = ToRegister(instr->length());
@@ -2035,7 +2088,10 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
ASSERT(result.is(elements));
// Load the result.
- __ mov(result, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize));
+ __ mov(result, FieldOperand(elements,
+ key,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
// Check for the hole value.
__ cmp(result, Factory::the_hole_value());
@@ -2043,7 +2099,19 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
}
+void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) {
+ Register external_elements = ToRegister(instr->external_pointer());
+ Register key = ToRegister(instr->key());
+ Register result = ToRegister(instr->result());
+ ASSERT(result.is(external_elements));
+
+ // Load the result.
+ __ movzx_b(result, Operand(external_elements, key, times_1, 0));
+}
+
+
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(edx));
ASSERT(ToRegister(instr->key()).is(eax));
@@ -2101,24 +2169,36 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
Register receiver = ToRegister(instr->receiver());
- ASSERT(ToRegister(instr->function()).is(edi));
+ Register function = ToRegister(instr->function());
+ Register length = ToRegister(instr->length());
+ Register elements = ToRegister(instr->elements());
+ Register scratch = ToRegister(instr->TempAt(0));
+ ASSERT(receiver.is(eax)); // Used for parameter count.
+ ASSERT(function.is(edi)); // Required by InvokeFunction.
ASSERT(ToRegister(instr->result()).is(eax));
- // If the receiver is null or undefined, we have to pass the
- // global object as a receiver.
- NearLabel global_receiver, receiver_ok;
+ // If the receiver is null or undefined, we have to pass the global object
+ // as a receiver.
+ NearLabel global_object, receiver_ok;
__ cmp(receiver, Factory::null_value());
- __ j(equal, &global_receiver);
+ __ j(equal, &global_object);
__ cmp(receiver, Factory::undefined_value());
- __ j(not_equal, &receiver_ok);
- __ bind(&global_receiver);
- __ mov(receiver, GlobalObjectOperand());
- __ bind(&receiver_ok);
+ __ j(equal, &global_object);
- Register length = ToRegister(instr->length());
- Register elements = ToRegister(instr->elements());
-
- Label invoke;
+ // The receiver should be a JS object.
+ __ test(receiver, Immediate(kSmiTagMask));
+ DeoptimizeIf(equal, instr->environment());
+ __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, scratch);
+ DeoptimizeIf(below, instr->environment());
+ __ jmp(&receiver_ok);
+
+ __ bind(&global_object);
+ // TODO(kmillikin): We have a hydrogen value for the global object. See
+ // if it's better to use it than to explicitly fetch it from the context
+ // here.
+ __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
+ __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
+ __ bind(&receiver_ok);
// Copy the arguments to this function possibly from the
// adaptor frame below it.
@@ -2131,7 +2211,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
// Loop through the arguments pushing them onto the execution
// stack.
- Label loop;
+ NearLabel invoke, loop;
// length is a small non-negative integer, due to the test above.
__ test(length, Operand(length));
__ j(zero, &invoke);
@@ -2149,13 +2229,10 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
RegisterEnvironmentForDeoptimization(env);
SafepointGenerator safepoint_generator(this,
pointers,
- env->deoptimization_index());
- ASSERT(receiver.is(eax));
+ env->deoptimization_index(),
+ true);
v8::internal::ParameterCount actual(eax);
- __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator);
-
- // Restore context.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
}
@@ -2171,7 +2248,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
void LCodeGen::DoContext(LContext* instr) {
Register result = ToRegister(instr->result());
- __ mov(result, esi);
+ __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
}
@@ -2207,6 +2284,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
(scope()->num_heap_slots() > 0);
if (change_context) {
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+ } else {
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
// Set eax to arguments count if adaption is not needed. Assumes that eax
@@ -2222,14 +2301,15 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
if (*function == *graph()->info()->closure()) {
__ CallSelf();
} else {
+ // This is an indirect call and will not be recorded in the reloc info.
+ // Add a comment to the reloc info in case we need to patch this during
+ // deoptimization.
+ __ RecordComment(RelocInfo::kFillerCommentString, true);
__ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
}
// Setup deoptimization.
RegisterLazyDeoptimization(instr);
-
- // Restore context.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
@@ -2272,6 +2352,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
// Slow case: Call the runtime system to do the number allocation.
__ bind(&slow);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
@@ -2483,7 +2564,7 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::LOG,
TranscendentalCacheStub::UNTAGGED);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
@@ -2491,7 +2572,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::COS,
TranscendentalCacheStub::UNTAGGED);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
@@ -2499,7 +2580,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::SIN,
TranscendentalCacheStub::UNTAGGED);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
@@ -2537,46 +2618,46 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
+ ASSERT(ToRegister(instr->key()).is(ecx));
ASSERT(ToRegister(instr->result()).is(eax));
- ASSERT(ToRegister(instr->InputAt(0)).is(ecx));
int arity = instr->arity();
Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
CallCode(ic, RelocInfo::CODE_TARGET, instr);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
void LCodeGen::DoCallNamed(LCallNamed* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
__ mov(ecx, instr->name());
CallCode(ic, RelocInfo::CODE_TARGET, instr);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
void LCodeGen::DoCallFunction(LCallFunction* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ Drop(1);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
__ mov(ecx, instr->name());
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
@@ -2588,7 +2669,8 @@ void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
void LCodeGen::DoCallNew(LCallNew* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(edi));
+ ASSERT(ToRegister(instr->context()).is(esi));
+ ASSERT(ToRegister(instr->constructor()).is(edi));
ASSERT(ToRegister(instr->result()).is(eax));
Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
@@ -2598,7 +2680,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
- CallRuntime(instr->function(), instr->arity(), instr);
+ CallRuntime(instr->function(), instr->arity(), instr, false);
}
@@ -2633,6 +2715,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(edx));
ASSERT(ToRegister(instr->value()).is(eax));
@@ -2661,19 +2744,27 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
__ mov(FieldOperand(elements, offset), value);
} else {
- __ mov(FieldOperand(elements, key, times_4, FixedArray::kHeaderSize),
+ __ mov(FieldOperand(elements,
+ key,
+ times_pointer_size,
+ FixedArray::kHeaderSize),
value);
}
if (instr->hydrogen()->NeedsWriteBarrier()) {
// Compute address of modified element and store it into key register.
- __ lea(key, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize));
+ __ lea(key,
+ FieldOperand(elements,
+ key,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
__ RecordWrite(elements, key, value);
}
}
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(edx));
ASSERT(ToRegister(instr->key()).is(ecx));
ASSERT(ToRegister(instr->value()).is(eax));
@@ -2809,6 +2900,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
__ SmiTag(index);
__ push(index);
}
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
RecordSafepointWithRegisters(
instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
@@ -2886,6 +2978,7 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
// integer value.
__ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0));
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
@@ -2933,6 +3026,7 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
__ Set(reg, Immediate(0));
__ PushSafepointRegisters();
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
@@ -3348,21 +3442,22 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
} else if (instr->hydrogen()->depth() > 1) {
- CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
+ CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
- CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
+ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false);
} else {
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
}
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
// Setup the parameters to the stub/runtime call.
__ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
@@ -3400,7 +3495,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
__ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
__ push(Immediate(instr->hydrogen()->pattern()));
__ push(Immediate(instr->hydrogen()->flags()));
- CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
+ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false);
__ mov(ebx, eax);
__ bind(&materialized);
@@ -3412,7 +3507,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
__ bind(&runtime_allocate);
__ push(ebx);
__ push(Immediate(Smi::FromInt(size)));
- CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
+ CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false);
__ pop(ebx);
__ bind(&allocated);
@@ -3439,14 +3534,14 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
if (shared_info->num_literals() == 0 && !pretenure) {
FastNewClosureStub stub;
__ push(Immediate(shared_info));
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
} else {
- __ push(esi);
+ __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
__ push(Immediate(shared_info));
__ push(Immediate(pretenure
? Factory::true_value()
: Factory::false_value()));
- CallRuntime(Runtime::kNewClosure, 3, instr);
+ CallRuntime(Runtime::kNewClosure, 3, instr, false);
}
}
@@ -3458,7 +3553,7 @@ void LCodeGen::DoTypeof(LTypeof* instr) {
} else {
__ push(ToOperand(input));
}
- CallRuntime(Runtime::kTypeof, 1, instr);
+ CallRuntime(Runtime::kTypeof, 1, instr, false);
}
@@ -3577,6 +3672,53 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
}
+void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
+ Register result = ToRegister(instr->result());
+ NearLabel true_label;
+ NearLabel false_label;
+ NearLabel done;
+
+ EmitIsConstructCall(result);
+ __ j(equal, &true_label);
+
+ __ mov(result, Factory::false_value());
+ __ jmp(&done);
+
+ __ bind(&true_label);
+ __ mov(result, Factory::true_value());
+
+ __ bind(&done);
+}
+
+
+void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
+ Register temp = ToRegister(instr->TempAt(0));
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ EmitIsConstructCall(temp);
+ EmitBranch(true_block, false_block, equal);
+}
+
+
+void LCodeGen::EmitIsConstructCall(Register temp) {
+ // Get the frame pointer for the calling frame.
+ __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+
+ // Skip the arguments adaptor frame if it exists.
+ NearLabel check_frame_marker;
+ __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
+ Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ j(not_equal, &check_frame_marker);
+ __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));
+
+ // Check the marker in the calling frame.
+ __ bind(&check_frame_marker);
+ __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
+ Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
+}
+
+
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
// No code for lazy bailout instruction. Used to capture environment after a
// call for populating the safepoint data with deoptimization data.
@@ -3602,9 +3744,15 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
RegisterEnvironmentForDeoptimization(env);
+ // Create safepoint generator that will also ensure enough space in the
+ // reloc info for patching in deoptimization (since this is invoking a
+ // builtin)
SafepointGenerator safepoint_generator(this,
pointers,
- env->deoptimization_index());
+ env->deoptimization_index(),
+ true);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
}
@@ -3617,7 +3765,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ j(above_equal, &done);
StackCheckStub stub;
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
__ bind(&done);
}
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.h b/deps/v8/src/ia32/lithium-codegen-ia32.h
index f0379c02e3..977fbcda7c 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.h
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.h
@@ -120,6 +120,10 @@ class LCodeGen BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
+ int strict_mode_flag() const {
+ return info_->is_strict() ? kStrictMode : kNonStrictMode;
+ }
+
LChunk* chunk() const { return chunk_; }
Scope* scope() const { return scope_; }
HGraph* graph() const { return chunk_->graph(); }
@@ -149,17 +153,14 @@ class LCodeGen BASE_EMBEDDED {
bool GenerateDeferredCode();
bool GenerateSafepointTable();
- void CallCode(Handle<Code> code,
- RelocInfo::Mode mode,
- LInstruction* instr);
- void CallRuntime(Runtime::Function* function,
- int num_arguments,
- LInstruction* instr);
- void CallRuntime(Runtime::FunctionId id,
- int num_arguments,
- LInstruction* instr) {
+ void CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr,
+ bool adjusted = true);
+ void CallRuntime(Runtime::Function* fun, int argc, LInstruction* instr,
+ bool adjusted = true);
+ void CallRuntime(Runtime::FunctionId id, int argc, LInstruction* instr,
+ bool adjusted = true) {
Runtime::Function* function = Runtime::FunctionForId(id);
- CallRuntime(function, num_arguments, instr);
+ CallRuntime(function, argc, instr, adjusted);
}
// Generate a direct call to a known function. Expects the function
@@ -229,6 +230,11 @@ class LCodeGen BASE_EMBEDDED {
Label* is_not_object,
Label* is_object);
+ // Emits optimized code for %_IsConstructCall().
+ // Caller should branch on equal condition.
+ void EmitIsConstructCall(Register temp);
+
+
LChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
diff --git a/deps/v8/src/ia32/lithium-ia32.cc b/deps/v8/src/ia32/lithium-ia32.cc
index b31c4eba18..a57e8c928c 100644
--- a/deps/v8/src/ia32/lithium-ia32.cc
+++ b/deps/v8/src/ia32/lithium-ia32.cc
@@ -74,15 +74,13 @@ void LInstruction::VerifyCall() {
// Call instructions can use only fixed registers as
// temporaries and outputs because all registers
// are blocked by the calling convention.
- // Inputs can use either fixed register or have a short lifetime (be
- // used at start of the instruction).
+ // Inputs must use a fixed register.
ASSERT(Output() == NULL ||
LUnallocated::cast(Output())->HasFixedPolicy() ||
!LUnallocated::cast(Output())->HasRegisterPolicy());
for (UseIterator it(this); it.HasNext(); it.Advance()) {
LOperand* operand = it.Next();
ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
- LUnallocated::cast(operand)->IsUsedAtStart() ||
!LUnallocated::cast(operand)->HasRegisterPolicy());
}
for (TempIterator it(this); it.HasNext(); it.Advance()) {
@@ -1090,14 +1088,17 @@ LInstruction* LChunkBuilder::DoTest(HTest* instr) {
UseRegisterAtStart(compare->right()));
} else if (v->IsInstanceOf()) {
HInstanceOf* instance_of = HInstanceOf::cast(v);
+ LOperand* left = UseFixed(instance_of->left(), InstanceofStub::left());
+ LOperand* right = UseFixed(instance_of->right(), InstanceofStub::right());
+ LOperand* context = UseFixed(instance_of->context(), esi);
LInstanceOfAndBranch* result =
- new LInstanceOfAndBranch(
- UseFixed(instance_of->left(), InstanceofStub::left()),
- UseFixed(instance_of->right(), InstanceofStub::right()));
+ new LInstanceOfAndBranch(context, left, right);
return MarkAsCall(result, instr);
} else if (v->IsTypeofIs()) {
HTypeofIs* typeof_is = HTypeofIs::cast(v);
return new LTypeofIsAndBranch(UseTempRegister(typeof_is->value()));
+ } else if (v->IsIsConstructCall()) {
+ return new LIsConstructCallAndBranch(TempRegister());
} else {
if (v->IsConstant()) {
if (HConstant::cast(v)->handle()->IsTrue()) {
@@ -1132,9 +1133,10 @@ LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
- LInstanceOf* result =
- new LInstanceOf(UseFixed(instr->left(), InstanceofStub::left()),
- UseFixed(instr->right(), InstanceofStub::right()));
+ LOperand* left = UseFixed(instr->left(), InstanceofStub::left());
+ LOperand* right = UseFixed(instr->right(), InstanceofStub::right());
+ LOperand* context = UseFixed(instr->context(), esi);
+ LInstanceOf* result = new LInstanceOf(context, left, right);
return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1153,12 +1155,14 @@ LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* function = UseFixed(instr->function(), edi);
LOperand* receiver = UseFixed(instr->receiver(), eax);
- LOperand* length = UseRegisterAtStart(instr->length());
- LOperand* elements = UseRegisterAtStart(instr->elements());
+ LOperand* length = UseFixed(instr->length(), ebx);
+ LOperand* elements = UseFixed(instr->elements(), ecx);
+ LOperand* temp = FixedTemp(edx);
LApplyArguments* result = new LApplyArguments(function,
receiver,
length,
- elements);
+ elements,
+ temp);
return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
}
@@ -1230,21 +1234,27 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
ASSERT(instr->key()->representation().IsTagged());
- argument_count_ -= instr->argument_count();
+ LOperand* context = UseFixed(instr->context(), esi);
LOperand* key = UseFixed(instr->key(), ecx);
- return MarkAsCall(DefineFixed(new LCallKeyed(key), eax), instr);
+ argument_count_ -= instr->argument_count();
+ LCallKeyed* result = new LCallKeyed(context, key);
+ return MarkAsCall(DefineFixed(result, eax), instr);
}
LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallNamed, eax), instr);
+ LCallNamed* result = new LCallNamed(context);
+ return MarkAsCall(DefineFixed(result, eax), instr);
}
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallGlobal, eax), instr);
+ LCallGlobal* result = new LCallGlobal(context);
+ return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1255,16 +1265,19 @@ LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
LOperand* constructor = UseFixed(instr->constructor(), edi);
argument_count_ -= instr->argument_count();
- LCallNew* result = new LCallNew(constructor);
+ LCallNew* result = new LCallNew(context, constructor);
return MarkAsCall(DefineFixed(result, eax), instr);
}
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallFunction, eax), instr);
+ LCallFunction* result = new LCallFunction(context);
+ return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1320,9 +1333,9 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
// The temporary operand is necessary to ensure that right is not allocated
// into edx.
LOperand* temp = FixedTemp(edx);
- LOperand* value = UseFixed(instr->left(), eax);
+ LOperand* dividend = UseFixed(instr->left(), eax);
LOperand* divisor = UseRegister(instr->right());
- LDivI* result = new LDivI(value, divisor, temp);
+ LDivI* result = new LDivI(dividend, divisor, temp);
return AssignEnvironment(DefineFixed(result, eax));
} else {
ASSERT(instr->representation().IsTagged());
@@ -1508,6 +1521,13 @@ LInstruction* LChunkBuilder::DoHasInstanceType(HHasInstanceType* instr) {
}
+LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
+ HGetCachedArrayIndex* instr) {
+ Abort("Unimplemented: %s", "DoGetCachedArrayIndex");
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoHasCachedArrayIndex(
HHasCachedArrayIndex* instr) {
ASSERT(instr->value()->representation().IsTagged());
@@ -1537,6 +1557,12 @@ LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) {
}
+LInstruction* LChunkBuilder::DoPixelArrayLength(HPixelArrayLength* instr) {
+ LOperand* array = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LPixelArrayLength(array));
+}
+
+
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LOperand* object = UseRegister(instr->value());
LValueOf* result = new LValueOf(object, TempRegister());
@@ -1672,13 +1698,15 @@ LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- int32_t value = instr->Integer32Value();
- return DefineAsRegister(new LConstantI(value));
+ return DefineAsRegister(new LConstantI);
} else if (r.IsDouble()) {
double value = instr->DoubleValue();
- return DefineAsRegister(new LConstantD(value));
+ LOperand* temp = (BitCast<uint64_t, double>(value) != 0)
+ ? TempRegister()
+ : NULL;
+ return DefineAsRegister(new LConstantD(temp));
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT(instr->handle()));
+ return DefineAsRegister(new LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1731,8 +1759,9 @@ LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
LOperand* object = UseFixed(instr->object(), eax);
- LLoadNamedGeneric* result = new LLoadNamedGeneric(object);
+ LLoadNamedGeneric* result = new LLoadNamedGeneric(context, object);
return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1747,7 +1776,14 @@ LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineSameAsFirst(new LLoadElements(input));
+ return DefineAsRegister(new LLoadElements(input));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadPixelArrayExternalPointer(
+ HLoadPixelArrayExternalPointer* instr) {
+ LOperand* input = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LLoadPixelArrayExternalPointer(input));
}
@@ -1762,11 +1798,25 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
}
+LInstruction* LChunkBuilder::DoLoadPixelArrayElement(
+ HLoadPixelArrayElement* instr) {
+ ASSERT(instr->representation().IsInteger32());
+ ASSERT(instr->key()->representation().IsInteger32());
+ LOperand* external_pointer =
+ UseRegisterAtStart(instr->external_pointer());
+ LOperand* key = UseRegisterAtStart(instr->key());
+ LLoadPixelArrayElement* result =
+ new LLoadPixelArrayElement(external_pointer, key);
+ return DefineSameAsFirst(result);
+}
+
+
LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
LOperand* object = UseFixed(instr->object(), edx);
LOperand* key = UseFixed(instr->key(), eax);
- LLoadKeyedGeneric* result = new LLoadKeyedGeneric(object, key);
+ LLoadKeyedGeneric* result = new LLoadKeyedGeneric(context, object, key);
return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1791,15 +1841,18 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
- LOperand* obj = UseFixed(instr->object(), edx);
+ LOperand* context = UseFixed(instr->context(), esi);
+ LOperand* object = UseFixed(instr->object(), edx);
LOperand* key = UseFixed(instr->key(), ecx);
- LOperand* val = UseFixed(instr->value(), eax);
+ LOperand* value = UseFixed(instr->value(), eax);
ASSERT(instr->object()->representation().IsTagged());
ASSERT(instr->key()->representation().IsTagged());
ASSERT(instr->value()->representation().IsTagged());
- return MarkAsCall(new LStoreKeyedGeneric(obj, key, val), instr);
+ LStoreKeyedGeneric* result =
+ new LStoreKeyedGeneric(context, object, key, value);
+ return MarkAsCall(result, instr);
}
@@ -1825,10 +1878,11 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
- LOperand* obj = UseFixed(instr->object(), edx);
- LOperand* val = UseFixed(instr->value(), eax);
+ LOperand* context = UseFixed(instr->context(), esi);
+ LOperand* object = UseFixed(instr->object(), edx);
+ LOperand* value = UseFixed(instr->value(), eax);
- LStoreNamedGeneric* result = new LStoreNamedGeneric(obj, val);
+ LStoreNamedGeneric* result = new LStoreNamedGeneric(context, object, value);
return MarkAsCall(result, instr);
}
@@ -1853,7 +1907,8 @@ LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
- return MarkAsCall(DefineFixed(new LObjectLiteral, eax), instr);
+ LOperand* context = UseFixed(instr->context(), esi);
+ return MarkAsCall(DefineFixed(new LObjectLiteral(context), eax), instr);
}
@@ -1894,8 +1949,10 @@ LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallStub, eax), instr);
+ LCallStub* result = new LCallStub(context);
+ return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1925,6 +1982,12 @@ LInstruction* LChunkBuilder::DoTypeofIs(HTypeofIs* instr) {
return DefineSameAsFirst(new LTypeofIs(UseRegister(instr->value())));
}
+
+LInstruction* LChunkBuilder::DoIsConstructCall(HIsConstructCall* instr) {
+ return DefineAsRegister(new LIsConstructCall);
+}
+
+
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
HEnvironment* env = current_block_->last_environment();
ASSERT(env != NULL);
diff --git a/deps/v8/src/ia32/lithium-ia32.h b/deps/v8/src/ia32/lithium-ia32.h
index f643dac9f6..f1b9ffc997 100644
--- a/deps/v8/src/ia32/lithium-ia32.h
+++ b/deps/v8/src/ia32/lithium-ia32.h
@@ -41,7 +41,6 @@ class LCodeGen;
#define LITHIUM_ALL_INSTRUCTION_LIST(V) \
V(ControlInstruction) \
- V(Constant) \
V(Call) \
V(StoreKeyed) \
V(StoreNamed) \
@@ -112,6 +111,8 @@ class LCodeGen;
V(IsObjectAndBranch) \
V(IsSmi) \
V(IsSmiAndBranch) \
+ V(IsConstructCall) \
+ V(IsConstructCallAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -123,6 +124,8 @@ class LCodeGen;
V(LoadKeyedGeneric) \
V(LoadNamedField) \
V(LoadNamedGeneric) \
+ V(LoadPixelArrayElement) \
+ V(LoadPixelArrayExternalPointer) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
@@ -132,6 +135,7 @@ class LCodeGen;
V(OsrEntry) \
V(OuterContext) \
V(Parameter) \
+ V(PixelArrayLength) \
V(Power) \
V(PushArgument) \
V(RegExpLiteral) \
@@ -424,11 +428,17 @@ class LParameter: public LTemplateInstruction<1, 0, 0> {
};
-class LCallStub: public LTemplateInstruction<1, 0, 0> {
+class LCallStub: public LTemplateInstruction<1, 1, 0> {
public:
+ explicit LCallStub(LOperand* context) {
+ inputs_[0] = context;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(CallStub, "call-stub")
DECLARE_HYDROGEN_ACCESSOR(CallStub)
+ LOperand* context() { return inputs_[0]; }
+
TranscendentalCache::Type transcendental_type() {
return hydrogen()->transcendental_type();
}
@@ -460,16 +470,18 @@ class LControlInstruction: public LTemplateInstruction<0, I, T> {
};
-class LApplyArguments: public LTemplateInstruction<1, 4, 0> {
+class LApplyArguments: public LTemplateInstruction<1, 4, 1> {
public:
LApplyArguments(LOperand* function,
LOperand* receiver,
LOperand* length,
- LOperand* elements) {
+ LOperand* elements,
+ LOperand* temp) {
inputs_[0] = function;
inputs_[1] = receiver;
inputs_[2] = length;
inputs_[3] = elements;
+ temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(ApplyArguments, "apply-arguments")
@@ -755,6 +767,24 @@ class LHasCachedArrayIndexAndBranch: public LControlInstruction<1, 0> {
};
+class LIsConstructCall: public LTemplateInstruction<1, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(IsConstructCall, "is-construct-call")
+ DECLARE_HYDROGEN_ACCESSOR(IsConstructCall)
+};
+
+
+class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
+ public:
+ explicit LIsConstructCallAndBranch(LOperand* temp) {
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsConstructCallAndBranch,
+ "is-construct-call-and-branch")
+};
+
+
class LClassOfTest: public LTemplateInstruction<1, 1, 1> {
public:
LClassOfTest(LOperand* value, LOperand* temp) {
@@ -813,25 +843,31 @@ class LCmpTAndBranch: public LControlInstruction<2, 0> {
};
-class LInstanceOf: public LTemplateInstruction<1, 2, 0> {
+class LInstanceOf: public LTemplateInstruction<1, 3, 0> {
public:
- LInstanceOf(LOperand* left, LOperand* right) {
- inputs_[0] = left;
- inputs_[1] = right;
+ LInstanceOf(LOperand* context, LOperand* left, LOperand* right) {
+ inputs_[0] = context;
+ inputs_[1] = left;
+ inputs_[2] = right;
}
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
+
+ LOperand* context() { return inputs_[0]; }
};
-class LInstanceOfAndBranch: public LControlInstruction<2, 0> {
+class LInstanceOfAndBranch: public LControlInstruction<3, 0> {
public:
- LInstanceOfAndBranch(LOperand* left, LOperand* right) {
- inputs_[0] = left;
- inputs_[1] = right;
+ LInstanceOfAndBranch(LOperand* context, LOperand* left, LOperand* right) {
+ inputs_[0] = context;
+ inputs_[1] = left;
+ inputs_[2] = right;
}
DECLARE_CONCRETE_INSTRUCTION(InstanceOfAndBranch, "instance-of-and-branch")
+
+ LOperand* context() { return inputs_[0]; }
};
@@ -913,44 +949,34 @@ class LSubI: public LTemplateInstruction<1, 2, 0> {
};
-class LConstant: public LTemplateInstruction<1, 0, 0> {
- DECLARE_INSTRUCTION(Constant)
-};
-
-
-class LConstantI: public LConstant {
+class LConstantI: public LTemplateInstruction<1, 0, 0> {
public:
- explicit LConstantI(int32_t value) : value_(value) { }
- int32_t value() const { return value_; }
-
DECLARE_CONCRETE_INSTRUCTION(ConstantI, "constant-i")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- int32_t value_;
+ int32_t value() const { return hydrogen()->Integer32Value(); }
};
-class LConstantD: public LConstant {
+class LConstantD: public LTemplateInstruction<1, 0, 1> {
public:
- explicit LConstantD(double value) : value_(value) { }
- double value() const { return value_; }
+ explicit LConstantD(LOperand* temp) {
+ temps_[0] = temp;
+ }
DECLARE_CONCRETE_INSTRUCTION(ConstantD, "constant-d")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- double value_;
+ double value() const { return hydrogen()->DoubleValue(); }
};
-class LConstantT: public LConstant {
+class LConstantT: public LTemplateInstruction<1, 0, 0> {
public:
- explicit LConstantT(Handle<Object> value) : value_(value) { }
- Handle<Object> value() const { return value_; }
-
DECLARE_CONCRETE_INSTRUCTION(ConstantT, "constant-t")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- Handle<Object> value_;
+ Handle<Object> value() const { return hydrogen()->handle(); }
};
@@ -999,6 +1025,17 @@ class LJSArrayLength: public LTemplateInstruction<1, 1, 0> {
};
+class LPixelArrayLength: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LPixelArrayLength(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(PixelArrayLength, "pixel-array-length")
+ DECLARE_HYDROGEN_ACCESSOR(PixelArrayLength)
+};
+
+
class LFixedArrayLength: public LTemplateInstruction<1, 1, 0> {
public:
explicit LFixedArrayLength(LOperand* value) {
@@ -1123,16 +1160,18 @@ class LLoadNamedField: public LTemplateInstruction<1, 1, 0> {
};
-class LLoadNamedGeneric: public LTemplateInstruction<1, 1, 0> {
+class LLoadNamedGeneric: public LTemplateInstruction<1, 2, 0> {
public:
- explicit LLoadNamedGeneric(LOperand* object) {
- inputs_[0] = object;
+ LLoadNamedGeneric(LOperand* context, LOperand* object) {
+ inputs_[0] = context;
+ inputs_[1] = object;
}
DECLARE_CONCRETE_INSTRUCTION(LoadNamedGeneric, "load-named-generic")
DECLARE_HYDROGEN_ACCESSOR(LoadNamedGeneric)
- LOperand* object() { return inputs_[0]; }
+ LOperand* context() { return inputs_[0]; }
+ LOperand* object() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
};
@@ -1161,6 +1200,17 @@ class LLoadElements: public LTemplateInstruction<1, 1, 0> {
};
+class LLoadPixelArrayExternalPointer: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LLoadPixelArrayExternalPointer(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayExternalPointer,
+ "load-pixel-array-external-pointer")
+};
+
+
class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
public:
LLoadKeyedFastElement(LOperand* elements, LOperand* key) {
@@ -1176,20 +1226,38 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
};
-class LLoadKeyedGeneric: public LTemplateInstruction<1, 2, 0> {
+class LLoadPixelArrayElement: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedGeneric(LOperand* obj, LOperand* key) {
- inputs_[0] = obj;
+ LLoadPixelArrayElement(LOperand* external_pointer, LOperand* key) {
+ inputs_[0] = external_pointer;
inputs_[1] = key;
}
- DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load-keyed-generic")
+ DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayElement,
+ "load-pixel-array-element")
+ DECLARE_HYDROGEN_ACCESSOR(LoadPixelArrayElement)
- LOperand* object() { return inputs_[0]; }
+ LOperand* external_pointer() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
};
+class LLoadKeyedGeneric: public LTemplateInstruction<1, 3, 0> {
+ public:
+ LLoadKeyedGeneric(LOperand* context, LOperand* obj, LOperand* key) {
+ inputs_[0] = context;
+ inputs_[1] = obj;
+ inputs_[2] = key;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric, "load-keyed-generic")
+
+ LOperand* context() { return inputs_[0]; }
+ LOperand* object() { return inputs_[1]; }
+ LOperand* key() { return inputs_[2]; }
+};
+
+
class LLoadGlobal: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(LoadGlobal, "load-global")
@@ -1308,49 +1376,68 @@ class LCallConstantFunction: public LTemplateInstruction<1, 0, 0> {
};
-class LCallKeyed: public LTemplateInstruction<1, 1, 0> {
+class LCallKeyed: public LTemplateInstruction<1, 2, 0> {
public:
- explicit LCallKeyed(LOperand* key) {
- inputs_[0] = key;
+ LCallKeyed(LOperand* context, LOperand* key) {
+ inputs_[0] = context;
+ inputs_[1] = key;
}
DECLARE_CONCRETE_INSTRUCTION(CallKeyed, "call-keyed")
DECLARE_HYDROGEN_ACCESSOR(CallKeyed)
+ LOperand* context() { return inputs_[0]; }
+ LOperand* key() { return inputs_[1]; }
+
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
};
-class LCallNamed: public LTemplateInstruction<1, 0, 0> {
+class LCallNamed: public LTemplateInstruction<1, 1, 0> {
public:
+ explicit LCallNamed(LOperand* context) {
+ inputs_[0] = context;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(CallNamed, "call-named")
DECLARE_HYDROGEN_ACCESSOR(CallNamed)
virtual void PrintDataTo(StringStream* stream);
+ LOperand* context() { return inputs_[0]; }
Handle<String> name() const { return hydrogen()->name(); }
int arity() const { return hydrogen()->argument_count() - 1; }
};
-class LCallFunction: public LTemplateInstruction<1, 0, 0> {
+class LCallFunction: public LTemplateInstruction<1, 1, 0> {
public:
+ explicit LCallFunction(LOperand* context) {
+ inputs_[0] = context;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call-function")
DECLARE_HYDROGEN_ACCESSOR(CallFunction)
+ LOperand* context() { return inputs_[0]; }
int arity() const { return hydrogen()->argument_count() - 2; }
};
-class LCallGlobal: public LTemplateInstruction<1, 0, 0> {
+class LCallGlobal: public LTemplateInstruction<1, 1, 0> {
public:
+ explicit LCallGlobal(LOperand* context) {
+ inputs_[0] = context;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(CallGlobal, "call-global")
DECLARE_HYDROGEN_ACCESSOR(CallGlobal)
virtual void PrintDataTo(StringStream* stream);
+ LOperand* context() { return inputs_[0]; }
Handle<String> name() const {return hydrogen()->name(); }
int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1368,10 +1455,11 @@ class LCallKnownGlobal: public LTemplateInstruction<1, 0, 0> {
};
-class LCallNew: public LTemplateInstruction<1, 1, 0> {
+class LCallNew: public LTemplateInstruction<1, 2, 0> {
public:
- explicit LCallNew(LOperand* constructor) {
- inputs_[0] = constructor;
+ LCallNew(LOperand* context, LOperand* constructor) {
+ inputs_[0] = context;
+ inputs_[1] = constructor;
}
DECLARE_CONCRETE_INSTRUCTION(CallNew, "call-new")
@@ -1379,6 +1467,8 @@ class LCallNew: public LTemplateInstruction<1, 1, 0> {
virtual void PrintDataTo(StringStream* stream);
+ LOperand* context() { return inputs_[0]; }
+ LOperand* constructor() { return inputs_[1]; }
int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1525,13 +1615,21 @@ class LStoreNamedField: public LStoreNamed {
};
-class LStoreNamedGeneric: public LStoreNamed {
+class LStoreNamedGeneric: public LTemplateInstruction<0, 3, 0> {
public:
- LStoreNamedGeneric(LOperand* obj, LOperand* val)
- : LStoreNamed(obj, val) { }
+ LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value) {
+ inputs_[0] = context;
+ inputs_[1] = object;
+ inputs_[2] = value;
+ }
DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
+
+ LOperand* context() { return inputs_[0]; }
+ LOperand* object() { return inputs_[1]; }
+ LOperand* value() { return inputs_[2]; }
+ Handle<Object> name() const { return hydrogen()->name(); }
};
@@ -1564,12 +1662,24 @@ class LStoreKeyedFastElement: public LStoreKeyed {
};
-class LStoreKeyedGeneric: public LStoreKeyed {
+class LStoreKeyedGeneric: public LTemplateInstruction<0, 4, 0> {
public:
- LStoreKeyedGeneric(LOperand* obj, LOperand* key, LOperand* val)
- : LStoreKeyed(obj, key, val) { }
+ LStoreKeyedGeneric(LOperand* context,
+ LOperand* object,
+ LOperand* key,
+ LOperand* value) {
+ inputs_[0] = context;
+ inputs_[1] = object;
+ inputs_[2] = key;
+ inputs_[3] = value;
+ }
DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
+
+ LOperand* context() { return inputs_[0]; }
+ LOperand* object() { return inputs_[1]; }
+ LOperand* key() { return inputs_[2]; }
+ LOperand* value() { return inputs_[3]; }
};
@@ -1675,10 +1785,16 @@ class LArrayLiteral: public LTemplateInstruction<1, 0, 0> {
};
-class LObjectLiteral: public LTemplateInstruction<1, 0, 0> {
+class LObjectLiteral: public LTemplateInstruction<1, 1, 0> {
public:
+ explicit LObjectLiteral(LOperand* context) {
+ inputs_[0] = context;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(ObjectLiteral, "object-literal")
DECLARE_HYDROGEN_ACCESSOR(ObjectLiteral)
+
+ LOperand* context() { return inputs_[0]; }
};
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc
index fe4c83d8e8..03f726ca9b 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/macro-assembler-ia32.cc
@@ -78,11 +78,6 @@ void MacroAssembler::RecordWrite(Register object,
int offset,
Register value,
Register scratch) {
- // The compiled code assumes that record write doesn't change the
- // context register, so we check that none of the clobbered
- // registers are esi.
- ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));
-
// First, check if a write barrier is even needed. The tests below
// catch stores of Smis and stores into young gen.
NearLabel done;
@@ -129,11 +124,6 @@ void MacroAssembler::RecordWrite(Register object,
void MacroAssembler::RecordWrite(Register object,
Register address,
Register value) {
- // The compiled code assumes that record write doesn't change the
- // context register, so we check that none of the clobbered
- // registers are esi.
- ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));
-
// First, check if a write barrier is even needed. The tests below
// catch stores of Smis and stores into young gen.
Label done;
@@ -458,6 +448,97 @@ void MacroAssembler::PopTryHandler() {
}
+void MacroAssembler::Throw(Register value) {
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+
+ // eax must hold the exception.
+ if (!value.is(eax)) {
+ mov(eax, value);
+ }
+
+ // Drop the sp to the top of the handler.
+ ExternalReference handler_address(Top::k_handler_address);
+ mov(esp, Operand::StaticVariable(handler_address));
+
+ // Restore next handler and frame pointer, discard handler state.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ pop(Operand::StaticVariable(handler_address));
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
+ pop(ebp);
+ pop(edx); // Remove state.
+
+ // Before returning we restore the context from the frame pointer if
+ // not NULL. The frame pointer is NULL in the exception handler of
+ // a JS entry frame.
+ Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
+ NearLabel skip;
+ cmp(ebp, 0);
+ j(equal, &skip, not_taken);
+ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ bind(&skip);
+
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+ ret(0);
+}
+
+
+void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
+ Register value) {
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+
+ // eax must hold the exception.
+ if (!value.is(eax)) {
+ mov(eax, value);
+ }
+
+ // Drop sp to the top stack handler.
+ ExternalReference handler_address(Top::k_handler_address);
+ mov(esp, Operand::StaticVariable(handler_address));
+
+ // Unwind the handlers until the ENTRY handler is found.
+ NearLabel loop, done;
+ bind(&loop);
+ // Load the type of the current stack handler.
+ const int kStateOffset = StackHandlerConstants::kStateOffset;
+ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
+ j(equal, &done);
+ // Fetch the next handler in the list.
+ const int kNextOffset = StackHandlerConstants::kNextOffset;
+ mov(esp, Operand(esp, kNextOffset));
+ jmp(&loop);
+ bind(&done);
+
+ // Set the top handler address to next handler past the current ENTRY handler.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ pop(Operand::StaticVariable(handler_address));
+
+ if (type == OUT_OF_MEMORY) {
+ // Set external caught exception to false.
+ ExternalReference external_caught(Top::k_external_caught_exception_address);
+ mov(eax, false);
+ mov(Operand::StaticVariable(external_caught), eax);
+
+ // Set pending exception and eax to out of memory exception.
+ ExternalReference pending_exception(Top::k_pending_exception_address);
+ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
+ mov(Operand::StaticVariable(pending_exception), eax);
+ }
+
+ // Clear the context pointer.
+ Set(esi, Immediate(0));
+
+ // Restore fp from handler and discard handler state.
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
+ pop(ebp);
+ pop(edx); // State.
+
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
+ ret(0);
+}
+
+
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
Register scratch,
Label* miss) {
@@ -604,11 +685,11 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
- if (top_reg.is(result)) {
- add(Operand(top_reg), Immediate(object_size));
- } else {
- lea(top_reg, Operand(result, object_size));
+ if (!top_reg.is(result)) {
+ mov(top_reg, result);
}
+ add(Operand(top_reg), Immediate(object_size));
+ j(carry, gc_required, not_taken);
cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
@@ -657,7 +738,12 @@ void MacroAssembler::AllocateInNewSpace(int header_size,
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
- lea(result_end, Operand(result, element_count, element_size, header_size));
+
+ // We assume that element_count*element_size + header_size does not
+ // overflow.
+ lea(result_end, Operand(element_count, element_size, header_size));
+ add(result_end, Operand(result));
+ j(carry, gc_required);
cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required);
@@ -702,6 +788,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
mov(result_end, object_size);
}
add(result_end, Operand(result));
+ j(carry, gc_required, not_taken);
cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
@@ -1196,7 +1283,7 @@ MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
// If false, it is returned as a pointer to a preallocated by caller memory
// region. Pointer to this region should be passed to a function as an
// implicit first argument.
-#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
+#if defined(USING_BSD_ABI) || defined(__MINGW32__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
@@ -1581,6 +1668,20 @@ void MacroAssembler::Ret() {
}
+void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
+ if (is_uint16(bytes_dropped)) {
+ ret(bytes_dropped);
+ } else {
+ pop(scratch);
+ add(Operand(esp), Immediate(bytes_dropped));
+ push(scratch);
+ ret(0);
+ }
+}
+
+
+
+
void MacroAssembler::Drop(int stack_elements) {
if (stack_elements > 0) {
add(Operand(esp), Immediate(stack_elements * kPointerSize));
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h
index 12a8923113..16361ad239 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/macro-assembler-ia32.h
@@ -304,6 +304,11 @@ class MacroAssembler: public Assembler {
// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();
+ // Activate the top handler in the try handler chain.
+ void Throw(Register value);
+
+ void ThrowUncatchable(UncatchableExceptionType type, Register value);
+
// ---------------------------------------------------------------------------
// Inline caching support
@@ -550,6 +555,10 @@ class MacroAssembler: public Assembler {
void Ret();
+ // Return and drop arguments from stack, where the number of arguments
+ // may be bigger than 2^16 - 1. Requires a scratch register.
+ void Ret(int bytes_dropped, Register scratch);
+
// Emit code to discard a non-negative number of pointer-sized elements
// from the stack, clobbering only the esp register.
void Drop(int element_count);
diff --git a/deps/v8/src/ia32/simulator-ia32.h b/deps/v8/src/ia32/simulator-ia32.h
index 88d0b6187f..43b7ea3b0c 100644
--- a/deps/v8/src/ia32/simulator-ia32.h
+++ b/deps/v8/src/ia32/simulator-ia32.h
@@ -38,10 +38,15 @@ namespace internal {
#define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
(entry(p0, p1, p2, p3, p4))
-// Call the generated regexp code directly. The entry function pointer should
+
+typedef int (*regexp_matcher)(String*, int, const byte*,
+ const byte*, int*, Address, int);
+
+// Call the generated regexp code directly. The code at the entry address should
// expect seven int/pointer sized arguments and return an int.
#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
- (entry(p0, p1, p2, p3, p4, p5, p6))
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6))
+
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
(reinterpret_cast<TryCatch*>(try_catch_address))
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index f96ef5cefc..fdb22acea0 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -2709,6 +2709,42 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
}
+MaybeObject* KeyedStoreStubCompiler::CompileStorePixelArray(
+ JSObject* receiver) {
+ // ----------- S t a t e -------------
+ // -- eax : value
+ // -- ecx : key
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map matches.
+ __ CheckMap(edx, Handle<Map>(receiver->map()), &miss, false);
+
+ // Do the store.
+ GenerateFastPixelArrayStore(masm(),
+ edx,
+ ecx,
+ eax,
+ edi,
+ ebx,
+ true,
+ &miss,
+ &miss,
+ NULL,
+ &miss);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL);
+}
+
+
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
JSObject* object,
JSObject* last) {
diff --git a/deps/v8/src/ia32/virtual-frame-ia32.cc b/deps/v8/src/ia32/virtual-frame-ia32.cc
index 11e1aaf2b3..1cc91a9fea 100644
--- a/deps/v8/src/ia32/virtual-frame-ia32.cc
+++ b/deps/v8/src/ia32/virtual-frame-ia32.cc
@@ -1033,23 +1033,31 @@ Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
}
-Result VirtualFrame::CallStoreIC(Handle<String> name, bool is_contextual) {
+Result VirtualFrame::CallStoreIC(Handle<String> name,
+ bool is_contextual,
+ StrictModeFlag strict_mode) {
// Value and (if not contextual) receiver are on top of the frame.
// The IC expects name in ecx, value in eax, and receiver in edx.
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(strict_mode == kStrictMode
+ ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
+
Result value = Pop();
+ RelocInfo::Mode mode;
if (is_contextual) {
PrepareForCall(0, 0);
value.ToRegister(eax);
__ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
value.Unuse();
+ mode = RelocInfo::CODE_TARGET_CONTEXT;
} else {
Result receiver = Pop();
PrepareForCall(0, 0);
MoveResultsToRegisters(&value, &receiver, eax, edx);
+ mode = RelocInfo::CODE_TARGET;
}
__ mov(ecx, name);
- return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
+ return RawCallCodeObject(ic, mode);
}
diff --git a/deps/v8/src/ia32/virtual-frame-ia32.h b/deps/v8/src/ia32/virtual-frame-ia32.h
index b9faa46145..729469fdcc 100644
--- a/deps/v8/src/ia32/virtual-frame-ia32.h
+++ b/deps/v8/src/ia32/virtual-frame-ia32.h
@@ -365,7 +365,8 @@ class VirtualFrame: public ZoneObject {
// Call store IC. If the load is contextual, value is found on top of the
// frame. If not, value and receiver are on the frame. Both are dropped.
- Result CallStoreIC(Handle<String> name, bool is_contextual);
+ Result CallStoreIC(Handle<String> name, bool is_contextual,
+ StrictModeFlag strict_mode);
// Call keyed store IC. Value, key, and receiver are found on top
// of the frame. All three are dropped.
diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc
index 8e282adc34..968b45d0b6 100644
--- a/deps/v8/src/ic.cc
+++ b/deps/v8/src/ic.cc
@@ -342,7 +342,10 @@ void StoreIC::ClearInlinedVersion(Address address) {
void StoreIC::Clear(Address address, Code* target) {
if (target->ic_state() == UNINITIALIZED) return;
ClearInlinedVersion(address);
- SetTargetAtAddress(address, initialize_stub());
+ SetTargetAtAddress(address,
+ target->extra_ic_state() == kStoreICStrict
+ ? initialize_stub_strict()
+ : initialize_stub());
}
@@ -1368,6 +1371,7 @@ static bool LookupForWrite(JSObject* object,
MaybeObject* StoreIC::Store(State state,
+ Code::ExtraICState extra_ic_state,
Handle<Object> object,
Handle<String> name,
Handle<Object> value) {
@@ -1397,8 +1401,10 @@ MaybeObject* StoreIC::Store(State state,
#ifdef DEBUG
if (FLAG_trace_ic) PrintF("[StoreIC : +#length /array]\n");
#endif
- Code* target = Builtins::builtin(Builtins::StoreIC_ArrayLength);
- set_target(target);
+ Builtins::Name target = (extra_ic_state == kStoreICStrict)
+ ? Builtins::StoreIC_ArrayLength_Strict
+ : Builtins::StoreIC_ArrayLength;
+ set_target(Builtins::builtin(target));
return receiver->SetProperty(*name, *value, NONE);
}
@@ -1456,15 +1462,23 @@ MaybeObject* StoreIC::Store(State state,
// If no inlined store ic was patched, generate a stub for this
// store.
- UpdateCaches(&lookup, state, receiver, name, value);
+ UpdateCaches(&lookup, state, extra_ic_state, receiver, name, value);
+ } else {
+ // Strict mode doesn't allow setting non-existent global property.
+ if (extra_ic_state == kStoreICStrict && IsContextual(object)) {
+ return ReferenceError("not_defined", name);
+ }
}
}
if (receiver->IsJSGlobalProxy()) {
// Generate a generic stub that goes to the runtime when we see a global
// proxy as receiver.
- if (target() != global_proxy_stub()) {
- set_target(global_proxy_stub());
+ Code* stub = (extra_ic_state == kStoreICStrict)
+ ? global_proxy_stub_strict()
+ : global_proxy_stub();
+ if (target() != stub) {
+ set_target(stub);
#ifdef DEBUG
TraceIC("StoreIC", name, state, target());
#endif
@@ -1478,6 +1492,7 @@ MaybeObject* StoreIC::Store(State state,
void StoreIC::UpdateCaches(LookupResult* lookup,
State state,
+ Code::ExtraICState extra_ic_state,
Handle<JSObject> receiver,
Handle<String> name,
Handle<Object> value) {
@@ -1498,8 +1513,8 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
Object* code = NULL;
switch (type) {
case FIELD: {
- maybe_code = StubCache::ComputeStoreField(*name, *receiver,
- lookup->GetFieldIndex());
+ maybe_code = StubCache::ComputeStoreField(
+ *name, *receiver, lookup->GetFieldIndex(), NULL, extra_ic_state);
break;
}
case MAP_TRANSITION: {
@@ -1508,8 +1523,8 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
ASSERT(type == MAP_TRANSITION);
Handle<Map> transition(lookup->GetTransitionMap());
int index = transition->PropertyIndexFor(*name);
- maybe_code = StubCache::ComputeStoreField(*name, *receiver,
- index, *transition);
+ maybe_code = StubCache::ComputeStoreField(
+ *name, *receiver, index, *transition, extra_ic_state);
break;
}
case NORMAL: {
@@ -1520,10 +1535,11 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
JSGlobalPropertyCell* cell =
JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
- maybe_code = StubCache::ComputeStoreGlobal(*name, *global, cell);
+ maybe_code = StubCache::ComputeStoreGlobal(
+ *name, *global, cell, extra_ic_state);
} else {
if (lookup->holder() != *receiver) return;
- maybe_code = StubCache::ComputeStoreNormal();
+ maybe_code = StubCache::ComputeStoreNormal(extra_ic_state);
}
break;
}
@@ -1531,12 +1547,14 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
if (!lookup->GetCallbackObject()->IsAccessorInfo()) return;
AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
if (v8::ToCData<Address>(callback->setter()) == 0) return;
- maybe_code = StubCache::ComputeStoreCallback(*name, *receiver, callback);
+ maybe_code = StubCache::ComputeStoreCallback(
+ *name, *receiver, callback, extra_ic_state);
break;
}
case INTERCEPTOR: {
ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined());
- maybe_code = StubCache::ComputeStoreInterceptor(*name, *receiver);
+ maybe_code = StubCache::ComputeStoreInterceptor(
+ *name, *receiver, extra_ic_state);
break;
}
default:
@@ -1552,7 +1570,11 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
set_target(Code::cast(code));
} else if (state == MONOMORPHIC) {
// Only move to megamorphic if the target changes.
- if (target() != Code::cast(code)) set_target(megamorphic_stub());
+ if (target() != Code::cast(code)) {
+ set_target(extra_ic_state == kStoreICStrict
+ ? megamorphic_stub_strict()
+ : megamorphic_stub());
+ }
} else if (state == MEGAMORPHIC) {
// Update the stub cache.
StubCache::Set(*name, receiver->map(), Code::cast(code));
@@ -1610,19 +1632,25 @@ MaybeObject* KeyedStoreIC::Store(State state,
if (use_ic) {
Code* stub = generic_stub();
- if (object->IsJSObject()) {
- Handle<JSObject> receiver = Handle<JSObject>::cast(object);
- if (receiver->HasExternalArrayElements()) {
- MaybeObject* probe =
- StubCache::ComputeKeyedLoadOrStoreExternalArray(*receiver, true);
- stub =
- probe->IsFailure() ? NULL : Code::cast(probe->ToObjectUnchecked());
- } else if (state == UNINITIALIZED &&
- key->IsSmi() &&
- receiver->map()->has_fast_elements()) {
- MaybeObject* probe = StubCache::ComputeKeyedStoreSpecialized(*receiver);
- stub =
- probe->IsFailure() ? NULL : Code::cast(probe->ToObjectUnchecked());
+ if (state == UNINITIALIZED) {
+ if (object->IsJSObject()) {
+ Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+ if (receiver->HasExternalArrayElements()) {
+ MaybeObject* probe =
+ StubCache::ComputeKeyedLoadOrStoreExternalArray(*receiver, true);
+ stub = probe->IsFailure() ?
+ NULL : Code::cast(probe->ToObjectUnchecked());
+ } else if (receiver->HasPixelElements()) {
+ MaybeObject* probe =
+ StubCache::ComputeKeyedStorePixelArray(*receiver);
+ stub = probe->IsFailure() ?
+ NULL : Code::cast(probe->ToObjectUnchecked());
+ } else if (key->IsSmi() && receiver->map()->has_fast_elements()) {
+ MaybeObject* probe =
+ StubCache::ComputeKeyedStoreSpecialized(*receiver);
+ stub = probe->IsFailure() ?
+ NULL : Code::cast(probe->ToObjectUnchecked());
+ }
}
}
if (stub != NULL) set_target(stub);
@@ -1795,8 +1823,9 @@ MUST_USE_RESULT MaybeObject* StoreIC_Miss(Arguments args) {
ASSERT(args.length() == 3);
StoreIC ic;
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
- return ic.Store(state, args.at<Object>(0), args.at<String>(1),
- args.at<Object>(2));
+ Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
+ return ic.Store(state, extra_ic_state, args.at<Object>(0),
+ args.at<String>(1), args.at<Object>(2));
}
diff --git a/deps/v8/src/ic.h b/deps/v8/src/ic.h
index 409ad3806d..3b10d064f6 100644
--- a/deps/v8/src/ic.h
+++ b/deps/v8/src/ic.h
@@ -398,9 +398,16 @@ class KeyedLoadIC: public IC {
class StoreIC: public IC {
public:
+
+ enum StoreICStrictMode {
+ kStoreICNonStrict = kNonStrictMode,
+ kStoreICStrict = kStrictMode
+ };
+
StoreIC() : IC(NO_EXTRA_FRAME) { ASSERT(target()->is_store_stub()); }
MUST_USE_RESULT MaybeObject* Store(State state,
+ Code::ExtraICState extra_ic_state,
Handle<Object> object,
Handle<String> name,
Handle<Object> value);
@@ -408,7 +415,8 @@ class StoreIC: public IC {
// Code generators for stub routines. Only called once at startup.
static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
static void GenerateMiss(MacroAssembler* masm);
- static void GenerateMegamorphic(MacroAssembler* masm);
+ static void GenerateMegamorphic(MacroAssembler* masm,
+ Code::ExtraICState extra_ic_state);
static void GenerateArrayLength(MacroAssembler* masm);
static void GenerateNormal(MacroAssembler* masm);
static void GenerateGlobalProxy(MacroAssembler* masm);
@@ -424,7 +432,9 @@ class StoreIC: public IC {
// Update the inline cache and the global stub cache based on the
// lookup result.
void UpdateCaches(LookupResult* lookup,
- State state, Handle<JSObject> receiver,
+ State state,
+ Code::ExtraICState extra_ic_state,
+ Handle<JSObject> receiver,
Handle<String> name,
Handle<Object> value);
@@ -432,12 +442,21 @@ class StoreIC: public IC {
static Code* megamorphic_stub() {
return Builtins::builtin(Builtins::StoreIC_Megamorphic);
}
+ static Code* megamorphic_stub_strict() {
+ return Builtins::builtin(Builtins::StoreIC_Megamorphic_Strict);
+ }
static Code* initialize_stub() {
return Builtins::builtin(Builtins::StoreIC_Initialize);
}
+ static Code* initialize_stub_strict() {
+ return Builtins::builtin(Builtins::StoreIC_Initialize_Strict);
+ }
static Code* global_proxy_stub() {
return Builtins::builtin(Builtins::StoreIC_GlobalProxy);
}
+ static Code* global_proxy_stub_strict() {
+ return Builtins::builtin(Builtins::StoreIC_GlobalProxy_Strict);
+ }
static void Clear(Address address, Code* target);
diff --git a/deps/v8/src/liveedit.cc b/deps/v8/src/liveedit.cc
index b6ad4cf538..a395c51189 100644
--- a/deps/v8/src/liveedit.cc
+++ b/deps/v8/src/liveedit.cc
@@ -36,7 +36,6 @@
#include "deoptimizer.h"
#include "global-handles.h"
#include "memory.h"
-#include "oprofile-agent.h"
#include "parser.h"
#include "scopeinfo.h"
#include "scopes.h"
diff --git a/deps/v8/src/macro-assembler.h b/deps/v8/src/macro-assembler.h
index d261f57da7..30838bd761 100644
--- a/deps/v8/src/macro-assembler.h
+++ b/deps/v8/src/macro-assembler.h
@@ -50,6 +50,13 @@ enum HandlerType {
};
+// Types of uncatchable exceptions.
+enum UncatchableExceptionType {
+ OUT_OF_MEMORY,
+ TERMINATION
+};
+
+
// Invalid depth in prototype chain.
const int kInvalidProtoDepth = -1;
diff --git a/deps/v8/src/math.js b/deps/v8/src/math.js
index 02b19aba69..70b8c57cae 100644
--- a/deps/v8/src/math.js
+++ b/deps/v8/src/math.js
@@ -220,7 +220,7 @@ function SetupMath() {
DONT_ENUM | DONT_DELETE | READ_ONLY);
%SetProperty($Math,
"LOG10E",
- 0.43429448190325176,
+ 0.4342944819032518,
DONT_ENUM | DONT_DELETE | READ_ONLY);
%SetProperty($Math,
"PI",
diff --git a/deps/v8/src/messages.cc b/deps/v8/src/messages.cc
index 432364919b..990000a32e 100644
--- a/deps/v8/src/messages.cc
+++ b/deps/v8/src/messages.cc
@@ -69,10 +69,13 @@ Handle<JSMessageObject> MessageHandler::MakeMessageObject(
Handle<String> stack_trace,
Handle<JSArray> stack_frames) {
Handle<String> type_handle = Factory::LookupAsciiSymbol(type);
- Handle<JSArray> arguments_handle = Factory::NewJSArray(args.length());
+ Handle<FixedArray> arguments_elements =
+ Factory::NewFixedArray(args.length());
for (int i = 0; i < args.length(); i++) {
- SetElement(arguments_handle, i, args[i]);
+ arguments_elements->set(i, *args[i]);
}
+ Handle<JSArray> arguments_handle =
+ Factory::NewJSArrayWithElements(arguments_elements);
int start = 0;
int end = 0;
@@ -87,7 +90,7 @@ Handle<JSMessageObject> MessageHandler::MakeMessageObject(
? Factory::undefined_value()
: Handle<Object>::cast(stack_trace);
- Handle<Object> stack_frames_handle = stack_frames.is_null()
+ Handle<Object> stack_frames_handle = stack_frames.is_null()
? Factory::undefined_value()
: Handle<Object>::cast(stack_frames);
diff --git a/deps/v8/src/messages.js b/deps/v8/src/messages.js
index d22ac658d9..b7e57aa228 100644
--- a/deps/v8/src/messages.js
+++ b/deps/v8/src/messages.js
@@ -211,6 +211,7 @@ function FormatMessage(message) {
invalid_preparser_data: ["Invalid preparser data for function ", "%0"],
strict_mode_with: ["Strict mode code may not include a with statement"],
strict_catch_variable: ["Catch variable may not be eval or arguments in strict mode"],
+ too_many_parameters: ["Too many parameters in function definition"],
strict_param_name: ["Parameter name eval or arguments is not allowed in strict mode"],
strict_param_dupe: ["Strict mode function may not have duplicate parameter names"],
strict_var_name: ["Variable name may not be eval or arguments in strict mode"],
@@ -223,6 +224,8 @@ function FormatMessage(message) {
strict_lhs_postfix: ["Postfix increment/decrement may not have eval or arguments operand in strict mode"],
strict_lhs_prefix: ["Prefix increment/decrement may not have eval or arguments operand in strict mode"],
strict_reserved_word: ["Use of future reserved word in strict mode"],
+ strict_delete: ["Delete of an unqualified identifier in strict mode."],
+ strict_delete_property: ["Cannot delete property '", "%0", "' of ", "%1"],
};
}
var message_type = %MessageGetType(message);
@@ -316,6 +319,7 @@ Script.prototype.lineFromPosition = function(position) {
return i;
}
}
+
return -1;
}
diff --git a/deps/v8/src/objects-debug.cc b/deps/v8/src/objects-debug.cc
index c8246c86c9..c1caef2d98 100644
--- a/deps/v8/src/objects-debug.cc
+++ b/deps/v8/src/objects-debug.cc
@@ -440,7 +440,7 @@ void JSRegExp::JSRegExpVerify() {
ASSERT(ascii_data->IsTheHole() || ascii_data->IsJSObject() ||
(is_native ? ascii_data->IsCode() : ascii_data->IsByteArray()));
Object* uc16_data = arr->get(JSRegExp::kIrregexpUC16CodeIndex);
- ASSERT(uc16_data->IsTheHole() || ascii_data->IsJSObject() ||
+ ASSERT(uc16_data->IsTheHole() || uc16_data->IsJSObject() ||
(is_native ? uc16_data->IsCode() : uc16_data->IsByteArray()));
ASSERT(arr->get(JSRegExp::kIrregexpCaptureCountIndex)->IsSmi());
ASSERT(arr->get(JSRegExp::kIrregexpMaxRegisterCountIndex)->IsSmi());
diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h
index 3b83dd4869..24887a0efb 100644
--- a/deps/v8/src/objects-inl.h
+++ b/deps/v8/src/objects-inl.h
@@ -2610,10 +2610,12 @@ Code::Flags Code::ComputeFlags(Kind kind,
PropertyType type,
int argc,
InlineCacheHolderFlag holder) {
- // Extra IC state is only allowed for monomorphic call IC stubs.
+ // Extra IC state is only allowed for monomorphic call IC stubs
+ // or for store IC stubs.
ASSERT(extra_ic_state == kNoExtraICState ||
(kind == CALL_IC && (ic_state == MONOMORPHIC ||
- ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)));
+ ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)) ||
+ (kind == STORE_IC));
// Compute the bit mask.
int bits = kind << kFlagsKindShift;
if (in_loop) bits |= kFlagsICInLoopMask;
@@ -2747,6 +2749,22 @@ MaybeObject* Map::GetSlowElementsMap() {
}
+MaybeObject* Map::GetPixelArrayElementsMap() {
+ if (has_pixel_array_elements()) return this;
+ // TODO(danno): Special case empty object map (or most common case)
+ // to return a pre-canned pixel array map.
+ Object* obj;
+ { MaybeObject* maybe_obj = CopyDropTransitions();
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ }
+ Map* new_map = Map::cast(obj);
+ new_map->set_has_fast_elements(false);
+ new_map->set_has_pixel_array_elements(true);
+ Counters::map_to_pixel_array_elements.Increment();
+ return new_map;
+}
+
+
ACCESSORS(Map, instance_descriptors, DescriptorArray,
kInstanceDescriptorsOffset)
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc
index 775487a0a6..e0232d5873 100644
--- a/deps/v8/src/objects.cc
+++ b/deps/v8/src/objects.cc
@@ -531,10 +531,25 @@ MaybeObject* Object::GetProperty(Object* receiver,
MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
- // Non-JS objects do not have integer indexed properties.
- if (!IsJSObject()) return Heap::undefined_value();
- return JSObject::cast(this)->GetElementWithReceiver(JSObject::cast(receiver),
- index);
+ if (IsJSObject()) {
+ return JSObject::cast(this)->GetElementWithReceiver(receiver, index);
+ }
+
+ Object* holder = NULL;
+ Context* global_context = Top::context()->global_context();
+ if (IsString()) {
+ holder = global_context->string_function()->instance_prototype();
+ } else if (IsNumber()) {
+ holder = global_context->number_function()->instance_prototype();
+ } else if (IsBoolean()) {
+ holder = global_context->boolean_function()->instance_prototype();
+ } else {
+ // Undefined and null have no indexed properties.
+ ASSERT(IsUndefined() || IsNull());
+ return Heap::undefined_value();
+ }
+
+ return JSObject::cast(holder)->GetElementWithReceiver(receiver, index);
}
@@ -1399,7 +1414,7 @@ MaybeObject* JSObject::AddProperty(String* name,
if (!map()->is_extensible()) {
Handle<Object> args[1] = {Handle<String>(name)};
return Top::Throw(*Factory::NewTypeError("object_not_extensible",
- HandleVector(args, 1)));
+ HandleVector(args, 1)));
}
if (HasFastProperties()) {
// Ensure the descriptor array does not get too big.
@@ -1707,8 +1722,9 @@ void JSObject::LookupCallbackSetterInPrototypes(String* name,
}
-bool JSObject::SetElementWithCallbackSetterInPrototypes(uint32_t index,
- Object* value) {
+MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(uint32_t index,
+ Object* value,
+ bool* found) {
for (Object* pt = GetPrototype();
pt != Heap::null_value();
pt = pt->GetPrototype()) {
@@ -1718,15 +1734,16 @@ bool JSObject::SetElementWithCallbackSetterInPrototypes(uint32_t index,
NumberDictionary* dictionary = JSObject::cast(pt)->element_dictionary();
int entry = dictionary->FindEntry(index);
if (entry != NumberDictionary::kNotFound) {
- Object* element = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
if (details.type() == CALLBACKS) {
- SetElementWithCallback(element, index, value, JSObject::cast(pt));
- return true;
+ *found = true;
+ return SetElementWithCallback(
+ dictionary->ValueAt(entry), index, value, JSObject::cast(pt));
}
}
}
- return false;
+ *found = false;
+ return Heap::the_hole_value();
}
@@ -2618,7 +2635,17 @@ MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
NumberDictionary* dictionary = element_dictionary();
int entry = dictionary->FindEntry(index);
if (entry != NumberDictionary::kNotFound) {
- return dictionary->DeleteProperty(entry, mode);
+ Object* result = dictionary->DeleteProperty(entry, mode);
+ if (mode == STRICT_DELETION && result == Heap::false_value()) {
+ // In strict mode, deleting a non-configurable property throws
+ // exception. dictionary->DeleteProperty will return false_value()
+ // if a non-configurable property is being deleted.
+ HandleScope scope;
+ Handle<Object> i = Factory::NewNumberFromUint(index);
+ Handle<Object> args[2] = { i, Handle<Object>(this) };
+ return Top::Throw(*Factory::NewTypeError("strict_delete_property",
+ HandleVector(args, 2)));
+ }
}
break;
}
@@ -2657,6 +2684,13 @@ MaybeObject* JSObject::DeleteProperty(String* name, DeleteMode mode) {
if (!result.IsProperty()) return Heap::true_value();
// Ignore attributes if forcing a deletion.
if (result.IsDontDelete() && mode != FORCE_DELETION) {
+ if (mode == STRICT_DELETION) {
+ // Deleting a non-configurable property in strict mode.
+ HandleScope scope;
+ Handle<Object> args[2] = { Handle<Object>(name), Handle<Object>(this) };
+ return Top::Throw(*Factory::NewTypeError("strict_delete_property",
+ HandleVector(args, 2)));
+ }
return Heap::false_value();
}
// Check for interceptor.
@@ -2779,6 +2813,13 @@ bool JSObject::ReferencesObject(Object* obj) {
MaybeObject* JSObject::PreventExtensions() {
+ if (IsJSGlobalProxy()) {
+ Object* proto = GetPrototype();
+ if (proto->IsNull()) return this;
+ ASSERT(proto->IsJSGlobalObject());
+ return JSObject::cast(proto)->PreventExtensions();
+ }
+
// If there are fast elements we normalize.
if (HasFastElements()) {
Object* ok;
@@ -6229,10 +6270,35 @@ const char* Code::PropertyType2String(PropertyType type) {
}
+void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
+ const char* name = NULL;
+ switch (kind) {
+ case CALL_IC:
+ if (extra == STRING_INDEX_OUT_OF_BOUNDS) {
+ name = "STRING_INDEX_OUT_OF_BOUNDS";
+ }
+ break;
+ case STORE_IC:
+ if (extra == StoreIC::kStoreICStrict) {
+ name = "STRICT";
+ }
+ break;
+ default:
+ break;
+ }
+ if (name != NULL) {
+ PrintF(out, "extra_ic_state = %s\n", name);
+ } else {
+ PrintF(out, "etra_ic_state = %d\n", extra);
+ }
+}
+
+
void Code::Disassemble(const char* name, FILE* out) {
PrintF(out, "kind = %s\n", Kind2String(kind()));
if (is_inline_cache_stub()) {
PrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
+ PrintExtraICState(out, kind(), extra_ic_state());
PrintF(out, "ic_in_loop = %d\n", ic_in_loop() == IN_LOOP);
if (ic_state() == MONOMORPHIC) {
PrintF(out, "type = %s\n", PropertyType2String(type()));
@@ -6962,9 +7028,11 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
uint32_t elms_length = static_cast<uint32_t>(elms->length());
if (check_prototype &&
- (index >= elms_length || elms->get(index)->IsTheHole()) &&
- SetElementWithCallbackSetterInPrototypes(index, value)) {
- return value;
+ (index >= elms_length || elms->get(index)->IsTheHole())) {
+ bool found;
+ MaybeObject* result =
+ SetElementWithCallbackSetterInPrototypes(index, value, &found);
+ if (found) return result;
}
@@ -7096,9 +7164,11 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
}
} else {
// Index not already used. Look for an accessor in the prototype chain.
- if (check_prototype &&
- SetElementWithCallbackSetterInPrototypes(index, value)) {
- return value;
+ if (check_prototype) {
+ bool found;
+ MaybeObject* result =
+ SetElementWithCallbackSetterInPrototypes(index, value, &found);
+ if (found) return result;
}
// When we set the is_extensible flag to false we always force
// the element into dictionary mode (and force them to stay there).
@@ -7182,7 +7252,7 @@ MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index,
}
-MaybeObject* JSObject::GetElementPostInterceptor(JSObject* receiver,
+MaybeObject* JSObject::GetElementPostInterceptor(Object* receiver,
uint32_t index) {
// Get element works for both JSObject and JSArray since
// JSArray::length cannot change.
@@ -7239,14 +7309,14 @@ MaybeObject* JSObject::GetElementPostInterceptor(JSObject* receiver,
}
-MaybeObject* JSObject::GetElementWithInterceptor(JSObject* receiver,
+MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver,
uint32_t index) {
// Make sure that the top context does not change when doing
// callbacks or interceptor calls.
AssertNoContextChange ncc;
HandleScope scope;
Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
- Handle<JSObject> this_handle(receiver);
+ Handle<Object> this_handle(receiver);
Handle<JSObject> holder_handle(this);
if (!interceptor->getter()->IsUndefined()) {
@@ -7272,7 +7342,7 @@ MaybeObject* JSObject::GetElementWithInterceptor(JSObject* receiver,
}
-MaybeObject* JSObject::GetElementWithReceiver(JSObject* receiver,
+MaybeObject* JSObject::GetElementWithReceiver(Object* receiver,
uint32_t index) {
// Check access rights if needed.
if (IsAccessCheckNeeded() &&
@@ -8552,10 +8622,20 @@ MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
if (value->IsUndefined()) {
undefs++;
} else {
+ if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
+ // Adding an entry with the key beyond smi-range requires
+ // allocation. Bailout.
+ return Smi::FromInt(-1);
+ }
new_dict->AddNumberEntry(pos, value, details)->ToObjectUnchecked();
pos++;
}
} else {
+ if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
+ // Adding an entry with the key beyond smi-range requires
+ // allocation. Bailout.
+ return Smi::FromInt(-1);
+ }
new_dict->AddNumberEntry(key, value, details)->ToObjectUnchecked();
}
}
@@ -8564,6 +8644,11 @@ MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
uint32_t result = pos;
PropertyDetails no_details = PropertyDetails(NONE, NORMAL);
while (undefs > 0) {
+ if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
+ // Adding an entry with the key beyond smi-range requires
+ // allocation. Bailout.
+ return Smi::FromInt(-1);
+ }
new_dict->AddNumberEntry(pos, Heap::undefined_value(), no_details)->
ToObjectUnchecked();
pos++;
@@ -9292,7 +9377,7 @@ Object* Dictionary<Shape, Key>::DeleteProperty(int entry,
JSObject::DeleteMode mode) {
PropertyDetails details = DetailsAt(entry);
// Ignore attributes if forcing a deletion.
- if (details.IsDontDelete() && mode == JSObject::NORMAL_DELETION) {
+ if (details.IsDontDelete() && mode != JSObject::FORCE_DELETION) {
return Heap::false_value();
}
SetEntry(entry, Heap::null_value(), Heap::null_value(), Smi::FromInt(0));
diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h
index bf598307a2..d6349e66eb 100644
--- a/deps/v8/src/objects.h
+++ b/deps/v8/src/objects.h
@@ -1286,7 +1286,12 @@ class HeapNumber: public HeapObject {
// caching.
class JSObject: public HeapObject {
public:
- enum DeleteMode { NORMAL_DELETION, FORCE_DELETION };
+ enum DeleteMode {
+ NORMAL_DELETION,
+ STRICT_DELETION,
+ FORCE_DELETION
+ };
+
enum ElementsKind {
// The only "fast" kind.
FAST_ELEMENTS,
@@ -1541,8 +1546,8 @@ class JSObject: public HeapObject {
// Returns the index'th element.
// The undefined object if index is out of bounds.
- MaybeObject* GetElementWithReceiver(JSObject* receiver, uint32_t index);
- MaybeObject* GetElementWithInterceptor(JSObject* receiver, uint32_t index);
+ MaybeObject* GetElementWithReceiver(Object* receiver, uint32_t index);
+ MaybeObject* GetElementWithInterceptor(Object* receiver, uint32_t index);
MUST_USE_RESULT MaybeObject* SetFastElementsCapacityAndLength(int capacity,
int length);
@@ -1579,7 +1584,8 @@ class JSObject: public HeapObject {
void LookupRealNamedProperty(String* name, LookupResult* result);
void LookupRealNamedPropertyInPrototypes(String* name, LookupResult* result);
void LookupCallbackSetterInPrototypes(String* name, LookupResult* result);
- bool SetElementWithCallbackSetterInPrototypes(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetElementWithCallbackSetterInPrototypes(
+ uint32_t index, Object* value, bool* found);
void LookupCallback(String* name, LookupResult* result);
// Returns the number of properties on this object filtering out properties
@@ -1798,7 +1804,7 @@ class JSObject: public HeapObject {
Object* value,
bool check_prototype);
- MaybeObject* GetElementPostInterceptor(JSObject* receiver, uint32_t index);
+ MaybeObject* GetElementPostInterceptor(Object* receiver, uint32_t index);
MUST_USE_RESULT MaybeObject* DeletePropertyPostInterceptor(String* name,
DeleteMode mode);
@@ -3199,6 +3205,7 @@ class Code: public HeapObject {
static const char* Kind2String(Kind kind);
static const char* ICState2String(InlineCacheState state);
static const char* PropertyType2String(PropertyType type);
+ static void PrintExtraICState(FILE* out, Kind kind, ExtraICState extra);
inline void Disassemble(const char* name) {
Disassemble(name, stdout);
}
@@ -3592,6 +3599,19 @@ class Map: public HeapObject {
return ((1 << kHasFastElements) & bit_field2()) != 0;
}
+ // Tells whether an instance has pixel array elements.
+ inline void set_has_pixel_array_elements(bool value) {
+ if (value) {
+ set_bit_field2(bit_field2() | (1 << kHasPixelArrayElements));
+ } else {
+ set_bit_field2(bit_field2() & ~(1 << kHasPixelArrayElements));
+ }
+ }
+
+ inline bool has_pixel_array_elements() {
+ return ((1 << kHasPixelArrayElements) & bit_field2()) != 0;
+ }
+
// Tells whether the map is attached to SharedFunctionInfo
// (for inobject slack tracking).
inline void set_attached_to_shared_function_info(bool value);
@@ -3650,6 +3670,11 @@ class Map: public HeapObject {
// from the descriptors and the fast elements bit cleared.
MUST_USE_RESULT inline MaybeObject* GetSlowElementsMap();
+ // Returns this map if it has the pixel array elements bit is set, otherwise
+ // returns a copy of the map, with all transitions dropped from the
+ // descriptors and the pixel array elements bit set.
+ MUST_USE_RESULT inline MaybeObject* GetPixelArrayElementsMap();
+
// Returns the property index for name (only valid for FAST MODE).
int PropertyIndexFor(String* name);
@@ -3768,6 +3793,7 @@ class Map: public HeapObject {
static const int kStringWrapperSafeForDefaultValueOf = 3;
static const int kAttachedToSharedFunctionInfo = 4;
static const int kIsShared = 5;
+ static const int kHasPixelArrayElements = 6;
// Layout of the default cache. It holds alternating name and code objects.
static const int kCodeCacheEntrySize = 2;
@@ -4344,7 +4370,6 @@ class SharedFunctionInfo: public HeapObject {
kThisPropertyAssignmentsOffset + kPointerSize,
kSize> BodyDescriptor;
- private:
// Bit positions in start_position_and_type.
// The source code start position is in the 30 most significant bits of
// the start_position_and_type field.
@@ -4363,6 +4388,35 @@ class SharedFunctionInfo: public HeapObject {
static const int kOptimizationDisabled = 7;
static const int kStrictModeFunction = 8;
+ private:
+#if V8_HOST_ARCH_32_BIT
+ // On 32 bit platforms, compiler hints is a smi.
+ static const int kCompilerHintsSmiTagSize = kSmiTagSize;
+ static const int kCompilerHintsSize = kPointerSize;
+#else
+ // On 64 bit platforms, compiler hints is not a smi, see comment above.
+ static const int kCompilerHintsSmiTagSize = 0;
+ static const int kCompilerHintsSize = kIntSize;
+#endif
+
+ public:
+ // Constants for optimizing codegen for strict mode function tests.
+ // Allows to use byte-widgh instructions.
+ static const int kStrictModeBitWithinByte =
+ (kStrictModeFunction + kCompilerHintsSmiTagSize) % kBitsPerByte;
+
+#if __BYTE_ORDER == __LITTLE_ENDIAN
+ static const int kStrictModeByteOffset = kCompilerHintsOffset +
+ (kStrictModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte;
+#elif __BYTE_ORDER == __BIG_ENDIAN
+ static const int kStrictModeByteOffset = kCompilerHintsOffset +
+ (kCompilerHintsSize - 1) -
+ ((kStrictModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte);
+#else
+#error Unknown byte ordering
+#endif
+
+ private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedFunctionInfo);
};
diff --git a/deps/v8/src/oprofile-agent.cc b/deps/v8/src/oprofile-agent.cc
deleted file mode 100644
index 6df8f503c9..0000000000
--- a/deps/v8/src/oprofile-agent.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
-
-#include "oprofile-agent.h"
-
-namespace v8 {
-namespace internal {
-
-
-bool OProfileAgent::Initialize() {
-#ifdef ENABLE_OPROFILE_AGENT
- if (FLAG_oprofile) {
- if (handle_ != NULL) return false;
-
- // Disable code moving by GC.
- FLAG_always_compact = false;
- FLAG_never_compact = true;
-
- handle_ = op_open_agent();
- return (handle_ != NULL);
- } else {
- return true;
- }
-#else
- if (FLAG_oprofile) {
- OS::Print("Warning: --oprofile specified but binary compiled without "
- "oprofile support.\n");
- }
- return true;
-#endif
-}
-
-
-void OProfileAgent::TearDown() {
-#ifdef ENABLE_OPROFILE_AGENT
- if (handle_ != NULL) {
- op_close_agent(handle_);
- }
-#endif
-}
-
-
-#ifdef ENABLE_OPROFILE_AGENT
-op_agent_t OProfileAgent::handle_ = NULL;
-
-
-void OProfileAgent::CreateNativeCodeRegion(const char* name,
- const void* ptr, unsigned int size) {
- op_write_native_code(handle_, name, (uint64_t)ptr, ptr, size);
-}
-
-
-void OProfileAgent::CreateNativeCodeRegion(String* name,
- const void* ptr, unsigned int size) {
- const char* func_name;
- SmartPointer<char> str =
- name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
- func_name = name->length() > 0 ? *str : "<anonymous>";
- CreateNativeCodeRegion(func_name, ptr, size);
-}
-
-
-void OProfileAgent::CreateNativeCodeRegion(String* name, String* source,
- int line_num, const void* ptr, unsigned int size) {
- Vector<char> buf = Vector<char>::New(OProfileAgent::kFormattingBufSize);
- const char* func_name;
- SmartPointer<char> str =
- name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
- func_name = name->length() > 0 ? *str : "<anonymous>";
- SmartPointer<char> source_str =
- source->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
- if (v8::internal::OS::SNPrintF(buf, "%s %s:%d",
- func_name, *source_str, line_num) != -1) {
- CreateNativeCodeRegion(buf.start(), ptr, size);
- } else {
- CreateNativeCodeRegion("<script/func name too long>", ptr, size);
- }
-}
-
-#endif // ENABLE_OPROFILE_AGENT
-
-} } // namespace v8::internal
diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc
index 5353a63460..249c9ced35 100644
--- a/deps/v8/src/parser.cc
+++ b/deps/v8/src/parser.cc
@@ -764,8 +764,6 @@ FunctionLiteral* Parser::ParseLazy(Handle<SharedFunctionInfo> info,
RelocInfo::kNoPosition, type, &ok);
// Make sure the results agree.
ASSERT(ok == (result != NULL));
- // The only errors should be stack overflows.
- ASSERT(ok || stack_overflow_);
}
// Make sure the target stack is empty.
@@ -774,8 +772,8 @@ FunctionLiteral* Parser::ParseLazy(Handle<SharedFunctionInfo> info,
// If there was a stack overflow we have to get rid of AST and it is
// not safe to do before scope has been deleted.
if (result == NULL) {
- Top::StackOverflow();
zone_scope->DeleteOnExit();
+ if (stack_overflow_) Top::StackOverflow();
} else {
Handle<String> inferred_name(info->inferred_name());
result->set_inferred_name(inferred_name);
@@ -2523,6 +2521,16 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
}
}
+ // "delete identifier" is a syntax error in strict mode.
+ if (op == Token::DELETE && temp_scope_->StrictMode()) {
+ VariableProxy* operand = expression->AsVariableProxy();
+ if (operand != NULL && !operand->is_this()) {
+ ReportMessage("strict_delete", Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
+ }
+
return new UnaryOperation(op, expression);
} else if (Token::IsCountOp(op)) {
@@ -3501,6 +3509,12 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
Variable* parameter = top_scope_->DeclareLocal(param_name, Variable::VAR);
top_scope_->AddParameter(parameter);
num_parameters++;
+ if (num_parameters > kMaxNumFunctionParameters) {
+ ReportMessageAt(scanner().location(), "too_many_parameters",
+ Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
done = (peek() == Token::RPAREN);
if (!done) Expect(Token::COMMA, CHECK_OK);
}
@@ -3919,16 +3933,15 @@ Expression* Parser::NewThrowError(Handle<String> constructor,
Handle<String> type,
Vector< Handle<Object> > arguments) {
int argc = arguments.length();
- Handle<JSArray> array = Factory::NewJSArray(argc, TENURED);
- ASSERT(array->IsJSArray() && array->HasFastElements());
+ Handle<FixedArray> elements = Factory::NewFixedArray(argc, TENURED);
for (int i = 0; i < argc; i++) {
Handle<Object> element = arguments[i];
if (!element.is_null()) {
- // We know this doesn't cause a GC here because we allocated the JSArray
- // large enough.
- array->SetFastElement(i, *element)->ToObjectUnchecked();
+ elements->set(i, *element);
}
}
+ Handle<JSArray> array = Factory::NewJSArrayWithElements(elements, TENURED);
+
ZoneList<Expression*>* args = new ZoneList<Expression*>(2);
args->Add(new Literal(type));
args->Add(new Literal(array));
@@ -4058,6 +4071,11 @@ Handle<Object> JsonParser::ParseJsonObject() {
uint32_t index;
if (key->AsArrayIndex(&index)) {
SetOwnElement(json_object, index, value);
+ } else if (key->Equals(Heap::Proto_symbol())) {
+ // We can't remove the __proto__ accessor since it's hardcoded
+ // in several places. Instead go along and add the value as
+ // the prototype of the created object if possible.
+ SetPrototype(json_object, value);
} else {
SetLocalPropertyIgnoreAttributes(json_object, key, value, NONE);
}
@@ -4255,6 +4273,8 @@ RegExpTree* RegExpParser::ParseDisjunction() {
capture_index);
}
builder->AddAtom(body);
+ // For compatability with JSC and ES3, we allow quantifiers after
+ // lookaheads, and break in all cases.
break;
}
case '|': {
@@ -4328,7 +4348,7 @@ RegExpTree* RegExpParser::ParseDisjunction() {
type,
captures_started());
builder = stored_state->builder();
- break;
+ continue;
}
case '[': {
RegExpTree* atom = ParseCharacterClass(CHECK_FAILED);
@@ -4351,11 +4371,11 @@ RegExpTree* RegExpParser::ParseDisjunction() {
builder->AddAssertion(
new RegExpAssertion(RegExpAssertion::NON_BOUNDARY));
continue;
- // AtomEscape ::
- // CharacterClassEscape
- //
- // CharacterClassEscape :: one of
- // d D s S w W
+ // AtomEscape ::
+ // CharacterClassEscape
+ //
+ // CharacterClassEscape :: one of
+ // d D s S w W
case 'd': case 'D': case 's': case 'S': case 'w': case 'W': {
uc32 c = Next();
Advance(2);
diff --git a/deps/v8/src/parser.h b/deps/v8/src/parser.h
index aa8d525cd1..dfd909a004 100644
--- a/deps/v8/src/parser.h
+++ b/deps/v8/src/parser.h
@@ -436,6 +436,11 @@ class Parser {
Vector<Handle<String> > args);
protected:
+ // Limit on number of function parameters is chosen arbitrarily.
+ // Code::Flags uses only the low 17 bits of num-parameters to
+ // construct a hashable id, so if more than 2^17 are allowed, this
+ // should be checked.
+ static const int kMaxNumFunctionParameters = 32766;
FunctionLiteral* ParseLazy(Handle<SharedFunctionInfo> info,
UC16CharacterStream* source,
ZoneScope* zone_scope);
diff --git a/deps/v8/src/platform-cygwin.cc b/deps/v8/src/platform-cygwin.cc
deleted file mode 100644
index 27232436a4..0000000000
--- a/deps/v8/src/platform-cygwin.cc
+++ /dev/null
@@ -1,776 +0,0 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Platform specific code for Cygwin goes here. For the POSIX comaptible parts
-// the implementation is in platform-posix.cc.
-
-#include <pthread.h>
-#include <semaphore.h>
-#include <signal.h>
-#include <cygwin/signal.h>
-#include <sys/time.h>
-#include <sys/resource.h>
-#include <sys/types.h>
-#include <stdlib.h>
-
-#include <sys/types.h> // mmap & munmap
-#include <sys/mman.h> // mmap & munmap
-#include <sys/stat.h> // open
-#include <fcntl.h> // open
-#include <unistd.h> // sysconf
-#include <strings.h> // index
-#include <errno.h>
-#include <stdarg.h>
-
-#undef MAP_TYPE
-
-#include "v8.h"
-
-#include "platform.h"
-#include "top.h"
-#include "v8threads.h"
-#include "vm-state-inl.h"
-
-namespace v8 {
-namespace internal {
-
-// 0 is never a valid thread id
-static const pthread_t kNoThread = (pthread_t) 0;
-
-
-double ceiling(double x) {
- return ceil(x);
-}
-
-
-void OS::Setup() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly can cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srandom(static_cast<unsigned int>(seed));
-}
-
-
-uint64_t OS::CpuFeaturesImpliedByPlatform() {
- return 0; // Nothing special about cygwin
-}
-
-
-int OS::ActivationFrameAlignment() {
- // With gcc 4.4 the tree vectorization optimizer can generate code
- // that requires 16 byte alignment such as movdqa on x86.
- return 16;
-}
-
-
-void OS::ReleaseStore(volatile AtomicWord* ptr, AtomicWord value) {
- __asm__ __volatile__("" : : : "memory");
- // An x86 store acts as a release barrier.
- *ptr = value;
-}
-
-const char* OS::LocalTimezone(double time) {
- if (isnan(time)) return "";
- time_t tv = static_cast<time_t>(floor(time/msPerSecond));
- struct tm* t = localtime(&tv);
- if (NULL == t) return "";
- return tzname[0]; // The location of the timezone string on Cygwin.
-}
-
-
-double OS::LocalTimeOffset() {
- // On Cygwin, struct tm does not contain a tm_gmtoff field.
- time_t utc = time(NULL);
- ASSERT(utc != -1);
- struct tm* loc = localtime(&utc);
- ASSERT(loc != NULL);
- // time - localtime includes any daylight savings offset, so subtract it.
- return static_cast<double>((mktime(loc) - utc) * msPerSecond -
- (loc->tm_isdst > 0 ? 3600 * msPerSecond : 0));
-}
-
-
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low and and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-
-
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
- lowest_ever_allocated = Min(lowest_ever_allocated, address);
- highest_ever_allocated =
- Max(highest_ever_allocated,
- reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-
-
-bool OS::IsOutsideAllocatedSpace(void* address) {
- return address < lowest_ever_allocated || address >= highest_ever_allocated;
-}
-
-
-size_t OS::AllocateAlignment() {
- return sysconf(_SC_PAGESIZE);
-}
-
-
-void* OS::Allocate(const size_t requested,
- size_t* allocated,
- bool is_executable) {
- const size_t msize = RoundUp(requested, sysconf(_SC_PAGESIZE));
- int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
- void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
- if (mbase == MAP_FAILED) {
- LOG(StringEvent("OS::Allocate", "mmap failed"));
- return NULL;
- }
- *allocated = msize;
- UpdateAllocatedSpaceLimits(mbase, msize);
- return mbase;
-}
-
-
-void OS::Free(void* address, const size_t size) {
- // TODO(1240712): munmap has a return value which is ignored here.
- int result = munmap(address, size);
- USE(result);
- ASSERT(result == 0);
-}
-
-
-#ifdef ENABLE_HEAP_PROTECTION
-
-void OS::Protect(void* address, size_t size) {
- // TODO(1240712): mprotect has a return value which is ignored here.
- mprotect(address, size, PROT_READ);
-}
-
-
-void OS::Unprotect(void* address, size_t size, bool is_executable) {
- // TODO(1240712): mprotect has a return value which is ignored here.
- int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
- mprotect(address, size, prot);
-}
-
-#endif
-
-
-void OS::Sleep(int milliseconds) {
- unsigned int ms = static_cast<unsigned int>(milliseconds);
- usleep(1000 * ms);
-}
-
-
-void OS::Abort() {
- // Redirect to std abort to signal abnormal program termination.
- abort();
-}
-
-
-void OS::DebugBreak() {
- asm("int $3");
-}
-
-
-class PosixMemoryMappedFile : public OS::MemoryMappedFile {
- public:
- PosixMemoryMappedFile(FILE* file, void* memory, int size)
- : file_(file), memory_(memory), size_(size) { }
- virtual ~PosixMemoryMappedFile();
- virtual void* memory() { return memory_; }
- virtual int size() { return size_; }
- private:
- FILE* file_;
- void* memory_;
- int size_;
-};
-
-
-OS::MemoryMappedFile* OS::MemoryMappedFile::open(const char* name) {
- FILE* file = fopen(name, "w+");
- if (file == NULL) return NULL;
-
- fseek(file, 0, SEEK_END);
- int size = ftell(file);
-
- void* memory =
- mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
- return new PosixMemoryMappedFile(file, memory, size);
-}
-
-
-OS::MemoryMappedFile* OS::MemoryMappedFile::create(const char* name, int size,
- void* initial) {
- FILE* file = fopen(name, "w+");
- if (file == NULL) return NULL;
- int result = fwrite(initial, size, 1, file);
- if (result < 1) {
- fclose(file);
- return NULL;
- }
- void* memory =
- mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
- return new PosixMemoryMappedFile(file, memory, size);
-}
-
-
-PosixMemoryMappedFile::~PosixMemoryMappedFile() {
- if (memory_) munmap(memory_, size_);
- fclose(file_);
-}
-
-
-void OS::LogSharedLibraryAddresses() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
- // This function assumes that the layout of the file is as follows:
- // hex_start_addr-hex_end_addr rwxp <unused data> [binary_file_name]
- // If we encounter an unexpected situation we abort scanning further entries.
- FILE* fp = fopen("/proc/self/maps", "r");
- if (fp == NULL) return;
-
- // Allocate enough room to be able to store a full file name.
- const int kLibNameLen = FILENAME_MAX + 1;
- char* lib_name = reinterpret_cast<char*>(malloc(kLibNameLen));
-
- // This loop will terminate once the scanning hits an EOF.
- while (true) {
- uintptr_t start, end;
- char attr_r, attr_w, attr_x, attr_p;
- // Parse the addresses and permission bits at the beginning of the line.
- if (fscanf(fp, "%" V8PRIxPTR "-%" V8PRIxPTR, &start, &end) != 2) break;
- if (fscanf(fp, " %c%c%c%c", &attr_r, &attr_w, &attr_x, &attr_p) != 4) break;
-
- int c;
- if (attr_r == 'r' && attr_w != 'w' && attr_x == 'x') {
- // Found a read-only executable entry. Skip characters until we reach
- // the beginning of the filename or the end of the line.
- do {
- c = getc(fp);
- } while ((c != EOF) && (c != '\n') && (c != '/'));
- if (c == EOF) break; // EOF: Was unexpected, just exit.
-
- // Process the filename if found.
- if (c == '/') {
- ungetc(c, fp); // Push the '/' back into the stream to be read below.
-
- // Read to the end of the line. Exit if the read fails.
- if (fgets(lib_name, kLibNameLen, fp) == NULL) break;
-
- // Drop the newline character read by fgets. We do not need to check
- // for a zero-length string because we know that we at least read the
- // '/' character.
- lib_name[strlen(lib_name) - 1] = '\0';
- } else {
- // No library name found, just record the raw address range.
- snprintf(lib_name, kLibNameLen,
- "%08" V8PRIxPTR "-%08" V8PRIxPTR, start, end);
- }
- LOG(SharedLibraryEvent(lib_name, start, end));
- } else {
- // Entry not describing executable data. Skip to end of line to setup
- // reading the next entry.
- do {
- c = getc(fp);
- } while ((c != EOF) && (c != '\n'));
- if (c == EOF) break;
- }
- }
- free(lib_name);
- fclose(fp);
-#endif
-}
-
-
-void OS::SignalCodeMovingGC() {
- // Nothing to do on Cygwin
-}
-
-
-int OS::StackWalk(Vector<OS::StackFrame> frames) {
- // Not supported on Cygwin
- return 0;
-}
-
-
-// Constants used for mmap.
-static const int kMmapFd = -1;
-static const int kMmapFdOffset = 0;
-
-
-VirtualMemory::VirtualMemory(size_t size) {
- address_ = mmap(NULL, size, PROT_NONE,
- MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
- kMmapFd, kMmapFdOffset);
- size_ = size;
-}
-
-
-VirtualMemory::~VirtualMemory() {
- if (IsReserved()) {
- if (0 == munmap(address(), size())) address_ = MAP_FAILED;
- }
-}
-
-
-bool VirtualMemory::IsReserved() {
- return address_ != MAP_FAILED;
-}
-
-
-bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) {
- int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
-
- if (mprotect(address, size, prot) != 0) {
- return false;
- }
-
- UpdateAllocatedSpaceLimits(address, size);
- return true;
-}
-
-
-bool VirtualMemory::Uncommit(void* address, size_t size) {
- return mmap(address, size, PROT_NONE,
- MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
- kMmapFd, kMmapFdOffset) != MAP_FAILED;
-}
-
-
-class ThreadHandle::PlatformData : public Malloced {
- public:
- explicit PlatformData(ThreadHandle::Kind kind) {
- Initialize(kind);
- }
-
- void Initialize(ThreadHandle::Kind kind) {
- switch (kind) {
- case ThreadHandle::SELF: thread_ = pthread_self(); break;
- case ThreadHandle::INVALID: thread_ = kNoThread; break;
- }
- }
-
- pthread_t thread_; // Thread handle for pthread.
-};
-
-
-ThreadHandle::ThreadHandle(Kind kind) {
- data_ = new PlatformData(kind);
-}
-
-
-void ThreadHandle::Initialize(ThreadHandle::Kind kind) {
- data_->Initialize(kind);
-}
-
-
-ThreadHandle::~ThreadHandle() {
- delete data_;
-}
-
-
-bool ThreadHandle::IsSelf() const {
- return pthread_equal(data_->thread_, pthread_self());
-}
-
-
-bool ThreadHandle::IsValid() const {
- return data_->thread_ != kNoThread;
-}
-
-
-Thread::Thread() : ThreadHandle(ThreadHandle::INVALID) {
- set_name("v8:<unknown>");
-}
-
-
-Thread::Thread(const char* name) : ThreadHandle(ThreadHandle::INVALID) {
- set_name(name);
-}
-
-
-Thread::~Thread() {
-}
-
-
-static void* ThreadEntry(void* arg) {
- Thread* thread = reinterpret_cast<Thread*>(arg);
- // This is also initialized by the first argument to pthread_create() but we
- // don't know which thread will run first (the original thread or the new
- // one) so we initialize it here too.
- thread->thread_handle_data()->thread_ = pthread_self();
- ASSERT(thread->IsValid());
- thread->Run();
- return NULL;
-}
-
-
-void Thread::set_name(const char* name) {
- strncpy(name_, name, sizeof(name_));
- name_[sizeof(name_) - 1] = '\0';
-}
-
-
-void Thread::Start() {
- pthread_create(&thread_handle_data()->thread_, NULL, ThreadEntry, this);
- ASSERT(IsValid());
-}
-
-
-void Thread::Join() {
- pthread_join(thread_handle_data()->thread_, NULL);
-}
-
-
-Thread::LocalStorageKey Thread::CreateThreadLocalKey() {
- pthread_key_t key;
- int result = pthread_key_create(&key, NULL);
- USE(result);
- ASSERT(result == 0);
- return static_cast<LocalStorageKey>(key);
-}
-
-
-void Thread::DeleteThreadLocalKey(LocalStorageKey key) {
- pthread_key_t pthread_key = static_cast<pthread_key_t>(key);
- int result = pthread_key_delete(pthread_key);
- USE(result);
- ASSERT(result == 0);
-}
-
-
-void* Thread::GetThreadLocal(LocalStorageKey key) {
- pthread_key_t pthread_key = static_cast<pthread_key_t>(key);
- return pthread_getspecific(pthread_key);
-}
-
-
-void Thread::SetThreadLocal(LocalStorageKey key, void* value) {
- pthread_key_t pthread_key = static_cast<pthread_key_t>(key);
- pthread_setspecific(pthread_key, value);
-}
-
-
-void Thread::YieldCPU() {
- sched_yield();
-}
-
-
-class CygwinMutex : public Mutex {
- public:
-
- CygwinMutex() {
- pthread_mutexattr_t attrs;
- memset(&attrs, 0, sizeof(attrs));
-
- int result = pthread_mutexattr_init(&attrs);
- ASSERT(result == 0);
- result = pthread_mutexattr_settype(&attrs, PTHREAD_MUTEX_RECURSIVE);
- ASSERT(result == 0);
- result = pthread_mutex_init(&mutex_, &attrs);
- ASSERT(result == 0);
- }
-
- virtual ~CygwinMutex() { pthread_mutex_destroy(&mutex_); }
-
- virtual int Lock() {
- int result = pthread_mutex_lock(&mutex_);
- return result;
- }
-
- virtual int Unlock() {
- int result = pthread_mutex_unlock(&mutex_);
- return result;
- }
-
- virtual bool TryLock() {
- int result = pthread_mutex_trylock(&mutex_);
- // Return false if the lock is busy and locking failed.
- if (result == EBUSY) {
- return false;
- }
- ASSERT(result == 0); // Verify no other errors.
- return true;
- }
-
- private:
- pthread_mutex_t mutex_; // Pthread mutex for POSIX platforms.
-};
-
-
-Mutex* OS::CreateMutex() {
- return new CygwinMutex();
-}
-
-
-class CygwinSemaphore : public Semaphore {
- public:
- explicit CygwinSemaphore(int count) { sem_init(&sem_, 0, count); }
- virtual ~CygwinSemaphore() { sem_destroy(&sem_); }
-
- virtual void Wait();
- virtual bool Wait(int timeout);
- virtual void Signal() { sem_post(&sem_); }
- private:
- sem_t sem_;
-};
-
-
-void CygwinSemaphore::Wait() {
- while (true) {
- int result = sem_wait(&sem_);
- if (result == 0) return; // Successfully got semaphore.
- CHECK(result == -1 && errno == EINTR); // Signal caused spurious wakeup.
- }
-}
-
-
-#ifndef TIMEVAL_TO_TIMESPEC
-#define TIMEVAL_TO_TIMESPEC(tv, ts) do { \
- (ts)->tv_sec = (tv)->tv_sec; \
- (ts)->tv_nsec = (tv)->tv_usec * 1000; \
-} while (false)
-#endif
-
-
-bool CygwinSemaphore::Wait(int timeout) {
- const long kOneSecondMicros = 1000000; // NOLINT
-
- // Split timeout into second and nanosecond parts.
- struct timeval delta;
- delta.tv_usec = timeout % kOneSecondMicros;
- delta.tv_sec = timeout / kOneSecondMicros;
-
- struct timeval current_time;
- // Get the current time.
- if (gettimeofday(&current_time, NULL) == -1) {
- return false;
- }
-
- // Calculate time for end of timeout.
- struct timeval end_time;
- timeradd(&current_time, &delta, &end_time);
-
- struct timespec ts;
- TIMEVAL_TO_TIMESPEC(&end_time, &ts);
- // Wait for semaphore signalled or timeout.
- while (true) {
- int result = sem_timedwait(&sem_, &ts);
- if (result == 0) return true; // Successfully got semaphore.
- if (result == -1 && errno == ETIMEDOUT) return false; // Timeout.
- CHECK(result == -1 && errno == EINTR); // Signal caused spurious wakeup.
- }
-}
-
-
-Semaphore* OS::CreateSemaphore(int count) {
- return new CygwinSemaphore(count);
-}
-
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-
-typedef struct ucontext ucontext_t;
-
-static Sampler* active_sampler_ = NULL;
-static pthread_t vm_tid_ = 0;
-
-
-static pthread_t GetThreadID() {
- return pthread_self();
-}
-
-
-static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
- USE(info);
- if (signal != SIGPROF) return;
- if (active_sampler_ == NULL || !active_sampler_->IsActive()) return;
- if (vm_tid_ != GetThreadID()) return;
-
- TickSample sample_obj;
- TickSample* sample = CpuProfiler::TickSampleEvent();
- if (sample == NULL) sample = &sample_obj;
-
- // Extracting the sample from the context is extremely machine dependent.
- ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
- sample->state = Top::current_vm_state();
-#if V8_HOST_ARCH_IA32
- sample->pc = reinterpret_cast<Address>(ucontext->eip);
- sample->sp = reinterpret_cast<Address>(ucontext->esp);
- sample->fp = reinterpret_cast<Address>(ucontext->ebp);
-#else
- UNIMPLEMENTED();
-#endif
- active_sampler_->SampleStack(sample);
- active_sampler_->Tick(sample);
-}
-
-
-class Sampler::PlatformData : public Malloced {
- public:
- enum SleepInterval {
- FULL_INTERVAL,
- HALF_INTERVAL
- };
-
- explicit PlatformData(Sampler* sampler)
- : sampler_(sampler),
- signal_handler_installed_(false),
- vm_tgid_(getpid()),
- signal_sender_launched_(false) {
- }
-
- void SignalSender() {
- while (sampler_->IsActive()) {
- if (rate_limiter_.SuspendIfNecessary()) continue;
- if (sampler_->IsProfiling() && RuntimeProfiler::IsEnabled()) {
- SendProfilingSignal();
- Sleep(HALF_INTERVAL);
- RuntimeProfiler::NotifyTick();
- Sleep(HALF_INTERVAL);
- } else {
- if (sampler_->IsProfiling()) SendProfilingSignal();
- if (RuntimeProfiler::IsEnabled()) RuntimeProfiler::NotifyTick();
- Sleep(FULL_INTERVAL);
- }
- }
- }
-
- void SendProfilingSignal() {
- pthread_kill(vm_tid_, SIGPROF);
- }
-
- void Sleep(SleepInterval full_or_half) {
- // Convert ms to us and subtract 100 us to compensate delays
- // occuring during signal delivery.
- useconds_t interval = sampler_->interval_ * 1000 - 100;
- if (full_or_half == HALF_INTERVAL) interval /= 2;
- int result = usleep(interval);
-#ifdef DEBUG
- if (result != 0 && errno != EINTR) {
- fprintf(stderr,
- "SignalSender usleep error; interval = %u, errno = %d\n",
- static_cast<int>(interval),
- errno);
- ASSERT(result == 0 || errno == EINTR);
- }
-#endif
- USE(result);
- }
-
- Sampler* sampler_;
- bool signal_handler_installed_;
- struct sigaction old_signal_handler_;
- int vm_tgid_;
- bool signal_sender_launched_;
- pthread_t signal_sender_thread_;
- RuntimeProfilerRateLimiter rate_limiter_;
-};
-
-
-static void* SenderEntry(void* arg) {
- Sampler::PlatformData* data =
- reinterpret_cast<Sampler::PlatformData*>(arg);
- data->SignalSender();
- return 0;
-}
-
-
-Sampler::Sampler(int interval)
- : interval_(interval),
- profiling_(false),
- active_(false),
- samples_taken_(0) {
- data_ = new PlatformData(this);
-}
-
-
-Sampler::~Sampler() {
- ASSERT(!data_->signal_sender_launched_);
- delete data_;
-}
-
-
-void Sampler::Start() {
- // There can only be one active sampler at the time on POSIX
- // platforms.
- ASSERT(!IsActive());
- vm_tid_ = pthread_self();
-
- // Request profiling signals.
- struct sigaction sa;
- sa.sa_sigaction = ProfilerSignalHandler;
- sigemptyset(&sa.sa_mask);
- sa.sa_flags = SA_RESTART | SA_SIGINFO;
- if (sigaction(SIGPROF, &sa, &data_->old_signal_handler_) != 0) return;
- data_->signal_handler_installed_ = true;
-
- // Start a thread that sends SIGPROF signal to VM thread.
- // Sending the signal ourselves instead of relying on itimer provides
- // much better accuracy.
- SetActive(true);
- if (pthread_create(
- &data_->signal_sender_thread_, NULL, SenderEntry, data_) == 0) {
- data_->signal_sender_launched_ = true;
- }
-
- // Set this sampler as the active sampler.
- active_sampler_ = this;
-}
-
-
-void Sampler::Stop() {
- SetActive(false);
-
- // Wait for signal sender termination (it will exit after setting
- // active_ to false).
- if (data_->signal_sender_launched_) {
- Top::WakeUpRuntimeProfilerThreadBeforeShutdown();
- pthread_join(data_->signal_sender_thread_, NULL);
- data_->signal_sender_launched_ = false;
- }
-
- // Restore old signal handler
- if (data_->signal_handler_installed_) {
- sigaction(SIGPROF, &data_->old_signal_handler_, 0);
- data_->signal_handler_installed_ = false;
- }
-
- // This sampler is no longer the active sampler.
- active_sampler_ = NULL;
-}
-
-
-#endif // ENABLE_LOGGING_AND_PROFILING
-
-} } // namespace v8::internal
-
diff --git a/deps/v8/src/platform.h b/deps/v8/src/platform.h
index 8cb1561c5d..0d7d2e9cb8 100644
--- a/deps/v8/src/platform.h
+++ b/deps/v8/src/platform.h
@@ -383,14 +383,10 @@ class Thread: public ThreadHandle {
// LOCAL_STORAGE_KEY_MIN_VALUE and LOCAL_STORAGE_KEY_MAX_VALUE are specified
// to ensure that enumeration type has correct value range (see Issue 830 for
// more details).
-#ifdef __CYGWIN__
- typedef void* LocalStorageKey;
-#else
enum LocalStorageKey {
LOCAL_STORAGE_KEY_MIN_VALUE = kMinInt,
LOCAL_STORAGE_KEY_MAX_VALUE = kMaxInt
};
-#endif
// Create new thread.
Thread();
diff --git a/deps/v8/src/profile-generator.cc b/deps/v8/src/profile-generator.cc
index 4476cb876c..06ee333b90 100644
--- a/deps/v8/src/profile-generator.cc
+++ b/deps/v8/src/profile-generator.cc
@@ -2285,7 +2285,7 @@ static int Intersect(int i1, int i2, const Vector<HeapEntry*>& dominators) {
// The algorithm is based on the article:
// K. Cooper, T. Harvey and K. Kennedy "A Simple, Fast Dominance Algorithm"
-// Softw. Pract. Exper. 4 (2001), pp. 1–10.
+// Softw. Pract. Exper. 4 (2001), pp. 1-10.
bool HeapSnapshotGenerator::BuildDominatorTree(
const Vector<HeapEntry*>& entries,
Vector<HeapEntry*>* dominators) {
diff --git a/deps/v8/src/regexp-macro-assembler.cc b/deps/v8/src/regexp-macro-assembler.cc
index 09797ca2a4..51f4015f6c 100644
--- a/deps/v8/src/regexp-macro-assembler.cc
+++ b/deps/v8/src/regexp-macro-assembler.cc
@@ -154,16 +154,12 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
const byte* input_start,
const byte* input_end,
int* output) {
- typedef int (*matcher)(String*, int, const byte*,
- const byte*, int*, Address, int);
- matcher matcher_func = FUNCTION_CAST<matcher>(code->entry());
-
// Ensure that the minimum stack has been allocated.
RegExpStack stack;
Address stack_base = RegExpStack::stack_base();
int direct_call = 0;
- int result = CALL_GENERATED_REGEXP_CODE(matcher_func,
+ int result = CALL_GENERATED_REGEXP_CODE(code->entry(),
input,
start_offset,
input_start,
diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc
index 4994378cd3..48ff69f5d2 100644
--- a/deps/v8/src/runtime.cc
+++ b/deps/v8/src/runtime.cc
@@ -884,10 +884,17 @@ static MaybeObject* Runtime_PreventExtensions(Arguments args) {
return obj->PreventExtensions();
}
+
static MaybeObject* Runtime_IsExtensible(Arguments args) {
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSObject, obj, args[0]);
- return obj->map()->is_extensible() ? Heap::true_value()
+ if (obj->IsJSGlobalProxy()) {
+ Object* proto = obj->GetPrototype();
+ if (proto->IsNull()) return Heap::false_value();
+ ASSERT(proto->IsJSGlobalObject());
+ obj = JSObject::cast(proto);
+ }
+ return obj->map()->is_extensible() ? Heap::true_value()
: Heap::false_value();
}
@@ -1082,7 +1089,7 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
const char* type = (lookup.IsReadOnly()) ? "const" : "var";
return ThrowRedeclarationError(type, name);
}
- SetProperty(global, name, value, attributes);
+ RETURN_IF_EMPTY_HANDLE(SetProperty(global, name, value, attributes));
} else {
// If a property with this name does not already exist on the
// global object add the property locally. We take special
@@ -1090,10 +1097,12 @@ static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
// of callbacks in the prototype chain (this rules out using
// SetProperty). Also, we must use the handle-based version to
// avoid GC issues.
- SetLocalPropertyIgnoreAttributes(global, name, value, attributes);
+ RETURN_IF_EMPTY_HANDLE(
+ SetLocalPropertyIgnoreAttributes(global, name, value, attributes));
}
}
+ ASSERT(!Top::has_pending_exception());
return Heap::undefined_value();
}
@@ -1143,12 +1152,14 @@ static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
} else {
// The holder is an arguments object.
Handle<JSObject> arguments(Handle<JSObject>::cast(holder));
- SetElement(arguments, index, initial_value);
+ Handle<Object> result = SetElement(arguments, index, initial_value);
+ if (result.is_null()) return Failure::Exception();
}
} else {
// Slow case: The property is not in the FixedArray part of the context.
Handle<JSObject> context_ext = Handle<JSObject>::cast(holder);
- SetProperty(context_ext, name, initial_value, mode);
+ RETURN_IF_EMPTY_HANDLE(
+ SetProperty(context_ext, name, initial_value, mode));
}
}
@@ -1175,8 +1186,7 @@ static MaybeObject* Runtime_DeclareContextSlot(Arguments args) {
ASSERT(!context_ext->HasLocalProperty(*name));
Handle<Object> value(Heap::undefined_value());
if (*initial_value != NULL) value = initial_value;
- SetProperty(context_ext, name, value, mode);
- ASSERT(context_ext->GetLocalPropertyAttribute(*name) == mode);
+ RETURN_IF_EMPTY_HANDLE(SetProperty(context_ext, name, value, mode));
}
return Heap::undefined_value();
@@ -1325,12 +1335,12 @@ static MaybeObject* Runtime_InitializeConstGlobal(Arguments args) {
// with setting the value because the property is either absent or
// read-only. We also have to do redo the lookup.
HandleScope handle_scope;
- Handle<GlobalObject>global(Top::context()->global());
+ Handle<GlobalObject> global(Top::context()->global());
- // BUG 1213579: Handle the case where we have to set a read-only
+ // BUG 1213575: Handle the case where we have to set a read-only
// property through an interceptor and only do it if it's
// uninitialized, e.g. the hole. Nirk...
- SetProperty(global, name, value, attributes);
+ RETURN_IF_EMPTY_HANDLE(SetProperty(global, name, value, attributes));
return *value;
}
@@ -1412,7 +1422,7 @@ static MaybeObject* Runtime_InitializeConstContextSlot(Arguments args) {
// context.
if (attributes == ABSENT) {
Handle<JSObject> global = Handle<JSObject>(Top::context()->global());
- SetProperty(global, name, value, NONE);
+ RETURN_IF_EMPTY_HANDLE(SetProperty(global, name, value, NONE));
return *value;
}
@@ -1449,14 +1459,8 @@ static MaybeObject* Runtime_InitializeConstContextSlot(Arguments args) {
// The property was found in a different context extension object.
// Set it if it is not a read-only property.
if ((attributes & READ_ONLY) == 0) {
- Handle<Object> set = SetProperty(context_ext, name, value, attributes);
- // Setting a property might throw an exception. Exceptions
- // are converted to empty handles in handle operations. We
- // need to convert back to exceptions here.
- if (set.is_null()) {
- ASSERT(Top::has_pending_exception());
- return Failure::Exception();
- }
+ RETURN_IF_EMPTY_HANDLE(
+ SetProperty(context_ext, name, value, attributes));
}
}
@@ -3668,14 +3672,20 @@ static MaybeObject* Runtime_DefineOrRedefineDataProperty(Arguments args) {
if (((unchecked & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) &&
is_element) {
// Normalize the elements to enable attributes on the property.
- if (!js_object->IsJSGlobalProxy()) {
- NormalizeElements(js_object);
- }
+ if (js_object->IsJSGlobalProxy()) {
+ Handle<Object> proto(js_object->GetPrototype());
+ // If proxy is detached, ignore the assignment. Alternatively,
+ // we could throw an exception.
+ if (proto->IsNull()) return *obj_value;
+ js_object = Handle<JSObject>::cast(proto);
+ }
+ NormalizeElements(js_object);
Handle<NumberDictionary> dictionary(js_object->element_dictionary());
// Make sure that we never go back to fast case.
dictionary->set_requires_slow_elements();
PropertyDetails details = PropertyDetails(attr, NORMAL);
NumberDictionarySet(dictionary, index, obj_value, details);
+ return *obj_value;
}
LookupResult result;
@@ -3690,9 +3700,12 @@ static MaybeObject* Runtime_DefineOrRedefineDataProperty(Arguments args) {
if (result.IsProperty() &&
(attr != result.GetAttributes() || result.type() == CALLBACKS)) {
// New attributes - normalize to avoid writing to instance descriptor
- if (!js_object->IsJSGlobalProxy()) {
- NormalizeProperties(js_object, CLEAR_INOBJECT_PROPERTIES, 0);
+ if (js_object->IsJSGlobalProxy()) {
+ // Since the result is a property, the prototype will exist so
+ // we don't have to check for null.
+ js_object = Handle<JSObject>(JSObject::cast(js_object->GetPrototype()));
}
+ NormalizeProperties(js_object, CLEAR_INOBJECT_PROPERTIES, 0);
// Use IgnoreAttributes version since a readonly property may be
// overridden and SetProperty does not allow this.
return js_object->SetLocalPropertyIgnoreAttributes(*name,
@@ -3700,7 +3713,7 @@ static MaybeObject* Runtime_DefineOrRedefineDataProperty(Arguments args) {
attr);
}
- return Runtime::SetObjectProperty(js_object, name, obj_value, attr);
+ return Runtime::ForceSetObjectProperty(js_object, name, obj_value, attr);
}
@@ -3901,11 +3914,14 @@ static MaybeObject* Runtime_IgnoreAttributesAndSetProperty(Arguments args) {
static MaybeObject* Runtime_DeleteProperty(Arguments args) {
NoHandleAllocation ha;
- ASSERT(args.length() == 2);
+ ASSERT(args.length() == 3);
CONVERT_CHECKED(JSObject, object, args[0]);
CONVERT_CHECKED(String, key, args[1]);
- return object->DeleteProperty(key, JSObject::NORMAL_DELETION);
+ CONVERT_SMI_CHECKED(strict, args[2]);
+ return object->DeleteProperty(key, strict == kStrictMode
+ ? JSObject::STRICT_DELETION
+ : JSObject::NORMAL_DELETION);
}
@@ -4199,6 +4215,22 @@ static MaybeObject* Runtime_LocalKeys(Arguments args) {
CONVERT_CHECKED(JSObject, raw_object, args[0]);
HandleScope scope;
Handle<JSObject> object(raw_object);
+
+ if (object->IsJSGlobalProxy()) {
+ // Do access checks before going to the global object.
+ if (object->IsAccessCheckNeeded() &&
+ !Top::MayNamedAccess(*object, Heap::undefined_value(),
+ v8::ACCESS_KEYS)) {
+ Top::ReportFailedAccessCheck(*object, v8::ACCESS_KEYS);
+ return *Factory::NewJSArray(0);
+ }
+
+ Handle<Object> proto(object->GetPrototype());
+ // If proxy is detached we simply return an empty array.
+ if (proto->IsNull()) return *Factory::NewJSArray(0);
+ object = Handle<JSObject>::cast(proto);
+ }
+
Handle<FixedArray> contents = GetKeysInFixedArrayFor(object,
LOCAL_ONLY);
// Some fast paths through GetKeysInFixedArrayFor reuse a cached
@@ -5782,6 +5814,89 @@ static MaybeObject* Runtime_StringBuilderConcat(Arguments args) {
}
+static MaybeObject* Runtime_StringBuilderJoin(Arguments args) {
+ NoHandleAllocation ha;
+ ASSERT(args.length() == 3);
+ CONVERT_CHECKED(JSArray, array, args[0]);
+ if (!args[1]->IsSmi()) {
+ Top::context()->mark_out_of_memory();
+ return Failure::OutOfMemoryException();
+ }
+ int array_length = Smi::cast(args[1])->value();
+ CONVERT_CHECKED(String, separator, args[2]);
+
+ if (!array->HasFastElements()) {
+ return Top::Throw(Heap::illegal_argument_symbol());
+ }
+ FixedArray* fixed_array = FixedArray::cast(array->elements());
+ if (fixed_array->length() < array_length) {
+ array_length = fixed_array->length();
+ }
+
+ if (array_length == 0) {
+ return Heap::empty_string();
+ } else if (array_length == 1) {
+ Object* first = fixed_array->get(0);
+ if (first->IsString()) return first;
+ }
+
+ int separator_length = separator->length();
+ int max_nof_separators =
+ (String::kMaxLength + separator_length - 1) / separator_length;
+ if (max_nof_separators < (array_length - 1)) {
+ Top::context()->mark_out_of_memory();
+ return Failure::OutOfMemoryException();
+ }
+ int length = (array_length - 1) * separator_length;
+ for (int i = 0; i < array_length; i++) {
+ Object* element_obj = fixed_array->get(i);
+ if (!element_obj->IsString()) {
+ // TODO(1161): handle this case.
+ return Top::Throw(Heap::illegal_argument_symbol());
+ }
+ String* element = String::cast(element_obj);
+ int increment = element->length();
+ if (increment > String::kMaxLength - length) {
+ Top::context()->mark_out_of_memory();
+ return Failure::OutOfMemoryException();
+ }
+ length += increment;
+ }
+
+ Object* object;
+ { MaybeObject* maybe_object = Heap::AllocateRawTwoByteString(length);
+ if (!maybe_object->ToObject(&object)) return maybe_object;
+ }
+ SeqTwoByteString* answer = SeqTwoByteString::cast(object);
+
+ uc16* sink = answer->GetChars();
+#ifdef DEBUG
+ uc16* end = sink + length;
+#endif
+
+ String* first = String::cast(fixed_array->get(0));
+ int first_length = first->length();
+ String::WriteToFlat(first, sink, 0, first_length);
+ sink += first_length;
+
+ for (int i = 1; i < array_length; i++) {
+ ASSERT(sink + separator_length <= end);
+ String::WriteToFlat(separator, sink, 0, separator_length);
+ sink += separator_length;
+
+ String* element = String::cast(fixed_array->get(i));
+ int element_length = element->length();
+ ASSERT(sink + element_length <= end);
+ String::WriteToFlat(element, sink, 0, element_length);
+ sink += element_length;
+ }
+ ASSERT(sink == end);
+
+ ASSERT(!answer->HasOnlyAsciiChars()); // Use %_FastAsciiArrayJoin instead.
+ return answer;
+}
+
+
static MaybeObject* Runtime_NumberOr(Arguments args) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
@@ -6681,28 +6796,50 @@ static MaybeObject* Runtime_NewClosure(Arguments args) {
return *result;
}
+
static MaybeObject* Runtime_NewObjectFromBound(Arguments args) {
HandleScope scope;
ASSERT(args.length() == 2);
+ // First argument is a function to use as a constructor.
CONVERT_ARG_CHECKED(JSFunction, function, 0);
- CONVERT_ARG_CHECKED(JSArray, params, 1);
- RUNTIME_ASSERT(params->HasFastElements());
- FixedArray* fixed = FixedArray::cast(params->elements());
+ // Second argument is either null or an array of bound arguments.
+ FixedArray* bound_args = NULL;
+ int bound_argc = 0;
+ if (!args[1]->IsNull()) {
+ CONVERT_ARG_CHECKED(JSArray, params, 1);
+ RUNTIME_ASSERT(params->HasFastElements());
+ bound_args = FixedArray::cast(params->elements());
+ bound_argc = Smi::cast(params->length())->value();
+ }
- int fixed_length = Smi::cast(params->length())->value();
- SmartPointer<Object**> param_data(NewArray<Object**>(fixed_length));
- for (int i = 0; i < fixed_length; i++) {
- Handle<Object> val = Handle<Object>(fixed->get(i));
+ // Find frame containing arguments passed to the caller.
+ JavaScriptFrameIterator it;
+ JavaScriptFrame* frame = it.frame();
+ ASSERT(!frame->is_optimized());
+ it.AdvanceToArgumentsFrame();
+ frame = it.frame();
+ int argc = frame->GetProvidedParametersCount();
+
+ // Prepend bound arguments to caller's arguments.
+ int total_argc = bound_argc + argc;
+ SmartPointer<Object**> param_data(NewArray<Object**>(total_argc));
+ for (int i = 0; i < bound_argc; i++) {
+ Handle<Object> val = Handle<Object>(bound_args->get(i));
param_data[i] = val.location();
}
+ for (int i = 0; i < argc; i++) {
+ Handle<Object> val = Handle<Object>(frame->GetParameter(i));
+ param_data[bound_argc + i] = val.location();
+ }
bool exception = false;
- Handle<Object> result = Execution::New(
- function, fixed_length, *param_data, &exception);
+ Handle<Object> result =
+ Execution::New(function, total_argc, *param_data, &exception);
if (exception) {
return Failure::Exception();
}
+
ASSERT(!result.is_null());
return *result;
}
@@ -7340,12 +7477,17 @@ static MaybeObject* Runtime_StoreContextSlot(Arguments args) {
if (holder->IsContext()) {
// Ignore if read_only variable.
if ((attributes & READ_ONLY) == 0) {
- Handle<Context>::cast(holder)->set(index, *value);
+ // Context is a fixed array and set cannot fail.
+ Context::cast(*holder)->set(index, *value);
}
} else {
ASSERT((attributes & READ_ONLY) == 0);
- Handle<JSObject>::cast(holder)->SetElement(index, *value)->
- ToObjectUnchecked();
+ Handle<Object> result =
+ SetElement(Handle<JSObject>::cast(holder), index, value);
+ if (result.is_null()) {
+ ASSERT(Top::has_pending_exception());
+ return Failure::Exception();
+ }
}
return *value;
}
@@ -7368,14 +7510,7 @@ static MaybeObject* Runtime_StoreContextSlot(Arguments args) {
// extension object itself.
if ((attributes & READ_ONLY) == 0 ||
(context_ext->GetLocalPropertyAttribute(*name) == ABSENT)) {
- Handle<Object> set = SetProperty(context_ext, name, value, NONE);
- if (set.is_null()) {
- // Failure::Exception is converted to a null handle in the
- // handle-based methods such as SetProperty. We therefore need
- // to convert null handles back to exceptions.
- ASSERT(Top::has_pending_exception());
- return Failure::Exception();
- }
+ RETURN_IF_EMPTY_HANDLE(SetProperty(context_ext, name, value, NONE));
}
return *value;
}
@@ -8226,7 +8361,7 @@ static MaybeObject* Runtime_ArrayConcat(Arguments args) {
}
}
- // Allocate an empty array, will set length and content later.
+ // Allocate an empty array, will set map, length, and content later.
Handle<JSArray> result = Factory::NewJSArray(0);
uint32_t estimate_nof_elements = IterateArguments(arguments, NULL);
@@ -8235,23 +8370,20 @@ static MaybeObject* Runtime_ArrayConcat(Arguments args) {
// dictionary.
bool fast_case = (estimate_nof_elements * 2) >= result_length;
+ Handle<Map> map;
Handle<FixedArray> storage;
if (fast_case) {
// The backing storage array must have non-existing elements to
// preserve holes across concat operations.
+ map = Factory::GetFastElementsMap(Handle<Map>(result->map()));
storage = Factory::NewFixedArrayWithHoles(result_length);
- Handle<Map> fast_map =
- Factory::GetFastElementsMap(Handle<Map>(result->map()));
- result->set_map(*fast_map);
} else {
+ map = Factory::GetSlowElementsMap(Handle<Map>(result->map()));
// TODO(126): move 25% pre-allocation logic into Dictionary::Allocate
uint32_t at_least_space_for = estimate_nof_elements +
(estimate_nof_elements >> 2);
storage = Handle<FixedArray>::cast(
- Factory::NewNumberDictionary(at_least_space_for));
- Handle<Map> slow_map =
- Factory::GetSlowElementsMap(Handle<Map>(result->map()));
- result->set_map(*slow_map);
+ Factory::NewNumberDictionary(at_least_space_for));
}
Handle<Object> len = Factory::NewNumber(static_cast<double>(result_length));
@@ -8260,8 +8392,12 @@ static MaybeObject* Runtime_ArrayConcat(Arguments args) {
IterateArguments(arguments, &visitor);
+ // Please note:
+ // - the storage might have been changed in the visitor;
+ // - the map and the storage must be set together to avoid breaking
+ // the invariant that the map describes the array's elements.
+ result->set_map(*map);
result->set_length(*len);
- // Please note the storage might have changed in the visitor.
result->set_elements(*visitor.storage());
return *result;
@@ -8855,7 +8991,7 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
// If we are inspecting an optimized frame use undefined as the
// value for all locals.
//
- // TODO(3141533): We should be able to get the correct values
+ // TODO(1140): We should be able to get the correct values
// for locals in optimized frames.
locals->set(i * 2 + 1, Heap::undefined_value());
} else if (i < info.number_of_stack_slots()) {
@@ -9019,7 +9155,7 @@ static MaybeObject* Runtime_GetFrameDetails(Arguments args) {
// Copy all the context locals into an object used to materialize a scope.
-static void CopyContextLocalsToScopeObject(
+static bool CopyContextLocalsToScopeObject(
Handle<SerializedScopeInfo> serialized_scope_info,
ScopeInfo<>& scope_info,
Handle<Context> context,
@@ -9033,11 +9169,15 @@ static void CopyContextLocalsToScopeObject(
// Don't include the arguments shadow (.arguments) context variable.
if (*scope_info.context_slot_name(i) != Heap::arguments_shadow_symbol()) {
- SetProperty(scope_object,
- scope_info.context_slot_name(i),
- Handle<Object>(context->get(context_index)), NONE);
+ RETURN_IF_EMPTY_HANDLE_VALUE(
+ SetProperty(scope_object,
+ scope_info.context_slot_name(i),
+ Handle<Object>(context->get(context_index)), NONE),
+ false);
}
}
+
+ return true;
}
@@ -9055,23 +9195,29 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
// First fill all parameters.
for (int i = 0; i < scope_info.number_of_parameters(); ++i) {
- SetProperty(local_scope,
- scope_info.parameter_name(i),
- Handle<Object>(frame->GetParameter(i)), NONE);
+ RETURN_IF_EMPTY_HANDLE_VALUE(
+ SetProperty(local_scope,
+ scope_info.parameter_name(i),
+ Handle<Object>(frame->GetParameter(i)), NONE),
+ Handle<JSObject>());
}
// Second fill all stack locals.
for (int i = 0; i < scope_info.number_of_stack_slots(); i++) {
- SetProperty(local_scope,
- scope_info.stack_slot_name(i),
- Handle<Object>(frame->GetExpression(i)), NONE);
+ RETURN_IF_EMPTY_HANDLE_VALUE(
+ SetProperty(local_scope,
+ scope_info.stack_slot_name(i),
+ Handle<Object>(frame->GetExpression(i)), NONE),
+ Handle<JSObject>());
}
// Third fill all context locals.
Handle<Context> frame_context(Context::cast(frame->context()));
Handle<Context> function_context(frame_context->fcontext());
- CopyContextLocalsToScopeObject(serialized_scope_info, scope_info,
- function_context, local_scope);
+ if (!CopyContextLocalsToScopeObject(serialized_scope_info, scope_info,
+ function_context, local_scope)) {
+ return Handle<JSObject>();
+ }
// Finally copy any properties from the function context extension. This will
// be variables introduced by eval.
@@ -9084,7 +9230,9 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
// Names of variables introduced by eval are strings.
ASSERT(keys->get(i)->IsString());
Handle<String> key(String::cast(keys->get(i)));
- SetProperty(local_scope, key, GetProperty(ext, key), NONE);
+ RETURN_IF_EMPTY_HANDLE_VALUE(
+ SetProperty(local_scope, key, GetProperty(ext, key), NONE),
+ Handle<JSObject>());
}
}
}
@@ -9117,16 +9265,20 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
for (int i = 0; i < scope_info.number_of_parameters(); ++i) {
// We don't expect exception-throwing getters on the arguments shadow.
Object* element = arguments_shadow->GetElement(i)->ToObjectUnchecked();
- SetProperty(closure_scope,
- scope_info.parameter_name(i),
- Handle<Object>(element),
- NONE);
+ RETURN_IF_EMPTY_HANDLE_VALUE(
+ SetProperty(closure_scope,
+ scope_info.parameter_name(i),
+ Handle<Object>(element),
+ NONE),
+ Handle<JSObject>());
}
}
// Fill all context locals to the context extension.
- CopyContextLocalsToScopeObject(serialized_scope_info, scope_info,
- context, closure_scope);
+ if (!CopyContextLocalsToScopeObject(serialized_scope_info, scope_info,
+ context, closure_scope)) {
+ return Handle<JSObject>();
+ }
// Finally copy any properties from the function context extension. This will
// be variables introduced by eval.
@@ -9137,7 +9289,9 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
// Names of variables introduced by eval are strings.
ASSERT(keys->get(i)->IsString());
Handle<String> key(String::cast(keys->get(i)));
- SetProperty(closure_scope, key, GetProperty(ext, key), NONE);
+ RETURN_IF_EMPTY_HANDLE_VALUE(
+ SetProperty(closure_scope, key, GetProperty(ext, key), NONE),
+ Handle<JSObject>());
}
}
@@ -9424,6 +9578,7 @@ static MaybeObject* Runtime_GetScopeDetails(Arguments args) {
// Fill in scope details.
details->set(kScopeDetailsTypeIndex, Smi::FromInt(it.Type()));
Handle<JSObject> scope_object = it.ScopeObject();
+ RETURN_IF_EMPTY_HANDLE(scope_object);
details->set(kScopeDetailsObjectIndex, *scope_object);
return *Factory::NewJSArrayWithElements(details);
@@ -9905,6 +10060,7 @@ static MaybeObject* Runtime_DebugEvaluate(Arguments args) {
// Materialize the content of the local scope into a JSObject.
Handle<JSObject> local_scope = MaterializeLocalScope(frame);
+ RETURN_IF_EMPTY_HANDLE(local_scope);
// Allocate a new context for the debug evaluation and set the extension
// object build.
diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h
index fb2ff93c6f..06437ef9fb 100644
--- a/deps/v8/src/runtime.h
+++ b/deps/v8/src/runtime.h
@@ -45,7 +45,7 @@ namespace internal {
/* Property access */ \
F(GetProperty, 2, 1) \
F(KeyedGetProperty, 2, 1) \
- F(DeleteProperty, 2, 1) \
+ F(DeleteProperty, 3, 1) \
F(HasLocalProperty, 2, 1) \
F(HasProperty, 2, 1) \
F(HasElement, 2, 1) \
@@ -128,6 +128,7 @@ namespace internal {
\
F(StringAdd, 2, 1) \
F(StringBuilderConcat, 3, 1) \
+ F(StringBuilderJoin, 3, 1) \
\
/* Bit operations */ \
F(NumberOr, 2, 1) \
diff --git a/deps/v8/src/runtime.js b/deps/v8/src/runtime.js
index 2cdbbdeebf..66d839bec0 100644
--- a/deps/v8/src/runtime.js
+++ b/deps/v8/src/runtime.js
@@ -338,8 +338,8 @@ function SHR(y) {
*/
// ECMA-262, section 11.4.1, page 46.
-function DELETE(key) {
- return %DeleteProperty(%ToObject(this), %ToString(key));
+function DELETE(key, strict) {
+ return %DeleteProperty(%ToObject(this), %ToString(key), strict);
}
diff --git a/deps/v8/src/scanner.cc b/deps/v8/src/scanner.cc
index cab8c58bd8..d54d9f91f9 100755
--- a/deps/v8/src/scanner.cc
+++ b/deps/v8/src/scanner.cc
@@ -76,7 +76,8 @@ void BufferedUC16CharacterStream::SlowPushBack(uc16 character) {
buffer_end_ = buffer_ + kBufferSize;
buffer_cursor_ = buffer_end_;
}
- ASSERT(pushback_limit_ > buffer_);
+ // Ensure that there is room for at least one pushback.
+ ASSERT(buffer_cursor_ > buffer_);
ASSERT(pos_ > 0);
buffer_[--buffer_cursor_ - buffer_] = character;
if (buffer_cursor_ == buffer_) {
@@ -89,15 +90,17 @@ void BufferedUC16CharacterStream::SlowPushBack(uc16 character) {
bool BufferedUC16CharacterStream::ReadBlock() {
+ buffer_cursor_ = buffer_;
if (pushback_limit_ != NULL) {
- buffer_cursor_ = buffer_;
+ // Leave pushback mode.
buffer_end_ = pushback_limit_;
pushback_limit_ = NULL;
- ASSERT(buffer_cursor_ != buffer_end_);
- return true;
+ // If there were any valid characters left at the
+ // start of the buffer, use those.
+ if (buffer_cursor_ < buffer_end_) return true;
+ // Otherwise read a new block.
}
unsigned length = FillBuffer(pos_, kBufferSize);
- buffer_cursor_ = buffer_;
buffer_end_ = buffer_ + length;
return length > 0;
}
diff --git a/deps/v8/src/serialize.cc b/deps/v8/src/serialize.cc
index 60c2cbb69d..f8e98d33f5 100644
--- a/deps/v8/src/serialize.cc
+++ b/deps/v8/src/serialize.cc
@@ -192,7 +192,7 @@ void ExternalReferenceTable::PopulateTable() {
{ BUILTIN, \
Builtins::name, \
"Builtins::" #name },
-#define DEF_ENTRY_A(name, kind, state) DEF_ENTRY_C(name, ignored)
+#define DEF_ENTRY_A(name, kind, state, extra) DEF_ENTRY_C(name, ignored)
BUILTIN_LIST_C(DEF_ENTRY_C)
BUILTIN_LIST_A(DEF_ENTRY_A)
diff --git a/deps/v8/src/string.js b/deps/v8/src/string.js
index ab0ab54f03..2b73e0f6e9 100644
--- a/deps/v8/src/string.js
+++ b/deps/v8/src/string.js
@@ -103,17 +103,14 @@ function StringConcat() {
// ECMA-262 section 15.5.4.7
function StringIndexOf(pattern /* position */) { // length == 1
var subject = TO_STRING_INLINE(this);
- var pattern = TO_STRING_INLINE(pattern);
- var subject_len = subject.length;
- var pattern_len = pattern.length;
+ pattern = TO_STRING_INLINE(pattern);
var index = 0;
if (%_ArgumentsLength() > 1) {
- var arg1 = %_Arguments(1); // position
- index = TO_INTEGER(arg1);
+ index = %_Arguments(1); // position
+ index = TO_INTEGER(index);
+ if (index < 0) index = 0;
+ if (index > subject.length) index = subject.length;
}
- if (index < 0) index = 0;
- if (index > subject_len) index = subject_len;
- if (pattern_len + index > subject_len) return -1;
return %StringIndexOf(subject, pattern, index);
}
@@ -405,8 +402,7 @@ function StringReplaceGlobalRegExpWithFunction(subject, regexp, replace) {
lastMatchInfoOverride = override;
var func_result =
%_CallFunction(receiver, elem, match_start, subject, replace);
- func_result = TO_STRING_INLINE(func_result);
- res[i] = func_result;
+ res[i] = TO_STRING_INLINE(func_result);
match_start += elem.length;
}
i++;
@@ -419,8 +415,7 @@ function StringReplaceGlobalRegExpWithFunction(subject, regexp, replace) {
// Use the apply argument as backing for global RegExp properties.
lastMatchInfoOverride = elem;
var func_result = replace.apply(null, elem);
- func_result = TO_STRING_INLINE(func_result);
- res[i] = func_result;
+ res[i] = TO_STRING_INLINE(func_result);
}
i++;
}
diff --git a/deps/v8/src/stub-cache.cc b/deps/v8/src/stub-cache.cc
index 731714138b..abb26d6ed4 100644
--- a/deps/v8/src/stub-cache.cc
+++ b/deps/v8/src/stub-cache.cc
@@ -497,12 +497,14 @@ MaybeObject* StubCache::ComputeKeyedLoadPixelArray(JSObject* receiver) {
MaybeObject* StubCache::ComputeStoreField(String* name,
JSObject* receiver,
int field_index,
- Map* transition) {
+ Map* transition,
+ Code::ExtraICState extra_ic_state) {
PropertyType type = (transition == NULL) ? FIELD : MAP_TRANSITION;
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, type);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::STORE_IC, type, extra_ic_state);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- StoreStubCompiler compiler;
+ StoreStubCompiler compiler(extra_ic_state);
{ MaybeObject* maybe_code =
compiler.CompileStoreField(receiver, field_index, transition, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -540,6 +542,33 @@ MaybeObject* StubCache::ComputeKeyedStoreSpecialized(JSObject* receiver) {
}
+MaybeObject* StubCache::ComputeKeyedStorePixelArray(JSObject* receiver) {
+ // Using NORMAL as the PropertyType for array element stores is a misuse. The
+ // generated stub always accesses fast elements, not slow-mode fields, but
+ // some property type is required for the stub lookup. Note that overloading
+ // the NORMAL PropertyType is only safe as long as no stubs are generated for
+ // other keyed field stores. This is guaranteed to be the case since all field
+ // keyed stores that are not array elements go through a generic builtin stub.
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, NORMAL);
+ String* name = Heap::KeyedStorePixelArray_symbol();
+ Object* code = receiver->map()->FindInCodeCache(name, flags);
+ if (code->IsUndefined()) {
+ KeyedStoreStubCompiler compiler;
+ { MaybeObject* maybe_code = compiler.CompileStorePixelArray(receiver);
+ if (!maybe_code->ToObject(&code)) return maybe_code;
+ }
+ PROFILE(CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, Code::cast(code), 0));
+ Object* result;
+ { MaybeObject* maybe_result =
+ receiver->UpdateMapCodeCache(name, Code::cast(code));
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ }
+ return code;
+}
+
+
namespace {
ExternalArrayType ElementsKindToExternalArrayType(JSObject::ElementsKind kind) {
@@ -608,18 +637,22 @@ MaybeObject* StubCache::ComputeKeyedLoadOrStoreExternalArray(
}
-MaybeObject* StubCache::ComputeStoreNormal() {
- return Builtins::builtin(Builtins::StoreIC_Normal);
+MaybeObject* StubCache::ComputeStoreNormal(Code::ExtraICState extra_ic_state) {
+ return Builtins::builtin(extra_ic_state == StoreIC::kStoreICStrict
+ ? Builtins::StoreIC_Normal_Strict
+ : Builtins::StoreIC_Normal);
}
MaybeObject* StubCache::ComputeStoreGlobal(String* name,
GlobalObject* receiver,
- JSGlobalPropertyCell* cell) {
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, NORMAL);
+ JSGlobalPropertyCell* cell,
+ Code::ExtraICState extra_ic_state) {
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::STORE_IC, NORMAL, extra_ic_state);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- StoreStubCompiler compiler;
+ StoreStubCompiler compiler(extra_ic_state);
{ MaybeObject* maybe_code =
compiler.CompileStoreGlobal(receiver, cell, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -636,14 +669,17 @@ MaybeObject* StubCache::ComputeStoreGlobal(String* name,
}
-MaybeObject* StubCache::ComputeStoreCallback(String* name,
- JSObject* receiver,
- AccessorInfo* callback) {
+MaybeObject* StubCache::ComputeStoreCallback(
+ String* name,
+ JSObject* receiver,
+ AccessorInfo* callback,
+ Code::ExtraICState extra_ic_state) {
ASSERT(v8::ToCData<Address>(callback->setter()) != 0);
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, CALLBACKS);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::STORE_IC, CALLBACKS, extra_ic_state);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- StoreStubCompiler compiler;
+ StoreStubCompiler compiler(extra_ic_state);
{ MaybeObject* maybe_code =
compiler.CompileStoreCallback(receiver, callback, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -660,13 +696,15 @@ MaybeObject* StubCache::ComputeStoreCallback(String* name,
}
-MaybeObject* StubCache::ComputeStoreInterceptor(String* name,
- JSObject* receiver) {
- Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::STORE_IC, INTERCEPTOR);
+MaybeObject* StubCache::ComputeStoreInterceptor(
+ String* name,
+ JSObject* receiver,
+ Code::ExtraICState extra_ic_state) {
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::STORE_IC, INTERCEPTOR, extra_ic_state);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- StoreStubCompiler compiler;
+ StoreStubCompiler compiler(extra_ic_state);
{ MaybeObject* maybe_code =
compiler.CompileStoreInterceptor(receiver, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -1637,7 +1675,8 @@ MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type, String* name) {
MaybeObject* StoreStubCompiler::GetCode(PropertyType type, String* name) {
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, type);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, type,
+ extra_ic_state_);
MaybeObject* result = GetCodeWithFlags(flags, name);
if (!result->IsFailure()) {
PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG,
diff --git a/deps/v8/src/stub-cache.h b/deps/v8/src/stub-cache.h
index 6d78279833..4638da2ad7 100644
--- a/deps/v8/src/stub-cache.h
+++ b/deps/v8/src/stub-cache.h
@@ -138,26 +138,32 @@ class StubCache : public AllStatic {
// ---
- MUST_USE_RESULT static MaybeObject* ComputeStoreField(String* name,
- JSObject* receiver,
- int field_index,
- Map* transition = NULL);
+ MUST_USE_RESULT static MaybeObject* ComputeStoreField(
+ String* name,
+ JSObject* receiver,
+ int field_index,
+ Map* transition,
+ Code::ExtraICState extra_ic_state);
- MUST_USE_RESULT static MaybeObject* ComputeStoreNormal();
+ MUST_USE_RESULT static MaybeObject* ComputeStoreNormal(
+ Code::ExtraICState extra_ic_state);
MUST_USE_RESULT static MaybeObject* ComputeStoreGlobal(
String* name,
GlobalObject* receiver,
- JSGlobalPropertyCell* cell);
+ JSGlobalPropertyCell* cell,
+ Code::ExtraICState extra_ic_state);
MUST_USE_RESULT static MaybeObject* ComputeStoreCallback(
String* name,
JSObject* receiver,
- AccessorInfo* callback);
+ AccessorInfo* callback,
+ Code::ExtraICState extra_ic_state);
MUST_USE_RESULT static MaybeObject* ComputeStoreInterceptor(
String* name,
- JSObject* receiver);
+ JSObject* receiver,
+ Code::ExtraICState extra_ic_state);
// ---
@@ -170,6 +176,9 @@ class StubCache : public AllStatic {
MUST_USE_RESULT static MaybeObject* ComputeKeyedStoreSpecialized(
JSObject* receiver);
+ MUST_USE_RESULT static MaybeObject* ComputeKeyedStorePixelArray(
+ JSObject* receiver);
+
MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadOrStoreExternalArray(
JSObject* receiver,
bool is_store);
@@ -619,6 +628,9 @@ class KeyedLoadStubCompiler: public StubCompiler {
class StoreStubCompiler: public StubCompiler {
public:
+ explicit StoreStubCompiler(Code::ExtraICState extra_ic_state)
+ : extra_ic_state_(extra_ic_state) { }
+
MUST_USE_RESULT MaybeObject* CompileStoreField(JSObject* object,
int index,
Map* transition,
@@ -636,6 +648,8 @@ class StoreStubCompiler: public StubCompiler {
private:
MaybeObject* GetCode(PropertyType type, String* name);
+
+ Code::ExtraICState extra_ic_state_;
};
@@ -648,6 +662,8 @@ class KeyedStoreStubCompiler: public StubCompiler {
MUST_USE_RESULT MaybeObject* CompileStoreSpecialized(JSObject* receiver);
+ MUST_USE_RESULT MaybeObject* CompileStorePixelArray(JSObject* receiver);
+
private:
MaybeObject* GetCode(PropertyType type, String* name);
};
diff --git a/deps/v8/src/top.cc b/deps/v8/src/top.cc
index e32eb6bc8b..83d7de3afe 100644
--- a/deps/v8/src/top.cc
+++ b/deps/v8/src/top.cc
@@ -333,7 +333,7 @@ void Top::RegisterTryCatchHandler(v8::TryCatch* that) {
void Top::UnregisterTryCatchHandler(v8::TryCatch* that) {
- ASSERT(thread_local_.TryCatchHandler() == that);
+ ASSERT(try_catch_handler() == that);
thread_local_.set_try_catch_handler_address(
reinterpret_cast<Address>(that->next_));
thread_local_.catcher_ = NULL;
@@ -380,16 +380,16 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
int limit = Max(frame_limit, 0);
Handle<JSArray> stack_trace = Factory::NewJSArray(frame_limit);
- Handle<String> column_key = Factory::LookupAsciiSymbol("column");
- Handle<String> line_key = Factory::LookupAsciiSymbol("lineNumber");
- Handle<String> script_key = Factory::LookupAsciiSymbol("scriptName");
+ Handle<String> column_key = Factory::LookupAsciiSymbol("column");
+ Handle<String> line_key = Factory::LookupAsciiSymbol("lineNumber");
+ Handle<String> script_key = Factory::LookupAsciiSymbol("scriptName");
Handle<String> name_or_source_url_key =
Factory::LookupAsciiSymbol("nameOrSourceURL");
Handle<String> script_name_or_source_url_key =
Factory::LookupAsciiSymbol("scriptNameOrSourceURL");
- Handle<String> function_key = Factory::LookupAsciiSymbol("functionName");
- Handle<String> eval_key = Factory::LookupAsciiSymbol("isEval");
- Handle<String> constructor_key = Factory::LookupAsciiSymbol("isConstructor");
+ Handle<String> function_key = Factory::LookupAsciiSymbol("functionName");
+ Handle<String> eval_key = Factory::LookupAsciiSymbol("isEval");
+ Handle<String> constructor_key = Factory::LookupAsciiSymbol("isConstructor");
StackTraceFrameIterator it;
int frames_seen = 0;
@@ -421,16 +421,16 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
// tag.
column_offset += script->column_offset()->value();
}
- SetProperty(stackFrame, column_key,
- Handle<Smi>(Smi::FromInt(column_offset + 1)), NONE);
+ SetLocalPropertyNoThrow(stackFrame, column_key,
+ Handle<Smi>(Smi::FromInt(column_offset + 1)));
}
- SetProperty(stackFrame, line_key,
- Handle<Smi>(Smi::FromInt(line_number + 1)), NONE);
+ SetLocalPropertyNoThrow(stackFrame, line_key,
+ Handle<Smi>(Smi::FromInt(line_number + 1)));
}
if (options & StackTrace::kScriptName) {
Handle<Object> script_name(script->name());
- SetProperty(stackFrame, script_key, script_name, NONE);
+ SetLocalPropertyNoThrow(stackFrame, script_key, script_name);
}
if (options & StackTrace::kScriptNameOrSourceURL) {
@@ -446,7 +446,8 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
if (caught_exception) {
result = Factory::undefined_value();
}
- SetProperty(stackFrame, script_name_or_source_url_key, result, NONE);
+ SetLocalPropertyNoThrow(stackFrame, script_name_or_source_url_key,
+ result);
}
if (options & StackTrace::kFunctionName) {
@@ -454,20 +455,20 @@ Handle<JSArray> Top::CaptureCurrentStackTrace(
if (fun_name->ToBoolean()->IsFalse()) {
fun_name = Handle<Object>(fun->shared()->inferred_name());
}
- SetProperty(stackFrame, function_key, fun_name, NONE);
+ SetLocalPropertyNoThrow(stackFrame, function_key, fun_name);
}
if (options & StackTrace::kIsEval) {
int type = Smi::cast(script->compilation_type())->value();
Handle<Object> is_eval = (type == Script::COMPILATION_TYPE_EVAL) ?
Factory::true_value() : Factory::false_value();
- SetProperty(stackFrame, eval_key, is_eval, NONE);
+ SetLocalPropertyNoThrow(stackFrame, eval_key, is_eval);
}
if (options & StackTrace::kIsConstructor) {
Handle<Object> is_constructor = (frames[i].is_constructor()) ?
Factory::true_value() : Factory::false_value();
- SetProperty(stackFrame, constructor_key, is_constructor, NONE);
+ SetLocalPropertyNoThrow(stackFrame, constructor_key, is_constructor);
}
FixedArray::cast(stack_trace->elements())->set(frames_seen, *stackFrame);
@@ -731,6 +732,13 @@ Failure* Top::Throw(Object* exception, MessageLocation* location) {
Failure* Top::ReThrow(MaybeObject* exception, MessageLocation* location) {
+ bool can_be_caught_externally = false;
+ ShouldReportException(&can_be_caught_externally,
+ is_catchable_by_javascript(exception));
+ if (can_be_caught_externally) {
+ thread_local_.catcher_ = try_catch_handler();
+ }
+
// Set the exception being re-thrown.
set_pending_exception(exception);
return Failure::Exception();
@@ -806,7 +814,7 @@ void Top::ComputeLocation(MessageLocation* target) {
}
-bool Top::ShouldReportException(bool* is_caught_externally,
+bool Top::ShouldReportException(bool* can_be_caught_externally,
bool catchable_by_javascript) {
// Find the top-most try-catch handler.
StackHandler* handler =
@@ -822,13 +830,13 @@ bool Top::ShouldReportException(bool* is_caught_externally,
// The exception has been externally caught if and only if there is
// an external handler which is on top of the top-most try-catch
// handler.
- *is_caught_externally = external_handler_address != NULL &&
+ *can_be_caught_externally = external_handler_address != NULL &&
(handler == NULL || handler->address() > external_handler_address ||
!catchable_by_javascript);
- if (*is_caught_externally) {
+ if (*can_be_caught_externally) {
// Only report the exception if the external handler is verbose.
- return thread_local_.TryCatchHandler()->is_verbose_;
+ return try_catch_handler()->is_verbose_;
} else {
// Report the exception if it isn't caught by JavaScript code.
return handler == NULL;
@@ -847,14 +855,12 @@ void Top::DoThrow(MaybeObject* exception,
Handle<Object> exception_handle(exception_object);
// Determine reporting and whether the exception is caught externally.
- bool is_out_of_memory = exception == Failure::OutOfMemoryException();
- bool is_termination_exception = exception == Heap::termination_exception();
- bool catchable_by_javascript = !is_termination_exception && !is_out_of_memory;
+ bool catchable_by_javascript = is_catchable_by_javascript(exception);
// Only real objects can be caught by JS.
ASSERT(!catchable_by_javascript || is_object);
- bool is_caught_externally = false;
+ bool can_be_caught_externally = false;
bool should_report_exception =
- ShouldReportException(&is_caught_externally, catchable_by_javascript);
+ ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
bool report_exception = catchable_by_javascript && should_report_exception;
#ifdef ENABLE_DEBUGGER_SUPPORT
@@ -868,8 +874,8 @@ void Top::DoThrow(MaybeObject* exception,
Handle<Object> message_obj;
MessageLocation potential_computed_location;
bool try_catch_needs_message =
- is_caught_externally &&
- thread_local_.TryCatchHandler()->capture_message_;
+ can_be_caught_externally &&
+ try_catch_handler()->capture_message_;
if (report_exception || try_catch_needs_message) {
if (location == NULL) {
// If no location was specified we use a computed one instead
@@ -907,8 +913,8 @@ void Top::DoThrow(MaybeObject* exception,
}
}
- if (is_caught_externally) {
- thread_local_.catcher_ = thread_local_.TryCatchHandler();
+ if (can_be_caught_externally) {
+ thread_local_.catcher_ = try_catch_handler();
}
// NOTE: Notifying the debugger or generating the message
@@ -924,22 +930,63 @@ void Top::DoThrow(MaybeObject* exception,
}
+bool Top::IsExternallyCaught() {
+ ASSERT(has_pending_exception());
+
+ if ((thread_local_.catcher_ == NULL) ||
+ (try_catch_handler() != thread_local_.catcher_)) {
+ // When throwing the exception, we found no v8::TryCatch
+ // which should care about this exception.
+ return false;
+ }
+
+ if (!is_catchable_by_javascript(pending_exception())) {
+ return true;
+ }
+
+ // Get the address of the external handler so we can compare the address to
+ // determine which one is closer to the top of the stack.
+ Address external_handler_address = thread_local_.try_catch_handler_address();
+ ASSERT(external_handler_address != NULL);
+
+ // The exception has been externally caught if and only if there is
+ // an external handler which is on top of the top-most try-finally
+ // handler.
+ // There should be no try-catch blocks as they would prohibit us from
+ // finding external catcher in the first place (see catcher_ check above).
+ //
+ // Note, that finally clause would rethrow an exception unless it's
+ // aborted by jumps in control flow like return, break, etc. and we'll
+ // have another chances to set proper v8::TryCatch.
+ StackHandler* handler =
+ StackHandler::FromAddress(Top::handler(Top::GetCurrentThread()));
+ while (handler != NULL && handler->address() < external_handler_address) {
+ ASSERT(!handler->is_try_catch());
+ if (handler->is_try_finally()) return false;
+
+ handler = handler->next();
+ }
+
+ return true;
+}
+
+
void Top::ReportPendingMessages() {
ASSERT(has_pending_exception());
- setup_external_caught();
// If the pending exception is OutOfMemoryException set out_of_memory in
// the global context. Note: We have to mark the global context here
// since the GenerateThrowOutOfMemory stub cannot make a RuntimeCall to
// set it.
- bool external_caught = thread_local_.external_caught_exception_;
+ bool external_caught = IsExternallyCaught();
+ thread_local_.external_caught_exception_ = external_caught;
HandleScope scope;
if (thread_local_.pending_exception_ == Failure::OutOfMemoryException()) {
context()->mark_out_of_memory();
} else if (thread_local_.pending_exception_ ==
Heap::termination_exception()) {
if (external_caught) {
- thread_local_.TryCatchHandler()->can_continue_ = false;
- thread_local_.TryCatchHandler()->exception_ = Heap::null_value();
+ try_catch_handler()->can_continue_ = false;
+ try_catch_handler()->exception_ = Heap::null_value();
}
} else {
// At this point all non-object (failure) exceptions have
@@ -948,9 +995,8 @@ void Top::ReportPendingMessages() {
Handle<Object> exception(pending_exception_object);
thread_local_.external_caught_exception_ = false;
if (external_caught) {
- thread_local_.TryCatchHandler()->can_continue_ = true;
- thread_local_.TryCatchHandler()->exception_ =
- thread_local_.pending_exception_;
+ try_catch_handler()->can_continue_ = true;
+ try_catch_handler()->exception_ = thread_local_.pending_exception_;
if (!thread_local_.pending_message_obj_->IsTheHole()) {
try_catch_handler()->message_ = thread_local_.pending_message_obj_;
}
diff --git a/deps/v8/src/top.h b/deps/v8/src/top.h
index 9d8aa8227f..26ae542f59 100644
--- a/deps/v8/src/top.h
+++ b/deps/v8/src/top.h
@@ -41,6 +41,15 @@ class Simulator;
#define RETURN_IF_SCHEDULED_EXCEPTION() \
if (Top::has_scheduled_exception()) return Top::PromoteScheduledException()
+#define RETURN_IF_EMPTY_HANDLE_VALUE(call, value) \
+ if (call.is_null()) { \
+ ASSERT(Top::has_pending_exception()); \
+ return value; \
+ }
+
+#define RETURN_IF_EMPTY_HANDLE(call) \
+ RETURN_IF_EMPTY_HANDLE_VALUE(call, Failure::Exception())
+
// Top has static variables used for JavaScript execution.
class SaveContext; // Forward declaration.
@@ -240,12 +249,7 @@ class Top {
thread_local_.scheduled_exception_ = Heap::the_hole_value();
}
- static void setup_external_caught() {
- thread_local_.external_caught_exception_ =
- has_pending_exception() &&
- (thread_local_.catcher_ != NULL) &&
- (try_catch_handler() == thread_local_.catcher_);
- }
+ static bool IsExternallyCaught();
static void SetCaptureStackTraceForUncaughtExceptions(
bool capture,
@@ -256,6 +260,11 @@ class Top {
// exception.
static bool is_out_of_memory();
+ static bool is_catchable_by_javascript(MaybeObject* exception) {
+ return (exception != Failure::OutOfMemoryException()) &&
+ (exception != Heap::termination_exception());
+ }
+
// JS execution stack (see frames.h).
static Address c_entry_fp(ThreadLocalTop* thread) {
return thread->c_entry_fp_;
@@ -388,7 +397,7 @@ class Top {
const char* message);
// Checks if exception should be reported and finds out if it's
// caught externally.
- static bool ShouldReportException(bool* is_caught_externally,
+ static bool ShouldReportException(bool* can_be_caught_externally,
bool catchable_by_javascript);
// Attempts to compute the current source location, storing the
diff --git a/deps/v8/src/uri.js b/deps/v8/src/uri.js
index 179fa92863..e94b3fe56a 100644
--- a/deps/v8/src/uri.js
+++ b/deps/v8/src/uri.js
@@ -90,11 +90,13 @@ function URIEncodePair(cc1 , cc2, result, index) {
}
-function URIHexCharsToCharCode(ch1, ch2) {
- if (HexValueOf(ch1) == -1 || HexValueOf(ch2) == -1) {
+function URIHexCharsToCharCode(highChar, lowChar) {
+ var highCode = HexValueOf(highChar);
+ var lowCode = HexValueOf(lowChar);
+ if (highCode == -1 || lowCode == -1) {
throw new $URIError("URI malformed");
}
- return HexStrToCharCode(ch1 + ch2);
+ return (highCode << 4) | lowCode;
}
@@ -196,7 +198,7 @@ function Decode(uri, reserved) {
var ch = uri.charAt(k);
if (ch == '%') {
if (k + 2 >= uriLength) throw new $URIError("URI malformed");
- var cc = URIHexCharsToCharCode(uri.charAt(++k), uri.charAt(++k));
+ var cc = URIHexCharsToCharCode(uri.charCodeAt(++k), uri.charCodeAt(++k));
if (cc >> 7) {
var n = 0;
while (((cc << ++n) & 0x80) != 0) ;
@@ -206,7 +208,7 @@ function Decode(uri, reserved) {
if (k + 3 * (n - 1) >= uriLength) throw new $URIError("URI malformed");
for (var i = 1; i < n; i++) {
if (uri.charAt(++k) != '%') throw new $URIError("URI malformed");
- octets[i] = URIHexCharsToCharCode(uri.charAt(++k), uri.charAt(++k));
+ octets[i] = URIHexCharsToCharCode(uri.charCodeAt(++k), uri.charCodeAt(++k));
}
index = URIDecodeOctets(octets, result, index);
} else {
@@ -325,9 +327,7 @@ function URIEncodeComponent(component) {
}
-function HexValueOf(c) {
- var code = c.charCodeAt(0);
-
+function HexValueOf(code) {
// 0-9
if (code >= 48 && code <= 57) return code - 48;
// A-F
@@ -356,18 +356,6 @@ function CharCodeToHex4Str(cc) {
}
-// Converts hex string to char code. Not efficient.
-function HexStrToCharCode(s) {
- var m = 0;
- var r = 0;
- for (var i = s.length - 1; i >= 0; --i) {
- r = r + (HexValueOf(s.charAt(i)) << m);
- m = m + 4;
- }
- return r;
-}
-
-
// Returns true if all digits in string s are valid hex numbers
function IsValidHex(s) {
for (var i = 0; i < s.length; ++i) {
diff --git a/deps/v8/src/v8-counters.h b/deps/v8/src/v8-counters.h
index aa30e4e150..9b91acebcd 100644
--- a/deps/v8/src/v8-counters.h
+++ b/deps/v8/src/v8-counters.h
@@ -128,6 +128,7 @@ namespace internal {
SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
SC(map_slow_to_fast_elements, V8.MapSlowToFastElements) \
SC(map_fast_to_slow_elements, V8.MapFastToSlowElements) \
+ SC(map_to_pixel_array_elements, V8.MapToPixelArrayElements) \
/* How is the generic keyed-load stub used? */ \
SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi) \
SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol) \
diff --git a/deps/v8/src/v8.cc b/deps/v8/src/v8.cc
index c5a5775e5b..0ff7649340 100644
--- a/deps/v8/src/v8.cc
+++ b/deps/v8/src/v8.cc
@@ -34,7 +34,6 @@
#include "hydrogen.h"
#include "lithium-allocator.h"
#include "log.h"
-#include "oprofile-agent.h"
#include "runtime-profiler.h"
#include "serialize.h"
#include "simulator.h"
@@ -129,7 +128,6 @@ bool V8::Initialize(Deserializer* des) {
// objects in place for creating the code object used for probing.
CPU::Setup();
- OProfileAgent::Initialize();
Deoptimizer::Setup();
LAllocator::Setup();
RuntimeProfiler::Setup();
@@ -161,7 +159,6 @@ void V8::TearDown() {
Logger::EnsureTickerStopped();
Deoptimizer::TearDown();
- OProfileAgent::TearDown();
if (FLAG_preemption) {
v8::Locker locker;
diff --git a/deps/v8/src/v8globals.h b/deps/v8/src/v8globals.h
index 85bd17e0b1..d11bc38331 100644
--- a/deps/v8/src/v8globals.h
+++ b/deps/v8/src/v8globals.h
@@ -69,21 +69,21 @@ const intptr_t kFailureTagMask = (1 << kFailureTagSize) - 1;
// Zap-value: The value used for zapping dead objects.
-// Should be a recognizable hex value tagged as a heap object pointer.
+// Should be a recognizable hex value tagged as a failure.
#ifdef V8_HOST_ARCH_64_BIT
const Address kZapValue =
- reinterpret_cast<Address>(V8_UINT64_C(0xdeadbeedbeadbeed));
+ reinterpret_cast<Address>(V8_UINT64_C(0xdeadbeedbeadbeef));
const Address kHandleZapValue =
- reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddead));
+ reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddeaf));
const Address kFromSpaceZapValue =
- reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdad));
+ reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdaf));
const uint64_t kDebugZapValue = V8_UINT64_C(0xbadbaddbbadbaddb);
-const uint64_t kSlotsZapValue = V8_UINT64_C(0xbeefdeadbeefdeed);
+const uint64_t kSlotsZapValue = V8_UINT64_C(0xbeefdeadbeefdeef);
#else
-const Address kZapValue = reinterpret_cast<Address>(0xdeadbeed);
-const Address kHandleZapValue = reinterpret_cast<Address>(0xbaddead);
-const Address kFromSpaceZapValue = reinterpret_cast<Address>(0xbeefdad);
-const uint32_t kSlotsZapValue = 0xbeefdeed;
+const Address kZapValue = reinterpret_cast<Address>(0xdeadbeef);
+const Address kHandleZapValue = reinterpret_cast<Address>(0xbaddeaf);
+const Address kFromSpaceZapValue = reinterpret_cast<Address>(0xbeefdaf);
+const uint32_t kSlotsZapValue = 0xbeefdeef;
const uint32_t kDebugZapValue = 0xbadbaddb;
#endif
diff --git a/deps/v8/src/v8natives.js b/deps/v8/src/v8natives.js
index b0fb5bf171..884b6f414d 100644
--- a/deps/v8/src/v8natives.js
+++ b/deps/v8/src/v8natives.js
@@ -586,17 +586,20 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
// Step 7
if (desc.isConfigurable() ||
(desc.hasEnumerable() &&
- desc.isEnumerable() != current.isEnumerable()))
+ desc.isEnumerable() != current.isEnumerable())) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
+ }
// Step 8
if (!IsGenericDescriptor(desc)) {
// Step 9a
- if (IsDataDescriptor(current) != IsDataDescriptor(desc))
+ if (IsDataDescriptor(current) != IsDataDescriptor(desc)) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
+ }
// Step 10a
if (IsDataDescriptor(current) && IsDataDescriptor(desc)) {
- if (!current.isWritable() && desc.isWritable())
+ if (!current.isWritable() && desc.isWritable()) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
+ }
if (!current.isWritable() && desc.hasValue() &&
!SameValue(desc.getValue(), current.getValue())) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
@@ -604,11 +607,12 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
}
// Step 11
if (IsAccessorDescriptor(desc) && IsAccessorDescriptor(current)) {
- if (desc.hasSetter() && !SameValue(desc.getSet(), current.getSet())){
+ if (desc.hasSetter() && !SameValue(desc.getSet(), current.getSet())) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
}
- if (desc.hasGetter() && !SameValue(desc.getGet(),current.getGet()))
+ if (desc.hasGetter() && !SameValue(desc.getGet(),current.getGet())) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
+ }
}
}
}
@@ -1153,33 +1157,49 @@ function FunctionBind(this_arg) { // Length is 1.
}
// this_arg is not an argument that should be bound.
var argc_bound = (%_ArgumentsLength() || 1) - 1;
- if (argc_bound > 0) {
+ var fn = this;
+ if (argc_bound == 0) {
+ var result = function() {
+ if (%_IsConstructCall()) {
+ // %NewObjectFromBound implicitly uses arguments passed to this
+ // function. We do not pass the arguments object explicitly to avoid
+ // materializing it and guarantee that this function will be optimized.
+ return %NewObjectFromBound(fn, null);
+ }
+
+ return fn.apply(this_arg, arguments);
+ };
+ } else {
var bound_args = new $Array(argc_bound);
for(var i = 0; i < argc_bound; i++) {
bound_args[i] = %_Arguments(i+1);
}
+
+ var result = function() {
+ // If this is a construct call we use a special runtime method
+ // to generate the actual object using the bound function.
+ if (%_IsConstructCall()) {
+ // %NewObjectFromBound implicitly uses arguments passed to this
+ // function. We do not pass the arguments object explicitly to avoid
+ // materializing it and guarantee that this function will be optimized.
+ return %NewObjectFromBound(fn, bound_args);
+ }
+
+ // Combine the args we got from the bind call with the args
+ // given as argument to the invocation.
+ var argc = %_ArgumentsLength();
+ var args = new $Array(argc + argc_bound);
+ // Add bound arguments.
+ for (var i = 0; i < argc_bound; i++) {
+ args[i] = bound_args[i];
+ }
+ // Add arguments from call.
+ for (var i = 0; i < argc; i++) {
+ args[argc_bound + i] = %_Arguments(i);
+ }
+ return fn.apply(this_arg, args);
+ };
}
- var fn = this;
- var result = function() {
- // Combine the args we got from the bind call with the args
- // given as argument to the invocation.
- var argc = %_ArgumentsLength();
- var args = new $Array(argc + argc_bound);
- // Add bound arguments.
- for (var i = 0; i < argc_bound; i++) {
- args[i] = bound_args[i];
- }
- // Add arguments from call.
- for (var i = 0; i < argc; i++) {
- args[argc_bound + i] = %_Arguments(i);
- }
- // If this is a construct call we use a special runtime method
- // to generate the actual object using the bound function.
- if (%_IsConstructCall()) {
- return %NewObjectFromBound(fn, args);
- }
- return fn.apply(this_arg, args);
- };
// We already have caller and arguments properties on functions,
// which are non-configurable. It therefore makes no sence to
diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc
index ac1887d55a..8c233375e7 100644
--- a/deps/v8/src/version.cc
+++ b/deps/v8/src/version.cc
@@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 1
-#define BUILD_NUMBER 2
+#define BUILD_NUMBER 5
#define PATCH_LEVEL 0
#define CANDIDATE_VERSION false
diff --git a/deps/v8/src/x64/assembler-x64-inl.h b/deps/v8/src/x64/assembler-x64-inl.h
index 440645222f..285c07812f 100644
--- a/deps/v8/src/x64/assembler-x64-inl.h
+++ b/deps/v8/src/x64/assembler-x64-inl.h
@@ -366,6 +366,8 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
CPU::FlushICache(pc_, sizeof(Address));
} else if (RelocInfo::IsCodeTarget(mode)) {
visitor->VisitCodeTarget(this);
+ } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
+ visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
visitor->VisitExternalReference(target_reference_address());
CPU::FlushICache(pc_, sizeof(Address));
@@ -391,6 +393,8 @@ void RelocInfo::Visit() {
CPU::FlushICache(pc_, sizeof(Address));
} else if (RelocInfo::IsCodeTarget(mode)) {
StaticVisitor::VisitCodeTarget(this);
+ } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
+ StaticVisitor::VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(target_reference_address());
CPU::FlushICache(pc_, sizeof(Address));
diff --git a/deps/v8/src/x64/assembler-x64.cc b/deps/v8/src/x64/assembler-x64.cc
index ef069a98bc..697f6cd403 100644
--- a/deps/v8/src/x64/assembler-x64.cc
+++ b/deps/v8/src/x64/assembler-x64.cc
@@ -1188,6 +1188,16 @@ void Assembler::imull(Register dst, Register src) {
}
+void Assembler::imull(Register dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ last_pc_ = pc_;
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xAF);
+ emit_operand(dst, src);
+}
+
+
void Assembler::imull(Register dst, Register src, Immediate imm) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
@@ -3076,10 +3086,15 @@ void Assembler::dd(uint32_t data) {
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
ASSERT(rmode != RelocInfo::NONE);
// Don't record external references unless the heap will be serialized.
- if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
- !Serializer::enabled() &&
- !FLAG_debug_code) {
- return;
+ if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
+#ifdef DEBUG
+ if (!Serializer::enabled()) {
+ Serializer::TooLateToEnableNow();
+ }
+#endif
+ if (!Serializer::enabled() && !FLAG_debug_code) {
+ return;
+ }
}
RelocInfo rinfo(pc_, rmode, data);
reloc_info_writer.Write(&rinfo);
diff --git a/deps/v8/src/x64/assembler-x64.h b/deps/v8/src/x64/assembler-x64.h
index c597783b5e..91e7e6cc61 100644
--- a/deps/v8/src/x64/assembler-x64.h
+++ b/deps/v8/src/x64/assembler-x64.h
@@ -565,6 +565,17 @@ class Assembler : public Malloced {
// One byte opcode for test eax,0xXXXXXXXX.
static const byte kTestEaxByte = 0xA9;
+ // One byte opcode for test al, 0xXX.
+ static const byte kTestAlByte = 0xA8;
+ // One byte opcode for nop.
+ static const byte kNopByte = 0x90;
+
+ // One byte prefix for a short conditional jump.
+ static const byte kJccShortPrefix = 0x70;
+ static const byte kJncShortOpcode = kJccShortPrefix | not_carry;
+ static const byte kJcShortOpcode = kJccShortPrefix | carry;
+
+
// ---------------------------------------------------------------------------
// Code generation
@@ -863,6 +874,7 @@ class Assembler : public Malloced {
void imul(Register dst, Register src, Immediate imm); // dst = src * imm.
// Signed 32-bit multiply instructions.
void imull(Register dst, Register src); // dst = dst * src.
+ void imull(Register dst, const Operand& src); // dst = dst * src.
void imull(Register dst, Register src, Immediate imm); // dst = src * imm.
void incq(Register dst);
diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc
index 079dc8a590..c362f7b79f 100644
--- a/deps/v8/src/x64/builtins-x64.cc
+++ b/deps/v8/src/x64/builtins-x64.cc
@@ -596,12 +596,21 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
- Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
+ Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
- __ int3();
+ // For now, we are relying on the fact that Runtime::NotifyOSR
+ // doesn't do any garbage collection which allows us to save/restore
+ // the registers without worrying about which of them contain
+ // pointers. This seems a bit fragile.
+ __ Pushad();
+ __ EnterInternalFrame();
+ __ CallRuntime(Runtime::kNotifyOSR, 0);
+ __ LeaveInternalFrame();
+ __ Popad();
+ __ ret(0);
}
@@ -642,6 +651,13 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// Change context eagerly in case we need the global receiver.
__ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+ // Do not transform the receiver for strict mode functions.
+ __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
+ Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
+ __ j(not_equal, &shift_arguments);
+
+ // Compute the receiver in non-strict mode.
__ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
__ JumpIfSmi(rbx, &convert_to_object);
@@ -798,6 +814,14 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Compute the receiver.
Label call_to_object, use_global_receiver, push_receiver;
__ movq(rbx, Operand(rbp, kReceiverOffset));
+
+ // Do not transform the receiver for strict mode functions.
+ __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
+ Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
+ __ j(not_equal, &push_receiver);
+
+ // Compute the receiver in non-strict mode.
__ JumpIfSmi(rbx, &call_to_object);
__ CompareRoot(rbx, Heap::kNullValueRootIndex);
__ j(equal, &use_global_receiver);
@@ -1406,7 +1430,58 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
- __ int3();
+ // Get the loop depth of the stack guard check. This is recorded in
+ // a test(rax, depth) instruction right after the call.
+ Label stack_check;
+ __ movq(rbx, Operand(rsp, 0)); // return address
+ __ movzxbq(rbx, Operand(rbx, 1)); // depth
+
+ // Get the loop nesting level at which we allow OSR from the
+ // unoptimized code and check if we want to do OSR yet. If not we
+ // should perform a stack guard check so we can get interrupts while
+ // waiting for on-stack replacement.
+ __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+ __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
+ __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
+ __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
+ __ j(greater, &stack_check);
+
+ // Pass the function to optimize as the argument to the on-stack
+ // replacement runtime function.
+ __ EnterInternalFrame();
+ __ push(rax);
+ __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
+ __ LeaveInternalFrame();
+
+ // If the result was -1 it means that we couldn't optimize the
+ // function. Just return and continue in the unoptimized version.
+ NearLabel skip;
+ __ SmiCompare(rax, Smi::FromInt(-1));
+ __ j(not_equal, &skip);
+ __ ret(0);
+
+ // If we decide not to perform on-stack replacement we perform a
+ // stack guard check to enable interrupts.
+ __ bind(&stack_check);
+ NearLabel ok;
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+ __ j(above_equal, &ok);
+
+ StackCheckStub stub;
+ __ TailCallStub(&stub);
+ __ Abort("Unreachable code: returned from tail call.");
+ __ bind(&ok);
+ __ ret(0);
+
+ __ bind(&skip);
+ // Untag the AST id and push it on the stack.
+ __ SmiToInteger32(rax, rax);
+ __ push(rax);
+
+ // Generate the code for doing the frame-to-frame translation using
+ // the deoptimizer infrastructure.
+ Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
+ generator.Generate();
}
diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc
index b0cadda6d3..0cfe665ced 100644
--- a/deps/v8/src/x64/code-stubs-x64.cc
+++ b/deps/v8/src/x64/code-stubs-x64.cc
@@ -2268,46 +2268,46 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// rcx: RegExp data (FixedArray)
// rdx: Number of capture registers
// Check that the second argument is a string.
- __ movq(rax, Operand(rsp, kSubjectOffset));
- __ JumpIfSmi(rax, &runtime);
- Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
+ __ movq(rdi, Operand(rsp, kSubjectOffset));
+ __ JumpIfSmi(rdi, &runtime);
+ Condition is_string = masm->IsObjectStringType(rdi, rbx, rbx);
__ j(NegateCondition(is_string), &runtime);
- // rax: Subject string.
- // rcx: RegExp data (FixedArray).
+ // rdi: Subject string.
+ // rax: RegExp data (FixedArray).
// rdx: Number of capture registers.
// Check that the third argument is a positive smi less than the string
// length. A negative value will be greater (unsigned comparison).
__ movq(rbx, Operand(rsp, kPreviousIndexOffset));
__ JumpIfNotSmi(rbx, &runtime);
- __ SmiCompare(rbx, FieldOperand(rax, String::kLengthOffset));
+ __ SmiCompare(rbx, FieldOperand(rdi, String::kLengthOffset));
__ j(above_equal, &runtime);
- // rcx: RegExp data (FixedArray)
+ // rax: RegExp data (FixedArray)
// rdx: Number of capture registers
// Check that the fourth object is a JSArray object.
- __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
- __ JumpIfSmi(rax, &runtime);
- __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister);
+ __ movq(rdi, Operand(rsp, kLastMatchInfoOffset));
+ __ JumpIfSmi(rdi, &runtime);
+ __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister);
__ j(not_equal, &runtime);
// Check that the JSArray is in fast case.
- __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
- __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
- __ Cmp(rax, Factory::fixed_array_map());
+ __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset));
+ __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
+ __ Cmp(rdi, Factory::fixed_array_map());
__ j(not_equal, &runtime);
// Check that the last match info has space for the capture registers and the
// additional information. Ensure no overflow in add.
STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
- __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
+ __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset));
__ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
- __ cmpl(rdx, rax);
+ __ cmpl(rdx, rdi);
__ j(greater, &runtime);
- // rcx: RegExp data (FixedArray)
+ // rax: RegExp data (FixedArray)
// Check the representation and encoding of the subject string.
NearLabel seq_ascii_string, seq_two_byte_string, check_code;
- __ movq(rax, Operand(rsp, kSubjectOffset));
- __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+ __ movq(rdi, Operand(rsp, kSubjectOffset));
+ __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
__ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
// First check for flat two byte string.
__ andb(rbx, Immediate(
@@ -2328,13 +2328,13 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
__ j(not_zero, &runtime);
// String is a cons string.
- __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset));
+ __ movq(rdx, FieldOperand(rdi, ConsString::kSecondOffset));
__ Cmp(rdx, Factory::empty_string());
__ j(not_equal, &runtime);
- __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset));
- __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+ __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
+ __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
// String is a cons string with empty second part.
- // rax: first part of cons string.
+ // rdi: first part of cons string.
// rbx: map of first part of cons string.
// Is first part a flat two byte string?
__ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
@@ -2347,17 +2347,17 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ j(not_zero, &runtime);
__ bind(&seq_ascii_string);
- // rax: subject string (sequential ascii)
- // rcx: RegExp data (FixedArray)
- __ movq(r11, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset));
- __ Set(rdi, 1); // Type is ascii.
+ // rdi: subject string (sequential ascii)
+ // rax: RegExp data (FixedArray)
+ __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
+ __ Set(rcx, 1); // Type is ascii.
__ jmp(&check_code);
__ bind(&seq_two_byte_string);
- // rax: subject string (flat two-byte)
- // rcx: RegExp data (FixedArray)
- __ movq(r11, FieldOperand(rcx, JSRegExp::kDataUC16CodeOffset));
- __ Set(rdi, 0); // Type is two byte.
+ // rdi: subject string (flat two-byte)
+ // rax: RegExp data (FixedArray)
+ __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
+ __ Set(rcx, 0); // Type is two byte.
__ bind(&check_code);
// Check that the irregexp code has been generated for the actual string
@@ -2366,27 +2366,24 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ CmpObjectType(r11, CODE_TYPE, kScratchRegister);
__ j(not_equal, &runtime);
- // rax: subject string
- // rdi: encoding of subject string (1 if ascii, 0 if two_byte);
+ // rdi: subject string
+ // rcx: encoding of subject string (1 if ascii, 0 if two_byte);
// r11: code
// Load used arguments before starting to push arguments for call to native
// RegExp code to avoid handling changing stack height.
__ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));
- // rax: subject string
+ // rdi: subject string
// rbx: previous index
- // rdi: encoding of subject string (1 if ascii 0 if two_byte);
+ // rcx: encoding of subject string (1 if ascii 0 if two_byte);
// r11: code
// All checks done. Now push arguments for native regexp code.
__ IncrementCounter(&Counters::regexp_entry_native, 1);
- // rsi is caller save on Windows and used to pass parameter on Linux.
- __ push(rsi);
-
static const int kRegExpExecuteArguments = 7;
- __ PrepareCallCFunction(kRegExpExecuteArguments);
int argument_slots_on_stack =
masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
+ __ EnterApiExitFrame(argument_slots_on_stack); // Clobbers rax!
// Argument 7: Indicate that this is a direct call from JavaScript.
__ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
@@ -2423,60 +2420,57 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
#endif
// Keep track on aliasing between argX defined above and the registers used.
- // rax: subject string
+ // rdi: subject string
// rbx: previous index
- // rdi: encoding of subject string (1 if ascii 0 if two_byte);
+ // rcx: encoding of subject string (1 if ascii 0 if two_byte);
// r11: code
// Argument 4: End of string data
// Argument 3: Start of string data
NearLabel setup_two_byte, setup_rest;
- __ testb(rdi, rdi);
+ __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
__ j(zero, &setup_two_byte);
- __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
- __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize));
- __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize));
+ __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
+ __ lea(arg4, FieldOperand(rdi, rcx, times_1, SeqAsciiString::kHeaderSize));
+ __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize));
__ jmp(&setup_rest);
__ bind(&setup_two_byte);
- __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
- __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize));
- __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize));
+ __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
+ __ lea(arg4, FieldOperand(rdi, rcx, times_2, SeqTwoByteString::kHeaderSize));
+ __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
__ bind(&setup_rest);
// Argument 2: Previous index.
__ movq(arg2, rbx);
// Argument 1: Subject string.
- __ movq(arg1, rax);
+#ifdef WIN64_
+ __ movq(arg1, rdi);
+#else
+ // Already there in AMD64 calling convention.
+ ASSERT(arg1.is(rdi));
+#endif
// Locate the code entry and call it.
__ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
- __ CallCFunction(r11, kRegExpExecuteArguments);
+ __ call(r11);
- // rsi is caller save, as it is used to pass parameter.
- __ pop(rsi);
+ __ LeaveApiExitFrame();
// Check the result.
NearLabel success;
+ Label exception;
__ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
__ j(equal, &success);
- NearLabel failure;
- __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
- __ j(equal, &failure);
__ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
- // If not exception it can only be retry. Handle that in the runtime system.
+ __ j(equal, &exception);
+ __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
+ // If none of the above, it can only be retry.
+ // Handle that in the runtime system.
__ j(not_equal, &runtime);
- // Result must now be exception. If there is no pending exception already a
- // stack overflow (on the backtrack stack) was detected in RegExp code but
- // haven't created the exception yet. Handle that in the runtime system.
- // TODO(592): Rerunning the RegExp to get the stack overflow exception.
- ExternalReference pending_exception_address(Top::k_pending_exception_address);
- __ movq(kScratchRegister, pending_exception_address);
- __ Cmp(kScratchRegister, Factory::the_hole_value());
- __ j(equal, &runtime);
- __ bind(&failure);
- // For failure and exception return null.
- __ Move(rax, Factory::null_value());
+
+ // For failure return null.
+ __ LoadRoot(rax, Heap::kNullValueRootIndex);
__ ret(4 * kPointerSize);
// Load RegExp data.
@@ -2537,6 +2531,27 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ movq(rax, Operand(rsp, kLastMatchInfoOffset));
__ ret(4 * kPointerSize);
+ __ bind(&exception);
+ // Result must now be exception. If there is no pending exception already a
+ // stack overflow (on the backtrack stack) was detected in RegExp code but
+ // haven't created the exception yet. Handle that in the runtime system.
+ // TODO(592): Rerunning the RegExp to get the stack overflow exception.
+ ExternalReference pending_exception_address(Top::k_pending_exception_address);
+ __ movq(rbx, pending_exception_address);
+ __ movq(rax, Operand(rbx, 0));
+ __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ cmpq(rax, rdx);
+ __ j(equal, &runtime);
+ __ movq(Operand(rbx, 0), rdx);
+
+ __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
+ NearLabel termination_exception;
+ __ j(equal, &termination_exception);
+ __ Throw(rax);
+
+ __ bind(&termination_exception);
+ __ ThrowUncatchable(TERMINATION, rax);
+
// Do the runtime call to execute the regexp.
__ bind(&runtime);
__ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
@@ -3085,31 +3100,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
- // Check that stack should contain next handler, frame pointer, state and
- // return address in that order.
- STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
- StackHandlerConstants::kStateOffset);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
- StackHandlerConstants::kPCOffset);
-
- ExternalReference handler_address(Top::k_handler_address);
- __ movq(kScratchRegister, handler_address);
- __ movq(rsp, Operand(kScratchRegister, 0));
- // get next in chain
- __ pop(rcx);
- __ movq(Operand(kScratchRegister, 0), rcx);
- __ pop(rbp); // pop frame pointer
- __ pop(rdx); // remove state
-
- // Before returning we restore the context from the frame pointer if not NULL.
- // The frame pointer is NULL in the exception handler of a JS entry frame.
- __ Set(rsi, 0); // Tentatively set context pointer to NULL
- NearLabel skip;
- __ cmpq(rbp, Immediate(0));
- __ j(equal, &skip);
- __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- __ bind(&skip);
- __ ret(0);
+ // Throw exception in eax.
+ __ Throw(rax);
}
@@ -3251,54 +3243,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
UncatchableExceptionType type) {
- // Fetch top stack handler.
- ExternalReference handler_address(Top::k_handler_address);
- __ movq(kScratchRegister, handler_address);
- __ movq(rsp, Operand(kScratchRegister, 0));
-
- // Unwind the handlers until the ENTRY handler is found.
- NearLabel loop, done;
- __ bind(&loop);
- // Load the type of the current stack handler.
- const int kStateOffset = StackHandlerConstants::kStateOffset;
- __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
- __ j(equal, &done);
- // Fetch the next handler in the list.
- const int kNextOffset = StackHandlerConstants::kNextOffset;
- __ movq(rsp, Operand(rsp, kNextOffset));
- __ jmp(&loop);
- __ bind(&done);
-
- // Set the top handler address to next handler past the current ENTRY handler.
- __ movq(kScratchRegister, handler_address);
- __ pop(Operand(kScratchRegister, 0));
-
- if (type == OUT_OF_MEMORY) {
- // Set external caught exception to false.
- ExternalReference external_caught(Top::k_external_caught_exception_address);
- __ movq(rax, Immediate(false));
- __ store_rax(external_caught);
-
- // Set pending exception and rax to out of memory exception.
- ExternalReference pending_exception(Top::k_pending_exception_address);
- __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
- __ store_rax(pending_exception);
- }
-
- // Clear the context pointer.
- __ Set(rsi, 0);
-
- // Restore registers from handler.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
- StackHandlerConstants::kFPOffset);
- __ pop(rbp); // FP
- STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
- StackHandlerConstants::kStateOffset);
- __ pop(rdx); // State
-
- STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
- StackHandlerConstants::kPCOffset);
- __ ret(0);
+ __ ThrowUncatchable(type, rax);
}
@@ -4627,10 +4572,10 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
if (GetCondition() == equal) {
// For equality we do not care about the sign of the result.
- __ SmiSub(rax, rax, rdx);
+ __ subq(rax, rdx);
} else {
NearLabel done;
- __ SmiSub(rdx, rdx, rax);
+ __ subq(rdx, rax);
__ j(no_overflow, &done);
// Correct sign of result in case of overflow.
__ SmiNot(rdx, rdx);
@@ -4767,9 +4712,19 @@ void GenerateFastPixelArrayLoad(MacroAssembler* masm,
}
__ SmiToInteger32(untagged_key, key);
- // Verify that the receiver has pixel array elements.
__ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
- __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
+ // By passing NULL as not_pixel_array, callers signal that they have already
+ // verified that the receiver has pixel array elements.
+ if (not_pixel_array != NULL) {
+ __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
+ } else {
+ if (FLAG_debug_code) {
+ // Map check should have already made sure that elements is a pixel array.
+ __ Cmp(FieldOperand(elements, HeapObject::kMapOffset),
+ Factory::pixel_array_map());
+ __ Assert(equal, "Elements isn't a pixel array");
+ }
+ }
// Check that the smi is in range.
__ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
@@ -4783,6 +4738,88 @@ void GenerateFastPixelArrayLoad(MacroAssembler* masm,
}
+// Stores an indexed element into a pixel array, clamping the stored value.
+void GenerateFastPixelArrayStore(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register value,
+ Register elements,
+ Register scratch1,
+ bool load_elements_from_receiver,
+ bool key_is_untagged,
+ Label* key_not_smi,
+ Label* value_not_smi,
+ Label* not_pixel_array,
+ Label* out_of_range) {
+ // Register use:
+ // receiver - holds the receiver and is unchanged.
+ // key - holds the key (must be a smi) and is unchanged.
+ // value - holds the value (must be a smi) and is unchanged.
+ // elements - holds the element object of the receiver on entry if
+ // load_elements_from_receiver is false, otherwise used
+ // internally to store the pixel arrays elements and
+ // external array pointer.
+ //
+ Register external_pointer = elements;
+ Register untagged_key = scratch1;
+ Register untagged_value = receiver; // Only set once success guaranteed.
+
+ // Fetch the receiver's elements if the caller hasn't already done so.
+ if (load_elements_from_receiver) {
+ __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
+ }
+
+ // By passing NULL as not_pixel_array, callers signal that they have already
+ // verified that the receiver has pixel array elements.
+ if (not_pixel_array != NULL) {
+ __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
+ } else {
+ if (FLAG_debug_code) {
+ // Map check should have already made sure that elements is a pixel array.
+ __ Cmp(FieldOperand(elements, HeapObject::kMapOffset),
+ Factory::pixel_array_map());
+ __ Assert(equal, "Elements isn't a pixel array");
+ }
+ }
+
+ // Key must be a smi and it must be in range.
+ if (key_is_untagged) {
+ untagged_key = key;
+ } else {
+ // Some callers already have verified that the key is a smi. key_not_smi is
+ // set to NULL as a sentinel for that case. Otherwise, add an explicit
+ // check to ensure the key is a smi.
+ if (key_not_smi != NULL) {
+ __ JumpIfNotSmi(key, key_not_smi);
+ } else {
+ if (FLAG_debug_code) {
+ __ AbortIfNotSmi(key);
+ }
+ }
+ __ SmiToInteger32(untagged_key, key);
+ }
+ __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
+ __ j(above_equal, out_of_range); // unsigned check handles negative keys.
+
+ // Value must be a smi.
+ __ JumpIfNotSmi(value, value_not_smi);
+ __ SmiToInteger32(untagged_value, value);
+
+ { // Clamp the value to [0..255].
+ NearLabel done;
+ __ testl(untagged_value, Immediate(0xFFFFFF00));
+ __ j(zero, &done);
+ __ setcc(negative, untagged_value); // 1 if negative, 0 if positive.
+ __ decb(untagged_value); // 0 if negative, 255 if positive.
+ __ bind(&done);
+ }
+
+ __ movq(external_pointer,
+ FieldOperand(elements, PixelArray::kExternalPointerOffset));
+ __ movb(Operand(external_pointer, untagged_key, times_1, 0), untagged_value);
+ __ ret(0); // Return value in eax.
+}
+
#undef __
} } // namespace v8::internal
diff --git a/deps/v8/src/x64/code-stubs-x64.h b/deps/v8/src/x64/code-stubs-x64.h
index 8051d4bdbe..119b699304 100644
--- a/deps/v8/src/x64/code-stubs-x64.h
+++ b/deps/v8/src/x64/code-stubs-x64.h
@@ -452,14 +452,14 @@ class NumberToStringStub: public CodeStub {
};
-// Generate code the to load an element from a pixel array. The receiver is
-// assumed to not be a smi and to have elements, the caller must guarantee this
-// precondition. If the receiver does not have elements that are pixel arrays,
-// the generated code jumps to not_pixel_array. If key is not a smi, then the
-// generated code branches to key_not_smi. Callers can specify NULL for
-// key_not_smi to signal that a smi check has already been performed on key so
-// that the smi check is not generated . If key is not a valid index within the
-// bounds of the pixel array, the generated code jumps to out_of_range.
+// Generate code to load an element from a pixel array. The receiver is assumed
+// to not be a smi and to have elements, the caller must guarantee this
+// precondition. If key is not a smi, then the generated code branches to
+// key_not_smi. Callers can specify NULL for key_not_smi to signal that a smi
+// check has already been performed on key so that the smi check is not
+// generated. If key is not a valid index within the bounds of the pixel array,
+// the generated code jumps to out_of_range. receiver, key and elements are
+// unchanged throughout the generated code sequence.
void GenerateFastPixelArrayLoad(MacroAssembler* masm,
Register receiver,
Register key,
@@ -470,6 +470,30 @@ void GenerateFastPixelArrayLoad(MacroAssembler* masm,
Label* key_not_smi,
Label* out_of_range);
+// Generate code to store an element into a pixel array, clamping values between
+// [0..255]. The receiver is assumed to not be a smi and to have elements, the
+// caller must guarantee this precondition. If key is not a smi, then the
+// generated code branches to key_not_smi. Callers can specify NULL for
+// key_not_smi to signal that a smi check has already been performed on key so
+// that the smi check is not generated. If the value is not a smi, the
+// generated code will branch to value_not_smi. If the receiver
+// doesn't have pixel array elements, the generated code will branch to
+// not_pixel_array, unless not_pixel_array is NULL, in which case the caller
+// must ensure that the receiver has pixel array elements. If key is not a
+// valid index within the bounds of the pixel array, the generated code jumps to
+// out_of_range.
+void GenerateFastPixelArrayStore(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register value,
+ Register elements,
+ Register scratch1,
+ bool load_elements_from_receiver,
+ bool key_is_untagged,
+ Label* key_not_smi,
+ Label* value_not_smi,
+ Label* not_pixel_array,
+ Label* out_of_range);
} } // namespace v8::internal
diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc
index b8069a2cf9..150ed664b0 100644
--- a/deps/v8/src/x64/codegen-x64.cc
+++ b/deps/v8/src/x64/codegen-x64.cc
@@ -2993,21 +2993,22 @@ void CodeGenerator::GenerateReturnSequence(Result* return_value) {
// Leave the frame and return popping the arguments and the
// receiver.
frame_->Exit();
- masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
+ int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
+ __ Ret(arguments_bytes, rcx);
DeleteFrame();
#ifdef ENABLE_DEBUGGER_SUPPORT
// Add padding that will be overwritten by a debugger breakpoint.
- // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k"
+ // The shortest return sequence generated is "movq rsp, rbp; pop rbp; ret k"
// with length 7 (3 + 1 + 3).
const int kPadding = Assembler::kJSReturnSequenceLength - 7;
for (int i = 0; i < kPadding; ++i) {
masm_->int3();
}
- // Check that the size of the code used for returning matches what is
- // expected by the debugger.
- ASSERT_EQ(Assembler::kJSReturnSequenceLength,
- masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
+ // Check that the size of the code used for returning is large enough
+ // for the debugger's requirements.
+ ASSERT(Assembler::kJSReturnSequenceLength <=
+ masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}
@@ -4893,7 +4894,8 @@ void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
Load(property->value());
if (property->emit_store()) {
Result ignored =
- frame_->CallStoreIC(Handle<String>::cast(key), false);
+ frame_->CallStoreIC(Handle<String>::cast(key), false,
+ strict_mode_flag());
// A test rax instruction following the store IC call would
// indicate the presence of an inlined version of the
// store. Add a nop to indicate that there is no such
@@ -7228,19 +7230,24 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
if (property != NULL) {
Load(property->obj());
Load(property->key());
- Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
+ frame_->Push(Smi::FromInt(strict_mode_flag()));
+ Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 3);
frame_->Push(&answer);
return;
}
Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
if (variable != NULL) {
+ // Delete of an unqualified identifier is disallowed in strict mode
+ // so this code can only be reached in non-strict mode.
+ ASSERT(strict_mode_flag() == kNonStrictMode);
Slot* slot = variable->AsSlot();
if (variable->is_global()) {
LoadGlobal();
frame_->Push(variable->name());
+ frame_->Push(Smi::FromInt(kNonStrictMode));
Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
- CALL_FUNCTION, 2);
+ CALL_FUNCTION, 3);
frame_->Push(&answer);
return;
@@ -8233,7 +8240,7 @@ Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
Result result;
if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
- result = frame()->CallStoreIC(name, is_contextual);
+ result = frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
// A test rax instruction following the call signals that the inobject
// property case was inlined. Ensure that there is not a test rax
// instruction here.
@@ -8333,7 +8340,7 @@ Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
slow.Bind(&value, &receiver);
frame()->Push(&receiver);
frame()->Push(&value);
- result = frame()->CallStoreIC(name, is_contextual);
+ result = frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
// Encode the offset to the map check instruction and the offset
// to the write barrier store address computation in a test rax
// instruction.
diff --git a/deps/v8/src/x64/cpu-x64.cc b/deps/v8/src/x64/cpu-x64.cc
index 30134bf149..513c52286e 100644
--- a/deps/v8/src/x64/cpu-x64.cc
+++ b/deps/v8/src/x64/cpu-x64.cc
@@ -43,6 +43,9 @@ namespace internal {
void CPU::Setup() {
CpuFeatures::Probe(true);
+ if (Serializer::enabled()) {
+ V8::DisableCrankshaft();
+ }
}
diff --git a/deps/v8/src/x64/deoptimizer-x64.cc b/deps/v8/src/x64/deoptimizer-x64.cc
index ed6c47bf99..7d6e6d8522 100644
--- a/deps/v8/src/x64/deoptimizer-x64.cc
+++ b/deps/v8/src/x64/deoptimizer-x64.cc
@@ -203,14 +203,51 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
void Deoptimizer::PatchStackCheckCodeAt(Address pc_after,
Code* check_code,
Code* replacement_code) {
- UNIMPLEMENTED();
+ Address call_target_address = pc_after - kIntSize;
+ ASSERT(check_code->entry() ==
+ Assembler::target_address_at(call_target_address));
+ // The stack check code matches the pattern:
+ //
+ // cmp rsp, <limit>
+ // jae ok
+ // call <stack guard>
+ // test rax, <loop nesting depth>
+ // ok: ...
+ //
+ // We will patch away the branch so the code is:
+ //
+ // cmp rsp, <limit> ;; Not changed
+ // nop
+ // nop
+ // call <on-stack replacment>
+ // test rax, <loop nesting depth>
+ // ok:
+ //
+ ASSERT(*(call_target_address - 3) == 0x73 && // jae
+ *(call_target_address - 2) == 0x05 && // offset
+ *(call_target_address - 1) == 0xe8); // call
+ *(call_target_address - 3) = 0x90; // nop
+ *(call_target_address - 2) = 0x90; // nop
+ Assembler::set_target_address_at(call_target_address,
+ replacement_code->entry());
}
void Deoptimizer::RevertStackCheckCodeAt(Address pc_after,
Code* check_code,
Code* replacement_code) {
- UNIMPLEMENTED();
+ Address call_target_address = pc_after - kIntSize;
+ ASSERT(replacement_code->entry() ==
+ Assembler::target_address_at(call_target_address));
+ // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
+ // restore the conditional branch.
+ ASSERT(*(call_target_address - 3) == 0x90 && // nop
+ *(call_target_address - 2) == 0x90 && // nop
+ *(call_target_address - 1) == 0xe8); // call
+ *(call_target_address - 3) = 0x73; // jae
+ *(call_target_address - 2) = 0x05; // offset
+ Assembler::set_target_address_at(call_target_address,
+ check_code->entry());
}
diff --git a/deps/v8/src/x64/full-codegen-x64.cc b/deps/v8/src/x64/full-codegen-x64.cc
index 9144874c89..a28bcb79ff 100644
--- a/deps/v8/src/x64/full-codegen-x64.cc
+++ b/deps/v8/src/x64/full-codegen-x64.cc
@@ -43,6 +43,58 @@ namespace internal {
#define __ ACCESS_MASM(masm_)
+
+class JumpPatchSite BASE_EMBEDDED {
+ public:
+ explicit JumpPatchSite(MacroAssembler* masm)
+ : masm_(masm) {
+#ifdef DEBUG
+ info_emitted_ = false;
+#endif
+ }
+
+ ~JumpPatchSite() {
+ ASSERT(patch_site_.is_bound() == info_emitted_);
+ }
+
+ void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
+ __ testb(reg, Immediate(kSmiTagMask));
+ EmitJump(not_carry, target); // Always taken before patched.
+ }
+
+ void EmitJumpIfSmi(Register reg, NearLabel* target) {
+ __ testb(reg, Immediate(kSmiTagMask));
+ EmitJump(carry, target); // Never taken before patched.
+ }
+
+ void EmitPatchInfo() {
+ int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
+ ASSERT(is_int8(delta_to_patch_site));
+ __ testl(rax, Immediate(delta_to_patch_site));
+#ifdef DEBUG
+ info_emitted_ = true;
+#endif
+ }
+
+ bool is_bound() const { return patch_site_.is_bound(); }
+
+ private:
+ // jc will be patched with jz, jnc will become jnz.
+ void EmitJump(Condition cc, NearLabel* target) {
+ ASSERT(!patch_site_.is_bound() && !info_emitted_);
+ ASSERT(cc == carry || cc == not_carry);
+ __ bind(&patch_site_);
+ __ j(cc, target);
+ }
+
+ MacroAssembler* masm_;
+ Label patch_site_;
+#ifdef DEBUG
+ bool info_emitted_;
+#endif
+};
+
+
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
@@ -245,19 +297,22 @@ void FullCodeGenerator::EmitReturnSequence() {
// patch with the code required by the debugger.
__ movq(rsp, rbp);
__ pop(rbp);
- __ ret((scope()->num_parameters() + 1) * kPointerSize);
+
+ int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
+ __ Ret(arguments_bytes, rcx);
+
#ifdef ENABLE_DEBUGGER_SUPPORT
// Add padding that will be overwritten by a debugger breakpoint. We
- // have just generated "movq rsp, rbp; pop rbp; ret k" with length 7
+ // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
// (3 + 1 + 3).
const int kPadding = Assembler::kJSReturnSequenceLength - 7;
for (int i = 0; i < kPadding; ++i) {
masm_->int3();
}
- // Check that the size of the code used for returning matches what is
- // expected by the debugger.
- ASSERT_EQ(Assembler::kJSReturnSequenceLength,
- masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
+ // Check that the size of the code used for returning is large enough
+ // for the debugger's requirements.
+ ASSERT(Assembler::kJSReturnSequenceLength <=
+ masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}
}
@@ -659,18 +714,24 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
} else if (prop != NULL) {
if (function != NULL || mode == Variable::CONST) {
// We are declaring a function or constant that rewrites to a
- // property. Use (keyed) IC to set the initial value.
- VisitForStackValue(prop->obj());
+ // property. Use (keyed) IC to set the initial value. We
+ // cannot visit the rewrite because it's shared and we risk
+ // recording duplicate AST IDs for bailouts from optimized code.
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
if (function != NULL) {
- VisitForStackValue(prop->key());
+ __ push(rax);
VisitForAccumulatorValue(function);
- __ pop(rcx);
+ __ pop(rdx);
} else {
- VisitForAccumulatorValue(prop->key());
- __ movq(rcx, result_register());
- __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
+ __ movq(rdx, rax);
+ __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
}
- __ pop(rdx);
+ ASSERT(prop->key()->AsLiteral() != NULL &&
+ prop->key()->AsLiteral()->handle()->IsSmi());
+ __ Move(rcx, prop->key()->AsLiteral()->handle());
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
@@ -728,21 +789,25 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Perform the comparison as if via '==='.
__ movq(rdx, Operand(rsp, 0)); // Switch value.
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
+ JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
- Label slow_case;
- __ JumpIfNotBothSmi(rdx, rax, &slow_case);
- __ SmiCompare(rdx, rax);
+ NearLabel slow_case;
+ __ movq(rcx, rdx);
+ __ or_(rcx, rax);
+ patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
+
+ __ cmpq(rdx, rax);
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
__ jmp(clause->body_target()->entry_label());
__ bind(&slow_case);
}
- CompareFlags flags = inline_smi_code
- ? NO_SMI_COMPARE_IN_STUB
- : NO_COMPARE_FLAGS;
- CompareStub stub(equal, true, flags);
- __ CallStub(&stub);
+ // Record position before stub call for type feedback.
+ SetSourcePosition(clause->position());
+ Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
+ EmitCallIC(ic, &patch_site);
+
__ testq(rax, rax);
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
@@ -1522,16 +1587,17 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
// Do combined smi check of the operands. Left operand is on the
// stack (popped into rdx). Right operand is in rax but moved into
// rcx to make the shifts easier.
- Label done, stub_call, smi_case;
+ NearLabel done, stub_call, smi_case;
__ pop(rdx);
__ movq(rcx, rax);
- Condition smi = masm()->CheckBothSmi(rdx, rax);
- __ j(smi, &smi_case);
+ __ or_(rax, rdx);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJumpIfSmi(rax, &smi_case);
__ bind(&stub_call);
- TypeRecordingBinaryOpStub stub(op, mode);
__ movq(rax, rcx);
- __ CallStub(&stub);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
__ jmp(&done);
__ bind(&smi_case);
@@ -1575,9 +1641,9 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) {
- TypeRecordingBinaryOpStub stub(op, mode);
__ pop(rdx);
- __ CallStub(&stub);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
context()->Plug(rax);
}
@@ -1620,11 +1686,21 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
case KEYED_PROPERTY: {
__ push(rax); // Preserve value.
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
- __ movq(rcx, rax);
- __ pop(rdx);
- __ pop(rax);
+ if (prop->is_synthetic()) {
+ ASSERT(prop->obj()->AsVariableProxy() != NULL);
+ ASSERT(prop->key()->AsLiteral() != NULL);
+ { AccumulatorValueContext for_object(this);
+ EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
+ }
+ __ movq(rdx, rax);
+ __ Move(rcx, prop->key()->AsLiteral()->handle());
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ __ movq(rcx, rax);
+ __ pop(rdx);
+ }
+ __ pop(rax); // Restore value.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
@@ -1649,8 +1725,10 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// rcx, and the global object on the stack.
__ Move(rcx, var->name());
__ movq(rdx, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic(Builtins::builtin(is_strict()
+ ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
// Perform the assignment for non-const variables and for initialization
@@ -2982,37 +3060,47 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
Property* prop = expr->expression()->AsProperty();
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
- if (prop == NULL && var == NULL) {
- // Result of deleting non-property, non-variable reference is true.
- // The subexpression may have side effects.
- VisitForEffect(expr->expression());
- context()->Plug(true);
- } else if (var != NULL &&
- !var->is_global() &&
- var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
- // Result of deleting non-global, non-dynamic variables is false.
- // The subexpression does not have side effects.
- context()->Plug(false);
- } else {
- // Property or variable reference. Call the delete builtin with
- // object and property name as arguments.
- if (prop != NULL) {
+
+ if (prop != NULL) {
+ if (prop->is_synthetic()) {
+ // Result of deleting parameters is false, even when they rewrite
+ // to accesses on the arguments object.
+ context()->Plug(false);
+ } else {
VisitForStackValue(prop->obj());
VisitForStackValue(prop->key());
+ __ Push(Smi::FromInt(strict_mode_flag()));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
- } else if (var->is_global()) {
+ context()->Plug(rax);
+ }
+ } else if (var != NULL) {
+ // Delete of an unqualified identifier is disallowed in strict mode
+ // so this code can only be reached in non-strict mode.
+ ASSERT(strict_mode_flag() == kNonStrictMode);
+ if (var->is_global()) {
__ push(GlobalObjectOperand());
__ Push(var->name());
+ __ Push(Smi::FromInt(kNonStrictMode));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
+ context()->Plug(rax);
+ } else if (var->AsSlot() != NULL &&
+ var->AsSlot()->type() != Slot::LOOKUP) {
+ // Result of deleting non-global, non-dynamic variables is false.
+ // The subexpression does not have side effects.
+ context()->Plug(false);
} else {
- // Non-global variable. Call the runtime to delete from the
+ // Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
__ push(context_register());
__ Push(var->name());
__ CallRuntime(Runtime::kDeleteContextSlot, 2);
+ context()->Plug(rax);
}
- context()->Plug(rax);
+ } else {
+ // Result of deleting non-property, non-variable reference is true.
+ // The subexpression may have side effects.
+ VisitForEffect(expr->expression());
+ context()->Plug(true);
}
break;
}
@@ -3197,7 +3285,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
}
// Inline smi case if we are in a loop.
- Label stub_call, done;
+ NearLabel stub_call, done;
+ JumpPatchSite patch_site(masm_);
+
if (ShouldInlineSmiCase(expr->op())) {
if (expr->op() == Token::INC) {
__ SmiAddConstant(rax, rax, Smi::FromInt(1));
@@ -3207,8 +3297,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ j(overflow, &stub_call);
// We could eliminate this smi check if we split the code at
// the first smi check before calling ToNumber.
- is_smi = masm_->CheckSmi(rax);
- __ j(is_smi, &done);
+ patch_site.EmitJumpIfSmi(rax, &done);
__ bind(&stub_call);
// Call stub. Undo operation first.
@@ -3230,9 +3319,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(1));
}
- __ CallStub(&stub);
-
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
+
// Store the value returned in rax.
switch (assign_type) {
case VARIABLE:
@@ -3500,19 +3589,21 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
}
bool inline_smi_code = ShouldInlineSmiCase(op);
+ JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
- Label slow_case;
- __ JumpIfNotBothSmi(rax, rdx, &slow_case);
- __ SmiCompare(rdx, rax);
+ NearLabel slow_case;
+ __ movq(rcx, rdx);
+ __ or_(rcx, rax);
+ patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
+ __ cmpq(rdx, rax);
Split(cc, if_true, if_false, NULL);
__ bind(&slow_case);
}
- CompareFlags flags = inline_smi_code
- ? NO_SMI_COMPARE_IN_STUB
- : NO_COMPARE_FLAGS;
- CompareStub stub(cc, strict, flags);
- __ CallStub(&stub);
+ // Record position and call the compare IC.
+ SetSourcePosition(expr->position());
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ EmitCallIC(ic, &patch_site);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ testq(rax, rax);
@@ -3575,10 +3666,30 @@ Register FullCodeGenerator::context_register() {
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
+ switch (ic->kind()) {
+ case Code::LOAD_IC:
+ __ IncrementCounter(&Counters::named_load_full, 1);
+ break;
+ case Code::KEYED_LOAD_IC:
+ __ IncrementCounter(&Counters::keyed_load_full, 1);
+ break;
+ case Code::STORE_IC:
+ __ IncrementCounter(&Counters::named_store_full, 1);
+ break;
+ case Code::KEYED_STORE_IC:
+ __ IncrementCounter(&Counters::keyed_store_full, 1);
+ default:
+ break;
+ }
+
__ call(ic, mode);
// Crankshaft doesn't need patching of inlined loads and stores.
- if (V8::UseCrankshaft()) return;
+ // When compiling the snapshot we need to produce code that works
+ // with and without Crankshaft.
+ if (V8::UseCrankshaft() && !Serializer::enabled()) {
+ return;
+ }
// If we're calling a (keyed) load or store stub, we have to mark
// the call as containing no inlined code so we will not attempt to
@@ -3597,6 +3708,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
}
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ __ call(ic, RelocInfo::CODE_TARGET);
+ if (patch_site != NULL && patch_site->is_bound()) {
+ patch_site->EmitPatchInfo();
+ } else {
+ __ nop(); // Signals no inlined code.
+ }
+}
+
+
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
ASSERT(IsAligned(frame_offset, kPointerSize));
__ movq(Operand(rbp, frame_offset), value);
diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc
index e5a4bfcfb5..f8c40ab4e1 100644
--- a/deps/v8/src/x64/ic-x64.cc
+++ b/deps/v8/src/x64/ic-x64.cc
@@ -108,6 +108,9 @@ static void GenerateStringDictionaryProbes(MacroAssembler* masm,
Register name,
Register r0,
Register r1) {
+ // Assert that name contains a string.
+ if (FLAG_debug_code) __ AbortIfNotString(name);
+
// Compute the capacity mask.
const int kCapacityOffset =
StringDictionary::kHeaderSize +
@@ -819,27 +822,18 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// rbx: receiver's elements array
// rcx: index, zero-extended.
__ bind(&check_pixel_array);
- __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
- Heap::kPixelArrayMapRootIndex);
- __ j(not_equal, &slow);
- // Check that the value is a smi. If a conversion is needed call into the
- // runtime to convert and clamp.
- __ JumpIfNotSmi(rax, &slow);
- __ cmpl(rcx, FieldOperand(rbx, PixelArray::kLengthOffset));
- __ j(above_equal, &slow);
- // No more bailouts to slow case on this path, so key not needed.
- __ SmiToInteger32(rdi, rax);
- { // Clamp the value to [0..255].
- NearLabel done;
- __ testl(rdi, Immediate(0xFFFFFF00));
- __ j(zero, &done);
- __ setcc(negative, rdi); // 1 if negative, 0 if positive.
- __ decb(rdi); // 0 if negative, 255 if positive.
- __ bind(&done);
- }
- __ movq(rbx, FieldOperand(rbx, PixelArray::kExternalPointerOffset));
- __ movb(Operand(rbx, rcx, times_1, 0), rdi);
- __ ret(0);
+ GenerateFastPixelArrayStore(masm,
+ rdx,
+ rcx,
+ rax,
+ rbx,
+ rdi,
+ false,
+ true,
+ NULL,
+ &slow,
+ &slow,
+ &slow);
// Extra capacity case: Check if there is extra capacity to
// perform the store and update the length. Used for adding one
@@ -1233,7 +1227,13 @@ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// rsp[(argc + 1) * 8] : argument 0 = receiver
// -----------------------------------
+ // Check if the name is a string.
+ Label miss;
+ __ JumpIfSmi(rcx, &miss);
+ Condition cond = masm->IsObjectStringType(rcx, rax, rax);
+ __ j(NegateCondition(cond), &miss);
GenerateCallNormal(masm, argc);
+ __ bind(&miss);
GenerateMiss(masm, argc);
}
@@ -1473,7 +1473,8 @@ void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
}
-void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
+void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
+ Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : name
@@ -1484,7 +1485,8 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
// Get the receiver from the stack and probe the stub cache.
Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
NOT_IN_LOOP,
- MONOMORPHIC);
+ MONOMORPHIC,
+ extra_ic_state);
StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg);
// Cache miss: Jump to runtime.
@@ -1673,11 +1675,23 @@ Condition CompareIC::ComputeCondition(Token::Value op) {
}
+static bool HasInlinedSmiCode(Address address) {
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+
+ // If the instruction following the call is not a test al, nothing
+ // was inlined.
+ return *test_instruction_address == Assembler::kTestAlByte;
+}
+
+
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope;
Handle<Code> rewritten;
State previous_state = GetState();
- State state = TargetState(previous_state, false, x, y);
+
+ State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
rewritten = stub.GetCode();
@@ -1695,11 +1709,43 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
Token::Name(op_));
}
#endif
+
+ // Activate inlined smi code.
+ if (previous_state == UNINITIALIZED) {
+ PatchInlinedSmiCode(address());
+ }
}
void PatchInlinedSmiCode(Address address) {
- // Disabled, then patched inline smi code is not implemented on X64.
- // So we do nothing in this case.
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+
+ // If the instruction following the call is not a test al, nothing
+ // was inlined.
+ if (*test_instruction_address != Assembler::kTestAlByte) {
+ ASSERT(*test_instruction_address == Assembler::kNopByte);
+ return;
+ }
+
+ Address delta_address = test_instruction_address + 1;
+ // The delta to the start of the map check instruction and the
+ // condition code uses at the patched jump.
+ int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
+ if (FLAG_trace_ic) {
+ PrintF("[ patching ic at %p, test=%p, delta=%d\n",
+ address, test_instruction_address, delta);
+ }
+
+ // Patch with a short conditional jump. There must be a
+ // short jump-if-carry/not-carry at this position.
+ Address jmp_address = test_instruction_address - delta;
+ ASSERT(*jmp_address == Assembler::kJncShortOpcode ||
+ *jmp_address == Assembler::kJcShortOpcode);
+ Condition cc = *jmp_address == Assembler::kJncShortOpcode
+ ? not_zero
+ : zero;
+ *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc
index 48d413016b..e1ebb3eaca 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.cc
+++ b/deps/v8/src/x64/lithium-codegen-x64.cc
@@ -593,7 +593,56 @@ void LCodeGen::DoParameter(LParameter* instr) {
void LCodeGen::DoCallStub(LCallStub* instr) {
- Abort("Unimplemented: %s", "DoCallStub");
+ ASSERT(ToRegister(instr->result()).is(rax));
+ switch (instr->hydrogen()->major_key()) {
+ case CodeStub::RegExpConstructResult: {
+ RegExpConstructResultStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::RegExpExec: {
+ RegExpExecStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::SubString: {
+ SubStringStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::StringCharAt: {
+ // TODO(1116): Add StringCharAt stub to x64.
+ Abort("Unimplemented: %s", "StringCharAt Stub");
+ break;
+ }
+ case CodeStub::MathPow: {
+ // TODO(1115): Add MathPow stub to x64.
+ Abort("Unimplemented: %s", "MathPow Stub");
+ break;
+ }
+ case CodeStub::NumberToString: {
+ NumberToStringStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::StringAdd: {
+ StringAddStub stub(NO_STRING_ADD_FLAGS);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::StringCompare: {
+ StringCompareStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::TranscendentalCache: {
+ TranscendentalCacheStub stub(instr->transcendental_type());
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ default:
+ UNREACHABLE();
+ }
}
@@ -608,11 +657,92 @@ void LCodeGen::DoModI(LModI* instr) {
void LCodeGen::DoDivI(LDivI* instr) {
- Abort("Unimplemented: %s", "DoDivI");}
+ LOperand* right = instr->InputAt(1);
+ ASSERT(ToRegister(instr->result()).is(rax));
+ ASSERT(ToRegister(instr->InputAt(0)).is(rax));
+ ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
+ ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));
+
+ Register left_reg = rax;
+
+ // Check for x / 0.
+ Register right_reg = ToRegister(right);
+ if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
+ __ testl(right_reg, right_reg);
+ DeoptimizeIf(zero, instr->environment());
+ }
+
+ // Check for (0 / -x) that will produce negative zero.
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ NearLabel left_not_zero;
+ __ testl(left_reg, left_reg);
+ __ j(not_zero, &left_not_zero);
+ __ testl(right_reg, right_reg);
+ DeoptimizeIf(sign, instr->environment());
+ __ bind(&left_not_zero);
+ }
+
+ // Check for (-kMinInt / -1).
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
+ NearLabel left_not_min_int;
+ __ cmpl(left_reg, Immediate(kMinInt));
+ __ j(not_zero, &left_not_min_int);
+ __ cmpl(right_reg, Immediate(-1));
+ DeoptimizeIf(zero, instr->environment());
+ __ bind(&left_not_min_int);
+ }
+
+ // Sign extend to rdx.
+ __ cdq();
+ __ idivl(right_reg);
+
+ // Deoptimize if remainder is not 0.
+ __ testl(rdx, rdx);
+ DeoptimizeIf(not_zero, instr->environment());
+}
void LCodeGen::DoMulI(LMulI* instr) {
- Abort("Unimplemented: %s", "DoMultI");}
+ Register left = ToRegister(instr->InputAt(0));
+ LOperand* right = instr->InputAt(1);
+
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ __ movl(kScratchRegister, left);
+ }
+
+ if (right->IsConstantOperand()) {
+ int right_value = ToInteger32(LConstantOperand::cast(right));
+ __ imull(left, left, Immediate(right_value));
+ } else if (right->IsStackSlot()) {
+ __ imull(left, ToOperand(right));
+ } else {
+ __ imull(left, ToRegister(right));
+ }
+
+ if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
+ DeoptimizeIf(overflow, instr->environment());
+ }
+
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ // Bail out if the result is supposed to be negative zero.
+ NearLabel done;
+ __ testl(left, left);
+ __ j(not_zero, &done);
+ if (right->IsConstantOperand()) {
+ if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
+ DeoptimizeIf(no_condition, instr->environment());
+ }
+ } else if (right->IsStackSlot()) {
+ __ or_(kScratchRegister, ToOperand(right));
+ DeoptimizeIf(sign, instr->environment());
+ } else {
+ // Test the non-zero operand for negative sign.
+ __ or_(kScratchRegister, ToRegister(right));
+ DeoptimizeIf(sign, instr->environment());
+ }
+ __ bind(&done);
+ }
+}
void LCodeGen::DoBitI(LBitI* instr) {
@@ -758,21 +888,15 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
ASSERT(instr->result()->IsDoubleRegister());
XMMRegister res = ToDoubleRegister(instr->result());
double v = instr->value();
+ uint64_t int_val = BitCast<uint64_t, double>(v);
// Use xor to produce +0.0 in a fast and compact way, but avoid to
// do so if the constant is -0.0.
- if (BitCast<uint64_t, double>(v) == 0) {
+ if (int_val == 0) {
__ xorpd(res, res);
} else {
Register tmp = ToRegister(instr->TempAt(0));
- int32_t v_int32 = static_cast<int32_t>(v);
- if (static_cast<double>(v_int32) == v) {
- __ movl(tmp, Immediate(v_int32));
- __ cvtlsi2sd(res, tmp);
- } else {
- uint64_t int_val = BitCast<uint64_t, double>(v);
- __ Set(tmp, int_val);
- __ movd(res, tmp);
- }
+ __ Set(tmp, int_val);
+ __ movq(res, tmp);
}
}
@@ -797,6 +921,13 @@ void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
}
+void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) {
+ Register result = ToRegister(instr->result());
+ Register array = ToRegister(instr->InputAt(0));
+ __ movq(result, FieldOperand(array, PixelArray::kLengthOffset));
+}
+
+
void LCodeGen::DoValueOf(LValueOf* instr) {
Abort("Unimplemented: %s", "DoValueOf");
}
@@ -810,7 +941,13 @@ void LCodeGen::DoBitNotI(LBitNotI* instr) {
void LCodeGen::DoThrow(LThrow* instr) {
- Abort("Unimplemented: %s", "DoThrow");
+ __ push(ToRegister(instr->InputAt(0)));
+ CallRuntime(Runtime::kThrow, 1, instr);
+
+ if (FLAG_debug_code) {
+ Comment("Unreachable code.");
+ __ int3();
+ }
}
@@ -835,7 +972,30 @@ void LCodeGen::DoAddI(LAddI* instr) {
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
- Abort("Unimplemented: %s", "DoArithmeticD");
+ LOperand* left = instr->InputAt(0);
+ LOperand* right = instr->InputAt(1);
+ // All operations except MOD are computed in-place.
+ ASSERT(instr->op() == Token::MOD || left->Equals(instr->result()));
+ switch (instr->op()) {
+ case Token::ADD:
+ __ addsd(ToDoubleRegister(left), ToDoubleRegister(right));
+ break;
+ case Token::SUB:
+ __ subsd(ToDoubleRegister(left), ToDoubleRegister(right));
+ break;
+ case Token::MUL:
+ __ mulsd(ToDoubleRegister(left), ToDoubleRegister(right));
+ break;
+ case Token::DIV:
+ __ divsd(ToDoubleRegister(left), ToDoubleRegister(right));
+ break;
+ case Token::MOD:
+ Abort("Unimplemented: %s", "DoArithmeticD MOD");
+ break;
+ default:
+ UNREACHABLE();
+ break;
+ }
}
@@ -844,8 +1004,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
ASSERT(ToRegister(instr->InputAt(1)).is(rax));
ASSERT(ToRegister(instr->result()).is(rax));
- GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, NO_GENERIC_BINARY_FLAGS);
- stub.SetArgsInRegisters();
+ TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
@@ -963,7 +1122,11 @@ void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
- Abort("Unimplemented: %s", "DoDeferredStackCheck");
+ __ Pushad();
+ __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+ __ Popad();
}
@@ -1022,9 +1185,9 @@ void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
__ cmpl(ToOperand(left), Immediate(value));
}
} else if (right->IsRegister()) {
- __ cmpq(ToRegister(left), ToRegister(right));
+ __ cmpl(ToRegister(left), ToRegister(right));
} else {
- __ cmpq(ToRegister(left), ToOperand(right));
+ __ cmpl(ToRegister(left), ToOperand(right));
}
}
@@ -1511,12 +1674,23 @@ void LCodeGen::DoReturn(LReturn* instr) {
}
__ movq(rsp, rbp);
__ pop(rbp);
- __ ret((ParameterCount() + 1) * kPointerSize);
+ __ Ret((ParameterCount() + 1) * kPointerSize, rcx);
}
void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
- Abort("Unimplemented: %s", "DoLoadGlobal");
+ Register result = ToRegister(instr->result());
+ if (result.is(rax)) {
+ __ load_rax(instr->hydrogen()->cell().location(),
+ RelocInfo::GLOBAL_PROPERTY_CELL);
+ } else {
+ __ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ movq(result, Operand(result, 0));
+ }
+ if (instr->hydrogen()->check_hole_value()) {
+ __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+ }
}
@@ -1534,9 +1708,7 @@ void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
// been deleted from the property dictionary. In that case, we need
// to update the property details in the property dictionary to mark
// it as no longer deleted. We deoptimize in that case.
- __ movq(temp,
- Handle<Object>::cast(instr->hydrogen()->cell()),
- RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ movq(temp, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
if (check_hole) {
__ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex);
DeoptimizeIf(equal, instr->environment());
@@ -1563,25 +1735,69 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
- Abort("Unimplemented: %s", "DoLoadNamedGeneric");
+ ASSERT(ToRegister(instr->object()).is(rax));
+ ASSERT(ToRegister(instr->result()).is(rax));
+
+ __ Move(rcx, instr->name());
+ Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
- Abort("Unimplemented: %s", "DoLoadFunctionPrototype");
+ Register function = ToRegister(instr->function());
+ Register result = ToRegister(instr->result());
+
+ // Check that the function really is a function.
+ __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
+ DeoptimizeIf(not_equal, instr->environment());
+
+ // Check whether the function has an instance prototype.
+ NearLabel non_instance;
+ __ testb(FieldOperand(result, Map::kBitFieldOffset),
+ Immediate(1 << Map::kHasNonInstancePrototype));
+ __ j(not_zero, &non_instance);
+
+ // Get the prototype or initial map from the function.
+ __ movq(result,
+ FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
+
+ // Check that the function has a prototype or an initial map.
+ __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+
+ // If the function does not have an initial map, we're done.
+ NearLabel done;
+ __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
+ __ j(not_equal, &done);
+
+ // Get the prototype from the initial map.
+ __ movq(result, FieldOperand(result, Map::kPrototypeOffset));
+ __ jmp(&done);
+
+ // Non-instance prototype: Fetch prototype from constructor field
+ // in the function's map.
+ __ bind(&non_instance);
+ __ movq(result, FieldOperand(result, Map::kConstructorOffset));
+
+ // All done.
+ __ bind(&done);
}
void LCodeGen::DoLoadElements(LLoadElements* instr) {
- ASSERT(instr->result()->Equals(instr->InputAt(0)));
- Register reg = ToRegister(instr->InputAt(0));
- __ movq(reg, FieldOperand(reg, JSObject::kElementsOffset));
+ Register result = ToRegister(instr->result());
+ Register input = ToRegister(instr->InputAt(0));
+ __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
if (FLAG_debug_code) {
NearLabel done;
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
+ __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
Factory::fixed_array_map());
__ j(equal, &done);
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
+ __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
+ Factory::pixel_array_map());
+ __ j(equal, &done);
+ __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
Factory::fixed_cow_array_map());
__ Check(equal, "Check for fast elements failed.");
__ bind(&done);
@@ -1589,8 +1805,29 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
}
+void LCodeGen::DoLoadPixelArrayExternalPointer(
+ LLoadPixelArrayExternalPointer* instr) {
+ Register result = ToRegister(instr->result());
+ Register input = ToRegister(instr->InputAt(0));
+ __ movq(result, FieldOperand(input, PixelArray::kExternalPointerOffset));
+}
+
+
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
- Abort("Unimplemented: %s", "DoAccessArgumentsAt");
+ Register arguments = ToRegister(instr->arguments());
+ Register length = ToRegister(instr->length());
+ Register result = ToRegister(instr->result());
+
+ if (instr->index()->IsRegister()) {
+ __ subl(length, ToRegister(instr->index()));
+ } else {
+ __ subl(length, ToOperand(instr->index()));
+ }
+ DeoptimizeIf(below_equal, instr->environment());
+
+ // There are two words between the frame pointer and the last argument.
+ // Subtracting from length accounts for one of them add one more.
+ __ movq(result, Operand(arguments, length, times_pointer_size, kPointerSize));
}
@@ -1612,18 +1849,68 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
}
+void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) {
+ Register external_elements = ToRegister(instr->external_pointer());
+ Register key = ToRegister(instr->key());
+ Register result = ToRegister(instr->result());
+ ASSERT(result.is(external_elements));
+
+ // Load the result.
+ __ movzxbq(result, Operand(external_elements, key, times_1, 0));
+}
+
+
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
Abort("Unimplemented: %s", "DoLoadKeyedGeneric");
}
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
- Abort("Unimplemented: %s", "DoArgumentsElements");
+ Register result = ToRegister(instr->result());
+
+ // Check for arguments adapter frame.
+ NearLabel done, adapted;
+ __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+ __ SmiCompare(Operand(result, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ j(equal, &adapted);
+
+ // No arguments adaptor frame.
+ __ movq(result, rbp);
+ __ jmp(&done);
+
+ // Arguments adaptor frame present.
+ __ bind(&adapted);
+ __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+
+ // Result is the frame pointer for the frame if not adapted and for the real
+ // frame below the adaptor frame if adapted.
+ __ bind(&done);
}
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
- Abort("Unimplemented: %s", "DoArgumentsLength");
+ Register result = ToRegister(instr->result());
+
+ NearLabel done;
+
+ // If no arguments adaptor frame the number of arguments is fixed.
+ if (instr->InputAt(0)->IsRegister()) {
+ __ cmpq(rbp, ToRegister(instr->InputAt(0)));
+ } else {
+ __ cmpq(rbp, ToOperand(instr->InputAt(0)));
+ }
+ __ movq(result, Immediate(scope()->num_parameters()));
+ __ j(equal, &done);
+
+ // Arguments adaptor frame present. Get argument length from there.
+ __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+ __ movq(result, Operand(result,
+ ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ SmiToInteger32(result, result);
+
+ // Argument length is in result register.
+ __ bind(&done);
}
@@ -1656,6 +1943,12 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
}
+void LCodeGen::DoContext(LContext* instr) {
+ Register result = ToRegister(instr->result());
+ __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset));
+}
+
+
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Register result = ToRegister(instr->result());
__ movq(result, GlobalObjectOperand());
@@ -1773,7 +2066,13 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
void LCodeGen::DoCallNamed(LCallNamed* instr) {
- Abort("Unimplemented: %s", "DoCallNamed");
+ ASSERT(ToRegister(instr->result()).is(rax));
+
+ int arity = instr->arity();
+ Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+ __ Move(rcx, instr->name());
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
@@ -1783,7 +2082,12 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
- Abort("Unimplemented: %s", "DoCallGlobal");
+ ASSERT(ToRegister(instr->result()).is(rax));
+ int arity = instr->arity();
+ Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
+ __ Move(rcx, instr->name());
+ CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
@@ -1805,7 +2109,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
- Abort("Unimplemented: %s", "DoCallRuntime");
+ CallRuntime(instr->function(), instr->arity(), instr);
}
@@ -1855,7 +2159,33 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
- Abort("Unimplemented: %s", "DoStoreKeyedFastElement");
+ Register value = ToRegister(instr->value());
+ Register elements = ToRegister(instr->object());
+ Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
+
+ // Do the store.
+ if (instr->key()->IsConstantOperand()) {
+ ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+ LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+ int offset =
+ ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
+ __ movq(FieldOperand(elements, offset), value);
+ } else {
+ __ movq(FieldOperand(elements,
+ key,
+ times_pointer_size,
+ FixedArray::kHeaderSize),
+ value);
+ }
+
+ if (instr->hydrogen()->NeedsWriteBarrier()) {
+ // Compute address of modified element and store it into key register.
+ __ lea(key, FieldOperand(elements,
+ key,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ RecordWrite(elements, key, value);
+ }
}
@@ -1864,12 +2194,23 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
}
+void LCodeGen::DoStringLength(LStringLength* instr) {
+ Register string = ToRegister(instr->string());
+ Register result = ToRegister(instr->result());
+ __ movq(result, FieldOperand(string, String::kLengthOffset));
+}
+
+
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister() || input->IsStackSlot());
LOperand* output = instr->result();
ASSERT(output->IsDoubleRegister());
- __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
+ if (input->IsRegister()) {
+ __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
+ } else {
+ __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
+ }
}
@@ -1926,12 +2267,21 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
void LCodeGen::DoSmiTag(LSmiTag* instr) {
- Abort("Unimplemented: %s", "DoSmiTag");
+ ASSERT(instr->InputAt(0)->Equals(instr->result()));
+ Register input = ToRegister(instr->InputAt(0));
+ ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
+ __ Integer32ToSmi(input, input);
}
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
- Abort("Unimplemented: %s", "DoSmiUntag");
+ ASSERT(instr->InputAt(0)->Equals(instr->result()));
+ Register input = ToRegister(instr->InputAt(0));
+ if (instr->needs_check()) {
+ Condition is_smi = __ CheckSmi(input);
+ DeoptimizeIf(NegateCondition(is_smi), instr->environment());
+ }
+ __ SmiToInteger32(input, input);
}
@@ -1963,7 +2313,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
// Smi to XMM conversion
__ bind(&load_smi);
- __ SmiToInteger32(kScratchRegister, input_reg); // Untag smi first.
+ __ SmiToInteger32(kScratchRegister, input_reg);
__ cvtlsi2sd(result_reg, kScratchRegister);
__ bind(&done);
}
@@ -2040,7 +2390,15 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
- Abort("Unimplemented: %s", "DoNumberUntagD");
+ LOperand* input = instr->InputAt(0);
+ ASSERT(input->IsRegister());
+ LOperand* result = instr->result();
+ ASSERT(result->IsDoubleRegister());
+
+ Register input_reg = ToRegister(input);
+ XMMRegister result_reg = ToDoubleRegister(result);
+
+ EmitNumberUntagD(input_reg, result_reg, instr->environment());
}
@@ -2110,7 +2468,14 @@ void LCodeGen::DoCheckMap(LCheckMap* instr) {
void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
- Abort("Unimplemented: %s", "LoadHeapObject");
+ if (Heap::InNewSpace(*object)) {
+ Handle<JSGlobalPropertyCell> cell =
+ Factory::NewJSGlobalPropertyCell(object);
+ __ movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ movq(result, Operand(result, 0));
+ } else {
+ __ Move(result, object);
+ }
}
@@ -2219,6 +2584,54 @@ void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
}
+void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
+ Register result = ToRegister(instr->result());
+ NearLabel true_label;
+ NearLabel false_label;
+ NearLabel done;
+
+ EmitIsConstructCall(result);
+ __ j(equal, &true_label);
+
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
+ __ jmp(&done);
+
+ __ bind(&true_label);
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
+
+
+ __ bind(&done);
+}
+
+
+void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
+ Register temp = ToRegister(instr->TempAt(0));
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ EmitIsConstructCall(temp);
+ EmitBranch(true_block, false_block, equal);
+}
+
+
+void LCodeGen::EmitIsConstructCall(Register temp) {
+ // Get the frame pointer for the calling frame.
+ __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+
+ // Skip the arguments adaptor frame if it exists.
+ NearLabel check_frame_marker;
+ __ SmiCompare(Operand(temp, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ j(not_equal, &check_frame_marker);
+ __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
+
+ // Check the marker in the calling frame.
+ __ bind(&check_frame_marker);
+ __ SmiCompare(Operand(temp, StandardFrameConstants::kMarkerOffset),
+ Smi::FromInt(StackFrame::CONSTRUCT));
+}
+
+
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
Register input = ToRegister(instr->InputAt(0));
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -2328,7 +2741,19 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
- Abort("Unimplemented: %s", "DoOsrEntry");
+ // This is a pseudo-instruction that ensures that the environment here is
+ // properly registered for deoptimization and records the assembler's PC
+ // offset.
+ LEnvironment* environment = instr->environment();
+ environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
+ instr->SpilledDoubleRegisterArray());
+
+ // If the environment were already registered, we would have no way of
+ // backpatching it with the spill slot operands.
+ ASSERT(!environment->HasBeenRegistered());
+ RegisterEnvironmentForDeoptimization(environment);
+ ASSERT(osr_pc_offset_ == -1);
+ osr_pc_offset_ = masm()->pc_offset();
}
#undef __
diff --git a/deps/v8/src/x64/lithium-codegen-x64.h b/deps/v8/src/x64/lithium-codegen-x64.h
index cbcc5c8b0c..6f8f06e345 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.h
+++ b/deps/v8/src/x64/lithium-codegen-x64.h
@@ -221,6 +221,10 @@ class LCodeGen BASE_EMBEDDED {
Label* is_not_object,
Label* is_object);
+ // Emits optimized code for %_IsConstructCall().
+ // Caller should branch on equal condition.
+ void EmitIsConstructCall(Register temp);
+
LChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
diff --git a/deps/v8/src/x64/lithium-x64.cc b/deps/v8/src/x64/lithium-x64.cc
index 12b952d226..fba29a69e5 100644
--- a/deps/v8/src/x64/lithium-x64.cc
+++ b/deps/v8/src/x64/lithium-x64.cc
@@ -74,15 +74,13 @@ void LInstruction::VerifyCall() {
// Call instructions can use only fixed registers as
// temporaries and outputs because all registers
// are blocked by the calling convention.
- // Inputs can use either fixed register or have a short lifetime (be
- // used at start of the instruction).
+ // Inputs must use a fixed register.
ASSERT(Output() == NULL ||
LUnallocated::cast(Output())->HasFixedPolicy() ||
!LUnallocated::cast(Output())->HasRegisterPolicy());
for (UseIterator it(this); it.HasNext(); it.Advance()) {
LOperand* operand = it.Next();
ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
- LUnallocated::cast(operand)->IsUsedAtStart() ||
!LUnallocated::cast(operand)->HasRegisterPolicy());
}
for (TempIterator it(this); it.HasNext(); it.Advance()) {
@@ -845,8 +843,16 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
HArithmeticBinaryOperation* instr) {
- Abort("Unimplemented: %s", "DoArithmeticD");
- return NULL;
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->left()->representation().IsDouble());
+ ASSERT(instr->right()->representation().IsDouble());
+ if (op == Token::MOD) {
+ Abort("Unimplemented: %s", "DoArithmeticD MOD");
+ }
+ LOperand* left = UseRegisterAtStart(instr->left());
+ LOperand* right = UseRegisterAtStart(instr->right());
+ LArithmeticD* result = new LArithmeticD(op, left, right);
+ return DefineSameAsFirst(result);
}
@@ -1082,6 +1088,8 @@ LInstruction* LChunkBuilder::DoTest(HTest* instr) {
} else if (v->IsTypeofIs()) {
HTypeofIs* typeof_is = HTypeofIs::cast(v);
return new LTypeofIsAndBranch(UseTempRegister(typeof_is->value()));
+ } else if (v->IsIsConstructCall()) {
+ return new LIsConstructCallAndBranch(TempRegister());
} else {
if (v->IsConstant()) {
if (HConstant::cast(v)->handle()->IsTrue()) {
@@ -1106,14 +1114,12 @@ LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
- Abort("Unimplemented: %s", "DoArgumentsLength");
- return NULL;
+ return DefineAsRegister(new LArgumentsLength(Use(length->value())));
}
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
- Abort("Unimplemented: %s", "DoArgumentsElements");
- return NULL;
+ return DefineAsRegister(new LArgumentsElements);
}
@@ -1144,8 +1150,7 @@ LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
LInstruction* LChunkBuilder::DoContext(HContext* instr) {
- Abort("Unimplemented: DoContext");
- return NULL;
+ return DefineAsRegister(new LContext);
}
@@ -1191,8 +1196,8 @@ LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
- Abort("Unimplemented: %s", "DoCallGlobal");
- return NULL;
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallGlobal, rax), instr);
}
@@ -1262,8 +1267,20 @@ LInstruction* LChunkBuilder::DoBitXor(HBitXor* instr) {
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
- Abort("Unimplemented: %s", "DoDiv");
- return NULL;
+ if (instr->representation().IsDouble()) {
+ return DoArithmeticD(Token::DIV, instr);
+ } else if (instr->representation().IsInteger32()) {
+ // The temporary operand is necessary to ensure that right is not allocated
+ // into rdx.
+ LOperand* temp = FixedTemp(rdx);
+ LOperand* dividend = UseFixed(instr->left(), rax);
+ LOperand* divisor = UseRegister(instr->right());
+ LDivI* result = new LDivI(dividend, divisor, temp);
+ return AssignEnvironment(DefineFixed(result, rax));
+ } else {
+ ASSERT(instr->representation().IsTagged());
+ return DoArithmeticT(Token::DIV, instr);
+ }
}
@@ -1274,8 +1291,19 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
- Abort("Unimplemented: %s", "DoMul");
- return NULL;
+ if (instr->representation().IsInteger32()) {
+ ASSERT(instr->left()->representation().IsInteger32());
+ ASSERT(instr->right()->representation().IsInteger32());
+ LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
+ LOperand* right = UseOrConstant(instr->MostConstantOperand());
+ LMulI* mul = new LMulI(left, right);
+ return AssignEnvironment(DefineSameAsFirst(mul));
+ } else if (instr->representation().IsDouble()) {
+ return DoArithmeticD(Token::MUL, instr);
+ } else {
+ ASSERT(instr->representation().IsTagged());
+ return DoArithmeticT(Token::MUL, instr);
+ }
}
@@ -1313,7 +1341,7 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
}
return result;
} else if (instr->representation().IsDouble()) {
- Abort("Unimplemented: %s", "DoAdd on Doubles");
+ return DoArithmeticD(Token::ADD, instr);
} else {
ASSERT(instr->representation().IsTagged());
return DoArithmeticT(Token::ADD, instr);
@@ -1394,6 +1422,13 @@ LInstruction* LChunkBuilder::DoHasInstanceType(HHasInstanceType* instr) {
}
+LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
+ HGetCachedArrayIndex* instr) {
+ Abort("Unimplemented: %s", "DoGetCachedArrayIndex");
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoHasCachedArrayIndex(
HHasCachedArrayIndex* instr) {
Abort("Unimplemented: %s", "DoHasCachedArrayIndex");
@@ -1419,6 +1454,12 @@ LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) {
}
+LInstruction* LChunkBuilder::DoPixelArrayLength(HPixelArrayLength* instr) {
+ LOperand* array = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LPixelArrayLength(array));
+}
+
+
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
Abort("Unimplemented: %s", "DoValueOf");
return NULL;
@@ -1439,8 +1480,8 @@ LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
- Abort("Unimplemented: %s", "DoThrow");
- return NULL;
+ LOperand* value = UseFixed(instr->value(), rax);
+ return MarkAsCall(new LThrow(value), instr);
}
@@ -1552,14 +1593,12 @@ LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- int32_t value = instr->Integer32Value();
- return DefineAsRegister(new LConstantI(value));
+ return DefineAsRegister(new LConstantI);
} else if (r.IsDouble()) {
- double value = instr->DoubleValue();
LOperand* temp = TempRegister();
- return DefineAsRegister(new LConstantD(value, temp));
+ return DefineAsRegister(new LConstantD(temp));
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT(instr->handle()));
+ return DefineAsRegister(new LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1602,21 +1641,29 @@ LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
- Abort("Unimplemented: %s", "DoLoadNamedGeneric");
- return NULL;
+ LOperand* object = UseFixed(instr->object(), rax);
+ LLoadNamedGeneric* result = new LLoadNamedGeneric(object);
+ return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
HLoadFunctionPrototype* instr) {
- Abort("Unimplemented: %s", "DoLoadFunctionPrototype");
- return NULL;
+ return AssignEnvironment(DefineAsRegister(
+ new LLoadFunctionPrototype(UseRegister(instr->function()))));
}
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineSameAsFirst(new LLoadElements(input));
+ return DefineAsRegister(new LLoadElements(input));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadPixelArrayExternalPointer(
+ HLoadPixelArrayExternalPointer* instr) {
+ LOperand* input = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LLoadPixelArrayExternalPointer(input));
}
@@ -1631,6 +1678,19 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
}
+LInstruction* LChunkBuilder::DoLoadPixelArrayElement(
+ HLoadPixelArrayElement* instr) {
+ ASSERT(instr->representation().IsInteger32());
+ ASSERT(instr->key()->representation().IsInteger32());
+ LOperand* external_pointer =
+ UseRegisterAtStart(instr->external_pointer());
+ LOperand* key = UseRegisterAtStart(instr->key());
+ LLoadPixelArrayElement* result =
+ new LLoadPixelArrayElement(external_pointer, key);
+ return DefineSameAsFirst(result);
+}
+
+
LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
Abort("Unimplemented: %s", "DoLoadKeyedGeneric");
return NULL;
@@ -1639,8 +1699,20 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
HStoreKeyedFastElement* instr) {
- Abort("Unimplemented: %s", "DoStoreKeyedFastElement");
- return NULL;
+ bool needs_write_barrier = instr->NeedsWriteBarrier();
+ ASSERT(instr->value()->representation().IsTagged());
+ ASSERT(instr->object()->representation().IsTagged());
+ ASSERT(instr->key()->representation().IsInteger32());
+
+ LOperand* obj = UseTempRegister(instr->object());
+ LOperand* val = needs_write_barrier
+ ? UseTempRegister(instr->value())
+ : UseRegisterAtStart(instr->value());
+ LOperand* key = needs_write_barrier
+ ? UseTempRegister(instr->key())
+ : UseRegisterOrConstantAtStart(instr->key());
+
+ return AssignEnvironment(new LStoreKeyedFastElement(obj, key, val));
}
@@ -1683,8 +1755,8 @@ LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
- Abort("Unimplemented: %s", "DoStringLength");
- return NULL;
+ LOperand* string = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LStringLength(string));
}
@@ -1734,8 +1806,8 @@ LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
- Abort("Unimplemented: %s", "DoCallStub");
- return NULL;
+ argument_count_ -= instr->argument_count();
+ return MarkAsCall(DefineFixed(new LCallStub, rax), instr);
}
@@ -1746,8 +1818,11 @@ LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
- Abort("Unimplemented: %s", "DoAccessArgumentsAt");
- return NULL;
+ LOperand* arguments = UseRegister(instr->arguments());
+ LOperand* length = UseTempRegister(instr->length());
+ LOperand* index = Use(instr->index());
+ LAccessArgumentsAt* result = new LAccessArgumentsAt(arguments, length, index);
+ return AssignEnvironment(DefineAsRegister(result));
}
@@ -1762,6 +1837,12 @@ LInstruction* LChunkBuilder::DoTypeofIs(HTypeofIs* instr) {
return NULL;
}
+
+LInstruction* LChunkBuilder::DoIsConstructCall(HIsConstructCall* instr) {
+ return DefineAsRegister(new LIsConstructCall);
+}
+
+
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
HEnvironment* env = current_block_->last_environment();
ASSERT(env != NULL);
diff --git a/deps/v8/src/x64/lithium-x64.h b/deps/v8/src/x64/lithium-x64.h
index 64d141e335..abffe50b19 100644
--- a/deps/v8/src/x64/lithium-x64.h
+++ b/deps/v8/src/x64/lithium-x64.h
@@ -39,119 +39,8 @@ namespace internal {
// Forward declarations.
class LCodeGen;
-
-// Type hierarchy:
-//
-// LInstruction
-// LTemplateInstruction
-// LControlInstruction
-// LBranch
-// LClassOfTestAndBranch
-// LCmpJSObjectEqAndBranch
-// LCmpIDAndBranch
-// LHasCachedArrayIndexAndBranch
-// LHasInstanceTypeAndBranch
-// LInstanceOfAndBranch
-// LIsNullAndBranch
-// LIsObjectAndBranch
-// LIsSmiAndBranch
-// LTypeofIsAndBranch
-// LAccessArgumentsAt
-// LArgumentsElements
-// LArgumentsLength
-// LAddI
-// LApplyArguments
-// LArithmeticD
-// LArithmeticT
-// LBitI
-// LBoundsCheck
-// LCmpID
-// LCmpJSObjectEq
-// LCmpT
-// LDivI
-// LInstanceOf
-// LInstanceOfKnownGlobal
-// LLoadKeyedFastElement
-// LLoadKeyedGeneric
-// LModI
-// LMulI
-// LPower
-// LShiftI
-// LSubI
-// LCallConstantFunction
-// LCallFunction
-// LCallGlobal
-// LCallKeyed
-// LCallKnownGlobal
-// LCallNamed
-// LCallRuntime
-// LCallStub
-// LConstant
-// LConstantD
-// LConstantI
-// LConstantT
-// LDeoptimize
-// LFunctionLiteral
-// LGap
-// LLabel
-// LGlobalObject
-// LGlobalReceiver
-// LGoto
-// LLazyBailout
-// LLoadGlobal
-// LCheckPrototypeMaps
-// LLoadContextSlot
-// LArrayLiteral
-// LObjectLiteral
-// LRegExpLiteral
-// LOsrEntry
-// LParameter
-// LRegExpConstructResult
-// LStackCheck
-// LStoreKeyed
-// LStoreKeyedFastElement
-// LStoreKeyedGeneric
-// LStoreNamed
-// LStoreNamedField
-// LStoreNamedGeneric
-// LBitNotI
-// LCallNew
-// LCheckFunction
-// LCheckPrototypeMaps
-// LCheckInstanceType
-// LCheckMap
-// LCheckSmi
-// LClassOfTest
-// LDeleteProperty
-// LDoubleToI
-// LFixedArrayLength
-// LHasCachedArrayIndex
-// LHasInstanceType
-// LInteger32ToDouble
-// LIsNull
-// LIsObject
-// LIsSmi
-// LJSArrayLength
-// LLoadNamedField
-// LLoadNamedGeneric
-// LLoadFunctionPrototype
-// LNumberTagD
-// LNumberTagI
-// LPushArgument
-// LReturn
-// LSmiTag
-// LStoreGlobal
-// LTaggedToI
-// LThrow
-// LTypeof
-// LTypeofIs
-// LUnaryMathOperation
-// LValueOf
-// LUnknownOSRValue
-
#define LITHIUM_ALL_INSTRUCTION_LIST(V) \
V(ControlInstruction) \
- V(Constant) \
V(Call) \
V(StoreKeyed) \
V(StoreNamed) \
@@ -195,6 +84,7 @@ class LCodeGen;
V(ConstantD) \
V(ConstantI) \
V(ConstantT) \
+ V(Context) \
V(DeleteProperty) \
V(Deoptimize) \
V(DivI) \
@@ -232,6 +122,8 @@ class LCodeGen;
V(LoadNamedField) \
V(LoadNamedGeneric) \
V(LoadFunctionPrototype) \
+ V(LoadPixelArrayElement) \
+ V(LoadPixelArrayExternalPointer) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
@@ -240,6 +132,7 @@ class LCodeGen;
V(ObjectLiteral) \
V(OsrEntry) \
V(Parameter) \
+ V(PixelArrayLength) \
V(Power) \
V(PushArgument) \
V(RegExpLiteral) \
@@ -253,12 +146,15 @@ class LCodeGen;
V(StoreKeyedGeneric) \
V(StoreNamedField) \
V(StoreNamedGeneric) \
+ V(StringLength) \
V(SubI) \
V(TaggedToI) \
V(Throw) \
V(Typeof) \
V(TypeofIs) \
V(TypeofIsAndBranch) \
+ V(IsConstructCall) \
+ V(IsConstructCallAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
V(ValueOf)
@@ -648,12 +544,11 @@ class LDivI: public LTemplateInstruction<1, 2, 1> {
};
-class LMulI: public LTemplateInstruction<1, 2, 1> {
+class LMulI: public LTemplateInstruction<1, 2, 0> {
public:
- LMulI(LOperand* left, LOperand* right, LOperand* temp) {
+ LMulI(LOperand* left, LOperand* right) {
inputs_[0] = left;
inputs_[1] = right;
- temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(MulI, "mul-i")
@@ -1013,47 +908,33 @@ class LSubI: public LTemplateInstruction<1, 2, 0> {
};
-template <int temp_count>
-class LConstant: public LTemplateInstruction<1, 0, temp_count> {
- DECLARE_INSTRUCTION(Constant)
-};
-
-
-class LConstantI: public LConstant<0> {
+class LConstantI: public LTemplateInstruction<1, 0, 0> {
public:
- explicit LConstantI(int32_t value) : value_(value) { }
- int32_t value() const { return value_; }
-
DECLARE_CONCRETE_INSTRUCTION(ConstantI, "constant-i")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- int32_t value_;
+ int32_t value() const { return hydrogen()->Integer32Value(); }
};
-class LConstantD: public LConstant<1> {
+class LConstantD: public LTemplateInstruction<1, 0, 1> {
public:
- explicit LConstantD(double value, LOperand* temp) : value_(value) {
+ explicit LConstantD(LOperand* temp) {
temps_[0] = temp;
}
- double value() const { return value_; }
-
DECLARE_CONCRETE_INSTRUCTION(ConstantD, "constant-d")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- double value_;
+ double value() const { return hydrogen()->DoubleValue(); }
};
-class LConstantT: public LConstant<0> {
+class LConstantT: public LTemplateInstruction<1, 0, 0> {
public:
- explicit LConstantT(Handle<Object> value) : value_(value) { }
- Handle<Object> value() const { return value_; }
-
DECLARE_CONCRETE_INSTRUCTION(ConstantT, "constant-t")
+ DECLARE_HYDROGEN_ACCESSOR(Constant)
- private:
- Handle<Object> value_;
+ Handle<Object> value() const { return hydrogen()->handle(); }
};
@@ -1102,6 +983,17 @@ class LJSArrayLength: public LTemplateInstruction<1, 1, 0> {
};
+class LPixelArrayLength: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LPixelArrayLength(LOperand* value) {
+ inputs_[0] = value;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(PixelArrayLength, "pixel-array-length")
+ DECLARE_HYDROGEN_ACCESSOR(PixelArrayLength)
+};
+
+
class LFixedArrayLength: public LTemplateInstruction<1, 1, 0> {
public:
explicit LFixedArrayLength(LOperand* value) {
@@ -1240,11 +1132,10 @@ class LLoadNamedGeneric: public LTemplateInstruction<1, 1, 0> {
};
-class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 1> {
+class LLoadFunctionPrototype: public LTemplateInstruction<1, 1, 0> {
public:
- LLoadFunctionPrototype(LOperand* function, LOperand* temp) {
+ explicit LLoadFunctionPrototype(LOperand* function) {
inputs_[0] = function;
- temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(LoadFunctionPrototype, "load-function-prototype")
@@ -1264,6 +1155,17 @@ class LLoadElements: public LTemplateInstruction<1, 1, 0> {
};
+class LLoadPixelArrayExternalPointer: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LLoadPixelArrayExternalPointer(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayExternalPointer,
+ "load-pixel-array-external-pointer")
+};
+
+
class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
public:
LLoadKeyedFastElement(LOperand* elements, LOperand* key) {
@@ -1279,6 +1181,22 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
};
+class LLoadPixelArrayElement: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LLoadPixelArrayElement(LOperand* external_pointer, LOperand* key) {
+ inputs_[0] = external_pointer;
+ inputs_[1] = key;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayElement,
+ "load-pixel-array-element")
+ DECLARE_HYDROGEN_ACCESSOR(LoadPixelArrayElement)
+
+ LOperand* external_pointer() { return inputs_[0]; }
+ LOperand* key() { return inputs_[1]; }
+};
+
+
class LLoadKeyedGeneric: public LTemplateInstruction<1, 2, 0> {
public:
LLoadKeyedGeneric(LOperand* obj, LOperand* key) {
@@ -1338,6 +1256,12 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
};
+class LContext: public LTemplateInstruction<1, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(Context, "context")
+};
+
+
class LGlobalObject: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(GlobalObject, "global-object")
@@ -1627,6 +1551,19 @@ class LStoreKeyedGeneric: public LStoreKeyed {
};
+class LStringLength: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LStringLength(LOperand* string) {
+ inputs_[0] = string;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
+ DECLARE_HYDROGEN_ACCESSOR(StringLength)
+
+ LOperand* string() { return inputs_[0]; }
+};
+
+
class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
public:
explicit LCheckFunction(LOperand* value) {
@@ -1763,6 +1700,24 @@ class LTypeofIsAndBranch: public LControlInstruction<1, 0> {
};
+class LIsConstructCall: public LTemplateInstruction<1, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(IsConstructCall, "is-construct-call")
+ DECLARE_HYDROGEN_ACCESSOR(IsConstructCall)
+};
+
+
+class LIsConstructCallAndBranch: public LControlInstruction<0, 1> {
+ public:
+ explicit LIsConstructCallAndBranch(LOperand* temp) {
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsConstructCallAndBranch,
+ "is-construct-call-and-branch")
+};
+
+
class LDeleteProperty: public LTemplateInstruction<1, 2, 0> {
public:
LDeleteProperty(LOperand* obj, LOperand* key) {
diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc
index 50b689b23f..e7d02d2003 100644
--- a/deps/v8/src/x64/macro-assembler-x64.cc
+++ b/deps/v8/src/x64/macro-assembler-x64.cc
@@ -1442,10 +1442,17 @@ void MacroAssembler::Pushad() {
// r13 is kRootRegister.
push(r14);
// r15 is kSmiConstantRegister
+ STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
+ // Use lea for symmetry with Popad.
+ lea(rsp, Operand(rsp,
+ -(kNumSafepointRegisters-kNumSafepointSavedRegisters) * kPointerSize));
}
void MacroAssembler::Popad() {
+ // Popad must not change the flags, so use lea instead of addq.
+ lea(rsp, Operand(rsp,
+ (kNumSafepointRegisters-kNumSafepointSavedRegisters) * kPointerSize));
pop(r14);
pop(r12);
pop(r11);
@@ -1461,8 +1468,7 @@ void MacroAssembler::Popad() {
void MacroAssembler::Dropad() {
- const int kRegistersPushedByPushad = 11;
- addq(rsp, Immediate(kRegistersPushedByPushad * kPointerSize));
+ addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}
@@ -1536,11 +1542,113 @@ void MacroAssembler::PopTryHandler() {
}
+void MacroAssembler::Throw(Register value) {
+ // Check that stack should contain next handler, frame pointer, state and
+ // return address in that order.
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
+ StackHandlerConstants::kStateOffset);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
+ StackHandlerConstants::kPCOffset);
+ // Keep thrown value in rax.
+ if (!value.is(rax)) {
+ movq(rax, value);
+ }
+
+ ExternalReference handler_address(Top::k_handler_address);
+ movq(kScratchRegister, handler_address);
+ movq(rsp, Operand(kScratchRegister, 0));
+ // get next in chain
+ pop(rcx);
+ movq(Operand(kScratchRegister, 0), rcx);
+ pop(rbp); // pop frame pointer
+ pop(rdx); // remove state
+
+ // Before returning we restore the context from the frame pointer if not NULL.
+ // The frame pointer is NULL in the exception handler of a JS entry frame.
+ Set(rsi, 0); // Tentatively set context pointer to NULL
+ NearLabel skip;
+ cmpq(rbp, Immediate(0));
+ j(equal, &skip);
+ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ bind(&skip);
+ ret(0);
+}
+
+
+void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
+ Register value) {
+ // Keep thrown value in rax.
+ if (!value.is(rax)) {
+ movq(rax, value);
+ }
+ // Fetch top stack handler.
+ ExternalReference handler_address(Top::k_handler_address);
+ movq(kScratchRegister, handler_address);
+ movq(rsp, Operand(kScratchRegister, 0));
+
+ // Unwind the handlers until the ENTRY handler is found.
+ NearLabel loop, done;
+ bind(&loop);
+ // Load the type of the current stack handler.
+ const int kStateOffset = StackHandlerConstants::kStateOffset;
+ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
+ j(equal, &done);
+ // Fetch the next handler in the list.
+ const int kNextOffset = StackHandlerConstants::kNextOffset;
+ movq(rsp, Operand(rsp, kNextOffset));
+ jmp(&loop);
+ bind(&done);
+
+ // Set the top handler address to next handler past the current ENTRY handler.
+ movq(kScratchRegister, handler_address);
+ pop(Operand(kScratchRegister, 0));
+
+ if (type == OUT_OF_MEMORY) {
+ // Set external caught exception to false.
+ ExternalReference external_caught(Top::k_external_caught_exception_address);
+ movq(rax, Immediate(false));
+ store_rax(external_caught);
+
+ // Set pending exception and rax to out of memory exception.
+ ExternalReference pending_exception(Top::k_pending_exception_address);
+ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
+ store_rax(pending_exception);
+ }
+
+ // Clear the context pointer.
+ Set(rsi, 0);
+
+ // Restore registers from handler.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
+ StackHandlerConstants::kFPOffset);
+ pop(rbp); // FP
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
+ StackHandlerConstants::kStateOffset);
+ pop(rdx); // State
+
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
+ StackHandlerConstants::kPCOffset);
+ ret(0);
+}
+
+
void MacroAssembler::Ret() {
ret(0);
}
+void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
+ if (is_uint16(bytes_dropped)) {
+ ret(bytes_dropped);
+ } else {
+ pop(scratch);
+ addq(rsp, Immediate(bytes_dropped));
+ push(scratch);
+ ret(0);
+ }
+}
+
+
void MacroAssembler::FCmp() {
fucomip();
fstp(0);
@@ -1598,6 +1706,17 @@ void MacroAssembler::AbortIfNotSmi(Register object) {
}
+void MacroAssembler::AbortIfNotString(Register object) {
+ testb(object, Immediate(kSmiTagMask));
+ Assert(not_equal, "Operand is not a string");
+ push(object);
+ movq(object, FieldOperand(object, HeapObject::kMapOffset));
+ CmpInstanceType(object, FIRST_NONSTRING_TYPE);
+ pop(object);
+ Assert(below, "Operand is not a string");
+}
+
+
void MacroAssembler::AbortIfNotRootValue(Register src,
Heap::RootListIndex root_value_index,
const char* message) {
@@ -2098,11 +2217,11 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
Register top_reg = result_end.is_valid() ? result_end : result;
- if (top_reg.is(result)) {
- addq(top_reg, Immediate(object_size));
- } else {
- lea(top_reg, Operand(result, object_size));
+ if (!top_reg.is(result)) {
+ movq(top_reg, result);
}
+ addq(top_reg, Immediate(object_size));
+ j(carry, gc_required);
movq(kScratchRegister, new_space_allocation_limit);
cmpq(top_reg, Operand(kScratchRegister, 0));
j(above, gc_required);
@@ -2152,7 +2271,12 @@ void MacroAssembler::AllocateInNewSpace(int header_size,
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
- lea(result_end, Operand(result, element_count, element_size, header_size));
+
+ // We assume that element_count*element_size + header_size does not
+ // overflow.
+ lea(result_end, Operand(element_count, element_size, header_size));
+ addq(result_end, result);
+ j(carry, gc_required);
movq(kScratchRegister, new_space_allocation_limit);
cmpq(result_end, Operand(kScratchRegister, 0));
j(above, gc_required);
@@ -2198,6 +2322,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
movq(result_end, object_size);
}
addq(result_end, result);
+ j(carry, gc_required);
movq(kScratchRegister, new_space_allocation_limit);
cmpq(result_end, Operand(kScratchRegister, 0));
j(above, gc_required);
diff --git a/deps/v8/src/x64/macro-assembler-x64.h b/deps/v8/src/x64/macro-assembler-x64.h
index 8bb3190448..8352518323 100644
--- a/deps/v8/src/x64/macro-assembler-x64.h
+++ b/deps/v8/src/x64/macro-assembler-x64.h
@@ -171,7 +171,8 @@ class MacroAssembler: public Assembler {
void PushSafepointRegisters() { Pushad(); }
void PopSafepointRegisters() { Popad(); }
static int SafepointRegisterStackIndex(int reg_code) {
- return kSafepointPushRegisterIndices[reg_code];
+ return kNumSafepointRegisters - 1 -
+ kSafepointPushRegisterIndices[reg_code];
}
@@ -661,6 +662,9 @@ class MacroAssembler: public Assembler {
// Abort execution if argument is not a smi. Used in debug code.
void AbortIfNotSmi(Register object);
+ // Abort execution if argument is a string. Used in debug code.
+ void AbortIfNotString(Register object);
+
// Abort execution if argument is not the root value with the given index.
void AbortIfNotRootValue(Register src,
Heap::RootListIndex root_value_index,
@@ -676,6 +680,13 @@ class MacroAssembler: public Assembler {
// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();
+ // Activate the top handler in the try handler chain and pass the
+ // thrown value.
+ void Throw(Register value);
+
+ // Propagate an uncatchable exception out of the current JS stack.
+ void ThrowUncatchable(UncatchableExceptionType type, Register value);
+
// ---------------------------------------------------------------------------
// Inline caching support
@@ -920,6 +931,10 @@ class MacroAssembler: public Assembler {
void Ret();
+ // Return and drop arguments from stack, where the number of arguments
+ // may be bigger than 2^16 - 1. Requires a scratch register.
+ void Ret(int bytes_dropped, Register scratch);
+
Handle<Object> CodeObject() { return code_object_; }
@@ -959,6 +974,8 @@ class MacroAssembler: public Assembler {
// Order general registers are pushed by Pushad.
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14.
static int kSafepointPushRegisterIndices[Register::kNumRegisters];
+ static const int kNumSafepointSavedRegisters = 11;
+
bool generating_stub_;
bool allow_stub_calls_;
diff --git a/deps/v8/src/x64/simulator-x64.h b/deps/v8/src/x64/simulator-x64.h
index e607c8b87d..3a62ffd5c3 100644
--- a/deps/v8/src/x64/simulator-x64.h
+++ b/deps/v8/src/x64/simulator-x64.h
@@ -39,10 +39,13 @@ namespace internal {
#define CALL_GENERATED_CODE(entry, p0, p1, p2, p3, p4) \
(entry(p0, p1, p2, p3, p4))
-// Call the generated regexp code directly. The entry function pointer should
+typedef int (*regexp_matcher)(String*, int, const byte*,
+ const byte*, int*, Address, int);
+
+// Call the generated regexp code directly. The code at the entry address should
// expect seven int/pointer sized arguments and return an int.
#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
- (entry(p0, p1, p2, p3, p4, p5, p6))
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
(reinterpret_cast<TryCatch*>(try_catch_address))
diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc
index 9cb88f36f6..973fece32a 100644
--- a/deps/v8/src/x64/stub-cache-x64.cc
+++ b/deps/v8/src/x64/stub-cache-x64.cc
@@ -2559,6 +2559,43 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
}
+MaybeObject* KeyedStoreStubCompiler::CompileStorePixelArray(
+ JSObject* receiver) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map matches.
+ __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, false);
+
+ // Do the store.
+ GenerateFastPixelArrayStore(masm(),
+ rdx,
+ rcx,
+ rax,
+ rdi,
+ rbx,
+ true,
+ false,
+ &miss,
+ &miss,
+ NULL,
+ &miss);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL);
+}
+
+
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
JSObject* object,
JSObject* last) {
diff --git a/deps/v8/src/x64/virtual-frame-x64.cc b/deps/v8/src/x64/virtual-frame-x64.cc
index 3f7b1db7e5..31f9527a6d 100644
--- a/deps/v8/src/x64/virtual-frame-x64.cc
+++ b/deps/v8/src/x64/virtual-frame-x64.cc
@@ -1119,23 +1119,30 @@ Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
}
-Result VirtualFrame::CallStoreIC(Handle<String> name, bool is_contextual) {
+Result VirtualFrame::CallStoreIC(Handle<String> name,
+ bool is_contextual,
+ StrictModeFlag strict_mode) {
// Value and (if not contextual) receiver are on top of the frame.
// The IC expects name in rcx, value in rax, and receiver in rdx.
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(strict_mode == kStrictMode
+ ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
Result value = Pop();
+ RelocInfo::Mode mode;
if (is_contextual) {
PrepareForCall(0, 0);
value.ToRegister(rax);
__ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
value.Unuse();
+ mode = RelocInfo::CODE_TARGET_CONTEXT;
} else {
Result receiver = Pop();
PrepareForCall(0, 0);
MoveResultsToRegisters(&value, &receiver, rax, rdx);
+ mode = RelocInfo::CODE_TARGET;
}
__ Move(rcx, name);
- return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
+ return RawCallCodeObject(ic, mode);
}
diff --git a/deps/v8/src/x64/virtual-frame-x64.h b/deps/v8/src/x64/virtual-frame-x64.h
index 0479ff0c12..4a9c72034e 100644
--- a/deps/v8/src/x64/virtual-frame-x64.h
+++ b/deps/v8/src/x64/virtual-frame-x64.h
@@ -338,7 +338,8 @@ class VirtualFrame : public ZoneObject {
// Call store IC. If the load is contextual, value is found on top of the
// frame. If not, value and receiver are on the frame. Both are dropped.
- Result CallStoreIC(Handle<String> name, bool is_contextual);
+ Result CallStoreIC(Handle<String> name, bool is_contextual,
+ StrictModeFlag strict_mode);
// Call keyed store IC. Value, key, and receiver are found on top
// of the frame. All three are dropped.
diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status
index 9b070d2064..a7422c2543 100644
--- a/deps/v8/test/cctest/cctest.status
+++ b/deps/v8/test/cctest/cctest.status
@@ -58,23 +58,11 @@ test-heap/TestInternalWeakListsTraverseWithGC: PASS || FAIL
[ $arch == x64 && $crankshaft ]
# Tests that fail with crankshaft.
-test-deoptimization/DeoptimizeBinaryOperationADDString: FAIL
-test-deoptimization/DeoptimizeBinaryOperationADD: FAIL
-test-deoptimization/DeoptimizeBinaryOperationSUB: FAIL
-test-deoptimization/DeoptimizeBinaryOperationMUL: FAIL
test-deoptimization/DeoptimizeBinaryOperationMOD: FAIL
-test-deoptimization/DeoptimizeBinaryOperationDIV: FAIL
test-deoptimization/DeoptimizeLoadICStoreIC: FAIL
test-deoptimization/DeoptimizeLoadICStoreICNested: FAIL
test-deoptimization/DeoptimizeCompare: PASS || FAIL
-# Tests that time out with crankshaft.
-test-api/Threading: SKIP
-
-# BUG(1069): Context serialization fails on optimized functions.
-test-serialize/ContextSerialization: SKIP
-test-serialize/ContextDeserialization: SKIP
-
##############################################################################
[ $arch == arm ]
diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc
index f88ba384d7..3de5b92d48 100644
--- a/deps/v8/test/cctest/test-api.cc
+++ b/deps/v8/test/cctest/test-api.cc
@@ -1649,6 +1649,23 @@ THREADED_TEST(IdentityHash) {
CHECK_NE(hash, hash3);
int hash4 = obj->GetIdentityHash();
CHECK_EQ(hash, hash4);
+
+ // Check identity hashes behaviour in the presence of JS accessors.
+ // Put a getter for 'v8::IdentityHash' on the Object's prototype:
+ {
+ CompileRun("Object.prototype['v8::IdentityHash'] = 42;\n");
+ Local<v8::Object> o1 = v8::Object::New();
+ Local<v8::Object> o2 = v8::Object::New();
+ CHECK_NE(o1->GetIdentityHash(), o2->GetIdentityHash());
+ }
+ {
+ CompileRun(
+ "function cnst() { return 42; };\n"
+ "Object.prototype.__defineGetter__('v8::IdentityHash', cnst);\n");
+ Local<v8::Object> o1 = v8::Object::New();
+ Local<v8::Object> o2 = v8::Object::New();
+ CHECK_NE(o1->GetIdentityHash(), o2->GetIdentityHash());
+ }
}
@@ -2674,6 +2691,41 @@ THREADED_TEST(CatchExceptionFromWith) {
}
+THREADED_TEST(TryCatchAndFinallyHidingException) {
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::TryCatch try_catch;
+ CHECK(!try_catch.HasCaught());
+ CompileRun("function f(k) { try { this[k]; } finally { return 0; } };");
+ CompileRun("f({toString: function() { throw 42; }});");
+ CHECK(!try_catch.HasCaught());
+}
+
+
+v8::Handle<v8::Value> WithTryCatch(const v8::Arguments& args) {
+ v8::TryCatch try_catch;
+ return v8::Undefined();
+}
+
+
+THREADED_TEST(TryCatchAndFinally) {
+ v8::HandleScope scope;
+ LocalContext context;
+ context->Global()->Set(
+ v8_str("native_with_try_catch"),
+ v8::FunctionTemplate::New(WithTryCatch)->GetFunction());
+ v8::TryCatch try_catch;
+ CHECK(!try_catch.HasCaught());
+ CompileRun(
+ "try {\n"
+ " throw new Error('a');\n"
+ "} finally {\n"
+ " native_with_try_catch();\n"
+ "}\n");
+ CHECK(try_catch.HasCaught());
+}
+
+
THREADED_TEST(Equality) {
v8::HandleScope scope;
LocalContext context;
@@ -5600,6 +5652,35 @@ TEST(AccessControl) {
}
+// This is a regression test for issue 1154.
+TEST(AccessControlObjectKeys) {
+ v8::HandleScope handle_scope;
+ v8::Handle<v8::ObjectTemplate> global_template = v8::ObjectTemplate::New();
+
+ global_template->SetAccessCheckCallbacks(NamedAccessBlocker,
+ IndexedAccessBlocker);
+
+ // Add an accessor that is not accessible by cross-domain JS code.
+ global_template->SetAccessor(v8_str("blocked_prop"),
+ UnreachableGetter, UnreachableSetter,
+ v8::Handle<Value>(),
+ v8::DEFAULT);
+
+ // Create an environment
+ v8::Persistent<Context> context0 = Context::New(NULL, global_template);
+ context0->Enter();
+
+ v8::Handle<v8::Object> global0 = context0->Global();
+
+ v8::Persistent<Context> context1 = Context::New();
+ context1->Enter();
+ v8::Handle<v8::Object> global1 = context1->Global();
+ global1->Set(v8_str("other"), global0);
+
+ ExpectTrue("Object.keys(other).indexOf('blocked_prop') == -1");
+}
+
+
static bool GetOwnPropertyNamesNamedBlocker(Local<v8::Object> global,
Local<Value> name,
v8::AccessType type,
@@ -10674,6 +10755,76 @@ THREADED_TEST(PixelArray) {
"result");
CHECK_EQ(32640, result->Int32Value());
+ // Make sure that pixel array store ICs clamp values correctly.
+ result = CompileRun("function pa_store(p) {"
+ " for (var j = 0; j < 256; j++) { p[j] = j * 2; }"
+ "}"
+ "pa_store(pixels);"
+ "var sum = 0;"
+ "for (var j = 0; j < 256; j++) { sum += pixels[j]; }"
+ "sum");
+ CHECK_EQ(48896, result->Int32Value());
+
+ // Make sure that pixel array stores correctly handle accesses outside
+ // of the pixel array.
+ result = CompileRun("function pa_store(p,start) {"
+ " for (var j = 0; j < 256; j++) {"
+ " p[j+start] = j * 2;"
+ " }"
+ "}"
+ "pa_store(pixels,0);"
+ "pa_store(pixels,-128);"
+ "var sum = 0;"
+ "for (var j = 0; j < 256; j++) { sum += pixels[j]; }"
+ "sum");
+ CHECK_EQ(65280, result->Int32Value());
+
+ // Make sure that the generic store stub correctly handle accesses outside
+ // of the pixel array.
+ result = CompileRun("function pa_store(p,start) {"
+ " for (var j = 0; j < 256; j++) {"
+ " p[j+start] = j * 2;"
+ " }"
+ "}"
+ "pa_store(pixels,0);"
+ "just_ints = new Object();"
+ "for (var i = 0; i < 256; ++i) { just_ints[i] = i; }"
+ "pa_store(just_ints, 0);"
+ "pa_store(pixels,-128);"
+ "var sum = 0;"
+ "for (var j = 0; j < 256; j++) { sum += pixels[j]; }"
+ "sum");
+ CHECK_EQ(65280, result->Int32Value());
+
+ // Make sure that the generic keyed store stub clamps pixel array values
+ // correctly.
+ result = CompileRun("function pa_store(p) {"
+ " for (var j = 0; j < 256; j++) { p[j] = j * 2; }"
+ "}"
+ "pa_store(pixels);"
+ "just_ints = new Object();"
+ "pa_store(just_ints);"
+ "pa_store(pixels);"
+ "var sum = 0;"
+ "for (var j = 0; j < 256; j++) { sum += pixels[j]; }"
+ "sum");
+ CHECK_EQ(48896, result->Int32Value());
+
+ // Make sure that pixel array loads are optimized by crankshaft.
+ result = CompileRun("function pa_load(p) {"
+ " var sum = 0;"
+ " for (var i=0; i<256; ++i) {"
+ " sum += p[i];"
+ " }"
+ " return sum; "
+ "}"
+ "for (var i = 0; i < 256; ++i) { pixels[i] = i; }"
+ "for (var i = 0; i < 10000; ++i) {"
+ " result = pa_load(pixels);"
+ "}"
+ "result");
+ CHECK_EQ(32640, result->Int32Value());
+
free(pixel_data);
}
@@ -11353,6 +11504,26 @@ TEST(CaptureStackTraceForUncaughtException) {
}
+TEST(CaptureStackTraceForUncaughtExceptionAndSetters) {
+ v8::HandleScope scope;
+ LocalContext env;
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true,
+ 1024,
+ v8::StackTrace::kDetailed);
+
+ CompileRun(
+ "var setters = ['column', 'lineNumber', 'scriptName',\n"
+ " 'scriptNameOrSourceURL', 'functionName', 'isEval',\n"
+ " 'isConstructor'];\n"
+ "for (var i = 0; i < setters.length; i++) {\n"
+ " var prop = setters[i];\n"
+ " Object.prototype.__defineSetter__(prop, function() { throw prop; });\n"
+ "}\n");
+ CompileRun("throw 'exception';");
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+}
+
+
v8::Handle<Value> AnalyzeStackOfEvalWithSourceURL(const v8::Arguments& args) {
v8::HandleScope scope;
v8::Handle<v8::StackTrace> stackTrace =
@@ -12523,6 +12694,25 @@ TEST(RegExp) {
}
+THREADED_TEST(Equals) {
+ v8::HandleScope handleScope;
+ LocalContext localContext;
+
+ v8::Handle<v8::Object> globalProxy = localContext->Global();
+ v8::Handle<Value> global = globalProxy->GetPrototype();
+
+ CHECK(global->StrictEquals(global));
+ CHECK(!global->StrictEquals(globalProxy));
+ CHECK(!globalProxy->StrictEquals(global));
+ CHECK(globalProxy->StrictEquals(globalProxy));
+
+ CHECK(global->Equals(global));
+ CHECK(!global->Equals(globalProxy));
+ CHECK(!globalProxy->Equals(global));
+ CHECK(globalProxy->Equals(globalProxy));
+}
+
+
static v8::Handle<v8::Value> Getter(v8::Local<v8::String> property,
const v8::AccessorInfo& info ) {
return v8_str("42!");
@@ -12549,3 +12739,19 @@ TEST(NamedEnumeratorAndForIn) {
CHECK_EQ(1, result->Length());
CHECK_EQ(v8_str("universalAnswer"), result->Get(0));
}
+
+
+TEST(DefinePropertyPostDetach) {
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::Handle<v8::Object> proxy = context->Global();
+ v8::Handle<v8::Function> define_property =
+ CompileRun("(function() {"
+ " Object.defineProperty("
+ " this,"
+ " 1,"
+ " { configurable: true, enumerable: true, value: 3 });"
+ "})").As<Function>();
+ context->DetachGlobal();
+ define_property->Call(proxy, 0, NULL);
+}
diff --git a/deps/v8/test/cctest/test-debug.cc b/deps/v8/test/cctest/test-debug.cc
index b9f6038302..441aae63c6 100644
--- a/deps/v8/test/cctest/test-debug.cc
+++ b/deps/v8/test/cctest/test-debug.cc
@@ -997,7 +997,7 @@ TEST(DebugStub) {
CheckDebugBreakFunction(&env,
"function f2(){x=1;}", "f2",
0,
- v8::internal::RelocInfo::CODE_TARGET,
+ v8::internal::RelocInfo::CODE_TARGET_CONTEXT,
Builtins::builtin(Builtins::StoreIC_DebugBreak));
CheckDebugBreakFunction(&env,
"function f3(){var a=x;}", "f3",
diff --git a/deps/v8/test/cctest/test-disasm-ia32.cc b/deps/v8/test/cctest/test-disasm-ia32.cc
index 30d708e408..c995aa8ed0 100644
--- a/deps/v8/test/cctest/test-disasm-ia32.cc
+++ b/deps/v8/test/cctest/test-disasm-ia32.cc
@@ -446,6 +446,14 @@ TEST(DisasmIa320) {
}
}
+ {
+ if (CpuFeatures::IsSupported(SSE4_1)) {
+ CpuFeatures::Scope scope(SSE4_1);
+ __ pextrd(Operand(eax), xmm0, 1);
+ __ pinsrd(xmm1, Operand(eax), 0);
+ }
+ }
+
__ ret(0);
CodeDesc desc;
diff --git a/deps/v8/test/cctest/test-strtod.cc b/deps/v8/test/cctest/test-strtod.cc
index f5547dbc03..da6b07bb8b 100644
--- a/deps/v8/test/cctest/test-strtod.cc
+++ b/deps/v8/test/cctest/test-strtod.cc
@@ -324,6 +324,25 @@ TEST(Strtod) {
StrtodChar("5708990770823839207320493820740630171355185151999", -3));
CHECK_EQ(5708990770823839524233143877797980545530986496.0,
StrtodChar("5708990770823839207320493820740630171355185152001", -3));
+
+ // The following test-cases got some public attention in early 2011 when they
+ // sent Java and PHP into an infinite loop.
+ CHECK_EQ(2.225073858507201e-308, StrtodChar("22250738585072011", -324));
+ CHECK_EQ(2.22507385850720138309e-308,
+ StrtodChar("22250738585072011360574097967091319759348195463516456480"
+ "23426109724822222021076945516529523908135087914149158913"
+ "03962110687008643869459464552765720740782062174337998814"
+ "10632673292535522868813721490129811224514518898490572223"
+ "07285255133155755015914397476397983411801999323962548289"
+ "01710708185069063066665599493827577257201576306269066333"
+ "26475653000092458883164330377797918696120494973903778297"
+ "04905051080609940730262937128958950003583799967207254304"
+ "36028407889577179615094551674824347103070260914462157228"
+ "98802581825451803257070188608721131280795122334262883686"
+ "22321503775666622503982534335974568884423900265498198385"
+ "48794829220689472168983109969836584681402285424333066033"
+ "98508864458040010349339704275671864433837704860378616227"
+ "71738545623065874679014086723327636718751", -1076));
}
diff --git a/deps/v8/test/es5conform/es5conform.status b/deps/v8/test/es5conform/es5conform.status
index 0dd2750690..e021fc54dc 100644
--- a/deps/v8/test/es5conform/es5conform.status
+++ b/deps/v8/test/es5conform/es5conform.status
@@ -234,19 +234,11 @@ chapter15/15.10/15.10.7/15.10.7.5/15.10.7.5-2: FAIL_OK
# Setting expectations to fail only so that the tests trigger as soon as
# the strict mode feature gets implemented
-# A directive preceeding an 'use strict' directive may not contain an OctalEscapeSequence
+# A directive preceding a 'use strict' directive may not contain
+# an OctalEscapeSequence
# Incorrect test - need double escape in eval.
chapter07/7.8/7.8.4/7.8.4-1-s: FAIL
-# this is not coerced to an object in strict mode (Number)
-chapter10/10.4/10.4.3/10.4.3-1-1-s: FAIL
-# this is not coerced to an object in strict mode (string)
-chapter10/10.4/10.4.3/10.4.3-1-2-s: FAIL
-# this is not coerced to an object in strict mode (undefined)
-chapter10/10.4/10.4.3/10.4.3-1-3-s: FAIL
-# this is not coerced to an object in strict mode (boolean)
-chapter10/10.4/10.4.3/10.4.3-1-4-s: FAIL
-
# arguments[i] remains same after changing actual parameters in strict mode
chapter10/10.6/10.6-10-c-ii-1-s: FAIL
# arguments[i] doesn't map to actual parameters in strict mode
@@ -263,182 +255,185 @@ chapter10/10.6/10.6-13-c-1-s: FAIL
# arguments.callee is non-configurable in strict mode
chapter10/10.6/10.6-13-c-3-s: FAIL
-# simple assignment throws ReferenceError if LeftHandSide is an unresolvable reference in strict mode
-chapter11/11.13/11.13.1/11.13.1-1-5-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a property reference with a primitive base value (this is undefined)
+# simple assignment throws TypeError if LeftHandSide is a property reference
+# with a primitive base value (this is undefined)
chapter11/11.13/11.13.1/11.13.1-1-7-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Global.NaN)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Global.NaN)
chapter11/11.13/11.13.1/11.13.1-4-2-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Global.Infinity)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Global.Infinity)
chapter11/11.13/11.13.1/11.13.1-4-3-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Global.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Global.length)
chapter11/11.13/11.13.1/11.13.1-4-4-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Object.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Object.length)
chapter11/11.13/11.13.1/11.13.1-4-5-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Function.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Function.length)
chapter11/11.13/11.13.1/11.13.1-4-6-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Array.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Array.length)
chapter11/11.13/11.13.1/11.13.1-4-7-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (String.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (String.length)
chapter11/11.13/11.13.1/11.13.1-4-8-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Boolean.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Boolean.length)
chapter11/11.13/11.13.1/11.13.1-4-9-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Number.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Number.length)
chapter11/11.13/11.13.1/11.13.1-4-10-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Date.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Date.length)
chapter11/11.13/11.13.1/11.13.1-4-11-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (RegExp.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (RegExp.length)
chapter11/11.13/11.13.1/11.13.1-4-12-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Error.length)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Error.length)
chapter11/11.13/11.13.1/11.13.1-4-13-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Number.MAX_VALUE)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Number.MAX_VALUE)
chapter11/11.13/11.13.1/11.13.1-4-14-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Number.MIN_VALUE)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Number.MIN_VALUE)
chapter11/11.13/11.13.1/11.13.1-4-15-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Number.NaN)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Number.NaN)
chapter11/11.13/11.13.1/11.13.1-4-16-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Number.NEGATIVE_INFINITY)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Number.NEGATIVE_INFINITY)
chapter11/11.13/11.13.1/11.13.1-4-17-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Number.POSITIVE_INFINITY)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Number.POSITIVE_INFINITY)
chapter11/11.13/11.13.1/11.13.1-4-18-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Math.E)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Math.E)
chapter11/11.13/11.13.1/11.13.1-4-19-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Math.LN10)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Math.LN10)
chapter11/11.13/11.13.1/11.13.1-4-20-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Math.LN2)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Math.LN2)
chapter11/11.13/11.13.1/11.13.1-4-21-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Math.LOG2E)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Math.LOG2E)
chapter11/11.13/11.13.1/11.13.1-4-22-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Math.LOG10E)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Math.LOG10E)
chapter11/11.13/11.13.1/11.13.1-4-23-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Math.PI)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Math.PI)
chapter11/11.13/11.13.1/11.13.1-4-24-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Math.SQRT1_2)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Math.SQRT1_2)
chapter11/11.13/11.13.1/11.13.1-4-25-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Math.SQRT2)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Math.SQRT2)
chapter11/11.13/11.13.1/11.13.1-4-26-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property in strict mode (Global.undefined)
+# simple assignment throws TypeError if LeftHandSide is a readonly property
+# in strict mode (Global.undefined)
chapter11/11.13/11.13.1/11.13.1-4-27-s: FAIL
-# delete operator throws TypeError when deleting a non-configurable data property in strict mode
-chapter11/11.4/11.4.1/11.4.1-4.a-3-s: FAIL
-# delete operator throws TypeError when when deleting a non-configurable data property in strict mode (Global.NaN)
-chapter11/11.4/11.4.1/11.4.1-4.a-4-s: FAIL
-# delete operator throws TypeError when deleting a non-configurable data property in strict mode (Math.LN2)
-chapter11/11.4/11.4.1/11.4.1-4.a-9-s: FAIL
+# delete operator throws TypeError when deleting a non-configurable
+# data property in strict mode (Global.NaN)
+# Invalid test case - "this" is not a global object within the test case.
+# (http://es5conform.codeplex.com/workitem/29151)
+chapter11/11.4/11.4.1/11.4.1-4.a-4-s: FAIL_OK
-# delete operator throws ReferenceError when deleting a direct reference to a var in strict mode
+# delete operator throws ReferenceError when deleting a direct reference
+# to a var in strict mode
+# Invalid test case. Test expects ReferenceError instead of SyntaxError.
+# http://es5conform.codeplex.com/workitem/29084
chapter11/11.4/11.4.1/11.4.1-5-1-s: FAIL
-# delete operator throws ReferenceError when deleting a direct reference to a function argument in strict mode
+# delete operator throws ReferenceError when deleting a direct reference
+# to a function argument in strict mode
+# Invalid test case. Test expects ReferenceError instead of SyntaxError.
+# http://es5conform.codeplex.com/workitem/29084
chapter11/11.4/11.4.1/11.4.1-5-2-s: FAIL
-# delete operator throws ReferenceError when deleting a direct reference to a function name in strict mode
+# delete operator throws ReferenceError when deleting a direct reference
+# to a function name in strict mode
+# Invalid test case. Test expects ReferenceError instead of SyntaxError.
+# http://es5conform.codeplex.com/workitem/29084
chapter11/11.4/11.4.1/11.4.1-5-3-s: FAIL
-# delete operator throws SyntaxError when deleting a direct reference to a function argument(object) in strict mode
-chapter11/11.4/11.4.1/11.4.1-5-4-s: FAIL
# eval - a function declaring a var named 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-1-s: FAIL
# eval - a function assigning into 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-2-s: FAIL
-# eval - a function expr declaring a var named 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# eval - a function expr declaring a var named 'eval' throws EvalError
+# in strict mode
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-3-s: FAIL
# eval - a function expr assigning into 'eval' throws a EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-4-s: FAIL
# eval - a Function declaring var named 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-5-s: FAIL
# eval - a Function assigning into 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-6-s: FAIL
-# eval - a direct eval declaring a var named 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# eval - a direct eval declaring a var named 'eval' throws EvalError
+# in strict mode
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-7-s: FAIL
# eval - a direct eval assigning into 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-8-s: FAIL
-# eval - an indirect eval declaring a var named 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# eval - an indirect eval declaring a var named 'eval' throws EvalError
+# in strict mode
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-9-s: FAIL
# eval - an indirect eval assigning into 'eval' throws EvalError in strict mode
-# EvalError - incorrect test (SyntaxError should be expected instead of EvalError)
+# Invalid test case. SyntaxError should be expected instead of EvalError.
chapter12/12.2/12.2.1/12.2.1-10-s: FAIL
-# SyntaxError if eval used as function identifier in function declaration with strict body
-# test uses implicit return (which doesn't seem to work in v8 or safari jsc)
+# SyntaxError if eval used as function identifier in function declaration
+# with strict body
+# Test fails to return true on success (invalid test case).
chapter13/13.1/13.1-3-3-s: FAIL
-# SyntaxError if eval used as function identifier in function expression with strict body
-# test uses implicit return (which doesn't seem to work in v8 or safari jsc)
+# SyntaxError if eval used as function identifier in function expression
+# with strict body
+# Test fails to return true on success (invalid test case).
chapter13/13.1/13.1-3-4-s: FAIL
-# SyntaxError if eval used as function identifier in function declaration in strict code
-# test uses implicit return (which doesn't seem to work in v8 or safari jsc)
+# SyntaxError if eval used as function identifier in function declaration
+# in strict code
+# Test fails to return true on success (invalid test case).
chapter13/13.1/13.1-3-5-s: FAIL
-# SyntaxError if eval used as function identifier in function expression in strict code
-# test uses implicit return (which doesn't seem to work in v8 or safari jsc)
+# SyntaxError if eval used as function identifier in function expression
+# in strict code
+# Test fails to return true on success (invalid test case).
chapter13/13.1/13.1-3-6-s: FAIL
-# SyntaxError if arguments used as function identifier in function declaration with strict body
-# test uses implicit return (which doesn't seem to work in v8 or safari jsc)
+# SyntaxError if arguments used as function identifier in function declaration
+# with strict body
+# Test fails to return true on success (invalid test case).
chapter13/13.1/13.1-3-9-s: FAIL
-# SyntaxError if arguments used as function identifier in function expression with strict body
-# test uses implicit return (which doesn't seem to work in v8 or safari jsc)
+# SyntaxError if arguments used as function identifier in function expression
+# with strict body
+# Test fails to return true on success (invalid test case).
chapter13/13.1/13.1-3-10-s: FAIL
-# SyntaxError if arguments used as function identifier in function declaration in strict code
-# test uses implicit return (which doesn't seem to work in v8 or safari jsc)
+# SyntaxError if arguments used as function identifier in function declaration
+# in strict code
+# Test fails to return true on success (invalid test case).
chapter13/13.1/13.1-3-11-s: FAIL
-# SyntaxError if arguments used as function identifier in function expression in strict code
-# test uses implicit return (which doesn't seem to work in v8 or safari jsc)
+# SyntaxError if arguments used as function identifier in function expression
+# in strict code
+# Test fails to return true on success (invalid test case).
chapter13/13.1/13.1-3-12-s: FAIL
-# 'use strict' directive - correct usage
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-1-s: FAIL
-# "use strict" directive - correct usage double quotes
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-2-s: FAIL
-# 'use strict' directive - may follow other directives
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-8-s: FAIL
-# 'use strict' directive - may occur multiple times
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-9-s: FAIL
-# other directives - may follow 'use strict' directive
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-10-s: FAIL
-# comments may preceed 'use strict' directive
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-11-s: FAIL
-# comments may follow 'use strict' directive
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-12-s: FAIL
-# semicolon insertion works for'use strict' directive
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-13-s: FAIL
-# semicolon insertion may come before 'use strict' directive
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-14-s: FAIL
-# blank lines may come before 'use strict' directive
-# depends on "this is not coerced to an object in strict mode (undefined)"
-chapter14/14.1/14.1-15-s: FAIL
-
-# Duplicate combined parameter name allowed in Function constructor called in strict mode if body not strict
-# Invalid test case per ECMA-262 5th Edition, 10.1.1, bullet 4
+# Duplicate combined parameter name allowed in Function constructor called
+# in strict mode if body not strict
+# Test fails to return true on success (invalid test case).
chapter15/15.3/15.3.2/15.3.2.1/15.3.2.1-11-6-s: FAIL
-# Array.prototype.every - thisArg not passed to strict callbackfn
-chapter15/15.4/15.4.4/15.4.4.16/15.4.4.16-5-1-s: FAIL
-# Array.prototype.some - thisArg not passed to strict callbackfn
-chapter15/15.4/15.4.4/15.4.4.17/15.4.4.17-5-1-s: FAIL
-# Array.prototype.forEach - thisArg not passed to strict callbackfn
-chapter15/15.4/15.4.4/15.4.4.18/15.4.4.18-5-1-s: FAIL
-# Array.prototype.map - thisArg not passed to strict callbackfn
-chapter15/15.4/15.4.4/15.4.4.19/15.4.4.19-5-1-s: FAIL
-# Array.prototype.filter - thisArg not passed to strict callbackfn
-chapter15/15.4/15.4.4/15.4.4.20/15.4.4.20-5-1-s: FAIL
# Array.prototype.reduce - null passed as thisValue to strict callbackfn
+# Invalid test case: http://es5conform.codeplex.com/workitem/29085
chapter15/15.4/15.4.4/15.4.4.21/15.4.4.21-9-c-ii-4-s: FAIL
[ $arch == mips ]
diff --git a/deps/v8/test/mjsunit/compiler/regress-arguments.js b/deps/v8/test/mjsunit/compiler/regress-arguments.js
index 234d3fbc97..ebae5a0399 100644
--- a/deps/v8/test/mjsunit/compiler/regress-arguments.js
+++ b/deps/v8/test/mjsunit/compiler/regress-arguments.js
@@ -46,4 +46,7 @@ function u() {
return f.apply(v, arguments);
}
-for (var i=0; i<1000000; i++) assertEquals(void 0, u());
+Number.prototype.foo = 42;
+delete Number.prototype.foo;
+
+for (var i=0; i<100000; i++) assertEquals(void 0, u());
diff --git a/deps/v8/test/mjsunit/fuzz-natives.js b/deps/v8/test/mjsunit/fuzz-natives.js
index 020e3c0c85..cefef0a4b3 100644
--- a/deps/v8/test/mjsunit/fuzz-natives.js
+++ b/deps/v8/test/mjsunit/fuzz-natives.js
@@ -118,8 +118,9 @@ var knownProblems = {
"Abort": true,
// Avoid calling the concat operation, because weird lengths
- // may lead to out-of-memory.
+ // may lead to out-of-memory. Ditto for StringBuilderJoin.
"StringBuilderConcat": true,
+ "StringBuilderJoin": true,
// These functions use pseudo-stack-pointers and are not robust
// to unexpected integer values.
diff --git a/deps/v8/test/mjsunit/getter-in-prototype.js b/deps/v8/test/mjsunit/getter-in-prototype.js
index dd26c53319..5563123e79 100644
--- a/deps/v8/test/mjsunit/getter-in-prototype.js
+++ b/deps/v8/test/mjsunit/getter-in-prototype.js
@@ -31,9 +31,11 @@
var o = {};
var p = {};
p.__defineGetter__('x', function(){});
+p.__defineGetter__(0, function(){});
o.__proto__ = p;
assertThrows("o.x = 42");
+assertThrows("o[0] = 42");
function f() {
with(o) {
@@ -48,3 +50,9 @@ function g() {
x = 42;
}
assertThrows("g()");
+
+__proto__ = p;
+function g2() {
+ this[0] = 42;
+}
+assertThrows("g2()");
diff --git a/deps/v8/test/mjsunit/indexed-value-properties.js b/deps/v8/test/mjsunit/indexed-value-properties.js
new file mode 100644
index 0000000000..92bb896eaf
--- /dev/null
+++ b/deps/v8/test/mjsunit/indexed-value-properties.js
@@ -0,0 +1,56 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that the Number, String and Boolean prototypes are searched
+// for indexed properties on value objects.
+
+function return_one() { return 1; };
+
+function test(value) {
+ for (var i = 0; i < 10; i++) {
+ assertEquals(0, (value)[0]);
+ assertEquals(0, (value)["0"]);
+ assertEquals(return_one, (value)[1]);
+ assertEquals(return_one, (value)["1"]);
+ assertEquals(1, (value)[1]());
+ assertEquals(1, (value)["1"]());
+ }
+}
+
+Number.prototype[0] = 0;
+Number.prototype[1] = return_one;
+test(0);
+test(0.1);
+
+String.prototype[0] = 0;
+String.prototype[1] = return_one;
+test("");
+
+Boolean.prototype[0] = 0;
+Boolean.prototype[1] = return_one;
+test(true);
+test(false);
diff --git a/deps/v8/test/mjsunit/json.js b/deps/v8/test/mjsunit/json.js
index a0be8dd133..812ffeb5a7 100644
--- a/deps/v8/test/mjsunit/json.js
+++ b/deps/v8/test/mjsunit/json.js
@@ -415,3 +415,17 @@ var falseNum = Object("37");
falseNum.__proto__ = Number.prototype;
falseNum.toString = function() { return 42; };
assertEquals('"42"', JSON.stringify(falseNum));
+
+// We don't currently allow plain properties called __proto__ in JSON
+// objects in JSON.parse. Instead we read them as we would JS object
+// literals. If we change that, this test should change with it.
+//
+// Parse a non-object value as __proto__. This must not create a
+// __proto__ property different from the original, and should not
+// change the original.
+var o = JSON.parse('{"__proto__":5}');
+assertEquals(Object.prototype, o.__proto__); // __proto__ isn't changed.
+assertEquals(0, Object.keys(o).length); // __proto__ isn't added as enumerable.
+
+
+
diff --git a/deps/v8/test/mjsunit/mjsunit.status b/deps/v8/test/mjsunit/mjsunit.status
index 2448564608..c10281fc19 100644
--- a/deps/v8/test/mjsunit/mjsunit.status
+++ b/deps/v8/test/mjsunit/mjsunit.status
@@ -105,22 +105,20 @@ regress/regress-create-exception: SKIP
regress/regress-3218915: SKIP
regress/regress-3247124: SKIP
+# Requires bigger stack size in the Genesis and if stack size is increased,
+# the test requires too much time to run. However, the problem test covers
+# should be platform-independent.
+regress/regress-1132: SKIP
##############################################################################
[ $arch == arm && $crankshaft ]
-# Test that currently fails with crankshaft on ARM.
-compiler/simple-osr: FAIL
-
# BUG (1094)
regress/regress-deopt-gc: SKIP
##############################################################################
[ $arch == x64 && $crankshaft ]
-# BUG (1026) This test is currently flaky.
-compiler/simple-osr: SKIP
-
# BUG (1094)
regress/regress-deopt-gc: SKIP
diff --git a/deps/v8/test/mjsunit/regexp.js b/deps/v8/test/mjsunit/regexp.js
index 8d776ad5fb..24e1b21e84 100644
--- a/deps/v8/test/mjsunit/regexp.js
+++ b/deps/v8/test/mjsunit/regexp.js
@@ -676,3 +676,17 @@ assertEquals(["bc"], re.exec("zimzomzumbc"));
assertFalse(re.test("c"));
assertFalse(re.test(""));
+// Valid syntax in ES5.
+re = RegExp("(?:x)*");
+re = RegExp("(x)*");
+
+// Syntax extension relative to ES5, for matching JSC (and ES3).
+// Shouldn't throw.
+re = RegExp("(?=x)*");
+re = RegExp("(?!x)*");
+
+// Should throw. Shouldn't hit asserts in debug mode.
+assertThrows("RegExp('(*)')");
+assertThrows("RegExp('(?:*)')");
+assertThrows("RegExp('(?=*)')");
+assertThrows("RegExp('(?!*)')");
diff --git a/deps/v8/test/mjsunit/regress/regress-1103.js b/deps/v8/test/mjsunit/regress/regress-1103.js
new file mode 100644
index 0000000000..4ad25b3b7c
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1103.js
@@ -0,0 +1,32 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that freezing the global object functions correctly and does not
+// freeze the global proxy.
+
+var obj = this;
+obj = Object.freeze(obj);
diff --git a/deps/v8/test/mjsunit/regress/regress-1104.js b/deps/v8/test/mjsunit/regress/regress-1104.js
new file mode 100644
index 0000000000..aca0a66477
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1104.js
@@ -0,0 +1,37 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// A redeclaration of a variable that aliases a parameter and so rewrites to
+// an arguments object access should not record duplicate AST IDs for
+// bailout.
+function test(f) {
+ function f() {}
+ function f() {}
+ return arguments;
+}
+
+test();
diff --git a/deps/v8/test/mjsunit/regress/regress-1105.js b/deps/v8/test/mjsunit/regress/regress-1105.js
new file mode 100644
index 0000000000..cfe2bd389c
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1105.js
@@ -0,0 +1,38 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This should properly catch the exception from the setter triggered
+// by the loaded file, and it should not fail an assertion in debug mode.
+
+__defineSetter__("x", function(){ throw 42; });
+
+try {
+ this.eval('function x(){}');
+ assertUnreachable();
+} catch (e) {
+ assertEquals(42, e);
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-1106.js b/deps/v8/test/mjsunit/regress/regress-1106.js
new file mode 100644
index 0000000000..382fd1ba9c
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1106.js
@@ -0,0 +1,50 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test for issue 1106, where the optimizing compiler broke when accessing
+// a property lying on a prototype of the global object, and that prototype
+// object was in dictionary mode.
+
+x = Object.prototype;
+x.foo = 3;
+x.bar = 4;
+delete x.foo;
+x.foo = 5;
+
+function f() { return foo; }
+
+for (i=0 ; i < 100000; ++i) {
+ assertEquals(5, f());
+}
+
+// Test calls on functions defined in the prototype of the global object.
+x.gee = function() { return 42; }
+function g() { return gee(); }
+
+for (i=0 ; i < 100000; ++i) {
+ assertEquals(42, g());
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-1107.js b/deps/v8/test/mjsunit/regress/regress-1107.js
new file mode 100644
index 0000000000..4ba277a2e1
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1107.js
@@ -0,0 +1,32 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that even if we cannot set element 0 on all the objects, we still
+// can format exception messages to some extent.
+
+Object.prototype.__defineGetter__(0, function(){});
+assertThrows("x");
diff --git a/deps/v8/test/mjsunit/regress/regress-1110.js b/deps/v8/test/mjsunit/regress/regress-1110.js
new file mode 100644
index 0000000000..204a87ba3d
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1110.js
@@ -0,0 +1,38 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that the illegal continue is thrown at parse time.
+
+try {
+ function Crash() { continue;if (Crash) {
+ } }
+ Crash();
+ assertTrue(false);
+} catch (e) {
+ assertTrue(e instanceof SyntaxError);
+ assertTrue(/continue/.test(e.message));
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-1112.js b/deps/v8/test/mjsunit/regress/regress-1112.js
new file mode 100644
index 0000000000..d780106ba0
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1112.js
@@ -0,0 +1,36 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Regression test making sure that defineProperty on the global proxy
+// defines the property on the global object.
+
+Object.defineProperty(this,
+ 1,
+ { configurable: true, enumerable: true, value: 3 });
+assertEquals(3, this[1]);
+assertTrue(this.hasOwnProperty("1"));
+
diff --git a/deps/v8/test/mjsunit/regress/regress-1117.js b/deps/v8/test/mjsunit/regress/regress-1117.js
new file mode 100644
index 0000000000..b013a223ec
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1117.js
@@ -0,0 +1,35 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we actually return the right value (-0) when we multiply
+// constant 0 with a negative integer.
+
+function foo(y) {return 0 * y; }
+for( var i = 0; i< 1000000; i++){
+ foo(42);
+}
+assertEquals(1/foo(-42), -Infinity);
diff --git a/deps/v8/src/oprofile-agent.h b/deps/v8/test/mjsunit/regress/regress-1118.js
index 4c50f0ff7d..84f96e4639 100644
--- a/deps/v8/src/oprofile-agent.h
+++ b/deps/v8/test/mjsunit/regress/regress-1118.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,53 +25,26 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_OPROFILE_AGENT_H_
-#define V8_OPROFILE_AGENT_H_
+// An exception thrown in a function optimized by on-stack replacement (OSR)
+// should be able to construct a receiver from all optimized stack frames.
-#include <stdlib.h>
+function A() { }
+A.prototype.f = function() { }
-#include "globals.h"
+function B() { }
-#ifdef ENABLE_OPROFILE_AGENT
-// opagent.h uses uint64_t type, which can be missing in
-// system headers (they have __uint64_t), but is defined
-// in V8's headers.
-#include <opagent.h> // NOLINT
+var o = new A();
-#define OPROFILE(Call) \
- do { \
- if (v8::internal::OProfileAgent::is_enabled()) \
- v8::internal::OProfileAgent::Call; \
- } while (false)
-#else
-#define OPROFILE(Call) ((void) 0)
-#endif
+// This function throws if o does not have an f property, and should not be
+// inlined.
+function g() { try { return o.f(); } finally { }}
-namespace v8 {
-namespace internal {
+// This function should be optimized via OSR.
+function h() {
+ while(false) ;
+ for (var j = 0; j < 5000000; j++) g();
+}
-class OProfileAgent {
- public:
- static bool Initialize();
- static void TearDown();
-#ifdef ENABLE_OPROFILE_AGENT
- static void CreateNativeCodeRegion(const char* name,
- const void* ptr, unsigned int size);
- static void CreateNativeCodeRegion(String* name,
- const void* ptr, unsigned int size);
- static void CreateNativeCodeRegion(String* name, String* source, int line_num,
- const void* ptr, unsigned int size);
- static bool is_enabled() { return handle_ != NULL; }
-
- private:
- static op_agent_t handle_;
-
- // Size of the buffer that is used for composing code areas names.
- static const int kFormattingBufSize = 256;
-#else
- static bool is_enabled() { return false; }
-#endif
-};
-} }
-
-#endif // V8_OPROFILE_AGENT_H_
+h();
+o = new B();
+assertThrows("h()");
diff --git a/deps/v8/test/mjsunit/regress/regress-1119.js b/deps/v8/test/mjsunit/regress/regress-1119.js
new file mode 100644
index 0000000000..484893c95b
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1119.js
@@ -0,0 +1,45 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test runtime declaration of properties with var which are intercepted
+// by JS accessors.
+
+__proto__.__defineSetter__("x", function() { hasBeenInvoked = true; });
+__proto__.__defineSetter__("y", function() { throw 'exception'; });
+
+var hasBeenInvoked = false;
+eval("try { } catch (e) { var x = false; }");
+assertTrue(hasBeenInvoked);
+
+var exception;
+try {
+ eval("try { } catch (e) { var y = false; }");
+ assertUnreachable();
+} catch (e) {
+ exception = e;
+}
+assertEquals('exception', exception);
diff --git a/deps/v8/test/mjsunit/regress/regress-1120.js b/deps/v8/test/mjsunit/regress/regress-1120.js
new file mode 100644
index 0000000000..c8c06aa5c1
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1120.js
@@ -0,0 +1,33 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that retrieving the extensible value for the global object is
+// working correctly and does not return the bit from the global proxy map.
+
+var obj = this;
+Object.freeze(obj);
+assertFalse(Object.isExtensible(obj));
diff --git a/deps/v8/test/mjsunit/regress/regress-1121.js b/deps/v8/test/mjsunit/regress/regress-1121.js
new file mode 100644
index 0000000000..0ad29cc963
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1121.js
@@ -0,0 +1,34 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=1121
+
+// Test that changing Array.prototype.__proto__ keeps Array functions working.
+
+Array.prototype.__proto__ = null;
+// pop has custom call generator, so we need some beefier function.
+assertEquals([1, 2, 3], [1, 2, 3].slice());
diff --git a/deps/v8/test/mjsunit/regress/regress-1122.js b/deps/v8/test/mjsunit/regress/regress-1122.js
new file mode 100644
index 0000000000..7dc9b248a3
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1122.js
@@ -0,0 +1,55 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we can handle functions with up to 32766 arguments, and that
+// functions with more arguments throw an exception.
+
+// See http://code.google.com/p/v8/issues/detail?id=1122.
+
+function function_with_n_args(n) {
+ test_prefix = 'prefix ';
+ test_suffix = ' suffix';
+ var source = 'test_prefix + (function f(';
+ for (var arg = 0; arg < n ; arg++) {
+ if (arg != 0) source += ',';
+ source += 'arg' + arg;
+ }
+ source += ') { return arg' + (n - n % 2) / 2 + '; })(';
+ for (var arg = 0; arg < n ; arg++) {
+ if (arg != 0) source += ',';
+ source += arg;
+ }
+ source += ') + test_suffix';
+ return eval(source);
+}
+
+assertEquals('prefix 4000 suffix', function_with_n_args(8000));
+assertEquals('prefix 9000 suffix', function_with_n_args(18000));
+assertEquals('prefix 16000 suffix', function_with_n_args(32000));
+
+assertThrows("function_with_n_args(35000)");
+assertThrows("function_with_n_args(100000)");
diff --git a/deps/v8/test/mjsunit/regress/regress-1125.js b/deps/v8/test/mjsunit/regress/regress-1125.js
new file mode 100644
index 0000000000..b0e1cb72a6
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1125.js
@@ -0,0 +1,41 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test a lot of updates to freshly created contexts.
+
+function f(x, y) {
+ with ("abcdefghijxxxxxxxxxx")
+ var y = {};
+}
+
+function g() {
+ f.apply(this, arguments);
+}
+
+for (var i = 0; i < 150000; i++) {
+ g(i);
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-1126.js b/deps/v8/test/mjsunit/regress/regress-1126.js
new file mode 100644
index 0000000000..303583b97e
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1126.js
@@ -0,0 +1,35 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This should properly catch the exception from the setter triggered
+// by the loaded file, and it should not fail an assertion in debug mode.
+
+try {
+ eval('--');
+ assertUnreachable();
+} catch (e) {
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-1129.js b/deps/v8/test/mjsunit/regress/regress-1129.js
new file mode 100644
index 0000000000..37bf9a81c5
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1129.js
@@ -0,0 +1,44 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --verify-heap --expose-gc
+
+// This should not hit an assertion in debug mode.
+
+// Create RegExp that is syntactically correct, but throws a stack overflow
+// during compilation.
+var source = Array(50000).join("(") + "a" + Array(50000).join(")");
+var r = RegExp(source);
+try {
+ // Try to compile in UC16 mode, and drop the exception.
+ r.test("\x80");
+ assertUnreachable();
+} catch (e) {
+}
+
+// Trigger a heap validation.
+gc();
diff --git a/deps/v8/test/mjsunit/regress/regress-1130.js b/deps/v8/test/mjsunit/regress/regress-1130.js
new file mode 100644
index 0000000000..188f3f9214
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1130.js
@@ -0,0 +1,38 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that parser errors can be build up correctly even in the presence
+// of JS accessors on Object's prototype elements.
+
+Object.prototype.__defineGetter__(0, function() { throw 42; } );
+
+try {
+ eval("(function() { const x; var x })")();
+ assertUnreachable();
+} catch (e) {
+ assertTrue(e instanceof TypeError);
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-1131.js b/deps/v8/test/mjsunit/regress/regress-1131.js
new file mode 100644
index 0000000000..a1af9c90ca
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1131.js
@@ -0,0 +1,29 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var nonArray = { length: 4, 0: 42, 2: 37, 0xf7da5000: undefined, 4: 0 };
+Array.prototype.sort.call(nonArray);
diff --git a/deps/v8/test/mjsunit/regress/regress-1132.js b/deps/v8/test/mjsunit/regress/regress-1132.js
new file mode 100644
index 0000000000..4423ecdd34
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1132.js
@@ -0,0 +1,48 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test the case when exception is thrown from the parser when lazy
+// compiling a function.
+
+// Flags: --stack_size=32
+// NOTE: stack size constant above has been empirically chosen.
+// If the test starts to fail in Genesis, consider increasing this constant.
+
+function test() {
+ try {
+ test(1, test(1));
+ } catch(e) {
+ assertFalse(delete e, "deleting catch variable");
+ assertEquals(42, e);
+ }
+}
+
+try {
+ test();
+ assertUnreachable();
+} catch (e) {
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-1146.js b/deps/v8/test/mjsunit/regress/regress-1146.js
new file mode 100644
index 0000000000..e8028ce1d2
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1146.js
@@ -0,0 +1,48 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test keyed calls with different key types.
+function F() {}
+var a = new F();
+function f(i) { return a[i](); }
+
+a.first = function() { return 11; }
+a[0] = function() { return 22; }
+var obj = {};
+a[obj] = function() { return 33; }
+
+// Make object slow-case.
+a.foo = 0;
+delete a.foo;
+// Do multiple calls for IC transitions.
+var b = "first";
+f(b);
+f(b);
+
+assertEquals(11, f(b));
+assertEquals(22, f(0));
+assertEquals(33, f(obj));
diff --git a/deps/v8/test/mjsunit/regress/regress-1149.js b/deps/v8/test/mjsunit/regress/regress-1149.js
new file mode 100644
index 0000000000..d7a7d1b910
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1149.js
@@ -0,0 +1,39 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// We should not try to record duplicate bailout IDs for the 'left-hand
+// side' of a for/in, even if it is a parameter in a function using the
+// arguments object.
+
+function f(x) {
+ for (x in arguments) {
+ for (x in arguments) {
+ }
+ }
+}
+
+f();
diff --git a/deps/v8/test/mjsunit/regress/regress-1150.js b/deps/v8/test/mjsunit/regress/regress-1150.js
new file mode 100644
index 0000000000..57f739a4ac
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1150.js
@@ -0,0 +1,33 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that Object.keys is working correctly on the global object.
+
+var a = 10;
+var global = (function () { return this; }) ();
+var keys = Object.keys(global);
+assertTrue(keys.indexOf("a") > 0);
diff --git a/deps/v8/test/mjsunit/regress/regress-1151.js b/deps/v8/test/mjsunit/regress/regress-1151.js
new file mode 100644
index 0000000000..d36126e6e8
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1151.js
@@ -0,0 +1,32 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we do not try to create prototypes on objects that has the,
+// should_have_prototype flag set to false.
+
+__defineSetter__.__proto__ = function() {};
+__defineSetter__['prototype']
diff --git a/deps/v8/test/mjsunit/regress/regress-1156.js b/deps/v8/test/mjsunit/regress/regress-1156.js
new file mode 100644
index 0000000000..8ec7f817de
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1156.js
@@ -0,0 +1,49 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --nouse-inlining
+
+// Test that we do not crash we invoke builtins from optimized code that
+// is then deoptimized.
+
+function foo(a) {
+ delete a[1];
+ delete a[2];
+ delete a[3];
+ delete a[4];
+ delete a[5];
+ return void 0;
+}
+
+function call_and_deopt() {
+ var b = [1,2,3];
+ foo(b);
+ foo(b);
+ %DeoptimizeFunction(foo);
+}
+
+call_and_deopt();
diff --git a/deps/v8/test/mjsunit/regress/regress-1160.js b/deps/v8/test/mjsunit/regress/regress-1160.js
new file mode 100644
index 0000000000..8e6e29bd4c
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1160.js
@@ -0,0 +1,46 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=1160
+
+// Array.prototype.join uses a temporary array internally. Verify it
+// does not crash and throws an illegal argument exception instead
+// when keyed store on the array does not work as expected because of
+// the setter on its prototype.
+
+try {
+ var N = 100;
+ var array = Array(N);
+ for (var i = 0; i < N; ++i) {
+ array[i] = i;
+ }
+ Array.prototype.__defineSetter__(32, function() { });
+ // The next line throws. We should make it work even with changed
+ // prototype. See http://code.google.com/p/v8/issues/detail?id=1161
+ array.join(",");
+ assertUnreachable();
+} catch (e) { }
diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-72736.js b/deps/v8/test/mjsunit/regress/regress-crbug-72736.js
new file mode 100644
index 0000000000..4b4b145759
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-crbug-72736.js
@@ -0,0 +1,37 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See http://crbug.com/72736
+
+// This tests that Object.defineProperty actually allows to change the value of
+// a non-writable property if configurable is true.
+
+var obj = {};
+Object.defineProperty(obj, 'foo', { value: 10, configurable: true });
+assertEquals(obj.foo, 10);
+Object.defineProperty(obj, 'foo', { value: 20, configurable: true });
+assertEquals(obj.foo, 20);
diff --git a/deps/v8/test/mjsunit/strict-mode.js b/deps/v8/test/mjsunit/strict-mode.js
index ddddfabee4..fbba64ed66 100644
--- a/deps/v8/test/mjsunit/strict-mode.js
+++ b/deps/v8/test/mjsunit/strict-mode.js
@@ -169,13 +169,20 @@ CheckStrictMode("var x = { '1234' : 1, '2345' : 2, '1234' : 3 };", SyntaxError);
CheckStrictMode("var x = { '1234' : 1, '2345' : 2, 1234 : 3 };", SyntaxError);
CheckStrictMode("var x = { 3.14 : 1, 2.71 : 2, 3.14 : 3 };", SyntaxError);
CheckStrictMode("var x = { 3.14 : 1, '3.14' : 2 };", SyntaxError);
-CheckStrictMode("var x = { 123: 1, 123.00000000000000000000000000000000000000000000000000000000000000000001 : 2 }", SyntaxError);
+CheckStrictMode("var x = { \
+ 123: 1, \
+ 123.00000000000000000000000000000000000000000000000000000000000000000001: 2 \
+}", SyntaxError);
// Non-conflicting data properties.
(function StrictModeNonDuplicate() {
"use strict";
var x = { 123 : 1, "0123" : 2 };
- var x = { 123: 1, '123.00000000000000000000000000000000000000000000000000000000000000000001' : 2 }
+ var x = {
+ 123: 1,
+ '123.00000000000000000000000000000000000000000000000000000000000000000001':
+ 2
+ };
})();
// Two getters (non-strict)
@@ -214,23 +221,32 @@ assertThrows("var x = { '12': 1, get 12(){}};", SyntaxError);
CheckStrictMode("function strict() { eval = undefined; }", SyntaxError);
CheckStrictMode("function strict() { arguments = undefined; }", SyntaxError);
CheckStrictMode("function strict() { print(eval = undefined); }", SyntaxError);
-CheckStrictMode("function strict() { print(arguments = undefined); }", SyntaxError);
+CheckStrictMode("function strict() { print(arguments = undefined); }",
+ SyntaxError);
CheckStrictMode("function strict() { var x = eval = undefined; }", SyntaxError);
-CheckStrictMode("function strict() { var x = arguments = undefined; }", SyntaxError);
+CheckStrictMode("function strict() { var x = arguments = undefined; }",
+ SyntaxError);
// Compound assignment to eval or arguments
CheckStrictMode("function strict() { eval *= undefined; }", SyntaxError);
CheckStrictMode("function strict() { arguments /= undefined; }", SyntaxError);
CheckStrictMode("function strict() { print(eval %= undefined); }", SyntaxError);
-CheckStrictMode("function strict() { print(arguments %= undefined); }", SyntaxError);
-CheckStrictMode("function strict() { var x = eval += undefined; }", SyntaxError);
-CheckStrictMode("function strict() { var x = arguments -= undefined; }", SyntaxError);
+CheckStrictMode("function strict() { print(arguments %= undefined); }",
+ SyntaxError);
+CheckStrictMode("function strict() { var x = eval += undefined; }",
+ SyntaxError);
+CheckStrictMode("function strict() { var x = arguments -= undefined; }",
+ SyntaxError);
CheckStrictMode("function strict() { eval <<= undefined; }", SyntaxError);
CheckStrictMode("function strict() { arguments >>= undefined; }", SyntaxError);
-CheckStrictMode("function strict() { print(eval >>>= undefined); }", SyntaxError);
-CheckStrictMode("function strict() { print(arguments &= undefined); }", SyntaxError);
-CheckStrictMode("function strict() { var x = eval ^= undefined; }", SyntaxError);
-CheckStrictMode("function strict() { var x = arguments |= undefined; }", SyntaxError);
+CheckStrictMode("function strict() { print(eval >>>= undefined); }",
+ SyntaxError);
+CheckStrictMode("function strict() { print(arguments &= undefined); }",
+ SyntaxError);
+CheckStrictMode("function strict() { var x = eval ^= undefined; }",
+ SyntaxError);
+CheckStrictMode("function strict() { var x = arguments |= undefined; }",
+ SyntaxError);
// Postfix increment with eval or arguments
CheckStrictMode("function strict() { eval++; }", SyntaxError);
@@ -264,6 +280,17 @@ CheckStrictMode("function strict() { print(--arguments); }", SyntaxError);
CheckStrictMode("function strict() { var x = --eval; }", SyntaxError);
CheckStrictMode("function strict() { var x = --arguments; }", SyntaxError);
+// Delete of an unqualified identifier
+CheckStrictMode("delete unqualified;", SyntaxError);
+CheckStrictMode("function strict() { delete unqualified; }", SyntaxError);
+CheckStrictMode("function function_name() { delete function_name; }",
+ SyntaxError);
+CheckStrictMode("function strict(parameter) { delete parameter; }",
+ SyntaxError);
+CheckStrictMode("function strict() { var variable; delete variable; }",
+ SyntaxError);
+CheckStrictMode("var variable; delete variable;", SyntaxError);
+
// Prefix unary operators other than delete, ++, -- are valid in strict mode
(function StrictModeUnaryOperators() {
"use strict";
@@ -318,21 +345,136 @@ function testFutureReservedWord(word) {
// Function names and arguments when the body is strict
assertThrows("function " + word + " () { 'use strict'; }", SyntaxError);
assertThrows("function foo (" + word + ") 'use strict'; {}", SyntaxError);
- assertThrows("function foo (" + word + ", " + word + ") { 'use strict'; }", SyntaxError);
+ assertThrows("function foo (" + word + ", " + word + ") { 'use strict'; }",
+ SyntaxError);
assertThrows("function foo (a, " + word + ") { 'use strict'; }", SyntaxError);
assertThrows("function foo (" + word + ", a) { 'use strict'; }", SyntaxError);
- assertThrows("function foo (a, " + word + ", b) { 'use strict'; }", SyntaxError);
- assertThrows("var foo = function (" + word + ") { 'use strict'; }", SyntaxError);
+ assertThrows("function foo (a, " + word + ", b) { 'use strict'; }",
+ SyntaxError);
+ assertThrows("var foo = function (" + word + ") { 'use strict'; }",
+ SyntaxError);
// get/set when the body is strict
eval("var x = { get " + word + " () { 'use strict'; } };");
eval("var x = { set " + word + " (value) { 'use strict'; } };");
- assertThrows("var x = { get foo(" + word + ") { 'use strict'; } };", SyntaxError);
- assertThrows("var x = { set foo(" + word + ") { 'use strict'; } };", SyntaxError);
+ assertThrows("var x = { get foo(" + word + ") { 'use strict'; } };",
+ SyntaxError);
+ assertThrows("var x = { set foo(" + word + ") { 'use strict'; } };",
+ SyntaxError);
}
for (var i = 0; i < future_reserved_words.length; i++) {
testFutureReservedWord(future_reserved_words[i]);
}
+function testAssignToUndefined(should_throw) {
+ "use strict";
+ try {
+ possibly_undefined_variable_for_strict_mode_test = "should throw?";
+ } catch (e) {
+ assertTrue(should_throw, "strict mode");
+ assertInstanceof(e, ReferenceError, "strict mode");
+ return;
+ }
+ assertFalse(should_throw, "strict mode");
+}
+
+testAssignToUndefined(true);
+testAssignToUndefined(true);
+testAssignToUndefined(true);
+
+possibly_undefined_variable_for_strict_mode_test = "value";
+
+testAssignToUndefined(false);
+testAssignToUndefined(false);
+testAssignToUndefined(false);
+
+delete possibly_undefined_variable_for_strict_mode_test;
+
+testAssignToUndefined(true);
+testAssignToUndefined(true);
+testAssignToUndefined(true);
+
+function repeat(n, f) {
+ for (var i = 0; i < n; i ++) { f(); }
+}
+
+repeat(10, function() { testAssignToUndefined(true); });
+possibly_undefined_variable_for_strict_mode_test = "value";
+repeat(10, function() { testAssignToUndefined(false); });
+delete possibly_undefined_variable_for_strict_mode_test;
+repeat(10, function() { testAssignToUndefined(true); });
+possibly_undefined_variable_for_strict_mode_test = undefined;
+repeat(10, function() { testAssignToUndefined(false); });
+(function testDeleteNonConfigurable() {
+ function delete_property(o) {
+ "use strict";
+ delete o.property;
+ }
+ function delete_element(o, i) {
+ "use strict";
+ delete o[i];
+ }
+
+ var object = {};
+
+ Object.defineProperty(object, "property", { value: "property_value" });
+ Object.defineProperty(object, "1", { value: "one" });
+ Object.defineProperty(object, 7, { value: "seven" });
+ Object.defineProperty(object, 3.14, { value: "pi" });
+
+ assertThrows(function() { delete_property(object); }, TypeError);
+ assertEquals(object.property, "property_value");
+ assertThrows(function() { delete_element(object, "1"); }, TypeError);
+ assertThrows(function() { delete_element(object, 1); }, TypeError);
+ assertEquals(object[1], "one");
+ assertThrows(function() { delete_element(object, "7"); }, TypeError);
+ assertThrows(function() { delete_element(object, 7); }, TypeError);
+ assertEquals(object[7], "seven");
+ assertThrows(function() { delete_element(object, "3.14"); }, TypeError);
+ assertThrows(function() { delete_element(object, 3.14); }, TypeError);
+ assertEquals(object[3.14], "pi");
+})();
+
+// Not transforming this in Function.call and Function.apply.
+(function testThisTransform() {
+ function non_strict() {
+ return this;
+ }
+ function strict() {
+ "use strict";
+ return this;
+ }
+
+ var global_object = (function() { return this; })();
+ var object = {};
+
+ // Non-strict call.
+ assertTrue(non_strict.call(null) === global_object);
+ assertTrue(non_strict.call(undefined) === global_object);
+ assertEquals(typeof non_strict.call(7), "object");
+ assertEquals(typeof non_strict.call("Hello"), "object");
+ assertTrue(non_strict.call(object) === object);
+
+ // Non-strict apply.
+ assertTrue(non_strict.apply(null) === global_object);
+ assertTrue(non_strict.apply(undefined) === global_object);
+ assertEquals(typeof non_strict.apply(7), "object");
+ assertEquals(typeof non_strict.apply("Hello"), "object");
+ assertTrue(non_strict.apply(object) === object);
+
+ // Strict call.
+ assertTrue(strict.call(null) === null);
+ assertTrue(strict.call(undefined) === undefined);
+ assertEquals(typeof strict.call(7), "number");
+ assertEquals(typeof strict.call("Hello"), "string");
+ assertTrue(strict.call(object) === object);
+
+ // Strict apply.
+ assertTrue(strict.apply(null) === null);
+ assertTrue(strict.apply(undefined) === undefined);
+ assertEquals(typeof strict.apply(7), "number");
+ assertEquals(typeof strict.apply("Hello"), "string");
+ assertTrue(strict.apply(object) === object);
+})();
diff --git a/deps/v8/test/mjsunit/tools/codemap.js b/deps/v8/test/mjsunit/tools/codemap.js
index 06a91e8102..81fb81015e 100644
--- a/deps/v8/test/mjsunit/tools/codemap.js
+++ b/deps/v8/test/mjsunit/tools/codemap.js
@@ -30,7 +30,7 @@
function newCodeEntry(size, name) {
- return new devtools.profiler.CodeMap.CodeEntry(size, name);
+ return new CodeMap.CodeEntry(size, name);
};
@@ -47,7 +47,7 @@ function assertNoEntry(codeMap, addr) {
(function testLibrariesAndStaticCode() {
- var codeMap = new devtools.profiler.CodeMap();
+ var codeMap = new CodeMap();
codeMap.addLibrary(0x1500, newCodeEntry(0x3000, 'lib1'));
codeMap.addLibrary(0x15500, newCodeEntry(0x5000, 'lib2'));
codeMap.addLibrary(0x155500, newCodeEntry(0x10000, 'lib3'));
@@ -97,7 +97,7 @@ function assertNoEntry(codeMap, addr) {
(function testDynamicCode() {
- var codeMap = new devtools.profiler.CodeMap();
+ var codeMap = new CodeMap();
codeMap.addCode(0x1500, newCodeEntry(0x200, 'code1'));
codeMap.addCode(0x1700, newCodeEntry(0x100, 'code2'));
codeMap.addCode(0x1900, newCodeEntry(0x50, 'code3'));
@@ -123,7 +123,7 @@ function assertNoEntry(codeMap, addr) {
(function testCodeMovesAndDeletions() {
- var codeMap = new devtools.profiler.CodeMap();
+ var codeMap = new CodeMap();
codeMap.addCode(0x1500, newCodeEntry(0x200, 'code1'));
codeMap.addCode(0x1700, newCodeEntry(0x100, 'code2'));
assertEntry(codeMap, 'code1', 0x1500);
@@ -139,7 +139,7 @@ function assertNoEntry(codeMap, addr) {
(function testDynamicNamesDuplicates() {
- var codeMap = new devtools.profiler.CodeMap();
+ var codeMap = new CodeMap();
// Code entries with same names but different addresses.
codeMap.addCode(0x1500, newCodeEntry(0x200, 'code'));
codeMap.addCode(0x1700, newCodeEntry(0x100, 'code'));
@@ -152,7 +152,7 @@ function assertNoEntry(codeMap, addr) {
(function testStaticEntriesExport() {
- var codeMap = new devtools.profiler.CodeMap();
+ var codeMap = new CodeMap();
codeMap.addStaticCode(0x1500, newCodeEntry(0x3000, 'lib1'));
codeMap.addStaticCode(0x15500, newCodeEntry(0x5000, 'lib2'));
codeMap.addStaticCode(0x155500, newCodeEntry(0x10000, 'lib3'));
@@ -163,7 +163,7 @@ function assertNoEntry(codeMap, addr) {
(function testDynamicEntriesExport() {
- var codeMap = new devtools.profiler.CodeMap();
+ var codeMap = new CodeMap();
codeMap.addCode(0x1500, newCodeEntry(0x200, 'code1'));
codeMap.addCode(0x1700, newCodeEntry(0x100, 'code2'));
codeMap.addCode(0x1900, newCodeEntry(0x50, 'code3'));
diff --git a/deps/v8/test/mjsunit/tools/csvparser.js b/deps/v8/test/mjsunit/tools/csvparser.js
index 6ac490805f..f1449f6ba4 100644
--- a/deps/v8/test/mjsunit/tools/csvparser.js
+++ b/deps/v8/test/mjsunit/tools/csvparser.js
@@ -28,7 +28,7 @@
// Load CSV parser implementation from <project root>/tools.
// Files: tools/csvparser.js
-var parser = new devtools.profiler.CsvParser();
+var parser = new CsvParser();
assertEquals(
[],
diff --git a/deps/v8/test/mjsunit/tools/profile.js b/deps/v8/test/mjsunit/tools/profile.js
index 9ed851b1af..4df1a08f92 100644
--- a/deps/v8/test/mjsunit/tools/profile.js
+++ b/deps/v8/test/mjsunit/tools/profile.js
@@ -58,7 +58,7 @@ function countNodes(profile, traverseFunc) {
function ProfileTestDriver() {
- this.profile = new devtools.profiler.Profile();
+ this.profile = new Profile();
this.stack_ = [];
this.addFunctions_();
};
diff --git a/deps/v8/test/mjsunit/tools/profile_view.js b/deps/v8/test/mjsunit/tools/profile_view.js
index 3ed1128b67..7f60119a07 100644
--- a/deps/v8/test/mjsunit/tools/profile_view.js
+++ b/deps/v8/test/mjsunit/tools/profile_view.js
@@ -30,7 +30,7 @@
function createNode(name, time, opt_parent) {
- var node = new devtools.profiler.ProfileView.Node(name, time, time, null);
+ var node = new ProfileView.Node(name, time, time, null);
if (opt_parent) {
opt_parent.addChild(node);
}
@@ -61,7 +61,7 @@ function createNode(name, time, opt_parent) {
createNode('d', 4, b3);
createNode('d', 2, b3);
- var view = new devtools.profiler.ProfileView(root);
+ var view = new ProfileView(root);
var flatTree = [];
function fillFlatTree(node) {
diff --git a/deps/v8/test/mjsunit/tools/splaytree.js b/deps/v8/test/mjsunit/tools/splaytree.js
index 3beba0b9f2..5e18796dda 100644
--- a/deps/v8/test/mjsunit/tools/splaytree.js
+++ b/deps/v8/test/mjsunit/tools/splaytree.js
@@ -30,7 +30,7 @@
(function testIsEmpty() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
assertTrue(tree.isEmpty());
tree.insert(0, 'value');
assertFalse(tree.isEmpty());
@@ -38,7 +38,7 @@
(function testExportValues() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
assertArrayEquals([], tree.exportValues());
tree.insert(0, 'value');
assertArrayEquals(['value'], tree.exportValues());
@@ -79,7 +79,7 @@ function createSampleTree() {
(function testSplay() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
tree.root_ = createSampleTree();
assertArrayEquals(['50', '30', '60', '10', '40', '90', '20', '70', '100', '15', '80'],
tree.exportValues());
@@ -93,7 +93,7 @@ function createSampleTree() {
(function testInsert() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
tree.insert(5, 'root');
tree.insert(3, 'left');
assertArrayEquals(['left', 'root'], tree.exportValues());
@@ -103,7 +103,7 @@ function createSampleTree() {
(function testFind() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
tree.insert(5, 'root');
tree.insert(3, 'left');
tree.insert(7, 'right');
@@ -117,7 +117,7 @@ function createSampleTree() {
(function testFindMin() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
assertEquals(null, tree.findMin());
tree.insert(5, 'root');
tree.insert(3, 'left');
@@ -127,7 +127,7 @@ function createSampleTree() {
(function testFindMax() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
assertEquals(null, tree.findMax());
tree.insert(5, 'root');
tree.insert(3, 'left');
@@ -137,7 +137,7 @@ function createSampleTree() {
(function testFindGreatestLessThan() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
assertEquals(null, tree.findGreatestLessThan(10));
tree.insert(5, 'root');
tree.insert(3, 'left');
@@ -151,7 +151,7 @@ function createSampleTree() {
(function testRemove() {
- var tree = new goog.structs.SplayTree();
+ var tree = new SplayTree();
assertThrows('tree.remove(5)');
tree.insert(5, 'root');
tree.insert(3, 'left');
diff --git a/deps/v8/tools/codemap.js b/deps/v8/tools/codemap.js
index 8eb2acbc2a..71a99cc223 100644
--- a/deps/v8/tools/codemap.js
+++ b/deps/v8/tools/codemap.js
@@ -26,36 +26,31 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Initlialize namespaces
-var devtools = devtools || {};
-devtools.profiler = devtools.profiler || {};
-
-
/**
* Constructs a mapper that maps addresses into code entries.
*
* @constructor
*/
-devtools.profiler.CodeMap = function() {
+function CodeMap() {
/**
* Dynamic code entries. Used for JIT compiled code.
*/
- this.dynamics_ = new goog.structs.SplayTree();
+ this.dynamics_ = new SplayTree();
/**
* Name generator for entries having duplicate names.
*/
- this.dynamicsNameGen_ = new devtools.profiler.CodeMap.NameGenerator();
+ this.dynamicsNameGen_ = new CodeMap.NameGenerator();
/**
* Static code entries. Used for statically compiled code.
*/
- this.statics_ = new goog.structs.SplayTree();
+ this.statics_ = new SplayTree();
/**
* Libraries entries. Used for the whole static code libraries.
*/
- this.libraries_ = new goog.structs.SplayTree();
+ this.libraries_ = new SplayTree();
/**
* Map of memory pages occupied with static code.
@@ -67,23 +62,23 @@ devtools.profiler.CodeMap = function() {
/**
* The number of alignment bits in a page address.
*/
-devtools.profiler.CodeMap.PAGE_ALIGNMENT = 12;
+CodeMap.PAGE_ALIGNMENT = 12;
/**
* Page size in bytes.
*/
-devtools.profiler.CodeMap.PAGE_SIZE =
- 1 << devtools.profiler.CodeMap.PAGE_ALIGNMENT;
+CodeMap.PAGE_SIZE =
+ 1 << CodeMap.PAGE_ALIGNMENT;
/**
* Adds a dynamic (i.e. moveable and discardable) code entry.
*
* @param {number} start The starting address.
- * @param {devtools.profiler.CodeMap.CodeEntry} codeEntry Code entry object.
+ * @param {CodeMap.CodeEntry} codeEntry Code entry object.
*/
-devtools.profiler.CodeMap.prototype.addCode = function(start, codeEntry) {
+CodeMap.prototype.addCode = function(start, codeEntry) {
this.dynamics_.insert(start, codeEntry);
};
@@ -95,7 +90,7 @@ devtools.profiler.CodeMap.prototype.addCode = function(start, codeEntry) {
* @param {number} from The starting address of the entry being moved.
* @param {number} to The destination address.
*/
-devtools.profiler.CodeMap.prototype.moveCode = function(from, to) {
+CodeMap.prototype.moveCode = function(from, to) {
var removedNode = this.dynamics_.remove(from);
this.dynamics_.insert(to, removedNode.value);
};
@@ -107,7 +102,7 @@ devtools.profiler.CodeMap.prototype.moveCode = function(from, to) {
*
* @param {number} start The starting address of the entry being deleted.
*/
-devtools.profiler.CodeMap.prototype.deleteCode = function(start) {
+CodeMap.prototype.deleteCode = function(start) {
var removedNode = this.dynamics_.remove(start);
};
@@ -116,9 +111,9 @@ devtools.profiler.CodeMap.prototype.deleteCode = function(start) {
* Adds a library entry.
*
* @param {number} start The starting address.
- * @param {devtools.profiler.CodeMap.CodeEntry} codeEntry Code entry object.
+ * @param {CodeMap.CodeEntry} codeEntry Code entry object.
*/
-devtools.profiler.CodeMap.prototype.addLibrary = function(
+CodeMap.prototype.addLibrary = function(
start, codeEntry) {
this.markPages_(start, start + codeEntry.size);
this.libraries_.insert(start, codeEntry);
@@ -129,9 +124,9 @@ devtools.profiler.CodeMap.prototype.addLibrary = function(
* Adds a static code entry.
*
* @param {number} start The starting address.
- * @param {devtools.profiler.CodeMap.CodeEntry} codeEntry Code entry object.
+ * @param {CodeMap.CodeEntry} codeEntry Code entry object.
*/
-devtools.profiler.CodeMap.prototype.addStaticCode = function(
+CodeMap.prototype.addStaticCode = function(
start, codeEntry) {
this.statics_.insert(start, codeEntry);
};
@@ -140,10 +135,10 @@ devtools.profiler.CodeMap.prototype.addStaticCode = function(
/**
* @private
*/
-devtools.profiler.CodeMap.prototype.markPages_ = function(start, end) {
+CodeMap.prototype.markPages_ = function(start, end) {
for (var addr = start; addr <= end;
- addr += devtools.profiler.CodeMap.PAGE_SIZE) {
- this.pages_[addr >>> devtools.profiler.CodeMap.PAGE_ALIGNMENT] = 1;
+ addr += CodeMap.PAGE_SIZE) {
+ this.pages_[addr >>> CodeMap.PAGE_ALIGNMENT] = 1;
}
};
@@ -151,7 +146,7 @@ devtools.profiler.CodeMap.prototype.markPages_ = function(start, end) {
/**
* @private
*/
-devtools.profiler.CodeMap.prototype.isAddressBelongsTo_ = function(addr, node) {
+CodeMap.prototype.isAddressBelongsTo_ = function(addr, node) {
return addr >= node.key && addr < (node.key + node.value.size);
};
@@ -159,7 +154,7 @@ devtools.profiler.CodeMap.prototype.isAddressBelongsTo_ = function(addr, node) {
/**
* @private
*/
-devtools.profiler.CodeMap.prototype.findInTree_ = function(tree, addr) {
+CodeMap.prototype.findInTree_ = function(tree, addr) {
var node = tree.findGreatestLessThan(addr);
return node && this.isAddressBelongsTo_(addr, node) ? node.value : null;
};
@@ -171,8 +166,8 @@ devtools.profiler.CodeMap.prototype.findInTree_ = function(tree, addr) {
*
* @param {number} addr Address.
*/
-devtools.profiler.CodeMap.prototype.findEntry = function(addr) {
- var pageAddr = addr >>> devtools.profiler.CodeMap.PAGE_ALIGNMENT;
+CodeMap.prototype.findEntry = function(addr) {
+ var pageAddr = addr >>> CodeMap.PAGE_ALIGNMENT;
if (pageAddr in this.pages_) {
// Static code entries can contain "holes" of unnamed code.
// In this case, the whole library is assigned to this address.
@@ -200,7 +195,7 @@ devtools.profiler.CodeMap.prototype.findEntry = function(addr) {
*
* @param {number} addr Address.
*/
-devtools.profiler.CodeMap.prototype.findDynamicEntryByStartAddress =
+CodeMap.prototype.findDynamicEntryByStartAddress =
function(addr) {
var node = this.dynamics_.find(addr);
return node ? node.value : null;
@@ -210,7 +205,7 @@ devtools.profiler.CodeMap.prototype.findDynamicEntryByStartAddress =
/**
* Returns an array of all dynamic code entries.
*/
-devtools.profiler.CodeMap.prototype.getAllDynamicEntries = function() {
+CodeMap.prototype.getAllDynamicEntries = function() {
return this.dynamics_.exportValues();
};
@@ -218,7 +213,7 @@ devtools.profiler.CodeMap.prototype.getAllDynamicEntries = function() {
/**
* Returns an array of all static code entries.
*/
-devtools.profiler.CodeMap.prototype.getAllStaticEntries = function() {
+CodeMap.prototype.getAllStaticEntries = function() {
return this.statics_.exportValues();
};
@@ -226,7 +221,7 @@ devtools.profiler.CodeMap.prototype.getAllStaticEntries = function() {
/**
* Returns an array of all libraries entries.
*/
-devtools.profiler.CodeMap.prototype.getAllLibrariesEntries = function() {
+CodeMap.prototype.getAllLibrariesEntries = function() {
return this.libraries_.exportValues();
};
@@ -238,29 +233,29 @@ devtools.profiler.CodeMap.prototype.getAllLibrariesEntries = function() {
* @param {string} opt_name Code entry name.
* @constructor
*/
-devtools.profiler.CodeMap.CodeEntry = function(size, opt_name) {
+CodeMap.CodeEntry = function(size, opt_name) {
this.size = size;
this.name = opt_name || '';
this.nameUpdated_ = false;
};
-devtools.profiler.CodeMap.CodeEntry.prototype.getName = function() {
+CodeMap.CodeEntry.prototype.getName = function() {
return this.name;
};
-devtools.profiler.CodeMap.CodeEntry.prototype.toString = function() {
+CodeMap.CodeEntry.prototype.toString = function() {
return this.name + ': ' + this.size.toString(16);
};
-devtools.profiler.CodeMap.NameGenerator = function() {
+CodeMap.NameGenerator = function() {
this.knownNames_ = {};
};
-devtools.profiler.CodeMap.NameGenerator.prototype.getName = function(name) {
+CodeMap.NameGenerator.prototype.getName = function(name) {
if (!(name in this.knownNames_)) {
this.knownNames_[name] = 0;
return name;
diff --git a/deps/v8/tools/csvparser.js b/deps/v8/tools/csvparser.js
index 6e101e206b..c7d46b535c 100644
--- a/deps/v8/tools/csvparser.js
+++ b/deps/v8/tools/csvparser.js
@@ -26,15 +26,10 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Initlialize namespaces.
-var devtools = devtools || {};
-devtools.profiler = devtools.profiler || {};
-
-
/**
* Creates a CSV lines parser.
*/
-devtools.profiler.CsvParser = function() {
+function CsvParser() {
};
@@ -42,14 +37,14 @@ devtools.profiler.CsvParser = function() {
* A regex for matching a CSV field.
* @private
*/
-devtools.profiler.CsvParser.CSV_FIELD_RE_ = /^"((?:[^"]|"")*)"|([^,]*)/;
+CsvParser.CSV_FIELD_RE_ = /^"((?:[^"]|"")*)"|([^,]*)/;
/**
* A regex for matching a double quote.
* @private
*/
-devtools.profiler.CsvParser.DOUBLE_QUOTE_RE_ = /""/g;
+CsvParser.DOUBLE_QUOTE_RE_ = /""/g;
/**
@@ -57,9 +52,9 @@ devtools.profiler.CsvParser.DOUBLE_QUOTE_RE_ = /""/g;
*
* @param {string} line Input line.
*/
-devtools.profiler.CsvParser.prototype.parseLine = function(line) {
- var fieldRe = devtools.profiler.CsvParser.CSV_FIELD_RE_;
- var doubleQuoteRe = devtools.profiler.CsvParser.DOUBLE_QUOTE_RE_;
+CsvParser.prototype.parseLine = function(line) {
+ var fieldRe = CsvParser.CSV_FIELD_RE_;
+ var doubleQuoteRe = CsvParser.DOUBLE_QUOTE_RE_;
var pos = 0;
var endPos = line.length;
var fields = [];
diff --git a/deps/v8/tools/gyp/v8.gyp b/deps/v8/tools/gyp/v8.gyp
index f73c7e141c..15185671b4 100644
--- a/deps/v8/tools/gyp/v8.gyp
+++ b/deps/v8/tools/gyp/v8.gyp
@@ -466,8 +466,6 @@
'../../src/objects-visiting.h',
'../../src/objects.cc',
'../../src/objects.h',
- '../../src/oprofile-agent.h',
- '../../src/oprofile-agent.cc',
'../../src/parser.cc',
'../../src/parser.h',
'../../src/platform.h',
diff --git a/deps/v8/tools/logreader.js b/deps/v8/tools/logreader.js
index 50e3aa45c5..315e721276 100644
--- a/deps/v8/tools/logreader.js
+++ b/deps/v8/tools/logreader.js
@@ -29,10 +29,6 @@
* @fileoverview Log Reader is used to process log file produced by V8.
*/
-// Initlialize namespaces
-var devtools = devtools || {};
-devtools.profiler = devtools.profiler || {};
-
/**
* Base class for processing log files.
@@ -41,7 +37,7 @@ devtools.profiler = devtools.profiler || {};
* log records.
* @constructor
*/
-devtools.profiler.LogReader = function(dispatchTable) {
+function LogReader(dispatchTable) {
/**
* @type {Array.<Object>}
*/
@@ -55,9 +51,9 @@ devtools.profiler.LogReader = function(dispatchTable) {
/**
* CSV lines parser.
- * @type {devtools.profiler.CsvParser}
+ * @type {CsvParser}
*/
- this.csvParser_ = new devtools.profiler.CsvParser();
+ this.csvParser_ = new CsvParser();
};
@@ -66,7 +62,7 @@ devtools.profiler.LogReader = function(dispatchTable) {
*
* @param {string} str Error message.
*/
-devtools.profiler.LogReader.prototype.printError = function(str) {
+LogReader.prototype.printError = function(str) {
// Do nothing.
};
@@ -76,7 +72,7 @@ devtools.profiler.LogReader.prototype.printError = function(str) {
*
* @param {string} chunk A portion of log.
*/
-devtools.profiler.LogReader.prototype.processLogChunk = function(chunk) {
+LogReader.prototype.processLogChunk = function(chunk) {
this.processLog_(chunk.split('\n'));
};
@@ -86,7 +82,7 @@ devtools.profiler.LogReader.prototype.processLogChunk = function(chunk) {
*
* @param {string} line A line of log.
*/
-devtools.profiler.LogReader.prototype.processLogLine = function(line) {
+LogReader.prototype.processLogLine = function(line) {
this.processLog_([line]);
};
@@ -99,7 +95,7 @@ devtools.profiler.LogReader.prototype.processLogLine = function(line) {
* @param {Array.<string>} stack String representation of a stack.
* @return {Array.<number>} Processed stack.
*/
-devtools.profiler.LogReader.prototype.processStack = function(pc, func, stack) {
+LogReader.prototype.processStack = function(pc, func, stack) {
var fullStack = func ? [pc, func] : [pc];
var prevFrame = pc;
for (var i = 0, n = stack.length; i < n; ++i) {
@@ -124,7 +120,7 @@ devtools.profiler.LogReader.prototype.processStack = function(pc, func, stack) {
* @param {!Object} dispatch Dispatch record.
* @return {boolean} True if dispatch must be skipped.
*/
-devtools.profiler.LogReader.prototype.skipDispatch = function(dispatch) {
+LogReader.prototype.skipDispatch = function(dispatch) {
return false;
};
@@ -135,7 +131,7 @@ devtools.profiler.LogReader.prototype.skipDispatch = function(dispatch) {
* @param {Array.<string>} fields Log record.
* @private
*/
-devtools.profiler.LogReader.prototype.dispatchLogRow_ = function(fields) {
+LogReader.prototype.dispatchLogRow_ = function(fields) {
// Obtain the dispatch.
var command = fields[0];
if (!(command in this.dispatchTable_)) {
@@ -173,7 +169,7 @@ devtools.profiler.LogReader.prototype.dispatchLogRow_ = function(fields) {
* @param {Array.<string>} lines Log lines.
* @private
*/
-devtools.profiler.LogReader.prototype.processLog_ = function(lines) {
+LogReader.prototype.processLog_ = function(lines) {
for (var i = 0, n = lines.length; i < n; ++i, ++this.lineNum_) {
var line = lines[i];
if (!line) {
diff --git a/deps/v8/tools/oprofile/annotate b/deps/v8/tools/oprofile/annotate
deleted file mode 100755
index a6a8545bd6..0000000000
--- a/deps/v8/tools/oprofile/annotate
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-# Source common stuff.
-. `cd $(dirname "$0");pwd`/common
-
-opannotate --assembly --session-dir="$OPROFILE_SESSION_DIR" "$shell_exec" "$@"
-
diff --git a/deps/v8/tools/oprofile/common b/deps/v8/tools/oprofile/common
deleted file mode 100755
index fd00207ab0..0000000000
--- a/deps/v8/tools/oprofile/common
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-# Determine the session directory to use for oprofile.
-[ "$OPROFILE_SESSION_DIR" ] || OPROFILE_SESSION_DIR=/tmp/oprofv8
-
-# If no executable passed as the first parameter assume V8 release mode shell.
-if [[ -x $1 ]]
-then
- shell_exec=`readlink -f "$1"`
- # Any additional parameters are for the oprofile command.
- shift
-else
- oprofile_tools_path=`cd $(dirname "$0");pwd`
- [ "$V8_SHELL_DIR" ] || V8_SHELL_DIR=$oprofile_tools_path/../..
- shell_exec=$V8_SHELL_DIR/shell
-fi
-
-alias sudo_opcontrol='sudo opcontrol --session-dir="$OPROFILE_SESSION_DIR"'
-
diff --git a/deps/v8/tools/oprofile/dump b/deps/v8/tools/oprofile/dump
deleted file mode 100755
index 17bb0a1b08..0000000000
--- a/deps/v8/tools/oprofile/dump
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-# Source common stuff.
-. `cd $(dirname "$0");pwd`/common
-
-sudo_opcontrol --dump "@$"
-
diff --git a/deps/v8/tools/oprofile/report b/deps/v8/tools/oprofile/report
deleted file mode 100755
index b7f28b9c45..0000000000
--- a/deps/v8/tools/oprofile/report
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-# Source common stuff.
-. `cd $(dirname "$0");pwd`/common
-
-opreport --symbols --session-dir="$OPROFILE_SESSION_DIR" "$shell_exec" "$@"
-
diff --git a/deps/v8/tools/oprofile/reset b/deps/v8/tools/oprofile/reset
deleted file mode 100755
index edb707110f..0000000000
--- a/deps/v8/tools/oprofile/reset
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-# Source common stuff.
-. `cd $(dirname "$0");pwd`/common
-
-sudo_opcontrol --reset "$@"
-
diff --git a/deps/v8/tools/oprofile/run b/deps/v8/tools/oprofile/run
deleted file mode 100755
index 0a92470a01..0000000000
--- a/deps/v8/tools/oprofile/run
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-
-# Source common stuff.
-. `cd $(dirname "$0");pwd`/common
-
-# Reset oprofile samples.
-sudo_opcontrol --reset
-
-# Run the executable to profile with the correct arguments.
-"$shell_exec" --oprofile "$@"
-
-# Flush oprofile data including the generated code into ELF binaries.
-sudo_opcontrol --dump
-
diff --git a/deps/v8/tools/oprofile/shutdown b/deps/v8/tools/oprofile/shutdown
deleted file mode 100755
index 8ebb72f06b..0000000000
--- a/deps/v8/tools/oprofile/shutdown
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-# Source common stuff.
-. `cd $(dirname "$0");pwd`/common
-
-sudo_opcontrol --shutdown "$@"
-
diff --git a/deps/v8/tools/oprofile/start b/deps/v8/tools/oprofile/start
deleted file mode 100755
index 059e4b84c1..0000000000
--- a/deps/v8/tools/oprofile/start
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-# Source common stuff.
-. `cd $(dirname "$0");pwd`/common
-
-sudo_opcontrol --start --no-vmlinux "$@"
-
diff --git a/deps/v8/tools/profile.js b/deps/v8/tools/profile.js
index b2de6490e0..03bee8397d 100644
--- a/deps/v8/tools/profile.js
+++ b/deps/v8/tools/profile.js
@@ -26,27 +26,22 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Initlialize namespaces
-var devtools = devtools || {};
-devtools.profiler = devtools.profiler || {};
-
-
/**
* Creates a profile object for processing profiling-related events
* and calculating function execution times.
*
* @constructor
*/
-devtools.profiler.Profile = function() {
- this.codeMap_ = new devtools.profiler.CodeMap();
- this.topDownTree_ = new devtools.profiler.CallTree();
- this.bottomUpTree_ = new devtools.profiler.CallTree();
+function Profile() {
+ this.codeMap_ = new CodeMap();
+ this.topDownTree_ = new CallTree();
+ this.bottomUpTree_ = new CallTree();
};
/**
* Version of profiler log.
*/
-devtools.profiler.Profile.VERSION = 2;
+Profile.VERSION = 2;
/**
@@ -55,7 +50,7 @@ devtools.profiler.Profile.VERSION = 2;
*
* @param {string} name Function name.
*/
-devtools.profiler.Profile.prototype.skipThisFunction = function(name) {
+Profile.prototype.skipThisFunction = function(name) {
return false;
};
@@ -66,7 +61,7 @@ devtools.profiler.Profile.prototype.skipThisFunction = function(name) {
*
* @enum {number}
*/
-devtools.profiler.Profile.Operation = {
+Profile.Operation = {
MOVE: 0,
DELETE: 1,
TICK: 2
@@ -76,7 +71,7 @@ devtools.profiler.Profile.Operation = {
/**
* Called whenever the specified operation has failed finding a function
* containing the specified address. Should be overriden by subclasses.
- * See the devtools.profiler.Profile.Operation enum for the list of
+ * See the Profile.Operation enum for the list of
* possible operations.
*
* @param {number} operation Operation.
@@ -85,7 +80,7 @@ devtools.profiler.Profile.Operation = {
* during stack strace processing, specifies a position of the frame
* containing the address.
*/
-devtools.profiler.Profile.prototype.handleUnknownCode = function(
+Profile.prototype.handleUnknownCode = function(
operation, addr, opt_stackPos) {
};
@@ -97,9 +92,9 @@ devtools.profiler.Profile.prototype.handleUnknownCode = function(
* @param {number} startAddr Starting address.
* @param {number} endAddr Ending address.
*/
-devtools.profiler.Profile.prototype.addLibrary = function(
+Profile.prototype.addLibrary = function(
name, startAddr, endAddr) {
- var entry = new devtools.profiler.CodeMap.CodeEntry(
+ var entry = new CodeMap.CodeEntry(
endAddr - startAddr, name);
this.codeMap_.addLibrary(startAddr, entry);
return entry;
@@ -113,9 +108,9 @@ devtools.profiler.Profile.prototype.addLibrary = function(
* @param {number} startAddr Starting address.
* @param {number} endAddr Ending address.
*/
-devtools.profiler.Profile.prototype.addStaticCode = function(
+Profile.prototype.addStaticCode = function(
name, startAddr, endAddr) {
- var entry = new devtools.profiler.CodeMap.CodeEntry(
+ var entry = new CodeMap.CodeEntry(
endAddr - startAddr, name);
this.codeMap_.addStaticCode(startAddr, entry);
return entry;
@@ -130,9 +125,9 @@ devtools.profiler.Profile.prototype.addStaticCode = function(
* @param {number} start Starting address.
* @param {number} size Code entry size.
*/
-devtools.profiler.Profile.prototype.addCode = function(
+Profile.prototype.addCode = function(
type, name, start, size) {
- var entry = new devtools.profiler.Profile.DynamicCodeEntry(size, type, name);
+ var entry = new Profile.DynamicCodeEntry(size, type, name);
this.codeMap_.addCode(start, entry);
return entry;
};
@@ -144,7 +139,7 @@ devtools.profiler.Profile.prototype.addCode = function(
* @param {number} aliasAddr Alias address.
* @param {number} addr Code entry address.
*/
-devtools.profiler.Profile.prototype.addCodeAlias = function(
+Profile.prototype.addCodeAlias = function(
aliasAddr, addr) {
var entry = this.codeMap_.findDynamicEntryByStartAddress(addr);
if (entry) {
@@ -159,11 +154,11 @@ devtools.profiler.Profile.prototype.addCodeAlias = function(
* @param {number} from Current code entry address.
* @param {number} to New code entry address.
*/
-devtools.profiler.Profile.prototype.moveCode = function(from, to) {
+Profile.prototype.moveCode = function(from, to) {
try {
this.codeMap_.moveCode(from, to);
} catch (e) {
- this.handleUnknownCode(devtools.profiler.Profile.Operation.MOVE, from);
+ this.handleUnknownCode(Profile.Operation.MOVE, from);
}
};
@@ -173,11 +168,11 @@ devtools.profiler.Profile.prototype.moveCode = function(from, to) {
*
* @param {number} start Starting address.
*/
-devtools.profiler.Profile.prototype.deleteCode = function(start) {
+Profile.prototype.deleteCode = function(start) {
try {
this.codeMap_.deleteCode(start);
} catch (e) {
- this.handleUnknownCode(devtools.profiler.Profile.Operation.DELETE, start);
+ this.handleUnknownCode(Profile.Operation.DELETE, start);
}
};
@@ -188,7 +183,7 @@ devtools.profiler.Profile.prototype.deleteCode = function(start) {
* @param {number} from Current code entry address.
* @param {number} to New code entry address.
*/
-devtools.profiler.Profile.prototype.safeMoveDynamicCode = function(from, to) {
+Profile.prototype.safeMoveDynamicCode = function(from, to) {
if (this.codeMap_.findDynamicEntryByStartAddress(from)) {
this.codeMap_.moveCode(from, to);
}
@@ -200,7 +195,7 @@ devtools.profiler.Profile.prototype.safeMoveDynamicCode = function(from, to) {
*
* @param {number} start Starting address.
*/
-devtools.profiler.Profile.prototype.safeDeleteDynamicCode = function(start) {
+Profile.prototype.safeDeleteDynamicCode = function(start) {
if (this.codeMap_.findDynamicEntryByStartAddress(start)) {
this.codeMap_.deleteCode(start);
}
@@ -212,7 +207,7 @@ devtools.profiler.Profile.prototype.safeDeleteDynamicCode = function(start) {
*
* @param {number} addr Entry address.
*/
-devtools.profiler.Profile.prototype.findEntry = function(addr) {
+Profile.prototype.findEntry = function(addr) {
return this.codeMap_.findEntry(addr);
};
@@ -223,7 +218,7 @@ devtools.profiler.Profile.prototype.findEntry = function(addr) {
*
* @param {Array<number>} stack Stack sample.
*/
-devtools.profiler.Profile.prototype.recordTick = function(stack) {
+Profile.prototype.recordTick = function(stack) {
var processedStack = this.resolveAndFilterFuncs_(stack);
this.bottomUpTree_.addPath(processedStack);
processedStack.reverse();
@@ -237,7 +232,7 @@ devtools.profiler.Profile.prototype.recordTick = function(stack) {
*
* @param {Array<number>} stack Stack sample.
*/
-devtools.profiler.Profile.prototype.resolveAndFilterFuncs_ = function(stack) {
+Profile.prototype.resolveAndFilterFuncs_ = function(stack) {
var result = [];
for (var i = 0; i < stack.length; ++i) {
var entry = this.codeMap_.findEntry(stack[i]);
@@ -248,7 +243,7 @@ devtools.profiler.Profile.prototype.resolveAndFilterFuncs_ = function(stack) {
}
} else {
this.handleUnknownCode(
- devtools.profiler.Profile.Operation.TICK, stack[i], i);
+ Profile.Operation.TICK, stack[i], i);
}
}
return result;
@@ -258,9 +253,9 @@ devtools.profiler.Profile.prototype.resolveAndFilterFuncs_ = function(stack) {
/**
* Performs a BF traversal of the top down call graph.
*
- * @param {function(devtools.profiler.CallTree.Node)} f Visitor function.
+ * @param {function(CallTree.Node)} f Visitor function.
*/
-devtools.profiler.Profile.prototype.traverseTopDownTree = function(f) {
+Profile.prototype.traverseTopDownTree = function(f) {
this.topDownTree_.traverse(f);
};
@@ -268,9 +263,9 @@ devtools.profiler.Profile.prototype.traverseTopDownTree = function(f) {
/**
* Performs a BF traversal of the bottom up call graph.
*
- * @param {function(devtools.profiler.CallTree.Node)} f Visitor function.
+ * @param {function(CallTree.Node)} f Visitor function.
*/
-devtools.profiler.Profile.prototype.traverseBottomUpTree = function(f) {
+Profile.prototype.traverseBottomUpTree = function(f) {
this.bottomUpTree_.traverse(f);
};
@@ -281,7 +276,7 @@ devtools.profiler.Profile.prototype.traverseBottomUpTree = function(f) {
*
* @param {string} opt_label Node label.
*/
-devtools.profiler.Profile.prototype.getTopDownProfile = function(opt_label) {
+Profile.prototype.getTopDownProfile = function(opt_label) {
return this.getTreeProfile_(this.topDownTree_, opt_label);
};
@@ -292,7 +287,7 @@ devtools.profiler.Profile.prototype.getTopDownProfile = function(opt_label) {
*
* @param {string} opt_label Node label.
*/
-devtools.profiler.Profile.prototype.getBottomUpProfile = function(opt_label) {
+Profile.prototype.getBottomUpProfile = function(opt_label) {
return this.getTreeProfile_(this.bottomUpTree_, opt_label);
};
@@ -300,10 +295,10 @@ devtools.profiler.Profile.prototype.getBottomUpProfile = function(opt_label) {
/**
* Helper function for calculating a tree profile.
*
- * @param {devtools.profiler.Profile.CallTree} tree Call tree.
+ * @param {Profile.CallTree} tree Call tree.
* @param {string} opt_label Node label.
*/
-devtools.profiler.Profile.prototype.getTreeProfile_ = function(tree, opt_label) {
+Profile.prototype.getTreeProfile_ = function(tree, opt_label) {
if (!opt_label) {
tree.computeTotalWeights();
return tree;
@@ -321,9 +316,9 @@ devtools.profiler.Profile.prototype.getTreeProfile_ = function(tree, opt_label)
*
* @param {string} opt_label Starting node label.
*/
-devtools.profiler.Profile.prototype.getFlatProfile = function(opt_label) {
- var counters = new devtools.profiler.CallTree();
- var rootLabel = opt_label || devtools.profiler.CallTree.ROOT_NODE_LABEL;
+Profile.prototype.getFlatProfile = function(opt_label) {
+ var counters = new CallTree();
+ var rootLabel = opt_label || CallTree.ROOT_NODE_LABEL;
var precs = {};
precs[rootLabel] = 0;
var root = counters.findOrAddChild(rootLabel);
@@ -378,8 +373,8 @@ devtools.profiler.Profile.prototype.getFlatProfile = function(opt_label) {
* @param {string} name Function name.
* @constructor
*/
-devtools.profiler.Profile.DynamicCodeEntry = function(size, type, name) {
- devtools.profiler.CodeMap.CodeEntry.call(this, size, name);
+Profile.DynamicCodeEntry = function(size, type, name) {
+ CodeMap.CodeEntry.call(this, size, name);
this.type = type;
};
@@ -387,7 +382,7 @@ devtools.profiler.Profile.DynamicCodeEntry = function(size, type, name) {
/**
* Returns node name.
*/
-devtools.profiler.Profile.DynamicCodeEntry.prototype.getName = function() {
+Profile.DynamicCodeEntry.prototype.getName = function() {
var name = this.name;
if (name.length == 0) {
name = '<anonymous>';
@@ -402,12 +397,12 @@ devtools.profiler.Profile.DynamicCodeEntry.prototype.getName = function() {
/**
* Returns raw node name (without type decoration).
*/
-devtools.profiler.Profile.DynamicCodeEntry.prototype.getRawName = function() {
+Profile.DynamicCodeEntry.prototype.getRawName = function() {
return this.name;
};
-devtools.profiler.Profile.DynamicCodeEntry.prototype.isJSFunction = function() {
+Profile.DynamicCodeEntry.prototype.isJSFunction = function() {
return this.type == "Function" ||
this.type == "LazyCompile" ||
this.type == "Script";
@@ -419,28 +414,28 @@ devtools.profiler.Profile.DynamicCodeEntry.prototype.isJSFunction = function() {
*
* @constructor
*/
-devtools.profiler.CallTree = function() {
- this.root_ = new devtools.profiler.CallTree.Node(
- devtools.profiler.CallTree.ROOT_NODE_LABEL);
+function CallTree() {
+ this.root_ = new CallTree.Node(
+ CallTree.ROOT_NODE_LABEL);
};
/**
* The label of the root node.
*/
-devtools.profiler.CallTree.ROOT_NODE_LABEL = '';
+CallTree.ROOT_NODE_LABEL = '';
/**
* @private
*/
-devtools.profiler.CallTree.prototype.totalsComputed_ = false;
+CallTree.prototype.totalsComputed_ = false;
/**
* Returns the tree root.
*/
-devtools.profiler.CallTree.prototype.getRoot = function() {
+CallTree.prototype.getRoot = function() {
return this.root_;
};
@@ -450,7 +445,7 @@ devtools.profiler.CallTree.prototype.getRoot = function() {
*
* @param {Array<string>} path Call path.
*/
-devtools.profiler.CallTree.prototype.addPath = function(path) {
+CallTree.prototype.addPath = function(path) {
if (path.length == 0) {
return;
}
@@ -470,7 +465,7 @@ devtools.profiler.CallTree.prototype.addPath = function(path) {
*
* @param {string} label Child node label.
*/
-devtools.profiler.CallTree.prototype.findOrAddChild = function(label) {
+CallTree.prototype.findOrAddChild = function(label) {
return this.root_.findOrAddChild(label);
};
@@ -491,8 +486,8 @@ devtools.profiler.CallTree.prototype.findOrAddChild = function(label) {
*
* @param {string} label The label of the new root node.
*/
-devtools.profiler.CallTree.prototype.cloneSubtree = function(label) {
- var subTree = new devtools.profiler.CallTree();
+CallTree.prototype.cloneSubtree = function(label) {
+ var subTree = new CallTree();
this.traverse(function(node, parent) {
if (!parent && node.label != label) {
return null;
@@ -508,7 +503,7 @@ devtools.profiler.CallTree.prototype.cloneSubtree = function(label) {
/**
* Computes total weights in the call graph.
*/
-devtools.profiler.CallTree.prototype.computeTotalWeights = function() {
+CallTree.prototype.computeTotalWeights = function() {
if (this.totalsComputed_) {
return;
}
@@ -529,10 +524,10 @@ devtools.profiler.CallTree.prototype.computeTotalWeights = function() {
* return nodeClone;
* });
*
- * @param {function(devtools.profiler.CallTree.Node, *)} f Visitor function.
+ * @param {function(CallTree.Node, *)} f Visitor function.
* The second parameter is the result of calling 'f' on the parent node.
*/
-devtools.profiler.CallTree.prototype.traverse = function(f) {
+CallTree.prototype.traverse = function(f) {
var pairsToProcess = new ConsArray();
pairsToProcess.concat([{node: this.root_, param: null}]);
while (!pairsToProcess.atEnd()) {
@@ -550,12 +545,12 @@ devtools.profiler.CallTree.prototype.traverse = function(f) {
/**
* Performs an indepth call graph traversal.
*
- * @param {function(devtools.profiler.CallTree.Node)} enter A function called
+ * @param {function(CallTree.Node)} enter A function called
* prior to visiting node's children.
- * @param {function(devtools.profiler.CallTree.Node)} exit A function called
+ * @param {function(CallTree.Node)} exit A function called
* after visiting node's children.
*/
-devtools.profiler.CallTree.prototype.traverseInDepth = function(enter, exit) {
+CallTree.prototype.traverseInDepth = function(enter, exit) {
function traverse(node) {
enter(node);
node.forEachChild(traverse);
@@ -569,9 +564,9 @@ devtools.profiler.CallTree.prototype.traverseInDepth = function(enter, exit) {
* Constructs a call graph node.
*
* @param {string} label Node label.
- * @param {devtools.profiler.CallTree.Node} opt_parent Node parent.
+ * @param {CallTree.Node} opt_parent Node parent.
*/
-devtools.profiler.CallTree.Node = function(label, opt_parent) {
+CallTree.Node = function(label, opt_parent) {
this.label = label;
this.parent = opt_parent;
this.children = {};
@@ -583,14 +578,14 @@ devtools.profiler.CallTree.Node = function(label, opt_parent) {
* a call path).
* @type {number}
*/
-devtools.profiler.CallTree.Node.prototype.selfWeight = 0;
+CallTree.Node.prototype.selfWeight = 0;
/**
* Node total weight (includes weights of all children).
* @type {number}
*/
-devtools.profiler.CallTree.Node.prototype.totalWeight = 0;
+CallTree.Node.prototype.totalWeight = 0;
/**
@@ -598,8 +593,8 @@ devtools.profiler.CallTree.Node.prototype.totalWeight = 0;
*
* @param {string} label Child node label.
*/
-devtools.profiler.CallTree.Node.prototype.addChild = function(label) {
- var child = new devtools.profiler.CallTree.Node(label, this);
+CallTree.Node.prototype.addChild = function(label) {
+ var child = new CallTree.Node(label, this);
this.children[label] = child;
return child;
};
@@ -608,7 +603,7 @@ devtools.profiler.CallTree.Node.prototype.addChild = function(label) {
/**
* Computes node's total weight.
*/
-devtools.profiler.CallTree.Node.prototype.computeTotalWeight =
+CallTree.Node.prototype.computeTotalWeight =
function() {
var totalWeight = this.selfWeight;
this.forEachChild(function(child) {
@@ -620,7 +615,7 @@ devtools.profiler.CallTree.Node.prototype.computeTotalWeight =
/**
* Returns all node's children as an array.
*/
-devtools.profiler.CallTree.Node.prototype.exportChildren = function() {
+CallTree.Node.prototype.exportChildren = function() {
var result = [];
this.forEachChild(function (node) { result.push(node); });
return result;
@@ -632,7 +627,7 @@ devtools.profiler.CallTree.Node.prototype.exportChildren = function() {
*
* @param {string} label Child node label.
*/
-devtools.profiler.CallTree.Node.prototype.findChild = function(label) {
+CallTree.Node.prototype.findChild = function(label) {
return this.children[label] || null;
};
@@ -643,7 +638,7 @@ devtools.profiler.CallTree.Node.prototype.findChild = function(label) {
*
* @param {string} label Child node label.
*/
-devtools.profiler.CallTree.Node.prototype.findOrAddChild = function(label) {
+CallTree.Node.prototype.findOrAddChild = function(label) {
return this.findChild(label) || this.addChild(label);
};
@@ -651,9 +646,9 @@ devtools.profiler.CallTree.Node.prototype.findOrAddChild = function(label) {
/**
* Calls the specified function for every child.
*
- * @param {function(devtools.profiler.CallTree.Node)} f Visitor function.
+ * @param {function(CallTree.Node)} f Visitor function.
*/
-devtools.profiler.CallTree.Node.prototype.forEachChild = function(f) {
+CallTree.Node.prototype.forEachChild = function(f) {
for (var c in this.children) {
f(this.children[c]);
}
@@ -663,9 +658,9 @@ devtools.profiler.CallTree.Node.prototype.forEachChild = function(f) {
/**
* Walks up from the current node up to the call tree root.
*
- * @param {function(devtools.profiler.CallTree.Node)} f Visitor function.
+ * @param {function(CallTree.Node)} f Visitor function.
*/
-devtools.profiler.CallTree.Node.prototype.walkUpToRoot = function(f) {
+CallTree.Node.prototype.walkUpToRoot = function(f) {
for (var curr = this; curr != null; curr = curr.parent) {
f(curr);
}
@@ -676,9 +671,9 @@ devtools.profiler.CallTree.Node.prototype.walkUpToRoot = function(f) {
* Tries to find a node with the specified path.
*
* @param {Array<string>} labels The path.
- * @param {function(devtools.profiler.CallTree.Node)} opt_f Visitor function.
+ * @param {function(CallTree.Node)} opt_f Visitor function.
*/
-devtools.profiler.CallTree.Node.prototype.descendToChild = function(
+CallTree.Node.prototype.descendToChild = function(
labels, opt_f) {
for (var pos = 0, curr = this; pos < labels.length && curr != null; pos++) {
var child = curr.findChild(labels[pos]);
diff --git a/deps/v8/tools/profile_view.js b/deps/v8/tools/profile_view.js
index bdea6319db..e041909b01 100644
--- a/deps/v8/tools/profile_view.js
+++ b/deps/v8/tools/profile_view.js
@@ -26,18 +26,13 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Initlialize namespaces
-var devtools = devtools || {};
-devtools.profiler = devtools.profiler || {};
-
-
/**
* Creates a Profile View builder object.
*
* @param {number} samplingRate Number of ms between profiler ticks.
* @constructor
*/
-devtools.profiler.ViewBuilder = function(samplingRate) {
+function ViewBuilder(samplingRate) {
this.samplingRate = samplingRate;
};
@@ -45,11 +40,11 @@ devtools.profiler.ViewBuilder = function(samplingRate) {
/**
* Builds a profile view for the specified call tree.
*
- * @param {devtools.profiler.CallTree} callTree A call tree.
+ * @param {CallTree} callTree A call tree.
* @param {boolean} opt_bottomUpViewWeights Whether remapping
* of self weights for a bottom up view is needed.
*/
-devtools.profiler.ViewBuilder.prototype.buildView = function(
+ViewBuilder.prototype.buildView = function(
callTree, opt_bottomUpViewWeights) {
var head;
var samplingRate = this.samplingRate;
@@ -80,11 +75,11 @@ devtools.profiler.ViewBuilder.prototype.buildView = function(
/**
* Factory method for a profile view.
*
- * @param {devtools.profiler.ProfileView.Node} head View head node.
- * @return {devtools.profiler.ProfileView} Profile view.
+ * @param {ProfileView.Node} head View head node.
+ * @return {ProfileView} Profile view.
*/
-devtools.profiler.ViewBuilder.prototype.createView = function(head) {
- return new devtools.profiler.ProfileView(head);
+ViewBuilder.prototype.createView = function(head) {
+ return new ProfileView(head);
};
@@ -97,12 +92,12 @@ devtools.profiler.ViewBuilder.prototype.createView = function(head) {
* profile they can be either callees or callers.)
* @param {number} selfTime Amount of time that application spent in the
* corresponding function only.
- * @param {devtools.profiler.ProfileView.Node} head Profile view head.
- * @return {devtools.profiler.ProfileView.Node} Profile view node.
+ * @param {ProfileView.Node} head Profile view head.
+ * @return {ProfileView.Node} Profile view node.
*/
-devtools.profiler.ViewBuilder.prototype.createViewNode = function(
+ViewBuilder.prototype.createViewNode = function(
funcName, totalTime, selfTime, head) {
- return new devtools.profiler.ProfileView.Node(
+ return new ProfileView.Node(
funcName, totalTime, selfTime, head);
};
@@ -111,10 +106,10 @@ devtools.profiler.ViewBuilder.prototype.createViewNode = function(
* Creates a Profile View object. It allows to perform sorting
* and filtering actions on the profile.
*
- * @param {devtools.profiler.ProfileView.Node} head Head (root) node.
+ * @param {ProfileView.Node} head Head (root) node.
* @constructor
*/
-devtools.profiler.ProfileView = function(head) {
+function ProfileView(head) {
this.head = head;
};
@@ -122,11 +117,11 @@ devtools.profiler.ProfileView = function(head) {
/**
* Sorts the profile view using the specified sort function.
*
- * @param {function(devtools.profiler.ProfileView.Node,
- * devtools.profiler.ProfileView.Node):number} sortFunc A sorting
+ * @param {function(ProfileView.Node,
+ * ProfileView.Node):number} sortFunc A sorting
* functions. Must comply with Array.sort sorting function requirements.
*/
-devtools.profiler.ProfileView.prototype.sort = function(sortFunc) {
+ProfileView.prototype.sort = function(sortFunc) {
this.traverse(function (node) {
node.sortChildren(sortFunc);
});
@@ -136,9 +131,9 @@ devtools.profiler.ProfileView.prototype.sort = function(sortFunc) {
/**
* Traverses profile view nodes in preorder.
*
- * @param {function(devtools.profiler.ProfileView.Node)} f Visitor function.
+ * @param {function(ProfileView.Node)} f Visitor function.
*/
-devtools.profiler.ProfileView.prototype.traverse = function(f) {
+ProfileView.prototype.traverse = function(f) {
var nodesToTraverse = new ConsArray();
nodesToTraverse.concat([this.head]);
while (!nodesToTraverse.atEnd()) {
@@ -159,10 +154,10 @@ devtools.profiler.ProfileView.prototype.traverse = function(f) {
* profile they can be either callees or callers.)
* @param {number} selfTime Amount of time that application spent in the
* corresponding function only.
- * @param {devtools.profiler.ProfileView.Node} head Profile view head.
+ * @param {ProfileView.Node} head Profile view head.
* @constructor
*/
-devtools.profiler.ProfileView.Node = function(
+ProfileView.Node = function(
internalFuncName, totalTime, selfTime, head) {
this.internalFuncName = internalFuncName;
this.totalTime = totalTime;
@@ -176,7 +171,7 @@ devtools.profiler.ProfileView.Node = function(
/**
* Returns a share of the function's total time in application's total time.
*/
-devtools.profiler.ProfileView.Node.prototype.__defineGetter__(
+ProfileView.Node.prototype.__defineGetter__(
'totalPercent',
function() { return this.totalTime /
(this.head ? this.head.totalTime : this.totalTime) * 100.0; });
@@ -185,7 +180,7 @@ devtools.profiler.ProfileView.Node.prototype.__defineGetter__(
/**
* Returns a share of the function's self time in application's total time.
*/
-devtools.profiler.ProfileView.Node.prototype.__defineGetter__(
+ProfileView.Node.prototype.__defineGetter__(
'selfPercent',
function() { return this.selfTime /
(this.head ? this.head.totalTime : this.totalTime) * 100.0; });
@@ -194,7 +189,7 @@ devtools.profiler.ProfileView.Node.prototype.__defineGetter__(
/**
* Returns a share of the function's total time in its parent's total time.
*/
-devtools.profiler.ProfileView.Node.prototype.__defineGetter__(
+ProfileView.Node.prototype.__defineGetter__(
'parentTotalPercent',
function() { return this.totalTime /
(this.parent ? this.parent.totalTime : this.totalTime) * 100.0; });
@@ -203,9 +198,9 @@ devtools.profiler.ProfileView.Node.prototype.__defineGetter__(
/**
* Adds a child to the node.
*
- * @param {devtools.profiler.ProfileView.Node} node Child node.
+ * @param {ProfileView.Node} node Child node.
*/
-devtools.profiler.ProfileView.Node.prototype.addChild = function(node) {
+ProfileView.Node.prototype.addChild = function(node) {
node.parent = this;
this.children.push(node);
};
@@ -214,11 +209,11 @@ devtools.profiler.ProfileView.Node.prototype.addChild = function(node) {
/**
* Sorts all the node's children recursively.
*
- * @param {function(devtools.profiler.ProfileView.Node,
- * devtools.profiler.ProfileView.Node):number} sortFunc A sorting
+ * @param {function(ProfileView.Node,
+ * ProfileView.Node):number} sortFunc A sorting
* functions. Must comply with Array.sort sorting function requirements.
*/
-devtools.profiler.ProfileView.Node.prototype.sortChildren = function(
+ProfileView.Node.prototype.sortChildren = function(
sortFunc) {
this.children.sort(sortFunc);
};
diff --git a/deps/v8/tools/splaytree.js b/deps/v8/tools/splaytree.js
index 7b3af8b992..1c9aab9e2e 100644
--- a/deps/v8/tools/splaytree.js
+++ b/deps/v8/tools/splaytree.js
@@ -26,12 +26,6 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// A namespace stub. It will become more clear how to declare it properly
-// during integration of this script into Dev Tools.
-var goog = goog || {};
-goog.structs = goog.structs || {};
-
-
/**
* Constructs a Splay tree. A splay tree is a self-balancing binary
* search tree with the additional property that recently accessed
@@ -40,23 +34,23 @@ goog.structs = goog.structs || {};
*
* @constructor
*/
-goog.structs.SplayTree = function() {
+function SplayTree() {
};
/**
* Pointer to the root node of the tree.
*
- * @type {goog.structs.SplayTree.Node}
+ * @type {SplayTree.Node}
* @private
*/
-goog.structs.SplayTree.prototype.root_ = null;
+SplayTree.prototype.root_ = null;
/**
* @return {boolean} Whether the tree is empty.
*/
-goog.structs.SplayTree.prototype.isEmpty = function() {
+SplayTree.prototype.isEmpty = function() {
return !this.root_;
};
@@ -70,9 +64,9 @@ goog.structs.SplayTree.prototype.isEmpty = function() {
* @param {number} key Key to insert into the tree.
* @param {*} value Value to insert into the tree.
*/
-goog.structs.SplayTree.prototype.insert = function(key, value) {
+SplayTree.prototype.insert = function(key, value) {
if (this.isEmpty()) {
- this.root_ = new goog.structs.SplayTree.Node(key, value);
+ this.root_ = new SplayTree.Node(key, value);
return;
}
// Splay on the key to move the last node on the search path for
@@ -81,7 +75,7 @@ goog.structs.SplayTree.prototype.insert = function(key, value) {
if (this.root_.key == key) {
return;
}
- var node = new goog.structs.SplayTree.Node(key, value);
+ var node = new SplayTree.Node(key, value);
if (key > this.root_.key) {
node.left = this.root_;
node.right = this.root_.right;
@@ -101,9 +95,9 @@ goog.structs.SplayTree.prototype.insert = function(key, value) {
* key is not found, an exception is thrown.
*
* @param {number} key Key to find and remove from the tree.
- * @return {goog.structs.SplayTree.Node} The removed node.
+ * @return {SplayTree.Node} The removed node.
*/
-goog.structs.SplayTree.prototype.remove = function(key) {
+SplayTree.prototype.remove = function(key) {
if (this.isEmpty()) {
throw Error('Key not found: ' + key);
}
@@ -132,9 +126,9 @@ goog.structs.SplayTree.prototype.remove = function(key) {
* a node with the specified key.
*
* @param {number} key Key to find in the tree.
- * @return {goog.structs.SplayTree.Node} Node having the specified key.
+ * @return {SplayTree.Node} Node having the specified key.
*/
-goog.structs.SplayTree.prototype.find = function(key) {
+SplayTree.prototype.find = function(key) {
if (this.isEmpty()) {
return null;
}
@@ -144,9 +138,9 @@ goog.structs.SplayTree.prototype.find = function(key) {
/**
- * @return {goog.structs.SplayTree.Node} Node having the minimum key value.
+ * @return {SplayTree.Node} Node having the minimum key value.
*/
-goog.structs.SplayTree.prototype.findMin = function() {
+SplayTree.prototype.findMin = function() {
if (this.isEmpty()) {
return null;
}
@@ -159,9 +153,9 @@ goog.structs.SplayTree.prototype.findMin = function() {
/**
- * @return {goog.structs.SplayTree.Node} Node having the maximum key value.
+ * @return {SplayTree.Node} Node having the maximum key value.
*/
-goog.structs.SplayTree.prototype.findMax = function(opt_startNode) {
+SplayTree.prototype.findMax = function(opt_startNode) {
if (this.isEmpty()) {
return null;
}
@@ -174,10 +168,10 @@ goog.structs.SplayTree.prototype.findMax = function(opt_startNode) {
/**
- * @return {goog.structs.SplayTree.Node} Node having the maximum key value that
+ * @return {SplayTree.Node} Node having the maximum key value that
* is less or equal to the specified key value.
*/
-goog.structs.SplayTree.prototype.findGreatestLessThan = function(key) {
+SplayTree.prototype.findGreatestLessThan = function(key) {
if (this.isEmpty()) {
return null;
}
@@ -199,7 +193,7 @@ goog.structs.SplayTree.prototype.findGreatestLessThan = function(key) {
/**
* @return {Array<*>} An array containing all the values of tree's nodes.
*/
-goog.structs.SplayTree.prototype.exportValues = function() {
+SplayTree.prototype.exportValues = function() {
var result = [];
this.traverse_(function(node) { result.push(node.value); });
return result;
@@ -216,7 +210,7 @@ goog.structs.SplayTree.prototype.exportValues = function() {
* @param {number} key Key to splay the tree on.
* @private
*/
-goog.structs.SplayTree.prototype.splay_ = function(key) {
+SplayTree.prototype.splay_ = function(key) {
if (this.isEmpty()) {
return;
}
@@ -226,7 +220,7 @@ goog.structs.SplayTree.prototype.splay_ = function(key) {
// will hold the R tree of the algorithm. Using a dummy node, left
// and right will always be nodes and we avoid special cases.
var dummy, left, right;
- dummy = left = right = new goog.structs.SplayTree.Node(null, null);
+ dummy = left = right = new SplayTree.Node(null, null);
var current = this.root_;
while (true) {
if (key < current.key) {
@@ -281,10 +275,10 @@ goog.structs.SplayTree.prototype.splay_ = function(key) {
/**
* Performs a preorder traversal of the tree.
*
- * @param {function(goog.structs.SplayTree.Node)} f Visitor function.
+ * @param {function(SplayTree.Node)} f Visitor function.
* @private
*/
-goog.structs.SplayTree.prototype.traverse_ = function(f) {
+SplayTree.prototype.traverse_ = function(f) {
var nodesToVisit = [this.root_];
while (nodesToVisit.length > 0) {
var node = nodesToVisit.shift();
@@ -304,19 +298,19 @@ goog.structs.SplayTree.prototype.traverse_ = function(f) {
* @param {number} key Key.
* @param {*} value Value.
*/
-goog.structs.SplayTree.Node = function(key, value) {
+SplayTree.Node = function(key, value) {
this.key = key;
this.value = value;
};
/**
- * @type {goog.structs.SplayTree.Node}
+ * @type {SplayTree.Node}
*/
-goog.structs.SplayTree.Node.prototype.left = null;
+SplayTree.Node.prototype.left = null;
/**
- * @type {goog.structs.SplayTree.Node}
+ * @type {SplayTree.Node}
*/
-goog.structs.SplayTree.Node.prototype.right = null;
+SplayTree.Node.prototype.right = null;
diff --git a/deps/v8/tools/tickprocessor.js b/deps/v8/tools/tickprocessor.js
index 87864d1206..db2f3c9b90 100644
--- a/deps/v8/tools/tickprocessor.js
+++ b/deps/v8/tools/tickprocessor.js
@@ -26,16 +26,21 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-function Profile(separateIc) {
- devtools.profiler.Profile.call(this);
+function inherits(childCtor, parentCtor) {
+ childCtor.prototype.__proto__ = parentCtor.prototype;
+};
+
+
+function V8Profile(separateIc) {
+ Profile.call(this);
if (!separateIc) {
- this.skipThisFunction = function(name) { return Profile.IC_RE.test(name); };
+ this.skipThisFunction = function(name) { return V8Profile.IC_RE.test(name); };
}
};
-Profile.prototype = devtools.profiler.Profile.prototype;
+inherits(V8Profile, Profile);
-Profile.IC_RE =
+V8Profile.IC_RE =
/^(?:CallIC|LoadIC|StoreIC)|(?:Builtin: (?:Keyed)?(?:Call|Load|Store)IC_)/;
@@ -52,13 +57,8 @@ function readFile(fileName) {
}
-function inherits(childCtor, parentCtor) {
- childCtor.prototype.__proto__ = parentCtor.prototype;
-};
-
-
function SnapshotLogProcessor() {
- devtools.profiler.LogReader.call(this, {
+ LogReader.call(this, {
'code-creation': {
parsers: [null, parseInt, parseInt, null],
processor: this.processCodeCreation },
@@ -72,8 +72,8 @@ function SnapshotLogProcessor() {
'snapshot-pos': { parsers: [parseInt, parseInt],
processor: this.processSnapshotPosition }});
- Profile.prototype.handleUnknownCode = function(operation, addr) {
- var op = devtools.profiler.Profile.Operation;
+ V8Profile.prototype.handleUnknownCode = function(operation, addr) {
+ var op = Profile.Operation;
switch (operation) {
case op.MOVE:
print('Snapshot: Code move event for unknown code: 0x' +
@@ -86,10 +86,10 @@ function SnapshotLogProcessor() {
}
};
- this.profile_ = new Profile();
+ this.profile_ = new V8Profile();
this.serializedEntries_ = [];
}
-inherits(SnapshotLogProcessor, devtools.profiler.LogReader);
+inherits(SnapshotLogProcessor, LogReader);
SnapshotLogProcessor.prototype.processCodeCreation = function(
@@ -127,7 +127,7 @@ SnapshotLogProcessor.prototype.getSerializedEntryName = function(pos) {
function TickProcessor(
cppEntriesProvider, separateIc, ignoreUnknown, stateFilter, snapshotLogProcessor) {
- devtools.profiler.LogReader.call(this, {
+ LogReader.call(this, {
'shared-library': { parsers: [null, parseInt, parseInt],
processor: this.processSharedLibrary },
'code-creation': {
@@ -172,9 +172,9 @@ function TickProcessor(
var ticks = this.ticks_ =
{ total: 0, unaccounted: 0, excluded: 0, gc: 0 };
- Profile.prototype.handleUnknownCode = function(
+ V8Profile.prototype.handleUnknownCode = function(
operation, addr, opt_stackPos) {
- var op = devtools.profiler.Profile.Operation;
+ var op = Profile.Operation;
switch (operation) {
case op.MOVE:
print('Code move event for unknown code: 0x' + addr.toString(16));
@@ -193,16 +193,16 @@ function TickProcessor(
}
};
- this.profile_ = new Profile(separateIc);
+ this.profile_ = new V8Profile(separateIc);
this.codeTypes_ = {};
// Count each tick as a time unit.
- this.viewBuilder_ = new devtools.profiler.ViewBuilder(1);
+ this.viewBuilder_ = new ViewBuilder(1);
this.lastLogFileName_ = null;
this.generation_ = 1;
this.currentProducerProfile_ = null;
};
-inherits(TickProcessor, devtools.profiler.LogReader);
+inherits(TickProcessor, LogReader);
TickProcessor.VmStates = {
@@ -356,7 +356,7 @@ TickProcessor.prototype.processTick = function(pc, sp, func, vmState, stack) {
TickProcessor.prototype.processHeapSampleBegin = function(space, state, ticks) {
if (space != 'Heap') return;
- this.currentProducerProfile_ = new devtools.profiler.CallTree();
+ this.currentProducerProfile_ = new CallTree();
};
diff --git a/deps/v8/tools/utils.py b/deps/v8/tools/utils.py
index fb94d14186..8083091b6d 100644
--- a/deps/v8/tools/utils.py
+++ b/deps/v8/tools/utils.py
@@ -49,8 +49,6 @@ def GuessOS():
return 'linux'
elif id == 'Darwin':
return 'macos'
- elif id.find('CYGWIN') >= 0:
- return 'cygwin'
elif id == 'Windows' or id == 'Microsoft':
# On Windows Vista platform.system() can return 'Microsoft' with some
# versions of Python, see http://bugs.python.org/issue1082
diff --git a/deps/v8/tools/v8.xcodeproj/project.pbxproj b/deps/v8/tools/v8.xcodeproj/project.pbxproj
index 86fcd8dcab..24321e52c7 100644
--- a/deps/v8/tools/v8.xcodeproj/project.pbxproj
+++ b/deps/v8/tools/v8.xcodeproj/project.pbxproj
@@ -157,7 +157,6 @@
8956926612D4ED240072C313 /* messages.cc in Sources */ = {isa = PBXBuildFile; fileRef = 897FF15C0E719B8F00D62E90 /* messages.cc */; };
8956926712D4ED240072C313 /* objects-debug.cc in Sources */ = {isa = PBXBuildFile; fileRef = 897FF1600E719B8F00D62E90 /* objects-debug.cc */; };
8956926812D4ED240072C313 /* objects.cc in Sources */ = {isa = PBXBuildFile; fileRef = 897FF1620E719B8F00D62E90 /* objects.cc */; };
- 8956926912D4ED240072C313 /* oprofile-agent.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FC86ABB0F5FEDAC00F22668 /* oprofile-agent.cc */; };
8956926A12D4ED240072C313 /* parser.cc in Sources */ = {isa = PBXBuildFile; fileRef = 897FF1640E719B8F00D62E90 /* parser.cc */; };
8956926B12D4ED240072C313 /* platform-macos.cc in Sources */ = {isa = PBXBuildFile; fileRef = 897FF1670E719B8F00D62E90 /* platform-macos.cc */; };
8956926C12D4ED240072C313 /* platform-posix.cc in Sources */ = {isa = PBXBuildFile; fileRef = 893A72230F7B0FF200303DD2 /* platform-posix.cc */; };
@@ -427,8 +426,6 @@
9FA38BCF1175B30400C4CD55 /* full-codegen-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FA38BCB1175B30400C4CD55 /* full-codegen-arm.cc */; };
9FA38BD01175B30400C4CD55 /* jump-target-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FA38BCC1175B30400C4CD55 /* jump-target-arm.cc */; };
9FA38BD11175B30400C4CD55 /* virtual-frame-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FA38BCD1175B30400C4CD55 /* virtual-frame-arm.cc */; };
- 9FC86ABD0F5FEDAC00F22668 /* oprofile-agent.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FC86ABB0F5FEDAC00F22668 /* oprofile-agent.cc */; };
- 9FC86ABE0F5FEDAC00F22668 /* oprofile-agent.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9FC86ABB0F5FEDAC00F22668 /* oprofile-agent.cc */; };
C2BD4BD7120165460046BF9F /* dtoa.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2BD4BD5120165460046BF9F /* dtoa.cc */; };
C2BD4BDB120165A70046BF9F /* fixed-dtoa.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2BD4BD9120165A70046BF9F /* fixed-dtoa.cc */; };
C2BD4BE4120166180046BF9F /* fixed-dtoa.cc in Sources */ = {isa = PBXBuildFile; fileRef = C2BD4BD9120165A70046BF9F /* fixed-dtoa.cc */; };
@@ -952,8 +949,6 @@
9FA38BCB1175B30400C4CD55 /* full-codegen-arm.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "full-codegen-arm.cc"; path = "arm/full-codegen-arm.cc"; sourceTree = "<group>"; };
9FA38BCC1175B30400C4CD55 /* jump-target-arm.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "jump-target-arm.cc"; path = "arm/jump-target-arm.cc"; sourceTree = "<group>"; };
9FA38BCD1175B30400C4CD55 /* virtual-frame-arm.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "virtual-frame-arm.cc"; path = "arm/virtual-frame-arm.cc"; sourceTree = "<group>"; };
- 9FC86ABB0F5FEDAC00F22668 /* oprofile-agent.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = "oprofile-agent.cc"; sourceTree = "<group>"; };
- 9FC86ABC0F5FEDAC00F22668 /* oprofile-agent.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "oprofile-agent.h"; sourceTree = "<group>"; };
9FF7A28211A642EA0051B8F2 /* unbound-queue-inl.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "unbound-queue-inl.h"; sourceTree = "<group>"; };
9FF7A28311A642EA0051B8F2 /* unbound-queue.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "unbound-queue.h"; sourceTree = "<group>"; };
C2BD4BD5120165460046BF9F /* dtoa.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = dtoa.cc; sourceTree = "<group>"; };
@@ -1175,7 +1170,6 @@
893E248E12B14B3D0083370F /* hydrogen-instructions.h */,
893E248F12B14B3D0083370F /* hydrogen.cc */,
893E249012B14B3D0083370F /* hydrogen.h */,
- 897FF1490E719B8F00D62E90 /* ic-arm.cc */,
897FF14B0E719B8F00D62E90 /* ic-inl.h */,
897FF14C0E719B8F00D62E90 /* ic.cc */,
897FF14D0E719B8F00D62E90 /* ic.h */,
@@ -1222,8 +1216,6 @@
C2D1E9721212F27B00187A52 /* objects-visiting.h */,
897FF1620E719B8F00D62E90 /* objects.cc */,
897FF1630E719B8F00D62E90 /* objects.h */,
- 9FC86ABB0F5FEDAC00F22668 /* oprofile-agent.cc */,
- 9FC86ABC0F5FEDAC00F22668 /* oprofile-agent.h */,
897FF1640E719B8F00D62E90 /* parser.cc */,
897FF1650E719B8F00D62E90 /* parser.h */,
89A15C6D0EE466A900B48DEB /* platform-freebsd.cc */,
@@ -1292,7 +1284,6 @@
897FF1890E719B8F00D62E90 /* string-stream.h */,
893E24A312B14B3D0083370F /* strtod.cc */,
893E24A412B14B3D0083370F /* strtod.h */,
- 897FF18A0E719B8F00D62E90 /* stub-cache-arm.cc */,
897FF18C0E719B8F00D62E90 /* stub-cache.cc */,
897FF18D0E719B8F00D62E90 /* stub-cache.h */,
897FF18E0E719B8F00D62E90 /* token.cc */,
@@ -1541,6 +1532,7 @@
898BD20C0EF6CC850068B00A /* debug-arm.cc */,
893E24C612B14B510083370F /* deoptimizer-arm.cc */,
9FA38BCB1175B30400C4CD55 /* full-codegen-arm.cc */,
+ 897FF1490E719B8F00D62E90 /* ic-arm.cc */,
9FA38BCC1175B30400C4CD55 /* jump-target-arm.cc */,
893E24C712B14B510083370F /* lithium-arm.cc */,
893E24C812B14B510083370F /* lithium-arm.h */,
@@ -1555,6 +1547,7 @@
895FA751107FFEAE006F39D4 /* register-allocator-arm.h */,
897FF17D0E719B8F00D62E90 /* simulator-arm.cc */,
897FF17E0E719B8F00D62E90 /* simulator-arm.h */,
+ 897FF18A0E719B8F00D62E90 /* stub-cache-arm.cc */,
893E24CB12B14B520083370F /* virtual-frame-arm-inl.h */,
9FA38BCD1175B30400C4CD55 /* virtual-frame-arm.cc */,
58950D570F55514900F3E8BA /* virtual-frame-arm.h */,
@@ -1906,7 +1899,6 @@
8956926612D4ED240072C313 /* messages.cc in Sources */,
8956926712D4ED240072C313 /* objects-debug.cc in Sources */,
8956926812D4ED240072C313 /* objects.cc in Sources */,
- 8956926912D4ED240072C313 /* oprofile-agent.cc in Sources */,
8956926A12D4ED240072C313 /* parser.cc in Sources */,
8956926B12D4ED240072C313 /* platform-macos.cc in Sources */,
8956926C12D4ED240072C313 /* platform-posix.cc in Sources */,
@@ -2056,7 +2048,6 @@
89A88E120E71A67A0043BA31 /* messages.cc in Sources */,
89A88E130E71A6860043BA31 /* objects-debug.cc in Sources */,
89A88E140E71A6870043BA31 /* objects.cc in Sources */,
- 9FC86ABD0F5FEDAC00F22668 /* oprofile-agent.cc in Sources */,
89A88E150E71A68C0043BA31 /* parser.cc in Sources */,
89A88E160E71A68E0043BA31 /* platform-macos.cc in Sources */,
893A72240F7B101400303DD2 /* platform-posix.cc in Sources */,
@@ -2233,7 +2224,6 @@
89F23C660E78D5B2006B2466 /* messages.cc in Sources */,
89F23C670E78D5B2006B2466 /* objects-debug.cc in Sources */,
89F23C680E78D5B2006B2466 /* objects.cc in Sources */,
- 9FC86ABE0F5FEDAC00F22668 /* oprofile-agent.cc in Sources */,
89F23C690E78D5B2006B2466 /* parser.cc in Sources */,
89F23C6A0E78D5B2006B2466 /* platform-macos.cc in Sources */,
893A72250F7B101B00303DD2 /* platform-posix.cc in Sources */,
diff --git a/deps/v8/tools/visual_studio/common.vsprops b/deps/v8/tools/visual_studio/common.vsprops
index 20bb119212..fa78cdc437 100644
--- a/deps/v8/tools/visual_studio/common.vsprops
+++ b/deps/v8/tools/visual_studio/common.vsprops
@@ -16,7 +16,7 @@
WarnAsError="true"
Detect64BitPortabilityProblems="false"
DebugInformationFormat="3"
- DisableSpecificWarnings="4355;4800"
+ DisableSpecificWarnings="4351;4355;4800"
EnableFunctionLevelLinking="true"
/>
<Tool
diff --git a/deps/v8/tools/visual_studio/v8_base.vcproj b/deps/v8/tools/visual_studio/v8_base.vcproj
index a980bd2dc5..5f76069d3d 100644
--- a/deps/v8/tools/visual_studio/v8_base.vcproj
+++ b/deps/v8/tools/visual_studio/v8_base.vcproj
@@ -834,14 +834,6 @@
>
</File>
<File
- RelativePath="..\..\src\oprofile-agent.cc"
- >
- </File>
- <File
- RelativePath="..\..\src\oprofile-agent.h"
- >
- </File>
- <File
RelativePath="..\..\src\parser.cc"
>
</File>
diff --git a/deps/v8/tools/visual_studio/v8_base_arm.vcproj b/deps/v8/tools/visual_studio/v8_base_arm.vcproj
index 6aa73da3ed..feb7e6c622 100644
--- a/deps/v8/tools/visual_studio/v8_base_arm.vcproj
+++ b/deps/v8/tools/visual_studio/v8_base_arm.vcproj
@@ -816,14 +816,6 @@
>
</File>
<File
- RelativePath="..\..\src\oprofile-agent.cc"
- >
- </File>
- <File
- RelativePath="..\..\src\oprofile-agent.h"
- >
- </File>
- <File
RelativePath="..\..\src\parser.cc"
>
</File>
diff --git a/deps/v8/tools/visual_studio/v8_base_x64.vcproj b/deps/v8/tools/visual_studio/v8_base_x64.vcproj
index c8ceaa256b..14ed77e04e 100644
--- a/deps/v8/tools/visual_studio/v8_base_x64.vcproj
+++ b/deps/v8/tools/visual_studio/v8_base_x64.vcproj
@@ -834,14 +834,6 @@
>
</File>
<File
- RelativePath="..\..\src\oprofile-agent.cc"
- >
- </File>
- <File
- RelativePath="..\..\src\oprofile-agent.h"
- >
- </File>
- <File
RelativePath="..\..\src\parser.cc"
>
</File>