From 3f3f958c14cf4e963a73d6f037ac381c77fe78bb Mon Sep 17 00:00:00 2001 From: isaacs Date: Tue, 15 May 2012 19:53:16 -0700 Subject: Upgrade V8 to 3.11.1 --- deps/v8/AUTHORS | 1 + deps/v8/ChangeLog | 205 + deps/v8/DEPS | 27 + deps/v8/Makefile | 74 +- deps/v8/SConstruct | 13 + deps/v8/build/armu.gypi | 36 - deps/v8/build/common.gypi | 53 +- deps/v8/build/gyp_v8 | 33 +- deps/v8/build/mipsu.gypi | 33 - deps/v8/build/standalone.gypi | 16 +- deps/v8/include/v8-profiler.h | 81 +- deps/v8/include/v8.h | 213 +- deps/v8/samples/lineprocessor.cc | 6 +- deps/v8/samples/samples.gyp | 8 +- deps/v8/samples/shell.cc | 25 +- deps/v8/src/api.cc | 114 +- deps/v8/src/api.h | 1 + deps/v8/src/apiutils.h | 9 +- deps/v8/src/arguments.h | 13 +- deps/v8/src/arm/code-stubs-arm.cc | 55 +- deps/v8/src/arm/debug-arm.cc | 4 +- deps/v8/src/arm/deoptimizer-arm.cc | 15 +- deps/v8/src/arm/full-codegen-arm.cc | 237 +- deps/v8/src/arm/ic-arm.cc | 49 +- deps/v8/src/arm/lithium-arm.cc | 140 +- deps/v8/src/arm/lithium-arm.h | 90 +- deps/v8/src/arm/lithium-codegen-arm.cc | 314 +- deps/v8/src/arm/lithium-codegen-arm.h | 19 +- deps/v8/src/arm/macro-assembler-arm.cc | 31 +- deps/v8/src/arm/macro-assembler-arm.h | 11 +- deps/v8/src/arm/regexp-macro-assembler-arm.cc | 54 +- deps/v8/src/arm/regexp-macro-assembler-arm.h | 8 + deps/v8/src/arm/stub-cache-arm.cc | 139 +- deps/v8/src/array.js | 166 +- deps/v8/src/assembler.cc | 71 +- deps/v8/src/assembler.h | 6 + deps/v8/src/ast.cc | 218 +- deps/v8/src/ast.h | 11 +- deps/v8/src/bootstrapper.cc | 4 +- deps/v8/src/builtins.cc | 77 +- deps/v8/src/builtins.h | 1 - deps/v8/src/bytecodes-irregexp.h | 35 +- deps/v8/src/code-stubs.cc | 19 +- deps/v8/src/code-stubs.h | 1 + deps/v8/src/compiler-intrinsics.h | 17 + deps/v8/src/compiler.cc | 8 + deps/v8/src/contexts.h | 2 +- deps/v8/src/conversions-inl.h | 4 +- deps/v8/src/d8.cc | 83 +- deps/v8/src/d8.h | 1 + deps/v8/src/d8.js | 2 +- deps/v8/src/date.js | 3 +- deps/v8/src/debug-agent.cc | 26 +- deps/v8/src/debug-debugger.js | 57 
+- deps/v8/src/debug.cc | 49 +- deps/v8/src/debug.h | 55 +- deps/v8/src/double.h | 6 - deps/v8/src/elements.cc | 212 +- deps/v8/src/elements.h | 60 +- .../src/extensions/externalize-string-extension.cc | 7 +- deps/v8/src/extensions/gc-extension.cc | 5 +- deps/v8/src/factory.cc | 27 +- deps/v8/src/factory.h | 12 +- deps/v8/src/flag-definitions.h | 11 +- deps/v8/src/frames.cc | 28 +- deps/v8/src/frames.h | 5 + deps/v8/src/full-codegen.cc | 134 +- deps/v8/src/full-codegen.h | 31 +- deps/v8/src/handles.cc | 6 +- deps/v8/src/hashmap.h | 10 +- deps/v8/src/heap-inl.h | 7 +- deps/v8/src/heap-profiler.cc | 42 +- deps/v8/src/heap-profiler.h | 11 +- deps/v8/src/heap.cc | 295 +- deps/v8/src/heap.h | 60 +- deps/v8/src/hydrogen-instructions.cc | 121 +- deps/v8/src/hydrogen-instructions.h | 234 +- deps/v8/src/hydrogen.cc | 1257 +++-- deps/v8/src/hydrogen.h | 71 +- deps/v8/src/ia32/assembler-ia32.h | 3 + deps/v8/src/ia32/builtins-ia32.cc | 11 +- deps/v8/src/ia32/code-stubs-ia32.cc | 112 +- deps/v8/src/ia32/codegen-ia32.cc | 18 +- deps/v8/src/ia32/debug-ia32.cc | 39 +- deps/v8/src/ia32/deoptimizer-ia32.cc | 31 +- deps/v8/src/ia32/full-codegen-ia32.cc | 254 +- deps/v8/src/ia32/ic-ia32.cc | 190 +- deps/v8/src/ia32/lithium-codegen-ia32.cc | 240 +- deps/v8/src/ia32/lithium-codegen-ia32.h | 8 +- deps/v8/src/ia32/lithium-ia32.cc | 69 +- deps/v8/src/ia32/lithium-ia32.h | 63 +- deps/v8/src/ia32/macro-assembler-ia32.cc | 2 +- deps/v8/src/ia32/regexp-macro-assembler-ia32.cc | 63 +- deps/v8/src/ia32/regexp-macro-assembler-ia32.h | 8 + deps/v8/src/ia32/stub-cache-ia32.cc | 359 +- deps/v8/src/ic.cc | 145 +- deps/v8/src/ic.h | 12 +- deps/v8/src/incremental-marking-inl.h | 2 +- deps/v8/src/incremental-marking.cc | 30 +- deps/v8/src/incremental-marking.h | 12 +- deps/v8/src/interface.cc | 13 +- deps/v8/src/interface.h | 46 +- deps/v8/src/interpreter-irregexp.cc | 87 +- deps/v8/src/isolate.cc | 130 +- deps/v8/src/isolate.h | 60 +- deps/v8/src/jsregexp.cc | 1848 +++++--- deps/v8/src/jsregexp.h | 416 
+- deps/v8/src/lazy-instance.h | 67 +- deps/v8/src/list-inl.h | 31 +- deps/v8/src/list.h | 11 +- deps/v8/src/lithium-allocator.cc | 2 +- deps/v8/src/lithium.cc | 34 +- deps/v8/src/lithium.h | 32 +- deps/v8/src/liveedit-debugger.js | 5 + deps/v8/src/liveedit.cc | 62 +- deps/v8/src/log.cc | 15 +- deps/v8/src/log.h | 2 + deps/v8/src/macros.py | 10 + deps/v8/src/mark-compact.cc | 151 +- deps/v8/src/mark-compact.h | 9 +- deps/v8/src/math.js | 1 - deps/v8/src/messages.js | 47 +- deps/v8/src/mips/assembler-mips.cc | 33 +- deps/v8/src/mips/assembler-mips.h | 90 +- deps/v8/src/mips/code-stubs-mips.cc | 126 +- deps/v8/src/mips/constants-mips.h | 5 - deps/v8/src/mips/debug-mips.cc | 4 +- deps/v8/src/mips/deoptimizer-mips.cc | 15 +- deps/v8/src/mips/full-codegen-mips.cc | 246 +- deps/v8/src/mips/ic-mips.cc | 47 +- deps/v8/src/mips/lithium-codegen-mips.cc | 199 +- deps/v8/src/mips/lithium-codegen-mips.h | 8 +- deps/v8/src/mips/lithium-gap-resolver-mips.cc | 32 +- deps/v8/src/mips/lithium-mips.cc | 78 +- deps/v8/src/mips/lithium-mips.h | 69 +- deps/v8/src/mips/macro-assembler-mips.cc | 2 +- deps/v8/src/mips/regexp-macro-assembler-mips.cc | 97 +- deps/v8/src/mips/regexp-macro-assembler-mips.h | 8 + deps/v8/src/mips/stub-cache-mips.cc | 164 +- deps/v8/src/mirror-debugger.js | 88 +- deps/v8/src/objects-debug.cc | 69 + deps/v8/src/objects-inl.h | 174 +- deps/v8/src/objects-printer.cc | 39 +- deps/v8/src/objects-visiting-inl.h | 4 +- deps/v8/src/objects-visiting.cc | 1 + deps/v8/src/objects-visiting.h | 17 + deps/v8/src/objects.cc | 456 +- deps/v8/src/objects.h | 183 +- deps/v8/src/parser.cc | 49 +- deps/v8/src/platform-cygwin.cc | 43 +- deps/v8/src/platform-freebsd.cc | 43 +- deps/v8/src/platform-linux.cc | 101 +- deps/v8/src/platform-macos.cc | 34 +- deps/v8/src/platform-nullos.cc | 10 + deps/v8/src/platform-openbsd.cc | 34 +- deps/v8/src/platform-posix.cc | 62 +- deps/v8/src/platform-posix.h | 39 + deps/v8/src/platform-solaris.cc | 46 +- deps/v8/src/platform-win32.cc | 128 +- 
deps/v8/src/platform.h | 13 +- deps/v8/src/preparser.cc | 10 +- deps/v8/src/preparser.h | 15 +- deps/v8/src/profile-generator-inl.h | 49 + deps/v8/src/profile-generator.cc | 1917 ++++---- deps/v8/src/profile-generator.h | 378 +- deps/v8/src/property.h | 7 - deps/v8/src/regexp-macro-assembler-irregexp-inl.h | 10 + deps/v8/src/regexp-macro-assembler-irregexp.cc | 36 + deps/v8/src/regexp-macro-assembler-irregexp.h | 8 + deps/v8/src/regexp-macro-assembler-tracer.cc | 109 +- deps/v8/src/regexp-macro-assembler-tracer.h | 7 + deps/v8/src/regexp-macro-assembler.h | 18 +- deps/v8/src/regexp.js | 21 +- deps/v8/src/rewriter.cc | 2 +- deps/v8/src/runtime-profiler.cc | 64 +- deps/v8/src/runtime-profiler.h | 10 +- deps/v8/src/runtime.cc | 504 +- deps/v8/src/runtime.h | 7 + deps/v8/src/runtime.js | 2 +- deps/v8/src/scanner.cc | 18 +- deps/v8/src/scanner.h | 9 +- deps/v8/src/scopeinfo.cc | 10 +- deps/v8/src/scopes.cc | 44 +- deps/v8/src/scopes.h | 19 +- deps/v8/src/serialize.cc | 2 +- deps/v8/src/small-pointer-list.h | 10 + deps/v8/src/spaces-inl.h | 20 +- deps/v8/src/spaces.cc | 83 +- deps/v8/src/spaces.h | 25 +- deps/v8/src/string.js | 151 +- deps/v8/src/stub-cache.cc | 10 +- deps/v8/src/utils.cc | 15 + deps/v8/src/utils.h | 26 + deps/v8/src/v8-counters.h | 2 + deps/v8/src/v8.cc | 18 +- deps/v8/src/v8globals.h | 4 + deps/v8/src/version.cc | 6 +- deps/v8/src/x64/assembler-x64.h | 3 +- deps/v8/src/x64/code-stubs-x64.cc | 64 +- deps/v8/src/x64/debug-x64.cc | 4 +- deps/v8/src/x64/deoptimizer-x64.cc | 58 +- deps/v8/src/x64/disasm-x64.cc | 6 +- deps/v8/src/x64/full-codegen-x64.cc | 384 +- deps/v8/src/x64/ic-x64.cc | 22 +- deps/v8/src/x64/lithium-codegen-x64.cc | 205 +- deps/v8/src/x64/lithium-codegen-x64.h | 8 +- deps/v8/src/x64/lithium-x64.cc | 77 +- deps/v8/src/x64/lithium-x64.h | 72 +- deps/v8/src/x64/macro-assembler-x64.cc | 20 +- deps/v8/src/x64/macro-assembler-x64.h | 2 + deps/v8/src/x64/regexp-macro-assembler-x64.cc | 56 +- deps/v8/src/x64/regexp-macro-assembler-x64.h | 8 + 
deps/v8/src/x64/stub-cache-x64.cc | 131 +- deps/v8/test/cctest/test-accessors.cc | 9 +- deps/v8/test/cctest/test-alloc.cc | 36 +- deps/v8/test/cctest/test-api.cc | 310 +- deps/v8/test/cctest/test-debug.cc | 5 +- deps/v8/test/cctest/test-decls.cc | 16 +- deps/v8/test/cctest/test-disasm-x64.cc | 1 + deps/v8/test/cctest/test-double.cc | 15 - deps/v8/test/cctest/test-heap-profiler.cc | 454 +- deps/v8/test/cctest/test-heap.cc | 158 +- deps/v8/test/cctest/test-list.cc | 12 + deps/v8/test/cctest/test-mark-compact.cc | 8 +- deps/v8/test/cctest/test-regexp.cc | 80 +- deps/v8/test/cctest/test-spaces.cc | 8 +- deps/v8/test/cctest/test-strings.cc | 85 + deps/v8/test/cctest/test-thread-termination.cc | 4 + deps/v8/test/cctest/test-weakmaps.cc | 80 +- deps/v8/test/cctest/testcfg.py | 2 + deps/v8/test/mjsunit/accessor-map-sharing.js | 176 + deps/v8/test/mjsunit/array-bounds-check-removal.js | 145 + deps/v8/test/mjsunit/big-array-literal.js | 3 + deps/v8/test/mjsunit/compiler/alloc-object-huge.js | 2 +- deps/v8/test/mjsunit/compiler/inline-arguments.js | 67 + deps/v8/test/mjsunit/compiler/inline-construct.js | 6 +- deps/v8/test/mjsunit/compiler/literals.js | 24 +- deps/v8/test/mjsunit/compiler/optimize-bitnot.js | 42 + .../debug-evaluate-locals-optimized-double.js | 17 +- .../mjsunit/debug-evaluate-locals-optimized.js | 17 +- deps/v8/test/mjsunit/debug-function-scopes.js | 162 + .../test/mjsunit/debug-liveedit-stack-padding.js | 88 + deps/v8/test/mjsunit/debug-scripts-request.js | 6 +- .../test/mjsunit/debug-stepin-builtin-callback.js | 157 + deps/v8/test/mjsunit/declare-locally.js | 6 +- deps/v8/test/mjsunit/error-constructors.js | 101 +- .../test/mjsunit/harmony/debug-function-scopes.js | 115 + deps/v8/test/mjsunit/harmony/module-linking.js | 121 + deps/v8/test/mjsunit/harmony/module-parsing.js | 10 +- deps/v8/test/mjsunit/harmony/module-resolution.js | 2 +- deps/v8/test/mjsunit/math-floor-of-div.js | 216 + deps/v8/test/mjsunit/mjsunit.js | 2 +- 
deps/v8/test/mjsunit/mjsunit.status | 1 + deps/v8/test/mjsunit/regexp-capture-3.js | 190 +- deps/v8/test/mjsunit/regress/regress-1119.js | 12 +- deps/v8/test/mjsunit/regress/regress-115452.js | 19 +- deps/v8/test/mjsunit/regress/regress-1170.js | 64 +- deps/v8/test/mjsunit/regress/regress-117409.js | 52 + deps/v8/test/mjsunit/regress/regress-119609.js | 71 + deps/v8/test/mjsunit/regress/regress-120099.js | 40 + deps/v8/test/mjsunit/regress/regress-121407.js | 40 + deps/v8/test/mjsunit/regress/regress-1217.js | 2 +- deps/v8/test/mjsunit/regress/regress-123512.js | 78 + deps/v8/test/mjsunit/regress/regress-123919.js | 47 + deps/v8/test/mjsunit/regress/regress-124594.js | 50 + deps/v8/test/mjsunit/regress/regress-125515.js | 41 + deps/v8/test/mjsunit/regress/regress-126412.js | 33 + deps/v8/test/mjsunit/regress/regress-1639-2.js | 5 +- deps/v8/test/mjsunit/regress/regress-1639.js | 22 +- deps/v8/test/mjsunit/regress/regress-2027.js | 48 + deps/v8/test/mjsunit/regress/regress-2030.js | 53 + deps/v8/test/mjsunit/regress/regress-2032.js | 64 + deps/v8/test/mjsunit/regress/regress-2034.js | 46 + deps/v8/test/mjsunit/regress/regress-2045.js | 49 + deps/v8/test/mjsunit/regress/regress-2054.js | 34 + deps/v8/test/mjsunit/regress/regress-2055.js | 48 + deps/v8/test/mjsunit/regress/regress-2056.js | 66 + deps/v8/test/mjsunit/regress/regress-2058.js | 37 + deps/v8/test/mjsunit/regress/regress-2110.js | 53 + .../test/mjsunit/regress/regress-crbug-122271.js | 49 + .../test/mjsunit/regress/regress-crbug-126414.js | 32 + .../regress/regress-fast-literal-transition.js | 62 + .../test/mjsunit/unicodelctest-no-optimization.js | 4914 ++++++++++++++++++++ deps/v8/test/mjsunit/unicodelctest.js | 4912 +++++++++++++++++++ deps/v8/test/mozilla/mozilla.status | 14 + deps/v8/test/sputnik/sputnik.status | 40 +- deps/v8/test/test262/README | 4 +- deps/v8/test/test262/test262.status | 32 +- deps/v8/test/test262/testcfg.py | 47 +- deps/v8/tools/check-static-initializers.sh | 19 +- 
deps/v8/tools/common-includes.sh | 16 +- deps/v8/tools/grokdump.py | 114 +- deps/v8/tools/gyp/v8.gyp | 8 +- deps/v8/tools/merge-to-branch.sh | 27 +- deps/v8/tools/presubmit.py | 8 +- deps/v8/tools/push-to-trunk.sh | 9 + deps/v8/tools/test-wrapper-gypbuild.py | 21 +- 297 files changed, 25594 insertions(+), 6834 deletions(-) create mode 100644 deps/v8/DEPS delete mode 100644 deps/v8/build/armu.gypi delete mode 100644 deps/v8/build/mipsu.gypi create mode 100644 deps/v8/src/platform-posix.h create mode 100644 deps/v8/test/mjsunit/accessor-map-sharing.js create mode 100644 deps/v8/test/mjsunit/array-bounds-check-removal.js create mode 100644 deps/v8/test/mjsunit/compiler/optimize-bitnot.js create mode 100644 deps/v8/test/mjsunit/debug-function-scopes.js create mode 100644 deps/v8/test/mjsunit/debug-liveedit-stack-padding.js create mode 100644 deps/v8/test/mjsunit/debug-stepin-builtin-callback.js create mode 100644 deps/v8/test/mjsunit/harmony/debug-function-scopes.js create mode 100644 deps/v8/test/mjsunit/harmony/module-linking.js create mode 100644 deps/v8/test/mjsunit/math-floor-of-div.js create mode 100644 deps/v8/test/mjsunit/regress/regress-117409.js create mode 100644 deps/v8/test/mjsunit/regress/regress-119609.js create mode 100644 deps/v8/test/mjsunit/regress/regress-120099.js create mode 100644 deps/v8/test/mjsunit/regress/regress-121407.js create mode 100644 deps/v8/test/mjsunit/regress/regress-123512.js create mode 100644 deps/v8/test/mjsunit/regress/regress-123919.js create mode 100644 deps/v8/test/mjsunit/regress/regress-124594.js create mode 100644 deps/v8/test/mjsunit/regress/regress-125515.js create mode 100644 deps/v8/test/mjsunit/regress/regress-126412.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2027.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2030.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2032.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2034.js create mode 100644 
deps/v8/test/mjsunit/regress/regress-2045.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2054.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2055.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2056.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2058.js create mode 100644 deps/v8/test/mjsunit/regress/regress-2110.js create mode 100644 deps/v8/test/mjsunit/regress/regress-crbug-122271.js create mode 100644 deps/v8/test/mjsunit/regress/regress-crbug-126414.js create mode 100644 deps/v8/test/mjsunit/regress/regress-fast-literal-transition.js create mode 100644 deps/v8/test/mjsunit/unicodelctest-no-optimization.js create mode 100644 deps/v8/test/mjsunit/unicodelctest.js diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS index dfefad129f..6e46b3d621 100644 --- a/deps/v8/AUTHORS +++ b/deps/v8/AUTHORS @@ -23,6 +23,7 @@ Daniel James Dineel D Sule Erich Ocean Fedor Indutny +Filipe David Manana Ioseb Dzmanashvili Jan de Mooij Jay Freeman diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog index 2240ec0e68..c52a5abc5f 100644 --- a/deps/v8/ChangeLog +++ b/deps/v8/ChangeLog @@ -1,3 +1,208 @@ +2012-05-15: Version 3.11.1 + + Added a readbuffer function to d8 that reads a file into an ArrayBuffer. + + Fix freebsd build. (V8 issue 2126) + + Performance and stability improvements on all platforms. + + +2012-05-11: Version 3.11.0 + + Fixed compose-discard crasher from r11524 (issue 2123). + + Activated new global semantics by default. Global variables can + now shadow properties of the global object (ES5.1 erratum). + + Properly set ElementsKind of empty FAST_DOUBLE_ELEMENTS arrays when + transitioning (Chromium issue 117409). + + Made Error.prototype.name writable again, as required by the spec and + the web (Chromium issue 69187). + + Implemented map collection with incremental marking (issue 1465). + + Regexp: Fixed overflow in min-match-length calculation + (Chromium issue 126412). 
+ + MIPS: Fixed illegal instruction use on Loongson in code for + Math.random() (issue 2115). + + Fixed crash bug in VisitChoice (Chromium issue 126272). + + Fixed unsigned-Smi check in MappedArgumentsLookup + (Chromium issue 126414). + + Fixed LiveEdit for function with no locals (issue 825). + + Fixed register clobbering in LoadIC for interceptors + (Chromium issue 125988). + + Implemented clearing of CompareICs (issue 2102). + + Performance and stability improvements on all platforms. + + +2012-05-03: Version 3.10.8 + + Enabled MIPS cross-compilation. + + Ensured reload of elements pointer in StoreFastDoubleElement stub. + (Chromium issue 125515) + + Fixed corner cases in truncation behavior when storing to + TypedArrays. (issue 2110) + + Fixed failure to properly recognize and report out-of-memory + conditions when allocating code space pages. (Chromium issue + 118625) + + Fixed idle notifications to perform a round of incremental GCs + after context disposal. (issue 2107) + + Fixed preparser for try statement. (issue 2109) + + Performance and stability improvements on all platforms. + + +2012-04-30: Version 3.10.7 + + Performance and stability improvements on all platforms. + + +2012-04-26: Version 3.10.6 + + Fixed some bugs in accessing details of the last regexp match. + + Fixed source property of empty RegExp objects. (issue 1982) + + Enabled inlining some V8 API functions. + + Performance and stability improvements on all platforms. + + +2012-04-23: Version 3.10.5 + + Put new global var semantics behind a flag until WebKit tests are + cleaned up. + + Enabled stepping into callback passed to builtins. + (Chromium issue 109564) + + Performance and stability improvements on all platforms. + + +2012-04-19: Version 3.10.4 + + Fixed issues when stressing compaction with WeakMaps. + + Fixed missing GVN flag for new-space promotion. (Chromium issue 123919) + + Simplify invocation sequence at monomorphic function invocation sites. 
+ (issue 2079) + + Performance and stability improvements on all platforms. + + +2012-04-17: Version 3.10.3 + + Fixed several bugs in heap profiles (including issue 2078). + + Throw syntax errors on illegal escape sequences. + + Implemented rudimentary module linking (behind --harmony flag) + + Implemented ES5 erratum: Global declarations should shadow + inherited properties. + + Made handling of const more consistent when combined with 'eval' + and 'with'. + + Fixed V8 on MinGW-x64 (issue 2026). + + Performance and stability improvements on all platforms. + + +2012-04-13: Version 3.10.2 + + Fixed native ARM build (issues 1744, 539) + + Return LOOKUP variable instead of CONTEXT for non-context allocated + outer scope parameters (Chromium issue 119609). + + Fixed regular and ElementsKind transitions interfering with each other + (Chromium issue 122271). + + Improved performance of keyed loads/stores which have a HeapNumber + index (issues 1388, 1295). + + Fixed WeakMap processing for evacuation candidates (issue 2060). + + Bailout on possible direct eval calls (Chromium issue 122681). + + Do not assume that names of function expressions are context-allocated + (issue 2051). + + Performance and stability improvements on all platforms. + + +2012-04-10: Version 3.10.1 + + Fixed bug with arguments object in inlined functions (issue 2045). + + Fixed performance bug with lazy initialization (Chromium issue + 118686). + + Added suppport for Mac OS X 64bit builds with GYP. + (Patch contributed by Filipe David Manana ) + + Fixed bug with hidden properties (issue 2034). + + Fixed a performance bug when reloading pages (Chromium issue 117767, + V8 issue 1902). + + Fixed bug when optimizing throw in top-level code (issue 2054). + + Fixed two bugs with array literals (issue 2055, Chromium issue 121407). + + Fixed bug with Math.min/Math.max with NaN inputs (issue 2056). + + Fixed a bug with the new runtime profiler (Chromium issue 121147). 
+ + Fixed compilation of V8 using uClibc. + + Optimized boot-up memory use. + + Optimized regular expressions. + + +2012-03-30: Version 3.10.0 + + Fixed store IC writability check in strict mode + (Chromium issue 120099). + + Resynchronize timers if the Windows system time was changed. + (Chromium issue 119815) + + Removed "-mfloat-abi=hard" from host compiler cflags when building for + hardfp ARM + (https://code.google.com/p/chrome-os-partner/issues/detail?id=8539) + + Fixed edge case for case independent regexp character classes + (issue 2032). + + Reset function info counters after context disposal. + (Chromium issue 117767, V8 issue 1902) + + Fixed missing write barrier in CopyObjectToObjectElements. + (Chromium issue 119926) + + Fixed missing bounds check in HasElementImpl. + (Chromium issue 119925) + + Performance and stability improvements on all platforms. + + 2012-03-23: Version 3.9.24 Activated count-based profiler for ARM. diff --git a/deps/v8/DEPS b/deps/v8/DEPS new file mode 100644 index 0000000000..e50d1d20f6 --- /dev/null +++ b/deps/v8/DEPS @@ -0,0 +1,27 @@ +# Note: The buildbots evaluate this file with CWD set to the parent +# directory and assume that the root of the checkout is in ./v8/, so +# all paths in here must match this assumption. + +deps = { + # Remember to keep the revision in sync with the Makefile. + "v8/build/gyp": + "http://gyp.googlecode.com/svn/trunk@1282", +} + +deps_os = { + "win": { + "v8/third_party/cygwin": + "http://src.chromium.org/svn/trunk/deps/third_party/cygwin@66844", + + "v8/third_party/python_26": + "http://src.chromium.org/svn/trunk/tools/third_party/python_26@89111", + } +} + +hooks = [ + { + # A change to a .gyp, .gypi, or to GYP itself should run the generator. 
+ "pattern": ".", + "action": ["python", "v8/build/gyp_v8"], + }, +] diff --git a/deps/v8/Makefile b/deps/v8/Makefile index 5dc6ca5ad6..fbca56644c 100644 --- a/deps/v8/Makefile +++ b/deps/v8/Makefile @@ -137,6 +137,12 @@ ENVFILE = $(OUTDIR)/environment # Target definitions. "all" is the default. all: $(MODES) +# Special target for the buildbots to use. Depends on $(OUTDIR)/Makefile +# having been created before. +buildbot: + $(MAKE) -C "$(OUTDIR)" BUILDTYPE=$(BUILDTYPE) \ + builddir="$(abspath $(OUTDIR))/$(BUILDTYPE)" + # Compile targets. MODES and ARCHES are convenience targets. .SECONDEXPANSION: $(MODES): $(addsuffix .$$@,$(DEFAULT_ARCHES)) @@ -144,21 +150,21 @@ $(MODES): $(addsuffix .$$@,$(DEFAULT_ARCHES)) $(ARCHES): $(addprefix $$@.,$(MODES)) # Defines how to build a particular target (e.g. ia32.release). -$(BUILDS): $(OUTDIR)/Makefile-$$(basename $$@) - @$(MAKE) -C "$(OUTDIR)" -f Makefile-$(basename $@) \ +$(BUILDS): $(OUTDIR)/Makefile.$$(basename $$@) + @$(MAKE) -C "$(OUTDIR)" -f Makefile.$(basename $@) \ CXX="$(CXX)" LINK="$(LINK)" \ BUILDTYPE=$(shell echo $(subst .,,$(suffix $@)) | \ python -c "print raw_input().capitalize()") \ builddir="$(shell pwd)/$(OUTDIR)/$@" -native: $(OUTDIR)/Makefile-native - @$(MAKE) -C "$(OUTDIR)" -f Makefile-native \ +native: $(OUTDIR)/Makefile.native + @$(MAKE) -C "$(OUTDIR)" -f Makefile.native \ CXX="$(CXX)" LINK="$(LINK)" BUILDTYPE=Release \ builddir="$(shell pwd)/$(OUTDIR)/$@" # TODO(jkummerow): add "android.debug" when we need it. -android android.release: $(OUTDIR)/Makefile-android - @$(MAKE) -C "$(OUTDIR)" -f Makefile-android \ +android android.release: $(OUTDIR)/Makefile.android + @$(MAKE) -C "$(OUTDIR)" -f Makefile.android \ CXX="$(ANDROID_TOOL_PREFIX)-g++" \ AR="$(ANDROID_TOOL_PREFIX)-ar" \ RANLIB="$(ANDROID_TOOL_PREFIX)-ranlib" \ @@ -191,55 +197,40 @@ native.check: native --arch-and-mode=. $(TESTFLAGS) # Clean targets. You can clean each architecture individually, or everything. 
-$(addsuffix .clean,$(ARCHES)): - rm -f $(OUTDIR)/Makefile-$(basename $@) +$(addsuffix .clean,$(ARCHES)) android.clean: + rm -f $(OUTDIR)/Makefile.$(basename $@) rm -rf $(OUTDIR)/$(basename $@).release rm -rf $(OUTDIR)/$(basename $@).debug - find $(OUTDIR) -regex '.*\(host\|target\)-$(basename $@)\.mk' -delete + find $(OUTDIR) -regex '.*\(host\|target\).$(basename $@)\.mk' -delete native.clean: - rm -f $(OUTDIR)/Makefile-native + rm -f $(OUTDIR)/Makefile.native rm -rf $(OUTDIR)/native - find $(OUTDIR) -regex '.*\(host\|target\)-native\.mk' -delete - -android.clean: - rm -f $(OUTDIR)/Makefile-android - rm -rf $(OUTDIR)/android.release - find $(OUTDIR) -regex '.*\(host\|target\)-android\.mk' -delete + find $(OUTDIR) -regex '.*\(host\|target\).native\.mk' -delete -clean: $(addsuffix .clean,$(ARCHES)) native.clean +clean: $(addsuffix .clean,$(ARCHES)) native.clean android.clean # GYP file generation targets. -$(OUTDIR)/Makefile-ia32: $(GYPFILES) $(ENVFILE) - build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ - -Ibuild/standalone.gypi --depth=. -Dtarget_arch=ia32 \ - -S-ia32 $(GYPFLAGS) - -$(OUTDIR)/Makefile-x64: $(GYPFILES) $(ENVFILE) - build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ - -Ibuild/standalone.gypi --depth=. -Dtarget_arch=x64 \ - -S-x64 $(GYPFLAGS) - -$(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE) build/armu.gypi - build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ - -Ibuild/standalone.gypi --depth=. -Ibuild/armu.gypi \ - -S-arm $(GYPFLAGS) - -$(OUTDIR)/Makefile-mips: $(GYPFILES) $(ENVFILE) build/mipsu.gypi +MAKEFILES = $(addprefix $(OUTDIR)/Makefile.,$(ARCHES)) +$(MAKEFILES): $(GYPFILES) $(ENVFILE) + GYP_GENERATORS=make \ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ - -Ibuild/standalone.gypi --depth=. -Ibuild/mipsu.gypi \ - -S-mips $(GYPFLAGS) + -Ibuild/standalone.gypi --depth=. 
\ + -Dv8_target_arch=$(subst .,,$(suffix $@)) \ + -S.$(subst .,,$(suffix $@)) $(GYPFLAGS) -$(OUTDIR)/Makefile-native: $(GYPFILES) $(ENVFILE) +$(OUTDIR)/Makefile.native: $(GYPFILES) $(ENVFILE) + GYP_GENERATORS=make \ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ - -Ibuild/standalone.gypi --depth=. -S-native $(GYPFLAGS) + -Ibuild/standalone.gypi --depth=. -S.native $(GYPFLAGS) -$(OUTDIR)/Makefile-android: $(GYPFILES) $(ENVFILE) build/android.gypi \ +$(OUTDIR)/Makefile.android: $(GYPFILES) $(ENVFILE) build/android.gypi \ must-set-ANDROID_NDK_ROOT + GYP_GENERATORS=make \ CC="${ANDROID_TOOL_PREFIX}-gcc" \ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ -Ibuild/standalone.gypi --depth=. -Ibuild/android.gypi \ - -S-android $(GYPFLAGS) + -S.android $(GYPFLAGS) must-set-ANDROID_NDK_ROOT: ifndef ANDROID_NDK_ROOT @@ -255,9 +246,10 @@ $(ENVFILE): $(ENVFILE).new # Stores current GYPFLAGS in a file. $(ENVFILE).new: - @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new; + @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new; \ + echo "CXX=$(CXX)" >> $(ENVFILE).new # Dependencies. dependencies: svn checkout --force http://gyp.googlecode.com/svn/trunk build/gyp \ - --revision 1026 + --revision 1282 diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct index 34d0efc5ff..b0d1344700 100644 --- a/deps/v8/SConstruct +++ b/deps/v8/SConstruct @@ -1601,4 +1601,17 @@ except: pass +def WarnAboutDeprecation(): + print """ +####################################################### +# WARNING: Building V8 with SCons is deprecated and # +# will not work much longer. Please switch to using # +# the GYP-based build now. Instructions are at # +# http://code.google.com/p/v8/wiki/BuildingWithGYP. 
# +####################################################### + """ + +WarnAboutDeprecation() +import atexit +atexit.register(WarnAboutDeprecation) Build() diff --git a/deps/v8/build/armu.gypi b/deps/v8/build/armu.gypi deleted file mode 100644 index d15b8ab705..0000000000 --- a/deps/v8/build/armu.gypi +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2011 the V8 project authors. All rights reserved. -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -{ - 'variables': { - 'target_arch': 'ia32', - 'v8_target_arch': 'arm', - 'armv7': 1, - 'arm_neon': 0, - 'arm_fpu': 'vfpv3', - }, -} diff --git a/deps/v8/build/common.gypi b/deps/v8/build/common.gypi index f2bb465c1e..4a9d45dd14 100644 --- a/deps/v8/build/common.gypi +++ b/deps/v8/build/common.gypi @@ -142,8 +142,10 @@ 'USE_EABI_HARDFLOAT=1', 'CAN_USE_VFP_INSTRUCTIONS', ], - 'cflags': [ - '-mfloat-abi=hard', + 'target_conditions': [ + ['_toolset=="target"', { + 'cflags': ['-mfloat-abi=hard',], + }], ], }, { 'defines': [ @@ -171,8 +173,11 @@ 'defines': [ 'V8_TARGET_ARCH_MIPS', ], + 'variables': { + 'mipscompiler': '&1 | grep -q "^Target: mips-" && echo "yes" || echo "no")', + }, 'conditions': [ - [ 'target_arch=="mips"', { + ['mipscompiler=="yes"', { 'target_conditions': [ ['_toolset=="target"', { 'cflags': ['-EL'], @@ -236,6 +241,19 @@ ], }], ], + }, { # Section for OS=="mac". + 'conditions': [ + ['target_arch=="ia32"', { + 'xcode_settings': { + 'ARCHS': ['i386'], + } + }], + ['target_arch=="x64"', { + 'xcode_settings': { + 'ARCHS': ['x86_64'], + } + }], + ], }], ['v8_use_liveobjectlist=="true"', { 'defines': [ @@ -262,12 +280,23 @@ }, }, }], + ['OS=="win" and v8_target_arch=="x64"', { + 'msvs_settings': { + 'VCLinkerTool': { + 'StackReserveSize': '2097152', + }, + }, + }], ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \ or OS=="netbsd"', { 'conditions': [ - [ 'target_arch=="ia32"', { - 'cflags': [ '-m32' ], - 'ldflags': [ '-m32' ], + [ 'v8_target_arch!="x64"', { + # Pass -m32 to the compiler iff it understands the flag. 
+ 'variables': { + 'm32flag': ' /dev/null 2>&1) && echo -n "-m32" || true)', + }, + 'cflags': [ '<(m32flag)' ], + 'ldflags': [ '<(m32flag)' ], }], [ 'v8_no_strict_aliasing==1', { 'cflags': [ '-fno-strict-aliasing' ], @@ -300,10 +329,6 @@ }, 'VCLinkerTool': { 'LinkIncremental': '2', - # For future reference, the stack size needs to be increased - # when building for Windows 64-bit, otherwise some test cases - # can cause stack overflow. - # 'StackReserveSize': '297152', }, }, 'conditions': [ @@ -314,7 +339,7 @@ 'cflags': [ '-I/usr/pkg/include' ], }], ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd"', { - 'cflags': [ '-Wno-unused-parameter', + 'cflags': [ '-Wall', '<(werror)', '-W', '-Wno-unused-parameter', '-Wnon-virtual-dtor', '-Woverloaded-virtual' ], }], ], @@ -361,6 +386,7 @@ }], # OS=="mac" ['OS=="win"', { 'msvs_configuration_attributes': { + 'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)', 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)', 'CharacterSet': '1', }, @@ -384,12 +410,7 @@ 'VCLinkerTool': { 'LinkIncremental': '1', 'OptimizeReferences': '2', - 'OptimizeForWindows98': '1', 'EnableCOMDATFolding': '2', - # For future reference, the stack size needs to be - # increased when building for Windows 64-bit, otherwise - # some test cases can cause stack overflow. - # 'StackReserveSize': '297152', }, }, }], # OS=="win" diff --git a/deps/v8/build/gyp_v8 b/deps/v8/build/gyp_v8 index 4293e7637e..345f777d79 100755 --- a/deps/v8/build/gyp_v8 +++ b/deps/v8/build/gyp_v8 @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright 2010 the V8 project authors. All rights reserved. +# Copyright 2012 the V8 project authors. All rights reserved. 
# Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: @@ -38,6 +38,11 @@ import sys script_dir = os.path.dirname(__file__) v8_root = os.path.normpath(os.path.join(script_dir, os.pardir)) +if __name__ == '__main__': + os.chdir(v8_root) + script_dir = os.path.dirname(__file__) + v8_root = '.' + sys.path.insert(0, os.path.join(v8_root, 'tools')) import utils @@ -93,7 +98,7 @@ def additional_include_files(args=[]): result.append(path) # Always include standalone.gypi - AddInclude(os.path.join(script_dir, 'standalone.gypi')) + AddInclude(os.path.join(v8_root, 'build', 'standalone.gypi')) # Optionally add supplemental .gypi files if present. supplements = glob.glob(os.path.join(v8_root, '*', 'supplement.gypi')) @@ -135,7 +140,10 @@ if __name__ == '__main__': # path separators even on Windows due to the use of shlex.split(). args.extend(shlex.split(gyp_file)) else: - args.append(os.path.join(script_dir, 'all.gyp')) + # Note that this must not start with "./" or things break. + # So we rely on having done os.chdir(v8_root) above and use the + # relative path. + args.append(os.path.join('build', 'all.gyp')) args.extend(['-I' + i for i in additional_include_files(args)]) @@ -156,23 +164,6 @@ if __name__ == '__main__': # Generate for the architectures supported on the given platform. 
gyp_args = list(args) - gyp_args.append('-Dtarget_arch=ia32') if utils.GuessOS() == 'linux': - gyp_args.append('-S-ia32') + gyp_args.append('--generator-output=out') run_gyp(gyp_args) - - if utils.GuessOS() == 'linux': - gyp_args = list(args) - gyp_args.append('-Dtarget_arch=x64') - gyp_args.append('-S-x64') - run_gyp(gyp_args) - - gyp_args = list(args) - gyp_args.append('-I' + v8_root + '/build/armu.gypi') - gyp_args.append('-S-armu') - run_gyp(gyp_args) - - gyp_args = list(args) - gyp_args.append('-I' + v8_root + '/build/mipsu.gypi') - gyp_args.append('-S-mipsu') - run_gyp(gyp_args) diff --git a/deps/v8/build/mipsu.gypi b/deps/v8/build/mipsu.gypi deleted file mode 100644 index 637ff841e4..0000000000 --- a/deps/v8/build/mipsu.gypi +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2012 the V8 project authors. All rights reserved. -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -{ - 'variables': { - 'target_arch': 'ia32', - 'v8_target_arch': 'mips', - }, -} diff --git a/deps/v8/build/standalone.gypi b/deps/v8/build/standalone.gypi index e9b056580d..ebdf557230 100644 --- a/deps/v8/build/standalone.gypi +++ b/deps/v8/build/standalone.gypi @@ -37,8 +37,9 @@ 'variables': { 'variables': { 'conditions': [ - ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd"', { - # This handles the Linux platforms we generally deal with. + ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or \ + OS=="netbsd" or OS=="mac"', { + # This handles the Unix platforms we generally deal with. # Anything else gets passed through, which probably won't work # very well; such hosts should pass an explicit target_arch # to gyp. @@ -46,7 +47,8 @@ ' value); + + /** + * A constant for invalid SnapshotObjectId. GetSnapshotObjectId will return + * it in case heap profiler cannot find id for the object passed as + * parameter. HeapSnapshot::GetNodeById will always return NULL for such id. + */ + static const SnapshotObjectId kUnknownObjectId = 0; + /** * Takes a heap snapshot and returns it. Title may be an empty string. * See HeapSnapshot::Type for types description. @@ -413,6 +434,33 @@ class V8EXPORT HeapProfiler { HeapSnapshot::Type type = HeapSnapshot::kFull, ActivityControl* control = NULL); + /** + * Starts tracking of heap objects population statistics. 
After calling + * this method, all heap objects relocations done by the garbage collector + * are being registered. + */ + static void StartHeapObjectsTracking(); + + /** + * Adds a new time interval entry to the aggregated statistics array. The + * time interval entry contains information on the current heap objects + * population size. The method also updates aggregated statistics and + * reports updates for all previous time intervals via the OutputStream + * object. Updates on each time interval are provided as a stream of the + * HeapStatsUpdate structure instances. + * + * StartHeapObjectsTracking must be called before the first call to this + * method. + */ + static void PushHeapObjectsStats(OutputStream* stream); + + /** + * Stops tracking of heap objects population statistics, cleans up all + * collected data. StartHeapObjectsTracking must be called again prior to + * calling PushHeapObjectsStats next time. + */ + static void StopHeapObjectsTracking(); + /** * Deletes all snapshots taken. All previously returned pointers to * snapshots and their contents become invalid after this call. @@ -510,6 +558,19 @@ class V8EXPORT RetainedObjectInfo { // NOLINT }; +/** + * A struct for exporting HeapStats data from V8, using "push" model. + * See HeapProfiler::PushHeapObjectsStats. + */ +struct HeapStatsUpdate { + HeapStatsUpdate(uint32_t index, uint32_t count, uint32_t size) + : index(index), count(count), size(size) { } + uint32_t index; // Index of the time interval that was changed. + uint32_t count; // New value of count field for the interval with this index. + uint32_t size; // New value of size field for the interval with this index. +}; + + } // namespace v8 diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h index 33179f5bf0..9024531992 100644 --- a/deps/v8/include/v8.h +++ b/deps/v8/include/v8.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -107,6 +107,7 @@ class Data; class AccessorInfo; class StackTrace; class StackFrame; +class Isolate; namespace internal { @@ -862,13 +863,13 @@ class Value : public Data { * Returns true if this value is the undefined value. See ECMA-262 * 4.3.10. */ - V8EXPORT bool IsUndefined() const; + inline bool IsUndefined() const; /** * Returns true if this value is the null value. See ECMA-262 * 4.3.11. */ - V8EXPORT bool IsNull() const; + inline bool IsNull() const; /** * Returns true if this value is true. @@ -982,7 +983,11 @@ class Value : public Data { V8EXPORT bool StrictEquals(Handle that) const; private: + inline bool QuickIsUndefined() const; + inline bool QuickIsNull() const; inline bool QuickIsString() const; + V8EXPORT bool FullIsUndefined() const; + V8EXPORT bool FullIsNull() const; V8EXPORT bool FullIsString() const; }; @@ -1079,6 +1084,7 @@ class String : public Primitive { * A zero length string. */ V8EXPORT static v8::Local Empty(); + inline static v8::Local Empty(Isolate* isolate); /** * Returns true if the string is external @@ -1236,8 +1242,7 @@ class String : public Primitive { * this function should not otherwise delete or modify the resource. Neither * should the underlying buffer be deallocated or modified except through the * destructor of the external string resource. 
- */ - V8EXPORT static Local NewExternal( + */ V8EXPORT static Local NewExternal( ExternalAsciiStringResource* resource); /** @@ -1968,10 +1973,13 @@ class Arguments { inline Local Holder() const; inline bool IsConstructCall() const; inline Local Data() const; + inline Isolate* GetIsolate() const; + private: - static const int kDataIndex = 0; - static const int kCalleeIndex = -1; - static const int kHolderIndex = -2; + static const int kIsolateIndex = 0; + static const int kDataIndex = -1; + static const int kCalleeIndex = -2; + static const int kHolderIndex = -3; friend class ImplementationUtilities; inline Arguments(internal::Object** implicit_args, @@ -1993,9 +2001,11 @@ class V8EXPORT AccessorInfo { public: inline AccessorInfo(internal::Object** args) : args_(args) { } + inline Isolate* GetIsolate() const; inline Local Data() const; inline Local This() const; inline Local Holder() const; + private: internal::Object** args_; }; @@ -2552,6 +2562,11 @@ Handle V8EXPORT Null(); Handle V8EXPORT True(); Handle V8EXPORT False(); +inline Handle Undefined(Isolate* isolate); +inline Handle Null(Isolate* isolate); +inline Handle True(Isolate* isolate); +inline Handle False(Isolate* isolate); + /** * A set of constraints that specifies the limits of the runtime's memory use. @@ -2802,13 +2817,13 @@ class V8EXPORT Isolate { /** * Associate embedder-specific data with the isolate */ - void SetData(void* data); + inline void SetData(void* data); /** - * Retrive embedder-specific data from the isolate. + * Retrieve embedder-specific data from the isolate. * Returns NULL if SetData has never been called. */ - void* GetData(); + inline void* GetData(); private: Isolate(); @@ -3153,7 +3168,8 @@ class V8EXPORT V8 { * that is kept alive by JavaScript objects. * \returns the adjusted value. 
*/ - static int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes); + static intptr_t AdjustAmountOfExternalAllocatedMemory( + intptr_t change_in_bytes); /** * Suspends recording of tick samples in the profiler. @@ -3735,6 +3751,12 @@ class V8EXPORT Locker { }; +/** + * A struct for exporting HeapStats data from V8, using "push" model. + */ +struct HeapStatsUpdate; + + /** * An interface for exporting data from V8, using "push" model. */ @@ -3760,6 +3782,14 @@ class V8EXPORT OutputStream { // NOLINT * will not be called in case writing was aborted. */ virtual WriteResult WriteAsciiChunk(char* data, int size) = 0; + /** + * Writes the next chunk of heap stats data into the stream. Writing + * can be stopped by returning kAbort as function result. EndOfStream + * will not be called in case writing was aborted. + */ + virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate* data, int count) { + return kAbort; + }; }; @@ -3848,18 +3878,6 @@ const uintptr_t kEncodablePointerMask = PlatformSmiTagging::kEncodablePointerMask; const int kPointerToSmiShift = PlatformSmiTagging::kPointerToSmiShift; -template struct InternalConstants; - -// Internal constants for 32-bit systems. -template <> struct InternalConstants<4> { - static const int kStringResourceOffset = 3 * kApiPointerSize; -}; - -// Internal constants for 64-bit systems. -template <> struct InternalConstants<8> { - static const int kStringResourceOffset = 3 * kApiPointerSize; -}; - /** * This class exports constants and functionality from within v8 that * is necessary to implement inline functions in the v8 api. Don't @@ -3871,18 +3889,31 @@ class Internals { // the implementation of v8. 
static const int kHeapObjectMapOffset = 0; static const int kMapInstanceTypeOffset = 1 * kApiPointerSize + kApiIntSize; - static const int kStringResourceOffset = - InternalConstants::kStringResourceOffset; + static const int kStringResourceOffset = 3 * kApiPointerSize; + static const int kOddballKindOffset = 3 * kApiPointerSize; static const int kForeignAddressOffset = kApiPointerSize; static const int kJSObjectHeaderSize = 3 * kApiPointerSize; static const int kFullStringRepresentationMask = 0x07; static const int kExternalTwoByteRepresentationTag = 0x02; + static const int kIsolateStateOffset = 0; + static const int kIsolateEmbedderDataOffset = 1 * kApiPointerSize; + static const int kIsolateRootsOffset = 3 * kApiPointerSize; + static const int kUndefinedValueRootIndex = 5; + static const int kNullValueRootIndex = 7; + static const int kTrueValueRootIndex = 8; + static const int kFalseValueRootIndex = 9; + static const int kEmptySymbolRootIndex = 128; + static const int kJSObjectType = 0xaa; static const int kFirstNonstringType = 0x80; + static const int kOddballType = 0x82; static const int kForeignType = 0x85; + static const int kUndefinedOddballKind = 5; + static const int kNullOddballKind = 3; + static inline bool HasHeapObjectTag(internal::Object* value) { return ((reinterpret_cast(value) & kHeapObjectTagMask) == kHeapObjectTag); @@ -3902,6 +3933,11 @@ class Internals { return ReadField(map, kMapInstanceTypeOffset); } + static inline int GetOddballKind(internal::Object* obj) { + typedef internal::Object O; + return SmiValue(ReadField(obj, kOddballKindOffset)); + } + static inline void* GetExternalPointerFromSmi(internal::Object* value) { const uintptr_t address = reinterpret_cast(value); return reinterpret_cast(address >> kPointerToSmiShift); @@ -3922,6 +3958,28 @@ class Internals { return representation == kExternalTwoByteRepresentationTag; } + static inline bool IsInitialized(v8::Isolate* isolate) { + uint8_t* addr = reinterpret_cast(isolate) + 
kIsolateStateOffset; + return *reinterpret_cast(addr) == 1; + } + + static inline void SetEmbedderData(v8::Isolate* isolate, void* data) { + uint8_t* addr = reinterpret_cast(isolate) + + kIsolateEmbedderDataOffset; + *reinterpret_cast(addr) = data; + } + + static inline void* GetEmbedderData(v8::Isolate* isolate) { + uint8_t* addr = reinterpret_cast(isolate) + + kIsolateEmbedderDataOffset; + return *reinterpret_cast(addr); + } + + static inline internal::Object** GetRoot(v8::Isolate* isolate, int index) { + uint8_t* addr = reinterpret_cast(isolate) + kIsolateRootsOffset; + return reinterpret_cast(addr + index * kApiPointerSize); + } + template static inline T ReadField(Object* ptr, int offset) { uint8_t* addr = reinterpret_cast(ptr) + offset - kHeapObjectTag; @@ -4048,6 +4106,11 @@ Local Arguments::Data() const { } +Isolate* Arguments::GetIsolate() const { + return *reinterpret_cast(&implicit_args_[kIsolateIndex]); +} + + bool Arguments::IsConstructCall() const { return is_construct_call_; } @@ -4160,6 +4223,15 @@ String* String::Cast(v8::Value* value) { } +Local String::Empty(Isolate* isolate) { + typedef internal::Object* S; + typedef internal::Internals I; + if (!I::IsInitialized(isolate)) return Empty(); + S* slot = I::GetRoot(isolate, I::kEmptySymbolRootIndex); + return Local(reinterpret_cast(slot)); +} + + String::ExternalStringResource* String::GetExternalStringResource() const { typedef internal::Object O; typedef internal::Internals I; @@ -4178,6 +4250,42 @@ String::ExternalStringResource* String::GetExternalStringResource() const { } +bool Value::IsUndefined() const { +#ifdef V8_ENABLE_CHECKS + return FullIsUndefined(); +#else + return QuickIsUndefined(); +#endif +} + +bool Value::QuickIsUndefined() const { + typedef internal::Object O; + typedef internal::Internals I; + O* obj = *reinterpret_cast(const_cast(this)); + if (!I::HasHeapObjectTag(obj)) return false; + if (I::GetInstanceType(obj) != I::kOddballType) return false; + return 
(I::GetOddballKind(obj) == I::kUndefinedOddballKind); +} + + +bool Value::IsNull() const { +#ifdef V8_ENABLE_CHECKS + return FullIsNull(); +#else + return QuickIsNull(); +#endif +} + +bool Value::QuickIsNull() const { + typedef internal::Object O; + typedef internal::Internals I; + O* obj = *reinterpret_cast(const_cast(this)); + if (!I::HasHeapObjectTag(obj)) return false; + if (I::GetInstanceType(obj) != I::kOddballType) return false; + return (I::GetOddballKind(obj) == I::kNullOddballKind); +} + + bool Value::IsString() const { #ifdef V8_ENABLE_CHECKS return FullIsString(); @@ -4283,6 +4391,11 @@ External* External::Cast(v8::Value* value) { } +Isolate* AccessorInfo::GetIsolate() const { + return *reinterpret_cast(&args_[-3]); +} + + Local AccessorInfo::Data() const { return Local(reinterpret_cast(&args_[-2])); } @@ -4298,6 +4411,54 @@ Local AccessorInfo::Holder() const { } +Handle Undefined(Isolate* isolate) { + typedef internal::Object* S; + typedef internal::Internals I; + if (!I::IsInitialized(isolate)) return Undefined(); + S* slot = I::GetRoot(isolate, I::kUndefinedValueRootIndex); + return Handle(reinterpret_cast(slot)); +} + + +Handle Null(Isolate* isolate) { + typedef internal::Object* S; + typedef internal::Internals I; + if (!I::IsInitialized(isolate)) return Null(); + S* slot = I::GetRoot(isolate, I::kNullValueRootIndex); + return Handle(reinterpret_cast(slot)); +} + + +Handle True(Isolate* isolate) { + typedef internal::Object* S; + typedef internal::Internals I; + if (!I::IsInitialized(isolate)) return True(); + S* slot = I::GetRoot(isolate, I::kTrueValueRootIndex); + return Handle(reinterpret_cast(slot)); +} + + +Handle False(Isolate* isolate) { + typedef internal::Object* S; + typedef internal::Internals I; + if (!I::IsInitialized(isolate)) return False(); + S* slot = I::GetRoot(isolate, I::kFalseValueRootIndex); + return Handle(reinterpret_cast(slot)); +} + + +void Isolate::SetData(void* data) { + typedef internal::Internals I; + 
I::SetEmbedderData(this, data); +} + + +void* Isolate::GetData() { + typedef internal::Internals I; + return I::GetEmbedderData(this); +} + + /** * \example shell.cc * A simple shell that takes a list of expressions on the diff --git a/deps/v8/samples/lineprocessor.cc b/deps/v8/samples/lineprocessor.cc index 1606a8f99c..7a84a2a0ff 100644 --- a/deps/v8/samples/lineprocessor.cc +++ b/deps/v8/samples/lineprocessor.cc @@ -1,4 +1,4 @@ -// Copyright 2009 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -434,9 +434,9 @@ v8::Handle ReadLine() { } if (res == NULL) { v8::Handle t = v8::Undefined(); - return reinterpret_cast&>(t); + return v8::Handle(v8::String::Cast(*t)); } - // remove newline char + // Remove newline char for (char* pos = buffer; *pos != '\0'; pos++) { if (*pos == '\n') { *pos = '\0'; diff --git a/deps/v8/samples/samples.gyp b/deps/v8/samples/samples.gyp index 55b2a98acd..3c720a748a 100644 --- a/deps/v8/samples/samples.gyp +++ b/deps/v8/samples/samples.gyp @@ -1,4 +1,4 @@ -# Copyright 2011 the V8 project authors. All rights reserved. +# Copyright 2012 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: @@ -48,6 +48,12 @@ 'sources': [ 'process.cc', ], + }, + { + 'target_name': 'lineprocessor', + 'sources': [ + 'lineprocessor.cc', + ], } ], } diff --git a/deps/v8/samples/shell.cc b/deps/v8/samples/shell.cc index b40eca2f7c..db0cc1a930 100644 --- a/deps/v8/samples/shell.cc +++ b/deps/v8/samples/shell.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -67,17 +67,20 @@ static bool run_shell; int main(int argc, char* argv[]) { v8::V8::SetFlagsFromCommandLine(&argc, argv, true); run_shell = (argc == 1); - v8::HandleScope handle_scope; - v8::Persistent context = CreateShellContext(); - if (context.IsEmpty()) { - printf("Error creating context\n"); - return 1; + int result; + { + v8::HandleScope handle_scope; + v8::Persistent context = CreateShellContext(); + if (context.IsEmpty()) { + printf("Error creating context\n"); + return 1; + } + context->Enter(); + result = RunMain(argc, argv); + if (run_shell) RunShell(context); + context->Exit(); + context.Dispose(); } - context->Enter(); - int result = RunMain(argc, argv); - if (run_shell) RunShell(context); - context->Exit(); - context.Dispose(); v8::V8::Dispose(); return result; } diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc index 49a026be20..52a84edfda 100644 --- a/deps/v8/src/api.cc +++ b/deps/v8/src/api.cc @@ -512,6 +512,16 @@ void RegisteredExtension::Register(RegisteredExtension* that) { } +void RegisteredExtension::UnregisterAll() { + RegisteredExtension* re = first_extension_; + while (re != NULL) { + RegisteredExtension* next = re->next(); + delete re; + re = next; + } +} + + void RegisterExtension(Extension* that) { RegisteredExtension* extension = new RegisteredExtension(that); RegisteredExtension::Register(extension); @@ -2091,17 +2101,21 @@ bool StackFrame::IsConstructor() const { // --- D a t a --- -bool Value::IsUndefined() const { +bool Value::FullIsUndefined() const { if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsUndefined()")) { return false; } - return Utils::OpenHandle(this)->IsUndefined(); + bool result = Utils::OpenHandle(this)->IsUndefined(); + ASSERT_EQ(result, QuickIsUndefined()); + return result; } -bool Value::IsNull() const { +bool Value::FullIsNull() const { if 
(IsDeadCheck(i::Isolate::Current(), "v8::Value::IsNull()")) return false; - return Utils::OpenHandle(this)->IsNull(); + bool result = Utils::OpenHandle(this)->IsNull(); + ASSERT_EQ(result, QuickIsNull()); + return result; } @@ -2799,9 +2813,13 @@ bool v8::Object::ForceDelete(v8::Handle key) { i::Handle self = Utils::OpenHandle(this); i::Handle key_obj = Utils::OpenHandle(*key); - // When turning on access checks for a global object deoptimize all functions - // as optimized code does not always handle access checks. - i::Deoptimizer::DeoptimizeGlobalObject(*self); + // When deleting a property on the global object using ForceDelete + // deoptimize all functions as optimized code does not check for the hole + // value with DontDelete properties. We have to deoptimize all contexts + // because of possible cross-context inlined functions. + if (self->IsJSGlobalProxy() || self->IsGlobalObject()) { + i::Deoptimizer::DeoptimizeAll(); + } EXCEPTION_PREAMBLE(isolate); i::Handle obj = i::ForceDeleteProperty(self, key_obj); @@ -4170,7 +4188,7 @@ void v8::Object::SetPointerInInternalField(int index, void* value) { bool v8::V8::Initialize() { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::Isolate::UncheckedCurrent(); if (isolate != NULL && isolate->IsInitialized()) { return true; } @@ -4277,6 +4295,7 @@ Persistent v8::Context::New( v8::ExtensionConfiguration* extensions, v8::Handle global_template, v8::Handle global_object) { + i::Isolate::EnsureDefaultIsolate(); i::Isolate* isolate = i::Isolate::Current(); EnsureInitializedForIsolate(isolate, "v8::Context::New()"); LOG_API(isolate, "Context::New"); @@ -4611,7 +4630,9 @@ void* External::Value() const { Local v8::String::Empty() { i::Isolate* isolate = i::Isolate::Current(); - EnsureInitializedForIsolate(isolate, "v8::String::Empty()"); + if (!EnsureInitializedForIsolate(isolate, "v8::String::Empty()")) { + return v8::Local(); + } LOG_API(isolate, "String::Empty()"); return 
Utils::ToLocal(isolate->factory()->empty_symbol()); } @@ -5063,7 +5084,7 @@ Local v8::Number::New(double value) { Local v8::Integer::New(int32_t value) { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::Isolate::UncheckedCurrent(); EnsureInitializedForIsolate(isolate, "v8::Integer::New()"); if (i::Smi::IsValid(value)) { return Utils::IntegerToLocal(i::Handle(i::Smi::FromInt(value), @@ -5197,7 +5218,7 @@ void V8::AddImplicitReferences(Persistent parent, } -int V8::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) { +intptr_t V8::AdjustAmountOfExternalAllocatedMemory(intptr_t change_in_bytes) { i::Isolate* isolate = i::Isolate::Current(); if (IsDeadCheck(isolate, "v8::V8::AdjustAmountOfExternalAllocatedMemory()")) { return 0; @@ -5269,6 +5290,7 @@ void V8::RemoveMemoryAllocationCallback(MemoryAllocationCallback callback) { void V8::AddCallCompletedCallback(CallCompletedCallback callback) { if (callback == NULL) return; + i::Isolate::EnsureDefaultIsolate(); i::Isolate* isolate = i::Isolate::Current(); if (IsDeadCheck(isolate, "v8::V8::AddLeaveScriptCallback()")) return; i::V8::AddCallCompletedCallback(callback); @@ -5276,6 +5298,7 @@ void V8::AddCallCompletedCallback(CallCompletedCallback callback) { void V8::RemoveCallCompletedCallback(CallCompletedCallback callback) { + i::Isolate::EnsureDefaultIsolate(); i::Isolate* isolate = i::Isolate::Current(); if (IsDeadCheck(isolate, "v8::V8::RemoveLeaveScriptCallback()")) return; i::V8::RemoveCallCompletedCallback(callback); @@ -5341,7 +5364,7 @@ bool V8::IsExecutionTerminating(Isolate* isolate) { Isolate* Isolate::GetCurrent() { - i::Isolate* isolate = i::Isolate::Current(); + i::Isolate* isolate = i::Isolate::UncheckedCurrent(); return reinterpret_cast(isolate); } @@ -5375,17 +5398,6 @@ void Isolate::Exit() { } -void Isolate::SetData(void* data) { - i::Isolate* isolate = reinterpret_cast(this); - isolate->SetData(data); -} - -void* Isolate::GetData() { - i::Isolate* isolate = 
reinterpret_cast(this); - return isolate->GetData(); -} - - String::Utf8Value::Utf8Value(v8::Handle obj) : str_(NULL), length_(0) { i::Isolate* isolate = i::Isolate::Current(); @@ -5985,7 +5997,7 @@ Handle HeapGraphEdge::GetName() const { const HeapGraphNode* HeapGraphEdge::GetFromNode() const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapGraphEdge::GetFromNode"); - const i::HeapEntry* from = ToInternal(this)->From(); + const i::HeapEntry* from = ToInternal(this)->from(); return reinterpret_cast(from); } @@ -6019,7 +6031,7 @@ Handle HeapGraphNode::GetName() const { } -uint64_t HeapGraphNode::GetId() const { +SnapshotObjectId HeapGraphNode::GetId() const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapGraphNode::GetId"); return ToInternal(this)->id(); @@ -6051,7 +6063,7 @@ const HeapGraphEdge* HeapGraphNode::GetChild(int index) const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapSnapshot::GetChild"); return reinterpret_cast( - &ToInternal(this)->children()[index]); + ToInternal(this)->children()[index]); } @@ -6134,18 +6146,18 @@ const HeapGraphNode* HeapSnapshot::GetRoot() const { } -const HeapGraphNode* HeapSnapshot::GetNodeById(uint64_t id) const { +const HeapGraphNode* HeapSnapshot::GetNodeById(SnapshotObjectId id) const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodeById"); return reinterpret_cast( - ToInternal(this)->GetEntryById(static_cast(id))); + ToInternal(this)->GetEntryById(id)); } int HeapSnapshot::GetNodesCount() const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodesCount"); - return ToInternal(this)->entries()->length(); + return ToInternal(this)->entries().length(); } @@ -6153,7 +6165,14 @@ const HeapGraphNode* HeapSnapshot::GetNode(int index) const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapSnapshot::GetNode"); return 
reinterpret_cast( - ToInternal(this)->entries()->at(index)); + &ToInternal(this)->entries().at(index)); +} + + +SnapshotObjectId HeapSnapshot::GetMaxSnapshotJSObjectId() const { + i::Isolate* isolate = i::Isolate::Current(); + IsDeadCheck(isolate, "v8::HeapSnapshot::GetMaxSnapshotJSObjectId"); + return ToInternal(this)->max_snapshot_js_object_id(); } @@ -6198,6 +6217,14 @@ const HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) { } +SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle value) { + i::Isolate* isolate = i::Isolate::Current(); + IsDeadCheck(isolate, "v8::HeapProfiler::GetSnapshotObjectId"); + i::Handle obj = Utils::OpenHandle(*value); + return i::HeapProfiler::GetSnapshotObjectId(obj); +} + + const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle title, HeapSnapshot::Type type, ActivityControl* control) { @@ -6217,6 +6244,27 @@ const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle title, } +void HeapProfiler::StartHeapObjectsTracking() { + i::Isolate* isolate = i::Isolate::Current(); + IsDeadCheck(isolate, "v8::HeapProfiler::StartHeapObjectsTracking"); + i::HeapProfiler::StartHeapObjectsTracking(); +} + + +void HeapProfiler::StopHeapObjectsTracking() { + i::Isolate* isolate = i::Isolate::Current(); + IsDeadCheck(isolate, "v8::HeapProfiler::StopHeapObjectsTracking"); + i::HeapProfiler::StopHeapObjectsTracking(); +} + + +void HeapProfiler::PushHeapObjectsStats(OutputStream* stream) { + i::Isolate* isolate = i::Isolate::Current(); + IsDeadCheck(isolate, "v8::HeapProfiler::PushHeapObjectsStats"); + return i::HeapProfiler::PushHeapObjectsStats(stream); +} + + void HeapProfiler::DeleteAllSnapshots() { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapProfiler::DeleteAllSnapshots"); @@ -6264,7 +6312,11 @@ static void SetFlagsFromString(const char* flags) { void Testing::PrepareStressRun(int run) { static const char* kLazyOptimizations = - "--prepare-always-opt --nolimit-inlining --noalways-opt"; + "--prepare-always-opt " + 
"--max-inlined-source-size=999999 " + "--max-inlined-nodes=999999 " + "--max-inlined-nodes-cumulative=999999 " + "--noalways-opt"; static const char* kForcedOptimizations = "--always-opt"; // If deoptimization stressed turn on frequent deoptimization. If no value diff --git a/deps/v8/src/api.h b/deps/v8/src/api.h index 89cf0c864c..3ad57f4657 100644 --- a/deps/v8/src/api.h +++ b/deps/v8/src/api.h @@ -146,6 +146,7 @@ class RegisteredExtension { public: explicit RegisteredExtension(Extension* extension); static void Register(RegisteredExtension* that); + static void UnregisterAll(); Extension* extension() { return extension_; } RegisteredExtension* next() { return next_; } RegisteredExtension* next_auto() { return next_auto_; } diff --git a/deps/v8/src/apiutils.h b/deps/v8/src/apiutils.h index 68579af1b3..71c0e1c2c4 100644 --- a/deps/v8/src/apiutils.h +++ b/deps/v8/src/apiutils.h @@ -1,4 +1,4 @@ -// Copyright 2009 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -40,14 +40,17 @@ class ImplementationUtilities { } // Packs additional parameters for the NewArguments function. |implicit_args| - // is a pointer to the last element of 3-elements array controlled by GC. + // is a pointer to the last element of 4-elements array controlled by GC. 
static void PrepareArgumentsData(internal::Object** implicit_args, + internal::Isolate* isolate, internal::Object* data, internal::JSFunction* callee, internal::Object* holder) { implicit_args[v8::Arguments::kDataIndex] = data; implicit_args[v8::Arguments::kCalleeIndex] = callee; implicit_args[v8::Arguments::kHolderIndex] = holder; + implicit_args[v8::Arguments::kIsolateIndex] = + reinterpret_cast(isolate); } static v8::Arguments NewArguments(internal::Object** implicit_args, @@ -55,6 +58,8 @@ class ImplementationUtilities { bool is_construct_call) { ASSERT(implicit_args[v8::Arguments::kCalleeIndex]->IsJSFunction()); ASSERT(implicit_args[v8::Arguments::kHolderIndex]->IsHeapObject()); + // The implicit isolate argument is not tagged and looks like a SMI. + ASSERT(implicit_args[v8::Arguments::kIsolateIndex]->IsSmi()); return v8::Arguments(implicit_args, argv, argc, is_construct_call); } diff --git a/deps/v8/src/arguments.h b/deps/v8/src/arguments.h index e9a32702cf..f8fb00c575 100644 --- a/deps/v8/src/arguments.h +++ b/deps/v8/src/arguments.h @@ -1,4 +1,4 @@ -// Copyright 2006-2008 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -91,9 +91,11 @@ class CustomArguments : public Relocatable { Object* data, Object* self, JSObject* holder) : Relocatable(isolate) { - values_[2] = self; - values_[1] = holder; - values_[0] = data; + ASSERT(reinterpret_cast(isolate)->IsSmi()); + values_[3] = self; + values_[2] = holder; + values_[1] = data; + values_[0] = reinterpret_cast(isolate); } inline explicit CustomArguments(Isolate* isolate) : Relocatable(isolate) { @@ -106,8 +108,9 @@ class CustomArguments : public Relocatable { void IterateInstance(ObjectVisitor* v); Object** end() { return values_ + ARRAY_SIZE(values_) - 1; } + private: - Object* values_[3]; + Object* values_[4]; }; diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc index f772db9be2..ad2ab7e09d 100644 --- a/deps/v8/src/arm/code-stubs-arm.cc +++ b/deps/v8/src/arm/code-stubs-arm.cc @@ -5169,9 +5169,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); __ b(ne, &call); // Patch the receiver on the stack with the global receiver object. - __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); - __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset)); - __ str(r2, MemOperand(sp, argc_ * kPointerSize)); + __ ldr(r3, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); + __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalReceiverOffset)); + __ str(r3, MemOperand(sp, argc_ * kPointerSize)); __ bind(&call); } @@ -5179,9 +5179,13 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // r1: pushed function (to be verified) __ JumpIfSmi(r1, &non_function); // Get the map of the function object. 
- __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); + __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); __ b(ne, &slow); + if (RecordCallTarget()) { + GenerateRecordCallTarget(masm); + } + // Fast-case: Invoke the function now. // r1: pushed function ParameterCount actual(argc_); @@ -5205,8 +5209,17 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // Slow-case: Non-function called. __ bind(&slow); + if (RecordCallTarget()) { + // If there is a call target cache, mark it megamorphic in the + // non-function case. MegamorphicSentinel is an immortal immovable + // object (undefined) so no write barrier is needed. + ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), + masm->isolate()->heap()->undefined_value()); + __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); + __ str(ip, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); + } // Check for function proxy. - __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE)); + __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE)); __ b(ne, &non_function); __ push(r1); // put proxy as additional argument __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE)); @@ -5873,36 +5886,12 @@ void SubStringStub::Generate(MacroAssembler* masm) { // r2: result string length __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset)); __ cmp(r2, Operand(r4, ASR, 1)); + // Return original string. __ b(eq, &return_r0); + // Longer than original string's length or negative: unsafe arguments. + __ b(hi, &runtime); + // Shorter than original string's length: an actual substring. - Label result_longer_than_two; - // Check for special case of two character ASCII string, in which case - // we do a lookup in the symbol table first. - __ cmp(r2, Operand(2)); - __ b(gt, &result_longer_than_two); - __ b(lt, &runtime); - - __ JumpIfInstanceTypeIsNotSequentialAscii(r1, r1, &runtime); - - // Get the two characters forming the sub string. 
- __ add(r0, r0, Operand(r3)); - __ ldrb(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize)); - __ ldrb(r4, FieldMemOperand(r0, SeqAsciiString::kHeaderSize + 1)); - - // Try to lookup two character string in symbol table. - Label make_two_character_string; - StringHelper::GenerateTwoCharacterSymbolTableProbe( - masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string); - __ jmp(&return_r0); - - // r2: result string length. - // r3: two characters combined into halfword in little endian byte order. - __ bind(&make_two_character_string); - __ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime); - __ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize)); - __ jmp(&return_r0); - - __ bind(&result_longer_than_two); // Deal with different string types: update the index if necessary // and put the underlying string into r5. // r0: original string diff --git a/deps/v8/src/arm/debug-arm.cc b/deps/v8/src/arm/debug-arm.cc index 96139a2597..3e7a1e9d0e 100644 --- a/deps/v8/src/arm/debug-arm.cc +++ b/deps/v8/src/arm/debug-arm.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -125,6 +125,8 @@ void BreakLocationIterator::ClearDebugBreakAtSlot() { Assembler::kDebugBreakSlotInstructions); } +const bool Debug::FramePaddingLayout::kIsSupported = false; + #define __ ACCESS_MASM(masm) diff --git a/deps/v8/src/arm/deoptimizer-arm.cc b/deps/v8/src/arm/deoptimizer-arm.cc index 7b2a3c4fc1..699e6aa4b1 100644 --- a/deps/v8/src/arm/deoptimizer-arm.cc +++ b/deps/v8/src/arm/deoptimizer-arm.cc @@ -457,6 +457,8 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator, void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator, int frame_index) { + Builtins* builtins = isolate_->builtins(); + Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric); JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next())); unsigned height = iterator->Next(); unsigned height_in_bytes = height * kPointerSize; @@ -464,7 +466,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator, PrintF(" translating construct stub => height=%d\n", height_in_bytes); } - unsigned fixed_frame_size = 7 * kPointerSize; + unsigned fixed_frame_size = 8 * kPointerSize; unsigned output_frame_size = height_in_bytes + fixed_frame_size; // Allocate and store the output frame description. @@ -529,6 +531,15 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator, top_address + output_offset, output_offset, value); } + // The output frame reflects a JSConstructStubGeneric frame. + output_offset -= kPointerSize; + value = reinterpret_cast(construct_stub); + output_frame->SetFrameSlot(output_offset, value); + if (FLAG_trace_deopt) { + PrintF(" 0x%08x: [top + %d] <- 0x%08x ; code object\n", + top_address + output_offset, output_offset, value); + } + // Number of incoming arguments. 
output_offset -= kPointerSize; value = reinterpret_cast(Smi::FromInt(height - 1)); @@ -559,8 +570,6 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator, ASSERT(0 == output_offset); - Builtins* builtins = isolate_->builtins(); - Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric); uint32_t pc = reinterpret_cast( construct_stub->instruction_start() + isolate_->heap()->construct_stub_deopt_pc_offset()->value()); diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc index 0cbd46ed1d..9f448720cc 100644 --- a/deps/v8/src/arm/full-codegen-arm.cc +++ b/deps/v8/src/arm/full-codegen-arm.cc @@ -70,6 +70,7 @@ class JumpPatchSite BASE_EMBEDDED { // the inlined smi code. void EmitJumpIfNotSmi(Register reg, Label* target) { ASSERT(!patch_site_.is_bound() && !info_emitted_); + Assembler::BlockConstPoolScope block_const_pool(masm_); __ bind(&patch_site_); __ cmp(reg, Operand(reg)); // Don't use b(al, ...) as that might emit the constant pool right after the @@ -82,6 +83,7 @@ class JumpPatchSite BASE_EMBEDDED { // the inlined smi code. void EmitJumpIfSmi(Register reg, Label* target) { ASSERT(!patch_site_.is_bound() && !info_emitted_); + Assembler::BlockConstPoolScope block_const_pool(masm_); __ bind(&patch_site_); __ cmp(reg, Operand(reg)); __ b(ne, target); // Never taken before patched. @@ -110,13 +112,6 @@ class JumpPatchSite BASE_EMBEDDED { }; -// TODO(jkummerow): Obsolete as soon as x64 is updated. Remove. -int FullCodeGenerator::self_optimization_header_size() { - UNREACHABLE(); - return 24; -} - - // Generate code for a JS function. On entry to the function the receiver // and arguments have been pushed on the stack left to right. The actual // argument count matches the formal parameter count expected by the @@ -273,11 +268,11 @@ void FullCodeGenerator::Generate() { // For named function expressions, declare the function name as a // constant. 
if (scope()->is_function_scope() && scope()->function() != NULL) { - VariableProxy* proxy = scope()->function(); - ASSERT(proxy->var()->mode() == CONST || - proxy->var()->mode() == CONST_HARMONY); - ASSERT(proxy->var()->location() != Variable::UNALLOCATED); - EmitDeclaration(proxy, proxy->var()->mode(), NULL); + VariableDeclaration* function = scope()->function(); + ASSERT(function->proxy()->var()->mode() == CONST || + function->proxy()->var()->mode() == CONST_HARMONY); + ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); + VisitVariableDeclaration(function); } VisitDeclarations(scope()->declarations()); } @@ -787,62 +782,51 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, } -void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, - VariableMode mode, - FunctionLiteral* function) { +void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { + // The variable in the declaration always resides in the current function + // context. + ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); + if (FLAG_debug_code) { + // Check that we're not inside a with or catch context. + __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); + __ CompareRoot(r1, Heap::kWithContextMapRootIndex); + __ Check(ne, "Declaration in with context."); + __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); + __ Check(ne, "Declaration in catch context."); + } +} + + +void FullCodeGenerator::VisitVariableDeclaration( + VariableDeclaration* declaration) { // If it was not possible to allocate the variable at compile time, we // need to "declare" it at runtime to make sure it actually exists in the // local context. 
+ VariableProxy* proxy = declaration->proxy(); + VariableMode mode = declaration->mode(); Variable* variable = proxy->var(); - bool binding_needs_init = (function == NULL) && - (mode == CONST || mode == CONST_HARMONY || mode == LET); + bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET; switch (variable->location()) { case Variable::UNALLOCATED: - ++global_count_; + globals_->Add(variable->name()); + globals_->Add(variable->binding_needs_init() + ? isolate()->factory()->the_hole_value() + : isolate()->factory()->undefined_value()); break; case Variable::PARAMETER: case Variable::LOCAL: - if (function != NULL) { - Comment cmnt(masm_, "[ Declaration"); - VisitForAccumulatorValue(function); - __ str(result_register(), StackOperand(variable)); - } else if (binding_needs_init) { - Comment cmnt(masm_, "[ Declaration"); + if (hole_init) { + Comment cmnt(masm_, "[ VariableDeclaration"); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); __ str(ip, StackOperand(variable)); } break; case Variable::CONTEXT: - // The variable in the decl always resides in the current function - // context. - ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); - if (FLAG_debug_code) { - // Check that we're not inside a with or catch context. - __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); - __ CompareRoot(r1, Heap::kWithContextMapRootIndex); - __ Check(ne, "Declaration in with context."); - __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); - __ Check(ne, "Declaration in catch context."); - } - if (function != NULL) { - Comment cmnt(masm_, "[ Declaration"); - VisitForAccumulatorValue(function); - __ str(result_register(), ContextOperand(cp, variable->index())); - int offset = Context::SlotOffset(variable->index()); - // We know that we have written a function, which is not a smi. 
- __ RecordWriteContextSlot(cp, - offset, - result_register(), - r2, - kLRHasBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - PrepareForBailoutForId(proxy->id(), NO_REGISTERS); - } else if (binding_needs_init) { - Comment cmnt(masm_, "[ Declaration"); + if (hole_init) { + Comment cmnt(masm_, "[ VariableDeclaration"); + EmitDebugCheckDeclarationContext(variable); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); __ str(ip, ContextOperand(cp, variable->index())); // No write barrier since the_hole_value is in old space. @@ -851,13 +835,11 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, break; case Variable::LOOKUP: { - Comment cmnt(masm_, "[ Declaration"); + Comment cmnt(masm_, "[ VariableDeclaration"); __ mov(r2, Operand(variable->name())); // Declaration nodes are always introduced in one of four modes. - ASSERT(mode == VAR || - mode == CONST || - mode == CONST_HARMONY || - mode == LET); + ASSERT(mode == VAR || mode == LET || + mode == CONST || mode == CONST_HARMONY); PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE; __ mov(r1, Operand(Smi::FromInt(attr))); @@ -865,11 +847,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, // Note: For variables we must not push an initial value (such as // 'undefined') because we may have a (legal) redeclaration and we // must not destroy the current value. - if (function != NULL) { - __ Push(cp, r2, r1); - // Push initial value for function declaration. 
- VisitForStackValue(function); - } else if (binding_needs_init) { + if (hole_init) { __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); __ Push(cp, r2, r1, r0); } else { @@ -883,6 +861,122 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, } +void FullCodeGenerator::VisitFunctionDeclaration( + FunctionDeclaration* declaration) { + VariableProxy* proxy = declaration->proxy(); + Variable* variable = proxy->var(); + switch (variable->location()) { + case Variable::UNALLOCATED: { + globals_->Add(variable->name()); + Handle function = + Compiler::BuildFunctionInfo(declaration->fun(), script()); + // Check for stack-overflow exception. + if (function.is_null()) return SetStackOverflow(); + globals_->Add(function); + break; + } + + case Variable::PARAMETER: + case Variable::LOCAL: { + Comment cmnt(masm_, "[ FunctionDeclaration"); + VisitForAccumulatorValue(declaration->fun()); + __ str(result_register(), StackOperand(variable)); + break; + } + + case Variable::CONTEXT: { + Comment cmnt(masm_, "[ FunctionDeclaration"); + EmitDebugCheckDeclarationContext(variable); + VisitForAccumulatorValue(declaration->fun()); + __ str(result_register(), ContextOperand(cp, variable->index())); + int offset = Context::SlotOffset(variable->index()); + // We know that we have written a function, which is not a smi. + __ RecordWriteContextSlot(cp, + offset, + result_register(), + r2, + kLRHasBeenSaved, + kDontSaveFPRegs, + EMIT_REMEMBERED_SET, + OMIT_SMI_CHECK); + PrepareForBailoutForId(proxy->id(), NO_REGISTERS); + break; + } + + case Variable::LOOKUP: { + Comment cmnt(masm_, "[ FunctionDeclaration"); + __ mov(r2, Operand(variable->name())); + __ mov(r1, Operand(Smi::FromInt(NONE))); + __ Push(cp, r2, r1); + // Push initial value for function declaration. 
+ VisitForStackValue(declaration->fun()); + __ CallRuntime(Runtime::kDeclareContextSlot, 4); + break; + } + } +} + + +void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { + VariableProxy* proxy = declaration->proxy(); + Variable* variable = proxy->var(); + Handle instance = declaration->module()->interface()->Instance(); + ASSERT(!instance.is_null()); + + switch (variable->location()) { + case Variable::UNALLOCATED: { + Comment cmnt(masm_, "[ ModuleDeclaration"); + globals_->Add(variable->name()); + globals_->Add(instance); + Visit(declaration->module()); + break; + } + + case Variable::CONTEXT: { + Comment cmnt(masm_, "[ ModuleDeclaration"); + EmitDebugCheckDeclarationContext(variable); + __ mov(r1, Operand(instance)); + __ str(r1, ContextOperand(cp, variable->index())); + Visit(declaration->module()); + break; + } + + case Variable::PARAMETER: + case Variable::LOCAL: + case Variable::LOOKUP: + UNREACHABLE(); + } +} + + +void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) { + VariableProxy* proxy = declaration->proxy(); + Variable* variable = proxy->var(); + switch (variable->location()) { + case Variable::UNALLOCATED: + // TODO(rossberg) + break; + + case Variable::CONTEXT: { + Comment cmnt(masm_, "[ ImportDeclaration"); + EmitDebugCheckDeclarationContext(variable); + // TODO(rossberg) + break; + } + + case Variable::PARAMETER: + case Variable::LOCAL: + case Variable::LOOKUP: + UNREACHABLE(); + } +} + + +void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { + // TODO(rossberg) +} + + void FullCodeGenerator::DeclareGlobals(Handle pairs) { // Call the runtime to declare the globals. // The context is the first argument. @@ -2269,6 +2363,18 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { } // Record source position for debugger. SetSourcePosition(expr->position()); + + // Record call targets in unoptimized code, but not in the snapshot. 
+ if (!Serializer::enabled()) { + flags = static_cast(flags | RECORD_CALL_TARGET); + Handle uninitialized = + TypeFeedbackCells::UninitializedSentinel(isolate()); + Handle cell = + isolate()->factory()->NewJSGlobalPropertyCell(uninitialized); + RecordTypeFeedbackCell(expr->id(), cell); + __ mov(r2, Operand(cell)); + } + CallFunctionStub stub(arg_count, flags); __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); @@ -3660,7 +3766,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset)); - __ add(string_length, string_length, Operand(scratch1)); + __ add(string_length, string_length, Operand(scratch1), SetCC); __ b(vs, &bailout); __ cmp(element, elements_end); __ b(lt, &loop); @@ -3697,7 +3803,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { __ b(ne, &bailout); __ tst(scratch2, Operand(0x80000000)); __ b(ne, &bailout); - __ add(string_length, string_length, Operand(scratch2)); + __ add(string_length, string_length, Operand(scratch2), SetCC); __ b(vs, &bailout); __ SmiUntag(string_length); @@ -4453,7 +4559,8 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) { void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { Scope* declaration_scope = scope()->DeclarationScope(); - if (declaration_scope->is_global_scope()) { + if (declaration_scope->is_global_scope() || + declaration_scope->is_module_scope()) { // Contexts nested in the global context have a canonical empty function // as their closure, not the anonymous closure containing the global // code. 
Pass a smi sentinel and let the runtime look up the empty diff --git a/deps/v8/src/arm/ic-arm.cc b/deps/v8/src/arm/ic-arm.cc index e84365789b..c12c16754d 100644 --- a/deps/v8/src/arm/ic-arm.cc +++ b/deps/v8/src/arm/ic-arm.cc @@ -774,7 +774,7 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm, __ b(lt, slow_case); // Check that the key is a positive smi. - __ tst(key, Operand(0x8000001)); + __ tst(key, Operand(0x80000001)); __ b(ne, slow_case); // Load the elements into scratch1 and check its map. @@ -1690,12 +1690,12 @@ void CompareIC::UpdateCaches(Handle x, Handle y) { // Activate inlined smi code. if (previous_state == UNINITIALIZED) { - PatchInlinedSmiCode(address()); + PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK); } } -void PatchInlinedSmiCode(Address address) { +void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) { Address cmp_instruction_address = address + Assembler::kCallTargetAddressOffset; @@ -1729,34 +1729,31 @@ void PatchInlinedSmiCode(Address address) { Instr instr_at_patch = Assembler::instr_at(patch_address); Instr branch_instr = Assembler::instr_at(patch_address + Instruction::kInstrSize); - ASSERT(Assembler::IsCmpRegister(instr_at_patch)); - ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(), - Assembler::GetRm(instr_at_patch).code()); + // This is patching a conditional "jump if not smi/jump if smi" site. + // Enabling by changing from + // cmp rx, rx + // b eq/ne, + // to + // tst rx, #kSmiTagMask + // b ne/eq, + // and vice-versa to be disabled again. 
+ CodePatcher patcher(patch_address, 2); + Register reg = Assembler::GetRn(instr_at_patch); + if (check == ENABLE_INLINED_SMI_CHECK) { + ASSERT(Assembler::IsCmpRegister(instr_at_patch)); + ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(), + Assembler::GetRm(instr_at_patch).code()); + patcher.masm()->tst(reg, Operand(kSmiTagMask)); + } else { + ASSERT(check == DISABLE_INLINED_SMI_CHECK); + ASSERT(Assembler::IsTstImmediate(instr_at_patch)); + patcher.masm()->cmp(reg, reg); + } ASSERT(Assembler::IsBranch(branch_instr)); if (Assembler::GetCondition(branch_instr) == eq) { - // This is patching a "jump if not smi" site to be active. - // Changing - // cmp rx, rx - // b eq, - // to - // tst rx, #kSmiTagMask - // b ne, - CodePatcher patcher(patch_address, 2); - Register reg = Assembler::GetRn(instr_at_patch); - patcher.masm()->tst(reg, Operand(kSmiTagMask)); patcher.EmitCondition(ne); } else { ASSERT(Assembler::GetCondition(branch_instr) == ne); - // This is patching a "jump if smi" site to be active. 
- // Changing - // cmp rx, rx - // b ne, - // to - // tst rx, #kSmiTagMask - // b eq, - CodePatcher patcher(patch_address, 2); - Register reg = Assembler::GetRn(instr_at_patch); - patcher.masm()->tst(reg, Operand(kSmiTagMask)); patcher.EmitCondition(eq); } } diff --git a/deps/v8/src/arm/lithium-arm.cc b/deps/v8/src/arm/lithium-arm.cc index cdc1947d4e..5c60f5321c 100644 --- a/deps/v8/src/arm/lithium-arm.cc +++ b/deps/v8/src/arm/lithium-arm.cc @@ -108,22 +108,17 @@ void LInstruction::PrintTo(StringStream* stream) { } -template -void LTemplateInstruction::PrintDataTo(StringStream* stream) { +void LInstruction::PrintDataTo(StringStream* stream) { stream->Add("= "); - for (int i = 0; i < inputs_.length(); i++) { + for (int i = 0; i < InputCount(); i++) { if (i > 0) stream->Add(" "); - inputs_[i]->PrintTo(stream); + InputAt(i)->PrintTo(stream); } } -template -void LTemplateInstruction::PrintOutputOperandTo(StringStream* stream) { - for (int i = 0; i < results_.length(); i++) { - if (i > 0) stream->Add(" "); - results_[i]->PrintTo(stream); - } +void LInstruction::PrintOutputOperandTo(StringStream* stream) { + if (HasResult()) result()->PrintTo(stream); } @@ -732,22 +727,6 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) { } -LInstruction* LChunkBuilder::SetInstructionPendingDeoptimizationEnvironment( - LInstruction* instr, int ast_id) { - ASSERT(instruction_pending_deoptimization_environment_ == NULL); - ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber); - instruction_pending_deoptimization_environment_ = instr; - pending_deoptimization_ast_id_ = ast_id; - return instr; -} - - -void LChunkBuilder::ClearInstructionPendingDeoptimizationEnvironment() { - instruction_pending_deoptimization_environment_ = NULL; - pending_deoptimization_ast_id_ = AstNode::kNoNumber; -} - - LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, HInstruction* hinstr, CanDeoptimize can_deoptimize) { @@ -760,8 +739,10 @@ LInstruction* 
LChunkBuilder::MarkAsCall(LInstruction* instr, if (hinstr->HasObservableSideEffects()) { ASSERT(hinstr->next()->IsSimulate()); HSimulate* sim = HSimulate::cast(hinstr->next()); - instr = SetInstructionPendingDeoptimizationEnvironment( - instr, sim->ast_id()); + ASSERT(instruction_pending_deoptimization_environment_ == NULL); + ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber); + instruction_pending_deoptimization_environment_ = instr; + pending_deoptimization_ast_id_ = sim->ast_id(); } // If instruction does not have side-effects lazy deoptimization @@ -779,12 +760,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, } -LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) { - instr->MarkAsSaveDoubles(); - return instr; -} - - LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) { ASSERT(!instr->HasPointerMap()); instr->set_pointer_map(new(zone()) LPointerMap(position_)); @@ -1295,6 +1270,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) { LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) { ASSERT(instr->value()->representation().IsInteger32()); ASSERT(instr->representation().IsInteger32()); + if (instr->HasNoUses()) return NULL; LOperand* value = UseRegisterAtStart(instr->value()); return DefineAsRegister(new(zone()) LBitNotI(value)); } @@ -1319,6 +1295,75 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) { } +bool LChunkBuilder::HasMagicNumberForDivisor(int32_t divisor) { + uint32_t divisor_abs = abs(divisor); + // Dividing by 0, 1, and powers of 2 is easy. + // Note that IsPowerOf2(0) returns true; + ASSERT(IsPowerOf2(0) == true); + if (IsPowerOf2(divisor_abs)) return true; + + // We have magic numbers for a few specific divisors. + // Details and proofs can be found in: + // - Hacker's Delight, Henry S. Warren, Jr. + // - The PowerPC Compiler Writer’s Guide + // and probably many others. 
+ // + // We handle + // * + // but not + // * + int32_t power_of_2_factor = + CompilerIntrinsics::CountTrailingZeros(divisor_abs); + DivMagicNumbers magic_numbers = + DivMagicNumberFor(divisor_abs >> power_of_2_factor); + if (magic_numbers.M != InvalidDivMagicNumber.M) return true; + + return false; +} + + +HValue* LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(HValue* dividend) { + // A value with an integer representation does not need to be transformed. + if (dividend->representation().IsInteger32()) { + return dividend; + // A change from an integer32 can be replaced by the integer32 value. + } else if (dividend->IsChange() && + HChange::cast(dividend)->from().IsInteger32()) { + return HChange::cast(dividend)->value(); + } + return NULL; +} + + +HValue* LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(HValue* divisor) { + // Only optimize when we have magic numbers for the divisor. + // The standard integer division routine is usually slower than transitionning + // to VFP. + if (divisor->IsConstant() && + HConstant::cast(divisor)->HasInteger32Value()) { + HConstant* constant_val = HConstant::cast(divisor); + int32_t int32_val = constant_val->Integer32Value(); + if (LChunkBuilder::HasMagicNumberForDivisor(int32_val)) { + return constant_val->CopyToRepresentation(Representation::Integer32()); + } + } + return NULL; +} + + +LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) { + HValue* right = instr->right(); + LOperand* dividend = UseRegister(instr->left()); + LOperand* divisor = UseRegisterOrConstant(right); + LOperand* remainder = TempRegister(); + ASSERT(right->IsConstant() && + HConstant::cast(right)->HasInteger32Value() && + HasMagicNumberForDivisor(HConstant::cast(right)->Integer32Value())); + return AssignEnvironment(DefineAsRegister( + new LMathFloorOfDiv(dividend, divisor, remainder))); +} + + LInstruction* LChunkBuilder::DoMod(HMod* instr) { if (instr->representation().IsInteger32()) { 
ASSERT(instr->left()->representation().IsInteger32()); @@ -1753,9 +1798,9 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) { } -LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) { +LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) { LOperand* value = UseRegisterAtStart(instr->value()); - LInstruction* result = new(zone()) LCheckMap(value); + LInstruction* result = new(zone()) LCheckMaps(value); return AssignEnvironment(result); } @@ -2242,9 +2287,12 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) { if (pending_deoptimization_ast_id_ == instr->ast_id()) { LInstruction* result = new(zone()) LLazyBailout; result = AssignEnvironment(result); + // Store the lazy deopt environment with the instruction if needed. Right + // now it is only used for LInstanceOfKnownGlobal. instruction_pending_deoptimization_environment_-> - set_deoptimization_environment(result->environment()); - ClearInstructionPendingDeoptimizationEnvironment(); + SetDeferredLazyDeoptimizationEnvironment(result->environment()); + instruction_pending_deoptimization_environment_ = NULL; + pending_deoptimization_ast_id_ = AstNode::kNoNumber; return result; } @@ -2271,6 +2319,9 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { undefined, instr->call_kind(), instr->is_construct()); + if (instr->arguments_var() != NULL) { + inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject()); + } current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); return NULL; @@ -2278,10 +2329,21 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) { + LInstruction* pop = NULL; + + HEnvironment* env = current_block_->last_environment(); + + if (instr->arguments_pushed()) { + int argument_count = env->arguments_environment()->parameter_count(); + pop = new(zone()) LDrop(argument_count); + argument_count_ -= argument_count; + } + 
HEnvironment* outer = current_block_->last_environment()-> DiscardInlined(false); current_block_->UpdateEnvironment(outer); - return NULL; + + return pop; } diff --git a/deps/v8/src/arm/lithium-arm.h b/deps/v8/src/arm/lithium-arm.h index 62cde6e249..ec8aac8036 100644 --- a/deps/v8/src/arm/lithium-arm.h +++ b/deps/v8/src/arm/lithium-arm.h @@ -72,7 +72,7 @@ class LCodeGen; V(CheckFunction) \ V(CheckInstanceType) \ V(CheckNonSmi) \ - V(CheckMap) \ + V(CheckMaps) \ V(CheckPrototypeMaps) \ V(CheckSmi) \ V(ClampDToUint8) \ @@ -132,6 +132,7 @@ class LCodeGen; V(LoadNamedField) \ V(LoadNamedFieldPolymorphic) \ V(LoadNamedGeneric) \ + V(MathFloorOfDiv) \ V(ModI) \ V(MulI) \ V(NumberTagD) \ @@ -179,7 +180,8 @@ class LCodeGen; V(CheckMapValue) \ V(LoadFieldByIndex) \ V(DateField) \ - V(WrapReceiver) + V(WrapReceiver) \ + V(Drop) #define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \ @@ -203,15 +205,14 @@ class LInstruction: public ZoneObject { LInstruction() : environment_(NULL), hydrogen_value_(NULL), - is_call_(false), - is_save_doubles_(false) { } + is_call_(false) { } virtual ~LInstruction() { } virtual void CompileToNative(LCodeGen* generator) = 0; virtual const char* Mnemonic() const = 0; virtual void PrintTo(StringStream* stream); - virtual void PrintDataTo(StringStream* stream) = 0; - virtual void PrintOutputOperandTo(StringStream* stream) = 0; + virtual void PrintDataTo(StringStream* stream); + virtual void PrintOutputOperandTo(StringStream* stream); enum Opcode { // Declare a unique enum value for each instruction. 
@@ -246,22 +247,12 @@ class LInstruction: public ZoneObject { void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; } HValue* hydrogen_value() const { return hydrogen_value_; } - void set_deoptimization_environment(LEnvironment* env) { - deoptimization_environment_.set(env); - } - LEnvironment* deoptimization_environment() const { - return deoptimization_environment_.get(); - } - bool HasDeoptimizationEnvironment() const { - return deoptimization_environment_.is_set(); - } + virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } void MarkAsCall() { is_call_ = true; } - void MarkAsSaveDoubles() { is_save_doubles_ = true; } // Interface to the register allocator and iterators. bool IsMarkedAsCall() const { return is_call_; } - bool IsMarkedAsSaveDoubles() const { return is_save_doubles_; } virtual bool HasResult() const = 0; virtual LOperand* result() = 0; @@ -282,9 +273,7 @@ class LInstruction: public ZoneObject { LEnvironment* environment_; SetOncePointer pointer_map_; HValue* hydrogen_value_; - SetOncePointer deoptimization_environment_; bool is_call_; - bool is_save_doubles_; }; @@ -306,9 +295,6 @@ class LTemplateInstruction: public LInstruction { int TempCount() { return T; } LOperand* TempAt(int i) { return temps_[i]; } - virtual void PrintDataTo(StringStream* stream); - virtual void PrintOutputOperandTo(StringStream* stream); - protected: EmbeddedContainer results_; EmbeddedContainer inputs_; @@ -534,9 +520,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> { class LArgumentsElements: public LTemplateInstruction<1, 0, 0> { public: - LArgumentsElements() { } - DECLARE_CONCRETE_INSTRUCTION(ArgumentsElements, "arguments-elements") + DECLARE_HYDROGEN_ACCESSOR(ArgumentsElements) }; @@ -582,6 +567,21 @@ class LDivI: public LTemplateInstruction<1, 2, 0> { }; +class LMathFloorOfDiv: public LTemplateInstruction<1, 2, 1> { + public: + LMathFloorOfDiv(LOperand* left, + LOperand* right, + LOperand* temp = NULL) { + 
inputs_[0] = left; + inputs_[1] = right; + temps_[0] = temp; + } + + DECLARE_CONCRETE_INSTRUCTION(MathFloorOfDiv, "math-floor-of-div") + DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv) +}; + + class LMulI: public LTemplateInstruction<1, 2, 1> { public: LMulI(LOperand* left, LOperand* right, LOperand* temp) { @@ -834,6 +834,15 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> { DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal) Handle function() const { return hydrogen()->function(); } + LEnvironment* GetDeferredLazyDeoptimizationEnvironment() { + return lazy_deopt_env_; + } + virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { + lazy_deopt_env_ = env; + } + + private: + LEnvironment* lazy_deopt_env_; }; @@ -1378,6 +1387,19 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> { }; +class LDrop: public LTemplateInstruction<0, 0, 0> { + public: + explicit LDrop(int count) : count_(count) { } + + int count() const { return count_; } + + DECLARE_CONCRETE_INSTRUCTION(Drop, "drop") + + private: + int count_; +}; + + class LThisFunction: public LTemplateInstruction<1, 0, 0> { public: DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function") @@ -1460,6 +1482,7 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> { virtual void PrintDataTo(StringStream* stream); int arity() const { return hydrogen()->argument_count() - 1; } + Handle known_function() { return hydrogen()->known_function(); } }; @@ -1739,6 +1762,8 @@ class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> { LOperand* elements() { return inputs_[0]; } LOperand* key() { return inputs_[1]; } LOperand* value() { return inputs_[2]; } + + bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); } }; @@ -1889,14 +1914,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> { }; -class LCheckMap: public LTemplateInstruction<0, 1, 0> { +class LCheckMaps: public LTemplateInstruction<0, 1, 0> { public: - explicit 
LCheckMap(LOperand* value) { + explicit LCheckMaps(LOperand* value) { inputs_[0] = value; } - DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map") - DECLARE_HYDROGEN_ACCESSOR(CheckMap) + DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps") + DECLARE_HYDROGEN_ACCESSOR(CheckMaps) }; @@ -2274,6 +2299,10 @@ class LChunkBuilder BASE_EMBEDDED { HYDROGEN_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) #undef DECLARE_DO + static bool HasMagicNumberForDivisor(int32_t divisor); + static HValue* SimplifiedDividendForMathFloorOfDiv(HValue* val); + static HValue* SimplifiedDivisorForMathFloorOfDiv(HValue* val); + private: enum Status { UNUSED, @@ -2369,11 +2398,6 @@ class LChunkBuilder BASE_EMBEDDED { LInstruction* instr, HInstruction* hinstr, CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY); - LInstruction* MarkAsSaveDoubles(LInstruction* instr); - - LInstruction* SetInstructionPendingDeoptimizationEnvironment( - LInstruction* instr, int ast_id); - void ClearInstructionPendingDeoptimizationEnvironment(); LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env, int* argument_index_accumulator); diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc index 82b80a2b80..bf11ab9bea 100644 --- a/deps/v8/src/arm/lithium-codegen-arm.cc +++ b/deps/v8/src/arm/lithium-codegen-arm.cc @@ -1034,6 +1034,100 @@ void LCodeGen::DoModI(LModI* instr) { } +void LCodeGen::EmitSignedIntegerDivisionByConstant( + Register result, + Register dividend, + int32_t divisor, + Register remainder, + Register scratch, + LEnvironment* environment) { + ASSERT(!AreAliased(dividend, scratch, ip)); + ASSERT(LChunkBuilder::HasMagicNumberForDivisor(divisor)); + + uint32_t divisor_abs = abs(divisor); + + int32_t power_of_2_factor = + CompilerIntrinsics::CountTrailingZeros(divisor_abs); + + switch (divisor_abs) { + case 0: + DeoptimizeIf(al, environment); + return; + + case 1: + if (divisor > 0) { + __ Move(result, dividend); + } else { + __ rsb(result, dividend, Operand(0), 
SetCC); + DeoptimizeIf(vs, environment); + } + // Compute the remainder. + __ mov(remainder, Operand(0)); + return; + + default: + if (IsPowerOf2(divisor_abs)) { + // Branch and condition free code for integer division by a power + // of two. + int32_t power = WhichPowerOf2(divisor_abs); + if (power > 1) { + __ mov(scratch, Operand(dividend, ASR, power - 1)); + } + __ add(scratch, dividend, Operand(scratch, LSR, 32 - power)); + __ mov(result, Operand(scratch, ASR, power)); + // Negate if necessary. + // We don't need to check for overflow because the case '-1' is + // handled separately. + if (divisor < 0) { + ASSERT(divisor != -1); + __ rsb(result, result, Operand(0)); + } + // Compute the remainder. + if (divisor > 0) { + __ sub(remainder, dividend, Operand(result, LSL, power)); + } else { + __ add(remainder, dividend, Operand(result, LSL, power)); + } + return; + } else { + // Use magic numbers for a few specific divisors. + // Details and proofs can be found in: + // - Hacker's Delight, Henry S. Warren, Jr. + // - The PowerPC Compiler Writer’s Guide + // and probably many others. + // + // We handle + // * + // but not + // * + DivMagicNumbers magic_numbers = + DivMagicNumberFor(divisor_abs >> power_of_2_factor); + // Branch and condition free code for integer division by a power + // of two. + const int32_t M = magic_numbers.M; + const int32_t s = magic_numbers.s + power_of_2_factor; + + __ mov(ip, Operand(M)); + __ smull(ip, scratch, dividend, ip); + if (M < 0) { + __ add(scratch, scratch, Operand(dividend)); + } + if (s > 0) { + __ mov(scratch, Operand(scratch, ASR, s)); + } + __ add(result, scratch, Operand(dividend, LSR, 31)); + if (divisor < 0) __ rsb(result, result, Operand(0)); + // Compute the remainder. + __ mov(ip, Operand(divisor)); + // This sequence could be replaced with 'mls' when + // it gets implemented. 
+ __ mul(scratch, result, ip); + __ sub(remainder, dividend, scratch); + } + } +} + + void LCodeGen::DoDivI(LDivI* instr) { class DeferredDivI: public LDeferredCode { public: @@ -1115,6 +1209,34 @@ void LCodeGen::DoDivI(LDivI* instr) { } +void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) { + const Register result = ToRegister(instr->result()); + const Register left = ToRegister(instr->InputAt(0)); + const Register remainder = ToRegister(instr->TempAt(0)); + const Register scratch = scratch0(); + + // We only optimize this for division by constants, because the standard + // integer division routine is usually slower than transitionning to VFP. + // This could be optimized on processors with SDIV available. + ASSERT(instr->InputAt(1)->IsConstantOperand()); + int32_t divisor = ToInteger32(LConstantOperand::cast(instr->InputAt(1))); + if (divisor < 0) { + __ cmp(left, Operand(0)); + DeoptimizeIf(eq, instr->environment()); + } + EmitSignedIntegerDivisionByConstant(result, + left, + divisor, + remainder, + scratch, + instr->environment()); + // We operated a truncating division. Correct the result if necessary. + __ cmp(remainder, Operand(0)); + __ teq(remainder, Operand(divisor), ne); + __ sub(result, result, Operand(1), LeaveCC, mi); +} + + template void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr, Token::Value op) { @@ -2267,8 +2389,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, RelocInfo::CODE_TARGET, instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); - ASSERT(instr->HasDeoptimizationEnvironment()); - LEnvironment* env = instr->deoptimization_environment(); + LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); // Put the result value into the result register slot and // restore all registers. 
@@ -2466,42 +2587,38 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { Register object = ToRegister(instr->object()); Register result = ToRegister(instr->result()); Register scratch = scratch0(); + int map_count = instr->hydrogen()->types()->length(); + bool need_generic = instr->hydrogen()->need_generic(); + + if (map_count == 0 && !need_generic) { + DeoptimizeIf(al, instr->environment()); + return; + } Handle name = instr->hydrogen()->name(); - if (map_count == 0) { - ASSERT(instr->hydrogen()->need_generic()); - __ mov(r2, Operand(name)); - Handle ic = isolate()->builtins()->LoadIC_Initialize(); - CallCode(ic, RelocInfo::CODE_TARGET, instr); - } else { - Label done; - __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); - for (int i = 0; i < map_count - 1; ++i) { - Handle map = instr->hydrogen()->types()->at(i); + Label done; + __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); + for (int i = 0; i < map_count; ++i) { + bool last = (i == map_count - 1); + Handle map = instr->hydrogen()->types()->at(i); + __ cmp(scratch, Operand(map)); + if (last && !need_generic) { + DeoptimizeIf(ne, instr->environment()); + EmitLoadFieldOrConstantFunction(result, object, map, name); + } else { Label next; - __ cmp(scratch, Operand(map)); __ b(ne, &next); EmitLoadFieldOrConstantFunction(result, object, map, name); __ b(&done); __ bind(&next); } - Handle map = instr->hydrogen()->types()->last(); - __ cmp(scratch, Operand(map)); - if (instr->hydrogen()->need_generic()) { - Label generic; - __ b(ne, &generic); - EmitLoadFieldOrConstantFunction(result, object, map, name); - __ b(&done); - __ bind(&generic); - __ mov(r2, Operand(name)); - Handle ic = isolate()->builtins()->LoadIC_Initialize(); - CallCode(ic, RelocInfo::CODE_TARGET, instr); - } else { - DeoptimizeIf(ne, instr->environment()); - EmitLoadFieldOrConstantFunction(result, object, map, name); - } - __ bind(&done); } + if (need_generic) { + __ mov(r2, Operand(name)); 
+ Handle ic = isolate()->builtins()->LoadIC_Initialize(); + CallCode(ic, RelocInfo::CODE_TARGET, instr); + } + __ bind(&done); } @@ -2764,16 +2881,20 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { Register scratch = scratch0(); Register result = ToRegister(instr->result()); - // Check if the calling frame is an arguments adaptor frame. - Label done, adapted; - __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); - __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset)); - __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + if (instr->hydrogen()->from_inlined()) { + __ sub(result, sp, Operand(2 * kPointerSize)); + } else { + // Check if the calling frame is an arguments adaptor frame. + Label done, adapted; + __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); + __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset)); + __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); - // Result is the frame pointer for the frame if not adapted and for the real - // frame below the adaptor frame if adapted. - __ mov(result, fp, LeaveCC, ne); - __ mov(result, scratch, LeaveCC, eq); + // Result is the frame pointer for the frame if not adapted and for the real + // frame below the adaptor frame if adapted. 
+ __ mov(result, fp, LeaveCC, ne); + __ mov(result, scratch, LeaveCC, eq); + } } @@ -2882,7 +3003,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) { __ b(ne, &loop); __ bind(&invoke); - ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); + ASSERT(instr->HasPointerMap()); LPointerMap* pointers = instr->pointer_map(); RecordPosition(pointers->position()); SafepointGenerator safepoint_generator( @@ -2907,6 +3028,11 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) { } +void LCodeGen::DoDrop(LDrop* instr) { + __ Drop(instr->count()); +} + + void LCodeGen::DoThisFunction(LThisFunction* instr) { Register result = ToRegister(instr->result()); __ LoadHeapObject(result, instr->hydrogen()->closure()); @@ -2953,7 +3079,8 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { void LCodeGen::CallKnownFunction(Handle function, int arity, LInstruction* instr, - CallKind call_kind) { + CallKind call_kind, + R1State r1_state) { bool can_invoke_directly = !function->NeedsArgumentsAdaption() || function->shared()->formal_parameter_count() == arity; @@ -2961,7 +3088,10 @@ void LCodeGen::CallKnownFunction(Handle function, RecordPosition(pointers->position()); if (can_invoke_directly) { - __ LoadHeapObject(r1, function); + if (r1_state == R1_UNINITIALIZED) { + __ LoadHeapObject(r1, function); + } + // Change context if needed. 
bool change_context = (info()->closure()->context() != function->context()) || @@ -3000,7 +3130,8 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { CallKnownFunction(instr->function(), instr->arity(), instr, - CALL_AS_METHOD); + CALL_AS_METHOD, + R1_UNINITIALIZED); } @@ -3424,13 +3555,21 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { ASSERT(ToRegister(instr->function()).is(r1)); ASSERT(instr->HasPointerMap()); - ASSERT(instr->HasDeoptimizationEnvironment()); - LPointerMap* pointers = instr->pointer_map(); - RecordPosition(pointers->position()); - SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); - ParameterCount count(instr->arity()); - __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); + + if (instr->known_function().is_null()) { + LPointerMap* pointers = instr->pointer_map(); + RecordPosition(pointers->position()); + SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); + ParameterCount count(instr->arity()); + __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); + __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); + } else { + CallKnownFunction(instr->known_function(), + instr->arity(), + instr, + CALL_AS_METHOD, + R1_CONTAINS_TARGET); + } } @@ -3485,7 +3624,11 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) { void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { ASSERT(ToRegister(instr->result()).is(r0)); - CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION); + CallKnownFunction(instr->target(), + instr->arity(), + instr, + CALL_AS_FUNCTION, + R1_UNINITIALIZED); } @@ -3615,7 +3758,6 @@ void LCodeGen::DoStoreKeyedFastDoubleElement( Register scratch = scratch0(); bool key_is_constant = instr->key()->IsConstantOperand(); int constant_key = 0; - Label not_nan; // Calculate 
the effective address of the slot in the array to store the // double value. @@ -3638,13 +3780,15 @@ void LCodeGen::DoStoreKeyedFastDoubleElement( Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag)); } - // Check for NaN. All NaNs must be canonicalized. - __ VFPCompareAndSetFlags(value, value); - - // Only load canonical NaN if the comparison above set the overflow. - __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs); + if (instr->NeedsCanonicalization()) { + // Check for NaN. All NaNs must be canonicalized. + __ VFPCompareAndSetFlags(value, value); + // Only load canonical NaN if the comparison above set the overflow. + __ Vmov(value, + FixedDoubleArray::canonical_not_the_hole_nan_as_double(), + vs); + } - __ bind(¬_nan); __ vstr(value, scratch, 0); } @@ -4338,14 +4482,22 @@ void LCodeGen::DoCheckMapCommon(Register reg, } -void LCodeGen::DoCheckMap(LCheckMap* instr) { +void LCodeGen::DoCheckMaps(LCheckMaps* instr) { Register scratch = scratch0(); LOperand* input = instr->InputAt(0); ASSERT(input->IsRegister()); Register reg = ToRegister(input); - Handle map = instr->hydrogen()->map(); - DoCheckMapCommon(reg, scratch, map, instr->hydrogen()->mode(), - instr->environment()); + + Label success; + SmallMapList* map_set = instr->hydrogen()->map_set(); + for (int i = 0; i < map_set->length() - 1; i++) { + Handle map = map_set->at(i); + __ CompareMap(reg, scratch, map, &success, REQUIRE_EXACT_MAP); + __ b(eq, &success); + } + Handle map = map_set->last(); + DoCheckMapCommon(reg, scratch, map, REQUIRE_EXACT_MAP, instr->environment()); + __ bind(&success); } @@ -4464,6 +4616,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) { deferred->entry(), TAG_OBJECT); + __ bind(deferred->exit()); + if (FLAG_debug_code) { + Label is_in_new_space; + __ JumpIfInNewSpace(result, scratch, &is_in_new_space); + __ Abort("Allocated object is not in new-space"); + __ bind(&is_in_new_space); + } + // Load the initial map. 
Register map = scratch; __ LoadHeapObject(map, constructor); @@ -4482,14 +4642,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) { __ str(scratch, FieldMemOperand(result, property_offset)); } } - - __ bind(deferred->exit()); } void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) { Register result = ToRegister(instr->result()); Handle constructor = instr->hydrogen()->constructor(); + Handle initial_map(constructor->initial_map()); + int instance_size = initial_map->instance_size(); // TODO(3095996): Get rid of this. For now, we need to make the // result register contain a valid pointer because it is already @@ -4497,9 +4657,9 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) { __ mov(result, Operand(0)); PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); - __ LoadHeapObject(r0, constructor); + __ mov(r0, Operand(Smi::FromInt(instance_size))); __ push(r0); - CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr); + CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); __ StoreToSafepointRegisterSlot(r0, result); } @@ -4633,9 +4793,10 @@ void LCodeGen::EmitDeepCopy(Handle object, __ str(r2, FieldMemOperand(result, total_offset + 4)); } } else if (elements->IsFixedArray()) { + Handle fast_elements = Handle::cast(elements); for (int i = 0; i < elements_length; i++) { int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); - Handle value = JSObject::GetElement(object, i); + Handle value(fast_elements->get(i)); if (value->IsJSObject()) { Handle value_object = Handle::cast(value); __ add(r2, result, Operand(*offset)); @@ -4659,6 +4820,23 @@ void LCodeGen::EmitDeepCopy(Handle object, void LCodeGen::DoFastLiteral(LFastLiteral* instr) { int size = instr->hydrogen()->total_size(); + ElementsKind boilerplate_elements_kind = + instr->hydrogen()->boilerplate()->GetElementsKind(); + + // Deopt if the literal boilerplate ElementsKind is of a type different than + // the expected one. 
The check isn't necessary if the boilerplate has already + // been converted to FAST_ELEMENTS. + if (boilerplate_elements_kind != FAST_ELEMENTS) { + __ LoadHeapObject(r1, instr->hydrogen()->boilerplate()); + // Load map into r2. + __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); + // Load the map's "bit field 2". + __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset)); + // Retrieve elements_kind from bit field 2. + __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount); + __ cmp(r2, Operand(boilerplate_elements_kind)); + DeoptimizeIf(ne, instr->environment()); + } // Allocate all objects that are part of the literal in one big // allocation. This avoids multiple limit checks. @@ -4954,7 +5132,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { Register strict = scratch0(); __ mov(strict, Operand(Smi::FromInt(strict_mode_flag()))); __ Push(object, key, strict); - ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); + ASSERT(instr->HasPointerMap()); LPointerMap* pointers = instr->pointer_map(); RecordPosition(pointers->position()); SafepointGenerator safepoint_generator( @@ -4967,7 +5145,7 @@ void LCodeGen::DoIn(LIn* instr) { Register obj = ToRegister(instr->object()); Register key = ToRegister(instr->key()); __ Push(key, obj); - ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); + ASSERT(instr->HasPointerMap()); LPointerMap* pointers = instr->pointer_map(); RecordPosition(pointers->position()); SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt); diff --git a/deps/v8/src/arm/lithium-codegen-arm.h b/deps/v8/src/arm/lithium-codegen-arm.h index adb6e1bb73..c6a3af7e02 100644 --- a/deps/v8/src/arm/lithium-codegen-arm.h +++ b/deps/v8/src/arm/lithium-codegen-arm.h @@ -215,12 +215,18 @@ class LCodeGen BASE_EMBEDDED { int argc, LInstruction* instr); + enum R1State { + R1_UNINITIALIZED, + R1_CONTAINS_TARGET + }; + // Generate a direct call to a known function. 
Expects the function // to be in r1. void CallKnownFunction(Handle function, int arity, LInstruction* instr, - CallKind call_kind); + CallKind call_kind, + R1State r1_state); void LoadHeapObject(Register result, Handle object); @@ -317,6 +323,17 @@ class LCodeGen BASE_EMBEDDED { Register source, int* offset); + // Emit optimized code for integer division. + // Inputs are signed. + // All registers are clobbered. + // If 'remainder' is no_reg, it is not computed. + void EmitSignedIntegerDivisionByConstant(Register result, + Register dividend, + int32_t divisor, + Register remainder, + Register scratch, + LEnvironment* environment); + struct JumpTableEntry { explicit inline JumpTableEntry(Address entry) : label(), diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc index 857c2bf770..4da2fece03 100644 --- a/deps/v8/src/arm/macro-assembler-arm.cc +++ b/deps/v8/src/arm/macro-assembler-arm.cc @@ -3710,22 +3710,35 @@ void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) { } -bool AreAliased(Register r1, Register r2, Register r3, Register r4) { - if (r1.is(r2)) return true; - if (r1.is(r3)) return true; - if (r1.is(r4)) return true; - if (r2.is(r3)) return true; - if (r2.is(r4)) return true; - if (r3.is(r4)) return true; - return false; +#ifdef DEBUG +bool AreAliased(Register reg1, + Register reg2, + Register reg3, + Register reg4, + Register reg5, + Register reg6) { + int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + + reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid(); + + RegList regs = 0; + if (reg1.is_valid()) regs |= reg1.bit(); + if (reg2.is_valid()) regs |= reg2.bit(); + if (reg3.is_valid()) regs |= reg3.bit(); + if (reg4.is_valid()) regs |= reg4.bit(); + if (reg5.is_valid()) regs |= reg5.bit(); + if (reg6.is_valid()) regs |= reg6.bit(); + int n_of_non_aliasing_regs = NumRegs(regs); + + return n_of_valid_regs != n_of_non_aliasing_regs; } +#endif 
CodePatcher::CodePatcher(byte* address, int instructions) : address_(address), instructions_(instructions), size_(instructions * Assembler::kInstrSize), - masm_(Isolate::Current(), address, size_ + Assembler::kGap) { + masm_(NULL, address, size_ + Assembler::kGap) { // Create a new macro assembler pointing to the address of the code to patch. // The size is adjusted with kGap on order for the assembler to generate size // bytes of instructions without failing with buffer size constraints. diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h index 47afa93a6e..360f4c128c 100644 --- a/deps/v8/src/arm/macro-assembler-arm.h +++ b/deps/v8/src/arm/macro-assembler-arm.h @@ -85,7 +85,14 @@ enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK }; enum LinkRegisterStatus { kLRHasNotBeenSaved, kLRHasBeenSaved }; -bool AreAliased(Register r1, Register r2, Register r3, Register r4); +#ifdef DEBUG +bool AreAliased(Register reg1, + Register reg2, + Register reg3 = no_reg, + Register reg4 = no_reg, + Register reg5 = no_reg, + Register reg6 = no_reg); +#endif // MacroAssembler implements a collection of frequently used macros. @@ -1321,7 +1328,6 @@ class MacroAssembler: public Assembler { }; -#ifdef ENABLE_DEBUGGER_SUPPORT // The code patcher is used to patch (typically) small parts of code e.g. for // debugging and other types of instrumentation. When using the code patcher // the exact number of bytes specified must be emitted. It is not legal to emit @@ -1351,7 +1357,6 @@ class CodePatcher { int size_; // Number of bytes of the expected patch size. MacroAssembler masm_; // Macro assembler used to generate the code. 
}; -#endif // ENABLE_DEBUGGER_SUPPORT // ----------------------------------------------------------------------------- diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/arm/regexp-macro-assembler-arm.cc index 10ff2dd96c..a833624ceb 100644 --- a/deps/v8/src/arm/regexp-macro-assembler-arm.cc +++ b/deps/v8/src/arm/regexp-macro-assembler-arm.cc @@ -452,8 +452,12 @@ void RegExpMacroAssemblerARM::CheckNotCharacter(unsigned c, void RegExpMacroAssemblerARM::CheckCharacterAfterAnd(uint32_t c, uint32_t mask, Label* on_equal) { - __ and_(r0, current_character(), Operand(mask)); - __ cmp(r0, Operand(c)); + if (c == 0) { + __ tst(current_character(), Operand(mask)); + } else { + __ and_(r0, current_character(), Operand(mask)); + __ cmp(r0, Operand(c)); + } BranchOrBacktrack(eq, on_equal); } @@ -461,8 +465,12 @@ void RegExpMacroAssemblerARM::CheckCharacterAfterAnd(uint32_t c, void RegExpMacroAssemblerARM::CheckNotCharacterAfterAnd(unsigned c, unsigned mask, Label* on_not_equal) { - __ and_(r0, current_character(), Operand(mask)); - __ cmp(r0, Operand(c)); + if (c == 0) { + __ tst(current_character(), Operand(mask)); + } else { + __ and_(r0, current_character(), Operand(mask)); + __ cmp(r0, Operand(c)); + } BranchOrBacktrack(ne, on_not_equal); } @@ -480,6 +488,44 @@ void RegExpMacroAssemblerARM::CheckNotCharacterAfterMinusAnd( } +void RegExpMacroAssemblerARM::CheckCharacterInRange( + uc16 from, + uc16 to, + Label* on_in_range) { + __ sub(r0, current_character(), Operand(from)); + __ cmp(r0, Operand(to - from)); + BranchOrBacktrack(ls, on_in_range); // Unsigned lower-or-same condition. +} + + +void RegExpMacroAssemblerARM::CheckCharacterNotInRange( + uc16 from, + uc16 to, + Label* on_not_in_range) { + __ sub(r0, current_character(), Operand(from)); + __ cmp(r0, Operand(to - from)); + BranchOrBacktrack(hi, on_not_in_range); // Unsigned higher condition. 
+} + + +void RegExpMacroAssemblerARM::CheckBitInTable( + Handle table, + Label* on_bit_set) { + __ mov(r0, Operand(table)); + if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) { + __ and_(r1, current_character(), Operand(kTableSize - 1)); + __ add(r1, r1, Operand(ByteArray::kHeaderSize - kHeapObjectTag)); + } else { + __ add(r1, + current_character(), + Operand(ByteArray::kHeaderSize - kHeapObjectTag)); + } + __ ldrb(r0, MemOperand(r0, r1)); + __ cmp(r0, Operand(0)); + BranchOrBacktrack(ne, on_bit_set); +} + + bool RegExpMacroAssemblerARM::CheckSpecialCharacterClass(uc16 type, Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.h b/deps/v8/src/arm/regexp-macro-assembler-arm.h index 5c8ed0693f..14f984f567 100644 --- a/deps/v8/src/arm/regexp-macro-assembler-arm.h +++ b/deps/v8/src/arm/regexp-macro-assembler-arm.h @@ -79,6 +79,14 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler { uc16 minus, uc16 mask, Label* on_not_equal); + virtual void CheckCharacterInRange(uc16 from, + uc16 to, + Label* on_in_range); + virtual void CheckCharacterNotInRange(uc16 from, + uc16 to, + Label* on_not_in_range); + virtual void CheckBitInTable(Handle table, Label* on_bit_set); + // Checks whether the given offset from the current position is before // the end of the string. virtual void CheckPosition(int cp_offset, Label* on_outside_input); diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc index 06f8385af9..49c0982301 100644 --- a/deps/v8/src/arm/stub-cache-arm.cc +++ b/deps/v8/src/arm/stub-cache-arm.cc @@ -443,8 +443,10 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, Label exit; // Check that the map of the object hasn't changed. + CompareMapMode mode = transition.is_null() ? 
ALLOW_ELEMENT_TRANSITION_MAPS + : REQUIRE_EXACT_MAP; __ CheckMap(receiver_reg, scratch, Handle(object->map()), miss_label, - DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS); + DO_SMI_CHECK, mode); // Perform global security token check if needed. if (object->IsJSGlobalProxy()) { @@ -580,6 +582,8 @@ static void PushInterceptorArguments(MacroAssembler* masm, __ push(holder); __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset)); __ push(scratch); + __ mov(scratch, Operand(ExternalReference::isolate_address())); + __ push(scratch); } @@ -594,7 +598,7 @@ static void CompileCallLoadPropertyWithInterceptor( ExternalReference ref = ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly), masm->isolate()); - __ mov(r0, Operand(5)); + __ mov(r0, Operand(6)); __ mov(r1, Operand(ref)); CEntryStub stub(1); @@ -602,9 +606,9 @@ static void CompileCallLoadPropertyWithInterceptor( } -static const int kFastApiCallArguments = 3; +static const int kFastApiCallArguments = 4; -// Reserves space for the extra arguments to FastHandleApiCall in the +// Reserves space for the extra arguments to API function in the // caller's frame. // // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall. @@ -630,7 +634,8 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm, // -- sp[0] : holder (set by CheckPrototypes) // -- sp[4] : callee JS function // -- sp[8] : call data - // -- sp[12] : last JS argument + // -- sp[12] : isolate + // -- sp[16] : last JS argument // -- ... // -- sp[(argc + 3) * 4] : first JS argument // -- sp[(argc + 4) * 4] : receiver @@ -640,7 +645,7 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm, __ LoadHeapObject(r5, function); __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset)); - // Pass the additional arguments FastHandleApiCall expects. + // Pass the additional arguments. 
Handle api_call_info = optimization.api_call_info(); Handle call_data(api_call_info->data()); if (masm->isolate()->heap()->InNewSpace(*call_data)) { @@ -649,13 +654,15 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm, } else { __ Move(r6, call_data); } - // Store JS function and call data. - __ stm(ib, sp, r5.bit() | r6.bit()); + __ mov(r7, Operand(ExternalReference::isolate_address())); + // Store JS function, call data and isolate. + __ stm(ib, sp, r5.bit() | r6.bit() | r7.bit()); - // r2 points to call data as expected by Arguments - // (refer to layout above). - __ add(r2, sp, Operand(2 * kPointerSize)); + // Prepare arguments. + __ add(r2, sp, Operand(3 * kPointerSize)); + // Allocate the v8::Arguments structure in the arguments' space since + // it's not controlled by GC. const int kApiStackSpace = 4; FrameScope frame_scope(masm, StackFrame::MANUAL); @@ -664,9 +671,9 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm, // r0 = v8::Arguments& // Arguments is after the return address. __ add(r0, sp, Operand(1 * kPointerSize)); - // v8::Arguments::implicit_args = data + // v8::Arguments::implicit_args_ __ str(r2, MemOperand(r0, 0 * kPointerSize)); - // v8::Arguments::values = last argument + // v8::Arguments::values_ __ add(ip, r2, Operand(argc * kPointerSize)); __ str(ip, MemOperand(r0, 1 * kPointerSize)); // v8::Arguments::length_ = argc @@ -843,7 +850,7 @@ class CallInterceptorCompiler BASE_EMBEDDED { __ CallExternalReference( ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall), masm->isolate()), - 5); + 6); // Restore the name_ register. __ pop(name_); // Leave the internal frame. 
@@ -1202,7 +1209,9 @@ void StubCompiler::GenerateLoadCallback(Handle object, } else { __ Move(scratch3, Handle(callback->data())); } - __ Push(reg, scratch3, name_reg); + __ Push(reg, scratch3); + __ mov(scratch3, Operand(ExternalReference::isolate_address())); + __ Push(scratch3, name_reg); __ mov(r0, sp); // r0 = Handle const int kApiStackSpace = 1; @@ -1214,7 +1223,7 @@ void StubCompiler::GenerateLoadCallback(Handle object, __ str(scratch2, MemOperand(sp, 1 * kPointerSize)); __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo& - const int kStackUnwindSpace = 4; + const int kStackUnwindSpace = 5; Address getter_address = v8::ToCData
(callback->getter()); ApiFunction fun(getter_address); ExternalReference ref = @@ -1264,12 +1273,19 @@ void StubCompiler::GenerateLoadInterceptor(Handle object, name, miss); ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1)); + // Preserve the receiver register explicitly whenever it is different from + // the holder and it is needed should the interceptor return without any + // result. The CALLBACKS case needs the receiver to be passed into C++ code, + // the FIELD case might cause a miss during the prototype check. + bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder(); + bool must_preserve_receiver_reg = !receiver.is(holder_reg) && + (lookup->type() == CALLBACKS || must_perfrom_prototype_check); + // Save necessary data before invoking an interceptor. // Requires a frame to make GC aware of pushed pointers. { FrameScope frame_scope(masm(), StackFrame::INTERNAL); - if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) { - // CALLBACKS case needs a receiver to be passed into C++ callback. + if (must_preserve_receiver_reg) { __ Push(receiver, holder_reg, name_reg); } else { __ Push(holder_reg, name_reg); @@ -1294,14 +1310,14 @@ void StubCompiler::GenerateLoadInterceptor(Handle object, __ bind(&interceptor_failed); __ pop(name_reg); __ pop(holder_reg); - if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) { + if (must_preserve_receiver_reg) { __ pop(receiver); } // Leave the internal frame. } // Check that the maps from interceptor's holder to lookup's holder // haven't changed. And load lookup's holder into |holder| register. 
- if (*interceptor_holder != lookup->holder()) { + if (must_perfrom_prototype_check) { holder_reg = CheckPrototypes(interceptor_holder, holder_reg, Handle(lookup->holder()), @@ -1335,20 +1351,19 @@ void StubCompiler::GenerateLoadInterceptor(Handle object, if (!receiver.is(holder_reg)) { ASSERT(scratch1.is(holder_reg)); __ Push(receiver, holder_reg); - __ ldr(scratch3, - FieldMemOperand(scratch2, AccessorInfo::kDataOffset)); - __ Push(scratch3, scratch2, name_reg); } else { __ push(receiver); - __ ldr(scratch3, - FieldMemOperand(scratch2, AccessorInfo::kDataOffset)); - __ Push(holder_reg, scratch3, scratch2, name_reg); + __ push(holder_reg); } + __ ldr(scratch3, + FieldMemOperand(scratch2, AccessorInfo::kDataOffset)); + __ mov(scratch1, Operand(ExternalReference::isolate_address())); + __ Push(scratch3, scratch1, scratch2, name_reg); ExternalReference ref = ExternalReference(IC_Utility(IC::kLoadCallbackProperty), masm()->isolate()); - __ TailCallExternalReference(ref, 5, 1); + __ TailCallExternalReference(ref, 6, 1); } } else { // !compile_followup_inline // Call the runtime system to load the interceptor. @@ -1362,7 +1377,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle object, ExternalReference ref = ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate()); - __ TailCallExternalReference(ref, 5, 1); + __ TailCallExternalReference(ref, 6, 1); } } @@ -1730,7 +1745,7 @@ Handle CallStubCompiler::CompileArrayPopCall( // We can't address the last element in one operation. Compute the more // expensive shift first, and use an offset later on. 
__ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize)); - __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag)); + __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); __ cmp(r0, r6); __ b(eq, &call_builtin); @@ -1738,7 +1753,7 @@ Handle CallStubCompiler::CompileArrayPopCall( __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset)); // Fill with the hole. - __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag)); + __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize)); __ Drop(argc + 1); __ Ret(); @@ -3368,6 +3383,44 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) { } +static void GenerateSmiKeyCheck(MacroAssembler* masm, + Register key, + Register scratch0, + Register scratch1, + DwVfpRegister double_scratch0, + Label* fail) { + if (CpuFeatures::IsSupported(VFP3)) { + CpuFeatures::Scope scope(VFP3); + Label key_ok; + // Check for smi or a smi inside a heap number. We convert the heap + // number and check if the conversion is exact and fits into the smi + // range. + __ JumpIfSmi(key, &key_ok); + __ CheckMap(key, + scratch0, + Heap::kHeapNumberMapRootIndex, + fail, + DONT_DO_SMI_CHECK); + __ sub(ip, key, Operand(kHeapObjectTag)); + __ vldr(double_scratch0, ip, HeapNumber::kValueOffset); + __ EmitVFPTruncate(kRoundToZero, + double_scratch0.low(), + double_scratch0, + scratch0, + scratch1, + kCheckForInexactConversion); + __ b(ne, fail); + __ vmov(scratch0, double_scratch0.low()); + __ TrySmiTag(scratch0, fail, scratch1); + __ mov(key, scratch0); + __ bind(&key_ok); + } else { + // Check that the key is a smi. + __ JumpIfNotSmi(key, fail); + } +} + + void KeyedLoadStubCompiler::GenerateLoadExternalArray( MacroAssembler* masm, ElementsKind elements_kind) { @@ -3384,8 +3437,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. 
- // Check that the key is a smi. - __ JumpIfNotSmi(key, &miss_force_generic); + // Check that the key is a smi or a heap number convertible to a smi. + GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic); __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); // r3: elements array @@ -3715,8 +3768,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - // Check that the key is a smi. - __ JumpIfNotSmi(key, &miss_force_generic); + // Check that the key is a smi or a heap number convertible to a smi. + GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic); __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); @@ -4041,8 +4094,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) { // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - // Check that the key is a smi. - __ JumpIfNotSmi(r0, &miss_force_generic); + // Check that the key is a smi or a heap number convertible to a smi. + GenerateSmiKeyCheck(masm, r0, r4, r5, d1, &miss_force_generic); // Get the elements array. __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset)); @@ -4093,8 +4146,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - // Check that the key is a smi. - __ JumpIfNotSmi(key_reg, &miss_force_generic); + // Check that the key is a smi or a heap number convertible to a smi. + GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic); // Get the elements array. __ ldr(elements_reg, @@ -4169,8 +4222,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. 
- // Check that the key is a smi. - __ JumpIfNotSmi(key_reg, &miss_force_generic); + // Check that the key is a smi or a heap number convertible to a smi. + GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic); if (elements_kind == FAST_SMI_ONLY_ELEMENTS) { __ JumpIfNotSmi(value_reg, &transition_elements_kind); @@ -4336,7 +4389,9 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - __ JumpIfNotSmi(key_reg, &miss_force_generic); + + // Check that the key is a smi or a heap number convertible to a smi. + GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic); __ ldr(elements_reg, FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); @@ -4427,6 +4482,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement( // Increment the length of the array. __ mov(length_reg, Operand(Smi::FromInt(1))); __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); + __ ldr(elements_reg, + FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); __ jmp(&finish_store); __ bind(&check_capacity); diff --git a/deps/v8/src/array.js b/deps/v8/src/array.js index daa75d5753..9f2c8de3b1 100644 --- a/deps/v8/src/array.js +++ b/deps/v8/src/array.js @@ -1,4 +1,4 @@ -// Copyright 2010 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -465,15 +465,19 @@ function ArrayPush() { } +// Returns an array containing the array elements of the object followed +// by the array elements of each argument in order. See ECMA-262, +// section 15.4.4.7. 
function ArrayConcat(arg1) { // length == 1 if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) { throw MakeTypeError("called_on_null_or_undefined", ["Array.prototype.concat"]); } + var array = ToObject(this); var arg_count = %_ArgumentsLength(); var arrays = new InternalArray(1 + arg_count); - arrays[0] = this; + arrays[0] = array; for (var i = 0; i < arg_count; i++) { arrays[i + 1] = %_Arguments(i); } @@ -1023,13 +1027,28 @@ function ArrayFilter(f, receiver) { var result = new $Array(); var accumulator = new InternalArray(); var accumulator_length = 0; - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (%_CallFunction(receiver, element, i, array, f)) { - accumulator[accumulator_length++] = element; + if (%DebugCallbackSupportsStepping(f)) { + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + %DebugPrepareStepInIfStepping(f); + if (%_CallFunction(receiver, element, i, array, f)) { + accumulator[accumulator_length++] = element; + } + } + } + } else { + // This is a duplicate of the previous loop sans debug stepping. + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + if (%_CallFunction(receiver, element, i, array, f)) { + accumulator[accumulator_length++] = element; + } } } + // End of duplicate. } %MoveArrayContents(accumulator, result); return result; @@ -1055,12 +1074,24 @@ function ArrayForEach(f, receiver) { } else if (!IS_SPEC_OBJECT(receiver)) { receiver = ToObject(receiver); } - - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - %_CallFunction(receiver, element, i, array, f); + if (%DebugCallbackSupportsStepping(f)) { + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. 
+ %DebugPrepareStepInIfStepping(f); + %_CallFunction(receiver, element, i, array, f); + } } + } else { + // This is a duplicate of the previous loop sans debug stepping. + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + %_CallFunction(receiver, element, i, array, f); + } + } + // End of duplicate. } } @@ -1087,11 +1118,24 @@ function ArraySome(f, receiver) { receiver = ToObject(receiver); } - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (%_CallFunction(receiver, element, i, array, f)) return true; + if (%DebugCallbackSupportsStepping(f)) { + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + %DebugPrepareStepInIfStepping(f); + if (%_CallFunction(receiver, element, i, array, f)) return true; + } + } + } else { + // This is a duplicate of the previous loop sans debug stepping. + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + if (%_CallFunction(receiver, element, i, array, f)) return true; + } } + // End of duplicate. } return false; } @@ -1117,11 +1161,24 @@ function ArrayEvery(f, receiver) { receiver = ToObject(receiver); } - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (!%_CallFunction(receiver, element, i, array, f)) return false; + if (%DebugCallbackSupportsStepping(f)) { + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + %DebugPrepareStepInIfStepping(f); + if (!%_CallFunction(receiver, element, i, array, f)) return false; + } + } + } else { + // This is a duplicate of the previous loop sans debug stepping. + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + if (!%_CallFunction(receiver, element, i, array, f)) return false; + } } + // End of duplicate. 
} return true; } @@ -1148,11 +1205,24 @@ function ArrayMap(f, receiver) { var result = new $Array(); var accumulator = new InternalArray(length); - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - accumulator[i] = %_CallFunction(receiver, element, i, array, f); + if (%DebugCallbackSupportsStepping(f)) { + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + %DebugPrepareStepInIfStepping(f); + accumulator[i] = %_CallFunction(receiver, element, i, array, f); + } } + } else { + // This is a duplicate of the previous loop sans debug stepping. + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + accumulator[i] = %_CallFunction(receiver, element, i, array, f); + } + } + // End of duplicate. } %MoveArrayContents(accumulator, result); return result; @@ -1307,11 +1377,27 @@ function ArrayReduce(callback, current) { } var receiver = %GetDefaultReceiver(callback); - for (; i < length; i++) { - if (i in array) { - var element = array[i]; - current = %_CallFunction(receiver, current, element, i, array, callback); + + if (%DebugCallbackSupportsStepping(callback)) { + for (; i < length; i++) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + %DebugPrepareStepInIfStepping(callback); + current = + %_CallFunction(receiver, current, element, i, array, callback); + } + } + } else { + // This is a duplicate of the previous loop sans debug stepping. + for (; i < length; i++) { + if (i in array) { + var element = array[i]; + current = + %_CallFunction(receiver, current, element, i, array, callback); + } } + // End of duplicate. 
} return current; } @@ -1344,11 +1430,27 @@ function ArrayReduceRight(callback, current) { } var receiver = %GetDefaultReceiver(callback); - for (; i >= 0; i--) { - if (i in array) { - var element = array[i]; - current = %_CallFunction(receiver, current, element, i, array, callback); + + if (%DebugCallbackSupportsStepping(callback)) { + for (; i >= 0; i--) { + if (i in array) { + var element = array[i]; + // Prepare break slots for debugger step in. + %DebugPrepareStepInIfStepping(callback); + current = + %_CallFunction(receiver, current, element, i, array, callback); + } + } + } else { + // This is a duplicate of the previous loop sans debug stepping. + for (; i >= 0; i--) { + if (i in array) { + var element = array[i]; + current = + %_CallFunction(receiver, current, element, i, array, callback); + } } + // End of duplicate. } return current; } diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc index 4944202f07..be2564960d 100644 --- a/deps/v8/src/assembler.cc +++ b/deps/v8/src/assembler.cc @@ -99,21 +99,7 @@ struct DoubleConstant BASE_EMBEDDED { double the_hole_nan; }; -struct InitializeDoubleConstants { - static void Construct(DoubleConstant* double_constants) { - double_constants->min_int = kMinInt; - double_constants->one_half = 0.5; - double_constants->minus_zero = -0.0; - double_constants->uint8_max_value = 255; - double_constants->zero = 0.0; - double_constants->canonical_non_hole_nan = OS::nan_value(); - double_constants->the_hole_nan = BitCast(kHoleNanInt64); - double_constants->negative_infinity = -V8_INFINITY; - } -}; - -static LazyInstance::type - double_constants = LAZY_INSTANCE_INITIALIZER; +static DoubleConstant double_constants; const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING"; @@ -726,6 +712,18 @@ void RelocInfo::Verify() { // ----------------------------------------------------------------------------- // Implementation of ExternalReference +void ExternalReference::SetUp() { + double_constants.min_int = 
kMinInt; + double_constants.one_half = 0.5; + double_constants.minus_zero = -0.0; + double_constants.uint8_max_value = 255; + double_constants.zero = 0.0; + double_constants.canonical_non_hole_nan = OS::nan_value(); + double_constants.the_hole_nan = BitCast(kHoleNanInt64); + double_constants.negative_infinity = -V8_INFINITY; +} + + ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate) : address_(Redirect(isolate, Builtins::c_function_address(id))) {} @@ -958,50 +956,47 @@ ExternalReference ExternalReference::scheduled_exception_address( ExternalReference ExternalReference::address_of_min_int() { - return ExternalReference(reinterpret_cast( - &double_constants.Pointer()->min_int)); + return ExternalReference(reinterpret_cast(&double_constants.min_int)); } ExternalReference ExternalReference::address_of_one_half() { - return ExternalReference(reinterpret_cast( - &double_constants.Pointer()->one_half)); + return ExternalReference(reinterpret_cast(&double_constants.one_half)); } ExternalReference ExternalReference::address_of_minus_zero() { - return ExternalReference(reinterpret_cast( - &double_constants.Pointer()->minus_zero)); + return ExternalReference( + reinterpret_cast(&double_constants.minus_zero)); } ExternalReference ExternalReference::address_of_zero() { - return ExternalReference(reinterpret_cast( - &double_constants.Pointer()->zero)); + return ExternalReference(reinterpret_cast(&double_constants.zero)); } ExternalReference ExternalReference::address_of_uint8_max_value() { - return ExternalReference(reinterpret_cast( - &double_constants.Pointer()->uint8_max_value)); + return ExternalReference( + reinterpret_cast(&double_constants.uint8_max_value)); } ExternalReference ExternalReference::address_of_negative_infinity() { - return ExternalReference(reinterpret_cast( - &double_constants.Pointer()->negative_infinity)); + return ExternalReference( + reinterpret_cast(&double_constants.negative_infinity)); } ExternalReference 
ExternalReference::address_of_canonical_non_hole_nan() { - return ExternalReference(reinterpret_cast( - &double_constants.Pointer()->canonical_non_hole_nan)); + return ExternalReference( + reinterpret_cast(&double_constants.canonical_non_hole_nan)); } ExternalReference ExternalReference::address_of_the_hole_nan() { - return ExternalReference(reinterpret_cast( - &double_constants.Pointer()->the_hole_nan)); + return ExternalReference( + reinterpret_cast(&double_constants.the_hole_nan)); } @@ -1158,6 +1153,20 @@ double power_double_int(double x, int y) { double power_double_double(double x, double y) { +#ifdef __MINGW64_VERSION_MAJOR + // MinGW64 has a custom implementation for pow. This handles certain + // special cases that are different. + if ((x == 0.0 || isinf(x)) && isfinite(y)) { + double f; + if (modf(y, &f) != 0.0) return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0; + } + + if (x == 2.0) { + int y_int = static_cast(y); + if (y == y_int) return ldexp(1.0, y_int); + } +#endif + // The checks for special cases can be dropped in ia32 because it has already // been done in generated code before bailing out here. if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) return OS::nan_value(); diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h index 918a2a679b..05fe320ad0 100644 --- a/deps/v8/src/assembler.h +++ b/deps/v8/src/assembler.h @@ -62,6 +62,10 @@ class AssemblerBase: public Malloced { Isolate* isolate() const { return isolate_; } int jit_cookie() { return jit_cookie_; } + // Overwrite a host NaN with a quiet target NaN. Used by mksnapshot for + // cross-snapshotting. 
+ static void QuietNaN(HeapObject* nan) { } + private: Isolate* isolate_; int jit_cookie_; @@ -535,6 +539,8 @@ class ExternalReference BASE_EMBEDDED { DIRECT_GETTER_CALL }; + static void SetUp(); + typedef void* ExternalReferenceRedirector(void* original, Type type); ExternalReference(Builtins::CFunctionId id, Isolate* isolate); diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc index 4b6ae680a4..6f9fd7afb2 100644 --- a/deps/v8/src/ast.cc +++ b/deps/v8/src/ast.cc @@ -962,6 +962,14 @@ RegExpDisjunction::RegExpDisjunction(ZoneList* alternatives) } +static int IncreaseBy(int previous, int increase) { + if (RegExpTree::kInfinity - previous < increase) { + return RegExpTree::kInfinity; + } else { + return previous + increase; + } +} + RegExpAlternative::RegExpAlternative(ZoneList* nodes) : nodes_(nodes) { ASSERT(nodes->length() > 1); @@ -969,13 +977,10 @@ RegExpAlternative::RegExpAlternative(ZoneList* nodes) max_match_ = 0; for (int i = 0; i < nodes->length(); i++) { RegExpTree* node = nodes->at(i); - min_match_ += node->min_match(); + int node_min_match = node->min_match(); + min_match_ = IncreaseBy(min_match_, node_min_match); int node_max_match = node->max_match(); - if (kInfinity - max_match_ < node_max_match) { - max_match_ = kInfinity; - } else { - max_match_ += node->max_match(); - } + max_match_ = IncreaseBy(max_match_, node_max_match); } } @@ -993,138 +998,78 @@ CaseClause::CaseClause(Isolate* isolate, } -#define INCREASE_NODE_COUNT(NodeType) \ +#define REGULAR_NODE(NodeType) \ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ increase_node_count(); \ } +#define DONT_OPTIMIZE_NODE(NodeType) \ + void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ + increase_node_count(); \ + add_flag(kDontOptimize); \ + add_flag(kDontInline); \ + add_flag(kDontSelfOptimize); \ + } +#define DONT_INLINE_NODE(NodeType) \ + void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ + increase_node_count(); \ + add_flag(kDontInline); \ + } +#define 
DONT_SELFOPTIMIZE_NODE(NodeType) \ + void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ + increase_node_count(); \ + add_flag(kDontSelfOptimize); \ + } -INCREASE_NODE_COUNT(VariableDeclaration) -INCREASE_NODE_COUNT(FunctionDeclaration) -INCREASE_NODE_COUNT(ModuleDeclaration) -INCREASE_NODE_COUNT(ImportDeclaration) -INCREASE_NODE_COUNT(ExportDeclaration) -INCREASE_NODE_COUNT(ModuleLiteral) -INCREASE_NODE_COUNT(ModuleVariable) -INCREASE_NODE_COUNT(ModulePath) -INCREASE_NODE_COUNT(ModuleUrl) -INCREASE_NODE_COUNT(Block) -INCREASE_NODE_COUNT(ExpressionStatement) -INCREASE_NODE_COUNT(EmptyStatement) -INCREASE_NODE_COUNT(IfStatement) -INCREASE_NODE_COUNT(ContinueStatement) -INCREASE_NODE_COUNT(BreakStatement) -INCREASE_NODE_COUNT(ReturnStatement) -INCREASE_NODE_COUNT(Conditional) -INCREASE_NODE_COUNT(Literal) -INCREASE_NODE_COUNT(ObjectLiteral) -INCREASE_NODE_COUNT(Assignment) -INCREASE_NODE_COUNT(Throw) -INCREASE_NODE_COUNT(Property) -INCREASE_NODE_COUNT(UnaryOperation) -INCREASE_NODE_COUNT(CountOperation) -INCREASE_NODE_COUNT(BinaryOperation) -INCREASE_NODE_COUNT(CompareOperation) -INCREASE_NODE_COUNT(ThisFunction) -INCREASE_NODE_COUNT(Call) -INCREASE_NODE_COUNT(CallNew) - -#undef INCREASE_NODE_COUNT - - -void AstConstructionVisitor::VisitWithStatement(WithStatement* node) { - increase_node_count(); - add_flag(kDontOptimize); - add_flag(kDontInline); -} - - -void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) { - increase_node_count(); - add_flag(kDontInline); -} - - -void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) { - increase_node_count(); - add_flag(kDontSelfOptimize); -} - - -void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) { - increase_node_count(); - add_flag(kDontSelfOptimize); -} - - -void AstConstructionVisitor::VisitForStatement(ForStatement* node) { - increase_node_count(); - add_flag(kDontSelfOptimize); -} - - -void AstConstructionVisitor::VisitForInStatement(ForInStatement* 
node) { - increase_node_count(); - add_flag(kDontSelfOptimize); -} - - -void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) { - increase_node_count(); - add_flag(kDontOptimize); - add_flag(kDontInline); -} - - -void AstConstructionVisitor::VisitTryFinallyStatement( - TryFinallyStatement* node) { - increase_node_count(); - add_flag(kDontOptimize); - add_flag(kDontInline); -} - - -void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* node) { - increase_node_count(); - add_flag(kDontOptimize); - add_flag(kDontInline); -} - - -void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) { - increase_node_count(); - add_flag(kDontInline); -} - - -void AstConstructionVisitor::VisitSharedFunctionInfoLiteral( - SharedFunctionInfoLiteral* node) { - increase_node_count(); - add_flag(kDontOptimize); - add_flag(kDontInline); -} - - -void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) { - increase_node_count(); - // In theory, we'd have to add: - // if(node->var()->IsLookupSlot()) { add_flag(kDontInline); } - // However, node->var() is usually not bound yet at VariableProxy creation - // time, and LOOKUP variables only result from constructs that cannot - // be inlined anyway. -} - - -void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) { - increase_node_count(); - add_flag(kDontInline); // TODO(1322): Allow materialized literals. -} - - -void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) { - increase_node_count(); - add_flag(kDontInline); // TODO(1322): Allow materialized literals. 
-} - +REGULAR_NODE(VariableDeclaration) +REGULAR_NODE(FunctionDeclaration) +REGULAR_NODE(Block) +REGULAR_NODE(ExpressionStatement) +REGULAR_NODE(EmptyStatement) +REGULAR_NODE(IfStatement) +REGULAR_NODE(ContinueStatement) +REGULAR_NODE(BreakStatement) +REGULAR_NODE(ReturnStatement) +REGULAR_NODE(SwitchStatement) +REGULAR_NODE(Conditional) +REGULAR_NODE(Literal) +REGULAR_NODE(ObjectLiteral) +REGULAR_NODE(Assignment) +REGULAR_NODE(Throw) +REGULAR_NODE(Property) +REGULAR_NODE(UnaryOperation) +REGULAR_NODE(CountOperation) +REGULAR_NODE(BinaryOperation) +REGULAR_NODE(CompareOperation) +REGULAR_NODE(ThisFunction) +REGULAR_NODE(Call) +REGULAR_NODE(CallNew) +// In theory, for VariableProxy we'd have to add: +// if (node->var()->IsLookupSlot()) add_flag(kDontInline); +// But node->var() is usually not bound yet at VariableProxy creation time, and +// LOOKUP variables only result from constructs that cannot be inlined anyway. +REGULAR_NODE(VariableProxy) + +DONT_OPTIMIZE_NODE(ModuleDeclaration) +DONT_OPTIMIZE_NODE(ImportDeclaration) +DONT_OPTIMIZE_NODE(ExportDeclaration) +DONT_OPTIMIZE_NODE(ModuleLiteral) +DONT_OPTIMIZE_NODE(ModuleVariable) +DONT_OPTIMIZE_NODE(ModulePath) +DONT_OPTIMIZE_NODE(ModuleUrl) +DONT_OPTIMIZE_NODE(WithStatement) +DONT_OPTIMIZE_NODE(TryCatchStatement) +DONT_OPTIMIZE_NODE(TryFinallyStatement) +DONT_OPTIMIZE_NODE(DebuggerStatement) +DONT_OPTIMIZE_NODE(SharedFunctionInfoLiteral) + +DONT_INLINE_NODE(FunctionLiteral) +DONT_INLINE_NODE(RegExpLiteral) // TODO(1322): Allow materialized literals. +DONT_INLINE_NODE(ArrayLiteral) // TODO(1322): Allow materialized literals. 
+ +DONT_SELFOPTIMIZE_NODE(DoWhileStatement) +DONT_SELFOPTIMIZE_NODE(WhileStatement) +DONT_SELFOPTIMIZE_NODE(ForStatement) +DONT_SELFOPTIMIZE_NODE(ForInStatement) void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) { increase_node_count(); @@ -1142,6 +1087,11 @@ void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) { } } +#undef REGULAR_NODE +#undef DONT_OPTIMIZE_NODE +#undef DONT_INLINE_NODE +#undef DONT_SELFOPTIMIZE_NODE + Handle Literal::ToString() { if (handle_->IsString()) return Handle::cast(handle_); diff --git a/deps/v8/src/ast.h b/deps/v8/src/ast.h index b827302ebd..dad80576bd 100644 --- a/deps/v8/src/ast.h +++ b/deps/v8/src/ast.h @@ -270,6 +270,7 @@ class SmallMapList { void Reserve(int capacity) { list_.Reserve(capacity); } void Clear() { list_.Clear(); } + void Sort() { list_.Sort(); } bool is_empty() const { return list_.is_empty(); } int length() const { return list_.length(); } @@ -420,8 +421,8 @@ class Block: public BreakableStatement { ZoneList* statements() { return &statements_; } bool is_initializer_block() const { return is_initializer_block_; } - Scope* block_scope() const { return block_scope_; } - void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; } + Scope* scope() const { return scope_; } + void set_scope(Scope* scope) { scope_ = scope; } protected: template friend class AstNodeFactory; @@ -433,13 +434,13 @@ class Block: public BreakableStatement { : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY), statements_(capacity), is_initializer_block_(is_initializer_block), - block_scope_(NULL) { + scope_(NULL) { } private: ZoneList statements_; bool is_initializer_block_; - Scope* block_scope_; + Scope* scope_; }; @@ -607,6 +608,7 @@ class ModuleLiteral: public Module { DECLARE_NODE_TYPE(ModuleLiteral) Block* body() const { return body_; } + Handle context() const { return context_; } protected: template friend class AstNodeFactory; @@ -618,6 +620,7 @@ class ModuleLiteral: public Module { 
private: Block* body_; + Handle context_; }; diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc index 0e95b4b839..c65c68c2d7 100644 --- a/deps/v8/src/bootstrapper.cc +++ b/deps/v8/src/bootstrapper.cc @@ -1011,7 +1011,7 @@ bool Genesis::InitializeGlobal(Handle inner_global, proto_map->set_prototype(global_context()->initial_object_prototype()); Handle proto = factory->NewJSObjectFromMap(proto_map); proto->InObjectPropertyAtPut(JSRegExp::kSourceFieldIndex, - heap->empty_string()); + heap->query_colon_symbol()); proto->InObjectPropertyAtPut(JSRegExp::kGlobalFieldIndex, heap->false_value()); proto->InObjectPropertyAtPut(JSRegExp::kIgnoreCaseFieldIndex, @@ -2159,7 +2159,7 @@ void Genesis::TransferNamedProperties(Handle from, Handle descs = Handle(from->map()->instance_descriptors()); for (int i = 0; i < descs->number_of_descriptors(); i++) { - PropertyDetails details = PropertyDetails(descs->GetDetails(i)); + PropertyDetails details = descs->GetDetails(i); switch (details.type()) { case FIELD: { HandleScope inner; diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc index 0f493e6e57..6d1c6a9785 100644 --- a/deps/v8/src/builtins.cc +++ b/deps/v8/src/builtins.cc @@ -412,12 +412,19 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements( HeapObject* elms = array->elements(); Map* map = elms->map(); if (map == heap->fixed_array_map()) { - if (args == NULL || !array->HasFastSmiOnlyElements()) { + if (array->HasFastElements()) return elms; + if (args == NULL) { + if (array->HasFastDoubleElements()) { + ASSERT(elms == heap->empty_fixed_array()); + MaybeObject* maybe_transition = + array->TransitionElementsKind(FAST_ELEMENTS); + if (maybe_transition->IsFailure()) return maybe_transition; + } return elms; } } else if (map == heap->fixed_cow_array_map()) { MaybeObject* maybe_writable_result = array->EnsureWritableFastElements(); - if (args == NULL || !array->HasFastSmiOnlyElements() || + if (args == NULL || array->HasFastElements() || 
maybe_writable_result->IsFailure()) { return maybe_writable_result; } @@ -1098,7 +1105,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper( CustomArguments custom(isolate); v8::ImplementationUtilities::PrepareArgumentsData(custom.end(), - data_obj, *function, raw_holder); + isolate, data_obj, *function, raw_holder); v8::Arguments new_args = v8::ImplementationUtilities::NewArguments( custom.end(), @@ -1138,68 +1145,6 @@ BUILTIN(HandleApiCallConstruct) { } -#ifdef DEBUG - -static void VerifyTypeCheck(Handle object, - Handle function) { - ASSERT(function->shared()->IsApiFunction()); - FunctionTemplateInfo* info = function->shared()->get_api_func_data(); - if (info->signature()->IsUndefined()) return; - SignatureInfo* signature = SignatureInfo::cast(info->signature()); - Object* receiver_type = signature->receiver(); - if (receiver_type->IsUndefined()) return; - FunctionTemplateInfo* type = FunctionTemplateInfo::cast(receiver_type); - ASSERT(object->IsInstanceOf(type)); -} - -#endif - - -BUILTIN(FastHandleApiCall) { - ASSERT(!CalledAsConstructor(isolate)); - Heap* heap = isolate->heap(); - const bool is_construct = false; - - // We expect four more arguments: callback, function, call data, and holder. - const int args_length = args.length() - 4; - ASSERT(args_length >= 0); - - Object* callback_obj = args[args_length]; - - v8::Arguments new_args = v8::ImplementationUtilities::NewArguments( - &args[args_length + 1], - &args[0] - 1, - args_length - 1, - is_construct); - -#ifdef DEBUG - VerifyTypeCheck(Utils::OpenHandle(*new_args.Holder()), - Utils::OpenHandle(*new_args.Callee())); -#endif - HandleScope scope(isolate); - Object* result; - v8::Handle value; - { - // Leaving JavaScript. - VMState state(isolate, EXTERNAL); - ExternalCallbackScope call_scope(isolate, - v8::ToCData
(callback_obj)); - v8::InvocationCallback callback = - v8::ToCData(callback_obj); - - value = callback(new_args); - } - if (value.IsEmpty()) { - result = heap->undefined_value(); - } else { - result = *reinterpret_cast(*value); - } - - RETURN_IF_SCHEDULED_EXCEPTION(isolate); - return result; -} - - // Helper function to handle calls to non-function objects created through the // API. The object can be called as either a constructor (using new) or just as // a function (without new). @@ -1238,7 +1183,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor( CustomArguments custom(isolate); v8::ImplementationUtilities::PrepareArgumentsData(custom.end(), - call_data->data(), constructor, obj); + isolate, call_data->data(), constructor, obj); v8::Arguments new_args = v8::ImplementationUtilities::NewArguments( custom.end(), &args[0] - 1, diff --git a/deps/v8/src/builtins.h b/deps/v8/src/builtins.h index f079139d45..3ea33938eb 100644 --- a/deps/v8/src/builtins.h +++ b/deps/v8/src/builtins.h @@ -56,7 +56,6 @@ enum BuiltinExtraArguments { V(ArrayConcat, NO_EXTRA_ARGUMENTS) \ \ V(HandleApiCall, NEEDS_CALLED_FUNCTION) \ - V(FastHandleApiCall, NO_EXTRA_ARGUMENTS) \ V(HandleApiCallConstruct, NEEDS_CALLED_FUNCTION) \ V(HandleApiCallAsFunction, NO_EXTRA_ARGUMENTS) \ V(HandleApiCallAsConstructor, NO_EXTRA_ARGUMENTS) \ diff --git a/deps/v8/src/bytecodes-irregexp.h b/deps/v8/src/bytecodes-irregexp.h index b13efb36f8..c7cc66e527 100644 --- a/deps/v8/src/bytecodes-irregexp.h +++ b/deps/v8/src/bytecodes-irregexp.h @@ -72,24 +72,23 @@ V(AND_CHECK_4_CHARS, 27, 16) /* bc8 pad24 uint32 uint32 addr32 */ \ V(AND_CHECK_CHAR, 28, 12) /* bc8 pad8 uint16 uint32 addr32 */ \ V(AND_CHECK_NOT_4_CHARS, 29, 16) /* bc8 pad24 uint32 uint32 addr32 */ \ V(AND_CHECK_NOT_CHAR, 30, 12) /* bc8 pad8 uint16 uint32 addr32 */ \ -V(MINUS_AND_CHECK_NOT_CHAR, 31, 12) /* bc8 pad8 uc16 uc16 addr32 */ \ -V(CHECK_LT, 32, 8) /* bc8 pad8 uc16 addr32 */ \ -V(CHECK_GT, 33, 8) /* bc8 pad8 uc16 addr32 */ 
\ -V(CHECK_NOT_BACK_REF, 34, 8) /* bc8 reg_idx24 addr32 */ \ -V(CHECK_NOT_BACK_REF_NO_CASE, 35, 8) /* bc8 reg_idx24 addr32 */ \ -V(CHECK_NOT_REGS_EQUAL, 36, 12) /* bc8 regidx24 reg_idx32 addr32 */ \ -V(LOOKUP_MAP1, 37, 12) /* bc8 pad8 start16 bit_map_addr32 addr32 */ \ -V(LOOKUP_MAP2, 38, 96) /* bc8 pad8 start16 half_nibble_map_addr32* */ \ -V(LOOKUP_MAP8, 39, 96) /* bc8 pad8 start16 byte_map addr32* */ \ -V(LOOKUP_HI_MAP8, 40, 96) /* bc8 start24 byte_map_addr32 addr32* */ \ -V(CHECK_REGISTER_LT, 41, 12) /* bc8 reg_idx24 value32 addr32 */ \ -V(CHECK_REGISTER_GE, 42, 12) /* bc8 reg_idx24 value32 addr32 */ \ -V(CHECK_REGISTER_EQ_POS, 43, 8) /* bc8 reg_idx24 addr32 */ \ -V(CHECK_AT_START, 44, 8) /* bc8 pad24 addr32 */ \ -V(CHECK_NOT_AT_START, 45, 8) /* bc8 pad24 addr32 */ \ -V(CHECK_GREEDY, 46, 8) /* bc8 pad24 addr32 */ \ -V(ADVANCE_CP_AND_GOTO, 47, 8) /* bc8 offset24 addr32 */ \ -V(SET_CURRENT_POSITION_FROM_END, 48, 4) /* bc8 idx24 */ +V(MINUS_AND_CHECK_NOT_CHAR, 31, 12) /* bc8 pad8 uc16 uc16 uc16 addr32 */ \ +V(CHECK_CHAR_IN_RANGE, 32, 12) /* bc8 pad24 uc16 uc16 addr32 */ \ +V(CHECK_CHAR_NOT_IN_RANGE, 33, 12) /* bc8 pad24 uc16 uc16 addr32 */ \ +V(CHECK_BIT_IN_TABLE, 34, 24) /* bc8 pad24 addr32 bits128 */ \ +V(CHECK_LT, 35, 8) /* bc8 pad8 uc16 addr32 */ \ +V(CHECK_GT, 36, 8) /* bc8 pad8 uc16 addr32 */ \ +V(CHECK_NOT_BACK_REF, 37, 8) /* bc8 reg_idx24 addr32 */ \ +V(CHECK_NOT_BACK_REF_NO_CASE, 38, 8) /* bc8 reg_idx24 addr32 */ \ +V(CHECK_NOT_REGS_EQUAL, 39, 12) /* bc8 regidx24 reg_idx32 addr32 */ \ +V(CHECK_REGISTER_LT, 40, 12) /* bc8 reg_idx24 value32 addr32 */ \ +V(CHECK_REGISTER_GE, 41, 12) /* bc8 reg_idx24 value32 addr32 */ \ +V(CHECK_REGISTER_EQ_POS, 42, 8) /* bc8 reg_idx24 addr32 */ \ +V(CHECK_AT_START, 43, 8) /* bc8 pad24 addr32 */ \ +V(CHECK_NOT_AT_START, 44, 8) /* bc8 pad24 addr32 */ \ +V(CHECK_GREEDY, 45, 8) /* bc8 pad24 addr32 */ \ +V(ADVANCE_CP_AND_GOTO, 46, 8) /* bc8 offset24 addr32 */ \ +V(SET_CURRENT_POSITION_FROM_END, 47, 4) /* bc8 idx24 */ #define 
DECLARE_BYTECODES(name, code, length) \ static const int BC_##name = code; diff --git a/deps/v8/src/code-stubs.cc b/deps/v8/src/code-stubs.cc index 11016c8238..814e358721 100644 --- a/deps/v8/src/code-stubs.cc +++ b/deps/v8/src/code-stubs.cc @@ -73,21 +73,12 @@ SmartArrayPointer CodeStub::GetName() { void CodeStub::RecordCodeGeneration(Code* code, MacroAssembler* masm) { - code->set_major_key(MajorKey()); - Isolate* isolate = masm->isolate(); SmartArrayPointer name = GetName(); PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, code, *name)); GDBJIT(AddCode(GDBJITInterface::STUB, *name, code)); Counters* counters = isolate->counters(); counters->total_stubs_code_size()->Increment(code->instruction_size()); - -#ifdef ENABLE_DISASSEMBLER - if (FLAG_print_code_stubs) { - code->Disassemble(*name); - PrintF("\n"); - } -#endif } @@ -125,8 +116,16 @@ Handle CodeStub::GetCode() { GetICState()); Handle new_object = factory->NewCode( desc, flags, masm.CodeObject(), NeedsImmovableCode()); - RecordCodeGeneration(*new_object, &masm); + new_object->set_major_key(MajorKey()); FinishCode(new_object); + RecordCodeGeneration(*new_object, &masm); + +#ifdef ENABLE_DISASSEMBLER + if (FLAG_print_code_stubs) { + new_object->Disassemble(*GetName()); + PrintF("\n"); + } +#endif if (UseSpecialCache()) { AddToSpecialCache(new_object); diff --git a/deps/v8/src/code-stubs.h b/deps/v8/src/code-stubs.h index b67e961ac7..5c8717838f 100644 --- a/deps/v8/src/code-stubs.h +++ b/deps/v8/src/code-stubs.h @@ -498,6 +498,7 @@ class ICCompareStub: public CodeStub { virtual void FinishCode(Handle code) { code->set_compare_state(state_); + code->set_compare_operation(op_); } virtual CodeStub::Major MajorKey() { return CompareIC; } diff --git a/deps/v8/src/compiler-intrinsics.h b/deps/v8/src/compiler-intrinsics.h index 3b9c59ea53..b73e8ac750 100644 --- a/deps/v8/src/compiler-intrinsics.h +++ b/deps/v8/src/compiler-intrinsics.h @@ -40,6 +40,9 @@ class CompilerIntrinsics { // Returns number of zero bits 
following most significant 1 bit. // Undefined for zero value. INLINE(static int CountLeadingZeros(uint32_t value)); + + // Returns the number of bits set. + INLINE(static int CountSetBits(uint32_t value)); }; #ifdef __GNUC__ @@ -51,6 +54,10 @@ int CompilerIntrinsics::CountLeadingZeros(uint32_t value) { return __builtin_clz(value); } +int CompilerIntrinsics::CountSetBits(uint32_t value) { + return __builtin_popcount(value); +} + #elif defined(_MSC_VER) #pragma intrinsic(_BitScanForward) @@ -68,6 +75,16 @@ int CompilerIntrinsics::CountLeadingZeros(uint32_t value) { return 31 - static_cast(result); } +int CompilerIntrinsics::CountSetBits(uint32_t value) { + // Manually count set bits. + value = ((value >> 1) & 0x55555555) + (value & 0x55555555); + value = ((value >> 2) & 0x33333333) + (value & 0x33333333); + value = ((value >> 4) & 0x0f0f0f0f) + (value & 0x0f0f0f0f); + value = ((value >> 8) & 0x00ff00ff) + (value & 0x00ff00ff); + value = ((value >> 16) & 0x0000ffff) + (value & 0x0000ffff); + return value; +} + #else #error Unsupported compiler #endif diff --git a/deps/v8/src/compiler.cc b/deps/v8/src/compiler.cc index 2272337739..c9c2480fa2 100644 --- a/deps/v8/src/compiler.cc +++ b/deps/v8/src/compiler.cc @@ -531,6 +531,10 @@ Handle Compiler::Compile(Handle source, if (extension == NULL && !result.is_null()) { compilation_cache->PutScript(source, result); } + } else { + if (result->ic_age() != HEAP->global_ic_age()) { + result->ResetForNewContext(HEAP->global_ic_age()); + } } if (result.is_null()) isolate->ReportPendingMessages(); @@ -586,6 +590,10 @@ Handle Compiler::CompileEval(Handle source, compilation_cache->PutEval( source, context, is_global, result, scope_position); } + } else { + if (result->ic_age() != HEAP->global_ic_age()) { + result->ResetForNewContext(HEAP->global_ic_age()); + } } return result; diff --git a/deps/v8/src/contexts.h b/deps/v8/src/contexts.h index af5cb036c6..647c15c153 100644 --- a/deps/v8/src/contexts.h +++ b/deps/v8/src/contexts.h @@ 
-397,7 +397,7 @@ class Context: public FixedArray { GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSORS) #undef GLOBAL_CONTEXT_FIELD_ACCESSORS - // Lookup the the slot called name, starting with the current context. + // Lookup the slot called name, starting with the current context. // There are three possibilities: // // 1) result->IsContext(): diff --git a/deps/v8/src/conversions-inl.h b/deps/v8/src/conversions-inl.h index b098a1c29c..77b260f036 100644 --- a/deps/v8/src/conversions-inl.h +++ b/deps/v8/src/conversions-inl.h @@ -228,9 +228,7 @@ double InternalStringToIntDouble(UnicodeCache* unicode_cache, } ASSERT(number != 0); - // The double could be constructed faster from number (mantissa), exponent - // and sign. Assuming it's a rare case more simple code is used. - return static_cast(negative ? -number : number) * pow(2.0, exponent); + return ldexp(static_cast(negative ? -number : number), exponent); } diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc index 45781cf0d4..adbe550136 100644 --- a/deps/v8/src/d8.cc +++ b/deps/v8/src/d8.cc @@ -315,9 +315,10 @@ static size_t convertToUint(Local value_in, TryCatch* try_catch) { } -const char kArrayBufferReferencePropName[] = "_is_array_buffer_"; -const char kArrayBufferMarkerPropName[] = "_array_buffer_ref_"; +const char kArrayBufferMarkerPropName[] = "_is_array_buffer_"; +const char kArrayBufferReferencePropName[] = "_array_buffer_ref_"; +static const int kExternalArrayAllocationHeaderSize = 2; Handle Shell::CreateExternalArray(const Arguments& args, ExternalArrayType type, @@ -352,10 +353,11 @@ Handle Shell::CreateExternalArray(const Arguments& args, Local length_value = (args.Length() < 3) ? (first_arg_is_array_buffer - ? args[0]->ToObject()->Get(String::New("length")) + ? 
args[0]->ToObject()->Get(String::New("byteLength")) : args[0]) : args[2]; - size_t length = convertToUint(length_value, &try_catch); + size_t byteLength = convertToUint(length_value, &try_catch); + size_t length = byteLength; if (try_catch.HasCaught()) return try_catch.Exception(); void* data = NULL; @@ -367,7 +369,7 @@ Handle Shell::CreateExternalArray(const Arguments& args, data = derived_from->GetIndexedPropertiesExternalArrayData(); size_t array_buffer_length = convertToUint( - derived_from->Get(String::New("length")), + derived_from->Get(String::New("byteLength")), &try_catch); if (try_catch.HasCaught()) return try_catch.Exception(); @@ -426,22 +428,44 @@ Handle Shell::CreateExternalArray(const Arguments& args, } Persistent persistent_array = Persistent::New(array); - persistent_array.MakeWeak(data, ExternalArrayWeakCallback); - persistent_array.MarkIndependent(); if (data == NULL && length != 0) { - data = calloc(length, element_size); + // Make sure the total size fits into a (signed) int. + static const int kMaxSize = 0x7fffffff; + if (length > (kMaxSize - sizeof(size_t)) / element_size) { + return ThrowException(String::New("Array exceeds maximum size (2G)")); + } + // Prepend the size of the allocated chunk to the data itself. 
+ int total_size = length * element_size + + kExternalArrayAllocationHeaderSize * sizeof(size_t); + data = malloc(total_size); if (data == NULL) { return ThrowException(String::New("Memory allocation failed.")); } + *reinterpret_cast(data) = total_size; + data = reinterpret_cast(data) + kExternalArrayAllocationHeaderSize; + memset(data, 0, length * element_size); + V8::AdjustAmountOfExternalAllocatedMemory(total_size); } + persistent_array.MakeWeak(data, ExternalArrayWeakCallback); + persistent_array.MarkIndependent(); array->SetIndexedPropertiesToExternalArrayData( reinterpret_cast(data) + offset, type, static_cast(length)); - array->Set(String::New("length"), - Int32::New(static_cast(length)), ReadOnly); - array->Set(String::New("BYTES_PER_ELEMENT"), - Int32::New(static_cast(element_size))); + array->Set(String::New("byteLength"), + Int32::New(static_cast(byteLength)), ReadOnly); + if (!is_array_buffer_construct) { + array->Set(String::New("length"), + Int32::New(static_cast(length)), ReadOnly); + array->Set(String::New("byteOffset"), + Int32::New(static_cast(offset)), ReadOnly); + array->Set(String::New("BYTES_PER_ELEMENT"), + Int32::New(static_cast(element_size))); + // We currently support 'buffer' property only if constructed from a buffer. 
+ if (first_arg_is_array_buffer) { + array->Set(String::New("buffer"), args[0], ReadOnly); + } + } return array; } @@ -452,6 +476,9 @@ void Shell::ExternalArrayWeakCallback(Persistent object, void* data) { Handle converted_object = object->ToObject(); Local prop_value = converted_object->Get(prop_name); if (data != NULL && !prop_value->IsObject()) { + data = reinterpret_cast(data) - kExternalArrayAllocationHeaderSize; + V8::AdjustAmountOfExternalAllocatedMemory( + -static_cast(*reinterpret_cast(data))); free(data); } object.Dispose(); @@ -808,6 +835,8 @@ Handle Shell::CreateGlobalTemplate() { global_template->Set(String::New("read"), FunctionTemplate::New(Read)); global_template->Set(String::New("readbinary"), FunctionTemplate::New(ReadBinary)); + global_template->Set(String::New("readbuffer"), + FunctionTemplate::New(ReadBuffer)); global_template->Set(String::New("readline"), FunctionTemplate::New(ReadLine)); global_template->Set(String::New("load"), FunctionTemplate::New(Load)); @@ -977,8 +1006,8 @@ void Shell::OnExit() { printf("+--------------------------------------------+-------------+\n"); delete [] counters; } - if (counters_file_ != NULL) - delete counters_file_; + delete counters_file_; + delete counter_map_; } #endif // V8_SHARED @@ -1043,6 +1072,32 @@ Handle Shell::ReadBinary(const Arguments& args) { } +Handle Shell::ReadBuffer(const Arguments& args) { + String::Utf8Value filename(args[0]); + int length; + if (*filename == NULL) { + return ThrowException(String::New("Error loading file")); + } + char* data = ReadChars(*filename, &length); + if (data == NULL) { + return ThrowException(String::New("Error reading file")); + } + + Handle buffer = Object::New(); + buffer->Set(String::New(kArrayBufferMarkerPropName), True(), ReadOnly); + + Persistent persistent_buffer = Persistent::New(buffer); + persistent_buffer.MakeWeak(data, ExternalArrayWeakCallback); + persistent_buffer.MarkIndependent(); + + buffer->SetIndexedPropertiesToExternalArrayData( + 
reinterpret_cast(data), kExternalUnsignedByteArray, length); + buffer->Set(String::New("byteLength"), + Int32::New(static_cast(length)), ReadOnly); + return buffer; +} + + #ifndef V8_SHARED static char* ReadToken(char* data, char token) { char* next = i::OS::StrChr(data, token); diff --git a/deps/v8/src/d8.h b/deps/v8/src/d8.h index c872f90958..23fdebcaf4 100644 --- a/deps/v8/src/d8.h +++ b/deps/v8/src/d8.h @@ -308,6 +308,7 @@ class Shell : public i::AllStatic { static Handle DisableProfiler(const Arguments& args); static Handle Read(const Arguments& args); static Handle ReadBinary(const Arguments& args); + static Handle ReadBuffer(const Arguments& args); static Handle ReadFromStdin(); static Handle ReadLine(const Arguments& args) { return ReadFromStdin(); diff --git a/deps/v8/src/d8.js b/deps/v8/src/d8.js index bf269230b8..819135add4 100644 --- a/deps/v8/src/d8.js +++ b/deps/v8/src/d8.js @@ -2174,7 +2174,7 @@ function DebugResponseDetails(response) { } var current_line = from_line + num; - spacer = maxdigits - (1 + Math.floor(log10(current_line))); + var spacer = maxdigits - (1 + Math.floor(log10(current_line))); if (current_line == Debug.State.currentSourceLine + 1) { for (var i = 0; i < maxdigits; i++) { result += '>'; diff --git a/deps/v8/src/date.js b/deps/v8/src/date.js index 75edf6d32d..d0e24abc50 100644 --- a/deps/v8/src/date.js +++ b/deps/v8/src/date.js @@ -516,8 +516,7 @@ function DateSetMilliseconds(ms) { var t = LOCAL_DATE_VALUE(this); ms = ToNumber(ms); var time = MakeTime(LOCAL_HOUR(this), LOCAL_MIN(this), LOCAL_SEC(this), ms); - SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time)); - return this; + return SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time)); } diff --git a/deps/v8/src/debug-agent.cc b/deps/v8/src/debug-agent.cc index bdc7a578ac..511663d8ee 100644 --- a/deps/v8/src/debug-agent.cc +++ b/deps/v8/src/debug-agent.cc @@ -323,41 +323,41 @@ bool DebuggerAgentUtil::SendConnectMessage(const Socket* conn, const char* 
embedding_host) { static const int kBufferSize = 80; char buffer[kBufferSize]; // Sending buffer. + bool ok; int len; - int r; // Send the header. len = OS::SNPrintF(Vector(buffer, kBufferSize), "Type: connect\r\n"); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; len = OS::SNPrintF(Vector(buffer, kBufferSize), "V8-Version: %s\r\n", v8::V8::GetVersion()); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; len = OS::SNPrintF(Vector(buffer, kBufferSize), "Protocol-Version: 1\r\n"); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; if (embedding_host != NULL) { len = OS::SNPrintF(Vector(buffer, kBufferSize), "Embedding-Host: %s\r\n", embedding_host); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; } len = OS::SNPrintF(Vector(buffer, kBufferSize), "%s: 0\r\n", kContentLength); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; // Terminate header with empty line. len = OS::SNPrintF(Vector(buffer, kBufferSize), "\r\n"); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; // No body for connect message. 
diff --git a/deps/v8/src/debug-debugger.js b/deps/v8/src/debug-debugger.js index 802f6224c4..91838e8ad0 100644 --- a/deps/v8/src/debug-debugger.js +++ b/deps/v8/src/debug-debugger.js @@ -1957,7 +1957,7 @@ DebugCommandProcessor.prototype.frameForScopeRequest_ = function(request) { if (request.arguments && !IS_UNDEFINED(request.arguments.frameNumber)) { frame_index = request.arguments.frameNumber; if (frame_index < 0 || this.exec_state_.frameCount() <= frame_index) { - return response.failed('Invalid frame number'); + throw new Error('Invalid frame number'); } return this.exec_state_.frame(frame_index); } else { @@ -1966,20 +1966,44 @@ DebugCommandProcessor.prototype.frameForScopeRequest_ = function(request) { }; -DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) { - // No frames no scopes. - if (this.exec_state_.frameCount() == 0) { - return response.failed('No scopes'); +// Gets scope host object from request. It is either a function +// ('functionHandle' argument must be specified) or a stack frame +// ('frameNumber' may be specified and the current frame is taken by default). +DebugCommandProcessor.prototype.scopeHolderForScopeRequest_ = + function(request) { + if (request.arguments && "functionHandle" in request.arguments) { + if (!IS_NUMBER(request.arguments.functionHandle)) { + throw new Error('Function handle must be a number'); + } + var function_mirror = LookupMirror(request.arguments.functionHandle); + if (!function_mirror) { + throw new Error('Failed to find function object by handle'); + } + if (!function_mirror.isFunction()) { + throw new Error('Value of non-function type is found by handle'); + } + return function_mirror; + } else { + // No frames no scopes. + if (this.exec_state_.frameCount() == 0) { + throw new Error('No scopes'); + } + + // Get the frame for which the scopes are requested. + var frame = this.frameForScopeRequest_(request); + return frame; } +} - // Get the frame for which the scopes are requested. 
- var frame = this.frameForScopeRequest_(request); - // Fill all scopes for this frame. - var total_scopes = frame.scopeCount(); +DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) { + var scope_holder = this.scopeHolderForScopeRequest_(request); + + // Fill all scopes for this frame or function. + var total_scopes = scope_holder.scopeCount(); var scopes = []; for (var i = 0; i < total_scopes; i++) { - scopes.push(frame.scope(i)); + scopes.push(scope_holder.scope(i)); } response.body = { fromScope: 0, @@ -1991,24 +2015,19 @@ DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) { DebugCommandProcessor.prototype.scopeRequest_ = function(request, response) { - // No frames no scopes. - if (this.exec_state_.frameCount() == 0) { - return response.failed('No scopes'); - } - - // Get the frame for which the scope is requested. - var frame = this.frameForScopeRequest_(request); + // Get the frame or function for which the scope is requested. + var scope_holder = this.scopeHolderForScopeRequest_(request); // With no scope argument just return top scope. 
var scope_index = 0; if (request.arguments && !IS_UNDEFINED(request.arguments.number)) { scope_index = %ToNumber(request.arguments.number); - if (scope_index < 0 || frame.scopeCount() <= scope_index) { + if (scope_index < 0 || scope_holder.scopeCount() <= scope_index) { return response.failed('Invalid scope number'); } } - response.body = frame.scope(scope_index); + response.body = scope_holder.scope(scope_index); }; diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc index f8a1ecf4f9..9efb5c37aa 100644 --- a/deps/v8/src/debug.cc +++ b/deps/v8/src/debug.cc @@ -892,6 +892,16 @@ void Debug::Iterate(ObjectVisitor* v) { } +void Debug::PutValuesOnStackAndDie(int start, + Address c_entry_fp, + Address last_fp, + Address larger_fp, + int count, + int end) { + OS::Abort(); +} + + Object* Debug::Break(Arguments args) { Heap* heap = isolate_->heap(); HandleScope scope(isolate_); @@ -984,11 +994,34 @@ Object* Debug::Break(Arguments args) { // Count frames until target frame int count = 0; JavaScriptFrameIterator it(isolate_); - while (!it.done() && it.frame()->fp() != thread_local_.last_fp_) { + while (!it.done() && it.frame()->fp() < thread_local_.last_fp_) { count++; it.Advance(); } + // Catch the cases that would lead to crashes and capture + // - C entry FP at which to start stack crawl. + // - FP of the frame at which we plan to stop stepping out (last FP). + // - current FP that's larger than last FP. + // - Counter for the number of steps to step out. + if (it.done()) { + // We crawled the entire stack, never reaching last_fp_. + PutValuesOnStackAndDie(0xBEEEEEEE, + frame->fp(), + thread_local_.last_fp_, + NULL, + count, + 0xFEEEEEEE); + } else if (it.frame()->fp() != thread_local_.last_fp_) { + // We crawled over last_fp_, without getting a match. 
+ PutValuesOnStackAndDie(0xBEEEEEEE, + frame->fp(), + thread_local_.last_fp_, + it.frame()->fp(), + count, + 0xFEEEEEEE); + } + // If we found original frame if (it.frame()->fp() == thread_local_.last_fp_) { if (step_count > 1) { @@ -1857,13 +1890,6 @@ static void RedirectActivationsToRecompiledCodeOnThread( // break slots. debug_break_slot_count++; } - if (frame_code->has_self_optimization_header() && - !new_code->has_self_optimization_header()) { - delta -= FullCodeGenerator::self_optimization_header_size(); - } else { - ASSERT(frame_code->has_self_optimization_header() == - new_code->has_self_optimization_header()); - } int debug_break_slot_bytes = debug_break_slot_count * Assembler::kDebugBreakSlotLength; if (FLAG_trace_deopt) { @@ -2234,6 +2260,13 @@ void Debug::FramesHaveBeenDropped(StackFrame::Id new_break_frame_id, } +const int Debug::FramePaddingLayout::kInitialSize = 1; + + +// Any even value bigger than kInitialSize as needed for stack scanning. +const int Debug::FramePaddingLayout::kPaddingValue = kInitialSize + 1; + + bool Debug::IsDebugGlobal(GlobalObject* global) { return IsLoaded() && global == debug_context()->global(); } diff --git a/deps/v8/src/debug.h b/deps/v8/src/debug.h index 474b90bd21..d9c966c37f 100644 --- a/deps/v8/src/debug.h +++ b/deps/v8/src/debug.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -232,6 +232,12 @@ class Debug { void PreemptionWhileInDebugger(); void Iterate(ObjectVisitor* v); + NO_INLINE(void PutValuesOnStackAndDie(int start, + Address c_entry_fp, + Address last_fp, + Address larger_fp, + int count, + int end)); Object* Break(Arguments args); void SetBreakPoint(Handle shared, Handle break_point_object, @@ -245,6 +251,8 @@ class Debug { bool IsBreakOnException(ExceptionBreakType type); void PrepareStep(StepAction step_action, int step_count); void ClearStepping(); + void ClearStepOut(); + bool IsStepping() { return thread_local_.step_count_ > 0; } bool StepNextContinue(BreakLocationIterator* break_location_iterator, JavaScriptFrame* frame); static Handle GetDebugInfo(Handle shared); @@ -455,6 +463,50 @@ class Debug { // Architecture-specific constant. static const bool kFrameDropperSupported; + /** + * Defines layout of a stack frame that supports padding. This is a regular + * internal frame that has a flexible stack structure. LiveEdit can shift + * its lower part up the stack, taking up the 'padding' space when additional + * stack memory is required. + * Such frame is expected immediately above the topmost JavaScript frame. + * + * Stack Layout: + * --- Top + * LiveEdit routine frames + * --- + * C frames of debug handler + * --- + * ... + * --- + * An internal frame that has n padding words: + * - any number of words as needed by code -- upper part of frame + * - padding size: a Smi storing n -- current size of padding + * - padding: n words filled with kPaddingValue in form of Smi + * - 3 context/type words of a regular InternalFrame + * - fp + * --- + * Topmost JavaScript frame + * --- + * ... + * --- Bottom + */ + class FramePaddingLayout : public AllStatic { + public: + // Architecture-specific constant. + static const bool kIsSupported; + + // A size of frame base including fp. 
Padding words starts right above + // the base. + static const int kFrameBaseSize = 4; + + // A number of words that should be reserved on stack for the LiveEdit use. + // Normally equals 1. Stored on stack in form of Smi. + static const int kInitialSize; + // A value that padding words are filled with (in form of Smi). Going + // bottom-top, the first word not having this value is a counter word. + static const int kPaddingValue; + }; + private: explicit Debug(Isolate* isolate); ~Debug(); @@ -464,7 +516,6 @@ class Debug { void ActivateStepIn(StackFrame* frame); void ClearStepIn(); void ActivateStepOut(StackFrame* frame); - void ClearStepOut(); void ClearStepNext(); // Returns whether the compile succeeded. void RemoveDebugInfo(Handle debug_info); diff --git a/deps/v8/src/double.h b/deps/v8/src/double.h index 16a3245e9a..fcf6906af7 100644 --- a/deps/v8/src/double.h +++ b/deps/v8/src/double.h @@ -130,12 +130,6 @@ class Double { return (d64 & kExponentMask) == kExponentMask; } - bool IsNan() const { - uint64_t d64 = AsUint64(); - return ((d64 & kExponentMask) == kExponentMask) && - ((d64 & kSignificandMask) != 0); - } - bool IsInfinite() const { uint64_t d64 = AsUint64(); return ((d64 & kExponentMask) == kExponentMask) && diff --git a/deps/v8/src/elements.cc b/deps/v8/src/elements.cc index 1d043a153e..d367af85ce 100644 --- a/deps/v8/src/elements.cc +++ b/deps/v8/src/elements.cc @@ -199,10 +199,13 @@ static void CopyDictionaryToObjectElements(SeededNumberDictionary* from, } #endif } - ASSERT((copy_size + static_cast(to_start)) <= to->length()); ASSERT(to != from); ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS); if (copy_size == 0) return; + uint32_t to_length = to->length(); + if (to_start + copy_size > to_length) { + copy_size = to_length - to_start; + } for (int i = 0; i < copy_size; i++) { int entry = from->FindEntry(i + from_start); if (entry != SeededNumberDictionary::kNotFound) { @@ -356,8 +359,11 @@ static void 
CopyDictionaryToDoubleElements(SeededNumberDictionary* from, } } } - ASSERT(copy_size + static_cast(to_start) <= to->length()); if (copy_size == 0) return; + uint32_t to_length = to->length(); + if (to_start + copy_size > to_length) { + copy_size = to_length - to_start; + } for (int i = 0; i < copy_size; i++) { int entry = from->FindEntry(i + from_start); if (entry != SeededNumberDictionary::kNotFound) { @@ -418,10 +424,10 @@ class ElementsAccessorBase : public ElementsAccessor { receiver, holder, key, BackingStore::cast(backing_store)); } - virtual MaybeObject* Get(Object* receiver, - JSObject* holder, - uint32_t key, - FixedArrayBase* backing_store) { + MUST_USE_RESULT virtual MaybeObject* Get(Object* receiver, + JSObject* holder, + uint32_t key, + FixedArrayBase* backing_store) { if (backing_store == NULL) { backing_store = holder->elements(); } @@ -429,62 +435,64 @@ class ElementsAccessorBase : public ElementsAccessor { receiver, holder, key, BackingStore::cast(backing_store)); } - static MaybeObject* GetImpl(Object* receiver, - JSObject* obj, - uint32_t key, - BackingStore* backing_store) { + MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver, + JSObject* obj, + uint32_t key, + BackingStore* backing_store) { return (key < ElementsAccessorSubclass::GetCapacityImpl(backing_store)) ? 
backing_store->get(key) : backing_store->GetHeap()->the_hole_value(); } - virtual MaybeObject* SetLength(JSArray* array, - Object* length) { + MUST_USE_RESULT virtual MaybeObject* SetLength(JSArray* array, + Object* length) { return ElementsAccessorSubclass::SetLengthImpl( array, length, BackingStore::cast(array->elements())); } - static MaybeObject* SetLengthImpl(JSObject* obj, - Object* length, - BackingStore* backing_store); + MUST_USE_RESULT static MaybeObject* SetLengthImpl( + JSObject* obj, + Object* length, + BackingStore* backing_store); - virtual MaybeObject* SetCapacityAndLength(JSArray* array, - int capacity, - int length) { + MUST_USE_RESULT virtual MaybeObject* SetCapacityAndLength(JSArray* array, + int capacity, + int length) { return ElementsAccessorSubclass::SetFastElementsCapacityAndLength( array, capacity, length); } - static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj, - int capacity, - int length) { + MUST_USE_RESULT static MaybeObject* SetFastElementsCapacityAndLength( + JSObject* obj, + int capacity, + int length) { UNIMPLEMENTED(); return obj; } - virtual MaybeObject* Delete(JSObject* obj, - uint32_t key, - JSReceiver::DeleteMode mode) = 0; + MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) = 0; - static MaybeObject* CopyElementsImpl(FixedArrayBase* from, - uint32_t from_start, - FixedArrayBase* to, - ElementsKind to_kind, - uint32_t to_start, - int copy_size) { + MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, + uint32_t from_start, + FixedArrayBase* to, + ElementsKind to_kind, + uint32_t to_start, + int copy_size) { UNREACHABLE(); return NULL; } - virtual MaybeObject* CopyElements(JSObject* from_holder, - uint32_t from_start, - FixedArrayBase* to, - ElementsKind to_kind, - uint32_t to_start, - int copy_size, - FixedArrayBase* from) { + MUST_USE_RESULT virtual MaybeObject* CopyElements(JSObject* from_holder, + uint32_t from_start, + 
FixedArrayBase* to, + ElementsKind to_kind, + uint32_t to_start, + int copy_size, + FixedArrayBase* from) { if (from == NULL) { from = from_holder->elements(); } @@ -495,10 +503,11 @@ class ElementsAccessorBase : public ElementsAccessor { from, from_start, to, to_kind, to_start, copy_size); } - virtual MaybeObject* AddElementsToFixedArray(Object* receiver, - JSObject* holder, - FixedArray* to, - FixedArrayBase* from) { + MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray( + Object* receiver, + JSObject* holder, + FixedArray* to, + FixedArrayBase* from) { int len0 = to->length(); #ifdef DEBUG if (FLAG_enable_slow_asserts) { @@ -860,27 +869,28 @@ class ExternalElementsAccessor friend class ElementsAccessorBase >; - static MaybeObject* GetImpl(Object* receiver, - JSObject* obj, - uint32_t key, - BackingStore* backing_store) { + MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver, + JSObject* obj, + uint32_t key, + BackingStore* backing_store) { return key < ExternalElementsAccessorSubclass::GetCapacityImpl(backing_store) ? backing_store->get(key) : backing_store->GetHeap()->undefined_value(); } - static MaybeObject* SetLengthImpl(JSObject* obj, - Object* length, - BackingStore* backing_store) { + MUST_USE_RESULT static MaybeObject* SetLengthImpl( + JSObject* obj, + Object* length, + BackingStore* backing_store) { // External arrays do not support changing their length. UNREACHABLE(); return obj; } - virtual MaybeObject* Delete(JSObject* obj, - uint32_t key, - JSReceiver::DeleteMode mode) { + MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { // External arrays always ignore deletes. return obj->GetHeap()->true_value(); } @@ -996,10 +1006,11 @@ class DictionaryElementsAccessor // Adjusts the length of the dictionary backing store and returns the new // length according to ES5 section 15.4.5.2 behavior. 
- static MaybeObject* SetLengthWithoutNormalize(SeededNumberDictionary* dict, - JSArray* array, - Object* length_object, - uint32_t length) { + MUST_USE_RESULT static MaybeObject* SetLengthWithoutNormalize( + SeededNumberDictionary* dict, + JSArray* array, + Object* length_object, + uint32_t length) { if (length == 0) { // If the length of a slow array is reset to zero, we clear // the array and flush backing storage. This has the added @@ -1051,9 +1062,10 @@ class DictionaryElementsAccessor return length_object; } - static MaybeObject* DeleteCommon(JSObject* obj, - uint32_t key, - JSReceiver::DeleteMode mode) { + MUST_USE_RESULT static MaybeObject* DeleteCommon( + JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { Isolate* isolate = obj->GetIsolate(); Heap* heap = isolate->heap(); FixedArray* backing_store = FixedArray::cast(obj->elements()); @@ -1096,12 +1108,12 @@ class DictionaryElementsAccessor return heap->true_value(); } - static MaybeObject* CopyElementsImpl(FixedArrayBase* from, - uint32_t from_start, - FixedArrayBase* to, - ElementsKind to_kind, - uint32_t to_start, - int copy_size) { + MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, + uint32_t from_start, + FixedArrayBase* to, + ElementsKind to_kind, + uint32_t to_start, + int copy_size) { switch (to_kind) { case FAST_SMI_ONLY_ELEMENTS: case FAST_ELEMENTS: @@ -1125,16 +1137,17 @@ class DictionaryElementsAccessor friend class ElementsAccessorBase >; - virtual MaybeObject* Delete(JSObject* obj, - uint32_t key, - JSReceiver::DeleteMode mode) { + MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { return DeleteCommon(obj, key, mode); } - static MaybeObject* GetImpl(Object* receiver, - JSObject* obj, - uint32_t key, - SeededNumberDictionary* backing_store) { + MUST_USE_RESULT static MaybeObject* GetImpl( + Object* receiver, + JSObject* obj, + uint32_t key, + SeededNumberDictionary* backing_store) { int entry = 
backing_store->FindEntry(key); if (entry != SeededNumberDictionary::kNotFound) { Object* element = backing_store->ValueAt(entry); @@ -1180,10 +1193,10 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase< NonStrictArgumentsElementsAccessor, ElementsKindTraits >; - static MaybeObject* GetImpl(Object* receiver, - JSObject* obj, - uint32_t key, - FixedArray* parameter_map) { + MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver, + JSObject* obj, + uint32_t key, + FixedArray* parameter_map) { Object* probe = GetParameterMapArg(obj, parameter_map, key); if (!probe->IsTheHole()) { Context* context = Context::cast(parameter_map->get(0)); @@ -1210,18 +1223,19 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase< } } - static MaybeObject* SetLengthImpl(JSObject* obj, - Object* length, - FixedArray* parameter_map) { + MUST_USE_RESULT static MaybeObject* SetLengthImpl( + JSObject* obj, + Object* length, + FixedArray* parameter_map) { // TODO(mstarzinger): This was never implemented but will be used once we // correctly implement [[DefineOwnProperty]] on arrays. 
UNIMPLEMENTED(); return obj; } - virtual MaybeObject* Delete(JSObject* obj, - uint32_t key, - JSReceiver::DeleteMode mode) { + MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj, + uint32_t key, + JSReceiver::DeleteMode mode) { FixedArray* parameter_map = FixedArray::cast(obj->elements()); Object* probe = GetParameterMapArg(obj, parameter_map, key); if (!probe->IsTheHole()) { @@ -1240,12 +1254,12 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase< return obj->GetHeap()->true_value(); } - static MaybeObject* CopyElementsImpl(FixedArrayBase* from, - uint32_t from_start, - FixedArrayBase* to, - ElementsKind to_kind, - uint32_t to_start, - int copy_size) { + MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, + uint32_t from_start, + FixedArrayBase* to, + ElementsKind to_kind, + uint32_t to_start, + int copy_size) { FixedArray* parameter_map = FixedArray::cast(from); FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments); @@ -1326,18 +1340,8 @@ ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) { void ElementsAccessor::InitializeOncePerProcess() { - static struct ConcreteElementsAccessors { -#define ACCESSOR_STRUCT(Class, Kind, Store) Class* Kind##_handler; - ELEMENTS_LIST(ACCESSOR_STRUCT) -#undef ACCESSOR_STRUCT - } element_accessors = { -#define ACCESSOR_INIT(Class, Kind, Store) new Class(#Kind), - ELEMENTS_LIST(ACCESSOR_INIT) -#undef ACCESSOR_INIT - }; - static ElementsAccessor* accessor_array[] = { -#define ACCESSOR_ARRAY(Class, Kind, Store) element_accessors.Kind##_handler, +#define ACCESSOR_ARRAY(Class, Kind, Store) new Class(#Kind), ELEMENTS_LIST(ACCESSOR_ARRAY) #undef ACCESSOR_ARRAY }; @@ -1349,9 +1353,17 @@ void ElementsAccessor::InitializeOncePerProcess() { } +void ElementsAccessor::TearDown() { +#define ACCESSOR_DELETE(Class, Kind, Store) delete elements_accessors_[Kind]; + ELEMENTS_LIST(ACCESSOR_DELETE) +#undef 
ACCESSOR_DELETE + elements_accessors_ = NULL; +} + + template -MaybeObject* ElementsAccessorBase:: +MUST_USE_RESULT MaybeObject* ElementsAccessorBase:: SetLengthImpl(JSObject* obj, Object* length, typename ElementsKindTraits::BackingStore* backing_store) { diff --git a/deps/v8/src/elements.h b/deps/v8/src/elements.h index ff97c08324..55d6fa56f0 100644 --- a/deps/v8/src/elements.h +++ b/deps/v8/src/elements.h @@ -60,18 +60,19 @@ class ElementsAccessor { // can optionally pass in the backing store to use for the check, which must // be compatible with the ElementsKind of the ElementsAccessor. If // backing_store is NULL, the holder->elements() is used as the backing store. - virtual MaybeObject* Get(Object* receiver, - JSObject* holder, - uint32_t key, - FixedArrayBase* backing_store = NULL) = 0; + MUST_USE_RESULT virtual MaybeObject* Get( + Object* receiver, + JSObject* holder, + uint32_t key, + FixedArrayBase* backing_store = NULL) = 0; // Modifies the length data property as specified for JSArrays and resizes the // underlying backing store accordingly. The method honors the semantics of // changing array sizes as defined in EcmaScript 5.1 15.4.5.2, i.e. array that // have non-deletable elements can only be shrunk to the size of highest // element that is non-deletable. - virtual MaybeObject* SetLength(JSArray* holder, - Object* new_length) = 0; + MUST_USE_RESULT virtual MaybeObject* SetLength(JSArray* holder, + Object* new_length) = 0; // Modifies both the length and capacity of a JSArray, resizing the underlying // backing store as necessary. This method does NOT honor the semantics of @@ -79,14 +80,14 @@ class ElementsAccessor { // elements. This method should only be called for array expansion OR by // runtime JavaScript code that use InternalArrays and don't care about // EcmaScript 5.1 semantics. 
- virtual MaybeObject* SetCapacityAndLength(JSArray* array, - int capacity, - int length) = 0; + MUST_USE_RESULT virtual MaybeObject* SetCapacityAndLength(JSArray* array, + int capacity, + int length) = 0; // Deletes an element in an object, returning a new elements backing store. - virtual MaybeObject* Delete(JSObject* holder, - uint32_t key, - JSReceiver::DeleteMode mode) = 0; + MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* holder, + uint32_t key, + JSReceiver::DeleteMode mode) = 0; // If kCopyToEnd is specified as the copy_size to CopyElements, it copies all // of elements from source after source_start to the destination array. @@ -101,26 +102,28 @@ class ElementsAccessor { // the source JSObject or JSArray in source_holder. If the holder's backing // store is available, it can be passed in source and source_holder is // ignored. - virtual MaybeObject* CopyElements(JSObject* source_holder, - uint32_t source_start, - FixedArrayBase* destination, - ElementsKind destination_kind, - uint32_t destination_start, - int copy_size, - FixedArrayBase* source = NULL) = 0; - - MaybeObject* CopyElements(JSObject* from_holder, - FixedArrayBase* to, - ElementsKind to_kind, - FixedArrayBase* from = NULL) { + MUST_USE_RESULT virtual MaybeObject* CopyElements( + JSObject* source_holder, + uint32_t source_start, + FixedArrayBase* destination, + ElementsKind destination_kind, + uint32_t destination_start, + int copy_size, + FixedArrayBase* source = NULL) = 0; + + MUST_USE_RESULT MaybeObject* CopyElements(JSObject* from_holder, + FixedArrayBase* to, + ElementsKind to_kind, + FixedArrayBase* from = NULL) { return CopyElements(from_holder, 0, to, to_kind, 0, kCopyToEndAndInitializeToHole, from); } - virtual MaybeObject* AddElementsToFixedArray(Object* receiver, - JSObject* holder, - FixedArray* to, - FixedArrayBase* from = NULL) = 0; + MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray( + Object* receiver, + JSObject* holder, + FixedArray* to, + FixedArrayBase* from 
= NULL) = 0; // Returns a shared ElementsAccessor for the specified ElementsKind. static ElementsAccessor* ForKind(ElementsKind elements_kind) { @@ -131,6 +134,7 @@ class ElementsAccessor { static ElementsAccessor* ForArray(FixedArrayBase* array); static void InitializeOncePerProcess(); + static void TearDown(); protected: friend class NonStrictArgumentsElementsAccessor; diff --git a/deps/v8/src/extensions/externalize-string-extension.cc b/deps/v8/src/extensions/externalize-string-extension.cc index 9fbf329818..50d876136f 100644 --- a/deps/v8/src/extensions/externalize-string-extension.cc +++ b/deps/v8/src/extensions/externalize-string-extension.cc @@ -133,11 +133,8 @@ v8::Handle ExternalizeStringExtension::IsAscii( void ExternalizeStringExtension::Register() { - static ExternalizeStringExtension* externalize_extension = NULL; - if (externalize_extension == NULL) - externalize_extension = new ExternalizeStringExtension; - static v8::DeclareExtension externalize_extension_declaration( - externalize_extension); + static ExternalizeStringExtension externalize_extension; + static v8::DeclareExtension declaration(&externalize_extension); } } } // namespace v8::internal diff --git a/deps/v8/src/extensions/gc-extension.cc b/deps/v8/src/extensions/gc-extension.cc index 573797e174..f921552aaa 100644 --- a/deps/v8/src/extensions/gc-extension.cc +++ b/deps/v8/src/extensions/gc-extension.cc @@ -46,9 +46,8 @@ v8::Handle GCExtension::GC(const v8::Arguments& args) { void GCExtension::Register() { - static GCExtension* gc_extension = NULL; - if (gc_extension == NULL) gc_extension = new GCExtension(); - static v8::DeclareExtension gc_extension_declaration(gc_extension); + static GCExtension gc_extension; + static v8::DeclareExtension declaration(&gc_extension); } } } // namespace v8::internal diff --git a/deps/v8/src/factory.cc b/deps/v8/src/factory.cc index 143099cfb8..6bb7893746 100644 --- a/deps/v8/src/factory.cc +++ b/deps/v8/src/factory.cc @@ -291,6 +291,15 @@ Handle 
Factory::NewGlobalContext() { } +Handle Factory::NewModuleContext(Handle previous, + Handle scope_info) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateModuleContext(*previous, *scope_info), + Context); +} + + Handle Factory::NewFunctionContext(int length, Handle function) { CALL_HEAP_FUNCTION( @@ -324,10 +333,9 @@ Handle Factory::NewWithContext(Handle function, } -Handle Factory::NewBlockContext( - Handle function, - Handle previous, - Handle scope_info) { +Handle Factory::NewBlockContext(Handle function, + Handle previous, + Handle scope_info) { CALL_HEAP_FUNCTION( isolate(), isolate()->heap()->AllocateBlockContext(*function, @@ -537,6 +545,10 @@ Handle Factory::NewFunctionFromSharedFunctionInfo( : isolate()->strict_mode_function_map(), pretenure); + if (function_info->ic_age() != isolate()->heap()->global_ic_age()) { + function_info->ResetForNewContext(isolate()->heap()->global_ic_age()); + } + result->set_context(*context); if (!function_info->bound()) { int number_of_literals = function_info->num_literals(); @@ -924,6 +936,13 @@ Handle Factory::NewJSObject(Handle constructor, } +Handle Factory::NewJSModule() { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateJSModule(), JSModule); +} + + Handle Factory::NewGlobalObject( Handle constructor) { CALL_HEAP_FUNCTION(isolate(), diff --git a/deps/v8/src/factory.h b/deps/v8/src/factory.h index 786d4a983a..06aad1bef6 100644 --- a/deps/v8/src/factory.h +++ b/deps/v8/src/factory.h @@ -162,9 +162,12 @@ class Factory { // Create a global (but otherwise uninitialized) context. Handle NewGlobalContext(); + // Create a module context. + Handle NewModuleContext(Handle previous, + Handle scope_info); + // Create a function context. - Handle NewFunctionContext(int length, - Handle function); + Handle NewFunctionContext(int length, Handle function); // Create a catch context. 
Handle NewCatchContext(Handle function, @@ -177,7 +180,7 @@ class Factory { Handle previous, Handle extension); - // Create a 'block' context. + // Create a block context. Handle NewBlockContext(Handle function, Handle previous, Handle scope_info); @@ -262,6 +265,9 @@ class Factory { // runtime. Handle NewJSObjectFromMap(Handle map); + // JS modules are pretenured. + Handle NewJSModule(); + // JS arrays are pretenured when allocated by the parser. Handle NewJSArray(int capacity, ElementsKind elements_kind = FAST_ELEMENTS, diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h index 75697a8906..62a9782859 100644 --- a/deps/v8/src/flag-definitions.h +++ b/deps/v8/src/flag-definitions.h @@ -132,6 +132,8 @@ public: // Flags for language modes and experimental language features. DEFINE_bool(use_strict, false, "enforce strict mode") +DEFINE_bool(es52_globals, false, + "activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false, "enable harmony block scoping") @@ -165,7 +167,12 @@ DEFINE_bool(eliminate_dead_phis, true, "eliminate dead phis") DEFINE_bool(use_gvn, true, "use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true, "use function inlining") -DEFINE_bool(limit_inlining, true, "limit code size growth from inlining") +DEFINE_int(max_inlined_source_size, 600, + "maximum source size in bytes considered for a single inlining") +DEFINE_int(max_inlined_nodes, 196, + "maximum number of AST nodes considered for a single inlining") +DEFINE_int(max_inlined_nodes_cumulative, 196, + "maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true, "loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true, @@ -188,6 +195,8 @@ DEFINE_bool(trap_on_deopt, false, "put a break point before 
deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true, "deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true, "polymorphic inlining") DEFINE_bool(use_osr, true, "use on-stack replacement") +DEFINE_bool(array_bounds_checks_elimination, true, + "perform array bounds checks elimination") DEFINE_bool(trace_osr, false, "trace on-stack replacement") DEFINE_int(stress_runs, 0, "number of stress runs") diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc index 0571a813f5..e265341b1a 100644 --- a/deps/v8/src/frames.cc +++ b/deps/v8/src/frames.cc @@ -1359,34 +1359,28 @@ InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* // ------------------------------------------------------------------------- int NumRegs(RegList reglist) { - int n = 0; - while (reglist != 0) { - n++; - reglist &= reglist - 1; // clear one bit - } - return n; + return CompilerIntrinsics::CountSetBits(reglist); } struct JSCallerSavedCodeData { - JSCallerSavedCodeData() { - int i = 0; - for (int r = 0; r < kNumRegs; r++) - if ((kJSCallerSaved & (1 << r)) != 0) - reg_code[i++] = r; - - ASSERT(i == kNumJSCallerSaved); - } int reg_code[kNumJSCallerSaved]; }; +JSCallerSavedCodeData caller_saved_code_data; -static LazyInstance::type caller_saved_code_data = - LAZY_INSTANCE_INITIALIZER; +void SetUpJSCallerSavedCodeData() { + int i = 0; + for (int r = 0; r < kNumRegs; r++) + if ((kJSCallerSaved & (1 << r)) != 0) + caller_saved_code_data.reg_code[i++] = r; + + ASSERT(i == kNumJSCallerSaved); +} int JSCallerSavedCode(int n) { ASSERT(0 <= n && n < kNumJSCallerSaved); - return caller_saved_code_data.Get().reg_code[n]; + return caller_saved_code_data.reg_code[n]; } diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h index 9071555197..78cdd0cedb 100644 --- a/deps/v8/src/frames.h +++ b/deps/v8/src/frames.h @@ -40,6 +40,8 @@ typedef uint32_t RegList; // Get the number of registers in a given register list. 
int NumRegs(RegList list); +void SetUpJSCallerSavedCodeData(); + // Return the code of the n-th saved register available to JavaScript. int JSCallerSavedCode(int n); @@ -209,6 +211,9 @@ class StackFrame BASE_EMBEDDED { virtual void SetCallerFp(Address caller_fp) = 0; + // Manually changes value of fp in this object. + void UpdateFp(Address fp) { state_.fp = fp; } + Address* pc_address() const { return state_.pc_address; } // Get the id of this stack frame. diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc index d963979ad8..b8794c0b8b 100644 --- a/deps/v8/src/full-codegen.cc +++ b/deps/v8/src/full-codegen.cc @@ -315,7 +315,6 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) { Handle code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info); code->set_optimizable(info->IsOptimizable() && !info->function()->flags()->Contains(kDontOptimize)); - code->set_self_optimization_header(cgen.has_self_optimization_header_); cgen.PopulateDeoptimizationData(code); cgen.PopulateTypeFeedbackInfo(code); cgen.PopulateTypeFeedbackCells(code); @@ -327,12 +326,10 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) { code->set_compiled_optimizable(info->IsOptimizable()); #endif // ENABLE_DEBUGGER_SUPPORT code->set_allow_osr_at_loop_nesting_level(0); + code->set_profiler_ticks(0); code->set_stack_check_table_offset(table_offset); CodeGenerator::PrintCode(code, info); info->SetCode(code); // May be an empty handle. 
- if (!code.is_null()) { - isolate->runtime_profiler()->NotifyCodeGenerated(code->instruction_size()); - } #ifdef ENABLE_GDB_JIT_INTERFACE if (FLAG_gdbjit && !code.is_null()) { GDBJITLineInfo* lineinfo = @@ -571,88 +568,91 @@ void FullCodeGenerator::DoTest(const TestContext* context) { void FullCodeGenerator::VisitDeclarations( ZoneList* declarations) { - int save_global_count = global_count_; - global_count_ = 0; + ZoneList >* saved_globals = globals_; + ZoneList > inner_globals(10); + globals_ = &inner_globals; AstVisitor::VisitDeclarations(declarations); - - // Batch declare global functions and variables. - if (global_count_ > 0) { - Handle array = - isolate()->factory()->NewFixedArray(2 * global_count_, TENURED); - int length = declarations->length(); - for (int j = 0, i = 0; i < length; i++) { - Declaration* decl = declarations->at(i); - Variable* var = decl->proxy()->var(); - - if (var->IsUnallocated()) { - array->set(j++, *(var->name())); - FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration(); - if (fun_decl == NULL) { - if (var->binding_needs_init()) { - // In case this binding needs initialization use the hole. - array->set_the_hole(j++); - } else { - array->set_undefined(j++); - } - } else { - Handle function = - Compiler::BuildFunctionInfo(fun_decl->fun(), script()); - // Check for stack-overflow exception. - if (function.is_null()) { - SetStackOverflow(); - return; - } - array->set(j++, *function); - } - } - } + if (!globals_->is_empty()) { // Invoke the platform-dependent code generator to do the actual // declaration the global functions and variables. 
+ Handle array = + isolate()->factory()->NewFixedArray(globals_->length(), TENURED); + for (int i = 0; i < globals_->length(); ++i) + array->set(i, *globals_->at(i)); DeclareGlobals(array); } - global_count_ = save_global_count; -} - - -void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) { - EmitDeclaration(decl->proxy(), decl->mode(), NULL); + globals_ = saved_globals; } -void FullCodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) { - EmitDeclaration(decl->proxy(), decl->mode(), decl->fun()); -} - - -void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) { - EmitDeclaration(decl->proxy(), decl->mode(), NULL); -} - - -void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* decl) { - EmitDeclaration(decl->proxy(), decl->mode(), NULL); -} +void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) { + Handle instance = module->interface()->Instance(); + ASSERT(!instance.is_null()); + // Allocate a module context statically. + Block* block = module->body(); + Scope* saved_scope = scope(); + scope_ = block->scope(); + Handle scope_info = scope_->GetScopeInfo(); + + // Generate code for module creation and linking. + Comment cmnt(masm_, "[ ModuleLiteral"); + SetStatementPosition(block); + + if (scope_info->HasContext()) { + // Set up module context. + __ Push(scope_info); + __ Push(instance); + __ CallRuntime(Runtime::kPushModuleContext, 2); + StoreToFrameField( + StandardFrameConstants::kContextOffset, context_register()); + } -void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* decl) { - // TODO(rossberg) -} + { + Comment cmnt(masm_, "[ Declarations"); + VisitDeclarations(scope_->declarations()); + } + scope_ = saved_scope; + if (scope_info->HasContext()) { + // Pop module context. + LoadContextField(context_register(), Context::PREVIOUS_INDEX); + // Update local stack frame context field. 
+ StoreToFrameField( + StandardFrameConstants::kContextOffset, context_register()); + } -void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) { - // TODO(rossberg) + // Populate module instance object. + const PropertyAttributes attr = + static_cast(READ_ONLY | DONT_DELETE | DONT_ENUM); + for (Interface::Iterator it = module->interface()->iterator(); + !it.done(); it.Advance()) { + if (it.interface()->IsModule()) { + Handle value = it.interface()->Instance(); + ASSERT(!value.is_null()); + JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode); + } else { + // TODO(rossberg): set proper getters instead of undefined... + // instance->DefineAccessor(*it.name(), ACCESSOR_GETTER, *getter, attr); + Handle value(isolate()->heap()->undefined_value()); + JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode); + } + } + USE(instance->PreventExtensions()); } void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) { - // TODO(rossberg) + // Nothing to do. + // The instance object is resolved statically through the module's interface. } void FullCodeGenerator::VisitModulePath(ModulePath* module) { - // TODO(rossberg) + // Nothing to do. + // The instance object is resolved statically through the module's interface. } @@ -914,9 +914,9 @@ void FullCodeGenerator::VisitBlock(Block* stmt) { Scope* saved_scope = scope(); // Push a block context when entering a block with block scoped variables. - if (stmt->block_scope() != NULL) { + if (stmt->scope() != NULL) { { Comment cmnt(masm_, "[ Extend block context"); - scope_ = stmt->block_scope(); + scope_ = stmt->scope(); Handle scope_info = scope_->GetScopeInfo(); int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS; __ Push(scope_info); @@ -943,7 +943,7 @@ void FullCodeGenerator::VisitBlock(Block* stmt) { PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); // Pop block context if necessary.
- if (stmt->block_scope() != NULL) { + if (stmt->scope() != NULL) { LoadContextField(context_register(), Context::PREVIOUS_INDEX); // Update local stack frame context field. StoreToFrameField(StandardFrameConstants::kContextOffset, diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h index 58d59862a5..0e0ffe924b 100644 --- a/deps/v8/src/full-codegen.h +++ b/deps/v8/src/full-codegen.h @@ -83,22 +83,17 @@ class FullCodeGenerator: public AstVisitor { scope_(info->scope()), nesting_stack_(NULL), loop_depth_(0), - global_count_(0), + globals_(NULL), context_(NULL), bailout_entries_(info->HasDeoptimizationSupport() ? info->function()->ast_node_count() : 0), stack_checks_(2), // There's always at least one. type_feedback_cells_(info->HasDeoptimizationSupport() ? info->function()->ast_node_count() : 0), - ic_total_count_(0), - has_self_optimization_header_(false) { } + ic_total_count_(0) { } static bool MakeCode(CompilationInfo* info); - // Returns the platform-specific size in bytes of the self-optimization - // header. - static int self_optimization_header_size(); - // Encode state and pc-offset as a BitField. // Only use 30 bits because we encode the result as a smi. class StateField : public BitField { }; @@ -207,7 +202,7 @@ class FullCodeGenerator: public AstVisitor { virtual ~NestedBlock() {} virtual NestedStatement* Exit(int* stack_depth, int* context_length) { - if (statement()->AsBlock()->block_scope() != NULL) { + if (statement()->AsBlock()->scope() != NULL) { ++(*context_length); } return previous_; @@ -418,12 +413,9 @@ class FullCodeGenerator: public AstVisitor { Label* if_true, Label* if_false); - // Platform-specific code for a variable, constant, or function - // declaration. Functions have an initial value. - // Increments global_count_ for unallocated variables. 
- void EmitDeclaration(VariableProxy* proxy, - VariableMode mode, - FunctionLiteral* function); + // If enabled, emit debug code for checking that the current context is + // neither a with nor a catch context. + void EmitDebugCheckDeclarationContext(Variable* variable); // Platform-specific code for checking the stack limit at the back edge of // a loop. @@ -553,12 +545,8 @@ class FullCodeGenerator: public AstVisitor { Handle